Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(707)

Side by Side Diff: src/a64/lithium-a64.cc

Issue 181453002: Reset trunk to 3.24.35.4 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/a64/lithium-a64.h ('k') | src/a64/lithium-codegen-a64.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "lithium-allocator-inl.h"
31 #include "a64/lithium-a64.h"
32 #include "a64/lithium-codegen-a64.h"
33 #include "hydrogen-osr.h"
34
35 namespace v8 {
36 namespace internal {
37
38
// Generate LInstruction::CompileToNative for every concrete lithium
// instruction: each stub simply dispatches to the matching
// LCodeGen::Do<Type> visitor method.
#define DEFINE_COMPILE(type)                           \
  void L##type::CompileToNative(LCodeGen* generator) { \
    generator->Do##type(this);                         \
  }
LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE
45
#ifdef DEBUG
void LInstruction::VerifyCall() {
  // A call clobbers every register, so the allocator can only honour fixed
  // (or non-register) policies on temps and outputs. Inputs may additionally
  // use the use-at-start policy, since they are consumed before the clobber.
  if (Output() != NULL) {
    LUnallocated* output = LUnallocated::cast(Output());
    ASSERT(output->HasFixedPolicy() || !output->HasRegisterPolicy());
  }
  for (UseIterator use_it(this); !use_it.Done(); use_it.Advance()) {
    LUnallocated* use = LUnallocated::cast(use_it.Current());
    ASSERT(use->HasFixedPolicy() || use->IsUsedAtStart());
  }
  for (TempIterator temp_it(this); !temp_it.Done(); temp_it.Advance()) {
    LUnallocated* temp = LUnallocated::cast(temp_it.Current());
    ASSERT(temp->HasFixedPolicy() || !temp->HasRegisterPolicy());
  }
}
#endif
66
67
68 void LLabel::PrintDataTo(StringStream* stream) {
69 LGap::PrintDataTo(stream);
70 LLabel* rep = replacement();
71 if (rep != NULL) {
72 stream->Add(" Dead block replaced with B%d", rep->block_id());
73 }
74 }
75
76
77 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
78 arguments()->PrintTo(stream);
79 stream->Add(" length ");
80 length()->PrintTo(stream);
81 stream->Add(" index ");
82 index()->PrintTo(stream);
83 }
84
85
86 void LBranch::PrintDataTo(StringStream* stream) {
87 stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
88 value()->PrintTo(stream);
89 }
90
91
92 void LCallJSFunction::PrintDataTo(StringStream* stream) {
93 stream->Add("= ");
94 function()->PrintTo(stream);
95 stream->Add("#%d / ", arity());
96 }
97
98
99 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
100 for (int i = 0; i < InputCount(); i++) {
101 InputAt(i)->PrintTo(stream);
102 stream->Add(" ");
103 }
104 stream->Add("#%d / ", arity());
105 }
106
107
108 void LCallNew::PrintDataTo(StringStream* stream) {
109 stream->Add("= ");
110 constructor()->PrintTo(stream);
111 stream->Add(" #%d / ", arity());
112 }
113
114
115 void LCallNewArray::PrintDataTo(StringStream* stream) {
116 stream->Add("= ");
117 constructor()->PrintTo(stream);
118 stream->Add(" #%d / ", arity());
119 ElementsKind kind = hydrogen()->elements_kind();
120 stream->Add(" (%s) ", ElementsKindToString(kind));
121 }
122
123
124 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
125 stream->Add("if class_of_test(");
126 value()->PrintTo(stream);
127 stream->Add(", \"%o\") then B%d else B%d",
128 *hydrogen()->class_name(),
129 true_block_id(),
130 false_block_id());
131 }
132
133
134 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
135 stream->Add("if ");
136 left()->PrintTo(stream);
137 stream->Add(" %s ", Token::String(op()));
138 right()->PrintTo(stream);
139 stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
140 }
141
142
143 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
144 stream->Add("if has_cached_array_index(");
145 value()->PrintTo(stream);
146 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
147 }
148
149
150 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
151 return !gen->IsNextEmittedBlock(block_id());
152 }
153
154
155 void LGoto::PrintDataTo(StringStream* stream) {
156 stream->Add("B%d", block_id());
157 }
158
159
160 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
161 stream->Add(" = ");
162 base_object()->PrintTo(stream);
163 stream->Add(" + ");
164 offset()->PrintTo(stream);
165 }
166
167
168 void LInvokeFunction::PrintDataTo(StringStream* stream) {
169 stream->Add("= ");
170 function()->PrintTo(stream);
171 stream->Add(" #%d / ", arity());
172 }
173
174
175 void LInstruction::PrintTo(StringStream* stream) {
176 stream->Add("%s ", this->Mnemonic());
177
178 PrintOutputOperandTo(stream);
179
180 PrintDataTo(stream);
181
182 if (HasEnvironment()) {
183 stream->Add(" ");
184 environment()->PrintTo(stream);
185 }
186
187 if (HasPointerMap()) {
188 stream->Add(" ");
189 pointer_map()->PrintTo(stream);
190 }
191 }
192
193
194 void LInstruction::PrintDataTo(StringStream* stream) {
195 stream->Add("= ");
196 for (int i = 0; i < InputCount(); i++) {
197 if (i > 0) stream->Add(" ");
198 if (InputAt(i) == NULL) {
199 stream->Add("NULL");
200 } else {
201 InputAt(i)->PrintTo(stream);
202 }
203 }
204 }
205
206
207 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
208 if (HasResult()) result()->PrintTo(stream);
209 }
210
211
212 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
213 stream->Add("if has_instance_type(");
214 value()->PrintTo(stream);
215 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
216 }
217
218
219 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
220 stream->Add("if is_object(");
221 value()->PrintTo(stream);
222 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
223 }
224
225
226 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
227 stream->Add("if is_string(");
228 value()->PrintTo(stream);
229 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
230 }
231
232
233 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
234 stream->Add("if is_smi(");
235 value()->PrintTo(stream);
236 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
237 }
238
239
240 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
241 stream->Add("if typeof ");
242 value()->PrintTo(stream);
243 stream->Add(" == \"%s\" then B%d else B%d",
244 hydrogen()->type_literal()->ToCString().get(),
245 true_block_id(), false_block_id());
246 }
247
248
249 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
250 stream->Add("if is_undetectable(");
251 value()->PrintTo(stream);
252 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
253 }
254
255
256 bool LGap::IsRedundant() const {
257 for (int i = 0; i < 4; i++) {
258 if ((parallel_moves_[i] != NULL) && !parallel_moves_[i]->IsRedundant()) {
259 return false;
260 }
261 }
262
263 return true;
264 }
265
266
267 void LGap::PrintDataTo(StringStream* stream) {
268 for (int i = 0; i < 4; i++) {
269 stream->Add("(");
270 if (parallel_moves_[i] != NULL) {
271 parallel_moves_[i]->PrintDataTo(stream);
272 }
273 stream->Add(") ");
274 }
275 }
276
277
278 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
279 context()->PrintTo(stream);
280 stream->Add("[%d]", slot_index());
281 }
282
283
284 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
285 stream->Add(" = ");
286 function()->PrintTo(stream);
287 stream->Add(".code_entry = ");
288 code_object()->PrintTo(stream);
289 }
290
291
292 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
293 context()->PrintTo(stream);
294 stream->Add("[%d] <- ", slot_index());
295 value()->PrintTo(stream);
296 }
297
298
299 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
300 object()->PrintTo(stream);
301 stream->Add("[");
302 key()->PrintTo(stream);
303 stream->Add("] <- ");
304 value()->PrintTo(stream);
305 }
306
307
308 void LStoreNamedField::PrintDataTo(StringStream* stream) {
309 object()->PrintTo(stream);
310 hydrogen()->access().PrintTo(stream);
311 stream->Add(" <- ");
312 value()->PrintTo(stream);
313 }
314
315
316 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
317 object()->PrintTo(stream);
318 stream->Add(".");
319 stream->Add(String::cast(*name())->ToCString().get());
320 stream->Add(" <- ");
321 value()->PrintTo(stream);
322 }
323
324
325 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
326 stream->Add("if string_compare(");
327 left()->PrintTo(stream);
328 right()->PrintTo(stream);
329 stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
330 }
331
332
333 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
334 object()->PrintTo(stream);
335 stream->Add("%p -> %p", *original_map(), *transitioned_map());
336 }
337
338
339 template<int T>
340 void LUnaryMathOperation<T>::PrintDataTo(StringStream* stream) {
341 value()->PrintTo(stream);
342 }
343
344
345 const char* LArithmeticD::Mnemonic() const {
346 switch (op()) {
347 case Token::ADD: return "add-d";
348 case Token::SUB: return "sub-d";
349 case Token::MUL: return "mul-d";
350 case Token::DIV: return "div-d";
351 case Token::MOD: return "mod-d";
352 default:
353 UNREACHABLE();
354 return NULL;
355 }
356 }
357
358
359 const char* LArithmeticT::Mnemonic() const {
360 switch (op()) {
361 case Token::ADD: return "add-t";
362 case Token::SUB: return "sub-t";
363 case Token::MUL: return "mul-t";
364 case Token::MOD: return "mod-t";
365 case Token::DIV: return "div-t";
366 case Token::BIT_AND: return "bit-and-t";
367 case Token::BIT_OR: return "bit-or-t";
368 case Token::BIT_XOR: return "bit-xor-t";
369 case Token::ROR: return "ror-t";
370 case Token::SHL: return "shl-t";
371 case Token::SAR: return "sar-t";
372 case Token::SHR: return "shr-t";
373 default:
374 UNREACHABLE();
375 return NULL;
376 }
377 }
378
379
380 void LChunkBuilder::Abort(BailoutReason reason) {
381 info()->set_bailout_reason(reason);
382 status_ = ABORTED;
383 }
384
385
386 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
387 return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
388 Register::ToAllocationIndex(reg));
389 }
390
391
392 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
393 return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
394 DoubleRegister::ToAllocationIndex(reg));
395 }
396
397
398 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
399 if (value->EmitAtUses()) {
400 HInstruction* instr = HInstruction::cast(value);
401 VisitInstruction(instr);
402 }
403 operand->set_virtual_register(value->id());
404 return operand;
405 }
406
407
408 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
409 return Use(value, ToUnallocated(fixed_register));
410 }
411
412
413 LOperand* LChunkBuilder::UseFixedDouble(HValue* value,
414 DoubleRegister fixed_register) {
415 return Use(value, ToUnallocated(fixed_register));
416 }
417
418
419 LOperand* LChunkBuilder::UseRegister(HValue* value) {
420 return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
421 }
422
423
424 LOperand* LChunkBuilder::UseRegisterAndClobber(HValue* value) {
425 return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
426 }
427
428
429 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
430 return Use(value,
431 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
432 LUnallocated::USED_AT_START));
433 }
434
435
436 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
437 return value->IsConstant() ? UseConstant(value) : UseRegister(value);
438 }
439
440
441 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
442 return value->IsConstant() ? UseConstant(value) : UseRegisterAtStart(value);
443 }
444
445
446 LConstantOperand* LChunkBuilder::UseConstant(HValue* value) {
447 return chunk_->DefineConstantOperand(HConstant::cast(value));
448 }
449
450
451 LOperand* LChunkBuilder::UseAny(HValue* value) {
452 return value->IsConstant()
453 ? UseConstant(value)
454 : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
455 }
456
457
458 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
459 LUnallocated* result) {
460 result->set_virtual_register(current_instruction_->id());
461 instr->set_result(result);
462 return instr;
463 }
464
465
466 LInstruction* LChunkBuilder::DefineAsRegister(
467 LTemplateResultInstruction<1>* instr) {
468 return Define(instr,
469 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
470 }
471
472
473 LInstruction* LChunkBuilder::DefineAsSpilled(
474 LTemplateResultInstruction<1>* instr, int index) {
475 return Define(instr,
476 new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
477 }
478
479
480 LInstruction* LChunkBuilder::DefineSameAsFirst(
481 LTemplateResultInstruction<1>* instr) {
482 return Define(instr,
483 new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
484 }
485
486
487 LInstruction* LChunkBuilder::DefineFixed(
488 LTemplateResultInstruction<1>* instr, Register reg) {
489 return Define(instr, ToUnallocated(reg));
490 }
491
492
493 LInstruction* LChunkBuilder::DefineFixedDouble(
494 LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
495 return Define(instr, ToUnallocated(reg));
496 }
497
498
// Decorate an instruction that performs a call: flag it as a call, give it
// a pointer map, and set up the bookkeeping needed for lazy deoptimization
// after the call returns. Returns the (possibly re-wrapped) instruction.
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
                                        HInstruction* hinstr,
                                        CanDeoptimize can_deoptimize) {
  info()->MarkAsNonDeferredCalling();
#ifdef DEBUG
  instr->VerifyCall();
#endif
  instr->MarkAsCall();
  instr = AssignPointerMap(instr);

  if (hinstr->HasObservableSideEffects()) {
    // An instruction with observable side effects is followed by a simulate;
    // remember instruction and ast id so a lazy bailout can be inserted
    // after the call returns.
    ASSERT(hinstr->next()->IsSimulate());
    HSimulate* sim = HSimulate::cast(hinstr->next());
    ASSERT(instruction_pending_deoptimization_environment_ == NULL);
    ASSERT(pending_deoptimization_ast_id_.IsNone());
    instruction_pending_deoptimization_environment_ = instr;
    pending_deoptimization_ast_id_ = sim->ast_id();
  }

  // If instruction does not have side-effects lazy deoptimization
  // after the call will try to deoptimize to the point before the call.
  // Thus we still need to attach environment to this call even if
  // call sequence can not deoptimize eagerly.
  bool needs_environment =
      (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
      !hinstr->HasObservableSideEffects();
  if (needs_environment && !instr->HasEnvironment()) {
    instr = AssignEnvironment(instr);
  }

  return instr;
}
531
532
533 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
534 ASSERT(!instr->HasPointerMap());
535 instr->set_pointer_map(new(zone()) LPointerMap(zone()));
536 return instr;
537 }
538
539
540 LUnallocated* LChunkBuilder::TempRegister() {
541 LUnallocated* operand =
542 new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
543 int vreg = allocator_->GetVirtualRegister();
544 if (!allocator_->AllocationOk()) {
545 Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
546 vreg = 0;
547 }
548 operand->set_virtual_register(vreg);
549 return operand;
550 }
551
552
553 int LPlatformChunk::GetNextSpillIndex() {
554 return spill_slot_count_++;
555 }
556
557
558 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
559 int index = GetNextSpillIndex();
560 if (kind == DOUBLE_REGISTERS) {
561 return LDoubleStackSlot::Create(index, zone());
562 } else {
563 ASSERT(kind == GENERAL_REGISTERS);
564 return LStackSlot::Create(index, zone());
565 }
566 }
567
568
569 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
570 LUnallocated* operand = ToUnallocated(reg);
571 ASSERT(operand->HasFixedPolicy());
572 return operand;
573 }
574
575
576 LPlatformChunk* LChunkBuilder::Build() {
577 ASSERT(is_unused());
578 chunk_ = new(zone()) LPlatformChunk(info_, graph_);
579 LPhase phase("L_Building chunk", chunk_);
580 status_ = BUILDING;
581
582 // If compiling for OSR, reserve space for the unoptimized frame,
583 // which will be subsumed into this frame.
584 if (graph()->has_osr()) {
585 // TODO(all): GetNextSpillIndex just increments a field. It has no other
586 // side effects, so we should get rid of this loop.
587 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
588 chunk_->GetNextSpillIndex();
589 }
590 }
591
592 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
593 for (int i = 0; i < blocks->length(); i++) {
594 DoBasicBlock(blocks->at(i));
595 if (is_aborted()) return NULL;
596 }
597 status_ = DONE;
598 return chunk_;
599 }
600
601
// Translate one hydrogen basic block into lithium instructions: set up the
// block's incoming environment (and outgoing argument count), then visit
// each instruction in order. The environment-copy rules here are
// order-sensitive; keep them in sync with the allocator's expectations.
void LChunkBuilder::DoBasicBlock(HBasicBlock* block) {
  ASSERT(is_building());
  current_block_ = block;

  if (block->IsStartBlock()) {
    // The entry block starts from the graph's initial environment.
    block->UpdateEnvironment(graph_->start_environment());
    argument_count_ = 0;
  } else if (block->predecessors()->length() == 1) {
    // We have a single predecessor => copy environment and outgoing
    // argument count from the predecessor.
    ASSERT(block->phis()->length() == 0);
    HBasicBlock* pred = block->predecessors()->at(0);
    HEnvironment* last_environment = pred->last_environment();
    ASSERT(last_environment != NULL);

    // Only copy the environment, if it is later used again.
    if (pred->end()->SecondSuccessor() == NULL) {
      ASSERT(pred->end()->FirstSuccessor() == block);
    } else {
      // The environment is shared with another successor that is processed
      // later (higher block id), so it must be copied before we mutate it.
      if ((pred->end()->FirstSuccessor()->block_id() > block->block_id()) ||
          (pred->end()->SecondSuccessor()->block_id() > block->block_id())) {
        last_environment = last_environment->Copy();
      }
    }
    block->UpdateEnvironment(last_environment);
    ASSERT(pred->argument_count() >= 0);
    argument_count_ = pred->argument_count();
  } else {
    // We are at a state join => process phis.
    HBasicBlock* pred = block->predecessors()->at(0);
    // No need to copy the environment, it cannot be used later.
    HEnvironment* last_environment = pred->last_environment();
    for (int i = 0; i < block->phis()->length(); ++i) {
      HPhi* phi = block->phis()->at(i);
      if (phi->HasMergedIndex()) {
        last_environment->SetValueAt(phi->merged_index(), phi);
      }
    }
    // Deleted phis leave their environment slot holding undefined.
    for (int i = 0; i < block->deleted_phis()->length(); ++i) {
      if (block->deleted_phis()->at(i) < last_environment->length()) {
        last_environment->SetValueAt(block->deleted_phis()->at(i),
                                     graph_->GetConstantUndefined());
      }
    }
    block->UpdateEnvironment(last_environment);
    // Pick up the outgoing argument count of one of the predecessors.
    argument_count_ = pred->argument_count();
  }

  // Translate hydrogen instructions to lithium ones for the current block.
  HInstruction* current = block->first();
  int start = chunk_->instructions()->length();
  while ((current != NULL) && !is_aborted()) {
    // Code for constants in registers is generated lazily.
    if (!current->EmitAtUses()) {
      VisitInstruction(current);
    }
    current = current->next();
  }
  int end = chunk_->instructions()->length() - 1;
  if (end >= start) {
    block->set_first_instruction_index(start);
    block->set_last_instruction_index(end);
  }
  block->set_argument_count(argument_count_);
  current_block_ = NULL;
}
669
670
671 void LChunkBuilder::VisitInstruction(HInstruction* current) {
672 HInstruction* old_current = current_instruction_;
673 current_instruction_ = current;
674
675 LInstruction* instr = NULL;
676 if (current->CanReplaceWithDummyUses()) {
677 if (current->OperandCount() == 0) {
678 instr = DefineAsRegister(new(zone()) LDummy());
679 } else {
680 ASSERT(!current->OperandAt(0)->IsControlInstruction());
681 instr = DefineAsRegister(new(zone())
682 LDummyUse(UseAny(current->OperandAt(0))));
683 }
684 for (int i = 1; i < current->OperandCount(); ++i) {
685 if (current->OperandAt(i)->IsControlInstruction()) continue;
686 LInstruction* dummy =
687 new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
688 dummy->set_hydrogen_value(current);
689 chunk_->AddInstruction(dummy, current_block_);
690 }
691 } else {
692 instr = current->CompileToLithium(this);
693 }
694
695 argument_count_ += current->argument_delta();
696 ASSERT(argument_count_ >= 0);
697
698 if (instr != NULL) {
699 // Associate the hydrogen instruction first, since we may need it for
700 // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
701 instr->set_hydrogen_value(current);
702
703 #if DEBUG
704 // Make sure that the lithium instruction has either no fixed register
705 // constraints in temps or the result OR no uses that are only used at
706 // start. If this invariant doesn't hold, the register allocator can decide
707 // to insert a split of a range immediately before the instruction due to an
708 // already allocated register needing to be used for the instruction's fixed
709 // register constraint. In this case, the register allocator won't see an
710 // interference between the split child and the use-at-start (it would if
711 // the it was just a plain use), so it is free to move the split child into
712 // the same register that is used for the use-at-start.
713 // See https://code.google.com/p/chromium/issues/detail?id=201590
714 if (!(instr->ClobbersRegisters() && instr->ClobbersDoubleRegisters())) {
715 int fixed = 0;
716 int used_at_start = 0;
717 for (UseIterator it(instr); !it.Done(); it.Advance()) {
718 LUnallocated* operand = LUnallocated::cast(it.Current());
719 if (operand->IsUsedAtStart()) ++used_at_start;
720 }
721 if (instr->Output() != NULL) {
722 if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
723 }
724 for (TempIterator it(instr); !it.Done(); it.Advance()) {
725 LUnallocated* operand = LUnallocated::cast(it.Current());
726 if (operand->HasFixedPolicy()) ++fixed;
727 }
728 ASSERT(fixed == 0 || used_at_start == 0);
729 }
730 #endif
731
732 if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
733 instr = AssignPointerMap(instr);
734 }
735 if (FLAG_stress_environments && !instr->HasEnvironment()) {
736 instr = AssignEnvironment(instr);
737 }
738 chunk_->AddInstruction(instr, current_block_);
739 }
740 current_instruction_ = old_current;
741 }
742
743
744 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
745 HEnvironment* hydrogen_env = current_block_->last_environment();
746 int argument_index_accumulator = 0;
747 ZoneList<HValue*> objects_to_materialize(0, zone());
748 instr->set_environment(CreateEnvironment(hydrogen_env,
749 &argument_index_accumulator,
750 &objects_to_materialize));
751 return instr;
752 }
753
754
755 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
756 // The control instruction marking the end of a block that completed
757 // abruptly (e.g., threw an exception). There is nothing specific to do.
758 return NULL;
759 }
760
761
762 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
763 HArithmeticBinaryOperation* instr) {
764 ASSERT(instr->representation().IsDouble());
765 ASSERT(instr->left()->representation().IsDouble());
766 ASSERT(instr->right()->representation().IsDouble());
767
768 if (op == Token::MOD) {
769 LOperand* left = UseFixedDouble(instr->left(), d0);
770 LOperand* right = UseFixedDouble(instr->right(), d1);
771 LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
772 return MarkAsCall(DefineFixedDouble(result, d0), instr);
773 } else {
774 LOperand* left = UseRegisterAtStart(instr->left());
775 LOperand* right = UseRegisterAtStart(instr->right());
776 LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
777 return DefineAsRegister(result);
778 }
779 }
780
781
782 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
783 HBinaryOperation* instr) {
784 ASSERT((op == Token::ADD) || (op == Token::SUB) || (op == Token::MUL) ||
785 (op == Token::DIV) || (op == Token::MOD) || (op == Token::SHR) ||
786 (op == Token::SHL) || (op == Token::SAR) || (op == Token::ROR) ||
787 (op == Token::BIT_OR) || (op == Token::BIT_AND) ||
788 (op == Token::BIT_XOR));
789 HValue* left = instr->left();
790 HValue* right = instr->right();
791
792 // TODO(jbramley): Once we've implemented smi support for all arithmetic
793 // operations, these assertions should check IsTagged().
794 ASSERT(instr->representation().IsSmiOrTagged());
795 ASSERT(left->representation().IsSmiOrTagged());
796 ASSERT(right->representation().IsSmiOrTagged());
797
798 LOperand* context = UseFixed(instr->context(), cp);
799 LOperand* left_operand = UseFixed(left, x1);
800 LOperand* right_operand = UseFixed(right, x0);
801 LArithmeticT* result =
802 new(zone()) LArithmeticT(op, context, left_operand, right_operand);
803 return MarkAsCall(DefineFixed(result, x0), instr);
804 }
805
806
807 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
808 HBoundsCheckBaseIndexInformation* instr) {
809 UNREACHABLE();
810 return NULL;
811 }
812
813
814 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
815 info()->MarkAsRequiresFrame();
816 LOperand* args = NULL;
817 LOperand* length = NULL;
818 LOperand* index = NULL;
819
820 if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
821 args = UseRegisterAtStart(instr->arguments());
822 length = UseConstant(instr->length());
823 index = UseConstant(instr->index());
824 } else {
825 args = UseRegister(instr->arguments());
826 length = UseRegisterAtStart(instr->length());
827 index = UseRegisterOrConstantAtStart(instr->index());
828 }
829
830 return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
831 }
832
833
834 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
835 if (instr->representation().IsSmiOrInteger32()) {
836 ASSERT(instr->left()->representation().Equals(instr->representation()));
837 ASSERT(instr->right()->representation().Equals(instr->representation()));
838 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
839 LOperand* right =
840 UseRegisterOrConstantAtStart(instr->BetterRightOperand());
841 LInstruction* result = instr->representation().IsSmi() ?
842 DefineAsRegister(new(zone()) LAddS(left, right)) :
843 DefineAsRegister(new(zone()) LAddI(left, right));
844 if (instr->CheckFlag(HValue::kCanOverflow)) {
845 result = AssignEnvironment(result);
846 }
847 return result;
848 } else if (instr->representation().IsExternal()) {
849 ASSERT(instr->left()->representation().IsExternal());
850 ASSERT(instr->right()->representation().IsInteger32());
851 ASSERT(!instr->CheckFlag(HValue::kCanOverflow));
852 LOperand* left = UseRegisterAtStart(instr->left());
853 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
854 return DefineAsRegister(new(zone()) LAddE(left, right));
855 } else if (instr->representation().IsDouble()) {
856 return DoArithmeticD(Token::ADD, instr);
857 } else {
858 ASSERT(instr->representation().IsTagged());
859 return DoArithmeticT(Token::ADD, instr);
860 }
861 }
862
863
864 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
865 info()->MarkAsDeferredCalling();
866 LOperand* context = UseAny(instr->context());
867 LOperand* size = UseRegisterOrConstant(instr->size());
868 LOperand* temp1 = TempRegister();
869 LOperand* temp2 = TempRegister();
870 LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2);
871 return AssignPointerMap(DefineAsRegister(result));
872 }
873
874
875 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
876 LOperand* function = UseFixed(instr->function(), x1);
877 LOperand* receiver = UseFixed(instr->receiver(), x0);
878 LOperand* length = UseFixed(instr->length(), x2);
879 LOperand* elements = UseFixed(instr->elements(), x3);
880 LApplyArguments* result = new(zone()) LApplyArguments(function,
881 receiver,
882 length,
883 elements);
884 return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
885 }
886
887
888 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* instr) {
889 info()->MarkAsRequiresFrame();
890 LOperand* temp = instr->from_inlined() ? NULL : TempRegister();
891 return DefineAsRegister(new(zone()) LArgumentsElements(temp));
892 }
893
894
895 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
896 info()->MarkAsRequiresFrame();
897 LOperand* value = UseRegisterAtStart(instr->value());
898 return DefineAsRegister(new(zone()) LArgumentsLength(value));
899 }
900
901
902 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
903 // There are no real uses of the arguments object.
904 // arguments.length and element access are supported directly on
905 // stack arguments, and any real arguments object use causes a bailout.
906 // So this value is never used.
907 return NULL;
908 }
909
910
911 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
912 if (instr->representation().IsSmiOrInteger32()) {
913 ASSERT(instr->left()->representation().Equals(instr->representation()));
914 ASSERT(instr->right()->representation().Equals(instr->representation()));
915 ASSERT(instr->CheckFlag(HValue::kTruncatingToInt32));
916
917 LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
918 LOperand* right =
919 UseRegisterOrConstantAtStart(instr->BetterRightOperand());
920 return instr->representation().IsSmi() ?
921 DefineAsRegister(new(zone()) LBitS(left, right)) :
922 DefineAsRegister(new(zone()) LBitI(left, right));
923 } else {
924 return DoArithmeticT(instr->op(), instr);
925 }
926 }
927
928
929 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
930 // V8 expects a label to be generated for each basic block.
931 // This is used in some places like LAllocator::IsBlockBoundary
932 // in lithium-allocator.cc
933 return new(zone()) LLabel(instr->block());
934 }
935
936
937 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
938 LOperand* value = UseRegisterOrConstantAtStart(instr->index());
939 LOperand* length = UseRegister(instr->length());
940 return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
941 }
942
943
// Compile HBranch: pick an LBranch variant based on how much type
// information is available. The checks below are ordered from cheapest
// (no deopt, no temps) to most general (deopt environment attached).
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
  // A branch whose outcome is statically known is replaced by a goto.
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  HValue* value = instr->value();
  Representation r = value->representation();
  HType type = value->type();

  if (r.IsInteger32() || r.IsSmi() || r.IsDouble()) {
    // These representations have simple checks that cannot deoptimize.
    return new(zone()) LBranch(UseRegister(value), NULL, NULL);
  } else {
    ASSERT(r.IsTagged());
    if (type.IsBoolean() || type.IsSmi() || type.IsJSArray() ||
        type.IsHeapNumber()) {
      // These types have simple checks that cannot deoptimize.
      return new(zone()) LBranch(UseRegister(value), NULL, NULL);
    }

    if (type.IsString()) {
      // This type cannot deoptimize, but needs a scratch register.
      return new(zone()) LBranch(UseRegister(value), TempRegister(), NULL);
    }

    // Type unknown at compile time: dispatch on the expected input types
    // recorded by the ToBoolean stub.
    ToBooleanStub::Types expected = instr->expected_input_types();
    bool needs_temps = expected.NeedsMap() || expected.IsEmpty();
    LOperand* temp1 = needs_temps ? TempRegister() : NULL;
    LOperand* temp2 = needs_temps ? TempRegister() : NULL;

    if (expected.IsGeneric() || expected.IsEmpty()) {
      // The generic case cannot deoptimize because it already supports every
      // possible input type.
      ASSERT(needs_temps);
      return new(zone()) LBranch(UseRegister(value), temp1, temp2);
    } else {
      return AssignEnvironment(
          new(zone()) LBranch(UseRegister(value), temp1, temp2));
    }
  }
}
984
985
// Lowers a direct call to a known JS function. The callee is fixed in x1
// and the result arrives in x0, per the A64 JS calling convention.
LInstruction* LChunkBuilder::DoCallJSFunction(
    HCallJSFunction* instr) {
  LOperand* function = UseFixed(instr->function(), x1);

  LCallJSFunction* result = new(zone()) LCallJSFunction(function);

  return MarkAsCall(DefineFixed(result, x0), instr);
}
994
995
// Lowers a call whose register assignments come from a CallInterfaceDescriptor:
// operand 0 is the call target, subsequent operands are pinned to the
// descriptor's parameter registers.
LInstruction* LChunkBuilder::DoCallWithDescriptor(
    HCallWithDescriptor* instr) {
  const CallInterfaceDescriptor* descriptor = instr->descriptor();

  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
  ops.Add(target, zone());
  // Operand i maps to descriptor parameter i - 1 (the target is not a
  // descriptor parameter).
  for (int i = 1; i < instr->OperandCount(); i++) {
    LOperand* op = UseFixed(instr->OperandAt(i),
        descriptor->GetParameterRegister(i - 1));
    ops.Add(op, zone());
  }

  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(descriptor,
                                                                ops,
                                                                zone());
  return MarkAsCall(DefineFixed(result, x0), instr);
}
1014
1015
// Lowers a call through the CallFunction stub: context in cp, callee in x1,
// result in x0.
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* function = UseFixed(instr->function(), x1);
  LCallFunction* call = new(zone()) LCallFunction(context, function);
  return MarkAsCall(DefineFixed(call, x0), instr);
}
1022
1023
// Lowers `new F(...)` via the CallConstruct stub.
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  // The call to CallConstructStub will expect the constructor to be in x1.
  LOperand* constructor = UseFixed(instr->constructor(), x1);
  LCallNew* result = new(zone()) LCallNew(context, constructor);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
1031
1032
// Lowers `new Array(...)` via the specialized array-construct stub.
LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  // The call to ArrayConstructCode will expect the constructor to be in x1.
  LOperand* constructor = UseFixed(instr->constructor(), x1);
  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
1040
1041
// Lowers a call to a C++ runtime function; result in x0.
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), x0), instr);
}
1046
1047
// Lowers a call to a code stub; result in x0.
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), x0), instr);
}
1052
1053
// A captured (escape-analyzed) object only affects the deopt environment;
// no code is generated for it.
LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
  instr->ReplayEnvironment(current_block_->last_environment());

  // There are no real uses of a captured object.
  return NULL;
}
1060
1061
// Lowers a representation change (HChange). Every (from, to) pair gets its
// own lithium instruction; conversions that can fail (e.g. tagged->int32
// without truncation) are given an environment so they can deoptimize, and
// conversions that may allocate (number tagging) get a pointer map.
LInstruction* LChunkBuilder::DoChange(HChange* instr) {
  Representation from = instr->from();
  Representation to = instr->to();

  if (from.IsSmi()) {
    if (to.IsTagged()) {
      // A smi already is a tagged value; no code needed.
      LOperand* value = UseRegister(instr->value());
      return DefineSameAsFirst(new(zone()) LDummyUse(value));
    }
    // Otherwise treat the smi as a general tagged value below.
    from = Representation::Tagged();
  }

  if (from.IsTagged()) {
    if (to.IsDouble()) {
      // May deoptimize if the value is not a number.
      LOperand* value = UseRegister(instr->value());
      LOperand* temp = TempRegister();
      LNumberUntagD* res = new(zone()) LNumberUntagD(value, temp);
      return AssignEnvironment(DefineAsRegister(res));
    } else if (to.IsSmi()) {
      LOperand* value = UseRegister(instr->value());
      if (instr->value()->type().IsSmi()) {
        // Statically known to be a smi: nothing to check.
        return DefineSameAsFirst(new(zone()) LDummyUse(value));
      }
      return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
    } else {
      ASSERT(to.IsInteger32());
      LInstruction* res = NULL;

      if (instr->value()->type().IsSmi() ||
          instr->value()->representation().IsSmi()) {
        // Smi untagging is a plain shift and cannot fail.
        LOperand* value = UseRegisterAtStart(instr->value());
        res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
      } else {
        LOperand* value = UseRegister(instr->value());
        LOperand* temp1 = TempRegister();
        // The non-truncating path needs a double scratch (d24) to convert
        // via a heap number.
        LOperand* temp2 =
            instr->CanTruncateToInt32() ? TempRegister() : FixedTemp(d24);
        res = DefineAsRegister(new(zone()) LTaggedToI(value, temp1, temp2));
        res = AssignEnvironment(res);
      }

      return res;
    }
  } else if (from.IsDouble()) {
    if (to.IsTagged()) {
      // Boxing a double may allocate a heap number, which can call into
      // the runtime (deferred code), hence the pointer map.
      info()->MarkAsDeferredCalling();
      LOperand* value = UseRegister(instr->value());
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = TempRegister();

      LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
      return AssignPointerMap(DefineAsRegister(result));
    } else {
      ASSERT(to.IsSmi() || to.IsInteger32());
      LOperand* value = UseRegister(instr->value());

      if (instr->CanTruncateToInt32()) {
        // Truncation accepts any double, so it cannot deoptimize.
        LTruncateDoubleToIntOrSmi* result =
            new(zone()) LTruncateDoubleToIntOrSmi(value);
        return DefineAsRegister(result);
      } else {
        // Deoptimizes when the double is not exactly representable.
        LDoubleToIntOrSmi* result = new(zone()) LDoubleToIntOrSmi(value);
        return AssignEnvironment(DefineAsRegister(result));
      }
    }
  } else if (from.IsInteger32()) {
    info()->MarkAsDeferredCalling();
    if (to.IsTagged()) {
      if (instr->value()->CheckFlag(HInstruction::kUint32)) {
        // Large uint32 values need a heap number, which may allocate.
        LOperand* value = UseRegister(instr->value());
        LNumberTagU* result = new(zone()) LNumberTagU(value,
                                                      TempRegister(),
                                                      TempRegister());
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      } else {
        STATIC_ASSERT((kMinInt == Smi::kMinValue) &&
                      (kMaxInt == Smi::kMaxValue));
        LOperand* value = UseRegisterAtStart(instr->value());
        return DefineAsRegister(new(zone()) LSmiTag(value));
      }
    } else if (to.IsSmi()) {
      LOperand* value = UseRegisterAtStart(instr->value());
      if (instr->value()->CheckFlag(HInstruction::kUint32)) {
        // Deoptimizes if the uint32 value does not fit in a smi.
        LUint32ToSmi* result = new(zone()) LUint32ToSmi(value);
        return AssignEnvironment(DefineAsRegister(result));
      } else {
        // This cannot deoptimize because an A64 smi can represent any int32.
        return DefineAsRegister(new(zone()) LInteger32ToSmi(value));
      }
    } else {
      ASSERT(to.IsDouble());
      if (instr->value()->CheckFlag(HInstruction::kUint32)) {
        return DefineAsRegister(
            new(zone()) LUint32ToDouble(UseRegisterAtStart(instr->value())));
      } else {
        return DefineAsRegister(
            new(zone()) LInteger32ToDouble(UseRegisterAtStart(instr->value())));
      }
    }
  }

  UNREACHABLE();
  return NULL;
}
1166
1167
// Lowers a check that a value is a specific object; deoptimizes on mismatch.
LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
  // We only need a temp register if the target is in new space, but we can't
  // dereference the handle to test that here.
  // TODO(all): Check these constraints. The temp register is not always used.
  LOperand* value = UseRegister(instr->value());
  LOperand* temp = TempRegister();
  return AssignEnvironment(new(zone()) LCheckValue(value, temp));
}
1176
1177
// Lowers an instance-type range check; deoptimizes when the type is outside
// the expected range.
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp = TempRegister();
  LInstruction* result = new(zone()) LCheckInstanceType(value, temp);
  return AssignEnvironment(result);
}
1184
1185
// Lowers a map check. If the check has a migration target it may call the
// runtime to migrate the instance (deferred code), so it also needs a
// pointer map.
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
  if (instr->CanOmitMapChecks()) {
    // LCheckMaps does nothing in this case.
    return new(zone()) LCheckMaps(NULL);
  } else {
    LOperand* value = UseRegisterAtStart(instr->value());
    LOperand* temp = TempRegister();

    if (instr->has_migration_target()) {
      info()->MarkAsDeferredCalling();
      LInstruction* result = new(zone()) LCheckMaps(value, temp);
      return AssignPointerMap(AssignEnvironment(result));
    } else {
      return AssignEnvironment(new(zone()) LCheckMaps(value, temp));
    }
  }
}
1203
1204
// Lowers a heap-object check (i.e. a non-smi check); deoptimizes on a smi.
LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}
1209
1210
// Lowers a smi check; deoptimizes when the value is a heap object.
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  return AssignEnvironment(new(zone()) LCheckSmi(value));
}
1215
1216
// Lowers clamping a value to the uint8 range [0, 255], as needed for
// Uint8Clamped array stores. Only the tagged case can deoptimize.
LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
  HValue* value = instr->value();
  Representation input_rep = value->representation();
  LOperand* reg = UseRegister(value);
  if (input_rep.IsDouble()) {
    return DefineAsRegister(new(zone()) LClampDToUint8(reg));
  } else if (input_rep.IsInteger32()) {
    return DefineAsRegister(new(zone()) LClampIToUint8(reg));
  } else {
    ASSERT(input_rep.IsSmiOrTagged());
    // The tagged path needs a double scratch (d24) to handle heap numbers,
    // and deoptimizes for non-number inputs.
    return AssignEnvironment(
        DefineAsRegister(new(zone()) LClampTToUint8(reg,
                                                    TempRegister(),
                                                    FixedTemp(d24))));
  }
}
1233
1234
// Lowers a branch on an object's class name (two scratch registers needed
// for the map/constructor walk).
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
    HClassOfTestAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return new(zone()) LClassOfTestAndBranch(value,
                                           TempRegister(),
                                           TempRegister());
}
1243
1244
1245 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1246 HCompareNumericAndBranch* instr) {
1247 Representation r = instr->representation();
1248
1249 if (r.IsSmiOrInteger32()) {
1250 ASSERT(instr->left()->representation().Equals(r));
1251 ASSERT(instr->right()->representation().Equals(r));
1252 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1253 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1254 return new(zone()) LCompareNumericAndBranch(left, right);
1255 } else {
1256 ASSERT(r.IsDouble());
1257 ASSERT(instr->left()->representation().IsDouble());
1258 ASSERT(instr->right()->representation().IsDouble());
1259 // TODO(all): In fact the only case that we can handle more efficiently is
1260 // when one of the operand is the constant 0. Currently the MacroAssembler
1261 // will be able to cope with any constant by loading it into an internal
1262 // scratch register. This means that if the constant is used more that once,
1263 // it will be loaded multiple times. Unfortunatly crankshaft already
1264 // duplicates constant loads, but we should modify the code below once this
1265 // issue has been addressed in crankshaft.
1266 LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1267 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1268 return new(zone()) LCompareNumericAndBranch(left, right);
1269 }
1270 }
1271
1272
// Lowers a generic (tagged) comparison via the CompareIC: operands fixed in
// x1/x0, result in x0.
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), x1);
  LOperand* right = UseFixed(instr->right(), x0);
  LCmpT* result = new(zone()) LCmpT(context, left, right);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
1282
1283
// Lowers a branch on "is the hole value". Tagged values compare against the
// hole object; doubles compare against the hole NaN bit pattern (needs a
// scratch register).
LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
    HCompareHoleAndBranch* instr) {
  LOperand* value = UseRegister(instr->value());
  if (instr->representation().IsTagged()) {
    return new(zone()) LCmpHoleAndBranchT(value);
  } else {
    LOperand* temp = TempRegister();
    return new(zone()) LCmpHoleAndBranchD(value, temp);
  }
}
1294
1295
// Lowers a reference-equality branch between two tagged values.
LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
    HCompareObjectEqAndBranch* instr) {
  // A branch between two identical targets collapses to a goto.
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  LOperand* left = UseRegisterAtStart(instr->left());
  LOperand* right = UseRegisterAtStart(instr->right());
  return new(zone()) LCmpObjectEqAndBranch(left, right);
}
1305
1306
// Lowers a branch on an object's map (scratch needed to load the map).
LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
  // A branch between two identical targets collapses to a goto.
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp = TempRegister();
  return new(zone()) LCmpMapAndBranch(value, temp);
}
1316
1317
// Lowers a constant: one lithium constant instruction per representation.
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
  Representation r = instr->representation();
  if (r.IsSmi()) {
    return DefineAsRegister(new(zone()) LConstantS);
  } else if (r.IsInteger32()) {
    return DefineAsRegister(new(zone()) LConstantI);
  } else if (r.IsDouble()) {
    return DefineAsRegister(new(zone()) LConstantD);
  } else if (r.IsExternal()) {
    return DefineAsRegister(new(zone()) LConstantE);
  } else if (r.IsTagged()) {
    return DefineAsRegister(new(zone()) LConstantT);
  } else {
    UNREACHABLE();
    return NULL;
  }
}
1335
1336
1337 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1338 if (instr->HasNoUses()) return NULL;
1339
1340 if (info()->IsStub()) {
1341 return DefineFixed(new(zone()) LContext, cp);
1342 }
1343
1344 return DefineAsRegister(new(zone()) LContext);
1345 }
1346
1347
// Lowers a Date field read, which calls the runtime and may deoptimize
// eagerly (e.g. when the receiver is not a Date).
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
  LOperand* object = UseFixed(instr->value(), x0);
  LDateField* result = new(zone()) LDateField(object, instr->index());
  return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
1353
1354
// Lowers a debugger breakpoint.
LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
  return new(zone()) LDebugBreak();
}
1358
1359
// Lowers global variable/function declarations (runtime call).
LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
}
1364
1365
// Lowers an unconditional deoptimization point.
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
  return AssignEnvironment(new(zone()) LDeoptimize);
}
1369
1370
// Lowers division. Int32 division deoptimizes when the result is inexact
// (or hits other corner cases), so both int32 forms carry an environment;
// double and tagged divisions go through the arithmetic helpers.
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
  if (instr->representation().IsInteger32()) {
    // TODO(all): Update this case to support smi inputs.
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    if (instr->RightIsPowerOf2()) {
      // Power-of-2 divisors are compiled to shifts; zero is impossible here.
      ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
      LOperand* value = UseRegister(instr->left());
      LDivI* div = new(zone()) LDivI(value, UseConstant(instr->right()), NULL);
      return AssignEnvironment(DefineAsRegister(div));
    }
    LOperand* dividend = UseRegister(instr->left());
    LOperand* divisor = UseRegister(instr->right());
    // The temp is only needed to check for a non-zero remainder when the
    // result is not truncated.
    LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
        ? NULL : TempRegister();
    LDivI* div = new(zone()) LDivI(dividend, divisor, temp);
    return AssignEnvironment(DefineAsRegister(div));
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::DIV, instr);
  } else {
    return DoArithmeticT(Token::DIV, instr);
  }
}
1394
1395
// Lowers a dummy use that keeps a value alive for the register allocator.
LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
}
1399
1400
// Records entry into an inlined function: builds the inner environment for
// deoptimization and registers the inlined closure. Generates no code.
LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
  HEnvironment* outer = current_block_->last_environment();
  HConstant* undefined = graph()->GetConstantUndefined();
  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
                                               instr->arguments_count(),
                                               instr->function(),
                                               undefined,
                                               instr->inlining_kind());
  // Only replay binding of arguments object if it wasn't removed from graph.
  if ((instr->arguments_var() != NULL) &&
      instr->arguments_object()->IsLinked()) {
    inner->Bind(instr->arguments_var(), instr->arguments_object());
  }
  inner->set_entry(instr);
  current_block_->UpdateEnvironment(inner);
  chunk_->AddInlinedClosure(instr->closure());
  return NULL;
}
1419
1420
// Environment markers must be removed before lowering; reaching here is a bug.
LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
  UNREACHABLE();
  return NULL;
}
1425
1426
LInstruction* LChunkBuilder::DoForceRepresentation(
    HForceRepresentation* instr) {
  // All HForceRepresentation instructions should be eliminated in the
  // representation change phase of Hydrogen.
  UNREACHABLE();
  return NULL;
}
1434
1435
// Lowers a function literal (closure allocation via runtime/stub call).
LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  return MarkAsCall(
      DefineFixed(new(zone()) LFunctionLiteral(context), x0), instr);
}
1441
1442
// Lowers extraction of the cached array index from a string's hash field.
LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
    HGetCachedArrayIndex* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}
1449
1450
// Lowers an unconditional jump to the block's single successor.
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
  return new(zone()) LGoto(instr->FirstSuccessor());
}
1454
1455
// Lowers a branch on whether a string has a cached array index.
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
    HHasCachedArrayIndexAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LHasCachedArrayIndexAndBranch(
      UseRegisterAtStart(instr->value()), TempRegister());
}
1462
1463
// Lowers a branch on an object's instance type (scratch needed for the map).
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
    HHasInstanceTypeAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return new(zone()) LHasInstanceTypeAndBranch(value, TempRegister());
}
1470
1471
// Lowers computing the address of an object nested inside a folded
// allocation (base + offset).
LInstruction* LChunkBuilder::DoInnerAllocatedObject(
    HInnerAllocatedObject* instr) {
  LOperand* base_object = UseRegisterAtStart(instr->base_object());
  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
  return DefineAsRegister(
      new(zone()) LInnerAllocatedObject(base_object, offset));
}
1479
1480
// Lowers `instanceof` via InstanceofStub; operands pinned to the stub's
// expected registers, result in x0.
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LInstanceOf* result = new(zone()) LInstanceOf(
      context,
      UseFixed(instr->left(), InstanceofStub::left()),
      UseFixed(instr->right(), InstanceofStub::right()));
  return MarkAsCall(DefineFixed(result, x0), instr);
}
1489
1490
// Lowers `instanceof` against a known global function (the right-hand side
// is baked into the stub, so only the left operand is passed).
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
    HInstanceOfKnownGlobal* instr) {
  LInstanceOfKnownGlobal* result = new(zone()) LInstanceOfKnownGlobal(
      UseFixed(instr->context(), cp),
      UseFixed(instr->left(), InstanceofStub::left()));
  return MarkAsCall(DefineFixed(result, x0), instr);
}
1498
1499
// Lowers an invoke of a (possibly unknown) function with arity adaptation.
LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  // The function is required (by MacroAssembler::InvokeFunction) to be in x1.
  LOperand* function = UseFixed(instr->function(), x1);
  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
  return MarkAsCall(DefineFixed(result, x0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
1507
1508
// Lowers a branch on "was this frame entered via [[Construct]]?"; two
// scratch registers are needed for the frame walk.
LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
    HIsConstructCallAndBranch* instr) {
  return new(zone()) LIsConstructCallAndBranch(TempRegister(), TempRegister());
}
1513
1514
// Lowers a branch on "is the value -0.0?".
LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
    HCompareMinusZeroAndBranch* instr) {
  // A branch between two identical targets collapses to a goto.
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;
  LOperand* value = UseRegister(instr->value());
  LOperand* scratch = TempRegister();
  return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
}
1523
1524
// Lowers a branch on "is the value a JS object?" (two scratch registers).
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  return new(zone()) LIsObjectAndBranch(value, temp1, temp2);
}
1532
1533
// Lowers a branch on "is the value a string?".
LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* temp = TempRegister();
  return new(zone()) LIsStringAndBranch(value, temp);
}
1540
1541
// Lowers a branch on "is the value a smi?" (a simple tag test).
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  return new(zone()) LIsSmiAndBranch(UseRegisterAtStart(instr->value()));
}
1546
1547
// Lowers a branch on the undetectable map bit (e.g. document.all).
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
    HIsUndetectableAndBranch* instr) {
  ASSERT(instr->value()->representation().IsTagged());
  LOperand* value = UseRegisterAtStart(instr->value());
  return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
}
1554
1555
// Records leaving an inlined function: pops pushed arguments (if any) and
// restores the outer environment.
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
  LInstruction* pop = NULL;
  HEnvironment* env = current_block_->last_environment();

  if (env->entry()->arguments_pushed()) {
    // Arguments were materialized on the stack; drop them on exit.
    int argument_count = env->arguments_environment()->parameter_count();
    pop = new(zone()) LDrop(argument_count);
    ASSERT(instr->argument_delta() == -argument_count);
  }

  HEnvironment* outer =
      current_block_->last_environment()->DiscardInlined(false);
  current_block_->UpdateEnvironment(outer);

  return pop;
}
1572
1573
// Lowers a context slot load; a required hole check deoptimizes.
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
  LOperand* context = UseRegisterAtStart(instr->value());
  LInstruction* result =
      DefineAsRegister(new(zone()) LLoadContextSlot(context));
  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
1580
1581
// Lowers loading a function's prototype; deoptimizes when the prototype is
// not usable (hence the environment).
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
    HLoadFunctionPrototype* instr) {
  LOperand* function = UseRegister(instr->function());
  LOperand* temp = TempRegister();
  return AssignEnvironment(DefineAsRegister(
      new(zone()) LLoadFunctionPrototype(function, temp)));
}
1589
1590
1591 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
1592 LLoadGlobalCell* result = new(zone()) LLoadGlobalCell();
1593 return instr->RequiresHoleCheck()
1594 ? AssignEnvironment(DefineAsRegister(result))
1595 : DefineAsRegister(result);
1596 }
1597
1598
// Lowers a generic global load through the load IC: receiver in x0,
// result in x0.
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* global_object = UseFixed(instr->global_object(), x0);
  LLoadGlobalGeneric* result =
      new(zone()) LLoadGlobalGeneric(context, global_object);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
1606
1607
// Lowers a keyed element load. Fixed (in-object) arrays split into double
// and tagged/int32 variants; typed/external arrays use LLoadKeyedExternal.
// A temp register is needed whenever the element address cannot be formed
// from a constant key alone, and loads that can observe holes (or overflow
// a uint32) carry an environment so they can deoptimize.
LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
  ASSERT(instr->key()->representation().IsSmiOrInteger32());
  ElementsKind elements_kind = instr->elements_kind();
  LOperand* elements = UseRegister(instr->elements());
  LOperand* key = UseRegisterOrConstantAtStart(instr->key());

  if (!instr->is_typed_elements()) {
    if (instr->representation().IsDouble()) {
      // Non-constant key or a hole check both require address arithmetic
      // in a scratch register.
      LOperand* temp = (!instr->key()->IsConstant() ||
                        instr->RequiresHoleCheck())
             ? TempRegister()
             : NULL;

      LLoadKeyedFixedDouble* result =
          new(zone()) LLoadKeyedFixedDouble(elements, key, temp);
      return instr->RequiresHoleCheck()
          ? AssignEnvironment(DefineAsRegister(result))
          : DefineAsRegister(result);
    } else {
      ASSERT(instr->representation().IsSmiOrTagged() ||
             instr->representation().IsInteger32());
      LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
      LLoadKeyedFixed* result =
          new(zone()) LLoadKeyedFixed(elements, key, temp);
      return instr->RequiresHoleCheck()
          ? AssignEnvironment(DefineAsRegister(result))
          : DefineAsRegister(result);
    }
  } else {
    ASSERT((instr->representation().IsInteger32() &&
            !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
           (instr->representation().IsDouble() &&
            IsDoubleOrFloatElementsKind(instr->elements_kind())));

    LOperand* temp = instr->key()->IsConstant() ? NULL : TempRegister();
    LLoadKeyedExternal* result =
        new(zone()) LLoadKeyedExternal(elements, key, temp);
    // An unsigned int array load might overflow and cause a deopt. Make sure it
    // has an environment.
    if (instr->RequiresHoleCheck() ||
        elements_kind == EXTERNAL_UINT32_ELEMENTS ||
        elements_kind == UINT32_ELEMENTS) {
      return AssignEnvironment(DefineAsRegister(result));
    } else {
      return DefineAsRegister(result);
    }
  }
}
1656
1657
// Lowers a generic keyed load through the keyed-load IC: receiver in x1,
// key in x0, result in x0.
LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object = UseFixed(instr->object(), x1);
  LOperand* key = UseFixed(instr->key(), x0);

  LInstruction* result =
      DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key), x0);
  return MarkAsCall(result, instr);
}
1667
1668
// Lowers a monomorphic in-object/backing-store field load.
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
  LOperand* object = UseRegisterAtStart(instr->object());
  return DefineAsRegister(new(zone()) LLoadNamedField(object));
}
1673
1674
// Lowers a generic named load through the load IC: receiver in x0,
// result in x0.
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object = UseFixed(instr->object(), x0);
  LInstruction* result =
      DefineFixed(new(zone()) LLoadNamedGeneric(context, object), x0);
  return MarkAsCall(result, instr);
}
1682
1683
// Lowers a load from the roots array.
LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
  return DefineAsRegister(new(zone()) LLoadRoot);
}
1687
1688
// Lowers reading the enum-cache length out of a map.
LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
  LOperand* map = UseRegisterAtStart(instr->value());
  return DefineAsRegister(new(zone()) LMapEnumLength(map));
}
1693
1694
// Lowers Math.floor(a / b) on integers; the temp holds the remainder used
// to adjust toward negative infinity, and corner cases deoptimize.
LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
  HValue* right = instr->right();
  LOperand* dividend = UseRegister(instr->left());
  LOperand* divisor = UseRegister(right);
  LOperand* remainder = TempRegister();
  return AssignEnvironment(DefineAsRegister(
      new(zone()) LMathFloorOfDiv(dividend, divisor, remainder)));
}
1703
1704
// Lowers Math.min/Math.max for smi/int32 and double operands. Neither form
// can deoptimize, so no environment is assigned.
LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
  LOperand* left = NULL;
  LOperand* right = NULL;
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));
    // A constant (if any) goes on the right, where it can be an immediate.
    left = UseRegisterAtStart(instr->BetterLeftOperand());
    right = UseRegisterOrConstantAtStart(instr->BetterRightOperand());
  } else {
    ASSERT(instr->representation().IsDouble());
    ASSERT(instr->left()->representation().IsDouble());
    ASSERT(instr->right()->representation().IsDouble());
    left = UseRegisterAtStart(instr->left());
    right = UseRegisterAtStart(instr->right());
  }
  return DefineAsRegister(new(zone()) LMathMinMax(left, right));
}
1722
1723
1724 LInstruction* LChunkBuilder::DoMod(HMod* hmod) {
1725 HValue* hleft = hmod->left();
1726 HValue* hright = hmod->right();
1727
1728 // TODO(jbramley): Add smi support.
1729 if (hmod->representation().IsInteger32()) {
1730 ASSERT(hleft->representation().IsInteger32());
1731 ASSERT(hleft->representation().IsInteger32());
1732 LOperand* left_op;
1733 LOperand* right_op;
1734
1735 if (hmod->RightIsPowerOf2()) {
1736 left_op = UseRegisterAtStart(hleft);
1737 right_op = UseConstant(hright);
1738 } else {
1739 right_op = UseRegister(hright);
1740 left_op = UseRegister(hleft);
1741 }
1742
1743 LModI* lmod = new(zone()) LModI(left_op, right_op);
1744
1745 if (hmod->right()->CanBeZero() ||
1746 (hmod->CheckFlag(HValue::kBailoutOnMinusZero) &&
1747 hmod->left()->CanBeNegative() && hmod->CanBeZero())) {
1748 AssignEnvironment(lmod);
1749 }
1750 return DefineAsRegister(lmod);
1751
1752 } else if (hmod->representation().IsSmiOrTagged()) {
1753 return DoArithmeticT(Token::MOD, hmod);
1754 } else {
1755 return DoArithmeticD(Token::MOD, hmod);
1756 }
1757 }
1758
1759
// Lowers multiplication. A constant operand in a supported range uses the
// cheaper LMulConstIS; otherwise the general LMulS/LMulI is used. An
// environment is assigned only when overflow or -0 must be caught.
LInstruction* LChunkBuilder::DoMul(HMul* instr) {
  if (instr->representation().IsSmiOrInteger32()) {
    ASSERT(instr->left()->representation().Equals(instr->representation()));
    ASSERT(instr->right()->representation().Equals(instr->representation()));

    bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
    bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
    bool needs_environment = can_overflow || bailout_on_minus_zero;

    // Put a constant operand (if any) on the right.
    HValue* least_const = instr->BetterLeftOperand();
    HValue* most_const = instr->BetterRightOperand();

    LOperand* left = UseRegisterAtStart(least_const);

    // LMulConstI can handle a subset of constants:
    //  With support for overflow detection:
    //    -1, 0, 1, 2
    //  Without support for overflow detection:
    //    2^n, -(2^n)
    //    2^n + 1, -(2^n - 1)
    if (most_const->IsConstant()) {
      int32_t constant = HConstant::cast(most_const)->Integer32Value();
      int32_t constant_abs = (constant >= 0) ? constant : -constant;

      if (((constant >= -1) && (constant <= 2)) ||
          (!can_overflow && (IsPowerOf2(constant_abs) ||
                             IsPowerOf2(constant_abs + 1) ||
                             IsPowerOf2(constant_abs - 1)))) {
        LConstantOperand* right = UseConstant(most_const);
        LMulConstIS* mul = new(zone()) LMulConstIS(left, right);
        if (needs_environment) AssignEnvironment(mul);
        return DefineAsRegister(mul);
      }
    }

    // LMulI/S can handle all cases, but it requires that a register is
    // allocated for the second operand.
    LInstruction* result;
    if (instr->representation().IsSmi()) {
      // TODO(jbramley/rmcilroy): Fix LMulS so we can UseRegisterAtStart here.
      LOperand* right = UseRegister(most_const);
      result = DefineAsRegister(new(zone()) LMulS(left, right));
    } else {
      LOperand* right = UseRegisterAtStart(most_const);
      result = DefineAsRegister(new(zone()) LMulI(left, right));
    }
    if (needs_environment) AssignEnvironment(result);
    return result;
  } else if (instr->representation().IsDouble()) {
    return DoArithmeticD(Token::MUL, instr);
  } else {
    return DoArithmeticT(Token::MUL, instr);
  }
}
1814
1815
// On-stack-replacement entry point: records the entry with the register
// allocator and tags the current environment with the OSR ast id so deopts
// map back to the matching unoptimized-frame state.
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
  // No arguments may be pending on the stack at an OSR entry.
  ASSERT(argument_count_ == 0);
  allocator_->MarkAsOsrEntry();
  current_block_->last_environment()->set_ast_id(instr->ast_id());
  return AssignEnvironment(new(zone()) LOsrEntry);
}
1822
1823
// Places an incoming parameter: stack parameters live in their caller-frame
// spill slot; register parameters (stubs only) are pinned to the register
// dictated by the stub's interface descriptor.
LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
  LParameter* result = new(zone()) LParameter;
  if (instr->kind() == HParameter::STACK_PARAMETER) {
    int spill_index = chunk_->GetParameterStackSlot(instr->index());
    return DefineAsSpilled(result, spill_index);
  } else {
    // Register parameters only occur when compiling a code stub.
    ASSERT(info()->IsStub());
    CodeStubInterfaceDescriptor* descriptor =
        info()->code_stub()->GetInterfaceDescriptor(info()->isolate());
    int index = static_cast<int>(instr->index());
    Register reg = descriptor->GetParameterRegister(index);
    return DefineFixed(result, reg);
  }
}
1838
1839
// Lowers Math.pow. The operation is implemented via a call, so all operands
// and the result are pinned to fixed registers.
LInstruction* LChunkBuilder::DoPower(HPower* instr) {
  ASSERT(instr->representation().IsDouble());
  // We call a C function for double power. It can't trigger a GC.
  // We need to use fixed result register for the call.
  Representation exponent_type = instr->right()->representation();
  ASSERT(instr->left()->representation().IsDouble());
  LOperand* left = UseFixedDouble(instr->left(), d0);
  // The exponent register depends on its representation: integer exponents
  // go in x12, double exponents in d1, tagged exponents in x11 — presumably
  // matching the power-stub calling convention; confirm against codegen.
  LOperand* right = exponent_type.IsInteger32()
      ? UseFixed(instr->right(), x12)
      : exponent_type.IsDouble()
          ? UseFixedDouble(instr->right(), d1)
          : UseFixed(instr->right(), x11);
  LPower* result = new(zone()) LPower(left, right);
  // CAN_DEOPTIMIZE_EAGERLY: the call site itself may trigger a deopt.
  return MarkAsCall(DefineFixedDouble(result, d0),
                    instr,
                    CAN_DEOPTIMIZE_EAGERLY);
}
1857
1858
1859 LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
1860 LOperand* argument = UseRegister(instr->argument());
1861 return new(zone()) LPushArgument(argument);
1862 }
1863
1864
1865 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
1866 LOperand* context = UseFixed(instr->context(), cp);
1867 return MarkAsCall(
1868 DefineFixed(new(zone()) LRegExpLiteral(context), x0), instr);
1869 }
1870
1871
// Lowers a function return. The return value is pinned to x0; stubs also
// carry their context in cp.
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
  LOperand* context = info()->IsStub()
      ? UseFixed(instr->context(), cp)
      : NULL;
  // The parameter count is needed to pop a dynamic number of arguments
  // (e.g. for stubs); a constant is allowed.
  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
  return new(zone()) LReturn(UseFixed(instr->value(), x0), context,
                             parameter_count);
}
1880
1881
// Lowers a character load from a sequential string.
LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
  // TODO(all): Use UseRegisterAtStart and UseRegisterOrConstantAtStart here.
  // We cannot do it now because the debug code in the implementation changes
  // temp.
  LOperand* string = UseRegister(instr->string());
  LOperand* index = UseRegisterOrConstant(instr->index());
  LOperand* temp = TempRegister();
  LSeqStringGetChar* result =
      new(zone()) LSeqStringGetChar(string, index, temp);
  return DefineAsRegister(result);
}
1893
1894
// Lowers a character store into a sequential string. With --debug-code the
// codegen performs extra checks, which require the index in a register and
// the context in cp.
LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
  LOperand* string = UseRegister(instr->string());
  LOperand* index = FLAG_debug_code
      ? UseRegister(instr->index())
      : UseRegisterOrConstant(instr->index());
  LOperand* value = UseRegister(instr->value());
  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
  LOperand* temp = TempRegister();
  LSeqStringSetChar* result =
      new(zone()) LSeqStringSetChar(context, string, index, value, temp);
  return DefineAsRegister(result);
}
1907
1908
// Shared lowering for all shift/rotate operations (ROR/SAR/SHL/SHR).
// Integer32 shifts become LShiftI, Smi shifts LShiftS (which may need a
// temp for ROR); tagged shifts go through the generic binary-op path.
LInstruction* LChunkBuilder::DoShift(Token::Value op,
                                     HBitwiseBinaryOperation* instr) {
  if (instr->representation().IsTagged()) {
    return DoArithmeticT(op, instr);
  }

  ASSERT(instr->representation().IsInteger32() ||
         instr->representation().IsSmi());
  ASSERT(instr->left()->representation().Equals(instr->representation()));
  ASSERT(instr->right()->representation().Equals(instr->representation()));

  LOperand* left = instr->representation().IsSmi()
      ? UseRegister(instr->left())
      : UseRegisterAtStart(instr->left());

  HValue* right_value = instr->right();
  LOperand* right = NULL;
  LOperand* temp = NULL;
  int constant_value = 0;
  if (right_value->IsConstant()) {
    right = UseConstant(right_value);
    // Only the low five bits of the shift amount are significant.
    HConstant* constant = HConstant::cast(right_value);
    constant_value = constant->Integer32Value() & 0x1f;
  } else {
    right = UseRegisterAtStart(right_value);
    // Smi ROR by a variable amount needs a scratch register in codegen.
    if (op == Token::ROR) {
      temp = TempRegister();
    }
  }

  // Shift operations can only deoptimize if we do a logical shift by 0 and the
  // result cannot be truncated to int32.
  bool does_deopt = false;
  if ((op == Token::SHR) && (constant_value == 0)) {
    if (FLAG_opt_safe_uint32_operations) {
      does_deopt = !instr->CheckFlag(HInstruction::kUint32);
    } else {
      does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
    }
  }

  LInstruction* result;
  if (instr->representation().IsInteger32()) {
    result = DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
  } else {
    ASSERT(instr->representation().IsSmi());
    result = DefineAsRegister(
        new(zone()) LShiftS(op, left, right, temp, does_deopt));
  }

  return does_deopt ? AssignEnvironment(result) : result;
}
1961
1962
// Rotate-right: delegates to the shared shift lowering.
LInstruction* LChunkBuilder::DoRor(HRor* instr) {
  return DoShift(Token::ROR, instr);
}
1966
1967
// Arithmetic shift-right: delegates to the shared shift lowering.
LInstruction* LChunkBuilder::DoSar(HSar* instr) {
  return DoShift(Token::SAR, instr);
}
1971
1972
// Shift-left: delegates to the shared shift lowering.
LInstruction* LChunkBuilder::DoShl(HShl* instr) {
  return DoShift(Token::SHL, instr);
}
1976
1977
// Logical shift-right: delegates to the shared shift lowering (the only
// variant that may deopt, on a zero-shift with a non-truncating use).
LInstruction* LChunkBuilder::DoShr(HShr* instr) {
  return DoShift(Token::SHR, instr);
}
1981
1982
// Replays an HSimulate into the current environment. Usually emits no code;
// only when a preceding instruction left a deopt environment pending does
// this materialize an LLazyBailout to capture it.
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
  instr->ReplayEnvironment(current_block_->last_environment());

  // If there is an instruction pending deoptimization environment create a
  // lazy bailout instruction to capture the environment.
  if (pending_deoptimization_ast_id_ == instr->ast_id()) {
    LInstruction* result = new(zone()) LLazyBailout;
    result = AssignEnvironment(result);
    // Store the lazy deopt environment with the instruction if needed. Right
    // now it is only used for LInstanceOfKnownGlobal.
    instruction_pending_deoptimization_environment_->
        SetDeferredLazyDeoptimizationEnvironment(result->environment());
    // Clear the pending state so it is consumed exactly once.
    instruction_pending_deoptimization_environment_ = NULL;
    pending_deoptimization_ast_id_ = BailoutId::None();
    return result;
  }

  return NULL;
}
2002
2003
// Lowers a stack-overflow/interrupt check. Function-entry checks call the
// stack-check stub; backward-branch checks only need an environment and a
// pointer map for the possible lazy deopt.
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
  if (instr->is_function_entry()) {
    LOperand* context = UseFixed(instr->context(), cp);
    return MarkAsCall(new(zone()) LStackCheck(context), instr);
  } else {
    ASSERT(instr->is_backwards_branch());
    LOperand* context = UseAny(instr->context());
    return AssignEnvironment(
        AssignPointerMap(new(zone()) LStackCheck(context)));
  }
}
2015
2016
// Stores a code object's entry address into a function object.
LInstruction* LChunkBuilder::DoStoreCodeEntry(HStoreCodeEntry* instr) {
  LOperand* function = UseRegister(instr->function());
  LOperand* code_object = UseRegisterAtStart(instr->code_object());
  LOperand* temp = TempRegister();
  return new(zone()) LStoreCodeEntry(function, code_object, temp);
}
2023
2024
// Stores a value into a context slot. When a write barrier is needed the
// operands must be clobberable (RecordWrite trashes its registers); a hole
// check, if required, adds a deopt environment.
LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
  LOperand* temp = TempRegister();
  LOperand* context;
  LOperand* value;
  if (instr->NeedsWriteBarrier()) {
    // TODO(all): Replace these constraints when RecordWriteStub has been
    // rewritten.
    context = UseRegisterAndClobber(instr->context());
    value = UseRegisterAndClobber(instr->value());
  } else {
    context = UseRegister(instr->context());
    value = UseRegister(instr->value());
  }
  LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
  return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
2041
2042
// Stores into a global property cell. A required hole check needs an extra
// temp and a deopt environment.
LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
  LOperand* value = UseRegister(instr->value());
  if (instr->RequiresHoleCheck()) {
    return AssignEnvironment(new(zone()) LStoreGlobalCell(value,
                                                          TempRegister(),
                                                          TempRegister()));
  } else {
    return new(zone()) LStoreGlobalCell(value, TempRegister(), NULL);
  }
}
2053
2054
// Lowers a keyed (indexed) store, dispatching on the backing store:
// typed/external arrays, double-element arrays, or ordinary fixed arrays.
LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
  LOperand* temp = NULL;
  LOperand* elements = NULL;
  LOperand* val = NULL;
  LOperand* key = NULL;

  if (!instr->is_typed_elements() &&
      instr->value()->representation().IsTagged() &&
      instr->NeedsWriteBarrier()) {
    // RecordWrite() will clobber all registers.
    elements = UseRegisterAndClobber(instr->elements());
    val = UseRegisterAndClobber(instr->value());
    key = UseRegisterAndClobber(instr->key());
  } else {
    elements = UseRegister(instr->elements());
    val = UseRegister(instr->value());
    key = UseRegisterOrConstantAtStart(instr->key());
  }

  if (instr->is_typed_elements()) {
    // The value representation must match the elements kind, and the
    // elements operand must match the array flavor (on-heap vs external).
    ASSERT((instr->value()->representation().IsInteger32() &&
            !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
           (instr->value()->representation().IsDouble() &&
            IsDoubleOrFloatElementsKind(instr->elements_kind())));
    ASSERT((instr->is_fixed_typed_array() &&
            instr->elements()->representation().IsTagged()) ||
           (instr->is_external() &&
            instr->elements()->representation().IsExternal()));
    // A constant key can be folded into the addressing mode; otherwise a
    // scratch register is needed for the index computation.
    temp = instr->key()->IsConstant() ? NULL : TempRegister();
    return new(zone()) LStoreKeyedExternal(elements, key, val, temp);

  } else if (instr->value()->representation().IsDouble()) {
    ASSERT(instr->elements()->representation().IsTagged());

    // The constraint used here is UseRegister, even though the StoreKeyed
    // instruction may canonicalize the value in the register if it is a NaN.
    temp = TempRegister();
    return new(zone()) LStoreKeyedFixedDouble(elements, key, val, temp);

  } else {
    ASSERT(instr->elements()->representation().IsTagged());
    ASSERT(instr->value()->representation().IsSmiOrTagged() ||
           instr->value()->representation().IsInteger32());

    temp = TempRegister();
    return new(zone()) LStoreKeyedFixed(elements, key, val, temp);
  }
}
2103
2104
// Lowers a generic (megamorphic) keyed store via the KeyedStoreIC calling
// convention: receiver in x2, key in x1, value in x0, context in cp.
LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object = UseFixed(instr->object(), x2);
  LOperand* key = UseFixed(instr->key(), x1);
  LOperand* value = UseFixed(instr->value(), x0);

  ASSERT(instr->object()->representation().IsTagged());
  ASSERT(instr->key()->representation().IsTagged());
  ASSERT(instr->value()->representation().IsTagged());

  return MarkAsCall(
      new(zone()) LStoreKeyedGeneric(context, object, key, value), instr);
}
2118
2119
// Lowers a store to a named in-object/backing-store field.
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
  // TODO(jbramley): Optimize register usage in this instruction. For now, it
  // allocates everything that it might need because it keeps changing in the
  // merge and keeping it valid is time-consuming.

  // TODO(jbramley): It might be beneficial to allow value to be a constant in
  // some cases. x64 makes use of this with FLAG_track_fields, for example.

  LOperand* object = UseRegister(instr->object());
  LOperand* value = UseRegisterAndClobber(instr->value());
  LOperand* temp0 = TempRegister();
  LOperand* temp1 = TempRegister();

  LStoreNamedField* result =
      new(zone()) LStoreNamedField(object, value, temp0, temp1);
  // Storing into a heap-object-typed field may need a runtime type check,
  // which can deopt, so attach an environment in that case.
  if (FLAG_track_heap_object_fields &&
      instr->field_representation().IsHeapObject() &&
      !instr->value()->type().IsHeapObject()) {
    return AssignEnvironment(result);
  }
  return result;
}
2142
2143
// Lowers a generic named store via the StoreIC calling convention:
// receiver in x1, value in x0, context in cp.
LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* object = UseFixed(instr->object(), x1);
  LOperand* value = UseFixed(instr->value(), x0);
  LInstruction* result = new(zone()) LStoreNamedGeneric(context, object, value);
  return MarkAsCall(result, instr);
}
2151
2152
// Lowers string concatenation via a call: left in x1, right in x0,
// result in x0.
LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), x1);
  LOperand* right = UseFixed(instr->right(), x0);

  LStringAdd* result = new(zone()) LStringAdd(context, left, right);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
2161
2162
// Lowers String.charCodeAt. May call the runtime on the slow path (hence the
// pointer map) and may deopt (hence the environment); string and index are
// clobbered by the implementation.
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
  LOperand* string = UseRegisterAndClobber(instr->string());
  LOperand* index = UseRegisterAndClobber(instr->index());
  LOperand* context = UseAny(instr->context());
  LStringCharCodeAt* result =
      new(zone()) LStringCharCodeAt(context, string, index);
  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
2171
2172
// Lowers String.fromCharCode. The slow path may call the runtime, so a
// pointer map is required.
LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
  // TODO(all) use at start and remove assert in codegen
  LOperand* char_code = UseRegister(instr->value());
  LOperand* context = UseAny(instr->context());
  LStringCharFromCode* result =
      new(zone()) LStringCharFromCode(context, char_code);
  return AssignPointerMap(DefineAsRegister(result));
}
2181
2182
// Lowers a string comparison used as a branch condition. The comparison is
// performed by a call (left in x1, right in x0), so the instruction is
// marked as a call site.
LInstruction* LChunkBuilder::DoStringCompareAndBranch(
    HStringCompareAndBranch* instr) {
  ASSERT(instr->left()->representation().IsTagged());
  ASSERT(instr->right()->representation().IsTagged());
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* left = UseFixed(instr->left(), x1);
  LOperand* right = UseFixed(instr->right(), x0);
  LStringCompareAndBranch* result =
      new(zone()) LStringCompareAndBranch(context, left, right);
  return MarkAsCall(result, instr);
}
2194
2195
2196 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
2197 if (instr->representation().IsSmiOrInteger32()) {
2198 ASSERT(instr->left()->representation().Equals(instr->representation()));
2199 ASSERT(instr->right()->representation().Equals(instr->representation()));
2200 LOperand *left;
2201 if (instr->left()->IsConstant() &&
2202 (HConstant::cast(instr->left())->Integer32Value() == 0)) {
2203 left = UseConstant(instr->left());
2204 } else {
2205 left = UseRegisterAtStart(instr->left());
2206 }
2207 LOperand* right = UseRegisterOrConstantAtStart(instr->right());
2208 LInstruction* result = instr->representation().IsSmi() ?
2209 DefineAsRegister(new(zone()) LSubS(left, right)) :
2210 DefineAsRegister(new(zone()) LSubI(left, right));
2211 if (instr->CheckFlag(HValue::kCanOverflow)) {
2212 result = AssignEnvironment(result);
2213 }
2214 return result;
2215 } else if (instr->representation().IsDouble()) {
2216 return DoArithmeticD(Token::SUB, instr);
2217 } else {
2218 return DoArithmeticT(Token::SUB, instr);
2219 }
2220 }
2221
2222
2223 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
2224 if (instr->HasNoUses()) {
2225 return NULL;
2226 } else {
2227 return DefineAsRegister(new(zone()) LThisFunction);
2228 }
2229 }
2230
2231
// Converts an object's properties to fast mode via a runtime call
// (object and result both in x0).
LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
  LOperand* object = UseFixed(instr->value(), x0);
  LToFastProperties* result = new(zone()) LToFastProperties(object);
  return MarkAsCall(DefineFixed(result, x0), instr);
}
2237
2238
// Lowers an elements-kind transition. Simple map changes are done inline with
// two temps; other transitions may call the runtime and therefore need the
// context and a pointer map.
LInstruction* LChunkBuilder::DoTransitionElementsKind(
    HTransitionElementsKind* instr) {
  LOperand* object = UseRegister(instr->object());
  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, NULL,
                                            TempRegister(), TempRegister());
    return result;
  } else {
    LOperand* context = UseFixed(instr->context(), cp);
    LTransitionElementsKind* result =
        new(zone()) LTransitionElementsKind(object, context, TempRegister());
    return AssignPointerMap(result);
  }
}
2254
2255
// Deoptimizes if the object is followed by an AllocationMemento; always
// carries a deopt environment.
LInstruction* LChunkBuilder::DoTrapAllocationMemento(
    HTrapAllocationMemento* instr) {
  LOperand* object = UseRegister(instr->object());
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();
  LTrapAllocationMemento* result =
      new(zone()) LTrapAllocationMemento(object, temp1, temp2);
  return AssignEnvironment(result);
}
2265
2266
// Lowers the typeof operator, implemented as a runtime call with the result
// in x0.
LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  // TODO(jbramley): In ARM, this uses UseFixed to force the input to x0.
  // However, LCodeGen::DoTypeof just pushes it to the stack (for CallRuntime)
  // anyway, so the input doesn't have to be in x0. We might be able to improve
  // the ARM back-end a little by relaxing this restriction.
  LTypeof* result =
      new(zone()) LTypeof(context, UseRegisterAtStart(instr->value()));
  return MarkAsCall(DefineFixed(result, x0), instr);
}
2277
2278
// Lowers a "typeof x === 'literal'" branch. If the branch can be statically
// elided, emit the resulting goto instead.
LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
  LInstruction* goto_instr = CheckElideControlInstruction(instr);
  if (goto_instr != NULL) return goto_instr;

  // We only need temp registers in some cases, but we can't dereference the
  // instr->type_literal() handle to test that here.
  LOperand* temp1 = TempRegister();
  LOperand* temp2 = TempRegister();

  return new(zone()) LTypeofIsAndBranch(
      UseRegister(instr->value()), temp1, temp2);
}
2291
2292
// Lowers the unary Math.* operations (abs, exp, floor, log, pow-half, round,
// sqrt, clz32), dispatching on the operation and its representation.
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs: {
      Representation r = instr->representation();
      if (r.IsTagged()) {
        // The tagged case might need to allocate a HeapNumber for the result,
        // so it is handled by a separate LInstruction.
        LOperand* context = UseFixed(instr->context(), cp);
        LOperand* input = UseRegister(instr->value());
        LOperand* temp1 = TempRegister();
        LOperand* temp2 = TempRegister();
        LOperand* temp3 = TempRegister();
        LMathAbsTagged* result =
            new(zone()) LMathAbsTagged(context, input, temp1, temp2, temp3);
        return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
      } else {
        LOperand* input = UseRegisterAtStart(instr->value());
        LMathAbs* result = new(zone()) LMathAbs(input);
        if (r.IsDouble()) {
          // The Double case can never fail so it doesn't need an environment.
          return DefineAsRegister(result);
        } else {
          ASSERT(r.IsInteger32() || r.IsSmi());
          // The Integer32 and Smi cases need an environment because they can
          // deoptimize on minimum representable number.
          return AssignEnvironment(DefineAsRegister(result));
        }
      }
    }
    case kMathExp: {
      ASSERT(instr->representation().IsDouble());
      ASSERT(instr->value()->representation().IsDouble());
      LOperand* input = UseRegister(instr->value());
      // TODO(all): Implement TempFPRegister.
      LOperand* double_temp1 = FixedTemp(d24);  // This was chosen arbitrarily.
      LOperand* temp1 = TempRegister();
      LOperand* temp2 = TempRegister();
      LOperand* temp3 = TempRegister();
      LMathExp* result = new(zone()) LMathExp(input, double_temp1,
                                              temp1, temp2, temp3);
      return DefineAsRegister(result);
    }
    case kMathFloor: {
      ASSERT(instr->representation().IsInteger32());
      ASSERT(instr->value()->representation().IsDouble());
      // TODO(jbramley): A64 can easily handle a double argument with frintm,
      // but we're never asked for it here. At the moment, we fall back to the
      // runtime if the result doesn't fit, like the other architectures.
      LOperand* input = UseRegisterAtStart(instr->value());
      LMathFloor* result = new(zone()) LMathFloor(input);
      return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
    }
    case kMathLog: {
      ASSERT(instr->representation().IsDouble());
      ASSERT(instr->value()->representation().IsDouble());
      // Math.log is implemented as a call: input and result both in d0.
      LOperand* input = UseFixedDouble(instr->value(), d0);
      LMathLog* result = new(zone()) LMathLog(input);
      return MarkAsCall(DefineFixedDouble(result, d0), instr);
    }
    case kMathPowHalf: {
      ASSERT(instr->representation().IsDouble());
      ASSERT(instr->value()->representation().IsDouble());
      LOperand* input = UseRegister(instr->value());
      return DefineAsRegister(new(zone()) LMathPowHalf(input));
    }
    case kMathRound: {
      ASSERT(instr->representation().IsInteger32());
      ASSERT(instr->value()->representation().IsDouble());
      // TODO(jbramley): As with kMathFloor, we can probably handle double
      // results fairly easily, but we are never asked for them.
      LOperand* input = UseRegister(instr->value());
      LOperand* temp = FixedTemp(d24);  // Chosen arbitrarily.
      LMathRound* result = new(zone()) LMathRound(input, temp);
      // Rounding can deopt (e.g. when the result doesn't fit in int32).
      return AssignEnvironment(DefineAsRegister(result));
    }
    case kMathSqrt: {
      ASSERT(instr->representation().IsDouble());
      ASSERT(instr->value()->representation().IsDouble());
      LOperand* input = UseRegisterAtStart(instr->value());
      return DefineAsRegister(new(zone()) LMathSqrt(input));
    }
    case kMathClz32: {
      ASSERT(instr->representation().IsInteger32());
      ASSERT(instr->value()->representation().IsInteger32());
      LOperand* input = UseRegisterAtStart(instr->value());
      return DefineAsRegister(new(zone()) LMathClz32(input));
    }
    default:
      UNREACHABLE();
      return NULL;
  }
}
2385
2386
// Maps an OSR value to a spill slot matching its location in the
// unoptimized frame.
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
  // Use an index that corresponds to the location in the unoptimized frame,
  // which the optimized frame will subsume.
  int env_index = instr->index();
  int spill_index = 0;
  if (instr->environment()->is_parameter_index(env_index)) {
    spill_index = chunk_->GetParameterStackSlot(env_index);
  } else {
    spill_index = env_index - instr->environment()->first_local_index();
    if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
      // Too many locals for OSR; abort compilation rather than emit an
      // out-of-range slot index.
      Abort(kTooManySpillSlotsNeededForOSR);
      spill_index = 0;
    }
  }
  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
2403
2404
// HUseConst only keeps a value alive in Hydrogen; it generates no code.
LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
  return NULL;
}
2408
2409
// Prepares a for-in loop: a call that produces the enumerable's map in x0,
// and may deoptimize eagerly at the call site.
LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  // Assign object to a fixed register different from those already used in
  // LForInPrepareMap.
  LOperand* object = UseFixed(instr->enumerable(), x0);
  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
  return MarkAsCall(DefineFixed(result, x0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
2418
2419
// Loads the enum cache array from a map for for-in iteration; may deopt,
// hence the environment.
LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
  LOperand* map = UseRegister(instr->map());
  return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
}
2424
2425
// Deoptimizes if the object's map differs from the expected map value
// (used by for-in to detect modification during iteration).
LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
  LOperand* value = UseRegisterAtStart(instr->value());
  LOperand* map = UseRegister(instr->map());
  LOperand* temp = TempRegister();
  return AssignEnvironment(new(zone()) LCheckMapValue(value, map, temp));
}
2432
2433
// Loads an object field addressed by a dynamic index (for-in fast path).
LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
  LOperand* object = UseRegisterAtStart(instr->object());
  LOperand* index = UseRegister(instr->index());
  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
}
2439
2440
// Wraps the receiver for a non-strict function call (e.g. boxing primitives,
// substituting the global object); may deopt, hence the environment.
LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
  LOperand* receiver = UseRegister(instr->receiver());
  LOperand* function = UseRegister(instr->function());
  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
  return AssignEnvironment(DefineAsRegister(result));
}
2447
2448
2449 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/a64/lithium-a64.h ('k') | src/a64/lithium-codegen-a64.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698