Chromium Code Reviews

Side by Side Diff: src/mips64/full-codegen-mips64.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes | Created 6 years, 4 months ago
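For context, this CL mechanically renames V8's debug-only assertion macros to the Chromium-style DCHECK names: ASSERT becomes DCHECK, ASSERT_EQ becomes DCHECK_EQ, and so on, leaving the checked conditions untouched. The sketch below is illustrative only and is not part of this CL; the macro definitions are simplified stand-ins for the real ones in V8's logging headers, and SafeDiv is a made-up example function.

#include <cstdio>
#include <cstdlib>

#ifdef DEBUG
// Debug builds: evaluate the condition and abort with a message on failure.
#define DCHECK(condition)                                        \
  do {                                                           \
    if (!(condition)) {                                          \
      std::fprintf(stderr, "DCHECK failed: %s\n", #condition);   \
      std::abort();                                              \
    }                                                            \
  } while (false)
#else
// Release builds: the check is compiled out entirely.
#define DCHECK(condition) ((void)0)
#endif

#define DCHECK_EQ(expected, actual) DCHECK((expected) == (actual))

int SafeDiv(int numerator, int denominator) {
  DCHECK(denominator != 0);  // Previously spelled ASSERT(denominator != 0).
  return numerator / denominator;
}

int main() {
  std::printf("%d\n", SafeDiv(10, 2));  // Prints 5; the DCHECK passes.
  return 0;
}

The changed lines in the diff below follow this pattern: the old column shows the ASSERT* call, the new column the equivalent DCHECK* call.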
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_MIPS64 7 #if V8_TARGET_ARCH_MIPS64
8 8
9 // Note on Mips implementation: 9 // Note on Mips implementation:
10 // 10 //
(...skipping 32 matching lines...)
43 // never be emitted by normal code. 43 // never be emitted by normal code.
44 class JumpPatchSite BASE_EMBEDDED { 44 class JumpPatchSite BASE_EMBEDDED {
45 public: 45 public:
46 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { 46 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
47 #ifdef DEBUG 47 #ifdef DEBUG
48 info_emitted_ = false; 48 info_emitted_ = false;
49 #endif 49 #endif
50 } 50 }
51 51
52 ~JumpPatchSite() { 52 ~JumpPatchSite() {
53 ASSERT(patch_site_.is_bound() == info_emitted_); 53 DCHECK(patch_site_.is_bound() == info_emitted_);
54 } 54 }
55 55
56 // When initially emitting this ensure that a jump is always generated to skip 56 // When initially emitting this ensure that a jump is always generated to skip
57 // the inlined smi code. 57 // the inlined smi code.
58 void EmitJumpIfNotSmi(Register reg, Label* target) { 58 void EmitJumpIfNotSmi(Register reg, Label* target) {
59 ASSERT(!patch_site_.is_bound() && !info_emitted_); 59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
60 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); 60 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
61 __ bind(&patch_site_); 61 __ bind(&patch_site_);
62 __ andi(at, reg, 0); 62 __ andi(at, reg, 0);
63 // Always taken before patched. 63 // Always taken before patched.
64 __ BranchShort(target, eq, at, Operand(zero_reg)); 64 __ BranchShort(target, eq, at, Operand(zero_reg));
65 } 65 }
66 66
67 // When initially emitting this ensure that a jump is never generated to skip 67 // When initially emitting this ensure that a jump is never generated to skip
68 // the inlined smi code. 68 // the inlined smi code.
69 void EmitJumpIfSmi(Register reg, Label* target) { 69 void EmitJumpIfSmi(Register reg, Label* target) {
70 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); 70 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
71 ASSERT(!patch_site_.is_bound() && !info_emitted_); 71 DCHECK(!patch_site_.is_bound() && !info_emitted_);
72 __ bind(&patch_site_); 72 __ bind(&patch_site_);
73 __ andi(at, reg, 0); 73 __ andi(at, reg, 0);
74 // Never taken before patched. 74 // Never taken before patched.
75 __ BranchShort(target, ne, at, Operand(zero_reg)); 75 __ BranchShort(target, ne, at, Operand(zero_reg));
76 } 76 }
77 77
78 void EmitPatchInfo() { 78 void EmitPatchInfo() {
79 if (patch_site_.is_bound()) { 79 if (patch_site_.is_bound()) {
80 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); 80 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
81 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask); 81 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
(...skipping 68 matching lines...)
150 // MANUAL indicates that the scope shouldn't actually generate code to set up 150 // MANUAL indicates that the scope shouldn't actually generate code to set up
151 // the frame (that is done below). 151 // the frame (that is done below).
152 FrameScope frame_scope(masm_, StackFrame::MANUAL); 152 FrameScope frame_scope(masm_, StackFrame::MANUAL);
153 info->set_prologue_offset(masm_->pc_offset()); 153 info->set_prologue_offset(masm_->pc_offset());
154 __ Prologue(info->IsCodePreAgingActive()); 154 __ Prologue(info->IsCodePreAgingActive());
155 info->AddNoFrameRange(0, masm_->pc_offset()); 155 info->AddNoFrameRange(0, masm_->pc_offset());
156 156
157 { Comment cmnt(masm_, "[ Allocate locals"); 157 { Comment cmnt(masm_, "[ Allocate locals");
158 int locals_count = info->scope()->num_stack_slots(); 158 int locals_count = info->scope()->num_stack_slots();
159 // Generators allocate locals, if any, in context slots. 159 // Generators allocate locals, if any, in context slots.
160 ASSERT(!info->function()->is_generator() || locals_count == 0); 160 DCHECK(!info->function()->is_generator() || locals_count == 0);
161 if (locals_count > 0) { 161 if (locals_count > 0) {
162 if (locals_count >= 128) { 162 if (locals_count >= 128) {
163 Label ok; 163 Label ok;
164 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize)); 164 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
165 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); 165 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
166 __ Branch(&ok, hs, t1, Operand(a2)); 166 __ Branch(&ok, hs, t1, Operand(a2));
167 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); 167 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
168 __ bind(&ok); 168 __ bind(&ok);
169 } 169 }
170 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); 170 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
(...skipping 118 matching lines...)
289 Comment cmnt(masm_, "[ Declarations"); 289 Comment cmnt(masm_, "[ Declarations");
290 scope()->VisitIllegalRedeclaration(this); 290 scope()->VisitIllegalRedeclaration(this);
291 291
292 } else { 292 } else {
293 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 293 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
294 { Comment cmnt(masm_, "[ Declarations"); 294 { Comment cmnt(masm_, "[ Declarations");
295 // For named function expressions, declare the function name as a 295 // For named function expressions, declare the function name as a
296 // constant. 296 // constant.
297 if (scope()->is_function_scope() && scope()->function() != NULL) { 297 if (scope()->is_function_scope() && scope()->function() != NULL) {
298 VariableDeclaration* function = scope()->function(); 298 VariableDeclaration* function = scope()->function();
299 ASSERT(function->proxy()->var()->mode() == CONST || 299 DCHECK(function->proxy()->var()->mode() == CONST ||
300 function->proxy()->var()->mode() == CONST_LEGACY); 300 function->proxy()->var()->mode() == CONST_LEGACY);
301 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); 301 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
302 VisitVariableDeclaration(function); 302 VisitVariableDeclaration(function);
303 } 303 }
304 VisitDeclarations(scope()->declarations()); 304 VisitDeclarations(scope()->declarations());
305 } 305 }
306 { Comment cmnt(masm_, "[ Stack check"); 306 { Comment cmnt(masm_, "[ Stack check");
307 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 307 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
308 Label ok; 308 Label ok;
309 __ LoadRoot(at, Heap::kStackLimitRootIndex); 309 __ LoadRoot(at, Heap::kStackLimitRootIndex);
310 __ Branch(&ok, hs, sp, Operand(at)); 310 __ Branch(&ok, hs, sp, Operand(at));
311 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); 311 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
312 PredictableCodeSizeScope predictable(masm_, 312 PredictableCodeSizeScope predictable(masm_,
313 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); 313 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
314 __ Call(stack_check, RelocInfo::CODE_TARGET); 314 __ Call(stack_check, RelocInfo::CODE_TARGET);
315 __ bind(&ok); 315 __ bind(&ok);
316 } 316 }
317 317
318 { Comment cmnt(masm_, "[ Body"); 318 { Comment cmnt(masm_, "[ Body");
319 ASSERT(loop_depth() == 0); 319 DCHECK(loop_depth() == 0);
320 320
321 VisitStatements(function()->body()); 321 VisitStatements(function()->body());
322 322
323 ASSERT(loop_depth() == 0); 323 DCHECK(loop_depth() == 0);
324 } 324 }
325 } 325 }
326 326
327 // Always emit a 'return undefined' in case control fell off the end of 327 // Always emit a 'return undefined' in case control fell off the end of
328 // the body. 328 // the body.
329 { Comment cmnt(masm_, "[ return <undefined>;"); 329 { Comment cmnt(masm_, "[ return <undefined>;");
330 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); 330 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
331 } 331 }
332 EmitReturnSequence(); 332 EmitReturnSequence();
333 } 333 }
334 334
335 335
336 void FullCodeGenerator::ClearAccumulator() { 336 void FullCodeGenerator::ClearAccumulator() {
337 ASSERT(Smi::FromInt(0) == 0); 337 DCHECK(Smi::FromInt(0) == 0);
338 __ mov(v0, zero_reg); 338 __ mov(v0, zero_reg);
339 } 339 }
340 340
341 341
342 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { 342 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
343 __ li(a2, Operand(profiling_counter_)); 343 __ li(a2, Operand(profiling_counter_));
344 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset)); 344 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
345 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta))); 345 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
346 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset)); 346 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
347 } 347 }
(...skipping 14 matching lines...)
362 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 362 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
363 Label* back_edge_target) { 363 Label* back_edge_target) {
364 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need 364 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
365 // to make sure it is constant. Branch may emit a skip-or-jump sequence 365 // to make sure it is constant. Branch may emit a skip-or-jump sequence
366 // instead of the normal Branch. It seems that the "skip" part of that 366 // instead of the normal Branch. It seems that the "skip" part of that
367 // sequence is about as long as this Branch would be so it is safe to ignore 367 // sequence is about as long as this Branch would be so it is safe to ignore
368 // that. 368 // that.
369 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); 369 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
370 Comment cmnt(masm_, "[ Back edge bookkeeping"); 370 Comment cmnt(masm_, "[ Back edge bookkeeping");
371 Label ok; 371 Label ok;
372 ASSERT(back_edge_target->is_bound()); 372 DCHECK(back_edge_target->is_bound());
373 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); 373 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
374 int weight = Min(kMaxBackEdgeWeight, 374 int weight = Min(kMaxBackEdgeWeight,
375 Max(1, distance / kCodeSizeMultiplier)); 375 Max(1, distance / kCodeSizeMultiplier));
376 EmitProfilingCounterDecrement(weight); 376 EmitProfilingCounterDecrement(weight);
377 __ slt(at, a3, zero_reg); 377 __ slt(at, a3, zero_reg);
378 __ beq(at, zero_reg, &ok); 378 __ beq(at, zero_reg, &ok);
379 // Call will emit a li t9 first, so it is safe to use the delay slot. 379 // Call will emit a li t9 first, so it is safe to use the delay slot.
380 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); 380 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
381 // Record a mapping of this PC offset to the OSR id. This is used to find 381 // Record a mapping of this PC offset to the OSR id. This is used to find
382 // the AST id from the unoptimized code in order to use it as a key into 382 // the AST id from the unoptimized code in order to use it as a key into
(...skipping 58 matching lines...)
441 int no_frame_start = masm_->pc_offset(); 441 int no_frame_start = masm_->pc_offset();
442 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit())); 442 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
443 masm_->Daddu(sp, sp, Operand(sp_delta)); 443 masm_->Daddu(sp, sp, Operand(sp_delta));
444 masm_->Jump(ra); 444 masm_->Jump(ra);
445 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 445 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
446 } 446 }
447 447
448 #ifdef DEBUG 448 #ifdef DEBUG
449 // Check that the size of the code used for returning is large enough 449 // Check that the size of the code used for returning is large enough
450 // for the debugger's requirements. 450 // for the debugger's requirements.
451 ASSERT(Assembler::kJSReturnSequenceInstructions <= 451 DCHECK(Assembler::kJSReturnSequenceInstructions <=
452 masm_->InstructionsGeneratedSince(&check_exit_codesize)); 452 masm_->InstructionsGeneratedSince(&check_exit_codesize));
453 #endif 453 #endif
454 } 454 }
455 } 455 }
456 456
457 457
458 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { 458 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
459 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 459 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
460 } 460 }
461 461
462 462
463 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { 463 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
464 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 464 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
465 codegen()->GetVar(result_register(), var); 465 codegen()->GetVar(result_register(), var);
466 } 466 }
467 467
468 468
469 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { 469 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
470 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 470 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
471 codegen()->GetVar(result_register(), var); 471 codegen()->GetVar(result_register(), var);
472 __ push(result_register()); 472 __ push(result_register());
473 } 473 }
474 474
475 475
476 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 476 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
477 // For simplicity we always test the accumulator register. 477 // For simplicity we always test the accumulator register.
478 codegen()->GetVar(result_register(), var); 478 codegen()->GetVar(result_register(), var);
479 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 479 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
480 codegen()->DoTest(this); 480 codegen()->DoTest(this);
(...skipping 50 matching lines...)
531 __ li(result_register(), Operand(lit)); 531 __ li(result_register(), Operand(lit));
532 __ push(result_register()); 532 __ push(result_register());
533 } 533 }
534 534
535 535
536 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 536 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
537 codegen()->PrepareForBailoutBeforeSplit(condition(), 537 codegen()->PrepareForBailoutBeforeSplit(condition(),
538 true, 538 true,
539 true_label_, 539 true_label_,
540 false_label_); 540 false_label_);
541 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals. 541 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
542 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 542 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
543 if (false_label_ != fall_through_) __ Branch(false_label_); 543 if (false_label_ != fall_through_) __ Branch(false_label_);
544 } else if (lit->IsTrue() || lit->IsJSObject()) { 544 } else if (lit->IsTrue() || lit->IsJSObject()) {
545 if (true_label_ != fall_through_) __ Branch(true_label_); 545 if (true_label_ != fall_through_) __ Branch(true_label_);
546 } else if (lit->IsString()) { 546 } else if (lit->IsString()) {
547 if (String::cast(*lit)->length() == 0) { 547 if (String::cast(*lit)->length() == 0) {
548 if (false_label_ != fall_through_) __ Branch(false_label_); 548 if (false_label_ != fall_through_) __ Branch(false_label_);
549 } else { 549 } else {
550 if (true_label_ != fall_through_) __ Branch(true_label_); 550 if (true_label_ != fall_through_) __ Branch(true_label_);
551 } 551 }
552 } else if (lit->IsSmi()) { 552 } else if (lit->IsSmi()) {
553 if (Smi::cast(*lit)->value() == 0) { 553 if (Smi::cast(*lit)->value() == 0) {
554 if (false_label_ != fall_through_) __ Branch(false_label_); 554 if (false_label_ != fall_through_) __ Branch(false_label_);
555 } else { 555 } else {
556 if (true_label_ != fall_through_) __ Branch(true_label_); 556 if (true_label_ != fall_through_) __ Branch(true_label_);
557 } 557 }
558 } else { 558 } else {
559 // For simplicity we always test the accumulator register. 559 // For simplicity we always test the accumulator register.
560 __ li(result_register(), Operand(lit)); 560 __ li(result_register(), Operand(lit));
561 codegen()->DoTest(this); 561 codegen()->DoTest(this);
562 } 562 }
563 } 563 }
564 564
565 565
566 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 566 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
567 Register reg) const { 567 Register reg) const {
568 ASSERT(count > 0); 568 DCHECK(count > 0);
569 __ Drop(count); 569 __ Drop(count);
570 } 570 }
571 571
572 572
573 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( 573 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
574 int count, 574 int count,
575 Register reg) const { 575 Register reg) const {
576 ASSERT(count > 0); 576 DCHECK(count > 0);
577 __ Drop(count); 577 __ Drop(count);
578 __ Move(result_register(), reg); 578 __ Move(result_register(), reg);
579 } 579 }
580 580
581 581
582 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 582 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
583 Register reg) const { 583 Register reg) const {
584 ASSERT(count > 0); 584 DCHECK(count > 0);
585 if (count > 1) __ Drop(count - 1); 585 if (count > 1) __ Drop(count - 1);
586 __ sd(reg, MemOperand(sp, 0)); 586 __ sd(reg, MemOperand(sp, 0));
587 } 587 }
588 588
589 589
590 void FullCodeGenerator::TestContext::DropAndPlug(int count, 590 void FullCodeGenerator::TestContext::DropAndPlug(int count,
591 Register reg) const { 591 Register reg) const {
592 ASSERT(count > 0); 592 DCHECK(count > 0);
593 // For simplicity we always test the accumulator register. 593 // For simplicity we always test the accumulator register.
594 __ Drop(count); 594 __ Drop(count);
595 __ Move(result_register(), reg); 595 __ Move(result_register(), reg);
596 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 596 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
597 codegen()->DoTest(this); 597 codegen()->DoTest(this);
598 } 598 }
599 599
600 600
601 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 601 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
602 Label* materialize_false) const { 602 Label* materialize_false) const {
603 ASSERT(materialize_true == materialize_false); 603 DCHECK(materialize_true == materialize_false);
604 __ bind(materialize_true); 604 __ bind(materialize_true);
605 } 605 }
606 606
607 607
608 void FullCodeGenerator::AccumulatorValueContext::Plug( 608 void FullCodeGenerator::AccumulatorValueContext::Plug(
609 Label* materialize_true, 609 Label* materialize_true,
610 Label* materialize_false) const { 610 Label* materialize_false) const {
611 Label done; 611 Label done;
612 __ bind(materialize_true); 612 __ bind(materialize_true);
613 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); 613 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
(...skipping 15 matching lines...)
629 __ Branch(&done); 629 __ Branch(&done);
630 __ bind(materialize_false); 630 __ bind(materialize_false);
631 __ LoadRoot(at, Heap::kFalseValueRootIndex); 631 __ LoadRoot(at, Heap::kFalseValueRootIndex);
632 __ push(at); 632 __ push(at);
633 __ bind(&done); 633 __ bind(&done);
634 } 634 }
635 635
636 636
637 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 637 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
638 Label* materialize_false) const { 638 Label* materialize_false) const {
639 ASSERT(materialize_true == true_label_); 639 DCHECK(materialize_true == true_label_);
640 ASSERT(materialize_false == false_label_); 640 DCHECK(materialize_false == false_label_);
641 } 641 }
642 642
643 643
644 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 644 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
645 } 645 }
646 646
647 647
648 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 648 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
649 Heap::RootListIndex value_root_index = 649 Heap::RootListIndex value_root_index =
650 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 650 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
(...skipping 45 matching lines...)
696 } else if (if_true == fall_through) { 696 } else if (if_true == fall_through) {
697 __ Branch(if_false, NegateCondition(cc), lhs, rhs); 697 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
698 } else { 698 } else {
699 __ Branch(if_true, cc, lhs, rhs); 699 __ Branch(if_true, cc, lhs, rhs);
700 __ Branch(if_false); 700 __ Branch(if_false);
701 } 701 }
702 } 702 }
703 703
704 704
705 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 705 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
706 ASSERT(var->IsStackAllocated()); 706 DCHECK(var->IsStackAllocated());
707 // Offset is negative because higher indexes are at lower addresses. 707 // Offset is negative because higher indexes are at lower addresses.
708 int offset = -var->index() * kPointerSize; 708 int offset = -var->index() * kPointerSize;
709 // Adjust by a (parameter or local) base offset. 709 // Adjust by a (parameter or local) base offset.
710 if (var->IsParameter()) { 710 if (var->IsParameter()) {
711 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; 711 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
712 } else { 712 } else {
713 offset += JavaScriptFrameConstants::kLocal0Offset; 713 offset += JavaScriptFrameConstants::kLocal0Offset;
714 } 714 }
715 return MemOperand(fp, offset); 715 return MemOperand(fp, offset);
716 } 716 }
717 717
718 718
719 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { 719 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
720 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 720 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
721 if (var->IsContextSlot()) { 721 if (var->IsContextSlot()) {
722 int context_chain_length = scope()->ContextChainLength(var->scope()); 722 int context_chain_length = scope()->ContextChainLength(var->scope());
723 __ LoadContext(scratch, context_chain_length); 723 __ LoadContext(scratch, context_chain_length);
724 return ContextOperand(scratch, var->index()); 724 return ContextOperand(scratch, var->index());
725 } else { 725 } else {
726 return StackOperand(var); 726 return StackOperand(var);
727 } 727 }
728 } 728 }
729 729
730 730
731 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 731 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
732 // Use destination as scratch. 732 // Use destination as scratch.
733 MemOperand location = VarOperand(var, dest); 733 MemOperand location = VarOperand(var, dest);
734 __ ld(dest, location); 734 __ ld(dest, location);
735 } 735 }
736 736
737 737
738 void FullCodeGenerator::SetVar(Variable* var, 738 void FullCodeGenerator::SetVar(Variable* var,
739 Register src, 739 Register src,
740 Register scratch0, 740 Register scratch0,
741 Register scratch1) { 741 Register scratch1) {
742 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 742 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
743 ASSERT(!scratch0.is(src)); 743 DCHECK(!scratch0.is(src));
744 ASSERT(!scratch0.is(scratch1)); 744 DCHECK(!scratch0.is(scratch1));
745 ASSERT(!scratch1.is(src)); 745 DCHECK(!scratch1.is(src));
746 MemOperand location = VarOperand(var, scratch0); 746 MemOperand location = VarOperand(var, scratch0);
747 __ sd(src, location); 747 __ sd(src, location);
748 // Emit the write barrier code if the location is in the heap. 748 // Emit the write barrier code if the location is in the heap.
749 if (var->IsContextSlot()) { 749 if (var->IsContextSlot()) {
750 __ RecordWriteContextSlot(scratch0, 750 __ RecordWriteContextSlot(scratch0,
751 location.offset(), 751 location.offset(),
752 src, 752 src,
753 scratch1, 753 scratch1,
754 kRAHasBeenSaved, 754 kRAHasBeenSaved,
755 kDontSaveFPRegs); 755 kDontSaveFPRegs);
(...skipping 17 matching lines...)
773 __ LoadRoot(a4, Heap::kTrueValueRootIndex); 773 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
774 Split(eq, a0, Operand(a4), if_true, if_false, NULL); 774 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
775 __ bind(&skip); 775 __ bind(&skip);
776 } 776 }
777 } 777 }
778 778
779 779
780 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 780 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
781 // The variable in the declaration always resides in the current function 781 // The variable in the declaration always resides in the current function
782 // context. 782 // context.
783 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 783 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
784 if (generate_debug_code_) { 784 if (generate_debug_code_) {
785 // Check that we're not inside a with or catch context. 785 // Check that we're not inside a with or catch context.
786 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); 786 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
787 __ LoadRoot(a4, Heap::kWithContextMapRootIndex); 787 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
788 __ Check(ne, kDeclarationInWithContext, 788 __ Check(ne, kDeclarationInWithContext,
789 a1, Operand(a4)); 789 a1, Operand(a4));
790 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex); 790 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
791 __ Check(ne, kDeclarationInCatchContext, 791 __ Check(ne, kDeclarationInCatchContext,
792 a1, Operand(a4)); 792 a1, Operand(a4));
793 } 793 }
(...skipping 35 matching lines...)
829 __ sd(at, ContextOperand(cp, variable->index())); 829 __ sd(at, ContextOperand(cp, variable->index()));
830 // No write barrier since the_hole_value is in old space. 830 // No write barrier since the_hole_value is in old space.
831 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 831 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
832 } 832 }
833 break; 833 break;
834 834
835 case Variable::LOOKUP: { 835 case Variable::LOOKUP: {
836 Comment cmnt(masm_, "[ VariableDeclaration"); 836 Comment cmnt(masm_, "[ VariableDeclaration");
837 __ li(a2, Operand(variable->name())); 837 __ li(a2, Operand(variable->name()));
838 // Declaration nodes are always introduced in one of four modes. 838 // Declaration nodes are always introduced in one of four modes.
839 ASSERT(IsDeclaredVariableMode(mode)); 839 DCHECK(IsDeclaredVariableMode(mode));
840 PropertyAttributes attr = 840 PropertyAttributes attr =
841 IsImmutableVariableMode(mode) ? READ_ONLY : NONE; 841 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
842 __ li(a1, Operand(Smi::FromInt(attr))); 842 __ li(a1, Operand(Smi::FromInt(attr)));
843 // Push initial value, if any. 843 // Push initial value, if any.
844 // Note: For variables we must not push an initial value (such as 844 // Note: For variables we must not push an initial value (such as
845 // 'undefined') because we may have a (legal) redeclaration and we 845 // 'undefined') because we may have a (legal) redeclaration and we
846 // must not destroy the current value. 846 // must not destroy the current value.
847 if (hole_init) { 847 if (hole_init) {
848 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); 848 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
849 __ Push(cp, a2, a1, a0); 849 __ Push(cp, a2, a1, a0);
850 } else { 850 } else {
851 ASSERT(Smi::FromInt(0) == 0); 851 DCHECK(Smi::FromInt(0) == 0);
852 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value. 852 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
853 __ Push(cp, a2, a1, a0); 853 __ Push(cp, a2, a1, a0);
854 } 854 }
855 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 855 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
856 break; 856 break;
857 } 857 }
858 } 858 }
859 } 859 }
860 860
861 861
(...skipping 48 matching lines...)
910 VisitForStackValue(declaration->fun()); 910 VisitForStackValue(declaration->fun());
911 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 911 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
912 break; 912 break;
913 } 913 }
914 } 914 }
915 } 915 }
916 916
917 917
918 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 918 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
919 Variable* variable = declaration->proxy()->var(); 919 Variable* variable = declaration->proxy()->var();
920 ASSERT(variable->location() == Variable::CONTEXT); 920 DCHECK(variable->location() == Variable::CONTEXT);
921 ASSERT(variable->interface()->IsFrozen()); 921 DCHECK(variable->interface()->IsFrozen());
922 Comment cmnt(masm_, "[ ModuleDeclaration"); 922 Comment cmnt(masm_, "[ ModuleDeclaration");
923 EmitDebugCheckDeclarationContext(variable); 923 EmitDebugCheckDeclarationContext(variable);
924 924
925 // Load instance object. 925 // Load instance object.
926 __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope())); 926 __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
927 __ ld(a1, ContextOperand(a1, variable->interface()->Index())); 927 __ ld(a1, ContextOperand(a1, variable->interface()->Index()));
928 __ ld(a1, ContextOperand(a1, Context::EXTENSION_INDEX)); 928 __ ld(a1, ContextOperand(a1, Context::EXTENSION_INDEX));
929 929
930 // Assign it. 930 // Assign it.
931 __ sd(a1, ContextOperand(cp, variable->index())); 931 __ sd(a1, ContextOperand(cp, variable->index()));
(...skipping 277 matching lines...)
1209 1209
1210 // Check if the expected map still matches that of the enumerable. 1210 // Check if the expected map still matches that of the enumerable.
1211 // If not, we may have to filter the key. 1211 // If not, we may have to filter the key.
1212 Label update_each; 1212 Label update_each;
1213 __ ld(a1, MemOperand(sp, 4 * kPointerSize)); 1213 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1214 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); 1214 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1215 __ Branch(&update_each, eq, a4, Operand(a2)); 1215 __ Branch(&update_each, eq, a4, Operand(a2));
1216 1216
1217 // For proxies, no filtering is done. 1217 // For proxies, no filtering is done.
1218 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. 1218 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1219 ASSERT_EQ(Smi::FromInt(0), 0); 1219 DCHECK_EQ(Smi::FromInt(0), 0);
1220 __ Branch(&update_each, eq, a2, Operand(zero_reg)); 1220 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1221 1221
1222 // Convert the entry to a string or (smi) 0 if it isn't a property 1222 // Convert the entry to a string or (smi) 0 if it isn't a property
1223 // any more. If the property has been removed while iterating, we 1223 // any more. If the property has been removed while iterating, we
1224 // just skip it. 1224 // just skip it.
1225 __ Push(a1, a3); // Enumerable and current entry. 1225 __ Push(a1, a3); // Enumerable and current entry.
1226 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 1226 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1227 __ mov(a3, result_register()); 1227 __ mov(a3, result_register());
1228 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg)); 1228 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1229 1229
(...skipping 161 matching lines...)
1391 1391
1392 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) 1392 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1393 ? NOT_CONTEXTUAL 1393 ? NOT_CONTEXTUAL
1394 : CONTEXTUAL; 1394 : CONTEXTUAL;
1395 CallLoadIC(mode); 1395 CallLoadIC(mode);
1396 } 1396 }
1397 1397
1398 1398
1399 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1399 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1400 Label* slow) { 1400 Label* slow) {
1401 ASSERT(var->IsContextSlot()); 1401 DCHECK(var->IsContextSlot());
1402 Register context = cp; 1402 Register context = cp;
1403 Register next = a3; 1403 Register next = a3;
1404 Register temp = a4; 1404 Register temp = a4;
1405 1405
1406 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1406 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1407 if (s->num_heap_slots() > 0) { 1407 if (s->num_heap_slots() > 0) {
1408 if (s->calls_sloppy_eval()) { 1408 if (s->calls_sloppy_eval()) {
1409 // Check that extension is NULL. 1409 // Check that extension is NULL.
1410 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1410 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1411 __ Branch(slow, ne, temp, Operand(zero_reg)); 1411 __ Branch(slow, ne, temp, Operand(zero_reg));
(...skipping 76 matching lines...)
1488 case Variable::LOCAL: 1488 case Variable::LOCAL:
1489 case Variable::CONTEXT: { 1489 case Variable::CONTEXT: {
1490 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" 1490 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1491 : "[ Stack variable"); 1491 : "[ Stack variable");
1492 if (var->binding_needs_init()) { 1492 if (var->binding_needs_init()) {
1493 // var->scope() may be NULL when the proxy is located in eval code and 1493 // var->scope() may be NULL when the proxy is located in eval code and
1494 // refers to a potential outside binding. Currently those bindings are 1494 // refers to a potential outside binding. Currently those bindings are
1495 // always looked up dynamically, i.e. in that case 1495 // always looked up dynamically, i.e. in that case
1496 // var->location() == LOOKUP. 1496 // var->location() == LOOKUP.
1497 // always holds. 1497 // always holds.
1498 ASSERT(var->scope() != NULL); 1498 DCHECK(var->scope() != NULL);
1499 1499
1500 // Check if the binding really needs an initialization check. The check 1500 // Check if the binding really needs an initialization check. The check
1501 // can be skipped in the following situation: we have a LET or CONST 1501 // can be skipped in the following situation: we have a LET or CONST
1502 // binding in harmony mode, both the Variable and the VariableProxy have 1502 // binding in harmony mode, both the Variable and the VariableProxy have
1503 // the same declaration scope (i.e. they are both in global code, in the 1503 // the same declaration scope (i.e. they are both in global code, in the
1504 // same function or in the same eval code) and the VariableProxy is in 1504 // same function or in the same eval code) and the VariableProxy is in
1505 // the source physically located after the initializer of the variable. 1505 // the source physically located after the initializer of the variable.
1506 // 1506 //
1507 // We cannot skip any initialization checks for CONST in non-harmony 1507 // We cannot skip any initialization checks for CONST in non-harmony
1508 // mode because const variables may be declared but never initialized: 1508 // mode because const variables may be declared but never initialized:
1509 // if (false) { const x; }; var y = x; 1509 // if (false) { const x; }; var y = x;
1510 // 1510 //
1511 // The condition on the declaration scopes is a conservative check for 1511 // The condition on the declaration scopes is a conservative check for
1512 // nested functions that access a binding and are called before the 1512 // nested functions that access a binding and are called before the
1513 // binding is initialized: 1513 // binding is initialized:
1514 // function() { f(); let x = 1; function f() { x = 2; } } 1514 // function() { f(); let x = 1; function f() { x = 2; } }
1515 // 1515 //
1516 bool skip_init_check; 1516 bool skip_init_check;
1517 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { 1517 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1518 skip_init_check = false; 1518 skip_init_check = false;
1519 } else { 1519 } else {
1520 // Check that we always have valid source position. 1520 // Check that we always have valid source position.
1521 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); 1521 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1522 ASSERT(proxy->position() != RelocInfo::kNoPosition); 1522 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1523 skip_init_check = var->mode() != CONST_LEGACY && 1523 skip_init_check = var->mode() != CONST_LEGACY &&
1524 var->initializer_position() < proxy->position(); 1524 var->initializer_position() < proxy->position();
1525 } 1525 }
1526 1526
1527 if (!skip_init_check) { 1527 if (!skip_init_check) {
1528 // Let and const need a read barrier. 1528 // Let and const need a read barrier.
1529 GetVar(v0, var); 1529 GetVar(v0, var);
1530 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 1530 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1531 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq. 1531 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1532 if (var->mode() == LET || var->mode() == CONST) { 1532 if (var->mode() == LET || var->mode() == CONST) {
1533 // Throw a reference error when using an uninitialized let/const 1533 // Throw a reference error when using an uninitialized let/const
1534 // binding in harmony mode. 1534 // binding in harmony mode.
1535 Label done; 1535 Label done;
1536 __ Branch(&done, ne, at, Operand(zero_reg)); 1536 __ Branch(&done, ne, at, Operand(zero_reg));
1537 __ li(a0, Operand(var->name())); 1537 __ li(a0, Operand(var->name()));
1538 __ push(a0); 1538 __ push(a0);
1539 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1539 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1540 __ bind(&done); 1540 __ bind(&done);
1541 } else { 1541 } else {
1542 // Uninitialized const bindings outside of harmony mode are unholed. 1542 // Uninitialized const bindings outside of harmony mode are unholed.
1543 ASSERT(var->mode() == CONST_LEGACY); 1543 DCHECK(var->mode() == CONST_LEGACY);
1544 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); 1544 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1545 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole. 1545 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1546 } 1546 }
1547 context()->Plug(v0); 1547 context()->Plug(v0);
1548 break; 1548 break;
1549 } 1549 }
1550 } 1550 }
1551 context()->Plug(var); 1551 context()->Plug(var);
1552 break; 1552 break;
1553 } 1553 }
(...skipping 119 matching lines...)
1673 Literal* key = property->key(); 1673 Literal* key = property->key();
1674 Expression* value = property->value(); 1674 Expression* value = property->value();
1675 if (!result_saved) { 1675 if (!result_saved) {
1676 __ push(v0); // Save result on stack. 1676 __ push(v0); // Save result on stack.
1677 result_saved = true; 1677 result_saved = true;
1678 } 1678 }
1679 switch (property->kind()) { 1679 switch (property->kind()) {
1680 case ObjectLiteral::Property::CONSTANT: 1680 case ObjectLiteral::Property::CONSTANT:
1681 UNREACHABLE(); 1681 UNREACHABLE();
1682 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1682 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1683 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1683 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1684 // Fall through. 1684 // Fall through.
1685 case ObjectLiteral::Property::COMPUTED: 1685 case ObjectLiteral::Property::COMPUTED:
1686 if (key->value()->IsInternalizedString()) { 1686 if (key->value()->IsInternalizedString()) {
1687 if (property->emit_store()) { 1687 if (property->emit_store()) {
1688 VisitForAccumulatorValue(value); 1688 VisitForAccumulatorValue(value);
1689 __ mov(StoreIC::ValueRegister(), result_register()); 1689 __ mov(StoreIC::ValueRegister(), result_register());
1690 ASSERT(StoreIC::ValueRegister().is(a0)); 1690 DCHECK(StoreIC::ValueRegister().is(a0));
1691 __ li(StoreIC::NameRegister(), Operand(key->value())); 1691 __ li(StoreIC::NameRegister(), Operand(key->value()));
1692 __ ld(StoreIC::ReceiverRegister(), MemOperand(sp)); 1692 __ ld(StoreIC::ReceiverRegister(), MemOperand(sp));
1693 CallStoreIC(key->LiteralFeedbackId()); 1693 CallStoreIC(key->LiteralFeedbackId());
1694 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1694 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1695 } else { 1695 } else {
1696 VisitForEffect(value); 1696 VisitForEffect(value);
1697 } 1697 }
1698 break; 1698 break;
1699 } 1699 }
1700 // Duplicate receiver on stack. 1700 // Duplicate receiver on stack.
(...skipping 38 matching lines...)
1739 __ push(a0); 1739 __ push(a0);
1740 VisitForStackValue(it->first); 1740 VisitForStackValue(it->first);
1741 EmitAccessor(it->second->getter); 1741 EmitAccessor(it->second->getter);
1742 EmitAccessor(it->second->setter); 1742 EmitAccessor(it->second->setter);
1743 __ li(a0, Operand(Smi::FromInt(NONE))); 1743 __ li(a0, Operand(Smi::FromInt(NONE)));
1744 __ push(a0); 1744 __ push(a0);
1745 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); 1745 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1746 } 1746 }
1747 1747
1748 if (expr->has_function()) { 1748 if (expr->has_function()) {
1749 ASSERT(result_saved); 1749 DCHECK(result_saved);
1750 __ ld(a0, MemOperand(sp)); 1750 __ ld(a0, MemOperand(sp));
1751 __ push(a0); 1751 __ push(a0);
1752 __ CallRuntime(Runtime::kToFastProperties, 1); 1752 __ CallRuntime(Runtime::kToFastProperties, 1);
1753 } 1753 }
1754 1754
1755 if (result_saved) { 1755 if (result_saved) {
1756 context()->PlugTOS(); 1756 context()->PlugTOS();
1757 } else { 1757 } else {
1758 context()->Plug(v0); 1758 context()->Plug(v0);
1759 } 1759 }
1760 } 1760 }
1761 1761
1762 1762
1763 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1763 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1764 Comment cmnt(masm_, "[ ArrayLiteral"); 1764 Comment cmnt(masm_, "[ ArrayLiteral");
1765 1765
1766 expr->BuildConstantElements(isolate()); 1766 expr->BuildConstantElements(isolate());
1767 int flags = expr->depth() == 1 1767 int flags = expr->depth() == 1
1768 ? ArrayLiteral::kShallowElements 1768 ? ArrayLiteral::kShallowElements
1769 : ArrayLiteral::kNoFlags; 1769 : ArrayLiteral::kNoFlags;
1770 1770
1771 ZoneList<Expression*>* subexprs = expr->values(); 1771 ZoneList<Expression*>* subexprs = expr->values();
1772 int length = subexprs->length(); 1772 int length = subexprs->length();
1773 1773
1774 Handle<FixedArray> constant_elements = expr->constant_elements(); 1774 Handle<FixedArray> constant_elements = expr->constant_elements();
1775 ASSERT_EQ(2, constant_elements->length()); 1775 DCHECK_EQ(2, constant_elements->length());
1776 ElementsKind constant_elements_kind = 1776 ElementsKind constant_elements_kind =
1777 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1777 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1778 bool has_fast_elements = 1778 bool has_fast_elements =
1779 IsFastObjectElementsKind(constant_elements_kind); 1779 IsFastObjectElementsKind(constant_elements_kind);
1780 Handle<FixedArrayBase> constant_elements_values( 1780 Handle<FixedArrayBase> constant_elements_values(
1781 FixedArrayBase::cast(constant_elements->get(1))); 1781 FixedArrayBase::cast(constant_elements->get(1)));
1782 1782
1783 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; 1783 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1784 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { 1784 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1785 // If the only customer of allocation sites is transitioning, then 1785 // If the only customer of allocation sites is transitioning, then
(...skipping 54 matching lines...)
1840 if (result_saved) { 1840 if (result_saved) {
1841 __ Pop(); // literal index 1841 __ Pop(); // literal index
1842 context()->PlugTOS(); 1842 context()->PlugTOS();
1843 } else { 1843 } else {
1844 context()->Plug(v0); 1844 context()->Plug(v0);
1845 } 1845 }
1846 } 1846 }
1847 1847
1848 1848
1849 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1849 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1850 ASSERT(expr->target()->IsValidReferenceExpression()); 1850 DCHECK(expr->target()->IsValidReferenceExpression());
1851 1851
1852 Comment cmnt(masm_, "[ Assignment"); 1852 Comment cmnt(masm_, "[ Assignment");
1853 1853
1854 // Left-hand side can only be a property, a global or a (parameter or local) 1854 // Left-hand side can only be a property, a global or a (parameter or local)
1855 // slot. 1855 // slot.
1856 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 1856 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1857 LhsKind assign_type = VARIABLE; 1857 LhsKind assign_type = VARIABLE;
1858 Property* property = expr->target()->AsProperty(); 1858 Property* property = expr->target()->AsProperty();
1859 if (property != NULL) { 1859 if (property != NULL) {
1860 assign_type = (property->key()->IsPropertyName()) 1860 assign_type = (property->key()->IsPropertyName())
(...skipping 110 matching lines...)
1971 case Yield::INITIAL: { 1971 case Yield::INITIAL: {
1972 Label suspend, continuation, post_runtime, resume; 1972 Label suspend, continuation, post_runtime, resume;
1973 1973
1974 __ jmp(&suspend); 1974 __ jmp(&suspend);
1975 1975
1976 __ bind(&continuation); 1976 __ bind(&continuation);
1977 __ jmp(&resume); 1977 __ jmp(&resume);
1978 1978
1979 __ bind(&suspend); 1979 __ bind(&suspend);
1980 VisitForAccumulatorValue(expr->generator_object()); 1980 VisitForAccumulatorValue(expr->generator_object());
1981 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 1981 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1982 __ li(a1, Operand(Smi::FromInt(continuation.pos()))); 1982 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1983 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset)); 1983 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1984 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset)); 1984 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1985 __ mov(a1, cp); 1985 __ mov(a1, cp);
1986 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2, 1986 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1987 kRAHasBeenSaved, kDontSaveFPRegs); 1987 kRAHasBeenSaved, kDontSaveFPRegs);
1988 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); 1988 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1989 __ Branch(&post_runtime, eq, sp, Operand(a1)); 1989 __ Branch(&post_runtime, eq, sp, Operand(a1));
1990 __ push(v0); // generator object 1990 __ push(v0); // generator object
1991 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 1991 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
(...skipping 52 matching lines...)
2044 const int handler_size = StackHandlerConstants::kSize; 2044 const int handler_size = StackHandlerConstants::kSize;
2045 __ push(a0); // result 2045 __ push(a0); // result
2046 __ jmp(&l_suspend); 2046 __ jmp(&l_suspend);
2047 __ bind(&l_continuation); 2047 __ bind(&l_continuation);
2048 __ mov(a0, v0); 2048 __ mov(a0, v0);
2049 __ jmp(&l_resume); 2049 __ jmp(&l_resume);
2050 __ bind(&l_suspend); 2050 __ bind(&l_suspend);
2051 const int generator_object_depth = kPointerSize + handler_size; 2051 const int generator_object_depth = kPointerSize + handler_size;
2052 __ ld(a0, MemOperand(sp, generator_object_depth)); 2052 __ ld(a0, MemOperand(sp, generator_object_depth));
2053 __ push(a0); // g 2053 __ push(a0); // g
2054 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2054 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2055 __ li(a1, Operand(Smi::FromInt(l_continuation.pos()))); 2055 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2056 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset)); 2056 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2057 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset)); 2057 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2058 __ mov(a1, cp); 2058 __ mov(a1, cp);
2059 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2, 2059 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2060 kRAHasBeenSaved, kDontSaveFPRegs); 2060 kRAHasBeenSaved, kDontSaveFPRegs);
2061 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2061 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2062 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2062 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2063 __ pop(v0); // result 2063 __ pop(v0); // result
2064 EmitReturnSequence(); 2064 EmitReturnSequence();
(...skipping 134 matching lines...)
2199 2199
2200 // Otherwise, we push holes for the operand stack and call the runtime to fix 2200 // Otherwise, we push holes for the operand stack and call the runtime to fix
2201 // up the stack and the handlers. 2201 // up the stack and the handlers.
2202 Label push_operand_holes, call_resume; 2202 Label push_operand_holes, call_resume;
2203 __ bind(&push_operand_holes); 2203 __ bind(&push_operand_holes);
2204 __ Dsubu(a3, a3, Operand(1)); 2204 __ Dsubu(a3, a3, Operand(1));
2205 __ Branch(&call_resume, lt, a3, Operand(zero_reg)); 2205 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2206 __ push(a2); 2206 __ push(a2);
2207 __ Branch(&push_operand_holes); 2207 __ Branch(&push_operand_holes);
2208 __ bind(&call_resume); 2208 __ bind(&call_resume);
2209 ASSERT(!result_register().is(a1)); 2209 DCHECK(!result_register().is(a1));
2210 __ Push(a1, result_register()); 2210 __ Push(a1, result_register());
2211 __ Push(Smi::FromInt(resume_mode)); 2211 __ Push(Smi::FromInt(resume_mode));
2212 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); 2212 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2213 // Not reached: the runtime call returns elsewhere. 2213 // Not reached: the runtime call returns elsewhere.
2214 __ stop("not-reached"); 2214 __ stop("not-reached");
2215 2215
2216 // Reach here when generator is closed. 2216 // Reach here when generator is closed.
2217 __ bind(&closed_state); 2217 __ bind(&closed_state);
2218 if (resume_mode == JSGeneratorObject::NEXT) { 2218 if (resume_mode == JSGeneratorObject::NEXT) {
2219 // Return completed iterator result when generator is closed. 2219 // Return completed iterator result when generator is closed.
(...skipping 31 matching lines...)
2251 __ Push(Smi::FromInt(map->instance_size())); 2251 __ Push(Smi::FromInt(map->instance_size()));
2252 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 2252 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2253 __ ld(context_register(), 2253 __ ld(context_register(),
2254 MemOperand(fp, StandardFrameConstants::kContextOffset)); 2254 MemOperand(fp, StandardFrameConstants::kContextOffset));
2255 2255
2256 __ bind(&allocated); 2256 __ bind(&allocated);
2257 __ li(a1, Operand(map)); 2257 __ li(a1, Operand(map));
2258 __ pop(a2); 2258 __ pop(a2);
2259 __ li(a3, Operand(isolate()->factory()->ToBoolean(done))); 2259 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2260 __ li(a4, Operand(isolate()->factory()->empty_fixed_array())); 2260 __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2261 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); 2261 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2262 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); 2262 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2263 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset)); 2263 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2264 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset)); 2264 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2265 __ sd(a2, 2265 __ sd(a2,
2266 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset)); 2266 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2267 __ sd(a3, 2267 __ sd(a3,
2268 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset)); 2268 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2269 2269
2270 // Only the value field needs a write barrier, as the other values are in the 2270 // Only the value field needs a write barrier, as the other values are in the
2271 // root set. 2271 // root set.
(...skipping 93 matching lines...)
2365 break; 2365 break;
2366 case Token::MUL: { 2366 case Token::MUL: {
2367 __ Dmulh(v0, left, right); 2367 __ Dmulh(v0, left, right);
2368 __ dsra32(scratch2, v0, 0); 2368 __ dsra32(scratch2, v0, 0);
2369 __ sra(scratch1, v0, 31); 2369 __ sra(scratch1, v0, 31);
2370 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1)); 2370 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2371 __ SmiTag(v0); 2371 __ SmiTag(v0);
2372 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg)); 2372 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2373 __ Daddu(scratch2, right, left); 2373 __ Daddu(scratch2, right, left);
2374 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); 2374 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2375 ASSERT(Smi::FromInt(0) == 0); 2375 DCHECK(Smi::FromInt(0) == 0);
2376 __ mov(v0, zero_reg); 2376 __ mov(v0, zero_reg);
2377 break; 2377 break;
2378 } 2378 }
2379 case Token::BIT_OR: 2379 case Token::BIT_OR:
2380 __ Or(v0, left, Operand(right)); 2380 __ Or(v0, left, Operand(right));
2381 break; 2381 break;
2382 case Token::BIT_AND: 2382 case Token::BIT_AND:
2383 __ And(v0, left, Operand(right)); 2383 __ And(v0, left, Operand(right));
2384 break; 2384 break;
2385 case Token::BIT_XOR: 2385 case Token::BIT_XOR:
(...skipping 15 matching lines...)
2401 __ pop(a1); 2401 __ pop(a1);
2402 BinaryOpICStub stub(isolate(), op, mode); 2402 BinaryOpICStub stub(isolate(), op, mode);
2403 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2403 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2404 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); 2404 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2405 patch_site.EmitPatchInfo(); 2405 patch_site.EmitPatchInfo();
2406 context()->Plug(v0); 2406 context()->Plug(v0);
2407 } 2407 }
2408 2408
2409 2409
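The inlined Token::MUL fast path above multiplies the operands with Dmulh, bails out to the stub when the upper half of the product is not a sign extension of the lower half (overflow), and, when the product is zero, additionally bails out if left + right is negative, because the mathematically correct result would then be -0, which a smi cannot represent. A minimal C++ sketch of the same decision on untagged 32-bit values (the int64_t widening stands in for the Dmulh/dsra32/sra pair; names are illustrative, not part of the patch):

// --- illustrative sketch, not part of the patch ---
#include <cstdint>
#include <optional>

// Returns the smi result of left * right, or nullopt when the generated code
// would jump to the BinaryOpIC stub (overflow or a -0 result).
std::optional<std::int32_t> SmiMulFastPath(std::int32_t left, std::int32_t right) {
  std::int64_t product = static_cast<std::int64_t>(left) * static_cast<std::int64_t>(right);
  if (product != static_cast<std::int32_t>(product)) return std::nullopt;  // overflow
  if (product == 0 && static_cast<std::int64_t>(left) + right < 0) {
    return std::nullopt;  // would be -0; Smi::FromInt(0) can only encode +0
  }
  return static_cast<std::int32_t>(product);
}
// --- end sketch ---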
2410 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2410 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2411 ASSERT(expr->IsValidReferenceExpression()); 2411 DCHECK(expr->IsValidReferenceExpression());
2412 2412
2413 // Left-hand side can only be a property, a global or a (parameter or local) 2413 // Left-hand side can only be a property, a global or a (parameter or local)
2414 // slot. 2414 // slot.
2415 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2415 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2416 LhsKind assign_type = VARIABLE; 2416 LhsKind assign_type = VARIABLE;
2417 Property* prop = expr->AsProperty(); 2417 Property* prop = expr->AsProperty();
2418 if (prop != NULL) { 2418 if (prop != NULL) {
2419 assign_type = (prop->key()->IsPropertyName()) 2419 assign_type = (prop->key()->IsPropertyName())
2420 ? NAMED_PROPERTY 2420 ? NAMED_PROPERTY
2421 : KEYED_PROPERTY; 2421 : KEYED_PROPERTY;
(...skipping 48 matching lines...)
2470 2470
2471 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { 2471 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2472 if (var->IsUnallocated()) { 2472 if (var->IsUnallocated()) {
2473 // Global var, const, or let. 2473 // Global var, const, or let.
2474 __ mov(StoreIC::ValueRegister(), result_register()); 2474 __ mov(StoreIC::ValueRegister(), result_register());
2475 __ li(StoreIC::NameRegister(), Operand(var->name())); 2475 __ li(StoreIC::NameRegister(), Operand(var->name()));
2476 __ ld(StoreIC::ReceiverRegister(), GlobalObjectOperand()); 2476 __ ld(StoreIC::ReceiverRegister(), GlobalObjectOperand());
2477 CallStoreIC(); 2477 CallStoreIC();
2478 } else if (op == Token::INIT_CONST_LEGACY) { 2478 } else if (op == Token::INIT_CONST_LEGACY) {
2479 // Const initializers need a write barrier. 2479 // Const initializers need a write barrier.
2480 ASSERT(!var->IsParameter()); // No const parameters. 2480 DCHECK(!var->IsParameter()); // No const parameters.
2481 if (var->IsLookupSlot()) { 2481 if (var->IsLookupSlot()) {
2482 __ li(a0, Operand(var->name())); 2482 __ li(a0, Operand(var->name()));
2483 __ Push(v0, cp, a0); // Context and name. 2483 __ Push(v0, cp, a0); // Context and name.
2484 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); 2484 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2485 } else { 2485 } else {
2486 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2486 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2487 Label skip; 2487 Label skip;
2488 MemOperand location = VarOperand(var, a1); 2488 MemOperand location = VarOperand(var, a1);
2489 __ ld(a2, location); 2489 __ ld(a2, location);
2490 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 2490 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2491 __ Branch(&skip, ne, a2, Operand(at)); 2491 __ Branch(&skip, ne, a2, Operand(at));
2492 EmitStoreToStackLocalOrContextSlot(var, location); 2492 EmitStoreToStackLocalOrContextSlot(var, location);
2493 __ bind(&skip); 2493 __ bind(&skip);
2494 } 2494 }
2495 2495
2496 } else if (var->mode() == LET && op != Token::INIT_LET) { 2496 } else if (var->mode() == LET && op != Token::INIT_LET) {
2497 // Non-initializing assignment to let variable needs a write barrier. 2497 // Non-initializing assignment to let variable needs a write barrier.
2498 ASSERT(!var->IsLookupSlot()); 2498 DCHECK(!var->IsLookupSlot());
2499 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2499 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2500 Label assign; 2500 Label assign;
2501 MemOperand location = VarOperand(var, a1); 2501 MemOperand location = VarOperand(var, a1);
2502 __ ld(a3, location); 2502 __ ld(a3, location);
2503 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); 2503 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2504 __ Branch(&assign, ne, a3, Operand(a4)); 2504 __ Branch(&assign, ne, a3, Operand(a4));
2505 __ li(a3, Operand(var->name())); 2505 __ li(a3, Operand(var->name()));
2506 __ push(a3); 2506 __ push(a3);
2507 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2507 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2508 // Perform the assignment. 2508 // Perform the assignment.
2509 __ bind(&assign); 2509 __ bind(&assign);
2510 EmitStoreToStackLocalOrContextSlot(var, location); 2510 EmitStoreToStackLocalOrContextSlot(var, location);
2511 2511
2512 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { 2512 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2513 if (var->IsLookupSlot()) { 2513 if (var->IsLookupSlot()) {
2514 // Assignment to var. 2514 // Assignment to var.
2515 __ li(a4, Operand(var->name())); 2515 __ li(a4, Operand(var->name()));
2516 __ li(a3, Operand(Smi::FromInt(strict_mode()))); 2516 __ li(a3, Operand(Smi::FromInt(strict_mode())));
2517 // jssp[0] : mode. 2517 // jssp[0] : mode.
2518 // jssp[8] : name. 2518 // jssp[8] : name.
2519 // jssp[16] : context. 2519 // jssp[16] : context.
2520 // jssp[24] : value. 2520 // jssp[24] : value.
2521 __ Push(v0, cp, a4, a3); 2521 __ Push(v0, cp, a4, a3);
2522 __ CallRuntime(Runtime::kStoreLookupSlot, 4); 2522 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2523 } else { 2523 } else {
2524 // Assignment to var or initializing assignment to let/const in harmony 2524 // Assignment to var or initializing assignment to let/const in harmony
2525 // mode. 2525 // mode.
2526 ASSERT((var->IsStackAllocated() || var->IsContextSlot())); 2526 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2527 MemOperand location = VarOperand(var, a1); 2527 MemOperand location = VarOperand(var, a1);
2528 if (generate_debug_code_ && op == Token::INIT_LET) { 2528 if (generate_debug_code_ && op == Token::INIT_LET) {
2529 // Check for an uninitialized let binding. 2529 // Check for an uninitialized let binding.
2530 __ ld(a2, location); 2530 __ ld(a2, location);
2531 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); 2531 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2532 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4)); 2532 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2533 } 2533 }
2534 EmitStoreToStackLocalOrContextSlot(var, location); 2534 EmitStoreToStackLocalOrContextSlot(var, location);
2535 } 2535 }
2536 } 2536 }
2537 // Non-initializing assignments to consts are ignored. 2537 // Non-initializing assignments to consts are ignored.
2538 } 2538 }
2539 2539
2540 2540
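EmitVariableAssignment above uses the-hole sentinel for both special cases: a legacy const initializer only stores if the slot still holds the hole (later writes are silently dropped), while a non-initializing assignment to a let binding throws a ReferenceError while the slot still holds the hole. A small sketch of that control flow with the sentinel, the slot, and the error modeled by ordinary C++ (all names illustrative; write barriers omitted):

// --- illustrative sketch, not part of the patch ---
#include <cstdint>
#include <stdexcept>
#include <string>

static void* const kTheHoleSketch = reinterpret_cast<void*>(std::uintptr_t(-1));  // stand-in sentinel

// Token::INIT_CONST_LEGACY: only the first initialization sticks.
void InitializeLegacyConst(void** slot, void* value) {
  if (*slot != kTheHoleSketch) return;  // already initialized, skip the store
  *slot = value;
}

// LET and op != Token::INIT_LET: assigning before initialization is an error.
void AssignToLetBinding(void** slot, void* value, const std::string& name) {
  if (*slot == kTheHoleSketch) {
    throw std::runtime_error("ReferenceError: " + name + " is not initialized");
  }
  *slot = value;
}
// --- end sketch ---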
2541 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2541 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2542 // Assignment to a property, using a named store IC. 2542 // Assignment to a property, using a named store IC.
2543 Property* prop = expr->target()->AsProperty(); 2543 Property* prop = expr->target()->AsProperty();
2544 ASSERT(prop != NULL); 2544 DCHECK(prop != NULL);
2545 ASSERT(prop->key()->IsLiteral()); 2545 DCHECK(prop->key()->IsLiteral());
2546 2546
2547 // Record source code position before IC call. 2547 // Record source code position before IC call.
2548 SetSourcePosition(expr->position()); 2548 SetSourcePosition(expr->position());
2549 __ mov(StoreIC::ValueRegister(), result_register()); 2549 __ mov(StoreIC::ValueRegister(), result_register());
2550 __ li(StoreIC::NameRegister(), Operand(prop->key()->AsLiteral()->value())); 2550 __ li(StoreIC::NameRegister(), Operand(prop->key()->AsLiteral()->value()));
2551 __ pop(StoreIC::ReceiverRegister()); 2551 __ pop(StoreIC::ReceiverRegister());
2552 CallStoreIC(expr->AssignmentFeedbackId()); 2552 CallStoreIC(expr->AssignmentFeedbackId());
2553 2553
2554 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2554 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2555 context()->Plug(v0); 2555 context()->Plug(v0);
2556 } 2556 }
2557 2557
2558 2558
2559 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2559 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2560 // Assignment to a property, using a keyed store IC. 2560 // Assignment to a property, using a keyed store IC.
2561 2561
2562 // Record source code position before IC call. 2562 // Record source code position before IC call.
2563 SetSourcePosition(expr->position()); 2563 SetSourcePosition(expr->position());
2564 // Call keyed store IC. 2564 // Call keyed store IC.
2565 // The arguments are: 2565 // The arguments are:
2566 // - a0 is the value, 2566 // - a0 is the value,
2567 // - a1 is the key, 2567 // - a1 is the key,
2568 // - a2 is the receiver. 2568 // - a2 is the receiver.
2569 __ mov(KeyedStoreIC::ValueRegister(), result_register()); 2569 __ mov(KeyedStoreIC::ValueRegister(), result_register());
2570 __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister()); 2570 __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister());
2571 ASSERT(KeyedStoreIC::ValueRegister().is(a0)); 2571 DCHECK(KeyedStoreIC::ValueRegister().is(a0));
2572 2572
2573 Handle<Code> ic = strict_mode() == SLOPPY 2573 Handle<Code> ic = strict_mode() == SLOPPY
2574 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2574 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2575 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2575 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2576 CallIC(ic, expr->AssignmentFeedbackId()); 2576 CallIC(ic, expr->AssignmentFeedbackId());
2577 2577
2578 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2578 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2579 context()->Plug(v0); 2579 context()->Plug(v0);
2580 } 2580 }
2581 2581
(...skipping 38 matching lines...)
2620 if (call_type == CallIC::FUNCTION) { 2620 if (call_type == CallIC::FUNCTION) {
2621 { StackValueContext context(this); 2621 { StackValueContext context(this);
2622 EmitVariableLoad(callee->AsVariableProxy()); 2622 EmitVariableLoad(callee->AsVariableProxy());
2623 PrepareForBailout(callee, NO_REGISTERS); 2623 PrepareForBailout(callee, NO_REGISTERS);
2624 } 2624 }
2625 // Push undefined as receiver. This is patched in the method prologue if it 2625 // Push undefined as receiver. This is patched in the method prologue if it
2626 // is a sloppy mode method. 2626 // is a sloppy mode method.
2627 __ Push(isolate()->factory()->undefined_value()); 2627 __ Push(isolate()->factory()->undefined_value());
2628 } else { 2628 } else {
2629 // Load the function from the receiver. 2629 // Load the function from the receiver.
2630 ASSERT(callee->IsProperty()); 2630 DCHECK(callee->IsProperty());
2631 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); 2631 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
2632 EmitNamedPropertyLoad(callee->AsProperty()); 2632 EmitNamedPropertyLoad(callee->AsProperty());
2633 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2633 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2634 // Push the target function under the receiver. 2634 // Push the target function under the receiver.
2635 __ ld(at, MemOperand(sp, 0)); 2635 __ ld(at, MemOperand(sp, 0));
2636 __ push(at); 2636 __ push(at);
2637 __ sd(v0, MemOperand(sp, kPointerSize)); 2637 __ sd(v0, MemOperand(sp, kPointerSize));
2638 } 2638 }
2639 2639
2640 EmitCall(expr, call_type); 2640 EmitCall(expr, call_type);
2641 } 2641 }
2642 2642
2643 2643
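The IC call path above (and the keyed variant that follows) ends by pushing the loaded target function under the receiver: the current top of stack is duplicated and the function is then stored into the slot the receiver used to occupy, so the receiver stays on top with the function directly below it. The same shuffle on an array-backed stack, purely for illustration:

// --- illustrative sketch, not part of the patch ---
#include <cstdint>
#include <vector>

// back() is the top of stack. Mirrors: ld at,[sp]; push at; sd v0,[sp + kPointerSize].
void PushFunctionUnderReceiver(std::vector<std::intptr_t>* stack, std::intptr_t function) {
  std::intptr_t receiver = stack->back();   // ld at, MemOperand(sp, 0)
  stack->push_back(receiver);               // push at (receiver stays on top)
  (*stack)[stack->size() - 2] = function;   // sd v0, MemOperand(sp, kPointerSize)
}
// --- end sketch ---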
2644 // Code common for calls using the IC. 2644 // Code common for calls using the IC.
2645 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2645 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2646 Expression* key) { 2646 Expression* key) {
2647 // Load the key. 2647 // Load the key.
2648 VisitForAccumulatorValue(key); 2648 VisitForAccumulatorValue(key);
2649 2649
2650 Expression* callee = expr->expression(); 2650 Expression* callee = expr->expression();
2651 2651
2652 // Load the function from the receiver. 2652 // Load the function from the receiver.
2653 ASSERT(callee->IsProperty()); 2653 DCHECK(callee->IsProperty());
2654 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); 2654 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
2655 __ Move(LoadIC::NameRegister(), v0); 2655 __ Move(LoadIC::NameRegister(), v0);
2656 EmitKeyedPropertyLoad(callee->AsProperty()); 2656 EmitKeyedPropertyLoad(callee->AsProperty());
2657 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2657 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2658 2658
2659 // Push the target function under the receiver. 2659 // Push the target function under the receiver.
2660 __ ld(at, MemOperand(sp, 0)); 2660 __ ld(at, MemOperand(sp, 0));
2661 __ push(at); 2661 __ push(at);
2662 __ sd(v0, MemOperand(sp, kPointerSize)); 2662 __ sd(v0, MemOperand(sp, kPointerSize));
2663 2663
(...skipping 109 matching lines...)
2773 2773
2774 { PreservePositionScope scope(masm()->positions_recorder()); 2774 { PreservePositionScope scope(masm()->positions_recorder());
2775 // Generate code for loading from variables potentially shadowed 2775 // Generate code for loading from variables potentially shadowed
2776 // by eval-introduced variables. 2776 // by eval-introduced variables.
2777 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done); 2777 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2778 } 2778 }
2779 2779
2780 __ bind(&slow); 2780 __ bind(&slow);
2781 // Call the runtime to find the function to call (returned in v0) 2781 // Call the runtime to find the function to call (returned in v0)
2782 // and the object holding it (returned in v1). 2782 // and the object holding it (returned in v1).
2783 ASSERT(!context_register().is(a2)); 2783 DCHECK(!context_register().is(a2));
2784 __ li(a2, Operand(proxy->name())); 2784 __ li(a2, Operand(proxy->name()));
2785 __ Push(context_register(), a2); 2785 __ Push(context_register(), a2);
2786 __ CallRuntime(Runtime::kLoadLookupSlot, 2); 2786 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2787 __ Push(v0, v1); // Function, receiver. 2787 __ Push(v0, v1); // Function, receiver.
2788 2788
2789 // If fast case code has been generated, emit code to push the 2789 // If fast case code has been generated, emit code to push the
2790 // function and receiver and have the slow path jump around this 2790 // function and receiver and have the slow path jump around this
2791 // code. 2791 // code.
2792 if (done.is_linked()) { 2792 if (done.is_linked()) {
2793 Label call; 2793 Label call;
(...skipping 15 matching lines...)
2809 Property* property = callee->AsProperty(); 2809 Property* property = callee->AsProperty();
2810 { PreservePositionScope scope(masm()->positions_recorder()); 2810 { PreservePositionScope scope(masm()->positions_recorder());
2811 VisitForStackValue(property->obj()); 2811 VisitForStackValue(property->obj());
2812 } 2812 }
2813 if (property->key()->IsPropertyName()) { 2813 if (property->key()->IsPropertyName()) {
2814 EmitCallWithLoadIC(expr); 2814 EmitCallWithLoadIC(expr);
2815 } else { 2815 } else {
2816 EmitKeyedCallWithLoadIC(expr, property->key()); 2816 EmitKeyedCallWithLoadIC(expr, property->key());
2817 } 2817 }
2818 } else { 2818 } else {
2819 ASSERT(call_type == Call::OTHER_CALL); 2819 DCHECK(call_type == Call::OTHER_CALL);
2820 // Call to an arbitrary expression not handled specially above. 2820 // Call to an arbitrary expression not handled specially above.
2821 { PreservePositionScope scope(masm()->positions_recorder()); 2821 { PreservePositionScope scope(masm()->positions_recorder());
2822 VisitForStackValue(callee); 2822 VisitForStackValue(callee);
2823 } 2823 }
2824 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); 2824 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2825 __ push(a1); 2825 __ push(a1);
2826 // Emit function call. 2826 // Emit function call.
2827 EmitCall(expr); 2827 EmitCall(expr);
2828 } 2828 }
2829 2829
2830 #ifdef DEBUG 2830 #ifdef DEBUG
2831 // RecordJSReturnSite should have been called. 2831 // RecordJSReturnSite should have been called.
2832 ASSERT(expr->return_is_recorded_); 2832 DCHECK(expr->return_is_recorded_);
2833 #endif 2833 #endif
2834 } 2834 }
2835 2835
2836 2836
2837 void FullCodeGenerator::VisitCallNew(CallNew* expr) { 2837 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2838 Comment cmnt(masm_, "[ CallNew"); 2838 Comment cmnt(masm_, "[ CallNew");
2839 // According to ECMA-262, section 11.2.2, page 44, the function 2839 // According to ECMA-262, section 11.2.2, page 44, the function
2840 // expression in new calls must be evaluated before the 2840 // expression in new calls must be evaluated before the
2841 // arguments. 2841 // arguments.
2842 2842
(...skipping 12 matching lines...)
2855 // constructor invocation. 2855 // constructor invocation.
2856 SetSourcePosition(expr->position()); 2856 SetSourcePosition(expr->position());
2857 2857
2858 // Load function and argument count into a1 and a0. 2858 // Load function and argument count into a1 and a0.
2859 __ li(a0, Operand(arg_count)); 2859 __ li(a0, Operand(arg_count));
2860 __ ld(a1, MemOperand(sp, arg_count * kPointerSize)); 2860 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2861 2861
2862 // Record call targets in unoptimized code. 2862 // Record call targets in unoptimized code.
2863 if (FLAG_pretenuring_call_new) { 2863 if (FLAG_pretenuring_call_new) {
2864 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); 2864 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2865 ASSERT(expr->AllocationSiteFeedbackSlot() == 2865 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2866 expr->CallNewFeedbackSlot() + 1); 2866 expr->CallNewFeedbackSlot() + 1);
2867 } 2867 }
2868 2868
2869 __ li(a2, FeedbackVector()); 2869 __ li(a2, FeedbackVector());
2870 __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); 2870 __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2871 2871
2872 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); 2872 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2873 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); 2873 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2874 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2874 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2875 context()->Plug(v0); 2875 context()->Plug(v0);
2876 } 2876 }
2877 2877
2878 2878
2879 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2879 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2880 ZoneList<Expression*>* args = expr->arguments(); 2880 ZoneList<Expression*>* args = expr->arguments();
2881 ASSERT(args->length() == 1); 2881 DCHECK(args->length() == 1);
2882 2882
2883 VisitForAccumulatorValue(args->at(0)); 2883 VisitForAccumulatorValue(args->at(0));
2884 2884
2885 Label materialize_true, materialize_false; 2885 Label materialize_true, materialize_false;
2886 Label* if_true = NULL; 2886 Label* if_true = NULL;
2887 Label* if_false = NULL; 2887 Label* if_false = NULL;
2888 Label* fall_through = NULL; 2888 Label* fall_through = NULL;
2889 context()->PrepareTest(&materialize_true, &materialize_false, 2889 context()->PrepareTest(&materialize_true, &materialize_false,
2890 &if_true, &if_false, &fall_through); 2890 &if_true, &if_false, &fall_through);
2891 2891
2892 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2892 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2893 __ SmiTst(v0, a4); 2893 __ SmiTst(v0, a4);
2894 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through); 2894 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
2895 2895
2896 context()->Plug(if_true, if_false); 2896 context()->Plug(if_true, if_false);
2897 } 2897 }
2898 2898
2899 2899
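EmitIsSmi reduces %_IsSmi to a single tag test: SmiTst masks the value with the smi tag mask and the Split takes the true branch when the result is zero. With kSmiTag == 0 and kSmiTagSize == 1 (see the STATIC_ASSERT further down in this file), that is just the low bit of the tagged word. As a one-liner in C++ (illustrative):

// --- illustrative sketch, not part of the patch ---
#include <cstdint>

// A tagged word is a smi iff its low tag bit is clear (kSmiTag == 0, mask == 1).
inline bool IsSmiTagged(std::uintptr_t tagged_word) { return (tagged_word & 1u) == 0; }
// --- end sketch ---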
2900 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 2900 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2901 ZoneList<Expression*>* args = expr->arguments(); 2901 ZoneList<Expression*>* args = expr->arguments();
2902 ASSERT(args->length() == 1); 2902 DCHECK(args->length() == 1);
2903 2903
2904 VisitForAccumulatorValue(args->at(0)); 2904 VisitForAccumulatorValue(args->at(0));
2905 2905
2906 Label materialize_true, materialize_false; 2906 Label materialize_true, materialize_false;
2907 Label* if_true = NULL; 2907 Label* if_true = NULL;
2908 Label* if_false = NULL; 2908 Label* if_false = NULL;
2909 Label* fall_through = NULL; 2909 Label* fall_through = NULL;
2910 context()->PrepareTest(&materialize_true, &materialize_false, 2910 context()->PrepareTest(&materialize_true, &materialize_false,
2911 &if_true, &if_false, &fall_through); 2911 &if_true, &if_false, &fall_through);
2912 2912
2913 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2913 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2914 __ NonNegativeSmiTst(v0, at); 2914 __ NonNegativeSmiTst(v0, at);
2915 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through); 2915 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
2916 2916
2917 context()->Plug(if_true, if_false); 2917 context()->Plug(if_true, if_false);
2918 } 2918 }
2919 2919
2920 2920
2921 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 2921 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2922 ZoneList<Expression*>* args = expr->arguments(); 2922 ZoneList<Expression*>* args = expr->arguments();
2923 ASSERT(args->length() == 1); 2923 DCHECK(args->length() == 1);
2924 2924
2925 VisitForAccumulatorValue(args->at(0)); 2925 VisitForAccumulatorValue(args->at(0));
2926 2926
2927 Label materialize_true, materialize_false; 2927 Label materialize_true, materialize_false;
2928 Label* if_true = NULL; 2928 Label* if_true = NULL;
2929 Label* if_false = NULL; 2929 Label* if_false = NULL;
2930 Label* fall_through = NULL; 2930 Label* fall_through = NULL;
2931 context()->PrepareTest(&materialize_true, &materialize_false, 2931 context()->PrepareTest(&materialize_true, &materialize_false,
2932 &if_true, &if_false, &fall_through); 2932 &if_true, &if_false, &fall_through);
2933 2933
(...skipping 10 matching lines...)
2944 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2944 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2945 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE), 2945 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
2946 if_true, if_false, fall_through); 2946 if_true, if_false, fall_through);
2947 2947
2948 context()->Plug(if_true, if_false); 2948 context()->Plug(if_true, if_false);
2949 } 2949 }
2950 2950
2951 2951
2952 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 2952 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2953 ZoneList<Expression*>* args = expr->arguments(); 2953 ZoneList<Expression*>* args = expr->arguments();
2954 ASSERT(args->length() == 1); 2954 DCHECK(args->length() == 1);
2955 2955
2956 VisitForAccumulatorValue(args->at(0)); 2956 VisitForAccumulatorValue(args->at(0));
2957 2957
2958 Label materialize_true, materialize_false; 2958 Label materialize_true, materialize_false;
2959 Label* if_true = NULL; 2959 Label* if_true = NULL;
2960 Label* if_false = NULL; 2960 Label* if_false = NULL;
2961 Label* fall_through = NULL; 2961 Label* fall_through = NULL;
2962 context()->PrepareTest(&materialize_true, &materialize_false, 2962 context()->PrepareTest(&materialize_true, &materialize_false,
2963 &if_true, &if_false, &fall_through); 2963 &if_true, &if_false, &fall_through);
2964 2964
2965 __ JumpIfSmi(v0, if_false); 2965 __ JumpIfSmi(v0, if_false);
2966 __ GetObjectType(v0, a1, a1); 2966 __ GetObjectType(v0, a1, a1);
2967 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2967 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2968 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE), 2968 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
2969 if_true, if_false, fall_through); 2969 if_true, if_false, fall_through);
2970 2970
2971 context()->Plug(if_true, if_false); 2971 context()->Plug(if_true, if_false);
2972 } 2972 }
2973 2973
2974 2974
2975 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 2975 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2976 ZoneList<Expression*>* args = expr->arguments(); 2976 ZoneList<Expression*>* args = expr->arguments();
2977 ASSERT(args->length() == 1); 2977 DCHECK(args->length() == 1);
2978 2978
2979 VisitForAccumulatorValue(args->at(0)); 2979 VisitForAccumulatorValue(args->at(0));
2980 2980
2981 Label materialize_true, materialize_false; 2981 Label materialize_true, materialize_false;
2982 Label* if_true = NULL; 2982 Label* if_true = NULL;
2983 Label* if_false = NULL; 2983 Label* if_false = NULL;
2984 Label* fall_through = NULL; 2984 Label* fall_through = NULL;
2985 context()->PrepareTest(&materialize_true, &materialize_false, 2985 context()->PrepareTest(&materialize_true, &materialize_false,
2986 &if_true, &if_false, &fall_through); 2986 &if_true, &if_false, &fall_through);
2987 2987
2988 __ JumpIfSmi(v0, if_false); 2988 __ JumpIfSmi(v0, if_false);
2989 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); 2989 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2990 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); 2990 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
2991 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2991 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2992 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); 2992 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2993 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through); 2993 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
2994 2994
2995 context()->Plug(if_true, if_false); 2995 context()->Plug(if_true, if_false);
2996 } 2996 }
2997 2997
2998 2998
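EmitIsUndetectableObject loads the argument's map, reads the one-byte bit field, and tests the Map::kIsUndetectable bit; the Split takes the true branch when the bit is set (undetectable objects such as document.all compare like undefined). The bit test itself in plain C++ (the bit position below is a placeholder, not the real Map layout):

// --- illustrative sketch, not part of the patch ---
#include <cstdint>

constexpr unsigned kIsUndetectableBitSketch = 4;  // placeholder bit position

inline bool IsUndetectable(std::uint8_t map_bit_field) {
  return (map_bit_field & (1u << kIsUndetectableBitSketch)) != 0;
}
// --- end sketch ---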
2999 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 2999 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3000 CallRuntime* expr) { 3000 CallRuntime* expr) {
3001 ZoneList<Expression*>* args = expr->arguments(); 3001 ZoneList<Expression*>* args = expr->arguments();
3002 ASSERT(args->length() == 1); 3002 DCHECK(args->length() == 1);
3003 3003
3004 VisitForAccumulatorValue(args->at(0)); 3004 VisitForAccumulatorValue(args->at(0));
3005 3005
3006 Label materialize_true, materialize_false, skip_lookup; 3006 Label materialize_true, materialize_false, skip_lookup;
3007 Label* if_true = NULL; 3007 Label* if_true = NULL;
3008 Label* if_false = NULL; 3008 Label* if_false = NULL;
3009 Label* fall_through = NULL; 3009 Label* fall_through = NULL;
3010 context()->PrepareTest(&materialize_true, &materialize_false, 3010 context()->PrepareTest(&materialize_true, &materialize_false,
3011 &if_true, &if_false, &fall_through); 3011 &if_true, &if_false, &fall_through);
3012 3012
(...skipping 67 matching lines...)
3080 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); 3080 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3081 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3081 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3082 Split(eq, a2, Operand(a3), if_true, if_false, fall_through); 3082 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3083 3083
3084 context()->Plug(if_true, if_false); 3084 context()->Plug(if_true, if_false);
3085 } 3085 }
3086 3086
3087 3087
3088 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 3088 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3089 ZoneList<Expression*>* args = expr->arguments(); 3089 ZoneList<Expression*>* args = expr->arguments();
3090 ASSERT(args->length() == 1); 3090 DCHECK(args->length() == 1);
3091 3091
3092 VisitForAccumulatorValue(args->at(0)); 3092 VisitForAccumulatorValue(args->at(0));
3093 3093
3094 Label materialize_true, materialize_false; 3094 Label materialize_true, materialize_false;
3095 Label* if_true = NULL; 3095 Label* if_true = NULL;
3096 Label* if_false = NULL; 3096 Label* if_false = NULL;
3097 Label* fall_through = NULL; 3097 Label* fall_through = NULL;
3098 context()->PrepareTest(&materialize_true, &materialize_false, 3098 context()->PrepareTest(&materialize_true, &materialize_false,
3099 &if_true, &if_false, &fall_through); 3099 &if_true, &if_false, &fall_through);
3100 3100
3101 __ JumpIfSmi(v0, if_false); 3101 __ JumpIfSmi(v0, if_false);
3102 __ GetObjectType(v0, a1, a2); 3102 __ GetObjectType(v0, a1, a2);
3103 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3103 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3104 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE)); 3104 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3105 __ Branch(if_false); 3105 __ Branch(if_false);
3106 3106
3107 context()->Plug(if_true, if_false); 3107 context()->Plug(if_true, if_false);
3108 } 3108 }
3109 3109
3110 3110
3111 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 3111 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3112 ZoneList<Expression*>* args = expr->arguments(); 3112 ZoneList<Expression*>* args = expr->arguments();
3113 ASSERT(args->length() == 1); 3113 DCHECK(args->length() == 1);
3114 3114
3115 VisitForAccumulatorValue(args->at(0)); 3115 VisitForAccumulatorValue(args->at(0));
3116 3116
3117 Label materialize_true, materialize_false; 3117 Label materialize_true, materialize_false;
3118 Label* if_true = NULL; 3118 Label* if_true = NULL;
3119 Label* if_false = NULL; 3119 Label* if_false = NULL;
3120 Label* fall_through = NULL; 3120 Label* fall_through = NULL;
3121 context()->PrepareTest(&materialize_true, &materialize_false, 3121 context()->PrepareTest(&materialize_true, &materialize_false,
3122 &if_true, &if_false, &fall_through); 3122 &if_true, &if_false, &fall_through);
3123 3123
3124 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); 3124 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3125 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset)); 3125 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3126 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); 3126 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3127 __ li(a4, 0x80000000); 3127 __ li(a4, 0x80000000);
3128 Label not_nan; 3128 Label not_nan;
3129 __ Branch(&not_nan, ne, a2, Operand(a4)); 3129 __ Branch(&not_nan, ne, a2, Operand(a4));
3130 __ mov(a4, zero_reg); 3130 __ mov(a4, zero_reg);
3131 __ mov(a2, a1); 3131 __ mov(a2, a1);
3132 __ bind(&not_nan); 3132 __ bind(&not_nan);
3133 3133
3134 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3134 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3135 Split(eq, a2, Operand(a4), if_true, if_false, fall_through); 3135 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3136 3136
3137 context()->Plug(if_true, if_false); 3137 context()->Plug(if_true, if_false);
3138 } 3138 }
3139 3139
3140 3140
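EmitIsMinusZero only answers true when the value is a heap number whose 32-bit exponent word is exactly 0x80000000 and whose mantissa word is zero, i.e. the IEEE-754 bit pattern of -0.0; any other exponent word takes the not_nan path and fails the final comparison. The same predicate expressed on a raw double (memcpy stands in for a bit_cast):

// --- illustrative sketch, not part of the patch ---
#include <cstdint>
#include <cstring>

inline bool IsMinusZero(double value) {
  std::uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);  // bit_cast the double
  return bits == 0x8000000000000000ULL;     // sign bit set, exponent and mantissa zero
}
// --- end sketch ---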
3141 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 3141 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3142 ZoneList<Expression*>* args = expr->arguments(); 3142 ZoneList<Expression*>* args = expr->arguments();
3143 ASSERT(args->length() == 1); 3143 DCHECK(args->length() == 1);
3144 3144
3145 VisitForAccumulatorValue(args->at(0)); 3145 VisitForAccumulatorValue(args->at(0));
3146 3146
3147 Label materialize_true, materialize_false; 3147 Label materialize_true, materialize_false;
3148 Label* if_true = NULL; 3148 Label* if_true = NULL;
3149 Label* if_false = NULL; 3149 Label* if_false = NULL;
3150 Label* fall_through = NULL; 3150 Label* fall_through = NULL;
3151 context()->PrepareTest(&materialize_true, &materialize_false, 3151 context()->PrepareTest(&materialize_true, &materialize_false,
3152 &if_true, &if_false, &fall_through); 3152 &if_true, &if_false, &fall_through);
3153 3153
3154 __ JumpIfSmi(v0, if_false); 3154 __ JumpIfSmi(v0, if_false);
3155 __ GetObjectType(v0, a1, a1); 3155 __ GetObjectType(v0, a1, a1);
3156 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3156 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3157 Split(eq, a1, Operand(JS_ARRAY_TYPE), 3157 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3158 if_true, if_false, fall_through); 3158 if_true, if_false, fall_through);
3159 3159
3160 context()->Plug(if_true, if_false); 3160 context()->Plug(if_true, if_false);
3161 } 3161 }
3162 3162
3163 3163
3164 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 3164 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3165 ZoneList<Expression*>* args = expr->arguments(); 3165 ZoneList<Expression*>* args = expr->arguments();
3166 ASSERT(args->length() == 1); 3166 DCHECK(args->length() == 1);
3167 3167
3168 VisitForAccumulatorValue(args->at(0)); 3168 VisitForAccumulatorValue(args->at(0));
3169 3169
3170 Label materialize_true, materialize_false; 3170 Label materialize_true, materialize_false;
3171 Label* if_true = NULL; 3171 Label* if_true = NULL;
3172 Label* if_false = NULL; 3172 Label* if_false = NULL;
3173 Label* fall_through = NULL; 3173 Label* fall_through = NULL;
3174 context()->PrepareTest(&materialize_true, &materialize_false, 3174 context()->PrepareTest(&materialize_true, &materialize_false,
3175 &if_true, &if_false, &fall_through); 3175 &if_true, &if_false, &fall_through);
3176 3176
3177 __ JumpIfSmi(v0, if_false); 3177 __ JumpIfSmi(v0, if_false);
3178 __ GetObjectType(v0, a1, a1); 3178 __ GetObjectType(v0, a1, a1);
3179 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3179 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3180 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through); 3180 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3181 3181
3182 context()->Plug(if_true, if_false); 3182 context()->Plug(if_true, if_false);
3183 } 3183 }
3184 3184
3185 3185
3186 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 3186 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3187 ASSERT(expr->arguments()->length() == 0); 3187 DCHECK(expr->arguments()->length() == 0);
3188 3188
3189 Label materialize_true, materialize_false; 3189 Label materialize_true, materialize_false;
3190 Label* if_true = NULL; 3190 Label* if_true = NULL;
3191 Label* if_false = NULL; 3191 Label* if_false = NULL;
3192 Label* fall_through = NULL; 3192 Label* fall_through = NULL;
3193 context()->PrepareTest(&materialize_true, &materialize_false, 3193 context()->PrepareTest(&materialize_true, &materialize_false,
3194 &if_true, &if_false, &fall_through); 3194 &if_true, &if_false, &fall_through);
3195 3195
3196 // Get the frame pointer for the calling frame. 3196 // Get the frame pointer for the calling frame.
3197 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3197 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
(...skipping 11 matching lines...)
3209 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3209 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3210 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)), 3210 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3211 if_true, if_false, fall_through); 3211 if_true, if_false, fall_through);
3212 3212
3213 context()->Plug(if_true, if_false); 3213 context()->Plug(if_true, if_false);
3214 } 3214 }
3215 3215
3216 3216
3217 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 3217 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3218 ZoneList<Expression*>* args = expr->arguments(); 3218 ZoneList<Expression*>* args = expr->arguments();
3219 ASSERT(args->length() == 2); 3219 DCHECK(args->length() == 2);
3220 3220
3221 // Load the two objects into registers and perform the comparison. 3221 // Load the two objects into registers and perform the comparison.
3222 VisitForStackValue(args->at(0)); 3222 VisitForStackValue(args->at(0));
3223 VisitForAccumulatorValue(args->at(1)); 3223 VisitForAccumulatorValue(args->at(1));
3224 3224
3225 Label materialize_true, materialize_false; 3225 Label materialize_true, materialize_false;
3226 Label* if_true = NULL; 3226 Label* if_true = NULL;
3227 Label* if_false = NULL; 3227 Label* if_false = NULL;
3228 Label* fall_through = NULL; 3228 Label* fall_through = NULL;
3229 context()->PrepareTest(&materialize_true, &materialize_false, 3229 context()->PrepareTest(&materialize_true, &materialize_false,
3230 &if_true, &if_false, &fall_through); 3230 &if_true, &if_false, &fall_through);
3231 3231
3232 __ pop(a1); 3232 __ pop(a1);
3233 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3233 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3234 Split(eq, v0, Operand(a1), if_true, if_false, fall_through); 3234 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3235 3235
3236 context()->Plug(if_true, if_false); 3236 context()->Plug(if_true, if_false);
3237 } 3237 }
3238 3238
3239 3239
3240 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3240 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3241 ZoneList<Expression*>* args = expr->arguments(); 3241 ZoneList<Expression*>* args = expr->arguments();
3242 ASSERT(args->length() == 1); 3242 DCHECK(args->length() == 1);
3243 3243
3244 // ArgumentsAccessStub expects the key in a1 and the formal 3244 // ArgumentsAccessStub expects the key in a1 and the formal
3245 // parameter count in a0. 3245 // parameter count in a0.
3246 VisitForAccumulatorValue(args->at(0)); 3246 VisitForAccumulatorValue(args->at(0));
3247 __ mov(a1, v0); 3247 __ mov(a1, v0);
3248 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3248 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3249 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); 3249 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3250 __ CallStub(&stub); 3250 __ CallStub(&stub);
3251 context()->Plug(v0); 3251 context()->Plug(v0);
3252 } 3252 }
3253 3253
3254 3254
3255 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 3255 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3256 ASSERT(expr->arguments()->length() == 0); 3256 DCHECK(expr->arguments()->length() == 0);
3257 Label exit; 3257 Label exit;
3258 // Get the number of formal parameters. 3258 // Get the number of formal parameters.
3259 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3259 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3260 3260
3261 // Check if the calling frame is an arguments adaptor frame. 3261 // Check if the calling frame is an arguments adaptor frame.
3262 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3262 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3263 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); 3263 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3264 __ Branch(&exit, ne, a3, 3264 __ Branch(&exit, ne, a3,
3265 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3265 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3266 3266
3267 // Arguments adaptor case: Read the arguments length from the 3267 // Arguments adaptor case: Read the arguments length from the
3268 // adaptor frame. 3268 // adaptor frame.
3269 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3269 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3270 3270
3271 __ bind(&exit); 3271 __ bind(&exit);
3272 context()->Plug(v0); 3272 context()->Plug(v0);
3273 } 3273 }
3274 3274
3275 3275
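EmitArgumentsLength starts from the function's formal parameter count, then looks one frame up through the caller fp: if the caller's context slot holds the ARGUMENTS_ADAPTOR marker, the real argument count is read from the adaptor frame's length slot instead. A toy model of that choice with the frame reduced to a struct (field names and the marker value are illustrative):

// --- illustrative sketch, not part of the patch ---
#include <cstdint>

constexpr std::intptr_t kArgumentsAdaptorMarkerSketch = 4;  // placeholder for the frame-type smi

struct CallerFrameSketch {
  std::intptr_t context_or_marker;  // StandardFrameConstants::kContextOffset slot
  std::intptr_t length;             // ArgumentsAdaptorFrameConstants::kLengthOffset slot
};

std::intptr_t ArgumentsLength(const CallerFrameSketch& caller, std::intptr_t formal_count) {
  if (caller.context_or_marker != kArgumentsAdaptorMarkerSketch) return formal_count;
  return caller.length;  // adaptor frames record how many arguments were actually passed
}
// --- end sketch ---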
3276 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 3276 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3277 ZoneList<Expression*>* args = expr->arguments(); 3277 ZoneList<Expression*>* args = expr->arguments();
3278 ASSERT(args->length() == 1); 3278 DCHECK(args->length() == 1);
3279 Label done, null, function, non_function_constructor; 3279 Label done, null, function, non_function_constructor;
3280 3280
3281 VisitForAccumulatorValue(args->at(0)); 3281 VisitForAccumulatorValue(args->at(0));
3282 3282
3283 // If the object is a smi, we return null. 3283 // If the object is a smi, we return null.
3284 __ JumpIfSmi(v0, &null); 3284 __ JumpIfSmi(v0, &null);
3285 3285
3286 // Check that the object is a JS object but take special care of JS 3286 // Check that the object is a JS object but take special care of JS
3287 // functions to make sure they have 'Function' as their class. 3287 // functions to make sure they have 'Function' as their class.
3288 // Assume that there are only two callable types, and one of them is at 3288 // Assume that there are only two callable types, and one of them is at
(...skipping 41 matching lines...)
3330 __ bind(&done); 3330 __ bind(&done);
3331 3331
3332 context()->Plug(v0); 3332 context()->Plug(v0);
3333 } 3333 }
3334 3334
3335 3335
3336 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3336 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3337 // Load the arguments on the stack and call the stub. 3337 // Load the arguments on the stack and call the stub.
3338 SubStringStub stub(isolate()); 3338 SubStringStub stub(isolate());
3339 ZoneList<Expression*>* args = expr->arguments(); 3339 ZoneList<Expression*>* args = expr->arguments();
3340 ASSERT(args->length() == 3); 3340 DCHECK(args->length() == 3);
3341 VisitForStackValue(args->at(0)); 3341 VisitForStackValue(args->at(0));
3342 VisitForStackValue(args->at(1)); 3342 VisitForStackValue(args->at(1));
3343 VisitForStackValue(args->at(2)); 3343 VisitForStackValue(args->at(2));
3344 __ CallStub(&stub); 3344 __ CallStub(&stub);
3345 context()->Plug(v0); 3345 context()->Plug(v0);
3346 } 3346 }
3347 3347
3348 3348
3349 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3349 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3350 // Load the arguments on the stack and call the stub. 3350 // Load the arguments on the stack and call the stub.
3351 RegExpExecStub stub(isolate()); 3351 RegExpExecStub stub(isolate());
3352 ZoneList<Expression*>* args = expr->arguments(); 3352 ZoneList<Expression*>* args = expr->arguments();
3353 ASSERT(args->length() == 4); 3353 DCHECK(args->length() == 4);
3354 VisitForStackValue(args->at(0)); 3354 VisitForStackValue(args->at(0));
3355 VisitForStackValue(args->at(1)); 3355 VisitForStackValue(args->at(1));
3356 VisitForStackValue(args->at(2)); 3356 VisitForStackValue(args->at(2));
3357 VisitForStackValue(args->at(3)); 3357 VisitForStackValue(args->at(3));
3358 __ CallStub(&stub); 3358 __ CallStub(&stub);
3359 context()->Plug(v0); 3359 context()->Plug(v0);
3360 } 3360 }
3361 3361
3362 3362
3363 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3363 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3364 ZoneList<Expression*>* args = expr->arguments(); 3364 ZoneList<Expression*>* args = expr->arguments();
3365 ASSERT(args->length() == 1); 3365 DCHECK(args->length() == 1);
3366 3366
3367 VisitForAccumulatorValue(args->at(0)); // Load the object. 3367 VisitForAccumulatorValue(args->at(0)); // Load the object.
3368 3368
3369 Label done; 3369 Label done;
3370 // If the object is a smi return the object. 3370 // If the object is a smi return the object.
3371 __ JumpIfSmi(v0, &done); 3371 __ JumpIfSmi(v0, &done);
3372 // If the object is not a value type, return the object. 3372 // If the object is not a value type, return the object.
3373 __ GetObjectType(v0, a1, a1); 3373 __ GetObjectType(v0, a1, a1);
3374 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE)); 3374 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3375 3375
3376 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset)); 3376 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3377 3377
3378 __ bind(&done); 3378 __ bind(&done);
3379 context()->Plug(v0); 3379 context()->Plug(v0);
3380 } 3380 }
3381 3381
3382 3382
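EmitValueOf passes smis and non-wrapper objects through unchanged and only loads the value field when the argument really is a JSValue wrapper (for example new Number(1) or new String("x")), which is what %_ValueOf expects. The same branching over a toy wrapper type (types and fields are illustrative):

// --- illustrative sketch, not part of the patch ---
struct ToyObject {
  bool is_smi;
  bool is_js_value_wrapper;   // analogue of the JS_VALUE_TYPE check
  ToyObject* wrapped_value;   // analogue of JSValue::kValueOffset
};

ToyObject* ValueOfSketch(ToyObject* object) {
  if (object->is_smi) return object;                // smis are returned as-is
  if (!object->is_js_value_wrapper) return object;  // non-wrappers are returned as-is
  return object->wrapped_value;                     // unwrap the JSValue
}
// --- end sketch ---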
3383 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3383 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3384 ZoneList<Expression*>* args = expr->arguments(); 3384 ZoneList<Expression*>* args = expr->arguments();
3385 ASSERT(args->length() == 2); 3385 DCHECK(args->length() == 2);
3386 ASSERT_NE(NULL, args->at(1)->AsLiteral()); 3386 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3387 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3387 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3388 3388
3389 VisitForAccumulatorValue(args->at(0)); // Load the object. 3389 VisitForAccumulatorValue(args->at(0)); // Load the object.
3390 3390
3391 Label runtime, done, not_date_object; 3391 Label runtime, done, not_date_object;
3392 Register object = v0; 3392 Register object = v0;
3393 Register result = v0; 3393 Register result = v0;
3394 Register scratch0 = t1; 3394 Register scratch0 = t1;
3395 Register scratch1 = a1; 3395 Register scratch1 = a1;
3396 3396
(...skipping 25 matching lines...)
3422 3422
3423 __ bind(&not_date_object); 3423 __ bind(&not_date_object);
3424 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3424 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3425 __ bind(&done); 3425 __ bind(&done);
3426 context()->Plug(v0); 3426 context()->Plug(v0);
3427 } 3427 }
3428 3428
3429 3429
3430 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3430 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3431 ZoneList<Expression*>* args = expr->arguments(); 3431 ZoneList<Expression*>* args = expr->arguments();
3432 ASSERT_EQ(3, args->length()); 3432 DCHECK_EQ(3, args->length());
3433 3433
3434 Register string = v0; 3434 Register string = v0;
3435 Register index = a1; 3435 Register index = a1;
3436 Register value = a2; 3436 Register value = a2;
3437 3437
3438 VisitForStackValue(args->at(1)); // index 3438 VisitForStackValue(args->at(1)); // index
3439 VisitForStackValue(args->at(2)); // value 3439 VisitForStackValue(args->at(2)); // value
3440 VisitForAccumulatorValue(args->at(0)); // string 3440 VisitForAccumulatorValue(args->at(0)); // string
3441 __ Pop(index, value); 3441 __ Pop(index, value);
3442 3442
(...skipping 16 matching lines...)
3459 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3459 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3460 __ SmiUntag(index); 3460 __ SmiUntag(index);
3461 __ Daddu(at, at, index); 3461 __ Daddu(at, at, index);
3462 __ sb(value, MemOperand(at)); 3462 __ sb(value, MemOperand(at));
3463 context()->Plug(string); 3463 context()->Plug(string);
3464 } 3464 }
3465 3465
3466 3466
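EmitOneByteSeqStringSetChar stores the character byte at string + SeqOneByteString::kHeaderSize - kHeapObjectTag + untagged index: the header-size term skips the string's map/hash/length fields and the -kHeapObjectTag term compensates for the tagged string pointer. The address arithmetic written out in C++ (the constants below are placeholders for the real objects.h values):

// --- illustrative sketch, not part of the patch ---
#include <cstdint>

// Placeholder constants; the real values come from objects.h.
constexpr std::uintptr_t kHeapObjectTagSketch = 1;
constexpr std::uintptr_t kSeqOneByteStringHeaderSizeSketch = 24;

// 'tagged_string' is a tagged SeqOneByteString pointer, 'index' an untagged element index.
inline std::uint8_t* OneByteCharAddress(std::uintptr_t tagged_string, std::uintptr_t index) {
  return reinterpret_cast<std::uint8_t*>(tagged_string - kHeapObjectTagSketch +
                                         kSeqOneByteStringHeaderSizeSketch + index);
}
// --- end sketch ---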
3467 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3467 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3468 ZoneList<Expression*>* args = expr->arguments(); 3468 ZoneList<Expression*>* args = expr->arguments();
3469 ASSERT_EQ(3, args->length()); 3469 DCHECK_EQ(3, args->length());
3470 3470
3471 Register string = v0; 3471 Register string = v0;
3472 Register index = a1; 3472 Register index = a1;
3473 Register value = a2; 3473 Register value = a2;
3474 3474
3475 VisitForStackValue(args->at(1)); // index 3475 VisitForStackValue(args->at(1)); // index
3476 VisitForStackValue(args->at(2)); // value 3476 VisitForStackValue(args->at(2)); // value
3477 VisitForAccumulatorValue(args->at(0)); // string 3477 VisitForAccumulatorValue(args->at(0)); // string
3478 __ Pop(index, value); 3478 __ Pop(index, value);
3479 3479
(...skipping 18 matching lines...)
3498 __ Daddu(at, at, index); 3498 __ Daddu(at, at, index);
3499 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); 3499 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3500 __ sh(value, MemOperand(at)); 3500 __ sh(value, MemOperand(at));
3501 context()->Plug(string); 3501 context()->Plug(string);
3502 } 3502 }
3503 3503
3504 3504
3505 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3505 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3506 // Load the arguments on the stack and call the runtime function. 3506 // Load the arguments on the stack and call the runtime function.
3507 ZoneList<Expression*>* args = expr->arguments(); 3507 ZoneList<Expression*>* args = expr->arguments();
3508 ASSERT(args->length() == 2); 3508 DCHECK(args->length() == 2);
3509 VisitForStackValue(args->at(0)); 3509 VisitForStackValue(args->at(0));
3510 VisitForStackValue(args->at(1)); 3510 VisitForStackValue(args->at(1));
3511 MathPowStub stub(isolate(), MathPowStub::ON_STACK); 3511 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3512 __ CallStub(&stub); 3512 __ CallStub(&stub);
3513 context()->Plug(v0); 3513 context()->Plug(v0);
3514 } 3514 }
3515 3515
3516 3516
3517 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3517 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3518 ZoneList<Expression*>* args = expr->arguments(); 3518 ZoneList<Expression*>* args = expr->arguments();
3519 ASSERT(args->length() == 2); 3519 DCHECK(args->length() == 2);
3520 3520
3521 VisitForStackValue(args->at(0)); // Load the object. 3521 VisitForStackValue(args->at(0)); // Load the object.
3522 VisitForAccumulatorValue(args->at(1)); // Load the value. 3522 VisitForAccumulatorValue(args->at(1)); // Load the value.
3523 __ pop(a1); // v0 = value. a1 = object. 3523 __ pop(a1); // v0 = value. a1 = object.
3524 3524
3525 Label done; 3525 Label done;
3526 // If the object is a smi, return the value. 3526 // If the object is a smi, return the value.
3527 __ JumpIfSmi(a1, &done); 3527 __ JumpIfSmi(a1, &done);
3528 3528
3529 // If the object is not a value type, return the value. 3529 // If the object is not a value type, return the value.
3530 __ GetObjectType(a1, a2, a2); 3530 __ GetObjectType(a1, a2, a2);
3531 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE)); 3531 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3532 3532
3533 // Store the value. 3533 // Store the value.
3534 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset)); 3534 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3535 // Update the write barrier. Save the value as it will be 3535 // Update the write barrier. Save the value as it will be
3536 // overwritten by the write barrier code and is needed afterward. 3536 // overwritten by the write barrier code and is needed afterward.
3537 __ mov(a2, v0); 3537 __ mov(a2, v0);
3538 __ RecordWriteField( 3538 __ RecordWriteField(
3539 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs); 3539 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3540 3540
3541 __ bind(&done); 3541 __ bind(&done);
3542 context()->Plug(v0); 3542 context()->Plug(v0);
3543 } 3543 }
3544 3544
3545 3545
3546 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3546 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3547 ZoneList<Expression*>* args = expr->arguments(); 3547 ZoneList<Expression*>* args = expr->arguments();
3548 ASSERT_EQ(args->length(), 1); 3548 DCHECK_EQ(args->length(), 1);
3549 3549
3550 // Load the argument into a0 and call the stub. 3550 // Load the argument into a0 and call the stub.
3551 VisitForAccumulatorValue(args->at(0)); 3551 VisitForAccumulatorValue(args->at(0));
3552 __ mov(a0, result_register()); 3552 __ mov(a0, result_register());
3553 3553
3554 NumberToStringStub stub(isolate()); 3554 NumberToStringStub stub(isolate());
3555 __ CallStub(&stub); 3555 __ CallStub(&stub);
3556 context()->Plug(v0); 3556 context()->Plug(v0);
3557 } 3557 }
3558 3558
3559 3559
3560 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3560 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3561 ZoneList<Expression*>* args = expr->arguments(); 3561 ZoneList<Expression*>* args = expr->arguments();
3562 ASSERT(args->length() == 1); 3562 DCHECK(args->length() == 1);
3563 3563
3564 VisitForAccumulatorValue(args->at(0)); 3564 VisitForAccumulatorValue(args->at(0));
3565 3565
3566 Label done; 3566 Label done;
3567 StringCharFromCodeGenerator generator(v0, a1); 3567 StringCharFromCodeGenerator generator(v0, a1);
3568 generator.GenerateFast(masm_); 3568 generator.GenerateFast(masm_);
3569 __ jmp(&done); 3569 __ jmp(&done);
3570 3570
3571 NopRuntimeCallHelper call_helper; 3571 NopRuntimeCallHelper call_helper;
3572 generator.GenerateSlow(masm_, call_helper); 3572 generator.GenerateSlow(masm_, call_helper);
3573 3573
3574 __ bind(&done); 3574 __ bind(&done);
3575 context()->Plug(a1); 3575 context()->Plug(a1);
3576 } 3576 }
3577 3577
3578 3578
3579 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3579 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3580 ZoneList<Expression*>* args = expr->arguments(); 3580 ZoneList<Expression*>* args = expr->arguments();
3581 ASSERT(args->length() == 2); 3581 DCHECK(args->length() == 2);
3582 3582
3583 VisitForStackValue(args->at(0)); 3583 VisitForStackValue(args->at(0));
3584 VisitForAccumulatorValue(args->at(1)); 3584 VisitForAccumulatorValue(args->at(1));
3585 __ mov(a0, result_register()); 3585 __ mov(a0, result_register());
3586 3586
3587 Register object = a1; 3587 Register object = a1;
3588 Register index = a0; 3588 Register index = a0;
3589 Register result = v0; 3589 Register result = v0;
3590 3590
3591 __ pop(object); 3591 __ pop(object);
(...skipping 26 matching lines...)
3618 NopRuntimeCallHelper call_helper; 3618 NopRuntimeCallHelper call_helper;
3619 generator.GenerateSlow(masm_, call_helper); 3619 generator.GenerateSlow(masm_, call_helper);
3620 3620
3621 __ bind(&done); 3621 __ bind(&done);
3622 context()->Plug(result); 3622 context()->Plug(result);
3623 } 3623 }
3624 3624
3625 3625
3626 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3626 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3627 ZoneList<Expression*>* args = expr->arguments(); 3627 ZoneList<Expression*>* args = expr->arguments();
3628 ASSERT(args->length() == 2); 3628 DCHECK(args->length() == 2);
3629 3629
3630 VisitForStackValue(args->at(0)); 3630 VisitForStackValue(args->at(0));
3631 VisitForAccumulatorValue(args->at(1)); 3631 VisitForAccumulatorValue(args->at(1));
3632 __ mov(a0, result_register()); 3632 __ mov(a0, result_register());
3633 3633
3634 Register object = a1; 3634 Register object = a1;
3635 Register index = a0; 3635 Register index = a0;
3636 Register scratch = a3; 3636 Register scratch = a3;
3637 Register result = v0; 3637 Register result = v0;
3638 3638
(...skipping 28 matching lines...)
3667 NopRuntimeCallHelper call_helper; 3667 NopRuntimeCallHelper call_helper;
3668 generator.GenerateSlow(masm_, call_helper); 3668 generator.GenerateSlow(masm_, call_helper);
3669 3669
3670 __ bind(&done); 3670 __ bind(&done);
3671 context()->Plug(result); 3671 context()->Plug(result);
3672 } 3672 }
3673 3673
3674 3674
3675 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3675 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3676 ZoneList<Expression*>* args = expr->arguments(); 3676 ZoneList<Expression*>* args = expr->arguments();
3677 ASSERT_EQ(2, args->length()); 3677 DCHECK_EQ(2, args->length());
3678 VisitForStackValue(args->at(0)); 3678 VisitForStackValue(args->at(0));
3679 VisitForAccumulatorValue(args->at(1)); 3679 VisitForAccumulatorValue(args->at(1));
3680 3680
3681 __ pop(a1); 3681 __ pop(a1);
3682 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1. 3682 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
3683 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); 3683 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3684 __ CallStub(&stub); 3684 __ CallStub(&stub);
3685 context()->Plug(v0); 3685 context()->Plug(v0);
3686 } 3686 }
3687 3687
3688 3688
3689 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3689 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3690 ZoneList<Expression*>* args = expr->arguments(); 3690 ZoneList<Expression*>* args = expr->arguments();
3691 ASSERT_EQ(2, args->length()); 3691 DCHECK_EQ(2, args->length());
3692 3692
3693 VisitForStackValue(args->at(0)); 3693 VisitForStackValue(args->at(0));
3694 VisitForStackValue(args->at(1)); 3694 VisitForStackValue(args->at(1));
3695 3695
3696 StringCompareStub stub(isolate()); 3696 StringCompareStub stub(isolate());
3697 __ CallStub(&stub); 3697 __ CallStub(&stub);
3698 context()->Plug(v0); 3698 context()->Plug(v0);
3699 } 3699 }
3700 3700
3701 3701
3702 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3702 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3703 ZoneList<Expression*>* args = expr->arguments(); 3703 ZoneList<Expression*>* args = expr->arguments();
3704 ASSERT(args->length() >= 2); 3704 DCHECK(args->length() >= 2);
3705 3705
3706 int arg_count = args->length() - 2; // 2 for receiver and function. 3706 int arg_count = args->length() - 2; // 2 for receiver and function.
3707 for (int i = 0; i < arg_count + 1; i++) { 3707 for (int i = 0; i < arg_count + 1; i++) {
3708 VisitForStackValue(args->at(i)); 3708 VisitForStackValue(args->at(i));
3709 } 3709 }
3710 VisitForAccumulatorValue(args->last()); // Function. 3710 VisitForAccumulatorValue(args->last()); // Function.
3711 3711
3712 Label runtime, done; 3712 Label runtime, done;
3713 // Check for non-function argument (including proxy). 3713 // Check for non-function argument (including proxy).
3714 __ JumpIfSmi(v0, &runtime); 3714 __ JumpIfSmi(v0, &runtime);
(...skipping 12 matching lines...)
3727 __ CallRuntime(Runtime::kCall, args->length()); 3727 __ CallRuntime(Runtime::kCall, args->length());
3728 __ bind(&done); 3728 __ bind(&done);
3729 3729
3730 context()->Plug(v0); 3730 context()->Plug(v0);
3731 } 3731 }
3732 3732
3733 3733
3734 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3734 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3735 RegExpConstructResultStub stub(isolate()); 3735 RegExpConstructResultStub stub(isolate());
3736 ZoneList<Expression*>* args = expr->arguments(); 3736 ZoneList<Expression*>* args = expr->arguments();
3737 ASSERT(args->length() == 3); 3737 DCHECK(args->length() == 3);
3738 VisitForStackValue(args->at(0)); 3738 VisitForStackValue(args->at(0));
3739 VisitForStackValue(args->at(1)); 3739 VisitForStackValue(args->at(1));
3740 VisitForAccumulatorValue(args->at(2)); 3740 VisitForAccumulatorValue(args->at(2));
3741 __ mov(a0, result_register()); 3741 __ mov(a0, result_register());
3742 __ pop(a1); 3742 __ pop(a1);
3743 __ pop(a2); 3743 __ pop(a2);
3744 __ CallStub(&stub); 3744 __ CallStub(&stub);
3745 context()->Plug(v0); 3745 context()->Plug(v0);
3746 } 3746 }
3747 3747
3748 3748
3749 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 3749 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3750 ZoneList<Expression*>* args = expr->arguments(); 3750 ZoneList<Expression*>* args = expr->arguments();
3751 ASSERT_EQ(2, args->length()); 3751 DCHECK_EQ(2, args->length());
3752 3752
3753 ASSERT_NE(NULL, args->at(0)->AsLiteral()); 3753 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3754 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); 3754 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3755 3755
3756 Handle<FixedArray> jsfunction_result_caches( 3756 Handle<FixedArray> jsfunction_result_caches(
3757 isolate()->native_context()->jsfunction_result_caches()); 3757 isolate()->native_context()->jsfunction_result_caches());
3758 if (jsfunction_result_caches->length() <= cache_id) { 3758 if (jsfunction_result_caches->length() <= cache_id) {
3759 __ Abort(kAttemptToUseUndefinedCache); 3759 __ Abort(kAttemptToUseUndefinedCache);
3760 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); 3760 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3761 context()->Plug(v0); 3761 context()->Plug(v0);
3762 return; 3762 return;
3763 } 3763 }
(...skipping 52 matching lines...)
3816 3816
3817 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3817 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3818 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through); 3818 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3819 3819
3820 context()->Plug(if_true, if_false); 3820 context()->Plug(if_true, if_false);
3821 } 3821 }
3822 3822
3823 3823
3824 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3824 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3825 ZoneList<Expression*>* args = expr->arguments(); 3825 ZoneList<Expression*>* args = expr->arguments();
3826 ASSERT(args->length() == 1); 3826 DCHECK(args->length() == 1);
3827 VisitForAccumulatorValue(args->at(0)); 3827 VisitForAccumulatorValue(args->at(0));
3828 3828
3829 __ AssertString(v0); 3829 __ AssertString(v0);
3830 3830
3831 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset)); 3831 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3832 __ IndexFromHash(v0, v0); 3832 __ IndexFromHash(v0, v0);
3833 3833
3834 context()->Plug(v0); 3834 context()->Plug(v0);
3835 } 3835 }
3836 3836
3837 3837
3838 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { 3838 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3839 Label bailout, done, one_char_separator, long_separator, 3839 Label bailout, done, one_char_separator, long_separator,
3840 non_trivial_array, not_size_one_array, loop, 3840 non_trivial_array, not_size_one_array, loop,
3841 empty_separator_loop, one_char_separator_loop, 3841 empty_separator_loop, one_char_separator_loop,
3842 one_char_separator_loop_entry, long_separator_loop; 3842 one_char_separator_loop_entry, long_separator_loop;
3843 ZoneList<Expression*>* args = expr->arguments(); 3843 ZoneList<Expression*>* args = expr->arguments();
3844 ASSERT(args->length() == 2); 3844 DCHECK(args->length() == 2);
3845 VisitForStackValue(args->at(1)); 3845 VisitForStackValue(args->at(1));
3846 VisitForAccumulatorValue(args->at(0)); 3846 VisitForAccumulatorValue(args->at(0));
3847 3847
3848 // All aliases of the same register have disjoint lifetimes. 3848 // All aliases of the same register have disjoint lifetimes.
3849 Register array = v0; 3849 Register array = v0;
3850 Register elements = no_reg; // Will be v0. 3850 Register elements = no_reg; // Will be v0.
3851 Register result = no_reg; // Will be v0. 3851 Register result = no_reg; // Will be v0.
3852 Register separator = a1; 3852 Register separator = a1;
3853 Register array_length = a2; 3853 Register array_length = a2;
3854 Register result_pos = no_reg; // Will be a2. 3854 Register result_pos = no_reg; // Will be a2.
(...skipping 138 matching lines...)
3993 3993
3994 // Copy next array element to the result. 3994 // Copy next array element to the result.
3995 __ ld(string, MemOperand(element)); 3995 __ ld(string, MemOperand(element));
3996 __ Daddu(element, element, kPointerSize); 3996 __ Daddu(element, element, kPointerSize);
3997 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset)); 3997 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
3998 __ SmiUntag(string_length); 3998 __ SmiUntag(string_length);
3999 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); 3999 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4000 __ CopyBytes(string, result_pos, string_length, scratch1); 4000 __ CopyBytes(string, result_pos, string_length, scratch1);
4001 // End while (element < elements_end). 4001 // End while (element < elements_end).
4002 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end)); 4002 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4003 ASSERT(result.is(v0)); 4003 DCHECK(result.is(v0));
4004 __ Branch(&done); 4004 __ Branch(&done);
4005 4005
4006 // One-character separator case. 4006 // One-character separator case.
4007 __ bind(&one_char_separator); 4007 __ bind(&one_char_separator);
4008 // Replace separator with its ASCII character value. 4008 // Replace separator with its ASCII character value.
4009 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 4009 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4010 // Jump into the loop after the code that copies the separator, so the first 4010 // Jump into the loop after the code that copies the separator, so the first
4011 // element is not preceded by a separator. 4011 // element is not preceded by a separator.
4012 __ jmp(&one_char_separator_loop_entry); 4012 __ jmp(&one_char_separator_loop_entry);
4013 4013
(...skipping 11 matching lines...)
4025 // Copy next array element to the result. 4025 // Copy next array element to the result.
4026 __ bind(&one_char_separator_loop_entry); 4026 __ bind(&one_char_separator_loop_entry);
4027 __ ld(string, MemOperand(element)); 4027 __ ld(string, MemOperand(element));
4028 __ Daddu(element, element, kPointerSize); 4028 __ Daddu(element, element, kPointerSize);
4029 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset)); 4029 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4030 __ SmiUntag(string_length); 4030 __ SmiUntag(string_length);
4031 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); 4031 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4032 __ CopyBytes(string, result_pos, string_length, scratch1); 4032 __ CopyBytes(string, result_pos, string_length, scratch1);
4033 // End while (element < elements_end). 4033 // End while (element < elements_end).
4034 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end)); 4034 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4035 ASSERT(result.is(v0)); 4035 DCHECK(result.is(v0));
4036 __ Branch(&done); 4036 __ Branch(&done);
4037 4037
4038 // Long separator case (separator is more than one character). Entry is at the 4038 // Long separator case (separator is more than one character). Entry is at the
4039 // label long_separator below. 4039 // label long_separator below.
4040 __ bind(&long_separator_loop); 4040 __ bind(&long_separator_loop);
4041 // Live values in registers: 4041 // Live values in registers:
4042 // result_pos: the position to which we are currently copying characters. 4042 // result_pos: the position to which we are currently copying characters.
4043 // element: Current array element. 4043 // element: Current array element.
4044 // elements_end: Array end. 4044 // elements_end: Array end.
4045 // separator: Separator string. 4045 // separator: Separator string.
4046 4046
4047 // Copy the separator to the result. 4047 // Copy the separator to the result.
4048 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset)); 4048 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4049 __ SmiUntag(string_length); 4049 __ SmiUntag(string_length);
4050 __ Daddu(string, 4050 __ Daddu(string,
4051 separator, 4051 separator,
4052 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4052 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4053 __ CopyBytes(string, result_pos, string_length, scratch1); 4053 __ CopyBytes(string, result_pos, string_length, scratch1);
4054 4054
4055 __ bind(&long_separator); 4055 __ bind(&long_separator);
4056 __ ld(string, MemOperand(element)); 4056 __ ld(string, MemOperand(element));
4057 __ Daddu(element, element, kPointerSize); 4057 __ Daddu(element, element, kPointerSize);
4058 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset)); 4058 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4059 __ SmiUntag(string_length); 4059 __ SmiUntag(string_length);
4060 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); 4060 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4061 __ CopyBytes(string, result_pos, string_length, scratch1); 4061 __ CopyBytes(string, result_pos, string_length, scratch1);
4062 // End while (element < elements_end). 4062 // End while (element < elements_end).
4063 __ Branch(&long_separator_loop, lt, element, Operand(elements_end)); 4063 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4064 ASSERT(result.is(v0)); 4064 DCHECK(result.is(v0));
4065 __ Branch(&done); 4065 __ Branch(&done);
4066 4066
4067 __ bind(&bailout); 4067 __ bind(&bailout);
4068 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); 4068 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4069 __ bind(&done); 4069 __ bind(&done);
4070 context()->Plug(v0); 4070 context()->Plug(v0);
4071 } 4071 }
4072 4072
4073 4073
4074 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 4074 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4075 ASSERT(expr->arguments()->length() == 0); 4075 DCHECK(expr->arguments()->length() == 0);
4076 ExternalReference debug_is_active = 4076 ExternalReference debug_is_active =
4077 ExternalReference::debug_is_active_address(isolate()); 4077 ExternalReference::debug_is_active_address(isolate());
4078 __ li(at, Operand(debug_is_active)); 4078 __ li(at, Operand(debug_is_active));
4079 __ lbu(v0, MemOperand(at)); 4079 __ lbu(v0, MemOperand(at));
4080 __ SmiTag(v0); 4080 __ SmiTag(v0);
4081 context()->Plug(v0); 4081 context()->Plug(v0);
4082 } 4082 }
4083 4083
4084 4084
4085 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 4085 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
(...skipping 70 matching lines...)
4156 VisitForStackValue(property->obj()); 4156 VisitForStackValue(property->obj());
4157 VisitForStackValue(property->key()); 4157 VisitForStackValue(property->key());
4158 __ li(a1, Operand(Smi::FromInt(strict_mode()))); 4158 __ li(a1, Operand(Smi::FromInt(strict_mode())));
4159 __ push(a1); 4159 __ push(a1);
4160 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4160 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4161 context()->Plug(v0); 4161 context()->Plug(v0);
4162 } else if (proxy != NULL) { 4162 } else if (proxy != NULL) {
4163 Variable* var = proxy->var(); 4163 Variable* var = proxy->var();
4164 // Delete of an unqualified identifier is disallowed in strict mode 4164 // Delete of an unqualified identifier is disallowed in strict mode
4165 // but "delete this" is allowed. 4165 // but "delete this" is allowed.
4166 ASSERT(strict_mode() == SLOPPY || var->is_this()); 4166 DCHECK(strict_mode() == SLOPPY || var->is_this());
4167 if (var->IsUnallocated()) { 4167 if (var->IsUnallocated()) {
4168 __ ld(a2, GlobalObjectOperand()); 4168 __ ld(a2, GlobalObjectOperand());
4169 __ li(a1, Operand(var->name())); 4169 __ li(a1, Operand(var->name()));
4170 __ li(a0, Operand(Smi::FromInt(SLOPPY))); 4170 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4171 __ Push(a2, a1, a0); 4171 __ Push(a2, a1, a0);
4172 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4172 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4173 context()->Plug(v0); 4173 context()->Plug(v0);
4174 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4174 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4175 // Result of deleting non-global, non-dynamic variables is false. 4175 // Result of deleting non-global, non-dynamic variables is false.
4176 // The subexpression does not have side effects. 4176 // The subexpression does not have side effects.
4177 context()->Plug(var->is_this()); 4177 context()->Plug(var->is_this());
4178 } else { 4178 } else {
4179 // Non-global variable. Call the runtime to try to delete from the 4179 // Non-global variable. Call the runtime to try to delete from the
4180 // context where the variable was introduced. 4180 // context where the variable was introduced.
4181 ASSERT(!context_register().is(a2)); 4181 DCHECK(!context_register().is(a2));
4182 __ li(a2, Operand(var->name())); 4182 __ li(a2, Operand(var->name()));
4183 __ Push(context_register(), a2); 4183 __ Push(context_register(), a2);
4184 __ CallRuntime(Runtime::kDeleteLookupSlot, 2); 4184 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4185 context()->Plug(v0); 4185 context()->Plug(v0);
4186 } 4186 }
4187 } else { 4187 } else {
4188 // Result of deleting non-property, non-variable reference is true. 4188 // Result of deleting non-property, non-variable reference is true.
4189 // The subexpression may have side effects. 4189 // The subexpression may have side effects.
4190 VisitForEffect(expr->expression()); 4190 VisitForEffect(expr->expression());
4191 context()->Plug(true); 4191 context()->Plug(true);
(...skipping 20 matching lines...)
4212 VisitForControl(expr->expression(), 4212 VisitForControl(expr->expression(),
4213 test->false_label(), 4213 test->false_label(),
4214 test->true_label(), 4214 test->true_label(),
4215 test->fall_through()); 4215 test->fall_through());
4216 context()->Plug(test->true_label(), test->false_label()); 4216 context()->Plug(test->true_label(), test->false_label());
4217 } else { 4217 } else {
4218 // We handle value contexts explicitly rather than simply visiting 4218 // We handle value contexts explicitly rather than simply visiting
4219 // for control and plugging the control flow into the context, 4219 // for control and plugging the control flow into the context,
4220 // because we need to prepare a pair of extra administrative AST ids 4220 // because we need to prepare a pair of extra administrative AST ids
4221 // for the optimizing compiler. 4221 // for the optimizing compiler.
4222 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue()); 4222 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4223 Label materialize_true, materialize_false, done; 4223 Label materialize_true, materialize_false, done;
4224 VisitForControl(expr->expression(), 4224 VisitForControl(expr->expression(),
4225 &materialize_false, 4225 &materialize_false,
4226 &materialize_true, 4226 &materialize_true,
4227 &materialize_true); 4227 &materialize_true);
4228 __ bind(&materialize_true); 4228 __ bind(&materialize_true);
4229 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 4229 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4230 __ LoadRoot(v0, Heap::kTrueValueRootIndex); 4230 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4231 if (context()->IsStackValue()) __ push(v0); 4231 if (context()->IsStackValue()) __ push(v0);
4232 __ jmp(&done); 4232 __ jmp(&done);
(...skipping 16 matching lines...)
4249 break; 4249 break;
4250 } 4250 }
4251 4251
4252 default: 4252 default:
4253 UNREACHABLE(); 4253 UNREACHABLE();
4254 } 4254 }
4255 } 4255 }
4256 4256
4257 4257
4258 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4258 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4259 ASSERT(expr->expression()->IsValidReferenceExpression()); 4259 DCHECK(expr->expression()->IsValidReferenceExpression());
4260 4260
4261 Comment cmnt(masm_, "[ CountOperation"); 4261 Comment cmnt(masm_, "[ CountOperation");
4262 SetSourcePosition(expr->position()); 4262 SetSourcePosition(expr->position());
4263 4263
4264 // Expression can only be a property, a global or a (parameter or local) 4264 // Expression can only be a property, a global or a (parameter or local)
4265 // slot. 4265 // slot.
4266 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 4266 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4267 LhsKind assign_type = VARIABLE; 4267 LhsKind assign_type = VARIABLE;
4268 Property* prop = expr->expression()->AsProperty(); 4268 Property* prop = expr->expression()->AsProperty();
4269 // In case of a property we use the uninitialized expression context 4269 // In case of a property we use the uninitialized expression context
4270 // of the key to detect a named property. 4270 // of the key to detect a named property.
4271 if (prop != NULL) { 4271 if (prop != NULL) {
4272 assign_type = 4272 assign_type =
4273 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY; 4273 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4274 } 4274 }
4275 4275
4276 // Evaluate expression and get value. 4276 // Evaluate expression and get value.
4277 if (assign_type == VARIABLE) { 4277 if (assign_type == VARIABLE) {
4278 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); 4278 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4279 AccumulatorValueContext context(this); 4279 AccumulatorValueContext context(this);
4280 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4280 EmitVariableLoad(expr->expression()->AsVariableProxy());
4281 } else { 4281 } else {
4282 // Reserve space for result of postfix operation. 4282 // Reserve space for result of postfix operation.
4283 if (expr->is_postfix() && !context()->IsEffect()) { 4283 if (expr->is_postfix() && !context()->IsEffect()) {
4284 __ li(at, Operand(Smi::FromInt(0))); 4284 __ li(at, Operand(Smi::FromInt(0)));
4285 __ push(at); 4285 __ push(at);
4286 } 4286 }
4287 if (assign_type == NAMED_PROPERTY) { 4287 if (assign_type == NAMED_PROPERTY) {
4288 // Put the object both on the stack and in the register. 4288 // Put the object both on the stack and in the register.
(...skipping 145 matching lines...)
4434 } else { 4434 } else {
4435 context()->Plug(v0); 4435 context()->Plug(v0);
4436 } 4436 }
4437 break; 4437 break;
4438 } 4438 }
4439 } 4439 }
4440 } 4440 }
4441 4441
4442 4442
4443 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4443 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4444 ASSERT(!context()->IsEffect()); 4444 DCHECK(!context()->IsEffect());
4445 ASSERT(!context()->IsTest()); 4445 DCHECK(!context()->IsTest());
4446 VariableProxy* proxy = expr->AsVariableProxy(); 4446 VariableProxy* proxy = expr->AsVariableProxy();
4447 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4447 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4448 Comment cmnt(masm_, "[ Global variable"); 4448 Comment cmnt(masm_, "[ Global variable");
4449 __ ld(LoadIC::ReceiverRegister(), GlobalObjectOperand()); 4449 __ ld(LoadIC::ReceiverRegister(), GlobalObjectOperand());
4450 __ li(LoadIC::NameRegister(), Operand(proxy->name())); 4450 __ li(LoadIC::NameRegister(), Operand(proxy->name()));
4451 if (FLAG_vector_ics) { 4451 if (FLAG_vector_ics) {
4452 __ li(LoadIC::SlotRegister(), 4452 __ li(LoadIC::SlotRegister(),
4453 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 4453 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4454 } 4454 }
4455 // Use a regular load, not a contextual load, to avoid a reference 4455 // Use a regular load, not a contextual load, to avoid a reference
(...skipping 205 matching lines...)
4661 return v0; 4661 return v0;
4662 } 4662 }
4663 4663
4664 4664
4665 Register FullCodeGenerator::context_register() { 4665 Register FullCodeGenerator::context_register() {
4666 return cp; 4666 return cp;
4667 } 4667 }
4668 4668
4669 4669
4670 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4670 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4671 // ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 4671 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4672 ASSERT(IsAligned(frame_offset, kPointerSize)); 4672 DCHECK(IsAligned(frame_offset, kPointerSize));
4673 // __ sw(value, MemOperand(fp, frame_offset)); 4673 // __ sw(value, MemOperand(fp, frame_offset));
4674 __ sd(value, MemOperand(fp, frame_offset)); 4674 __ sd(value, MemOperand(fp, frame_offset));
4675 } 4675 }
4676 4676
4677 4677
4678 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4678 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4679 __ ld(dst, ContextOperand(cp, context_index)); 4679 __ ld(dst, ContextOperand(cp, context_index));
4680 } 4680 }
4681 4681
4682 4682
4683 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 4683 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4684 Scope* declaration_scope = scope()->DeclarationScope(); 4684 Scope* declaration_scope = scope()->DeclarationScope();
4685 if (declaration_scope->is_global_scope() || 4685 if (declaration_scope->is_global_scope() ||
4686 declaration_scope->is_module_scope()) { 4686 declaration_scope->is_module_scope()) {
4687 // Contexts nested in the native context have a canonical empty function 4687 // Contexts nested in the native context have a canonical empty function
4688 // as their closure, not the anonymous closure containing the global 4688 // as their closure, not the anonymous closure containing the global
4689 // code. Pass a smi sentinel and let the runtime look up the empty 4689 // code. Pass a smi sentinel and let the runtime look up the empty
4690 // function. 4690 // function.
4691 __ li(at, Operand(Smi::FromInt(0))); 4691 __ li(at, Operand(Smi::FromInt(0)));
4692 } else if (declaration_scope->is_eval_scope()) { 4692 } else if (declaration_scope->is_eval_scope()) {
4693 // Contexts created by a call to eval have the same closure as the 4693 // Contexts created by a call to eval have the same closure as the
4694 // context calling eval, not the anonymous closure containing the eval 4694 // context calling eval, not the anonymous closure containing the eval
4695 // code. Fetch it from the context. 4695 // code. Fetch it from the context.
4696 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX)); 4696 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4697 } else { 4697 } else {
4698 ASSERT(declaration_scope->is_function_scope()); 4698 DCHECK(declaration_scope->is_function_scope());
4699 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4699 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4700 } 4700 }
4701 __ push(at); 4701 __ push(at);
4702 } 4702 }
4703 4703
4704 4704
4705 // ---------------------------------------------------------------------------- 4705 // ----------------------------------------------------------------------------
4706 // Non-local control flow support. 4706 // Non-local control flow support.
4707 4707
4708 void FullCodeGenerator::EnterFinallyBlock() { 4708 void FullCodeGenerator::EnterFinallyBlock() {
4709 ASSERT(!result_register().is(a1)); 4709 DCHECK(!result_register().is(a1));
4710 // Store result register while executing finally block. 4710 // Store result register while executing finally block.
4711 __ push(result_register()); 4711 __ push(result_register());
4712 // Cook return address in link register to stack (smi encoded Code* delta). 4712 // Cook return address in link register to stack (smi encoded Code* delta).
4713 __ Dsubu(a1, ra, Operand(masm_->CodeObject())); 4713 __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
4714 __ SmiTag(a1); 4714 __ SmiTag(a1);
4715 4715
4716 // Store result register while executing finally block. 4716 // Store result register while executing finally block.
4717 __ push(a1); 4717 __ push(a1);
4718 4718
4719 // Store pending message while executing finally block. 4719 // Store pending message while executing finally block.
(...skipping 12 matching lines...)
4732 4732
4733 ExternalReference pending_message_script = 4733 ExternalReference pending_message_script =
4734 ExternalReference::address_of_pending_message_script(isolate()); 4734 ExternalReference::address_of_pending_message_script(isolate());
4735 __ li(at, Operand(pending_message_script)); 4735 __ li(at, Operand(pending_message_script));
4736 __ ld(a1, MemOperand(at)); 4736 __ ld(a1, MemOperand(at));
4737 __ push(a1); 4737 __ push(a1);
4738 } 4738 }
4739 4739
4740 4740
4741 void FullCodeGenerator::ExitFinallyBlock() { 4741 void FullCodeGenerator::ExitFinallyBlock() {
4742 ASSERT(!result_register().is(a1)); 4742 DCHECK(!result_register().is(a1));
4743 // Restore pending message from stack. 4743 // Restore pending message from stack.
4744 __ pop(a1); 4744 __ pop(a1);
4745 ExternalReference pending_message_script = 4745 ExternalReference pending_message_script =
4746 ExternalReference::address_of_pending_message_script(isolate()); 4746 ExternalReference::address_of_pending_message_script(isolate());
4747 __ li(at, Operand(pending_message_script)); 4747 __ li(at, Operand(pending_message_script));
4748 __ sd(a1, MemOperand(at)); 4748 __ sd(a1, MemOperand(at));
4749 4749
4750 __ pop(a1); 4750 __ pop(a1);
4751 __ SmiUntag(a1); 4751 __ SmiUntag(a1);
4752 ExternalReference has_pending_message = 4752 ExternalReference has_pending_message =
(...skipping 97 matching lines...)
4850 4850
4851 4851
4852 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( 4852 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4853 Isolate* isolate, 4853 Isolate* isolate,
4854 Code* unoptimized_code, 4854 Code* unoptimized_code,
4855 Address pc) { 4855 Address pc) {
4856 static const int kInstrSize = Assembler::kInstrSize; 4856 static const int kInstrSize = Assembler::kInstrSize;
4857 Address branch_address = pc - 8 * kInstrSize; 4857 Address branch_address = pc - 8 * kInstrSize;
4858 Address pc_immediate_load_address = pc - 6 * kInstrSize; 4858 Address pc_immediate_load_address = pc - 6 * kInstrSize;
4859 4859
4860 ASSERT(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize))); 4860 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
4861 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) { 4861 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
4862 ASSERT(reinterpret_cast<uint64_t>( 4862 DCHECK(reinterpret_cast<uint64_t>(
4863 Assembler::target_address_at(pc_immediate_load_address)) == 4863 Assembler::target_address_at(pc_immediate_load_address)) ==
4864 reinterpret_cast<uint64_t>( 4864 reinterpret_cast<uint64_t>(
4865 isolate->builtins()->InterruptCheck()->entry())); 4865 isolate->builtins()->InterruptCheck()->entry()));
4866 return INTERRUPT; 4866 return INTERRUPT;
4867 } 4867 }
4868 4868
4869 ASSERT(Assembler::IsAddImmediate(Assembler::instr_at(branch_address))); 4869 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
4870 4870
4871 if (reinterpret_cast<uint64_t>( 4871 if (reinterpret_cast<uint64_t>(
4872 Assembler::target_address_at(pc_immediate_load_address)) == 4872 Assembler::target_address_at(pc_immediate_load_address)) ==
4873 reinterpret_cast<uint64_t>( 4873 reinterpret_cast<uint64_t>(
4874 isolate->builtins()->OnStackReplacement()->entry())) { 4874 isolate->builtins()->OnStackReplacement()->entry())) {
4875 return ON_STACK_REPLACEMENT; 4875 return ON_STACK_REPLACEMENT;
4876 } 4876 }
4877 4877
4878 ASSERT(reinterpret_cast<uint64_t>( 4878 DCHECK(reinterpret_cast<uint64_t>(
4879 Assembler::target_address_at(pc_immediate_load_address)) == 4879 Assembler::target_address_at(pc_immediate_load_address)) ==
4880 reinterpret_cast<uint64_t>( 4880 reinterpret_cast<uint64_t>(
4881 isolate->builtins()->OsrAfterStackCheck()->entry())); 4881 isolate->builtins()->OsrAfterStackCheck()->entry()));
4882 return OSR_AFTER_STACK_CHECK; 4882 return OSR_AFTER_STACK_CHECK;
4883 } 4883 }
4884 4884
4885 4885
4886 } } // namespace v8::internal 4886 } } // namespace v8::internal
4887 4887
4888 #endif // V8_TARGET_ARCH_MIPS64 4888 #endif // V8_TARGET_ARCH_MIPS64