Chromium Code Reviews

Side by Side Diff: src/arm/full-codegen-arm.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes | Created 6 years, 4 months ago
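For context, the change in this CL is mechanical: every debug-only assertion is renamed from the ASSERT* family to the DCHECK* family (ASSERT becomes DCHECK, ASSERT_EQ becomes DCHECK_EQ, and so on) without touching the checked conditions. The snippet below is a minimal sketch of what such a debug-only check does, using a hypothetical SKETCH_DCHECK macro rather than the real V8 definition; it only illustrates the before/after pattern visible throughout this diff.

// Illustrative sketch only; the real DCHECK also records file/line and has
// typed variants such as DCHECK_EQ and DCHECK_NE.
#include <cstdio>
#include <cstdlib>

#ifdef DEBUG
#define SKETCH_DCHECK(condition)                                      \
  do {                                                                \
    if (!(condition)) {                                               \
      std::fprintf(stderr, "Debug check failed: %s\n", #condition);   \
      std::abort();                                                   \
    }                                                                 \
  } while (false)
#else
#define SKETCH_DCHECK(condition) ((void)0)
#endif

// Before this CL:  ASSERT(loop_depth() == 0);
// After this CL:   DCHECK(loop_depth() == 0);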
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM 7 #if V8_TARGET_ARCH_ARM
8 8
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/codegen.h" 10 #include "src/codegen.h"
(...skipping 22 matching lines...)
33 // the patchable code. 33 // the patchable code.
34 class JumpPatchSite BASE_EMBEDDED { 34 class JumpPatchSite BASE_EMBEDDED {
35 public: 35 public:
36 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { 36 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
37 #ifdef DEBUG 37 #ifdef DEBUG
38 info_emitted_ = false; 38 info_emitted_ = false;
39 #endif 39 #endif
40 } 40 }
41 41
42 ~JumpPatchSite() { 42 ~JumpPatchSite() {
43 ASSERT(patch_site_.is_bound() == info_emitted_); 43 DCHECK(patch_site_.is_bound() == info_emitted_);
44 } 44 }
45 45
46 // When initially emitting this ensure that a jump is always generated to skip 46 // When initially emitting this ensure that a jump is always generated to skip
47 // the inlined smi code. 47 // the inlined smi code.
48 void EmitJumpIfNotSmi(Register reg, Label* target) { 48 void EmitJumpIfNotSmi(Register reg, Label* target) {
49 ASSERT(!patch_site_.is_bound() && !info_emitted_); 49 DCHECK(!patch_site_.is_bound() && !info_emitted_);
50 Assembler::BlockConstPoolScope block_const_pool(masm_); 50 Assembler::BlockConstPoolScope block_const_pool(masm_);
51 __ bind(&patch_site_); 51 __ bind(&patch_site_);
52 __ cmp(reg, Operand(reg)); 52 __ cmp(reg, Operand(reg));
53 __ b(eq, target); // Always taken before patched. 53 __ b(eq, target); // Always taken before patched.
54 } 54 }
55 55
56 // When initially emitting this ensure that a jump is never generated to skip 56 // When initially emitting this ensure that a jump is never generated to skip
57 // the inlined smi code. 57 // the inlined smi code.
58 void EmitJumpIfSmi(Register reg, Label* target) { 58 void EmitJumpIfSmi(Register reg, Label* target) {
59 ASSERT(!patch_site_.is_bound() && !info_emitted_); 59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
60 Assembler::BlockConstPoolScope block_const_pool(masm_); 60 Assembler::BlockConstPoolScope block_const_pool(masm_);
61 __ bind(&patch_site_); 61 __ bind(&patch_site_);
62 __ cmp(reg, Operand(reg)); 62 __ cmp(reg, Operand(reg));
63 __ b(ne, target); // Never taken before patched. 63 __ b(ne, target); // Never taken before patched.
64 } 64 }
65 65
66 void EmitPatchInfo() { 66 void EmitPatchInfo() {
67 // Block literal pool emission whilst recording patch site information. 67 // Block literal pool emission whilst recording patch site information.
68 Assembler::BlockConstPoolScope block_const_pool(masm_); 68 Assembler::BlockConstPoolScope block_const_pool(masm_);
69 if (patch_site_.is_bound()) { 69 if (patch_site_.is_bound()) {
(...skipping 75 matching lines...)
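The JumpPatchSite helper above marks an inlined smi check that the IC machinery can rewrite later, once type feedback is available. The following is a hedged usage sketch, modeled on how this file drives the helper in its binary-op code further down; the wrapper function, register choice, and label are illustrative and assume the MacroAssembler context of this file.

// Usage sketch only; mirrors the pattern of EmitBinaryOp/EmitInlineSmiBinaryOp
// later in this file, with the IC call itself elided.
void SketchInlineSmiCheck(MacroAssembler* masm, Register value,
                          Label* stub_call) {
  JumpPatchSite patch_site(masm);
  // Before patching, this branch is always taken, so all operands go to the
  // stub; once feedback shows smi operands, the site is patched into a real
  // smi test and smis stay on the inline fast path.
  patch_site.EmitJumpIfNotSmi(value, stub_call);
  // ... the inlined smi fast path would be emitted here ...
  // In the real code the IC call comes next, and EmitPatchInfo() is emitted
  // immediately after it so the patcher can locate this site.
  patch_site.EmitPatchInfo();
}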
145 // the frame (that is done below). 145 // the frame (that is done below).
146 FrameScope frame_scope(masm_, StackFrame::MANUAL); 146 FrameScope frame_scope(masm_, StackFrame::MANUAL);
147 147
148 info->set_prologue_offset(masm_->pc_offset()); 148 info->set_prologue_offset(masm_->pc_offset());
149 __ Prologue(info->IsCodePreAgingActive()); 149 __ Prologue(info->IsCodePreAgingActive());
150 info->AddNoFrameRange(0, masm_->pc_offset()); 150 info->AddNoFrameRange(0, masm_->pc_offset());
151 151
152 { Comment cmnt(masm_, "[ Allocate locals"); 152 { Comment cmnt(masm_, "[ Allocate locals");
153 int locals_count = info->scope()->num_stack_slots(); 153 int locals_count = info->scope()->num_stack_slots();
154 // Generators allocate locals, if any, in context slots. 154 // Generators allocate locals, if any, in context slots.
155 ASSERT(!info->function()->is_generator() || locals_count == 0); 155 DCHECK(!info->function()->is_generator() || locals_count == 0);
156 if (locals_count > 0) { 156 if (locals_count > 0) {
157 if (locals_count >= 128) { 157 if (locals_count >= 128) {
158 Label ok; 158 Label ok;
159 __ sub(r9, sp, Operand(locals_count * kPointerSize)); 159 __ sub(r9, sp, Operand(locals_count * kPointerSize));
160 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 160 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
161 __ cmp(r9, Operand(r2)); 161 __ cmp(r9, Operand(r2));
162 __ b(hs, &ok); 162 __ b(hs, &ok);
163 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); 163 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
164 __ bind(&ok); 164 __ bind(&ok);
165 } 165 }
(...skipping 119 matching lines...)
285 Comment cmnt(masm_, "[ Declarations"); 285 Comment cmnt(masm_, "[ Declarations");
286 scope()->VisitIllegalRedeclaration(this); 286 scope()->VisitIllegalRedeclaration(this);
287 287
288 } else { 288 } else {
289 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 289 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
290 { Comment cmnt(masm_, "[ Declarations"); 290 { Comment cmnt(masm_, "[ Declarations");
291 // For named function expressions, declare the function name as a 291 // For named function expressions, declare the function name as a
292 // constant. 292 // constant.
293 if (scope()->is_function_scope() && scope()->function() != NULL) { 293 if (scope()->is_function_scope() && scope()->function() != NULL) {
294 VariableDeclaration* function = scope()->function(); 294 VariableDeclaration* function = scope()->function();
295 ASSERT(function->proxy()->var()->mode() == CONST || 295 DCHECK(function->proxy()->var()->mode() == CONST ||
296 function->proxy()->var()->mode() == CONST_LEGACY); 296 function->proxy()->var()->mode() == CONST_LEGACY);
297 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); 297 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
298 VisitVariableDeclaration(function); 298 VisitVariableDeclaration(function);
299 } 299 }
300 VisitDeclarations(scope()->declarations()); 300 VisitDeclarations(scope()->declarations());
301 } 301 }
302 302
303 { Comment cmnt(masm_, "[ Stack check"); 303 { Comment cmnt(masm_, "[ Stack check");
304 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 304 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
305 Label ok; 305 Label ok;
306 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 306 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
307 __ cmp(sp, Operand(ip)); 307 __ cmp(sp, Operand(ip));
308 __ b(hs, &ok); 308 __ b(hs, &ok);
309 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); 309 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
310 PredictableCodeSizeScope predictable(masm_, 310 PredictableCodeSizeScope predictable(masm_,
311 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); 311 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
312 __ Call(stack_check, RelocInfo::CODE_TARGET); 312 __ Call(stack_check, RelocInfo::CODE_TARGET);
313 __ bind(&ok); 313 __ bind(&ok);
314 } 314 }
315 315
316 { Comment cmnt(masm_, "[ Body"); 316 { Comment cmnt(masm_, "[ Body");
317 ASSERT(loop_depth() == 0); 317 DCHECK(loop_depth() == 0);
318 VisitStatements(function()->body()); 318 VisitStatements(function()->body());
319 ASSERT(loop_depth() == 0); 319 DCHECK(loop_depth() == 0);
320 } 320 }
321 } 321 }
322 322
323 // Always emit a 'return undefined' in case control fell off the end of 323 // Always emit a 'return undefined' in case control fell off the end of
324 // the body. 324 // the body.
325 { Comment cmnt(masm_, "[ return <undefined>;"); 325 { Comment cmnt(masm_, "[ return <undefined>;");
326 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 326 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
327 } 327 }
328 EmitReturnSequence(); 328 EmitReturnSequence();
329 329
(...skipping 26 matching lines...)
356 Label start; 356 Label start;
357 __ bind(&start); 357 __ bind(&start);
358 int reset_value = FLAG_interrupt_budget; 358 int reset_value = FLAG_interrupt_budget;
359 if (info_->is_debug()) { 359 if (info_->is_debug()) {
360 // Detect debug break requests as soon as possible. 360 // Detect debug break requests as soon as possible.
361 reset_value = FLAG_interrupt_budget >> 4; 361 reset_value = FLAG_interrupt_budget >> 4;
362 } 362 }
363 __ mov(r2, Operand(profiling_counter_)); 363 __ mov(r2, Operand(profiling_counter_));
364 // The mov instruction above can be either 1, 2 or 3 instructions depending 364 // The mov instruction above can be either 1, 2 or 3 instructions depending
365 // upon whether it is an extended constant pool - insert nop to compensate. 365 // upon whether it is an extended constant pool - insert nop to compensate.
366 ASSERT(masm_->InstructionsGeneratedSince(&start) <= 3); 366 DCHECK(masm_->InstructionsGeneratedSince(&start) <= 3);
367 while (masm_->InstructionsGeneratedSince(&start) != 3) { 367 while (masm_->InstructionsGeneratedSince(&start) != 3) {
368 __ nop(); 368 __ nop();
369 } 369 }
370 __ mov(r3, Operand(Smi::FromInt(reset_value))); 370 __ mov(r3, Operand(Smi::FromInt(reset_value)));
371 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); 371 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
372 } 372 }
373 373
374 374
375 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 375 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
376 Label* back_edge_target) { 376 Label* back_edge_target) {
377 Comment cmnt(masm_, "[ Back edge bookkeeping"); 377 Comment cmnt(masm_, "[ Back edge bookkeeping");
378 // Block literal pools whilst emitting back edge code. 378 // Block literal pools whilst emitting back edge code.
379 Assembler::BlockConstPoolScope block_const_pool(masm_); 379 Assembler::BlockConstPoolScope block_const_pool(masm_);
380 Label ok; 380 Label ok;
381 381
382 ASSERT(back_edge_target->is_bound()); 382 DCHECK(back_edge_target->is_bound());
383 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); 383 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
384 int weight = Min(kMaxBackEdgeWeight, 384 int weight = Min(kMaxBackEdgeWeight,
385 Max(1, distance / kCodeSizeMultiplier)); 385 Max(1, distance / kCodeSizeMultiplier));
386 EmitProfilingCounterDecrement(weight); 386 EmitProfilingCounterDecrement(weight);
387 __ b(pl, &ok); 387 __ b(pl, &ok);
388 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); 388 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
389 389
390 // Record a mapping of this PC offset to the OSR id. This is used to find 390 // Record a mapping of this PC offset to the OSR id. This is used to find
391 // the AST id from the unoptimized code in order to use it as a key into 391 // the AST id from the unoptimized code in order to use it as a key into
392 // the deoptimization input data found in the optimized code. 392 // the deoptimization input data found in the optimized code.
(...skipping 56 matching lines...)
449 __ RecordJSReturn(); 449 __ RecordJSReturn();
450 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT); 450 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
451 __ add(sp, sp, Operand(sp_delta)); 451 __ add(sp, sp, Operand(sp_delta));
452 __ Jump(lr); 452 __ Jump(lr);
453 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 453 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
454 } 454 }
455 455
456 #ifdef DEBUG 456 #ifdef DEBUG
457 // Check that the size of the code used for returning is large enough 457 // Check that the size of the code used for returning is large enough
458 // for the debugger's requirements. 458 // for the debugger's requirements.
459 ASSERT(Assembler::kJSReturnSequenceInstructions <= 459 DCHECK(Assembler::kJSReturnSequenceInstructions <=
460 masm_->InstructionsGeneratedSince(&check_exit_codesize)); 460 masm_->InstructionsGeneratedSince(&check_exit_codesize));
461 #endif 461 #endif
462 } 462 }
463 } 463 }
464 464
465 465
466 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { 466 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
467 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 467 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
468 } 468 }
469 469
470 470
471 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { 471 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
472 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 472 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
473 codegen()->GetVar(result_register(), var); 473 codegen()->GetVar(result_register(), var);
474 } 474 }
475 475
476 476
477 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { 477 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
478 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 478 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
479 codegen()->GetVar(result_register(), var); 479 codegen()->GetVar(result_register(), var);
480 __ push(result_register()); 480 __ push(result_register());
481 } 481 }
482 482
483 483
484 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 484 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
485 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 485 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
486 // For simplicity we always test the accumulator register. 486 // For simplicity we always test the accumulator register.
487 codegen()->GetVar(result_register(), var); 487 codegen()->GetVar(result_register(), var);
488 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 488 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
489 codegen()->DoTest(this); 489 codegen()->DoTest(this);
490 } 490 }
491 491
492 492
493 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 493 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
494 } 494 }
495 495
(...skipping 44 matching lines...)
540 __ mov(result_register(), Operand(lit)); 540 __ mov(result_register(), Operand(lit));
541 __ push(result_register()); 541 __ push(result_register());
542 } 542 }
543 543
544 544
545 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 545 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
546 codegen()->PrepareForBailoutBeforeSplit(condition(), 546 codegen()->PrepareForBailoutBeforeSplit(condition(),
547 true, 547 true,
548 true_label_, 548 true_label_,
549 false_label_); 549 false_label_);
550 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals. 550 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
551 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 551 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
552 if (false_label_ != fall_through_) __ b(false_label_); 552 if (false_label_ != fall_through_) __ b(false_label_);
553 } else if (lit->IsTrue() || lit->IsJSObject()) { 553 } else if (lit->IsTrue() || lit->IsJSObject()) {
554 if (true_label_ != fall_through_) __ b(true_label_); 554 if (true_label_ != fall_through_) __ b(true_label_);
555 } else if (lit->IsString()) { 555 } else if (lit->IsString()) {
556 if (String::cast(*lit)->length() == 0) { 556 if (String::cast(*lit)->length() == 0) {
557 if (false_label_ != fall_through_) __ b(false_label_); 557 if (false_label_ != fall_through_) __ b(false_label_);
558 } else { 558 } else {
559 if (true_label_ != fall_through_) __ b(true_label_); 559 if (true_label_ != fall_through_) __ b(true_label_);
560 } 560 }
561 } else if (lit->IsSmi()) { 561 } else if (lit->IsSmi()) {
562 if (Smi::cast(*lit)->value() == 0) { 562 if (Smi::cast(*lit)->value() == 0) {
563 if (false_label_ != fall_through_) __ b(false_label_); 563 if (false_label_ != fall_through_) __ b(false_label_);
564 } else { 564 } else {
565 if (true_label_ != fall_through_) __ b(true_label_); 565 if (true_label_ != fall_through_) __ b(true_label_);
566 } 566 }
567 } else { 567 } else {
568 // For simplicity we always test the accumulator register. 568 // For simplicity we always test the accumulator register.
569 __ mov(result_register(), Operand(lit)); 569 __ mov(result_register(), Operand(lit));
570 codegen()->DoTest(this); 570 codegen()->DoTest(this);
571 } 571 }
572 } 572 }
573 573
574 574
575 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 575 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
576 Register reg) const { 576 Register reg) const {
577 ASSERT(count > 0); 577 DCHECK(count > 0);
578 __ Drop(count); 578 __ Drop(count);
579 } 579 }
580 580
581 581
582 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( 582 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
583 int count, 583 int count,
584 Register reg) const { 584 Register reg) const {
585 ASSERT(count > 0); 585 DCHECK(count > 0);
586 __ Drop(count); 586 __ Drop(count);
587 __ Move(result_register(), reg); 587 __ Move(result_register(), reg);
588 } 588 }
589 589
590 590
591 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 591 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
592 Register reg) const { 592 Register reg) const {
593 ASSERT(count > 0); 593 DCHECK(count > 0);
594 if (count > 1) __ Drop(count - 1); 594 if (count > 1) __ Drop(count - 1);
595 __ str(reg, MemOperand(sp, 0)); 595 __ str(reg, MemOperand(sp, 0));
596 } 596 }
597 597
598 598
599 void FullCodeGenerator::TestContext::DropAndPlug(int count, 599 void FullCodeGenerator::TestContext::DropAndPlug(int count,
600 Register reg) const { 600 Register reg) const {
601 ASSERT(count > 0); 601 DCHECK(count > 0);
602 // For simplicity we always test the accumulator register. 602 // For simplicity we always test the accumulator register.
603 __ Drop(count); 603 __ Drop(count);
604 __ Move(result_register(), reg); 604 __ Move(result_register(), reg);
605 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 605 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
606 codegen()->DoTest(this); 606 codegen()->DoTest(this);
607 } 607 }
608 608
609 609
610 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 610 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
611 Label* materialize_false) const { 611 Label* materialize_false) const {
612 ASSERT(materialize_true == materialize_false); 612 DCHECK(materialize_true == materialize_false);
613 __ bind(materialize_true); 613 __ bind(materialize_true);
614 } 614 }
615 615
616 616
617 void FullCodeGenerator::AccumulatorValueContext::Plug( 617 void FullCodeGenerator::AccumulatorValueContext::Plug(
618 Label* materialize_true, 618 Label* materialize_true,
619 Label* materialize_false) const { 619 Label* materialize_false) const {
620 Label done; 620 Label done;
621 __ bind(materialize_true); 621 __ bind(materialize_true);
622 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); 622 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
(...skipping 13 matching lines...)
636 __ jmp(&done); 636 __ jmp(&done);
637 __ bind(materialize_false); 637 __ bind(materialize_false);
638 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 638 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
639 __ bind(&done); 639 __ bind(&done);
640 __ push(ip); 640 __ push(ip);
641 } 641 }
642 642
643 643
644 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 644 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
645 Label* materialize_false) const { 645 Label* materialize_false) const {
646 ASSERT(materialize_true == true_label_); 646 DCHECK(materialize_true == true_label_);
647 ASSERT(materialize_false == false_label_); 647 DCHECK(materialize_false == false_label_);
648 } 648 }
649 649
650 650
651 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 651 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
652 } 652 }
653 653
654 654
655 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 655 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
656 Heap::RootListIndex value_root_index = 656 Heap::RootListIndex value_root_index =
657 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 657 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
(...skipping 42 matching lines...)
700 } else if (if_true == fall_through) { 700 } else if (if_true == fall_through) {
701 __ b(NegateCondition(cond), if_false); 701 __ b(NegateCondition(cond), if_false);
702 } else { 702 } else {
703 __ b(cond, if_true); 703 __ b(cond, if_true);
704 __ b(if_false); 704 __ b(if_false);
705 } 705 }
706 } 706 }
707 707
708 708
709 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 709 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
710 ASSERT(var->IsStackAllocated()); 710 DCHECK(var->IsStackAllocated());
711 // Offset is negative because higher indexes are at lower addresses. 711 // Offset is negative because higher indexes are at lower addresses.
712 int offset = -var->index() * kPointerSize; 712 int offset = -var->index() * kPointerSize;
713 // Adjust by a (parameter or local) base offset. 713 // Adjust by a (parameter or local) base offset.
714 if (var->IsParameter()) { 714 if (var->IsParameter()) {
715 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; 715 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
716 } else { 716 } else {
717 offset += JavaScriptFrameConstants::kLocal0Offset; 717 offset += JavaScriptFrameConstants::kLocal0Offset;
718 } 718 }
719 return MemOperand(fp, offset); 719 return MemOperand(fp, offset);
720 } 720 }
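To make the offset arithmetic in StackOperand() above concrete, here is a small self-contained sketch. It assumes the 32-bit ARM pointer size of 4 bytes; kLocalZeroOffsetSketch is a placeholder standing in for JavaScriptFrameConstants::kLocal0Offset rather than the real constant, so only the sign and general shape of the results are meaningful.

#include <cstdio>

// Placeholder constants for illustration only.
const int kPointerSizeSketch = 4;        // 32-bit ARM
const int kLocalZeroOffsetSketch = -8;   // placeholder, not the real V8 value

// Mirrors the computation in FullCodeGenerator::StackOperand().
int StackSlotOffset(int index, bool is_parameter, int num_parameters) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -index * kPointerSizeSketch;
  if (is_parameter) {
    offset += (num_parameters + 1) * kPointerSizeSketch;  // parameters sit above fp
  } else {
    offset += kLocalZeroOffsetSketch;                      // locals sit below fp
  }
  return offset;
}

int main() {
  std::printf("parameter 0 of 2: fp%+d\n", StackSlotOffset(0, true, 2));   // fp+12
  std::printf("stack local 1:    fp%+d\n", StackSlotOffset(1, false, 2));  // fp-12 with the placeholder
  return 0;
}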
721 721
722 722
723 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { 723 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
724 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 724 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
725 if (var->IsContextSlot()) { 725 if (var->IsContextSlot()) {
726 int context_chain_length = scope()->ContextChainLength(var->scope()); 726 int context_chain_length = scope()->ContextChainLength(var->scope());
727 __ LoadContext(scratch, context_chain_length); 727 __ LoadContext(scratch, context_chain_length);
728 return ContextOperand(scratch, var->index()); 728 return ContextOperand(scratch, var->index());
729 } else { 729 } else {
730 return StackOperand(var); 730 return StackOperand(var);
731 } 731 }
732 } 732 }
733 733
734 734
735 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 735 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
736 // Use destination as scratch. 736 // Use destination as scratch.
737 MemOperand location = VarOperand(var, dest); 737 MemOperand location = VarOperand(var, dest);
738 __ ldr(dest, location); 738 __ ldr(dest, location);
739 } 739 }
740 740
741 741
742 void FullCodeGenerator::SetVar(Variable* var, 742 void FullCodeGenerator::SetVar(Variable* var,
743 Register src, 743 Register src,
744 Register scratch0, 744 Register scratch0,
745 Register scratch1) { 745 Register scratch1) {
746 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 746 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
747 ASSERT(!scratch0.is(src)); 747 DCHECK(!scratch0.is(src));
748 ASSERT(!scratch0.is(scratch1)); 748 DCHECK(!scratch0.is(scratch1));
749 ASSERT(!scratch1.is(src)); 749 DCHECK(!scratch1.is(src));
750 MemOperand location = VarOperand(var, scratch0); 750 MemOperand location = VarOperand(var, scratch0);
751 __ str(src, location); 751 __ str(src, location);
752 752
753 // Emit the write barrier code if the location is in the heap. 753 // Emit the write barrier code if the location is in the heap.
754 if (var->IsContextSlot()) { 754 if (var->IsContextSlot()) {
755 __ RecordWriteContextSlot(scratch0, 755 __ RecordWriteContextSlot(scratch0,
756 location.offset(), 756 location.offset(),
757 src, 757 src,
758 scratch1, 758 scratch1,
759 kLRHasBeenSaved, 759 kLRHasBeenSaved,
(...skipping 19 matching lines...)
779 __ cmp(r0, ip); 779 __ cmp(r0, ip);
780 Split(eq, if_true, if_false, NULL); 780 Split(eq, if_true, if_false, NULL);
781 __ bind(&skip); 781 __ bind(&skip);
782 } 782 }
783 } 783 }
784 784
785 785
786 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 786 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
787 // The variable in the declaration always resides in the current function 787 // The variable in the declaration always resides in the current function
788 // context. 788 // context.
789 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 789 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
790 if (generate_debug_code_) { 790 if (generate_debug_code_) {
791 // Check that we're not inside a with or catch context. 791 // Check that we're not inside a with or catch context.
792 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); 792 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
793 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); 793 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
794 __ Check(ne, kDeclarationInWithContext); 794 __ Check(ne, kDeclarationInWithContext);
795 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); 795 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
796 __ Check(ne, kDeclarationInCatchContext); 796 __ Check(ne, kDeclarationInCatchContext);
797 } 797 }
798 } 798 }
799 799
(...skipping 33 matching lines...)
833 __ str(ip, ContextOperand(cp, variable->index())); 833 __ str(ip, ContextOperand(cp, variable->index()));
834 // No write barrier since the_hole_value is in old space. 834 // No write barrier since the_hole_value is in old space.
835 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 835 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
836 } 836 }
837 break; 837 break;
838 838
839 case Variable::LOOKUP: { 839 case Variable::LOOKUP: {
840 Comment cmnt(masm_, "[ VariableDeclaration"); 840 Comment cmnt(masm_, "[ VariableDeclaration");
841 __ mov(r2, Operand(variable->name())); 841 __ mov(r2, Operand(variable->name()));
842 // Declaration nodes are always introduced in one of four modes. 842 // Declaration nodes are always introduced in one of four modes.
843 ASSERT(IsDeclaredVariableMode(mode)); 843 DCHECK(IsDeclaredVariableMode(mode));
844 PropertyAttributes attr = 844 PropertyAttributes attr =
845 IsImmutableVariableMode(mode) ? READ_ONLY : NONE; 845 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
846 __ mov(r1, Operand(Smi::FromInt(attr))); 846 __ mov(r1, Operand(Smi::FromInt(attr)));
847 // Push initial value, if any. 847 // Push initial value, if any.
848 // Note: For variables we must not push an initial value (such as 848 // Note: For variables we must not push an initial value (such as
849 // 'undefined') because we may have a (legal) redeclaration and we 849 // 'undefined') because we may have a (legal) redeclaration and we
850 // must not destroy the current value. 850 // must not destroy the current value.
851 if (hole_init) { 851 if (hole_init) {
852 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); 852 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
853 __ Push(cp, r2, r1, r0); 853 __ Push(cp, r2, r1, r0);
(...skipping 59 matching lines...)
913 VisitForStackValue(declaration->fun()); 913 VisitForStackValue(declaration->fun());
914 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 914 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
915 break; 915 break;
916 } 916 }
917 } 917 }
918 } 918 }
919 919
920 920
921 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 921 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
922 Variable* variable = declaration->proxy()->var(); 922 Variable* variable = declaration->proxy()->var();
923 ASSERT(variable->location() == Variable::CONTEXT); 923 DCHECK(variable->location() == Variable::CONTEXT);
924 ASSERT(variable->interface()->IsFrozen()); 924 DCHECK(variable->interface()->IsFrozen());
925 925
926 Comment cmnt(masm_, "[ ModuleDeclaration"); 926 Comment cmnt(masm_, "[ ModuleDeclaration");
927 EmitDebugCheckDeclarationContext(variable); 927 EmitDebugCheckDeclarationContext(variable);
928 928
929 // Load instance object. 929 // Load instance object.
930 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope())); 930 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
931 __ ldr(r1, ContextOperand(r1, variable->interface()->Index())); 931 __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
932 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX)); 932 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));
933 933
934 // Assign it. 934 // Assign it.
(...skipping 468 matching lines...)
1403 1403
1404 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) 1404 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1405 ? NOT_CONTEXTUAL 1405 ? NOT_CONTEXTUAL
1406 : CONTEXTUAL; 1406 : CONTEXTUAL;
1407 CallLoadIC(mode); 1407 CallLoadIC(mode);
1408 } 1408 }
1409 1409
1410 1410
1411 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1411 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1412 Label* slow) { 1412 Label* slow) {
1413 ASSERT(var->IsContextSlot()); 1413 DCHECK(var->IsContextSlot());
1414 Register context = cp; 1414 Register context = cp;
1415 Register next = r3; 1415 Register next = r3;
1416 Register temp = r4; 1416 Register temp = r4;
1417 1417
1418 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1418 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1419 if (s->num_heap_slots() > 0) { 1419 if (s->num_heap_slots() > 0) {
1420 if (s->calls_sloppy_eval()) { 1420 if (s->calls_sloppy_eval()) {
1421 // Check that extension is NULL. 1421 // Check that extension is NULL.
1422 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1422 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1423 __ tst(temp, temp); 1423 __ tst(temp, temp);
(...skipping 74 matching lines...)
1498 case Variable::LOCAL: 1498 case Variable::LOCAL:
1499 case Variable::CONTEXT: { 1499 case Variable::CONTEXT: {
1500 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" 1500 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1501 : "[ Stack variable"); 1501 : "[ Stack variable");
1502 if (var->binding_needs_init()) { 1502 if (var->binding_needs_init()) {
1503 // var->scope() may be NULL when the proxy is located in eval code and 1503 // var->scope() may be NULL when the proxy is located in eval code and
1504 // refers to a potential outside binding. Currently those bindings are 1504 // refers to a potential outside binding. Currently those bindings are
1505 // always looked up dynamically, i.e. in that case 1505 // always looked up dynamically, i.e. in that case
1506 // var->location() == LOOKUP. 1506 // var->location() == LOOKUP.
1507 // always holds. 1507 // always holds.
1508 ASSERT(var->scope() != NULL); 1508 DCHECK(var->scope() != NULL);
1509 1509
1510 // Check if the binding really needs an initialization check. The check 1510 // Check if the binding really needs an initialization check. The check
1511 // can be skipped in the following situation: we have a LET or CONST 1511 // can be skipped in the following situation: we have a LET or CONST
1512 // binding in harmony mode, both the Variable and the VariableProxy have 1512 // binding in harmony mode, both the Variable and the VariableProxy have
1513 // the same declaration scope (i.e. they are both in global code, in the 1513 // the same declaration scope (i.e. they are both in global code, in the
1514 // same function or in the same eval code) and the VariableProxy is in 1514 // same function or in the same eval code) and the VariableProxy is in
1515 // the source physically located after the initializer of the variable. 1515 // the source physically located after the initializer of the variable.
1516 // 1516 //
1517 // We cannot skip any initialization checks for CONST in non-harmony 1517 // We cannot skip any initialization checks for CONST in non-harmony
1518 // mode because const variables may be declared but never initialized: 1518 // mode because const variables may be declared but never initialized:
1519 // if (false) { const x; }; var y = x; 1519 // if (false) { const x; }; var y = x;
1520 // 1520 //
1521 // The condition on the declaration scopes is a conservative check for 1521 // The condition on the declaration scopes is a conservative check for
1522 // nested functions that access a binding and are called before the 1522 // nested functions that access a binding and are called before the
1523 // binding is initialized: 1523 // binding is initialized:
1524 // function() { f(); let x = 1; function f() { x = 2; } } 1524 // function() { f(); let x = 1; function f() { x = 2; } }
1525 // 1525 //
1526 bool skip_init_check; 1526 bool skip_init_check;
1527 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { 1527 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1528 skip_init_check = false; 1528 skip_init_check = false;
1529 } else { 1529 } else {
1530 // Check that we always have valid source position. 1530 // Check that we always have valid source position.
1531 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); 1531 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1532 ASSERT(proxy->position() != RelocInfo::kNoPosition); 1532 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1533 skip_init_check = var->mode() != CONST_LEGACY && 1533 skip_init_check = var->mode() != CONST_LEGACY &&
1534 var->initializer_position() < proxy->position(); 1534 var->initializer_position() < proxy->position();
1535 } 1535 }
1536 1536
1537 if (!skip_init_check) { 1537 if (!skip_init_check) {
1538 // Let and const need a read barrier. 1538 // Let and const need a read barrier.
1539 GetVar(r0, var); 1539 GetVar(r0, var);
1540 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1540 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1541 if (var->mode() == LET || var->mode() == CONST) { 1541 if (var->mode() == LET || var->mode() == CONST) {
1542 // Throw a reference error when using an uninitialized let/const 1542 // Throw a reference error when using an uninitialized let/const
1543 // binding in harmony mode. 1543 // binding in harmony mode.
1544 Label done; 1544 Label done;
1545 __ b(ne, &done); 1545 __ b(ne, &done);
1546 __ mov(r0, Operand(var->name())); 1546 __ mov(r0, Operand(var->name()));
1547 __ push(r0); 1547 __ push(r0);
1548 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1548 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1549 __ bind(&done); 1549 __ bind(&done);
1550 } else { 1550 } else {
1551 // Uninitialized const bindings outside of harmony mode are unholed. 1551 // Uninitialized const bindings outside of harmony mode are unholed.
1552 ASSERT(var->mode() == CONST_LEGACY); 1552 DCHECK(var->mode() == CONST_LEGACY);
1553 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1553 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1554 } 1554 }
1555 context()->Plug(r0); 1555 context()->Plug(r0);
1556 break; 1556 break;
1557 } 1557 }
1558 } 1558 }
1559 context()->Plug(var); 1559 context()->Plug(var);
1560 break; 1560 break;
1561 } 1561 }
1562 1562
(...skipping 118 matching lines...)
1681 Literal* key = property->key(); 1681 Literal* key = property->key();
1682 Expression* value = property->value(); 1682 Expression* value = property->value();
1683 if (!result_saved) { 1683 if (!result_saved) {
1684 __ push(r0); // Save result on stack 1684 __ push(r0); // Save result on stack
1685 result_saved = true; 1685 result_saved = true;
1686 } 1686 }
1687 switch (property->kind()) { 1687 switch (property->kind()) {
1688 case ObjectLiteral::Property::CONSTANT: 1688 case ObjectLiteral::Property::CONSTANT:
1689 UNREACHABLE(); 1689 UNREACHABLE();
1690 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1690 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1691 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1691 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1692 // Fall through. 1692 // Fall through.
1693 case ObjectLiteral::Property::COMPUTED: 1693 case ObjectLiteral::Property::COMPUTED:
1694 if (key->value()->IsInternalizedString()) { 1694 if (key->value()->IsInternalizedString()) {
1695 if (property->emit_store()) { 1695 if (property->emit_store()) {
1696 VisitForAccumulatorValue(value); 1696 VisitForAccumulatorValue(value);
1697 ASSERT(StoreIC::ValueRegister().is(r0)); 1697 DCHECK(StoreIC::ValueRegister().is(r0));
1698 __ mov(StoreIC::NameRegister(), Operand(key->value())); 1698 __ mov(StoreIC::NameRegister(), Operand(key->value()));
1699 __ ldr(StoreIC::ReceiverRegister(), MemOperand(sp)); 1699 __ ldr(StoreIC::ReceiverRegister(), MemOperand(sp));
1700 CallStoreIC(key->LiteralFeedbackId()); 1700 CallStoreIC(key->LiteralFeedbackId());
1701 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1701 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1702 } else { 1702 } else {
1703 VisitForEffect(value); 1703 VisitForEffect(value);
1704 } 1704 }
1705 break; 1705 break;
1706 } 1706 }
1707 // Duplicate receiver on stack. 1707 // Duplicate receiver on stack.
(...skipping 39 matching lines...)
1747 __ push(r0); 1747 __ push(r0);
1748 VisitForStackValue(it->first); 1748 VisitForStackValue(it->first);
1749 EmitAccessor(it->second->getter); 1749 EmitAccessor(it->second->getter);
1750 EmitAccessor(it->second->setter); 1750 EmitAccessor(it->second->setter);
1751 __ mov(r0, Operand(Smi::FromInt(NONE))); 1751 __ mov(r0, Operand(Smi::FromInt(NONE)));
1752 __ push(r0); 1752 __ push(r0);
1753 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); 1753 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1754 } 1754 }
1755 1755
1756 if (expr->has_function()) { 1756 if (expr->has_function()) {
1757 ASSERT(result_saved); 1757 DCHECK(result_saved);
1758 __ ldr(r0, MemOperand(sp)); 1758 __ ldr(r0, MemOperand(sp));
1759 __ push(r0); 1759 __ push(r0);
1760 __ CallRuntime(Runtime::kToFastProperties, 1); 1760 __ CallRuntime(Runtime::kToFastProperties, 1);
1761 } 1761 }
1762 1762
1763 if (result_saved) { 1763 if (result_saved) {
1764 context()->PlugTOS(); 1764 context()->PlugTOS();
1765 } else { 1765 } else {
1766 context()->Plug(r0); 1766 context()->Plug(r0);
1767 } 1767 }
1768 } 1768 }
1769 1769
1770 1770
1771 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1771 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1772 Comment cmnt(masm_, "[ ArrayLiteral"); 1772 Comment cmnt(masm_, "[ ArrayLiteral");
1773 1773
1774 expr->BuildConstantElements(isolate()); 1774 expr->BuildConstantElements(isolate());
1775 int flags = expr->depth() == 1 1775 int flags = expr->depth() == 1
1776 ? ArrayLiteral::kShallowElements 1776 ? ArrayLiteral::kShallowElements
1777 : ArrayLiteral::kNoFlags; 1777 : ArrayLiteral::kNoFlags;
1778 1778
1779 ZoneList<Expression*>* subexprs = expr->values(); 1779 ZoneList<Expression*>* subexprs = expr->values();
1780 int length = subexprs->length(); 1780 int length = subexprs->length();
1781 Handle<FixedArray> constant_elements = expr->constant_elements(); 1781 Handle<FixedArray> constant_elements = expr->constant_elements();
1782 ASSERT_EQ(2, constant_elements->length()); 1782 DCHECK_EQ(2, constant_elements->length());
1783 ElementsKind constant_elements_kind = 1783 ElementsKind constant_elements_kind =
1784 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1784 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1785 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); 1785 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1786 Handle<FixedArrayBase> constant_elements_values( 1786 Handle<FixedArrayBase> constant_elements_values(
1787 FixedArrayBase::cast(constant_elements->get(1))); 1787 FixedArrayBase::cast(constant_elements->get(1)));
1788 1788
1789 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; 1789 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1790 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { 1790 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1791 // If the only customer of allocation sites is transitioning, then 1791 // If the only customer of allocation sites is transitioning, then
1792 // we can turn it off if we don't have anywhere else to transition to. 1792 // we can turn it off if we don't have anywhere else to transition to.
(...skipping 51 matching lines...)
1844 if (result_saved) { 1844 if (result_saved) {
1845 __ pop(); // literal index 1845 __ pop(); // literal index
1846 context()->PlugTOS(); 1846 context()->PlugTOS();
1847 } else { 1847 } else {
1848 context()->Plug(r0); 1848 context()->Plug(r0);
1849 } 1849 }
1850 } 1850 }
1851 1851
1852 1852
1853 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1853 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1854 ASSERT(expr->target()->IsValidReferenceExpression()); 1854 DCHECK(expr->target()->IsValidReferenceExpression());
1855 1855
1856 Comment cmnt(masm_, "[ Assignment"); 1856 Comment cmnt(masm_, "[ Assignment");
1857 1857
1858 // Left-hand side can only be a property, a global or a (parameter or local) 1858 // Left-hand side can only be a property, a global or a (parameter or local)
1859 // slot. 1859 // slot.
1860 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 1860 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1861 LhsKind assign_type = VARIABLE; 1861 LhsKind assign_type = VARIABLE;
1862 Property* property = expr->target()->AsProperty(); 1862 Property* property = expr->target()->AsProperty();
1863 if (property != NULL) { 1863 if (property != NULL) {
1864 assign_type = (property->key()->IsPropertyName()) 1864 assign_type = (property->key()->IsPropertyName())
(...skipping 109 matching lines...)
1974 case Yield::INITIAL: { 1974 case Yield::INITIAL: {
1975 Label suspend, continuation, post_runtime, resume; 1975 Label suspend, continuation, post_runtime, resume;
1976 1976
1977 __ jmp(&suspend); 1977 __ jmp(&suspend);
1978 1978
1979 __ bind(&continuation); 1979 __ bind(&continuation);
1980 __ jmp(&resume); 1980 __ jmp(&resume);
1981 1981
1982 __ bind(&suspend); 1982 __ bind(&suspend);
1983 VisitForAccumulatorValue(expr->generator_object()); 1983 VisitForAccumulatorValue(expr->generator_object());
1984 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 1984 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1985 __ mov(r1, Operand(Smi::FromInt(continuation.pos()))); 1985 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
1986 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 1986 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
1987 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 1987 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
1988 __ mov(r1, cp); 1988 __ mov(r1, cp);
1989 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 1989 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
1990 kLRHasBeenSaved, kDontSaveFPRegs); 1990 kLRHasBeenSaved, kDontSaveFPRegs);
1991 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); 1991 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1992 __ cmp(sp, r1); 1992 __ cmp(sp, r1);
1993 __ b(eq, &post_runtime); 1993 __ b(eq, &post_runtime);
1994 __ push(r0); // generator object 1994 __ push(r0); // generator object
(...skipping 52 matching lines...)
2047 __ PushTryHandler(StackHandler::CATCH, expr->index()); 2047 __ PushTryHandler(StackHandler::CATCH, expr->index());
2048 const int handler_size = StackHandlerConstants::kSize; 2048 const int handler_size = StackHandlerConstants::kSize;
2049 __ push(r0); // result 2049 __ push(r0); // result
2050 __ jmp(&l_suspend); 2050 __ jmp(&l_suspend);
2051 __ bind(&l_continuation); 2051 __ bind(&l_continuation);
2052 __ jmp(&l_resume); 2052 __ jmp(&l_resume);
2053 __ bind(&l_suspend); 2053 __ bind(&l_suspend);
2054 const int generator_object_depth = kPointerSize + handler_size; 2054 const int generator_object_depth = kPointerSize + handler_size;
2055 __ ldr(r0, MemOperand(sp, generator_object_depth)); 2055 __ ldr(r0, MemOperand(sp, generator_object_depth));
2056 __ push(r0); // g 2056 __ push(r0); // g
2057 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2057 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2058 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos()))); 2058 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2059 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 2059 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2060 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 2060 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2061 __ mov(r1, cp); 2061 __ mov(r1, cp);
2062 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 2062 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2063 kLRHasBeenSaved, kDontSaveFPRegs); 2063 kLRHasBeenSaved, kDontSaveFPRegs);
2064 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2064 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2065 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2065 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2066 __ pop(r0); // result 2066 __ pop(r0); // result
2067 EmitReturnSequence(); 2067 EmitReturnSequence();
(...skipping 144 matching lines...)
2212 2212
2213 // Otherwise, we push holes for the operand stack and call the runtime to fix 2213 // Otherwise, we push holes for the operand stack and call the runtime to fix
2214 // up the stack and the handlers. 2214 // up the stack and the handlers.
2215 Label push_operand_holes, call_resume; 2215 Label push_operand_holes, call_resume;
2216 __ bind(&push_operand_holes); 2216 __ bind(&push_operand_holes);
2217 __ sub(r3, r3, Operand(1), SetCC); 2217 __ sub(r3, r3, Operand(1), SetCC);
2218 __ b(mi, &call_resume); 2218 __ b(mi, &call_resume);
2219 __ push(r2); 2219 __ push(r2);
2220 __ b(&push_operand_holes); 2220 __ b(&push_operand_holes);
2221 __ bind(&call_resume); 2221 __ bind(&call_resume);
2222 ASSERT(!result_register().is(r1)); 2222 DCHECK(!result_register().is(r1));
2223 __ Push(r1, result_register()); 2223 __ Push(r1, result_register());
2224 __ Push(Smi::FromInt(resume_mode)); 2224 __ Push(Smi::FromInt(resume_mode));
2225 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); 2225 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2226 // Not reached: the runtime call returns elsewhere. 2226 // Not reached: the runtime call returns elsewhere.
2227 __ stop("not-reached"); 2227 __ stop("not-reached");
2228 2228
2229 // Reach here when generator is closed. 2229 // Reach here when generator is closed.
2230 __ bind(&closed_state); 2230 __ bind(&closed_state);
2231 if (resume_mode == JSGeneratorObject::NEXT) { 2231 if (resume_mode == JSGeneratorObject::NEXT) {
2232 // Return completed iterator result when generator is closed. 2232 // Return completed iterator result when generator is closed.
(...skipping 31 matching lines...)
2264 __ Push(Smi::FromInt(map->instance_size())); 2264 __ Push(Smi::FromInt(map->instance_size()));
2265 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 2265 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2266 __ ldr(context_register(), 2266 __ ldr(context_register(),
2267 MemOperand(fp, StandardFrameConstants::kContextOffset)); 2267 MemOperand(fp, StandardFrameConstants::kContextOffset));
2268 2268
2269 __ bind(&allocated); 2269 __ bind(&allocated);
2270 __ mov(r1, Operand(map)); 2270 __ mov(r1, Operand(map));
2271 __ pop(r2); 2271 __ pop(r2);
2272 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done))); 2272 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2273 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array())); 2273 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2274 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); 2274 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2275 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2275 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2276 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 2276 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2277 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); 2277 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2278 __ str(r2, 2278 __ str(r2,
2279 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset)); 2279 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2280 __ str(r3, 2280 __ str(r3,
2281 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset)); 2281 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2282 2282
2283 // Only the value field needs a write barrier, as the other values are in the 2283 // Only the value field needs a write barrier, as the other values are in the
2284 // root set. 2284 // root set.
(...skipping 129 matching lines...)
2414 __ pop(r1); 2414 __ pop(r1);
2415 BinaryOpICStub stub(isolate(), op, mode); 2415 BinaryOpICStub stub(isolate(), op, mode);
2416 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2416 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2417 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); 2417 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2418 patch_site.EmitPatchInfo(); 2418 patch_site.EmitPatchInfo();
2419 context()->Plug(r0); 2419 context()->Plug(r0);
2420 } 2420 }
2421 2421
2422 2422
2423 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2423 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2424 ASSERT(expr->IsValidReferenceExpression()); 2424 DCHECK(expr->IsValidReferenceExpression());
2425 2425
2426 // Left-hand side can only be a property, a global or a (parameter or local) 2426 // Left-hand side can only be a property, a global or a (parameter or local)
2427 // slot. 2427 // slot.
2428 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2428 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2429 LhsKind assign_type = VARIABLE; 2429 LhsKind assign_type = VARIABLE;
2430 Property* prop = expr->AsProperty(); 2430 Property* prop = expr->AsProperty();
2431 if (prop != NULL) { 2431 if (prop != NULL) {
2432 assign_type = (prop->key()->IsPropertyName()) 2432 assign_type = (prop->key()->IsPropertyName())
2433 ? NAMED_PROPERTY 2433 ? NAMED_PROPERTY
2434 : KEYED_PROPERTY; 2434 : KEYED_PROPERTY;
(...skipping 48 matching lines...)
2483 2483
2484 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { 2484 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2485 if (var->IsUnallocated()) { 2485 if (var->IsUnallocated()) {
2486 // Global var, const, or let. 2486 // Global var, const, or let.
2487 __ mov(StoreIC::NameRegister(), Operand(var->name())); 2487 __ mov(StoreIC::NameRegister(), Operand(var->name()));
2488 __ ldr(StoreIC::ReceiverRegister(), GlobalObjectOperand()); 2488 __ ldr(StoreIC::ReceiverRegister(), GlobalObjectOperand());
2489 CallStoreIC(); 2489 CallStoreIC();
2490 2490
2491 } else if (op == Token::INIT_CONST_LEGACY) { 2491 } else if (op == Token::INIT_CONST_LEGACY) {
2492 // Const initializers need a write barrier. 2492 // Const initializers need a write barrier.
2493 ASSERT(!var->IsParameter()); // No const parameters. 2493 DCHECK(!var->IsParameter()); // No const parameters.
2494 if (var->IsLookupSlot()) { 2494 if (var->IsLookupSlot()) {
2495 __ push(r0); 2495 __ push(r0);
2496 __ mov(r0, Operand(var->name())); 2496 __ mov(r0, Operand(var->name()));
2497 __ Push(cp, r0); // Context and name. 2497 __ Push(cp, r0); // Context and name.
2498 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); 2498 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2499 } else { 2499 } else {
2500 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2500 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2501 Label skip; 2501 Label skip;
2502 MemOperand location = VarOperand(var, r1); 2502 MemOperand location = VarOperand(var, r1);
2503 __ ldr(r2, location); 2503 __ ldr(r2, location);
2504 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2504 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2505 __ b(ne, &skip); 2505 __ b(ne, &skip);
2506 EmitStoreToStackLocalOrContextSlot(var, location); 2506 EmitStoreToStackLocalOrContextSlot(var, location);
2507 __ bind(&skip); 2507 __ bind(&skip);
2508 } 2508 }
2509 2509
2510 } else if (var->mode() == LET && op != Token::INIT_LET) { 2510 } else if (var->mode() == LET && op != Token::INIT_LET) {
2511 // Non-initializing assignment to let variable needs a write barrier. 2511 // Non-initializing assignment to let variable needs a write barrier.
2512 ASSERT(!var->IsLookupSlot()); 2512 DCHECK(!var->IsLookupSlot());
2513 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2513 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2514 Label assign; 2514 Label assign;
2515 MemOperand location = VarOperand(var, r1); 2515 MemOperand location = VarOperand(var, r1);
2516 __ ldr(r3, location); 2516 __ ldr(r3, location);
2517 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 2517 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2518 __ b(ne, &assign); 2518 __ b(ne, &assign);
2519 __ mov(r3, Operand(var->name())); 2519 __ mov(r3, Operand(var->name()));
2520 __ push(r3); 2520 __ push(r3);
2521 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2521 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2522 // Perform the assignment. 2522 // Perform the assignment.
2523 __ bind(&assign); 2523 __ bind(&assign);
2524 EmitStoreToStackLocalOrContextSlot(var, location); 2524 EmitStoreToStackLocalOrContextSlot(var, location);
2525 2525
2526 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { 2526 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2527 if (var->IsLookupSlot()) { 2527 if (var->IsLookupSlot()) {
2528 // Assignment to var. 2528 // Assignment to var.
2529 __ push(r0); // Value. 2529 __ push(r0); // Value.
2530 __ mov(r1, Operand(var->name())); 2530 __ mov(r1, Operand(var->name()));
2531 __ mov(r0, Operand(Smi::FromInt(strict_mode()))); 2531 __ mov(r0, Operand(Smi::FromInt(strict_mode())));
2532 __ Push(cp, r1, r0); // Context, name, strict mode. 2532 __ Push(cp, r1, r0); // Context, name, strict mode.
2533 __ CallRuntime(Runtime::kStoreLookupSlot, 4); 2533 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2534 } else { 2534 } else {
2535 // Assignment to var or initializing assignment to let/const in harmony 2535 // Assignment to var or initializing assignment to let/const in harmony
2536 // mode. 2536 // mode.
2537 ASSERT((var->IsStackAllocated() || var->IsContextSlot())); 2537 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2538 MemOperand location = VarOperand(var, r1); 2538 MemOperand location = VarOperand(var, r1);
2539 if (generate_debug_code_ && op == Token::INIT_LET) { 2539 if (generate_debug_code_ && op == Token::INIT_LET) {
2540 // Check for an uninitialized let binding. 2540 // Check for an uninitialized let binding.
2541 __ ldr(r2, location); 2541 __ ldr(r2, location);
2542 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2542 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2543 __ Check(eq, kLetBindingReInitialization); 2543 __ Check(eq, kLetBindingReInitialization);
2544 } 2544 }
2545 EmitStoreToStackLocalOrContextSlot(var, location); 2545 EmitStoreToStackLocalOrContextSlot(var, location);
2546 } 2546 }
2547 } 2547 }
2548 // Non-initializing assignments to consts are ignored. 2548 // Non-initializing assignments to consts are ignored.
2549 } 2549 }
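Context for the rename under review: ASSERT here becomes DCHECK, a check that is active only in debug builds. As a rough stand-alone illustration of that pattern (an invented MY_DCHECK macro, not V8's actual src/base/logging.h definitions):

  #include <cstdio>
  #include <cstdlib>

  #ifdef DEBUG
  #define MY_DCHECK(condition)                                        \
    do {                                                              \
      if (!(condition)) {                                             \
        std::fprintf(stderr, "Debug check failed: %s\n", #condition); \
        std::abort();                                                 \
      }                                                               \
    } while (false)
  #else
  #define MY_DCHECK(condition) ((void)0)  // compiled out of release builds
  #endif

The _EQ/_NE variants used further down in this file follow the same debug-only shape, just with two operands.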
2550 2550
2551 2551
2552 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2552 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2553 // Assignment to a property, using a named store IC. 2553 // Assignment to a property, using a named store IC.
2554 Property* prop = expr->target()->AsProperty(); 2554 Property* prop = expr->target()->AsProperty();
2555 ASSERT(prop != NULL); 2555 DCHECK(prop != NULL);
2556 ASSERT(prop->key()->IsLiteral()); 2556 DCHECK(prop->key()->IsLiteral());
2557 2557
2558 // Record source code position before IC call. 2558 // Record source code position before IC call.
2559 SetSourcePosition(expr->position()); 2559 SetSourcePosition(expr->position());
2560 __ mov(StoreIC::NameRegister(), Operand(prop->key()->AsLiteral()->value())); 2560 __ mov(StoreIC::NameRegister(), Operand(prop->key()->AsLiteral()->value()));
2561 __ pop(StoreIC::ReceiverRegister()); 2561 __ pop(StoreIC::ReceiverRegister());
2562 CallStoreIC(expr->AssignmentFeedbackId()); 2562 CallStoreIC(expr->AssignmentFeedbackId());
2563 2563
2564 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2564 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2565 context()->Plug(r0); 2565 context()->Plug(r0);
2566 } 2566 }
2567 2567
2568 2568
2569 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2569 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2570 // Assignment to a property, using a keyed store IC. 2570 // Assignment to a property, using a keyed store IC.
2571 2571
2572 // Record source code position before IC call. 2572 // Record source code position before IC call.
2573 SetSourcePosition(expr->position()); 2573 SetSourcePosition(expr->position());
2574 __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister()); 2574 __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister());
2575 ASSERT(KeyedStoreIC::ValueRegister().is(r0)); 2575 DCHECK(KeyedStoreIC::ValueRegister().is(r0));
2576 2576
2577 Handle<Code> ic = strict_mode() == SLOPPY 2577 Handle<Code> ic = strict_mode() == SLOPPY
2578 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2578 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2579 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2579 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2580 CallIC(ic, expr->AssignmentFeedbackId()); 2580 CallIC(ic, expr->AssignmentFeedbackId());
2581 2581
2582 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2582 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2583 context()->Plug(r0); 2583 context()->Plug(r0);
2584 } 2584 }
2585 2585
(...skipping 41 matching lines...)
2627 if (call_type == CallIC::FUNCTION) { 2627 if (call_type == CallIC::FUNCTION) {
2628 { StackValueContext context(this); 2628 { StackValueContext context(this);
2629 EmitVariableLoad(callee->AsVariableProxy()); 2629 EmitVariableLoad(callee->AsVariableProxy());
2630 PrepareForBailout(callee, NO_REGISTERS); 2630 PrepareForBailout(callee, NO_REGISTERS);
2631 } 2631 }
2632 // Push undefined as receiver. This is patched in the method prologue if it 2632 // Push undefined as receiver. This is patched in the method prologue if it
2633 // is a sloppy mode method. 2633 // is a sloppy mode method.
2634 __ Push(isolate()->factory()->undefined_value()); 2634 __ Push(isolate()->factory()->undefined_value());
2635 } else { 2635 } else {
2636 // Load the function from the receiver. 2636 // Load the function from the receiver.
2637 ASSERT(callee->IsProperty()); 2637 DCHECK(callee->IsProperty());
2638 __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); 2638 __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
2639 EmitNamedPropertyLoad(callee->AsProperty()); 2639 EmitNamedPropertyLoad(callee->AsProperty());
2640 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2640 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2641 // Push the target function under the receiver. 2641 // Push the target function under the receiver.
2642 __ ldr(ip, MemOperand(sp, 0)); 2642 __ ldr(ip, MemOperand(sp, 0));
2643 __ push(ip); 2643 __ push(ip);
2644 __ str(r0, MemOperand(sp, kPointerSize)); 2644 __ str(r0, MemOperand(sp, kPointerSize));
2645 } 2645 }
2646 2646
2647 EmitCall(expr, call_type); 2647 EmitCall(expr, call_type);
2648 } 2648 }
2649 2649
2650 2650
2651 // Code common for calls using the IC. 2651 // Code common for calls using the IC.
2652 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2652 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2653 Expression* key) { 2653 Expression* key) {
2654 // Load the key. 2654 // Load the key.
2655 VisitForAccumulatorValue(key); 2655 VisitForAccumulatorValue(key);
2656 2656
2657 Expression* callee = expr->expression(); 2657 Expression* callee = expr->expression();
2658 2658
2659 // Load the function from the receiver. 2659 // Load the function from the receiver.
2660 ASSERT(callee->IsProperty()); 2660 DCHECK(callee->IsProperty());
2661 __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); 2661 __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
2662 __ Move(LoadIC::NameRegister(), r0); 2662 __ Move(LoadIC::NameRegister(), r0);
2663 EmitKeyedPropertyLoad(callee->AsProperty()); 2663 EmitKeyedPropertyLoad(callee->AsProperty());
2664 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2664 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2665 2665
2666 // Push the target function under the receiver. 2666 // Push the target function under the receiver.
2667 __ ldr(ip, MemOperand(sp, 0)); 2667 __ ldr(ip, MemOperand(sp, 0));
2668 __ push(ip); 2668 __ push(ip);
2669 __ str(r0, MemOperand(sp, kPointerSize)); 2669 __ str(r0, MemOperand(sp, kPointerSize));
2670 2670
(...skipping 112 matching lines...)
2783 2783
2784 { PreservePositionScope scope(masm()->positions_recorder()); 2784 { PreservePositionScope scope(masm()->positions_recorder());
2785 // Generate code for loading from variables potentially shadowed 2785 // Generate code for loading from variables potentially shadowed
2786 // by eval-introduced variables. 2786 // by eval-introduced variables.
2787 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done); 2787 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2788 } 2788 }
2789 2789
2790 __ bind(&slow); 2790 __ bind(&slow);
2791 // Call the runtime to find the function to call (returned in r0) 2791 // Call the runtime to find the function to call (returned in r0)
2792 // and the object holding it (returned in r1). 2792 // and the object holding it (returned in r1).
2793 ASSERT(!context_register().is(r2)); 2793 DCHECK(!context_register().is(r2));
2794 __ mov(r2, Operand(proxy->name())); 2794 __ mov(r2, Operand(proxy->name()));
2795 __ Push(context_register(), r2); 2795 __ Push(context_register(), r2);
2796 __ CallRuntime(Runtime::kLoadLookupSlot, 2); 2796 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2797 __ Push(r0, r1); // Function, receiver. 2797 __ Push(r0, r1); // Function, receiver.
2798 2798
2799 // If fast case code has been generated, emit code to push the 2799 // If fast case code has been generated, emit code to push the
2800 // function and receiver and have the slow path jump around this 2800 // function and receiver and have the slow path jump around this
2801 // code. 2801 // code.
2802 if (done.is_linked()) { 2802 if (done.is_linked()) {
2803 Label call; 2803 Label call;
(...skipping 15 matching lines...)
2819 Property* property = callee->AsProperty(); 2819 Property* property = callee->AsProperty();
2820 { PreservePositionScope scope(masm()->positions_recorder()); 2820 { PreservePositionScope scope(masm()->positions_recorder());
2821 VisitForStackValue(property->obj()); 2821 VisitForStackValue(property->obj());
2822 } 2822 }
2823 if (property->key()->IsPropertyName()) { 2823 if (property->key()->IsPropertyName()) {
2824 EmitCallWithLoadIC(expr); 2824 EmitCallWithLoadIC(expr);
2825 } else { 2825 } else {
2826 EmitKeyedCallWithLoadIC(expr, property->key()); 2826 EmitKeyedCallWithLoadIC(expr, property->key());
2827 } 2827 }
2828 } else { 2828 } else {
2829 ASSERT(call_type == Call::OTHER_CALL); 2829 DCHECK(call_type == Call::OTHER_CALL);
2830 // Call to an arbitrary expression not handled specially above. 2830 // Call to an arbitrary expression not handled specially above.
2831 { PreservePositionScope scope(masm()->positions_recorder()); 2831 { PreservePositionScope scope(masm()->positions_recorder());
2832 VisitForStackValue(callee); 2832 VisitForStackValue(callee);
2833 } 2833 }
2834 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 2834 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2835 __ push(r1); 2835 __ push(r1);
2836 // Emit function call. 2836 // Emit function call.
2837 EmitCall(expr); 2837 EmitCall(expr);
2838 } 2838 }
2839 2839
2840 #ifdef DEBUG 2840 #ifdef DEBUG
2841 // RecordJSReturnSite should have been called. 2841 // RecordJSReturnSite should have been called.
2842 ASSERT(expr->return_is_recorded_); 2842 DCHECK(expr->return_is_recorded_);
2843 #endif 2843 #endif
2844 } 2844 }
2845 2845
2846 2846
2847 void FullCodeGenerator::VisitCallNew(CallNew* expr) { 2847 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2848 Comment cmnt(masm_, "[ CallNew"); 2848 Comment cmnt(masm_, "[ CallNew");
2849 // According to ECMA-262, section 11.2.2, page 44, the function 2849 // According to ECMA-262, section 11.2.2, page 44, the function
2850 // expression in new calls must be evaluated before the 2850 // expression in new calls must be evaluated before the
2851 // arguments. 2851 // arguments.
2852 2852
(...skipping 13 matching lines...)
2866 // constructor invocation. 2866 // constructor invocation.
2867 SetSourcePosition(expr->position()); 2867 SetSourcePosition(expr->position());
2868 2868
2869 // Load function and argument count into r1 and r0. 2869 // Load function and argument count into r1 and r0.
2870 __ mov(r0, Operand(arg_count)); 2870 __ mov(r0, Operand(arg_count));
2871 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 2871 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2872 2872
2873 // Record call targets in unoptimized code. 2873 // Record call targets in unoptimized code.
2874 if (FLAG_pretenuring_call_new) { 2874 if (FLAG_pretenuring_call_new) {
2875 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); 2875 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2876 ASSERT(expr->AllocationSiteFeedbackSlot() == 2876 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2877 expr->CallNewFeedbackSlot() + 1); 2877 expr->CallNewFeedbackSlot() + 1);
2878 } 2878 }
2879 2879
2880 __ Move(r2, FeedbackVector()); 2880 __ Move(r2, FeedbackVector());
2881 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); 2881 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2882 2882
2883 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); 2883 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2884 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); 2884 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2885 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2885 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2886 context()->Plug(r0); 2886 context()->Plug(r0);
2887 } 2887 }
2888 2888
2889 2889
2890 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2890 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2891 ZoneList<Expression*>* args = expr->arguments(); 2891 ZoneList<Expression*>* args = expr->arguments();
2892 ASSERT(args->length() == 1); 2892 DCHECK(args->length() == 1);
2893 2893
2894 VisitForAccumulatorValue(args->at(0)); 2894 VisitForAccumulatorValue(args->at(0));
2895 2895
2896 Label materialize_true, materialize_false; 2896 Label materialize_true, materialize_false;
2897 Label* if_true = NULL; 2897 Label* if_true = NULL;
2898 Label* if_false = NULL; 2898 Label* if_false = NULL;
2899 Label* fall_through = NULL; 2899 Label* fall_through = NULL;
2900 context()->PrepareTest(&materialize_true, &materialize_false, 2900 context()->PrepareTest(&materialize_true, &materialize_false,
2901 &if_true, &if_false, &fall_through); 2901 &if_true, &if_false, &fall_through);
2902 2902
2903 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2903 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2904 __ SmiTst(r0); 2904 __ SmiTst(r0);
2905 Split(eq, if_true, if_false, fall_through); 2905 Split(eq, if_true, if_false, fall_through);
2906 2906
2907 context()->Plug(if_true, if_false); 2907 context()->Plug(if_true, if_false);
2908 } 2908 }
2909 2909
2910 2910
2911 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 2911 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2912 ZoneList<Expression*>* args = expr->arguments(); 2912 ZoneList<Expression*>* args = expr->arguments();
2913 ASSERT(args->length() == 1); 2913 DCHECK(args->length() == 1);
2914 2914
2915 VisitForAccumulatorValue(args->at(0)); 2915 VisitForAccumulatorValue(args->at(0));
2916 2916
2917 Label materialize_true, materialize_false; 2917 Label materialize_true, materialize_false;
2918 Label* if_true = NULL; 2918 Label* if_true = NULL;
2919 Label* if_false = NULL; 2919 Label* if_false = NULL;
2920 Label* fall_through = NULL; 2920 Label* fall_through = NULL;
2921 context()->PrepareTest(&materialize_true, &materialize_false, 2921 context()->PrepareTest(&materialize_true, &materialize_false,
2922 &if_true, &if_false, &fall_through); 2922 &if_true, &if_false, &fall_through);
2923 2923
2924 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2924 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2925 __ NonNegativeSmiTst(r0); 2925 __ NonNegativeSmiTst(r0);
2926 Split(eq, if_true, if_false, fall_through); 2926 Split(eq, if_true, if_false, fall_through);
2927 2927
2928 context()->Plug(if_true, if_false); 2928 context()->Plug(if_true, if_false);
2929 } 2929 }
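The SmiTst and NonNegativeSmiTst checks in the two helpers above reduce to single bit tests on the tagged word. A stand-alone sketch, assuming the 32-bit smi layout this ARM port uses (value shifted left by one, tag bit zero) and using invented helper names:

  #include <cstdint>

  constexpr std::uint32_t kSmiTagMaskSketch = 1u;   // low bit holds the smi tag
  constexpr std::uint32_t kSignBitSketch = 0x80000000u;

  inline bool IsSmiSketch(std::uint32_t tagged) {
    return (tagged & kSmiTagMaskSketch) == 0;       // smis have a clear tag bit
  }

  inline bool IsNonNegativeSmiSketch(std::uint32_t tagged) {
    // Tag bit and sign bit must both be clear, matching the single tst above.
    return (tagged & (kSmiTagMaskSketch | kSignBitSketch)) == 0;
  }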
2930 2930
2931 2931
2932 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 2932 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2933 ZoneList<Expression*>* args = expr->arguments(); 2933 ZoneList<Expression*>* args = expr->arguments();
2934 ASSERT(args->length() == 1); 2934 DCHECK(args->length() == 1);
2935 2935
2936 VisitForAccumulatorValue(args->at(0)); 2936 VisitForAccumulatorValue(args->at(0));
2937 2937
2938 Label materialize_true, materialize_false; 2938 Label materialize_true, materialize_false;
2939 Label* if_true = NULL; 2939 Label* if_true = NULL;
2940 Label* if_false = NULL; 2940 Label* if_false = NULL;
2941 Label* fall_through = NULL; 2941 Label* fall_through = NULL;
2942 context()->PrepareTest(&materialize_true, &materialize_false, 2942 context()->PrepareTest(&materialize_true, &materialize_false,
2943 &if_true, &if_false, &fall_through); 2943 &if_true, &if_false, &fall_through);
2944 2944
(...skipping 12 matching lines...)
2957 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); 2957 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2958 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2958 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2959 Split(le, if_true, if_false, fall_through); 2959 Split(le, if_true, if_false, fall_through);
2960 2960
2961 context()->Plug(if_true, if_false); 2961 context()->Plug(if_true, if_false);
2962 } 2962 }
2963 2963
2964 2964
2965 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 2965 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2966 ZoneList<Expression*>* args = expr->arguments(); 2966 ZoneList<Expression*>* args = expr->arguments();
2967 ASSERT(args->length() == 1); 2967 DCHECK(args->length() == 1);
2968 2968
2969 VisitForAccumulatorValue(args->at(0)); 2969 VisitForAccumulatorValue(args->at(0));
2970 2970
2971 Label materialize_true, materialize_false; 2971 Label materialize_true, materialize_false;
2972 Label* if_true = NULL; 2972 Label* if_true = NULL;
2973 Label* if_false = NULL; 2973 Label* if_false = NULL;
2974 Label* fall_through = NULL; 2974 Label* fall_through = NULL;
2975 context()->PrepareTest(&materialize_true, &materialize_false, 2975 context()->PrepareTest(&materialize_true, &materialize_false,
2976 &if_true, &if_false, &fall_through); 2976 &if_true, &if_false, &fall_through);
2977 2977
2978 __ JumpIfSmi(r0, if_false); 2978 __ JumpIfSmi(r0, if_false);
2979 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 2979 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
2980 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2980 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2981 Split(ge, if_true, if_false, fall_through); 2981 Split(ge, if_true, if_false, fall_through);
2982 2982
2983 context()->Plug(if_true, if_false); 2983 context()->Plug(if_true, if_false);
2984 } 2984 }
2985 2985
2986 2986
2987 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 2987 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2988 ZoneList<Expression*>* args = expr->arguments(); 2988 ZoneList<Expression*>* args = expr->arguments();
2989 ASSERT(args->length() == 1); 2989 DCHECK(args->length() == 1);
2990 2990
2991 VisitForAccumulatorValue(args->at(0)); 2991 VisitForAccumulatorValue(args->at(0));
2992 2992
2993 Label materialize_true, materialize_false; 2993 Label materialize_true, materialize_false;
2994 Label* if_true = NULL; 2994 Label* if_true = NULL;
2995 Label* if_false = NULL; 2995 Label* if_false = NULL;
2996 Label* fall_through = NULL; 2996 Label* fall_through = NULL;
2997 context()->PrepareTest(&materialize_true, &materialize_false, 2997 context()->PrepareTest(&materialize_true, &materialize_false,
2998 &if_true, &if_false, &fall_through); 2998 &if_true, &if_false, &fall_through);
2999 2999
3000 __ JumpIfSmi(r0, if_false); 3000 __ JumpIfSmi(r0, if_false);
3001 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3001 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
3002 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 3002 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
3003 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3003 __ tst(r1, Operand(1 << Map::kIsUndetectable));
3004 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3004 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3005 Split(ne, if_true, if_false, fall_through); 3005 Split(ne, if_true, if_false, fall_through);
3006 3006
3007 context()->Plug(if_true, if_false); 3007 context()->Plug(if_true, if_false);
3008 } 3008 }
3009 3009
3010 3010
3011 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 3011 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3012 CallRuntime* expr) { 3012 CallRuntime* expr) {
3013 ZoneList<Expression*>* args = expr->arguments(); 3013 ZoneList<Expression*>* args = expr->arguments();
3014 ASSERT(args->length() == 1); 3014 DCHECK(args->length() == 1);
3015 3015
3016 VisitForAccumulatorValue(args->at(0)); 3016 VisitForAccumulatorValue(args->at(0));
3017 3017
3018 Label materialize_true, materialize_false, skip_lookup; 3018 Label materialize_true, materialize_false, skip_lookup;
3019 Label* if_true = NULL; 3019 Label* if_true = NULL;
3020 Label* if_false = NULL; 3020 Label* if_false = NULL;
3021 Label* fall_through = NULL; 3021 Label* fall_through = NULL;
3022 context()->PrepareTest(&materialize_true, &materialize_false, 3022 context()->PrepareTest(&materialize_true, &materialize_false,
3023 &if_true, &if_false, &fall_through); 3023 &if_true, &if_false, &fall_through);
3024 3024
(...skipping 67 matching lines...)
3092 __ cmp(r2, r3); 3092 __ cmp(r2, r3);
3093 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3093 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3094 Split(eq, if_true, if_false, fall_through); 3094 Split(eq, if_true, if_false, fall_through);
3095 3095
3096 context()->Plug(if_true, if_false); 3096 context()->Plug(if_true, if_false);
3097 } 3097 }
3098 3098
3099 3099
3100 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 3100 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3101 ZoneList<Expression*>* args = expr->arguments(); 3101 ZoneList<Expression*>* args = expr->arguments();
3102 ASSERT(args->length() == 1); 3102 DCHECK(args->length() == 1);
3103 3103
3104 VisitForAccumulatorValue(args->at(0)); 3104 VisitForAccumulatorValue(args->at(0));
3105 3105
3106 Label materialize_true, materialize_false; 3106 Label materialize_true, materialize_false;
3107 Label* if_true = NULL; 3107 Label* if_true = NULL;
3108 Label* if_false = NULL; 3108 Label* if_false = NULL;
3109 Label* fall_through = NULL; 3109 Label* fall_through = NULL;
3110 context()->PrepareTest(&materialize_true, &materialize_false, 3110 context()->PrepareTest(&materialize_true, &materialize_false,
3111 &if_true, &if_false, &fall_through); 3111 &if_true, &if_false, &fall_through);
3112 3112
3113 __ JumpIfSmi(r0, if_false); 3113 __ JumpIfSmi(r0, if_false);
3114 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 3114 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
3115 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3115 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3116 Split(eq, if_true, if_false, fall_through); 3116 Split(eq, if_true, if_false, fall_through);
3117 3117
3118 context()->Plug(if_true, if_false); 3118 context()->Plug(if_true, if_false);
3119 } 3119 }
3120 3120
3121 3121
3122 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 3122 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3123 ZoneList<Expression*>* args = expr->arguments(); 3123 ZoneList<Expression*>* args = expr->arguments();
3124 ASSERT(args->length() == 1); 3124 DCHECK(args->length() == 1);
3125 3125
3126 VisitForAccumulatorValue(args->at(0)); 3126 VisitForAccumulatorValue(args->at(0));
3127 3127
3128 Label materialize_true, materialize_false; 3128 Label materialize_true, materialize_false;
3129 Label* if_true = NULL; 3129 Label* if_true = NULL;
3130 Label* if_false = NULL; 3130 Label* if_false = NULL;
3131 Label* fall_through = NULL; 3131 Label* fall_through = NULL;
3132 context()->PrepareTest(&materialize_true, &materialize_false, 3132 context()->PrepareTest(&materialize_true, &materialize_false,
3133 &if_true, &if_false, &fall_through); 3133 &if_true, &if_false, &fall_through);
3134 3134
3135 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); 3135 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3136 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 3136 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3137 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 3137 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3138 __ cmp(r2, Operand(0x80000000)); 3138 __ cmp(r2, Operand(0x80000000));
3139 __ cmp(r1, Operand(0x00000000), eq); 3139 __ cmp(r1, Operand(0x00000000), eq);
3140 3140
3141 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3141 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3142 Split(eq, if_true, if_false, fall_through); 3142 Split(eq, if_true, if_false, fall_through);
3143 3143
3144 context()->Plug(if_true, if_false); 3144 context()->Plug(if_true, if_false);
3145 } 3145 }
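EmitIsMinusZero compares the heap number's exponent word against 0x80000000 and its mantissa word against zero, which is exactly the IEEE-754 bit pattern of -0.0 split into two 32-bit words. The same test on a plain double, as a sketch with an invented helper name:

  #include <cstdint>
  #include <cstring>

  inline bool IsMinusZeroSketch(double value) {
    std::uint64_t bits;
    std::memcpy(&bits, &value, sizeof(bits));  // read the raw IEEE-754 bits
    return bits == 0x8000000000000000ull;      // sign bit set, all else zero
  }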
3146 3146
3147 3147
3148 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 3148 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3149 ZoneList<Expression*>* args = expr->arguments(); 3149 ZoneList<Expression*>* args = expr->arguments();
3150 ASSERT(args->length() == 1); 3150 DCHECK(args->length() == 1);
3151 3151
3152 VisitForAccumulatorValue(args->at(0)); 3152 VisitForAccumulatorValue(args->at(0));
3153 3153
3154 Label materialize_true, materialize_false; 3154 Label materialize_true, materialize_false;
3155 Label* if_true = NULL; 3155 Label* if_true = NULL;
3156 Label* if_false = NULL; 3156 Label* if_false = NULL;
3157 Label* fall_through = NULL; 3157 Label* fall_through = NULL;
3158 context()->PrepareTest(&materialize_true, &materialize_false, 3158 context()->PrepareTest(&materialize_true, &materialize_false,
3159 &if_true, &if_false, &fall_through); 3159 &if_true, &if_false, &fall_through);
3160 3160
3161 __ JumpIfSmi(r0, if_false); 3161 __ JumpIfSmi(r0, if_false);
3162 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 3162 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
3163 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3163 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3164 Split(eq, if_true, if_false, fall_through); 3164 Split(eq, if_true, if_false, fall_through);
3165 3165
3166 context()->Plug(if_true, if_false); 3166 context()->Plug(if_true, if_false);
3167 } 3167 }
3168 3168
3169 3169
3170 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 3170 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3171 ZoneList<Expression*>* args = expr->arguments(); 3171 ZoneList<Expression*>* args = expr->arguments();
3172 ASSERT(args->length() == 1); 3172 DCHECK(args->length() == 1);
3173 3173
3174 VisitForAccumulatorValue(args->at(0)); 3174 VisitForAccumulatorValue(args->at(0));
3175 3175
3176 Label materialize_true, materialize_false; 3176 Label materialize_true, materialize_false;
3177 Label* if_true = NULL; 3177 Label* if_true = NULL;
3178 Label* if_false = NULL; 3178 Label* if_false = NULL;
3179 Label* fall_through = NULL; 3179 Label* fall_through = NULL;
3180 context()->PrepareTest(&materialize_true, &materialize_false, 3180 context()->PrepareTest(&materialize_true, &materialize_false,
3181 &if_true, &if_false, &fall_through); 3181 &if_true, &if_false, &fall_through);
3182 3182
3183 __ JumpIfSmi(r0, if_false); 3183 __ JumpIfSmi(r0, if_false);
3184 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 3184 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
3185 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3185 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3186 Split(eq, if_true, if_false, fall_through); 3186 Split(eq, if_true, if_false, fall_through);
3187 3187
3188 context()->Plug(if_true, if_false); 3188 context()->Plug(if_true, if_false);
3189 } 3189 }
3190 3190
3191 3191
3192 3192
3193 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 3193 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3194 ASSERT(expr->arguments()->length() == 0); 3194 DCHECK(expr->arguments()->length() == 0);
3195 3195
3196 Label materialize_true, materialize_false; 3196 Label materialize_true, materialize_false;
3197 Label* if_true = NULL; 3197 Label* if_true = NULL;
3198 Label* if_false = NULL; 3198 Label* if_false = NULL;
3199 Label* fall_through = NULL; 3199 Label* fall_through = NULL;
3200 context()->PrepareTest(&materialize_true, &materialize_false, 3200 context()->PrepareTest(&materialize_true, &materialize_false,
3201 &if_true, &if_false, &fall_through); 3201 &if_true, &if_false, &fall_through);
3202 3202
3203 // Get the frame pointer for the calling frame. 3203 // Get the frame pointer for the calling frame.
3204 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3204 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3205 3205
3206 // Skip the arguments adaptor frame if it exists. 3206 // Skip the arguments adaptor frame if it exists.
3207 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3207 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
3208 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3208 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3209 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq); 3209 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
3210 3210
3211 // Check the marker in the calling frame. 3211 // Check the marker in the calling frame.
3212 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); 3212 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
3213 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 3213 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3214 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3214 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3215 Split(eq, if_true, if_false, fall_through); 3215 Split(eq, if_true, if_false, fall_through);
3216 3216
3217 context()->Plug(if_true, if_false); 3217 context()->Plug(if_true, if_false);
3218 } 3218 }
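The frame walk above loads the caller's frame pointer, skips an arguments adaptor frame if one is present, and then compares the frame marker against CONSTRUCT. An illustrative pseudo-C++ version of that walk, with invented types and field names rather than V8's real frame classes:

  #include <cstdint>

  struct FrameSketch {
    FrameSketch* caller_fp;         // StandardFrameConstants::kCallerFPOffset
    std::intptr_t context_or_type;  // kContextOffset; adaptor frames keep a marker here
    std::intptr_t marker;           // kMarkerOffset
  };

  inline bool IsConstructCallSketch(FrameSketch* fp,
                                    std::intptr_t adaptor_marker,
                                    std::intptr_t construct_marker) {
    FrameSketch* caller = fp->caller_fp;
    // Skip the arguments adaptor frame inserted for argument-count mismatches.
    if (caller->context_or_type == adaptor_marker) caller = caller->caller_fp;
    // Construct stub frames tag themselves with the CONSTRUCT marker.
    return caller->marker == construct_marker;
  }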
3219 3219
3220 3220
3221 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 3221 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3222 ZoneList<Expression*>* args = expr->arguments(); 3222 ZoneList<Expression*>* args = expr->arguments();
3223 ASSERT(args->length() == 2); 3223 DCHECK(args->length() == 2);
3224 3224
3225 // Load the two objects into registers and perform the comparison. 3225 // Load the two objects into registers and perform the comparison.
3226 VisitForStackValue(args->at(0)); 3226 VisitForStackValue(args->at(0));
3227 VisitForAccumulatorValue(args->at(1)); 3227 VisitForAccumulatorValue(args->at(1));
3228 3228
3229 Label materialize_true, materialize_false; 3229 Label materialize_true, materialize_false;
3230 Label* if_true = NULL; 3230 Label* if_true = NULL;
3231 Label* if_false = NULL; 3231 Label* if_false = NULL;
3232 Label* fall_through = NULL; 3232 Label* fall_through = NULL;
3233 context()->PrepareTest(&materialize_true, &materialize_false, 3233 context()->PrepareTest(&materialize_true, &materialize_false,
3234 &if_true, &if_false, &fall_through); 3234 &if_true, &if_false, &fall_through);
3235 3235
3236 __ pop(r1); 3236 __ pop(r1);
3237 __ cmp(r0, r1); 3237 __ cmp(r0, r1);
3238 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3238 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3239 Split(eq, if_true, if_false, fall_through); 3239 Split(eq, if_true, if_false, fall_through);
3240 3240
3241 context()->Plug(if_true, if_false); 3241 context()->Plug(if_true, if_false);
3242 } 3242 }
3243 3243
3244 3244
3245 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3245 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3246 ZoneList<Expression*>* args = expr->arguments(); 3246 ZoneList<Expression*>* args = expr->arguments();
3247 ASSERT(args->length() == 1); 3247 DCHECK(args->length() == 1);
3248 3248
3249 // ArgumentsAccessStub expects the key in r1 and the formal 3249 // ArgumentsAccessStub expects the key in r1 and the formal
3250 // parameter count in r0. 3250 // parameter count in r0.
3251 VisitForAccumulatorValue(args->at(0)); 3251 VisitForAccumulatorValue(args->at(0));
3252 __ mov(r1, r0); 3252 __ mov(r1, r0);
3253 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3253 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3254 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); 3254 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3255 __ CallStub(&stub); 3255 __ CallStub(&stub);
3256 context()->Plug(r0); 3256 context()->Plug(r0);
3257 } 3257 }
3258 3258
3259 3259
3260 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 3260 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3261 ASSERT(expr->arguments()->length() == 0); 3261 DCHECK(expr->arguments()->length() == 0);
3262 3262
3263 // Get the number of formal parameters. 3263 // Get the number of formal parameters.
3264 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3264 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3265 3265
3266 // Check if the calling frame is an arguments adaptor frame. 3266 // Check if the calling frame is an arguments adaptor frame.
3267 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3267 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3268 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3268 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
3269 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3269 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3270 3270
3271 // Arguments adaptor case: Read the arguments length from the 3271 // Arguments adaptor case: Read the arguments length from the
3272 // adaptor frame. 3272 // adaptor frame.
3273 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq); 3273 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
3274 3274
3275 context()->Plug(r0); 3275 context()->Plug(r0);
3276 } 3276 }
3277 3277
3278 3278
3279 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 3279 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3280 ZoneList<Expression*>* args = expr->arguments(); 3280 ZoneList<Expression*>* args = expr->arguments();
3281 ASSERT(args->length() == 1); 3281 DCHECK(args->length() == 1);
3282 Label done, null, function, non_function_constructor; 3282 Label done, null, function, non_function_constructor;
3283 3283
3284 VisitForAccumulatorValue(args->at(0)); 3284 VisitForAccumulatorValue(args->at(0));
3285 3285
3286 // If the object is a smi, we return null. 3286 // If the object is a smi, we return null.
3287 __ JumpIfSmi(r0, &null); 3287 __ JumpIfSmi(r0, &null);
3288 3288
3289 // Check that the object is a JS object but take special care of JS 3289 // Check that the object is a JS object but take special care of JS
3290 // functions to make sure they have 'Function' as their class. 3290 // functions to make sure they have 'Function' as their class.
3291 // Assume that there are only two callable types, and one of them is at 3291 // Assume that there are only two callable types, and one of them is at
(...skipping 42 matching lines...)
3334 __ bind(&done); 3334 __ bind(&done);
3335 3335
3336 context()->Plug(r0); 3336 context()->Plug(r0);
3337 } 3337 }
3338 3338
3339 3339
3340 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3340 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3341 // Load the arguments on the stack and call the stub. 3341 // Load the arguments on the stack and call the stub.
3342 SubStringStub stub(isolate()); 3342 SubStringStub stub(isolate());
3343 ZoneList<Expression*>* args = expr->arguments(); 3343 ZoneList<Expression*>* args = expr->arguments();
3344 ASSERT(args->length() == 3); 3344 DCHECK(args->length() == 3);
3345 VisitForStackValue(args->at(0)); 3345 VisitForStackValue(args->at(0));
3346 VisitForStackValue(args->at(1)); 3346 VisitForStackValue(args->at(1));
3347 VisitForStackValue(args->at(2)); 3347 VisitForStackValue(args->at(2));
3348 __ CallStub(&stub); 3348 __ CallStub(&stub);
3349 context()->Plug(r0); 3349 context()->Plug(r0);
3350 } 3350 }
3351 3351
3352 3352
3353 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3353 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3354 // Load the arguments on the stack and call the stub. 3354 // Load the arguments on the stack and call the stub.
3355 RegExpExecStub stub(isolate()); 3355 RegExpExecStub stub(isolate());
3356 ZoneList<Expression*>* args = expr->arguments(); 3356 ZoneList<Expression*>* args = expr->arguments();
3357 ASSERT(args->length() == 4); 3357 DCHECK(args->length() == 4);
3358 VisitForStackValue(args->at(0)); 3358 VisitForStackValue(args->at(0));
3359 VisitForStackValue(args->at(1)); 3359 VisitForStackValue(args->at(1));
3360 VisitForStackValue(args->at(2)); 3360 VisitForStackValue(args->at(2));
3361 VisitForStackValue(args->at(3)); 3361 VisitForStackValue(args->at(3));
3362 __ CallStub(&stub); 3362 __ CallStub(&stub);
3363 context()->Plug(r0); 3363 context()->Plug(r0);
3364 } 3364 }
3365 3365
3366 3366
3367 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3367 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3368 ZoneList<Expression*>* args = expr->arguments(); 3368 ZoneList<Expression*>* args = expr->arguments();
3369 ASSERT(args->length() == 1); 3369 DCHECK(args->length() == 1);
3370 VisitForAccumulatorValue(args->at(0)); // Load the object. 3370 VisitForAccumulatorValue(args->at(0)); // Load the object.
3371 3371
3372 Label done; 3372 Label done;
3373 // If the object is a smi return the object. 3373 // If the object is a smi return the object.
3374 __ JumpIfSmi(r0, &done); 3374 __ JumpIfSmi(r0, &done);
3375 // If the object is not a value type, return the object. 3375 // If the object is not a value type, return the object.
3376 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 3376 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3377 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq); 3377 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
3378 3378
3379 __ bind(&done); 3379 __ bind(&done);
3380 context()->Plug(r0); 3380 context()->Plug(r0);
3381 } 3381 }
3382 3382
3383 3383
3384 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3384 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3385 ZoneList<Expression*>* args = expr->arguments(); 3385 ZoneList<Expression*>* args = expr->arguments();
3386 ASSERT(args->length() == 2); 3386 DCHECK(args->length() == 2);
3387 ASSERT_NE(NULL, args->at(1)->AsLiteral()); 3387 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3388 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3388 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3389 3389
3390 VisitForAccumulatorValue(args->at(0)); // Load the object. 3390 VisitForAccumulatorValue(args->at(0)); // Load the object.
3391 3391
3392 Label runtime, done, not_date_object; 3392 Label runtime, done, not_date_object;
3393 Register object = r0; 3393 Register object = r0;
3394 Register result = r0; 3394 Register result = r0;
3395 Register scratch0 = r9; 3395 Register scratch0 = r9;
3396 Register scratch1 = r1; 3396 Register scratch1 = r1;
3397 3397
(...skipping 25 matching lines...)
3423 3423
3424 __ bind(&not_date_object); 3424 __ bind(&not_date_object);
3425 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3425 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3426 __ bind(&done); 3426 __ bind(&done);
3427 context()->Plug(r0); 3427 context()->Plug(r0);
3428 } 3428 }
3429 3429
3430 3430
3431 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3431 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3432 ZoneList<Expression*>* args = expr->arguments(); 3432 ZoneList<Expression*>* args = expr->arguments();
3433 ASSERT_EQ(3, args->length()); 3433 DCHECK_EQ(3, args->length());
3434 3434
3435 Register string = r0; 3435 Register string = r0;
3436 Register index = r1; 3436 Register index = r1;
3437 Register value = r2; 3437 Register value = r2;
3438 3438
3439 VisitForStackValue(args->at(1)); // index 3439 VisitForStackValue(args->at(1)); // index
3440 VisitForStackValue(args->at(2)); // value 3440 VisitForStackValue(args->at(2)); // value
3441 VisitForAccumulatorValue(args->at(0)); // string 3441 VisitForAccumulatorValue(args->at(0)); // string
3442 __ Pop(index, value); 3442 __ Pop(index, value);
3443 3443
(...skipping 12 matching lines...)
3456 __ add(ip, 3456 __ add(ip,
3457 string, 3457 string,
3458 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3458 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3459 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize)); 3459 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3460 context()->Plug(string); 3460 context()->Plug(string);
3461 } 3461 }
3462 3462
3463 3463
3464 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3464 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3465 ZoneList<Expression*>* args = expr->arguments(); 3465 ZoneList<Expression*>* args = expr->arguments();
3466 ASSERT_EQ(3, args->length()); 3466 DCHECK_EQ(3, args->length());
3467 3467
3468 Register string = r0; 3468 Register string = r0;
3469 Register index = r1; 3469 Register index = r1;
3470 Register value = r2; 3470 Register value = r2;
3471 3471
3472 VisitForStackValue(args->at(1)); // index 3472 VisitForStackValue(args->at(1)); // index
3473 VisitForStackValue(args->at(2)); // value 3473 VisitForStackValue(args->at(2)); // value
3474 VisitForAccumulatorValue(args->at(0)); // string 3474 VisitForAccumulatorValue(args->at(0)); // string
3475 __ Pop(index, value); 3475 __ Pop(index, value);
3476 3476
(...skipping 15 matching lines...)
3492 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); 3492 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3493 __ strh(value, MemOperand(ip, index)); 3493 __ strh(value, MemOperand(ip, index));
3494 context()->Plug(string); 3494 context()->Plug(string);
3495 } 3495 }
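Both SetChar helpers compute the element base as string + header size - heap object tag and then index into the payload. A hedged sketch of that address arithmetic, assuming the 32-bit constants used here (kHeapObjectTag == 1, one smi tag bit) and invented helper names; the two-byte case can use the smi index directly, since smi tagging already multiplies the value by two, which equals sizeof(uint16_t):

  #include <cstdint>

  inline std::uint8_t* OneByteSlotSketch(std::uintptr_t tagged_string,
                                         std::uintptr_t smi_index,
                                         std::uintptr_t header_size,
                                         std::uintptr_t heap_tag) {
    std::uint8_t* payload =
        reinterpret_cast<std::uint8_t*>(tagged_string + header_size - heap_tag);
    return payload + (smi_index >> 1);  // untag the smi to get the char index
  }

  inline std::uint8_t* TwoByteSlotSketch(std::uintptr_t tagged_string,
                                         std::uintptr_t smi_index,
                                         std::uintptr_t header_size,
                                         std::uintptr_t heap_tag) {
    std::uint8_t* payload =
        reinterpret_cast<std::uint8_t*>(tagged_string + header_size - heap_tag);
    return payload + smi_index;  // smi value == index * 2 == byte offset
  }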
3496 3496
3497 3497
3498 3498
3499 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3499 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3500 // Load the arguments on the stack and call the runtime function. 3500 // Load the arguments on the stack and call the runtime function.
3501 ZoneList<Expression*>* args = expr->arguments(); 3501 ZoneList<Expression*>* args = expr->arguments();
3502 ASSERT(args->length() == 2); 3502 DCHECK(args->length() == 2);
3503 VisitForStackValue(args->at(0)); 3503 VisitForStackValue(args->at(0));
3504 VisitForStackValue(args->at(1)); 3504 VisitForStackValue(args->at(1));
3505 MathPowStub stub(isolate(), MathPowStub::ON_STACK); 3505 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3506 __ CallStub(&stub); 3506 __ CallStub(&stub);
3507 context()->Plug(r0); 3507 context()->Plug(r0);
3508 } 3508 }
3509 3509
3510 3510
3511 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3511 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3512 ZoneList<Expression*>* args = expr->arguments(); 3512 ZoneList<Expression*>* args = expr->arguments();
3513 ASSERT(args->length() == 2); 3513 DCHECK(args->length() == 2);
3514 VisitForStackValue(args->at(0)); // Load the object. 3514 VisitForStackValue(args->at(0)); // Load the object.
3515 VisitForAccumulatorValue(args->at(1)); // Load the value. 3515 VisitForAccumulatorValue(args->at(1)); // Load the value.
3516 __ pop(r1); // r0 = value. r1 = object. 3516 __ pop(r1); // r0 = value. r1 = object.
3517 3517
3518 Label done; 3518 Label done;
3519 // If the object is a smi, return the value. 3519 // If the object is a smi, return the value.
3520 __ JumpIfSmi(r1, &done); 3520 __ JumpIfSmi(r1, &done);
3521 3521
3522 // If the object is not a value type, return the value. 3522 // If the object is not a value type, return the value.
3523 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); 3523 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3524 __ b(ne, &done); 3524 __ b(ne, &done);
3525 3525
3526 // Store the value. 3526 // Store the value.
3527 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); 3527 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3528 // Update the write barrier. Save the value as it will be 3528 // Update the write barrier. Save the value as it will be
3529 // overwritten by the write barrier code and is needed afterward. 3529 // overwritten by the write barrier code and is needed afterward.
3530 __ mov(r2, r0); 3530 __ mov(r2, r0);
3531 __ RecordWriteField( 3531 __ RecordWriteField(
3532 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 3532 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3533 3533
3534 __ bind(&done); 3534 __ bind(&done);
3535 context()->Plug(r0); 3535 context()->Plug(r0);
3536 } 3536 }
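RecordWriteField above is the generational write barrier: after the pointer store into JSValue::kValueOffset, the collector has to learn about a possible old-to-new pointer. A conceptual sketch only, with invented types and placeholder predicates rather than V8's heap API:

  #include <unordered_set>

  struct HeapObjectSketch {};
  std::unordered_set<HeapObjectSketch**> remembered_slots_sketch;

  // Placeholder predicates; a real heap answers these with address range checks.
  inline bool InNewSpaceSketch(HeapObjectSketch*) { return true; }
  inline bool InOldSpaceSketch(HeapObjectSketch*) { return true; }

  inline void WriteFieldSketch(HeapObjectSketch* host, HeapObjectSketch** slot,
                               HeapObjectSketch* value) {
    *slot = value;  // the str into the value field above
    if (InOldSpaceSketch(host) && InNewSpaceSketch(value)) {
      remembered_slots_sketch.insert(slot);  // roughly what the barrier records
    }
  }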
3537 3537
3538 3538
3539 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3539 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3540 ZoneList<Expression*>* args = expr->arguments(); 3540 ZoneList<Expression*>* args = expr->arguments();
3541 ASSERT_EQ(args->length(), 1); 3541 DCHECK_EQ(args->length(), 1);
3542 // Load the argument into r0 and call the stub. 3542 // Load the argument into r0 and call the stub.
3543 VisitForAccumulatorValue(args->at(0)); 3543 VisitForAccumulatorValue(args->at(0));
3544 3544
3545 NumberToStringStub stub(isolate()); 3545 NumberToStringStub stub(isolate());
3546 __ CallStub(&stub); 3546 __ CallStub(&stub);
3547 context()->Plug(r0); 3547 context()->Plug(r0);
3548 } 3548 }
3549 3549
3550 3550
3551 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3551 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3552 ZoneList<Expression*>* args = expr->arguments(); 3552 ZoneList<Expression*>* args = expr->arguments();
3553 ASSERT(args->length() == 1); 3553 DCHECK(args->length() == 1);
3554 VisitForAccumulatorValue(args->at(0)); 3554 VisitForAccumulatorValue(args->at(0));
3555 3555
3556 Label done; 3556 Label done;
3557 StringCharFromCodeGenerator generator(r0, r1); 3557 StringCharFromCodeGenerator generator(r0, r1);
3558 generator.GenerateFast(masm_); 3558 generator.GenerateFast(masm_);
3559 __ jmp(&done); 3559 __ jmp(&done);
3560 3560
3561 NopRuntimeCallHelper call_helper; 3561 NopRuntimeCallHelper call_helper;
3562 generator.GenerateSlow(masm_, call_helper); 3562 generator.GenerateSlow(masm_, call_helper);
3563 3563
3564 __ bind(&done); 3564 __ bind(&done);
3565 context()->Plug(r1); 3565 context()->Plug(r1);
3566 } 3566 }
3567 3567
3568 3568
3569 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3569 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3570 ZoneList<Expression*>* args = expr->arguments(); 3570 ZoneList<Expression*>* args = expr->arguments();
3571 ASSERT(args->length() == 2); 3571 DCHECK(args->length() == 2);
3572 VisitForStackValue(args->at(0)); 3572 VisitForStackValue(args->at(0));
3573 VisitForAccumulatorValue(args->at(1)); 3573 VisitForAccumulatorValue(args->at(1));
3574 3574
3575 Register object = r1; 3575 Register object = r1;
3576 Register index = r0; 3576 Register index = r0;
3577 Register result = r3; 3577 Register result = r3;
3578 3578
3579 __ pop(object); 3579 __ pop(object);
3580 3580
3581 Label need_conversion; 3581 Label need_conversion;
(...skipping 24 matching lines...)
3606 NopRuntimeCallHelper call_helper; 3606 NopRuntimeCallHelper call_helper;
3607 generator.GenerateSlow(masm_, call_helper); 3607 generator.GenerateSlow(masm_, call_helper);
3608 3608
3609 __ bind(&done); 3609 __ bind(&done);
3610 context()->Plug(result); 3610 context()->Plug(result);
3611 } 3611 }
3612 3612
3613 3613
3614 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3614 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3615 ZoneList<Expression*>* args = expr->arguments(); 3615 ZoneList<Expression*>* args = expr->arguments();
3616 ASSERT(args->length() == 2); 3616 DCHECK(args->length() == 2);
3617 VisitForStackValue(args->at(0)); 3617 VisitForStackValue(args->at(0));
3618 VisitForAccumulatorValue(args->at(1)); 3618 VisitForAccumulatorValue(args->at(1));
3619 3619
3620 Register object = r1; 3620 Register object = r1;
3621 Register index = r0; 3621 Register index = r0;
3622 Register scratch = r3; 3622 Register scratch = r3;
3623 Register result = r0; 3623 Register result = r0;
3624 3624
3625 __ pop(object); 3625 __ pop(object);
3626 3626
(...skipping 26 matching lines...)
3653 NopRuntimeCallHelper call_helper; 3653 NopRuntimeCallHelper call_helper;
3654 generator.GenerateSlow(masm_, call_helper); 3654 generator.GenerateSlow(masm_, call_helper);
3655 3655
3656 __ bind(&done); 3656 __ bind(&done);
3657 context()->Plug(result); 3657 context()->Plug(result);
3658 } 3658 }
3659 3659
3660 3660
3661 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3661 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3662 ZoneList<Expression*>* args = expr->arguments(); 3662 ZoneList<Expression*>* args = expr->arguments();
3663 ASSERT_EQ(2, args->length()); 3663 DCHECK_EQ(2, args->length());
3664 VisitForStackValue(args->at(0)); 3664 VisitForStackValue(args->at(0));
3665 VisitForAccumulatorValue(args->at(1)); 3665 VisitForAccumulatorValue(args->at(1));
3666 3666
3667 __ pop(r1); 3667 __ pop(r1);
3668 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); 3668 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3669 __ CallStub(&stub); 3669 __ CallStub(&stub);
3670 context()->Plug(r0); 3670 context()->Plug(r0);
3671 } 3671 }
3672 3672
3673 3673
3674 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3674 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3675 ZoneList<Expression*>* args = expr->arguments(); 3675 ZoneList<Expression*>* args = expr->arguments();
3676 ASSERT_EQ(2, args->length()); 3676 DCHECK_EQ(2, args->length());
3677 VisitForStackValue(args->at(0)); 3677 VisitForStackValue(args->at(0));
3678 VisitForStackValue(args->at(1)); 3678 VisitForStackValue(args->at(1));
3679 3679
3680 StringCompareStub stub(isolate()); 3680 StringCompareStub stub(isolate());
3681 __ CallStub(&stub); 3681 __ CallStub(&stub);
3682 context()->Plug(r0); 3682 context()->Plug(r0);
3683 } 3683 }
3684 3684
3685 3685
3686 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3686 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3687 ZoneList<Expression*>* args = expr->arguments(); 3687 ZoneList<Expression*>* args = expr->arguments();
3688 ASSERT(args->length() >= 2); 3688 DCHECK(args->length() >= 2);
3689 3689
3690 int arg_count = args->length() - 2; // 2 ~ receiver and function. 3690 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3691 for (int i = 0; i < arg_count + 1; i++) { 3691 for (int i = 0; i < arg_count + 1; i++) {
3692 VisitForStackValue(args->at(i)); 3692 VisitForStackValue(args->at(i));
3693 } 3693 }
3694 VisitForAccumulatorValue(args->last()); // Function. 3694 VisitForAccumulatorValue(args->last()); // Function.
3695 3695
3696 Label runtime, done; 3696 Label runtime, done;
3697 // Check for non-function argument (including proxy). 3697 // Check for non-function argument (including proxy).
3698 __ JumpIfSmi(r0, &runtime); 3698 __ JumpIfSmi(r0, &runtime);
(...skipping 12 matching lines...)
3711 __ CallRuntime(Runtime::kCall, args->length()); 3711 __ CallRuntime(Runtime::kCall, args->length());
3712 __ bind(&done); 3712 __ bind(&done);
3713 3713
3714 context()->Plug(r0); 3714 context()->Plug(r0);
3715 } 3715 }
3716 3716
3717 3717
3718 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3718 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3719 RegExpConstructResultStub stub(isolate()); 3719 RegExpConstructResultStub stub(isolate());
3720 ZoneList<Expression*>* args = expr->arguments(); 3720 ZoneList<Expression*>* args = expr->arguments();
3721 ASSERT(args->length() == 3); 3721 DCHECK(args->length() == 3);
3722 VisitForStackValue(args->at(0)); 3722 VisitForStackValue(args->at(0));
3723 VisitForStackValue(args->at(1)); 3723 VisitForStackValue(args->at(1));
3724 VisitForAccumulatorValue(args->at(2)); 3724 VisitForAccumulatorValue(args->at(2));
3725 __ pop(r1); 3725 __ pop(r1);
3726 __ pop(r2); 3726 __ pop(r2);
3727 __ CallStub(&stub); 3727 __ CallStub(&stub);
3728 context()->Plug(r0); 3728 context()->Plug(r0);
3729 } 3729 }
3730 3730
3731 3731
3732 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 3732 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3733 ZoneList<Expression*>* args = expr->arguments(); 3733 ZoneList<Expression*>* args = expr->arguments();
3734 ASSERT_EQ(2, args->length()); 3734 DCHECK_EQ(2, args->length());
3735 ASSERT_NE(NULL, args->at(0)->AsLiteral()); 3735 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3736 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); 3736 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3737 3737
3738 Handle<FixedArray> jsfunction_result_caches( 3738 Handle<FixedArray> jsfunction_result_caches(
3739 isolate()->native_context()->jsfunction_result_caches()); 3739 isolate()->native_context()->jsfunction_result_caches());
3740 if (jsfunction_result_caches->length() <= cache_id) { 3740 if (jsfunction_result_caches->length() <= cache_id) {
3741 __ Abort(kAttemptToUseUndefinedCache); 3741 __ Abort(kAttemptToUseUndefinedCache);
3742 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3742 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3743 context()->Plug(r0); 3743 context()->Plug(r0);
3744 return; 3744 return;
3745 } 3745 }
(...skipping 47 matching lines...)
3793 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask)); 3793 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3794 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3794 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3795 Split(eq, if_true, if_false, fall_through); 3795 Split(eq, if_true, if_false, fall_through);
3796 3796
3797 context()->Plug(if_true, if_false); 3797 context()->Plug(if_true, if_false);
3798 } 3798 }
3799 3799
3800 3800
3801 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3801 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3802 ZoneList<Expression*>* args = expr->arguments(); 3802 ZoneList<Expression*>* args = expr->arguments();
3803 ASSERT(args->length() == 1); 3803 DCHECK(args->length() == 1);
3804 VisitForAccumulatorValue(args->at(0)); 3804 VisitForAccumulatorValue(args->at(0));
3805 3805
3806 __ AssertString(r0); 3806 __ AssertString(r0);
3807 3807
3808 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3808 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3809 __ IndexFromHash(r0, r0); 3809 __ IndexFromHash(r0, r0);
3810 3810
3811 context()->Plug(r0); 3811 context()->Plug(r0);
3812 } 3812 }
3813 3813
3814 3814
3815 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { 3815 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3816 Label bailout, done, one_char_separator, long_separator, non_trivial_array, 3816 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3817 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop, 3817 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3818 one_char_separator_loop_entry, long_separator_loop; 3818 one_char_separator_loop_entry, long_separator_loop;
3819 ZoneList<Expression*>* args = expr->arguments(); 3819 ZoneList<Expression*>* args = expr->arguments();
3820 ASSERT(args->length() == 2); 3820 DCHECK(args->length() == 2);
3821 VisitForStackValue(args->at(1)); 3821 VisitForStackValue(args->at(1));
3822 VisitForAccumulatorValue(args->at(0)); 3822 VisitForAccumulatorValue(args->at(0));
3823 3823
3824 // All aliases of the same register have disjoint lifetimes. 3824 // All aliases of the same register have disjoint lifetimes.
3825 Register array = r0; 3825 Register array = r0;
3826 Register elements = no_reg; // Will be r0. 3826 Register elements = no_reg; // Will be r0.
3827 Register result = no_reg; // Will be r0. 3827 Register result = no_reg; // Will be r0.
3828 Register separator = r1; 3828 Register separator = r1;
3829 Register array_length = r2; 3829 Register array_length = r2;
3830 Register result_pos = no_reg; // Will be r2 3830 Register result_pos = no_reg; // Will be r2
(...skipping 137 matching lines...)
3968 // Copy next array element to the result. 3968 // Copy next array element to the result.
3969 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3969 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3970 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3970 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3971 __ SmiUntag(string_length); 3971 __ SmiUntag(string_length);
3972 __ add(string, 3972 __ add(string,
3973 string, 3973 string,
3974 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3974 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3975 __ CopyBytes(string, result_pos, string_length, scratch); 3975 __ CopyBytes(string, result_pos, string_length, scratch);
3976 __ cmp(element, elements_end); 3976 __ cmp(element, elements_end);
3977 __ b(lt, &empty_separator_loop); // End while (element < elements_end). 3977 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
3978 ASSERT(result.is(r0)); 3978 DCHECK(result.is(r0));
3979 __ b(&done); 3979 __ b(&done);
3980 3980
3981 // One-character separator case 3981 // One-character separator case
3982 __ bind(&one_char_separator); 3982 __ bind(&one_char_separator);
3983 // Replace separator with its ASCII character value. 3983 // Replace separator with its ASCII character value.
3984 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 3984 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3985 // Jump into the loop after the code that copies the separator, so the first 3985 // Jump into the loop after the code that copies the separator, so the first
3986 // element is not preceded by a separator. 3986 // element is not preceded by a separator.
3987 __ jmp(&one_char_separator_loop_entry); 3987 __ jmp(&one_char_separator_loop_entry);
3988 3988
(...skipping 11 matching lines...)
4000 __ bind(&one_char_separator_loop_entry); 4000 __ bind(&one_char_separator_loop_entry);
4001 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4001 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4002 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4002 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4003 __ SmiUntag(string_length); 4003 __ SmiUntag(string_length);
4004 __ add(string, 4004 __ add(string,
4005 string, 4005 string,
4006 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4006 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4007 __ CopyBytes(string, result_pos, string_length, scratch); 4007 __ CopyBytes(string, result_pos, string_length, scratch);
4008 __ cmp(element, elements_end); 4008 __ cmp(element, elements_end);
4009 __ b(lt, &one_char_separator_loop); // End while (element < elements_end). 4009 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4010 ASSERT(result.is(r0)); 4010 DCHECK(result.is(r0));
4011 __ b(&done); 4011 __ b(&done);
4012 4012
4013 // Long separator case (separator is more than one character). Entry is at the 4013 // Long separator case (separator is more than one character). Entry is at the
4014 // label long_separator below. 4014 // label long_separator below.
4015 __ bind(&long_separator_loop); 4015 __ bind(&long_separator_loop);
4016 // Live values in registers: 4016 // Live values in registers:
4017 // result_pos: the position to which we are currently copying characters. 4017 // result_pos: the position to which we are currently copying characters.
4018 // element: Current array element. 4018 // element: Current array element.
4019 // elements_end: Array end. 4019 // elements_end: Array end.
4020 // separator: Separator string. 4020 // separator: Separator string.
4021 4021
4022 // Copy the separator to the result. 4022 // Copy the separator to the result.
4023 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); 4023 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
4024 __ SmiUntag(string_length); 4024 __ SmiUntag(string_length);
4025 __ add(string, 4025 __ add(string,
4026 separator, 4026 separator,
4027 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4027 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4028 __ CopyBytes(string, result_pos, string_length, scratch); 4028 __ CopyBytes(string, result_pos, string_length, scratch);
4029 4029
4030 __ bind(&long_separator); 4030 __ bind(&long_separator);
4031 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4031 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
4032 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4032 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
4033 __ SmiUntag(string_length); 4033 __ SmiUntag(string_length);
4034 __ add(string, 4034 __ add(string,
4035 string, 4035 string,
4036 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4036 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4037 __ CopyBytes(string, result_pos, string_length, scratch); 4037 __ CopyBytes(string, result_pos, string_length, scratch);
4038 __ cmp(element, elements_end); 4038 __ cmp(element, elements_end);
4039 __ b(lt, &long_separator_loop); // End while (element < elements_end). 4039 __ b(lt, &long_separator_loop); // End while (element < elements_end).
4040 ASSERT(result.is(r0)); 4040 DCHECK(result.is(r0));
4041 __ b(&done); 4041 __ b(&done);
4042 4042
4043 __ bind(&bailout); 4043 __ bind(&bailout);
4044 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 4044 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
4045 __ bind(&done); 4045 __ bind(&done);
4046 context()->Plug(r0); 4046 context()->Plug(r0);
4047 } 4047 }
4048 4048
4049 4049
4050 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 4050 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4051 ASSERT(expr->arguments()->length() == 0); 4051 DCHECK(expr->arguments()->length() == 0);
4052 ExternalReference debug_is_active = 4052 ExternalReference debug_is_active =
4053 ExternalReference::debug_is_active_address(isolate()); 4053 ExternalReference::debug_is_active_address(isolate());
4054 __ mov(ip, Operand(debug_is_active)); 4054 __ mov(ip, Operand(debug_is_active));
4055 __ ldrb(r0, MemOperand(ip)); 4055 __ ldrb(r0, MemOperand(ip));
4056 __ SmiTag(r0); 4056 __ SmiTag(r0);
4057 context()->Plug(r0); 4057 context()->Plug(r0);
4058 } 4058 }
4059 4059
4060 4060
4061 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 4061 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
(...skipping 70 matching lines...)
4132 VisitForStackValue(property->obj()); 4132 VisitForStackValue(property->obj());
4133 VisitForStackValue(property->key()); 4133 VisitForStackValue(property->key());
4134 __ mov(r1, Operand(Smi::FromInt(strict_mode()))); 4134 __ mov(r1, Operand(Smi::FromInt(strict_mode())));
4135 __ push(r1); 4135 __ push(r1);
4136 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4136 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4137 context()->Plug(r0); 4137 context()->Plug(r0);
4138 } else if (proxy != NULL) { 4138 } else if (proxy != NULL) {
4139 Variable* var = proxy->var(); 4139 Variable* var = proxy->var();
4140 // Delete of an unqualified identifier is disallowed in strict mode 4140 // Delete of an unqualified identifier is disallowed in strict mode
4141 // but "delete this" is allowed. 4141 // but "delete this" is allowed.
4142 ASSERT(strict_mode() == SLOPPY || var->is_this()); 4142 DCHECK(strict_mode() == SLOPPY || var->is_this());
4143 if (var->IsUnallocated()) { 4143 if (var->IsUnallocated()) {
4144 __ ldr(r2, GlobalObjectOperand()); 4144 __ ldr(r2, GlobalObjectOperand());
4145 __ mov(r1, Operand(var->name())); 4145 __ mov(r1, Operand(var->name()));
4146 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); 4146 __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
4147 __ Push(r2, r1, r0); 4147 __ Push(r2, r1, r0);
4148 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4148 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4149 context()->Plug(r0); 4149 context()->Plug(r0);
4150 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4150 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4151 // Result of deleting non-global, non-dynamic variables is false. 4151 // Result of deleting non-global, non-dynamic variables is false.
4152 // The subexpression does not have side effects. 4152 // The subexpression does not have side effects.
4153 context()->Plug(var->is_this()); 4153 context()->Plug(var->is_this());
4154 } else { 4154 } else {
4155 // Non-global variable. Call the runtime to try to delete from the 4155 // Non-global variable. Call the runtime to try to delete from the
4156 // context where the variable was introduced. 4156 // context where the variable was introduced.
4157 ASSERT(!context_register().is(r2)); 4157 DCHECK(!context_register().is(r2));
4158 __ mov(r2, Operand(var->name())); 4158 __ mov(r2, Operand(var->name()));
4159 __ Push(context_register(), r2); 4159 __ Push(context_register(), r2);
4160 __ CallRuntime(Runtime::kDeleteLookupSlot, 2); 4160 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4161 context()->Plug(r0); 4161 context()->Plug(r0);
4162 } 4162 }
4163 } else { 4163 } else {
4164 // Result of deleting non-property, non-variable reference is true. 4164 // Result of deleting non-property, non-variable reference is true.
4165 // The subexpression may have side effects. 4165 // The subexpression may have side effects.
4166 VisitForEffect(expr->expression()); 4166 VisitForEffect(expr->expression());
4167 context()->Plug(true); 4167 context()->Plug(true);
(...skipping 20 matching lines...)
4188 VisitForControl(expr->expression(), 4188 VisitForControl(expr->expression(),
4189 test->false_label(), 4189 test->false_label(),
4190 test->true_label(), 4190 test->true_label(),
4191 test->fall_through()); 4191 test->fall_through());
4192 context()->Plug(test->true_label(), test->false_label()); 4192 context()->Plug(test->true_label(), test->false_label());
4193 } else { 4193 } else {
4194 // We handle value contexts explicitly rather than simply visiting 4194 // We handle value contexts explicitly rather than simply visiting
4195 // for control and plugging the control flow into the context, 4195 // for control and plugging the control flow into the context,
4196 // because we need to prepare a pair of extra administrative AST ids 4196 // because we need to prepare a pair of extra administrative AST ids
4197 // for the optimizing compiler. 4197 // for the optimizing compiler.
4198 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue()); 4198 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4199 Label materialize_true, materialize_false, done; 4199 Label materialize_true, materialize_false, done;
4200 VisitForControl(expr->expression(), 4200 VisitForControl(expr->expression(),
4201 &materialize_false, 4201 &materialize_false,
4202 &materialize_true, 4202 &materialize_true,
4203 &materialize_true); 4203 &materialize_true);
4204 __ bind(&materialize_true); 4204 __ bind(&materialize_true);
4205 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 4205 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4206 __ LoadRoot(r0, Heap::kTrueValueRootIndex); 4206 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
4207 if (context()->IsStackValue()) __ push(r0); 4207 if (context()->IsStackValue()) __ push(r0);
4208 __ jmp(&done); 4208 __ jmp(&done);
(...skipping 16 matching lines...)
4225 break; 4225 break;
4226 } 4226 }
4227 4227
4228 default: 4228 default:
4229 UNREACHABLE(); 4229 UNREACHABLE();
4230 } 4230 }
4231 } 4231 }
4232 4232
4233 4233
4234 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4234 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4235 ASSERT(expr->expression()->IsValidReferenceExpression()); 4235 DCHECK(expr->expression()->IsValidReferenceExpression());
4236 4236
4237 Comment cmnt(masm_, "[ CountOperation"); 4237 Comment cmnt(masm_, "[ CountOperation");
4238 SetSourcePosition(expr->position()); 4238 SetSourcePosition(expr->position());
4239 4239
4240 // Expression can only be a property, a global or a (parameter or local) 4240 // Expression can only be a property, a global or a (parameter or local)
4241 // slot. 4241 // slot.
4242 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 4242 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4243 LhsKind assign_type = VARIABLE; 4243 LhsKind assign_type = VARIABLE;
4244 Property* prop = expr->expression()->AsProperty(); 4244 Property* prop = expr->expression()->AsProperty();
4245 // In case of a property we use the uninitialized expression context 4245 // In case of a property we use the uninitialized expression context
4246 // of the key to detect a named property. 4246 // of the key to detect a named property.
4247 if (prop != NULL) { 4247 if (prop != NULL) {
4248 assign_type = 4248 assign_type =
4249 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY; 4249 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4250 } 4250 }
4251 4251
4252 // Evaluate expression and get value. 4252 // Evaluate expression and get value.
4253 if (assign_type == VARIABLE) { 4253 if (assign_type == VARIABLE) {
4254 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); 4254 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4255 AccumulatorValueContext context(this); 4255 AccumulatorValueContext context(this);
4256 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4256 EmitVariableLoad(expr->expression()->AsVariableProxy());
4257 } else { 4257 } else {
4258 // Reserve space for result of postfix operation. 4258 // Reserve space for result of postfix operation.
4259 if (expr->is_postfix() && !context()->IsEffect()) { 4259 if (expr->is_postfix() && !context()->IsEffect()) {
4260 __ mov(ip, Operand(Smi::FromInt(0))); 4260 __ mov(ip, Operand(Smi::FromInt(0)));
4261 __ push(ip); 4261 __ push(ip);
4262 } 4262 }
4263 if (assign_type == NAMED_PROPERTY) { 4263 if (assign_type == NAMED_PROPERTY) {
4264 // Put the object both on the stack and in the register. 4264 // Put the object both on the stack and in the register.
(...skipping 140 matching lines...)
4405 } else { 4405 } else {
4406 context()->Plug(r0); 4406 context()->Plug(r0);
4407 } 4407 }
4408 break; 4408 break;
4409 } 4409 }
4410 } 4410 }
4411 } 4411 }
4412 4412
4413 4413
4414 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4414 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4415 ASSERT(!context()->IsEffect()); 4415 DCHECK(!context()->IsEffect());
4416 ASSERT(!context()->IsTest()); 4416 DCHECK(!context()->IsTest());
4417 VariableProxy* proxy = expr->AsVariableProxy(); 4417 VariableProxy* proxy = expr->AsVariableProxy();
4418 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4418 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4419 Comment cmnt(masm_, "[ Global variable"); 4419 Comment cmnt(masm_, "[ Global variable");
4420 __ ldr(LoadIC::ReceiverRegister(), GlobalObjectOperand()); 4420 __ ldr(LoadIC::ReceiverRegister(), GlobalObjectOperand());
4421 __ mov(LoadIC::NameRegister(), Operand(proxy->name())); 4421 __ mov(LoadIC::NameRegister(), Operand(proxy->name()));
4422 if (FLAG_vector_ics) { 4422 if (FLAG_vector_ics) {
4423 __ mov(LoadIC::SlotRegister(), 4423 __ mov(LoadIC::SlotRegister(),
4424 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 4424 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4425 } 4425 }
4426 // Use a regular load, not a contextual load, to avoid a reference 4426 // Use a regular load, not a contextual load, to avoid a reference
(...skipping 212 matching lines...)
4639 return r0; 4639 return r0;
4640 } 4640 }
4641 4641
4642 4642
4643 Register FullCodeGenerator::context_register() { 4643 Register FullCodeGenerator::context_register() {
4644 return cp; 4644 return cp;
4645 } 4645 }
4646 4646
4647 4647
4648 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4648 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4649 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 4649 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4650 __ str(value, MemOperand(fp, frame_offset)); 4650 __ str(value, MemOperand(fp, frame_offset));
4651 } 4651 }
4652 4652
4653 4653
4654 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4654 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4655 __ ldr(dst, ContextOperand(cp, context_index)); 4655 __ ldr(dst, ContextOperand(cp, context_index));
4656 } 4656 }
4657 4657
4658 4658
4659 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 4659 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4660 Scope* declaration_scope = scope()->DeclarationScope(); 4660 Scope* declaration_scope = scope()->DeclarationScope();
4661 if (declaration_scope->is_global_scope() || 4661 if (declaration_scope->is_global_scope() ||
4662 declaration_scope->is_module_scope()) { 4662 declaration_scope->is_module_scope()) {
4663 // Contexts nested in the native context have a canonical empty function 4663 // Contexts nested in the native context have a canonical empty function
4664 // as their closure, not the anonymous closure containing the global 4664 // as their closure, not the anonymous closure containing the global
4665 // code. Pass a smi sentinel and let the runtime look up the empty 4665 // code. Pass a smi sentinel and let the runtime look up the empty
4666 // function. 4666 // function.
4667 __ mov(ip, Operand(Smi::FromInt(0))); 4667 __ mov(ip, Operand(Smi::FromInt(0)));
4668 } else if (declaration_scope->is_eval_scope()) { 4668 } else if (declaration_scope->is_eval_scope()) {
4669 // Contexts created by a call to eval have the same closure as the 4669 // Contexts created by a call to eval have the same closure as the
4670 // context calling eval, not the anonymous closure containing the eval 4670 // context calling eval, not the anonymous closure containing the eval
4671 // code. Fetch it from the context. 4671 // code. Fetch it from the context.
4672 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX)); 4672 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4673 } else { 4673 } else {
4674 ASSERT(declaration_scope->is_function_scope()); 4674 DCHECK(declaration_scope->is_function_scope());
4675 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4675 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4676 } 4676 }
4677 __ push(ip); 4677 __ push(ip);
4678 } 4678 }
4679 4679
4680 4680
4681 // ---------------------------------------------------------------------------- 4681 // ----------------------------------------------------------------------------
4682 // Non-local control flow support. 4682 // Non-local control flow support.
4683 4683
4684 void FullCodeGenerator::EnterFinallyBlock() { 4684 void FullCodeGenerator::EnterFinallyBlock() {
4685 ASSERT(!result_register().is(r1)); 4685 DCHECK(!result_register().is(r1));
4686 // Store result register while executing finally block. 4686 // Store result register while executing finally block.
4687 __ push(result_register()); 4687 __ push(result_register());
4688 // Cook return address in link register to stack (smi encoded Code* delta) 4688 // Cook return address in link register to stack (smi encoded Code* delta)
4689 __ sub(r1, lr, Operand(masm_->CodeObject())); 4689 __ sub(r1, lr, Operand(masm_->CodeObject()));
4690 __ SmiTag(r1); 4690 __ SmiTag(r1);
4691 4691
4692 // Store result register while executing finally block. 4692 // Store result register while executing finally block.
4693 __ push(r1); 4693 __ push(r1);
4694 4694
4695 // Store pending message while executing finally block. 4695 // Store pending message while executing finally block.
(...skipping 13 matching lines...)
4709 4709
4710 ExternalReference pending_message_script = 4710 ExternalReference pending_message_script =
4711 ExternalReference::address_of_pending_message_script(isolate()); 4711 ExternalReference::address_of_pending_message_script(isolate());
4712 __ mov(ip, Operand(pending_message_script)); 4712 __ mov(ip, Operand(pending_message_script));
4713 __ ldr(r1, MemOperand(ip)); 4713 __ ldr(r1, MemOperand(ip));
4714 __ push(r1); 4714 __ push(r1);
4715 } 4715 }
4716 4716
4717 4717
4718 void FullCodeGenerator::ExitFinallyBlock() { 4718 void FullCodeGenerator::ExitFinallyBlock() {
4719 ASSERT(!result_register().is(r1)); 4719 DCHECK(!result_register().is(r1));
4720 // Restore pending message from stack. 4720 // Restore pending message from stack.
4721 __ pop(r1); 4721 __ pop(r1);
4722 ExternalReference pending_message_script = 4722 ExternalReference pending_message_script =
4723 ExternalReference::address_of_pending_message_script(isolate()); 4723 ExternalReference::address_of_pending_message_script(isolate());
4724 __ mov(ip, Operand(pending_message_script)); 4724 __ mov(ip, Operand(pending_message_script));
4725 __ str(r1, MemOperand(ip)); 4725 __ str(r1, MemOperand(ip));
4726 4726
4727 __ pop(r1); 4727 __ pop(r1);
4728 __ SmiUntag(r1); 4728 __ SmiUntag(r1);
4729 ExternalReference has_pending_message = 4729 ExternalReference has_pending_message =
(...skipping 45 matching lines...)
4775 return previous_; 4775 return previous_;
4776 } 4776 }
4777 4777
4778 4778
4779 #undef __ 4779 #undef __
4780 4780
4781 4781
4782 static Address GetInterruptImmediateLoadAddress(Address pc) { 4782 static Address GetInterruptImmediateLoadAddress(Address pc) {
4783 Address load_address = pc - 2 * Assembler::kInstrSize; 4783 Address load_address = pc - 2 * Assembler::kInstrSize;
4784 if (!FLAG_enable_ool_constant_pool) { 4784 if (!FLAG_enable_ool_constant_pool) {
4785 ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address))); 4785 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4786 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) { 4786 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
4787 // This is an extended constant pool lookup. 4787 // This is an extended constant pool lookup.
4788 load_address -= 2 * Assembler::kInstrSize; 4788 load_address -= 2 * Assembler::kInstrSize;
4789 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address))); 4789 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4790 ASSERT(Assembler::IsMovT( 4790 DCHECK(Assembler::IsMovT(
4791 Memory::int32_at(load_address + Assembler::kInstrSize))); 4791 Memory::int32_at(load_address + Assembler::kInstrSize)));
4792 } else if (Assembler::IsMovT(Memory::int32_at(load_address))) { 4792 } else if (Assembler::IsMovT(Memory::int32_at(load_address))) {
4793 // This is a movw_movt immediate load. 4793 // This is a movw_movt immediate load.
4794 load_address -= Assembler::kInstrSize; 4794 load_address -= Assembler::kInstrSize;
4795 ASSERT(Assembler::IsMovW(Memory::int32_at(load_address))); 4795 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4796 } else { 4796 } else {
4797 // This is a small constant pool lookup. 4797 // This is a small constant pool lookup.
4798 ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address))); 4798 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
4799 } 4799 }
4800 return load_address; 4800 return load_address;
4801 } 4801 }
4802 4802
4803 4803
4804 void BackEdgeTable::PatchAt(Code* unoptimized_code, 4804 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4805 Address pc, 4805 Address pc,
4806 BackEdgeState target_state, 4806 BackEdgeState target_state,
4807 Code* replacement_code) { 4807 Code* replacement_code) {
4808 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); 4808 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
(...skipping 43 matching lines...)
4852 4852
4853 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( 4853 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4854 unoptimized_code, pc_immediate_load_address, replacement_code); 4854 unoptimized_code, pc_immediate_load_address, replacement_code);
4855 } 4855 }
4856 4856
4857 4857
4858 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( 4858 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4859 Isolate* isolate, 4859 Isolate* isolate,
4860 Code* unoptimized_code, 4860 Code* unoptimized_code,
4861 Address pc) { 4861 Address pc) {
4862 ASSERT(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize))); 4862 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
4863 4863
4864 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); 4864 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
4865 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; 4865 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
4866 Address interrupt_address = Assembler::target_address_at( 4866 Address interrupt_address = Assembler::target_address_at(
4867 pc_immediate_load_address, unoptimized_code); 4867 pc_immediate_load_address, unoptimized_code);
4868 4868
4869 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) { 4869 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
4870 ASSERT(interrupt_address == 4870 DCHECK(interrupt_address ==
4871 isolate->builtins()->InterruptCheck()->entry()); 4871 isolate->builtins()->InterruptCheck()->entry());
4872 return INTERRUPT; 4872 return INTERRUPT;
4873 } 4873 }
4874 4874
4875 ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address))); 4875 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
4876 4876
4877 if (interrupt_address == 4877 if (interrupt_address ==
4878 isolate->builtins()->OnStackReplacement()->entry()) { 4878 isolate->builtins()->OnStackReplacement()->entry()) {
4879 return ON_STACK_REPLACEMENT; 4879 return ON_STACK_REPLACEMENT;
4880 } 4880 }
4881 4881
4882 ASSERT(interrupt_address == 4882 DCHECK(interrupt_address ==
4883 isolate->builtins()->OsrAfterStackCheck()->entry()); 4883 isolate->builtins()->OsrAfterStackCheck()->entry());
4884 return OSR_AFTER_STACK_CHECK; 4884 return OSR_AFTER_STACK_CHECK;
4885 } 4885 }
4886 4886
4887 4887
4888 } } // namespace v8::internal 4888 } } // namespace v8::internal
4889 4889
4890 #endif // V8_TARGET_ARCH_ARM 4890 #endif // V8_TARGET_ARCH_ARM
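
For reviewers less familiar with the macro family, the following is a minimal, hypothetical sketch of the debug-check pattern the DCHECK* macros in this CL follow: the condition is evaluated, and failure aborts, only in debug builds, while release builds typically compile the check away entirely. The macro name ILLUSTRATIVE_DCHECK and the message text below are assumptions made up for illustration, not V8's actual definitions, which also provide richer variants such as DCHECK_EQ and DCHECK_NE (seen above in StoreToFrameField).

#include <cstdio>
#include <cstdlib>

#ifdef DEBUG
// Debug builds: evaluate the condition and abort with a message on failure.
#define ILLUSTRATIVE_DCHECK(condition)                              \
  do {                                                              \
    if (!(condition)) {                                             \
      std::fprintf(stderr, "Debug check failed: %s\n", #condition); \
      std::abort();                                                 \
    }                                                               \
  } while (false)
#else
// Release builds: the check disappears and the argument is not evaluated.
#define ILLUSTRATIVE_DCHECK(condition) ((void)0)
#endif

int main() {
  int frame_offset = 8;
  // Roughly analogous to the alignment check in StoreToFrameField above.
  ILLUSTRATIVE_DCHECK((frame_offset & 3) == 0);
  (void)frame_offset;  // Silence the unused-variable warning in release builds.
  return 0;
}

Since the CL is a mechanical ASSERT* to DCHECK* rename, no behavioral change is expected from this diff.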