Chromium Code Reviews

Side by Side Diff: src/arm64/full-codegen-arm64.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes (created 6 years, 4 months ago)
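
The change itself is mechanical: every ASSERT*-family macro in this file becomes the matching DCHECK* macro, with no behavioral difference intended. As a hedged sketch of the contract being assumed (not V8's actual definition, which also reports file, line, and the failed condition through its logging machinery), a DCHECK compiles to a real check in debug builds and to nothing in release builds:

    #include <stdio.h>
    #include <stdlib.h>

    // Minimal sketch of the DCHECK contract assumed by this rename.
    #ifdef DEBUG
    #define DCHECK(condition)                                      \
      do {                                                         \
        if (!(condition)) {                                        \
          fprintf(stderr, "Debug check failed: %s\n", #condition); \
          abort();                                                 \
        }                                                          \
      } while (false)
    #else
    #define DCHECK(condition) ((void)0)
    #endif
    #define DCHECK_EQ(expected, actual) DCHECK((expected) == (actual))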
OLD | NEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM64 7 #if V8_TARGET_ARCH_ARM64
8 8
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/codegen.h" 10 #include "src/codegen.h"
(...skipping 16 matching lines...)
27 class JumpPatchSite BASE_EMBEDDED { 27 class JumpPatchSite BASE_EMBEDDED {
28 public: 28 public:
29 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) { 29 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
30 #ifdef DEBUG 30 #ifdef DEBUG
31 info_emitted_ = false; 31 info_emitted_ = false;
32 #endif 32 #endif
33 } 33 }
34 34
35 ~JumpPatchSite() { 35 ~JumpPatchSite() {
36 if (patch_site_.is_bound()) { 36 if (patch_site_.is_bound()) {
37 ASSERT(info_emitted_); 37 DCHECK(info_emitted_);
38 } else { 38 } else {
39 ASSERT(reg_.IsNone()); 39 DCHECK(reg_.IsNone());
40 } 40 }
41 } 41 }
42 42
43 void EmitJumpIfNotSmi(Register reg, Label* target) { 43 void EmitJumpIfNotSmi(Register reg, Label* target) {
44 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc. 44 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
45 InstructionAccurateScope scope(masm_, 1); 45 InstructionAccurateScope scope(masm_, 1);
46 ASSERT(!info_emitted_); 46 DCHECK(!info_emitted_);
47 ASSERT(reg.Is64Bits()); 47 DCHECK(reg.Is64Bits());
48 ASSERT(!reg.Is(csp)); 48 DCHECK(!reg.Is(csp));
49 reg_ = reg; 49 reg_ = reg;
50 __ bind(&patch_site_); 50 __ bind(&patch_site_);
51 __ tbz(xzr, 0, target); // Always taken before patched. 51 __ tbz(xzr, 0, target); // Always taken before patched.
52 } 52 }
53 53
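Both EmitJumpIfNotSmi above and EmitJumpIfSmi below rely on V8's pointer tagging: on 64-bit targets a small integer (smi) has bit 0 clear, while a heap object pointer has it set. Before patching, the emitted tbz/tbnz tests bit 0 of xzr (always zero); PatchInlinedSmiCode later rewrites the instruction to test the recorded register instead. A hedged model of the test the patched instruction performs:

    #include <stdint.h>

    // Sketch: V8's 64-bit smi tag is a clear low bit; pointers have it set.
    static inline bool IsSmi(uint64_t tagged_value) {
      return (tagged_value & 1) == 0;
    }
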
54 void EmitJumpIfSmi(Register reg, Label* target) { 54 void EmitJumpIfSmi(Register reg, Label* target) {
55 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc. 55 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
56 InstructionAccurateScope scope(masm_, 1); 56 InstructionAccurateScope scope(masm_, 1);
57 ASSERT(!info_emitted_); 57 DCHECK(!info_emitted_);
58 ASSERT(reg.Is64Bits()); 58 DCHECK(reg.Is64Bits());
59 ASSERT(!reg.Is(csp)); 59 DCHECK(!reg.Is(csp));
60 reg_ = reg; 60 reg_ = reg;
61 __ bind(&patch_site_); 61 __ bind(&patch_site_);
62 __ tbnz(xzr, 0, target); // Never taken before patched. 62 __ tbnz(xzr, 0, target); // Never taken before patched.
63 } 63 }
64 64
65 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) { 65 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
66 UseScratchRegisterScope temps(masm_); 66 UseScratchRegisterScope temps(masm_);
67 Register temp = temps.AcquireX(); 67 Register temp = temps.AcquireX();
68 __ Orr(temp, reg1, reg2); 68 __ Orr(temp, reg1, reg2);
69 EmitJumpIfNotSmi(temp, target); 69 EmitJumpIfNotSmi(temp, target);
(...skipping 77 matching lines...)
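EmitJumpIfEitherNotSmi gets away with a single test because tag bits accumulate under OR: bit 0 of (reg1 | reg2) is clear only if it is clear in both inputs. A small sketch of that reasoning:

    #include <stdint.h>

    // One test covers two registers: the OR of two smis is itself smi-tagged.
    static inline bool BothSmis(uint64_t a, uint64_t b) {
      return ((a | b) & 1) == 0;  // bit 0 clear iff clear in both a and b
    }
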
147 // Push(lr, fp, cp, x1); 147 // Push(lr, fp, cp, x1);
148 // Add(fp, jssp, 2 * kPointerSize); 148 // Add(fp, jssp, 2 * kPointerSize);
149 info->set_prologue_offset(masm_->pc_offset()); 149 info->set_prologue_offset(masm_->pc_offset());
150 __ Prologue(info->IsCodePreAgingActive()); 150 __ Prologue(info->IsCodePreAgingActive());
151 info->AddNoFrameRange(0, masm_->pc_offset()); 151 info->AddNoFrameRange(0, masm_->pc_offset());
152 152
153 // Reserve space on the stack for locals. 153 // Reserve space on the stack for locals.
154 { Comment cmnt(masm_, "[ Allocate locals"); 154 { Comment cmnt(masm_, "[ Allocate locals");
155 int locals_count = info->scope()->num_stack_slots(); 155 int locals_count = info->scope()->num_stack_slots();
156 // Generators allocate locals, if any, in context slots. 156 // Generators allocate locals, if any, in context slots.
157 ASSERT(!info->function()->is_generator() || locals_count == 0); 157 DCHECK(!info->function()->is_generator() || locals_count == 0);
158 158
159 if (locals_count > 0) { 159 if (locals_count > 0) {
160 if (locals_count >= 128) { 160 if (locals_count >= 128) {
161 Label ok; 161 Label ok;
162 ASSERT(jssp.Is(__ StackPointer())); 162 DCHECK(jssp.Is(__ StackPointer()));
163 __ Sub(x10, jssp, locals_count * kPointerSize); 163 __ Sub(x10, jssp, locals_count * kPointerSize);
164 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex); 164 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
165 __ B(hs, &ok); 165 __ B(hs, &ok);
166 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); 166 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
167 __ Bind(&ok); 167 __ Bind(&ok);
168 } 168 }
169 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); 169 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
170 if (FLAG_optimize_for_size) { 170 if (FLAG_optimize_for_size) {
 171 __ PushMultipleTimes(x10, locals_count); 171 __ PushMultipleTimes(x10, locals_count);
172 } else { 172 } else {
(...skipping 111 matching lines...)
284 // redeclaration. 284 // redeclaration.
285 if (scope()->HasIllegalRedeclaration()) { 285 if (scope()->HasIllegalRedeclaration()) {
286 Comment cmnt(masm_, "[ Declarations"); 286 Comment cmnt(masm_, "[ Declarations");
287 scope()->VisitIllegalRedeclaration(this); 287 scope()->VisitIllegalRedeclaration(this);
288 288
289 } else { 289 } else {
290 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 290 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
291 { Comment cmnt(masm_, "[ Declarations"); 291 { Comment cmnt(masm_, "[ Declarations");
292 if (scope()->is_function_scope() && scope()->function() != NULL) { 292 if (scope()->is_function_scope() && scope()->function() != NULL) {
293 VariableDeclaration* function = scope()->function(); 293 VariableDeclaration* function = scope()->function();
294 ASSERT(function->proxy()->var()->mode() == CONST || 294 DCHECK(function->proxy()->var()->mode() == CONST ||
295 function->proxy()->var()->mode() == CONST_LEGACY); 295 function->proxy()->var()->mode() == CONST_LEGACY);
296 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); 296 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
297 VisitVariableDeclaration(function); 297 VisitVariableDeclaration(function);
298 } 298 }
299 VisitDeclarations(scope()->declarations()); 299 VisitDeclarations(scope()->declarations());
300 } 300 }
301 } 301 }
302 302
303 { Comment cmnt(masm_, "[ Stack check"); 303 { Comment cmnt(masm_, "[ Stack check");
304 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 304 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
305 Label ok; 305 Label ok;
306 ASSERT(jssp.Is(__ StackPointer())); 306 DCHECK(jssp.Is(__ StackPointer()));
307 __ CompareRoot(jssp, Heap::kStackLimitRootIndex); 307 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
308 __ B(hs, &ok); 308 __ B(hs, &ok);
309 PredictableCodeSizeScope predictable(masm_, 309 PredictableCodeSizeScope predictable(masm_,
310 Assembler::kCallSizeWithRelocation); 310 Assembler::kCallSizeWithRelocation);
311 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); 311 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
312 __ Bind(&ok); 312 __ Bind(&ok);
313 } 313 }
314 314
315 { Comment cmnt(masm_, "[ Body"); 315 { Comment cmnt(masm_, "[ Body");
316 ASSERT(loop_depth() == 0); 316 DCHECK(loop_depth() == 0);
317 VisitStatements(function()->body()); 317 VisitStatements(function()->body());
318 ASSERT(loop_depth() == 0); 318 DCHECK(loop_depth() == 0);
319 } 319 }
320 320
321 // Always emit a 'return undefined' in case control fell off the end of 321 // Always emit a 'return undefined' in case control fell off the end of
322 // the body. 322 // the body.
323 { Comment cmnt(masm_, "[ return <undefined>;"); 323 { Comment cmnt(masm_, "[ return <undefined>;");
324 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); 324 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
325 } 325 }
326 EmitReturnSequence(); 326 EmitReturnSequence();
327 327
328 // Force emission of the pools, so they don't get emitted in the middle 328 // Force emission of the pools, so they don't get emitted in the middle
(...skipping 23 matching lines...)
352 reset_value = FLAG_interrupt_budget >> 4; 352 reset_value = FLAG_interrupt_budget >> 4;
353 } 353 }
354 __ Mov(x2, Operand(profiling_counter_)); 354 __ Mov(x2, Operand(profiling_counter_));
355 __ Mov(x3, Smi::FromInt(reset_value)); 355 __ Mov(x3, Smi::FromInt(reset_value));
356 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset)); 356 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
357 } 357 }
358 358
359 359
360 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 360 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
361 Label* back_edge_target) { 361 Label* back_edge_target) {
362 ASSERT(jssp.Is(__ StackPointer())); 362 DCHECK(jssp.Is(__ StackPointer()));
363 Comment cmnt(masm_, "[ Back edge bookkeeping"); 363 Comment cmnt(masm_, "[ Back edge bookkeeping");
364 // Block literal pools whilst emitting back edge code. 364 // Block literal pools whilst emitting back edge code.
365 Assembler::BlockPoolsScope block_const_pool(masm_); 365 Assembler::BlockPoolsScope block_const_pool(masm_);
366 Label ok; 366 Label ok;
367 367
368 ASSERT(back_edge_target->is_bound()); 368 DCHECK(back_edge_target->is_bound());
369 // We want to do a round rather than a floor of distance/kCodeSizeMultiplier 369 // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
370 // to reduce the absolute error due to the integer division. To do that, 370 // to reduce the absolute error due to the integer division. To do that,
371 // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to 371 // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
372 // the result). 372 // the result).
373 int distance = 373 int distance =
374 masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2; 374 masm_->SizeOfCodeGeneratedSince(back_edge_target) + kCodeSizeMultiplier / 2;
375 int weight = Min(kMaxBackEdgeWeight, 375 int weight = Min(kMaxBackEdgeWeight,
376 Max(1, distance / kCodeSizeMultiplier)); 376 Max(1, distance / kCodeSizeMultiplier));
377 EmitProfilingCounterDecrement(weight); 377 EmitProfilingCounterDecrement(weight);
378 __ B(pl, &ok); 378 __ B(pl, &ok);
(...skipping 21 matching lines...)
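The comment above describes a standard round-to-nearest trick: adding half the divisor before an integer division turns flooring into rounding. A worked sketch (the divisor 48 is for illustration only, not a claim about kCodeSizeMultiplier's value):

    // Round-to-nearest integer division, as used for the back-edge weight.
    static inline int RoundedDiv(int n, int d) {
      return (n + d / 2) / d;  // RoundedDiv(130, 48) == 3, where floor gives 2
    }
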
400 if (return_label_.is_bound()) { 400 if (return_label_.is_bound()) {
401 __ B(&return_label_); 401 __ B(&return_label_);
402 402
403 } else { 403 } else {
404 __ Bind(&return_label_); 404 __ Bind(&return_label_);
405 if (FLAG_trace) { 405 if (FLAG_trace) {
406 // Push the return value on the stack as the parameter. 406 // Push the return value on the stack as the parameter.
407 // Runtime::TraceExit returns its parameter in x0. 407 // Runtime::TraceExit returns its parameter in x0.
408 __ Push(result_register()); 408 __ Push(result_register());
409 __ CallRuntime(Runtime::kTraceExit, 1); 409 __ CallRuntime(Runtime::kTraceExit, 1);
410 ASSERT(x0.Is(result_register())); 410 DCHECK(x0.Is(result_register()));
411 } 411 }
412 // Pretend that the exit is a backwards jump to the entry. 412 // Pretend that the exit is a backwards jump to the entry.
413 int weight = 1; 413 int weight = 1;
414 if (info_->ShouldSelfOptimize()) { 414 if (info_->ShouldSelfOptimize()) {
415 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 415 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
416 } else { 416 } else {
417 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; 417 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
418 weight = Min(kMaxBackEdgeWeight, 418 weight = Min(kMaxBackEdgeWeight,
419 Max(1, distance / kCodeSizeMultiplier)); 419 Max(1, distance / kCodeSizeMultiplier));
420 } 420 }
(...skipping 13 matching lines...)
434 { 434 {
435 InstructionAccurateScope scope(masm_, 435 InstructionAccurateScope scope(masm_,
436 Assembler::kJSRetSequenceInstructions); 436 Assembler::kJSRetSequenceInstructions);
437 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 437 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
438 __ RecordJSReturn(); 438 __ RecordJSReturn();
439 // This code is generated using Assembler methods rather than Macro 439 // This code is generated using Assembler methods rather than Macro
440 // Assembler methods because it will be patched later on, and so the size 440 // Assembler methods because it will be patched later on, and so the size
441 // of the generated code must be consistent. 441 // of the generated code must be consistent.
442 const Register& current_sp = __ StackPointer(); 442 const Register& current_sp = __ StackPointer();
 443 // Nothing ensures 16-byte alignment here. 443 // Nothing ensures 16-byte alignment here.
444 ASSERT(!current_sp.Is(csp)); 444 DCHECK(!current_sp.Is(csp));
445 __ mov(current_sp, fp); 445 __ mov(current_sp, fp);
446 int no_frame_start = masm_->pc_offset(); 446 int no_frame_start = masm_->pc_offset();
447 __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex)); 447 __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
448 // Drop the arguments and receiver and return. 448 // Drop the arguments and receiver and return.
449 // TODO(all): This implementation is overkill as it supports 2**31+1 449 // TODO(all): This implementation is overkill as it supports 2**31+1
450 // arguments, consider how to improve it without creating a security 450 // arguments, consider how to improve it without creating a security
451 // hole. 451 // hole.
452 __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2); 452 __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
453 __ add(current_sp, current_sp, ip0); 453 __ add(current_sp, current_sp, ip0);
454 __ ret(); 454 __ ret();
455 __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1)); 455 __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
456 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 456 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
457 } 457 }
458 } 458 }
459 } 459 }
460 460
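For orientation: the dc64 directive at the end of the return sequence emits a 64-bit literal holding the number of bytes to drop, and the pc-relative ldr three instructions earlier loads it into ip0. A hedged restatement of that constant (kXRegSize is 8 bytes on ARM64; the extra slot is the implicit receiver):

    #include <stdint.h>

    // Sketch of the value emitted by dc64 above.
    static inline int64_t ArgSlotBytes(int num_parameters) {
      const int64_t kXRegSize = 8;              // one 64-bit stack slot
      return kXRegSize * (num_parameters + 1);  // parameters plus the receiver
    }
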
461 461
462 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { 462 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
463 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 463 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
464 } 464 }
465 465
466 466
467 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { 467 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
468 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 468 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
469 codegen()->GetVar(result_register(), var); 469 codegen()->GetVar(result_register(), var);
470 } 470 }
471 471
472 472
473 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { 473 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
474 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 474 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
475 codegen()->GetVar(result_register(), var); 475 codegen()->GetVar(result_register(), var);
476 __ Push(result_register()); 476 __ Push(result_register());
477 } 477 }
478 478
479 479
480 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 480 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
481 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 481 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
482 // For simplicity we always test the accumulator register. 482 // For simplicity we always test the accumulator register.
483 codegen()->GetVar(result_register(), var); 483 codegen()->GetVar(result_register(), var);
484 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 484 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
485 codegen()->DoTest(this); 485 codegen()->DoTest(this);
486 } 486 }
487 487
488 488
489 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 489 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
490 // Root values have no side effects. 490 // Root values have no side effects.
491 } 491 }
(...skipping 43 matching lines...)
535 __ Mov(result_register(), Operand(lit)); 535 __ Mov(result_register(), Operand(lit));
536 __ Push(result_register()); 536 __ Push(result_register());
537 } 537 }
538 538
539 539
540 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 540 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
541 codegen()->PrepareForBailoutBeforeSplit(condition(), 541 codegen()->PrepareForBailoutBeforeSplit(condition(),
542 true, 542 true,
543 true_label_, 543 true_label_,
544 false_label_); 544 false_label_);
545 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals. 545 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
546 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 546 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
547 if (false_label_ != fall_through_) __ B(false_label_); 547 if (false_label_ != fall_through_) __ B(false_label_);
548 } else if (lit->IsTrue() || lit->IsJSObject()) { 548 } else if (lit->IsTrue() || lit->IsJSObject()) {
549 if (true_label_ != fall_through_) __ B(true_label_); 549 if (true_label_ != fall_through_) __ B(true_label_);
550 } else if (lit->IsString()) { 550 } else if (lit->IsString()) {
551 if (String::cast(*lit)->length() == 0) { 551 if (String::cast(*lit)->length() == 0) {
552 if (false_label_ != fall_through_) __ B(false_label_); 552 if (false_label_ != fall_through_) __ B(false_label_);
553 } else { 553 } else {
554 if (true_label_ != fall_through_) __ B(true_label_); 554 if (true_label_ != fall_through_) __ B(true_label_);
555 } 555 }
556 } else if (lit->IsSmi()) { 556 } else if (lit->IsSmi()) {
557 if (Smi::cast(*lit)->value() == 0) { 557 if (Smi::cast(*lit)->value() == 0) {
558 if (false_label_ != fall_through_) __ B(false_label_); 558 if (false_label_ != fall_through_) __ B(false_label_);
559 } else { 559 } else {
560 if (true_label_ != fall_through_) __ B(true_label_); 560 if (true_label_ != fall_through_) __ B(true_label_);
561 } 561 }
562 } else { 562 } else {
563 // For simplicity we always test the accumulator register. 563 // For simplicity we always test the accumulator register.
564 __ Mov(result_register(), Operand(lit)); 564 __ Mov(result_register(), Operand(lit));
565 codegen()->DoTest(this); 565 codegen()->DoTest(this);
566 } 566 }
567 } 567 }
568 568
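This Plug overload folds JavaScript's ToBoolean rules into static branches wherever the literal's truthiness is known at compile time, and only falls back to DoTest otherwise. A hedged sketch of the decision table the branches implement (the Literal helper names here are hypothetical):

    enum class Truth { kFalse, kTrue, kRuntime };

    Truth FoldLiteralTruthiness(const Literal& lit) {  // hypothetical helper
      if (lit.IsUndefined() || lit.IsNull() || lit.IsFalse()) return Truth::kFalse;
      if (lit.IsTrue() || lit.IsJSObject()) return Truth::kTrue;
      if (lit.IsString()) return lit.StringLength() == 0 ? Truth::kFalse : Truth::kTrue;
      if (lit.IsSmi()) return lit.SmiValue() == 0 ? Truth::kFalse : Truth::kTrue;
      return Truth::kRuntime;  // emit a real test on the accumulator
    }
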
569 569
570 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 570 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
571 Register reg) const { 571 Register reg) const {
572 ASSERT(count > 0); 572 DCHECK(count > 0);
573 __ Drop(count); 573 __ Drop(count);
574 } 574 }
575 575
576 576
577 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( 577 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
578 int count, 578 int count,
579 Register reg) const { 579 Register reg) const {
580 ASSERT(count > 0); 580 DCHECK(count > 0);
581 __ Drop(count); 581 __ Drop(count);
582 __ Move(result_register(), reg); 582 __ Move(result_register(), reg);
583 } 583 }
584 584
585 585
586 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 586 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
587 Register reg) const { 587 Register reg) const {
588 ASSERT(count > 0); 588 DCHECK(count > 0);
589 if (count > 1) __ Drop(count - 1); 589 if (count > 1) __ Drop(count - 1);
590 __ Poke(reg, 0); 590 __ Poke(reg, 0);
591 } 591 }
592 592
593 593
594 void FullCodeGenerator::TestContext::DropAndPlug(int count, 594 void FullCodeGenerator::TestContext::DropAndPlug(int count,
595 Register reg) const { 595 Register reg) const {
596 ASSERT(count > 0); 596 DCHECK(count > 0);
597 // For simplicity we always test the accumulator register. 597 // For simplicity we always test the accumulator register.
598 __ Drop(count); 598 __ Drop(count);
599 __ Mov(result_register(), reg); 599 __ Mov(result_register(), reg);
600 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 600 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
601 codegen()->DoTest(this); 601 codegen()->DoTest(this);
602 } 602 }
603 603
604 604
605 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 605 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
606 Label* materialize_false) const { 606 Label* materialize_false) const {
607 ASSERT(materialize_true == materialize_false); 607 DCHECK(materialize_true == materialize_false);
608 __ Bind(materialize_true); 608 __ Bind(materialize_true);
609 } 609 }
610 610
611 611
612 void FullCodeGenerator::AccumulatorValueContext::Plug( 612 void FullCodeGenerator::AccumulatorValueContext::Plug(
613 Label* materialize_true, 613 Label* materialize_true,
614 Label* materialize_false) const { 614 Label* materialize_false) const {
615 Label done; 615 Label done;
616 __ Bind(materialize_true); 616 __ Bind(materialize_true);
617 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); 617 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
(...skipping 13 matching lines...)
631 __ B(&done); 631 __ B(&done);
632 __ Bind(materialize_false); 632 __ Bind(materialize_false);
633 __ LoadRoot(x10, Heap::kFalseValueRootIndex); 633 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
634 __ Bind(&done); 634 __ Bind(&done);
635 __ Push(x10); 635 __ Push(x10);
636 } 636 }
637 637
638 638
639 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 639 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
640 Label* materialize_false) const { 640 Label* materialize_false) const {
641 ASSERT(materialize_true == true_label_); 641 DCHECK(materialize_true == true_label_);
642 ASSERT(materialize_false == false_label_); 642 DCHECK(materialize_false == false_label_);
643 } 643 }
644 644
645 645
646 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 646 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
647 } 647 }
648 648
649 649
650 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 650 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
651 Heap::RootListIndex value_root_index = 651 Heap::RootListIndex value_root_index =
652 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 652 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
(...skipping 40 matching lines...)
693 // If (!cond), branch to if_false. 693 // If (!cond), branch to if_false.
694 // fall_through is used as an optimization in cases where only one branch 694 // fall_through is used as an optimization in cases where only one branch
695 // instruction is necessary. 695 // instruction is necessary.
696 void FullCodeGenerator::Split(Condition cond, 696 void FullCodeGenerator::Split(Condition cond,
697 Label* if_true, 697 Label* if_true,
698 Label* if_false, 698 Label* if_false,
699 Label* fall_through) { 699 Label* fall_through) {
700 if (if_false == fall_through) { 700 if (if_false == fall_through) {
701 __ B(cond, if_true); 701 __ B(cond, if_true);
702 } else if (if_true == fall_through) { 702 } else if (if_true == fall_through) {
703 ASSERT(if_false != fall_through); 703 DCHECK(if_false != fall_through);
704 __ B(NegateCondition(cond), if_false); 704 __ B(NegateCondition(cond), if_false);
705 } else { 705 } else {
706 __ B(cond, if_true); 706 __ B(cond, if_true);
707 __ B(if_false); 707 __ B(if_false);
708 } 708 }
709 } 709 }
710 710
711 711
712 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 712 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
713 // Offset is negative because higher indexes are at lower addresses. 713 // Offset is negative because higher indexes are at lower addresses.
714 int offset = -var->index() * kXRegSize; 714 int offset = -var->index() * kXRegSize;
715 // Adjust by a (parameter or local) base offset. 715 // Adjust by a (parameter or local) base offset.
716 if (var->IsParameter()) { 716 if (var->IsParameter()) {
717 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; 717 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
718 } else { 718 } else {
719 offset += JavaScriptFrameConstants::kLocal0Offset; 719 offset += JavaScriptFrameConstants::kLocal0Offset;
720 } 720 }
721 return MemOperand(fp, offset); 721 return MemOperand(fp, offset);
722 } 722 }
723 723
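StackOperand's arithmetic is compact; a hedged restatement (the function and parameter names below are hypothetical, and the local-slot base offset is left symbolic rather than asserting JavaScriptFrameConstants' value):

    // Mirror of the offset computation: slots grow toward lower addresses.
    int StackSlotOffset(bool is_parameter, int index, int num_parameters,
                        int local0_offset) {
      const int kPointerSize = 8;          // 64-bit slots on ARM64
      int offset = -index * kPointerSize;  // higher indexes at lower addresses
      offset += is_parameter
                    ? (num_parameters + 1) * kPointerSize  // +1 for the receiver
                    : local0_offset;
      return offset;
    }
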
724 724
725 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { 725 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
726 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 726 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
727 if (var->IsContextSlot()) { 727 if (var->IsContextSlot()) {
728 int context_chain_length = scope()->ContextChainLength(var->scope()); 728 int context_chain_length = scope()->ContextChainLength(var->scope());
729 __ LoadContext(scratch, context_chain_length); 729 __ LoadContext(scratch, context_chain_length);
730 return ContextMemOperand(scratch, var->index()); 730 return ContextMemOperand(scratch, var->index());
731 } else { 731 } else {
732 return StackOperand(var); 732 return StackOperand(var);
733 } 733 }
734 } 734 }
735 735
736 736
737 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 737 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
738 // Use destination as scratch. 738 // Use destination as scratch.
739 MemOperand location = VarOperand(var, dest); 739 MemOperand location = VarOperand(var, dest);
740 __ Ldr(dest, location); 740 __ Ldr(dest, location);
741 } 741 }
742 742
743 743
744 void FullCodeGenerator::SetVar(Variable* var, 744 void FullCodeGenerator::SetVar(Variable* var,
745 Register src, 745 Register src,
746 Register scratch0, 746 Register scratch0,
747 Register scratch1) { 747 Register scratch1) {
748 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 748 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
749 ASSERT(!AreAliased(src, scratch0, scratch1)); 749 DCHECK(!AreAliased(src, scratch0, scratch1));
750 MemOperand location = VarOperand(var, scratch0); 750 MemOperand location = VarOperand(var, scratch0);
751 __ Str(src, location); 751 __ Str(src, location);
752 752
753 // Emit the write barrier code if the location is in the heap. 753 // Emit the write barrier code if the location is in the heap.
754 if (var->IsContextSlot()) { 754 if (var->IsContextSlot()) {
755 // scratch0 contains the correct context. 755 // scratch0 contains the correct context.
756 __ RecordWriteContextSlot(scratch0, 756 __ RecordWriteContextSlot(scratch0,
757 location.offset(), 757 location.offset(),
758 src, 758 src,
759 scratch1, 759 scratch1,
(...skipping 22 matching lines...)
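SetVar emits the barrier only for context slots because stack slots are not heap objects and the garbage collector never scans them for pointers. The sketch below shows a generic card-marking barrier purely for illustration; it is not V8's scheme (RecordWriteContextSlot drives V8's own remembered-set machinery):

    #include <stdint.h>

    // Generic write-barrier sketch, illustration only.
    void CardMarkingBarrier(uint8_t* card_table, uintptr_t slot_address) {
      const uintptr_t kCardSizeLog2 = 9;              // assumption: 512-byte cards
      card_table[slot_address >> kCardSizeLog2] = 1;  // mark the card dirty
    }
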
782 __ CompareRoot(x0, Heap::kTrueValueRootIndex); 782 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
783 Split(eq, if_true, if_false, NULL); 783 Split(eq, if_true, if_false, NULL);
784 __ Bind(&skip); 784 __ Bind(&skip);
785 } 785 }
786 } 786 }
787 787
788 788
789 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 789 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
790 // The variable in the declaration always resides in the current function 790 // The variable in the declaration always resides in the current function
791 // context. 791 // context.
792 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 792 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
793 if (generate_debug_code_) { 793 if (generate_debug_code_) {
794 // Check that we're not inside a with or catch context. 794 // Check that we're not inside a with or catch context.
795 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset)); 795 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
796 __ CompareRoot(x1, Heap::kWithContextMapRootIndex); 796 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
797 __ Check(ne, kDeclarationInWithContext); 797 __ Check(ne, kDeclarationInWithContext);
798 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex); 798 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
799 __ Check(ne, kDeclarationInCatchContext); 799 __ Check(ne, kDeclarationInCatchContext);
800 } 800 }
801 } 801 }
802 802
(...skipping 34 matching lines...)
837 __ Str(x10, ContextMemOperand(cp, variable->index())); 837 __ Str(x10, ContextMemOperand(cp, variable->index()));
838 // No write barrier since the_hole_value is in old space. 838 // No write barrier since the_hole_value is in old space.
839 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 839 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
840 } 840 }
841 break; 841 break;
842 842
843 case Variable::LOOKUP: { 843 case Variable::LOOKUP: {
844 Comment cmnt(masm_, "[ VariableDeclaration"); 844 Comment cmnt(masm_, "[ VariableDeclaration");
845 __ Mov(x2, Operand(variable->name())); 845 __ Mov(x2, Operand(variable->name()));
846 // Declaration nodes are always introduced in one of four modes. 846 // Declaration nodes are always introduced in one of four modes.
847 ASSERT(IsDeclaredVariableMode(mode)); 847 DCHECK(IsDeclaredVariableMode(mode));
848 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY 848 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
849 : NONE; 849 : NONE;
850 __ Mov(x1, Smi::FromInt(attr)); 850 __ Mov(x1, Smi::FromInt(attr));
851 // Push initial value, if any. 851 // Push initial value, if any.
852 // Note: For variables we must not push an initial value (such as 852 // Note: For variables we must not push an initial value (such as
853 // 'undefined') because we may have a (legal) redeclaration and we 853 // 'undefined') because we may have a (legal) redeclaration and we
854 // must not destroy the current value. 854 // must not destroy the current value.
855 if (hole_init) { 855 if (hole_init) {
856 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex); 856 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
857 __ Push(cp, x2, x1, x0); 857 __ Push(cp, x2, x1, x0);
(...skipping 59 matching lines...)
917 VisitForStackValue(declaration->fun()); 917 VisitForStackValue(declaration->fun());
918 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 918 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
919 break; 919 break;
920 } 920 }
921 } 921 }
922 } 922 }
923 923
924 924
925 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 925 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
926 Variable* variable = declaration->proxy()->var(); 926 Variable* variable = declaration->proxy()->var();
927 ASSERT(variable->location() == Variable::CONTEXT); 927 DCHECK(variable->location() == Variable::CONTEXT);
928 ASSERT(variable->interface()->IsFrozen()); 928 DCHECK(variable->interface()->IsFrozen());
929 929
930 Comment cmnt(masm_, "[ ModuleDeclaration"); 930 Comment cmnt(masm_, "[ ModuleDeclaration");
931 EmitDebugCheckDeclarationContext(variable); 931 EmitDebugCheckDeclarationContext(variable);
932 932
933 // Load instance object. 933 // Load instance object.
934 __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope())); 934 __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
935 __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index())); 935 __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
936 __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX)); 936 __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));
937 937
938 // Assign it. 938 // Assign it.
(...skipping 240 matching lines...)
1179 1179
1180 __ LoadObject(x1, FeedbackVector()); 1180 __ LoadObject(x1, FeedbackVector());
1181 __ Mov(x10, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); 1181 __ Mov(x10, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
1182 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot))); 1182 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
1183 1183
1184 __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check. 1184 __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check.
1185 __ Peek(x10, 0); // Get enumerated object. 1185 __ Peek(x10, 0); // Get enumerated object.
1186 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1186 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1187 // TODO(all): similar check was done already. Can we avoid it here? 1187 // TODO(all): similar check was done already. Can we avoid it here?
1188 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE); 1188 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1189 ASSERT(Smi::FromInt(0) == 0); 1189 DCHECK(Smi::FromInt(0) == 0);
1190 __ CzeroX(x1, le); // Zero indicates proxy. 1190 __ CzeroX(x1, le); // Zero indicates proxy.
1191 __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset)); 1191 __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
1192 // Smi and array, fixed array length (as smi) and initial index. 1192 // Smi and array, fixed array length (as smi) and initial index.
1193 __ Push(x1, x0, x2, xzr); 1193 __ Push(x1, x0, x2, xzr);
1194 1194
1195 // Generate code for doing the condition check. 1195 // Generate code for doing the condition check.
1196 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1196 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1197 __ Bind(&loop); 1197 __ Bind(&loop);
1198 // Load the current count to x0, load the length to x1. 1198 // Load the current count to x0, load the length to x1.
1199 __ PeekPair(x0, x1, 0); 1199 __ PeekPair(x0, x1, 0);
(...skipping 192 matching lines...)
1392 } 1392 }
1393 1393
1394 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL 1394 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
1395 : CONTEXTUAL; 1395 : CONTEXTUAL;
1396 CallLoadIC(mode); 1396 CallLoadIC(mode);
1397 } 1397 }
1398 1398
1399 1399
1400 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1400 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1401 Label* slow) { 1401 Label* slow) {
1402 ASSERT(var->IsContextSlot()); 1402 DCHECK(var->IsContextSlot());
1403 Register context = cp; 1403 Register context = cp;
1404 Register next = x10; 1404 Register next = x10;
1405 Register temp = x11; 1405 Register temp = x11;
1406 1406
1407 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1407 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1408 if (s->num_heap_slots() > 0) { 1408 if (s->num_heap_slots() > 0) {
1409 if (s->calls_sloppy_eval()) { 1409 if (s->calls_sloppy_eval()) {
1410 // Check that extension is NULL. 1410 // Check that extension is NULL.
1411 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); 1411 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1412 __ Cbnz(temp, slow); 1412 __ Cbnz(temp, slow);
(...skipping 72 matching lines...)
1485 case Variable::CONTEXT: { 1485 case Variable::CONTEXT: {
1486 Comment cmnt(masm_, var->IsContextSlot() 1486 Comment cmnt(masm_, var->IsContextSlot()
1487 ? "Context variable" 1487 ? "Context variable"
1488 : "Stack variable"); 1488 : "Stack variable");
1489 if (var->binding_needs_init()) { 1489 if (var->binding_needs_init()) {
1490 // var->scope() may be NULL when the proxy is located in eval code and 1490 // var->scope() may be NULL when the proxy is located in eval code and
1491 // refers to a potential outside binding. Currently those bindings are 1491 // refers to a potential outside binding. Currently those bindings are
1492 // always looked up dynamically, i.e. in that case 1492 // always looked up dynamically, i.e. in that case
1493 // var->location() == LOOKUP. 1493 // var->location() == LOOKUP.
1494 // always holds. 1494 // always holds.
1495 ASSERT(var->scope() != NULL); 1495 DCHECK(var->scope() != NULL);
1496 1496
1497 // Check if the binding really needs an initialization check. The check 1497 // Check if the binding really needs an initialization check. The check
1498 // can be skipped in the following situation: we have a LET or CONST 1498 // can be skipped in the following situation: we have a LET or CONST
1499 // binding in harmony mode, both the Variable and the VariableProxy have 1499 // binding in harmony mode, both the Variable and the VariableProxy have
1500 // the same declaration scope (i.e. they are both in global code, in the 1500 // the same declaration scope (i.e. they are both in global code, in the
1501 // same function or in the same eval code) and the VariableProxy is in 1501 // same function or in the same eval code) and the VariableProxy is in
1502 // the source physically located after the initializer of the variable. 1502 // the source physically located after the initializer of the variable.
1503 // 1503 //
1504 // We cannot skip any initialization checks for CONST in non-harmony 1504 // We cannot skip any initialization checks for CONST in non-harmony
1505 // mode because const variables may be declared but never initialized: 1505 // mode because const variables may be declared but never initialized:
1506 // if (false) { const x; }; var y = x; 1506 // if (false) { const x; }; var y = x;
1507 // 1507 //
1508 // The condition on the declaration scopes is a conservative check for 1508 // The condition on the declaration scopes is a conservative check for
1509 // nested functions that access a binding and are called before the 1509 // nested functions that access a binding and are called before the
1510 // binding is initialized: 1510 // binding is initialized:
1511 // function() { f(); let x = 1; function f() { x = 2; } } 1511 // function() { f(); let x = 1; function f() { x = 2; } }
1512 // 1512 //
1513 bool skip_init_check; 1513 bool skip_init_check;
1514 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { 1514 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1515 skip_init_check = false; 1515 skip_init_check = false;
1516 } else { 1516 } else {
1517 // Check that we always have valid source position. 1517 // Check that we always have valid source position.
1518 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); 1518 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1519 ASSERT(proxy->position() != RelocInfo::kNoPosition); 1519 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1520 skip_init_check = var->mode() != CONST_LEGACY && 1520 skip_init_check = var->mode() != CONST_LEGACY &&
1521 var->initializer_position() < proxy->position(); 1521 var->initializer_position() < proxy->position();
1522 } 1522 }
1523 1523
1524 if (!skip_init_check) { 1524 if (!skip_init_check) {
1525 // Let and const need a read barrier. 1525 // Let and const need a read barrier.
1526 GetVar(x0, var); 1526 GetVar(x0, var);
1527 Label done; 1527 Label done;
1528 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done); 1528 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1529 if (var->mode() == LET || var->mode() == CONST) { 1529 if (var->mode() == LET || var->mode() == CONST) {
1530 // Throw a reference error when using an uninitialized let/const 1530 // Throw a reference error when using an uninitialized let/const
1531 // binding in harmony mode. 1531 // binding in harmony mode.
1532 __ Mov(x0, Operand(var->name())); 1532 __ Mov(x0, Operand(var->name()));
1533 __ Push(x0); 1533 __ Push(x0);
1534 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1534 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1535 __ Bind(&done); 1535 __ Bind(&done);
1536 } else { 1536 } else {
 1537 // Uninitialized const bindings outside of harmony mode are unholed. 1537 // Uninitialized const bindings outside of harmony mode are unholed.
1538 ASSERT(var->mode() == CONST_LEGACY); 1538 DCHECK(var->mode() == CONST_LEGACY);
1539 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); 1539 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1540 __ Bind(&done); 1540 __ Bind(&done);
1541 } 1541 }
1542 context()->Plug(x0); 1542 context()->Plug(x0);
1543 break; 1543 break;
1544 } 1544 }
1545 } 1545 }
1546 context()->Plug(var); 1546 context()->Plug(var);
1547 break; 1547 break;
1548 } 1548 }
(...skipping 120 matching lines...)
1669 Literal* key = property->key(); 1669 Literal* key = property->key();
1670 Expression* value = property->value(); 1670 Expression* value = property->value();
1671 if (!result_saved) { 1671 if (!result_saved) {
1672 __ Push(x0); // Save result on stack 1672 __ Push(x0); // Save result on stack
1673 result_saved = true; 1673 result_saved = true;
1674 } 1674 }
1675 switch (property->kind()) { 1675 switch (property->kind()) {
1676 case ObjectLiteral::Property::CONSTANT: 1676 case ObjectLiteral::Property::CONSTANT:
1677 UNREACHABLE(); 1677 UNREACHABLE();
1678 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1678 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1679 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1679 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1680 // Fall through. 1680 // Fall through.
1681 case ObjectLiteral::Property::COMPUTED: 1681 case ObjectLiteral::Property::COMPUTED:
1682 if (key->value()->IsInternalizedString()) { 1682 if (key->value()->IsInternalizedString()) {
1683 if (property->emit_store()) { 1683 if (property->emit_store()) {
1684 VisitForAccumulatorValue(value); 1684 VisitForAccumulatorValue(value);
1685 ASSERT(StoreIC::ValueRegister().is(x0)); 1685 DCHECK(StoreIC::ValueRegister().is(x0));
1686 __ Mov(StoreIC::NameRegister(), Operand(key->value())); 1686 __ Mov(StoreIC::NameRegister(), Operand(key->value()));
1687 __ Peek(StoreIC::ReceiverRegister(), 0); 1687 __ Peek(StoreIC::ReceiverRegister(), 0);
1688 CallStoreIC(key->LiteralFeedbackId()); 1688 CallStoreIC(key->LiteralFeedbackId());
1689 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1689 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1690 } else { 1690 } else {
1691 VisitForEffect(value); 1691 VisitForEffect(value);
1692 } 1692 }
1693 break; 1693 break;
1694 } 1694 }
1695 if (property->emit_store()) { 1695 if (property->emit_store()) {
(...skipping 39 matching lines...)
1735 __ Push(x10); 1735 __ Push(x10);
1736 VisitForStackValue(it->first); 1736 VisitForStackValue(it->first);
1737 EmitAccessor(it->second->getter); 1737 EmitAccessor(it->second->getter);
1738 EmitAccessor(it->second->setter); 1738 EmitAccessor(it->second->setter);
1739 __ Mov(x10, Smi::FromInt(NONE)); 1739 __ Mov(x10, Smi::FromInt(NONE));
1740 __ Push(x10); 1740 __ Push(x10);
1741 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); 1741 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1742 } 1742 }
1743 1743
1744 if (expr->has_function()) { 1744 if (expr->has_function()) {
1745 ASSERT(result_saved); 1745 DCHECK(result_saved);
1746 __ Peek(x0, 0); 1746 __ Peek(x0, 0);
1747 __ Push(x0); 1747 __ Push(x0);
1748 __ CallRuntime(Runtime::kToFastProperties, 1); 1748 __ CallRuntime(Runtime::kToFastProperties, 1);
1749 } 1749 }
1750 1750
1751 if (result_saved) { 1751 if (result_saved) {
1752 context()->PlugTOS(); 1752 context()->PlugTOS();
1753 } else { 1753 } else {
1754 context()->Plug(x0); 1754 context()->Plug(x0);
1755 } 1755 }
1756 } 1756 }
1757 1757
1758 1758
1759 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1759 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1760 Comment cmnt(masm_, "[ ArrayLiteral"); 1760 Comment cmnt(masm_, "[ ArrayLiteral");
1761 1761
1762 expr->BuildConstantElements(isolate()); 1762 expr->BuildConstantElements(isolate());
1763 int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements 1763 int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
1764 : ArrayLiteral::kNoFlags; 1764 : ArrayLiteral::kNoFlags;
1765 1765
1766 ZoneList<Expression*>* subexprs = expr->values(); 1766 ZoneList<Expression*>* subexprs = expr->values();
1767 int length = subexprs->length(); 1767 int length = subexprs->length();
1768 Handle<FixedArray> constant_elements = expr->constant_elements(); 1768 Handle<FixedArray> constant_elements = expr->constant_elements();
1769 ASSERT_EQ(2, constant_elements->length()); 1769 DCHECK_EQ(2, constant_elements->length());
1770 ElementsKind constant_elements_kind = 1770 ElementsKind constant_elements_kind =
1771 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1771 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1772 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); 1772 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1773 Handle<FixedArrayBase> constant_elements_values( 1773 Handle<FixedArrayBase> constant_elements_values(
1774 FixedArrayBase::cast(constant_elements->get(1))); 1774 FixedArrayBase::cast(constant_elements->get(1)));
1775 1775
1776 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; 1776 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1777 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { 1777 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1778 // If the only customer of allocation sites is transitioning, then 1778 // If the only customer of allocation sites is transitioning, then
1779 // we can turn it off if we don't have anywhere else to transition to. 1779 // we can turn it off if we don't have anywhere else to transition to.
(...skipping 51 matching lines...)
1831 if (result_saved) { 1831 if (result_saved) {
1832 __ Drop(1); // literal index 1832 __ Drop(1); // literal index
1833 context()->PlugTOS(); 1833 context()->PlugTOS();
1834 } else { 1834 } else {
1835 context()->Plug(x0); 1835 context()->Plug(x0);
1836 } 1836 }
1837 } 1837 }
1838 1838
1839 1839
1840 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1840 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1841 ASSERT(expr->target()->IsValidReferenceExpression()); 1841 DCHECK(expr->target()->IsValidReferenceExpression());
1842 1842
1843 Comment cmnt(masm_, "[ Assignment"); 1843 Comment cmnt(masm_, "[ Assignment");
1844 1844
1845 // Left-hand side can only be a property, a global or a (parameter or local) 1845 // Left-hand side can only be a property, a global or a (parameter or local)
1846 // slot. 1846 // slot.
1847 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 1847 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1848 LhsKind assign_type = VARIABLE; 1848 LhsKind assign_type = VARIABLE;
1849 Property* property = expr->target()->AsProperty(); 1849 Property* property = expr->target()->AsProperty();
1850 if (property != NULL) { 1850 if (property != NULL) {
1851 assign_type = (property->key()->IsPropertyName()) 1851 assign_type = (property->key()->IsPropertyName())
(...skipping 232 matching lines...)
2084 { 2084 {
2085 Assembler::BlockPoolsScope scope(masm_); 2085 Assembler::BlockPoolsScope scope(masm_);
2086 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); 2086 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2087 patch_site.EmitPatchInfo(); 2087 patch_site.EmitPatchInfo();
2088 } 2088 }
2089 context()->Plug(x0); 2089 context()->Plug(x0);
2090 } 2090 }
2091 2091
2092 2092
2093 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2093 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2094 ASSERT(expr->IsValidReferenceExpression()); 2094 DCHECK(expr->IsValidReferenceExpression());
2095 2095
2096 // Left-hand side can only be a property, a global or a (parameter or local) 2096 // Left-hand side can only be a property, a global or a (parameter or local)
2097 // slot. 2097 // slot.
2098 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2098 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2099 LhsKind assign_type = VARIABLE; 2099 LhsKind assign_type = VARIABLE;
2100 Property* prop = expr->AsProperty(); 2100 Property* prop = expr->AsProperty();
2101 if (prop != NULL) { 2101 if (prop != NULL) {
2102 assign_type = (prop->key()->IsPropertyName()) 2102 assign_type = (prop->key()->IsPropertyName())
2103 ? NAMED_PROPERTY 2103 ? NAMED_PROPERTY
2104 : KEYED_PROPERTY; 2104 : KEYED_PROPERTY;
(...skipping 52 matching lines...)
2157 Token::Value op) { 2157 Token::Value op) {
2158 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment"); 2158 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2159 if (var->IsUnallocated()) { 2159 if (var->IsUnallocated()) {
2160 // Global var, const, or let. 2160 // Global var, const, or let.
2161 __ Mov(StoreIC::NameRegister(), Operand(var->name())); 2161 __ Mov(StoreIC::NameRegister(), Operand(var->name()));
2162 __ Ldr(StoreIC::ReceiverRegister(), GlobalObjectMemOperand()); 2162 __ Ldr(StoreIC::ReceiverRegister(), GlobalObjectMemOperand());
2163 CallStoreIC(); 2163 CallStoreIC();
2164 2164
2165 } else if (op == Token::INIT_CONST_LEGACY) { 2165 } else if (op == Token::INIT_CONST_LEGACY) {
2166 // Const initializers need a write barrier. 2166 // Const initializers need a write barrier.
2167 ASSERT(!var->IsParameter()); // No const parameters. 2167 DCHECK(!var->IsParameter()); // No const parameters.
2168 if (var->IsLookupSlot()) { 2168 if (var->IsLookupSlot()) {
2169 __ Mov(x1, Operand(var->name())); 2169 __ Mov(x1, Operand(var->name()));
2170 __ Push(x0, cp, x1); 2170 __ Push(x0, cp, x1);
2171 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); 2171 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2172 } else { 2172 } else {
2173 ASSERT(var->IsStackLocal() || var->IsContextSlot()); 2173 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2174 Label skip; 2174 Label skip;
2175 MemOperand location = VarOperand(var, x1); 2175 MemOperand location = VarOperand(var, x1);
2176 __ Ldr(x10, location); 2176 __ Ldr(x10, location);
2177 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip); 2177 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2178 EmitStoreToStackLocalOrContextSlot(var, location); 2178 EmitStoreToStackLocalOrContextSlot(var, location);
2179 __ Bind(&skip); 2179 __ Bind(&skip);
2180 } 2180 }
2181 2181
2182 } else if (var->mode() == LET && op != Token::INIT_LET) { 2182 } else if (var->mode() == LET && op != Token::INIT_LET) {
2183 // Non-initializing assignment to let variable needs a write barrier. 2183 // Non-initializing assignment to let variable needs a write barrier.
2184 ASSERT(!var->IsLookupSlot()); 2184 DCHECK(!var->IsLookupSlot());
2185 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2185 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2186 Label assign; 2186 Label assign;
2187 MemOperand location = VarOperand(var, x1); 2187 MemOperand location = VarOperand(var, x1);
2188 __ Ldr(x10, location); 2188 __ Ldr(x10, location);
2189 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign); 2189 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2190 __ Mov(x10, Operand(var->name())); 2190 __ Mov(x10, Operand(var->name()));
2191 __ Push(x10); 2191 __ Push(x10);
2192 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2192 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2193 // Perform the assignment. 2193 // Perform the assignment.
2194 __ Bind(&assign); 2194 __ Bind(&assign);
2195 EmitStoreToStackLocalOrContextSlot(var, location); 2195 EmitStoreToStackLocalOrContextSlot(var, location);
2196 2196
2197 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { 2197 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2198 if (var->IsLookupSlot()) { 2198 if (var->IsLookupSlot()) {
2199 // Assignment to var. 2199 // Assignment to var.
2200 __ Mov(x11, Operand(var->name())); 2200 __ Mov(x11, Operand(var->name()));
2201 __ Mov(x10, Smi::FromInt(strict_mode())); 2201 __ Mov(x10, Smi::FromInt(strict_mode()));
2202 // jssp[0] : mode. 2202 // jssp[0] : mode.
2203 // jssp[8] : name. 2203 // jssp[8] : name.
2204 // jssp[16] : context. 2204 // jssp[16] : context.
2205 // jssp[24] : value. 2205 // jssp[24] : value.
2206 __ Push(x0, cp, x11, x10); 2206 __ Push(x0, cp, x11, x10);
2207 __ CallRuntime(Runtime::kStoreLookupSlot, 4); 2207 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2208 } else { 2208 } else {
2209 // Assignment to var or initializing assignment to let/const in harmony 2209 // Assignment to var or initializing assignment to let/const in harmony
2210 // mode. 2210 // mode.
2211 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2211 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2212 MemOperand location = VarOperand(var, x1); 2212 MemOperand location = VarOperand(var, x1);
2213 if (FLAG_debug_code && op == Token::INIT_LET) { 2213 if (FLAG_debug_code && op == Token::INIT_LET) {
2214 __ Ldr(x10, location); 2214 __ Ldr(x10, location);
2215 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex); 2215 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2216 __ Check(eq, kLetBindingReInitialization); 2216 __ Check(eq, kLetBindingReInitialization);
2217 } 2217 }
2218 EmitStoreToStackLocalOrContextSlot(var, location); 2218 EmitStoreToStackLocalOrContextSlot(var, location);
2219 } 2219 }
2220 } 2220 }
2221 // Non-initializing assignments to consts are ignored. 2221 // Non-initializing assignments to consts are ignored.
2222 } 2222 }
2223 2223
2224 2224
2225 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2225 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2226 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment"); 2226 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2227 // Assignment to a property, using a named store IC. 2227 // Assignment to a property, using a named store IC.
2228 Property* prop = expr->target()->AsProperty(); 2228 Property* prop = expr->target()->AsProperty();
2229 ASSERT(prop != NULL); 2229 DCHECK(prop != NULL);
2230 ASSERT(prop->key()->IsLiteral()); 2230 DCHECK(prop->key()->IsLiteral());
2231 2231
2232 // Record source code position before IC call. 2232 // Record source code position before IC call.
2233 SetSourcePosition(expr->position()); 2233 SetSourcePosition(expr->position());
2234 __ Mov(StoreIC::NameRegister(), Operand(prop->key()->AsLiteral()->value())); 2234 __ Mov(StoreIC::NameRegister(), Operand(prop->key()->AsLiteral()->value()));
2235 __ Pop(StoreIC::ReceiverRegister()); 2235 __ Pop(StoreIC::ReceiverRegister());
2236 CallStoreIC(expr->AssignmentFeedbackId()); 2236 CallStoreIC(expr->AssignmentFeedbackId());
2237 2237
2238 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2238 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2239 context()->Plug(x0); 2239 context()->Plug(x0);
2240 } 2240 }
2241 2241
2242 2242
2243 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2243 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2244 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment"); 2244 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2245 // Assignment to a property, using a keyed store IC. 2245 // Assignment to a property, using a keyed store IC.
2246 2246
2247 // Record source code position before IC call. 2247 // Record source code position before IC call.
2248 SetSourcePosition(expr->position()); 2248 SetSourcePosition(expr->position());
2249 // TODO(all): Could we pass this in registers rather than on the stack? 2249 // TODO(all): Could we pass this in registers rather than on the stack?
2250 __ Pop(KeyedStoreIC::NameRegister(), KeyedStoreIC::ReceiverRegister()); 2250 __ Pop(KeyedStoreIC::NameRegister(), KeyedStoreIC::ReceiverRegister());
2251 ASSERT(KeyedStoreIC::ValueRegister().is(x0)); 2251 DCHECK(KeyedStoreIC::ValueRegister().is(x0));
2252 2252
2253 Handle<Code> ic = strict_mode() == SLOPPY 2253 Handle<Code> ic = strict_mode() == SLOPPY
2254 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2254 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2255 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2255 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2256 CallIC(ic, expr->AssignmentFeedbackId()); 2256 CallIC(ic, expr->AssignmentFeedbackId());
2257 2257
2258 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2258 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2259 context()->Plug(x0); 2259 context()->Plug(x0);
2260 } 2260 }
2261 2261
(...skipping 40 matching lines...)
2302 if (call_type == CallIC::FUNCTION) { 2302 if (call_type == CallIC::FUNCTION) {
2303 { StackValueContext context(this); 2303 { StackValueContext context(this);
2304 EmitVariableLoad(callee->AsVariableProxy()); 2304 EmitVariableLoad(callee->AsVariableProxy());
2305 PrepareForBailout(callee, NO_REGISTERS); 2305 PrepareForBailout(callee, NO_REGISTERS);
2306 } 2306 }
2307 // Push undefined as receiver. This is patched in the method prologue if it 2307 // Push undefined as receiver. This is patched in the method prologue if it
2308 // is a sloppy mode method. 2308 // is a sloppy mode method.
2309 __ Push(isolate()->factory()->undefined_value()); 2309 __ Push(isolate()->factory()->undefined_value());
2310 } else { 2310 } else {
2311 // Load the function from the receiver. 2311 // Load the function from the receiver.
2312 ASSERT(callee->IsProperty()); 2312 DCHECK(callee->IsProperty());
2313 __ Peek(LoadIC::ReceiverRegister(), 0); 2313 __ Peek(LoadIC::ReceiverRegister(), 0);
2314 EmitNamedPropertyLoad(callee->AsProperty()); 2314 EmitNamedPropertyLoad(callee->AsProperty());
2315 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2315 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2316 // Push the target function under the receiver. 2316 // Push the target function under the receiver.
2317 __ Pop(x10); 2317 __ Pop(x10);
2318 __ Push(x0, x10); 2318 __ Push(x0, x10);
2319 } 2319 }
2320 2320
2321 EmitCall(expr, call_type); 2321 EmitCall(expr, call_type);
2322 } 2322 }
2323 2323
2324 2324
2325 // Code common for calls using the IC. 2325 // Code common for calls using the IC.
2326 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2326 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2327 Expression* key) { 2327 Expression* key) {
2328 // Load the key. 2328 // Load the key.
2329 VisitForAccumulatorValue(key); 2329 VisitForAccumulatorValue(key);
2330 2330
2331 Expression* callee = expr->expression(); 2331 Expression* callee = expr->expression();
2332 2332
2333 // Load the function from the receiver. 2333 // Load the function from the receiver.
2334 ASSERT(callee->IsProperty()); 2334 DCHECK(callee->IsProperty());
2335 __ Peek(LoadIC::ReceiverRegister(), 0); 2335 __ Peek(LoadIC::ReceiverRegister(), 0);
2336 __ Move(LoadIC::NameRegister(), x0); 2336 __ Move(LoadIC::NameRegister(), x0);
2337 EmitKeyedPropertyLoad(callee->AsProperty()); 2337 EmitKeyedPropertyLoad(callee->AsProperty());
2338 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2338 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2339 2339
2340 // Push the target function under the receiver. 2340 // Push the target function under the receiver.
2341 __ Pop(x10); 2341 __ Pop(x10);
2342 __ Push(x0, x10); 2342 __ Push(x0, x10);
2343 2343
2344 EmitCall(expr, CallIC::METHOD); 2344 EmitCall(expr, CallIC::METHOD);
(...skipping 152 matching lines...)
2497 { PreservePositionScope scope(masm()->positions_recorder()); 2497 { PreservePositionScope scope(masm()->positions_recorder());
2498 VisitForStackValue(property->obj()); 2498 VisitForStackValue(property->obj());
2499 } 2499 }
2500 if (property->key()->IsPropertyName()) { 2500 if (property->key()->IsPropertyName()) {
2501 EmitCallWithLoadIC(expr); 2501 EmitCallWithLoadIC(expr);
2502 } else { 2502 } else {
2503 EmitKeyedCallWithLoadIC(expr, property->key()); 2503 EmitKeyedCallWithLoadIC(expr, property->key());
2504 } 2504 }
2505 2505
2506 } else { 2506 } else {
2507 ASSERT(call_type == Call::OTHER_CALL); 2507 DCHECK(call_type == Call::OTHER_CALL);
2508 // Call to an arbitrary expression not handled specially above. 2508 // Call to an arbitrary expression not handled specially above.
2509 { PreservePositionScope scope(masm()->positions_recorder()); 2509 { PreservePositionScope scope(masm()->positions_recorder());
2510 VisitForStackValue(callee); 2510 VisitForStackValue(callee);
2511 } 2511 }
2512 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex); 2512 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2513 __ Push(x1); 2513 __ Push(x1);
2514 // Emit function call. 2514 // Emit function call.
2515 EmitCall(expr); 2515 EmitCall(expr);
2516 } 2516 }
2517 2517
2518 #ifdef DEBUG 2518 #ifdef DEBUG
2519 // RecordJSReturnSite should have been called. 2519 // RecordJSReturnSite should have been called.
2520 ASSERT(expr->return_is_recorded_); 2520 DCHECK(expr->return_is_recorded_);
2521 #endif 2521 #endif
2522 } 2522 }
2523 2523
2524 2524
2525 void FullCodeGenerator::VisitCallNew(CallNew* expr) { 2525 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2526 Comment cmnt(masm_, "[ CallNew"); 2526 Comment cmnt(masm_, "[ CallNew");
2527 // According to ECMA-262, section 11.2.2, page 44, the function 2527 // According to ECMA-262, section 11.2.2, page 44, the function
2528 // expression in new calls must be evaluated before the 2528 // expression in new calls must be evaluated before the
2529 // arguments. 2529 // arguments.
2530 2530
(...skipping 13 matching lines...)
2544 // constructor invocation. 2544 // constructor invocation.
2545 SetSourcePosition(expr->position()); 2545 SetSourcePosition(expr->position());
2546 2546
2547 // Load function and argument count into x1 and x0. 2547 // Load function and argument count into x1 and x0.
2548 __ Mov(x0, arg_count); 2548 __ Mov(x0, arg_count);
2549 __ Peek(x1, arg_count * kXRegSize); 2549 __ Peek(x1, arg_count * kXRegSize);
2550 2550
2551 // Record call targets in unoptimized code. 2551 // Record call targets in unoptimized code.
2552 if (FLAG_pretenuring_call_new) { 2552 if (FLAG_pretenuring_call_new) {
2553 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); 2553 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2554 ASSERT(expr->AllocationSiteFeedbackSlot() == 2554 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2555 expr->CallNewFeedbackSlot() + 1); 2555 expr->CallNewFeedbackSlot() + 1);
2556 } 2556 }
2557 2557
2558 __ LoadObject(x2, FeedbackVector()); 2558 __ LoadObject(x2, FeedbackVector());
2559 __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot())); 2559 __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
2560 2560
2561 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); 2561 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2562 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); 2562 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2563 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2563 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2564 context()->Plug(x0); 2564 context()->Plug(x0);
2565 } 2565 }
2566 2566
2567 2567
2568 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2568 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2569 ZoneList<Expression*>* args = expr->arguments(); 2569 ZoneList<Expression*>* args = expr->arguments();
2570 ASSERT(args->length() == 1); 2570 DCHECK(args->length() == 1);
2571 2571
2572 VisitForAccumulatorValue(args->at(0)); 2572 VisitForAccumulatorValue(args->at(0));
2573 2573
2574 Label materialize_true, materialize_false; 2574 Label materialize_true, materialize_false;
2575 Label* if_true = NULL; 2575 Label* if_true = NULL;
2576 Label* if_false = NULL; 2576 Label* if_false = NULL;
2577 Label* fall_through = NULL; 2577 Label* fall_through = NULL;
2578 context()->PrepareTest(&materialize_true, &materialize_false, 2578 context()->PrepareTest(&materialize_true, &materialize_false,
2579 &if_true, &if_false, &fall_through); 2579 &if_true, &if_false, &fall_through);
2580 2580
2581 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2581 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2582 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through); 2582 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2583 2583
2584 context()->Plug(if_true, if_false); 2584 context()->Plug(if_true, if_false);
2585 } 2585 }
2586 2586
2587 2587
2588 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 2588 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2589 ZoneList<Expression*>* args = expr->arguments(); 2589 ZoneList<Expression*>* args = expr->arguments();
2590 ASSERT(args->length() == 1); 2590 DCHECK(args->length() == 1);
2591 2591
2592 VisitForAccumulatorValue(args->at(0)); 2592 VisitForAccumulatorValue(args->at(0));
2593 2593
2594 Label materialize_true, materialize_false; 2594 Label materialize_true, materialize_false;
2595 Label* if_true = NULL; 2595 Label* if_true = NULL;
2596 Label* if_false = NULL; 2596 Label* if_false = NULL;
2597 Label* fall_through = NULL; 2597 Label* fall_through = NULL;
2598 context()->PrepareTest(&materialize_true, &materialize_false, 2598 context()->PrepareTest(&materialize_true, &materialize_false,
2599 &if_true, &if_false, &fall_through); 2599 &if_true, &if_false, &fall_through);
2600 2600
2601 uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1); 2601 uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
2602 2602
2603 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2603 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2604 __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through); 2604 __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);
2605 2605
2606 context()->Plug(if_true, if_false); 2606 context()->Plug(if_true, if_false);
2607 } 2607 }
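
The single TestAndSplit above works because, under V8's arm64 smi encoding (assumed here: kSmiTag == 0, kSmiTagMask == 1, kSmiShift == 32, kSmiValueSize == 32), "is a non-negative smi" reduces to "tag bit and payload sign bit both clear". A hedged sketch:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint64_t kSmiTagMask = 1;                      // assumption
      const int kSmiShift = 32, kSmiValueSize = 32;        // arm64 layout
      const uint64_t sign_mask =
          uint64_t{1} << (kSmiShift + kSmiValueSize - 1);  // bit 63
      auto smi = [&](int32_t v) {                          // tag an int32
        return uint64_t{uint32_t(v)} << kSmiShift;
      };
      auto non_negative_smi = [&](uint64_t x) {            // the TestAndSplit
        return (x & (kSmiTagMask | sign_mask)) == 0;
      };
      assert(non_negative_smi(smi(42)));
      assert(!non_negative_smi(smi(-1)));      // payload sign bit set
      assert(!non_negative_smi(smi(42) | 1));  // heap-object tag bit set
      return 0;
    }
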
2608 2608
2609 2609
2610 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 2610 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2611 ZoneList<Expression*>* args = expr->arguments(); 2611 ZoneList<Expression*>* args = expr->arguments();
2612 ASSERT(args->length() == 1); 2612 DCHECK(args->length() == 1);
2613 2613
2614 VisitForAccumulatorValue(args->at(0)); 2614 VisitForAccumulatorValue(args->at(0));
2615 2615
2616 Label materialize_true, materialize_false; 2616 Label materialize_true, materialize_false;
2617 Label* if_true = NULL; 2617 Label* if_true = NULL;
2618 Label* if_false = NULL; 2618 Label* if_false = NULL;
2619 Label* fall_through = NULL; 2619 Label* fall_through = NULL;
2620 context()->PrepareTest(&materialize_true, &materialize_false, 2620 context()->PrepareTest(&materialize_true, &materialize_false,
2621 &if_true, &if_false, &fall_through); 2621 &if_true, &if_false, &fall_through);
2622 2622
2623 __ JumpIfSmi(x0, if_false); 2623 __ JumpIfSmi(x0, if_false);
2624 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true); 2624 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2625 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset)); 2625 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2626 // Undetectable objects behave like undefined when tested with typeof. 2626 // Undetectable objects behave like undefined when tested with typeof.
2627 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset)); 2627 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2628 __ Tbnz(x11, Map::kIsUndetectable, if_false); 2628 __ Tbnz(x11, Map::kIsUndetectable, if_false);
2629 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset)); 2629 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
2630 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 2630 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2631 __ B(lt, if_false); 2631 __ B(lt, if_false);
2632 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 2632 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2633 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2633 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2634 Split(le, if_true, if_false, fall_through); 2634 Split(le, if_true, if_false, fall_through);
2635 2635
2636 context()->Plug(if_true, if_false); 2636 context()->Plug(if_true, if_false);
2637 } 2637 }
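
As a rough C++ restatement of the branch structure above (the type-range constants are placeholders, not V8's real values): a value counts as an object here if it is null, or a non-undetectable heap object whose instance type lies in the non-callable spec-object range.

    #include <cassert>

    struct Value {
      bool is_smi, is_null, undetectable;
      int instance_type;
    };

    const int kFirstNonCallable = 100, kLastNonCallable = 200;  // placeholders

    bool IsObject(const Value& v) {
      if (v.is_smi) return false;
      if (v.is_null) return true;
      if (v.undetectable) return false;  // behaves like undefined for typeof
      return v.instance_type >= kFirstNonCallable &&
             v.instance_type <= kLastNonCallable;
    }

    int main() {
      assert(IsObject({false, true, false, 0}));     // null
      assert(!IsObject({true, false, false, 150}));  // smi
      assert(IsObject({false, false, false, 150}));  // plain object
      assert(!IsObject({false, false, true, 150}));  // undetectable
      return 0;
    }
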
2638 2638
2639 2639
2640 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 2640 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2641 ZoneList<Expression*>* args = expr->arguments(); 2641 ZoneList<Expression*>* args = expr->arguments();
2642 ASSERT(args->length() == 1); 2642 DCHECK(args->length() == 1);
2643 2643
2644 VisitForAccumulatorValue(args->at(0)); 2644 VisitForAccumulatorValue(args->at(0));
2645 2645
2646 Label materialize_true, materialize_false; 2646 Label materialize_true, materialize_false;
2647 Label* if_true = NULL; 2647 Label* if_true = NULL;
2648 Label* if_false = NULL; 2648 Label* if_false = NULL;
2649 Label* fall_through = NULL; 2649 Label* fall_through = NULL;
2650 context()->PrepareTest(&materialize_true, &materialize_false, 2650 context()->PrepareTest(&materialize_true, &materialize_false,
2651 &if_true, &if_false, &fall_through); 2651 &if_true, &if_false, &fall_through);
2652 2652
2653 __ JumpIfSmi(x0, if_false); 2653 __ JumpIfSmi(x0, if_false);
2654 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE); 2654 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
2655 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2655 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2656 Split(ge, if_true, if_false, fall_through); 2656 Split(ge, if_true, if_false, fall_through);
2657 2657
2658 context()->Plug(if_true, if_false); 2658 context()->Plug(if_true, if_false);
2659 } 2659 }
2660 2660
2661 2661
2662 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 2662 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2663 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject"); 2663 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
2664 ZoneList<Expression*>* args = expr->arguments(); 2664 ZoneList<Expression*>* args = expr->arguments();
2665 ASSERT(args->length() == 1); 2665 DCHECK(args->length() == 1);
2666 2666
2667 VisitForAccumulatorValue(args->at(0)); 2667 VisitForAccumulatorValue(args->at(0));
2668 2668
2669 Label materialize_true, materialize_false; 2669 Label materialize_true, materialize_false;
2670 Label* if_true = NULL; 2670 Label* if_true = NULL;
2671 Label* if_false = NULL; 2671 Label* if_false = NULL;
2672 Label* fall_through = NULL; 2672 Label* fall_through = NULL;
2673 context()->PrepareTest(&materialize_true, &materialize_false, 2673 context()->PrepareTest(&materialize_true, &materialize_false,
2674 &if_true, &if_false, &fall_through); 2674 &if_true, &if_false, &fall_through);
2675 2675
2676 __ JumpIfSmi(x0, if_false); 2676 __ JumpIfSmi(x0, if_false);
2677 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset)); 2677 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2678 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset)); 2678 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2679 __ Tst(x11, 1 << Map::kIsUndetectable); 2679 __ Tst(x11, 1 << Map::kIsUndetectable);
2680 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2680 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2681 Split(ne, if_true, if_false, fall_through); 2681 Split(ne, if_true, if_false, fall_through);
2682 2682
2683 context()->Plug(if_true, if_false); 2683 context()->Plug(if_true, if_false);
2684 } 2684 }
2685 2685
2686 2686
2687 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 2687 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2688 CallRuntime* expr) { 2688 CallRuntime* expr) {
2689 ZoneList<Expression*>* args = expr->arguments(); 2689 ZoneList<Expression*>* args = expr->arguments();
2690 ASSERT(args->length() == 1); 2690 DCHECK(args->length() == 1);
2691 VisitForAccumulatorValue(args->at(0)); 2691 VisitForAccumulatorValue(args->at(0));
2692 2692
2693 Label materialize_true, materialize_false, skip_lookup; 2693 Label materialize_true, materialize_false, skip_lookup;
2694 Label* if_true = NULL; 2694 Label* if_true = NULL;
2695 Label* if_false = NULL; 2695 Label* if_false = NULL;
2696 Label* fall_through = NULL; 2696 Label* fall_through = NULL;
2697 context()->PrepareTest(&materialize_true, &materialize_false, 2697 context()->PrepareTest(&materialize_true, &materialize_false,
2698 &if_true, &if_false, &fall_through); 2698 &if_true, &if_false, &fall_through);
2699 2699
2700 Register object = x0; 2700 Register object = x0;
(...skipping 80 matching lines...)
2781 2781
2782 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2782 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2783 Split(eq, if_true, if_false, fall_through); 2783 Split(eq, if_true, if_false, fall_through);
2784 2784
2785 context()->Plug(if_true, if_false); 2785 context()->Plug(if_true, if_false);
2786 } 2786 }
2787 2787
2788 2788
2789 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 2789 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2790 ZoneList<Expression*>* args = expr->arguments(); 2790 ZoneList<Expression*>* args = expr->arguments();
2791 ASSERT(args->length() == 1); 2791 DCHECK(args->length() == 1);
2792 2792
2793 VisitForAccumulatorValue(args->at(0)); 2793 VisitForAccumulatorValue(args->at(0));
2794 2794
2795 Label materialize_true, materialize_false; 2795 Label materialize_true, materialize_false;
2796 Label* if_true = NULL; 2796 Label* if_true = NULL;
2797 Label* if_false = NULL; 2797 Label* if_false = NULL;
2798 Label* fall_through = NULL; 2798 Label* fall_through = NULL;
2799 context()->PrepareTest(&materialize_true, &materialize_false, 2799 context()->PrepareTest(&materialize_true, &materialize_false,
2800 &if_true, &if_false, &fall_through); 2800 &if_true, &if_false, &fall_through);
2801 2801
2802 __ JumpIfSmi(x0, if_false); 2802 __ JumpIfSmi(x0, if_false);
2803 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE); 2803 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
2804 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2804 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2805 Split(eq, if_true, if_false, fall_through); 2805 Split(eq, if_true, if_false, fall_through);
2806 2806
2807 context()->Plug(if_true, if_false); 2807 context()->Plug(if_true, if_false);
2808 } 2808 }
2809 2809
2810 2810
2811 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 2811 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
2812 ZoneList<Expression*>* args = expr->arguments(); 2812 ZoneList<Expression*>* args = expr->arguments();
2813 ASSERT(args->length() == 1); 2813 DCHECK(args->length() == 1);
2814 2814
2815 VisitForAccumulatorValue(args->at(0)); 2815 VisitForAccumulatorValue(args->at(0));
2816 2816
2817 Label materialize_true, materialize_false; 2817 Label materialize_true, materialize_false;
2818 Label* if_true = NULL; 2818 Label* if_true = NULL;
2819 Label* if_false = NULL; 2819 Label* if_false = NULL;
2820 Label* fall_through = NULL; 2820 Label* fall_through = NULL;
2821 context()->PrepareTest(&materialize_true, &materialize_false, 2821 context()->PrepareTest(&materialize_true, &materialize_false,
2822 &if_true, &if_false, &fall_through); 2822 &if_true, &if_false, &fall_through);
2823 2823
2824 // Only a HeapNumber can be -0.0, so return false if we have something else. 2824 // Only a HeapNumber can be -0.0, so return false if we have something else.
2825 __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); 2825 __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
2826 2826
2827 // Test the bit pattern. 2827 // Test the bit pattern.
2828 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset)); 2828 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
2829 __ Cmp(x10, 1); // Set V on 0x8000000000000000. 2829 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
2830 2830
2831 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2831 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2832 Split(vs, if_true, if_false, fall_through); 2832 Split(vs, if_true, if_false, fall_through);
2833 2833
2834 context()->Plug(if_true, if_false); 2834 context()->Plug(if_true, if_false);
2835 } 2835 }
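
The "Set V on 0x8000000000000000" comment relies on two facts: -0.0 is the unique double whose bit pattern is 0x8000000000000000 (== INT64_MIN), and INT64_MIN is the unique 64-bit value for which subtracting 1 overflows signed arithmetic, which is exactly what Cmp(x10, 1) detects via the V flag. A small sketch (assumes a two's-complement cast and the GCC/Clang __builtin_sub_overflow intrinsic):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      double minus_zero = -0.0;
      uint64_t bits;
      std::memcpy(&bits, &minus_zero, sizeof(bits));
      assert(bits == uint64_t{1} << 63);  // 0x8000000000000000

      long long diff;
      // Cmp(x, 1) computes x - 1; only INT64_MIN overflows (sets V).
      assert(__builtin_sub_overflow((long long)bits, 1LL, &diff));

      double one = 1.0;
      std::memcpy(&bits, &one, sizeof(bits));
      assert(!__builtin_sub_overflow((long long)bits, 1LL, &diff));
      return 0;
    }
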
2836 2836
2837 2837
2838 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 2838 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2839 ZoneList<Expression*>* args = expr->arguments(); 2839 ZoneList<Expression*>* args = expr->arguments();
2840 ASSERT(args->length() == 1); 2840 DCHECK(args->length() == 1);
2841 2841
2842 VisitForAccumulatorValue(args->at(0)); 2842 VisitForAccumulatorValue(args->at(0));
2843 2843
2844 Label materialize_true, materialize_false; 2844 Label materialize_true, materialize_false;
2845 Label* if_true = NULL; 2845 Label* if_true = NULL;
2846 Label* if_false = NULL; 2846 Label* if_false = NULL;
2847 Label* fall_through = NULL; 2847 Label* fall_through = NULL;
2848 context()->PrepareTest(&materialize_true, &materialize_false, 2848 context()->PrepareTest(&materialize_true, &materialize_false,
2849 &if_true, &if_false, &fall_through); 2849 &if_true, &if_false, &fall_through);
2850 2850
2851 __ JumpIfSmi(x0, if_false); 2851 __ JumpIfSmi(x0, if_false);
2852 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE); 2852 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
2853 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2853 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2854 Split(eq, if_true, if_false, fall_through); 2854 Split(eq, if_true, if_false, fall_through);
2855 2855
2856 context()->Plug(if_true, if_false); 2856 context()->Plug(if_true, if_false);
2857 } 2857 }
2858 2858
2859 2859
2860 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 2860 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2861 ZoneList<Expression*>* args = expr->arguments(); 2861 ZoneList<Expression*>* args = expr->arguments();
2862 ASSERT(args->length() == 1); 2862 DCHECK(args->length() == 1);
2863 2863
2864 VisitForAccumulatorValue(args->at(0)); 2864 VisitForAccumulatorValue(args->at(0));
2865 2865
2866 Label materialize_true, materialize_false; 2866 Label materialize_true, materialize_false;
2867 Label* if_true = NULL; 2867 Label* if_true = NULL;
2868 Label* if_false = NULL; 2868 Label* if_false = NULL;
2869 Label* fall_through = NULL; 2869 Label* fall_through = NULL;
2870 context()->PrepareTest(&materialize_true, &materialize_false, 2870 context()->PrepareTest(&materialize_true, &materialize_false,
2871 &if_true, &if_false, &fall_through); 2871 &if_true, &if_false, &fall_through);
2872 2872
2873 __ JumpIfSmi(x0, if_false); 2873 __ JumpIfSmi(x0, if_false);
2874 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE); 2874 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
2875 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2875 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2876 Split(eq, if_true, if_false, fall_through); 2876 Split(eq, if_true, if_false, fall_through);
2877 2877
2878 context()->Plug(if_true, if_false); 2878 context()->Plug(if_true, if_false);
2879 } 2879 }
2880 2880
2881 2881
2882 2882
2883 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 2883 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2884 ASSERT(expr->arguments()->length() == 0); 2884 DCHECK(expr->arguments()->length() == 0);
2885 2885
2886 Label materialize_true, materialize_false; 2886 Label materialize_true, materialize_false;
2887 Label* if_true = NULL; 2887 Label* if_true = NULL;
2888 Label* if_false = NULL; 2888 Label* if_false = NULL;
2889 Label* fall_through = NULL; 2889 Label* fall_through = NULL;
2890 context()->PrepareTest(&materialize_true, &materialize_false, 2890 context()->PrepareTest(&materialize_true, &materialize_false,
2891 &if_true, &if_false, &fall_through); 2891 &if_true, &if_false, &fall_through);
2892 2892
2893 // Get the frame pointer for the calling frame. 2893 // Get the frame pointer for the calling frame.
2894 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2894 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
(...skipping 11 matching lines...)
2906 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT)); 2906 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
2907 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2907 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2908 Split(eq, if_true, if_false, fall_through); 2908 Split(eq, if_true, if_false, fall_through);
2909 2909
2910 context()->Plug(if_true, if_false); 2910 context()->Plug(if_true, if_false);
2911 } 2911 }
2912 2912
2913 2913
2914 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 2914 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2915 ZoneList<Expression*>* args = expr->arguments(); 2915 ZoneList<Expression*>* args = expr->arguments();
2916 ASSERT(args->length() == 2); 2916 DCHECK(args->length() == 2);
2917 2917
2918 // Load the two objects into registers and perform the comparison. 2918 // Load the two objects into registers and perform the comparison.
2919 VisitForStackValue(args->at(0)); 2919 VisitForStackValue(args->at(0));
2920 VisitForAccumulatorValue(args->at(1)); 2920 VisitForAccumulatorValue(args->at(1));
2921 2921
2922 Label materialize_true, materialize_false; 2922 Label materialize_true, materialize_false;
2923 Label* if_true = NULL; 2923 Label* if_true = NULL;
2924 Label* if_false = NULL; 2924 Label* if_false = NULL;
2925 Label* fall_through = NULL; 2925 Label* fall_through = NULL;
2926 context()->PrepareTest(&materialize_true, &materialize_false, 2926 context()->PrepareTest(&materialize_true, &materialize_false,
2927 &if_true, &if_false, &fall_through); 2927 &if_true, &if_false, &fall_through);
2928 2928
2929 __ Pop(x1); 2929 __ Pop(x1);
2930 __ Cmp(x0, x1); 2930 __ Cmp(x0, x1);
2931 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2931 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2932 Split(eq, if_true, if_false, fall_through); 2932 Split(eq, if_true, if_false, fall_through);
2933 2933
2934 context()->Plug(if_true, if_false); 2934 context()->Plug(if_true, if_false);
2935 } 2935 }
2936 2936
2937 2937
2938 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 2938 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2939 ZoneList<Expression*>* args = expr->arguments(); 2939 ZoneList<Expression*>* args = expr->arguments();
2940 ASSERT(args->length() == 1); 2940 DCHECK(args->length() == 1);
2941 2941
2942 // ArgumentsAccessStub expects the key in x1. 2942 // ArgumentsAccessStub expects the key in x1.
2943 VisitForAccumulatorValue(args->at(0)); 2943 VisitForAccumulatorValue(args->at(0));
2944 __ Mov(x1, x0); 2944 __ Mov(x1, x0);
2945 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters())); 2945 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
2946 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); 2946 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
2947 __ CallStub(&stub); 2947 __ CallStub(&stub);
2948 context()->Plug(x0); 2948 context()->Plug(x0);
2949 } 2949 }
2950 2950
2951 2951
2952 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 2952 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2953 ASSERT(expr->arguments()->length() == 0); 2953 DCHECK(expr->arguments()->length() == 0);
2954 Label exit; 2954 Label exit;
2955 // Get the number of formal parameters. 2955 // Get the number of formal parameters.
2956 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters())); 2956 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
2957 2957
2958 // Check if the calling frame is an arguments adaptor frame. 2958 // Check if the calling frame is an arguments adaptor frame.
2959 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2959 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2960 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset)); 2960 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
2961 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2961 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2962 __ B(ne, &exit); 2962 __ B(ne, &exit);
2963 2963
2964 // Arguments adaptor case: Read the arguments length from the 2964 // Arguments adaptor case: Read the arguments length from the
2965 // adaptor frame. 2965 // adaptor frame.
2966 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2966 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
2967 2967
2968 __ Bind(&exit); 2968 __ Bind(&exit);
2969 context()->Plug(x0); 2969 context()->Plug(x0);
2970 } 2970 }
2971 2971
2972 2972
2973 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 2973 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2974 ASM_LOCATION("FullCodeGenerator::EmitClassOf"); 2974 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
2975 ZoneList<Expression*>* args = expr->arguments(); 2975 ZoneList<Expression*>* args = expr->arguments();
2976 ASSERT(args->length() == 1); 2976 DCHECK(args->length() == 1);
2977 Label done, null, function, non_function_constructor; 2977 Label done, null, function, non_function_constructor;
2978 2978
2979 VisitForAccumulatorValue(args->at(0)); 2979 VisitForAccumulatorValue(args->at(0));
2980 2980
2981 // If the object is a smi, we return null. 2981 // If the object is a smi, we return null.
2982 __ JumpIfSmi(x0, &null); 2982 __ JumpIfSmi(x0, &null);
2983 2983
2984 // Check that the object is a JS object but take special care of JS 2984 // Check that the object is a JS object but take special care of JS
2985 // functions to make sure they have 'Function' as their class. 2985 // functions to make sure they have 'Function' as their class.
2986 // Assume that there are only two callable types, and one of them is at 2986 // Assume that there are only two callable types, and one of them is at
(...skipping 44 matching lines...)
3031 __ Bind(&done); 3031 __ Bind(&done);
3032 3032
3033 context()->Plug(x0); 3033 context()->Plug(x0);
3034 } 3034 }
3035 3035
3036 3036
3037 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3037 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3038 // Load the arguments on the stack and call the stub. 3038 // Load the arguments on the stack and call the stub.
3039 SubStringStub stub(isolate()); 3039 SubStringStub stub(isolate());
3040 ZoneList<Expression*>* args = expr->arguments(); 3040 ZoneList<Expression*>* args = expr->arguments();
3041 ASSERT(args->length() == 3); 3041 DCHECK(args->length() == 3);
3042 VisitForStackValue(args->at(0)); 3042 VisitForStackValue(args->at(0));
3043 VisitForStackValue(args->at(1)); 3043 VisitForStackValue(args->at(1));
3044 VisitForStackValue(args->at(2)); 3044 VisitForStackValue(args->at(2));
3045 __ CallStub(&stub); 3045 __ CallStub(&stub);
3046 context()->Plug(x0); 3046 context()->Plug(x0);
3047 } 3047 }
3048 3048
3049 3049
3050 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3050 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3051 // Load the arguments on the stack and call the stub. 3051 // Load the arguments on the stack and call the stub.
3052 RegExpExecStub stub(isolate()); 3052 RegExpExecStub stub(isolate());
3053 ZoneList<Expression*>* args = expr->arguments(); 3053 ZoneList<Expression*>* args = expr->arguments();
3054 ASSERT(args->length() == 4); 3054 DCHECK(args->length() == 4);
3055 VisitForStackValue(args->at(0)); 3055 VisitForStackValue(args->at(0));
3056 VisitForStackValue(args->at(1)); 3056 VisitForStackValue(args->at(1));
3057 VisitForStackValue(args->at(2)); 3057 VisitForStackValue(args->at(2));
3058 VisitForStackValue(args->at(3)); 3058 VisitForStackValue(args->at(3));
3059 __ CallStub(&stub); 3059 __ CallStub(&stub);
3060 context()->Plug(x0); 3060 context()->Plug(x0);
3061 } 3061 }
3062 3062
3063 3063
3064 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3064 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3065 ASM_LOCATION("FullCodeGenerator::EmitValueOf"); 3065 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3066 ZoneList<Expression*>* args = expr->arguments(); 3066 ZoneList<Expression*>* args = expr->arguments();
3067 ASSERT(args->length() == 1); 3067 DCHECK(args->length() == 1);
3068 VisitForAccumulatorValue(args->at(0)); // Load the object. 3068 VisitForAccumulatorValue(args->at(0)); // Load the object.
3069 3069
3070 Label done; 3070 Label done;
3071 // If the object is a smi return the object. 3071 // If the object is a smi return the object.
3072 __ JumpIfSmi(x0, &done); 3072 __ JumpIfSmi(x0, &done);
3073 // If the object is not a value type, return the object. 3073 // If the object is not a value type, return the object.
3074 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done); 3074 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3075 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset)); 3075 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3076 3076
3077 __ Bind(&done); 3077 __ Bind(&done);
3078 context()->Plug(x0); 3078 context()->Plug(x0);
3079 } 3079 }
3080 3080
3081 3081
3082 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3082 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3083 ZoneList<Expression*>* args = expr->arguments(); 3083 ZoneList<Expression*>* args = expr->arguments();
3084 ASSERT(args->length() == 2); 3084 DCHECK(args->length() == 2);
3085 ASSERT_NE(NULL, args->at(1)->AsLiteral()); 3085 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3086 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3086 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3087 3087
3088 VisitForAccumulatorValue(args->at(0)); // Load the object. 3088 VisitForAccumulatorValue(args->at(0)); // Load the object.
3089 3089
3090 Label runtime, done, not_date_object; 3090 Label runtime, done, not_date_object;
3091 Register object = x0; 3091 Register object = x0;
3092 Register result = x0; 3092 Register result = x0;
3093 Register stamp_addr = x10; 3093 Register stamp_addr = x10;
3094 Register stamp_cache = x11; 3094 Register stamp_cache = x11;
3095 3095
(...skipping 24 matching lines...)
3120 3120
3121 __ Bind(&not_date_object); 3121 __ Bind(&not_date_object);
3122 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3122 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3123 __ Bind(&done); 3123 __ Bind(&done);
3124 context()->Plug(x0); 3124 context()->Plug(x0);
3125 } 3125 }
3126 3126
3127 3127
3128 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3128 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3129 ZoneList<Expression*>* args = expr->arguments(); 3129 ZoneList<Expression*>* args = expr->arguments();
3130 ASSERT_EQ(3, args->length()); 3130 DCHECK_EQ(3, args->length());
3131 3131
3132 Register string = x0; 3132 Register string = x0;
3133 Register index = x1; 3133 Register index = x1;
3134 Register value = x2; 3134 Register value = x2;
3135 Register scratch = x10; 3135 Register scratch = x10;
3136 3136
3137 VisitForStackValue(args->at(1)); // index 3137 VisitForStackValue(args->at(1)); // index
3138 VisitForStackValue(args->at(2)); // value 3138 VisitForStackValue(args->at(2)); // value
3139 VisitForAccumulatorValue(args->at(0)); // string 3139 VisitForAccumulatorValue(args->at(0)); // string
3140 __ Pop(value, index); 3140 __ Pop(value, index);
3141 3141
3142 if (FLAG_debug_code) { 3142 if (FLAG_debug_code) {
3143 __ AssertSmi(value, kNonSmiValue); 3143 __ AssertSmi(value, kNonSmiValue);
3144 __ AssertSmi(index, kNonSmiIndex); 3144 __ AssertSmi(index, kNonSmiIndex);
3145 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 3145 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3146 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch, 3146 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3147 one_byte_seq_type); 3147 one_byte_seq_type);
3148 } 3148 }
3149 3149
3150 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); 3150 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3151 __ SmiUntag(value); 3151 __ SmiUntag(value);
3152 __ SmiUntag(index); 3152 __ SmiUntag(index);
3153 __ Strb(value, MemOperand(scratch, index)); 3153 __ Strb(value, MemOperand(scratch, index));
3154 context()->Plug(string); 3154 context()->Plug(string);
3155 } 3155 }
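
The address arithmetic above (string + SeqOneByteString::kHeaderSize - kHeapObjectTag, then an untagged index) is the usual pattern for poking a byte into a sequential one-byte string. A toy model, assuming kHeapObjectTag == 1 and a placeholder header size, just to show why the tag is subtracted back out:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uintptr_t kHeapObjectTag = 1;  // assumption: tagged pointer = +1
      const uintptr_t kHeaderSize = 16;    // placeholder, not V8's real layout
      unsigned char object[16 + 8] = {};   // "header" plus 8 payload bytes
      uintptr_t tagged = (uintptr_t)object + kHeapObjectTag;
      // scratch = string + kHeaderSize - kHeapObjectTag (untagged payload base)
      unsigned char* payload =
          (unsigned char*)(tagged + kHeaderSize - kHeapObjectTag);
      int index = 3;                       // already SmiUntag'ed
      payload[index] = 'a';                // the Strb above
      assert(object[kHeaderSize + index] == 'a');
      return 0;
    }
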
3156 3156
3157 3157
3158 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3158 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3159 ZoneList<Expression*>* args = expr->arguments(); 3159 ZoneList<Expression*>* args = expr->arguments();
3160 ASSERT_EQ(3, args->length()); 3160 DCHECK_EQ(3, args->length());
3161 3161
3162 Register string = x0; 3162 Register string = x0;
3163 Register index = x1; 3163 Register index = x1;
3164 Register value = x2; 3164 Register value = x2;
3165 Register scratch = x10; 3165 Register scratch = x10;
3166 3166
3167 VisitForStackValue(args->at(1)); // index 3167 VisitForStackValue(args->at(1)); // index
3168 VisitForStackValue(args->at(2)); // value 3168 VisitForStackValue(args->at(2)); // value
3169 VisitForAccumulatorValue(args->at(0)); // string 3169 VisitForAccumulatorValue(args->at(0)); // string
3170 __ Pop(value, index); 3170 __ Pop(value, index);
(...skipping 10 matching lines...)
3181 __ SmiUntag(value); 3181 __ SmiUntag(value);
3182 __ SmiUntag(index); 3182 __ SmiUntag(index);
3183 __ Strh(value, MemOperand(scratch, index, LSL, 1)); 3183 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3184 context()->Plug(string); 3184 context()->Plug(string);
3185 } 3185 }
3186 3186
3187 3187
3188 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3188 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3189 // Load the arguments on the stack and call the MathPow stub. 3189 // Load the arguments on the stack and call the MathPow stub.
3190 ZoneList<Expression*>* args = expr->arguments(); 3190 ZoneList<Expression*>* args = expr->arguments();
3191 ASSERT(args->length() == 2); 3191 DCHECK(args->length() == 2);
3192 VisitForStackValue(args->at(0)); 3192 VisitForStackValue(args->at(0));
3193 VisitForStackValue(args->at(1)); 3193 VisitForStackValue(args->at(1));
3194 MathPowStub stub(isolate(), MathPowStub::ON_STACK); 3194 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3195 __ CallStub(&stub); 3195 __ CallStub(&stub);
3196 context()->Plug(x0); 3196 context()->Plug(x0);
3197 } 3197 }
3198 3198
3199 3199
3200 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3200 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3201 ZoneList<Expression*>* args = expr->arguments(); 3201 ZoneList<Expression*>* args = expr->arguments();
3202 ASSERT(args->length() == 2); 3202 DCHECK(args->length() == 2);
3203 VisitForStackValue(args->at(0)); // Load the object. 3203 VisitForStackValue(args->at(0)); // Load the object.
3204 VisitForAccumulatorValue(args->at(1)); // Load the value. 3204 VisitForAccumulatorValue(args->at(1)); // Load the value.
3205 __ Pop(x1); 3205 __ Pop(x1);
3206 // x0 = value. 3206 // x0 = value.
3207 // x1 = object. 3207 // x1 = object.
3208 3208
3209 Label done; 3209 Label done;
3210 // If the object is a smi, return the value. 3210 // If the object is a smi, return the value.
3211 __ JumpIfSmi(x1, &done); 3211 __ JumpIfSmi(x1, &done);
3212 3212
3213 // If the object is not a value type, return the value. 3213 // If the object is not a value type, return the value.
3214 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done); 3214 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3215 3215
3216 // Store the value. 3216 // Store the value.
3217 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset)); 3217 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3218 // Update the write barrier. Save the value as it will be 3218 // Update the write barrier. Save the value as it will be
3219 // overwritten by the write barrier code and is needed afterward. 3219 // overwritten by the write barrier code and is needed afterward.
3220 __ Mov(x10, x0); 3220 __ Mov(x10, x0);
3221 __ RecordWriteField( 3221 __ RecordWriteField(
3222 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs); 3222 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3223 3223
3224 __ Bind(&done); 3224 __ Bind(&done);
3225 context()->Plug(x0); 3225 context()->Plug(x0);
3226 } 3226 }
3227 3227
3228 3228
3229 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3229 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3230 ZoneList<Expression*>* args = expr->arguments(); 3230 ZoneList<Expression*>* args = expr->arguments();
3231 ASSERT_EQ(args->length(), 1); 3231 DCHECK_EQ(args->length(), 1);
3232 3232
3233 // Load the argument into x0 and call the stub. 3233 // Load the argument into x0 and call the stub.
3234 VisitForAccumulatorValue(args->at(0)); 3234 VisitForAccumulatorValue(args->at(0));
3235 3235
3236 NumberToStringStub stub(isolate()); 3236 NumberToStringStub stub(isolate());
3237 __ CallStub(&stub); 3237 __ CallStub(&stub);
3238 context()->Plug(x0); 3238 context()->Plug(x0);
3239 } 3239 }
3240 3240
3241 3241
3242 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3242 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3243 ZoneList<Expression*>* args = expr->arguments(); 3243 ZoneList<Expression*>* args = expr->arguments();
3244 ASSERT(args->length() == 1); 3244 DCHECK(args->length() == 1);
3245 3245
3246 VisitForAccumulatorValue(args->at(0)); 3246 VisitForAccumulatorValue(args->at(0));
3247 3247
3248 Label done; 3248 Label done;
3249 Register code = x0; 3249 Register code = x0;
3250 Register result = x1; 3250 Register result = x1;
3251 3251
3252 StringCharFromCodeGenerator generator(code, result); 3252 StringCharFromCodeGenerator generator(code, result);
3253 generator.GenerateFast(masm_); 3253 generator.GenerateFast(masm_);
3254 __ B(&done); 3254 __ B(&done);
3255 3255
3256 NopRuntimeCallHelper call_helper; 3256 NopRuntimeCallHelper call_helper;
3257 generator.GenerateSlow(masm_, call_helper); 3257 generator.GenerateSlow(masm_, call_helper);
3258 3258
3259 __ Bind(&done); 3259 __ Bind(&done);
3260 context()->Plug(result); 3260 context()->Plug(result);
3261 } 3261 }
3262 3262
3263 3263
3264 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3264 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3265 ZoneList<Expression*>* args = expr->arguments(); 3265 ZoneList<Expression*>* args = expr->arguments();
3266 ASSERT(args->length() == 2); 3266 DCHECK(args->length() == 2);
3267 3267
3268 VisitForStackValue(args->at(0)); 3268 VisitForStackValue(args->at(0));
3269 VisitForAccumulatorValue(args->at(1)); 3269 VisitForAccumulatorValue(args->at(1));
3270 3270
3271 Register object = x1; 3271 Register object = x1;
3272 Register index = x0; 3272 Register index = x0;
3273 Register result = x3; 3273 Register result = x3;
3274 3274
3275 __ Pop(object); 3275 __ Pop(object);
3276 3276
(...skipping 24 matching lines...)
3301 NopRuntimeCallHelper call_helper; 3301 NopRuntimeCallHelper call_helper;
3302 generator.GenerateSlow(masm_, call_helper); 3302 generator.GenerateSlow(masm_, call_helper);
3303 3303
3304 __ Bind(&done); 3304 __ Bind(&done);
3305 context()->Plug(result); 3305 context()->Plug(result);
3306 } 3306 }
3307 3307
3308 3308
3309 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3309 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3310 ZoneList<Expression*>* args = expr->arguments(); 3310 ZoneList<Expression*>* args = expr->arguments();
3311 ASSERT(args->length() == 2); 3311 DCHECK(args->length() == 2);
3312 3312
3313 VisitForStackValue(args->at(0)); 3313 VisitForStackValue(args->at(0));
3314 VisitForAccumulatorValue(args->at(1)); 3314 VisitForAccumulatorValue(args->at(1));
3315 3315
3316 Register object = x1; 3316 Register object = x1;
3317 Register index = x0; 3317 Register index = x0;
3318 Register result = x0; 3318 Register result = x0;
3319 3319
3320 __ Pop(object); 3320 __ Pop(object);
3321 3321
(...skipping 26 matching lines...)
3348 generator.GenerateSlow(masm_, call_helper); 3348 generator.GenerateSlow(masm_, call_helper);
3349 3349
3350 __ Bind(&done); 3350 __ Bind(&done);
3351 context()->Plug(result); 3351 context()->Plug(result);
3352 } 3352 }
3353 3353
3354 3354
3355 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3355 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3356 ASM_LOCATION("FullCodeGenerator::EmitStringAdd"); 3356 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3357 ZoneList<Expression*>* args = expr->arguments(); 3357 ZoneList<Expression*>* args = expr->arguments();
3358 ASSERT_EQ(2, args->length()); 3358 DCHECK_EQ(2, args->length());
3359 3359
3360 VisitForStackValue(args->at(0)); 3360 VisitForStackValue(args->at(0));
3361 VisitForAccumulatorValue(args->at(1)); 3361 VisitForAccumulatorValue(args->at(1));
3362 3362
3363 __ Pop(x1); 3363 __ Pop(x1);
3364 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); 3364 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3365 __ CallStub(&stub); 3365 __ CallStub(&stub);
3366 3366
3367 context()->Plug(x0); 3367 context()->Plug(x0);
3368 } 3368 }
3369 3369
3370 3370
3371 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3371 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3372 ZoneList<Expression*>* args = expr->arguments(); 3372 ZoneList<Expression*>* args = expr->arguments();
3373 ASSERT_EQ(2, args->length()); 3373 DCHECK_EQ(2, args->length());
3374 VisitForStackValue(args->at(0)); 3374 VisitForStackValue(args->at(0));
3375 VisitForStackValue(args->at(1)); 3375 VisitForStackValue(args->at(1));
3376 3376
3377 StringCompareStub stub(isolate()); 3377 StringCompareStub stub(isolate());
3378 __ CallStub(&stub); 3378 __ CallStub(&stub);
3379 context()->Plug(x0); 3379 context()->Plug(x0);
3380 } 3380 }
3381 3381
3382 3382
3383 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3383 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3384 ASM_LOCATION("FullCodeGenerator::EmitCallFunction"); 3384 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3385 ZoneList<Expression*>* args = expr->arguments(); 3385 ZoneList<Expression*>* args = expr->arguments();
3386 ASSERT(args->length() >= 2); 3386 DCHECK(args->length() >= 2);
3387 3387
3388 int arg_count = args->length() - 2; // 2 ~ receiver and function. 3388 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3389 for (int i = 0; i < arg_count + 1; i++) { 3389 for (int i = 0; i < arg_count + 1; i++) {
3390 VisitForStackValue(args->at(i)); 3390 VisitForStackValue(args->at(i));
3391 } 3391 }
3392 VisitForAccumulatorValue(args->last()); // Function. 3392 VisitForAccumulatorValue(args->last()); // Function.
3393 3393
3394 Label runtime, done; 3394 Label runtime, done;
3395 // Check for non-function argument (including proxy). 3395 // Check for non-function argument (including proxy).
3396 __ JumpIfSmi(x0, &runtime); 3396 __ JumpIfSmi(x0, &runtime);
(...skipping 11 matching lines...)
3408 __ CallRuntime(Runtime::kCall, args->length()); 3408 __ CallRuntime(Runtime::kCall, args->length());
3409 __ Bind(&done); 3409 __ Bind(&done);
3410 3410
3411 context()->Plug(x0); 3411 context()->Plug(x0);
3412 } 3412 }
3413 3413
3414 3414
3415 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3415 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3416 RegExpConstructResultStub stub(isolate()); 3416 RegExpConstructResultStub stub(isolate());
3417 ZoneList<Expression*>* args = expr->arguments(); 3417 ZoneList<Expression*>* args = expr->arguments();
3418 ASSERT(args->length() == 3); 3418 DCHECK(args->length() == 3);
3419 VisitForStackValue(args->at(0)); 3419 VisitForStackValue(args->at(0));
3420 VisitForStackValue(args->at(1)); 3420 VisitForStackValue(args->at(1));
3421 VisitForAccumulatorValue(args->at(2)); 3421 VisitForAccumulatorValue(args->at(2));
3422 __ Pop(x1, x2); 3422 __ Pop(x1, x2);
3423 __ CallStub(&stub); 3423 __ CallStub(&stub);
3424 context()->Plug(x0); 3424 context()->Plug(x0);
3425 } 3425 }
3426 3426
3427 3427
3428 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 3428 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3429 ZoneList<Expression*>* args = expr->arguments(); 3429 ZoneList<Expression*>* args = expr->arguments();
3430 ASSERT_EQ(2, args->length()); 3430 DCHECK_EQ(2, args->length());
3431 ASSERT_NE(NULL, args->at(0)->AsLiteral()); 3431 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3432 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); 3432 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3433 3433
3434 Handle<FixedArray> jsfunction_result_caches( 3434 Handle<FixedArray> jsfunction_result_caches(
3435 isolate()->native_context()->jsfunction_result_caches()); 3435 isolate()->native_context()->jsfunction_result_caches());
3436 if (jsfunction_result_caches->length() <= cache_id) { 3436 if (jsfunction_result_caches->length() <= cache_id) {
3437 __ Abort(kAttemptToUseUndefinedCache); 3437 __ Abort(kAttemptToUseUndefinedCache);
3438 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); 3438 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3439 context()->Plug(x0); 3439 context()->Plug(x0);
3440 return; 3440 return;
3441 } 3441 }
(...skipping 46 matching lines...)
3488 __ Tst(x10, String::kContainsCachedArrayIndexMask); 3488 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3489 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3489 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3490 Split(eq, if_true, if_false, fall_through); 3490 Split(eq, if_true, if_false, fall_through);
3491 3491
3492 context()->Plug(if_true, if_false); 3492 context()->Plug(if_true, if_false);
3493 } 3493 }
3494 3494
3495 3495
3496 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3496 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3497 ZoneList<Expression*>* args = expr->arguments(); 3497 ZoneList<Expression*>* args = expr->arguments();
3498 ASSERT(args->length() == 1); 3498 DCHECK(args->length() == 1);
3499 VisitForAccumulatorValue(args->at(0)); 3499 VisitForAccumulatorValue(args->at(0));
3500 3500
3501 __ AssertString(x0); 3501 __ AssertString(x0);
3502 3502
3503 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset)); 3503 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3504 __ IndexFromHash(x10, x0); 3504 __ IndexFromHash(x10, x0);
3505 3505
3506 context()->Plug(x0); 3506 context()->Plug(x0);
3507 } 3507 }
3508 3508
3509 3509
3510 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { 3510 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3511 ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin"); 3511 ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin");
3512 3512
3513 ZoneList<Expression*>* args = expr->arguments(); 3513 ZoneList<Expression*>* args = expr->arguments();
3514 ASSERT(args->length() == 2); 3514 DCHECK(args->length() == 2);
3515 VisitForStackValue(args->at(1)); 3515 VisitForStackValue(args->at(1));
3516 VisitForAccumulatorValue(args->at(0)); 3516 VisitForAccumulatorValue(args->at(0));
3517 3517
3518 Register array = x0; 3518 Register array = x0;
3519 Register result = x0; 3519 Register result = x0;
3520 Register elements = x1; 3520 Register elements = x1;
3521 Register element = x2; 3521 Register element = x2;
3522 Register separator = x3; 3522 Register separator = x3;
3523 Register array_length = x4; 3523 Register array_length = x4;
3524 Register result_pos = x5; 3524 Register result_pos = x5;
(...skipping 192 matching lines...)
3717 3717
3718 __ Bind(&bailout); 3718 __ Bind(&bailout);
3719 // Returning undefined will force slower code to handle it. 3719 // Returning undefined will force slower code to handle it.
3720 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 3720 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3721 __ Bind(&done); 3721 __ Bind(&done);
3722 context()->Plug(result); 3722 context()->Plug(result);
3723 } 3723 }
3724 3724
3725 3725
3726 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 3726 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3727 ASSERT(expr->arguments()->length() == 0); 3727 DCHECK(expr->arguments()->length() == 0);
3728 ExternalReference debug_is_active = 3728 ExternalReference debug_is_active =
3729 ExternalReference::debug_is_active_address(isolate()); 3729 ExternalReference::debug_is_active_address(isolate());
3730 __ Mov(x10, debug_is_active); 3730 __ Mov(x10, debug_is_active);
3731 __ Ldrb(x0, MemOperand(x10)); 3731 __ Ldrb(x0, MemOperand(x10));
3732 __ SmiTag(x0); 3732 __ SmiTag(x0);
3733 context()->Plug(x0); 3733 context()->Plug(x0);
3734 } 3734 }
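
SmiTag on arm64 is a left shift by kSmiShift (32, under the encoding assumed earlier), so the 0/1 byte loaded from the debug_is_active address comes back to JavaScript as the smi 0 or 1. Sketch:

    #include <cassert>
    #include <cstdint>

    int main() {
      const int kSmiShift = 32;                      // arm64 assumption
      uint8_t debug_is_active = 1;                   // what Ldrb would read
      uint64_t tagged = uint64_t{debug_is_active} << kSmiShift;  // SmiTag
      assert((tagged >> kSmiShift) == 1);            // payload preserved
      assert((tagged & 1) == 0);                     // smi tag bit clear
      return 0;
    }
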
3735 3735
3736 3736
3737 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3737 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
(...skipping 69 matching lines...)
3807 VisitForStackValue(property->obj()); 3807 VisitForStackValue(property->obj());
3808 VisitForStackValue(property->key()); 3808 VisitForStackValue(property->key());
3809 __ Mov(x10, Smi::FromInt(strict_mode())); 3809 __ Mov(x10, Smi::FromInt(strict_mode()));
3810 __ Push(x10); 3810 __ Push(x10);
3811 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3811 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3812 context()->Plug(x0); 3812 context()->Plug(x0);
3813 } else if (proxy != NULL) { 3813 } else if (proxy != NULL) {
3814 Variable* var = proxy->var(); 3814 Variable* var = proxy->var();
3815 // Delete of an unqualified identifier is disallowed in strict mode 3815 // Delete of an unqualified identifier is disallowed in strict mode
3816 // but "delete this" is allowed. 3816 // but "delete this" is allowed.
3817 ASSERT(strict_mode() == SLOPPY || var->is_this()); 3817 DCHECK(strict_mode() == SLOPPY || var->is_this());
3818 if (var->IsUnallocated()) { 3818 if (var->IsUnallocated()) {
3819 __ Ldr(x12, GlobalObjectMemOperand()); 3819 __ Ldr(x12, GlobalObjectMemOperand());
3820 __ Mov(x11, Operand(var->name())); 3820 __ Mov(x11, Operand(var->name()));
3821 __ Mov(x10, Smi::FromInt(SLOPPY)); 3821 __ Mov(x10, Smi::FromInt(SLOPPY));
3822 __ Push(x12, x11, x10); 3822 __ Push(x12, x11, x10);
3823 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3823 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3824 context()->Plug(x0); 3824 context()->Plug(x0);
3825 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 3825 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3826 // Result of deleting non-global, non-dynamic variables is false. 3826 // Result of deleting non-global, non-dynamic variables is false.
3827 // The subexpression does not have side effects. 3827 // The subexpression does not have side effects.
(...skipping 29 matching lines...)
3857 VisitForEffect(expr->expression()); 3857 VisitForEffect(expr->expression());
3858 } else if (context()->IsTest()) { 3858 } else if (context()->IsTest()) {
3859 const TestContext* test = TestContext::cast(context()); 3859 const TestContext* test = TestContext::cast(context());
3860 // The labels are swapped for the recursive call. 3860 // The labels are swapped for the recursive call.
3861 VisitForControl(expr->expression(), 3861 VisitForControl(expr->expression(),
3862 test->false_label(), 3862 test->false_label(),
3863 test->true_label(), 3863 test->true_label(),
3864 test->fall_through()); 3864 test->fall_through());
3865 context()->Plug(test->true_label(), test->false_label()); 3865 context()->Plug(test->true_label(), test->false_label());
3866 } else { 3866 } else {
3867 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue()); 3867 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
3868 // TODO(jbramley): This could be much more efficient using (for 3868 // TODO(jbramley): This could be much more efficient using (for
3869 // example) the CSEL instruction. 3869 // example) the CSEL instruction.
3870 Label materialize_true, materialize_false, done; 3870 Label materialize_true, materialize_false, done;
3871 VisitForControl(expr->expression(), 3871 VisitForControl(expr->expression(),
3872 &materialize_false, 3872 &materialize_false,
3873 &materialize_true, 3873 &materialize_true,
3874 &materialize_true); 3874 &materialize_true);
3875 3875
3876 __ Bind(&materialize_true); 3876 __ Bind(&materialize_true);
3877 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 3877 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
(...skipping 22 matching lines...)
3900 context()->Plug(x0); 3900 context()->Plug(x0);
3901 break; 3901 break;
3902 } 3902 }
3903 default: 3903 default:
3904 UNREACHABLE(); 3904 UNREACHABLE();
3905 } 3905 }
3906 } 3906 }
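On the CSEL TODO above: if the condition could be arranged to set the processor flags instead of branching to the materialize labels, the true/false materialization could be branch-free. A hedged sketch against the arm64 MacroAssembler (the register choices, the condition `cond`, and the assumption that the flags are already set are all illustrative, not part of this patch):

  // Assumes the flags already reflect `cond` and that x10/x11 are free
  // scratch registers at this point -- both are assumptions for the sketch.
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ LoadRoot(x11, Heap::kFalseValueRootIndex);
  __ Csel(x0, x10, x11, cond);  // x0 = cond ? true_value : false_value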
3907 3907
3908 3908
3909 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 3909 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3910 ASSERT(expr->expression()->IsValidReferenceExpression()); 3910 DCHECK(expr->expression()->IsValidReferenceExpression());
3911 3911
3912 Comment cmnt(masm_, "[ CountOperation"); 3912 Comment cmnt(masm_, "[ CountOperation");
3913 SetSourcePosition(expr->position()); 3913 SetSourcePosition(expr->position());
3914 3914
3915 // Expression can only be a property, a global or a (parameter or local) 3915 // Expression can only be a property, a global or a (parameter or local)
3916 // slot. 3916 // slot.
3917 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 3917 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3918 LhsKind assign_type = VARIABLE; 3918 LhsKind assign_type = VARIABLE;
3919 Property* prop = expr->expression()->AsProperty(); 3919 Property* prop = expr->expression()->AsProperty();
3920 // In case of a property we use the uninitialized expression context 3920 // In case of a property we use the uninitialized expression context
3921 // of the key to detect a named property. 3921 // of the key to detect a named property.
3922 if (prop != NULL) { 3922 if (prop != NULL) {
3923 assign_type = 3923 assign_type =
3924 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY; 3924 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3925 } 3925 }
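Concretely: a count operation on a plain variable such as x++ stays VARIABLE, o.p++ is detected as NAMED_PROPERTY (the key "p" is a property name), and o[i]++ as KEYED_PROPERTY.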
3926 3926
3927 // Evaluate expression and get value. 3927 // Evaluate expression and get value.
3928 if (assign_type == VARIABLE) { 3928 if (assign_type == VARIABLE) {
3929 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); 3929 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3930 AccumulatorValueContext context(this); 3930 AccumulatorValueContext context(this);
3931 EmitVariableLoad(expr->expression()->AsVariableProxy()); 3931 EmitVariableLoad(expr->expression()->AsVariableProxy());
3932 } else { 3932 } else {
3933 // Reserve space for result of postfix operation. 3933 // Reserve space for result of postfix operation.
3934 if (expr->is_postfix() && !context()->IsEffect()) { 3934 if (expr->is_postfix() && !context()->IsEffect()) {
3935 __ Push(xzr); 3935 __ Push(xzr);
3936 } 3936 }
3937 if (assign_type == NAMED_PROPERTY) { 3937 if (assign_type == NAMED_PROPERTY) {
3938 // Put the object both on the stack and in the register. 3938 // Put the object both on the stack and in the register.
3939 VisitForStackValue(prop->obj()); 3939 VisitForStackValue(prop->obj());
(...skipping 143 matching lines...)
4083 } else { 4083 } else {
4084 context()->Plug(x0); 4084 context()->Plug(x0);
4085 } 4085 }
4086 break; 4086 break;
4087 } 4087 }
4088 } 4088 }
4089 } 4089 }
4090 4090
4091 4091
4092 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4092 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4093 ASSERT(!context()->IsEffect()); 4093 DCHECK(!context()->IsEffect());
4094 ASSERT(!context()->IsTest()); 4094 DCHECK(!context()->IsTest());
4095 VariableProxy* proxy = expr->AsVariableProxy(); 4095 VariableProxy* proxy = expr->AsVariableProxy();
4096 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4096 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4097 Comment cmnt(masm_, "Global variable"); 4097 Comment cmnt(masm_, "Global variable");
4098 __ Ldr(LoadIC::ReceiverRegister(), GlobalObjectMemOperand()); 4098 __ Ldr(LoadIC::ReceiverRegister(), GlobalObjectMemOperand());
4099 __ Mov(LoadIC::NameRegister(), Operand(proxy->name())); 4099 __ Mov(LoadIC::NameRegister(), Operand(proxy->name()));
4100 if (FLAG_vector_ics) { 4100 if (FLAG_vector_ics) {
4101 __ Mov(LoadIC::SlotRegister(), 4101 __ Mov(LoadIC::SlotRegister(),
4102 Smi::FromInt(proxy->VariableFeedbackSlot())); 4102 Smi::FromInt(proxy->VariableFeedbackSlot()));
4103 } 4103 }
4104 // Use a regular load, not a contextual load, to avoid a reference 4104 // Use a regular load, not a contextual load, to avoid a reference
(...skipping 238 matching lines...)
4343 __ B(&suspend); 4343 __ B(&suspend);
4344 4344
4345 // TODO(jbramley): This label is bound here because the following code 4345 // TODO(jbramley): This label is bound here because the following code
4346 // looks at its pos(). Is it possible to do something more efficient here, 4346 // looks at its pos(). Is it possible to do something more efficient here,
4347 // perhaps using Adr? 4347 // perhaps using Adr?
4348 __ Bind(&continuation); 4348 __ Bind(&continuation);
4349 __ B(&resume); 4349 __ B(&resume);
4350 4350
4351 __ Bind(&suspend); 4351 __ Bind(&suspend);
4352 VisitForAccumulatorValue(expr->generator_object()); 4352 VisitForAccumulatorValue(expr->generator_object());
4353 ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos())); 4353 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4354 __ Mov(x1, Smi::FromInt(continuation.pos())); 4354 __ Mov(x1, Smi::FromInt(continuation.pos()));
4355 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset)); 4355 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4356 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset)); 4356 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4357 __ Mov(x1, cp); 4357 __ Mov(x1, cp);
4358 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2, 4358 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4359 kLRHasBeenSaved, kDontSaveFPRegs); 4359 kLRHasBeenSaved, kDontSaveFPRegs);
4360 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset); 4360 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4361 __ Cmp(__ StackPointer(), x1); 4361 __ Cmp(__ StackPointer(), x1);
4362 __ B(eq, &post_runtime); 4362 __ B(eq, &post_runtime);
4363 __ Push(x0); // generator object 4363 __ Push(x0); // generator object
(...skipping 57 matching lines...)
4421 // TODO(jbramley): This label is bound here because the following code 4421 // TODO(jbramley): This label is bound here because the following code
4422 // looks at its pos(). Is it possible to do something more efficient here, 4422 // looks at its pos(). Is it possible to do something more efficient here,
4423 // perhaps using Adr? 4423 // perhaps using Adr?
4424 __ Bind(&l_continuation); 4424 __ Bind(&l_continuation);
4425 __ B(&l_resume); 4425 __ B(&l_resume);
4426 4426
4427 __ Bind(&l_suspend); 4427 __ Bind(&l_suspend);
4428 const int generator_object_depth = kPointerSize + handler_size; 4428 const int generator_object_depth = kPointerSize + handler_size;
4429 __ Peek(x0, generator_object_depth); 4429 __ Peek(x0, generator_object_depth);
4430 __ Push(x0); // g 4430 __ Push(x0); // g
4431 ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos())); 4431 DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
4432 __ Mov(x1, Smi::FromInt(l_continuation.pos())); 4432 __ Mov(x1, Smi::FromInt(l_continuation.pos()));
4433 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset)); 4433 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4434 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset)); 4434 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4435 __ Mov(x1, cp); 4435 __ Mov(x1, cp);
4436 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2, 4436 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4437 kLRHasBeenSaved, kDontSaveFPRegs); 4437 kLRHasBeenSaved, kDontSaveFPRegs);
4438 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 4438 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4439 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4439 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4440 __ Pop(x0); // result 4440 __ Pop(x0); // result
4441 EmitReturnSequence(); 4441 EmitReturnSequence();
(...skipping 199 matching lines...)
4641 __ Bind(&allocated); 4641 __ Bind(&allocated);
4642 Register map_reg = x1; 4642 Register map_reg = x1;
4643 Register result_value = x2; 4643 Register result_value = x2;
4644 Register boolean_done = x3; 4644 Register boolean_done = x3;
4645 Register empty_fixed_array = x4; 4645 Register empty_fixed_array = x4;
4646 Register untagged_result = x5; 4646 Register untagged_result = x5;
4647 __ Mov(map_reg, Operand(map)); 4647 __ Mov(map_reg, Operand(map));
4648 __ Pop(result_value); 4648 __ Pop(result_value);
4649 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done))); 4649 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
4650 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array())); 4650 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
4651 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); 4651 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
4652 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize == 4652 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
4653 JSObject::kElementsOffset); 4653 JSObject::kElementsOffset);
4654 STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize == 4654 STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
4655 JSGeneratorObject::kResultDonePropertyOffset); 4655 JSGeneratorObject::kResultDonePropertyOffset);
4656 __ ObjectUntag(untagged_result, result); 4656 __ ObjectUntag(untagged_result, result);
4657 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset)); 4657 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
4658 __ Stp(empty_fixed_array, empty_fixed_array, 4658 __ Stp(empty_fixed_array, empty_fixed_array,
4659 MemOperand(untagged_result, JSObject::kPropertiesOffset)); 4659 MemOperand(untagged_result, JSObject::kPropertiesOffset));
4660 __ Stp(result_value, boolean_done, 4660 __ Stp(result_value, boolean_done,
4661 MemOperand(untagged_result, 4661 MemOperand(untagged_result,
(...skipping 19 matching lines...)
4681 return x0; 4681 return x0;
4682 } 4682 }
4683 4683
4684 4684
4685 Register FullCodeGenerator::context_register() { 4685 Register FullCodeGenerator::context_register() {
4686 return cp; 4686 return cp;
4687 } 4687 }
4688 4688
4689 4689
4690 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4690 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4691 ASSERT(POINTER_SIZE_ALIGN(frame_offset) == frame_offset); 4691 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
4692 __ Str(value, MemOperand(fp, frame_offset)); 4692 __ Str(value, MemOperand(fp, frame_offset));
4693 } 4693 }
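The DCHECK here only verifies that frame_offset is already pointer-aligned; POINTER_SIZE_ALIGN rounds a value up to the pointer size. A sketch of the usual mask-based rounding, assuming 8-byte pointers on arm64 (the constants below mirror V8's globals but are stated here as assumptions):

  constexpr intptr_t kPointerSize = 8;  // arm64 assumption
  constexpr intptr_t kPointerAlignmentMask = kPointerSize - 1;
  #define POINTER_SIZE_ALIGN(value) \
    (((value) + kPointerAlignmentMask) & ~kPointerAlignmentMask)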
4694 4694
4695 4695
4696 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4696 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4697 __ Ldr(dst, ContextMemOperand(cp, context_index)); 4697 __ Ldr(dst, ContextMemOperand(cp, context_index));
4698 } 4698 }
4699 4699
4700 4700
4701 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 4701 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4702 Scope* declaration_scope = scope()->DeclarationScope(); 4702 Scope* declaration_scope = scope()->DeclarationScope();
4703 if (declaration_scope->is_global_scope() || 4703 if (declaration_scope->is_global_scope() ||
4704 declaration_scope->is_module_scope()) { 4704 declaration_scope->is_module_scope()) {
4705 // Contexts nested in the native context have a canonical empty function 4705 // Contexts nested in the native context have a canonical empty function
4706 // as their closure, not the anonymous closure containing the global 4706 // as their closure, not the anonymous closure containing the global
4707 // code. Pass a smi sentinel and let the runtime look up the empty 4707 // code. Pass a smi sentinel and let the runtime look up the empty
4708 // function. 4708 // function.
4709 ASSERT(kSmiTag == 0); 4709 DCHECK(kSmiTag == 0);
4710 __ Push(xzr); 4710 __ Push(xzr);
4711 } else if (declaration_scope->is_eval_scope()) { 4711 } else if (declaration_scope->is_eval_scope()) {
4712 // Contexts created by a call to eval have the same closure as the 4712 // Contexts created by a call to eval have the same closure as the
4713 // context calling eval, not the anonymous closure containing the eval 4713 // context calling eval, not the anonymous closure containing the eval
4714 // code. Fetch it from the context. 4714 // code. Fetch it from the context.
4715 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX)); 4715 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4716 __ Push(x10); 4716 __ Push(x10);
4717 } else { 4717 } else {
4718 ASSERT(declaration_scope->is_function_scope()); 4718 DCHECK(declaration_scope->is_function_scope());
4719 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4719 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4720 __ Push(x10); 4720 __ Push(x10);
4721 } 4721 }
4722 } 4722 }
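The DCHECK(kSmiTag == 0) above is what makes __ Push(xzr) a valid sentinel: with a zero tag, the all-zeros word is exactly Smi::FromInt(0), so pushing the zero register pushes the smi sentinel without materializing anything.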
4723 4723
4724 4724
4725 void FullCodeGenerator::EnterFinallyBlock() { 4725 void FullCodeGenerator::EnterFinallyBlock() {
4726 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock"); 4726 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
4727 ASSERT(!result_register().is(x10)); 4727 DCHECK(!result_register().is(x10));
4728 // Preserve the result register while executing finally block. 4728 // Preserve the result register while executing finally block.
4729 // Also cook the return address in lr to the stack (smi encoded Code* delta). 4729 // Also cook the return address in lr to the stack (smi encoded Code* delta).
4730 __ Sub(x10, lr, Operand(masm_->CodeObject())); 4730 __ Sub(x10, lr, Operand(masm_->CodeObject()));
4731 __ SmiTag(x10); 4731 __ SmiTag(x10);
4732 __ Push(result_register(), x10); 4732 __ Push(result_register(), x10);
4733 4733
4734 // Store pending message while executing finally block. 4734 // Store pending message while executing finally block.
4735 ExternalReference pending_message_obj = 4735 ExternalReference pending_message_obj =
4736 ExternalReference::address_of_pending_message_obj(isolate()); 4736 ExternalReference::address_of_pending_message_obj(isolate());
4737 __ Mov(x10, pending_message_obj); 4737 __ Mov(x10, pending_message_obj);
(...skipping 11 matching lines...)
4749 ExternalReference pending_message_script = 4749 ExternalReference pending_message_script =
4750 ExternalReference::address_of_pending_message_script(isolate()); 4750 ExternalReference::address_of_pending_message_script(isolate());
4751 __ Mov(x10, pending_message_script); 4751 __ Mov(x10, pending_message_script);
4752 __ Ldr(x10, MemOperand(x10)); 4752 __ Ldr(x10, MemOperand(x10));
4753 __ Push(x10); 4753 __ Push(x10);
4754 } 4754 }
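"Cooking" the return address means storing it as a code-relative, smi-encoded delta rather than a raw pointer, so the saved value stays valid if the GC moves the code object; ExitFinallyBlock below reverses it. A minimal sketch in plain C++ (the function names and the reuse of kSmiShift = 32 are illustrative assumptions):

  #include <cstdint>

  constexpr int kSmiShift = 32;  // arm64 smi encoding, as sketched earlier

  // lr holds the raw return address; code_start is the code object's address.
  inline uint64_t CookReturnAddress(uint64_t lr, uint64_t code_start) {
    return (lr - code_start) << kSmiShift;   // smi-encoded Code* delta
  }
  inline uint64_t UncookReturnAddress(uint64_t cooked, uint64_t code_start) {
    return (cooked >> kSmiShift) + code_start;
  }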
4755 4755
4756 4756
4757 void FullCodeGenerator::ExitFinallyBlock() { 4757 void FullCodeGenerator::ExitFinallyBlock() {
4758 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock"); 4758 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
4759 ASSERT(!result_register().is(x10)); 4759 DCHECK(!result_register().is(x10));
4760 4760
4761 // Restore pending message from stack. 4761 // Restore pending message from stack.
4762 __ Pop(x10, x11, x12); 4762 __ Pop(x10, x11, x12);
4763 ExternalReference pending_message_script = 4763 ExternalReference pending_message_script =
4764 ExternalReference::address_of_pending_message_script(isolate()); 4764 ExternalReference::address_of_pending_message_script(isolate());
4765 __ Mov(x13, pending_message_script); 4765 __ Mov(x13, pending_message_script);
4766 __ Str(x10, MemOperand(x13)); 4766 __ Str(x10, MemOperand(x13));
4767 4767
4768 __ SmiUntag(x11); 4768 __ SmiUntag(x11);
4769 ExternalReference has_pending_message = 4769 ExternalReference has_pending_message =
(...skipping 21 matching lines...)
4791 4791
4792 4792
4793 void BackEdgeTable::PatchAt(Code* unoptimized_code, 4793 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4794 Address pc, 4794 Address pc,
4795 BackEdgeState target_state, 4795 BackEdgeState target_state,
4796 Code* replacement_code) { 4796 Code* replacement_code) {
4797 // Turn the jump into a nop. 4797 // Turn the jump into a nop.
4798 Address branch_address = pc - 3 * kInstructionSize; 4798 Address branch_address = pc - 3 * kInstructionSize;
4799 PatchingAssembler patcher(branch_address, 1); 4799 PatchingAssembler patcher(branch_address, 1);
4800 4800
4801 ASSERT(Instruction::Cast(branch_address) 4801 DCHECK(Instruction::Cast(branch_address)
4802 ->IsNop(Assembler::INTERRUPT_CODE_NOP) || 4802 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
4803 (Instruction::Cast(branch_address)->IsCondBranchImm() && 4803 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
4804 Instruction::Cast(branch_address)->ImmPCOffset() == 4804 Instruction::Cast(branch_address)->ImmPCOffset() ==
4805 6 * kInstructionSize)); 4805 6 * kInstructionSize));
4806 4806
4807 switch (target_state) { 4807 switch (target_state) {
4808 case INTERRUPT: 4808 case INTERRUPT:
4809 // <decrement profiling counter> 4809 // <decrement profiling counter>
4810 // .. .. .. .. b.pl ok 4810 // .. .. .. .. b.pl ok
4811 // .. .. .. .. ldr x16, pc+<interrupt stub address> 4811 // .. .. .. .. ldr x16, pc+<interrupt stub address>
(...skipping 10 matching lines...) Expand all
4822 // .. .. .. .. ldr x16, pc+<on-stack replacement address> 4822 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
4823 // .. .. .. .. blr x16 4823 // .. .. .. .. blr x16
4824 patcher.nop(Assembler::INTERRUPT_CODE_NOP); 4824 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
4825 break; 4825 break;
4826 } 4826 }
4827 4827
4828 // Replace the call address. 4828 // Replace the call address.
4829 Instruction* load = Instruction::Cast(pc)->preceding(2); 4829 Instruction* load = Instruction::Cast(pc)->preceding(2);
4830 Address interrupt_address_pointer = 4830 Address interrupt_address_pointer =
4831 reinterpret_cast<Address>(load) + load->ImmPCOffset(); 4831 reinterpret_cast<Address>(load) + load->ImmPCOffset();
4832 ASSERT((Memory::uint64_at(interrupt_address_pointer) == 4832 DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
4833 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate() 4833 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4834 ->builtins() 4834 ->builtins()
4835 ->OnStackReplacement() 4835 ->OnStackReplacement()
4836 ->entry())) || 4836 ->entry())) ||
4837 (Memory::uint64_at(interrupt_address_pointer) == 4837 (Memory::uint64_at(interrupt_address_pointer) ==
4838 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate() 4838 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4839 ->builtins() 4839 ->builtins()
4840 ->InterruptCheck() 4840 ->InterruptCheck()
4841 ->entry())) || 4841 ->entry())) ||
4842 (Memory::uint64_at(interrupt_address_pointer) == 4842 (Memory::uint64_at(interrupt_address_pointer) ==
(...skipping 69 matching lines...)
4912 return previous_; 4912 return previous_;
4913 } 4913 }
4914 4914
4915 4915
4916 #undef __ 4916 #undef __
4917 4917
4918 4918
4919 } } // namespace v8::internal 4919 } } // namespace v8::internal
4920 4920
4921 #endif // V8_TARGET_ARCH_ARM64 4921 #endif // V8_TARGET_ARCH_ARM64