Chromium Code Reviews

Side by Side Diff: src/arm/full-codegen-arm.cc

Issue 2163006: ARM: Update the full compiler to handle all code... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 10 years, 7 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 46 matching lines...)
57 // frames-arm.h for its layout. 57 // frames-arm.h for its layout.
58 void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) { 58 void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) {
59 ASSERT(info_ == NULL); 59 ASSERT(info_ == NULL);
60 info_ = info; 60 info_ = info;
61 SetFunctionPosition(function()); 61 SetFunctionPosition(function());
62 Comment cmnt(masm_, "[ function compiled by full code generator"); 62 Comment cmnt(masm_, "[ function compiled by full code generator");
63 63
64 if (mode == PRIMARY) { 64 if (mode == PRIMARY) {
65 int locals_count = scope()->num_stack_slots(); 65 int locals_count = scope()->num_stack_slots();
66 66
67 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit()); 67 __ Push(lr, fp, cp, r1);
68 if (locals_count > 0) { 68 if (locals_count > 0) {
69 // Load undefined value here, so the value is ready for the loop 69 // Load undefined value here, so the value is ready for the loop
70 // below. 70 // below.
71 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 71 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
72 } 72 }
73 // Adjust fp to point to caller's fp. 73 // Adjust fp to point to caller's fp.
74 __ add(fp, sp, Operand(2 * kPointerSize)); 74 __ add(fp, sp, Operand(2 * kPointerSize));
75 75
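For orientation, a sketch of the frame set up by this prologue (not part of the patch; assumes 32-bit ARM, kPointerSize == 4). The new __ Push(lr, fp, cp, r1) is equivalent to the old stm db_w with descending register numbers, so r1 (the function) ends up on top of the stack:

    // After 'Push(lr, fp, cp, r1)' and 'add(fp, sp, Operand(2 * kPointerSize))':
    //
    //   fp + 1 * kPointerSize : saved lr (return address)
    //   fp + 0 * kPointerSize : caller's fp                          <-- fp
    //   fp - 1 * kPointerSize : context (cp)     -- the slot written via
    //                           StandardFrameConstants::kContextOffset below
    //   fp - 2 * kPointerSize : function (r1)    -- the slot read via
    //                           JavaScriptFrameConstants::kFunctionOffset below
    //   sp and below          : stack-allocated locals, filled with undefined
    //                           by the "Allocate locals" loop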
76 { Comment cmnt(masm_, "[ Allocate locals"); 76 { Comment cmnt(masm_, "[ Allocate locals");
77 for (int i = 0; i < locals_count; i++) { 77 for (int i = 0; i < locals_count; i++) {
78 __ push(ip); 78 __ push(ip);
79 } 79 }
80 } 80 }
81 81
82 bool function_in_register = true; 82 bool function_in_register = true;
83 83
84 // Possibly allocate a local context. 84 // Possibly allocate a local context.
85 if (scope()->num_heap_slots() > 0) { 85 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
86 if (heap_slots > 0) {
86 Comment cmnt(masm_, "[ Allocate local context"); 87 Comment cmnt(masm_, "[ Allocate local context");
87 // Argument to NewContext is the function, which is in r1. 88 // Argument to NewContext is the function, which is in r1.
88 __ push(r1); 89 __ push(r1);
89 __ CallRuntime(Runtime::kNewContext, 1); 90 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
91 FastNewContextStub stub(heap_slots);
92 __ CallStub(&stub);
93 } else {
94 __ CallRuntime(Runtime::kNewContext, 1);
95 }
90 function_in_register = false; 96 function_in_register = false;
91 // Context is returned in both r0 and cp. It replaces the context 97 // Context is returned in both r0 and cp. It replaces the context
92 // passed to us. It's saved in the stack and kept live in cp. 98 // passed to us. It's saved in the stack and kept live in cp.
93 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 99 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
94 // Copy any necessary parameters into the context. 100 // Copy any necessary parameters into the context.
95 int num_parameters = scope()->num_parameters(); 101 int num_parameters = scope()->num_parameters();
96 for (int i = 0; i < num_parameters; i++) { 102 for (int i = 0; i < num_parameters; i++) {
97 Slot* slot = scope()->parameter(i)->slot(); 103 Slot* slot = scope()->parameter(i)->slot();
98 if (slot != NULL && slot->type() == Slot::CONTEXT) { 104 if (slot != NULL && slot->type() == Slot::CONTEXT) {
99 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 105 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
95 (...skipping 37 matching lines...)
137 __ CallStub(&stub); 143 __ CallStub(&stub);
138 // Duplicate the value; move-to-slot operation might clobber registers. 144 // Duplicate the value; move-to-slot operation might clobber registers.
139 __ mov(r3, r0); 145 __ mov(r3, r0);
140 Move(arguments->slot(), r0, r1, r2); 146 Move(arguments->slot(), r0, r1, r2);
141 Slot* dot_arguments_slot = 147 Slot* dot_arguments_slot =
142 scope()->arguments_shadow()->AsVariable()->slot(); 148 scope()->arguments_shadow()->AsVariable()->slot();
143 Move(dot_arguments_slot, r3, r1, r2); 149 Move(dot_arguments_slot, r3, r1, r2);
144 } 150 }
145 } 151 }
146 152
153 { Comment cmnt(masm_, "[ Declarations");
154 // For named function expressions, declare the function name as a
155 // constant.
156 if (scope()->is_function_scope() && scope()->function() != NULL) {
157 EmitDeclaration(scope()->function(), Variable::CONST, NULL);
158 }
159 // Visit all the explicit declarations unless there is an illegal
160 // redeclaration.
161 if (scope()->HasIllegalRedeclaration()) {
162 scope()->VisitIllegalRedeclaration(this);
163 } else {
164 VisitDeclarations(scope()->declarations());
165 }
166 }
167
147 // Check the stack for overflow or break request. 168 // Check the stack for overflow or break request.
148 // Put the lr setup instruction in the delay slot. The kInstrSize is 169 // Put the lr setup instruction in the delay slot. The kInstrSize is
149 // added to the implicit 8 byte offset that always applies to operations 170 // added to the implicit 8 byte offset that always applies to operations
150 // with pc and gives a return address 12 bytes down. 171 // with pc and gives a return address 12 bytes down.
151 { Comment cmnt(masm_, "[ Stack check"); 172 { Comment cmnt(masm_, "[ Stack check");
152 __ LoadRoot(r2, Heap::kStackLimitRootIndex); 173 __ LoadRoot(r2, Heap::kStackLimitRootIndex);
153 __ add(lr, pc, Operand(Assembler::kInstrSize)); 174 __ add(lr, pc, Operand(Assembler::kInstrSize));
154 __ cmp(sp, Operand(r2)); 175 __ cmp(sp, Operand(r2));
155 StackCheckStub stub; 176 StackCheckStub stub;
156 __ mov(pc, 177 __ mov(pc,
157 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), 178 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
158 RelocInfo::CODE_TARGET), 179 RelocInfo::CODE_TARGET),
159 LeaveCC, 180 LeaveCC,
160 lo); 181 lo);
161 } 182 }
162 183
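A quick worked example of the "return address 12 bytes down" mentioned in the stack-check comment above (illustration only):

    // With the lr-setup add at address A (ARM mode, Assembler::kInstrSize == 4),
    // reading pc yields A + 8, so lr becomes A + 8 + 4 = A + 12:
    //
    //   A       add lr, pc, #4            ; lr := A + 12
    //   A + 4   cmp sp, r2                ; below the stack limit?
    //   A + 8   mov pc, <StackCheckStub>, lo  ; conditional call into the stub
    //   A + 12  ...                       ; the stub returns here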
163 { Comment cmnt(masm_, "[ Declarations");
164 VisitDeclarations(scope()->declarations());
165 }
166
167 if (FLAG_trace) { 184 if (FLAG_trace) {
168 __ CallRuntime(Runtime::kTraceEnter, 0); 185 __ CallRuntime(Runtime::kTraceEnter, 0);
169 } 186 }
170 187
171 { Comment cmnt(masm_, "[ Body"); 188 { Comment cmnt(masm_, "[ Body");
172 ASSERT(loop_depth() == 0); 189 ASSERT(loop_depth() == 0);
173 VisitStatements(function()->body()); 190 VisitStatements(function()->body());
174 ASSERT(loop_depth() == 0); 191 ASSERT(loop_depth() == 0);
175 } 192 }
176 193
176 (...skipping 200 matching lines...)
377 } else { // count > 1 394 } else { // count > 1
378 __ Drop(count - 2); 395 __ Drop(count - 2);
379 __ str(reg, MemOperand(sp, kPointerSize)); 396 __ str(reg, MemOperand(sp, kPointerSize));
380 __ str(reg, MemOperand(sp)); 397 __ str(reg, MemOperand(sp));
381 } 398 }
382 DoTest(context); 399 DoTest(context);
383 break; 400 break;
384 } 401 }
385 } 402 }
386 403
404 void FullCodeGenerator::PrepareTest(Label* materialize_true,
405 Label* materialize_false,
406 Label** if_true,
407 Label** if_false) {
408 switch (context_) {
409 case Expression::kUninitialized:
410 UNREACHABLE();
411 break;
412 case Expression::kEffect:
413 // In an effect context, the true and the false case branch to the
414 // same label.
415 *if_true = *if_false = materialize_true;
416 break;
417 case Expression::kValue:
418 *if_true = materialize_true;
419 *if_false = materialize_false;
420 break;
421 case Expression::kTest:
422 *if_true = true_label_;
423 *if_false = false_label_;
424 break;
425 case Expression::kValueTest:
426 *if_true = materialize_true;
427 *if_false = false_label_;
428 break;
429 case Expression::kTestValue:
430 *if_true = true_label_;
431 *if_false = materialize_false;
432 break;
433 }
434 }
435
387 436
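The new PrepareTest helper is paired with Apply(context_, if_true, if_false) by the inline runtime predicates further down in this patch; the typical shape is (sketch):

    Label materialize_true, materialize_false;
    Label* if_true = NULL;
    Label* if_false = NULL;
    PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
    // ... emit a test that branches to if_true / if_false ...
    Apply(context_, if_true, if_false);  // materializes true/false only when the
                                         // expression context needs a value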
388 void FullCodeGenerator::Apply(Expression::Context context, 437 void FullCodeGenerator::Apply(Expression::Context context,
389 Label* materialize_true, 438 Label* materialize_true,
390 Label* materialize_false) { 439 Label* materialize_false) {
391 switch (context) { 440 switch (context) {
392 case Expression::kUninitialized: 441 case Expression::kUninitialized:
393 442
394 case Expression::kEffect: 443 case Expression::kEffect:
395 ASSERT_EQ(materialize_true, materialize_false); 444 ASSERT_EQ(materialize_true, materialize_false);
396 __ bind(materialize_true); 445 __ bind(materialize_true);
397 break; 446 break;
398 447
399 case Expression::kValue: { 448 case Expression::kValue: {
400 Label done; 449 Label done;
401 __ bind(materialize_true);
402 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
403 __ jmp(&done);
404 __ bind(materialize_false);
405 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
406 __ bind(&done);
407 switch (location_) { 450 switch (location_) {
408 case kAccumulator: 451 case kAccumulator:
452 __ bind(materialize_true);
453 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
454 __ jmp(&done);
455 __ bind(materialize_false);
456 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
409 break; 457 break;
410 case kStack: 458 case kStack:
411 __ push(result_register()); 459 __ bind(materialize_true);
460 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
461 __ push(ip);
462 __ jmp(&done);
463 __ bind(materialize_false);
464 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
465 __ push(ip);
412 break; 466 break;
413 } 467 }
468 __ bind(&done);
414 break; 469 break;
415 } 470 }
416 471
417 case Expression::kTest: 472 case Expression::kTest:
418 break; 473 break;
419 474
420 case Expression::kValueTest: 475 case Expression::kValueTest:
421 __ bind(materialize_true); 476 __ bind(materialize_true);
422 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
423 switch (location_) { 477 switch (location_) {
424 case kAccumulator: 478 case kAccumulator:
479 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
425 break; 480 break;
426 case kStack: 481 case kStack:
427 __ push(result_register()); 482 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
483 __ push(ip);
428 break; 484 break;
429 } 485 }
430 __ jmp(true_label_); 486 __ jmp(true_label_);
431 break; 487 break;
432 488
433 case Expression::kTestValue: 489 case Expression::kTestValue:
434 __ bind(materialize_false); 490 __ bind(materialize_false);
435 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
436 switch (location_) { 491 switch (location_) {
437 case kAccumulator: 492 case kAccumulator:
493 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
438 break; 494 break;
439 case kStack: 495 case kStack:
440 __ push(result_register()); 496 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
497 __ push(ip);
441 break; 498 break;
442 } 499 }
443 __ jmp(false_label_); 500 __ jmp(false_label_);
444 break; 501 break;
445 } 502 }
446 } 503 }
447 504
448 505
506 // Convert constant control flow (true or false) to the result expected for
507 // a given expression context.
508 void FullCodeGenerator::Apply(Expression::Context context, bool flag) {
509 switch (context) {
510 case Expression::kUninitialized:
511 UNREACHABLE();
512 break;
513 case Expression::kEffect:
514 break;
515 case Expression::kValue: {
516 Heap::RootListIndex value_root_index =
517 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
518 switch (location_) {
519 case kAccumulator:
520 __ LoadRoot(result_register(), value_root_index);
521 break;
522 case kStack:
523 __ LoadRoot(ip, value_root_index);
524 __ push(ip);
525 break;
526 }
527 break;
528 }
529 case Expression::kTest:
530 __ b(flag ? true_label_ : false_label_);
531 break;
532 case Expression::kTestValue:
533 switch (location_) {
534 case kAccumulator:
535 // The value is only needed if it is false.
536 if (!flag) __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
537 break;
538 case kStack:
539 // The value is only needed if it is false.
540 if (!flag) {
541 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
542 __ push(ip);
543 }
544 break;
545 }
546 __ b(flag ? true_label_ : false_label_);
547 break;
548 case Expression::kValueTest:
549 switch (location_) {
550 case kAccumulator:
551 // The value is only needed if it is true.
552 if (flag) __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
553 break;
554 case kStack:
555 // The value is only needed if it is true.
556 if (flag) {
557 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
558 __ push(ip);
559 }
560 break;
561 }
562 __ b(flag ? true_label_ : false_label_);
563 break;
564 }
565 }
566
567
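In other words, for a test value known at compile time the helper above reduces to (behavior sketch, not a call site in this patch):

    // Apply(Expression::kTest, true)    -> b(true_label_)            (no value materialized)
    // Apply(Expression::kValue, true)   -> LoadRoot(result_register(), Heap::kTrueValueRootIndex)
    //                                      (or LoadRoot into ip plus push for kStack)
    // Apply(Expression::kEffect, flag)  -> no code at all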
449 void FullCodeGenerator::DoTest(Expression::Context context) { 568 void FullCodeGenerator::DoTest(Expression::Context context) {
450 // The value to test is pushed on the stack, and duplicated on the stack 569 // The value to test is pushed on the stack, and duplicated on the stack
451 // if necessary (for value/test and test/value contexts). 570 // if necessary (for value/test and test/value contexts).
452 ASSERT_NE(NULL, true_label_); 571 ASSERT_NE(NULL, true_label_);
453 ASSERT_NE(NULL, false_label_); 572 ASSERT_NE(NULL, false_label_);
454 573
455 // Call the runtime to find the boolean value of the source and then 574 // Call the runtime to find the boolean value of the source and then
456 // translate it into control flow to the pair of labels. 575 // translate it into control flow to the pair of labels.
457 __ CallRuntime(Runtime::kToBool, 1); 576 __ CallRuntime(Runtime::kToBool, 1);
458 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 577 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
458 (...skipping 85 matching lines...)
544 MemOperand location = EmitSlotSearch(dst, scratch1); 663 MemOperand location = EmitSlotSearch(dst, scratch1);
545 __ str(src, location); 664 __ str(src, location);
546 // Emit the write barrier code if the location is in the heap. 665 // Emit the write barrier code if the location is in the heap.
547 if (dst->type() == Slot::CONTEXT) { 666 if (dst->type() == Slot::CONTEXT) {
548 __ mov(scratch2, Operand(Context::SlotOffset(dst->index()))); 667 __ mov(scratch2, Operand(Context::SlotOffset(dst->index())));
549 __ RecordWrite(scratch1, scratch2, src); 668 __ RecordWrite(scratch1, scratch2, src);
550 } 669 }
551 } 670 }
552 671
553 672
554 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { 673 void FullCodeGenerator::EmitDeclaration(Variable* variable,
674 Variable::Mode mode,
675 FunctionLiteral* function) {
555 Comment cmnt(masm_, "[ Declaration"); 676 Comment cmnt(masm_, "[ Declaration");
556 Variable* var = decl->proxy()->var(); 677 ASSERT(variable != NULL); // Must have been resolved.
557 ASSERT(var != NULL); // Must have been resolved. 678 Slot* slot = variable->slot();
558 Slot* slot = var->slot(); 679 Property* prop = variable->AsProperty();
559 Property* prop = var->AsProperty();
560 680
561 if (slot != NULL) { 681 if (slot != NULL) {
562 switch (slot->type()) { 682 switch (slot->type()) {
563 case Slot::PARAMETER: 683 case Slot::PARAMETER:
564 case Slot::LOCAL: 684 case Slot::LOCAL:
565 if (decl->mode() == Variable::CONST) { 685 if (mode == Variable::CONST) {
566 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 686 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
567 __ str(ip, MemOperand(fp, SlotOffset(slot))); 687 __ str(ip, MemOperand(fp, SlotOffset(slot)));
568 } else if (decl->fun() != NULL) { 688 } else if (function != NULL) {
569 VisitForValue(decl->fun(), kAccumulator); 689 VisitForValue(function, kAccumulator);
570 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); 690 __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
571 } 691 }
572 break; 692 break;
573 693
574 case Slot::CONTEXT: 694 case Slot::CONTEXT:
575 // We bypass the general EmitSlotSearch because we know more about 695 // We bypass the general EmitSlotSearch because we know more about
576 // this specific context. 696 // this specific context.
577 697
578 // The variable in the decl always resides in the current context. 698 // The variable in the decl always resides in the current context.
579 ASSERT_EQ(0, scope()->ContextChainLength(var->scope())); 699 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
580 if (FLAG_debug_code) { 700 if (FLAG_debug_code) {
581 // Check if we have the correct context pointer. 701 // Check if we have the correct context pointer.
582 __ ldr(r1, 702 __ ldr(r1,
583 CodeGenerator::ContextOperand(cp, Context::FCONTEXT_INDEX)); 703 CodeGenerator::ContextOperand(cp, Context::FCONTEXT_INDEX));
584 __ cmp(r1, cp); 704 __ cmp(r1, cp);
585 __ Check(eq, "Unexpected declaration in current context."); 705 __ Check(eq, "Unexpected declaration in current context.");
586 } 706 }
587 if (decl->mode() == Variable::CONST) { 707 if (mode == Variable::CONST) {
588 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 708 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
589 __ str(ip, CodeGenerator::ContextOperand(cp, slot->index())); 709 __ str(ip, CodeGenerator::ContextOperand(cp, slot->index()));
590 // No write barrier since the_hole_value is in old space. 710 // No write barrier since the_hole_value is in old space.
591 } else if (decl->fun() != NULL) { 711 } else if (function != NULL) {
592 VisitForValue(decl->fun(), kAccumulator); 712 VisitForValue(function, kAccumulator);
593 __ str(result_register(), 713 __ str(result_register(),
594 CodeGenerator::ContextOperand(cp, slot->index())); 714 CodeGenerator::ContextOperand(cp, slot->index()));
595 int offset = Context::SlotOffset(slot->index()); 715 int offset = Context::SlotOffset(slot->index());
596 __ mov(r2, Operand(offset)); 716 __ mov(r2, Operand(offset));
597 // We know that we have written a function, which is not a smi. 717 // We know that we have written a function, which is not a smi.
598 __ mov(r1, Operand(cp)); 718 __ mov(r1, Operand(cp));
599 __ RecordWrite(r1, r2, result_register()); 719 __ RecordWrite(r1, r2, result_register());
600 } 720 }
601 break; 721 break;
602 722
603 case Slot::LOOKUP: { 723 case Slot::LOOKUP: {
604 __ mov(r2, Operand(var->name())); 724 __ mov(r2, Operand(variable->name()));
605 // Declaration nodes are always introduced in one of two modes. 725 // Declaration nodes are always introduced in one of two modes.
606 ASSERT(decl->mode() == Variable::VAR || 726 ASSERT(mode == Variable::VAR ||
607 decl->mode() == Variable::CONST); 727 mode == Variable::CONST);
608 PropertyAttributes attr = 728 PropertyAttributes attr =
609 (decl->mode() == Variable::VAR) ? NONE : READ_ONLY; 729 (mode == Variable::VAR) ? NONE : READ_ONLY;
610 __ mov(r1, Operand(Smi::FromInt(attr))); 730 __ mov(r1, Operand(Smi::FromInt(attr)));
611 // Push initial value, if any. 731 // Push initial value, if any.
612 // Note: For variables we must not push an initial value (such as 732 // Note: For variables we must not push an initial value (such as
613 // 'undefined') because we may have a (legal) redeclaration and we 733 // 'undefined') because we may have a (legal) redeclaration and we
614 // must not destroy the current value. 734 // must not destroy the current value.
615 if (decl->mode() == Variable::CONST) { 735 if (mode == Variable::CONST) {
616 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); 736 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
617 __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit() | r0.bit()); 737 __ Push(cp, r2, r1, r0);
618 } else if (decl->fun() != NULL) { 738 } else if (function != NULL) {
619 __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit()); 739 __ Push(cp, r2, r1);
620 // Push initial value for function declaration. 740 // Push initial value for function declaration.
621 VisitForValue(decl->fun(), kStack); 741 VisitForValue(function, kStack);
622 } else { 742 } else {
623 __ mov(r0, Operand(Smi::FromInt(0))); // No initial value! 743 __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
624 __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit() | r0.bit()); 744 __ Push(cp, r2, r1, r0);
625 } 745 }
626 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 746 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
627 break; 747 break;
628 } 748 }
629 } 749 }
630 750
631 } else if (prop != NULL) { 751 } else if (prop != NULL) {
632 if (decl->fun() != NULL || decl->mode() == Variable::CONST) { 752 if (function != NULL || mode == Variable::CONST) {
633 // We are declaring a function or constant that rewrites to a 753 // We are declaring a function or constant that rewrites to a
634 // property. Use (keyed) IC to set the initial value. 754 // property. Use (keyed) IC to set the initial value.
635 VisitForValue(prop->obj(), kStack); 755 VisitForValue(prop->obj(), kStack);
636 VisitForValue(prop->key(), kStack); 756 if (function != NULL) {
637 757 VisitForValue(prop->key(), kStack);
638 if (decl->fun() != NULL) { 758 VisitForValue(function, kAccumulator);
639 VisitForValue(decl->fun(), kAccumulator); 759 __ pop(r1); // Key.
640 } else { 760 } else {
761 VisitForValue(prop->key(), kAccumulator);
762 __ mov(r1, result_register()); // Key.
641 __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex); 763 __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
642 } 764 }
765 __ pop(r2); // Receiver.
643 766
644 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 767 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
645 __ pop(r1); // Key.
646 __ pop(r2); // Receiver.
647 __ Call(ic, RelocInfo::CODE_TARGET); 768 __ Call(ic, RelocInfo::CODE_TARGET);
648
649 // Value in r0 is ignored (declarations are statements). 769 // Value in r0 is ignored (declarations are statements).
650 } 770 }
651 } 771 }
652 } 772 }
653 773
654 774
775 void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
776 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
777 }
778
779
655 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 780 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
656 // Call the runtime to declare the globals. 781 // Call the runtime to declare the globals.
657 // The context is the first argument. 782 // The context is the first argument.
658 __ mov(r1, Operand(pairs)); 783 __ mov(r1, Operand(pairs));
659 __ mov(r0, Operand(Smi::FromInt(is_eval() ? 1 : 0))); 784 __ mov(r0, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
660 __ stm(db_w, sp, cp.bit() | r1.bit() | r0.bit()); 785 __ Push(cp, r1, r0);
661 __ CallRuntime(Runtime::kDeclareGlobals, 3); 786 __ CallRuntime(Runtime::kDeclareGlobals, 3);
662 // Return value is ignored. 787 // Return value is ignored.
663 } 788 }
664 789
665 790
666 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 791 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
667 UNREACHABLE(); 792 Comment cmnt(masm_, "[ SwitchStatement");
793 Breakable nested_statement(this, stmt);
794 SetStatementPosition(stmt);
795 // Keep the switch value on the stack until a case matches.
796 VisitForValue(stmt->tag(), kStack);
797
798 ZoneList<CaseClause*>* clauses = stmt->cases();
799 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
800
801 Label next_test; // Recycled for each test.
802 // Compile all the tests with branches to their bodies.
803 for (int i = 0; i < clauses->length(); i++) {
804 CaseClause* clause = clauses->at(i);
805 // The default is not a test, but remember it as final fall through.
806 if (clause->is_default()) {
807 default_clause = clause;
808 continue;
809 }
810
811 Comment cmnt(masm_, "[ Case comparison");
812 __ bind(&next_test);
813 next_test.Unuse();
814
815 // Compile the label expression.
816 VisitForValue(clause->label(), kAccumulator);
817
818 // Perform the comparison as if via '==='. The comparison stub expects
819 // the smi vs. smi case to be handled before it is called.
820 Label slow_case;
821 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
822 __ mov(r2, r1);
823 __ orr(r2, r2, r0);
824 __ tst(r2, Operand(kSmiTagMask));
825 __ b(ne, &slow_case);
826 __ cmp(r1, r0);
827 __ b(ne, &next_test);
828 __ Drop(1); // Switch value is no longer needed.
829 __ b(clause->body_target()->entry_label());
830
831 __ bind(&slow_case);
832 CompareStub stub(eq, true);
833 __ CallStub(&stub);
834 __ tst(r0, r0);
835 __ b(ne, &next_test);
836 __ Drop(1); // Switch value is no longer needed.
837 __ b(clause->body_target()->entry_label());
838 }
839
840 // Discard the test value and jump to the default if present, otherwise to
841 // the end of the statement.
842 __ bind(&next_test);
843 __ Drop(1); // Switch value is no longer needed.
844 if (default_clause == NULL) {
845 __ b(nested_statement.break_target());
846 } else {
847 __ b(default_clause->body_target()->entry_label());
848 }
849
850 // Compile all the case bodies.
851 for (int i = 0; i < clauses->length(); i++) {
852 Comment cmnt(masm_, "[ Case body");
853 CaseClause* clause = clauses->at(i);
854 __ bind(clause->body_target()->entry_label());
855 VisitStatements(clause->statements());
856 }
857
858 __ bind(nested_statement.break_target());
668 } 859 }
669 860
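Side note on the smi fast path in the case comparison above: with kSmiTag == 0 and kSmiTagMask == 1, OR-ing the two operands lets one tag test cover both (sketch):

    // r2 = r1 | r0;                                  // low bit set iff at least one operand is not a smi
    // if ((r2 & kSmiTagMask) != 0) goto slow_case;   // fall back to the CompareStub
    // // both operands are smis: a plain cmp of the tagged words decides '==='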
670 861
671 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 862 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
672 UNREACHABLE(); 863 Comment cmnt(masm_, "[ ForInStatement");
673 } 864 SetStatementPosition(stmt);
674 865
675 866 Label loop, exit;
867 ForIn loop_statement(this, stmt);
868 increment_loop_depth();
869
870 // Get the object to enumerate over. Both SpiderMonkey and JSC
871 // ignore null and undefined in contrast to the specification; see
872 // ECMA-262 section 12.6.4.
873 VisitForValue(stmt->enumerable(), kAccumulator);
874 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
875 __ cmp(r0, ip);
876 __ b(eq, &exit);
877 __ LoadRoot(ip, Heap::kNullValueRootIndex);
878 __ cmp(r0, ip);
879 __ b(eq, &exit);
880
881 // Convert the object to a JS object.
882 Label convert, done_convert;
883 __ BranchOnSmi(r0, &convert);
884 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
885 __ b(hs, &done_convert);
886 __ bind(&convert);
887 __ push(r0);
888 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
889 __ bind(&done_convert);
890 __ push(r0);
891
892 // TODO(kasperl): Check cache validity in generated code. This is a
893 // fast case for the JSObject::IsSimpleEnum cache validity
894 // checks. If we cannot guarantee cache validity, call the runtime
895 // system to check cache validity or get the property names in a
896 // fixed array.
897
898 // Get the set of properties to enumerate.
899 __ push(r0); // Duplicate the enumerable object on the stack.
900 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
901
902 // If we got a map from the runtime call, we can do a fast
903 // modification check. Otherwise, we got a fixed array, and we have
904 // to do a slow check.
905 Label fixed_array;
906 __ mov(r2, r0);
907 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
908 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
909 __ cmp(r1, ip);
910 __ b(ne, &fixed_array);
911
912 // We got a map in register r0. Get the enumeration cache from it.
913 __ ldr(r1, FieldMemOperand(r0, Map::kInstanceDescriptorsOffset));
914 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
915 __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
916
917 // Set up the four remaining stack slots.
918 __ push(r0); // Map.
919 __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
920 __ mov(r0, Operand(Smi::FromInt(0)));
921 // Push enumeration cache, enumeration cache length (as smi) and zero.
922 __ Push(r2, r1, r0);
923 __ jmp(&loop);
924
925 // We got a fixed array in register r0. Iterate through that.
926 __ bind(&fixed_array);
927 __ mov(r1, Operand(Smi::FromInt(0))); // Map (0) - force slow check.
928 __ Push(r1, r0);
929 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
930 __ mov(r0, Operand(Smi::FromInt(0)));
931 __ Push(r1, r0); // Fixed array length (as smi) and initial index.
932
933 // Generate code for doing the condition check.
934 __ bind(&loop);
935 // Load the current count to r0, load the length to r1.
936 __ ldrd(r0, MemOperand(sp, 0 * kPointerSize));
937 __ cmp(r0, r1); // Compare to the array length.
938 __ b(hs, loop_statement.break_target());
939
940 // Get the current entry of the array into register r3.
941 __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
942 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
943 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
944
945 // Get the expected map from the stack or a zero map in the
946 // permanent slow case into register r2.
947 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
948
949 // Check if the expected map still matches that of the enumerable.
950 // If not, we have to filter the key.
951 Label update_each;
952 __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
953 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
954 __ cmp(r4, Operand(r2));
955 __ b(eq, &update_each);
956
957 // Convert the entry to a string or null if it isn't a property
958 // anymore. If the property has been removed while iterating, we
959 // just skip it.
960 __ push(r1); // Enumerable.
961 __ push(r3); // Current entry.
962 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS);
963 __ mov(r3, Operand(r0));
964 __ LoadRoot(ip, Heap::kNullValueRootIndex);
965 __ cmp(r3, ip);
966 __ b(eq, loop_statement.continue_target());
967
968 // Update the 'each' property or variable from the possibly filtered
969 // entry in register r3.
970 __ bind(&update_each);
971 __ mov(result_register(), r3);
972 // Perform the assignment as if via '='.
973 EmitAssignment(stmt->each());
974
975 // Generate code for the body of the loop.
976 Label stack_limit_hit, stack_check_done;
977 Visit(stmt->body());
978
979 __ StackLimitCheck(&stack_limit_hit);
980 __ bind(&stack_check_done);
981
982 // Generate code for going to the next element by incrementing
983 // the index (smi) stored on top of the stack.
984 __ bind(loop_statement.continue_target());
985 __ pop(r0);
986 __ add(r0, r0, Operand(Smi::FromInt(1)));
987 __ push(r0);
988 __ b(&loop);
989
990 // Slow case for the stack limit check.
991 StackCheckStub stack_check_stub;
992 __ bind(&stack_limit_hit);
993 __ CallStub(&stack_check_stub);
994 __ b(&stack_check_done);
995
996 // Remove the pointers stored on the stack.
997 __ bind(loop_statement.break_target());
998 __ Drop(5);
999
1000 // Exit and decrement the loop depth.
1001 __ bind(&exit);
1002 decrement_loop_depth();
1003 }
1004
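For reference, the five stack slots maintained by the for-in loop above; this is what the MemOperand offsets and the final __ Drop(5) refer to:

    //   sp + 0 * kPointerSize : current index (smi)
    //   sp + 1 * kPointerSize : length of the enum cache / fixed array (smi)
    //   sp + 2 * kPointerSize : enum cache bridge cache, or fixed array of keys
    //   sp + 3 * kPointerSize : expected map of the enumerable (Smi 0 forces the slow path)
    //   sp + 4 * kPointerSize : the enumerable object itself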
1005
676 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info) { 1006 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info) {
677 // Use the fast case closure allocation code that allocates in new 1007 // Use the fast case closure allocation code that allocates in new
678 // space for nested functions that don't need literals cloning. 1008 // space for nested functions that don't need literals cloning.
679 if (scope()->is_function_scope() && info->num_literals() == 0) { 1009 if (scope()->is_function_scope() && info->num_literals() == 0) {
680 FastNewClosureStub stub; 1010 FastNewClosureStub stub;
681 __ mov(r0, Operand(info)); 1011 __ mov(r0, Operand(info));
682 __ push(r0); 1012 __ push(r0);
683 __ CallStub(&stub); 1013 __ CallStub(&stub);
684 } else { 1014 } else {
685 __ mov(r0, Operand(info)); 1015 __ mov(r0, Operand(info));
686 __ stm(db_w, sp, cp.bit() | r0.bit()); 1016 __ Push(cp, r0);
687 __ CallRuntime(Runtime::kNewClosure, 2); 1017 __ CallRuntime(Runtime::kNewClosure, 2);
688 } 1018 }
689 Apply(context_, r0); 1019 Apply(context_, r0);
690 } 1020 }
691 1021
692 1022
693 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1023 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
694 Comment cmnt(masm_, "[ VariableProxy"); 1024 Comment cmnt(masm_, "[ VariableProxy");
695 EmitVariableLoad(expr->var(), context_); 1025 EmitVariableLoad(expr->var(), context_);
696 } 1026 }
696 (...skipping 13 matching lines...)
710 // object (receiver) in r0. 1040 // object (receiver) in r0.
711 __ ldr(r0, CodeGenerator::GlobalObject()); 1041 __ ldr(r0, CodeGenerator::GlobalObject());
712 __ mov(r2, Operand(var->name())); 1042 __ mov(r2, Operand(var->name()));
713 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); 1043 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
714 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1044 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
715 Apply(context, r0); 1045 Apply(context, r0);
716 1046
717 } else if (slot != NULL && slot->type() == Slot::LOOKUP) { 1047 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
718 Comment cmnt(masm_, "Lookup slot"); 1048 Comment cmnt(masm_, "Lookup slot");
719 __ mov(r1, Operand(var->name())); 1049 __ mov(r1, Operand(var->name()));
720 __ stm(db_w, sp, cp.bit() | r1.bit()); // Context and name. 1050 __ Push(cp, r1); // Context and name.
721 __ CallRuntime(Runtime::kLoadContextSlot, 2); 1051 __ CallRuntime(Runtime::kLoadContextSlot, 2);
722 Apply(context, r0); 1052 Apply(context, r0);
723 1053
724 } else if (slot != NULL) { 1054 } else if (slot != NULL) {
725 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) 1055 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
726 ? "Context slot" 1056 ? "Context slot"
727 : "Stack slot"); 1057 : "Stack slot");
728 Apply(context, slot); 1058 if (var->mode() == Variable::CONST) {
729 1059 // Constants may be the hole value if they have not been initialized.
1060 // Unhole them.
1061 Label done;
1062 MemOperand slot_operand = EmitSlotSearch(slot, r0);
1063 __ ldr(r0, slot_operand);
1064 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1065 __ cmp(r0, ip);
1066 __ b(ne, &done);
1067 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1068 __ bind(&done);
1069 Apply(context, r0);
1070 } else {
1071 Apply(context, slot);
1072 }
730 } else { 1073 } else {
731 Comment cmnt(masm_, "Rewritten parameter"); 1074 Comment cmnt(masm_, "Rewritten parameter");
732 ASSERT_NOT_NULL(property); 1075 ASSERT_NOT_NULL(property);
733 // Rewritten parameter accesses are of the form "slot[literal]". 1076 // Rewritten parameter accesses are of the form "slot[literal]".
734 1077
735 // Assert that the object is in a slot. 1078 // Assert that the object is in a slot.
736 Variable* object_var = property->obj()->AsVariableProxy()->AsVariable(); 1079 Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
737 ASSERT_NOT_NULL(object_var); 1080 ASSERT_NOT_NULL(object_var);
738 Slot* object_slot = object_var->slot(); 1081 Slot* object_slot = object_var->slot();
739 ASSERT_NOT_NULL(object_slot); 1082 ASSERT_NOT_NULL(object_slot);
739 (...skipping 115 matching lines...)
855 if (result_saved) { 1198 if (result_saved) {
856 ApplyTOS(context_); 1199 ApplyTOS(context_);
857 } else { 1200 } else {
858 Apply(context_, r0); 1201 Apply(context_, r0);
859 } 1202 }
860 } 1203 }
861 1204
862 1205
863 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1206 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
864 Comment cmnt(masm_, "[ ArrayLiteral"); 1207 Comment cmnt(masm_, "[ ArrayLiteral");
1208
1209 ZoneList<Expression*>* subexprs = expr->values();
1210 int length = subexprs->length();
1211
865 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1212 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
866 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1213 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
867 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1214 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
868 __ mov(r1, Operand(expr->constant_elements())); 1215 __ mov(r1, Operand(expr->constant_elements()));
869 __ Push(r3, r2, r1); 1216 __ Push(r3, r2, r1);
870 if (expr->depth() > 1) { 1217 if (expr->depth() > 1) {
871 __ CallRuntime(Runtime::kCreateArrayLiteral, 3); 1218 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1219 } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
1220 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
872 } else { 1221 } else {
873 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); 1222 FastCloneShallowArrayStub stub(length);
1223 __ CallStub(&stub);
874 } 1224 }
875 1225
876 bool result_saved = false; // Is the result saved to the stack? 1226 bool result_saved = false; // Is the result saved to the stack?
877 1227
878 // Emit code to evaluate all the non-constant subexpressions and to store 1228 // Emit code to evaluate all the non-constant subexpressions and to store
879 // them into the newly cloned array. 1229 // them into the newly cloned array.
880 ZoneList<Expression*>* subexprs = expr->values(); 1230 for (int i = 0; i < length; i++) {
881 for (int i = 0, len = subexprs->length(); i < len; i++) {
882 Expression* subexpr = subexprs->at(i); 1231 Expression* subexpr = subexprs->at(i);
883 // If the subexpression is a literal or a simple materialized literal it 1232 // If the subexpression is a literal or a simple materialized literal it
884 // is already set in the cloned array. 1233 // is already set in the cloned array.
885 if (subexpr->AsLiteral() != NULL || 1234 if (subexpr->AsLiteral() != NULL ||
886 CompileTimeValue::IsCompileTimeValue(subexpr)) { 1235 CompileTimeValue::IsCompileTimeValue(subexpr)) {
887 continue; 1236 continue;
888 } 1237 }
889 1238
890 if (!result_saved) { 1239 if (!result_saved) {
891 __ push(r0); 1240 __ push(r0);
891 (...skipping 142 matching lines...)
1034 1383
1035 void FullCodeGenerator::EmitBinaryOp(Token::Value op, 1384 void FullCodeGenerator::EmitBinaryOp(Token::Value op,
1036 Expression::Context context) { 1385 Expression::Context context) {
1037 __ pop(r1); 1386 __ pop(r1);
1038 GenericBinaryOpStub stub(op, NO_OVERWRITE, r1, r0); 1387 GenericBinaryOpStub stub(op, NO_OVERWRITE, r1, r0);
1039 __ CallStub(&stub); 1388 __ CallStub(&stub);
1040 Apply(context, r0); 1389 Apply(context, r0);
1041 } 1390 }
1042 1391
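Register convention assumed by EmitBinaryOp above (sketch; the operands are presumably set up this way by its callers):

    // right operand : r0 (the accumulator)
    // left operand  : on top of the stack, popped into r1 here
    // GenericBinaryOpStub(op, NO_OVERWRITE, r1, r0) takes lhs in r1 and rhs in r0
    // and leaves the result in r0, which Apply(context, r0) then dispatches.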
1043 1392
1393 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1394 // Invalid left-hand sides are rewritten to have a 'throw
1395 // ReferenceError' on the left-hand side.
1396 if (!expr->IsValidLeftHandSide()) {
1397 VisitForEffect(expr);
1398 return;
1399 }
1400
1401 // Left-hand side can only be a property, a global or a (parameter or local)
1402 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1403 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1404 LhsKind assign_type = VARIABLE;
1405 Property* prop = expr->AsProperty();
1406 if (prop != NULL) {
1407 assign_type = (prop->key()->IsPropertyName())
1408 ? NAMED_PROPERTY
1409 : KEYED_PROPERTY;
1410 }
1411
1412 switch (assign_type) {
1413 case VARIABLE: {
1414 Variable* var = expr->AsVariableProxy()->var();
1415 EmitVariableAssignment(var, Token::ASSIGN, Expression::kEffect);
1416 break;
1417 }
1418 case NAMED_PROPERTY: {
1419 __ push(r0); // Preserve value.
1420 VisitForValue(prop->obj(), kAccumulator);
1421 __ mov(r1, r0);
1422 __ pop(r0); // Restore value.
1423 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
1424 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1425 __ Call(ic, RelocInfo::CODE_TARGET);
1426 break;
1427 }
1428 case KEYED_PROPERTY: {
1429 __ push(r0); // Preserve value.
1430 VisitForValue(prop->obj(), kStack);
1431 VisitForValue(prop->key(), kAccumulator);
1432 __ mov(r1, r0);
1433 __ pop(r2);
1434 __ pop(r0); // Restore value.
1435 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1436 __ Call(ic, RelocInfo::CODE_TARGET);
1437 break;
1438 }
1439 }
1440 }
1441
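The new EmitAssignment helper, called from the for-in statement above to assign to the 'each' expression, relies on the store IC register conventions visible in its two property cases (sketch):

    // StoreIC      : r0 = value, r1 = receiver, r2 = property name
    // KeyedStoreIC : r0 = value, r1 = key,      r2 = receiver
    // The value arrives in r0, so it is pushed around the evaluation of the
    // receiver/key subexpressions and popped back into r0 before the IC call.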
1442
1044 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1443 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1045 Token::Value op, 1444 Token::Value op,
1046 Expression::Context context) { 1445 Expression::Context context) {
1047 // Left-hand sides that rewrite to explicit property accesses do not reach 1446 // Left-hand sides that rewrite to explicit property accesses do not reach
1048 // here. 1447 // here.
1049 ASSERT(var != NULL); 1448 ASSERT(var != NULL);
1050 ASSERT(var->is_global() || var->slot() != NULL); 1449 ASSERT(var->is_global() || var->slot() != NULL);
1051 1450
1052 if (var->is_global()) { 1451 if (var->is_global()) {
1053 ASSERT(!var->is_this()); 1452 ASSERT(!var->is_this());
1053 (...skipping 21 matching lines...)
1075 __ b(ne, &done); 1474 __ b(ne, &done);
1076 } 1475 }
1077 // Perform the assignment. 1476 // Perform the assignment.
1078 __ str(result_register(), MemOperand(fp, SlotOffset(slot))); 1477 __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
1079 break; 1478 break;
1080 1479
1081 case Slot::CONTEXT: { 1480 case Slot::CONTEXT: {
1082 MemOperand target = EmitSlotSearch(slot, r1); 1481 MemOperand target = EmitSlotSearch(slot, r1);
1083 if (op == Token::INIT_CONST) { 1482 if (op == Token::INIT_CONST) {
1084 // Detect const reinitialization by checking for the hole value. 1483 // Detect const reinitialization by checking for the hole value.
1085 __ ldr(r1, target); 1484 __ ldr(r2, target);
1086 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 1485 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1087 __ cmp(r1, ip); 1486 __ cmp(r2, ip);
1088 __ b(ne, &done); 1487 __ b(ne, &done);
1089 } 1488 }
1090 // Perform the assignment and issue the write barrier. 1489 // Perform the assignment and issue the write barrier.
1091 __ str(result_register(), target); 1490 __ str(result_register(), target);
1092 // RecordWrite may destroy all its register arguments. 1491 // RecordWrite may destroy all its register arguments.
1093 __ mov(r3, result_register()); 1492 __ mov(r3, result_register());
1094 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; 1493 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
1095 __ mov(r2, Operand(offset)); 1494 __ mov(r2, Operand(offset));
1096 __ RecordWrite(r1, r2, r3); 1495 __ RecordWrite(r1, r2, r3);
1097 break; 1496 break;
1096 (...skipping 151 matching lines...)
1249 1648
1250 void FullCodeGenerator::EmitCallWithStub(Call* expr) { 1649 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
1251 // Code common for calls using the call stub. 1650 // Code common for calls using the call stub.
1252 ZoneList<Expression*>* args = expr->arguments(); 1651 ZoneList<Expression*>* args = expr->arguments();
1253 int arg_count = args->length(); 1652 int arg_count = args->length();
1254 for (int i = 0; i < arg_count; i++) { 1653 for (int i = 0; i < arg_count; i++) {
1255 VisitForValue(args->at(i), kStack); 1654 VisitForValue(args->at(i), kStack);
1256 } 1655 }
1257 // Record source position for debugger. 1656 // Record source position for debugger.
1258 SetSourcePosition(expr->position()); 1657 SetSourcePosition(expr->position());
1259 CallFunctionStub stub(arg_count, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE); 1658 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
1659 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
1260 __ CallStub(&stub); 1660 __ CallStub(&stub);
1261 // Restore context register. 1661 // Restore context register.
1262 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1662 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1263 DropAndApply(1, context_, r0); 1663 DropAndApply(1, context_, r0);
1264 } 1664 }
1265 1665
1266 1666
1267 void FullCodeGenerator::VisitCall(Call* expr) { 1667 void FullCodeGenerator::VisitCall(Call* expr) {
1268 Comment cmnt(masm_, "[ Call"); 1668 Comment cmnt(masm_, "[ Call");
1269 Expression* fun = expr->expression(); 1669 Expression* fun = expr->expression();
1270 Variable* var = fun->AsVariableProxy()->AsVariable(); 1670 Variable* var = fun->AsVariableProxy()->AsVariable();
1271 1671
1272 if (var != NULL && var->is_possibly_eval()) { 1672 if (var != NULL && var->is_possibly_eval()) {
1273 // Call to the identifier 'eval'. 1673 // In a call to eval, we first call %ResolvePossiblyDirectEval to
1274 UNREACHABLE(); 1674 // resolve the function we need to call and the receiver of the
1675 // call. Then we call the resolved function using the given
1676 // arguments.
1677 VisitForValue(fun, kStack);
1678 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1679 __ push(r2); // Reserved receiver slot.
1680
1681 // Push the arguments.
1682 ZoneList<Expression*>* args = expr->arguments();
1683 int arg_count = args->length();
1684 for (int i = 0; i < arg_count; i++) {
1685 VisitForValue(args->at(i), kStack);
1686 }
1687
1688 // Push copy of the function - found below the arguments.
1689 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
1690 __ push(r1);
1691
1692 // Push copy of the first argument or undefined if it doesn't exist.
1693 if (arg_count > 0) {
1694 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
1695 __ push(r1);
1696 } else {
1697 __ push(r2);
1698 }
1699
1700 // Push the receiver of the enclosing function and do runtime call.
1701 __ ldr(r1, MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize));
1702 __ push(r1);
1703 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
1704
1705 // The runtime call returns a pair of values in r0 (function) and
1706 // r1 (receiver). Touch up the stack with the right values.
1707 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
1708 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
1709
1710 // Record source position for debugger.
1711 SetSourcePosition(expr->position());
1712 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
1713 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
1714 __ CallStub(&stub);
1715 // Restore context register.
1716 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1717 DropAndApply(1, context_, r0);
1275 } else if (var != NULL && !var->is_this() && var->is_global()) { 1718 } else if (var != NULL && !var->is_this() && var->is_global()) {
1276 // Push global object as receiver for the call IC. 1719 // Push global object as receiver for the call IC.
1277 __ ldr(r0, CodeGenerator::GlobalObject()); 1720 __ ldr(r0, CodeGenerator::GlobalObject());
1278 __ push(r0); 1721 __ push(r0);
1279 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); 1722 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
1280 } else if (var != NULL && var->slot() != NULL && 1723 } else if (var != NULL && var->slot() != NULL &&
1281 var->slot()->type() == Slot::LOOKUP) { 1724 var->slot()->type() == Slot::LOOKUP) {
1282 // Call to a lookup slot. 1725 // Call to a lookup slot (dynamically introduced variable). Call the
1283 UNREACHABLE(); 1726 // runtime to find the function to call (returned in eax) and the object
1727 // holding it (returned in edx).
1728 __ push(context_register());
1729 __ mov(r2, Operand(var->name()));
1730 __ push(r2);
1731 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1732 __ push(r0); // Function.
1733 __ push(r1); // Receiver.
1734 EmitCallWithStub(expr);
1284 } else if (fun->AsProperty() != NULL) { 1735 } else if (fun->AsProperty() != NULL) {
1285 // Call to an object property. 1736 // Call to an object property.
1286 Property* prop = fun->AsProperty(); 1737 Property* prop = fun->AsProperty();
1287 Literal* key = prop->key()->AsLiteral(); 1738 Literal* key = prop->key()->AsLiteral();
1288 if (key != NULL && key->handle()->IsSymbol()) { 1739 if (key != NULL && key->handle()->IsSymbol()) {
1289 // Call to a named property, use call IC. 1740 // Call to a named property, use call IC.
1290 VisitForValue(prop->obj(), kStack); 1741 VisitForValue(prop->obj(), kStack);
1291 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); 1742 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
1292 } else { 1743 } else {
1293 // Call to a keyed property, use keyed load IC followed by function 1744 // Call to a keyed property, use keyed load IC followed by function
1293 (...skipping 75 matching lines...)
1369 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 1820 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
1370 1821
1371 Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall)); 1822 Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
1372 __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL); 1823 __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
1373 1824
1374 // Replace function on TOS with result in r0, or pop it. 1825 // Replace function on TOS with result in r0, or pop it.
1375 DropAndApply(1, context_, r0); 1826 DropAndApply(1, context_, r0);
1376 } 1827 }
1377 1828
1378 1829
1830 void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
1831 Handle<String> name = expr->name();
1832 if (strcmp("_IsSmi", *name->ToCString()) == 0) {
1833 EmitIsSmi(expr->arguments());
1834 } else if (strcmp("_IsNonNegativeSmi", *name->ToCString()) == 0) {
1835 EmitIsNonNegativeSmi(expr->arguments());
1836 } else if (strcmp("_IsObject", *name->ToCString()) == 0) {
1837 EmitIsObject(expr->arguments());
1838 } else if (strcmp("_IsUndetectableObject", *name->ToCString()) == 0) {
1839 EmitIsUndetectableObject(expr->arguments());
1840 } else if (strcmp("_IsFunction", *name->ToCString()) == 0) {
1841 EmitIsFunction(expr->arguments());
1842 } else if (strcmp("_IsArray", *name->ToCString()) == 0) {
1843 EmitIsArray(expr->arguments());
1844 } else if (strcmp("_IsRegExp", *name->ToCString()) == 0) {
1845 EmitIsRegExp(expr->arguments());
1846 } else if (strcmp("_IsConstructCall", *name->ToCString()) == 0) {
1847 EmitIsConstructCall(expr->arguments());
1848 } else if (strcmp("_ObjectEquals", *name->ToCString()) == 0) {
1849 EmitObjectEquals(expr->arguments());
1850 } else if (strcmp("_Arguments", *name->ToCString()) == 0) {
1851 EmitArguments(expr->arguments());
1852 } else if (strcmp("_ArgumentsLength", *name->ToCString()) == 0) {
1853 EmitArgumentsLength(expr->arguments());
1854 } else if (strcmp("_ClassOf", *name->ToCString()) == 0) {
1855 EmitClassOf(expr->arguments());
1856 } else if (strcmp("_Log", *name->ToCString()) == 0) {
1857 EmitLog(expr->arguments());
1858 } else if (strcmp("_RandomHeapNumber", *name->ToCString()) == 0) {
1859 EmitRandomHeapNumber(expr->arguments());
1860 } else if (strcmp("_SubString", *name->ToCString()) == 0) {
1861 EmitSubString(expr->arguments());
1862 } else if (strcmp("_RegExpExec", *name->ToCString()) == 0) {
1863 EmitRegExpExec(expr->arguments());
1864 } else if (strcmp("_ValueOf", *name->ToCString()) == 0) {
1865 EmitValueOf(expr->arguments());
1866 } else if (strcmp("_SetValueOf", *name->ToCString()) == 0) {
1867 EmitSetValueOf(expr->arguments());
1868 } else if (strcmp("_NumberToString", *name->ToCString()) == 0) {
1869 EmitNumberToString(expr->arguments());
1870 } else if (strcmp("_CharFromCode", *name->ToCString()) == 0) {
1871 EmitCharFromCode(expr->arguments());
1872 } else if (strcmp("_FastCharCodeAt", *name->ToCString()) == 0) {
1873 EmitFastCharCodeAt(expr->arguments());
1874 } else if (strcmp("_StringAdd", *name->ToCString()) == 0) {
1875 EmitStringAdd(expr->arguments());
1876 } else if (strcmp("_StringCompare", *name->ToCString()) == 0) {
1877 EmitStringCompare(expr->arguments());
1878 } else if (strcmp("_MathPow", *name->ToCString()) == 0) {
1879 EmitMathPow(expr->arguments());
1880 } else if (strcmp("_MathSin", *name->ToCString()) == 0) {
1881 EmitMathSin(expr->arguments());
1882 } else if (strcmp("_MathCos", *name->ToCString()) == 0) {
1883 EmitMathCos(expr->arguments());
1884 } else if (strcmp("_MathSqrt", *name->ToCString()) == 0) {
1885 EmitMathSqrt(expr->arguments());
1886 } else if (strcmp("_CallFunction", *name->ToCString()) == 0) {
1887 EmitCallFunction(expr->arguments());
1888 } else if (strcmp("_RegExpConstructResult", *name->ToCString()) == 0) {
1889 EmitRegExpConstructResult(expr->arguments());
1890 } else if (strcmp("_SwapElements", *name->ToCString()) == 0) {
1891 EmitSwapElements(expr->arguments());
1892 } else if (strcmp("_GetFromCache", *name->ToCString()) == 0) {
1893 EmitGetFromCache(expr->arguments());
1894 } else {
1895 UNREACHABLE();
1896 }
1897 }
1898
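For context, these names come from calls written with natives syntax in the JavaScript sources (illustration):

    // e.g. %_IsSmi(x) in a source compiled with natives syntax reaches this
    // dispatcher with expr->name() == "_IsSmi" and is routed to EmitIsSmi(args);
    // any unrecognized '_'-prefixed name falls through to UNREACHABLE().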
1899
1900 void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
1901 ASSERT(args->length() == 1);
1902
1903 VisitForValue(args->at(0), kAccumulator);
1904
1905 Label materialize_true, materialize_false;
1906 Label* if_true = NULL;
1907 Label* if_false = NULL;
1908 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
1909
1910 __ BranchOnSmi(r0, if_true);
1911 __ b(if_false);
1912
1913 Apply(context_, if_true, if_false);
1914 }
1915
1916
1917 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
1918 ASSERT(args->length() == 1);
1919
1920 VisitForValue(args->at(0), kAccumulator);
1921
1922 Label materialize_true, materialize_false;
1923 Label* if_true = NULL;
1924 Label* if_false = NULL;
1925 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
1926
1927 __ tst(r0, Operand(kSmiTagMask | 0x80000000));
1928 __ b(eq, if_true);
1929 __ b(if_false);
1930
1931 Apply(context_, if_true, if_false);
1932 }
1933
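The mask in EmitIsNonNegativeSmi above checks both conditions with a single tst (sketch):

    // kSmiTagMask | 0x80000000 tests the smi tag bit and the sign bit at once:
    // the tst sets Z only when both bits are clear, i.e. the value is a smi
    // and it is non-negative.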
1934
1935 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
1936 ASSERT(args->length() == 1);
1937
1938 VisitForValue(args->at(0), kAccumulator);
1939
1940 Label materialize_true, materialize_false;
1941 Label* if_true = NULL;
1942 Label* if_false = NULL;
1943 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
1944 __ BranchOnSmi(r0, if_false);
1945 __ LoadRoot(ip, Heap::kNullValueRootIndex);
1946 __ cmp(r0, ip);
1947 __ b(eq, if_true);
1948 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1949 // Undetectable objects behave like undefined when tested with typeof.
1950 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
1951 __ tst(r1, Operand(1 << Map::kIsUndetectable));
1952 __ b(ne, if_false);
1953 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
1954 __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
1955 __ b(lt, if_false);
1956 __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
1957 __ b(le, if_true);
1958 __ b(if_false);
1959
1960 Apply(context_, if_true, if_false);
1961 }
1962
1963
1964 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
1965 ASSERT(args->length() == 1);
1966
1967 VisitForValue(args->at(0), kAccumulator);
1968
1969 Label materialize_true, materialize_false;
1970 Label* if_true = NULL;
1971 Label* if_false = NULL;
1972 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
1973
1974 __ BranchOnSmi(r0, if_false);
1975 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
1976 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
1977 __ tst(r1, Operand(1 << Map::kIsUndetectable));
1978 __ b(ne, if_true);
1979 __ b(if_false);
1980
1981 Apply(context_, if_true, if_false);
1982 }
1983
1984
1985 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
1986 ASSERT(args->length() == 1);
1987
1988 VisitForValue(args->at(0), kAccumulator);
1989
1990 Label materialize_true, materialize_false;
1991 Label* if_true = NULL;
1992 Label* if_false = NULL;
1993 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
1994
1995 __ BranchOnSmi(r0, if_false);
1996 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
1997 __ b(eq, if_true);
1998 __ b(if_false);
1999
2000 Apply(context_, if_true, if_false);
2001 }
2002
2003
2004 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2005 ASSERT(args->length() == 1);
2006
2007 VisitForValue(args->at(0), kAccumulator);
2008
2009 Label materialize_true, materialize_false;
2010 Label* if_true = NULL;
2011 Label* if_false = NULL;
2012 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
2013
2014 __ BranchOnSmi(r0, if_false);
2015 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2016 __ b(eq, if_true);
2017 __ b(if_false);
2018
2019 Apply(context_, if_true, if_false);
2020 }
2021
2022
2023 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2024 ASSERT(args->length() == 1);
2025
2026 VisitForValue(args->at(0), kAccumulator);
2027
2028 Label materialize_true, materialize_false;
2029 Label* if_true = NULL;
2030 Label* if_false = NULL;
2031 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
2032
2033 __ BranchOnSmi(r0, if_false);
2034 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2035 __ b(eq, if_true);
2036 __ b(if_false);
2037
2038 Apply(context_, if_true, if_false);
2039 }
2040
2041
2042
2043 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2044 ASSERT(args->length() == 0);
2045
2046 Label materialize_true, materialize_false;
2047 Label* if_true = NULL;
2048 Label* if_false = NULL;
2049 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
2050
2051 // Get the frame pointer for the calling frame.
2052 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2053
2054 // Skip the arguments adaptor frame if it exists.
2055 Label check_frame_marker;
2056 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
2057 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2058 __ b(ne, &check_frame_marker);
2059 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
2060
2061 // Check the marker in the calling frame.
2062 __ bind(&check_frame_marker);
2063 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
2064 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
2065 __ b(eq, if_true);
2066 __ b(if_false);
2067
2068 Apply(context_, if_true, if_false);
2069 }
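// A schematic model of the frame walk above (sketch only); the struct layout
// and the sentinel parameters are illustrative, the real offsets live in
// frames-arm.h.
#include <cstdint>
struct SketchFrame {
  SketchFrame* caller_fp;  // what kCallerFPOffset reaches
  intptr_t context;        // kContextOffset: adaptor frames store a sentinel
  intptr_t marker;         // kMarkerOffset: construct frames store a marker
};
static bool IsConstructCall(const SketchFrame* fp,
                            intptr_t adaptor_sentinel,
                            intptr_t construct_marker) {
  const SketchFrame* caller = fp->caller_fp;
  // Skip the arguments adaptor frame if one sits between us and the caller.
  if (caller->context == adaptor_sentinel) caller = caller->caller_fp;
  return caller->marker == construct_marker;
}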
2070
2071
2072 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
2073 ASSERT(args->length() == 2);
2074
2075 // Load the two objects into registers and perform the comparison.
2076 VisitForValue(args->at(0), kStack);
2077 VisitForValue(args->at(1), kAccumulator);
2078
2079 Label materialize_true, materialize_false;
2080 Label* if_true = NULL;
2081 Label* if_false = NULL;
2082 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
2083
2084 __ pop(r1);
2085 __ cmp(r0, r1);
2086 __ b(eq, if_true);
2087 __ b(if_false);
2088
2089 Apply(context_, if_true, if_false);
2090 }
2091
2092
2093 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2094 ASSERT(args->length() == 1);
2095
 2096   // ArgumentsAccessStub expects the key in r1 and the formal
 2097   // parameter count in r0.
2098 VisitForValue(args->at(0), kAccumulator);
2099 __ mov(r1, r0);
2100 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
2101 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2102 __ CallStub(&stub);
2103 Apply(context_, r0);
2104 }
2105
2106
2107 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2108 ASSERT(args->length() == 0);
2109
2110 Label exit;
2111 // Get the number of formal parameters.
2112 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
2113
2114 // Check if the calling frame is an arguments adaptor frame.
2115 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2116 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
2117 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2118 __ b(ne, &exit);
2119
2120 // Arguments adaptor case: Read the arguments length from the
2121 // adaptor frame.
2122 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
2123
2124 __ bind(&exit);
2125 Apply(context_, r0);
2126 }
2127
2128
2129 void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2130 ASSERT(args->length() == 1);
2131 Label done, null, function, non_function_constructor;
2132
2133 VisitForValue(args->at(0), kAccumulator);
2134
2135 // If the object is a smi, we return null.
2136 __ BranchOnSmi(r0, &null);
2137
2138 // Check that the object is a JS object but take special care of JS
2139 // functions to make sure they have 'Function' as their class.
2140 __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); // Map is now in r0.
2141 __ b(lt, &null);
2142
2143 // As long as JS_FUNCTION_TYPE is the last instance type and it is
2144 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2145 // LAST_JS_OBJECT_TYPE.
2146 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2147 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2148 __ cmp(r1, Operand(JS_FUNCTION_TYPE));
2149 __ b(eq, &function);
2150
2151 // Check if the constructor in the map is a function.
2152 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
2153 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2154 __ b(ne, &non_function_constructor);
2155
2156 // r0 now contains the constructor function. Grab the
2157 // instance class name from there.
2158 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
2159 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
2160 __ b(&done);
2161
2162 // Functions have class 'Function'.
2163 __ bind(&function);
2164 __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
2165 __ jmp(&done);
2166
2167 // Objects with a non-function constructor have class 'Object'.
2168 __ bind(&non_function_constructor);
 2169   __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
2170 __ jmp(&done);
2171
2172 // Non-JS objects have class null.
2173 __ bind(&null);
2174 __ LoadRoot(r0, Heap::kNullValueRootIndex);
2175
2176 // All done.
2177 __ bind(&done);
2178
2179 Apply(context_, r0);
2180 }
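// The same class-of decision written as plain control flow (sketch only); the
// enum and the two extra parameters are stand-ins for the map and constructor
// reads performed above.
#include <string>
enum SketchKind { kSketchSmi, kSketchNonJSObject, kSketchFunction, kSketchJSObject };
static std::string ClassOf(SketchKind kind,
                           bool constructor_is_function,
                           const std::string& instance_class_name) {
  if (kind == kSketchSmi || kind == kSketchNonJSObject)
    return "null";               // stands in for the null value from the roots
  if (kind == kSketchFunction) return "Function";
  if (!constructor_is_function) return "Object";
  return instance_class_name;    // from the constructor's SharedFunctionInfo
}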
2181
2182
2183 void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2184 // Conditionally generate a log call.
2185 // Args:
2186 // 0 (literal string): The type of logging (corresponds to the flags).
2187 // This is used to determine whether or not to generate the log call.
2188 // 1 (string): Format string. Access the string at argument index 2
2189 // with '%2s' (see Logger::LogRuntime for all the formats).
2190 // 2 (array): Arguments to the format string.
2191 ASSERT_EQ(args->length(), 3);
2192 #ifdef ENABLE_LOGGING_AND_PROFILING
2193 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2194 VisitForValue(args->at(1), kStack);
2195 VisitForValue(args->at(2), kStack);
2196 __ CallRuntime(Runtime::kLog, 2);
2197 }
2198 #endif
2199 // Finally, we're expected to leave a value on the top of the stack.
2200 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2201 Apply(context_, r0);
2202 }
2203
2204
2205 void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
2206 ASSERT(args->length() == 0);
2207
2208 Label slow_allocate_heapnumber;
2209 Label heapnumber_allocated;
2210
2211 __ AllocateHeapNumber(r4, r1, r2, &slow_allocate_heapnumber);
2212 __ jmp(&heapnumber_allocated);
2213
2214 __ bind(&slow_allocate_heapnumber);
2215 // To allocate a heap number, and ensure that it is not a smi, we
 2216   // call the runtime function NumberUnaryMinus on 0, returning the double
2217 // -0.0. A new, distinct heap number is returned each time.
2218 __ mov(r0, Operand(Smi::FromInt(0)));
2219 __ push(r0);
2220 __ CallRuntime(Runtime::kNumberUnaryMinus, 1);
2221 __ mov(r4, Operand(r0));
2222
2223 __ bind(&heapnumber_allocated);
2224
2225 // Convert 32 random bits in r0 to 0.(32 random bits) in a double
2226 // by computing:
 2227   // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20); see the sketch after this function.
2228 if (CpuFeatures::IsSupported(VFP3)) {
2229 __ PrepareCallCFunction(0, r1);
2230 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
2231
2232 CpuFeatures::Scope scope(VFP3);
2233 // 0x41300000 is the top half of 1.0 x 2^20 as a double.
2234 // Create this constant using mov/orr to avoid PC relative load.
2235 __ mov(r1, Operand(0x41000000));
2236 __ orr(r1, r1, Operand(0x300000));
2237 // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
2238 __ vmov(d7, r0, r1);
2239 // Move 0x4130000000000000 to VFP.
2240 __ mov(r0, Operand(0));
2241 __ vmov(d8, r0, r1);
2242 // Subtract and store the result in the heap number.
2243 __ vsub(d7, d7, d8);
2244 __ sub(r0, r4, Operand(kHeapObjectTag));
2245 __ vstr(d7, r0, HeapNumber::kValueOffset);
2246 __ mov(r0, r4);
2247 } else {
2248 __ mov(r0, Operand(r4));
2249 __ PrepareCallCFunction(1, r1);
2250 __ CallCFunction(
2251 ExternalReference::fill_heap_number_with_random_function(), 1);
2252 }
2253
2254 Apply(context_, r0);
2255 }
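// A host-side sketch of the bit trick above (not part of this patch), assuming
// IEEE-754 doubles: 0x4130000000000000 encodes 1.0 x 2^20, so or-ing 32 random
// bits into the low half of the mantissa and subtracting 2^20 leaves
// random / 2^32, i.e. a value in [0, 1).
#include <cstdint>
#include <cstring>
static double RandomBitsToDouble(uint32_t random_bits) {
  uint64_t bits = (static_cast<uint64_t>(0x41300000) << 32) | random_bits;
  double d;
  std::memcpy(&d, &bits, sizeof(d));  // reinterpret the bit pattern as a double
  return d - 1048576.0;               // subtract 1.0 x 2^20
}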
2256
2257
2258 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
2259 // Load the arguments on the stack and call the stub.
2260 SubStringStub stub;
2261 ASSERT(args->length() == 3);
2262 VisitForValue(args->at(0), kStack);
2263 VisitForValue(args->at(1), kStack);
2264 VisitForValue(args->at(2), kStack);
2265 __ CallStub(&stub);
2266 Apply(context_, r0);
2267 }
2268
2269
2270 void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
2271 // Load the arguments on the stack and call the stub.
2272 RegExpExecStub stub;
2273 ASSERT(args->length() == 4);
2274 VisitForValue(args->at(0), kStack);
2275 VisitForValue(args->at(1), kStack);
2276 VisitForValue(args->at(2), kStack);
2277 VisitForValue(args->at(3), kStack);
2278 __ CallStub(&stub);
2279 Apply(context_, r0);
2280 }
2281
2282
2283 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
2284 ASSERT(args->length() == 1);
2285
2286 VisitForValue(args->at(0), kAccumulator); // Load the object.
2287
2288 Label done;
2289 // If the object is a smi return the object.
2290 __ BranchOnSmi(r0, &done);
2291 // If the object is not a value type, return the object.
2292 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
2293 __ b(ne, &done);
2294 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
2295
2296 __ bind(&done);
2297 Apply(context_, r0);
2298 }
2299
2300
2301 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2302 // Load the arguments on the stack and call the runtime function.
2303 ASSERT(args->length() == 2);
2304 VisitForValue(args->at(0), kStack);
2305 VisitForValue(args->at(1), kStack);
2306 __ CallRuntime(Runtime::kMath_pow, 2);
2307 Apply(context_, r0);
2308 }
2309
2310
2311 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2312 ASSERT(args->length() == 2);
2313
2314 VisitForValue(args->at(0), kStack); // Load the object.
2315 VisitForValue(args->at(1), kAccumulator); // Load the value.
2316 __ pop(r1); // r0 = value. r1 = object.
2317
2318 Label done;
2319 // If the object is a smi, return the value.
2320 __ BranchOnSmi(r1, &done);
2321
2322 // If the object is not a value type, return the value.
2323 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
2324 __ b(ne, &done);
2325
2326 // Store the value.
2327 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
2328 // Update the write barrier. Save the value as it will be
2329 // overwritten by the write barrier code and is needed afterward.
2330 __ mov(r2, Operand(JSValue::kValueOffset - kHeapObjectTag));
2331 __ RecordWrite(r1, r2, r3);
2332
2333 __ bind(&done);
2334 Apply(context_, r0);
2335 }
2336
2337
2338 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
2339 ASSERT_EQ(args->length(), 1);
2340
2341 // Load the argument on the stack and call the stub.
2342 VisitForValue(args->at(0), kStack);
2343
2344 NumberToStringStub stub;
2345 __ CallStub(&stub);
2346 Apply(context_, r0);
2347 }
2348
2349
2350 void FullCodeGenerator::EmitCharFromCode(ZoneList<Expression*>* args) {
2351 ASSERT(args->length() == 1);
2352
2353 VisitForValue(args->at(0), kAccumulator);
2354
2355 Label slow_case, done;
2356 // Fast case of Heap::LookupSingleCharacterStringFromCode.
2357 ASSERT(kSmiTag == 0);
2358 ASSERT(kSmiShiftSize == 0);
2359 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
2360 __ tst(r0, Operand(kSmiTagMask |
2361 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
2362 __ b(nz, &slow_case);
2363 __ mov(r1, Operand(Factory::single_character_string_cache()));
2364 ASSERT(kSmiTag == 0);
2365 ASSERT(kSmiTagSize == 1);
2366 ASSERT(kSmiShiftSize == 0);
 2367   // At this point r0 holds the smi-tagged ASCII char code (see the sketch after this function).
2368 __ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
2369 __ ldr(r1, MemOperand(r1, FixedArray::kHeaderSize - kHeapObjectTag));
2370 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2371 __ cmp(r1, r2);
2372 __ b(eq, &slow_case);
2373 __ mov(r0, r1);
2374 __ b(&done);
2375
2376 __ bind(&slow_case);
2377 __ push(r0);
2378 __ CallRuntime(Runtime::kCharFromCode, 1);
2379
2380 __ bind(&done);
2381 Apply(context_, r0);
2382 }
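// A schematic version of the fast path above (sketch only): one mask test
// proves the word is a smi-encoded ASCII code, then a premade per-character
// string cache is probed. The vector-of-strings cache (128 entries, one per
// ASCII code) and empty-string-as-undefined are stand-ins for the fixed array.
#include <cstdint>
#include <string>
#include <vector>
static bool FastCharFromCode(uint32_t word,
                             const std::vector<std::string>& cache,
                             std::string* result) {
  const uint32_t kTagMask = 1;
  const uint32_t kMaxAsciiCharCode = 0x7f;
  // Rejects non-smis and codes above 0x7f with a single AND, like the tst.
  if ((word & (kTagMask | (~kMaxAsciiCharCode << 1))) != 0) return false;
  const std::string& entry = cache[word >> 1];  // un-smi-tag the index
  if (entry.empty()) return false;              // "undefined" entry: slow case
  *result = entry;
  return true;
}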
2383
2384
2385 void FullCodeGenerator::EmitFastCharCodeAt(ZoneList<Expression*>* args) {
2386 // TODO(fsc): Port the complete implementation from the classic back-end.
2387 // Move the undefined value into the result register, which will
2388 // trigger the slow case.
2389 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2390 Apply(context_, r0);
2391 }
2392
2393 void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
2394 ASSERT_EQ(2, args->length());
2395
2396 VisitForValue(args->at(0), kStack);
2397 VisitForValue(args->at(1), kStack);
2398
2399 StringAddStub stub(NO_STRING_ADD_FLAGS);
2400 __ CallStub(&stub);
2401 Apply(context_, r0);
2402 }
2403
2404
2405 void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
2406 ASSERT_EQ(2, args->length());
2407
2408 VisitForValue(args->at(0), kStack);
2409 VisitForValue(args->at(1), kStack);
2410
2411 StringCompareStub stub;
2412 __ CallStub(&stub);
2413 Apply(context_, r0);
2414 }
2415
2416
2417 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
2418 // Load the argument on the stack and call the runtime.
2419 ASSERT(args->length() == 1);
2420 VisitForValue(args->at(0), kStack);
2421 __ CallRuntime(Runtime::kMath_sin, 1);
2422 Apply(context_, r0);
2423 }
2424
2425
2426 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
2427 // Load the argument on the stack and call the runtime.
2428 ASSERT(args->length() == 1);
2429 VisitForValue(args->at(0), kStack);
2430 __ CallRuntime(Runtime::kMath_cos, 1);
2431 Apply(context_, r0);
2432 }
2433
2434
2435 void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
2436 // Load the argument on the stack and call the runtime function.
2437 ASSERT(args->length() == 1);
2438 VisitForValue(args->at(0), kStack);
2439 __ CallRuntime(Runtime::kMath_sqrt, 1);
2440 Apply(context_, r0);
2441 }
2442
2443
2444 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
2445 ASSERT(args->length() >= 2);
2446
2447 int arg_count = args->length() - 2; // For receiver and function.
2448 VisitForValue(args->at(0), kStack); // Receiver.
2449 for (int i = 0; i < arg_count; i++) {
2450 VisitForValue(args->at(i + 1), kStack);
2451 }
2452 VisitForValue(args->at(arg_count + 1), kAccumulator); // Function.
2453
 2454   // InvokeFunction requires the function in r1. Move it there.
2455 if (!result_register().is(r1)) __ mov(r1, result_register());
2456 ParameterCount count(arg_count);
2457 __ InvokeFunction(r1, count, CALL_FUNCTION);
2458 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2459 Apply(context_, r0);
2460 }
2461
2462
2463 void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
2464 ASSERT(args->length() == 3);
2465 VisitForValue(args->at(0), kStack);
2466 VisitForValue(args->at(1), kStack);
2467 VisitForValue(args->at(2), kStack);
2468 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
2469 Apply(context_, r0);
2470 }
2471
2472
2473 void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
2474 ASSERT(args->length() == 3);
2475 VisitForValue(args->at(0), kStack);
2476 VisitForValue(args->at(1), kStack);
2477 VisitForValue(args->at(2), kStack);
2478 __ CallRuntime(Runtime::kSwapElements, 3);
2479 Apply(context_, r0);
2480 }
2481
2482
2483 void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
2484 ASSERT_EQ(2, args->length());
2485
2486 ASSERT_NE(NULL, args->at(0)->AsLiteral());
2487 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
2488
2489 Handle<FixedArray> jsfunction_result_caches(
2490 Top::global_context()->jsfunction_result_caches());
2491 if (jsfunction_result_caches->length() <= cache_id) {
2492 __ Abort("Attempt to use undefined cache.");
2493 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2494 Apply(context_, r0);
2495 return;
2496 }
2497
2498 VisitForValue(args->at(1), kAccumulator);
2499
2500 Register key = r0;
2501 Register cache = r1;
2502 __ ldr(cache, CodeGenerator::ContextOperand(cp, Context::GLOBAL_INDEX));
2503 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
2504 __ ldr(cache,
2505 CodeGenerator::ContextOperand(
2506 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
2507 __ ldr(cache,
2508 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
2509
2510
2511 Label done, not_found;
 2512   // The cache finger offset is stored as a smi (see the probe sketch after this function).
2513 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
2514 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
2515 // r2 now holds finger offset as a smi.
2516 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2517 // r3 now points to the start of fixed array elements.
2518 __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
2519 // Note side effect of PreIndex: r3 now points to the key of the pair.
2520 __ cmp(key, r2);
2521 __ b(ne, &not_found);
2522
2523 __ ldr(r0, MemOperand(r3, kPointerSize));
2524 __ b(&done);
2525
2526 __ bind(&not_found);
2527 // Call runtime to perform the lookup.
2528 __ Push(cache, key);
2529 __ CallRuntime(Runtime::kGetFromCache, 2);
2530
2531 __ bind(&done);
2532 Apply(context_, r0);
2533 }
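// A simplified model of the probe above (sketch only): the cache is a flat
// array of key/value pairs plus a "finger" index at the most recent hit, and
// the generated code only checks the fingered pair before punting to the
// runtime.
#include <cstddef>
#include <cstdint>
#include <vector>
struct SketchResultCache {
  size_t finger;                  // index of the key of the last hit
  std::vector<intptr_t> entries;  // [key0, value0, key1, value1, ...]
};
static bool ProbeResultCache(const SketchResultCache& cache,
                             intptr_t key, intptr_t* value) {
  if (cache.entries[cache.finger] != key) return false;  // miss: call runtime
  *value = cache.entries[cache.finger + 1];              // value follows key
  return true;
}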
2534
2535
1379 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 2536 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
2537 Handle<String> name = expr->name();
2538 if (name->length() > 0 && name->Get(0) == '_') {
2539 Comment cmnt(masm_, "[ InlineRuntimeCall");
2540 EmitInlineRuntimeCall(expr);
2541 return;
2542 }
2543
1380 Comment cmnt(masm_, "[ CallRuntime"); 2544 Comment cmnt(masm_, "[ CallRuntime");
1381 ZoneList<Expression*>* args = expr->arguments(); 2545 ZoneList<Expression*>* args = expr->arguments();
1382 2546
1383 if (expr->is_jsruntime()) { 2547 if (expr->is_jsruntime()) {
1384 // Prepare for calling JS runtime function. 2548 // Prepare for calling JS runtime function.
1385 __ ldr(r0, CodeGenerator::GlobalObject()); 2549 __ ldr(r0, CodeGenerator::GlobalObject());
1386 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset)); 2550 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
1387 __ push(r0); 2551 __ push(r0);
1388 } 2552 }
1389 2553
(...skipping 14 matching lines...)
1404 } else { 2568 } else {
1405 // Call the C runtime function. 2569 // Call the C runtime function.
1406 __ CallRuntime(expr->function(), arg_count); 2570 __ CallRuntime(expr->function(), arg_count);
1407 } 2571 }
1408 Apply(context_, r0); 2572 Apply(context_, r0);
1409 } 2573 }
1410 2574
1411 2575
1412 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 2576 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
1413 switch (expr->op()) { 2577 switch (expr->op()) {
2578 case Token::DELETE: {
2579 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2580 Property* prop = expr->expression()->AsProperty();
2581 Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
2582 if (prop == NULL && var == NULL) {
2583 // Result of deleting non-property, non-variable reference is true.
2584 // The subexpression may have side effects.
2585 VisitForEffect(expr->expression());
2586 Apply(context_, true);
2587 } else if (var != NULL &&
2588 !var->is_global() &&
2589 var->slot() != NULL &&
2590 var->slot()->type() != Slot::LOOKUP) {
2591 // Result of deleting non-global, non-dynamic variables is false.
2592 // The subexpression does not have side effects.
2593 Apply(context_, false);
2594 } else {
2595 // Property or variable reference. Call the delete builtin with
2596 // object and property name as arguments.
2597 if (prop != NULL) {
2598 VisitForValue(prop->obj(), kStack);
2599 VisitForValue(prop->key(), kStack);
2600 } else if (var->is_global()) {
2601 __ ldr(r1, CodeGenerator::GlobalObject());
2602 __ mov(r0, Operand(var->name()));
2603 __ Push(r1, r0);
2604 } else {
2605 // Non-global variable. Call the runtime to look up the context
2606 // where the variable was introduced.
2607 __ push(context_register());
2608 __ mov(r2, Operand(var->name()));
2609 __ push(r2);
2610 __ CallRuntime(Runtime::kLookupContext, 2);
2611 __ push(r0);
2612 __ mov(r2, Operand(var->name()));
2613 __ push(r2);
2614 }
2615 __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
2616 Apply(context_, r0);
2617 }
2618 break;
2619 }
2620
1414 case Token::VOID: { 2621 case Token::VOID: {
1415 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 2622 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
1416 VisitForEffect(expr->expression()); 2623 VisitForEffect(expr->expression());
1417 switch (context_) { 2624 switch (context_) {
1418 case Expression::kUninitialized: 2625 case Expression::kUninitialized:
1419 UNREACHABLE(); 2626 UNREACHABLE();
1420 break; 2627 break;
1421 case Expression::kEffect: 2628 case Expression::kEffect:
1422 break; 2629 break;
1423 case Expression::kValue: 2630 case Expression::kValue:
(...skipping 20 matching lines...)
1444 case Expression::kTest: 2651 case Expression::kTest:
1445 case Expression::kValueTest: 2652 case Expression::kValueTest:
1446 __ jmp(false_label_); 2653 __ jmp(false_label_);
1447 break; 2654 break;
1448 } 2655 }
1449 break; 2656 break;
1450 } 2657 }
1451 2658
1452 case Token::NOT: { 2659 case Token::NOT: {
1453 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 2660 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
1454 Label materialize_true, materialize_false, done; 2661 Label materialize_true, materialize_false;
1455 // Initially assume a pure test context. Notice that the labels are 2662 Label* if_true = NULL;
1456 // swapped. 2663 Label* if_false = NULL;
1457 Label* if_true = false_label_; 2664
1458 Label* if_false = true_label_; 2665 // Notice that the labels are swapped.
1459 switch (context_) { 2666 PrepareTest(&materialize_true, &materialize_false, &if_false, &if_true);
1460 case Expression::kUninitialized: 2667
1461 UNREACHABLE();
1462 break;
1463 case Expression::kEffect:
1464 if_true = &done;
1465 if_false = &done;
1466 break;
1467 case Expression::kValue:
1468 if_true = &materialize_false;
1469 if_false = &materialize_true;
1470 break;
1471 case Expression::kTest:
1472 break;
1473 case Expression::kValueTest:
1474 if_false = &materialize_true;
1475 break;
1476 case Expression::kTestValue:
1477 if_true = &materialize_false;
1478 break;
1479 }
1480 VisitForControl(expr->expression(), if_true, if_false); 2668 VisitForControl(expr->expression(), if_true, if_false);
2669
1481 Apply(context_, if_false, if_true); // Labels swapped. 2670 Apply(context_, if_false, if_true); // Labels swapped.
1482 break; 2671 break;
1483 } 2672 }
1484 2673
1485 case Token::TYPEOF: { 2674 case Token::TYPEOF: {
1486 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 2675 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
1487 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 2676 VariableProxy* proxy = expr->expression()->AsVariableProxy();
1488 if (proxy != NULL && 2677 if (proxy != NULL &&
1489 !proxy->var()->is_this() && 2678 !proxy->var()->is_this() &&
1490 proxy->var()->is_global()) { 2679 proxy->var()->is_global()) {
1491 Comment cmnt(masm_, "Global variable"); 2680 Comment cmnt(masm_, "Global variable");
1492 __ ldr(r0, CodeGenerator::GlobalObject()); 2681 __ ldr(r0, CodeGenerator::GlobalObject());
1493 __ mov(r2, Operand(proxy->name())); 2682 __ mov(r2, Operand(proxy->name()));
1494 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); 2683 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
1495 // Use a regular load, not a contextual load, to avoid a reference 2684 // Use a regular load, not a contextual load, to avoid a reference
1496 // error. 2685 // error.
1497 __ Call(ic, RelocInfo::CODE_TARGET); 2686 __ Call(ic, RelocInfo::CODE_TARGET);
1498 __ push(r0); 2687 __ push(r0);
1499 } else if (proxy != NULL && 2688 } else if (proxy != NULL &&
1500 proxy->var()->slot() != NULL && 2689 proxy->var()->slot() != NULL &&
1501 proxy->var()->slot()->type() == Slot::LOOKUP) { 2690 proxy->var()->slot()->type() == Slot::LOOKUP) {
1502 __ mov(r0, Operand(proxy->name())); 2691 __ mov(r0, Operand(proxy->name()));
1503 __ stm(db_w, sp, cp.bit() | r0.bit()); 2692 __ Push(cp, r0);
1504 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 2693 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
1505 __ push(r0); 2694 __ push(r0);
1506 } else { 2695 } else {
1507 // This expression cannot throw a reference error at the top level. 2696 // This expression cannot throw a reference error at the top level.
1508 VisitForValue(expr->expression(), kStack); 2697 VisitForValue(expr->expression(), kStack);
1509 } 2698 }
1510 2699
1511 __ CallRuntime(Runtime::kTypeof, 1); 2700 __ CallRuntime(Runtime::kTypeof, 1);
1512 Apply(context_, r0); 2701 Apply(context_, r0);
1513 break; 2702 break;
(...skipping 30 matching lines...)
1544 Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)"); 2733 Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
1545 bool overwrite = 2734 bool overwrite =
1546 (expr->expression()->AsBinaryOperation() != NULL && 2735 (expr->expression()->AsBinaryOperation() != NULL &&
1547 expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed()); 2736 expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
1548 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite); 2737 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
1549 // GenericUnaryOpStub expects the argument to be in the 2738 // GenericUnaryOpStub expects the argument to be in the
1550 // accumulator register r0. 2739 // accumulator register r0.
1551 VisitForValue(expr->expression(), kAccumulator); 2740 VisitForValue(expr->expression(), kAccumulator);
1552 // Avoid calling the stub for Smis. 2741 // Avoid calling the stub for Smis.
1553 Label smi, done; 2742 Label smi, done;
1554 __ tst(result_register(), Operand(kSmiTagMask)); 2743 __ BranchOnSmi(result_register(), &smi);
1555 __ b(eq, &smi);
1556 // Non-smi: call stub leaving result in accumulator register. 2744 // Non-smi: call stub leaving result in accumulator register.
1557 __ CallStub(&stub); 2745 __ CallStub(&stub);
1558 __ b(&done); 2746 __ b(&done);
1559 // Perform operation directly on Smis. 2747 // Perform operation directly on Smis.
1560 __ bind(&smi); 2748 __ bind(&smi);
1561 __ mvn(result_register(), Operand(result_register())); 2749 __ mvn(result_register(), Operand(result_register()));
1562 // Bit-clear inverted smi-tag. 2750 // Bit-clear inverted smi-tag.
1563 __ bic(result_register(), result_register(), Operand(kSmiTagMask)); 2751 __ bic(result_register(), result_register(), Operand(kSmiTagMask));
1564 __ bind(&done); 2752 __ bind(&done);
1565 Apply(context_, result_register()); 2753 Apply(context_, result_register());
1566 break; 2754 break;
1567 } 2755 }
1568 2756
1569 default: 2757 default:
1570 UNREACHABLE(); 2758 UNREACHABLE();
1571 } 2759 }
1572 } 2760 }
1573 2761
1574 2762
1575 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 2763 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
1576 Comment cmnt(masm_, "[ CountOperation"); 2764 Comment cmnt(masm_, "[ CountOperation");
2765 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
2766 // as the left-hand side.
2767 if (!expr->expression()->IsValidLeftHandSide()) {
2768 VisitForEffect(expr->expression());
2769 return;
2770 }
1577 2771
1578 // Expression can only be a property, a global or a (parameter or local) 2772 // Expression can only be a property, a global or a (parameter or local)
1579 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY. 2773 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1580 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2774 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1581 LhsKind assign_type = VARIABLE; 2775 LhsKind assign_type = VARIABLE;
1582 Property* prop = expr->expression()->AsProperty(); 2776 Property* prop = expr->expression()->AsProperty();
1583 // In case of a property we use the uninitialized expression context 2777 // In case of a property we use the uninitialized expression context
1584 // of the key to detect a named property. 2778 // of the key to detect a named property.
1585 if (prop != NULL) { 2779 if (prop != NULL) {
1586 assign_type = 2780 assign_type =
(...skipping 23 matching lines...)
1610 VisitForValue(prop->obj(), kStack); 2804 VisitForValue(prop->obj(), kStack);
1611 VisitForValue(prop->key(), kAccumulator); 2805 VisitForValue(prop->key(), kAccumulator);
1612 __ ldr(r1, MemOperand(sp, 0)); 2806 __ ldr(r1, MemOperand(sp, 0));
1613 __ push(r0); 2807 __ push(r0);
1614 EmitKeyedPropertyLoad(prop); 2808 EmitKeyedPropertyLoad(prop);
1615 } 2809 }
1616 } 2810 }
1617 2811
1618 // Call ToNumber only if operand is not a smi. 2812 // Call ToNumber only if operand is not a smi.
1619 Label no_conversion; 2813 Label no_conversion;
1620 __ tst(r0, Operand(kSmiTagMask)); 2814 __ BranchOnSmi(r0, &no_conversion);
1621 __ b(eq, &no_conversion);
1622 __ push(r0); 2815 __ push(r0);
1623 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS); 2816 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
1624 __ bind(&no_conversion); 2817 __ bind(&no_conversion);
1625 2818
1626 // Save result for postfix expressions. 2819 // Save result for postfix expressions.
1627 if (expr->is_postfix()) { 2820 if (expr->is_postfix()) {
1628 switch (context_) { 2821 switch (context_) {
1629 case Expression::kUninitialized: 2822 case Expression::kUninitialized:
1630 UNREACHABLE(); 2823 UNREACHABLE();
1631 case Expression::kEffect: 2824 case Expression::kEffect:
(...skipping 23 matching lines...)
1655 2848
1656 2849
1657 // Inline smi case if we are in a loop. 2850 // Inline smi case if we are in a loop.
1658 Label stub_call, done; 2851 Label stub_call, done;
1659 int count_value = expr->op() == Token::INC ? 1 : -1; 2852 int count_value = expr->op() == Token::INC ? 1 : -1;
1660 if (loop_depth() > 0) { 2853 if (loop_depth() > 0) {
1661 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); 2854 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
1662 __ b(vs, &stub_call); 2855 __ b(vs, &stub_call);
1663 // We could eliminate this smi check if we split the code at 2856 // We could eliminate this smi check if we split the code at
1664 // the first smi check before calling ToNumber. 2857 // the first smi check before calling ToNumber.
1665 __ tst(r0, Operand(kSmiTagMask)); 2858 __ BranchOnSmi(r0, &done);
1666 __ b(eq, &done);
1667 __ bind(&stub_call); 2859 __ bind(&stub_call);
1668 // Call stub. Undo operation first. 2860 // Call stub. Undo operation first.
1669 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 2861 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
1670 } 2862 }
1671 __ mov(r1, Operand(Smi::FromInt(count_value))); 2863 __ mov(r1, Operand(Smi::FromInt(count_value)));
1672 GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE, r1, r0); 2864 GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE, r1, r0);
1673 __ CallStub(&stub); 2865 __ CallStub(&stub);
1674 __ bind(&done); 2866 __ bind(&done);
1675 2867
1676 // Store the value returned in r0. 2868 // Store the value returned in r0.
(...skipping 74 matching lines...)
1751 VisitForValue(expr->right(), kAccumulator); 2943 VisitForValue(expr->right(), kAccumulator);
1752 EmitBinaryOp(expr->op(), context_); 2944 EmitBinaryOp(expr->op(), context_);
1753 break; 2945 break;
1754 2946
1755 default: 2947 default:
1756 UNREACHABLE(); 2948 UNREACHABLE();
1757 } 2949 }
1758 } 2950 }
1759 2951
1760 2952
2953 void FullCodeGenerator::EmitNullCompare(bool strict,
2954 Register obj,
2955 Register null_const,
2956 Label* if_true,
2957 Label* if_false,
2958 Register scratch) {
2959 __ cmp(obj, null_const);
2960 if (strict) {
2961 __ b(eq, if_true);
2962 } else {
2963 __ b(eq, if_true);
2964 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2965 __ cmp(obj, ip);
2966 __ b(eq, if_true);
2967 __ BranchOnSmi(obj, if_false);
2968 // It can be an undetectable object.
2969 __ ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
2970 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
2971 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
2972 __ b(ne, if_true);
2973 }
2974 __ jmp(if_false);
2975 }
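// The same comparison written as a predicate (sketch only); the boolean
// parameters are stand-ins for the root compares, smi check and map bit test
// emitted above.
static bool CompareWithNull(bool strict, bool is_null, bool is_undefined,
                            bool is_smi, bool is_undetectable_object) {
  if (is_null) return true;        // both == and === match null itself
  if (strict) return false;        // === matches nothing else
  if (is_undefined) return true;   // null == undefined
  if (is_smi) return false;        // numbers are never == null
  return is_undetectable_object;   // undetectable objects compare like undefined
}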
2976
2977
1761 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 2978 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
1762 Comment cmnt(masm_, "[ CompareOperation"); 2979 Comment cmnt(masm_, "[ CompareOperation");
1763 2980
1764 // Always perform the comparison for its control flow. Pack the result 2981 // Always perform the comparison for its control flow. Pack the result
1765 // into the expression's context after the comparison is performed. 2982 // into the expression's context after the comparison is performed.
1766 Label materialize_true, materialize_false, done; 2983
1767 // Initially assume we are in a test context. 2984 Label materialize_true, materialize_false;
1768 Label* if_true = true_label_; 2985 Label* if_true = NULL;
1769 Label* if_false = false_label_; 2986 Label* if_false = NULL;
1770 switch (context_) { 2987 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);
1771 case Expression::kUninitialized:
1772 UNREACHABLE();
1773 break;
1774 case Expression::kEffect:
1775 if_true = &done;
1776 if_false = &done;
1777 break;
1778 case Expression::kValue:
1779 if_true = &materialize_true;
1780 if_false = &materialize_false;
1781 break;
1782 case Expression::kTest:
1783 break;
1784 case Expression::kValueTest:
1785 if_true = &materialize_true;
1786 break;
1787 case Expression::kTestValue:
1788 if_false = &materialize_false;
1789 break;
1790 }
1791 2988
1792 VisitForValue(expr->left(), kStack); 2989 VisitForValue(expr->left(), kStack);
1793 switch (expr->op()) { 2990 switch (expr->op()) {
1794 case Token::IN: 2991 case Token::IN:
1795 VisitForValue(expr->right(), kStack); 2992 VisitForValue(expr->right(), kStack);
1796 __ InvokeBuiltin(Builtins::IN, CALL_JS); 2993 __ InvokeBuiltin(Builtins::IN, CALL_JS);
1797 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 2994 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1798 __ cmp(r0, ip); 2995 __ cmp(r0, ip);
1799 __ b(eq, if_true); 2996 __ b(eq, if_true);
1800 __ jmp(if_false); 2997 __ jmp(if_false);
(...skipping 10 matching lines...) Expand all
1811 } 3008 }
1812 3009
1813 default: { 3010 default: {
1814 VisitForValue(expr->right(), kAccumulator); 3011 VisitForValue(expr->right(), kAccumulator);
1815 Condition cc = eq; 3012 Condition cc = eq;
1816 bool strict = false; 3013 bool strict = false;
1817 switch (expr->op()) { 3014 switch (expr->op()) {
1818 case Token::EQ_STRICT: 3015 case Token::EQ_STRICT:
1819 strict = true; 3016 strict = true;
1820 // Fall through 3017 // Fall through
1821 case Token::EQ: 3018 case Token::EQ: {
1822 cc = eq; 3019 cc = eq;
1823 __ pop(r1); 3020 __ pop(r1);
3021 // If either operand is constant null we do a fast compare
3022 // against null.
3023 Literal* right_literal = expr->right()->AsLiteral();
3024 Literal* left_literal = expr->left()->AsLiteral();
3025 if (right_literal != NULL && right_literal->handle()->IsNull()) {
3026 EmitNullCompare(strict, r1, r0, if_true, if_false, r2);
3027 Apply(context_, if_true, if_false);
3028 return;
3029 } else if (left_literal != NULL && left_literal->handle()->IsNull()) {
3030 EmitNullCompare(strict, r0, r1, if_true, if_false, r2);
3031 Apply(context_, if_true, if_false);
3032 return;
3033 }
1824 break; 3034 break;
3035 }
1825 case Token::LT: 3036 case Token::LT:
1826 cc = lt; 3037 cc = lt;
1827 __ pop(r1); 3038 __ pop(r1);
1828 break; 3039 break;
1829 case Token::GT: 3040 case Token::GT:
1830 // Reverse left and right sides to obtain ECMA-262 conversion order. 3041 // Reverse left and right sides to obtain ECMA-262 conversion order.
1831 cc = lt; 3042 cc = lt;
1832 __ mov(r1, result_register()); 3043 __ mov(r1, result_register());
1833 __ pop(r0); 3044 __ pop(r0);
1834 break; 3045 break;
(...skipping 10 matching lines...) Expand all
1845 case Token::IN: 3056 case Token::IN:
1846 case Token::INSTANCEOF: 3057 case Token::INSTANCEOF:
1847 default: 3058 default:
1848 UNREACHABLE(); 3059 UNREACHABLE();
1849 } 3060 }
1850 3061
1851 // The comparison stub expects the smi vs. smi case to be handled 3062 // The comparison stub expects the smi vs. smi case to be handled
1852 // before it is called. 3063 // before it is called.
1853 Label slow_case; 3064 Label slow_case;
1854 __ orr(r2, r0, Operand(r1)); 3065 __ orr(r2, r0, Operand(r1));
1855 __ tst(r2, Operand(kSmiTagMask)); 3066 __ BranchOnNotSmi(r2, &slow_case);
1856 __ b(ne, &slow_case);
1857 __ cmp(r1, r0); 3067 __ cmp(r1, r0);
1858 __ b(cc, if_true); 3068 __ b(cc, if_true);
1859 __ jmp(if_false); 3069 __ jmp(if_false);
1860 3070
1861 __ bind(&slow_case); 3071 __ bind(&slow_case);
1862 CompareStub stub(cc, strict); 3072 CompareStub stub(cc, strict);
1863 __ CallStub(&stub); 3073 __ CallStub(&stub);
1864 __ cmp(r0, Operand(0)); 3074 __ cmp(r0, Operand(0));
1865 __ b(cc, if_true); 3075 __ b(cc, if_true);
1866 __ jmp(if_false); 3076 __ jmp(if_false);
(...skipping 55 matching lines...)
1922 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. 3132 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
1923 __ add(pc, r1, Operand(masm_->CodeObject())); 3133 __ add(pc, r1, Operand(masm_->CodeObject()));
1924 } 3134 }
1925 3135
1926 3136
1927 #undef __ 3137 #undef __
1928 3138
1929 } } // namespace v8::internal 3139 } } // namespace v8::internal
1930 3140
1931 #endif // V8_TARGET_ARCH_ARM 3141 #endif // V8_TARGET_ARCH_ARM