Chromium Code Reviews

Side by Side Diff: src/ia32/codegen-ia32.cc

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 136 matching lines...)
147 masm_(masm), 147 masm_(masm),
148 info_(NULL), 148 info_(NULL),
149 frame_(NULL), 149 frame_(NULL),
150 allocator_(NULL), 150 allocator_(NULL),
151 state_(NULL), 151 state_(NULL),
152 loop_nesting_(0), 152 loop_nesting_(0),
153 in_safe_int32_mode_(false), 153 in_safe_int32_mode_(false),
154 safe_int32_mode_enabled_(true), 154 safe_int32_mode_enabled_(true),
155 function_return_is_shadowed_(false), 155 function_return_is_shadowed_(false),
156 in_spilled_code_(false), 156 in_spilled_code_(false),
157 jit_cookie_((FLAG_mask_constants_with_cookie) ? V8::RandomPrivate() : 0) { 157 jit_cookie_((FLAG_mask_constants_with_cookie) ?
158 V8::RandomPrivate(Isolate::Current()) : 0) {
158 } 159 }
159 160
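
Note: the jit_cookie_ change above only threads the current isolate into the
RNG; the cookie itself exists so immediates in generated code can be masked.
A minimal, hypothetical sketch of the mask/unmask round trip it enables (the
emission side is not part of this diff):

    #include <cassert>
    #include <cstdint>

    // Illustrative only: XOR-masking an attacker-chosen constant so it
    // never appears verbatim in JIT-generated code.
    static int32_t MaskConstant(int32_t c, int32_t cookie) { return c ^ cookie; }

    int main() {
      const int32_t cookie = 0x5A5AA5A5;            // stands in for RandomPrivate()
      const int32_t imm = 0x41414141;               // attacker-chosen constant
      int32_t emitted = MaskConstant(imm, cookie);  // what would land in code
      assert(MaskConstant(emitted, cookie) == imm); // a runtime XOR restores it
      return 0;
    }
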
160 161
161 // Calling conventions: 162 // Calling conventions:
162 // ebp: caller's frame pointer 163 // ebp: caller's frame pointer
163 // esp: stack pointer 164 // esp: stack pointer
164 // edi: called JS function 165 // edi: called JS function
165 // esi: callee's context 166 // esi: callee's context
166 167
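
Note: a hedged helper for what these conventions mean for parameter access,
assuming the usual push-ebp prologue, 4-byte stack slots, and arguments pushed
receiver-first so the last parameter sits nearest the return address. The
offsets are illustrative, not lifted from this file:

    #include <cassert>

    // Byte offset of parameter i (0-based, n parameters) from ebp:
    // [ebp + 4] holds the return address, [ebp + 8] the last argument.
    static int ParameterOffset(int i, int n) {
      return 8 + (n - 1 - i) * 4;
    }

    int main() {
      assert(ParameterOffset(1, 2) == 8);   // last parameter, nearest ebp
      assert(ParameterOffset(0, 2) == 12);  // earlier parameters sit deeper
      return 0;
    }
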
167 void CodeGenerator::Generate(CompilationInfo* info) { 168 void CodeGenerator::Generate(CompilationInfo* info) {
168 // Record the position for debugging purposes. 169 // Record the position for debugging purposes.
169 CodeForFunctionPosition(info->function()); 170 CodeForFunctionPosition(info->function());
170 Comment cmnt(masm_, "[ function compiled by virtual frame code generator"); 171 Comment cmnt(masm_, "[ function compiled by virtual frame code generator");
171 172
172 // Initialize state. 173 // Initialize state.
173 info_ = info; 174 info_ = info;
174 ASSERT(allocator_ == NULL); 175 ASSERT(allocator_ == NULL);
175 RegisterAllocator register_allocator(this); 176 RegisterAllocator register_allocator(this);
176 allocator_ = &register_allocator; 177 allocator_ = &register_allocator;
177 ASSERT(frame_ == NULL); 178 ASSERT(frame_ == NULL);
178 frame_ = new VirtualFrame(); 179 frame_ = new VirtualFrame();
179 set_in_spilled_code(false); 180 set_in_spilled_code(false);
180 181
181 // Adjust for function-level loop nesting. 182 // Adjust for function-level loop nesting.
182 ASSERT_EQ(0, loop_nesting_); 183 ASSERT_EQ(0, loop_nesting_);
183 loop_nesting_ = info->is_in_loop() ? 1 : 0; 184 loop_nesting_ = info->is_in_loop() ? 1 : 0;
184 185
185 JumpTarget::set_compiling_deferred_code(false); 186 Isolate::Current()->set_jump_target_compiling_deferred_code(false);
186 187
187 { 188 {
188 CodeGenState state(this); 189 CodeGenState state(this);
189 190
190 // Entry: 191 // Entry:
191 // Stack: receiver, arguments, return address. 192 // Stack: receiver, arguments, return address.
192 // ebp: caller's frame pointer 193 // ebp: caller's frame pointer
193 // esp: stack pointer 194 // esp: stack pointer
194 // edi: called JS function 195 // edi: called JS function
195 // esi: callee's context 196 // esi: callee's context
(...skipping 85 matching lines...)
281 282
282 // Store the arguments object. This must happen after context 283 // Store the arguments object. This must happen after context
283 // initialization because the arguments object may be stored in 284 // initialization because the arguments object may be stored in
284 // the context. 285 // the context.
285 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) { 286 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
286 StoreArgumentsObject(true); 287 StoreArgumentsObject(true);
287 } 288 }
288 289
289 // Initialize ThisFunction reference if present. 290 // Initialize ThisFunction reference if present.
290 if (scope()->is_function_scope() && scope()->function() != NULL) { 291 if (scope()->is_function_scope() && scope()->function() != NULL) {
291 frame_->Push(Factory::the_hole_value()); 292 frame_->Push(FACTORY->the_hole_value());
292 StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT); 293 StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
293 } 294 }
294 295
295 296
296 // Initialize the function return target after the locals are set 297 // Initialize the function return target after the locals are set
297 // up, because it needs the expected frame height from the frame. 298 // up, because it needs the expected frame height from the frame.
298 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); 299 function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
299 function_return_is_shadowed_ = false; 300 function_return_is_shadowed_ = false;
300 301
301 // Generate code to 'execute' declarations and initialize functions 302 // Generate code to 'execute' declarations and initialize functions
(...skipping 15 matching lines...)
317 // Ignore the return value. 318 // Ignore the return value.
318 } 319 }
319 CheckStack(); 320 CheckStack();
320 321
321 // Compile the body of the function in a vanilla state. Don't 322 // Compile the body of the function in a vanilla state. Don't
322 // bother compiling all the code if the scope has an illegal 323 // bother compiling all the code if the scope has an illegal
323 // redeclaration. 324 // redeclaration.
324 if (!scope()->HasIllegalRedeclaration()) { 325 if (!scope()->HasIllegalRedeclaration()) {
325 Comment cmnt(masm_, "[ function body"); 326 Comment cmnt(masm_, "[ function body");
326 #ifdef DEBUG 327 #ifdef DEBUG
327 bool is_builtin = Bootstrapper::IsActive(); 328 bool is_builtin = info->isolate()->bootstrapper()->IsActive();
328 bool should_trace = 329 bool should_trace =
329 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; 330 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
330 if (should_trace) { 331 if (should_trace) {
331 frame_->CallRuntime(Runtime::kDebugTrace, 0); 332 frame_->CallRuntime(Runtime::kDebugTrace, 0);
332 // Ignore the return value. 333 // Ignore the return value.
333 } 334 }
334 #endif 335 #endif
335 VisitStatements(info->function()->body()); 336 VisitStatements(info->function()->body());
336 337
337 // Handle the return from the function. 338 // Handle the return from the function.
338 if (has_valid_frame()) { 339 if (has_valid_frame()) {
339 // If there is a valid frame, control flow can fall off the end of 340 // If there is a valid frame, control flow can fall off the end of
340 // the body. In that case there is an implicit return statement. 341 // the body. In that case there is an implicit return statement.
341 ASSERT(!function_return_is_shadowed_); 342 ASSERT(!function_return_is_shadowed_);
342 CodeForReturnPosition(info->function()); 343 CodeForReturnPosition(info->function());
343 frame_->PrepareForReturn(); 344 frame_->PrepareForReturn();
344 Result undefined(Factory::undefined_value()); 345 Result undefined(FACTORY->undefined_value());
345 if (function_return_.is_bound()) { 346 if (function_return_.is_bound()) {
346 function_return_.Jump(&undefined); 347 function_return_.Jump(&undefined);
347 } else { 348 } else {
348 function_return_.Bind(&undefined); 349 function_return_.Bind(&undefined);
349 GenerateReturnSequence(&undefined); 350 GenerateReturnSequence(&undefined);
350 } 351 }
351 } else if (function_return_.is_linked()) { 352 } else if (function_return_.is_linked()) {
352 // If the return target has dangling jumps to it, then we have not 353 // If the return target has dangling jumps to it, then we have not
353 // yet generated the return sequence. This can happen when (a) 354 // yet generated the return sequence. This can happen when (a)
354 // control does not flow off the end of the body so we did not 355 // control does not flow off the end of the body so we did not
(...skipping 11 matching lines...)
366 loop_nesting_ = 0; 367 loop_nesting_ = 0;
367 368
368 // Code generation state must be reset. 369 // Code generation state must be reset.
369 ASSERT(state_ == NULL); 370 ASSERT(state_ == NULL);
370 ASSERT(!function_return_is_shadowed_); 371 ASSERT(!function_return_is_shadowed_);
371 function_return_.Unuse(); 372 function_return_.Unuse();
372 DeleteFrame(); 373 DeleteFrame();
373 374
374 // Process any deferred code using the register allocator. 375 // Process any deferred code using the register allocator.
375 if (!HasStackOverflow()) { 376 if (!HasStackOverflow()) {
376 JumpTarget::set_compiling_deferred_code(true); 377 info->isolate()->set_jump_target_compiling_deferred_code(true);
377 ProcessDeferred(); 378 ProcessDeferred();
378 JumpTarget::set_compiling_deferred_code(false); 379 info->isolate()->set_jump_target_compiling_deferred_code(false);
379 } 380 }
380 381
381 // There is no need to delete the register allocator, it is a 382 // There is no need to delete the register allocator, it is a
382 // stack-allocated local. 383 // stack-allocated local.
383 allocator_ = NULL; 384 allocator_ = NULL;
384 } 385 }
385 386
386 387
387 Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { 388 Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
388 // Currently, this assertion will fail if we try to assign to 389 // Currently, this assertion will fail if we try to assign to
(...skipping 163 matching lines...)
552 void CodeGenerator::ConvertInt32ResultToNumber(Result* value) { 553 void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
553 ASSERT(value->is_untagged_int32()); 554 ASSERT(value->is_untagged_int32());
554 if (value->is_register()) { 555 if (value->is_register()) {
555 Register val = value->reg(); 556 Register val = value->reg();
556 JumpTarget done; 557 JumpTarget done;
557 __ add(val, Operand(val)); 558 __ add(val, Operand(val));
558 done.Branch(no_overflow, value); 559 done.Branch(no_overflow, value);
559 __ sar(val, 1); 560 __ sar(val, 1);
560 // If there was an overflow, bits 30 and 31 of the original number disagree. 561 // If there was an overflow, bits 30 and 31 of the original number disagree.
561 __ xor_(val, 0x80000000u); 562 __ xor_(val, 0x80000000u);
562 if (CpuFeatures::IsSupported(SSE2)) { 563 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
563 CpuFeatures::Scope fscope(SSE2); 564 CpuFeatures::Scope fscope(SSE2);
564 __ cvtsi2sd(xmm0, Operand(val)); 565 __ cvtsi2sd(xmm0, Operand(val));
565 } else { 566 } else {
566 // Move val to ST[0] in the FPU 567 // Move val to ST[0] in the FPU
567 // Push and pop are safe with respect to the virtual frame because 568 // Push and pop are safe with respect to the virtual frame because
568 // all synced elements are below the actual stack pointer. 569 // all synced elements are below the actual stack pointer.
569 __ push(val); 570 __ push(val);
570 __ fild_s(Operand(esp, 0)); 571 __ fild_s(Operand(esp, 0));
571 __ pop(val); 572 __ pop(val);
572 } 573 }
573 Result scratch = allocator_->Allocate(); 574 Result scratch = allocator_->Allocate();
574 ASSERT(scratch.is_register()); 575 ASSERT(scratch.is_register());
575 Label allocation_failed; 576 Label allocation_failed;
576 __ AllocateHeapNumber(val, scratch.reg(), 577 __ AllocateHeapNumber(val, scratch.reg(),
577 no_reg, &allocation_failed); 578 no_reg, &allocation_failed);
578 VirtualFrame* clone = new VirtualFrame(frame_); 579 VirtualFrame* clone = new VirtualFrame(frame_);
579 scratch.Unuse(); 580 scratch.Unuse();
580 if (CpuFeatures::IsSupported(SSE2)) { 581 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
581 CpuFeatures::Scope fscope(SSE2); 582 CpuFeatures::Scope fscope(SSE2);
582 __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0); 583 __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
583 } else { 584 } else {
584 __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset)); 585 __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
585 } 586 }
586 done.Jump(value); 587 done.Jump(value);
587 588
588 // Establish the virtual frame, cloned from where AllocateHeapNumber 589 // Establish the virtual frame, cloned from where AllocateHeapNumber
589 // jumped to allocation_failed. 590 // jumped to allocation_failed.
590 RegisterFile empty_regs; 591 RegisterFile empty_regs;
591 SetFrame(clone, &empty_regs); 592 SetFrame(clone, &empty_regs);
592 __ bind(&allocation_failed); 593 __ bind(&allocation_failed);
593 if (!CpuFeatures::IsSupported(SSE2)) { 594 if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
594 // Pop the value from the floating point stack. 595 // Pop the value from the floating point stack.
595 __ fstp(0); 596 __ fstp(0);
596 } 597 }
597 unsafe_bailout_->Jump(); 598 unsafe_bailout_->Jump();
598 599
599 done.Bind(value); 600 done.Bind(value);
600 } else { 601 } else {
601 ASSERT(value->is_constant()); 602 ASSERT(value->is_constant());
602 } 603 }
603 value->set_untagged_int32(false); 604 value->set_untagged_int32(false);
604 value->set_type_info(TypeInfo::Integer32()); 605 value->set_type_info(TypeInfo::Integer32());
605 } 606 }
606 607
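
Note: the add/sar/xor dance in ConvertInt32ResultToNumber above is easy to
misread, so here it is in plain C++, assuming ia32's 31-bit smis with tag
size 1: doubling overflows exactly when bits 30 and 31 disagree, and flipping
bit 31 after the arithmetic shift back recovers the original value.

    #include <cassert>
    #include <cstdint>

    int main() {
      uint32_t val = 0x50000000u;              // bits 30 and 31 disagree
      uint32_t doubled = val + val;            // 'add val, val' overflows
      uint32_t shifted = static_cast<uint32_t>(
          static_cast<int32_t>(doubled) >> 1); // 'sar val, 1'
      assert(shifted != val);                  // the sign bit is now wrong...
      assert((shifted ^ 0x80000000u) == val);  // ...and the xor repairs it
      return 0;
    }
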
607 608
608 void CodeGenerator::Load(Expression* expr) { 609 void CodeGenerator::Load(Expression* expr) {
609 #ifdef DEBUG 610 #ifdef DEBUG
610 int original_height = frame_->height(); 611 int original_height = frame_->height();
611 #endif 612 #endif
612 ASSERT(!in_spilled_code()); 613 ASSERT(!in_spilled_code());
613 614
614 // If the expression should be a side-effect-free 32-bit int computation, 615 // If the expression should be a side-effect-free 32-bit int computation,
615 // compile that SafeInt32 path, and a bailout path. 616 // compile that SafeInt32 path, and a bailout path.
616 if (!in_safe_int32_mode() && 617 if (!in_safe_int32_mode() &&
617 safe_int32_mode_enabled() && 618 safe_int32_mode_enabled() &&
618 expr->side_effect_free() && 619 expr->side_effect_free() &&
619 expr->num_bit_ops() > 2 && 620 expr->num_bit_ops() > 2 &&
620 CpuFeatures::IsSupported(SSE2)) { 621 Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
621 BreakTarget unsafe_bailout; 622 BreakTarget unsafe_bailout;
622 JumpTarget done; 623 JumpTarget done;
623 unsafe_bailout.set_expected_height(frame_->height()); 624 unsafe_bailout.set_expected_height(frame_->height());
624 LoadInSafeInt32Mode(expr, &unsafe_bailout); 625 LoadInSafeInt32Mode(expr, &unsafe_bailout);
625 done.Jump(); 626 done.Jump();
626 627
627 if (unsafe_bailout.is_linked()) { 628 if (unsafe_bailout.is_linked()) {
628 unsafe_bailout.Bind(); 629 unsafe_bailout.Bind();
629 LoadWithSafeInt32ModeDisabled(expr); 630 LoadWithSafeInt32ModeDisabled(expr);
630 } 631 }
631 done.Bind(); 632 done.Bind();
632 } else { 633 } else {
633 JumpTarget true_target; 634 JumpTarget true_target;
634 JumpTarget false_target; 635 JumpTarget false_target;
635 ControlDestination dest(&true_target, &false_target, true); 636 ControlDestination dest(&true_target, &false_target, true);
636 LoadCondition(expr, &dest, false); 637 LoadCondition(expr, &dest, false);
637 638
638 if (dest.false_was_fall_through()) { 639 if (dest.false_was_fall_through()) {
639 // The false target was just bound. 640 // The false target was just bound.
640 JumpTarget loaded; 641 JumpTarget loaded;
641 frame_->Push(Factory::false_value()); 642 frame_->Push(FACTORY->false_value());
642 // There may be dangling jumps to the true target. 643 // There may be dangling jumps to the true target.
643 if (true_target.is_linked()) { 644 if (true_target.is_linked()) {
644 loaded.Jump(); 645 loaded.Jump();
645 true_target.Bind(); 646 true_target.Bind();
646 frame_->Push(Factory::true_value()); 647 frame_->Push(FACTORY->true_value());
647 loaded.Bind(); 648 loaded.Bind();
648 } 649 }
649 650
650 } else if (dest.is_used()) { 651 } else if (dest.is_used()) {
651 // There is true, and possibly false, control flow (with true as 652 // There is true, and possibly false, control flow (with true as
652 // the fall through). 653 // the fall through).
653 JumpTarget loaded; 654 JumpTarget loaded;
654 frame_->Push(Factory::true_value()); 655 frame_->Push(FACTORY->true_value());
655 if (false_target.is_linked()) { 656 if (false_target.is_linked()) {
656 loaded.Jump(); 657 loaded.Jump();
657 false_target.Bind(); 658 false_target.Bind();
658 frame_->Push(Factory::false_value()); 659 frame_->Push(FACTORY->false_value());
659 loaded.Bind(); 660 loaded.Bind();
660 } 661 }
661 662
662 } else { 663 } else {
663 // We have a valid value on top of the frame, but we still may 664 // We have a valid value on top of the frame, but we still may
664 // have dangling jumps to the true and false targets from nested 665 // have dangling jumps to the true and false targets from nested
665 // subexpressions (eg, the left subexpressions of the 666 // subexpressions (eg, the left subexpressions of the
666 // short-circuited boolean operators). 667 // short-circuited boolean operators).
667 ASSERT(has_valid_frame()); 668 ASSERT(has_valid_frame());
668 if (true_target.is_linked() || false_target.is_linked()) { 669 if (true_target.is_linked() || false_target.is_linked()) {
669 JumpTarget loaded; 670 JumpTarget loaded;
670 loaded.Jump(); // Don't lose the current TOS. 671 loaded.Jump(); // Don't lose the current TOS.
671 if (true_target.is_linked()) { 672 if (true_target.is_linked()) {
672 true_target.Bind(); 673 true_target.Bind();
673 frame_->Push(Factory::true_value()); 674 frame_->Push(FACTORY->true_value());
674 if (false_target.is_linked()) { 675 if (false_target.is_linked()) {
675 loaded.Jump(); 676 loaded.Jump();
676 } 677 }
677 } 678 }
678 if (false_target.is_linked()) { 679 if (false_target.is_linked()) {
679 false_target.Bind(); 680 false_target.Bind();
680 frame_->Push(Factory::false_value()); 681 frame_->Push(FACTORY->false_value());
681 } 682 }
682 loaded.Bind(); 683 loaded.Bind();
683 } 684 }
684 } 685 }
685 } 686 }
686 ASSERT(has_valid_frame()); 687 ASSERT(has_valid_frame());
687 ASSERT(frame_->height() == original_height + 1); 688 ASSERT(frame_->height() == original_height + 1);
688 } 689 }
689 690
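
Note: the SafeInt32 gate at the top of Load() is spread across five diff
lines, so here it is restated over a plain struct (illustrative types; the
real predicate reads the AST node and the isolate's CpuFeatures):

    #include <cassert>

    struct ExprInfo {
      bool side_effect_free;
      int num_bit_ops;
    };

    // Mirrors the if() that guards LoadInSafeInt32Mode above.
    static bool TakesSafeInt32Path(const ExprInfo& e, bool in_mode,
                                   bool mode_enabled, bool sse2) {
      return !in_mode && mode_enabled && e.side_effect_free &&
             e.num_bit_ops > 2 && sse2;
    }

    int main() {
      ExprInfo bit_heavy = { true, 3 };   // e.g. the JS (a | 0) ^ (b & 7) >> 1
      assert(TakesSafeInt32Path(bit_heavy, false, true, true));
      ExprInfo impure = { false, 5 };     // side effects force the normal path
      assert(!TakesSafeInt32Path(impure, false, true, true));
      return 0;
    }
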
690 691
(...skipping 57 matching lines...)
748 749
749 Result CodeGenerator::StoreArgumentsObject(bool initial) { 750 Result CodeGenerator::StoreArgumentsObject(bool initial) {
750 ArgumentsAllocationMode mode = ArgumentsMode(); 751 ArgumentsAllocationMode mode = ArgumentsMode();
751 ASSERT(mode != NO_ARGUMENTS_ALLOCATION); 752 ASSERT(mode != NO_ARGUMENTS_ALLOCATION);
752 753
753 Comment cmnt(masm_, "[ store arguments object"); 754 Comment cmnt(masm_, "[ store arguments object");
754 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) { 755 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
755 // When using lazy arguments allocation, we store the arguments marker value 756 // When using lazy arguments allocation, we store the arguments marker value
756 // as a sentinel indicating that the arguments object hasn't been 757 // as a sentinel indicating that the arguments object hasn't been
757 // allocated yet. 758 // allocated yet.
758 frame_->Push(Factory::arguments_marker()); 759 frame_->Push(FACTORY->arguments_marker());
759 } else { 760 } else {
760 ArgumentsAccessStub stub(is_strict_mode() 761 ArgumentsAccessStub stub(is_strict_mode()
761 ? ArgumentsAccessStub::NEW_STRICT 762 ? ArgumentsAccessStub::NEW_STRICT
762 : ArgumentsAccessStub::NEW_NON_STRICT); 763 : ArgumentsAccessStub::NEW_NON_STRICT);
763 frame_->PushFunction(); 764 frame_->PushFunction();
764 frame_->PushReceiverSlotAddress(); 765 frame_->PushReceiverSlotAddress();
765 frame_->Push(Smi::FromInt(scope()->num_parameters())); 766 frame_->Push(Smi::FromInt(scope()->num_parameters()));
766 Result result = frame_->CallStub(&stub, 3); 767 Result result = frame_->CallStub(&stub, 3);
767 frame_->Push(&result); 768 frame_->Push(&result);
768 } 769 }
(...skipping 11 matching lines...)
780 // We have to skip storing into the arguments slot if it has 781 // We have to skip storing into the arguments slot if it has
 781 // already been written to. This can happen if the function 782 // already been written to. This can happen if the function
782 // has a local variable named 'arguments'. 783 // has a local variable named 'arguments'.
783 LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF); 784 LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF);
784 Result probe = frame_->Pop(); 785 Result probe = frame_->Pop();
785 if (probe.is_constant()) { 786 if (probe.is_constant()) {
786 // We have to skip updating the arguments object if it has 787 // We have to skip updating the arguments object if it has
787 // been assigned a proper value. 788 // been assigned a proper value.
788 skip_arguments = !probe.handle()->IsArgumentsMarker(); 789 skip_arguments = !probe.handle()->IsArgumentsMarker();
789 } else { 790 } else {
790 __ cmp(Operand(probe.reg()), Immediate(Factory::arguments_marker())); 791 __ cmp(Operand(probe.reg()), Immediate(FACTORY->arguments_marker()));
791 probe.Unuse(); 792 probe.Unuse();
792 done.Branch(not_equal); 793 done.Branch(not_equal);
793 } 794 }
794 } 795 }
795 if (!skip_arguments) { 796 if (!skip_arguments) {
796 StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT); 797 StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
797 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind(); 798 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
798 } 799 }
799 if (shadow != NULL) { 800 if (shadow != NULL) {
800 StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT); 801 StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
(...skipping 110 matching lines...)
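
Note: a toy model of the sentinel protocol StoreArgumentsObject relies on
(names are illustrative): the slot holds the arguments marker until either
the real object is stored or user code assigns to a local named 'arguments'
first; the marker comparison above decides which happened.

    #include <cassert>

    struct Slot { const void* value; };
    static const char kArgumentsMarker[] = "arguments-marker";

    static void StoreArgumentsObjectSketch(Slot* slot, const void* real_object) {
      if (slot->value == kArgumentsMarker) {
        slot->value = real_object;  // not yet written: store the real object
      }                             // else: keep the user's own assignment
    }

    int main() {
      Slot s = { kArgumentsMarker };
      int args_obj = 0;
      StoreArgumentsObjectSketch(&s, &args_obj);
      assert(s.value == &args_obj);
      return 0;
    }
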
911 __ test(value.reg(), Immediate(kSmiTagMask)); 912 __ test(value.reg(), Immediate(kSmiTagMask));
912 dest->true_target()->Branch(zero); 913 dest->true_target()->Branch(zero);
913 __ fldz(); 914 __ fldz();
914 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset)); 915 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
915 __ FCmp(); 916 __ FCmp();
916 value.Unuse(); 917 value.Unuse();
917 dest->Split(not_zero); 918 dest->Split(not_zero);
918 } else { 919 } else {
919 // Fast case checks. 920 // Fast case checks.
920 // 'false' => false. 921 // 'false' => false.
921 __ cmp(value.reg(), Factory::false_value()); 922 __ cmp(value.reg(), FACTORY->false_value());
922 dest->false_target()->Branch(equal); 923 dest->false_target()->Branch(equal);
923 924
924 // 'true' => true. 925 // 'true' => true.
925 __ cmp(value.reg(), Factory::true_value()); 926 __ cmp(value.reg(), FACTORY->true_value());
926 dest->true_target()->Branch(equal); 927 dest->true_target()->Branch(equal);
927 928
928 // 'undefined' => false. 929 // 'undefined' => false.
929 __ cmp(value.reg(), Factory::undefined_value()); 930 __ cmp(value.reg(), FACTORY->undefined_value());
930 dest->false_target()->Branch(equal); 931 dest->false_target()->Branch(equal);
931 932
932 // Smi => false iff zero. 933 // Smi => false iff zero.
933 STATIC_ASSERT(kSmiTag == 0); 934 STATIC_ASSERT(kSmiTag == 0);
934 __ test(value.reg(), Operand(value.reg())); 935 __ test(value.reg(), Operand(value.reg()));
935 dest->false_target()->Branch(zero); 936 dest->false_target()->Branch(zero);
936 __ test(value.reg(), Immediate(kSmiTagMask)); 937 __ test(value.reg(), Immediate(kSmiTagMask));
937 dest->true_target()->Branch(zero); 938 dest->true_target()->Branch(zero);
938 939
939 // Call the stub for all other cases. 940 // Call the stub for all other cases.
(...skipping 50 matching lines...)
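
Note: the fast-path order above, modeled over an enum. This is a sketch: the
real code compares the register against singleton heap values and leaves
strings and heap numbers to the ToBoolean stub.

    #include <cassert>

    enum Value { kFalse, kTrue, kUndefined, kSmiZero, kSmiNonZero, kOther };

    static bool ToBooleanFastPath(Value v, bool* out) {
      switch (v) {
        case kFalse:      *out = false; return true;
        case kTrue:       *out = true;  return true;
        case kUndefined:  *out = false; return true;
        case kSmiZero:    *out = false; return true;  // smi => false iff zero
        case kSmiNonZero: *out = true;  return true;
        default:          return false;               // generic stub case
      }
    }

    int main() {
      bool b = true;
      assert(ToBooleanFastPath(kSmiZero, &b) && !b);
      assert(!ToBooleanFastPath(kOther, &b));  // string/number: call the stub
      return 0;
    }
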
990 TypeInfo right_info_; 991 TypeInfo right_info_;
991 OverwriteMode mode_; 992 OverwriteMode mode_;
992 Label answer_out_of_range_; 993 Label answer_out_of_range_;
993 Label non_smi_input_; 994 Label non_smi_input_;
994 Label constant_rhs_; 995 Label constant_rhs_;
995 Smi* smi_value_; 996 Smi* smi_value_;
996 }; 997 };
997 998
998 999
999 Label* DeferredInlineBinaryOperation::NonSmiInputLabel() { 1000 Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
1000 if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) { 1001 if (Token::IsBitOp(op_) &&
1002 Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
1001 return &non_smi_input_; 1003 return &non_smi_input_;
1002 } else { 1004 } else {
1003 return entry_label(); 1005 return entry_label();
1004 } 1006 }
1005 } 1007 }
1006 1008
1007 1009
1008 void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) { 1010 void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) {
1009 __ j(cond, &answer_out_of_range_); 1011 __ j(cond, &answer_out_of_range_);
1010 } 1012 }
1011 1013
1012 1014
1013 void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond, 1015 void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
1014 Smi* smi_value) { 1016 Smi* smi_value) {
1015 smi_value_ = smi_value; 1017 smi_value_ = smi_value;
1016 __ j(cond, &constant_rhs_); 1018 __ j(cond, &constant_rhs_);
1017 } 1019 }
1018 1020
1019 1021
1020 void DeferredInlineBinaryOperation::Generate() { 1022 void DeferredInlineBinaryOperation::Generate() {
1021 // Registers are not saved implicitly for this stub, so we should not 1023 // Registers are not saved implicitly for this stub, so we should not
1022 // tread on the registers that were not passed to us. 1024 // tread on the registers that were not passed to us.
1023 if (CpuFeatures::IsSupported(SSE2) && 1025 if (Isolate::Current()->cpu_features()->IsSupported(SSE2) &&
1024 ((op_ == Token::ADD) || 1026 ((op_ == Token::ADD) ||
1025 (op_ == Token::SUB) || 1027 (op_ == Token::SUB) ||
1026 (op_ == Token::MUL) || 1028 (op_ == Token::MUL) ||
1027 (op_ == Token::DIV))) { 1029 (op_ == Token::DIV))) {
1028 CpuFeatures::Scope use_sse2(SSE2); 1030 CpuFeatures::Scope use_sse2(SSE2);
1029 Label call_runtime, after_alloc_failure; 1031 Label call_runtime, after_alloc_failure;
1030 Label left_smi, right_smi, load_right, do_op; 1032 Label left_smi, right_smi, load_right, do_op;
1031 if (!left_info_.IsSmi()) { 1033 if (!left_info_.IsSmi()) {
1032 __ test(left_, Immediate(kSmiTagMask)); 1034 __ test(left_, Immediate(kSmiTagMask));
1033 __ j(zero, &left_smi); 1035 __ j(zero, &left_smi);
1034 if (!left_info_.IsNumber()) { 1036 if (!left_info_.IsNumber()) {
1035 __ cmp(FieldOperand(left_, HeapObject::kMapOffset), 1037 __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
1036 Factory::heap_number_map()); 1038 FACTORY->heap_number_map());
1037 __ j(not_equal, &call_runtime); 1039 __ j(not_equal, &call_runtime);
1038 } 1040 }
1039 __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset)); 1041 __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
1040 if (mode_ == OVERWRITE_LEFT) { 1042 if (mode_ == OVERWRITE_LEFT) {
1041 __ mov(dst_, left_); 1043 __ mov(dst_, left_);
1042 } 1044 }
1043 __ jmp(&load_right); 1045 __ jmp(&load_right);
1044 1046
1045 __ bind(&left_smi); 1047 __ bind(&left_smi);
1046 } else { 1048 } else {
1047 if (FLAG_debug_code) __ AbortIfNotSmi(left_); 1049 if (FLAG_debug_code) __ AbortIfNotSmi(left_);
1048 } 1050 }
1049 __ SmiUntag(left_); 1051 __ SmiUntag(left_);
1050 __ cvtsi2sd(xmm0, Operand(left_)); 1052 __ cvtsi2sd(xmm0, Operand(left_));
1051 __ SmiTag(left_); 1053 __ SmiTag(left_);
1052 if (mode_ == OVERWRITE_LEFT) { 1054 if (mode_ == OVERWRITE_LEFT) {
1053 Label alloc_failure; 1055 Label alloc_failure;
1054 __ push(left_); 1056 __ push(left_);
1055 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure); 1057 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
1056 __ pop(left_); 1058 __ pop(left_);
1057 } 1059 }
1058 1060
1059 __ bind(&load_right); 1061 __ bind(&load_right);
1060 if (!right_info_.IsSmi()) { 1062 if (!right_info_.IsSmi()) {
1061 __ test(right_, Immediate(kSmiTagMask)); 1063 __ test(right_, Immediate(kSmiTagMask));
1062 __ j(zero, &right_smi); 1064 __ j(zero, &right_smi);
1063 if (!right_info_.IsNumber()) { 1065 if (!right_info_.IsNumber()) {
1064 __ cmp(FieldOperand(right_, HeapObject::kMapOffset), 1066 __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
1065 Factory::heap_number_map()); 1067 FACTORY->heap_number_map());
1066 __ j(not_equal, &call_runtime); 1068 __ j(not_equal, &call_runtime);
1067 } 1069 }
1068 __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset)); 1070 __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
1069 if (mode_ == OVERWRITE_RIGHT) { 1071 if (mode_ == OVERWRITE_RIGHT) {
1070 __ mov(dst_, right_); 1072 __ mov(dst_, right_);
1071 } else if (mode_ == NO_OVERWRITE) { 1073 } else if (mode_ == NO_OVERWRITE) {
1072 Label alloc_failure; 1074 Label alloc_failure;
1073 __ push(left_); 1075 __ push(left_);
1074 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure); 1076 __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
1075 __ pop(left_); 1077 __ pop(left_);
(...skipping 73 matching lines...)
1149 } 1151 }
1150 1152
1151 __ bind(&non_smi_input_); 1153 __ bind(&non_smi_input_);
1152 1154
1153 if (rhs_is_constant) { 1155 if (rhs_is_constant) {
1154 __ bind(&constant_rhs_); 1156 __ bind(&constant_rhs_);
1155 // In this case the input is a heap object and it is in the dst_ register. 1157 // In this case the input is a heap object and it is in the dst_ register.
1156 // The left_ and right_ registers have not been initialized yet. 1158 // The left_ and right_ registers have not been initialized yet.
1157 __ mov(right_, Immediate(smi_value_)); 1159 __ mov(right_, Immediate(smi_value_));
1158 __ mov(left_, Operand(dst_)); 1160 __ mov(left_, Operand(dst_));
1159 if (!CpuFeatures::IsSupported(SSE2)) { 1161 if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
1160 __ jmp(entry_label()); 1162 __ jmp(entry_label());
1161 return; 1163 return;
1162 } else { 1164 } else {
1163 CpuFeatures::Scope use_sse2(SSE2); 1165 CpuFeatures::Scope use_sse2(SSE2);
1164 __ JumpIfNotNumber(dst_, left_info_, entry_label()); 1166 __ JumpIfNotNumber(dst_, left_info_, entry_label());
1165 __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label()); 1167 __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
1166 __ SmiUntag(right_); 1168 __ SmiUntag(right_);
1167 } 1169 }
1168 } else { 1170 } else {
1169 // We know we have SSE2 here because otherwise the label is not linked (see 1171 // We know we have SSE2 here because otherwise the label is not linked (see
(...skipping 92 matching lines...)
1262 // Put a heap number pointer in left_. 1264 // Put a heap number pointer in left_.
1263 __ bind(&answer_out_of_range_); 1265 __ bind(&answer_out_of_range_);
1264 SaveRegisters(); 1266 SaveRegisters();
1265 if (mode_ == OVERWRITE_LEFT) { 1267 if (mode_ == OVERWRITE_LEFT) {
1266 __ test(left_, Immediate(kSmiTagMask)); 1268 __ test(left_, Immediate(kSmiTagMask));
1267 __ j(not_zero, &allocation_ok); 1269 __ j(not_zero, &allocation_ok);
1268 } 1270 }
1269 // This trashes right_. 1271 // This trashes right_.
1270 __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2); 1272 __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
1271 __ bind(&allocation_ok); 1273 __ bind(&allocation_ok);
1272 if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) { 1274 if (Isolate::Current()->cpu_features()->IsSupported(SSE2) &&
1275 op_ != Token::SHR) {
1273 CpuFeatures::Scope use_sse2(SSE2); 1276 CpuFeatures::Scope use_sse2(SSE2);
1274 ASSERT(Token::IsBitOp(op_)); 1277 ASSERT(Token::IsBitOp(op_));
1275 // Signed conversion. 1278 // Signed conversion.
1276 __ cvtsi2sd(xmm0, Operand(dst_)); 1279 __ cvtsi2sd(xmm0, Operand(dst_));
1277 __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0); 1280 __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0);
1278 } else { 1281 } else {
1279 if (op_ == Token::SHR) { 1282 if (op_ == Token::SHR) {
1280 __ push(Immediate(0)); // High word of unsigned value. 1283 __ push(Immediate(0)); // High word of unsigned value.
1281 __ push(dst_); 1284 __ push(dst_);
1282 __ fild_d(Operand(esp, 0)); 1285 __ fild_d(Operand(esp, 0));
(...skipping 221 matching lines...)
1504 return frame_->CallStub(stub, left, right); 1507 return frame_->CallStub(stub, left, right);
1505 } else { 1508 } else {
1506 frame_->Push(left); 1509 frame_->Push(left);
1507 frame_->Push(right); 1510 frame_->Push(right);
1508 return frame_->CallStub(stub, 2); 1511 return frame_->CallStub(stub, 2);
1509 } 1512 }
1510 } 1513 }
1511 1514
1512 1515
1513 bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) { 1516 bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
1514 Object* answer_object = Heap::undefined_value(); 1517 Object* answer_object = HEAP->undefined_value();
1515 switch (op) { 1518 switch (op) {
1516 case Token::ADD: 1519 case Token::ADD:
1517 if (Smi::IsValid(left + right)) { 1520 if (Smi::IsValid(left + right)) {
1518 answer_object = Smi::FromInt(left + right); 1521 answer_object = Smi::FromInt(left + right);
1519 } 1522 }
1520 break; 1523 break;
1521 case Token::SUB: 1524 case Token::SUB:
1522 if (Smi::IsValid(left - right)) { 1525 if (Smi::IsValid(left - right)) {
1523 answer_object = Smi::FromInt(left - right); 1526 answer_object = Smi::FromInt(left - right);
1524 } 1527 }
(...skipping 51 matching lines...)
1576 unsigned_left >>= shift_amount; 1579 unsigned_left >>= shift_amount;
1577 } 1580 }
1578 ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left))); 1581 ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
1579 answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left)); 1582 answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
1580 break; 1583 break;
1581 } 1584 }
1582 default: 1585 default:
1583 UNREACHABLE(); 1586 UNREACHABLE();
1584 break; 1587 break;
1585 } 1588 }
1586 if (answer_object == Heap::undefined_value()) { 1589 if (answer_object->IsUndefined()) {
1587 return false; 1590 return false;
1588 } 1591 }
1589 frame_->Push(Handle<Object>(answer_object)); 1592 frame_->Push(Handle<Object>(answer_object));
1590 return true; 1593 return true;
1591 } 1594 }
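
Note: FoldConstantSmis only folds when the result still passes Smi::IsValid;
on 32-bit targets that is the 31-bit range [-2^30, 2^30 - 1]. A runnable
sketch of the guard for the ADD case:

    #include <cassert>
    #include <cstdint>

    static bool FoldSmiAdd(int32_t left, int32_t right, int32_t* out) {
      int64_t sum = static_cast<int64_t>(left) + right;
      if (sum < -(INT64_C(1) << 30) || sum > (INT64_C(1) << 30) - 1) {
        return false;  // answer left undefined: emit the generic code instead
      }
      *out = static_cast<int32_t>(sum);
      return true;
    }

    int main() {
      int32_t r = 0;
      assert(FoldSmiAdd(1, 2, &r) && r == 3);
      assert(!FoldSmiAdd((1 << 30) - 1, 1, &r));  // would leave the smi range
      return 0;
    }
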
1592 1595
1593 1596
1594 void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left, 1597 void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
1595 Result* right, 1598 Result* right,
1596 JumpTarget* both_smi) { 1599 JumpTarget* both_smi) {
(...skipping 1428 matching lines...)
3025 dest->false_target()->Branch(zero); 3028 dest->false_target()->Branch(zero);
3026 } else { 3029 } else {
3027 // Do the smi check, then the comparison. 3030 // Do the smi check, then the comparison.
3028 __ test(left_reg, Immediate(kSmiTagMask)); 3031 __ test(left_reg, Immediate(kSmiTagMask));
3029 is_smi.Branch(zero, left_side, right_side); 3032 is_smi.Branch(zero, left_side, right_side);
3030 } 3033 }
3031 3034
3032 // Jump or fall through to here if we are comparing a non-smi to a 3035 // Jump or fall through to here if we are comparing a non-smi to a
3033 // constant smi. If the non-smi is a heap number and this is not 3036 // constant smi. If the non-smi is a heap number and this is not
3034 // a loop condition, inline the floating point code. 3037 // a loop condition, inline the floating point code.
3035 if (!is_loop_condition && CpuFeatures::IsSupported(SSE2)) { 3038 if (!is_loop_condition &&
3039 Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
3036 // Right side is a constant smi and left side has been checked 3040 // Right side is a constant smi and left side has been checked
3037 // not to be a smi. 3041 // not to be a smi.
3038 CpuFeatures::Scope use_sse2(SSE2); 3042 CpuFeatures::Scope use_sse2(SSE2);
3039 JumpTarget not_number; 3043 JumpTarget not_number;
3040 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset), 3044 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
3041 Immediate(Factory::heap_number_map())); 3045 Immediate(FACTORY->heap_number_map()));
3042 not_number.Branch(not_equal, left_side); 3046 not_number.Branch(not_equal, left_side);
3043 __ movdbl(xmm1, 3047 __ movdbl(xmm1,
3044 FieldOperand(left_reg, HeapNumber::kValueOffset)); 3048 FieldOperand(left_reg, HeapNumber::kValueOffset));
3045 int value = Smi::cast(*right_val)->value(); 3049 int value = Smi::cast(*right_val)->value();
3046 if (value == 0) { 3050 if (value == 0) {
3047 __ xorpd(xmm0, xmm0); 3051 __ xorpd(xmm0, xmm0);
3048 } else { 3052 } else {
3049 Result temp = allocator()->Allocate(); 3053 Result temp = allocator()->Allocate();
3050 __ mov(temp.reg(), Immediate(value)); 3054 __ mov(temp.reg(), Immediate(value));
3051 __ cvtsi2sd(xmm0, Operand(temp.reg())); 3055 __ cvtsi2sd(xmm0, Operand(temp.reg()));
(...skipping 45 matching lines...)
3097 Result* operand, 3101 Result* operand,
3098 Result* left_side, 3102 Result* left_side,
3099 Result* right_side, 3103 Result* right_side,
3100 JumpTarget* not_numbers) { 3104 JumpTarget* not_numbers) {
3101 // Perform check if operand is not known to be a number. 3105 // Perform check if operand is not known to be a number.
3102 if (!operand->type_info().IsNumber()) { 3106 if (!operand->type_info().IsNumber()) {
3103 Label done; 3107 Label done;
3104 __ test(operand->reg(), Immediate(kSmiTagMask)); 3108 __ test(operand->reg(), Immediate(kSmiTagMask));
3105 __ j(zero, &done); 3109 __ j(zero, &done);
3106 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset), 3110 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
3107 Immediate(Factory::heap_number_map())); 3111 Immediate(FACTORY->heap_number_map()));
3108 not_numbers->Branch(not_equal, left_side, right_side, not_taken); 3112 not_numbers->Branch(not_equal, left_side, right_side, not_taken);
3109 __ bind(&done); 3113 __ bind(&done);
3110 } 3114 }
3111 } 3115 }
3112 3116
3113 3117
3114 // Load a comparison operand to the FPU stack. This assumes that the operand has 3118 // Load a comparison operand to the FPU stack. This assumes that the operand has
3115 // already been checked and is a number. 3119 // already been checked and is a number.
3116 static void LoadComparisonOperand(MacroAssembler* masm_, 3120 static void LoadComparisonOperand(MacroAssembler* masm_,
3117 Result* operand) { 3121 Result* operand) {
(...skipping 46 matching lines...)
3164 __ SmiUntag(operand->reg()); 3168 __ SmiUntag(operand->reg());
3165 __ cvtsi2sd(xmm_reg, Operand(operand->reg())); 3169 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
3166 __ SmiTag(operand->reg()); 3170 __ SmiTag(operand->reg());
3167 } else { 3171 } else {
3168 // Operand type not known, check for smi or heap number. 3172 // Operand type not known, check for smi or heap number.
3169 Label smi; 3173 Label smi;
3170 __ test(operand->reg(), Immediate(kSmiTagMask)); 3174 __ test(operand->reg(), Immediate(kSmiTagMask));
3171 __ j(zero, &smi); 3175 __ j(zero, &smi);
3172 if (!operand->type_info().IsNumber()) { 3176 if (!operand->type_info().IsNumber()) {
3173 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset), 3177 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
3174 Immediate(Factory::heap_number_map())); 3178 Immediate(FACTORY->heap_number_map()));
3175 not_numbers->Branch(not_equal, left_side, right_side, taken); 3179 not_numbers->Branch(not_equal, left_side, right_side, taken);
3176 } 3180 }
3177 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset)); 3181 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3178 __ jmp(&done); 3182 __ jmp(&done);
3179 3183
3180 __ bind(&smi); 3184 __ bind(&smi);
 3181 // Convert smi to float and keep the original smi. 3185 // Convert smi to float and keep the original smi.
3182 __ SmiUntag(operand->reg()); 3186 __ SmiUntag(operand->reg());
3183 __ cvtsi2sd(xmm_reg, Operand(operand->reg())); 3187 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
3184 __ SmiTag(operand->reg()); 3188 __ SmiTag(operand->reg());
3185 __ jmp(&done); 3189 __ jmp(&done);
3186 } 3190 }
3187 __ bind(&done); 3191 __ bind(&done);
3188 } 3192 }
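
Note: the SmiUntag/cvtsi2sd/SmiTag idiom above converts to double without
losing the operand. In plain C++, assuming the ia32 smi encoding (value << 1
with a zero tag bit):

    #include <cassert>

    static double SmiToDoubleKeepingSmi(int* smi) {
      int value = *smi >> 1;                  // SmiUntag
      double d = static_cast<double>(value);  // cvtsi2sd
      *smi = value << 1;                      // SmiTag: the original smi is back
      return d;
    }

    int main() {
      int smi = 7 << 1;  // smi encoding of 7
      assert(SmiToDoubleKeepingSmi(&smi) == 7.0 && smi == (7 << 1));
      return 0;
    }
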
3189 3193
3190 3194
3191 void CodeGenerator::GenerateInlineNumberComparison(Result* left_side, 3195 void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
3192 Result* right_side, 3196 Result* right_side,
3193 Condition cc, 3197 Condition cc,
3194 ControlDestination* dest) { 3198 ControlDestination* dest) {
3195 ASSERT(left_side->is_register()); 3199 ASSERT(left_side->is_register());
3196 ASSERT(right_side->is_register()); 3200 ASSERT(right_side->is_register());
3197 3201
3198 JumpTarget not_numbers; 3202 JumpTarget not_numbers;
3199 if (CpuFeatures::IsSupported(SSE2)) { 3203 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
3200 CpuFeatures::Scope use_sse2(SSE2); 3204 CpuFeatures::Scope use_sse2(SSE2);
3201 3205
3202 // Load left and right operand into registers xmm0 and xmm1 and compare. 3206 // Load left and right operand into registers xmm0 and xmm1 and compare.
3203 LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side, 3207 LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
3204 &not_numbers); 3208 &not_numbers);
3205 LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side, 3209 LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
3206 &not_numbers); 3210 &not_numbers);
3207 __ ucomisd(xmm0, xmm1); 3211 __ ucomisd(xmm0, xmm1);
3208 } else { 3212 } else {
3209 Label check_right, compare; 3213 Label check_right, compare;
(...skipping 61 matching lines...)
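
Note: whatever the elided lines do with the flags, the ucomisd above has to
preserve JavaScript's NaN semantics: an unordered comparison must end up at
the false target. That behavioral requirement, stated as a runnable check:

    #include <cassert>
    #include <cmath>

    int main() {
      double nan = std::nan("");
      // Every ordered comparison involving NaN is false, including NaN == NaN.
      assert(!(nan < 1.0) && !(nan > 1.0) && !(nan == nan));
      return 0;
    }
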
3271 // stack, as receiver and arguments, and calls x. 3275 // stack, as receiver and arguments, and calls x.
3272 // In the implementation comments, we call x the applicand 3276 // In the implementation comments, we call x the applicand
3273 // and y the receiver. 3277 // and y the receiver.
3274 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); 3278 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
3275 ASSERT(arguments->IsArguments()); 3279 ASSERT(arguments->IsArguments());
3276 3280
3277 // Load applicand.apply onto the stack. This will usually 3281 // Load applicand.apply onto the stack. This will usually
3278 // give us a megamorphic load site. Not super, but it works. 3282 // give us a megamorphic load site. Not super, but it works.
3279 Load(applicand); 3283 Load(applicand);
3280 frame()->Dup(); 3284 frame()->Dup();
3281 Handle<String> name = Factory::LookupAsciiSymbol("apply"); 3285 Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
3282 frame()->Push(name); 3286 frame()->Push(name);
3283 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET); 3287 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
3284 __ nop(); 3288 __ nop();
3285 frame()->Push(&answer); 3289 frame()->Push(&answer);
3286 3290
3287 // Load the receiver and the existing arguments object onto the 3291 // Load the receiver and the existing arguments object onto the
3288 // expression stack. Avoid allocating the arguments object here. 3292 // expression stack. Avoid allocating the arguments object here.
3289 Load(receiver); 3293 Load(receiver);
3290 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF); 3294 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
3291 3295
(...skipping 11 matching lines...)
3303 // from the stack. This also deals with cases where a local variable 3307 // from the stack. This also deals with cases where a local variable
3304 // named 'arguments' has been introduced. 3308 // named 'arguments' has been introduced.
3305 frame_->Dup(); 3309 frame_->Dup();
3306 Result probe = frame_->Pop(); 3310 Result probe = frame_->Pop();
3307 { VirtualFrame::SpilledScope spilled_scope; 3311 { VirtualFrame::SpilledScope spilled_scope;
3308 Label slow, done; 3312 Label slow, done;
3309 bool try_lazy = true; 3313 bool try_lazy = true;
3310 if (probe.is_constant()) { 3314 if (probe.is_constant()) {
3311 try_lazy = probe.handle()->IsArgumentsMarker(); 3315 try_lazy = probe.handle()->IsArgumentsMarker();
3312 } else { 3316 } else {
3313 __ cmp(Operand(probe.reg()), Immediate(Factory::arguments_marker())); 3317 __ cmp(Operand(probe.reg()), Immediate(FACTORY->arguments_marker()));
3314 probe.Unuse(); 3318 probe.Unuse();
3315 __ j(not_equal, &slow); 3319 __ j(not_equal, &slow);
3316 } 3320 }
3317 3321
3318 if (try_lazy) { 3322 if (try_lazy) {
3319 Label build_args; 3323 Label build_args;
3320 // Get rid of the arguments object probe. 3324 // Get rid of the arguments object probe.
3321 frame_->Drop(); // Can be called on a spilled frame. 3325 frame_->Drop(); // Can be called on a spilled frame.
3322 // Stack now has 3 elements on it. 3326 // Stack now has 3 elements on it.
3323 // Contents of stack at this point: 3327 // Contents of stack at this point:
(...skipping 15 matching lines...)
3339 __ j(below, &build_args); 3343 __ j(below, &build_args);
3340 3344
3341 // Check that applicand.apply is Function.prototype.apply. 3345 // Check that applicand.apply is Function.prototype.apply.
3342 __ mov(eax, Operand(esp, kPointerSize)); 3346 __ mov(eax, Operand(esp, kPointerSize));
3343 __ test(eax, Immediate(kSmiTagMask)); 3347 __ test(eax, Immediate(kSmiTagMask));
3344 __ j(zero, &build_args); 3348 __ j(zero, &build_args);
3345 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx); 3349 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
3346 __ j(not_equal, &build_args); 3350 __ j(not_equal, &build_args);
3347 __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset)); 3351 __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
3348 __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag)); 3352 __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
3349 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); 3353 Handle<Code> apply_code(Isolate::Current()->builtins()->builtin(
3354 Builtins::FunctionApply));
3350 __ cmp(Operand(ecx), Immediate(apply_code)); 3355 __ cmp(Operand(ecx), Immediate(apply_code));
3351 __ j(not_equal, &build_args); 3356 __ j(not_equal, &build_args);
3352 3357
3353 // Check that applicand is a function. 3358 // Check that applicand is a function.
3354 __ mov(edi, Operand(esp, 2 * kPointerSize)); 3359 __ mov(edi, Operand(esp, 2 * kPointerSize));
3355 __ test(edi, Immediate(kSmiTagMask)); 3360 __ test(edi, Immediate(kSmiTagMask));
3356 __ j(zero, &build_args); 3361 __ j(zero, &build_args);
3357 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); 3362 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
3358 __ j(not_equal, &build_args); 3363 __ j(not_equal, &build_args);
3359 3364
(...skipping 206 matching lines...)
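
Note: a toy restatement of the guards checked above before the lazy
f.apply(x, arguments) fast path is taken (types illustrative): 'apply' must
be a function whose code entry is the FunctionApply builtin, and the
applicand must itself be a function; any failed check jumps to build_args.

    #include <cassert>

    struct Fn { const void* code_entry; };  // stands in for JSFunction

    static bool CanUseApplyFastPath(const Fn* apply_fn, const Fn* applicand,
                                    const void* function_apply_builtin) {
      // Non-null here plays the role of the JS_FUNCTION_TYPE checks above.
      return apply_fn != nullptr && applicand != nullptr &&
             apply_fn->code_entry == function_apply_builtin;
    }

    int main() {
      int builtin = 0;  // stands in for the FunctionApply builtin's code
      Fn apply = { &builtin };
      Fn f = { nullptr };
      assert(CanUseApplyFastPath(&apply, &f, &builtin));
      assert(!CanUseApplyFastPath(&apply, &f, &apply));  // monkey-patched apply
      return 0;
    }
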
3566 frame_->EmitPush(Immediate(var->name())); 3571 frame_->EmitPush(Immediate(var->name()));
3567 // Declaration nodes are always introduced in one of two modes. 3572 // Declaration nodes are always introduced in one of two modes.
3568 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); 3573 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
3569 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; 3574 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
3570 frame_->EmitPush(Immediate(Smi::FromInt(attr))); 3575 frame_->EmitPush(Immediate(Smi::FromInt(attr)));
3571 // Push initial value, if any. 3576 // Push initial value, if any.
3572 // Note: For variables we must not push an initial value (such as 3577 // Note: For variables we must not push an initial value (such as
3573 // 'undefined') because we may have a (legal) redeclaration and we 3578 // 'undefined') because we may have a (legal) redeclaration and we
3574 // must not destroy the current value. 3579 // must not destroy the current value.
3575 if (node->mode() == Variable::CONST) { 3580 if (node->mode() == Variable::CONST) {
3576 frame_->EmitPush(Immediate(Factory::the_hole_value())); 3581 frame_->EmitPush(Immediate(FACTORY->the_hole_value()));
3577 } else if (node->fun() != NULL) { 3582 } else if (node->fun() != NULL) {
3578 Load(node->fun()); 3583 Load(node->fun());
3579 } else { 3584 } else {
3580 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value! 3585 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value!
3581 } 3586 }
3582 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); 3587 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
3583 // Ignore the return value (declarations are statements). 3588 // Ignore the return value (declarations are statements).
3584 return; 3589 return;
3585 } 3590 }
3586 3591
3587 ASSERT(!var->is_global()); 3592 ASSERT(!var->is_global());
3588 3593
3589 // If we have a function or a constant, we need to initialize the variable. 3594 // If we have a function or a constant, we need to initialize the variable.
3590 Expression* val = NULL; 3595 Expression* val = NULL;
3591 if (node->mode() == Variable::CONST) { 3596 if (node->mode() == Variable::CONST) {
3592 val = new Literal(Factory::the_hole_value()); 3597 val = new Literal(FACTORY->the_hole_value());
3593 } else { 3598 } else {
3594 val = node->fun(); // NULL if we don't have a function 3599 val = node->fun(); // NULL if we don't have a function
3595 } 3600 }
3596 3601
3597 if (val != NULL) { 3602 if (val != NULL) {
3598 { 3603 {
3599 // Set the initial value. 3604 // Set the initial value.
3600 Reference target(this, node->proxy()); 3605 Reference target(this, node->proxy());
3601 Load(val); 3606 Load(val);
3602 target.SetValue(NOT_CONST_INIT); 3607 target.SetValue(NOT_CONST_INIT);
(...skipping 760 matching lines...)
4363 JumpTarget exit; 4368 JumpTarget exit;
4364 4369
4365 // Get the object to enumerate over (converted to JSObject). 4370 // Get the object to enumerate over (converted to JSObject).
4366 LoadAndSpill(node->enumerable()); 4371 LoadAndSpill(node->enumerable());
4367 4372
4368 // Both SpiderMonkey and kjs ignore null and undefined in contrast 4373 // Both SpiderMonkey and kjs ignore null and undefined in contrast
4369 // to the specification. 12.6.4 mandates a call to ToObject. 4374 // to the specification. 12.6.4 mandates a call to ToObject.
4370 frame_->EmitPop(eax); 4375 frame_->EmitPop(eax);
4371 4376
4372 // eax: value to be iterated over 4377 // eax: value to be iterated over
4373 __ cmp(eax, Factory::undefined_value()); 4378 __ cmp(eax, FACTORY->undefined_value());
4374 exit.Branch(equal); 4379 exit.Branch(equal);
4375 __ cmp(eax, Factory::null_value()); 4380 __ cmp(eax, FACTORY->null_value());
4376 exit.Branch(equal); 4381 exit.Branch(equal);
4377 4382
4378 // Stack layout in body: 4383 // Stack layout in body:
4379 // [iteration counter (smi)] <- slot 0 4384 // [iteration counter (smi)] <- slot 0
4380 // [length of array] <- slot 1 4385 // [length of array] <- slot 1
4381 // [FixedArray] <- slot 2 4386 // [FixedArray] <- slot 2
4382 // [Map or 0] <- slot 3 4387 // [Map or 0] <- slot 3
4383 // [Object] <- slot 4 4388 // [Object] <- slot 4
4384 4389
4385 // Check if enumerable is already a JSObject 4390 // Check if enumerable is already a JSObject
(...skipping 18 matching lines...)
4404 // guarantee cache validity, call the runtime system to check cache 4409 // guarantee cache validity, call the runtime system to check cache
4405 // validity or get the property names in a fixed array. 4410 // validity or get the property names in a fixed array.
4406 JumpTarget call_runtime; 4411 JumpTarget call_runtime;
4407 JumpTarget loop(JumpTarget::BIDIRECTIONAL); 4412 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
4408 JumpTarget check_prototype; 4413 JumpTarget check_prototype;
4409 JumpTarget use_cache; 4414 JumpTarget use_cache;
4410 __ mov(ecx, eax); 4415 __ mov(ecx, eax);
4411 loop.Bind(); 4416 loop.Bind();
4412 // Check that there are no elements. 4417 // Check that there are no elements.
4413 __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset)); 4418 __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
4414 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array())); 4419 __ cmp(Operand(edx), Immediate(FACTORY->empty_fixed_array()));
4415 call_runtime.Branch(not_equal); 4420 call_runtime.Branch(not_equal);
4416 // Check that instance descriptors are not empty so that we can 4421 // Check that instance descriptors are not empty so that we can
4417 // check for an enum cache. Leave the map in ebx for the subsequent 4422 // check for an enum cache. Leave the map in ebx for the subsequent
4418 // prototype load. 4423 // prototype load.
4419 __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset)); 4424 __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
4420 __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset)); 4425 __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
4421 __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array())); 4426 __ cmp(Operand(edx), Immediate(FACTORY->empty_descriptor_array()));
4422 call_runtime.Branch(equal); 4427 call_runtime.Branch(equal);
 4423 // Check that there is an enum cache in the non-empty instance 4428 // Check that there is an enum cache in the non-empty instance
4424 // descriptors. This is the case if the next enumeration index 4429 // descriptors. This is the case if the next enumeration index
4425 // field does not contain a smi. 4430 // field does not contain a smi.
4426 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset)); 4431 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
4427 __ test(edx, Immediate(kSmiTagMask)); 4432 __ test(edx, Immediate(kSmiTagMask));
4428 call_runtime.Branch(zero); 4433 call_runtime.Branch(zero);
4429 // For all objects but the receiver, check that the cache is empty. 4434 // For all objects but the receiver, check that the cache is empty.
4430 __ cmp(ecx, Operand(eax)); 4435 __ cmp(ecx, Operand(eax));
4431 check_prototype.Branch(equal); 4436 check_prototype.Branch(equal);
4432 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 4437 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
4433 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array())); 4438 __ cmp(Operand(edx), Immediate(FACTORY->empty_fixed_array()));
4434 call_runtime.Branch(not_equal); 4439 call_runtime.Branch(not_equal);
4435 check_prototype.Bind(); 4440 check_prototype.Bind();
4436 // Load the prototype from the map and loop if non-null. 4441 // Load the prototype from the map and loop if non-null.
4437 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset)); 4442 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
4438 __ cmp(Operand(ecx), Immediate(Factory::null_value())); 4443 __ cmp(Operand(ecx), Immediate(FACTORY->null_value()));
4439 loop.Branch(not_equal); 4444 loop.Branch(not_equal);
4440 // The enum cache is valid. Load the map of the object being 4445 // The enum cache is valid. Load the map of the object being
4441 // iterated over and use the cache for the iteration. 4446 // iterated over and use the cache for the iteration.
4442 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); 4447 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
4443 use_cache.Jump(); 4448 use_cache.Jump();
4444 4449
4445 call_runtime.Bind(); 4450 call_runtime.Bind();
4446 // Call the runtime to get the property names for the object. 4451 // Call the runtime to get the property names for the object.
4447 frame_->EmitPush(eax); // push the Object (slot 4) for the runtime call 4452 frame_->EmitPush(eax); // push the Object (slot 4) for the runtime call
4448 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1); 4453 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
4449 4454
4450 // If we got a map from the runtime call, we can do a fast 4455 // If we got a map from the runtime call, we can do a fast
4451 // modification check. Otherwise, we got a fixed array, and we have 4456 // modification check. Otherwise, we got a fixed array, and we have
4452 // to do a slow check. 4457 // to do a slow check.
4453 // eax: map or fixed array (result from call to 4458 // eax: map or fixed array (result from call to
4454 // Runtime::kGetPropertyNamesFast) 4459 // Runtime::kGetPropertyNamesFast)
4455 __ mov(edx, Operand(eax)); 4460 __ mov(edx, Operand(eax));
4456 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); 4461 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
4457 __ cmp(ecx, Factory::meta_map()); 4462 __ cmp(ecx, FACTORY->meta_map());
4458 fixed_array.Branch(not_equal); 4463 fixed_array.Branch(not_equal);
4459 4464
4460 use_cache.Bind(); 4465 use_cache.Bind();
4461 // Get enum cache 4466 // Get enum cache
4462 // eax: map (either the result from a call to 4467 // eax: map (either the result from a call to
4463 // Runtime::kGetPropertyNamesFast or has been fetched directly from 4468 // Runtime::kGetPropertyNamesFast or has been fetched directly from
4464 // the object) 4469 // the object)
4465 __ mov(ecx, Operand(eax)); 4470 __ mov(ecx, Operand(eax));
4466 4471
4467 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset)); 4472 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
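As a reading aid, here is a minimal C++ sketch of the enum-cache walk above, using a simplified, hypothetical object model (the struct fields are illustrative, not V8's API; the real check operates on raw map and descriptor words held in registers). The fast path holds only if every object on the prototype chain has no elements, has non-empty instance descriptors with a materialized enum cache, and, for everything but the receiver, an empty enum cache bridge:

    // Hypothetical, simplified object model for this sketch only.
    struct Descriptors { bool has_enum_cache; bool bridge_cache_empty; };
    struct Obj {
      bool elements_empty;
      bool descriptors_empty;
      Descriptors descs;
      Obj* prototype;  // 0 once the chain reaches null
    };

    // Returns true when for-in may reuse the receiver map's enum cache.
    static bool CanUseEnumCache(Obj* receiver) {
      for (Obj* o = receiver; o; o = o->prototype) {
        if (!o->elements_empty) return false;        // object has own elements
        if (o->descriptors_empty) return false;      // nothing to cache from
        if (!o->descs.has_enum_cache) return false;  // enum index is still a smi
        if (o != receiver && !o->descs.bridge_cache_empty)
          return false;                              // prototype contributes keys
      }
      return true;
    }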
(...skipping 171 matching lines...)
4639 // After shadowing stops, the original targets are unshadowed and the 4644 // After shadowing stops, the original targets are unshadowed and the
4640 // ShadowTargets represent the formerly shadowing targets. 4645 // ShadowTargets represent the formerly shadowing targets.
4641 bool has_unlinks = false; 4646 bool has_unlinks = false;
4642 for (int i = 0; i < shadows.length(); i++) { 4647 for (int i = 0; i < shadows.length(); i++) {
4643 shadows[i]->StopShadowing(); 4648 shadows[i]->StopShadowing();
4644 has_unlinks = has_unlinks || shadows[i]->is_linked(); 4649 has_unlinks = has_unlinks || shadows[i]->is_linked();
4645 } 4650 }
4646 function_return_is_shadowed_ = function_return_was_shadowed; 4651 function_return_is_shadowed_ = function_return_was_shadowed;
4647 4652
4648 // Get an external reference to the handler address. 4653 // Get an external reference to the handler address.
4649 ExternalReference handler_address(Top::k_handler_address); 4654 ExternalReference handler_address(Isolate::k_handler_address);
4650 4655
4651 // Make sure that there's nothing left on the stack above the 4656 // Make sure that there's nothing left on the stack above the
4652 // handler structure. 4657 // handler structure.
4653 if (FLAG_debug_code) { 4658 if (FLAG_debug_code) {
4654 __ mov(eax, Operand::StaticVariable(handler_address)); 4659 __ mov(eax, Operand::StaticVariable(handler_address));
4655 __ cmp(esp, Operand(eax)); 4660 __ cmp(esp, Operand(eax));
4656 __ Assert(equal, "stack pointer should point to top handler"); 4661 __ Assert(equal, "stack pointer should point to top handler");
4657 } 4662 }
4658 4663
4659 // If we can fall off the end of the try block, unlink from try chain. 4664 // If we can fall off the end of the try block, unlink from try chain.
(...skipping 105 matching lines...)
4765 // After shadowing stops, the original targets are unshadowed and the 4770 // After shadowing stops, the original targets are unshadowed and the
4766 // ShadowTargets represent the formerly shadowing targets. 4771 // ShadowTargets represent the formerly shadowing targets.
4767 int nof_unlinks = 0; 4772 int nof_unlinks = 0;
4768 for (int i = 0; i < shadows.length(); i++) { 4773 for (int i = 0; i < shadows.length(); i++) {
4769 shadows[i]->StopShadowing(); 4774 shadows[i]->StopShadowing();
4770 if (shadows[i]->is_linked()) nof_unlinks++; 4775 if (shadows[i]->is_linked()) nof_unlinks++;
4771 } 4776 }
4772 function_return_is_shadowed_ = function_return_was_shadowed; 4777 function_return_is_shadowed_ = function_return_was_shadowed;
4773 4778
4774 // Get an external reference to the handler address. 4779 // Get an external reference to the handler address.
4775 ExternalReference handler_address(Top::k_handler_address); 4780 ExternalReference handler_address(Isolate::k_handler_address);
4776 4781
4777 // If we can fall off the end of the try block, unlink from the try 4782 // If we can fall off the end of the try block, unlink from the try
4778 // chain and set the state on the frame to FALLING. 4783 // chain and set the state on the frame to FALLING.
4779 if (has_valid_frame()) { 4784 if (has_valid_frame()) {
4780 // The next handler address is on top of the frame. 4785 // The next handler address is on top of the frame.
4781 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 4786 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4782 frame_->EmitPop(Operand::StaticVariable(handler_address)); 4787 frame_->EmitPop(Operand::StaticVariable(handler_address));
4783 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); 4788 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4784 4789
4785 // Fake a top of stack value (unneeded when FALLING) and set the 4790 // Fake a top of stack value (unneeded when FALLING) and set the
4786 // state in ecx, then jump around the unlink blocks if any. 4791 // state in ecx, then jump around the unlink blocks if any.
4787 frame_->EmitPush(Immediate(Factory::undefined_value())); 4792 frame_->EmitPush(Immediate(FACTORY->undefined_value()));
4788 __ Set(ecx, Immediate(Smi::FromInt(FALLING))); 4793 __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
4789 if (nof_unlinks > 0) { 4794 if (nof_unlinks > 0) {
4790 finally_block.Jump(); 4795 finally_block.Jump();
4791 } 4796 }
4792 } 4797 }
4793 4798
4794 // Generate code to unlink and set the state for the (formerly) 4799 // Generate code to unlink and set the state for the (formerly)
4795 // shadowing targets that have been jumped to. 4800 // shadowing targets that have been jumped to.
4796 for (int i = 0; i < shadows.length(); i++) { 4801 for (int i = 0; i < shadows.length(); i++) {
4797 if (shadows[i]->is_linked()) { 4802 if (shadows[i]->is_linked()) {
(...skipping 22 matching lines...)
4820 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); 4825 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
4821 frame_->EmitPop(Operand::StaticVariable(handler_address)); 4826 frame_->EmitPop(Operand::StaticVariable(handler_address));
4822 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); 4827 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4823 4828
4824 if (i == kReturnShadowIndex) { 4829 if (i == kReturnShadowIndex) {
4825 // If this target shadowed the function return, materialize 4830 // If this target shadowed the function return, materialize
4826 // the return value on the stack. 4831 // the return value on the stack.
4827 frame_->EmitPush(eax); 4832 frame_->EmitPush(eax);
4828 } else { 4833 } else {
4829 // Fake TOS for targets that shadowed breaks and continues. 4834 // Fake TOS for targets that shadowed breaks and continues.
4830 frame_->EmitPush(Immediate(Factory::undefined_value())); 4835 frame_->EmitPush(Immediate(FACTORY->undefined_value()));
4831 } 4836 }
4832 __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i))); 4837 __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
4833 if (--nof_unlinks > 0) { 4838 if (--nof_unlinks > 0) {
4834 // If this is not the last unlink block, jump around the next. 4839 // If this is not the last unlink block, jump around the next.
4835 finally_block.Jump(); 4840 finally_block.Jump();
4836 } 4841 }
4837 } 4842 }
4838 } 4843 }
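The unlink sequences above pop the saved next-handler pointer back into the per-thread handler-address slot and drop the remaining handler words. A sketch of that list discipline, with hypothetical types standing in for the raw stack layout:

    // Sketch only: stack handlers form an intrusive singly linked list whose
    // head lives in the "handler address" slot; unlinking restores the saved
    // next pointer (the EmitPop into handler_address above) and discards the
    // rest of the handler (the frame_->Drop above).
    struct StackHandler { StackHandler* next; /* ...saved state words... */ };

    static void UnlinkTopHandler(StackHandler** handler_address) {
      *handler_address = (*handler_address)->next;
    }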
4839 4844
4840 // --- Finally block --- 4845 // --- Finally block ---
(...skipping 87 matching lines...)
4928 FastNewClosureStub stub( 4933 FastNewClosureStub stub(
4929 function_info->strict_mode() ? kStrictMode : kNonStrictMode); 4934 function_info->strict_mode() ? kStrictMode : kNonStrictMode);
4930 frame()->EmitPush(Immediate(function_info)); 4935 frame()->EmitPush(Immediate(function_info));
4931 return frame()->CallStub(&stub, 1); 4936 return frame()->CallStub(&stub, 1);
4932 } else { 4937 } else {
4933 // Call the runtime to instantiate the function based on the 4938 // Call the runtime to instantiate the function based on the
4934 // shared function info. 4939 // shared function info.
4935 frame()->EmitPush(esi); 4940 frame()->EmitPush(esi);
4936 frame()->EmitPush(Immediate(function_info)); 4941 frame()->EmitPush(Immediate(function_info));
4937 frame()->EmitPush(Immediate(pretenure 4942 frame()->EmitPush(Immediate(pretenure
4938 ? Factory::true_value() 4943 ? FACTORY->true_value()
4939 : Factory::false_value())); 4944 : FACTORY->false_value()));
4940 return frame()->CallRuntime(Runtime::kNewClosure, 3); 4945 return frame()->CallRuntime(Runtime::kNewClosure, 3);
4941 } 4946 }
4942 } 4947 }
4943 4948
4944 4949
4945 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { 4950 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
4946 Comment cmnt(masm_, "[ FunctionLiteral"); 4951 Comment cmnt(masm_, "[ FunctionLiteral");
4947 ASSERT(!in_safe_int32_mode()); 4952 ASSERT(!in_safe_int32_mode());
4948 // Build the function info and instantiate it. 4953 // Build the function info and instantiate it.
4949 Handle<SharedFunctionInfo> function_info = 4954 Handle<SharedFunctionInfo> function_info =
(...skipping 87 matching lines...)
5037 // Const slots may contain 'the hole' value (the constant hasn't been 5042 // Const slots may contain 'the hole' value (the constant hasn't been
5038 // initialized yet) which needs to be converted into the 'undefined' 5043 // initialized yet) which needs to be converted into the 'undefined'
5039 // value. 5044 // value.
5040 // 5045 //
5041 // We currently spill the virtual frame because constants use the 5046 // We currently spill the virtual frame because constants use the
5042 // potentially unsafe direct-frame access of SlotOperand. 5047 // potentially unsafe direct-frame access of SlotOperand.
5043 VirtualFrame::SpilledScope spilled_scope; 5048 VirtualFrame::SpilledScope spilled_scope;
5044 Comment cmnt(masm_, "[ Load const"); 5049 Comment cmnt(masm_, "[ Load const");
5045 Label exit; 5050 Label exit;
5046 __ mov(ecx, SlotOperand(slot, ecx)); 5051 __ mov(ecx, SlotOperand(slot, ecx));
5047 __ cmp(ecx, Factory::the_hole_value()); 5052 __ cmp(ecx, FACTORY->the_hole_value());
5048 __ j(not_equal, &exit); 5053 __ j(not_equal, &exit);
5049 __ mov(ecx, Factory::undefined_value()); 5054 __ mov(ecx, FACTORY->undefined_value());
5050 __ bind(&exit); 5055 __ bind(&exit);
5051 frame()->EmitPush(ecx); 5056 frame()->EmitPush(ecx);
5052 5057
5053 } else if (slot->type() == Slot::PARAMETER) { 5058 } else if (slot->type() == Slot::PARAMETER) {
5054 frame()->PushParameterAt(slot->index()); 5059 frame()->PushParameterAt(slot->index());
5055 5060
5056 } else if (slot->type() == Slot::LOCAL) { 5061 } else if (slot->type() == Slot::LOCAL) {
5057 frame()->PushLocalAt(slot->index()); 5062 frame()->PushLocalAt(slot->index());
5058 5063
5059 } else { 5064 } else {
(...skipping 29 matching lines...)
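The const-slot load above reduces to a single sentinel test; as a C++ sketch (the enum values are stand-ins for the_hole_value and undefined_value):

    // A read of a const slot before its initialization yields the hole
    // sentinel, which must be observed as 'undefined'.
    enum SlotValue { kTheHole, kUndefined, kSomeValue };

    static SlotValue LoadConstSlot(SlotValue v) {
      return (v == kTheHole) ? kUndefined : v;
    }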
5089 result = StoreArgumentsObject(false); 5094 result = StoreArgumentsObject(false);
5090 } 5095 }
5091 frame()->Push(&result); 5096 frame()->Push(&result);
5092 return; 5097 return;
5093 } 5098 }
5094 ASSERT(result.is_register()); 5099 ASSERT(result.is_register());
5095 // The loaded value is in a register. If it is the sentinel that 5100 // The loaded value is in a register. If it is the sentinel that
5096 // indicates that we haven't loaded the arguments object yet, we 5101 // indicates that we haven't loaded the arguments object yet, we
5097 // need to do it now. 5102 // need to do it now.
5098 JumpTarget exit; 5103 JumpTarget exit;
5099 __ cmp(Operand(result.reg()), Immediate(Factory::arguments_marker())); 5104 __ cmp(Operand(result.reg()), Immediate(FACTORY->arguments_marker()));
5100 frame()->Push(&result); 5105 frame()->Push(&result);
5101 exit.Branch(not_equal); 5106 exit.Branch(not_equal);
5102 5107
5103 result = StoreArgumentsObject(false); 5108 result = StoreArgumentsObject(false);
5104 frame()->SetElementAt(0, &result); 5109 frame()->SetElementAt(0, &result);
5105 result.Unuse(); 5110 result.Unuse();
5106 exit.Bind(); 5111 exit.Bind();
5107 return; 5112 return;
5108 } 5113 }
5109 5114
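The sentinel comparison above implements lazy materialization of the arguments object. A sketch of the pattern (the marker flag and stand-in value are illustrative, not the engine's representation):

    // The slot starts out holding a marker; the first load replaces it with
    // a freshly built arguments object (StoreArgumentsObject above).
    struct ArgsSlot { bool is_marker; int object; };

    static int LoadArgumentsObject(ArgsSlot* slot) {
      if (slot->is_marker) {        // cmp against arguments_marker
        slot->object = 0;           // stand-in for StoreArgumentsObject(false)
        slot->is_marker = false;
      }
      return slot->object;
    }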
(...skipping 33 matching lines...)
5143 if (s != NULL && s->is_eval_scope()) { 5148 if (s != NULL && s->is_eval_scope()) {
5144 // Loop up the context chain. There is no frame effect so it is 5149 // Loop up the context chain. There is no frame effect so it is
5145 // safe to use raw labels here. 5150 // safe to use raw labels here.
5146 Label next, fast; 5151 Label next, fast;
5147 if (!context.is(tmp.reg())) { 5152 if (!context.is(tmp.reg())) {
5148 __ mov(tmp.reg(), context); 5153 __ mov(tmp.reg(), context);
5149 } 5154 }
5150 __ bind(&next); 5155 __ bind(&next);
5151 // Terminate at global context. 5156 // Terminate at global context.
5152 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset), 5157 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
5153 Immediate(Factory::global_context_map())); 5158 Immediate(FACTORY->global_context_map()));
5154 __ j(equal, &fast); 5159 __ j(equal, &fast);
5155 // Check that extension is NULL. 5160 // Check that extension is NULL.
5156 __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0)); 5161 __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
5157 slow->Branch(not_equal, not_taken); 5162 slow->Branch(not_equal, not_taken);
5158 // Load next context in chain. 5163 // Load next context in chain.
5159 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX)); 5164 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
5160 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset)); 5165 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5161 __ jmp(&next); 5166 __ jmp(&next);
5162 __ bind(&fast); 5167 __ bind(&fast);
5163 } 5168 }
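Restating the label loop above in C++ (simplified model; the field names are illustrative): the fast path is valid only if no context between here and the global context carries an extension object.

    struct Ctx {
      bool is_global;    // map == global_context_map
      void* extension;   // EXTENSION_INDEX slot; non-null means dynamic scope
      Ctx* closure_ctx;  // CLOSURE_INDEX -> JSFunction -> kContextOffset
    };

    static bool NoExtensionsUpToGlobal(Ctx* c) {
      while (!c->is_global) {
        if (c->extension) return false;  // must take the slow path
        c = c->closure_ctx;              // load next context in chain
      }
      return true;
    }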
(...skipping 39 matching lines...)
5203 if (potential_slot != NULL) { 5208 if (potential_slot != NULL) {
5204 // Generate fast case for locals that rewrite to slots. 5209 // Generate fast case for locals that rewrite to slots.
5205 // Allocate a fresh register to use as a temp in 5210 // Allocate a fresh register to use as a temp in
5206 // ContextSlotOperandCheckExtensions and to hold the result 5211 // ContextSlotOperandCheckExtensions and to hold the result
5207 // value. 5212 // value.
5208 *result = allocator()->Allocate(); 5213 *result = allocator()->Allocate();
5209 ASSERT(result->is_valid()); 5214 ASSERT(result->is_valid());
5210 __ mov(result->reg(), 5215 __ mov(result->reg(),
5211 ContextSlotOperandCheckExtensions(potential_slot, *result, slow)); 5216 ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
5212 if (potential_slot->var()->mode() == Variable::CONST) { 5217 if (potential_slot->var()->mode() == Variable::CONST) {
5213 __ cmp(result->reg(), Factory::the_hole_value()); 5218 __ cmp(result->reg(), FACTORY->the_hole_value());
5214 done->Branch(not_equal, result); 5219 done->Branch(not_equal, result);
5215 __ mov(result->reg(), Factory::undefined_value()); 5220 __ mov(result->reg(), FACTORY->undefined_value());
5216 } 5221 }
5217 done->Jump(result); 5222 done->Jump(result);
5218 } else if (rewrite != NULL) { 5223 } else if (rewrite != NULL) {
5219 // Generate fast case for calls of an argument function. 5224 // Generate fast case for calls of an argument function.
5220 Property* property = rewrite->AsProperty(); 5225 Property* property = rewrite->AsProperty();
5221 if (property != NULL) { 5226 if (property != NULL) {
5222 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); 5227 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
5223 Literal* key_literal = property->key()->AsLiteral(); 5228 Literal* key_literal = property->key()->AsLiteral();
5224 if (obj_proxy != NULL && 5229 if (obj_proxy != NULL &&
5225 key_literal != NULL && 5230 key_literal != NULL &&
(...skipping 66 matching lines...)
5292 // Only the first const initialization must be executed (the slot 5297 // Only the first const initialization must be executed (the slot
5293 // still contains 'the hole' value). When the assignment is executed, 5298 // still contains 'the hole' value). When the assignment is executed,
5294 // the code is identical to a normal store (see below). 5299 // the code is identical to a normal store (see below).
5295 // 5300 //
5296 // We spill the frame in the code below because the direct-frame 5301 // We spill the frame in the code below because the direct-frame
5297 // access of SlotOperand is potentially unsafe with an unspilled 5302 // access of SlotOperand is potentially unsafe with an unspilled
5298 // frame. 5303 // frame.
5299 VirtualFrame::SpilledScope spilled_scope; 5304 VirtualFrame::SpilledScope spilled_scope;
5300 Comment cmnt(masm_, "[ Init const"); 5305 Comment cmnt(masm_, "[ Init const");
5301 __ mov(ecx, SlotOperand(slot, ecx)); 5306 __ mov(ecx, SlotOperand(slot, ecx));
5302 __ cmp(ecx, Factory::the_hole_value()); 5307 __ cmp(ecx, FACTORY->the_hole_value());
5303 exit.Branch(not_equal); 5308 exit.Branch(not_equal);
5304 } 5309 }
5305 5310
5306 // We must execute the store. Storing a variable must keep the (new) 5311 // We must execute the store. Storing a variable must keep the (new)
5307 // value on the stack. This is necessary for compiling assignment 5312 // value on the stack. This is necessary for compiling assignment
5308 // expressions. 5313 // expressions.
5309 // 5314 //
5310 // Note: We will reach here even with slot->var()->mode() == 5315 // Note: We will reach here even with slot->var()->mode() ==
5311 // Variable::CONST because of const declarations which will initialize 5316 // Variable::CONST because of const declarations which will initialize
5312 // consts to 'the hole' value and by doing so, end up calling this code. 5317 // consts to 'the hole' value and by doing so, end up calling this code.
(...skipping 155 matching lines...)
5468 if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax); 5473 if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
5469 } 5474 }
5470 5475
5471 5476
5472 class DeferredAllocateInNewSpace: public DeferredCode { 5477 class DeferredAllocateInNewSpace: public DeferredCode {
5473 public: 5478 public:
5474 DeferredAllocateInNewSpace(int size, 5479 DeferredAllocateInNewSpace(int size,
5475 Register target, 5480 Register target,
5476 int registers_to_save = 0) 5481 int registers_to_save = 0)
5477 : size_(size), target_(target), registers_to_save_(registers_to_save) { 5482 : size_(size), target_(target), registers_to_save_(registers_to_save) {
5478 ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace()); 5483 ASSERT(size >= kPointerSize && size <= HEAP->MaxObjectSizeInNewSpace());
5479 ASSERT_EQ(0, registers_to_save & target.bit()); 5484 ASSERT_EQ(0, registers_to_save & target.bit());
5480 set_comment("[ DeferredAllocateInNewSpace"); 5485 set_comment("[ DeferredAllocateInNewSpace");
5481 } 5486 }
5482 void Generate(); 5487 void Generate();
5483 5488
5484 private: 5489 private:
5485 int size_; 5490 int size_;
5486 Register target_; 5491 Register target_;
5487 int registers_to_save_; 5492 int registers_to_save_;
5488 }; 5493 };
(...skipping 40 matching lines...)
5529 Result boilerplate = allocator_->Allocate(); 5534 Result boilerplate = allocator_->Allocate();
5530 ASSERT(boilerplate.is_valid()); 5535 ASSERT(boilerplate.is_valid());
5531 int literal_offset = 5536 int literal_offset =
5532 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; 5537 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
5533 __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset)); 5538 __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
5534 5539
5535 // Check whether we need to materialize the RegExp object. If so, 5540 // Check whether we need to materialize the RegExp object. If so,
5536 // jump to the deferred code passing the literals array. 5541 // jump to the deferred code passing the literals array.
5537 DeferredRegExpLiteral* deferred = 5542 DeferredRegExpLiteral* deferred =
5538 new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node); 5543 new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
5539 __ cmp(boilerplate.reg(), Factory::undefined_value()); 5544 __ cmp(boilerplate.reg(), FACTORY->undefined_value());
5540 deferred->Branch(equal); 5545 deferred->Branch(equal);
5541 deferred->BindExit(); 5546 deferred->BindExit();
5542 5547
5543 // Register of boilerplate contains RegExp object. 5548 // Register of boilerplate contains RegExp object.
5544 5549
5545 Result tmp = allocator()->Allocate(); 5550 Result tmp = allocator()->Allocate();
5546 ASSERT(tmp.is_valid()); 5551 ASSERT(tmp.is_valid());
5547 5552
5548 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 5553 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
5549 5554
(...skipping 137 matching lines...)
5687 5692
5688 // Load the literals array of the function. 5693 // Load the literals array of the function.
5689 __ mov(literals.reg(), 5694 __ mov(literals.reg(),
5690 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset)); 5695 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
5691 5696
5692 frame_->Push(&literals); 5697 frame_->Push(&literals);
5693 frame_->Push(Smi::FromInt(node->literal_index())); 5698 frame_->Push(Smi::FromInt(node->literal_index()));
5694 frame_->Push(node->constant_elements()); 5699 frame_->Push(node->constant_elements());
5695 int length = node->values()->length(); 5700 int length = node->values()->length();
5696 Result clone; 5701 Result clone;
5697 if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) { 5702 if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
5698 FastCloneShallowArrayStub stub( 5703 FastCloneShallowArrayStub stub(
5699 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); 5704 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
5700 clone = frame_->CallStub(&stub, 3); 5705 clone = frame_->CallStub(&stub, 3);
5701 __ IncrementCounter(&Counters::cow_arrays_created_stub, 1); 5706 __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1);
5702 } else if (node->depth() > 1) { 5707 } else if (node->depth() > 1) {
5703 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); 5708 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
5704 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 5709 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
5705 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); 5710 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
5706 } else { 5711 } else {
5707 FastCloneShallowArrayStub stub( 5712 FastCloneShallowArrayStub stub(
5708 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); 5713 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
5709 clone = frame_->CallStub(&stub, 3); 5714 clone = frame_->CallStub(&stub, 3);
5710 } 5715 }
5711 frame_->Push(&clone); 5716 frame_->Push(&clone);
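The stub-versus-runtime choice above, condensed into plain C++ (the constants come from the stub interface visible above; enum names are this sketch's own):

    enum CloneStrategy { kCowStub, kDeepRuntime, kShallowRuntime, kCloneStub };

    static CloneStrategy ChooseArrayLiteralClone(bool has_cow_map, int depth,
                                                 int length, int max_cloned) {
      if (has_cow_map) return kCowStub;     // COPY_ON_WRITE_ELEMENTS stub
      if (depth > 1) return kDeepRuntime;   // Runtime::kCreateArrayLiteral
      if (length > max_cloned)              // kMaximumClonedLength
        return kShallowRuntime;             // kCreateArrayLiteralShallow
      return kCloneStub;                    // CLONE_ELEMENTS stub
    }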
(...skipping 386 matching lines...)
6098 6103
6099 // In a call to eval, we first call %ResolvePossiblyDirectEval to 6104 // In a call to eval, we first call %ResolvePossiblyDirectEval to
6100 // resolve the function we need to call and the receiver of the 6105 // resolve the function we need to call and the receiver of the
6101 // call. Then we call the resolved function using the given 6106 // call. Then we call the resolved function using the given
6102 // arguments. 6107 // arguments.
6103 6108
6104 // Prepare the stack for the call to the resolved function. 6109 // Prepare the stack for the call to the resolved function.
6105 Load(function); 6110 Load(function);
6106 6111
6107 // Allocate a frame slot for the receiver. 6112 // Allocate a frame slot for the receiver.
6108 frame_->Push(Factory::undefined_value()); 6113 frame_->Push(FACTORY->undefined_value());
6109 6114
6110 // Load the arguments. 6115 // Load the arguments.
6111 int arg_count = args->length(); 6116 int arg_count = args->length();
6112 for (int i = 0; i < arg_count; i++) { 6117 for (int i = 0; i < arg_count; i++) {
6113 Load(args->at(i)); 6118 Load(args->at(i));
6114 frame_->SpillTop(); 6119 frame_->SpillTop();
6115 } 6120 }
6116 6121
6117 // Result to hold the result of the function resolution and the 6122 // Result to hold the result of the function resolution and the
6118 // final result of the eval call. 6123 // final result of the eval call.
(...skipping 11 matching lines...)
6130 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded 6135 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
6131 // function, the first argument to the eval call and the 6136 // function, the first argument to the eval call and the
6132 // receiver. 6137 // receiver.
6133 Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(), 6138 Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
6134 NOT_INSIDE_TYPEOF, 6139 NOT_INSIDE_TYPEOF,
6135 &slow); 6140 &slow);
6136 frame_->Push(&fun); 6141 frame_->Push(&fun);
6137 if (arg_count > 0) { 6142 if (arg_count > 0) {
6138 frame_->PushElementAt(arg_count); 6143 frame_->PushElementAt(arg_count);
6139 } else { 6144 } else {
6140 frame_->Push(Factory::undefined_value()); 6145 frame_->Push(FACTORY->undefined_value());
6141 } 6146 }
6142 frame_->PushParameterAt(-1); 6147 frame_->PushParameterAt(-1);
6143 6148
6144 // Push the strict mode flag. 6149 // Push the strict mode flag.
6145 frame_->Push(Smi::FromInt(strict_mode_flag())); 6150 frame_->Push(Smi::FromInt(strict_mode_flag()));
6146 6151
6147 // Resolve the call. 6152 // Resolve the call.
6148 result = 6153 result =
6149 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 4); 6154 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 4);
6150 6155
6151 done.Jump(&result); 6156 done.Jump(&result);
6152 slow.Bind(); 6157 slow.Bind();
6153 } 6158 }
6154 6159
6155 // Prepare the stack for the call to ResolvePossiblyDirectEval by 6160 // Prepare the stack for the call to ResolvePossiblyDirectEval by
6156 // pushing the loaded function, the first argument to the eval 6161 // pushing the loaded function, the first argument to the eval
6157 // call and the receiver. 6162 // call and the receiver.
6158 frame_->PushElementAt(arg_count + 1); 6163 frame_->PushElementAt(arg_count + 1);
6159 if (arg_count > 0) { 6164 if (arg_count > 0) {
6160 frame_->PushElementAt(arg_count); 6165 frame_->PushElementAt(arg_count);
6161 } else { 6166 } else {
6162 frame_->Push(Factory::undefined_value()); 6167 frame_->Push(FACTORY->undefined_value());
6163 } 6168 }
6164 frame_->PushParameterAt(-1); 6169 frame_->PushParameterAt(-1);
6165 6170
6166 // Push the strict mode flag. 6171 // Push the strict mode flag.
6167 frame_->Push(Smi::FromInt(strict_mode_flag())); 6172 frame_->Push(Smi::FromInt(strict_mode_flag()));
6168 6173
6169 // Resolve the call. 6174 // Resolve the call.
6170 result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 4); 6175 result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
6171 6176
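For reference, the four values pushed for the resolver above, in push order (a sketch of the expected layout; the struct itself is illustrative only):

    struct ResolveEvalArgs {   // arguments to %ResolvePossiblyDirectEval
      void* eval_function;     // frame_->PushElementAt(arg_count + 1)
      void* first_argument;    // or undefined_value when arg_count == 0
      void* receiver;          // frame_->PushParameterAt(-1)
      int   strict_mode_flag;  // Smi::FromInt(strict_mode_flag())
    };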
6172 // If we generated fast-case code bind the jump-target where fast 6177 // If we generated fast-case code bind the jump-target where fast
(...skipping 273 matching lines...)
6446 // 2 (array): Arguments to the format string. 6451 // 2 (array): Arguments to the format string.
6447 ASSERT_EQ(args->length(), 3); 6452 ASSERT_EQ(args->length(), 3);
6448 #ifdef ENABLE_LOGGING_AND_PROFILING 6453 #ifdef ENABLE_LOGGING_AND_PROFILING
6449 if (ShouldGenerateLog(args->at(0))) { 6454 if (ShouldGenerateLog(args->at(0))) {
6450 Load(args->at(1)); 6455 Load(args->at(1));
6451 Load(args->at(2)); 6456 Load(args->at(2));
6452 frame_->CallRuntime(Runtime::kLog, 2); 6457 frame_->CallRuntime(Runtime::kLog, 2);
6453 } 6458 }
6454 #endif 6459 #endif
6455 // Finally, we're expected to leave a value on the top of the stack. 6460 // Finally, we're expected to leave a value on the top of the stack.
6456 frame_->Push(Factory::undefined_value()); 6461 frame_->Push(FACTORY->undefined_value());
6457 } 6462 }
6458 6463
6459 6464
6460 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { 6465 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
6461 ASSERT(args->length() == 1); 6466 ASSERT(args->length() == 1);
6462 Load(args->at(0)); 6467 Load(args->at(0));
6463 Result value = frame_->Pop(); 6468 Result value = frame_->Pop();
6464 value.ToRegister(); 6469 value.ToRegister();
6465 ASSERT(value.is_valid()); 6470 ASSERT(value.is_valid());
6466 __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask)); 6471 __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
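The single test instruction above encodes the whole predicate. In C++ terms, with the ia32 smi layout (tag in bit 0, sign in bit 31):

    #include <stdint.h>

    // A word is a non-negative smi iff both the smi tag bit and the sign
    // bit are clear, so one masked test suffices.
    static bool IsNonNegativeSmi(uint32_t word) {
      const uint32_t kSmiTagMask = 1u;            // bit 0
      const uint32_t kSmiSignMask = 0x80000000u;  // bit 31
      return (word & (kSmiTagMask | kSmiSignMask)) == 0;
    }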
(...skipping 22 matching lines...)
6489 return &char_code_at_generator_; 6494 return &char_code_at_generator_;
6490 } 6495 }
6491 6496
6492 virtual void Generate() { 6497 virtual void Generate() {
6493 VirtualFrameRuntimeCallHelper call_helper(frame_state()); 6498 VirtualFrameRuntimeCallHelper call_helper(frame_state());
6494 char_code_at_generator_.GenerateSlow(masm(), call_helper); 6499 char_code_at_generator_.GenerateSlow(masm(), call_helper);
6495 6500
6496 __ bind(&need_conversion_); 6501 __ bind(&need_conversion_);
6497 // Move the undefined value into the result register, which will 6502 // Move the undefined value into the result register, which will
6498 // trigger conversion. 6503 // trigger conversion.
6499 __ Set(result_, Immediate(Factory::undefined_value())); 6504 __ Set(result_, Immediate(FACTORY->undefined_value()));
6500 __ jmp(exit_label()); 6505 __ jmp(exit_label());
6501 6506
6502 __ bind(&index_out_of_range_); 6507 __ bind(&index_out_of_range_);
6503 // When the index is out of range, the spec requires us to return 6508 // When the index is out of range, the spec requires us to return
6504 // NaN. 6509 // NaN.
6505 __ Set(result_, Immediate(Factory::nan_value())); 6510 __ Set(result_, Immediate(FACTORY->nan_value()));
6506 __ jmp(exit_label()); 6511 __ jmp(exit_label());
6507 } 6512 }
6508 6513
6509 private: 6514 private:
6510 Register result_; 6515 Register result_;
6511 6516
6512 Label need_conversion_; 6517 Label need_conversion_;
6513 Label index_out_of_range_; 6518 Label index_out_of_range_;
6514 6519
6515 StringCharCodeAtGenerator char_code_at_generator_; 6520 StringCharCodeAtGenerator char_code_at_generator_;
(...skipping 102 matching lines...)
6618 6623
6619 __ bind(&need_conversion_); 6624 __ bind(&need_conversion_);
6620 // Move smi zero into the result register, which will trigger 6625 // Move smi zero into the result register, which will trigger
6621 // conversion. 6626 // conversion.
6622 __ Set(result_, Immediate(Smi::FromInt(0))); 6627 __ Set(result_, Immediate(Smi::FromInt(0)));
6623 __ jmp(exit_label()); 6628 __ jmp(exit_label());
6624 6629
6625 __ bind(&index_out_of_range_); 6630 __ bind(&index_out_of_range_);
6626 // When the index is out of range, the spec requires us to return 6631 // When the index is out of range, the spec requires us to return
6627 // the empty string. 6632 // the empty string.
6628 __ Set(result_, Immediate(Factory::empty_string())); 6633 __ Set(result_, Immediate(FACTORY->empty_string()));
6629 __ jmp(exit_label()); 6634 __ jmp(exit_label());
6630 } 6635 }
6631 6636
6632 private: 6637 private:
6633 Register result_; 6638 Register result_;
6634 6639
6635 Label need_conversion_; 6640 Label need_conversion_;
6636 Label index_out_of_range_; 6641 Label index_out_of_range_;
6637 6642
6638 StringCharAtGenerator char_at_generator_; 6643 StringCharAtGenerator char_at_generator_;
(...skipping 97 matching lines...)
6736 6741
6737 // Check that the array has fast elements. 6742 // Check that the array has fast elements.
6738 __ test_b(FieldOperand(scratch, Map::kBitField2Offset), 6743 __ test_b(FieldOperand(scratch, Map::kBitField2Offset),
6739 1 << Map::kHasFastElements); 6744 1 << Map::kHasFastElements);
6740 __ j(zero, &bailout); 6745 __ j(zero, &bailout);
6741 6746
6742 // If the array has length zero, return the empty string. 6747 // If the array has length zero, return the empty string.
6743 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset)); 6748 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
6744 __ sar(array_length, 1); 6749 __ sar(array_length, 1);
6745 __ j(not_zero, &non_trivial_array); 6750 __ j(not_zero, &non_trivial_array);
6746 __ mov(result_operand, Factory::empty_string()); 6751 __ mov(result_operand, FACTORY->empty_string());
6747 __ jmp(&done); 6752 __ jmp(&done);
6748 6753
6749 // Save the array length. 6754 // Save the array length.
6750 __ bind(&non_trivial_array); 6755 __ bind(&non_trivial_array);
6751 __ mov(array_length_operand, array_length); 6756 __ mov(array_length_operand, array_length);
6752 6757
6753 // Save the FixedArray containing array's elements. 6758 // Save the FixedArray containing array's elements.
6754 // End of array's live range. 6759 // End of array's live range.
6755 elements = array; 6760 elements = array;
6756 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset)); 6761 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
(...skipping 190 matching lines...)
6947 FieldOperand(string, SeqAsciiString::kHeaderSize)); 6952 FieldOperand(string, SeqAsciiString::kHeaderSize));
6948 __ CopyBytes(string, result_pos, string_length, scratch); 6953 __ CopyBytes(string, result_pos, string_length, scratch);
6949 __ add(Operand(index), Immediate(1)); 6954 __ add(Operand(index), Immediate(1));
6950 6955
6951 __ cmp(index, array_length_operand); 6956 __ cmp(index, array_length_operand);
6952 __ j(less, &loop_3); // End while (index < length). 6957 __ j(less, &loop_3); // End while (index < length).
6953 __ jmp(&done); 6958 __ jmp(&done);
6954 6959
6955 6960
6956 __ bind(&bailout); 6961 __ bind(&bailout);
6957 __ mov(result_operand, Factory::undefined_value()); 6962 __ mov(result_operand, FACTORY->undefined_value());
6958 __ bind(&done); 6963 __ bind(&done);
6959 __ mov(eax, result_operand); 6964 __ mov(eax, result_operand);
6960 // Drop temp values from the stack, and restore context register. 6965 // Drop temp values from the stack, and restore context register.
6961 __ add(Operand(esp), Immediate(2 * kPointerSize)); 6966 __ add(Operand(esp), Immediate(2 * kPointerSize));
6962 6967
6963 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 6968 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
6964 frame_->Drop(1); 6969 frame_->Drop(1);
6965 frame_->Push(&array_result); 6970 frame_->Push(&array_result);
6966 } 6971 }
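The loop_3 copy above is the flat-join inner loop; sketched in C++ under the assumption that the earlier (partly skipped) passes already verified sequential ASCII strings and sized the result buffer (separator handling elided):

    #include <string.h>

    // Append each part into the preallocated result buffer (CopyBytes above),
    // advancing the write cursor, until index == array_length.
    static void JoinFlatStrings(const char* const* parts, const int* lengths,
                                int n, char* out) {
      for (int i = 0; i < n; i++) {
        memcpy(out, parts[i], (size_t)lengths[i]);
        out += lengths[i];
      }
    }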
6967 6972
(...skipping 20 matching lines...)
6988 void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) { 6993 void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
6989 // This generates a fast version of: 6994 // This generates a fast version of:
6990 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp') 6995 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
6991 ASSERT(args->length() == 1); 6996 ASSERT(args->length() == 1);
6992 Load(args->at(0)); 6997 Load(args->at(0));
6993 Result obj = frame_->Pop(); 6998 Result obj = frame_->Pop();
6994 obj.ToRegister(); 6999 obj.ToRegister();
6995 7000
6996 __ test(obj.reg(), Immediate(kSmiTagMask)); 7001 __ test(obj.reg(), Immediate(kSmiTagMask));
6997 destination()->false_target()->Branch(zero); 7002 destination()->false_target()->Branch(zero);
6998 __ cmp(obj.reg(), Factory::null_value()); 7003 __ cmp(obj.reg(), FACTORY->null_value());
6999 destination()->true_target()->Branch(equal); 7004 destination()->true_target()->Branch(equal);
7000 7005
7001 Result map = allocator()->Allocate(); 7006 Result map = allocator()->Allocate();
7002 ASSERT(map.is_valid()); 7007 ASSERT(map.is_valid());
7003 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); 7008 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
7004 // Undetectable objects behave like undefined when tested with typeof. 7009 // Undetectable objects behave like undefined when tested with typeof.
7005 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset), 7010 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
7006 1 << Map::kIsUndetectable); 7011 1 << Map::kIsUndetectable);
7007 destination()->false_target()->Branch(not_zero); 7012 destination()->false_target()->Branch(not_zero);
7008 // Do a range test for JSObject type. We can't use 7013 // Do a range test for JSObject type. We can't use
(...skipping 50 matching lines...)
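The branch structure of the %_IsObject fast path above evaluates the commented expression; as a condensed C++ truth table (the instance-type range test is the part elided by the skip):

    // typeof-style object test: smis are never objects, null is, undetectable
    // objects behave like undefined, and everything else must fall in the
    // FIRST..LAST_JS_OBJECT_TYPE instance-type range.
    static bool IsObjectForTypeof(bool is_smi, bool is_null,
                                  bool is_undetectable,
                                  bool in_js_object_range) {
      if (is_smi) return false;
      if (is_null) return true;
      if (is_undetectable) return false;
      return in_js_object_range;
    }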
7059 7064
7060 // Check that map is loaded as expected. 7065 // Check that map is loaded as expected.
7061 if (FLAG_debug_code) { 7066 if (FLAG_debug_code) {
7062 __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset)); 7067 __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
7063 __ Assert(equal, "Map not in expected register"); 7068 __ Assert(equal, "Map not in expected register");
7064 } 7069 }
7065 7070
7066 // Check for fast case object. Generate false result for slow case object. 7071 // Check for fast case object. Generate false result for slow case object.
7067 __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset)); 7072 __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
7068 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset)); 7073 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
7069 __ cmp(scratch1_, Factory::hash_table_map()); 7074 __ cmp(scratch1_, FACTORY->hash_table_map());
7070 __ j(equal, &false_result); 7075 __ j(equal, &false_result);
7071 7076
7072 // Look for valueOf symbol in the descriptor array, and indicate false if 7077 // Look for valueOf symbol in the descriptor array, and indicate false if
7073 // found. The type is not checked, so if it is a transition it is a false 7078 // found. The type is not checked, so if it is a transition it is a false
7074 // negative. 7079 // negative.
7075 __ mov(map_result_, 7080 __ mov(map_result_,
7076 FieldOperand(map_result_, Map::kInstanceDescriptorsOffset)); 7081 FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
7077 __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset)); 7082 __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
7078 // map_result_: descriptor array 7083 // map_result_: descriptor array
7079 // scratch1_: length of descriptor array 7084 // scratch1_: length of descriptor array
7080 // Calculate the end of the descriptor array. 7085 // Calculate the end of the descriptor array.
7081 STATIC_ASSERT(kSmiTag == 0); 7086 STATIC_ASSERT(kSmiTag == 0);
7082 STATIC_ASSERT(kSmiTagSize == 1); 7087 STATIC_ASSERT(kSmiTagSize == 1);
7083 STATIC_ASSERT(kPointerSize == 4); 7088 STATIC_ASSERT(kPointerSize == 4);
7084 __ lea(scratch1_, 7089 __ lea(scratch1_,
7085 Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize)); 7090 Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
7086 // Calculate location of the first key name. 7091 // Calculate location of the first key name.
7087 __ add(Operand(map_result_), 7092 __ add(Operand(map_result_),
7088 Immediate(FixedArray::kHeaderSize + 7093 Immediate(FixedArray::kHeaderSize +
7089 DescriptorArray::kFirstIndex * kPointerSize)); 7094 DescriptorArray::kFirstIndex * kPointerSize));
7090 // Loop through all the keys in the descriptor array. If one of these is the 7095 // Loop through all the keys in the descriptor array. If one of these is the
7091 // symbol valueOf, the result is false. 7096 // symbol valueOf, the result is false.
7092 Label entry, loop; 7097 Label entry, loop;
7093 __ jmp(&entry); 7098 __ jmp(&entry);
7094 __ bind(&loop); 7099 __ bind(&loop);
7095 __ mov(scratch2_, FieldOperand(map_result_, 0)); 7100 __ mov(scratch2_, FieldOperand(map_result_, 0));
7096 __ cmp(scratch2_, Factory::value_of_symbol()); 7101 __ cmp(scratch2_, FACTORY->value_of_symbol());
7097 __ j(equal, &false_result); 7102 __ j(equal, &false_result);
7098 __ add(Operand(map_result_), Immediate(kPointerSize)); 7103 __ add(Operand(map_result_), Immediate(kPointerSize));
7099 __ bind(&entry); 7104 __ bind(&entry);
7100 __ cmp(map_result_, Operand(scratch1_)); 7105 __ cmp(map_result_, Operand(scratch1_));
7101 __ j(not_equal, &loop); 7106 __ j(not_equal, &loop);
7102 7107
7103 // Reload map as register map_result_ was used as temporary above. 7108 // Reload map as register map_result_ was used as temporary above.
7104 __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset)); 7109 __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
7105 7110
7106 // If a valueOf property is not found on the object check that it's 7111 // If a valueOf property is not found on the object check that it's
(...skipping 194 matching lines...)
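The key scan above, restated in C++ (a sketch; the real keys are interned symbols compared by pointer, not strcmp, and a matching transition is tolerated as a safe false negative):

    #include <string.h>

    static bool DescriptorsContainValueOf(const char* const* keys, int count) {
      for (int i = 0; i < count; i++) {   // the loop/entry pair above
        if (strcmp(keys[i], "valueOf") == 0) return true;
      }
      return false;
    }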
7301 // instance class name from there. 7306 // instance class name from there.
7302 __ mov(obj.reg(), 7307 __ mov(obj.reg(),
7303 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset)); 7308 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
7304 __ mov(obj.reg(), 7309 __ mov(obj.reg(),
7305 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset)); 7310 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
7306 frame_->Push(&obj); 7311 frame_->Push(&obj);
7307 leave.Jump(); 7312 leave.Jump();
7308 7313
7309 // Functions have class 'Function'. 7314 // Functions have class 'Function'.
7310 function.Bind(); 7315 function.Bind();
7311 frame_->Push(Factory::function_class_symbol()); 7316 frame_->Push(FACTORY->function_class_symbol());
7312 leave.Jump(); 7317 leave.Jump();
7313 7318
7314 // Objects with a non-function constructor have class 'Object'. 7319 // Objects with a non-function constructor have class 'Object'.
7315 non_function_constructor.Bind(); 7320 non_function_constructor.Bind();
7316 frame_->Push(Factory::Object_symbol()); 7321 frame_->Push(FACTORY->Object_symbol());
7317 leave.Jump(); 7322 leave.Jump();
7318 7323
7319 // Non-JS objects have class null. 7324 // Non-JS objects have class null.
7320 null.Bind(); 7325 null.Bind();
7321 frame_->Push(Factory::null_value()); 7326 frame_->Push(FACTORY->null_value());
7322 7327
7323 // All done. 7328 // All done.
7324 leave.Bind(); 7329 leave.Bind();
7325 } 7330 }
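The four exits above form a small decision table; in C++ (sketch, with 0 standing in for null_value):

    static const char* ClassOf(bool is_js_object, bool is_function,
                               bool constructor_is_function,
                               const char* instance_class_name) {
      if (!is_js_object) return 0;                   // null_value
      if (is_function) return "Function";            // function_class_symbol
      if (!constructor_is_function) return "Object"; // Object_symbol
      return instance_class_name;                    // from SharedFunctionInfo
    }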
7326 7331
7327 7332
7328 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { 7333 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
7329 ASSERT(args->length() == 1); 7334 ASSERT(args->length() == 1);
7330 JumpTarget leave; 7335 JumpTarget leave;
7331 Load(args->at(0)); // Load the object. 7336 Load(args->at(0)); // Load the object.
(...skipping 123 matching lines...)
7455 7460
7456 __ bind(&heapnumber_allocated); 7461 __ bind(&heapnumber_allocated);
7457 7462
7458 __ PrepareCallCFunction(0, ebx); 7463 __ PrepareCallCFunction(0, ebx);
7459 __ CallCFunction(ExternalReference::random_uint32_function(), 0); 7464 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
7460 7465
7461 // Convert 32 random bits in eax to 0.(32 random bits) in a double 7466 // Convert 32 random bits in eax to 0.(32 random bits) in a double
7462 // by computing: 7467 // by computing:
7463 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). 7468 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
7464 // This is implemented on both SSE2 and FPU. 7469 // This is implemented on both SSE2 and FPU.
7465 if (CpuFeatures::IsSupported(SSE2)) { 7470 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
7466 CpuFeatures::Scope fscope(SSE2); 7471 CpuFeatures::Scope fscope(SSE2);
7467 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single. 7472 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
7468 __ movd(xmm1, Operand(ebx)); 7473 __ movd(xmm1, Operand(ebx));
7469 __ movd(xmm0, Operand(eax)); 7474 __ movd(xmm0, Operand(eax));
7470 __ cvtss2sd(xmm1, xmm1); 7475 __ cvtss2sd(xmm1, xmm1);
7471 __ pxor(xmm0, xmm1); 7476 __ pxor(xmm0, xmm1);
7472 __ subsd(xmm0, xmm1); 7477 __ subsd(xmm0, xmm1);
7473 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0); 7478 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
7474 } else { 7479 } else {
7475 // 0x4130000000000000 is 1.0 x 2^20 as a double. 7480 // 0x4130000000000000 is 1.0 x 2^20 as a double.
(...skipping 196 matching lines...)
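The bit trick in the comment above can be checked in plain C++; pxor and a bitwise OR coincide here because the low mantissa bits of 1.0 x 2^20 are zero (a sketch, not the emitted code path):

    #include <stdint.h>
    #include <string.h>

    // Place 32 random bits in the low mantissa of the double 1.0 x 2^20,
    // then subtract 1.0 x 2^20; the result is bits * 2^-32, uniform in [0,1).
    static double RandomBitsToDouble(uint32_t bits) {
      uint64_t word = 0x4130000000000000ull | bits;  // 1.0 x 2^20 as a double
      double d;
      memcpy(&d, &word, sizeof(d));
      return d - 1048576.0;  // 1048576.0 == 2^20
    }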
7672 } 7677 }
7673 7678
7674 7679
7675 void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) { 7680 void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
7676 ASSERT_EQ(2, args->length()); 7681 ASSERT_EQ(2, args->length());
7677 7682
7678 ASSERT_NE(NULL, args->at(0)->AsLiteral()); 7683 ASSERT_NE(NULL, args->at(0)->AsLiteral());
7679 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); 7684 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
7680 7685
7681 Handle<FixedArray> jsfunction_result_caches( 7686 Handle<FixedArray> jsfunction_result_caches(
7682 Top::global_context()->jsfunction_result_caches()); 7687 Isolate::Current()->global_context()->jsfunction_result_caches());
7683 if (jsfunction_result_caches->length() <= cache_id) { 7688 if (jsfunction_result_caches->length() <= cache_id) {
7684 __ Abort("Attempt to use undefined cache."); 7689 __ Abort("Attempt to use undefined cache.");
7685 frame_->Push(Factory::undefined_value()); 7690 frame_->Push(FACTORY->undefined_value());
7686 return; 7691 return;
7687 } 7692 }
7688 7693
7689 Load(args->at(1)); 7694 Load(args->at(1));
7690 Result key = frame_->Pop(); 7695 Result key = frame_->Pop();
7691 key.ToRegister(); 7696 key.ToRegister();
7692 7697
7693 Result cache = allocator()->Allocate(); 7698 Result cache = allocator()->Allocate();
7694 ASSERT(cache.is_valid()); 7699 ASSERT(cache.is_valid());
7695 __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX)); 7700 __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
(...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after
7792 // has no indexed interceptor. 7797 // has no indexed interceptor.
7793 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg()); 7798 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
7794 deferred->Branch(below); 7799 deferred->Branch(below);
7795 __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset), 7800 __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
7796 KeyedLoadIC::kSlowCaseBitFieldMask); 7801 KeyedLoadIC::kSlowCaseBitFieldMask);
7797 deferred->Branch(not_zero); 7802 deferred->Branch(not_zero);
7798 7803
7799 // Check the object's elements are in fast case and writable. 7804 // Check the object's elements are in fast case and writable.
7800 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset)); 7805 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
7801 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset), 7806 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
7802 Immediate(Factory::fixed_array_map())); 7807 Immediate(FACTORY->fixed_array_map()));
7803 deferred->Branch(not_equal); 7808 deferred->Branch(not_equal);
7804 7809
7805 // Smi-tagging is equivalent to multiplying by 2. 7810 // Smi-tagging is equivalent to multiplying by 2.
7806 STATIC_ASSERT(kSmiTag == 0); 7811 STATIC_ASSERT(kSmiTag == 0);
7807 STATIC_ASSERT(kSmiTagSize == 1); 7812 STATIC_ASSERT(kSmiTagSize == 1);
7808 7813
7809 // Check that both indices are smis. 7814 // Check that both indices are smis.
7810 __ mov(tmp2.reg(), index1.reg()); 7815 __ mov(tmp2.reg(), index1.reg());
7811 __ or_(tmp2.reg(), Operand(index2.reg())); 7816 __ or_(tmp2.reg(), Operand(index2.reg()));
7812 __ test(tmp2.reg(), Immediate(kSmiTagMask)); 7817 __ test(tmp2.reg(), Immediate(kSmiTagMask));
(...skipping 29 matching lines...) Expand all
7842 index1.reg(), 7847 index1.reg(),
7843 object.reg(), 7848 object.reg(),
7844 kDontSaveFPRegs); 7849 kDontSaveFPRegs);
7845 __ RememberedSetHelper(tmp1.reg(), 7850 __ RememberedSetHelper(tmp1.reg(),
7846 index2.reg(), 7851 index2.reg(),
7847 object.reg(), 7852 object.reg(),
7848 kDontSaveFPRegs); 7853 kDontSaveFPRegs);
7849 __ bind(&done); 7854 __ bind(&done);
7850 7855
7851 deferred->BindExit(); 7856 deferred->BindExit();
7852 frame_->Push(Factory::undefined_value()); 7857 frame_->Push(FACTORY->undefined_value());
7853 } 7858 }
7854 7859
7855 7860
7856 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { 7861 void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
7857 Comment cmnt(masm_, "[ GenerateCallFunction"); 7862 Comment cmnt(masm_, "[ GenerateCallFunction");
7858 7863
7859 ASSERT(args->length() >= 2); 7864 ASSERT(args->length() >= 2);
7860 7865
7861 int n_args = args->length() - 2; // for receiver and function. 7866 int n_args = args->length() - 2; // for receiver and function.
7862 Load(args->at(0)); // receiver 7867 Load(args->at(0)); // receiver
7863 for (int i = 0; i < n_args; i++) { 7868 for (int i = 0; i < n_args; i++) {
7864 Load(args->at(i + 1)); 7869 Load(args->at(i + 1));
7865 } 7870 }
7866 Load(args->at(n_args + 1)); // function 7871 Load(args->at(n_args + 1)); // function
7867 Result result = frame_->CallJSFunction(n_args); 7872 Result result = frame_->CallJSFunction(n_args);
7868 frame_->Push(&result); 7873 frame_->Push(&result);
7869 } 7874 }
7870 7875
7871 7876
7872 // Generates the Math.pow method. Only handles special cases and 7877 // Generates the Math.pow method. Only handles special cases and
7873 // branches to the runtime system for everything else. Please note 7878 // branches to the runtime system for everything else. Please note
7874 // that this function assumes that the callsite has executed ToNumber 7879 // that this function assumes that the callsite has executed ToNumber
7875 // on both arguments. 7880 // on both arguments.
7876 void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) { 7881 void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
7877 ASSERT(args->length() == 2); 7882 ASSERT(args->length() == 2);
7878 Load(args->at(0)); 7883 Load(args->at(0));
7879 Load(args->at(1)); 7884 Load(args->at(1));
7880 if (!CpuFeatures::IsSupported(SSE2)) { 7885 if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
7881 Result res = frame_->CallRuntime(Runtime::kMath_pow, 2); 7886 Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
7882 frame_->Push(&res); 7887 frame_->Push(&res);
7883 } else { 7888 } else {
7884 CpuFeatures::Scope use_sse2(SSE2); 7889 CpuFeatures::Scope use_sse2(SSE2);
7885 Label allocate_return; 7890 Label allocate_return;
7886 // Load the two operands while leaving the values on the frame. 7891 // Load the two operands while leaving the values on the frame.
7887 frame()->Dup(); 7892 frame()->Dup();
7888 Result exponent = frame()->Pop(); 7893 Result exponent = frame()->Pop();
7889 exponent.ToRegister(); 7894 exponent.ToRegister();
7890 frame()->Spill(exponent.reg()); 7895 frame()->Spill(exponent.reg());
(...skipping 20 matching lines...)
7911 __ j(not_zero, &base_nonsmi); 7916 __ j(not_zero, &base_nonsmi);
7912 7917
7913 // Optimized version when y is an integer. 7918 // Optimized version when y is an integer.
7914 Label powi; 7919 Label powi;
7915 __ SmiUntag(base.reg()); 7920 __ SmiUntag(base.reg());
7916 __ cvtsi2sd(xmm0, Operand(base.reg())); 7921 __ cvtsi2sd(xmm0, Operand(base.reg()));
7917 __ jmp(&powi); 7922 __ jmp(&powi);
7918 // exponent is smi and base is a heapnumber. 7923 // exponent is smi and base is a heapnumber.
7919 __ bind(&base_nonsmi); 7924 __ bind(&base_nonsmi);
7920 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset), 7925 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7921 Factory::heap_number_map()); 7926 FACTORY->heap_number_map());
7922 call_runtime.Branch(not_equal); 7927 call_runtime.Branch(not_equal);
7923 7928
7924 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset)); 7929 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7925 7930
7926 // Optimized version of pow if y is an integer. 7931 // Optimized version of pow if y is an integer.
7927 __ bind(&powi); 7932 __ bind(&powi);
7928 __ SmiUntag(exponent.reg()); 7933 __ SmiUntag(exponent.reg());
7929 7934
7930 // Save exponent in base as we need to check if exponent is negative later. 7935 // Save exponent in base as we need to check if exponent is negative later.
7931 // We know that base and exponent are in different registers. 7936 // We know that base and exponent are in different registers.
(...skipping 30 matching lines...)
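The skipped integer-exponent loop is, plausibly, square-and-multiply; a hedged C++ sketch consistent with the comments above (the "check if exponent is negative later" note suggests a final reciprocal, but this is an assumption about the elided instructions, not a transcription of them):

    static double PowInt(double x, int y) {
      unsigned n = (y < 0) ? 0u - (unsigned)y : (unsigned)y;
      double result = 1.0;
      for (double base = x; n != 0; n >>= 1) {
        if (n & 1u) result *= base;  // multiply in the current bit
        base *= base;                // square for the next bit
      }
      return (y < 0) ? 1.0 / result : result;
    }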
7962 __ ucomisd(xmm0, xmm1); 7967 __ ucomisd(xmm0, xmm1);
7963 call_runtime.Branch(equal); 7968 call_runtime.Branch(equal);
7964 __ divsd(xmm3, xmm1); 7969 __ divsd(xmm3, xmm1);
7965 __ movsd(xmm1, xmm3); 7970 __ movsd(xmm1, xmm3);
7966 __ jmp(&allocate_return); 7971 __ jmp(&allocate_return);
7967 7972
7968 // The exponent (or both operands) is a heap number; from here on we 7973 // The exponent (or both operands) is a heap number; from here on we
7969 // work on doubles. 7974 // work on doubles.
7970 __ bind(&exponent_nonsmi); 7975 __ bind(&exponent_nonsmi);
7971 __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset), 7976 __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
7972 Factory::heap_number_map()); 7977 FACTORY->heap_number_map());
7973 call_runtime.Branch(not_equal); 7978 call_runtime.Branch(not_equal);
7974 __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset)); 7979 __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
7975 // Test if exponent is NaN. 7980 // Test if exponent is NaN.
7976 __ ucomisd(xmm1, xmm1); 7981 __ ucomisd(xmm1, xmm1);
7977 call_runtime.Branch(parity_even); 7982 call_runtime.Branch(parity_even);
7978 7983
7979 Label base_not_smi; 7984 Label base_not_smi;
7980 Label handle_special_cases; 7985 Label handle_special_cases;
7981 __ test(base.reg(), Immediate(kSmiTagMask)); 7986 __ test(base.reg(), Immediate(kSmiTagMask));
7982 __ j(not_zero, &base_not_smi); 7987 __ j(not_zero, &base_not_smi);
7983 __ SmiUntag(base.reg()); 7988 __ SmiUntag(base.reg());
7984 __ cvtsi2sd(xmm0, Operand(base.reg())); 7989 __ cvtsi2sd(xmm0, Operand(base.reg()));
7985 __ jmp(&handle_special_cases); 7990 __ jmp(&handle_special_cases);
7986 __ bind(&base_not_smi); 7991 __ bind(&base_not_smi);
7987 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset), 7992 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7988 Factory::heap_number_map()); 7993 FACTORY->heap_number_map());
7989 call_runtime.Branch(not_equal); 7994 call_runtime.Branch(not_equal);
7990 __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset)); 7995 __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
7991 __ and_(answer.reg(), HeapNumber::kExponentMask); 7996 __ and_(answer.reg(), HeapNumber::kExponentMask);
7992 __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask)); 7997 __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
7993 // base is NaN or +/-Infinity 7998 // base is NaN or +/-Infinity
7994 call_runtime.Branch(greater_equal); 7999 call_runtime.Branch(greater_equal);
7995 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset)); 8000 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7996 8001
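The mask-and-compare on the exponent word classifies the base without reading the full value: if the 11 exponent bits are all ones, the double is NaN or +/-Infinity. A standalone sketch of the same test (0x7FF00000 is the exponent field in the high 32 bits of an IEEE-754 double):

    #include <stdint.h>
    #include <string.h>
    // True when d is NaN or +/-Infinity: the biased exponent is all ones.
    static bool IsNaNOrInfinity(double d) {
      uint64_t bits;
      memcpy(&bits, &d, sizeof(bits));             // safe type-pun
      const uint32_t kExponentMask = 0x7FF00000u;  // bits 20..30 of the high word
      uint32_t hi = (uint32_t)(bits >> 32);
      return (hi & kExponentMask) == kExponentMask;
    }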
7997 // base is in xmm0 and exponent is in xmm1. 8002 // base is in xmm0 and exponent is in xmm1.
7998 __ bind(&handle_special_cases); 8003 __ bind(&handle_special_cases);
(...skipping 88 matching lines...)
8087 frame_->Push(&result); 8092 frame_->Push(&result);
8088 } 8093 }
8089 8094
8090 8095
8091 // Generates the Math.sqrt method. Please note - this function assumes that 8096 // Generates the Math.sqrt method. Please note - this function assumes that
8092 // the call site has already executed ToNumber on the argument. 8097 // the call site has already executed ToNumber on the argument.
8093 void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) { 8098 void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
8094 ASSERT_EQ(args->length(), 1); 8099 ASSERT_EQ(args->length(), 1);
8095 Load(args->at(0)); 8100 Load(args->at(0));
8096 8101
8097 if (!CpuFeatures::IsSupported(SSE2)) { 8102 if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
8098 Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1); 8103 Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
8099 frame()->Push(&result); 8104 frame()->Push(&result);
8100 } else { 8105 } else {
8101 CpuFeatures::Scope use_sse2(SSE2); 8106 CpuFeatures::Scope use_sse2(SSE2);
8102 // Leave original value on the frame if we need to call runtime. 8107 // Leave original value on the frame if we need to call runtime.
8103 frame()->Dup(); 8108 frame()->Dup();
8104 Result result = frame()->Pop(); 8109 Result result = frame()->Pop();
8105 result.ToRegister(); 8110 result.ToRegister();
8106 frame()->Spill(result.reg()); 8111 frame()->Spill(result.reg());
8107 Label runtime; 8112 Label runtime;
8108 Label non_smi; 8113 Label non_smi;
8109 Label load_done; 8114 Label load_done;
8110 JumpTarget end; 8115 JumpTarget end;
8111 8116
8112 __ test(result.reg(), Immediate(kSmiTagMask)); 8117 __ test(result.reg(), Immediate(kSmiTagMask));
8113 __ j(not_zero, &non_smi); 8118 __ j(not_zero, &non_smi);
8114 __ SmiUntag(result.reg()); 8119 __ SmiUntag(result.reg());
8115 __ cvtsi2sd(xmm0, Operand(result.reg())); 8120 __ cvtsi2sd(xmm0, Operand(result.reg()));
8116 __ jmp(&load_done); 8121 __ jmp(&load_done);
8117 __ bind(&non_smi); 8122 __ bind(&non_smi);
8118 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset), 8123 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
8119 Factory::heap_number_map()); 8124 FACTORY->heap_number_map());
8120 __ j(not_equal, &runtime); 8125 __ j(not_equal, &runtime);
8121 __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset)); 8126 __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
8122 8127
8123 __ bind(&load_done); 8128 __ bind(&load_done);
8124 __ sqrtsd(xmm0, xmm0); 8129 __ sqrtsd(xmm0, xmm0);
8125 // Make a copy of the virtual frame so we can fall back to the runtime 8130 // Make a copy of the virtual frame so we can fall back to the runtime
8126 // after the JumpTarget jump. 8131 // after the JumpTarget jump.
8127 Result scratch = allocator()->Allocate(); 8132 Result scratch = allocator()->Allocate();
8128 VirtualFrame* clone = new VirtualFrame(frame()); 8133 VirtualFrame* clone = new VirtualFrame(frame());
8129 __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime); 8134 __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);
(...skipping 85 matching lines...)
8215 8220
8216 8221
8217 void CodeGenerator::VisitCallRuntime(CallRuntime* node) { 8222 void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
8218 ASSERT(!in_safe_int32_mode()); 8223 ASSERT(!in_safe_int32_mode());
8219 if (CheckForInlineRuntimeCall(node)) { 8224 if (CheckForInlineRuntimeCall(node)) {
8220 return; 8225 return;
8221 } 8226 }
8222 8227
8223 ZoneList<Expression*>* args = node->arguments(); 8228 ZoneList<Expression*>* args = node->arguments();
8224 Comment cmnt(masm_, "[ CallRuntime"); 8229 Comment cmnt(masm_, "[ CallRuntime");
8225 Runtime::Function* function = node->function(); 8230 const Runtime::Function* function = node->function();
8226 8231
8227 if (function == NULL) { 8232 if (function == NULL) {
8228 // Push the builtins object found in the current global object. 8233 // Push the builtins object found in the current global object.
8229 Result temp = allocator()->Allocate(); 8234 Result temp = allocator()->Allocate();
8230 ASSERT(temp.is_valid()); 8235 ASSERT(temp.is_valid());
8231 __ mov(temp.reg(), GlobalObjectOperand()); 8236 __ mov(temp.reg(), GlobalObjectOperand());
8232 __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset)); 8237 __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
8233 frame_->Push(&temp); 8238 frame_->Push(&temp);
8234 } 8239 }
8235 8240
(...skipping 62 matching lines...)
8298 // variable. Sync the virtual frame eagerly so we can push the 8303 // variable. Sync the virtual frame eagerly so we can push the
8299 // arguments directly into place. 8304 // arguments directly into place.
8300 frame_->SyncRange(0, frame_->element_count() - 1); 8305 frame_->SyncRange(0, frame_->element_count() - 1);
8301 frame_->EmitPush(esi); 8306 frame_->EmitPush(esi);
8302 frame_->EmitPush(Immediate(variable->name())); 8307 frame_->EmitPush(Immediate(variable->name()));
8303 Result answer = frame_->CallRuntime(Runtime::kDeleteContextSlot, 2); 8308 Result answer = frame_->CallRuntime(Runtime::kDeleteContextSlot, 2);
8304 frame_->Push(&answer); 8309 frame_->Push(&answer);
8305 } else { 8310 } else {
8306 // Default: Result of deleting non-global, not dynamically 8311 // Default: Result of deleting non-global, not dynamically
8307 // introduced variables is false. 8312 // introduced variables is false.
8308 frame_->Push(Factory::false_value()); 8313 frame_->Push(FACTORY->false_value());
8309 } 8314 }
8310 } else { 8315 } else {
8311 // Default: Result of deleting expressions is true. 8316 // Default: Result of deleting expressions is true.
8312 Load(node->expression()); // may have side-effects 8317 Load(node->expression()); // may have side-effects
8313 frame_->SetElementAt(0, Factory::true_value()); 8318 frame_->SetElementAt(0, FACTORY->true_value());
8314 } 8319 }
8315 8320
8316 } else if (op == Token::TYPEOF) { 8321 } else if (op == Token::TYPEOF) {
8317 // Special case for loading the typeof expression; see comment on 8322 // Special case for loading the typeof expression; see comment on
8318 // LoadTypeofExpression(). 8323 // LoadTypeofExpression().
8319 LoadTypeofExpression(node->expression()); 8324 LoadTypeofExpression(node->expression());
8320 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1); 8325 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
8321 frame_->Push(&answer); 8326 frame_->Push(&answer);
8322 8327
8323 } else if (op == Token::VOID) { 8328 } else if (op == Token::VOID) {
8324 Expression* expression = node->expression(); 8329 Expression* expression = node->expression();
8325 if (expression && expression->AsLiteral() && ( 8330 if (expression && expression->AsLiteral() && (
8326 expression->AsLiteral()->IsTrue() || 8331 expression->AsLiteral()->IsTrue() ||
8327 expression->AsLiteral()->IsFalse() || 8332 expression->AsLiteral()->IsFalse() ||
8328 expression->AsLiteral()->handle()->IsNumber() || 8333 expression->AsLiteral()->handle()->IsNumber() ||
8329 expression->AsLiteral()->handle()->IsString() || 8334 expression->AsLiteral()->handle()->IsString() ||
8330 expression->AsLiteral()->handle()->IsJSRegExp() || 8335 expression->AsLiteral()->handle()->IsJSRegExp() ||
8331 expression->AsLiteral()->IsNull())) { 8336 expression->AsLiteral()->IsNull())) {
8332 // Omit evaluating the value of the primitive literal. 8337 // Omit evaluating the value of the primitive literal.
8333 // It will be discarded anyway, and can have no side effect. 8338 // It will be discarded anyway, and can have no side effect.
8334 frame_->Push(Factory::undefined_value()); 8339 frame_->Push(FACTORY->undefined_value());
8335 } else { 8340 } else {
8336 Load(node->expression()); 8341 Load(node->expression());
8337 frame_->SetElementAt(0, Factory::undefined_value()); 8342 frame_->SetElementAt(0, FACTORY->undefined_value());
8338 } 8343 }
8339 8344
8340 } else { 8345 } else {
8341 if (in_safe_int32_mode()) { 8346 if (in_safe_int32_mode()) {
8342 Visit(node->expression()); 8347 Visit(node->expression());
8343 Result value = frame_->Pop(); 8348 Result value = frame_->Pop();
8344 ASSERT(value.is_untagged_int32()); 8349 ASSERT(value.is_untagged_int32());
8345 // Registers containing an int32 value are not multiply used. 8350 // Registers containing an int32 value are not multiply used.
8346 ASSERT(!value.is_register() || !frame_->is_used(value.reg())); 8351 ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
8347 value.ToRegister(); 8352 value.ToRegister();
(...skipping 781 matching lines...)
9129 (operation != NULL && operation->op() == Token::TYPEOF) && 9134 (operation != NULL && operation->op() == Token::TYPEOF) &&
9130 (right->AsLiteral() != NULL && 9135 (right->AsLiteral() != NULL &&
9131 right->AsLiteral()->handle()->IsString())) { 9136 right->AsLiteral()->handle()->IsString())) {
9132 Handle<String> check(String::cast(*right->AsLiteral()->handle())); 9137 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
9133 9138
9134 // Load the operand and move it to a register. 9139 // Load the operand and move it to a register.
9135 LoadTypeofExpression(operation->expression()); 9140 LoadTypeofExpression(operation->expression());
9136 Result answer = frame_->Pop(); 9141 Result answer = frame_->Pop();
9137 answer.ToRegister(); 9142 answer.ToRegister();
9138 9143
9139 if (check->Equals(Heap::number_symbol())) { 9144 if (check->Equals(HEAP->number_symbol())) {
9140 __ test(answer.reg(), Immediate(kSmiTagMask)); 9145 __ test(answer.reg(), Immediate(kSmiTagMask));
9141 destination()->true_target()->Branch(zero); 9146 destination()->true_target()->Branch(zero);
9142 frame_->Spill(answer.reg()); 9147 frame_->Spill(answer.reg());
9143 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); 9148 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
9144 __ cmp(answer.reg(), Factory::heap_number_map()); 9149 __ cmp(answer.reg(), FACTORY->heap_number_map());
9145 answer.Unuse(); 9150 answer.Unuse();
9146 destination()->Split(equal); 9151 destination()->Split(equal);
9147 9152
9148 } else if (check->Equals(Heap::string_symbol())) { 9153 } else if (check->Equals(HEAP->string_symbol())) {
9149 __ test(answer.reg(), Immediate(kSmiTagMask)); 9154 __ test(answer.reg(), Immediate(kSmiTagMask));
9150 destination()->false_target()->Branch(zero); 9155 destination()->false_target()->Branch(zero);
9151 9156
9152 // It can be an undetectable string object. 9157 // It can be an undetectable string object.
9153 Result temp = allocator()->Allocate(); 9158 Result temp = allocator()->Allocate();
9154 ASSERT(temp.is_valid()); 9159 ASSERT(temp.is_valid());
9155 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); 9160 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
9156 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset), 9161 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
9157 1 << Map::kIsUndetectable); 9162 1 << Map::kIsUndetectable);
9158 destination()->false_target()->Branch(not_zero); 9163 destination()->false_target()->Branch(not_zero);
9159 __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE); 9164 __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE);
9160 temp.Unuse(); 9165 temp.Unuse();
9161 answer.Unuse(); 9166 answer.Unuse();
9162 destination()->Split(below); 9167 destination()->Split(below);
9163 9168
9164 } else if (check->Equals(Heap::boolean_symbol())) { 9169 } else if (check->Equals(HEAP->boolean_symbol())) {
9165 __ cmp(answer.reg(), Factory::true_value()); 9170 __ cmp(answer.reg(), FACTORY->true_value());
9166 destination()->true_target()->Branch(equal); 9171 destination()->true_target()->Branch(equal);
9167 __ cmp(answer.reg(), Factory::false_value()); 9172 __ cmp(answer.reg(), FACTORY->false_value());
9168 answer.Unuse(); 9173 answer.Unuse();
9169 destination()->Split(equal); 9174 destination()->Split(equal);
9170 9175
9171 } else if (check->Equals(Heap::undefined_symbol())) { 9176 } else if (check->Equals(HEAP->undefined_symbol())) {
9172 __ cmp(answer.reg(), Factory::undefined_value()); 9177 __ cmp(answer.reg(), FACTORY->undefined_value());
9173 destination()->true_target()->Branch(equal); 9178 destination()->true_target()->Branch(equal);
9174 9179
9175 __ test(answer.reg(), Immediate(kSmiTagMask)); 9180 __ test(answer.reg(), Immediate(kSmiTagMask));
9176 destination()->false_target()->Branch(zero); 9181 destination()->false_target()->Branch(zero);
9177 9182
9178 // It can be an undetectable object. 9183 // It can be an undetectable object.
9179 frame_->Spill(answer.reg()); 9184 frame_->Spill(answer.reg());
9180 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset)); 9185 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
9181 __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset), 9186 __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset),
9182 1 << Map::kIsUndetectable); 9187 1 << Map::kIsUndetectable);
9183 answer.Unuse(); 9188 answer.Unuse();
9184 destination()->Split(not_zero); 9189 destination()->Split(not_zero);
9185 9190
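Both the string and undefined cases consult the map's bit field: an object whose map has the kIsUndetectable bit set must report typeof 'undefined' rather than 'string' or 'object' (document.all is the classic such object). A sketch of the bit test, with an illustrative bit position rather than V8's real one:

    #include <stdint.h>
    static const int kIsUndetectable = 5;  // illustrative only; see V8's Map definition
    static inline bool IsUndetectable(uint8_t map_bit_field) {
      return (map_bit_field & (1 << kIsUndetectable)) != 0;
    }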
9186 } else if (check->Equals(Heap::function_symbol())) { 9191 } else if (check->Equals(HEAP->function_symbol())) {
9187 __ test(answer.reg(), Immediate(kSmiTagMask)); 9192 __ test(answer.reg(), Immediate(kSmiTagMask));
9188 destination()->false_target()->Branch(zero); 9193 destination()->false_target()->Branch(zero);
9189 frame_->Spill(answer.reg()); 9194 frame_->Spill(answer.reg());
9190 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg()); 9195 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
9191 destination()->true_target()->Branch(equal); 9196 destination()->true_target()->Branch(equal);
9192 // Regular expressions are callable so typeof == 'function'. 9197 // Regular expressions are callable so typeof == 'function'.
9193 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE); 9198 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
9194 answer.Unuse(); 9199 answer.Unuse();
9195 destination()->Split(equal); 9200 destination()->Split(equal);
9196 } else if (check->Equals(Heap::object_symbol())) { 9201 } else if (check->Equals(HEAP->object_symbol())) {
9197 __ test(answer.reg(), Immediate(kSmiTagMask)); 9202 __ test(answer.reg(), Immediate(kSmiTagMask));
9198 destination()->false_target()->Branch(zero); 9203 destination()->false_target()->Branch(zero);
9199 __ cmp(answer.reg(), Factory::null_value()); 9204 __ cmp(answer.reg(), FACTORY->null_value());
9200 destination()->true_target()->Branch(equal); 9205 destination()->true_target()->Branch(equal);
9201 9206
9202 Result map = allocator()->Allocate(); 9207 Result map = allocator()->Allocate();
9203 ASSERT(map.is_valid()); 9208 ASSERT(map.is_valid());
9204 // Regular expressions are typeof == 'function', not 'object'. 9209 // Regular expressions are typeof == 'function', not 'object'.
9205 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg()); 9210 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
9206 destination()->false_target()->Branch(equal); 9211 destination()->false_target()->Branch(equal);
9207 9212
9208 // It can be an undetectable object. 9213 // It can be an undetectable object.
9209 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset), 9214 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
(...skipping 22 matching lines...)
9232 if (check->value() == 2147483648.0) { // 0x80000000. 9237 if (check->value() == 2147483648.0) { // 0x80000000.
9233 Load(left); 9238 Load(left);
9234 left_already_loaded = true; 9239 left_already_loaded = true;
9235 Result lhs = frame_->Pop(); 9240 Result lhs = frame_->Pop();
9236 lhs.ToRegister(); 9241 lhs.ToRegister();
9237 __ test(lhs.reg(), Immediate(kSmiTagMask)); 9242 __ test(lhs.reg(), Immediate(kSmiTagMask));
9238 destination()->true_target()->Branch(zero); // All Smis are less. 9243 destination()->true_target()->Branch(zero); // All Smis are less.
9239 Result scratch = allocator()->Allocate(); 9244 Result scratch = allocator()->Allocate();
9240 ASSERT(scratch.is_valid()); 9245 ASSERT(scratch.is_valid());
9241 __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset)); 9246 __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
9242 __ cmp(scratch.reg(), Factory::heap_number_map()); 9247 __ cmp(scratch.reg(), FACTORY->heap_number_map());
9243 JumpTarget not_a_number; 9248 JumpTarget not_a_number;
9244 not_a_number.Branch(not_equal, &lhs); 9249 not_a_number.Branch(not_equal, &lhs);
9245 __ mov(scratch.reg(), 9250 __ mov(scratch.reg(),
9246 FieldOperand(lhs.reg(), HeapNumber::kExponentOffset)); 9251 FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
9247 __ cmp(Operand(scratch.reg()), Immediate(0xfff00000)); 9252 __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
9248 not_a_number.Branch(above_equal, &lhs); // It's a negative NaN or -Inf. 9253 not_a_number.Branch(above_equal, &lhs); // It's a negative NaN or -Inf.
9249 const uint32_t borderline_exponent = 9254 const uint32_t borderline_exponent =
9250 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift; 9255 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
9251 __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent)); 9256 __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
9252 scratch.Unuse(); 9257 scratch.Unuse();
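The borderline_exponent comparison works because, for a nonnegative double, magnitude is monotonic in the biased exponent field: 2^31 has biased exponent kExponentBias + 31, so any value whose exponent field is at least that large cannot be below 2^31. The same classification as a standalone sketch (sign and NaN are assumed to have been handled, as in the code above):

    #include <stdint.h>
    #include <string.h>
    // True when a finite, nonnegative d satisfies d >= 2^31.
    static bool AtLeastTwoToThe31(double d) {
      uint64_t bits;
      memcpy(&bits, &d, sizeof(bits));
      uint32_t exponent = (uint32_t)(bits >> 52) & 0x7FF;  // biased exponent
      return exponent >= 1023u + 31u;  // 2^31 has biased exponent 1054
    }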
(...skipping 66 matching lines...)
9319 } 9324 }
9320 9325
9321 9326
9322 void CodeGenerator::VisitCompareToNull(CompareToNull* node) { 9327 void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
9323 ASSERT(!in_safe_int32_mode()); 9328 ASSERT(!in_safe_int32_mode());
9324 Comment cmnt(masm_, "[ CompareToNull"); 9329 Comment cmnt(masm_, "[ CompareToNull");
9325 9330
9326 Load(node->expression()); 9331 Load(node->expression());
9327 Result operand = frame_->Pop(); 9332 Result operand = frame_->Pop();
9328 operand.ToRegister(); 9333 operand.ToRegister();
9329 __ cmp(operand.reg(), Factory::null_value()); 9334 __ cmp(operand.reg(), FACTORY->null_value());
9330 if (node->is_strict()) { 9335 if (node->is_strict()) {
9331 operand.Unuse(); 9336 operand.Unuse();
9332 destination()->Split(equal); 9337 destination()->Split(equal);
9333 } else { 9338 } else {
9334 // The 'null' value is only equal to 'undefined' if using non-strict 9339 // The 'null' value is only equal to 'undefined' if using non-strict
9335 // comparisons. 9340 // comparisons.
9336 destination()->true_target()->Branch(equal); 9341 destination()->true_target()->Branch(equal);
9337 __ cmp(operand.reg(), Factory::undefined_value()); 9342 __ cmp(operand.reg(), FACTORY->undefined_value());
9338 destination()->true_target()->Branch(equal); 9343 destination()->true_target()->Branch(equal);
9339 __ test(operand.reg(), Immediate(kSmiTagMask)); 9344 __ test(operand.reg(), Immediate(kSmiTagMask));
9340 destination()->false_target()->Branch(equal); 9345 destination()->false_target()->Branch(equal);
9341 9346
9342 // It can be an undetectable object. 9347 // It can be an undetectable object.
9343 // Use a scratch register in preference to spilling operand.reg(). 9348 // Use a scratch register in preference to spilling operand.reg().
9344 Result temp = allocator()->Allocate(); 9349 Result temp = allocator()->Allocate();
9345 ASSERT(temp.is_valid()); 9350 ASSERT(temp.is_valid());
9346 __ mov(temp.reg(), 9351 __ mov(temp.reg(),
9347 FieldOperand(operand.reg(), HeapObject::kMapOffset)); 9352 FieldOperand(operand.reg(), HeapObject::kMapOffset));
(...skipping 52 matching lines...)
9400 bool is_contextual_; 9405 bool is_contextual_;
9401 bool is_dont_delete_; 9406 bool is_dont_delete_;
9402 }; 9407 };
9403 9408
9404 9409
9405 void DeferredReferenceGetNamedValue::Generate() { 9410 void DeferredReferenceGetNamedValue::Generate() {
9406 if (!receiver_.is(eax)) { 9411 if (!receiver_.is(eax)) {
9407 __ mov(eax, receiver_); 9412 __ mov(eax, receiver_);
9408 } 9413 }
9409 __ Set(ecx, Immediate(name_)); 9414 __ Set(ecx, Immediate(name_));
9410 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); 9415 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
9416 Builtins::LoadIC_Initialize));
9411 RelocInfo::Mode mode = is_contextual_ 9417 RelocInfo::Mode mode = is_contextual_
9412 ? RelocInfo::CODE_TARGET_CONTEXT 9418 ? RelocInfo::CODE_TARGET_CONTEXT
9413 : RelocInfo::CODE_TARGET; 9419 : RelocInfo::CODE_TARGET;
9414 __ call(ic, mode); 9420 __ call(ic, mode);
9415 // The call must be followed by: 9421 // The call must be followed by:
9416 // - a test eax instruction to indicate that the inobject property 9422 // - a test eax instruction to indicate that the inobject property
9417 // case was inlined. 9423 // case was inlined.
9418 // - a mov ecx or mov edx instruction to indicate that the 9424 // - a mov ecx or mov edx instruction to indicate that the
9419 // contextual property load was inlined. 9425 // contextual property load was inlined.
9420 // 9426 //
9421 // Store the delta to the map check instruction here in the test 9427 // Store the delta to the map check instruction here in the test
9422 // instruction. Use masm_-> instead of the __ macro since the 9428 // instruction. Use masm_-> instead of the __ macro since the
9423 // latter can't return a value. 9429 // latter can't return a value.
9424 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); 9430 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9425 // Here we use masm_-> instead of the __ macro because this is the 9431 // Here we use masm_-> instead of the __ macro because this is the
9426 // instruction that gets patched and coverage code gets in the way. 9432 // instruction that gets patched and coverage code gets in the way.
9427 if (is_contextual_) { 9433 if (is_contextual_) {
9428 masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site); 9434 masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site);
9429 __ IncrementCounter(&Counters::named_load_global_inline_miss, 1); 9435 __ IncrementCounter(COUNTERS->named_load_global_inline_miss(), 1);
9430 if (is_dont_delete_) { 9436 if (is_dont_delete_) {
9431 __ IncrementCounter(&Counters::dont_delete_hint_miss, 1); 9437 __ IncrementCounter(COUNTERS->dont_delete_hint_miss(), 1);
9432 } 9438 }
9433 } else { 9439 } else {
9434 masm_->test(eax, Immediate(-delta_to_patch_site)); 9440 masm_->test(eax, Immediate(-delta_to_patch_site));
9435 __ IncrementCounter(&Counters::named_load_inline_miss, 1); 9441 __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1);
9436 } 9442 }
9437 9443
9438 if (!dst_.is(eax)) __ mov(dst_, eax); 9444 if (!dst_.is(eax)) __ mov(dst_, eax);
9439 } 9445 }
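This deferred path also documents the patching handshake used throughout the inlined loads and stores: the test (or, for contextual loads, mov) instruction emitted right after the IC call carries -delta as its 32-bit immediate, where delta is the distance back to the map-check cmp, so the IC runtime can walk from the call's return address to the instruction it must patch. A sketch of the decoding side, assuming the marker instruction is test eax, imm32 (opcode 0xA9) and immediately follows the call:

    #include <stdint.h>
    #include <string.h>
    typedef uint8_t* Address;
    // Recover the patchable map-check cmp from an IC call's return address.
    static Address FindPatchSite(Address return_address) {
      if (*return_address != 0xA9) return 0;  // no test eax, imm32 marker
      int32_t negative_delta;
      memcpy(&negative_delta, return_address + 1, sizeof(negative_delta));
      return return_address + negative_delta;  // the immediate holds -delta
    }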
9440 9446
9441 9447
9442 class DeferredReferenceGetKeyedValue: public DeferredCode { 9448 class DeferredReferenceGetKeyedValue: public DeferredCode {
9443 public: 9449 public:
9444 explicit DeferredReferenceGetKeyedValue(Register dst, 9450 explicit DeferredReferenceGetKeyedValue(Register dst,
9445 Register receiver, 9451 Register receiver,
(...skipping 33 matching lines...)
9479 } 9485 }
9480 } else { 9486 } else {
9481 __ xchg(edx, eax); 9487 __ xchg(edx, eax);
9482 } 9488 }
9483 // Calculate the delta from the IC call instruction to the map check 9489 // Calculate the delta from the IC call instruction to the map check
9484 // cmp instruction in the inlined version. This delta is stored in 9490 // cmp instruction in the inlined version. This delta is stored in
9485 // a test(eax, delta) instruction after the call so that we can find 9491 // a test(eax, delta) instruction after the call so that we can find
9486 // it in the IC initialization code and patch the cmp instruction. 9492 // it in the IC initialization code and patch the cmp instruction.
9487 // This means that we cannot allow test instructions after calls to 9493 // This means that we cannot allow test instructions after calls to
9488 // KeyedLoadIC stubs in other places. 9494 // KeyedLoadIC stubs in other places.
9489 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); 9495 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
9496 Builtins::KeyedLoadIC_Initialize));
9490 __ call(ic, RelocInfo::CODE_TARGET); 9497 __ call(ic, RelocInfo::CODE_TARGET);
9491 // The delta from the start of the map-compare instruction to the 9498 // The delta from the start of the map-compare instruction to the
9492 // test instruction. We use masm_-> directly here instead of the __ 9499 // test instruction. We use masm_-> directly here instead of the __
9493 // macro because the macro sometimes uses macro expansion to turn 9500 // macro because the macro sometimes uses macro expansion to turn
9494 // into something that can't return a value. This is encountered 9501 // into something that can't return a value. This is encountered
9495 // when doing generated code coverage tests. 9502 // when doing generated code coverage tests.
9496 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); 9503 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9497 // Here we use masm_-> instead of the __ macro because this is the 9504 // Here we use masm_-> instead of the __ macro because this is the
9498 // instruction that gets patched and coverage code gets in the way. 9505 // instruction that gets patched and coverage code gets in the way.
9499 masm_->test(eax, Immediate(-delta_to_patch_site)); 9506 masm_->test(eax, Immediate(-delta_to_patch_site));
9500 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1); 9507 __ IncrementCounter(COUNTERS->keyed_load_inline_miss(), 1);
9501 9508
9502 if (!dst_.is(eax)) __ mov(dst_, eax); 9509 if (!dst_.is(eax)) __ mov(dst_, eax);
9503 } 9510 }
9504 9511
9505 9512
9506 class DeferredReferenceSetKeyedValue: public DeferredCode { 9513 class DeferredReferenceSetKeyedValue: public DeferredCode {
9507 public: 9514 public:
9508 DeferredReferenceSetKeyedValue(Register value, 9515 DeferredReferenceSetKeyedValue(Register value,
9509 Register key, 9516 Register key,
9510 Register receiver, 9517 Register receiver,
(...skipping 15 matching lines...)
9526 Register value_; 9533 Register value_;
9527 Register key_; 9534 Register key_;
9528 Register receiver_; 9535 Register receiver_;
9529 Register scratch_; 9536 Register scratch_;
9530 Label patch_site_; 9537 Label patch_site_;
9531 StrictModeFlag strict_mode_; 9538 StrictModeFlag strict_mode_;
9532 }; 9539 };
9533 9540
9534 9541
9535 void DeferredReferenceSetKeyedValue::Generate() { 9542 void DeferredReferenceSetKeyedValue::Generate() {
9536 __ IncrementCounter(&Counters::keyed_store_inline_miss, 1); 9543 __ IncrementCounter(COUNTERS->keyed_store_inline_miss(), 1);
9537 // Move value_ to eax, key_ to ecx, and receiver_ to edx. 9544 // Move value_ to eax, key_ to ecx, and receiver_ to edx.
9538 Register old_value = value_; 9545 Register old_value = value_;
9539 9546
9540 // First, move value to eax. 9547 // First, move value to eax.
9541 if (!value_.is(eax)) { 9548 if (!value_.is(eax)) {
9542 if (key_.is(eax)) { 9549 if (key_.is(eax)) {
9543 // Move key_ out of eax, preferably to ecx. 9550 // Move key_ out of eax, preferably to ecx.
9544 if (!value_.is(ecx) && !receiver_.is(ecx)) { 9551 if (!value_.is(ecx) && !receiver_.is(ecx)) {
9545 __ mov(ecx, key_); 9552 __ mov(ecx, key_);
9546 key_ = ecx; 9553 key_ = ecx;
(...skipping 33 matching lines...)
9580 } 9587 }
9581 } 9588 }
9582 } else { // Key is not in edx or ecx. 9589 } else { // Key is not in edx or ecx.
9583 if (!receiver_.is(edx)) { 9590 if (!receiver_.is(edx)) {
9584 __ mov(edx, receiver_); 9591 __ mov(edx, receiver_);
9585 } 9592 }
9586 __ mov(ecx, key_); 9593 __ mov(ecx, key_);
9587 } 9594 }
9588 9595
9589 // Call the IC stub. 9596 // Call the IC stub.
9590 Handle<Code> ic(Builtins::builtin( 9597 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
9591 (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict 9598 (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
9592 : Builtins::KeyedStoreIC_Initialize)); 9599 : Builtins::KeyedStoreIC_Initialize));
9593 __ call(ic, RelocInfo::CODE_TARGET); 9600 __ call(ic, RelocInfo::CODE_TARGET);
9594 // The delta from the start of the map-compare instruction to the 9601 // The delta from the start of the map-compare instruction to the
9595 // test instruction. We use masm_-> directly here instead of the 9602 // test instruction. We use masm_-> directly here instead of the
9596 // __ macro because the macro sometimes uses macro expansion to turn 9603 // __ macro because the macro sometimes uses macro expansion to turn
9597 // into something that can't return a value. This is encountered 9604 // into something that can't return a value. This is encountered
9598 // when doing generated code coverage tests. 9605 // when doing generated code coverage tests.
9599 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); 9606 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9600 // Here we use masm_-> instead of the __ macro because this is the 9607 // Here we use masm_-> instead of the __ macro because this is the
9601 // instruction that gets patched and coverage code gets in the way. 9608 // instruction that gets patched and coverage code gets in the way.
9602 masm_->test(eax, Immediate(-delta_to_patch_site)); 9609 masm_->test(eax, Immediate(-delta_to_patch_site));
9603 // Restore value (returned from store IC) register. 9610 // Restore value (returned from store IC) register.
9604 if (!old_value.is(eax)) __ mov(old_value, eax); 9611 if (!old_value.is(eax)) __ mov(old_value, eax);
9605 } 9612 }
9606 9613
9607 9614
9608 Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { 9615 Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
9609 #ifdef DEBUG 9616 #ifdef DEBUG
9610 int original_height = frame()->height(); 9617 int original_height = frame()->height();
9611 #endif 9618 #endif
9612 9619
9613 bool contextual_load_in_builtin = 9620 bool contextual_load_in_builtin =
9614 is_contextual && 9621 is_contextual &&
9615 (Bootstrapper::IsActive() || 9622 (Isolate::Current()->bootstrapper()->IsActive() ||
9616 (!info_->closure().is_null() && info_->closure()->IsBuiltin())); 9623 (!info_->closure().is_null() && info_->closure()->IsBuiltin()));
9617 9624
9618 Result result; 9625 Result result;
9619 // Do not inline in global code or when not in a loop. 9626 // Do not inline in global code or when not in a loop.
9620 if (scope()->is_global_scope() || 9627 if (scope()->is_global_scope() ||
9621 loop_nesting() == 0 || 9628 loop_nesting() == 0 ||
9622 contextual_load_in_builtin) { 9629 contextual_load_in_builtin) {
9623 Comment cmnt(masm(), "[ Load from named Property"); 9630 Comment cmnt(masm(), "[ Load from named Property");
9624 frame()->Push(name); 9631 frame()->Push(name);
9625 9632
(...skipping 25 matching lines...)
9651 // Check that the receiver is a heap object. 9658 // Check that the receiver is a heap object.
9652 __ test(receiver.reg(), Immediate(kSmiTagMask)); 9659 __ test(receiver.reg(), Immediate(kSmiTagMask));
9653 deferred->Branch(zero); 9660 deferred->Branch(zero);
9654 } 9661 }
9655 9662
9656 __ bind(deferred->patch_site()); 9663 __ bind(deferred->patch_site());
9657 // This is the map check instruction that will be patched (so we can't 9664 // This is the map check instruction that will be patched (so we can't
9658 // use the double underscore macro that may insert instructions). 9665 // use the double underscore macro that may insert instructions).
9659 // Initially use an invalid map to force a failure. 9666 // Initially use an invalid map to force a failure.
9660 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), 9667 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
9661 Immediate(Factory::null_value())); 9668 Immediate(FACTORY->null_value()));
9662 // This branch is always a forwards branch so it's always a fixed size 9669 // This branch is always a forwards branch so it's always a fixed size
9663 // which allows the assert below to succeed and patching to work. 9670 // which allows the assert below to succeed and patching to work.
9664 deferred->Branch(not_equal); 9671 deferred->Branch(not_equal);
9665 9672
9666 // The delta from the patch label to the actual load must be 9673 // The delta from the patch label to the actual load must be
9667 // statically known. 9674 // statically known.
9668 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) == 9675 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
9669 LoadIC::kOffsetToLoadInstruction); 9676 LoadIC::kOffsetToLoadInstruction);
9670 9677
9671 if (is_contextual) { 9678 if (is_contextual) {
9672 // Load the (initially invalid) cell and get its value. 9679 // Load the (initially invalid) cell and get its value.
9673 masm()->mov(result.reg(), Factory::null_value()); 9680 masm()->mov(result.reg(), FACTORY->null_value());
9674 if (FLAG_debug_code) { 9681 if (FLAG_debug_code) {
9675 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset), 9682 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
9676 Factory::global_property_cell_map()); 9683 FACTORY->global_property_cell_map());
9677 __ Assert(equal, "Uninitialized inlined contextual load"); 9684 __ Assert(equal, "Uninitialized inlined contextual load");
9678 } 9685 }
9679 __ mov(result.reg(), 9686 __ mov(result.reg(),
9680 FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset)); 9687 FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset));
9688 __ cmp(result.reg(), FACTORY->the_hole_value());
9689 deferred->Branch(equal);
9681 bool is_dont_delete = false; 9690 bool is_dont_delete = false;
9682 if (!info_->closure().is_null()) { 9691 if (!info_->closure().is_null()) {
9683 // When doing lazy compilation we can check if the global cell 9692 // When doing lazy compilation we can check if the global cell
9684 // already exists and use its "don't delete" status as a hint. 9693 // already exists and use its "don't delete" status as a hint.
9685 AssertNoAllocation no_gc; 9694 AssertNoAllocation no_gc;
9686 v8::internal::GlobalObject* global_object = 9695 v8::internal::GlobalObject* global_object =
9687 info_->closure()->context()->global(); 9696 info_->closure()->context()->global();
9688 LookupResult lookup; 9697 LookupResult lookup;
9689 global_object->LocalLookupRealNamedProperty(*name, &lookup); 9698 global_object->LocalLookupRealNamedProperty(*name, &lookup);
9690 if (lookup.IsProperty() && lookup.type() == NORMAL) { 9699 if (lookup.IsProperty() && lookup.type() == NORMAL) {
9691 ASSERT(lookup.holder() == global_object); 9700 ASSERT(lookup.holder() == global_object);
9692 ASSERT(global_object->property_dictionary()->ValueAt( 9701 ASSERT(global_object->property_dictionary()->ValueAt(
9693 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell()); 9702 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
9694 is_dont_delete = lookup.IsDontDelete(); 9703 is_dont_delete = lookup.IsDontDelete();
9695 } 9704 }
9696 } 9705 }
9697 deferred->set_is_dont_delete(is_dont_delete); 9706 deferred->set_is_dont_delete(is_dont_delete);
9698 if (!is_dont_delete) { 9707 if (!is_dont_delete) {
9699 __ cmp(result.reg(), Factory::the_hole_value()); 9708 __ cmp(result.reg(), FACTORY->the_hole_value());
9700 deferred->Branch(equal); 9709 deferred->Branch(equal);
9701 } else if (FLAG_debug_code) { 9710 } else if (FLAG_debug_code) {
9702 __ cmp(result.reg(), Factory::the_hole_value()); 9711 __ cmp(result.reg(), FACTORY->the_hole_value());
9703 __ Check(not_equal, "DontDelete cells can't contain the hole"); 9712 __ Check(not_equal, "DontDelete cells can't contain the hole");
9704 } 9713 }
9705 __ IncrementCounter(&Counters::named_load_global_inline, 1); 9714 __ IncrementCounter(COUNTERS->named_load_global_inline(), 1);
9706 if (is_dont_delete) { 9715 if (is_dont_delete) {
9707 __ IncrementCounter(&Counters::dont_delete_hint_hit, 1); 9716 __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1);
9708 } 9717 }
9709 } else { 9718 } else {
9710 // The initial (invalid) offset has to be large enough to force a 32-bit 9719 // The initial (invalid) offset has to be large enough to force a 32-bit
9711 // instruction encoding to allow patching with an arbitrary offset. Use 9720 // instruction encoding to allow patching with an arbitrary offset. Use
9712 // kMaxInt (minus kHeapObjectTag). 9721 // kMaxInt (minus kHeapObjectTag).
9713 int offset = kMaxInt; 9722 int offset = kMaxInt;
9714 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset)); 9723 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
9715 __ IncrementCounter(&Counters::named_load_inline, 1); 9724 __ IncrementCounter(COUNTERS->named_load_inline(), 1);
9716 } 9725 }
9717 9726
9718 deferred->BindExit(); 9727 deferred->BindExit();
9719 } 9728 }
9720 ASSERT(frame()->height() == original_height - 1); 9729 ASSERT(frame()->height() == original_height - 1);
9721 return result; 9730 return result;
9722 } 9731 }
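The contextual branch above inlines a load through a JSGlobalPropertyCell: the cell address is patched in after code generation, its value is the hole while the property is uninitialized or deleted (so the fast path bails to the deferred IC call on seeing it), and the dictionary lookup merely records a don't-delete hint for the deferred path. A sketch of the data flow, with hypothetical C++ names:

    // Sketch only; the real cell type and hole sentinel are V8 internals.
    struct Object;
    extern Object* the_hole;  // marks a deleted or uninitialized property
    struct JSGlobalPropertyCell { Object* value; };
    static Object* LoadGlobalThroughCell(JSGlobalPropertyCell* cell,
                                         Object* (*slow_path)(void)) {
      Object* value = cell->value;
      if (value == the_hole) return slow_path();  // take the IC miss path
      return value;
    }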
9723 9732
9724 9733
9725 Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) { 9734 Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
(...skipping 25 matching lines...)
9751 9760
9752 // Check that the receiver is a heap object. 9761 // Check that the receiver is a heap object.
9753 __ test(receiver.reg(), Immediate(kSmiTagMask)); 9762 __ test(receiver.reg(), Immediate(kSmiTagMask));
9754 slow.Branch(zero, &value, &receiver); 9763 slow.Branch(zero, &value, &receiver);
9755 9764
9756 // This is the map check instruction that will be patched (so we can't 9765 // This is the map check instruction that will be patched (so we can't
9757 // use the double underscore macro that may insert instructions). 9766 // use the double underscore macro that may insert instructions).
9758 // Initially use an invalid map to force a failure. 9767 // Initially use an invalid map to force a failure.
9759 __ bind(&patch_site); 9768 __ bind(&patch_site);
9760 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), 9769 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
9761 Immediate(Factory::null_value())); 9770 Immediate(FACTORY->null_value()));
9762 // This branch is always a forwards branch so it's always a fixed size 9771 // This branch is always a forwards branch so it's always a fixed size
9763 // which allows the assert below to succeed and patching to work. 9772 // which allows the assert below to succeed and patching to work.
9764 slow.Branch(not_equal, &value, &receiver); 9773 slow.Branch(not_equal, &value, &receiver);
9765 9774
9766 // The delta from the patch label to the store offset must be 9775 // The delta from the patch label to the store offset must be
9767 // statically known. 9776 // statically known.
9768 ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) == 9777 ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) ==
9769 StoreIC::kOffsetToStoreInstruction); 9778 StoreIC::kOffsetToStoreInstruction);
9770 9779
9771 // The initial (invalid) offset has to be large enough to force a 32-bit 9780 // The initial (invalid) offset has to be large enough to force a 32-bit
(...skipping 94 matching lines...)
9866 __ test(receiver.reg(), Immediate(kSmiTagMask)); 9875 __ test(receiver.reg(), Immediate(kSmiTagMask));
9867 deferred->Branch(zero); 9876 deferred->Branch(zero);
9868 9877
9869 // Check that the receiver has the expected map. 9878 // Check that the receiver has the expected map.
9870 // Initially, use an invalid map. The map is patched in the IC 9879 // Initially, use an invalid map. The map is patched in the IC
9871 // initialization code. 9880 // initialization code.
9872 __ bind(deferred->patch_site()); 9881 __ bind(deferred->patch_site());
9873 // Use masm-> here instead of the double underscore macro since extra 9882 // Use masm-> here instead of the double underscore macro since extra
9874 // coverage code can interfere with the patching. 9883 // coverage code can interfere with the patching.
9875 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), 9884 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
9876 Immediate(Factory::null_value())); 9885 Immediate(FACTORY->null_value()));
9877 deferred->Branch(not_equal); 9886 deferred->Branch(not_equal);
9878 9887
9879 // Check that the key is a smi. 9888 // Check that the key is a smi.
9880 if (!key.is_smi()) { 9889 if (!key.is_smi()) {
9881 __ test(key.reg(), Immediate(kSmiTagMask)); 9890 __ test(key.reg(), Immediate(kSmiTagMask));
9882 deferred->Branch(not_zero); 9891 deferred->Branch(not_zero);
9883 } else { 9892 } else {
9884 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg()); 9893 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
9885 } 9894 }
9886 9895
9887 // Get the elements array from the receiver. 9896 // Get the elements array from the receiver.
9888 __ mov(elements.reg(), 9897 __ mov(elements.reg(),
9889 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); 9898 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
9890 __ AssertFastElements(elements.reg()); 9899 __ AssertFastElements(elements.reg());
9891 9900
9892 // Check that the key is within bounds. 9901 // Check that the key is within bounds.
9893 __ cmp(key.reg(), 9902 __ cmp(key.reg(),
9894 FieldOperand(elements.reg(), FixedArray::kLengthOffset)); 9903 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
9895 deferred->Branch(above_equal); 9904 deferred->Branch(above_equal);
9896 9905
9897 // Load and check that the result is not the hole. 9906 // Load and check that the result is not the hole.
9898 // Key holds a smi. 9907 // Key holds a smi.
9899 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 9908 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
9900 __ mov(elements.reg(), 9909 __ mov(elements.reg(),
9901 FieldOperand(elements.reg(), 9910 FieldOperand(elements.reg(),
9902 key.reg(), 9911 key.reg(),
9903 times_2, 9912 times_2,
9904 FixedArray::kHeaderSize)); 9913 FixedArray::kHeaderSize));
9905 result = elements; 9914 result = elements;
9906 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value())); 9915 __ cmp(Operand(result.reg()), Immediate(FACTORY->the_hole_value()));
9907 deferred->Branch(equal); 9916 deferred->Branch(equal);
9908 __ IncrementCounter(&Counters::keyed_load_inline, 1); 9917 __ IncrementCounter(COUNTERS->keyed_load_inline(), 1);
9909 9918
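The times_2 scale in the element load above is deliberate: on ia32 a smi key is already index << 1, so scaling the key by 2 yields index * 4, which is index * kPointerSize, and the FixedArray header size minus the heap-object tag puts the displacement on element 0. The effective-address arithmetic as a sketch, assuming the ia32 values kPointerSize == 4, kHeaderSize == 8, and kHeapObjectTag == 1:

    #include <stdint.h>
    // Byte offset of elements[index] from a tagged FixedArray pointer.
    static int32_t ElementOffset(int32_t smi_key) {
      const int32_t kHeaderSize = 8, kHeapObjectTag = 1;  // assumed ia32 layout
      // smi_key == index << 1, so smi_key * 2 == index * kPointerSize.
      return smi_key * 2 + kHeaderSize - kHeapObjectTag;
    }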
9910 deferred->BindExit(); 9919 deferred->BindExit();
9911 } else { 9920 } else {
9912 Comment cmnt(masm_, "[ Load from keyed Property"); 9921 Comment cmnt(masm_, "[ Load from keyed Property");
9913 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET); 9922 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
9914 // Make sure that we do not have a test instruction after the 9923 // Make sure that we do not have a test instruction after the
9915 // call. A test instruction after the call is used to 9924 // call. A test instruction after the call is used to
9916 // indicate that we have generated an inline version of the 9925 // indicate that we have generated an inline version of the
9917 // keyed load. The explicit nop instruction is here because 9926 // keyed load. The explicit nop instruction is here because
9918 // the push that follows might be peep-hole optimized away. 9927 // the push that follows might be peep-hole optimized away.
(...skipping 72 matching lines...)
9991 } 10000 }
9992 10001
9993 10002
9994 __ bind(&in_new_space); 10003 __ bind(&in_new_space);
9995 // Bind the deferred code patch site to be able to locate the fixed 10004 // Bind the deferred code patch site to be able to locate the fixed
9996 // array map comparison. When debugging, we patch this comparison to 10005 // array map comparison. When debugging, we patch this comparison to
9997 // always fail so that we will hit the IC call in the deferred code 10006 // always fail so that we will hit the IC call in the deferred code
9998 // which will allow the debugger to break for fast case stores. 10007 // which will allow the debugger to break for fast case stores.
9999 __ bind(deferred->patch_site()); 10008 __ bind(deferred->patch_site());
10000 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset), 10009 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
10001 Immediate(Factory::fixed_array_map())); 10010 Immediate(FACTORY->fixed_array_map()));
10002 deferred->Branch(not_equal); 10011 deferred->Branch(not_equal);
10003 10012
10004 // Check that the key is within bounds. Both the key and the length of 10013 // Check that the key is within bounds. Both the key and the length of
10005 // the JSArray are smis (because the fixed array check above ensures the 10014 // the JSArray are smis (because the fixed array check above ensures the
10006 // elements are in fast case). Use unsigned comparison to handle negative 10015 // elements are in fast case). Use unsigned comparison to handle negative
10007 // keys. 10016 // keys.
10008 __ cmp(key.reg(), 10017 __ cmp(key.reg(),
10009 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); 10018 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
10010 deferred->Branch(above_equal); 10019 deferred->Branch(above_equal);
10011 10020
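The above_equal (unsigned) branch lets one comparison reject both too-large and negative keys: reinterpreted as unsigned, a negative key becomes a huge value that always compares above the length. The same trick in C++:

    #include <stdint.h>
    // One unsigned compare; a negative key wraps to a large value and fails too.
    static inline bool KeyInBounds(int32_t key, int32_t length) {
      return (uint32_t)key < (uint32_t)length;
    }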
10012 // Store the value. 10021 // Store the value.
10013 __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg()); 10022 __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
10014 __ IncrementCounter(&Counters::keyed_store_inline, 1); 10023 __ IncrementCounter(COUNTERS->keyed_store_inline(), 1);
10015 10024
10016 deferred->BindExit(); 10025 deferred->BindExit();
10017 } else { 10026 } else {
10018 result = frame()->CallKeyedStoreIC(strict_mode_flag()); 10027 result = frame()->CallKeyedStoreIC(strict_mode_flag());
10019 // Make sure that we do not have a test instruction after the 10028 // Make sure that we do not have a test instruction after the
10020 // call. A test instruction after the call is used to 10029 // call. A test instruction after the call is used to
10021 // indicate that we have generated an inline version of the 10030 // indicate that we have generated an inline version of the
10022 // keyed store. 10031 // keyed store.
10023 __ nop(); 10032 __ nop();
10024 } 10033 }
(...skipping 182 matching lines...)
10207 int stack_offset = 0; // Update if we change the stack height. 10216 int stack_offset = 0; // Update if we change the stack height.
10208 10217
10209 if (FLAG_debug_code) { 10218 if (FLAG_debug_code) {
10210 __ cmp(Operand(esp, kSizeOffset + stack_offset), 10219 __ cmp(Operand(esp, kSizeOffset + stack_offset),
10211 Immediate(kMinComplexMemCopy)); 10220 Immediate(kMinComplexMemCopy));
10212 Label ok; 10221 Label ok;
10213 __ j(greater_equal, &ok); 10222 __ j(greater_equal, &ok);
10214 __ int3(); 10223 __ int3();
10215 __ bind(&ok); 10224 __ bind(&ok);
10216 } 10225 }
10217 if (CpuFeatures::IsSupported(SSE2)) { 10226 if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
10218 CpuFeatures::Scope enable(SSE2); 10227 CpuFeatures::Scope enable(SSE2);
10219 __ push(edi); 10228 __ push(edi);
10220 __ push(esi); 10229 __ push(esi);
10221 stack_offset += 2 * kPointerSize; 10230 stack_offset += 2 * kPointerSize;
10222 Register dst = edi; 10231 Register dst = edi;
10223 Register src = esi; 10232 Register src = esi;
10224 Register count = ecx; 10233 Register count = ecx;
10225 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset)); 10234 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
10226 __ mov(src, Operand(esp, stack_offset + kSourceOffset)); 10235 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
10227 __ mov(count, Operand(esp, stack_offset + kSizeOffset)); 10236 __ mov(count, Operand(esp, stack_offset + kSizeOffset));
10228 10237
10229 10238
10230 __ movdqu(xmm0, Operand(src, 0)); 10239 __ movdqu(xmm0, Operand(src, 0));
10231 __ movdqu(Operand(dst, 0), xmm0); 10240 __ movdqu(Operand(dst, 0), xmm0);
10232 __ mov(edx, dst); 10241 __ mov(edx, dst);
10233 __ and_(edx, 0xF); 10242 __ and_(edx, 0xF);
10234 __ neg(edx); 10243 __ neg(edx);
10235 __ add(Operand(edx), Immediate(16)); 10244 __ add(Operand(edx), Immediate(16));
10236 __ add(dst, Operand(edx)); 10245 __ add(dst, Operand(edx));
10237 __ add(src, Operand(edx)); 10246 __ add(src, Operand(edx));
10238 __ sub(Operand(count), edx); 10247 __ sub(Operand(count), edx);
10239 10248
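The neg/add/add/sub sequence computes how far the destination is from the next 16-byte boundary: since the first 16 bytes were already copied unaligned, both pointers can skip forward by 16 - (dst & 15) bytes (a full 16 when dst was already aligned), leaving dst aligned for movdqa stores. The same arithmetic as a C++ sketch:

    #include <stddef.h>
    #include <stdint.h>
    // Skip past bytes the unaligned head copy covered; returns the new count.
    static size_t AlignDestination(uint8_t** dst, const uint8_t** src, size_t count) {
      size_t advance = 16 - ((uintptr_t)*dst & 0xF);  // 1..16 bytes
      *dst += advance;
      *src += advance;
      return count - advance;
    }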
10240 // edi is now aligned. Check if esi is also aligned. 10249 // edi is now aligned. Check if esi is also aligned.
10241 Label unaligned_source; 10250 Label unaligned_source;
10242 __ test(Operand(src), Immediate(0x0F)); 10251 __ test(Operand(src), Immediate(0x0F));
10243 __ j(not_zero, &unaligned_source); 10252 __ j(not_zero, &unaligned_source);
10244 { 10253 {
10245 __ IncrementCounter(&Counters::memcopy_aligned, 1); 10254 __ IncrementCounter(COUNTERS->memcopy_aligned(), 1);
10246 // Copy loop for aligned source and destination. 10255 // Copy loop for aligned source and destination.
10247 __ mov(edx, count); 10256 __ mov(edx, count);
10248 Register loop_count = ecx; 10257 Register loop_count = ecx;
10249 Register count = edx; 10258 Register count = edx;
10250 __ shr(loop_count, 5); 10259 __ shr(loop_count, 5);
10251 { 10260 {
10252 // Main copy loop. 10261 // Main copy loop.
10253 Label loop; 10262 Label loop;
10254 __ bind(&loop); 10263 __ bind(&loop);
10255 __ prefetch(Operand(src, 0x20), 1); 10264 __ prefetch(Operand(src, 0x20), 1);
(...skipping 27 matching lines...)
10283 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset)); 10292 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
10284 __ pop(esi); 10293 __ pop(esi);
10285 __ pop(edi); 10294 __ pop(edi);
10286 __ ret(0); 10295 __ ret(0);
10287 } 10296 }
10288 __ Align(16); 10297 __ Align(16);
10289 { 10298 {
10290 // Copy loop for unaligned source and aligned destination. 10299 // Copy loop for unaligned source and aligned destination.
10291 // If source is not aligned, we can't read it as efficiently. 10300 // If source is not aligned, we can't read it as efficiently.
10292 __ bind(&unaligned_source); 10301 __ bind(&unaligned_source);
10293 __ IncrementCounter(&Counters::memcopy_unaligned, 1); 10302 __ IncrementCounter(COUNTERS->memcopy_unaligned(), 1);
10294 __ mov(edx, ecx); 10303 __ mov(edx, ecx);
10295 Register loop_count = ecx; 10304 Register loop_count = ecx;
10296 Register count = edx; 10305 Register count = edx;
10297 __ shr(loop_count, 5); 10306 __ shr(loop_count, 5);
10298 { 10307 {
10299 // Main copy loop 10308 // Main copy loop
10300 Label loop; 10309 Label loop;
10301 __ bind(&loop); 10310 __ bind(&loop);
10302 __ prefetch(Operand(src, 0x20), 1); 10311 __ prefetch(Operand(src, 0x20), 1);
10303 __ movdqu(xmm0, Operand(src, 0x00)); 10312 __ movdqu(xmm0, Operand(src, 0x00));
(...skipping 23 matching lines...)
10327 __ movdqu(xmm0, Operand(src, count, times_1, -0x10)); 10336 __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
10328 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0); 10337 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
10329 10338
10330 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset)); 10339 __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
10331 __ pop(esi); 10340 __ pop(esi);
10332 __ pop(edi); 10341 __ pop(edi);
10333 __ ret(0); 10342 __ ret(0);
10334 } 10343 }
10335 10344
10336 } else { 10345 } else {
10337 __ IncrementCounter(&Counters::memcopy_noxmm, 1); 10346 __ IncrementCounter(COUNTERS->memcopy_noxmm(), 1);
10338 // SSE2 not supported. Unlikely to happen in practice. 10347 // SSE2 not supported. Unlikely to happen in practice.
10339 __ push(edi); 10348 __ push(edi);
10340 __ push(esi); 10349 __ push(esi);
10341 stack_offset += 2 * kPointerSize; 10350 stack_offset += 2 * kPointerSize;
10342 __ cld(); 10351 __ cld();
10343 Register dst = edi; 10352 Register dst = edi;
10344 Register src = esi; 10353 Register src = esi;
10345 Register count = ecx; 10354 Register count = ecx;
10346 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset)); 10355 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
10347 __ mov(src, Operand(esp, stack_offset + kSourceOffset)); 10356 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
(...skipping 43 matching lines...)
10391 memcpy(base, desc.buffer, desc.instr_size); 10400 memcpy(base, desc.buffer, desc.instr_size);
10392 CPU::FlushICache(base, desc.instr_size); 10401 CPU::FlushICache(base, desc.instr_size);
10393 return FUNCTION_CAST<MemCopyFunction>(reinterpret_cast<Address>(base)); 10402 return FUNCTION_CAST<MemCopyFunction>(reinterpret_cast<Address>(base));
10394 } 10403 }
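Taken together, the stub's strategy is: one unaligned 16-byte head copy, destination alignment, a wide main loop (32 bytes per iteration with prefetching above), and an overlapping unaligned copy of the last 16 bytes so no byte-granular tail loop is needed. A compact standalone sketch of the same strategy with SSE2 intrinsics, simplified to 16 bytes per iteration and assuming count >= 16 (guaranteed by the kMinComplexMemCopy check):

    #include <emmintrin.h>
    #include <stdint.h>
    #include <stddef.h>
    static void MemCopySketch(uint8_t* dst, const uint8_t* src, size_t count) {
      // Head: one unaligned 16-byte chunk, then align dst.
      _mm_storeu_si128((__m128i*)dst, _mm_loadu_si128((const __m128i*)src));
      size_t advance = 16 - ((uintptr_t)dst & 0xF);
      dst += advance; src += advance; count -= advance;
      // Main loop: dst is now 16-byte aligned; src may not be, so load unaligned.
      for (size_t n = count >> 4; n != 0; n--) {
        _mm_store_si128((__m128i*)dst, _mm_loadu_si128((const __m128i*)src));
        dst += 16; src += 16;
      }
      // Tail: overlapping unaligned copy of the final 16 bytes.
      count &= 0xF;
      _mm_storeu_si128((__m128i*)(dst + count - 16),
                       _mm_loadu_si128((const __m128i*)(src + count - 16)));
    }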
10395 10404
10396 #undef __ 10405 #undef __
10397 10406
10398 } } // namespace v8::internal 10407 } } // namespace v8::internal
10399 10408
10400 #endif // V8_TARGET_ARCH_IA32 10409 #endif // V8_TARGET_ARCH_IA32