Chromium Code Reviews

Unified Diff: src/arm/full-codegen-arm.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 21 matching lines...)
 #include "code-stubs.h"
 #include "codegen.h"
 #include "compiler.h"
 #include "debug.h"
 #include "full-codegen.h"
 #include "parser.h"
 #include "scopes.h"
 #include "stub-cache.h"

 #include "arm/code-stubs-arm.h"
+#include "arm/macro-assembler-arm.h"

 namespace v8 {
 namespace internal {

 #define __ ACCESS_MASM(masm_)


 static unsigned GetPropertyId(Property* property) {
   return property->id();
 }
(...skipping 96 matching lines...)
   if (info->is_strict_mode() || info->is_native()) {
     Label ok;
     __ cmp(r5, Operand(0));
     __ b(eq, &ok);
     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
     __ str(r2, MemOperand(sp, receiver_offset));
     __ bind(&ok);
   }

+  // Open a frame scope to indicate that there is a frame on the stack. The
+  // MANUAL indicates that the scope shouldn't actually generate code to set up
+  // the frame (that is done below).
+  FrameScope frame_scope(masm_, StackFrame::MANUAL);
+
   int locals_count = info->scope()->num_stack_slots();

   __ Push(lr, fp, cp, r1);
   if (locals_count > 0) {
     // Load undefined value here, so the value is ready for the loop
     // below.
     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   }
   // Adjust fp to point to caller's fp.
   __ add(fp, sp, Operand(2 * kPointerSize));
(...skipping 25 matching lines...)
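
A note on the FrameScope added above: StackFrame::MANUAL means the scope emits no prologue itself; it only records, for its lifetime, that a frame exists while the surrounding code builds one by hand (the Push/add just below). A minimal sketch of that RAII pattern, using illustrative types rather than V8's real classes:

    #include <cassert>

    // Illustrative stand-ins; not V8's actual assembler or scope types.
    struct ToyAssembler {
      bool has_frame = false;
    };

    // RAII marker: while alive, helpers that emit calls can assert that a
    // frame exists. A MANUAL scope records the fact but emits no prologue;
    // the caller is responsible for actually setting the frame up.
    class ToyFrameScope {
     public:
      explicit ToyFrameScope(ToyAssembler* masm) : masm_(masm) {
        assert(!masm_->has_frame);
        masm_->has_frame = true;
      }
      ~ToyFrameScope() { masm_->has_frame = false; }

     private:
      ToyAssembler* masm_;
    };

    int main() {
      ToyAssembler masm;
      {
        ToyFrameScope frame_scope(&masm);
        assert(masm.has_frame);  // Call-emitting helpers could check this.
      }
      assert(!masm.has_frame);
      return 0;
    }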
     // Copy any necessary parameters into the context.
     int num_parameters = info->scope()->num_parameters();
     for (int i = 0; i < num_parameters; i++) {
       Variable* var = scope()->parameter(i);
       if (var->IsContextSlot()) {
         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
             (num_parameters - 1 - i) * kPointerSize;
         // Load parameter from stack.
         __ ldr(r0, MemOperand(fp, parameter_offset));
         // Store it in the context.
-        __ mov(r1, Operand(Context::SlotOffset(var->index())));
-        __ str(r0, MemOperand(cp, r1));
-        // Update the write barrier. This clobbers all involved
-        // registers, so we have to use two more registers to avoid
-        // clobbering cp.
-        __ mov(r2, Operand(cp));
-        __ RecordWrite(r2, Operand(r1), r3, r0);
+        MemOperand target = ContextOperand(cp, var->index());
+        __ str(r0, target);
+
+        // Update the write barrier.
+        __ RecordWriteContextSlot(
+            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
       }
     }
   }

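The RecordWrite → RecordWriteContextSlot changes throughout this file follow the same pattern: the store happens first, then a barrier records it for the GC, and the new arguments (kLRHasBeenSaved, kDontSaveFPRegs) describe what machine state the barrier may rely on or clobber. As a rough conceptual model only: a generational write barrier remembers old-to-new pointer stores so a scavenge need not scan all of old space. The sketch below uses toy types, not V8's heap:

    #include <set>

    enum class Space { kNew, kOld };

    struct ToyObject {
      Space space;
    };

    struct ToyHeap {
      // Slots in old space that may point into new space; the scavenger
      // rescans exactly these instead of walking all of old space.
      std::set<ToyObject**> remembered_set;

      // Barrier: run after every pointer store into a heap object.
      void RecordWrite(ToyObject* host, ToyObject** slot, ToyObject* value) {
        if (host->space == Space::kOld && value->space == Space::kNew) {
          remembered_set.insert(slot);
        }
      }
    };

    int main() {
      ToyHeap heap;
      ToyObject old_obj{Space::kOld};
      ToyObject young{Space::kNew};
      ToyObject* field = nullptr;  // Pretend this slot lives inside old_obj.
      field = &young;                              // The store itself...
      heap.RecordWrite(&old_obj, &field, &young);  // ...then the barrier.
      return heap.remembered_set.size() == 1 ? 0 : 1;
    }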
   Variable* arguments = scope()->arguments();
   if (arguments != NULL) {
     // Function uses arguments object.
     Comment cmnt(masm_, "[ Allocate arguments object");
     if (!function_in_register) {
       // Load this again, if it's used by the local context below.
(...skipping 438 matching lines...)
 void FullCodeGenerator::SetVar(Variable* var,
                                Register src,
                                Register scratch0,
                                Register scratch1) {
   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
   ASSERT(!scratch0.is(src));
   ASSERT(!scratch0.is(scratch1));
   ASSERT(!scratch1.is(src));
   MemOperand location = VarOperand(var, scratch0);
   __ str(src, location);
+
   // Emit the write barrier code if the location is in the heap.
   if (var->IsContextSlot()) {
-    __ RecordWrite(scratch0,
-                   Operand(Context::SlotOffset(var->index())),
-                   scratch1,
-                   src);
+    __ RecordWriteContextSlot(scratch0,
+                              location.offset(),
+                              src,
+                              scratch1,
+                              kLRHasBeenSaved,
+                              kDontSaveFPRegs);
   }
 }


 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
                                                      bool should_normalize,
                                                      Label* if_true,
                                                      Label* if_false) {
   // Only prepare for bailouts before splits if we're in a test
   // context. Otherwise, we let the Visit function deal with the
(...skipping 55 matching lines...)
         __ Check(ne, "Declaration in with context.");
         __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
         __ Check(ne, "Declaration in catch context.");
       }
       if (function != NULL) {
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ str(result_register(), ContextOperand(cp, variable->index()));
         int offset = Context::SlotOffset(variable->index());
         // We know that we have written a function, which is not a smi.
-        __ mov(r1, Operand(cp));
-        __ RecordWrite(r1, Operand(offset), r2, result_register());
+        __ RecordWriteContextSlot(cp,
+                                  offset,
+                                  result_register(),
+                                  r2,
+                                  kLRHasBeenSaved,
+                                  kDontSaveFPRegs,
+                                  EMIT_REMEMBERED_SET,
+                                  OMIT_SMI_CHECK);
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
       } else if (mode == Variable::CONST || mode == Variable::LET) {
         Comment cmnt(masm_, "[ Declaration");
         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
         __ str(ip, ContextOperand(cp, variable->index()));
         // No write barrier since the_hole_value is in old space.
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
       }
       break;

(...skipping 722 matching lines...)
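OMIT_SMI_CHECK is sound here because, as the comment says, the stored value is a function and therefore never a smi. Smis need no barrier at all: they are immediates, not pointers, and the barrier normally tests for them first. A sketch of the usual tagging scheme that makes that test a single AND (constants illustrative of 32-bit tagging, not copied from V8):

    #include <cstdint>

    // Toy tagging: smis are integers shifted left one bit (low bit 0);
    // heap object pointers carry a 1 in the low bit.
    constexpr uintptr_t kSmiTagMask = 1;
    constexpr uintptr_t kSmiTag = 0;

    inline bool IsSmi(uintptr_t value) {
      return (value & kSmiTagMask) == kSmiTag;
    }

    inline uintptr_t SmiFromInt(intptr_t n) {
      return static_cast<uintptr_t>(n) << 1;  // Low bit ends up 0.
    }

    int main() {
      uintptr_t smi = SmiFromInt(42);
      uintptr_t tagged_heap_object = 0x1000 | 1;
      // A barrier skips smi values: they are not pointers, so storing one
      // can never create an old-to-new reference. When the value is known
      // to be a heap object, OMIT_SMI_CHECK drops even this cheap test.
      return (IsSmi(smi) && !IsSmi(tagged_heap_object)) ? 0 : 1;
    }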
       continue;
     }

     if (!result_saved) {
       __ push(r0);
       result_saved = true;
     }
     VisitForAccumulatorValue(subexpr);

     // Store the subexpression value in the array's elements.
-    __ ldr(r1, MemOperand(sp));  // Copy of array literal.
-    __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
+    __ ldr(r6, MemOperand(sp));  // Copy of array literal.
+    __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
     __ str(result_register(), FieldMemOperand(r1, offset));

+    Label no_map_change;
+    __ JumpIfSmi(result_register(), &no_map_change);
     // Update the write barrier for the array store with r0 as the scratch
     // register.
-    __ RecordWrite(r1, Operand(offset), r2, result_register());
+    __ RecordWriteField(
+        r1, offset, result_register(), r2, kLRHasBeenSaved, kDontSaveFPRegs,
+        EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+    if (FLAG_smi_only_arrays) {
+      __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+      __ CheckFastSmiOnlyElements(r3, r2, &no_map_change);
+      __ push(r6);  // Copy of array literal.
+      __ CallRuntime(Runtime::kNonSmiElementStored, 1);
+    }
+    __ bind(&no_map_change);

     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }

   if (result_saved) {
     context()->PlugTOS();
   } else {
     context()->Plug(r0);
   }
 }
(...skipping 351 matching lines...)
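The FLAG_smi_only_arrays block implements the new elements-kind tracking: an array whose elements have only ever been smis can stay in a faster representation, and the first non-smi store must transition it, which is what the Runtime::kNonSmiElementStored call does after CheckFastSmiOnlyElements. A toy model of that one-way transition (names invented for illustration):

    #include <cstdint>
    #include <vector>

    // Toy elements-kind model (names invented for illustration).
    enum class ElementsKind { kFastSmiOnly, kFastObject };

    struct ToyArray {
      ElementsKind kind = ElementsKind::kFastSmiOnly;
      std::vector<int64_t> elements;  // Tagged words in the real VM.
    };

    // Mirrors the diff's flow: store first, then if the stored value was
    // not a smi and the array is still smi-only, transition its elements
    // kind (the Runtime::kNonSmiElementStored call in the real code).
    void StoreElement(ToyArray* array, size_t index, int64_t value,
                      bool is_smi) {
      array->elements[index] = value;
      if (!is_smi && array->kind == ElementsKind::kFastSmiOnly) {
        array->kind = ElementsKind::kFastObject;  // One-way transition.
      }
    }

    int main() {
      ToyArray a;
      a.elements.resize(2);
      StoreElement(&a, 0, 7, /*is_smi=*/true);        // Stays smi-only.
      StoreElement(&a, 1, 0xDEAD, /*is_smi=*/false);  // Forces transition.
      return a.kind == ElementsKind::kFastObject ? 0 : 1;
    }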
       __ mov(r3, Operand(var->name()));
       __ push(r3);
       __ CallRuntime(Runtime::kThrowReferenceError, 1);
       // Perform the assignment.
       __ bind(&assign);
       __ str(result_register(), location);
       if (var->IsContextSlot()) {
         // RecordWrite may destroy all its register arguments.
         __ mov(r3, result_register());
         int offset = Context::SlotOffset(var->index());
-        __ RecordWrite(r1, Operand(offset), r2, r3);
+        __ RecordWriteContextSlot(
+            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
       }
     }

   } else if (var->mode() != Variable::CONST) {
     // Assignment to var or initializing assignment to let.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, r1);
       if (FLAG_debug_code && op == Token::INIT_LET) {
         // Check for an uninitialized let binding.
         __ ldr(r2, location);
         __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
         __ Check(eq, "Let binding re-initialization.");
       }
       // Perform the assignment.
       __ str(r0, location);
       if (var->IsContextSlot()) {
         __ mov(r3, r0);
-        __ RecordWrite(r1, Operand(Context::SlotOffset(var->index())), r2, r3);
+        int offset = Context::SlotOffset(var->index());
+        __ RecordWriteContextSlot(
+            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
       }
     } else {
       ASSERT(var->IsLookupSlot());
       __ push(r0);  // Value.
       __ mov(r1, Operand(var->name()));
       __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
       __ Push(cp, r1, r0);  // Context, name, strict mode.
       __ CallRuntime(Runtime::kStoreContextSlot, 4);
     }
   }
(...skipping 754 matching lines...)
   ASSERT(args->length() == 1);
   Label done, null, function, non_function_constructor;

   VisitForAccumulatorValue(args->at(0));

   // If the object is a smi, we return null.
   __ JumpIfSmi(r0, &null);

   // Check that the object is a JS object but take special care of JS
   // functions to make sure they have 'Function' as their class.
+  // Assume that there are only two callable types, and one of them is at
+  // either end of the type range for JS object types. Saves extra comparisons.
+  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
   __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
   // Map is now in r0.
   __ b(lt, &null);
+  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
+                FIRST_SPEC_OBJECT_TYPE + 1);
+  __ b(eq, &function);

-  // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and
-  // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
-  // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
-  STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
-  STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
-                LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
-  __ cmp(r1, Operand(FIRST_CALLABLE_SPEC_OBJECT_TYPE));
-  __ b(ge, &function);
+  __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
+  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
+                LAST_SPEC_OBJECT_TYPE - 1);
+  __ b(eq, &function);
+  // Assume that there is no larger type.
+  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

-  // Check if the constructor in the map is a function.
+  // Check if the constructor in the map is a JS function.
   __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
   __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
   __ b(ne, &non_function_constructor);

   // r0 now contains the constructor function. Grab the
   // instance class name from there.
   __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
   __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
   __ b(&done);

(...skipping 157 matching lines...)
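The rewritten checks depend on the instance-type enum layout spelled out by the STATIC_ASSERTs: exactly two callable types, sitting at the two ends of the spec-object range, so the same comparisons that bound the range also detect callables. A compile-time sketch of that layout trick with an invented enum:

    // Toy instance-type layout mirroring the STATIC_ASSERTs in the diff:
    // the two callable types bracket the non-callable spec-object range,
    // so after one bounds check, equality at either end means "callable".
    // The enum values are invented for illustration.
    enum ToyInstanceType {
      TOY_FIRST_SPEC_OBJECT_TYPE,            // Callable (e.g. function proxy).
      TOY_FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
      TOY_SOME_OBJECT_TYPE,
      TOY_LAST_NONCALLABLE_SPEC_OBJECT_TYPE,
      TOY_LAST_SPEC_OBJECT_TYPE,             // Callable (e.g. function).
      TOY_LAST_TYPE = TOY_LAST_SPEC_OBJECT_TYPE
    };

    static_assert(TOY_FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                      TOY_FIRST_SPEC_OBJECT_TYPE + 1,
                  "callable type sits at the bottom of the range");
    static_assert(TOY_LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                      TOY_LAST_SPEC_OBJECT_TYPE - 1,
                  "callable type sits at the top of the range");

    bool IsCallable(ToyInstanceType t) {
      // Two comparisons total: one also serves as the lower-bound check.
      return t == TOY_FIRST_SPEC_OBJECT_TYPE || t == TOY_LAST_SPEC_OBJECT_TYPE;
    }

    int main() {
      return (IsCallable(TOY_LAST_SPEC_OBJECT_TYPE) &&
              !IsCallable(TOY_SOME_OBJECT_TYPE)) ? 0 : 1;
    }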
   __ JumpIfSmi(r1, &done);

   // If the object is not a value type, return the value.
   __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
   __ b(ne, &done);

   // Store the value.
   __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
   // Update the write barrier. Save the value as it will be
   // overwritten by the write barrier code and is needed afterward.
-  __ RecordWrite(r1, Operand(JSValue::kValueOffset - kHeapObjectTag), r2, r3);
+  __ mov(r2, r0);
+  __ RecordWriteField(
+      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);

   __ bind(&done);
   context()->Plug(r0);
 }


 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);

   // Load the argument on the stack and call the stub.
(...skipping 267 matching lines...)
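The added __ mov(r2, r0) exists because RecordWriteField, like the old RecordWrite, may clobber its register arguments, while r0 must survive as the expression result; the barrier therefore gets a disposable copy. A trivial sketch of the pattern (toy types):

    #include <cassert>

    struct ToyReg { int value; };

    // Stands in for a helper that may trash its register arguments.
    void BarrierThatClobbers(ToyReg* value_reg) { value_reg->value = -1; }

    int main() {
      ToyReg r0{42}, r2{0};
      r2 = r0;                   // __ mov(r2, r0): copy before the call.
      BarrierThatClobbers(&r2);  // Only the copy is clobbered.
      assert(r0.value == 42);    // The result register survives.
      return 0;
    }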
   __ add(index2,
          scratch1,
          Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));

   // Swap elements.
   __ ldr(scratch1, MemOperand(index1, 0));
   __ ldr(scratch2, MemOperand(index2, 0));
   __ str(scratch1, MemOperand(index2, 0));
   __ str(scratch2, MemOperand(index1, 0));

-  Label new_space;
-  __ InNewSpace(elements, scratch1, eq, &new_space);
+  Label no_remembered_set;
+  __ CheckPageFlag(elements,
+                   scratch1,
+                   1 << MemoryChunk::SCAN_ON_SCAVENGE,
+                   ne,
+                   &no_remembered_set);
   // Possible optimization: do a check that both values are Smis
   // (or them and test against Smi mask.)

-  __ mov(scratch1, elements);
-  __ RecordWriteHelper(elements, index1, scratch2);
-  __ RecordWriteHelper(scratch1, index2, scratch2);  // scratch1 holds elements.
+  // We are swapping two objects in an array and the incremental marker never
+  // pauses in the middle of scanning a single object. Therefore the
+  // incremental marker is not disturbed, so we don't need to call the
+  // RecordWrite stub that notifies the incremental marker.
+  __ RememberedSetHelper(elements,
+                         index1,
+                         scratch2,
+                         kDontSaveFPRegs,
+                         MacroAssembler::kFallThroughAtEnd);
+  __ RememberedSetHelper(elements,
+                         index2,
+                         scratch2,
+                         kDontSaveFPRegs,
+                         MacroAssembler::kFallThroughAtEnd);

-  __ bind(&new_space);
+  __ bind(&no_remembered_set);
   // We are done. Drop elements from the stack, and return undefined.
   __ Drop(3);
   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   __ jmp(&done);

   __ bind(&slow_case);
   __ CallRuntime(Runtime::kSwapElements, 3);

   __ bind(&done);
   context()->Plug(r0);
(...skipping 727 matching lines...)
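CheckPageFlag replaces the old InNewSpace test: under the new GC, each page's header carries flags, and SCAN_ON_SCAVENGE marks pages whose slots are rescanned anyway, so stores into them need no remembered-set entry. The test masks the object address down to its page header and tests one bit; a hedged sketch assuming power-of-two page alignment (constants illustrative, not V8's):

    #include <cstdint>
    #include <cstdlib>

    // Toy page layout: pages are 2^19-byte aligned and start with a flags
    // word, so any object address can be masked down to its page header.
    constexpr uintptr_t kToyPageSize = uintptr_t{1} << 19;
    constexpr uintptr_t kToyPageMask = ~(kToyPageSize - 1);
    constexpr uint32_t kScanOnScavenge = 1u << 0;

    struct ToyPage {
      uint32_t flags;
    };

    inline ToyPage* PageFromAddress(uintptr_t addr) {
      return reinterpret_cast<ToyPage*>(addr & kToyPageMask);
    }

    // The emitted code has exactly this shape: mask the address, load the
    // flags word, test one bit, branch.
    inline bool NeedsRememberedSet(uintptr_t object_addr) {
      return (PageFromAddress(object_addr)->flags & kScanOnScavenge) == 0;
    }

    int main() {
      void* raw = std::aligned_alloc(kToyPageSize, kToyPageSize);
      ToyPage* page = static_cast<ToyPage*>(raw);
      page->flags = kScanOnScavenge;  // Page is scanned on scavenge anyway.
      uintptr_t object = reinterpret_cast<uintptr_t>(raw) + 64;
      bool needs = NeedsRememberedSet(object);  // Expect false.
      std::free(raw);
      return needs ? 1 : 0;
    }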

     context()->Plug(r0);
   } else {
     // This expression cannot throw a reference error at the top level.
     VisitInCurrentContext(expr);
   }
 }


 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
-                                                 Handle<String> check,
-                                                 Label* if_true,
-                                                 Label* if_false,
-                                                 Label* fall_through) {
+                                                 Handle<String> check) {
+  Label materialize_true, materialize_false;
+  Label* if_true = NULL;
+  Label* if_false = NULL;
+  Label* fall_through = NULL;
+  context()->PrepareTest(&materialize_true, &materialize_false,
+                         &if_true, &if_false, &fall_through);
+
   { AccumulatorValueContext context(this);
     VisitForTypeofValue(expr);
   }
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);

   if (check->Equals(isolate()->heap()->number_symbol())) {
     __ JumpIfSmi(r0, if_true);
     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
     __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
     __ cmp(r0, ip);
(...skipping 20 matching lines...)
     __ b(eq, if_true);
     __ JumpIfSmi(r0, if_false);
     // Check for undetectable objects => true.
     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
     __ tst(r1, Operand(1 << Map::kIsUndetectable));
     Split(ne, if_true, if_false, fall_through);

   } else if (check->Equals(isolate()->heap()->function_symbol())) {
     __ JumpIfSmi(r0, if_false);
-    __ CompareObjectType(r0, r1, r0, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
-    Split(ge, if_true, if_false, fall_through);
-
+    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
+    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
+    __ b(eq, if_true);
+    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
+    Split(eq, if_true, if_false, fall_through);
   } else if (check->Equals(isolate()->heap()->object_symbol())) {
     __ JumpIfSmi(r0, if_false);
     if (!FLAG_harmony_typeof) {
       __ CompareRoot(r0, Heap::kNullValueRootIndex);
       __ b(eq, if_true);
     }
     // Check for JS objects => true.
     __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
     __ b(lt, if_false);
     __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
     __ b(gt, if_false);
     // Check for undetectable objects => false.
     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
     __ tst(r1, Operand(1 << Map::kIsUndetectable));
     Split(eq, if_true, if_false, fall_through);
   } else {
     if (if_false != fall_through) __ jmp(if_false);
   }
-}
-
-
-void FullCodeGenerator::EmitLiteralCompareUndefined(Expression* expr,
-                                                    Label* if_true,
-                                                    Label* if_false,
-                                                    Label* fall_through) {
-  VisitForAccumulatorValue(expr);
-  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
-
-  __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
-  Split(eq, if_true, if_false, fall_through);
+  context()->Plug(if_true, if_false);
 }
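
With function proxies, typeof "function" can no longer be a single >= range check; the new code tests exactly the two callable instance types (JS_FUNCTION_TYPE, then JS_FUNCTION_PROXY_TYPE, per the NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2 assert). The decision logic those two compares implement, as a toy:

    // What the two emitted compares implement: typeof x == "function"
    // holds for exactly two instance types. Enum values are invented.
    enum ToyType {
      TOY_JS_FUNCTION_TYPE,
      TOY_JS_FUNCTION_PROXY_TYPE,
      TOY_JS_OBJECT_TYPE
    };

    bool TypeofIsFunction(ToyType t) {
      if (t == TOY_JS_FUNCTION_TYPE) return true;  // __ b(eq, if_true)
      return t == TOY_JS_FUNCTION_PROXY_TYPE;      // Split(eq, ...)
    }

    int main() {
      return (TypeofIsFunction(TOY_JS_FUNCTION_TYPE) &&
              TypeofIsFunction(TOY_JS_FUNCTION_PROXY_TYPE) &&
              !TypeofIsFunction(TOY_JS_OBJECT_TYPE)) ? 0 : 1;
    }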


 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
   Comment cmnt(masm_, "[ CompareOperation");
   SetSourcePosition(expr->position());

+  // First we try a fast inlined version of the compare when one of
+  // the operands is a literal.
+  if (TryLiteralCompare(expr)) return;
+
   // Always perform the comparison for its control flow. Pack the result
   // into the expression's context after the comparison is performed.
-
   Label materialize_true, materialize_false;
   Label* if_true = NULL;
   Label* if_false = NULL;
   Label* fall_through = NULL;
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);

-  // First we try a fast inlined version of the compare when one of
-  // the operands is a literal.
-  if (TryLiteralCompare(expr, if_true, if_false, fall_through)) {
-    context()->Plug(if_true, if_false);
-    return;
-  }
-
   Token::Value op = expr->op();
   VisitForStackValue(expr->left());
   switch (op) {
     case Token::IN:
       VisitForStackValue(expr->right());
       __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
       PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
       __ LoadRoot(ip, Heap::kTrueValueRootIndex);
       __ cmp(r0, ip);
       Split(eq, if_true, if_false, fall_through);
(...skipping 66 matching lines...)
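For readers new to this file, Split(cond, if_true, if_false, fall_through) is the codegen helper these emitters funnel into: it emits the cheapest branch shape given which label physically follows. A sketch of that contract, under the assumption that "emitting" just prints the would-be instruction:

    #include <cstdio>

    // Toy model of the Split helper's contract: emit at most one branch
    // by falling through to whichever label immediately follows.
    struct ToyLabel { const char* name; };

    void Split(const char* cond, const char* neg_cond, ToyLabel* if_true,
               ToyLabel* if_false, ToyLabel* fall_through) {
      if (if_false == fall_through) {
        std::printf("  b(%s, %s)\n", cond, if_true->name);
      } else if (if_true == fall_through) {
        std::printf("  b(%s, %s)\n", neg_cond, if_false->name);
      } else {
        // Neither label follows: branch on cond, then jump unconditionally.
        std::printf("  b(%s, %s)\n  jmp(%s)\n", cond, if_true->name,
                    if_false->name);
      }
    }

    int main() {
      ToyLabel t{"if_true"}, f{"if_false"};
      Split("eq", "ne", &t, &f, &f);  // Falls through to if_false: one branch.
      return 0;
    }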
       Split(cond, if_true, if_false, fall_through);
     }
   }

   // Convert the result of the comparison into one expected for this
   // expression's context.
   context()->Plug(if_true, if_false);
 }


-void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
-  Comment cmnt(masm_, "[ CompareToNull");
+void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
+                                              Expression* sub_expr,
+                                              NilValue nil) {
   Label materialize_true, materialize_false;
   Label* if_true = NULL;
   Label* if_false = NULL;
   Label* fall_through = NULL;
   context()->PrepareTest(&materialize_true, &materialize_false,
                          &if_true, &if_false, &fall_through);

-  VisitForAccumulatorValue(expr->expression());
+  VisitForAccumulatorValue(sub_expr);
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
-  __ LoadRoot(r1, Heap::kNullValueRootIndex);
+  Heap::RootListIndex nil_value = nil == kNullValue ?
+      Heap::kNullValueRootIndex :
+      Heap::kUndefinedValueRootIndex;
+  __ LoadRoot(r1, nil_value);
   __ cmp(r0, r1);
-  if (expr->is_strict()) {
+  if (expr->op() == Token::EQ_STRICT) {
     Split(eq, if_true, if_false, fall_through);
   } else {
+    Heap::RootListIndex other_nil_value = nil == kNullValue ?
+        Heap::kUndefinedValueRootIndex :
+        Heap::kNullValueRootIndex;
     __ b(eq, if_true);
-    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(r1, other_nil_value);
     __ cmp(r0, r1);
     __ b(eq, if_true);
     __ JumpIfSmi(r0, if_false);
     // It can be an undetectable object.
     __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
     __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
     __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
     __ cmp(r1, Operand(1 << Map::kIsUndetectable));
     Split(eq, if_true, if_false, fall_through);
   }
(...skipping 103 matching lines...)
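EmitLiteralCompareNil generalizes the deleted VisitCompareToNull: parameterized by which nil literal was written, strict equality tests only that literal, while sloppy equality also accepts the other nil and undetectable objects. The semantics the emitted code implements, modeled with toy values:

    // Toy model of the comparison EmitLiteralCompareNil emits. For strict
    // equality only the named nil matches; for sloppy equality null,
    // undefined, and undetectable host objects all compare equal to nil.
    enum class ToyValue { kNull, kUndefined, kUndetectable, kSmi, kObject };
    enum class Nil { kNullValue, kUndefinedValue };

    bool CompareNil(ToyValue v, Nil nil, bool strict) {
      ToyValue nil_value =
          nil == Nil::kNullValue ? ToyValue::kNull : ToyValue::kUndefined;
      if (v == nil_value) return true;
      if (strict) return false;  // Token::EQ_STRICT path: one compare only.
      ToyValue other_nil =
          nil == Nil::kNullValue ? ToyValue::kUndefined : ToyValue::kNull;
      return v == other_nil || v == ToyValue::kUndetectable;
    }

    int main() {
      return (CompareNil(ToyValue::kUndefined, Nil::kNullValue, false) &&
              !CompareNil(ToyValue::kUndefined, Nil::kNullValue, true)) ? 0 : 1;
    }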
   *context_length = 0;
   return previous_;
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM