Chromium Code Reviews

Side by Side Diff: src/arm/full-codegen-arm.cc

Issue 7945009: Merge experimental/gc branch to the bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 3 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 21 matching lines...)
32 #include "code-stubs.h" 32 #include "code-stubs.h"
33 #include "codegen.h" 33 #include "codegen.h"
34 #include "compiler.h" 34 #include "compiler.h"
35 #include "debug.h" 35 #include "debug.h"
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "parser.h" 37 #include "parser.h"
38 #include "scopes.h" 38 #include "scopes.h"
39 #include "stub-cache.h" 39 #include "stub-cache.h"
40 40
41 #include "arm/code-stubs-arm.h" 41 #include "arm/code-stubs-arm.h"
42 #include "arm/macro-assembler-arm.h"
42 43
43 namespace v8 { 44 namespace v8 {
44 namespace internal { 45 namespace internal {
45 46
46 #define __ ACCESS_MASM(masm_) 47 #define __ ACCESS_MASM(masm_)
47 48
48 49
49 static unsigned GetPropertyId(Property* property) { 50 static unsigned GetPropertyId(Property* property) {
50 return property->id(); 51 return property->id();
51 } 52 }
(...skipping 146 matching lines...) Expand 10 before | Expand all | Expand 10 after
     // Copy any necessary parameters into the context.
     int num_parameters = info->scope()->num_parameters();
     for (int i = 0; i < num_parameters; i++) {
       Variable* var = scope()->parameter(i);
       if (var->IsContextSlot()) {
         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
             (num_parameters - 1 - i) * kPointerSize;
         // Load parameter from stack.
         __ ldr(r0, MemOperand(fp, parameter_offset));
         // Store it in the context.
-        __ mov(r1, Operand(Context::SlotOffset(var->index())));
-        __ str(r0, MemOperand(cp, r1));
-        // Update the write barrier. This clobbers all involved
-        // registers, so we have to use two more registers to avoid
-        // clobbering cp.
-        __ mov(r2, Operand(cp));
-        __ RecordWrite(r2, Operand(r1), r3, r0);
+        MemOperand target = ContextOperand(cp, var->index());
+        __ str(r0, target);
+
+        // Update the write barrier.
+        __ RecordWriteContextSlot(
+            cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
       }
     }
   }
 
   Variable* arguments = scope()->arguments();
   if (arguments != NULL) {
     // Function uses arguments object.
     Comment cmnt(masm_, "[ Allocate arguments object");
     if (!function_in_register) {
       // Load this again, if it's used by the local context below.
(...skipping 438 matching lines...)
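
Note on the chunk above: this is the pattern that recurs throughout the file. The store now goes through a ContextOperand, and the new barrier entry point takes the context register and the slot's byte offset directly, so cp no longer has to be copied into a spare register before the call. For readers new to the API, an annotated sketch of the call shape follows; the argument roles are inferred from the call sites in this patch, not quoted from macro-assembler-arm.h.

    // Illustrative only: same call as in the chunk above, with the argument
    // roles spelled out (inferred, not authoritative).
    __ RecordWriteContextSlot(
        cp,                // register holding the context object
        target.offset(),   // byte offset of the stored slot within the context
        r0,                // the value that was just stored
        r3,                // scratch register the barrier may clobber
        kLRHasBeenSaved,   // lr was already pushed in the function prologue
        kDontSaveFPRegs);  // no VFP registers need to be preserved
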
 void FullCodeGenerator::SetVar(Variable* var,
                                Register src,
                                Register scratch0,
                                Register scratch1) {
   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
   ASSERT(!scratch0.is(src));
   ASSERT(!scratch0.is(scratch1));
   ASSERT(!scratch1.is(src));
   MemOperand location = VarOperand(var, scratch0);
   __ str(src, location);
+
   // Emit the write barrier code if the location is in the heap.
   if (var->IsContextSlot()) {
-    __ RecordWrite(scratch0,
-                   Operand(Context::SlotOffset(var->index())),
-                   scratch1,
-                   src);
+    __ RecordWriteContextSlot(scratch0,
+                              location.offset(),
+                              src,
+                              scratch1,
+                              kLRHasBeenSaved,
+                              kDontSaveFPRegs);
   }
 }
 
 
 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
                                                      bool should_normalize,
                                                      Label* if_true,
                                                      Label* if_false) {
   // Only prepare for bailouts before splits if we're in a test
   // context.  Otherwise, we let the Visit function deal with the
(...skipping 55 matching lines...)
744 __ Check(ne, "Declaration in with context."); 747 __ Check(ne, "Declaration in with context.");
745 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); 748 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
746 __ Check(ne, "Declaration in catch context."); 749 __ Check(ne, "Declaration in catch context.");
747 } 750 }
748 if (function != NULL) { 751 if (function != NULL) {
749 Comment cmnt(masm_, "[ Declaration"); 752 Comment cmnt(masm_, "[ Declaration");
750 VisitForAccumulatorValue(function); 753 VisitForAccumulatorValue(function);
751 __ str(result_register(), ContextOperand(cp, variable->index())); 754 __ str(result_register(), ContextOperand(cp, variable->index()));
752 int offset = Context::SlotOffset(variable->index()); 755 int offset = Context::SlotOffset(variable->index());
753 // We know that we have written a function, which is not a smi. 756 // We know that we have written a function, which is not a smi.
754 __ mov(r1, Operand(cp)); 757 __ RecordWriteContextSlot(cp,
755 __ RecordWrite(r1, Operand(offset), r2, result_register()); 758 offset,
759 result_register(),
760 r2,
761 kLRHasBeenSaved,
762 kDontSaveFPRegs,
763 EMIT_REMEMBERED_SET,
764 OMIT_SMI_CHECK);
756 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 765 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
757 } else if (mode == Variable::CONST || mode == Variable::LET) { 766 } else if (mode == Variable::CONST || mode == Variable::LET) {
758 Comment cmnt(masm_, "[ Declaration"); 767 Comment cmnt(masm_, "[ Declaration");
759 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 768 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
760 __ str(ip, ContextOperand(cp, variable->index())); 769 __ str(ip, ContextOperand(cp, variable->index()));
761 // No write barrier since the_hole_value is in old space. 770 // No write barrier since the_hole_value is in old space.
762 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 771 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
763 } 772 }
764 break; 773 break;
765 774
(...skipping 729 matching lines...) Expand 10 before | Expand all | Expand 10 after
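
Note on the chunk above: this call site passes two extra trailing arguments. The comment in the code gives the reason: the stored value is a function, which is never a smi, so the barrier's smi check can be skipped. A hedged sketch of how the short and long forms appear to relate, assuming the trailing parameters are simply defaulted at the other call sites in this patch:

    // Short form, as used elsewhere in this file; the remembered-set action
    // and smi check are presumably defaulted (remembered set emitted, smi
    // check performed inline).
    __ RecordWriteContextSlot(
        cp, offset, result_register(), r2, kLRHasBeenSaved, kDontSaveFPRegs);

    // Explicit form used here: the remembered set is still updated, but
    // OMIT_SMI_CHECK skips the "is the value a smi?" test, which is safe
    // because a closure is always a heap object.
    __ RecordWriteContextSlot(
        cp, offset, result_register(), r2, kLRHasBeenSaved, kDontSaveFPRegs,
        EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
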
     VisitForAccumulatorValue(subexpr);
 
     // Store the subexpression value in the array's elements.
     __ ldr(r1, MemOperand(sp));  // Copy of array literal.
     __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
     __ str(result_register(), FieldMemOperand(r1, offset));
 
     // Update the write barrier for the array store with r0 as the scratch
     // register.
-    __ RecordWrite(r1, Operand(offset), r2, result_register());
+    __ RecordWriteField(
+        r1, offset, result_register(), r2, kLRHasBeenSaved, kDontSaveFPRegs);
 
     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }
 
   if (result_saved) {
     context()->PlugTOS();
   } else {
     context()->Plug(r0);
   }
 }
(...skipping 351 matching lines...)
       __ mov(r3, Operand(var->name()));
       __ push(r3);
       __ CallRuntime(Runtime::kThrowReferenceError, 1);
       // Perform the assignment.
       __ bind(&assign);
       __ str(result_register(), location);
       if (var->IsContextSlot()) {
         // RecordWrite may destroy all its register arguments.
         __ mov(r3, result_register());
         int offset = Context::SlotOffset(var->index());
-        __ RecordWrite(r1, Operand(offset), r2, r3);
+        __ RecordWriteContextSlot(
+            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
       }
     }
 
   } else if (var->mode() != Variable::CONST) {
     // Assignment to var or initializing assignment to let.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, r1);
       if (FLAG_debug_code && op == Token::INIT_LET) {
         // Check for an uninitialized let binding.
         __ ldr(r2, location);
         __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
         __ Check(eq, "Let binding re-initialization.");
       }
       // Perform the assignment.
       __ str(r0, location);
       if (var->IsContextSlot()) {
         __ mov(r3, r0);
-        __ RecordWrite(r1, Operand(Context::SlotOffset(var->index())), r2, r3);
+        int offset = Context::SlotOffset(var->index());
+        __ RecordWriteContextSlot(
+            r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
       }
     } else {
       ASSERT(var->IsLookupSlot());
       __ push(r0);  // Value.
       __ mov(r1, Operand(var->name()));
       __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
       __ Push(cp, r1, r0);  // Context, name, strict mode.
       __ CallRuntime(Runtime::kStoreContextSlot, 4);
     }
   }
(...skipping 945 matching lines...)
   __ JumpIfSmi(r1, &done);
 
   // If the object is not a value type, return the value.
   __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
   __ b(ne, &done);
 
   // Store the value.
   __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
   // Update the write barrier.  Save the value as it will be
   // overwritten by the write barrier code and is needed afterward.
-  __ RecordWrite(r1, Operand(JSValue::kValueOffset - kHeapObjectTag), r2, r3);
+  __ mov(r2, r0);
+  __ RecordWriteField(
+      r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
 
   __ bind(&done);
   context()->Plug(r0);
 }
 
 
 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);
 
   // Load the argument on the stack and call the stub.
(...skipping 267 matching lines...)
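
Note on the JSValue chunk above: two details are worth calling out, both inferred from the diff rather than from the macro-assembler header. First, RecordWriteField takes the same field offset that FieldMemOperand takes; the old RecordWrite call needed kHeapObjectTag subtracted by hand. Second, the value is copied into r2 before the barrier because the barrier may clobber the registers it is given, and r0 is still needed afterwards for context()->Plug(r0). A condensed sketch:

    // Store the wrapped value, then record the write.
    __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
    __ mov(r2, r0);  // Preserve r0: the barrier may clobber its value register.
    __ RecordWriteField(
        r1,                     // object that was written into
        JSValue::kValueOffset,  // field offset; tag handling is left to the barrier
        r2,                     // copy of the stored value
        r3,                     // scratch register
        kLRHasBeenSaved, kDontSaveFPRegs);
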
   __ add(index2,
          scratch1,
          Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
 
   // Swap elements.
   __ ldr(scratch1, MemOperand(index1, 0));
   __ ldr(scratch2, MemOperand(index2, 0));
   __ str(scratch1, MemOperand(index2, 0));
   __ str(scratch2, MemOperand(index1, 0));
 
-  Label new_space;
-  __ InNewSpace(elements, scratch1, eq, &new_space);
+  Label no_remembered_set;
+  __ CheckPageFlag(elements,
+                   scratch1,
+                   1 << MemoryChunk::SCAN_ON_SCAVENGE,
+                   ne,
+                   &no_remembered_set);
   // Possible optimization: do a check that both values are Smis
   // (or them and test against Smi mask.)
 
-  __ mov(scratch1, elements);
-  __ RecordWriteHelper(elements, index1, scratch2);
-  __ RecordWriteHelper(scratch1, index2, scratch2);  // scratch1 holds elements.
+  // We are swapping two objects in an array and the incremental marker never
+  // pauses in the middle of scanning a single object.  Therefore the
+  // incremental marker is not disturbed, so we don't need to call the
+  // RecordWrite stub that notifies the incremental marker.
+  __ RememberedSetHelper(
+      index1, scratch2, kDontSaveFPRegs, MacroAssembler::kFallThroughAtEnd);
+  __ RememberedSetHelper(
+      index2, scratch2, kDontSaveFPRegs, MacroAssembler::kFallThroughAtEnd);
 
-  __ bind(&new_space);
+  __ bind(&no_remembered_set);
   // We are done. Drop elements from the stack, and return undefined.
   __ Drop(3);
   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
   __ jmp(&done);
 
   __ bind(&slow_case);
   __ CallRuntime(Runtime::kSwapElements, 3);
 
   __ bind(&done);
   context()->Plug(r0);
(...skipping 1051 matching lines...)
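
Note on the EmitSwapElements chunk above: the old code skipped the barrier only when the array was in new space; the new code checks the page's SCAN_ON_SCAVENGE flag instead and, when that flag is clear, records both written slots in the remembered set without going through the full RecordWrite stub. The new comment explains why the incremental marker need not be notified; the flag check itself appears to generalize the old new-space test, presumably because a page flagged SCAN_ON_SCAVENGE is scanned wholesale during a scavenge, so individual slots need not be recorded. A sketch of the resulting control flow (the same calls as above, condensed for readability):

    Label no_remembered_set;
    // Branch past the helpers if the elements page is already flagged
    // SCAN_ON_SCAVENGE; recording individual slots would be redundant there.
    __ CheckPageFlag(elements, scratch1,
                     1 << MemoryChunk::SCAN_ON_SCAVENGE, ne,
                     &no_remembered_set);
    // Otherwise record both written slots; the incremental marker is not
    // involved for an intra-object swap.
    __ RememberedSetHelper(
        index1, scratch2, kDontSaveFPRegs, MacroAssembler::kFallThroughAtEnd);
    __ RememberedSetHelper(
        index2, scratch2, kDontSaveFPRegs, MacroAssembler::kFallThroughAtEnd);
    __ bind(&no_remembered_set);
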
   *context_length = 0;
   return previous_;
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM