Chromium Code Reviews

Diff: src/mips/full-codegen-mips.cc

Issue 8106002: MIPS: port Merge experimental/gc branch to the bleeding_edge. (Closed)
Patch Set: Rebased on r9598, greatly simplified. Created 9 years, 2 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 29 matching lines...)
40 #include "code-stubs.h" 40 #include "code-stubs.h"
41 #include "codegen.h" 41 #include "codegen.h"
42 #include "compiler.h" 42 #include "compiler.h"
43 #include "debug.h" 43 #include "debug.h"
44 #include "full-codegen.h" 44 #include "full-codegen.h"
45 #include "parser.h" 45 #include "parser.h"
46 #include "scopes.h" 46 #include "scopes.h"
47 #include "stub-cache.h" 47 #include "stub-cache.h"
48 48
49 #include "mips/code-stubs-mips.h" 49 #include "mips/code-stubs-mips.h"
50 #include "mips/macro-assembler-mips.h"
50 51
51 namespace v8 { 52 namespace v8 {
52 namespace internal { 53 namespace internal {
53 54
54 #define __ ACCESS_MASM(masm_) 55 #define __ ACCESS_MASM(masm_)
55 56
56 57
57 static unsigned GetPropertyId(Property* property) { 58 static unsigned GetPropertyId(Property* property) {
58 return property->id(); 59 return property->id();
59 } 60 }
(...skipping 147 matching lines...) Expand 10 before | Expand all | Expand 10 after
     // Copy any necessary parameters into the context.
     int num_parameters = info->scope()->num_parameters();
     for (int i = 0; i < num_parameters; i++) {
       Variable* var = scope()->parameter(i);
       if (var->IsContextSlot()) {
         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
             (num_parameters - 1 - i) * kPointerSize;
         // Load parameter from stack.
         __ lw(a0, MemOperand(fp, parameter_offset));
         // Store it in the context.
-        __ li(a1, Operand(Context::SlotOffset(var->index())));
-        __ addu(a2, cp, a1);
-        __ sw(a0, MemOperand(a2, 0));
-        // Update the write barrier. This clobbers all involved
-        // registers, so we have to use two more registers to avoid
-        // clobbering cp.
-        __ mov(a2, cp);
-        __ RecordWrite(a2, a1, a3);
+        MemOperand target = ContextOperand(cp, var->index());
+        __ sw(a0, target);
+
+        // Update the write barrier.
+        __ RecordWriteContextSlot(
+            cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
       }
     }
   }

   Variable* arguments = scope()->arguments();
   if (arguments != NULL) {
     // Function uses arguments object.
     Comment cmnt(masm_, "[ Allocate arguments object");
     if (!function_in_register) {
       // Load this again, if it's used by the local context below.
(...skipping 443 matching lines...)
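The hunk above is the pattern repeated throughout this file: a hand-computed slot address plus RecordWrite becomes a single RecordWriteContextSlot call. The invariant being maintained is the generational one: when a pointer to a young object is stored into an object that may live in old space, the written slot must be recorded so a scavenge can find the young object without scanning all of old space (the new barrier also feeds the incremental marker, which this sketch ignores). A minimal conceptual sketch in plain C++ — all names below are invented for illustration, none of this is V8 code:

#include <cstdio>
#include <unordered_set>

struct Obj {
  bool in_old_space;  // which generation the object currently lives in
  Obj* slot;          // a single pointer-valued field, e.g. a context slot
};

// Toy remembered set: old-space slots that may point into new space.
static std::unordered_set<Obj**> remembered_set;

// The write barrier: perform the store, then record the slot if needed.
void StoreWithBarrier(Obj* holder, Obj** slot, Obj* value) {
  *slot = value;                                 // the "__ sw(a0, target)" part
  if (holder->in_old_space && value != NULL &&   // old object -> young object?
      !value->in_old_space) {
    remembered_set.insert(slot);                 // what the barrier records
  }
}

int main() {
  Obj old_context = {true, NULL};   // a context that was promoted to old space
  Obj young_param = {false, NULL};  // a freshly allocated parameter value
  StoreWithBarrier(&old_context, &old_context.slot, &young_param);
  std::printf("remembered slots: %zu\n", remembered_set.size());  // prints 1
  return 0;
}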
                                Register scratch0,
                                Register scratch1) {
   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
   ASSERT(!scratch0.is(src));
   ASSERT(!scratch0.is(scratch1));
   ASSERT(!scratch1.is(src));
   MemOperand location = VarOperand(var, scratch0);
   __ sw(src, location);
   // Emit the write barrier code if the location is in the heap.
   if (var->IsContextSlot()) {
-    __ RecordWrite(scratch0,
-                   Operand(Context::SlotOffset(var->index())),
-                   scratch1,
-                   src);
+    __ RecordWriteContextSlot(scratch0,
+                              location.offset(),
+                              src,
+                              scratch1,
+                              kRAHasBeenSaved,
+                              kDontSaveFPRegs);
   }
 }


 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
                                                      bool should_normalize,
                                                      Label* if_true,
                                                      Label* if_false) {
   // Only prepare for bailouts before splits if we're in a test
   // context. Otherwise, we let the Visit function deal with the
(...skipping 56 matching lines...)
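SetVar above emits the barrier only when var->IsContextSlot(); a stack-allocated variable gets a plain store. The stack is part of the GC's root set and is scanned on every collection, so a store into a stack slot never needs to be recorded, while a store into a heap-allocated context does. A toy illustration of that branch, with hypothetical names rather than V8 APIs:

#include <cstdio>
#include <vector>

// The two kinds of storage SetVar distinguishes (illustrative enum).
enum SlotKind { STACK_SLOT, CONTEXT_SLOT };

// Toy remembered set of heap slots that were written to.
static std::vector<const void*> remembered_set;

void StoreAndMaybeBarrier(SlotKind kind, const void* slot_address) {
  // ... the store itself would happen here (the "__ sw(src, location)") ...
  if (kind == CONTEXT_SLOT) {
    remembered_set.push_back(slot_address);  // the RecordWriteContextSlot path
  }
  // STACK_SLOT: nothing to record; the stack is scanned as a root anyway.
}

int main() {
  int stack_local = 0;
  int heap_slot = 0;  // stands in for a slot inside a heap-allocated context
  StoreAndMaybeBarrier(STACK_SLOT, &stack_local);
  StoreAndMaybeBarrier(CONTEXT_SLOT, &heap_slot);
  std::printf("recorded %zu slot(s)\n", remembered_set.size());  // prints 1
  return 0;
}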
         __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
         __ Check(ne, "Declaration in catch context.",
                  a1, Operand(t0));
       }
       if (function != NULL) {
         Comment cmnt(masm_, "[ Declaration");
         VisitForAccumulatorValue(function);
         __ sw(result_register(), ContextOperand(cp, variable->index()));
         int offset = Context::SlotOffset(variable->index());
         // We know that we have written a function, which is not a smi.
-        __ mov(a1, cp);
-        __ RecordWrite(a1, Operand(offset), a2, result_register());
+        __ RecordWriteContextSlot(cp,
+                                  offset,
+                                  result_register(),
+                                  a2,
+                                  kRAHasBeenSaved,
+                                  kDontSaveFPRegs,
+                                  EMIT_REMEMBERED_SET,
+                                  OMIT_SMI_CHECK);
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
       } else if (mode == Variable::CONST || mode == Variable::LET) {
         Comment cmnt(masm_, "[ Declaration");
         __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
         __ sw(at, ContextOperand(cp, variable->index()));
         // No write barrier since the_hole_value is in old space.
         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
       }
       break;

(...skipping 726 matching lines...)
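The declaration hunk passes EMIT_REMEMBERED_SET and OMIT_SMI_CHECK, leaning on the comment just above it: the stored value is a function, never a small integer. In V8's 32-bit tagging a smi has a clear low bit and a heap pointer has it set, and a smi store needs no barrier, so the generic barrier starts with a tag test that callers may skip when the answer is already known. A hedged sketch of that decision — the tag convention mirrors 32-bit V8, but the function itself is a toy, not the RecordWrite stub:

#include <cstdint>
#include <cstdio>

// Illustrative tag convention: low bit 0 = small integer (smi), low bit 1 =
// pointer to a heap object.
static const uintptr_t kSmiTagMask = 1;
static const uintptr_t kHeapObjectTagValue = 1;

bool NeedsWriteBarrier(uintptr_t tagged_value, bool omit_smi_check) {
  if (!omit_smi_check &&
      (tagged_value & kSmiTagMask) != kHeapObjectTagValue) {
    return false;  // A smi carries no pointer, so there is nothing to record.
  }
  // Value is (or is promised to be) a heap object: the slot must be recorded.
  return true;
}

int main() {
  uintptr_t smi = 42u << 1;            // tagged small integer, low bit clear
  uintptr_t heap_object = 0x1000 | 1;  // tagged heap pointer, low bit set
  std::printf("%d %d %d\n",
              NeedsWriteBarrier(smi, false),          // 0: filtered by the check
              NeedsWriteBarrier(heap_object, false),  // 1: barrier taken
              NeedsWriteBarrier(heap_object, true));  // 1: check skipped
  return 0;
}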
     VisitForAccumulatorValue(subexpr);

     // Store the subexpression value in the array's elements.
     __ lw(a1, MemOperand(sp));  // Copy of array literal.
     __ lw(a1, FieldMemOperand(a1, JSObject::kElementsOffset));
     int offset = FixedArray::kHeaderSize + (i * kPointerSize);
     __ sw(result_register(), FieldMemOperand(a1, offset));

     // Update the write barrier for the array store with v0 as the scratch
     // register.
-    __ RecordWrite(a1, Operand(offset), a2, result_register());
+    __ RecordWriteField(
+        a1, offset, result_register(), a2, kRAHasBeenSaved, kDontSaveFPRegs);

     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
   }

   if (result_saved) {
     context()->PlugTOS();
   } else {
     context()->Plug(v0);
   }
 }
(...skipping 356 matching lines...)
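RecordWriteField, like the FieldMemOperand used by the store above it, takes the field's byte offset from the start of the object; because V8 heap pointers are tagged, the real address is object + offset - kHeapObjectTag (the same adjustment the old code further down spells out as JSValue::kValueOffset - kHeapObjectTag). A small sketch of that address arithmetic, using illustrative constants for a 32-bit layout and never touching a real heap:

#include <cstdint>
#include <cstdio>

static const uintptr_t kHeapObjectTag = 1;  // low bit set on heap pointers
static const int kPointerSize = 4;
static const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map word + length

// Untagged address of element i of a FixedArray whose *tagged* pointer is
// tagged_array -- the address FieldMemOperand(a1, offset) encodes.
uintptr_t ElementAddress(uintptr_t tagged_array, int i) {
  int offset = kFixedArrayHeaderSize + i * kPointerSize;
  return tagged_array + offset - kHeapObjectTag;
}

int main() {
  uintptr_t tagged_array = 0x2000 + kHeapObjectTag;  // pretend tagged pointer
  std::printf("element 0 at %#lx, element 3 at %#lx\n",
              (unsigned long)ElementAddress(tagged_array, 0),
              (unsigned long)ElementAddress(tagged_array, 3));
  return 0;
}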
       __ li(a3, Operand(var->name()));
       __ push(a3);
       __ CallRuntime(Runtime::kThrowReferenceError, 1);
       // Perform the assignment.
       __ bind(&assign);
       __ sw(result_register(), location);
       if (var->IsContextSlot()) {
         // RecordWrite may destroy all its register arguments.
         __ mov(a3, result_register());
         int offset = Context::SlotOffset(var->index());
-        __ RecordWrite(a1, Operand(offset), a2, a3);
+        __ RecordWriteContextSlot(
+            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
       }
     }

   } else if (var->mode() != Variable::CONST) {
     // Assignment to var or initializing assignment to let.
     if (var->IsStackAllocated() || var->IsContextSlot()) {
       MemOperand location = VarOperand(var, a1);
       if (FLAG_debug_code && op == Token::INIT_LET) {
         // Check for an uninitialized let binding.
         __ lw(a2, location);
         __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
         __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
       }
       // Perform the assignment.
       __ sw(v0, location);
       if (var->IsContextSlot()) {
         __ mov(a3, v0);
-        __ RecordWrite(a1, Operand(Context::SlotOffset(var->index())), a2, a3);
+        int offset = Context::SlotOffset(var->index());
+        __ RecordWriteContextSlot(
+            a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
       }
     } else {
       ASSERT(var->IsLookupSlot());
       __ push(v0);  // Value.
       __ li(a1, Operand(var->name()));
       __ li(a0, Operand(Smi::FromInt(strict_mode_flag())));
       __ Push(cp, a1, a0);  // Context, name, strict mode.
       __ CallRuntime(Runtime::kStoreContextSlot, 4);
     }
   }
(...skipping 945 matching lines...)
   __ JumpIfSmi(a1, &done);

   // If the object is not a value type, return the value.
   __ GetObjectType(a1, a2, a2);
   __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

   // Store the value.
   __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
   // Update the write barrier. Save the value as it will be
   // overwritten by the write barrier code and is needed afterward.
-  __ RecordWrite(a1, Operand(JSValue::kValueOffset - kHeapObjectTag), a2, a3);
+  __ mov(a2, v0);
+  __ RecordWriteField(
+      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

   __ bind(&done);
   context()->Plug(v0);
 }


 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
   ASSERT_EQ(args->length(), 1);

   // Load the argument on the stack and call the stub.
(...skipping 272 matching lines...)
   __ Addu(index1, scratch1, index1);
   __ sll(index2, index2, kPointerSizeLog2 - kSmiTagSize);
   __ Addu(index2, scratch1, index2);

   // Swap elements.
   __ lw(scratch1, MemOperand(index1, 0));
   __ lw(scratch2, MemOperand(index2, 0));
   __ sw(scratch1, MemOperand(index2, 0));
   __ sw(scratch2, MemOperand(index1, 0));

-  Label new_space;
-  __ InNewSpace(elements, scratch1, eq, &new_space);
+  Label no_remembered_set;
+  __ CheckPageFlag(elements,
+                   scratch1,
+                   1 << MemoryChunk::SCAN_ON_SCAVENGE,
+                   ne,
+                   &no_remembered_set);
   // Possible optimization: do a check that both values are Smis
   // (or them and test against Smi mask).

-  __ mov(scratch1, elements);
-  __ RecordWriteHelper(elements, index1, scratch2);
-  __ RecordWriteHelper(scratch1, index2, scratch2);  // scratch1 holds elements.
+  // We are swapping two objects in an array and the incremental marker never
+  // pauses in the middle of scanning a single object. Therefore the
+  // incremental marker is not disturbed, so we don't need to call the
+  // RecordWrite stub that notifies the incremental marker.
+  __ RememberedSetHelper(
+      index1, scratch2, kDontSaveFPRegs, MacroAssembler::kFallThroughAtEnd);
+  __ RememberedSetHelper(
+      index2, scratch2, kDontSaveFPRegs, MacroAssembler::kFallThroughAtEnd);

-  __ bind(&new_space);
+  __ bind(&no_remembered_set);
   // We are done. Drop elements from the stack, and return undefined.
   __ Drop(3);
   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
   __ jmp(&done);

   __ bind(&slow_case);
   __ CallRuntime(Runtime::kSwapElements, 3);

   __ bind(&done);
   context()->Plug(v0);
(...skipping 1059 matching lines...)
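CheckPageFlag replaces InNewSpace in the hunk above: rather than asking whether the elements array lives in new space, the code masks the object's address down to its page header and tests the SCAN_ON_SCAVENGE bit, skipping the remembered-set update when the page will be scanned during scavenge anyway. The masking trick relies on pages being power-of-two aligned. A conceptual sketch under invented constants (real MemoryChunk layout and page size differ):

#include <cstdint>
#include <cstdio>
#include <cstring>

// Assumptions for this sketch: pages are 4 KB aligned and begin with a
// flags word. Only the "mask the slot address to reach the page header"
// idea is being illustrated.
static const uintptr_t kPageSize = 4096;
static const uintptr_t kPageAlignmentMask = kPageSize - 1;
static const uint32_t kScanOnScavengeBit = 1u << 0;

alignas(4096) static unsigned char g_page[kPageSize];  // one fake heap page

// Equivalent in spirit to CheckPageFlag(obj, ..., 1 << SCAN_ON_SCAVENGE, ...):
// derive the page start from any address inside the page and test one bit.
bool PageFlagSet(const void* address, uint32_t bit) {
  uintptr_t page_start =
      reinterpret_cast<uintptr_t>(address) & ~kPageAlignmentMask;
  uint32_t flags;
  std::memcpy(&flags, reinterpret_cast<const void*>(page_start), sizeof(flags));
  return (flags & bit) != 0;
}

int main() {
  uint32_t flags = kScanOnScavengeBit;         // page will be scanned anyway
  std::memcpy(g_page, &flags, sizeof(flags));  // flags word at page start
  const void* slot = g_page + 128;             // some slot inside the page
  if (PageFlagSet(slot, kScanOnScavengeBit)) {
    std::printf("skip remembered-set update\n");  // the &no_remembered_set path
  } else {
    std::printf("record slot in remembered set\n");
  }
  return 0;
}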
   *context_length = 0;
   return previous_;
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_MIPS