OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 187 matching lines...) |
198 for (int i = 0; i < num_parameters; i++) { | 198 for (int i = 0; i < num_parameters; i++) { |
199 Variable* var = scope()->parameter(i); | 199 Variable* var = scope()->parameter(i); |
200 if (var->IsContextSlot()) { | 200 if (var->IsContextSlot()) { |
201 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 201 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
202 (num_parameters - 1 - i) * kPointerSize; | 202 (num_parameters - 1 - i) * kPointerSize; |
203 // Load parameter from stack. | 203 // Load parameter from stack. |
204 __ mov(eax, Operand(ebp, parameter_offset)); | 204 __ mov(eax, Operand(ebp, parameter_offset)); |
205 // Store it in the context. | 205 // Store it in the context. |
206 int context_offset = Context::SlotOffset(var->index()); | 206 int context_offset = Context::SlotOffset(var->index()); |
207 __ mov(Operand(esi, context_offset), eax); | 207 __ mov(Operand(esi, context_offset), eax); |
208 // Update the write barrier. This clobbers all involved | 208 // Update the write barrier. This clobbers eax and ebx. |
209 // registers, so we have use a third register to avoid | 209 __ RecordWriteContextSlot(esi, |
210 // clobbering esi. | 210 context_offset, |
211 __ mov(ecx, esi); | 211 eax, |
212 __ RecordWrite(ecx, context_offset, eax, ebx); | 212 ebx, |
| 213 kDontSaveFPRegs); |
213 } | 214 } |
214 } | 215 } |
215 } | 216 } |
216 | 217 |
217 Variable* arguments = scope()->arguments(); | 218 Variable* arguments = scope()->arguments(); |
218 if (arguments != NULL) { | 219 if (arguments != NULL) { |
219 // Function uses arguments object. | 220 // Function uses arguments object. |
220 Comment cmnt(masm_, "[ Allocate arguments object"); | 221 Comment cmnt(masm_, "[ Allocate arguments object"); |
221 if (function_in_register) { | 222 if (function_in_register) { |
222 __ push(edi); | 223 __ push(edi); |
(...skipping 436 matching lines...) |
659 void FullCodeGenerator::SetVar(Variable* var, | 660 void FullCodeGenerator::SetVar(Variable* var, |
660 Register src, | 661 Register src, |
661 Register scratch0, | 662 Register scratch0, |
662 Register scratch1) { | 663 Register scratch1) { |
663 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | 664 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
664 ASSERT(!scratch0.is(src)); | 665 ASSERT(!scratch0.is(src)); |
665 ASSERT(!scratch0.is(scratch1)); | 666 ASSERT(!scratch0.is(scratch1)); |
666 ASSERT(!scratch1.is(src)); | 667 ASSERT(!scratch1.is(src)); |
667 MemOperand location = VarOperand(var, scratch0); | 668 MemOperand location = VarOperand(var, scratch0); |
668 __ mov(location, src); | 669 __ mov(location, src); |
| 670 |
669 // Emit the write barrier code if the location is in the heap. | 671 // Emit the write barrier code if the location is in the heap. |
670 if (var->IsContextSlot()) { | 672 if (var->IsContextSlot()) { |
671 int offset = Context::SlotOffset(var->index()); | 673 int offset = Context::SlotOffset(var->index()); |
672 ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi)); | 674 ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi)); |
673 __ RecordWrite(scratch0, offset, src, scratch1); | 675 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs); |
674 } | 676 } |
675 } | 677 } |
676 | 678 |
677 | 679 |
678 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, | 680 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, |
679 bool should_normalize, | 681 bool should_normalize, |
680 Label* if_true, | 682 Label* if_true, |
681 Label* if_false) { | 683 Label* if_false) { |
682 // Only prepare for bailouts before splits if we're in a test | 684 // Only prepare for bailouts before splits if we're in a test |
683 // context. Otherwise, we let the Visit function deal with the | 685 // context. Otherwise, we let the Visit function deal with the |
(...skipping 52 matching lines...) |
736 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset)); | 738 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset)); |
737 __ cmp(ebx, isolate()->factory()->with_context_map()); | 739 __ cmp(ebx, isolate()->factory()->with_context_map()); |
738 __ Check(not_equal, "Declaration in with context."); | 740 __ Check(not_equal, "Declaration in with context."); |
739 __ cmp(ebx, isolate()->factory()->catch_context_map()); | 741 __ cmp(ebx, isolate()->factory()->catch_context_map()); |
740 __ Check(not_equal, "Declaration in catch context."); | 742 __ Check(not_equal, "Declaration in catch context."); |
741 } | 743 } |
742 if (function != NULL) { | 744 if (function != NULL) { |
743 Comment cmnt(masm_, "[ Declaration"); | 745 Comment cmnt(masm_, "[ Declaration"); |
744 VisitForAccumulatorValue(function); | 746 VisitForAccumulatorValue(function); |
745 __ mov(ContextOperand(esi, variable->index()), result_register()); | 747 __ mov(ContextOperand(esi, variable->index()), result_register()); |
746 int offset = Context::SlotOffset(variable->index()); | 748 // We know that we have written a function, which is not a smi. |
747 __ mov(ebx, esi); | 749 __ RecordWriteContextSlot(esi, |
748 __ RecordWrite(ebx, offset, result_register(), ecx); | 750 Context::SlotOffset(variable->index()), |
| 751 result_register(), |
| 752 ecx, |
| 753 kDontSaveFPRegs, |
| 754 EMIT_REMEMBERED_SET, |
| 755 OMIT_SMI_CHECK); |
749 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 756 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
750 } else if (mode == Variable::CONST || mode == Variable::LET) { | 757 } else if (mode == Variable::CONST || mode == Variable::LET) { |
751 Comment cmnt(masm_, "[ Declaration"); | 758 Comment cmnt(masm_, "[ Declaration"); |
752 __ mov(ContextOperand(esi, variable->index()), | 759 __ mov(ContextOperand(esi, variable->index()), |
753 Immediate(isolate()->factory()->the_hole_value())); | 760 Immediate(isolate()->factory()->the_hole_value())); |
754 // No write barrier since the hole value is in old space. | 761 // No write barrier since the hole value is in old space. |
755 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 762 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
756 } | 763 } |
757 break; | 764 break; |
758 | 765 |
(...skipping 720 matching lines...) |
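Note on the declaration hunk above: the new RecordWriteContextSlot call passes OMIT_SMI_CHECK because, as the added comment says, the stored value is a freshly materialized function and therefore never a smi; a smi is not a heap pointer, so storing one never needs a barrier. The following compilable toy model sketches that decision under stated assumptions — the tagging scheme, type names, and NeedsWriteBarrier helper are illustrative, not V8's actual declarations.

    #include <cstdint>

    // Toy 32-bit tagging in the spirit of V8/ia32: smis keep the low bit 0,
    // heap object pointers carry a low tag bit of 1. (Illustrative only.)
    inline bool IsSmi(uintptr_t tagged) { return (tagged & 1) == 0; }

    enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };

    // A store of a smi can never create a pointer the GC has to track, so the
    // barrier may be skipped. When the caller statically knows the value is a
    // heap object (e.g. a just-created function), it asks to omit the check.
    bool NeedsWriteBarrier(uintptr_t value, SmiCheck smi_check) {
      if (smi_check == INLINE_SMI_CHECK && IsSmi(value)) return false;
      return true;  // Heap-object value: record the slot.
    }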
1479 } | 1486 } |
1480 VisitForAccumulatorValue(subexpr); | 1487 VisitForAccumulatorValue(subexpr); |
1481 | 1488 |
1482 // Store the subexpression value in the array's elements. | 1489 // Store the subexpression value in the array's elements. |
1483 __ mov(ebx, Operand(esp, 0)); // Copy of array literal. | 1490 __ mov(ebx, Operand(esp, 0)); // Copy of array literal. |
1484 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset)); | 1491 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset)); |
1485 int offset = FixedArray::kHeaderSize + (i * kPointerSize); | 1492 int offset = FixedArray::kHeaderSize + (i * kPointerSize); |
1486 __ mov(FieldOperand(ebx, offset), result_register()); | 1493 __ mov(FieldOperand(ebx, offset), result_register()); |
1487 | 1494 |
1488 // Update the write barrier for the array store. | 1495 // Update the write barrier for the array store. |
1489 __ RecordWrite(ebx, offset, result_register(), ecx); | 1496 __ RecordWriteField(ebx, offset, result_register(), ecx, kDontSaveFPRegs); |
1490 | 1497 |
1491 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); | 1498 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); |
1492 } | 1499 } |
1493 | 1500 |
1494 if (result_saved) { | 1501 if (result_saved) { |
1495 context()->PlugTOS(); | 1502 context()->PlugTOS(); |
1496 } else { | 1503 } else { |
1497 context()->Plug(eax); | 1504 context()->Plug(eax); |
1498 } | 1505 } |
1499 } | 1506 } |
(...skipping 357 matching lines...) |
1857 MemOperand location = VarOperand(var, ecx); | 1864 MemOperand location = VarOperand(var, ecx); |
1858 __ mov(edx, location); | 1865 __ mov(edx, location); |
1859 __ cmp(edx, isolate()->factory()->the_hole_value()); | 1866 __ cmp(edx, isolate()->factory()->the_hole_value()); |
1860 __ j(not_equal, &assign, Label::kNear); | 1867 __ j(not_equal, &assign, Label::kNear); |
1861 __ push(Immediate(var->name())); | 1868 __ push(Immediate(var->name())); |
1862 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1869 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
1863 __ bind(&assign); | 1870 __ bind(&assign); |
1864 __ mov(location, eax); | 1871 __ mov(location, eax); |
1865 if (var->IsContextSlot()) { | 1872 if (var->IsContextSlot()) { |
1866 __ mov(edx, eax); | 1873 __ mov(edx, eax); |
1867 __ RecordWrite(ecx, Context::SlotOffset(var->index()), edx, ebx); | 1874 int offset = Context::SlotOffset(var->index()); |
| 1875 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs); |
1868 } | 1876 } |
1869 } | 1877 } |
1870 | 1878 |
1871 } else if (var->mode() != Variable::CONST) { | 1879 } else if (var->mode() != Variable::CONST) { |
1872 // Assignment to var or initializing assignment to let. | 1880 // Assignment to var or initializing assignment to let. |
1873 if (var->IsStackAllocated() || var->IsContextSlot()) { | 1881 if (var->IsStackAllocated() || var->IsContextSlot()) { |
1874 MemOperand location = VarOperand(var, ecx); | 1882 MemOperand location = VarOperand(var, ecx); |
1875 if (FLAG_debug_code && op == Token::INIT_LET) { | 1883 if (FLAG_debug_code && op == Token::INIT_LET) { |
1876 // Check for an uninitialized let binding. | 1884 // Check for an uninitialized let binding. |
1877 __ mov(edx, location); | 1885 __ mov(edx, location); |
1878 __ cmp(edx, isolate()->factory()->the_hole_value()); | 1886 __ cmp(edx, isolate()->factory()->the_hole_value()); |
1879 __ Check(equal, "Let binding re-initialization."); | 1887 __ Check(equal, "Let binding re-initialization."); |
1880 } | 1888 } |
1881 // Perform the assignment. | 1889 // Perform the assignment. |
1882 __ mov(location, eax); | 1890 __ mov(location, eax); |
1883 if (var->IsContextSlot()) { | 1891 if (var->IsContextSlot()) { |
1884 __ mov(edx, eax); | 1892 __ mov(edx, eax); |
1885 __ RecordWrite(ecx, Context::SlotOffset(var->index()), edx, ebx); | 1893 int offset = Context::SlotOffset(var->index()); |
| 1894 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs); |
1886 } | 1895 } |
1887 } else { | 1896 } else { |
1888 ASSERT(var->IsLookupSlot()); | 1897 ASSERT(var->IsLookupSlot()); |
1889 __ push(eax); // Value. | 1898 __ push(eax); // Value. |
1890 __ push(esi); // Context. | 1899 __ push(esi); // Context. |
1891 __ push(Immediate(var->name())); | 1900 __ push(Immediate(var->name())); |
1892 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); | 1901 __ push(Immediate(Smi::FromInt(strict_mode_flag()))); |
1893 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 1902 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
1894 } | 1903 } |
1895 } | 1904 } |
(...skipping 945 matching lines...) |
2841 Label done; | 2850 Label done; |
2842 // If the object is a smi, return the value. | 2851 // If the object is a smi, return the value. |
2843 __ JumpIfSmi(ebx, &done, Label::kNear); | 2852 __ JumpIfSmi(ebx, &done, Label::kNear); |
2844 | 2853 |
2845 // If the object is not a value type, return the value. | 2854 // If the object is not a value type, return the value. |
2846 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx); | 2855 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx); |
2847 __ j(not_equal, &done, Label::kNear); | 2856 __ j(not_equal, &done, Label::kNear); |
2848 | 2857 |
2849 // Store the value. | 2858 // Store the value. |
2850 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax); | 2859 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax); |
| 2860 |
2851 // Update the write barrier. Save the value as it will be | 2861 // Update the write barrier. Save the value as it will be |
2852 // overwritten by the write barrier code and is needed afterward. | 2862 // overwritten by the write barrier code and is needed afterward. |
2853 __ mov(edx, eax); | 2863 __ mov(edx, eax); |
2854 __ RecordWrite(ebx, JSValue::kValueOffset, edx, ecx); | 2864 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs); |
2855 | 2865 |
2856 __ bind(&done); | 2866 __ bind(&done); |
2857 context()->Plug(eax); | 2867 context()->Plug(eax); |
2858 } | 2868 } |
2859 | 2869 |
2860 | 2870 |
2861 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) { | 2871 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) { |
2862 ASSERT_EQ(args->length(), 1); | 2872 ASSERT_EQ(args->length(), 1); |
2863 | 2873 |
2864 // Load the argument on the stack and call the stub. | 2874 // Load the argument on the stack and call the stub. |
(...skipping 272 matching lines...) |
3137 // Bring addresses into index1 and index2. | 3147 // Bring addresses into index1 and index2. |
3138 __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1)); | 3148 __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1)); |
3139 __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2)); | 3149 __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2)); |
3140 | 3150 |
3141 // Swap elements. Use object and temp as scratch registers. | 3151 // Swap elements. Use object and temp as scratch registers. |
3142 __ mov(object, Operand(index_1, 0)); | 3152 __ mov(object, Operand(index_1, 0)); |
3143 __ mov(temp, Operand(index_2, 0)); | 3153 __ mov(temp, Operand(index_2, 0)); |
3144 __ mov(Operand(index_2, 0), object); | 3154 __ mov(Operand(index_2, 0), object); |
3145 __ mov(Operand(index_1, 0), temp); | 3155 __ mov(Operand(index_1, 0), temp); |
3146 | 3156 |
3147 Label new_space; | 3157 Label no_remembered_set; |
3148 __ InNewSpace(elements, temp, equal, &new_space); | 3158 __ CheckPageFlag(elements, |
| 3159 temp, |
| 3160 1 << MemoryChunk::SCAN_ON_SCAVENGE, |
| 3161 not_zero, |
| 3162 &no_remembered_set, |
| 3163 Label::kNear); |
| 3164 // Possible optimization: do a check that both values are Smis |
| 3165 // (or them and test against Smi mask.) |
3149 | 3166 |
3150 __ mov(object, elements); | 3167 // We are swapping two objects in an array and the incremental marker never |
3151 __ RecordWriteHelper(object, index_1, temp); | 3168 // pauses in the middle of scanning a single object. Therefore the |
3152 __ RecordWriteHelper(elements, index_2, temp); | 3169 // incremental marker is not disturbed, so we don't need to call the |
| 3170 // RecordWrite stub that notifies the incremental marker. |
| 3171 __ RememberedSetHelper( |
| 3172 index_1, temp, kDontSaveFPRegs, MacroAssembler::kFallThroughAtEnd); |
| 3173 __ RememberedSetHelper( |
| 3174 index_2, temp, kDontSaveFPRegs, MacroAssembler::kFallThroughAtEnd); |
3153 | 3175 |
3154 __ bind(&new_space); | 3176 __ bind(&no_remembered_set); |
| 3177 |
3155 // We are done. Drop elements from the stack, and return undefined. | 3178 // We are done. Drop elements from the stack, and return undefined. |
3156 __ add(Operand(esp), Immediate(3 * kPointerSize)); | 3179 __ add(Operand(esp), Immediate(3 * kPointerSize)); |
3157 __ mov(eax, isolate()->factory()->undefined_value()); | 3180 __ mov(eax, isolate()->factory()->undefined_value()); |
3158 __ jmp(&done); | 3181 __ jmp(&done); |
3159 | 3182 |
3160 __ bind(&slow_case); | 3183 __ bind(&slow_case); |
3161 __ CallRuntime(Runtime::kSwapElements, 3); | 3184 __ CallRuntime(Runtime::kSwapElements, 3); |
3162 | 3185 |
3163 __ bind(&done); | 3186 __ bind(&done); |
3164 decrement_stack_height(3); | 3187 decrement_stack_height(3); |
(...skipping 1112 matching lines...) |
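Note on the swap-elements hunk above: the rewritten barrier first tests the page's SCAN_ON_SCAVENGE flag and only records the two slots via RememberedSetHelper when that flag is clear, since a page that is rescanned wholesale on scavenge needs no per-slot entries; and because both stored values were already reachable from the same array, the incremental marker needs no notification. The small stand-alone C++ model below sketches that decision; the PageModel and RememberedSetModel types are illustrative stand-ins, not V8's.

    #include <cstdint>

    // Illustrative stand-ins for a heap page and its remembered set.
    struct PageModel {
      static const uint32_t kScanOnScavenge = 1u << 0;
      uint32_t flags;
    };

    struct RememberedSetModel {
      void Insert(void** slot) { /* record slot for the next scavenge */ }
    };

    // Mirrors the generated fast path: skip recording entirely when the page
    // is already scanned on scavenge, otherwise record both swapped slots.
    void RecordSwappedSlots(PageModel* elements_page, RememberedSetModel* set,
                            void** slot_1, void** slot_2) {
      if (elements_page->flags & PageModel::kScanOnScavenge) return;
      set->Insert(slot_1);
      set->Insert(slot_2);
    }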
4277 *context_length = 0; | 4300 *context_length = 0; |
4278 return previous_; | 4301 return previous_; |
4279 } | 4302 } |
4280 | 4303 |
4281 | 4304 |
4282 #undef __ | 4305 #undef __ |
4283 | 4306 |
4284 } } // namespace v8::internal | 4307 } } // namespace v8::internal |
4285 | 4308 |
4286 #endif // V8_TARGET_ARCH_IA32 | 4309 #endif // V8_TARGET_ARCH_IA32 |