| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
| 8 | 8 |
| 9 // Note on Mips implementation: | 9 // Note on Mips implementation: |
| 10 // | 10 // |
| (...skipping 2246 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2257 | 2257 |
| 2258 __ bind(&done); | 2258 __ bind(&done); |
| 2259 context()->Plug(result_register()); | 2259 context()->Plug(result_register()); |
| 2260 } | 2260 } |
| 2261 | 2261 |
| 2262 | 2262 |
| 2263 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { | 2263 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { |
| 2264 Label gc_required; | 2264 Label gc_required; |
| 2265 Label allocated; | 2265 Label allocated; |
| 2266 | 2266 |
| 2267 Handle<Map> map(isolate()->native_context()->iterator_result_map()); | 2267 const int instance_size = 5 * kPointerSize; |
| 2268 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(), |
| 2269 instance_size); |
| 2268 | 2270 |
| 2269 __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT); | 2271 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT); |
| 2270 __ jmp(&allocated); | 2272 __ jmp(&allocated); |
| 2271 | 2273 |
| 2272 __ bind(&gc_required); | 2274 __ bind(&gc_required); |
| 2273 __ Push(Smi::FromInt(map->instance_size())); | 2275 __ Push(Smi::FromInt(instance_size)); |
| 2274 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | 2276 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
| 2275 __ lw(context_register(), | 2277 __ lw(context_register(), |
| 2276 MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2278 MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2277 | 2279 |
| 2278 __ bind(&allocated); | 2280 __ bind(&allocated); |
| 2279 __ li(a1, Operand(map)); | 2281 __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| 2282 __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset)); |
| 2283 __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX)); |
| 2280 __ pop(a2); | 2284 __ pop(a2); |
| 2281 __ li(a3, Operand(isolate()->factory()->ToBoolean(done))); | 2285 __ li(a3, Operand(isolate()->factory()->ToBoolean(done))); |
| 2282 __ li(t0, Operand(isolate()->factory()->empty_fixed_array())); | 2286 __ li(t0, Operand(isolate()->factory()->empty_fixed_array())); |
| 2283 DCHECK_EQ(map->instance_size(), 5 * kPointerSize); | |
| 2284 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 2287 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 2285 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | 2288 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
| 2286 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 2289 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 2287 __ sw(a2, | 2290 __ sw(a2, |
| 2288 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset)); | 2291 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset)); |
| 2289 __ sw(a3, | 2292 __ sw(a3, |
| 2290 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset)); | 2293 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset)); |
| 2291 | 2294 |
| 2292 // Only the value field needs a write barrier, as the other values are in the | 2295 // Only the value field needs a write barrier, as the other values are in the |
| 2293 // root set. | 2296 // root set. |
| (...skipping 468 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2762 CallIC(ic); | 2765 CallIC(ic); |
| 2763 | 2766 |
| 2764 RecordJSReturnSite(expr); | 2767 RecordJSReturnSite(expr); |
| 2765 // Restore context register. | 2768 // Restore context register. |
| 2766 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2769 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2767 context()->DropAndPlug(1, v0); | 2770 context()->DropAndPlug(1, v0); |
| 2768 } | 2771 } |
| 2769 | 2772 |
| 2770 | 2773 |
| 2771 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | 2774 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
| 2772 // t2: copy of the first argument or undefined if it doesn't exist. | 2775 // t3: copy of the first argument or undefined if it doesn't exist. |
| 2773 if (arg_count > 0) { | 2776 if (arg_count > 0) { |
| 2774 __ lw(t2, MemOperand(sp, arg_count * kPointerSize)); | 2777 __ lw(t3, MemOperand(sp, arg_count * kPointerSize)); |
| 2775 } else { | 2778 } else { |
| 2776 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); | 2779 __ LoadRoot(t3, Heap::kUndefinedValueRootIndex); |
| 2777 } | 2780 } |
| 2778 | 2781 |
| 2782 // t2: the enclosing function. |
| 2783 __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 2784 |
| 2779 // t1: the receiver of the enclosing function. | 2785 // t1: the receiver of the enclosing function. |
| 2780 int receiver_offset = 2 + info_->scope()->num_parameters(); | 2786 int receiver_offset = 2 + info_->scope()->num_parameters(); |
| 2781 __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize)); | 2787 __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize)); |
| 2782 | 2788 |
| 2783 // t0: the strict mode. | 2789 // t0: the strict mode. |
| 2784 __ li(t0, Operand(Smi::FromInt(strict_mode()))); | 2790 __ li(t0, Operand(Smi::FromInt(strict_mode()))); |
| 2785 | 2791 |
| 2786 // a1: the start position of the scope the call resides in. | 2792 // a1: the start position of the scope the call resides in. |
| 2787 __ li(a1, Operand(Smi::FromInt(scope()->start_position()))); | 2793 __ li(a1, Operand(Smi::FromInt(scope()->start_position()))); |
| 2788 | 2794 |
| 2789 // Do the runtime call. | 2795 // Do the runtime call. |
| 2796 __ Push(t3); |
| 2790 __ Push(t2, t1, t0, a1); | 2797 __ Push(t2, t1, t0, a1); |
| 2791 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); | 2798 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6); |
| 2792 } | 2799 } |
| 2793 | 2800 |
| 2794 | 2801 |
| 2795 void FullCodeGenerator::VisitCall(Call* expr) { | 2802 void FullCodeGenerator::VisitCall(Call* expr) { |
| 2796 #ifdef DEBUG | 2803 #ifdef DEBUG |
| 2797 // We want to verify that RecordJSReturnSite gets called on all paths | 2804 // We want to verify that RecordJSReturnSite gets called on all paths |
| 2798 // through this function. Avoid early returns. | 2805 // through this function. Avoid early returns. |
| 2799 expr->return_is_recorded_ = false; | 2806 expr->return_is_recorded_ = false; |
| 2800 #endif | 2807 #endif |
| 2801 | 2808 |
| (...skipping 2154 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4956 Assembler::target_address_at(pc_immediate_load_address)) == | 4963 Assembler::target_address_at(pc_immediate_load_address)) == |
| 4957 reinterpret_cast<uint32_t>( | 4964 reinterpret_cast<uint32_t>( |
| 4958 isolate->builtins()->OsrAfterStackCheck()->entry())); | 4965 isolate->builtins()->OsrAfterStackCheck()->entry())); |
| 4959 return OSR_AFTER_STACK_CHECK; | 4966 return OSR_AFTER_STACK_CHECK; |
| 4960 } | 4967 } |
| 4961 | 4968 |
| 4962 | 4969 |
| 4963 } } // namespace v8::internal | 4970 } } // namespace v8::internal |
| 4964 | 4971 |
| 4965 #endif // V8_TARGET_ARCH_MIPS | 4972 #endif // V8_TARGET_ARCH_MIPS |
| OLD | NEW |