OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
150 __ AllocateInNewSpace(FixedArray::SizeFor(length), | 150 __ AllocateInNewSpace(FixedArray::SizeFor(length), |
151 v0, | 151 v0, |
152 a1, | 152 a1, |
153 a2, | 153 a2, |
154 &gc, | 154 &gc, |
155 TAG_OBJECT); | 155 TAG_OBJECT); |
156 | 156 |
157 // Load the function from the stack. | 157 // Load the function from the stack. |
158 __ lw(a3, MemOperand(sp, 0)); | 158 __ lw(a3, MemOperand(sp, 0)); |
159 | 159 |
160 // Setup the object header. | 160 // Set up the object header. |
161 __ LoadRoot(a2, Heap::kFunctionContextMapRootIndex); | 161 __ LoadRoot(a2, Heap::kFunctionContextMapRootIndex); |
162 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); | 162 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); |
163 __ li(a2, Operand(Smi::FromInt(length))); | 163 __ li(a2, Operand(Smi::FromInt(length))); |
164 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | 164 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
165 | 165 |
166 // Setup the fixed slots. | 166 // Set up the fixed slots. |
167 __ li(a1, Operand(Smi::FromInt(0))); | 167 __ li(a1, Operand(Smi::FromInt(0))); |
168 __ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX))); | 168 __ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX))); |
169 __ sw(cp, MemOperand(v0, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 169 __ sw(cp, MemOperand(v0, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
170 __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::EXTENSION_INDEX))); | 170 __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::EXTENSION_INDEX))); |
171 | 171 |
172 // Copy the global object from the previous context. | 172 // Copy the global object from the previous context. |
173 __ lw(a1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 173 __ lw(a1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
174 __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_INDEX))); | 174 __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_INDEX))); |
175 | 175 |
176 // Initialize the rest of the slots to undefined. | 176 // Initialize the rest of the slots to undefined. |
(...skipping 24 matching lines...) Expand all Loading... |
201 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 201 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
202 __ AllocateInNewSpace(FixedArray::SizeFor(length), | 202 __ AllocateInNewSpace(FixedArray::SizeFor(length), |
203 v0, a1, a2, &gc, TAG_OBJECT); | 203 v0, a1, a2, &gc, TAG_OBJECT); |
204 | 204 |
205 // Load the function from the stack. | 205 // Load the function from the stack. |
206 __ lw(a3, MemOperand(sp, 0)); | 206 __ lw(a3, MemOperand(sp, 0)); |
207 | 207 |
208 // Load the serialized scope info from the stack. | 208 // Load the serialized scope info from the stack. |
209 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); | 209 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); |
210 | 210 |
211 // Setup the object header. | 211 // Set up the object header. |
212 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex); | 212 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex); |
213 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); | 213 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); |
214 __ li(a2, Operand(Smi::FromInt(length))); | 214 __ li(a2, Operand(Smi::FromInt(length))); |
215 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | 215 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
216 | 216 |
217 // If this block context is nested in the global context we get a smi | 217 // If this block context is nested in the global context we get a smi |
218 // sentinel instead of a function. The block context should get the | 218 // sentinel instead of a function. The block context should get the |
219 // canonical empty function of the global context as its closure which | 219 // canonical empty function of the global context as its closure which |
220 // we still have to look up. | 220 // we still have to look up. |
221 Label after_sentinel; | 221 Label after_sentinel; |
222 __ JumpIfNotSmi(a3, &after_sentinel); | 222 __ JumpIfNotSmi(a3, &after_sentinel); |
223 if (FLAG_debug_code) { | 223 if (FLAG_debug_code) { |
224 const char* message = "Expected 0 as a Smi sentinel"; | 224 const char* message = "Expected 0 as a Smi sentinel"; |
225 __ Assert(eq, message, a3, Operand(zero_reg)); | 225 __ Assert(eq, message, a3, Operand(zero_reg)); |
226 } | 226 } |
227 __ lw(a3, GlobalObjectOperand()); | 227 __ lw(a3, GlobalObjectOperand()); |
228 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset)); | 228 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset)); |
229 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX)); | 229 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX)); |
230 __ bind(&after_sentinel); | 230 __ bind(&after_sentinel); |
231 | 231 |
232 // Setup the fixed slots. | 232 // Set up the fixed slots. |
233 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX)); | 233 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX)); |
234 __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX)); | 234 __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX)); |
235 __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX)); | 235 __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX)); |
236 | 236 |
237 // Copy the global object from the previous context. | 237 // Copy the global object from the previous context. |
238 __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX)); | 238 __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX)); |
239 __ sw(a1, ContextOperand(v0, Context::GLOBAL_INDEX)); | 239 __ sw(a1, ContextOperand(v0, Context::GLOBAL_INDEX)); |
240 | 240 |
241 // Initialize the rest of the slots to the hole value. | 241 // Initialize the rest of the slots to the hole value. |
242 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); | 242 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); |
(...skipping 3755 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3998 | 3998 |
3999 // Compute the argv pointer in a callee-saved register. | 3999 // Compute the argv pointer in a callee-saved register. |
4000 __ sll(s1, a0, kPointerSizeLog2); | 4000 __ sll(s1, a0, kPointerSizeLog2); |
4001 __ Addu(s1, sp, s1); | 4001 __ Addu(s1, sp, s1); |
4002 __ Subu(s1, s1, Operand(kPointerSize)); | 4002 __ Subu(s1, s1, Operand(kPointerSize)); |
4003 | 4003 |
4004 // Enter the exit frame that transitions from JavaScript to C++. | 4004 // Enter the exit frame that transitions from JavaScript to C++. |
4005 FrameScope scope(masm, StackFrame::MANUAL); | 4005 FrameScope scope(masm, StackFrame::MANUAL); |
4006 __ EnterExitFrame(save_doubles_); | 4006 __ EnterExitFrame(save_doubles_); |
4007 | 4007 |
4008 // Setup argc and the builtin function in callee-saved registers. | 4008 // Set up argc and the builtin function in callee-saved registers. |
4009 __ mov(s0, a0); | 4009 __ mov(s0, a0); |
4010 __ mov(s2, a1); | 4010 __ mov(s2, a1); |
4011 | 4011 |
4012 // s0: number of arguments (C callee-saved) | 4012 // s0: number of arguments (C callee-saved) |
4013 // s1: pointer to first argument (C callee-saved) | 4013 // s1: pointer to first argument (C callee-saved) |
4014 // s2: pointer to builtin function (C callee-saved) | 4014 // s2: pointer to builtin function (C callee-saved) |
4015 | 4015 |
4016 Label throw_normal_exception; | 4016 Label throw_normal_exception; |
4017 Label throw_termination_exception; | 4017 Label throw_termination_exception; |
4018 Label throw_out_of_memory_exception; | 4018 Label throw_out_of_memory_exception; |
(...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4090 | 4090 |
4091 // We build an EntryFrame. | 4091 // We build an EntryFrame. |
4092 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used. | 4092 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used. |
4093 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 4093 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
4094 __ li(t2, Operand(Smi::FromInt(marker))); | 4094 __ li(t2, Operand(Smi::FromInt(marker))); |
4095 __ li(t1, Operand(Smi::FromInt(marker))); | 4095 __ li(t1, Operand(Smi::FromInt(marker))); |
4096 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress, | 4096 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress, |
4097 isolate))); | 4097 isolate))); |
4098 __ lw(t0, MemOperand(t0)); | 4098 __ lw(t0, MemOperand(t0)); |
4099 __ Push(t3, t2, t1, t0); | 4099 __ Push(t3, t2, t1, t0); |
4100 // Setup frame pointer for the frame to be pushed. | 4100 // Set up frame pointer for the frame to be pushed. |
4101 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset); | 4101 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset); |
4102 | 4102 |
4103 // Registers: | 4103 // Registers: |
4104 // a0: entry_address | 4104 // a0: entry_address |
4105 // a1: function | 4105 // a1: function |
4106 // a2: receiver_pointer | 4106 // a2: receiver_pointer |
4107 // a3: argc | 4107 // a3: argc |
4108 // s0: argv | 4108 // s0: argv |
4109 // | 4109 // |
4110 // Stack: | 4110 // Stack: |
(...skipping 466 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4577 // v0 = address of new object (tagged) | 4577 // v0 = address of new object (tagged) |
4578 // a1 = mapped parameter count (tagged) | 4578 // a1 = mapped parameter count (tagged) |
4579 // a2 = argument count (tagged) | 4579 // a2 = argument count (tagged) |
4580 // t0 = address of boilerplate object (tagged) | 4580 // t0 = address of boilerplate object (tagged) |
4581 // Copy the JS object part. | 4581 // Copy the JS object part. |
4582 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 4582 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
4583 __ lw(a3, FieldMemOperand(t0, i)); | 4583 __ lw(a3, FieldMemOperand(t0, i)); |
4584 __ sw(a3, FieldMemOperand(v0, i)); | 4584 __ sw(a3, FieldMemOperand(v0, i)); |
4585 } | 4585 } |
4586 | 4586 |
4587 // Setup the callee in-object property. | 4587 // Set up the callee in-object property. |
4588 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); | 4588 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); |
4589 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); | 4589 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); |
4590 const int kCalleeOffset = JSObject::kHeaderSize + | 4590 const int kCalleeOffset = JSObject::kHeaderSize + |
4591 Heap::kArgumentsCalleeIndex * kPointerSize; | 4591 Heap::kArgumentsCalleeIndex * kPointerSize; |
4592 __ sw(a3, FieldMemOperand(v0, kCalleeOffset)); | 4592 __ sw(a3, FieldMemOperand(v0, kCalleeOffset)); |
4593 | 4593 |
4594 // Use the length (smi tagged) and set that as an in-object property too. | 4594 // Use the length (smi tagged) and set that as an in-object property too. |
4595 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 4595 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
4596 const int kLengthOffset = JSObject::kHeaderSize + | 4596 const int kLengthOffset = JSObject::kHeaderSize + |
4597 Heap::kArgumentsLengthIndex * kPointerSize; | 4597 Heap::kArgumentsLengthIndex * kPointerSize; |
4598 __ sw(a2, FieldMemOperand(v0, kLengthOffset)); | 4598 __ sw(a2, FieldMemOperand(v0, kLengthOffset)); |
4599 | 4599 |
4600 // Setup the elements pointer in the allocated arguments object. | 4600 // Set up the elements pointer in the allocated arguments object. |
4601 // If we allocated a parameter map, t0 will point there, otherwise | 4601 // If we allocated a parameter map, t0 will point there, otherwise |
4602 // it will point to the backing store. | 4602 // it will point to the backing store. |
4603 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSize)); | 4603 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSize)); |
4604 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 4604 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
4605 | 4605 |
4606 // v0 = address of new object (tagged) | 4606 // v0 = address of new object (tagged) |
4607 // a1 = mapped parameter count (tagged) | 4607 // a1 = mapped parameter count (tagged) |
4608 // a2 = argument count (tagged) | 4608 // a2 = argument count (tagged) |
4609 // t0 = address of parameter map or backing store (tagged) | 4609 // t0 = address of parameter map or backing store (tagged) |
4610 // Initialize parameter map. If there are no mapped arguments, we're done. | 4610 // Initialize parameter map. If there are no mapped arguments, we're done. |
(...skipping 156 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4767 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); | 4767 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); |
4768 __ sw(a1, FieldMemOperand(v0, JSObject::kHeaderSize + | 4768 __ sw(a1, FieldMemOperand(v0, JSObject::kHeaderSize + |
4769 Heap::kArgumentsLengthIndex * kPointerSize)); | 4769 Heap::kArgumentsLengthIndex * kPointerSize)); |
4770 | 4770 |
4771 Label done; | 4771 Label done; |
4772 __ Branch(&done, eq, a1, Operand(zero_reg)); | 4772 __ Branch(&done, eq, a1, Operand(zero_reg)); |
4773 | 4773 |
4774 // Get the parameters pointer from the stack. | 4774 // Get the parameters pointer from the stack. |
4775 __ lw(a2, MemOperand(sp, 1 * kPointerSize)); | 4775 __ lw(a2, MemOperand(sp, 1 * kPointerSize)); |
4776 | 4776 |
4777 // Setup the elements pointer in the allocated arguments object and | 4777 // Set up the elements pointer in the allocated arguments object and |
4778 // initialize the header in the elements fixed array. | 4778 // initialize the header in the elements fixed array. |
4779 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSizeStrict)); | 4779 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSizeStrict)); |
4780 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 4780 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
4781 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex); | 4781 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex); |
4782 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset)); | 4782 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset)); |
4783 __ sw(a1, FieldMemOperand(t0, FixedArray::kLengthOffset)); | 4783 __ sw(a1, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
4784 // Untag the length for the loop. | 4784 // Untag the length for the loop. |
4785 __ srl(a1, a1, kSmiTagSize); | 4785 __ srl(a1, a1, kSmiTagSize); |
4786 | 4786 |
4787 // Copy the fixed array slots. | 4787 // Copy the fixed array slots. |
4788 Label loop; | 4788 Label loop; |
4789 // Setup t0 to point to the first array slot. | 4789 // Set up t0 to point to the first array slot. |
4790 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 4790 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
4791 __ bind(&loop); | 4791 __ bind(&loop); |
4792 // Pre-decrement a2 with kPointerSize on each iteration. | 4792 // Pre-decrement a2 with kPointerSize on each iteration. |
4793 // Pre-decrement in order to skip receiver. | 4793 // Pre-decrement in order to skip receiver. |
4794 __ Addu(a2, a2, Operand(-kPointerSize)); | 4794 __ Addu(a2, a2, Operand(-kPointerSize)); |
4795 __ lw(a3, MemOperand(a2)); | 4795 __ lw(a3, MemOperand(a2)); |
4796 // Post-increment t0 with kPointerSize on each iteration. | 4796 // Post-increment t0 with kPointerSize on each iteration. |
4797 __ sw(a3, MemOperand(t0)); | 4797 __ sw(a3, MemOperand(t0)); |
4798 __ Addu(t0, t0, Operand(kPointerSize)); | 4798 __ Addu(t0, t0, Operand(kPointerSize)); |
4799 __ Subu(a1, a1, Operand(1)); | 4799 __ Subu(a1, a1, Operand(1)); |
(...skipping 618 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5418 { | 5418 { |
5419 Handle<Code> adaptor = | 5419 Handle<Code> adaptor = |
5420 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 5420 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
5421 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 5421 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
5422 } | 5422 } |
5423 | 5423 |
5424 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | 5424 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
5425 // of the original receiver from the call site). | 5425 // of the original receiver from the call site). |
5426 __ bind(&non_function); | 5426 __ bind(&non_function); |
5427 __ sw(a1, MemOperand(sp, argc_ * kPointerSize)); | 5427 __ sw(a1, MemOperand(sp, argc_ * kPointerSize)); |
5428 __ li(a0, Operand(argc_)); // Setup the number of arguments. | 5428 __ li(a0, Operand(argc_)); // Set up the number of arguments. |
5429 __ mov(a2, zero_reg); | 5429 __ mov(a2, zero_reg); |
5430 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); | 5430 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); |
5431 __ SetCallKind(t1, CALL_AS_METHOD); | 5431 __ SetCallKind(t1, CALL_AS_METHOD); |
5432 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 5432 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
5433 RelocInfo::CODE_TARGET); | 5433 RelocInfo::CODE_TARGET); |
5434 } | 5434 } |
5435 | 5435 |
5436 | 5436 |
5437 // Unfortunately you have to run without snapshots to see most of these | 5437 // Unfortunately you have to run without snapshots to see most of these |
5438 // names in the profile since most compare stubs end up in the snapshot. | 5438 // names in the profile since most compare stubs end up in the snapshot. |
(...skipping 2142 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
7581 __ Ret(USE_DELAY_SLOT); | 7581 __ Ret(USE_DELAY_SLOT); |
7582 __ mov(v0, a0); | 7582 __ mov(v0, a0); |
7583 } | 7583 } |
7584 | 7584 |
7585 | 7585 |
7586 #undef __ | 7586 #undef __ |
7587 | 7587 |
7588 } } // namespace v8::internal | 7588 } } // namespace v8::internal |
7589 | 7589 |
7590 #endif // V8_TARGET_ARCH_MIPS | 7590 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |