OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 147 matching lines...)
158 | 158 |
159 // Setup the object header. | 159 // Setup the object header. |
160 __ LoadRoot(r2, Heap::kFunctionContextMapRootIndex); | 160 __ LoadRoot(r2, Heap::kFunctionContextMapRootIndex); |
161 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); | 161 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
162 __ mov(r2, Operand(Smi::FromInt(length))); | 162 __ mov(r2, Operand(Smi::FromInt(length))); |
163 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); | 163 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); |
164 | 164 |
165 // Setup the fixed slots. | 165 // Setup the fixed slots. |
166 __ mov(r1, Operand(Smi::FromInt(0))); | 166 __ mov(r1, Operand(Smi::FromInt(0))); |
167 __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX))); | 167 __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX))); |
168 __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX))); | |
169 __ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 168 __ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
170 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX))); | 169 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX))); |
171 | 170 |
172 // Copy the global object from the previous context. | 171 // Copy the global object from the previous context. |
173 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 172 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
174 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX))); | 173 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX))); |
175 | 174 |
176 // Initialize the rest of the slots to undefined. | 175 // Initialize the rest of the slots to undefined. |
177 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); | 176 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); |
178 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | 177 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { |
(...skipping 6422 matching lines...)
6601 __ mov(result, Operand(0)); | 6600 __ mov(result, Operand(0)); |
6602 __ Ret(); | 6601 __ Ret(); |
6603 } | 6602 } |
6604 | 6603 |
6605 | 6604 |
6606 #undef __ | 6605 #undef __ |
6607 | 6606 |
6608 } } // namespace v8::internal | 6607 } } // namespace v8::internal |
6609 | 6608 |
6610 #endif // V8_TARGET_ARCH_ARM | 6609 #endif // V8_TARGET_ARCH_ARM |