| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 140 matching lines...) |
| 151 r0, | 151 r0, |
| 152 r1, | 152 r1, |
| 153 r2, | 153 r2, |
| 154 &gc, | 154 &gc, |
| 155 TAG_OBJECT); | 155 TAG_OBJECT); |
| 156 | 156 |
| 157 // Load the function from the stack. | 157 // Load the function from the stack. |
| 158 __ ldr(r3, MemOperand(sp, 0)); | 158 __ ldr(r3, MemOperand(sp, 0)); |
| 159 | 159 |
| 160 // Setup the object header. | 160 // Setup the object header. |
| 161 __ LoadRoot(r2, Heap::kContextMapRootIndex); | 161 __ LoadRoot(r2, Heap::kFunctionContextMapRootIndex); |
| 162 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); | 162 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 163 __ mov(r2, Operand(Smi::FromInt(length))); | 163 __ mov(r2, Operand(Smi::FromInt(length))); |
| 164 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); | 164 __ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset)); |
| 165 | 165 |
| 166 // Setup the fixed slots. | 166 // Setup the fixed slots. |
| 167 __ mov(r1, Operand(Smi::FromInt(0))); | 167 __ mov(r1, Operand(Smi::FromInt(0))); |
| 168 __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX))); | 168 __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX))); |
| 169 __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX))); | 169 __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX))); |
| 170 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 170 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 171 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX))); | 171 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX))); |
| 172 | 172 |
| 173 // Copy the global object from the surrounding context. | 173 // Copy the global object from the surrounding context. |
| 174 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 174 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 175 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX))); | 175 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 176 | 176 |
| 177 // Initialize the rest of the slots to undefined. | 177 // Initialize the rest of the slots to undefined. |
| 178 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); | 178 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); |
| 179 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | 179 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { |
| 180 __ str(r1, MemOperand(r0, Context::SlotOffset(i))); | 180 __ str(r1, MemOperand(r0, Context::SlotOffset(i))); |
| 181 } | 181 } |
| 182 | 182 |
| 183 // Remove the on-stack argument and return. | 183 // Remove the on-stack argument and return. |
| 184 __ mov(cp, r0); | 184 __ mov(cp, r0); |
| 185 __ pop(); | 185 __ pop(); |
| 186 __ Ret(); | 186 __ Ret(); |
| 187 | 187 |
| 188 // Need to collect. Call into runtime system. | 188 // Need to collect. Call into runtime system. |
| 189 __ bind(&gc); | 189 __ bind(&gc); |
| 190 __ TailCallRuntime(Runtime::kNewContext, 1, 1); | 190 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); |
| 191 } | 191 } |
| 192 | 192 |
| 193 | 193 |
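For readers following the macro-assembler calls in the hunk above, the sketch below mirrors the object FastNewContextStub::Generate builds: a function context laid out like a FixedArray of `length` tagged slots, with the fixed slots filled in and the remaining slots set to undefined. This is a plain C++ illustration under stated assumptions, not V8 source; the `Slot` typedef, the enum ordering, and the struct itself are made up for the sketch.

```cpp
// Minimal sketch (not V8 code) of the function context the stub constructs.
// Assumes len >= kMinContextSlots, mirroring Context::MIN_CONTEXT_SLOTS.
#include <cstdint>
#include <vector>

using Slot = std::uintptr_t;  // one pointer-sized tagged value

enum FixedSlotIndex {         // index values are assumptions for illustration
  kClosure,    // Context::CLOSURE_INDEX   <- function loaded from [sp]
  kFContext,   // Context::FCONTEXT_INDEX  <- the new context itself
  kPrevious,   // Context::PREVIOUS_INDEX  <- Smi 0 in the stub
  kExtension,  // Context::EXTENSION_INDEX <- Smi 0 in the stub
  kGlobal,     // Context::GLOBAL_INDEX    <- copied from the current context (cp)
  kMinContextSlots
};

struct FunctionContextSketch {
  Slot map;                 // the function-context map root in the stub
  int length;               // stored as a Smi at FixedArray::kLengthOffset
  std::vector<Slot> slots;  // the context's slots

  FunctionContextSketch(Slot closure, Slot global, Slot undefined, int len)
      : map(0 /* function-context map */),
        length(len),
        slots(len, undefined) {  // slots past kMinContextSlots stay undefined
    slots[kClosure] = closure;
    slots[kFContext] = reinterpret_cast<Slot>(this);
    slots[kPrevious] = 0;
    slots[kExtension] = 0;
    slots[kGlobal] = global;
  }
};
```

If allocation fails, the stub instead falls through to the &gc label and tail-calls Runtime::kNewFunctionContext, as shown at the end of the hunk above.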
| 194 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 194 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
| 195 // Stack layout on entry: | 195 // Stack layout on entry: |
| 196 // | 196 // |
| 197 // [sp]: constant elements. | 197 // [sp]: constant elements. |
| 198 // [sp + kPointerSize]: literal index. | 198 // [sp + kPointerSize]: literal index. |
| 199 // [sp + (2 * kPointerSize)]: literals array. | 199 // [sp + (2 * kPointerSize)]: literals array. |
| 200 | 200 |
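A hedged restatement of the entry stack layout documented in the comment above, written out as a plain C++ struct from the lowest address ([sp]) upward; the struct and field names are assumptions for this sketch, and the stub body itself is elided in this hunk.

```cpp
// Illustrative only: the three inputs FastCloneShallowArrayStub expects on
// entry, ordered from [sp] upward.
#include <cstdint>

struct FastCloneShallowArrayEntryStack {
  std::uintptr_t constant_elements;  // [sp]
  std::uintptr_t literal_index;      // [sp + kPointerSize]
  std::uintptr_t literals_array;     // [sp + 2 * kPointerSize]
};
```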
| (...skipping 6215 matching lines...) |
| 6416 __ mov(result, Operand(0)); | 6416 __ mov(result, Operand(0)); |
| 6417 __ Ret(); | 6417 __ Ret(); |
| 6418 } | 6418 } |
| 6419 | 6419 |
| 6420 | 6420 |
| 6421 #undef __ | 6421 #undef __ |
| 6422 | 6422 |
| 6423 } } // namespace v8::internal | 6423 } } // namespace v8::internal |
| 6424 | 6424 |
| 6425 #endif // V8_TARGET_ARCH_ARM | 6425 #endif // V8_TARGET_ARCH_ARM |