OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
84 __ push(edx); | 84 __ push(edx); |
85 __ push(Immediate(Factory::false_value())); | 85 __ push(Immediate(Factory::false_value())); |
86 __ push(ecx); // Restore return address. | 86 __ push(ecx); // Restore return address. |
87 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 87 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
88 } | 88 } |
89 | 89 |
90 | 90 |
// Generates the fast-path stub for allocating a new function context.
// Attempts an inline new-space allocation; on failure, falls back to the
// Runtime::kNewContext runtime call.
//
// Stack on entry:  [esp + kPointerSize] -- the closure (JSFunction) the
//                                          context belongs to.
// On success:      esi holds the new (tagged) context, and the single
//                  on-stack parameter is removed by the ret.
// Clobbers:        eax, ebx, ecx (allocation scratch / slot values).
//
// NOTE(review): slots_ here is treated as the TOTAL context length,
// including the fixed Context::MIN_CONTEXT_SLOTS header slots -- see the
// allocation size, the kLengthOffset store, and the fill loop below.
void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  // Size = slot array + FixedArray header; result in eax, tagged as a
  // heap object. ebx/ecx are scratch. Jumps to &gc if new space is full.
  __ AllocateInNewSpace((slots_ * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Setup the object header.
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
  // Length is stored as a Smi, consistent with FixedArray layout.
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(slots_)));

  // Setup the fixed slots.
  __ Set(ebx, Immediate(0));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  // FCONTEXT points at this context itself (eax): a function context is
  // its own "function context".
  __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the surrounding context. We go through the
  // context in the function (ecx) to match the allocation behavior we have
  // in the runtime system (see Heap::AllocateFunctionContext).
  __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
  __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  // Fill every slot past the fixed header up to slots_ (the total length).
  __ mov(ebx, Factory::undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < slots_; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  // The new context becomes the current context register (esi).
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  // Slow path: the one on-stack argument (the function) is passed through
  // to Runtime::kNewContext.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}
(...skipping 6374 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6507 // Do a tail call to the rewritten stub. | 6506 // Do a tail call to the rewritten stub. |
6508 __ jmp(Operand(edi)); | 6507 __ jmp(Operand(edi)); |
6509 } | 6508 } |
6510 | 6509 |
6511 | 6510 |
6512 #undef __ | 6511 #undef __ |
6513 | 6512 |
6514 } } // namespace v8::internal | 6513 } } // namespace v8::internal |
6515 | 6514 |
6516 #endif // V8_TARGET_ARCH_IA32 | 6515 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |