OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
152 // Return and remove the on-stack parameter. | 152 // Return and remove the on-stack parameter. |
153 __ mov(esi, Operand(eax)); | 153 __ mov(esi, Operand(eax)); |
154 __ ret(1 * kPointerSize); | 154 __ ret(1 * kPointerSize); |
155 | 155 |
156 // Need to collect. Call into runtime system. | 156 // Need to collect. Call into runtime system. |
157 __ bind(&gc); | 157 __ bind(&gc); |
158 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); | 158 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); |
159 } | 159 } |
160 | 160 |
161 | 161 |
| 162 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { |
| 163 // Stack layout on entry: |
| 164 // |
| 165 // [esp + (1 * kPointerSize)]: function (or a smi sentinel, see below) |
| 166 // [esp + (2 * kPointerSize)]: serialized scope info |
| 167 |
| 168 // Try to allocate the context in new space; eax receives the tagged |
| 169 Label gc; |
| 170 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 171 __ AllocateInNewSpace(FixedArray::SizeFor(length), |
| 172 eax, ebx, ecx, &gc, TAG_OBJECT); |
| 173 |
| 174 // Get the function or sentinel from the stack. |
| 175 __ mov(ecx, Operand(esp, 1 * kPointerSize)); |
| 176 |
| 177 // Get the serialized scope info from the stack. |
| 178 __ mov(ebx, Operand(esp, 2 * kPointerSize)); |
| 179 |
| 180 // Set up the object header: block context map and length (as a smi). |
| 181 Factory* factory = masm->isolate()->factory(); |
| 182 __ mov(FieldOperand(eax, HeapObject::kMapOffset), |
| 183 factory->block_context_map()); |
| 184 __ mov(FieldOperand(eax, Context::kLengthOffset), |
| 185 Immediate(Smi::FromInt(length))); |
| 186 |
| 187 // If this block context is nested in the global context we get a smi |
| 188 // sentinel instead of a function. The block context should get the |
| 189 // canonical empty function of the global context as its closure which |
| 190 // we still have to look up. |
| 191 Label after_sentinel; |
| 192 __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear); |
| 193 __ mov(ecx, GlobalObjectOperand()); |
| 194 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset)); |
| 195 __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX)); |
| 196 __ bind(&after_sentinel); |
| 197 |
| 198 // Set up the fixed slots: closure (ecx), previous context (esi), and |
| 199 __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx); |
| 200 __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi); |
| 201 __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx); |
| 202 |
| 203 // Copy the global object from the previous context. |
| 204 __ mov(ebx, ContextOperand(esi, Context::GLOBAL_INDEX)); |
| 205 __ mov(ContextOperand(eax, Context::GLOBAL_INDEX), ebx); |
| 206 |
| 207 // Initialize the rest of the slots to the hole value. The single-slot |
| 208 if (slots_ == 1) { |
| 209 __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS), |
| 210 factory->the_hole_value()); |
| 211 } else { |
| 212 __ mov(ebx, factory->the_hole_value()); |
| 213 for (int i = 0; i < slots_; i++) { |
| 214 __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx); |
| 215 } |
| 216 } |
| 217 |
| 218 // Return and remove the on-stack parameters; the new context is left |
| 219 __ mov(esi, eax); |
| 220 __ ret(2 * kPointerSize); |
| 221 |
| 222 // Allocation failed. Need to collect; call into the runtime system. |
| 223 __ bind(&gc); |
| 224 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); |
| 225 } |
| 226 |
| 227 |
162 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 228 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
163 // Stack layout on entry: | 229 // Stack layout on entry: |
164 // | 230 // |
165 // [esp + kPointerSize]: constant elements. | 231 // [esp + kPointerSize]: constant elements. |
166 // [esp + (2 * kPointerSize)]: literal index. | 232 // [esp + (2 * kPointerSize)]: literal index. |
167 // [esp + (3 * kPointerSize)]: literals array. | 233 // [esp + (3 * kPointerSize)]: literals array. |
168 | 234 |
169 // All sizes here are multiples of kPointerSize. | 235 // All sizes here are multiples of kPointerSize. |
170 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; | 236 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; |
171 int size = JSArray::kSize + elements_size; | 237 int size = JSArray::kSize + elements_size; |
(...skipping 6750 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6922 | 6988 |
6923 // Fall through when we need to inform the incremental marker. | 6989 // Fall through when we need to inform the incremental marker. |
6924 } | 6990 } |
6925 | 6991 |
6926 | 6992 |
6927 #undef __ | 6993 #undef __ |
6928 | 6994 |
6929 } } // namespace v8::internal | 6995 } } // namespace v8::internal |
6930 | 6996 |
6931 #endif // V8_TARGET_ARCH_IA32 | 6997 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |