OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 141 matching lines...) | |
152 // Return and remove the on-stack parameter. | 152 // Return and remove the on-stack parameter. |
153 __ mov(esi, Operand(eax)); | 153 __ mov(esi, Operand(eax)); |
154 __ ret(1 * kPointerSize); | 154 __ ret(1 * kPointerSize); |
155 | 155 |
156 // Need to collect. Call into runtime system. | 156 // Need to collect. Call into runtime system. |
157 __ bind(&gc); | 157 __ bind(&gc); |
158 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); | 158 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); |
159 } | 159 } |
160 | 160 |
161 | 161 |
162 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | |
163 // Stack layout on entry: | |
164 // | |
165 // [esp + (1 * kPointerSize)]: function | |
166 // [esp + (2 * kPointerSize)]: serialized scope info | |
167 | |
168 // Try to allocate the context in new space. | |
169 Label gc; | |
170 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | |
171 __ AllocateInNewSpace(FixedArray::SizeFor(length), | |
172 eax, ebx, ecx, &gc, TAG_OBJECT); | |
173 | |
174 // Get the function from the stack. | |
175 __ mov(ecx, Operand(esp, 1 * kPointerSize)); | |
176 | |
177 // Get the serialized scope info from the stack. | |
178 __ mov(ebx, Operand(esp, 2 * kPointerSize)); | |
179 | |
180 // Setup the object header. | |
181 Factory* factory = masm->isolate()->factory(); | |
182 __ mov(FieldOperand(eax, HeapObject::kMapOffset), | |
183 factory->block_context_map()); | |
184 __ mov(FieldOperand(eax, Context::kLengthOffset), | |
185 Immediate(Smi::FromInt(length))); | |
186 | |
187 // Setup the fixed slots. | |
188 __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx); | |
189 __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi); | |
190 __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx); | |
191 | |
192 // Copy the global object from the previous context. | |
193 __ mov(ebx, ContextOperand(esi, Context::GLOBAL_INDEX)); | |
194 __ mov(ContextOperand(eax, Context::GLOBAL_INDEX), ebx); | |
195 | |
196 // Initialize the rest of the slots to the hole value. | |
197 if (slots_ == 1) { | |
198 __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS), | |
199 factory->the_hole_value()); | |
200 } else { | |
201 __ mov(ebx, factory->the_hole_value()); | |
202 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | |
danno 2011/10/05 12:30:09: Maybe it's clearer to iterate from 0 to slots_ and
Steven 2011/10/05 15:40:03: Done.
203 __ mov(ContextOperand(eax, i), ebx); | |
204 } | |
205 } | |
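danno's suggestion above is cut off in this view, but read together with the "Done" reply it points at looping over the stub's own slot count rather than from MIN_CONTEXT_SLOTS to length. A minimal sketch of how the hole-initialization loop presumably reads after the revision (assuming the same registers, with the offset folded into the ContextOperand index):

```cpp
// Sketch only: iterate over slots_ and offset the slot index, instead of
// iterating from Context::MIN_CONTEXT_SLOTS up to length.
__ mov(ebx, factory->the_hole_value());
for (int i = 0; i < slots_; i++) {
  __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
}
```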
206 | |
207 // Return and remove the on-stack parameters. | |
208 __ mov(esi, Operand(eax)); | |
danno 2011/10/05 12:30:09: just use eax, not Operand(eax)
Steven 2011/10/05 15:40:03: Done.
209 __ ret(2 * kPointerSize); | |
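Following danno's second comment, the revised return sequence presumably takes the register source directly; a sketch of that change:

```cpp
// Return and remove the on-stack parameters.
__ mov(esi, eax);  // plain register source instead of Operand(eax)
__ ret(2 * kPointerSize);
```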
210 | |
211 // Need to collect. Call into runtime system. | |
212 __ bind(&gc); | |
213 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | |
214 } | |
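For orientation, here is a small standalone model of the object this stub builds, with the slot order read off the stores above. The real constants and layout come from v8::internal::Context and FixedArray, so treat the values below as illustrative assumptions, not the V8 definitions.

```cpp
// Standalone sketch, not the V8 headers: a rough model of a block context.
#include <cstdio>

enum ContextSlot {
  CLOSURE_INDEX = 0,   // function, taken from the stack (ecx above)
  PREVIOUS_INDEX,      // enclosing context (esi on entry)
  EXTENSION_INDEX,     // serialized scope info, taken from the stack (ebx)
  GLOBAL_INDEX,        // global object copied from the previous context
  MIN_CONTEXT_SLOTS    // first of the hole-initialized user slots
};

// FixedArray-style object: map word + length word, then `length` slots.
int BlockContextSizeInBytes(int user_slots, int pointer_size = 4 /* ia32 */) {
  int length = MIN_CONTEXT_SLOTS + user_slots;
  return 2 * pointer_size + length * pointer_size;
}

int main() {
  std::printf("block context with 1 slot: %d bytes\n",
              BlockContextSizeInBytes(1));
  return 0;
}
```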
215 | |
216 | |
162 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { | 217 void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) { |
163 // Stack layout on entry: | 218 // Stack layout on entry: |
164 // | 219 // |
165 // [esp + kPointerSize]: constant elements. | 220 // [esp + kPointerSize]: constant elements. |
166 // [esp + (2 * kPointerSize)]: literal index. | 221 // [esp + (2 * kPointerSize)]: literal index. |
167 // [esp + (3 * kPointerSize)]: literals array. | 222 // [esp + (3 * kPointerSize)]: literals array. |
168 | 223 |
169 // All sizes here are multiples of kPointerSize. | 224 // All sizes here are multiples of kPointerSize. |
170 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; | 225 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0; |
171 int size = JSArray::kSize + elements_size; | 226 int size = JSArray::kSize + elements_size; |
(...skipping 6750 matching lines...) | |
6922 | 6977 |
6923 // Fall through when we need to inform the incremental marker. | 6978 // Fall through when we need to inform the incremental marker. |
6924 } | 6979 } |
6925 | 6980 |
6926 | 6981 |
6927 #undef __ | 6982 #undef __ |
6928 | 6983 |
6929 } } // namespace v8::internal | 6984 } } // namespace v8::internal |
6930 | 6985 |
6931 #endif // V8_TARGET_ARCH_IA32 | 6986 #endif // V8_TARGET_ARCH_IA32 |