Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 8066002: Fast allocation of block contexts. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Reintroduced sentinel for global context and addressed comments.
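This patch adds a FastNewBlockContextStub alongside the existing FastNewFunctionContextStub: the new stub allocates a block context directly in new space and only falls back to Runtime::kPushBlockContext when allocation fails. The allocation request in the diff below is FixedArray::SizeFor(slots_ + Context::MIN_CONTEXT_SLOTS) bytes. For orientation only, here is a minimal, self-contained C++ sketch of that size arithmetic; it assumes x64's 8-byte pointers and a two-word FixedArray header, and the constants are illustrative stand-ins rather than values read from V8's headers:

#include <cstdio>

int main() {
  // Sketch assumptions: 8-byte tagged pointers (x64) and a FixedArray header
  // of two words (map pointer + Smi-encoded length).
  const int kPointerSize = 8;
  const int kFixedArrayHeaderSize = 2 * kPointerSize;
  // Stand-in for Context::MIN_CONTEXT_SLOTS: the fixed slots the stub fills
  // (closure, previous context, extension/scope info, global object).
  const int kMinContextSlots = 4;  // illustrative value, not taken from V8 headers

  for (int user_slots = 0; user_slots <= 2; ++user_slots) {
    int length = user_slots + kMinContextSlots;                 // slots_ + MIN_CONTEXT_SLOTS
    int bytes = kFixedArrayHeaderSize + length * kPointerSize;  // FixedArray::SizeFor(length)
    std::printf("%d user slot(s) -> %d context slots, %d bytes in new space\n",
                user_slots, length, bytes);
  }
  return 0;
}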
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 137 matching lines...)
  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}


void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + (1 * kPointerSize)]: function
  // [rsp + (2 * kPointerSize)]: serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace(FixedArray::SizeFor(length),
                        rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ movq(rcx, Operand(rsp, 1 * kPointerSize));

  // Get the serialized scope info from the stack.
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));

  // Setup the object header.
  __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // If this block context is nested in the global context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the global context as its closure which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
  __ movq(rcx, GlobalObjectOperand());
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
  __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Setup the fixed slots.
  __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
  __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
  __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);

  // Copy the global object from the previous context.
  __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX));
  __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx);

  // Initialize the rest of the slots to the hole value.
  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {
    __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(2 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}


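One subtle point in the new stub above is the sentinel handling: when the block context is created at global scope, the stub receives a Smi sentinel in place of a function and loads the canonical empty function out of the global context before storing the closure slot; every remaining slot starts out as the hole. The toy model below restates that control flow as plain, self-contained C++; all of the types and helper names are placeholders invented for this sketch, not V8 API:

#include <cstdio>
#include <string>
#include <vector>

// Toy stand-ins for tagged heap values; a real context is a heap object with
// a map, a Smi length, and tagged slots.
struct ToyValue {
  std::string tag;
  bool is_smi_sentinel = false;
};

struct ToyBlockContext {
  ToyValue closure;             // CLOSURE_INDEX
  ToyValue previous;            // PREVIOUS_INDEX
  ToyValue extension;           // EXTENSION_INDEX: serialized scope info
  ToyValue global;              // GLOBAL_INDEX: copied from the previous context
  std::vector<ToyValue> slots;  // remaining slots, hole-initialized
};

// Mirrors the fast path of FastNewBlockContextStub::Generate: resolve the
// sentinel to the global context's canonical closure, fill the fixed slots,
// and initialize the rest to the hole.
ToyBlockContext NewBlockContext(const ToyValue& function_or_sentinel,
                                const ToyValue& scope_info,
                                const ToyValue& previous,
                                const ToyValue& global,
                                const ToyValue& global_context_closure,
                                int user_slots) {
  ToyBlockContext ctx;
  ctx.closure = function_or_sentinel.is_smi_sentinel ? global_context_closure
                                                     : function_or_sentinel;
  ctx.previous = previous;
  ctx.extension = scope_info;
  ctx.global = global;
  ctx.slots.assign(user_slots, ToyValue{"the_hole"});
  return ctx;
}

int main() {
  ToyValue sentinel{"smi_sentinel", true};
  ToyBlockContext ctx = NewBlockContext(sentinel, ToyValue{"scope_info"},
                                        ToyValue{"outer_context"},
                                        ToyValue{"global_object"},
                                        ToyValue{"canonical_empty_function"}, 2);
  std::printf("closure=%s, extra slots=%zu\n", ctx.closure.tag.c_str(),
              ctx.slots.size());
  return 0;
}

Note that in the generated code the only bailout is the &gc label reached when AllocateInNewSpace fails; every other step is straight-line, and Runtime::kPushBlockContext is tail-called only in that case.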
void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + kPointerSize]: constant elements.
  // [rsp + (2 * kPointerSize)]: literal index.
  // [rsp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;
(...skipping 5671 matching lines...)

  // Fall through when we need to inform the incremental marker.
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64
