Chromium Code Reviews

Diff: src/x64/code-stubs-x64.cc

Issue 8574058: Add version of x64 FastCloneShallowArrayStub that copies all boilerplate kinds (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: "correct diff", created 9 years, 1 month ago
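Summary of the change: FastCloneShallowArrayStub::Generate previously baked a single boilerplate kind into the stub at compile time. The patch factors the allocate-and-copy body out into GenerateFastCloneShallowArrayCommon() and adds a CLONE_ANY_ELEMENTS mode that dispatches at runtime on the map of the boilerplate's elements array. Below is a minimal C++ sketch of that dispatch (not V8 code; the enum types are hypothetical stand-ins for V8's real Map objects):

// Stand-ins for the maps the stub compares against; in the stub these are
// factory->fixed_cow_array_map() and factory->fixed_array_map(), checked
// with __ Cmp against FieldOperand(rbx, HeapObject::kMapOffset).
enum class ElementsMap { kFixedArray, kFixedDoubleArray, kFixedCOWArray };

enum class Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, COPY_ON_WRITE_ELEMENTS };

// Mirrors the branch order of the new CLONE_ANY_ELEMENTS path: check for
// copy-on-write elements first, then plain fixed arrays, and fall through
// to the double-elements path for everything else.
Mode DispatchOnElementsMap(ElementsMap elements_map) {
  if (elements_map == ElementsMap::kFixedCOWArray) {
    return Mode::COPY_ON_WRITE_ELEMENTS;  // share the boilerplate's elements
  }
  if (elements_map == ElementsMap::kFixedArray) {
    return Mode::CLONE_ELEMENTS;          // word-by-word copy
  }
  return Mode::CLONE_DOUBLE_ELEMENTS;     // copied via movsd in the stub
}

Note that in the patch all three runtime-dispatched calls pass length 0, so GenerateFastCloneShallowArrayCommon copies only the JSArray header, including its elements pointer, and the elements themselves are shared with the boilerplate.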
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(... 201 matching lines skipped ...)
   // Return and remove the on-stack parameter.
   __ movq(rsi, rax);
   __ ret(2 * kPointerSize);

   // Need to collect. Call into runtime system.
   __ bind(&gc);
   __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
 }


-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
+static void GenerateFastCloneShallowArrayCommon(
+    MacroAssembler* masm,
+    int length,
+    FastCloneShallowArrayStub::Mode mode,
+    Label* fail) {
+  // Stack layout and register on entry:
Jakob Kummerow 2011/11/17 14:34:19: I don't think the ...Common() method cares about the stack layout.
   //
   // [rsp + kPointerSize]: constant elements.
   // [rsp + (2 * kPointerSize)]: literal index.
   // [rsp + (3 * kPointerSize)]: literals array.
+  // rcx: boilerplate array.
+  ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);

   // All sizes here are multiples of kPointerSize.
   int elements_size = 0;
-  if (length_ > 0) {
-    elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
-        ? FixedDoubleArray::SizeFor(length_)
-        : FixedArray::SizeFor(length_);
+  if (length > 0) {
+    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+        ? FixedDoubleArray::SizeFor(length)
+        : FixedArray::SizeFor(length);
   }
   int size = JSArray::kSize + elements_size;

-  // Load boilerplate object into rcx and check if we need to create a
-  // boilerplate.
-  Label slow_case;
-  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
-  __ movq(rax, Operand(rsp, 2 * kPointerSize));
-  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
-  __ movq(rcx,
-          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
-  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
-  __ j(equal, &slow_case);
-
-  if (FLAG_debug_code) {
-    const char* message;
-    Heap::RootListIndex expected_map_index;
-    if (mode_ == CLONE_ELEMENTS) {
-      message = "Expected (writable) fixed array";
-      expected_map_index = Heap::kFixedArrayMapRootIndex;
-    } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
-      message = "Expected (writable) fixed double array";
-      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
-    } else {
-      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
-      message = "Expected copy-on-write fixed array";
-      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
-    }
-    __ push(rcx);
-    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
-                   expected_map_index);
-    __ Assert(equal, message);
-    __ pop(rcx);
-  }
-
   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
-  __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
+  __ AllocateInNewSpace(size, rax, rbx, rdx, fail, TAG_OBJECT);

   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
-    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
+    if ((i != JSArray::kElementsOffset) || (length == 0)) {
       __ movq(rbx, FieldOperand(rcx, i));
       __ movq(FieldOperand(rax, i), rbx);
     }
   }

-  if (length_ > 0) {
+  if (length > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
     __ lea(rdx, Operand(rax, JSArray::kSize));
     __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);

     // Copy the elements array.
-    if (mode_ == CLONE_ELEMENTS) {
+    if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
       for (int i = 0; i < elements_size; i += kPointerSize) {
         __ movq(rbx, FieldOperand(rcx, i));
         __ movq(FieldOperand(rdx, i), rbx);
       }
     } else {
-      ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS);
+      ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
       int i;
       for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
         __ movq(rbx, FieldOperand(rcx, i));
         __ movq(FieldOperand(rdx, i), rbx);
       }
       while (i < elements_size) {
         __ movsd(xmm0, FieldOperand(rcx, i));
         __ movsd(FieldOperand(rdx, i), xmm0);
         i += kDoubleSize;
       }
       ASSERT(i == elements_size);
     }
   }
+}

-  // Return and remove the on-stack parameters.
+void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
+  // Stack layout on entry:
+  //
+  // [rsp + kPointerSize]: constant elements.
+  // [rsp + (2 * kPointerSize)]: literal index.
+  // [rsp + (3 * kPointerSize)]: literals array.
+
+  // Load boilerplate object into rcx and check if we need to create a
+  // boilerplate.
+  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
+  __ movq(rax, Operand(rsp, 2 * kPointerSize));
+  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
+  __ movq(rcx,
+          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
+  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
+  Label slow_case;
+  __ j(equal, &slow_case);
+
+  FastCloneShallowArrayStub::Mode mode = mode_;
+  // rcx is boilerplate object.
+  Factory* factory = masm->isolate()->factory();
+  if (mode == CLONE_ANY_ELEMENTS) {
+    Label double_elements, check_fast_elements;
+    __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
+    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+           factory->fixed_cow_array_map());
+    __ j(not_equal, &check_fast_elements);
+    GenerateFastCloneShallowArrayCommon(masm, 0,
+                                        COPY_ON_WRITE_ELEMENTS, &slow_case);
+    __ ret(3 * kPointerSize);
+
+    __ bind(&check_fast_elements);
+    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+           factory->fixed_array_map());
+    __ j(not_equal, &double_elements);
+    GenerateFastCloneShallowArrayCommon(masm, 0,
+                                        CLONE_ELEMENTS, &slow_case);
+    __ ret(3 * kPointerSize);
+
+    __ bind(&double_elements);
+    mode = CLONE_DOUBLE_ELEMENTS;
+    // Fall through to generate the code to handle double elements.
+  }
+
+  if (FLAG_debug_code) {
+    const char* message;
+    Heap::RootListIndex expected_map_index;
+    if (mode == CLONE_ELEMENTS) {
+      message = "Expected (writable) fixed array";
+      expected_map_index = Heap::kFixedArrayMapRootIndex;
+    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+      message = "Expected (writable) fixed double array";
+      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
+    } else {
+      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+      message = "Expected copy-on-write fixed array";
+      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
+    }
+    __ push(rcx);
+    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
+    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
+                   expected_map_index);
+    __ Assert(equal, message);
+    __ pop(rcx);
+  }
+
+  GenerateFastCloneShallowArrayCommon(masm, 0,
+                                      CLONE_DOUBLE_ELEMENTS, &slow_case);
Jakob Kummerow 2011/11/17 14:34:19: s/CLONE_DOUBLE_ELEMENTS/mode/ !
   __ ret(3 * kPointerSize);

   __ bind(&slow_case);
   __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
 }


 // The stub expects its argument on the stack and returns its result in tos_:
 // zero for false, and a non-zero value for true.
 void ToBooleanStub::Generate(MacroAssembler* masm) {
(... 5701 matching lines skipped ...)
                                  xmm0,
                                  &slow_elements);
   __ ret(0);
 }

 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
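For reference, the CLONE_DOUBLE_ELEMENTS copy that the diff moves into GenerateFastCloneShallowArrayCommon() runs in two phases: the FixedDoubleArray header is copied in pointer-sized words through rbx, then the double payload is copied in kDoubleSize steps through xmm0 with movsd. A rough C++ sketch of the same two-phase copy follows; the constants are illustrative stand-ins for V8's kPointerSize and kDoubleSize, not the real definitions.

#include <cstdint>
#include <cstring>

// Assumed stand-ins for V8 constants (illustrative only).
constexpr int kPointerSize = 8;  // x64
constexpr int kDoubleSize = 8;

// Two-phase copy: header words first (map and length), then the double
// payload one 8-byte chunk at a time, mirroring the stub's movq loop
// followed by its movsd loop.
void CopyDoubleElements(const uint8_t* src, uint8_t* dst,
                        int header_size, int elements_size) {
  int i = 0;
  for (; i < header_size; i += kPointerSize) {
    std::memcpy(dst + i, src + i, kPointerSize);  // header word
  }
  while (i < elements_size) {
    std::memcpy(dst + i, src + i, kDoubleSize);   // one double at a time
    i += kDoubleSize;
  }
}

Copying the payload as raw 8-byte chunks (movsd in the stub, memcpy here) preserves the exact bit patterns of the doubles, which matters because a FixedDoubleArray can contain the special hole NaN that must not be altered by the copy.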
