Chromium Code Reviews
Unified Diff: src/x64/code-stubs-x64.cc

Issue 11663005 (Closed): Adapt Danno's Track Allocation Info idea to fast literals. When allocating a literal array,
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Ported to other platforms (created 7 years, 11 months ago)
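
The change below makes GenerateFastCloneShallowArrayCommon reserve room for an AllocationSiteInfo record between the cloned JSArray header and its elements store, fill that gap with the site-info map plus a pointer to the boilerplate, and bump the elements pointer past it. As a reading aid, here is a minimal standalone sketch of that size bookkeeping; the byte constants are illustrative x64 assumptions, not V8's actual layout definitions:

    #include <cstdio>

    // Illustrative constants (assumptions, not V8's layout headers): 8-byte
    // pointers, a 4-word JSArray header (map, properties, elements, length),
    // a 2-word AllocationSiteInfo (map, payload), and a FixedArray laid out
    // as a 2-word header plus one slot per element.
    const int kPointerSize = 8;
    const int kJSArraySize = 4 * kPointerSize;
    const int kAllocationSiteInfoSize = 2 * kPointerSize;
    int FixedArraySizeFor(int length) { return (2 + length) * kPointerSize; }

    int main() {
      const int length = 3;
      const bool track_allocation_info = true;  // TRACK_ALLOCATION_INFO

      int elements_size = (length > 0) ? FixedArraySizeFor(length) : 0;
      int size = kJSArraySize;
      int allocation_info_start = size;  // site info sits right after the header
      if (track_allocation_info) size += kAllocationSiteInfoSize;
      size += elements_size;  // elements land after the optional site info

      // With tracking on, the elements pointer must skip the site-info record,
      // mirroring the patched __ lea(rdx, Operand(rax, JSArray::kSize +
      // AllocationSiteInfo::kSize)) in the hunk below.
      int elements_start = kJSArraySize +
                           (track_allocation_info ? kAllocationSiteInfoSize : 0);
      std::printf("total=%d site_info@%d elements@%d\n",
                  size, allocation_info_start, elements_start);
      return 0;
    }

Folding everything into one AllocateInNewSpace call keeps the stub at a single new-space limit check, which is the point of the existing "one big allocation" comment in the hunk.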
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 298 matching lines...)
   // Need to collect. Call into runtime system.
   __ bind(&gc);
   __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
 }
 
 
 static void GenerateFastCloneShallowArrayCommon(
     MacroAssembler* masm,
     int length,
     FastCloneShallowArrayStub::Mode mode,
+    FastCloneShallowArrayStub::AllocationInfoMode allocation_info_mode,
     Label* fail) {
   // Registers on entry:
   //
   // rcx: boilerplate literal array.
   ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
 
   // All sizes here are multiples of kPointerSize.
   int elements_size = 0;
   if (length > 0) {
     elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
         ? FixedDoubleArray::SizeFor(length)
         : FixedArray::SizeFor(length);
   }
-  int size = JSArray::kSize + elements_size;
+  int size = JSArray::kSize;
+  int allocation_info_start = size;
+  if (allocation_info_mode ==
+      FastCloneShallowArrayStub::TRACK_ALLOCATION_INFO) {
+    size += AllocationSiteInfo::kSize;
+  }
+  size += elements_size;
 
   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
   AllocationFlags flags = TAG_OBJECT;
   if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
     flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
   }
   __ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags);
 
+  if (allocation_info_mode ==
+      FastCloneShallowArrayStub::TRACK_ALLOCATION_INFO) {
+    __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
+    __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister);
+    __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx);
+  }
+
   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
     if ((i != JSArray::kElementsOffset) || (length == 0)) {
       __ movq(rbx, FieldOperand(rcx, i));
       __ movq(FieldOperand(rax, i), rbx);
     }
   }
 
   if (length > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-    __ lea(rdx, Operand(rax, JSArray::kSize));
+    if (allocation_info_mode ==
+        FastCloneShallowArrayStub::TRACK_ALLOCATION_INFO) {
+      __ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize));
+    } else {
+      __ lea(rdx, Operand(rax, JSArray::kSize));
+    }
     __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
 
     // Copy the elements array.
     if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
       for (int i = 0; i < elements_size; i += kPointerSize) {
         __ movq(rbx, FieldOperand(rcx, i));
         __ movq(FieldOperand(rdx, i), rbx);
       }
     } else {
       ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
(...skipping 26 matching lines...)
   SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
   __ movq(rcx,
           FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
   __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
   Label slow_case;
   __ j(equal, &slow_case);
 
   FastCloneShallowArrayStub::Mode mode = mode_;
   // rcx is boilerplate object.
   Factory* factory = masm->isolate()->factory();
+  FastCloneShallowArrayStub::AllocationInfoMode allocation_info_mode = DONT_TRACK_ALLOCATION_INFO;

danno 2013/01/04 08:50:55: 80 col
mvstanton 2013/01/04 12:07:52: Done.
+  if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_INFO) {
+    mode = CLONE_ANY_ELEMENTS;
+    allocation_info_mode = TRACK_ALLOCATION_INFO;
+  }
+
   if (mode == CLONE_ANY_ELEMENTS) {
     Label double_elements, check_fast_elements;
     __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
            factory->fixed_cow_array_map());
     __ j(not_equal, &check_fast_elements);
     GenerateFastCloneShallowArrayCommon(masm, 0,
-                                        COPY_ON_WRITE_ELEMENTS, &slow_case);
+                                        COPY_ON_WRITE_ELEMENTS,
+                                        allocation_info_mode,
+                                        &slow_case);
     __ ret(3 * kPointerSize);
 
     __ bind(&check_fast_elements);
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
            factory->fixed_array_map());
     __ j(not_equal, &double_elements);
     GenerateFastCloneShallowArrayCommon(masm, length_,
-                                        CLONE_ELEMENTS, &slow_case);
+                                        CLONE_ELEMENTS,
+                                        allocation_info_mode,
+                                        &slow_case);
     __ ret(3 * kPointerSize);
 
     __ bind(&double_elements);
     mode = CLONE_DOUBLE_ELEMENTS;
     // Fall through to generate the code to handle double elements.
   }
 
   if (FLAG_debug_code) {
     const char* message;
     Heap::RootListIndex expected_map_index;
     if (mode == CLONE_ELEMENTS) {
       message = "Expected (writable) fixed array";
       expected_map_index = Heap::kFixedArrayMapRootIndex;
     } else if (mode == CLONE_DOUBLE_ELEMENTS) {
       message = "Expected (writable) fixed double array";
       expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
     } else {
       ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
       message = "Expected copy-on-write fixed array";
       expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
     }
     __ push(rcx);
     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
     __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                    expected_map_index);
     __ Assert(equal, message);
     __ pop(rcx);
   }
 
-  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
+                                      allocation_info_mode, &slow_case);
   __ ret(3 * kPointerSize);
 
   __ bind(&slow_case);
   __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
 }
 
 
 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
   // Stack layout on entry:
   //
(...skipping 6027 matching lines...)
 #endif
 
   __ Ret();
 }
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64
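
One design note on the hunk above: FastCloneShallowArrayStub::Generate folds the combined CLONE_ANY_ELEMENTS_WITH_ALLOCATION_INFO mode back into plain CLONE_ANY_ELEMENTS and carries the tracking decision in a separate AllocationInfoMode flag, so the common helper needs only one extra parameter. A standalone sketch of that split, with local stand-ins for the enums since this is not the V8 source:

    #include <cassert>

    // Local stand-ins for the stub's enums (illustrative; the real
    // definitions live with FastCloneShallowArrayStub in V8).
    enum Mode {
      CLONE_ELEMENTS,
      CLONE_DOUBLE_ELEMENTS,
      COPY_ON_WRITE_ELEMENTS,
      CLONE_ANY_ELEMENTS,
      CLONE_ANY_ELEMENTS_WITH_ALLOCATION_INFO
    };
    enum AllocationInfoMode { DONT_TRACK_ALLOCATION_INFO, TRACK_ALLOCATION_INFO };

    // Mirrors the prologue added to Generate(): split the combined mode into
    // a plain clone mode plus a tracking flag.
    void SplitMode(Mode* mode, AllocationInfoMode* allocation_info_mode) {
      *allocation_info_mode = DONT_TRACK_ALLOCATION_INFO;
      if (*mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_INFO) {
        *mode = CLONE_ANY_ELEMENTS;
        *allocation_info_mode = TRACK_ALLOCATION_INFO;
      }
    }

    int main() {
      Mode mode = CLONE_ANY_ELEMENTS_WITH_ALLOCATION_INFO;
      AllocationInfoMode tracking;
      SplitMode(&mode, &tracking);
      assert(mode == CLONE_ANY_ELEMENTS && tracking == TRACK_ALLOCATION_INFO);
      return 0;
    }

This keeps every downstream branch (copy-on-write, fast, and double elements) on the same call shape, rather than duplicating each path for the tracking case.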