Chromium Code Reviews

Unified Diff: src/x64/code-stubs-x64.cc

Issue 11817017: Additional work to get array literal allocation tracking working, even with --always-opt (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Removed MIPS changes, and found a bug: the COPY_ON_WRITE shallow array stub didn't track allocation inf… Created 7 years, 11 months ago
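This patch set replaces the stub's separate AllocationSiteInfoMode argument with mode values that carry the tracking bit themselves (the *_WITH_ALLOCATION_SITE_INFO variants), queried through static predicate helpers such as TrackAllocationSiteInfo and IsCloneDoubleElementsMode. As a minimal sketch, assuming the enum layout implied by the names used in this diff (the real definitions live elsewhere, presumably in code-stubs.h, and may differ):

// Hypothetical sketch of FastCloneShallowArrayStub's mode predicates.
// Assumption: the tracking variants are grouped after the plain ones.
class FastCloneShallowArrayStub {
 public:
  enum Mode {
    CLONE_ELEMENTS,
    CLONE_DOUBLE_ELEMENTS,
    COPY_ON_WRITE_ELEMENTS,
    CLONE_ANY_ELEMENTS,
    CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
    CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
    COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
    CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO
  };

  // True for any *_WITH_ALLOCATION_SITE_INFO mode.
  static bool TrackAllocationSiteInfo(Mode mode) {
    return mode >= CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
  // Each Is*Mode predicate accepts both the plain and the tracking flavor,
  // so the stub body no longer needs to care whether tracking is on.
  static bool IsCloneElementsMode(Mode mode) {
    return mode == CLONE_ELEMENTS ||
           mode == CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
  static bool IsCloneDoubleElementsMode(Mode mode) {
    return mode == CLONE_DOUBLE_ELEMENTS ||
           mode == CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
  static bool IsCloneAnyElementsMode(Mode mode) {
    return mode == CLONE_ANY_ELEMENTS ||
           mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
  static bool IsCopyOnWriteElementsMode(Mode mode) {
    return mode == COPY_ON_WRITE_ELEMENTS ||
           mode == COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
  }
};

This is what lets the x64 stub below pick, say, COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO when tracking is on: the mode alone now tells GenerateFastCloneShallowArrayCommon whether to reserve and fill an AllocationSiteInfo slot.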
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 298 matching lines...)
   // Need to collect. Call into runtime system.
   __ bind(&gc);
   __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
 }
 
 
 static void GenerateFastCloneShallowArrayCommon(
     MacroAssembler* masm,
     int length,
     FastCloneShallowArrayStub::Mode mode,
-    AllocationSiteInfoMode allocation_site_info_mode,
     Label* fail) {
   // Registers on entry:
   //
   // rcx: boilerplate literal array.
   ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
+  bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode);
 
   // All sizes here are multiples of kPointerSize.
   int elements_size = 0;
   if (length > 0) {
-    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+    elements_size = FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)
         ? FixedDoubleArray::SizeFor(length)
         : FixedArray::SizeFor(length);
   }
   int size = JSArray::kSize;
   int allocation_info_start = size;
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
-    size += AllocationSiteInfo::kSize;
-  }
-  size += elements_size;
+  size += tracking_on ? AllocationSiteInfo::kSize + elements_size
+                      : elements_size;
 
   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
   AllocationFlags flags = TAG_OBJECT;
-  if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
     flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
   }
   __ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags);
 
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+  if (tracking_on) {
     __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
     __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister);
     __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx);
   }
 
   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
     if ((i != JSArray::kElementsOffset) || (length == 0)) {
       __ movq(rbx, FieldOperand(rcx, i));
       __ movq(FieldOperand(rax, i), rbx);
     }
   }
 
   if (length > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-    if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+    if (tracking_on) {
       __ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize));
     } else {
       __ lea(rdx, Operand(rax, JSArray::kSize));
     }
     __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
 
     // Copy the elements array.
-    if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
+    if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) {
       for (int i = 0; i < elements_size; i += kPointerSize) {
         __ movq(rbx, FieldOperand(rcx, i));
         __ movq(FieldOperand(rdx, i), rbx);
       }
     } else {
-      ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
+      ASSERT(FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode));
       int i;
       for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
         __ movq(rbx, FieldOperand(rcx, i));
         __ movq(FieldOperand(rdx, i), rbx);
       }
       while (i < elements_size) {
         __ movsd(xmm0, FieldOperand(rcx, i));
         __ movsd(FieldOperand(rdx, i), xmm0);
         i += kDoubleSize;
       }
(...skipping 16 matching lines...)
   SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
   __ movq(rcx,
           FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
   __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
   Label slow_case;
   __ j(equal, &slow_case);
 
   FastCloneShallowArrayStub::Mode mode = mode_;
   // rcx is boilerplate object.
   Factory* factory = masm->isolate()->factory();
-  AllocationSiteInfoMode allocation_site_info_mode =
-      DONT_TRACK_ALLOCATION_SITE_INFO;
-  if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
-    mode = CLONE_ANY_ELEMENTS;
-    allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
-  }
-
-  if (mode == CLONE_ANY_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneAnyElementsMode(mode)) {
     Label double_elements, check_fast_elements;
     __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
            factory->fixed_cow_array_map());
     __ j(not_equal, &check_fast_elements);
-    GenerateFastCloneShallowArrayCommon(masm, 0,
-                                        COPY_ON_WRITE_ELEMENTS,
-                                        allocation_site_info_mode,
-                                        &slow_case);
+    bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode);
+    mode = tracking_on ? COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : COPY_ON_WRITE_ELEMENTS;
+    GenerateFastCloneShallowArrayCommon(masm, 0, mode, &slow_case);
     __ ret(3 * kPointerSize);
 
     __ bind(&check_fast_elements);
     __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
            factory->fixed_array_map());
     __ j(not_equal, &double_elements);
-    GenerateFastCloneShallowArrayCommon(masm, length_,
-                                        CLONE_ELEMENTS,
-                                        allocation_site_info_mode,
-                                        &slow_case);
+    mode = tracking_on ? CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : CLONE_ELEMENTS;
+    GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
     __ ret(3 * kPointerSize);
 
     __ bind(&double_elements);
-    mode = CLONE_DOUBLE_ELEMENTS;
+    mode = tracking_on ? CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : CLONE_DOUBLE_ELEMENTS;
     // Fall through to generate the code to handle double elements.
   }
 
   if (FLAG_debug_code) {
     const char* message;
     Heap::RootListIndex expected_map_index;
-    if (mode == CLONE_ELEMENTS) {
+    if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) {
       message = "Expected (writable) fixed array";
       expected_map_index = Heap::kFixedArrayMapRootIndex;
-    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+    } else if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
       message = "Expected (writable) fixed double array";
       expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
     } else {
-      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+      ASSERT(FastCloneShallowArrayStub::IsCopyOnWriteElementsMode(mode));
       message = "Expected copy-on-write fixed array";
       expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
     }
     __ push(rcx);
     __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
     __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                    expected_map_index);
     __ Assert(equal, message);
     __ pop(rcx);
   }
 
-  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
-                                      allocation_site_info_mode, &slow_case);
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
   __ ret(3 * kPointerSize);
 
   __ bind(&slow_case);
   __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
 }
 
 
 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
   // Stack layout on entry:
   //
(...skipping 6027 matching lines...)
 #endif
 
   __ Ret();
 }
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64
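Taken together, the stub performs a single AllocateInNewSpace call covering the JSArray header, the optional AllocationSiteInfo cell, and the elements backing store, with the info cell sitting between header and elements. A small standalone sketch of the offset arithmetic, using made-up stand-in constants rather than V8's real kSize values:

#include <cstdio>

int main() {
  // Stand-in constants for illustration only; V8's actual JSArray::kSize,
  // AllocationSiteInfo::kSize and FixedArray::SizeFor() differ.
  const int kPointerSize = 8;                            // x64
  const int kJSArraySize = 4 * kPointerSize;
  const int kAllocationSiteInfoSize = 2 * kPointerSize;  // map + payload word
  const int elements_size = 10 * kPointerSize;
  const bool tracking_on = true;

  // Mirrors the stub: the info cell (if any) starts right after the JSArray...
  const int allocation_info_start = kJSArraySize;
  // ...the total size folds the optional cell in, as in the new code above...
  const int size = kJSArraySize +
                   (tracking_on ? kAllocationSiteInfoSize + elements_size
                                : elements_size);
  // ...and the elements pointer is offset past the cell when tracking is on.
  const int elements_start =
      kJSArraySize + (tracking_on ? kAllocationSiteInfoSize : 0);

  std::printf("total=%d, info at %d, elements at %d\n",
              size, allocation_info_start, elements_start);
  return 0;
}

The resulting layout is [JSArray | AllocationSiteInfo? | elements], which is why the stub writes the AllocationSiteInfo map at offset JSArray::kSize and computes the elements pointer as JSArray::kSize + AllocationSiteInfo::kSize whenever tracking is on.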