Chromium Code Reviews

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 11817017: Additional work to get array literal allocation tracking working, even with --always-opt (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Quick adjustment to bit fields (created 7 years, 11 months ago)
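Note on the change (editor's summary): before this patch the stub folded allocation-site tracking into its Mode enum (a CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO value that Generate decoded back into a separate flag); after it, the stub carries an AllocationSiteMode of its own (allocation_site_mode_) and threads it straight through GenerateFastCloneShallowArrayCommon. The stand-alone C++ sketch below uses hypothetical stand-in types and a printf in place of code generation; it is not V8 code, only the shape of the refactoring.

#include <cstdio>

// Stand-ins for the two enums visible in the diff.
enum Mode { CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS, CLONE_ANY_ELEMENTS };
enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };

// Stand-in for GenerateFastCloneShallowArrayCommon: the helper now takes the
// allocation-site mode as an explicit argument instead of a locally decoded flag.
static void GenerateCommon(int length, Mode mode, AllocationSiteMode site_mode) {
  std::printf("length=%d mode=%d track_sites=%d\n", length,
              static_cast<int>(mode),
              site_mode == TRACK_ALLOCATION_SITE ? 1 : 0);
}

// Stand-in for the stub: the site mode is a field set at construction, so
// Generate no longer rewrites 'mode' to strip a combined enum value.
struct FastCloneShallowArrayStubSketch {
  int length_;
  Mode mode_;
  AllocationSiteMode allocation_site_mode_;
  void Generate() const {
    GenerateCommon(length_, mode_, allocation_site_mode_);
  }
};

int main() {
  FastCloneShallowArrayStubSketch stub{3, CLONE_ANY_ELEMENTS,
                                       TRACK_ALLOCATION_SITE};
  stub.Generate();  // prints: length=3 mode=2 track_sites=1
  return 0;
}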
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 326 matching lines...)
337 // Need to collect. Call into runtime system. 337 // Need to collect. Call into runtime system.
338 __ bind(&gc); 338 __ bind(&gc);
339 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); 339 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
340 } 340 }
341 341
342 342
343 static void GenerateFastCloneShallowArrayCommon( 343 static void GenerateFastCloneShallowArrayCommon(
344 MacroAssembler* masm, 344 MacroAssembler* masm,
345 int length, 345 int length,
346 FastCloneShallowArrayStub::Mode mode, 346 FastCloneShallowArrayStub::Mode mode,
347 AllocationSiteInfoMode allocation_site_info_mode, 347 AllocationSiteMode allocation_site_mode,
348 Label* fail) { 348 Label* fail) {
349 // Registers on entry: 349 // Registers on entry:
350 // 350 //
351 // r3: boilerplate literal array. 351 // r3: boilerplate literal array.
352 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS); 352 ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
353 353
354 // All sizes here are multiples of kPointerSize. 354 // All sizes here are multiples of kPointerSize.
355 int elements_size = 0; 355 int elements_size = 0;
356 if (length > 0) { 356 if (length > 0) {
357 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS 357 elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
358 ? FixedDoubleArray::SizeFor(length) 358 ? FixedDoubleArray::SizeFor(length)
359 : FixedArray::SizeFor(length); 359 : FixedArray::SizeFor(length);
360 } 360 }
361
361 int size = JSArray::kSize; 362 int size = JSArray::kSize;
362 int allocation_info_start = size; 363 int allocation_info_start = size;
363 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { 364 if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
364 size += AllocationSiteInfo::kSize; 365 size += AllocationSiteInfo::kSize;
365 } 366 }
366 size += elements_size; 367 size += elements_size;
367 368
368 // Allocate both the JS array and the elements array in one big 369 // Allocate both the JS array and the elements array in one big
369 // allocation. This avoids multiple limit checks. 370 // allocation. This avoids multiple limit checks.
370 AllocationFlags flags = TAG_OBJECT; 371 AllocationFlags flags = TAG_OBJECT;
371 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { 372 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
372 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); 373 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
373 } 374 }
374 __ AllocateInNewSpace(size, r0, r1, r2, fail, flags); 375 __ AllocateInNewSpace(size, r0, r1, r2, fail, flags);
375 376
376 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { 377 if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
377 __ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()-> 378 __ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()->
378 allocation_site_info_map()))); 379 allocation_site_info_map())));
379 __ str(r2, FieldMemOperand(r0, allocation_info_start)); 380 __ str(r2, FieldMemOperand(r0, allocation_info_start));
380 __ str(r3, FieldMemOperand(r0, allocation_info_start + kPointerSize)); 381 __ str(r3, FieldMemOperand(r0, allocation_info_start + kPointerSize));
381 } 382 }
382 383
383 // Copy the JS array part. 384 // Copy the JS array part.
384 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { 385 for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
385 if ((i != JSArray::kElementsOffset) || (length == 0)) { 386 if ((i != JSArray::kElementsOffset) || (length == 0)) {
386 __ ldr(r1, FieldMemOperand(r3, i)); 387 __ ldr(r1, FieldMemOperand(r3, i));
387 __ str(r1, FieldMemOperand(r0, i)); 388 __ str(r1, FieldMemOperand(r0, i));
388 } 389 }
389 } 390 }
390 391
391 if (length > 0) { 392 if (length > 0) {
392 // Get hold of the elements array of the boilerplate and setup the 393 // Get hold of the elements array of the boilerplate and setup the
393 // elements pointer in the resulting object. 394 // elements pointer in the resulting object.
394 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); 395 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
395 if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) { 396 if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
396 __ add(r2, r0, Operand(JSArray::kSize + AllocationSiteInfo::kSize)); 397 __ add(r2, r0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
397 } else { 398 } else {
398 __ add(r2, r0, Operand(JSArray::kSize)); 399 __ add(r2, r0, Operand(JSArray::kSize));
399 } 400 }
400 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset)); 401 __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
401 402
402 // Copy the elements array. 403 // Copy the elements array.
403 ASSERT((elements_size % kPointerSize) == 0); 404 ASSERT((elements_size % kPointerSize) == 0);
404 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize); 405 __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
405 } 406 }
(...skipping 10 matching lines...)
416 // boilerplate. 417 // boilerplate.
417 Label slow_case; 418 Label slow_case;
418 __ ldr(r3, MemOperand(sp, 2 * kPointerSize)); 419 __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
419 __ ldr(r0, MemOperand(sp, 1 * kPointerSize)); 420 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
420 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 421 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
421 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); 422 __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
422 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); 423 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
423 __ b(eq, &slow_case); 424 __ b(eq, &slow_case);
424 425
425 FastCloneShallowArrayStub::Mode mode = mode_; 426 FastCloneShallowArrayStub::Mode mode = mode_;
426 AllocationSiteInfoMode allocation_site_info_mode =
427 DONT_TRACK_ALLOCATION_SITE_INFO;
428 if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
429 mode = CLONE_ANY_ELEMENTS;
430 allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
431 }
432
433 if (mode == CLONE_ANY_ELEMENTS) { 427 if (mode == CLONE_ANY_ELEMENTS) {
434 Label double_elements, check_fast_elements; 428 Label double_elements, check_fast_elements;
435 __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset)); 429 __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset));
436 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 430 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
437 __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex); 431 __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex);
438 __ b(ne, &check_fast_elements); 432 __ b(ne, &check_fast_elements);
439 GenerateFastCloneShallowArrayCommon(masm, 0, 433 GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
440 COPY_ON_WRITE_ELEMENTS, 434 allocation_site_mode_,
441 allocation_site_info_mode,
442 &slow_case); 435 &slow_case);
443 // Return and remove the on-stack parameters. 436 // Return and remove the on-stack parameters.
444 __ add(sp, sp, Operand(3 * kPointerSize)); 437 __ add(sp, sp, Operand(3 * kPointerSize));
445 __ Ret(); 438 __ Ret();
446 439
447 __ bind(&check_fast_elements); 440 __ bind(&check_fast_elements);
448 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex); 441 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
449 __ b(ne, &double_elements); 442 __ b(ne, &double_elements);
450 GenerateFastCloneShallowArrayCommon(masm, length_, 443 GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
451 CLONE_ELEMENTS, 444 allocation_site_mode_,
452 allocation_site_info_mode,
453 &slow_case); 445 &slow_case);
454 // Return and remove the on-stack parameters. 446 // Return and remove the on-stack parameters.
455 __ add(sp, sp, Operand(3 * kPointerSize)); 447 __ add(sp, sp, Operand(3 * kPointerSize));
456 __ Ret(); 448 __ Ret();
457 449
458 __ bind(&double_elements); 450 __ bind(&double_elements);
459 mode = CLONE_DOUBLE_ELEMENTS; 451 mode = CLONE_DOUBLE_ELEMENTS;
460 // Fall through to generate the code to handle double elements. 452 // Fall through to generate the code to handle double elements.
461 } 453 }
462 454
(...skipping 13 matching lines...)
476 } 468 }
477 __ push(r3); 469 __ push(r3);
478 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); 470 __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
479 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); 471 __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
480 __ CompareRoot(r3, expected_map_index); 472 __ CompareRoot(r3, expected_map_index);
481 __ Assert(eq, message); 473 __ Assert(eq, message);
482 __ pop(r3); 474 __ pop(r3);
483 } 475 }
484 476
485 GenerateFastCloneShallowArrayCommon(masm, length_, mode, 477 GenerateFastCloneShallowArrayCommon(masm, length_, mode,
486 allocation_site_info_mode, &slow_case); 478 allocation_site_mode_,
479 &slow_case);
487 480
488 // Return and remove the on-stack parameters. 481 // Return and remove the on-stack parameters.
489 __ add(sp, sp, Operand(3 * kPointerSize)); 482 __ add(sp, sp, Operand(3 * kPointerSize));
490 __ Ret(); 483 __ Ret();
491 484
492 __ bind(&slow_case); 485 __ bind(&slow_case);
493 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); 486 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
494 } 487 }
495 488
496 489
(...skipping 7260 matching lines...)
7757 7750
7758 __ Pop(lr, r5, r1); 7751 __ Pop(lr, r5, r1);
7759 __ Ret(); 7752 __ Ret();
7760 } 7753 }
7761 7754
7762 #undef __ 7755 #undef __
7763 7756
7764 } } // namespace v8::internal 7757 } } // namespace v8::internal
7765 7758
7766 #endif // V8_TARGET_ARCH_ARM 7759 #endif // V8_TARGET_ARCH_ARM
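Editor's note on the GenerateFastCloneShallowArrayCommon chunk above: the stub reserves the JSArray header, the optional AllocationSiteInfo block, and the elements backing store with a single AllocateInNewSpace call, so only one new-space limit check is paid; the AllocationSiteInfo map is then written at offset JSArray::kSize and the elements pointer at JSArray::kSize + AllocationSiteInfo::kSize. The C++ sketch below walks the same offset arithmetic with illustrative constants (assumed values, not the real sizes from the V8 headers).

#include <cstdio>

int main() {
  // Illustrative constants only; the real values come from the V8 headers.
  const int kPointerSize = 4;                            // 32-bit ARM
  const int kJSArraySize = 4 * kPointerSize;             // assumed header size
  const int kAllocationSiteInfoSize = 2 * kPointerSize;  // map word + payload

  const int length = 3;
  const bool track_sites = true;       // TRACK_ALLOCATION_SITE
  const bool double_elements = false;  // CLONE_DOUBLE_ELEMENTS would use 8-byte slots

  // Elements are only reserved when length > 0, matching the stub.
  int elements_size = 0;
  if (length > 0) {
    const int kElementsHeader = 2 * kPointerSize;        // assumed map + length
    elements_size =
        kElementsHeader + length * (double_elements ? 8 : kPointerSize);
  }

  int size = kJSArraySize;
  const int allocation_info_start = size;  // AllocationSiteInfo map written here
  if (track_sites) size += kAllocationSiteInfoSize;
  const int elements_start = size;         // JSArray::kElementsOffset points here
  size += elements_size;

  std::printf("total=%d, site info at +%d, elements at +%d\n",
              size, allocation_info_start, elements_start);
  return 0;
}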