Chromium Code Reviews

Diff: src/ia32/code-stubs-ia32.cc

Issue 11817017: Additional work to get array literal allocation tracking working, even with --always-opt (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Removed MIPS changes, and found a bug. COPY_ON_WRITE shallow array stub didn't track allocation inf… Created 7 years, 11 months ago
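Context note: the new code in this diff calls mode predicates on FastCloneShallowArrayStub (IsCloneAnyElementsMode, IsCloneElementsMode, IsCloneDoubleElementsMode, IsCopyOnWriteElementsMode, TrackAllocationSiteInfo) and uses *_WITH_ALLOCATION_SITE_INFO mode values. Those declarations live outside this file and are not part of the diff shown here; the following is only a minimal sketch of what they might look like. The identifier names are taken from the calls below, but the enum layout and helper bodies are assumptions, not the patch's actual code.

    // Hypothetical sketch -- the real declarations would live in src/code-stubs.h
    // and may differ; only the identifier names are taken from the diff below.
    class FastCloneShallowArrayStub {
     public:
      enum Mode {
        CLONE_ELEMENTS,
        CLONE_DOUBLE_ELEMENTS,
        COPY_ON_WRITE_ELEMENTS,
        CLONE_ANY_ELEMENTS,
        // Variants that additionally ask the stub to write an AllocationSiteInfo
        // record directly after the cloned JSArray.
        CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
        CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
        COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO,
        CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO
      };

      // Each predicate accepts both the plain mode and its tracking variant,
      // so the code generator only has to branch on the element kind.
      static bool IsCloneElementsMode(Mode m) {
        return m == CLONE_ELEMENTS ||
               m == CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
      }
      static bool IsCloneDoubleElementsMode(Mode m) {
        return m == CLONE_DOUBLE_ELEMENTS ||
               m == CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
      }
      static bool IsCopyOnWriteElementsMode(Mode m) {
        return m == COPY_ON_WRITE_ELEMENTS ||
               m == COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
      }
      static bool IsCloneAnyElementsMode(Mode m) {
        return m == CLONE_ANY_ELEMENTS ||
               m == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
      }
      // True exactly for the _WITH_ALLOCATION_SITE_INFO variants (assumed enum
      // layout: all tracking variants are listed after the plain ones).
      static bool TrackAllocationSiteInfo(Mode m) {
        return m >= CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO;
      }
    };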
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 305 matching lines...)
   // Need to collect. Call into runtime system.
   __ bind(&gc);
   __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
 }


 static void GenerateFastCloneShallowArrayCommon(
     MacroAssembler* masm,
     int length,
     FastCloneShallowArrayStub::Mode mode,
-    AllocationSiteInfoMode allocation_site_info_mode,
     Label* fail) {
   // Registers on entry:
   //
   // ecx: boilerplate literal array.
-  ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
+  ASSERT(!FastCloneShallowArrayStub::IsCloneAnyElementsMode(mode));
+  bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode);

   // All sizes here are multiples of kPointerSize.
   int elements_size = 0;
   if (length > 0) {
-    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+    elements_size = FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)
         ? FixedDoubleArray::SizeFor(length)
         : FixedArray::SizeFor(length);
   }
   int size = JSArray::kSize;
   int allocation_info_start = size;
-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
-    size += AllocationSiteInfo::kSize;
-  }
-  size += elements_size;
+  size += tracking_on ? AllocationSiteInfo::kSize + elements_size
+                      : elements_size;

   // Allocate both the JS array and the elements array in one big
   // allocation. This avoids multiple limit checks.
   AllocationFlags flags = TAG_OBJECT;
-  if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
     flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
   }
   __ AllocateInNewSpace(size, eax, ebx, edx, fail, flags);

-  if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+  if (tracking_on) {
     __ mov(FieldOperand(eax, allocation_info_start),
            Immediate(Handle<Map>(masm->isolate()->heap()->
                allocation_site_info_map())));
     __ mov(FieldOperand(eax, allocation_info_start + kPointerSize), ecx);
   }

   // Copy the JS array part.
   for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
     if ((i != JSArray::kElementsOffset) || (length == 0)) {
       __ mov(ebx, FieldOperand(ecx, i));
       __ mov(FieldOperand(eax, i), ebx);
     }
   }

   if (length > 0) {
     // Get hold of the elements array of the boilerplate and setup the
     // elements pointer in the resulting object.
     __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
-    if (allocation_site_info_mode == TRACK_ALLOCATION_SITE_INFO) {
+    if (tracking_on) {
       __ lea(edx, Operand(eax, JSArray::kSize + AllocationSiteInfo::kSize));
     } else {
       __ lea(edx, Operand(eax, JSArray::kSize));
     }
     __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

     // Copy the elements array.
-    if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
+    if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) {
       for (int i = 0; i < elements_size; i += kPointerSize) {
         __ mov(ebx, FieldOperand(ecx, i));
         __ mov(FieldOperand(edx, i), ebx);
       }
     } else {
-      ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
+      ASSERT(FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode));
       int i;
       for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
         __ mov(ebx, FieldOperand(ecx, i));
         __ mov(FieldOperand(edx, i), ebx);
       }
       while (i < elements_size) {
         __ fld_d(FieldOperand(ecx, i));
         __ fstp_d(FieldOperand(edx, i));
         i += kDoubleSize;
       }
(...skipping 19 matching lines...)
   STATIC_ASSERT(kSmiTag == 0);
   __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
                            FixedArray::kHeaderSize));
   Factory* factory = masm->isolate()->factory();
   __ cmp(ecx, factory->undefined_value());
   Label slow_case;
   __ j(equal, &slow_case);

   FastCloneShallowArrayStub::Mode mode = mode_;
   // ecx is boilerplate object.
-  AllocationSiteInfoMode allocation_site_info_mode =
-      DONT_TRACK_ALLOCATION_SITE_INFO;
-  if (mode == CLONE_ANY_ELEMENTS_WITH_ALLOCATION_SITE_INFO) {
-    mode = CLONE_ANY_ELEMENTS;
-    allocation_site_info_mode = TRACK_ALLOCATION_SITE_INFO;
-  }
-
-  if (mode == CLONE_ANY_ELEMENTS) {
+  if (FastCloneShallowArrayStub::IsCloneAnyElementsMode(mode)) {
     Label double_elements, check_fast_elements;
     __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset));
     __ CheckMap(ebx, factory->fixed_cow_array_map(),
                 &check_fast_elements, DONT_DO_SMI_CHECK);
-    GenerateFastCloneShallowArrayCommon(masm, 0,
-                                        COPY_ON_WRITE_ELEMENTS,
-                                        allocation_site_info_mode,
-                                        &slow_case);
+    bool tracking_on = FastCloneShallowArrayStub::TrackAllocationSiteInfo(mode);
+    mode = tracking_on ? COPY_ON_WRITE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : COPY_ON_WRITE_ELEMENTS;
+    GenerateFastCloneShallowArrayCommon(masm, 0, mode, &slow_case);
     __ ret(3 * kPointerSize);

     __ bind(&check_fast_elements);
     __ CheckMap(ebx, factory->fixed_array_map(),
                 &double_elements, DONT_DO_SMI_CHECK);
-    GenerateFastCloneShallowArrayCommon(masm, length_,
-                                        CLONE_ELEMENTS,
-                                        allocation_site_info_mode,
-                                        &slow_case);
+    mode = tracking_on ? CLONE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : CLONE_ELEMENTS;
+    GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
     __ ret(3 * kPointerSize);

     __ bind(&double_elements);
-    mode = CLONE_DOUBLE_ELEMENTS;
+    mode = tracking_on ? CLONE_DOUBLE_ELEMENTS_WITH_ALLOCATION_SITE_INFO
+                       : CLONE_DOUBLE_ELEMENTS;
     // Fall through to generate the code to handle double elements.
   }

   if (FLAG_debug_code) {
     const char* message;
     Handle<Map> expected_map;
-    if (mode == CLONE_ELEMENTS) {
+    if (FastCloneShallowArrayStub::IsCloneElementsMode(mode)) {
       message = "Expected (writable) fixed array";
       expected_map = factory->fixed_array_map();
-    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+    } else if (FastCloneShallowArrayStub::IsCloneDoubleElementsMode(mode)) {
       message = "Expected (writable) fixed double array";
       expected_map = factory->fixed_double_array_map();
     } else {
-      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+      ASSERT(FastCloneShallowArrayStub::IsCopyOnWriteElementsMode(mode));
       message = "Expected copy-on-write fixed array";
       expected_map = factory->fixed_cow_array_map();
     }
     __ push(ecx);
     __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
     __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
     __ Assert(equal, message);
     __ pop(ecx);
   }

-  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
-                                      allocation_site_info_mode, &slow_case);
+  GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
+
   // Return and remove the on-stack parameters.
   __ ret(3 * kPointerSize);

   __ bind(&slow_case);
   __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
 }


 void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
   // Stack layout on entry:
(...skipping 7012 matching lines...)
   // Restore ecx.
   __ pop(ecx);
   __ ret(0);
 }

 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_IA32
