Chromium Code Reviews

Diff: src/x64/code-stubs-x64.cc (unchanged context is unprefixed; "+" marks lines added by this patch, "-" marks lines removed)

Issue 12521011: Compile FastCloneShallowArrayStub using Crankshaft. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased. Created 7 years, 8 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 21 matching lines...)
#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"
#include "stub-cache.h"
#include "runtime.h"

namespace v8 {
namespace internal {


+void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rax, rbx, rcx };
+  descriptor->register_param_count_ = 3;
+  descriptor->register_params_ = registers;
+  descriptor->stack_parameter_count_ = NULL;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
+}
+
+
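The added descriptor is what lets Crankshaft generate the stub body: it names the registers the stub's parameters arrive in and the runtime entry used when the stub misses, which is the same Runtime::kCreateArrayLiteralShallow that the removed hand-written version tail-calls in its slow case further down. A minimal sketch of what the fields express, using stand-in types rather than the real V8 declarations, and assuming rax/rbx/rcx carry the same three inputs the removed version read from the stack (literals array, literal index, constant elements):

struct Register { int code; };   // stand-in; the real type lives in the assembler headers
typedef const void* Address;     // stand-in for a runtime entry address

// Field-for-field sketch of CodeStubInterfaceDescriptor as used above
// (illustrative only, not the actual V8 declaration).
struct DescriptorSketch {
  int register_param_count_;         // number of parameters passed in registers
  const Register* register_params_;  // which registers, in parameter order
  Register* stack_parameter_count_;  // NULL here: no variable stack argument count
  Address deoptimization_handler_;   // runtime fallback entry used on a miss
};

int main() {
  static const Register rax = { 0 }, rbx = { 1 }, rcx = { 2 };  // placeholder codes
  static const Register params[] = { rax, rbx, rcx };
  DescriptorSketch d = { 3, params, 0, 0 };
  return d.register_param_count_ == 3 ? 0 : 1;
}
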
void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->stack_parameter_count_ = NULL;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
}
(...skipping 327 matching lines...)
  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(2 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}


-static void GenerateFastCloneShallowArrayCommon(
-    MacroAssembler* masm,
-    int length,
-    FastCloneShallowArrayStub::Mode mode,
-    AllocationSiteMode allocation_site_mode,
-    Label* fail) {
-  // Registers on entry:
-  //
-  // rcx: boilerplate literal array.
-  ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
-
-  // All sizes here are multiples of kPointerSize.
-  int elements_size = 0;
-  if (length > 0) {
-    elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
-        ? FixedDoubleArray::SizeFor(length)
-        : FixedArray::SizeFor(length);
-  }
-  int size = JSArray::kSize;
-  int allocation_info_start = size;
-  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
-    size += AllocationSiteInfo::kSize;
-  }
-  size += elements_size;
-
-  // Allocate both the JS array and the elements array in one big
-  // allocation. This avoids multiple limit checks.
-  AllocationFlags flags = TAG_OBJECT;
-  if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
-    flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
-  }
-  __ Allocate(size, rax, rbx, rdx, fail, flags);
-
-  if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
-    __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex);
-    __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister);
-    __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx);
-  }
-
-  // Copy the JS array part.
-  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
-    if ((i != JSArray::kElementsOffset) || (length == 0)) {
-      __ movq(rbx, FieldOperand(rcx, i));
-      __ movq(FieldOperand(rax, i), rbx);
-    }
-  }
-
-  if (length > 0) {
-    // Get hold of the elements array of the boilerplate and setup the
-    // elements pointer in the resulting object.
-    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-    if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
-      __ lea(rdx, Operand(rax, JSArray::kSize + AllocationSiteInfo::kSize));
-    } else {
-      __ lea(rdx, Operand(rax, JSArray::kSize));
-    }
-    __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
-
-    // Copy the elements array.
-    if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
-      for (int i = 0; i < elements_size; i += kPointerSize) {
-        __ movq(rbx, FieldOperand(rcx, i));
-        __ movq(FieldOperand(rdx, i), rbx);
-      }
-    } else {
-      ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
-      int i;
-      for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
-        __ movq(rbx, FieldOperand(rcx, i));
-        __ movq(FieldOperand(rdx, i), rbx);
-      }
-      while (i < elements_size) {
-        __ movsd(xmm0, FieldOperand(rcx, i));
-        __ movsd(FieldOperand(rdx, i), xmm0);
-        i += kDoubleSize;
-      }
-      ASSERT(i == elements_size);
-    }
-  }
-}
-
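The removed helper sizes the whole clone as one allocation: the JSArray header, an optional AllocationSiteInfo record, then the elements store. A small sketch of that arithmetic for the fast (non-double) case, using assumed x64 layout constants rather than the real values from objects.h:

#include <cstdio>

// Assumed x64 sizes, for illustration only; the real constants are
// JSArray::kSize, FixedArray::SizeFor() and AllocationSiteInfo::kSize.
const int kPointerSize = 8;
const int kJSArraySize = 4 * kPointerSize;             // map, properties, elements, length
const int kFixedArrayHeaderSize = 2 * kPointerSize;    // map, length
const int kAllocationSiteInfoSize = 2 * kPointerSize;  // map, payload

// Mirrors the size computation in GenerateFastCloneShallowArrayCommon.
int CloneAllocationSize(int length, bool track_allocation_site) {
  int elements_size =
      length > 0 ? kFixedArrayHeaderSize + length * kPointerSize : 0;
  int size = kJSArraySize;
  if (track_allocation_site) size += kAllocationSiteInfoSize;
  return size + elements_size;
}

int main() {
  // Cloning a four-element boilerplate with allocation-site tracking:
  // 32 (JSArray) + 16 (site info) + 16 (elements header) + 32 (4 slots) = 96.
  printf("%d bytes\n", CloneAllocationSize(4, true));
  return 0;
}
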
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
-  // Stack layout on entry:
-  //
-  // [rsp + kPointerSize]: constant elements.
-  // [rsp + (2 * kPointerSize)]: literal index.
-  // [rsp + (3 * kPointerSize)]: literals array.
-
-  // Load boilerplate object into rcx and check if we need to create a
-  // boilerplate.
-  __ movq(rcx, Operand(rsp, 3 * kPointerSize));
-  __ movq(rax, Operand(rsp, 2 * kPointerSize));
-  SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
-  __ movq(rcx,
-          FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
-  __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
-  Label slow_case;
-  __ j(equal, &slow_case);
-
-  FastCloneShallowArrayStub::Mode mode = mode_;
-  // rcx is boilerplate object.
-  Factory* factory = masm->isolate()->factory();
-  if (mode == CLONE_ANY_ELEMENTS) {
-    Label double_elements, check_fast_elements;
-    __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
-    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
-           factory->fixed_cow_array_map());
-    __ j(not_equal, &check_fast_elements);
-    GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
-                                        allocation_site_mode_,
-                                        &slow_case);
-    __ ret(3 * kPointerSize);
-
-    __ bind(&check_fast_elements);
-    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
-           factory->fixed_array_map());
-    __ j(not_equal, &double_elements);
-    GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
-                                        allocation_site_mode_,
-                                        &slow_case);
-    __ ret(3 * kPointerSize);
-
-    __ bind(&double_elements);
-    mode = CLONE_DOUBLE_ELEMENTS;
-    // Fall through to generate the code to handle double elements.
-  }
-
-  if (FLAG_debug_code) {
-    const char* message;
-    Heap::RootListIndex expected_map_index;
-    if (mode == CLONE_ELEMENTS) {
-      message = "Expected (writable) fixed array";
-      expected_map_index = Heap::kFixedArrayMapRootIndex;
-    } else if (mode == CLONE_DOUBLE_ELEMENTS) {
-      message = "Expected (writable) fixed double array";
-      expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
-    } else {
-      ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
-      message = "Expected copy-on-write fixed array";
-      expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
-    }
-    __ push(rcx);
-    __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
-    __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
-                   expected_map_index);
-    __ Assert(equal, message);
-    __ pop(rcx);
-  }
-
-  GenerateFastCloneShallowArrayCommon(masm, length_, mode,
-                                      allocation_site_mode_,
-                                      &slow_case);
-  __ ret(3 * kPointerSize);
-
-  __ bind(&slow_case);
-  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
-}
-
-
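For the CLONE_ANY_ELEMENTS case, the removed Generate above decides at run time how to copy the boilerplate's elements: a copy-on-write backing store is shared (the clone allocates zero element slots), a plain FixedArray is copied slot by slot, and anything else is treated as a FixedDoubleArray. A rough C++ restatement of that dispatch, with hypothetical names that are not V8 API:

#include <cstdio>

// Hypothetical stand-ins summarizing the branch structure above.
enum ElementsMap { COW_ARRAY_MAP, FIXED_ARRAY_MAP, FIXED_DOUBLE_ARRAY_MAP };
enum CloneMode { COPY_ON_WRITE_ELEMENTS, CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS };

struct ClonePlan {
  CloneMode mode;
  int length;  // element slots the clone itself allocates
};

ClonePlan PlanClone(ElementsMap boilerplate_elements_map, int stub_length) {
  if (boilerplate_elements_map == COW_ARRAY_MAP)
    return ClonePlan{ COPY_ON_WRITE_ELEMENTS, 0 };        // share the COW store
  if (boilerplate_elements_map == FIXED_ARRAY_MAP)
    return ClonePlan{ CLONE_ELEMENTS, stub_length };      // copy pointer-sized slots
  return ClonePlan{ CLONE_DOUBLE_ELEMENTS, stub_length }; // copy unboxed doubles
}

int main() {
  ClonePlan plan = PlanClone(FIXED_DOUBLE_ARRAY_MAP, 4);
  printf("mode=%d length=%d\n", plan.mode, plan.length);
  return 0;
}
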
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  Label patch;
  const Register argument = rax;
  const Register map = rdx;

  if (!types_.IsEmpty()) {
(...skipping 3589 matching lines...)
  return result_size_ == 1;
#else
  return true;
#endif
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
+  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
(...skipping 2601 matching lines...)
  __ StoreNumberToDoubleElements(rax,
                                 r9,
                                 r11,
                                 xmm0,
                                 &slow_elements);
  __ ret(0);
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
-  ASSERT(!Serializer::enabled());
-  CEntryStub ces(1, kSaveFPRegs);
+  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movq(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ pop(rcx);
  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
      ? kPointerSize
      : 0;
  __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
(...skipping 66 matching lines...)
#endif

  __ Ret();
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64