Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 12521011: Compile FastCloneShallowArrayStub using Crankshaft. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased. Created 7 years, 9 months ago
Index: src/arm/code-stubs-arm.cc
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index d982f27065788a1f25eed56cb99e6b3a3ff4bdaf..f883c4f3a4f9fd6c034696c2ee2110f732bc51ee 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -38,6 +38,18 @@ namespace v8 {
namespace internal {
+void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
+ Isolate* isolate,
+ CodeStubInterfaceDescriptor* descriptor) {
+ static Register registers[] = { r3, r2, r1 };
+ descriptor->register_param_count_ = 3;
+ descriptor->register_params_ = registers;
+ descriptor->stack_parameter_count_ = NULL;
+ descriptor->deoptimization_handler_ =
+ Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
+}
+
+
void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
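
For orientation: the three register parameters carry exactly the values the old hand-written stub read off the stack (literals array, literal index, constant elements; see the removed Generate() below), and the deoptimization handler is the same runtime function the old stub tail-called. A minimal sketch of the shape with stand-in types, not V8's real CodeStubInterfaceDescriptor:

#include <cstddef>
#include <cstdio>

// Stand-in types: a descriptor names the parameter registers and the
// runtime fallback, letting Crankshaft compile the stub from a Hydrogen
// graph instead of hand-written assembly.
struct Descriptor {
  size_t register_param_count;
  const char* const* register_params;
  const char* deopt_runtime_function;
};

Descriptor DescribeFastCloneShallowArray() {
  // Same three values the removed stub loaded from the stack, in order:
  // [sp + 2*kPointerSize], [sp + kPointerSize], [sp].
  static const char* const kParams[] = {
      "r3: literals array", "r2: literal index", "r1: constant elements"};
  return {3, kParams, "Runtime::kCreateArrayLiteralShallow"};
}

int main() {
  Descriptor d = DescribeFastCloneShallowArray();
  std::printf("%zu params, fallback %s\n",
              d.register_param_count, d.deopt_runtime_function);
  return 0;
}
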
@@ -403,153 +415,6 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
}
-static void GenerateFastCloneShallowArrayCommon(
- MacroAssembler* masm,
- int length,
- FastCloneShallowArrayStub::Mode mode,
- AllocationSiteMode allocation_site_mode,
- Label* fail) {
- // Registers on entry:
- //
- // r3: boilerplate literal array.
- ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
-
- // All sizes here are multiples of kPointerSize.
- int elements_size = 0;
- if (length > 0) {
- elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- ? FixedDoubleArray::SizeFor(length)
- : FixedArray::SizeFor(length);
- }
-
- int size = JSArray::kSize;
- int allocation_info_start = size;
- if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
- size += AllocationSiteInfo::kSize;
- }
- size += elements_size;
-
- // Allocate both the JS array and the elements array in one big
- // allocation. This avoids multiple limit checks.
- AllocationFlags flags = TAG_OBJECT;
- if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) {
- flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
- }
- __ Allocate(size, r0, r1, r2, fail, flags);
-
- if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
- __ mov(r2, Operand(Handle<Map>(masm->isolate()->heap()->
- allocation_site_info_map())));
- __ str(r2, FieldMemOperand(r0, allocation_info_start));
- __ str(r3, FieldMemOperand(r0, allocation_info_start + kPointerSize));
- }
-
- // Copy the JS array part.
- for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
- if ((i != JSArray::kElementsOffset) || (length == 0)) {
- __ ldr(r1, FieldMemOperand(r3, i));
- __ str(r1, FieldMemOperand(r0, i));
- }
- }
-
- if (length > 0) {
- // Get hold of the elements array of the boilerplate and setup the
- // elements pointer in the resulting object.
- __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
- if (allocation_site_mode == TRACK_ALLOCATION_SITE) {
- __ add(r2, r0, Operand(JSArray::kSize + AllocationSiteInfo::kSize));
- } else {
- __ add(r2, r0, Operand(JSArray::kSize));
- }
- __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
-
- // Copy the elements array.
- ASSERT((elements_size % kPointerSize) == 0);
- __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
- }
-}
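
To make the removed size computation concrete, here is a worked example under explicit assumptions: a 32-bit build (kPointerSize == 4) and illustrative header sizes (the real constants live in V8's objects.h), for a 3-element fast-elements literal with allocation-site tracking:

#include <cstdio>

int main() {
  const int kPointerSize = 4;                            // 32-bit build
  const int kJSArraySize = 4 * kPointerSize;             // assumed header size
  const int kAllocationSiteInfoSize = 2 * kPointerSize;  // assumed: map + payload
  const int kFixedArrayHeaderSize = 2 * kPointerSize;    // assumed: map + length

  const int length = 3;
  int elements_size = kFixedArrayHeaderSize + length * kPointerSize;

  int size = kJSArraySize;           // the JSArray object comes first
  int allocation_info_start = size;  // site info sits directly after it
  size += kAllocationSiteInfoSize;   // only when TRACK_ALLOCATION_SITE
  size += elements_size;             // the elements array comes last

  // A single Allocate(size, ...) then covers all three pieces, which is
  // the "one big allocation, one limit check" the removed comment means.
  std::printf("site info at +%d, total %d bytes\n",
              allocation_info_start, size);
  return 0;
}
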
-
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
- //
- // [sp]: constant elements.
- // [sp + kPointerSize]: literal index.
- // [sp + (2 * kPointerSize)]: literals array.
-
- // Load boilerplate object into r3 and check if we need to create a
- // boilerplate.
- Label slow_case;
- __ ldr(r3, MemOperand(sp, 2 * kPointerSize));
- __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
- __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
- __ b(eq, &slow_case);
-
- FastCloneShallowArrayStub::Mode mode = mode_;
- if (mode == CLONE_ANY_ELEMENTS) {
- Label double_elements, check_fast_elements;
- __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset));
- __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
- __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex);
- __ b(ne, &check_fast_elements);
- GenerateFastCloneShallowArrayCommon(masm, 0, COPY_ON_WRITE_ELEMENTS,
- allocation_site_mode_,
- &slow_case);
- // Return and remove the on-stack parameters.
- __ add(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
-
- __ bind(&check_fast_elements);
- __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
- __ b(ne, &double_elements);
- GenerateFastCloneShallowArrayCommon(masm, length_, CLONE_ELEMENTS,
- allocation_site_mode_,
- &slow_case);
- // Return and remove the on-stack parameters.
- __ add(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
-
- __ bind(&double_elements);
- mode = CLONE_DOUBLE_ELEMENTS;
- // Fall through to generate the code to handle double elements.
- }
-
- if (FLAG_debug_code) {
- const char* message;
- Heap::RootListIndex expected_map_index;
- if (mode == CLONE_ELEMENTS) {
- message = "Expected (writable) fixed array";
- expected_map_index = Heap::kFixedArrayMapRootIndex;
- } else if (mode == CLONE_DOUBLE_ELEMENTS) {
- message = "Expected (writable) fixed double array";
- expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
- } else {
- ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
- message = "Expected copy-on-write fixed array";
- expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
- }
- __ push(r3);
- __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
- __ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ CompareRoot(r3, expected_map_index);
- __ Assert(eq, message);
- __ pop(r3);
- }
-
- GenerateFastCloneShallowArrayCommon(masm, length_, mode,
- allocation_site_mode_,
- &slow_case);
-
- // Return and remove the on-stack parameters.
- __ add(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
-
- __ bind(&slow_case);
- __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
-}
-
-
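
For readers following the removed Generate() above: its CLONE_ANY_ELEMENTS path sniffed the boilerplate's elements map with CompareRoot and branched to a specialized clone mode, bailing out to Runtime::kCreateArrayLiteralShallow when the boilerplate was still undefined. A rough, self-contained C++ rendering of that dispatch (enum and function names are illustrative, not V8's):

#include <cstdio>

enum Mode { COPY_ON_WRITE_ELEMENTS, CLONE_ELEMENTS, CLONE_DOUBLE_ELEMENTS };
enum MapKind { kFixedCOWArrayMap, kFixedArrayMap, kFixedDoubleArrayMap };

// Mirrors the CompareRoot/branch ladder in the removed stub.
Mode PickCloneMode(MapKind elements_map) {
  if (elements_map == kFixedCOWArrayMap)
    return COPY_ON_WRITE_ELEMENTS;  // share the payload; copy with length 0
  if (elements_map == kFixedArrayMap)
    return CLONE_ELEMENTS;          // copy tagged slots
  return CLONE_DOUBLE_ELEMENTS;     // unboxed doubles, 8-byte aligned
}

int main() {
  std::printf("%d\n", PickCloneMode(kFixedDoubleArrayMap));  // prints 2
  return 0;
}
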
// Takes a Smi and converts to an IEEE 64 bit floating point value in two
// registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and
// 52 fraction bits (20 in the first word, 32 in the second). Zeros is a
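
A quick self-contained check of the word layout the comment above describes, for the Smi value 5 (5.0 = 1.25 * 2^2, so the exponent field is 2 + 1023 = 0x401 and the top fraction bits are 01):

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  double d = 5.0;
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  uint32_t hi = static_cast<uint32_t>(bits >> 32);  // sign(1)|exp(11)|fraction(20)
  uint32_t lo = static_cast<uint32_t>(bits);        // low 32 fraction bits
  // Prints hi=0x40140000 lo=0x00000000.
  std::printf("hi=0x%08x lo=0x%08x\n", (unsigned)hi, (unsigned)lo);
  return 0;
}
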
@@ -3879,6 +3744,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
CEntryStub::GenerateAheadOfTime(isolate);
WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
+ StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
}
@@ -3895,11 +3761,13 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) {
Code* save_doubles_code;
if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
save_doubles_code = *save_doubles.GetCode(isolate);
- save_doubles_code->set_is_pregenerated(true);
-
- Code* store_buffer_overflow_code = *stub.GetCode(isolate);
- store_buffer_overflow_code->set_is_pregenerated(true);
}
+ Code* store_buffer_overflow_code;
+ if (!stub.FindCodeInCache(&store_buffer_overflow_code, isolate)) {
+ store_buffer_overflow_code = *stub.GetCode(isolate);
+ }
+ save_doubles_code->set_is_pregenerated(true);
+ store_buffer_overflow_code->set_is_pregenerated(true);
isolate->set_fp_stubs_generated(true);
}
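
The reshuffle above also fixes an ordering quirk: previously the store-buffer-overflow code was generated and flagged only inside the save-doubles cache-miss branch, so a cache hit there skipped it entirely. Both stubs now follow the same find-or-create-then-flag shape; a self-contained sketch of that pattern with stand-in types (not V8's stub cache):

#include <cstdio>
#include <map>
#include <string>

struct Code { bool is_pregenerated = false; };
static std::map<std::string, Code> cache;

Code* FindOrCreate(const std::string& key) {
  auto it = cache.find(key);
  if (it == cache.end()) it = cache.emplace(key, Code{}).first;  // "generate" on miss
  return &it->second;  // valid on both the hit and the miss path
}

void GenerateFPStubs() {
  Code* save_doubles = FindOrCreate("CEntry/save-doubles");
  Code* store_buffer = FindOrCreate("StoreBufferOverflow");
  // Flags are now set unconditionally, after both lookups.
  save_doubles->is_pregenerated = true;
  store_buffer->is_pregenerated = true;
}

int main() {
  GenerateFPStubs();
  std::printf("pregenerated: %d %d\n",
              cache["CEntry/save-doubles"].is_pregenerated,
              cache["StoreBufferOverflow"].is_pregenerated);  // 1 1
  return 0;
}
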
@@ -7662,11 +7530,6 @@ bool RecordWriteStub::IsPregenerated() {
}
-bool StoreBufferOverflowStub::IsPregenerated() {
- return save_doubles_ == kDontSaveFPRegs || ISOLATE->fp_stubs_generated();
-}
-
-
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
StoreBufferOverflowStub stub1(kDontSaveFPRegs);
@@ -7955,9 +7818,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
- ASSERT(!Serializer::enabled());
- bool save_fp_regs = CpuFeatures::IsSupported(VFP2);
- CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs);
+ CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
__ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
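
Note the removed ASSERT(!Serializer::enabled()): by carrying the FP choice in the fp_registers_ field, fixed when the stub object is constructed, instead of probing CpuFeatures::IsSupported(VFP2) at code-generation time, the trampoline becomes safe to generate ahead of time, which the new StubFailureTrampolineStub::GenerateAheadOfTime call earlier in this patch relies on. A sketch of the parameterization with stand-in types (not V8's stub class):

// Bake the FP decision into the stub's identity so each variant gets
// its own pregenerated code object.
struct StubFailureTrampolineStub {
  explicit StubFailureTrampolineStub(bool fp_registers)
      : fp_registers_(fp_registers) {}

  // Distinct cache keys per variant; no CPU probe at generation time.
  int MinorKey() const { return fp_registers_ ? 1 : 0; }

 private:
  bool fp_registers_;
};

int main() {
  StubFailureTrampolineStub with_fp(true), without_fp(false);
  return with_fp.MinorKey() == without_fp.MinorKey();  // 0 (success): keys differ
}
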
