| Index: src/ia32/lithium-codegen-ia32.cc
|
| diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
|
| index 0cd5f1fce9e31fe24a7e7f8ddc85069365ee06e0..f354659e965e7d64a6efe57b5d99581c338b1d28 100644
|
| --- a/src/ia32/lithium-codegen-ia32.cc
|
| +++ b/src/ia32/lithium-codegen-ia32.cc
|
| @@ -5615,6 +5615,10 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
|
| DeoptimizeIf(not_equal, instr->environment());
|
| }
|
|
|
| + int flags = allocation_site_mode == TRACK_ALLOCATION_SITE
|
| + ? ArrayLiteral::kCreateAllocationSiteInfos
|
| + : ArrayLiteral::kNoFlags;
|
| +
|
| // Set up the parameters to the stub/runtime call.
|
| __ PushHeapObject(literals);
|
| __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
|
| @@ -5631,9 +5635,11 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
|
| FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
|
| CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
|
| } else if (instr->hydrogen()->depth() > 1) {
|
| - CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
|
| + __ push(Immediate(Smi::FromInt(flags)));
|
| + CallRuntime(Runtime::kCreateArrayLiteral, 4, instr);
|
| } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
|
| - CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
|
| + __ push(Immediate(Smi::FromInt(flags)));
|
| + CallRuntime(Runtime::kCreateArrayLiteralShallow, 4, instr);
|
| } else {
|
| FastCloneShallowArrayStub::Mode mode =
|
| boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
|
| @@ -5646,6 +5652,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
|
|
|
|
|
| void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
|
| + Handle<JSObject> original_object,
|
| Register result,
|
| Register source,
|
| int* offset,
|
| @@ -5653,8 +5660,9 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
|
| ASSERT(!source.is(ecx));
|
| ASSERT(!result.is(ecx));
|
|
|
| + // Should we track allocation info for *this* object in the tree?
|
| bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
|
| - object->map()->CanTrackAllocationSite();
|
| + object->ShouldTrackAllocationInfo();
|
|
|
| if (FLAG_debug_code) {
|
| __ LoadHeapObject(ecx, object);
|
| @@ -5671,6 +5679,7 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
|
|
|
| // Only elements backing stores for non-COW arrays need to be copied.
|
| Handle<FixedArrayBase> elements(object->elements());
|
| + Handle<FixedArrayBase> original_elements(original_object->elements());
|
| bool has_elements = elements->length() > 0 &&
|
| elements->map() != isolate()->heap()->fixed_cow_array_map();
|
|
|
| @@ -5707,11 +5716,14 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
|
| isolate());
|
| if (value->IsJSObject()) {
|
| Handle<JSObject> value_object = Handle<JSObject>::cast(value);
|
| + Handle<JSObject> original_value_object = Handle<JSObject>::cast(
|
| + Handle<Object>(original_object->InObjectPropertyAt(i), isolate()));
|
| +
|
| __ lea(ecx, Operand(result, *offset));
|
| __ mov(FieldOperand(result, total_offset), ecx);
|
| __ LoadHeapObject(source, value_object);
|
| - EmitDeepCopy(value_object, result, source, offset,
|
| - DONT_TRACK_ALLOCATION_SITE);
|
| + EmitDeepCopy(value_object, original_value_object, result, source,
|
| + offset, mode);
|
| } else if (value->IsHeapObject()) {
|
| __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
|
| __ mov(FieldOperand(result, total_offset), ecx);
|
| @@ -5722,10 +5734,12 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
|
|
|
| // Build Allocation Site Info if desired
|
| if (create_allocation_site_info) {
|
| - __ mov(FieldOperand(result, object_size),
|
| + __ mov(FieldOperand(result, object_size + object_offset),
|
| Immediate(Handle<Map>(isolate()->heap()->
|
| allocation_site_info_map())));
|
| - __ mov(FieldOperand(result, object_size + kPointerSize), source);
|
| + __ LoadHeapObject(ecx, original_object);
|
| + __ mov(FieldOperand(result, object_size + object_offset + kPointerSize),
|
| + ecx);
|
| }
|
|
|
| if (has_elements) {
|
| @@ -5752,16 +5766,22 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
|
| }
|
| } else if (elements->IsFixedArray()) {
|
| Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
|
| + ASSERT(original_object->HasFastObjectElements());
|
| + Handle<FixedArray> original_fast_elements =
|
| + Handle<FixedArray>::cast(original_elements);
|
| for (int i = 0; i < elements_length; i++) {
|
| int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
|
| Handle<Object> value(fast_elements->get(i), isolate());
|
| if (value->IsJSObject()) {
|
| Handle<JSObject> value_object = Handle<JSObject>::cast(value);
|
| + Handle<JSObject> original_value_object = Handle<JSObject>::cast(
|
| + Handle<Object>(original_fast_elements->get(i), isolate()));
|
| __ lea(ecx, Operand(result, *offset));
|
| __ mov(FieldOperand(result, total_offset), ecx);
|
| __ LoadHeapObject(source, value_object);
|
| - EmitDeepCopy(value_object, result, source, offset,
|
| - DONT_TRACK_ALLOCATION_SITE);
|
| + ASSERT(!value_object.is_identical_to(original_value_object));
|
| + EmitDeepCopy(value_object, original_value_object, result, source,
|
| + offset, mode);
|
| } else if (value->IsHeapObject()) {
|
| __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
|
| __ mov(FieldOperand(result, total_offset), ecx);
|
| @@ -5776,27 +5796,10 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
|
| }
|
|
|
|
|
| -void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
|
| - ASSERT(ToRegister(instr->context()).is(esi));
|
| - int size = instr->hydrogen()->total_size();
|
| - ElementsKind boilerplate_elements_kind =
|
| - instr->hydrogen()->boilerplate()->GetElementsKind();
|
| -
|
| - // Deopt if the literal boilerplate ElementsKind is of a type different than
|
| - // the expected one. The check isn't necessary if the boilerplate has already
|
| - // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
|
| - if (CanTransitionToMoreGeneralFastElementsKind(
|
| - boilerplate_elements_kind, true)) {
|
| - __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
|
| - __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
|
| - // Load the map's "bit field 2". We only need the first byte,
|
| - // but the following masking takes care of that anyway.
|
| - __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
|
| - // Retrieve elements_kind from bit field 2.
|
| - __ and_(ecx, Map::kElementsKindMask);
|
| - __ cmp(ecx, boilerplate_elements_kind << Map::kElementsKindShift);
|
| - DeoptimizeIf(not_equal, instr->environment());
|
| - }
|
| +void LCodeGen::FastLiteralHelper(LFastLiteral* instr, AllocationSiteMode mode) {
|
| + int size = mode == DONT_TRACK_ALLOCATION_SITE
|
| + ? instr->hydrogen()->size_without_allocation_sites()
|
| + : instr->hydrogen()->total_size();
|
|
|
| // Allocate all objects that are part of the literal in one big
|
| // allocation. This avoids multiple limit checks.
|
| @@ -5811,12 +5814,40 @@ void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
|
| __ bind(&allocated);
|
| int offset = 0;
|
| __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
|
| - EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset,
|
| - instr->hydrogen()->allocation_site_mode());
|
| + EmitDeepCopy(instr->hydrogen()->boilerplate(),
|
| + instr->hydrogen()->original_boilerplate(),
|
| + eax, ebx, &offset,
|
| + mode);
|
| ASSERT_EQ(size, offset);
|
| }
|
|
|
|
|
| +void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
|
| + ASSERT(ToRegister(instr->context()).is(esi));
|
| +
|
| + // TODO(mvstanton): Revisit this heuristic as site info matures.
|
| + // If allocation site mode is on, then we need the ability to turn it off
|
| + // after "a while."  Later, better options should be available, but for
|
| + // now just allow a certain number of gcs to pass.
|
| + if (instr->hydrogen()->allocation_site_mode() == TRACK_ALLOCATION_SITE) {
|
| + // How many gcs have passed?
|
| + const int maxCount = 3 + isolate()->heap()->gc_count();
|
| + ExternalReference gc_count_address =
|
| + ExternalReference::gc_count_address(isolate());
|
| + Label continue_using, done;
|
| + __ cmp(Operand::StaticVariable(gc_count_address), Immediate(maxCount));
|
| + __ j(less, &continue_using);
|
| + FastLiteralHelper(instr, DONT_TRACK_ALLOCATION_SITE);
|
| + __ jmp(&done);
|
| + __ bind(&continue_using);
|
| + FastLiteralHelper(instr, TRACK_ALLOCATION_SITE);
|
| + __ bind(&done);
|
| + } else {
|
| + FastLiteralHelper(instr, instr->hydrogen()->allocation_site_mode());
|
| + }
|
| +}
|
| +
|
| +
|
| void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
|
| ASSERT(ToRegister(instr->context()).is(esi));
|
| Handle<FixedArray> literals(instr->environment()->closure()->literals());
|
| @@ -5830,6 +5861,10 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
|
| ? ObjectLiteral::kHasFunction
|
| : ObjectLiteral::kNoFlags;
|
|
|
| + if (instr->hydrogen()->allocation_site_mode() == TRACK_ALLOCATION_SITE) {
|
| + flags |= ObjectLiteral::kCreateAllocationSiteInfos;
|
| + }
|
| +
|
| // Set up the parameters to the stub/runtime call and pick the right
|
| // runtime function or stub to call.
|
| int properties_count = constant_properties->length() / 2;
|
|
|