Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(49)

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 12114054: Supporting AllocationSiteInfo for Nested arrays (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressing a port compile failure Created 7 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ia32/lithium-codegen-ia32.h ('k') | src/objects.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 5597 matching lines...) Expand 10 before | Expand all | Expand 10 after
5608 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 5608 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
5609 // Load the map's "bit field 2". We only need the first byte, 5609 // Load the map's "bit field 2". We only need the first byte,
5610 // but the following masking takes care of that anyway. 5610 // but the following masking takes care of that anyway.
5611 __ mov(ebx, FieldOperand(ebx, Map::kBitField2Offset)); 5611 __ mov(ebx, FieldOperand(ebx, Map::kBitField2Offset));
5612 // Retrieve elements_kind from bit field 2. 5612 // Retrieve elements_kind from bit field 2.
5613 __ and_(ebx, Map::kElementsKindMask); 5613 __ and_(ebx, Map::kElementsKindMask);
5614 __ cmp(ebx, boilerplate_elements_kind << Map::kElementsKindShift); 5614 __ cmp(ebx, boilerplate_elements_kind << Map::kElementsKindShift);
5615 DeoptimizeIf(not_equal, instr->environment()); 5615 DeoptimizeIf(not_equal, instr->environment());
5616 } 5616 }
5617 5617
5618 int flags = allocation_site_mode == TRACK_ALLOCATION_SITE
5619 ? ArrayLiteral::kCreateAllocationSiteInfos
5620 : ArrayLiteral::kNoFlags;
5621
5618 // Set up the parameters to the stub/runtime call. 5622 // Set up the parameters to the stub/runtime call.
5619 __ PushHeapObject(literals); 5623 __ PushHeapObject(literals);
5620 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 5624 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
5621 // Boilerplate already exists, constant elements are never accessed. 5625 // Boilerplate already exists, constant elements are never accessed.
5622 // Pass an empty fixed array. 5626 // Pass an empty fixed array.
5623 __ push(Immediate(isolate()->factory()->empty_fixed_array())); 5627 __ push(Immediate(isolate()->factory()->empty_fixed_array()));
5624 5628
5625 // Pick the right runtime function or stub to call. 5629 // Pick the right runtime function or stub to call.
5626 int length = instr->hydrogen()->length(); 5630 int length = instr->hydrogen()->length();
5627 if (instr->hydrogen()->IsCopyOnWrite()) { 5631 if (instr->hydrogen()->IsCopyOnWrite()) {
5628 ASSERT(instr->hydrogen()->depth() == 1); 5632 ASSERT(instr->hydrogen()->depth() == 1);
5629 FastCloneShallowArrayStub::Mode mode = 5633 FastCloneShallowArrayStub::Mode mode =
5630 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; 5634 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
5631 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length); 5635 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
5632 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 5636 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
5633 } else if (instr->hydrogen()->depth() > 1) { 5637 } else if (instr->hydrogen()->depth() > 1) {
5634 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); 5638 __ push(Immediate(Smi::FromInt(flags)));
5639 CallRuntime(Runtime::kCreateArrayLiteral, 4, instr);
5635 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 5640 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
5636 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); 5641 __ push(Immediate(Smi::FromInt(flags)));
5642 CallRuntime(Runtime::kCreateArrayLiteralShallow, 4, instr);
5637 } else { 5643 } else {
5638 FastCloneShallowArrayStub::Mode mode = 5644 FastCloneShallowArrayStub::Mode mode =
5639 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS 5645 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
5640 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS 5646 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
5641 : FastCloneShallowArrayStub::CLONE_ELEMENTS; 5647 : FastCloneShallowArrayStub::CLONE_ELEMENTS;
5642 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); 5648 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
5643 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 5649 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
5644 } 5650 }
5645 } 5651 }
5646 5652
5647 5653
5648 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, 5654 void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
5655 Handle<JSObject> original_object,
5649 Register result, 5656 Register result,
5650 Register source, 5657 Register source,
5651 int* offset, 5658 int* offset,
5652 AllocationSiteMode mode) { 5659 AllocationSiteMode mode) {
5653 ASSERT(!source.is(ecx)); 5660 ASSERT(!source.is(ecx));
5654 ASSERT(!result.is(ecx)); 5661 ASSERT(!result.is(ecx));
5655 5662
5663 // Should we track allocation info for *this* object in the tree?
5656 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && 5664 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE &&
5657 object->map()->CanTrackAllocationSite(); 5665 object->ShouldTrackAllocationInfo();
5658 5666
5659 if (FLAG_debug_code) { 5667 if (FLAG_debug_code) {
5660 __ LoadHeapObject(ecx, object); 5668 __ LoadHeapObject(ecx, object);
5661 __ cmp(source, ecx); 5669 __ cmp(source, ecx);
5662 __ Assert(equal, "Unexpected object literal boilerplate"); 5670 __ Assert(equal, "Unexpected object literal boilerplate");
5663 __ mov(ecx, FieldOperand(source, HeapObject::kMapOffset)); 5671 __ mov(ecx, FieldOperand(source, HeapObject::kMapOffset));
5664 __ cmp(ecx, Handle<Map>(object->map())); 5672 __ cmp(ecx, Handle<Map>(object->map()));
5665 __ Assert(equal, "Unexpected boilerplate map"); 5673 __ Assert(equal, "Unexpected boilerplate map");
5666 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); 5674 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
5667 __ and_(ecx, Map::kElementsKindMask); 5675 __ and_(ecx, Map::kElementsKindMask);
5668 __ cmp(ecx, object->GetElementsKind() << Map::kElementsKindShift); 5676 __ cmp(ecx, object->GetElementsKind() << Map::kElementsKindShift);
5669 __ Assert(equal, "Unexpected boilerplate elements kind"); 5677 __ Assert(equal, "Unexpected boilerplate elements kind");
5670 } 5678 }
5671 5679
5672 // Only elements backing stores for non-COW arrays need to be copied. 5680 // Only elements backing stores for non-COW arrays need to be copied.
5673 Handle<FixedArrayBase> elements(object->elements()); 5681 Handle<FixedArrayBase> elements(object->elements());
5682 Handle<FixedArrayBase> original_elements(original_object->elements());
5674 bool has_elements = elements->length() > 0 && 5683 bool has_elements = elements->length() > 0 &&
5675 elements->map() != isolate()->heap()->fixed_cow_array_map(); 5684 elements->map() != isolate()->heap()->fixed_cow_array_map();
5676 5685
5677 // Increase the offset so that subsequent objects end up right after 5686 // Increase the offset so that subsequent objects end up right after
5678 // this object and its backing store. 5687 // this object and its backing store.
5679 int object_offset = *offset; 5688 int object_offset = *offset;
5680 int object_size = object->map()->instance_size(); 5689 int object_size = object->map()->instance_size();
5681 int elements_size = has_elements ? elements->Size() : 0; 5690 int elements_size = has_elements ? elements->Size() : 0;
5682 int elements_offset = *offset + object_size; 5691 int elements_offset = *offset + object_size;
5683 if (create_allocation_site_info) { 5692 if (create_allocation_site_info) {
(...skipping 16 matching lines...) Expand all
5700 __ mov(FieldOperand(result, object_offset + i), ecx); 5709 __ mov(FieldOperand(result, object_offset + i), ecx);
5701 } 5710 }
5702 5711
5703 // Copy in-object properties. 5712 // Copy in-object properties.
5704 for (int i = 0; i < inobject_properties; i++) { 5713 for (int i = 0; i < inobject_properties; i++) {
5705 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); 5714 int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
5706 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i), 5715 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i),
5707 isolate()); 5716 isolate());
5708 if (value->IsJSObject()) { 5717 if (value->IsJSObject()) {
5709 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 5718 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5719 Handle<JSObject> original_value_object = Handle<JSObject>::cast(
5720 Handle<Object>(original_object->InObjectPropertyAt(i), isolate()));
5721
5710 __ lea(ecx, Operand(result, *offset)); 5722 __ lea(ecx, Operand(result, *offset));
5711 __ mov(FieldOperand(result, total_offset), ecx); 5723 __ mov(FieldOperand(result, total_offset), ecx);
5712 __ LoadHeapObject(source, value_object); 5724 __ LoadHeapObject(source, value_object);
5713 EmitDeepCopy(value_object, result, source, offset, 5725 EmitDeepCopy(value_object, original_value_object, result, source,
5714 DONT_TRACK_ALLOCATION_SITE); 5726 offset, mode);
5715 } else if (value->IsHeapObject()) { 5727 } else if (value->IsHeapObject()) {
5716 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); 5728 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
5717 __ mov(FieldOperand(result, total_offset), ecx); 5729 __ mov(FieldOperand(result, total_offset), ecx);
5718 } else { 5730 } else {
5719 __ mov(FieldOperand(result, total_offset), Immediate(value)); 5731 __ mov(FieldOperand(result, total_offset), Immediate(value));
5720 } 5732 }
5721 } 5733 }
5722 5734
5723 // Build Allocation Site Info if desired 5735 // Build Allocation Site Info if desired
5724 if (create_allocation_site_info) { 5736 if (create_allocation_site_info) {
5725 __ mov(FieldOperand(result, object_size), 5737 __ mov(FieldOperand(result, object_size + object_offset),
5726 Immediate(Handle<Map>(isolate()->heap()-> 5738 Immediate(Handle<Map>(isolate()->heap()->
5727 allocation_site_info_map()))); 5739 allocation_site_info_map())));
5728 __ mov(FieldOperand(result, object_size + kPointerSize), source); 5740 __ LoadHeapObject(ecx, original_object);
5741 __ mov(FieldOperand(result, object_size + object_offset + kPointerSize),
5742 ecx);
5729 } 5743 }
5730 5744
5731 if (has_elements) { 5745 if (has_elements) {
5732 // Copy elements backing store header. 5746 // Copy elements backing store header.
5733 __ LoadHeapObject(source, elements); 5747 __ LoadHeapObject(source, elements);
5734 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { 5748 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
5735 __ mov(ecx, FieldOperand(source, i)); 5749 __ mov(ecx, FieldOperand(source, i));
5736 __ mov(FieldOperand(result, elements_offset + i), ecx); 5750 __ mov(FieldOperand(result, elements_offset + i), ecx);
5737 } 5751 }
5738 5752
5739 // Copy elements backing store content. 5753 // Copy elements backing store content.
5740 int elements_length = elements->length(); 5754 int elements_length = elements->length();
5741 if (elements->IsFixedDoubleArray()) { 5755 if (elements->IsFixedDoubleArray()) {
5742 Handle<FixedDoubleArray> double_array = 5756 Handle<FixedDoubleArray> double_array =
5743 Handle<FixedDoubleArray>::cast(elements); 5757 Handle<FixedDoubleArray>::cast(elements);
5744 for (int i = 0; i < elements_length; i++) { 5758 for (int i = 0; i < elements_length; i++) {
5745 int64_t value = double_array->get_representation(i); 5759 int64_t value = double_array->get_representation(i);
5746 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF); 5760 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF);
5747 int32_t value_high = static_cast<int32_t>(value >> 32); 5761 int32_t value_high = static_cast<int32_t>(value >> 32);
5748 int total_offset = 5762 int total_offset =
5749 elements_offset + FixedDoubleArray::OffsetOfElementAt(i); 5763 elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
5750 __ mov(FieldOperand(result, total_offset), Immediate(value_low)); 5764 __ mov(FieldOperand(result, total_offset), Immediate(value_low));
5751 __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high)); 5765 __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high));
5752 } 5766 }
5753 } else if (elements->IsFixedArray()) { 5767 } else if (elements->IsFixedArray()) {
5754 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); 5768 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5769 ASSERT(original_object->HasFastObjectElements());
5770 Handle<FixedArray> original_fast_elements =
5771 Handle<FixedArray>::cast(original_elements);
5755 for (int i = 0; i < elements_length; i++) { 5772 for (int i = 0; i < elements_length; i++) {
5756 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); 5773 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
5757 Handle<Object> value(fast_elements->get(i), isolate()); 5774 Handle<Object> value(fast_elements->get(i), isolate());
5758 if (value->IsJSObject()) { 5775 if (value->IsJSObject()) {
5759 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 5776 Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5777 Handle<JSObject> original_value_object = Handle<JSObject>::cast(
5778 Handle<Object>(original_fast_elements->get(i), isolate()));
5760 __ lea(ecx, Operand(result, *offset)); 5779 __ lea(ecx, Operand(result, *offset));
5761 __ mov(FieldOperand(result, total_offset), ecx); 5780 __ mov(FieldOperand(result, total_offset), ecx);
5762 __ LoadHeapObject(source, value_object); 5781 __ LoadHeapObject(source, value_object);
5763 EmitDeepCopy(value_object, result, source, offset, 5782 ASSERT(!value_object.is_identical_to(original_value_object));
5764 DONT_TRACK_ALLOCATION_SITE); 5783 EmitDeepCopy(value_object, original_value_object, result, source,
5784 offset, mode);
5765 } else if (value->IsHeapObject()) { 5785 } else if (value->IsHeapObject()) {
5766 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); 5786 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
5767 __ mov(FieldOperand(result, total_offset), ecx); 5787 __ mov(FieldOperand(result, total_offset), ecx);
5768 } else { 5788 } else {
5769 __ mov(FieldOperand(result, total_offset), Immediate(value)); 5789 __ mov(FieldOperand(result, total_offset), Immediate(value));
5770 } 5790 }
5771 } 5791 }
5772 } else { 5792 } else {
5773 UNREACHABLE(); 5793 UNREACHABLE();
5774 } 5794 }
5775 } 5795 }
5776 } 5796 }
5777 5797
5778 5798
5779 void LCodeGen::DoFastLiteral(LFastLiteral* instr) { 5799 void LCodeGen::FastLiteralHelper(LFastLiteral* instr, AllocationSiteMode mode) {
5780 ASSERT(ToRegister(instr->context()).is(esi)); 5800 int size = mode == DONT_TRACK_ALLOCATION_SITE
5781 int size = instr->hydrogen()->total_size(); 5801 ? instr->hydrogen()->size_without_allocation_sites()
5782 ElementsKind boilerplate_elements_kind = 5802 : instr->hydrogen()->total_size();
5783 instr->hydrogen()->boilerplate()->GetElementsKind();
5784
5785 // Deopt if the literal boilerplate ElementsKind is of a type different than
5786 // the expected one. The check isn't necessary if the boilerplate has
5787 // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
5788 if (CanTransitionToMoreGeneralFastElementsKind(
5789 boilerplate_elements_kind, true)) {
5790 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
5791 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
5792 // Load the map's "bit field 2". We only need the first byte,
5793 // but the following masking takes care of that anyway.
5794 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
5795 // Retrieve elements_kind from bit field 2.
5796 __ and_(ecx, Map::kElementsKindMask);
5797 __ cmp(ecx, boilerplate_elements_kind << Map::kElementsKindShift);
5798 DeoptimizeIf(not_equal, instr->environment());
5799 }
5800 5803
5801 // Allocate all objects that are part of the literal in one big 5804 // Allocate all objects that are part of the literal in one big
5802 // allocation. This avoids multiple limit checks. 5805 // allocation. This avoids multiple limit checks.
5803 Label allocated, runtime_allocate; 5806 Label allocated, runtime_allocate;
5804 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); 5807 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
5805 __ jmp(&allocated); 5808 __ jmp(&allocated);
5806 5809
5807 __ bind(&runtime_allocate); 5810 __ bind(&runtime_allocate);
5808 __ push(Immediate(Smi::FromInt(size))); 5811 __ push(Immediate(Smi::FromInt(size)));
5809 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 5812 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5810 5813
5811 __ bind(&allocated); 5814 __ bind(&allocated);
5812 int offset = 0; 5815 int offset = 0;
5813 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); 5816 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
5814 EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset, 5817 EmitDeepCopy(instr->hydrogen()->boilerplate(),
5815 instr->hydrogen()->allocation_site_mode()); 5818 instr->hydrogen()->original_boilerplate(),
5819 eax, ebx, &offset,
5820 mode);
5816 ASSERT_EQ(size, offset); 5821 ASSERT_EQ(size, offset);
5817 } 5822 }
5818 5823
5819 5824
5825 void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
5826 ASSERT(ToRegister(instr->context()).is(esi));
5827
5828 // TODO(mvstanton): Revisit this heuristic as site info matures.
5829 // If allocation site mode is on, then we need the ability to turn it off
5830 // after "a while." Later, better options should be available, but for
5831 // now just allow a certain number of gcs to pass.
5832 if (instr->hydrogen()->allocation_site_mode() == TRACK_ALLOCATION_SITE) {
5833 // How many gcs have passed?
5834 const int maxCount = 3 + isolate()->heap()->gc_count();
5835 ExternalReference gc_count_address =
5836 ExternalReference::gc_count_address(isolate());
5837 Label continue_using, done;
5838 __ cmp(Operand::StaticVariable(gc_count_address), Immediate(maxCount));
5839 __ j(less, &continue_using);
5840 FastLiteralHelper(instr, DONT_TRACK_ALLOCATION_SITE);
5841 __ jmp(&done);
5842 __ bind(&continue_using);
5843 FastLiteralHelper(instr, TRACK_ALLOCATION_SITE);
5844 __ bind(&done);
5845 } else {
5846 FastLiteralHelper(instr, instr->hydrogen()->allocation_site_mode());
5847 }
5848 }
5849
5850
5820 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { 5851 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
5821 ASSERT(ToRegister(instr->context()).is(esi)); 5852 ASSERT(ToRegister(instr->context()).is(esi));
5822 Handle<FixedArray> literals(instr->environment()->closure()->literals()); 5853 Handle<FixedArray> literals(instr->environment()->closure()->literals());
5823 Handle<FixedArray> constant_properties = 5854 Handle<FixedArray> constant_properties =
5824 instr->hydrogen()->constant_properties(); 5855 instr->hydrogen()->constant_properties();
5825 5856
5826 int flags = instr->hydrogen()->fast_elements() 5857 int flags = instr->hydrogen()->fast_elements()
5827 ? ObjectLiteral::kFastElements 5858 ? ObjectLiteral::kFastElements
5828 : ObjectLiteral::kNoFlags; 5859 : ObjectLiteral::kNoFlags;
5829 flags |= instr->hydrogen()->has_function() 5860 flags |= instr->hydrogen()->has_function()
5830 ? ObjectLiteral::kHasFunction 5861 ? ObjectLiteral::kHasFunction
5831 : ObjectLiteral::kNoFlags; 5862 : ObjectLiteral::kNoFlags;
5832 5863
5864 if (instr->hydrogen()->allocation_site_mode() == TRACK_ALLOCATION_SITE) {
5865 flags |= ObjectLiteral::kCreateAllocationSiteInfos;
5866 }
5867
5833 // Set up the parameters to the stub/runtime call and pick the right 5868 // Set up the parameters to the stub/runtime call and pick the right
5834 // runtime function or stub to call. 5869 // runtime function or stub to call.
5835 int properties_count = constant_properties->length() / 2; 5870 int properties_count = constant_properties->length() / 2;
5836 if (instr->hydrogen()->depth() > 1) { 5871 if (instr->hydrogen()->depth() > 1) {
5837 __ PushHeapObject(literals); 5872 __ PushHeapObject(literals);
5838 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); 5873 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
5839 __ push(Immediate(constant_properties)); 5874 __ push(Immediate(constant_properties));
5840 __ push(Immediate(Smi::FromInt(flags))); 5875 __ push(Immediate(Smi::FromInt(flags)));
5841 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); 5876 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
5842 } else if (flags != ObjectLiteral::kFastElements || 5877 } else if (flags != ObjectLiteral::kFastElements ||
(...skipping 450 matching lines...) Expand 10 before | Expand all | Expand 10 after
6293 FixedArray::kHeaderSize - kPointerSize)); 6328 FixedArray::kHeaderSize - kPointerSize));
6294 __ bind(&done); 6329 __ bind(&done);
6295 } 6330 }
6296 6331
6297 6332
6298 #undef __ 6333 #undef __
6299 6334
6300 } } // namespace v8::internal 6335 } } // namespace v8::internal
6301 6336
6302 #endif // V8_TARGET_ARCH_IA32 6337 #endif // V8_TARGET_ARCH_IA32
OLDNEW
« no previous file with comments | « src/ia32/lithium-codegen-ia32.h ('k') | src/objects.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698