Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 5487 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 5498 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); | 5498 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); |
| 5499 // Load the map's "bit field 2". We only need the first byte, | 5499 // Load the map's "bit field 2". We only need the first byte, |
| 5500 // but the following masking takes care of that anyway. | 5500 // but the following masking takes care of that anyway. |
| 5501 __ mov(ebx, FieldOperand(ebx, Map::kBitField2Offset)); | 5501 __ mov(ebx, FieldOperand(ebx, Map::kBitField2Offset)); |
| 5502 // Retrieve elements_kind from bit field 2. | 5502 // Retrieve elements_kind from bit field 2. |
| 5503 __ and_(ebx, Map::kElementsKindMask); | 5503 __ and_(ebx, Map::kElementsKindMask); |
| 5504 __ cmp(ebx, boilerplate_elements_kind << Map::kElementsKindShift); | 5504 __ cmp(ebx, boilerplate_elements_kind << Map::kElementsKindShift); |
| 5505 DeoptimizeIf(not_equal, instr->environment()); | 5505 DeoptimizeIf(not_equal, instr->environment()); |
| 5506 } | 5506 } |
| 5507 | 5507 |
| 5508 int flags = allocation_site_mode == TRACK_ALLOCATION_SITE | |
| 5509 ? ArrayLiteral::kAllocationSiteInfoAllowed | |
| 5510 : ArrayLiteral::kNoFlags; | |
| 5511 | |
| 5508 // Set up the parameters to the stub/runtime call. | 5512 // Set up the parameters to the stub/runtime call. |
| 5509 __ PushHeapObject(literals); | 5513 __ PushHeapObject(literals); |
| 5510 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 5514 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 5511 // Boilerplate already exists, constant elements are never accessed. | 5515 // Boilerplate already exists, constant elements are never accessed. |
| 5512 // Pass an empty fixed array. | 5516 // Pass an empty fixed array. |
| 5513 __ push(Immediate(isolate()->factory()->empty_fixed_array())); | 5517 __ push(Immediate(isolate()->factory()->empty_fixed_array())); |
| 5514 | 5518 |
| 5515 // Pick the right runtime function or stub to call. | 5519 // Pick the right runtime function or stub to call. |
| 5516 int length = instr->hydrogen()->length(); | 5520 int length = instr->hydrogen()->length(); |
| 5517 if (instr->hydrogen()->IsCopyOnWrite()) { | 5521 if (instr->hydrogen()->IsCopyOnWrite()) { |
| 5518 ASSERT(instr->hydrogen()->depth() == 1); | 5522 ASSERT(instr->hydrogen()->depth() == 1); |
| 5519 FastCloneShallowArrayStub::Mode mode = | 5523 FastCloneShallowArrayStub::Mode mode = |
| 5520 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; | 5524 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
| 5521 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length); | 5525 FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length); |
| 5522 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 5526 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 5523 } else if (instr->hydrogen()->depth() > 1) { | 5527 } else if (instr->hydrogen()->depth() > 1) { |
| 5524 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); | 5528 __ push(Immediate(Smi::FromInt(flags))); |
| 5529 CallRuntime(Runtime::kCreateArrayLiteral, 4, instr); | |
| 5525 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 5530 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
| 5526 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); | 5531 __ push(Immediate(Smi::FromInt(flags))); |
| 5532 CallRuntime(Runtime::kCreateArrayLiteralShallow, 4, instr); | |
| 5527 } else { | 5533 } else { |
| 5528 FastCloneShallowArrayStub::Mode mode = | 5534 FastCloneShallowArrayStub::Mode mode = |
| 5529 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS | 5535 boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS |
| 5530 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS | 5536 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
| 5531 : FastCloneShallowArrayStub::CLONE_ELEMENTS; | 5537 : FastCloneShallowArrayStub::CLONE_ELEMENTS; |
| 5532 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); | 5538 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); |
| 5533 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 5539 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 5534 } | 5540 } |
| 5535 } | 5541 } |
| 5536 | 5542 |
| 5537 | 5543 |
| 5538 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, | 5544 void LCodeGen::EmitDeepCopy(Handle<JSObject> object, |
| 5545 Handle<JSObject> original_object, | |
| 5539 Register result, | 5546 Register result, |
| 5540 Register source, | 5547 Register source, |
| 5541 int* offset, | 5548 int* offset, |
| 5542 AllocationSiteMode mode) { | 5549 AllocationSiteMode mode) { |
| 5543 ASSERT(!source.is(ecx)); | 5550 ASSERT(!source.is(ecx)); |
| 5544 ASSERT(!result.is(ecx)); | 5551 ASSERT(!result.is(ecx)); |
| 5545 | 5552 |
| 5553 // Should we track allocation info for *this* object in the tree? | |
| 5546 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && | 5554 bool create_allocation_site_info = mode == TRACK_ALLOCATION_SITE && |
| 5547 object->map()->CanTrackAllocationSite(); | 5555 object->IsJSArray() && object->ShouldTrackAllocationInfo(); |
| 5548 | 5556 |
| 5549 if (FLAG_debug_code) { | 5557 if (FLAG_debug_code) { |
| 5550 __ LoadHeapObject(ecx, object); | 5558 __ LoadHeapObject(ecx, object); |
| 5551 __ cmp(source, ecx); | 5559 __ cmp(source, ecx); |
| 5552 __ Assert(equal, "Unexpected object literal boilerplate"); | 5560 __ Assert(equal, "Unexpected object literal boilerplate"); |
| 5553 __ mov(ecx, FieldOperand(source, HeapObject::kMapOffset)); | 5561 __ mov(ecx, FieldOperand(source, HeapObject::kMapOffset)); |
| 5554 __ cmp(ecx, Handle<Map>(object->map())); | 5562 __ cmp(ecx, Handle<Map>(object->map())); |
| 5555 __ Assert(equal, "Unexpected boilerplate map"); | 5563 __ Assert(equal, "Unexpected boilerplate map"); |
| 5556 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); | 5564 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); |
| 5557 __ and_(ecx, Map::kElementsKindMask); | 5565 __ and_(ecx, Map::kElementsKindMask); |
| 5558 __ cmp(ecx, object->GetElementsKind() << Map::kElementsKindShift); | 5566 __ cmp(ecx, object->GetElementsKind() << Map::kElementsKindShift); |
| 5559 __ Assert(equal, "Unexpected boilerplate elements kind"); | 5567 __ Assert(equal, "Unexpected boilerplate elements kind"); |
| 5560 } | 5568 } |
| 5561 | 5569 |
| 5562 // Only elements backing stores for non-COW arrays need to be copied. | 5570 // Only elements backing stores for non-COW arrays need to be copied. |
| 5563 Handle<FixedArrayBase> elements(object->elements()); | 5571 Handle<FixedArrayBase> elements(object->elements()); |
| 5572 Handle<FixedArrayBase> original_elements(original_object->elements()); | |
| 5564 bool has_elements = elements->length() > 0 && | 5573 bool has_elements = elements->length() > 0 && |
| 5565 elements->map() != isolate()->heap()->fixed_cow_array_map(); | 5574 elements->map() != isolate()->heap()->fixed_cow_array_map(); |
| 5566 | 5575 |
| 5567 // Increase the offset so that subsequent objects end up right after | 5576 // Increase the offset so that subsequent objects end up right after |
| 5568 // this object and its backing store. | 5577 // this object and its backing store. |
| 5569 int object_offset = *offset; | 5578 int object_offset = *offset; |
| 5570 int object_size = object->map()->instance_size(); | 5579 int object_size = object->map()->instance_size(); |
| 5571 int elements_size = has_elements ? elements->Size() : 0; | 5580 int elements_size = has_elements ? elements->Size() : 0; |
| 5572 int elements_offset = *offset + object_size; | 5581 int elements_offset = *offset + object_size; |
| 5573 if (create_allocation_site_info) { | 5582 if (create_allocation_site_info) { |
| (...skipping 15 matching lines...) Expand all Loading... | |
| 5589 } | 5598 } |
| 5590 __ mov(FieldOperand(result, object_offset + i), ecx); | 5599 __ mov(FieldOperand(result, object_offset + i), ecx); |
| 5591 } | 5600 } |
| 5592 | 5601 |
| 5593 // Copy in-object properties. | 5602 // Copy in-object properties. |
| 5594 for (int i = 0; i < inobject_properties; i++) { | 5603 for (int i = 0; i < inobject_properties; i++) { |
| 5595 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); | 5604 int total_offset = object_offset + object->GetInObjectPropertyOffset(i); |
| 5596 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i)); | 5605 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i)); |
| 5597 if (value->IsJSObject()) { | 5606 if (value->IsJSObject()) { |
| 5598 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | 5607 Handle<JSObject> value_object = Handle<JSObject>::cast(value); |
| 5608 Handle<JSObject> original_value_object = Handle<JSObject>::cast( | |
| 5609 Handle<Object>(original_object->InObjectPropertyAt(i))); | |
| 5610 | |
| 5599 __ lea(ecx, Operand(result, *offset)); | 5611 __ lea(ecx, Operand(result, *offset)); |
| 5600 __ mov(FieldOperand(result, total_offset), ecx); | 5612 __ mov(FieldOperand(result, total_offset), ecx); |
| 5601 __ LoadHeapObject(source, value_object); | 5613 __ LoadHeapObject(source, value_object); |
| 5602 EmitDeepCopy(value_object, result, source, offset, | 5614 EmitDeepCopy(value_object, original_value_object, result, source, |
| 5603 DONT_TRACK_ALLOCATION_SITE); | 5615 offset, mode); |
| 5604 } else if (value->IsHeapObject()) { | 5616 } else if (value->IsHeapObject()) { |
| 5605 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); | 5617 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); |
| 5606 __ mov(FieldOperand(result, total_offset), ecx); | 5618 __ mov(FieldOperand(result, total_offset), ecx); |
| 5607 } else { | 5619 } else { |
| 5608 __ mov(FieldOperand(result, total_offset), Immediate(value)); | 5620 __ mov(FieldOperand(result, total_offset), Immediate(value)); |
| 5609 } | 5621 } |
| 5610 } | 5622 } |
| 5611 | 5623 |
| 5612 // Build Allocation Site Info if desired | 5624 // Build Allocation Site Info if desired |
| 5613 if (create_allocation_site_info) { | 5625 if (create_allocation_site_info) { |
| 5614 __ mov(FieldOperand(result, object_size), | 5626 __ mov(FieldOperand(result, object_size + object_offset), |
| 5615 Immediate(Handle<Map>(isolate()->heap()-> | 5627 Immediate(Handle<Map>(isolate()->heap()-> |
| 5616 allocation_site_info_map()))); | 5628 allocation_site_info_map()))); |
| 5617 __ mov(FieldOperand(result, object_size + kPointerSize), source); | 5629 __ LoadHeapObject(ecx, original_object); |
| 5630 __ mov(FieldOperand(result, object_size + object_offset + kPointerSize), | |
| 5631 ecx); | |
| 5618 } | 5632 } |
| 5619 | 5633 |
| 5620 if (has_elements) { | 5634 if (has_elements) { |
| 5621 // Copy elements backing store header. | 5635 // Copy elements backing store header. |
| 5622 __ LoadHeapObject(source, elements); | 5636 __ LoadHeapObject(source, elements); |
| 5623 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { | 5637 for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) { |
| 5624 __ mov(ecx, FieldOperand(source, i)); | 5638 __ mov(ecx, FieldOperand(source, i)); |
| 5625 __ mov(FieldOperand(result, elements_offset + i), ecx); | 5639 __ mov(FieldOperand(result, elements_offset + i), ecx); |
| 5626 } | 5640 } |
| 5627 | 5641 |
| 5628 // Copy elements backing store content. | 5642 // Copy elements backing store content. |
| 5629 int elements_length = elements->length(); | 5643 int elements_length = elements->length(); |
| 5630 if (elements->IsFixedDoubleArray()) { | 5644 if (elements->IsFixedDoubleArray()) { |
| 5631 Handle<FixedDoubleArray> double_array = | 5645 Handle<FixedDoubleArray> double_array = |
| 5632 Handle<FixedDoubleArray>::cast(elements); | 5646 Handle<FixedDoubleArray>::cast(elements); |
| 5633 for (int i = 0; i < elements_length; i++) { | 5647 for (int i = 0; i < elements_length; i++) { |
| 5634 int64_t value = double_array->get_representation(i); | 5648 int64_t value = double_array->get_representation(i); |
| 5635 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF); | 5649 int32_t value_low = static_cast<int32_t>(value & 0xFFFFFFFF); |
| 5636 int32_t value_high = static_cast<int32_t>(value >> 32); | 5650 int32_t value_high = static_cast<int32_t>(value >> 32); |
| 5637 int total_offset = | 5651 int total_offset = |
| 5638 elements_offset + FixedDoubleArray::OffsetOfElementAt(i); | 5652 elements_offset + FixedDoubleArray::OffsetOfElementAt(i); |
| 5639 __ mov(FieldOperand(result, total_offset), Immediate(value_low)); | 5653 __ mov(FieldOperand(result, total_offset), Immediate(value_low)); |
| 5640 __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high)); | 5654 __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high)); |
| 5641 } | 5655 } |
| 5642 } else if (elements->IsFixedArray()) { | 5656 } else if (elements->IsFixedArray()) { |
| 5643 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); | 5657 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); |
| 5658 ASSERT(original_object->HasFastObjectElements()); | |
| 5659 Handle<FixedArray> original_fast_elements = | |
| 5660 Handle<FixedArray>::cast(original_elements); | |
| 5644 for (int i = 0; i < elements_length; i++) { | 5661 for (int i = 0; i < elements_length; i++) { |
| 5645 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); | 5662 int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i); |
| 5646 Handle<Object> value(fast_elements->get(i)); | 5663 Handle<Object> value(fast_elements->get(i)); |
| 5647 if (value->IsJSObject()) { | 5664 if (value->IsJSObject()) { |
| 5648 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | 5665 Handle<JSObject> value_object = Handle<JSObject>::cast(value); |
| 5666 Handle<JSObject> original_value_object = Handle<JSObject>::cast( | |
| 5667 Handle<Object>(original_fast_elements->get(i))); | |
| 5649 __ lea(ecx, Operand(result, *offset)); | 5668 __ lea(ecx, Operand(result, *offset)); |
| 5650 __ mov(FieldOperand(result, total_offset), ecx); | 5669 __ mov(FieldOperand(result, total_offset), ecx); |
| 5651 __ LoadHeapObject(source, value_object); | 5670 __ LoadHeapObject(source, value_object); |
| 5652 EmitDeepCopy(value_object, result, source, offset, | 5671 ASSERT(!value_object.is_identical_to(original_value_object)); |
| 5653 DONT_TRACK_ALLOCATION_SITE); | 5672 EmitDeepCopy(value_object, original_value_object, result, source, |
| 5673 offset, mode); | |
| 5654 } else if (value->IsHeapObject()) { | 5674 } else if (value->IsHeapObject()) { |
| 5655 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); | 5675 __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value)); |
| 5656 __ mov(FieldOperand(result, total_offset), ecx); | 5676 __ mov(FieldOperand(result, total_offset), ecx); |
| 5657 } else { | 5677 } else { |
| 5658 __ mov(FieldOperand(result, total_offset), Immediate(value)); | 5678 __ mov(FieldOperand(result, total_offset), Immediate(value)); |
| 5659 } | 5679 } |
| 5660 } | 5680 } |
| 5661 } else { | 5681 } else { |
| 5662 UNREACHABLE(); | 5682 UNREACHABLE(); |
| 5663 } | 5683 } |
| 5664 } | 5684 } |
| 5665 } | 5685 } |
| 5666 | 5686 |
| 5667 | 5687 |
| 5668 void LCodeGen::DoFastLiteral(LFastLiteral* instr) { | 5688 void LCodeGen::DoFastLiteral(LFastLiteral* instr) { |
| 5669 ASSERT(ToRegister(instr->context()).is(esi)); | 5689 ASSERT(ToRegister(instr->context()).is(esi)); |
| 5670 int size = instr->hydrogen()->total_size(); | 5690 int size = instr->hydrogen()->total_size(); |
| 5671 ElementsKind boilerplate_elements_kind = | |
| 5672 instr->hydrogen()->boilerplate()->GetElementsKind(); | |
| 5673 | |
| 5674 // Deopt if the literal boilerplate ElementsKind is of a type different than | |
| 5675 // the expected one. The check isn't necessary if the boilerplate has | |
| 5676 // already been converted to TERMINAL_FAST_ELEMENTS_KIND. | |
| 5677 if (CanTransitionToMoreGeneralFastElementsKind( | |
| 5678 boilerplate_elements_kind, true)) { | |
| 5679 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); | |
| 5680 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset)); | |
| 5681 // Load the map's "bit field 2". We only need the first byte, | |
| 5682 // but the following masking takes care of that anyway. | |
| 5683 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); | |
| 5684 // Retrieve elements_kind from bit field 2. | |
| 5685 __ and_(ecx, Map::kElementsKindMask); | |
| 5686 __ cmp(ecx, boilerplate_elements_kind << Map::kElementsKindShift); | |
| 5687 DeoptimizeIf(not_equal, instr->environment()); | |
| 5688 } | |
| 5689 | 5691 |
| 5690 // Allocate all objects that are part of the literal in one big | 5692 // Allocate all objects that are part of the literal in one big |
| 5691 // allocation. This avoids multiple limit checks. | 5693 // allocation. This avoids multiple limit checks. |
| 5692 Label allocated, runtime_allocate; | 5694 Label allocated, runtime_allocate; |
| 5693 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); | 5695 __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT); |
| 5694 __ jmp(&allocated); | 5696 __ jmp(&allocated); |
| 5695 | 5697 |
| 5696 __ bind(&runtime_allocate); | 5698 __ bind(&runtime_allocate); |
| 5697 __ push(Immediate(Smi::FromInt(size))); | 5699 __ push(Immediate(Smi::FromInt(size))); |
| 5698 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5700 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
| 5699 | 5701 |
| 5700 __ bind(&allocated); | 5702 __ bind(&allocated); |
| 5701 int offset = 0; | 5703 int offset = 0; |
| 5702 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); | 5704 __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate()); |
| 5703 EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset, | 5705 EmitDeepCopy(instr->hydrogen()->boilerplate(), |
| 5706 instr->hydrogen()->original_boilerplate(), | |
| 5707 eax, ebx, &offset, | |
| 5704 instr->hydrogen()->allocation_site_mode()); | 5708 instr->hydrogen()->allocation_site_mode()); |
| 5705 ASSERT_EQ(size, offset); | 5709 ASSERT_EQ(size, offset); |
| 5706 } | 5710 } |
| 5707 | 5711 |
| 5708 | 5712 |
| 5709 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 5713 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
| 5710 ASSERT(ToRegister(instr->context()).is(esi)); | 5714 ASSERT(ToRegister(instr->context()).is(esi)); |
| 5711 Handle<FixedArray> literals(instr->environment()->closure()->literals()); | 5715 Handle<FixedArray> literals(instr->environment()->closure()->literals()); |
| 5712 Handle<FixedArray> constant_properties = | 5716 Handle<FixedArray> constant_properties = |
| 5713 instr->hydrogen()->constant_properties(); | 5717 instr->hydrogen()->constant_properties(); |
| 5714 | 5718 |
| 5715 // Set up the parameters to the stub/runtime call. | 5719 // Set up the parameters to the stub/runtime call. |
| 5716 __ PushHeapObject(literals); | 5720 __ PushHeapObject(literals); |
| 5717 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); | 5721 __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 5718 __ push(Immediate(constant_properties)); | 5722 __ push(Immediate(constant_properties)); |
| 5719 int flags = instr->hydrogen()->fast_elements() | 5723 int flags = instr->hydrogen()->fast_elements() |
| 5720 ? ObjectLiteral::kFastElements | 5724 ? ObjectLiteral::kFastElements |
| 5721 : ObjectLiteral::kNoFlags; | 5725 : ObjectLiteral::kNoFlags; |
| 5722 flags |= instr->hydrogen()->has_function() | 5726 flags |= instr->hydrogen()->has_function() |
| 5723 ? ObjectLiteral::kHasFunction | 5727 ? ObjectLiteral::kHasFunction |
| 5724 : ObjectLiteral::kNoFlags; | 5728 : ObjectLiteral::kNoFlags; |
| 5729 | |
| 5730 if (instr->hydrogen()->allocation_site_mode() == TRACK_ALLOCATION_SITE) { | |
|
danno — 2013/02/11 15:05:21: If you check for FLAG_track_allocation_sites in th… [reviewer comment truncated in export]
| |
| 5731 flags |= ObjectLiteral::kAllocationSiteInfoAllowed; | |
| 5732 } | |
| 5733 | |
| 5725 __ push(Immediate(Smi::FromInt(flags))); | 5734 __ push(Immediate(Smi::FromInt(flags))); |
| 5726 | 5735 |
| 5727 // Pick the right runtime function or stub to call. | 5736 // Pick the right runtime function or stub to call. |
| 5728 int properties_count = constant_properties->length() / 2; | 5737 int properties_count = constant_properties->length() / 2; |
| 5729 if (instr->hydrogen()->depth() > 1) { | 5738 if (instr->hydrogen()->depth() > 1) { |
| 5730 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); | 5739 CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); |
| 5731 } else if (flags != ObjectLiteral::kFastElements || | 5740 } else if (flags != ObjectLiteral::kFastElements || |
| 5732 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { | 5741 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { |
| 5733 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); | 5742 CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr); |
| 5734 } else { | 5743 } else { |
| (...skipping 433 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 6168 FixedArray::kHeaderSize - kPointerSize)); | 6177 FixedArray::kHeaderSize - kPointerSize)); |
| 6169 __ bind(&done); | 6178 __ bind(&done); |
| 6170 } | 6179 } |
| 6171 | 6180 |
| 6172 | 6181 |
| 6173 #undef __ | 6182 #undef __ |
| 6174 | 6183 |
| 6175 } } // namespace v8::internal | 6184 } } // namespace v8::internal |
| 6176 | 6185 |
| 6177 #endif // V8_TARGET_ARCH_IA32 | 6186 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |