| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 5548 matching lines...) | |
| 5559 Handle<Map> initial_map(constructor->initial_map()); | 5559 Handle<Map> initial_map(constructor->initial_map()); |
| 5560 int instance_size = initial_map->instance_size(); | 5560 int instance_size = initial_map->instance_size(); |
| 5561 ASSERT(initial_map->pre_allocated_property_fields() + | 5561 ASSERT(initial_map->pre_allocated_property_fields() + |
| 5562 initial_map->unused_property_fields() - | 5562 initial_map->unused_property_fields() - |
| 5563 initial_map->inobject_properties() == 0); | 5563 initial_map->inobject_properties() == 0); |
| 5564 | 5564 |
| 5565 // Allocate memory for the object. The initial map might change when | 5565 // Allocate memory for the object. The initial map might change when |
| 5566 // the constructor's prototype changes, but instance size and property | 5566 // the constructor's prototype changes, but instance size and property |
| 5567 // counts remain unchanged (if slack tracking finished). | 5567 // counts remain unchanged (if slack tracking finished). |
| 5568 ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress()); | 5568 ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress()); |
| 5569 __ AllocateInNewSpace(instance_size, | 5569 __ Allocate(instance_size, result, scratch, scratch2, deferred->entry(), |
| 5570 result, | 5570 TAG_OBJECT); |
| 5571 scratch, | |
| 5572 scratch2, | |
| 5573 deferred->entry(), | |
| 5574 TAG_OBJECT); | |
| 5575 | 5571 |
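
The collapsed call above is the heart of the change: the space-specific `AllocateInNewSpace` helper gives way to a single `Allocate` entry point whose behaviour is controlled by `AllocationFlags`. Below is an annotated copy of the new call; the comments are added here, not part of the CL, and the argument roles are inferred from the surrounding code rather than quoted from the macro assembler's documentation.

```cpp
// Annotated sketch of the replacement call in DoDeferredAllocateObject.
__ Allocate(instance_size,       // byte size; fixed once slack tracking is done
            result,              // register that receives the new object
            scratch, scratch2,   // temporaries the macro may clobber
            deferred->entry(),   // taken when inline allocation fails
            TAG_OBJECT);         // return a tagged HeapObject pointer
```
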
| 5576 __ bind(deferred->exit()); | 5572 __ bind(deferred->exit()); |
| 5577 if (FLAG_debug_code) { | 5573 if (FLAG_debug_code) { |
| 5578 Label is_in_new_space; | 5574 Label is_in_new_space; |
| 5579 __ JumpIfInNewSpace(result, scratch, &is_in_new_space); | 5575 __ JumpIfInNewSpace(result, scratch, &is_in_new_space); |
| 5580 __ Abort("Allocated object is not in new-space"); | 5576 __ Abort("Allocated object is not in new-space"); |
| 5581 __ bind(&is_in_new_space); | 5577 __ bind(&is_in_new_space); |
| 5582 } | 5578 } |
| 5583 | 5579 |
| 5584 // Load the initial map. | 5580 // Load the initial map. |
| (...skipping 54 matching lines...) | |
| 5639 Register scratch = ToRegister(instr->temp1()); | 5635 Register scratch = ToRegister(instr->temp1()); |
| 5640 Register scratch2 = ToRegister(instr->temp2()); | 5636 Register scratch2 = ToRegister(instr->temp2()); |
| 5641 | 5637 |
| 5642 // Allocate memory for the object. | 5638 // Allocate memory for the object. |
| 5643 AllocationFlags flags = TAG_OBJECT; | 5639 AllocationFlags flags = TAG_OBJECT; |
| 5644 if (instr->hydrogen()->MustAllocateDoubleAligned()) { | 5640 if (instr->hydrogen()->MustAllocateDoubleAligned()) { |
| 5645 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); | 5641 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); |
| 5646 } | 5642 } |
| 5647 if (instr->size()->IsConstantOperand()) { | 5643 if (instr->size()->IsConstantOperand()) { |
| 5648 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5644 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
| 5649 __ AllocateInNewSpace(size, | 5645 if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { |
| 5650 result, | 5646 flags = static_cast<AllocationFlags>(flags | PRETENURE); |
| 5651 scratch, | 5647 } |
| 5652 scratch2, | 5648 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags); |
| 5653 deferred->entry(), | |
| 5654 flags); | |
| 5655 } else { | 5649 } else { |
| 5656 Register size = ToRegister(instr->size()); | 5650 Register size = ToRegister(instr->size()); |
| 5657 __ AllocateInNewSpace(size, | 5651 __ AllocateInNewSpace(size, |
| 5658 result, | 5652 result, |
| 5659 scratch, | 5653 scratch, |
| 5660 scratch2, | 5654 scratch2, |
| 5661 deferred->entry(), | 5655 deferred->entry(), |
| 5662 flags); | 5656 flags); |
| 5663 } | 5657 } |
| 5664 | 5658 |
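
The constant-size branch above is where the CL goes beyond a rename: `CanAllocateInOldPointerSpace()` now folds a `PRETENURE` bit into the flags before the single `Allocate` call, while the register-size branch still goes through `AllocateInNewSpace`. The flag plumbing relies on the usual plain-enum OR-and-cast idiom; here is a self-contained sketch of that idiom, where the enumerator values and the stand-in `Allocate` are illustrative, not V8's definitions.

```cpp
#include <cstdio>

// Illustrative stand-in for V8's AllocationFlags; the real enumerators and
// their values live in the macro assembler headers and may differ.
enum AllocationFlags {
  NO_ALLOCATION_FLAGS = 0,
  TAG_OBJECT          = 1 << 0,   // return a tagged pointer
  DOUBLE_ALIGNMENT    = 1 << 1,   // 8-byte-align the allocation
  PRETENURE           = 1 << 2    // allocate in old pointer space
};

// Hypothetical stand-in for the macro-assembler call, just to show the flags.
static void Allocate(int size, AllocationFlags flags) {
  std::printf("allocate %d bytes, flags=0x%x\n", size,
              static_cast<unsigned>(flags));
}

int main() {
  // Plain enums have no operator|, so ORing yields an int that must be cast
  // back explicitly -- the static_cast pattern seen in DoAllocate above.
  AllocationFlags flags = TAG_OBJECT;
  bool must_align = true, pretenure = true;  // stand-ins for the hydrogen bits
  if (must_align) flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
  if (pretenure)  flags = static_cast<AllocationFlags>(flags | PRETENURE);
  Allocate(64, flags);
  return 0;
}
```
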
| (...skipping 212 matching lines...) | |
| 5877 __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset)); | 5871 __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset)); |
| 5878 // Retrieve elements_kind from bit field 2. | 5872 // Retrieve elements_kind from bit field 2. |
| 5879 __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount); | 5873 __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount); |
| 5880 __ cmp(r2, Operand(boilerplate_elements_kind)); | 5874 __ cmp(r2, Operand(boilerplate_elements_kind)); |
| 5881 DeoptimizeIf(ne, instr->environment()); | 5875 DeoptimizeIf(ne, instr->environment()); |
| 5882 } | 5876 } |
| 5883 | 5877 |
| 5884 // Allocate all objects that are part of the literal in one big | 5878 // Allocate all objects that are part of the literal in one big |
| 5885 // allocation. This avoids multiple limit checks. | 5879 // allocation. This avoids multiple limit checks. |
| 5886 Label allocated, runtime_allocate; | 5880 Label allocated, runtime_allocate; |
| 5887 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); | 5881 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); |
| 5888 __ jmp(&allocated); | 5882 __ jmp(&allocated); |
| 5889 | 5883 |
| 5890 __ bind(&runtime_allocate); | 5884 __ bind(&runtime_allocate); |
| 5891 __ mov(r0, Operand(Smi::FromInt(size))); | 5885 __ mov(r0, Operand(Smi::FromInt(size))); |
| 5892 __ push(r0); | 5886 __ push(r0); |
| 5893 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5887 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
| 5894 | 5888 |
| 5895 __ bind(&allocated); | 5889 __ bind(&allocated); |
| 5896 int offset = 0; | 5890 int offset = 0; |
| 5897 __ LoadHeapObject(r1, instr->hydrogen()->boilerplate()); | 5891 __ LoadHeapObject(r1, instr->hydrogen()->boilerplate()); |
| (...skipping 61 matching lines...) | |
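
Both literal sites (the object literal above and the regexp literal below) wrap the new `Allocate` macro in the same fast-path/slow-path shape. A commented copy of that shape follows; the annotations are added here rather than taken from the CL.

```cpp
// Fast path: try to bump-allocate the whole literal inline; a single
// Allocate covers every sub-object, so the heap limit is checked once.
Label allocated, runtime_allocate;
__ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);

// Slow path: inline allocation failed (e.g. new space is full), so pass the
// size to the runtime as a Smi and let it allocate (and GC if needed).
__ bind(&runtime_allocate);
__ mov(r0, Operand(Smi::FromInt(size)));
__ push(r0);
CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);

// Either way, r0 now holds the freshly allocated object.
__ bind(&allocated);
```
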
| 5959 __ mov(r5, Operand(instr->hydrogen()->pattern())); | 5953 __ mov(r5, Operand(instr->hydrogen()->pattern())); |
| 5960 __ mov(r4, Operand(instr->hydrogen()->flags())); | 5954 __ mov(r4, Operand(instr->hydrogen()->flags())); |
| 5961 __ Push(r7, r6, r5, r4); | 5955 __ Push(r7, r6, r5, r4); |
| 5962 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 5956 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); |
| 5963 __ mov(r1, r0); | 5957 __ mov(r1, r0); |
| 5964 | 5958 |
| 5965 __ bind(&materialized); | 5959 __ bind(&materialized); |
| 5966 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 5960 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 5967 Label allocated, runtime_allocate; | 5961 Label allocated, runtime_allocate; |
| 5968 | 5962 |
| 5969 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); | 5963 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); |
| 5970 __ jmp(&allocated); | 5964 __ jmp(&allocated); |
| 5971 | 5965 |
| 5972 __ bind(&runtime_allocate); | 5966 __ bind(&runtime_allocate); |
| 5973 __ mov(r0, Operand(Smi::FromInt(size))); | 5967 __ mov(r0, Operand(Smi::FromInt(size))); |
| 5974 __ Push(r1, r0); | 5968 __ Push(r1, r0); |
| 5975 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5969 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
| 5976 __ pop(r1); | 5970 __ pop(r1); |
| 5977 | 5971 |
| 5978 __ bind(&allocated); | 5972 __ bind(&allocated); |
| 5979 // Copy the content into the newly allocated memory. | 5973 // Copy the content into the newly allocated memory. |
| (...skipping 389 matching lines...) | |
| 6369 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); | 6363 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 6370 __ ldr(result, FieldMemOperand(scratch, | 6364 __ ldr(result, FieldMemOperand(scratch, |
| 6371 FixedArray::kHeaderSize - kPointerSize)); | 6365 FixedArray::kHeaderSize - kPointerSize)); |
| 6372 __ bind(&done); | 6366 __ bind(&done); |
| 6373 } | 6367 } |
| 6374 | 6368 |
| 6375 | 6369 |
| 6376 #undef __ | 6370 #undef __ |
| 6377 | 6371 |
| 6378 } } // namespace v8::internal | 6372 } } // namespace v8::internal |