| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 5335 matching lines...) |
| 5346 template | 5346 template |
| 5347 MaybeObject* Heap::AllocateInternalizedStringImpl<false>( | 5347 MaybeObject* Heap::AllocateInternalizedStringImpl<false>( |
| 5348 Vector<const char>, int, uint32_t); | 5348 Vector<const char>, int, uint32_t); |
| 5349 | 5349 |
| 5350 | 5350 |
| 5351 MaybeObject* Heap::AllocateRawOneByteString(int length, | 5351 MaybeObject* Heap::AllocateRawOneByteString(int length, |
| 5352 PretenureFlag pretenure) { | 5352 PretenureFlag pretenure) { |
| 5353 if (length < 0 || length > SeqOneByteString::kMaxLength) { | 5353 if (length < 0 || length > SeqOneByteString::kMaxLength) { |
| 5354 return Failure::OutOfMemoryException(0xb); | 5354 return Failure::OutOfMemoryException(0xb); |
| 5355 } | 5355 } |
| 5356 | |
| 5357 int size = SeqOneByteString::SizeFor(length); | 5356 int size = SeqOneByteString::SizeFor(length); |
| 5358 ASSERT(size <= SeqOneByteString::kMaxSize); | 5357 ASSERT(size <= SeqOneByteString::kMaxSize); |
| 5359 | |
| 5360 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 5358 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 5361 AllocationSpace retry_space = OLD_DATA_SPACE; | 5359 AllocationSpace retry_space = OLD_DATA_SPACE; |
| 5362 | 5360 |
| 5363 if (space == NEW_SPACE) { | 5361 if (size > Page::kMaxNonCodeHeapObjectSize) { |
| 5364 if (size > kMaxObjectSizeInNewSpace) { | 5362 // Allocate in large object space, retry space will be ignored. |
| 5365 // Allocate in large object space, retry space will be ignored. | |
| 5366 space = LO_SPACE; | |
| 5367 } else if (size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5368 // Allocate in new space, retry in large object space. | |
| 5369 retry_space = LO_SPACE; | |
| 5370 } | |
| 5371 } else if (space == OLD_DATA_SPACE && | |
| 5372 size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5373 space = LO_SPACE; | 5363 space = LO_SPACE; |
| 5374 } | 5364 } |
| 5365 |
| 5375 Object* result; | 5366 Object* result; |
| 5376 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 5367 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
| 5377 if (!maybe_result->ToObject(&result)) return maybe_result; | 5368 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5378 } | 5369 } |
| 5379 | 5370 |
| 5380 // Partially initialize the object. | 5371 // Partially initialize the object. |
| 5381 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); | 5372 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); |
| 5382 String::cast(result)->set_length(length); | 5373 String::cast(result)->set_length(length); |
| 5383 String::cast(result)->set_hash_field(String::kEmptyHashField); | 5374 String::cast(result)->set_hash_field(String::kEmptyHashField); |
| 5384 ASSERT_EQ(size, HeapObject::cast(result)->Size()); | 5375 ASSERT_EQ(size, HeapObject::cast(result)->Size()); |
| 5385 | 5376 |
| 5386 return result; | 5377 return result; |
| 5387 } | 5378 } |
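[Note, not part of the CL] Both raw string allocators above now reduce to the same space-selection rule: pretenuring picks the initial space, and a single size check against the per-page object limit overrides it with LO_SPACE, on which path the retry space is ignored. Below is a minimal standalone sketch of that rule; the enum values, struct, and the 16 KB limit are illustrative placeholders, not V8's actual definitions.

```cpp
// Standalone model of the simplified space selection shown above.
// Names and the limit are placeholders, not V8's real constants.
#include <cassert>

enum Space { NEW_SPACE, OLD_DATA_SPACE, LO_SPACE };

struct Choice { Space space; Space retry_space; };

Choice SelectSpace(int size, bool pretenured, int max_page_object_size) {
  Choice c = { pretenured ? OLD_DATA_SPACE : NEW_SPACE, OLD_DATA_SPACE };
  if (size > max_page_object_size) {
    // Too big for a regular page: go straight to large object space;
    // the retry space is ignored on this path.
    c.space = LO_SPACE;
  }
  return c;
}

int main() {
  const int kLimit = 16 * 1024;  // assumed stand-in for the page object limit
  assert(SelectSpace(64, false, kLimit).space == NEW_SPACE);
  assert(SelectSpace(64, true, kLimit).space == OLD_DATA_SPACE);
  assert(SelectSpace(kLimit + 1, false, kLimit).space == LO_SPACE);
  assert(SelectSpace(kLimit + 1, true, kLimit).space == LO_SPACE);
  return 0;
}
```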
| 5388 | 5379 |
| 5389 | 5380 |
| 5390 MaybeObject* Heap::AllocateRawTwoByteString(int length, | 5381 MaybeObject* Heap::AllocateRawTwoByteString(int length, |
| 5391 PretenureFlag pretenure) { | 5382 PretenureFlag pretenure) { |
| 5392 if (length < 0 || length > SeqTwoByteString::kMaxLength) { | 5383 if (length < 0 || length > SeqTwoByteString::kMaxLength) { |
| 5393 return Failure::OutOfMemoryException(0xc); | 5384 return Failure::OutOfMemoryException(0xc); |
| 5394 } | 5385 } |
| 5395 int size = SeqTwoByteString::SizeFor(length); | 5386 int size = SeqTwoByteString::SizeFor(length); |
| 5396 ASSERT(size <= SeqTwoByteString::kMaxSize); | 5387 ASSERT(size <= SeqTwoByteString::kMaxSize); |
| 5397 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 5388 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 5398 AllocationSpace retry_space = OLD_DATA_SPACE; | 5389 AllocationSpace retry_space = OLD_DATA_SPACE; |
| 5399 | 5390 |
| 5400 if (space == NEW_SPACE) { | 5391 if (size > Page::kMaxNonCodeHeapObjectSize) { |
| 5401 if (size > kMaxObjectSizeInNewSpace) { | 5392 // Allocate in large object space, retry space will be ignored. |
| 5402 // Allocate in large object space, retry space will be ignored. | |
| 5403 space = LO_SPACE; | |
| 5404 } else if (size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5405 // Allocate in new space, retry in large object space. | |
| 5406 retry_space = LO_SPACE; | |
| 5407 } | |
| 5408 } else if (space == OLD_DATA_SPACE && | |
| 5409 size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5410 space = LO_SPACE; | 5393 space = LO_SPACE; |
| 5411 } | 5394 } |
| 5395 |
| 5412 Object* result; | 5396 Object* result; |
| 5413 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 5397 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
| 5414 if (!maybe_result->ToObject(&result)) return maybe_result; | 5398 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5415 } | 5399 } |
| 5416 | 5400 |
| 5417 // Partially initialize the object. | 5401 // Partially initialize the object. |
| 5418 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); | 5402 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); |
| 5419 String::cast(result)->set_length(length); | 5403 String::cast(result)->set_length(length); |
| 5420 String::cast(result)->set_hash_field(String::kEmptyHashField); | 5404 String::cast(result)->set_hash_field(String::kEmptyHashField); |
| 5421 ASSERT_EQ(size, HeapObject::cast(result)->Size()); | 5405 ASSERT_EQ(size, HeapObject::cast(result)->Size()); |
| (...skipping 53 matching lines...) |
| 5475 | 5459 |
| 5476 MaybeObject* Heap::AllocateRawFixedArray(int length) { | 5460 MaybeObject* Heap::AllocateRawFixedArray(int length) { |
| 5477 if (length < 0 || length > FixedArray::kMaxLength) { | 5461 if (length < 0 || length > FixedArray::kMaxLength) { |
| 5478 return Failure::OutOfMemoryException(0xd); | 5462 return Failure::OutOfMemoryException(0xd); |
| 5479 } | 5463 } |
| 5480 ASSERT(length > 0); | 5464 ASSERT(length > 0); |
| 5481 // Use the general function if we're forced to always allocate. | 5465 // Use the general function if we're forced to always allocate. |
| 5482 if (always_allocate()) return AllocateFixedArray(length, TENURED); | 5466 if (always_allocate()) return AllocateFixedArray(length, TENURED); |
| 5483 // Allocate the raw data for a fixed array. | 5467 // Allocate the raw data for a fixed array. |
| 5484 int size = FixedArray::SizeFor(length); | 5468 int size = FixedArray::SizeFor(length); |
| 5485 return size <= kMaxObjectSizeInNewSpace | 5469 return size <= Page::kMaxNonCodeHeapObjectSize |
| 5486 ? new_space_.AllocateRaw(size) | 5470 ? new_space_.AllocateRaw(size) |
| 5487 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); | 5471 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); |
| 5488 } | 5472 } |
| 5489 | 5473 |
| 5490 | 5474 |
| 5491 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { | 5475 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { |
| 5492 int len = src->length(); | 5476 int len = src->length(); |
| 5493 Object* obj; | 5477 Object* obj; |
| 5494 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); | 5478 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); |
| 5495 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 5479 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| (...skipping 50 matching lines...) |
| 5546 ASSERT(!InNewSpace(undefined_value())); | 5530 ASSERT(!InNewSpace(undefined_value())); |
| 5547 MemsetPointer(array->data_start(), undefined_value(), length); | 5531 MemsetPointer(array->data_start(), undefined_value(), length); |
| 5548 return result; | 5532 return result; |
| 5549 } | 5533 } |
| 5550 | 5534 |
| 5551 | 5535 |
| 5552 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { | 5536 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { |
| 5553 if (length < 0 || length > FixedArray::kMaxLength) { | 5537 if (length < 0 || length > FixedArray::kMaxLength) { |
| 5554 return Failure::OutOfMemoryException(0xe); | 5538 return Failure::OutOfMemoryException(0xe); |
| 5555 } | 5539 } |
| 5556 | 5540 int size = FixedArray::SizeFor(length); |
| 5557 AllocationSpace space = | 5541 AllocationSpace space = |
| 5558 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; | 5542 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; |
| 5559 int size = FixedArray::SizeFor(length); | 5543 AllocationSpace retry_space = OLD_POINTER_SPACE; |
| 5560 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { | 5544 |
| 5561 // Too big for new space. | 5545 if (size > Page::kMaxNonCodeHeapObjectSize) { |
| 5562 space = LO_SPACE; | 5546 // Allocate in large object space, retry space will be ignored. |
| 5563 } else if (space == OLD_POINTER_SPACE && | |
| 5564 size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5565 // Too big for old pointer space. | |
| 5566 space = LO_SPACE; | 5547 space = LO_SPACE; |
| 5567 } | 5548 } |
| 5568 | 5549 |
| 5569 AllocationSpace retry_space = | |
| 5570 (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE; | |
| 5571 | |
| 5572 return AllocateRaw(size, space, retry_space); | 5550 return AllocateRaw(size, space, retry_space); |
| 5573 } | 5551 } |
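[Note, not part of the CL] For a rough sense of where the LO_SPACE cutoff lands for fixed arrays: FixedArray::SizeFor is essentially a fixed header plus length times the pointer size, so the largest regular-page length is just the limit minus the header, divided by the pointer width. The header size, pointer size, and limit below are assumptions chosen for illustration, not the real V8 values.

```cpp
// Back-of-envelope arithmetic only; all constants are placeholders.
#include <cstdio>

int MaxRegularLength(int limit_bytes, int header_bytes, int pointer_bytes) {
  return (limit_bytes - header_bytes) / pointer_bytes;
}

int main() {
  // e.g. 4-byte pointers, a two-word header, and a 16 KB limit -> 4094 elements
  std::printf("%d\n", MaxRegularLength(16 * 1024, 8, 4));
  return 0;
}
```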
| 5574 | 5552 |
| 5575 | 5553 |
| 5576 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( | 5554 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( |
| 5577 Heap* heap, | 5555 Heap* heap, |
| 5578 int length, | 5556 int length, |
| 5579 PretenureFlag pretenure, | 5557 PretenureFlag pretenure, |
| 5580 Object* filler) { | 5558 Object* filler) { |
| 5581 ASSERT(length >= 0); | 5559 ASSERT(length >= 0); |
| (...skipping 97 matching lines...) |
| 5679 elements->set_length(length); | 5657 elements->set_length(length); |
| 5680 return elements; | 5658 return elements; |
| 5681 } | 5659 } |
| 5682 | 5660 |
| 5683 | 5661 |
| 5684 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, | 5662 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, |
| 5685 PretenureFlag pretenure) { | 5663 PretenureFlag pretenure) { |
| 5686 if (length < 0 || length > FixedDoubleArray::kMaxLength) { | 5664 if (length < 0 || length > FixedDoubleArray::kMaxLength) { |
| 5687 return Failure::OutOfMemoryException(0xf); | 5665 return Failure::OutOfMemoryException(0xf); |
| 5688 } | 5666 } |
| 5689 | |
| 5690 AllocationSpace space = | |
| 5691 (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | |
| 5692 int size = FixedDoubleArray::SizeFor(length); | 5667 int size = FixedDoubleArray::SizeFor(length); |
| 5668 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
| 5669 AllocationSpace retry_space = OLD_DATA_SPACE; |
| 5693 | 5670 |
| 5694 #ifndef V8_HOST_ARCH_64_BIT | 5671 #ifndef V8_HOST_ARCH_64_BIT |
| 5695 size += kPointerSize; | 5672 size += kPointerSize; |
| 5696 #endif | 5673 #endif |
| 5697 | 5674 |
| 5698 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { | 5675 if (size > Page::kMaxNonCodeHeapObjectSize) { |
| 5699 // Too big for new space. | 5676 // Allocate in large object space, retry space will be ignored. |
| 5700 space = LO_SPACE; | |
| 5701 } else if (space == OLD_DATA_SPACE && | |
| 5702 size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5703 // Too big for old data space. | |
| 5704 space = LO_SPACE; | 5677 space = LO_SPACE; |
| 5705 } | 5678 } |
| 5706 | 5679 |
| 5707 AllocationSpace retry_space = | |
| 5708 (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE; | |
| 5709 | |
| 5710 HeapObject* object; | 5680 HeapObject* object; |
| 5711 { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space); | 5681 { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space); |
| 5712 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; | 5682 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
| 5713 } | 5683 } |
| 5714 | 5684 |
| 5715 return EnsureDoubleAligned(this, object, size); | 5685 return EnsureDoubleAligned(this, object, size); |
| 5716 } | 5686 } |
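[Note, not part of the CL] The extra kPointerSize reserved under #ifndef V8_HOST_ARCH_64_BIT exists because a 32-bit host only guarantees word alignment for the raw allocation, while the double payload wants 8-byte alignment; the spare word lets the object start be bumped forward when needed. A standalone sketch of that adjustment follows; the word size, alignment handling, and addresses are made up for the example and are not the EnsureDoubleAligned implementation.

```cpp
// Illustration of the one-word padding idea on an assumed 32-bit host.
#include <cassert>
#include <cstdint>

const uintptr_t kWordSize = 4;         // assumed 32-bit host word size
const uintptr_t kDoubleAlignment = 8;  // alignment wanted for the doubles

// Given a raw start that is word aligned but possibly not double aligned,
// return the start to actually use within a region padded by one word.
uintptr_t DoubleAlignedStart(uintptr_t raw_start) {
  return (raw_start % kDoubleAlignment == 0) ? raw_start
                                             : raw_start + kWordSize;
}

int main() {
  assert(DoubleAlignedStart(0x1000) == 0x1000);  // already 8-byte aligned
  assert(DoubleAlignedStart(0x1004) == 0x1008);  // bumped by one word
  return 0;
}
```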
| 5717 | 5687 |
| 5718 | 5688 |
| 5719 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { | 5689 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { |
| (...skipping 2455 matching lines...) |
| 8175 if (FLAG_parallel_recompilation) { | 8145 if (FLAG_parallel_recompilation) { |
| 8176 heap_->relocation_mutex_->Lock(); | 8146 heap_->relocation_mutex_->Lock(); |
| 8177 #ifdef DEBUG | 8147 #ifdef DEBUG |
| 8178 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8148 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 8179 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8149 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 8180 #endif // DEBUG | 8150 #endif // DEBUG |
| 8181 } | 8151 } |
| 8182 } | 8152 } |
| 8183 | 8153 |
| 8184 } } // namespace v8::internal | 8154 } } // namespace v8::internal |