OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5335 matching lines...) | |
5346 template | 5346 template |
5347 MaybeObject* Heap::AllocateInternalizedStringImpl<false>( | 5347 MaybeObject* Heap::AllocateInternalizedStringImpl<false>( |
5348 Vector<const char>, int, uint32_t); | 5348 Vector<const char>, int, uint32_t); |
5349 | 5349 |
5350 | 5350 |
5351 MaybeObject* Heap::AllocateRawOneByteString(int length, | 5351 MaybeObject* Heap::AllocateRawOneByteString(int length, |
5352 PretenureFlag pretenure) { | 5352 PretenureFlag pretenure) { |
5353 if (length < 0 || length > SeqOneByteString::kMaxLength) { | 5353 if (length < 0 || length > SeqOneByteString::kMaxLength) { |
5354 return Failure::OutOfMemoryException(0xb); | 5354 return Failure::OutOfMemoryException(0xb); |
5355 } | 5355 } |
5356 | 5356 |
Michael Starzinger
2013/07/22 19:27:50
nit: Let's drop this empty new-line for consistency.
Hannes Payer (out of office)
2013/07/23 20:00:51
Done.
| |
5357 int size = SeqOneByteString::SizeFor(length); | 5357 int size = SeqOneByteString::SizeFor(length); |
5358 ASSERT(size <= SeqOneByteString::kMaxSize); | 5358 ASSERT(size <= SeqOneByteString::kMaxSize); |
5359 | 5359 |
Michael Starzinger
2013/07/22 19:27:50
nit: Let's drop this empty new-line for consistency.
Hannes Payer (out of office)
2013/07/23 20:00:51
Done.
| |
5360 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 5360 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
5361 AllocationSpace retry_space = OLD_DATA_SPACE; | 5361 AllocationSpace retry_space = OLD_DATA_SPACE; |
5362 | 5362 |
5363 if (space == NEW_SPACE) { | 5363 if (size > Page::kMaxNonCodeHeapObjectSize) { |
5364 if (size > kMaxObjectSizeInNewSpace) { | 5364 // Allocate in large object space, retry space will be ignored. |
5365 // Allocate in large object space, retry space will be ignored. | |
5366 space = LO_SPACE; | |
5367 } else if (size > Page::kMaxNonCodeHeapObjectSize) { | |
5368 // Allocate in new space, retry in large object space. | |
5369 retry_space = LO_SPACE; | |
5370 } | |
5371 } else if (space == OLD_DATA_SPACE && | |
5372 size > Page::kMaxNonCodeHeapObjectSize) { | |
5373 space = LO_SPACE; | 5365 space = LO_SPACE; |
5374 } | 5366 } |
5367 | |
5375 Object* result; | 5368 Object* result; |
5376 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 5369 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
5377 if (!maybe_result->ToObject(&result)) return maybe_result; | 5370 if (!maybe_result->ToObject(&result)) return maybe_result; |
5378 } | 5371 } |
5379 | 5372 |
5380 // Partially initialize the object. | 5373 // Partially initialize the object. |
5381 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); | 5374 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); |
5382 String::cast(result)->set_length(length); | 5375 String::cast(result)->set_length(length); |
5383 String::cast(result)->set_hash_field(String::kEmptyHashField); | 5376 String::cast(result)->set_hash_field(String::kEmptyHashField); |
5384 ASSERT_EQ(size, HeapObject::cast(result)->Size()); | 5377 ASSERT_EQ(size, HeapObject::cast(result)->Size()); |
5385 | 5378 |
5386 return result; | 5379 return result; |
5387 } | 5380 } |
5388 | 5381 |
5389 | 5382 |
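The change above collapses the nested space-selection logic: with the new-space object-size limit now equal to Page::kMaxNonCodeHeapObjectSize, a single size check decides whether the allocation goes to large object space, and the retry space only matters when the first attempt stays under that limit. The same pattern recurs in the two-byte string and fixed-array allocators below. A minimal standalone sketch of the decision, using stand-in enums and an illustrative page-limit constant rather than the real V8 declarations, might look like this:

```cpp
#include <cstdio>

// Stand-in declarations; the real ones live in V8's spaces.h / heap.h.
enum AllocationSpace { NEW_SPACE, OLD_DATA_SPACE, LO_SPACE };
enum PretenureFlag { NOT_TENURED, TENURED };
static const int kMaxNonCodeHeapObjectSize = 16 * 1024;  // illustrative value

// Mirrors the simplified flow: pick the preferred space from the pretenure
// flag, then override it with LO_SPACE for oversized objects, in which case
// the retry space is ignored by the allocator.
static void SelectSpaces(int size, PretenureFlag pretenure,
                         AllocationSpace* space, AllocationSpace* retry_space) {
  *space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
  *retry_space = OLD_DATA_SPACE;
  if (size > kMaxNonCodeHeapObjectSize) {
    *space = LO_SPACE;
  }
}

int main() {
  AllocationSpace space, retry;
  SelectSpaces(32 * 1024, NOT_TENURED, &space, &retry);
  std::printf("space=%d retry=%d\n", space, retry);  // LO_SPACE, OLD_DATA_SPACE
  return 0;
}
```

The design point is that LO_SPACE overrides the preferred space for both tenured and non-tenured oversized allocations, so both take the same path and no separate new-space size limit is needed.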
5390 MaybeObject* Heap::AllocateRawTwoByteString(int length, | 5383 MaybeObject* Heap::AllocateRawTwoByteString(int length, |
5391 PretenureFlag pretenure) { | 5384 PretenureFlag pretenure) { |
5392 if (length < 0 || length > SeqTwoByteString::kMaxLength) { | 5385 if (length < 0 || length > SeqTwoByteString::kMaxLength) { |
5393 return Failure::OutOfMemoryException(0xc); | 5386 return Failure::OutOfMemoryException(0xc); |
5394 } | 5387 } |
5395 int size = SeqTwoByteString::SizeFor(length); | 5388 int size = SeqTwoByteString::SizeFor(length); |
5396 ASSERT(size <= SeqTwoByteString::kMaxSize); | 5389 ASSERT(size <= SeqTwoByteString::kMaxSize); |
5397 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 5390 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
5398 AllocationSpace retry_space = OLD_DATA_SPACE; | 5391 AllocationSpace retry_space = OLD_DATA_SPACE; |
5399 | 5392 |
5400 if (space == NEW_SPACE) { | 5393 if (size > Page::kMaxNonCodeHeapObjectSize) { |
5401 if (size > kMaxObjectSizeInNewSpace) { | 5394 // Allocate in large object space, retry space will be ignored. |
5402 // Allocate in large object space, retry space will be ignored. | |
5403 space = LO_SPACE; | |
5404 } else if (size > Page::kMaxNonCodeHeapObjectSize) { | |
5405 // Allocate in new space, retry in large object space. | |
5406 retry_space = LO_SPACE; | |
5407 } | |
5408 } else if (space == OLD_DATA_SPACE && | |
5409 size > Page::kMaxNonCodeHeapObjectSize) { | |
5410 space = LO_SPACE; | 5395 space = LO_SPACE; |
5411 } | 5396 } |
5397 | |
5412 Object* result; | 5398 Object* result; |
5413 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 5399 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); |
5414 if (!maybe_result->ToObject(&result)) return maybe_result; | 5400 if (!maybe_result->ToObject(&result)) return maybe_result; |
5415 } | 5401 } |
5416 | 5402 |
5417 // Partially initialize the object. | 5403 // Partially initialize the object. |
5418 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); | 5404 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); |
5419 String::cast(result)->set_length(length); | 5405 String::cast(result)->set_length(length); |
5420 String::cast(result)->set_hash_field(String::kEmptyHashField); | 5406 String::cast(result)->set_hash_field(String::kEmptyHashField); |
5421 ASSERT_EQ(size, HeapObject::cast(result)->Size()); | 5407 ASSERT_EQ(size, HeapObject::cast(result)->Size()); |
(...skipping 53 matching lines...) | |
5475 | 5461 |
5476 MaybeObject* Heap::AllocateRawFixedArray(int length) { | 5462 MaybeObject* Heap::AllocateRawFixedArray(int length) { |
5477 if (length < 0 || length > FixedArray::kMaxLength) { | 5463 if (length < 0 || length > FixedArray::kMaxLength) { |
5478 return Failure::OutOfMemoryException(0xd); | 5464 return Failure::OutOfMemoryException(0xd); |
5479 } | 5465 } |
5480 ASSERT(length > 0); | 5466 ASSERT(length > 0); |
5481 // Use the general function if we're forced to always allocate. | 5467 // Use the general function if we're forced to always allocate. |
5482 if (always_allocate()) return AllocateFixedArray(length, TENURED); | 5468 if (always_allocate()) return AllocateFixedArray(length, TENURED); |
5483 // Allocate the raw data for a fixed array. | 5469 // Allocate the raw data for a fixed array. |
5484 int size = FixedArray::SizeFor(length); | 5470 int size = FixedArray::SizeFor(length); |
5485 return size <= kMaxObjectSizeInNewSpace | 5471 return size <= Page::kMaxNonCodeHeapObjectSize |
5486 ? new_space_.AllocateRaw(size) | 5472 ? new_space_.AllocateRaw(size) |
5487 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); | 5473 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); |
5488 } | 5474 } |
5489 | 5475 |
5490 | 5476 |
5491 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { | 5477 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { |
5492 int len = src->length(); | 5478 int len = src->length(); |
5493 Object* obj; | 5479 Object* obj; |
5494 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); | 5480 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); |
5495 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 5481 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
(...skipping 50 matching lines...) | |
5546 ASSERT(!InNewSpace(undefined_value())); | 5532 ASSERT(!InNewSpace(undefined_value())); |
5547 MemsetPointer(array->data_start(), undefined_value(), length); | 5533 MemsetPointer(array->data_start(), undefined_value(), length); |
5548 return result; | 5534 return result; |
5549 } | 5535 } |
5550 | 5536 |
5551 | 5537 |
5552 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { | 5538 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { |
5553 if (length < 0 || length > FixedArray::kMaxLength) { | 5539 if (length < 0 || length > FixedArray::kMaxLength) { |
5554 return Failure::OutOfMemoryException(0xe); | 5540 return Failure::OutOfMemoryException(0xe); |
5555 } | 5541 } |
5556 | 5542 |
Michael Starzinger
2013/07/22 19:27:50
nit: Let's drop this empty new-line for consistency.
Hannes Payer (out of office)
2013/07/23 20:00:51
Done.
| |
5543 int size = FixedArray::SizeFor(length); | |
5557 AllocationSpace space = | 5544 AllocationSpace space = |
5558 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; | 5545 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; |
5559 int size = FixedArray::SizeFor(length); | 5546 AllocationSpace retry_space = OLD_POINTER_SPACE; |
5560 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { | 5547 |
5561 // Too big for new space. | 5548 if (size > Page::kMaxNonCodeHeapObjectSize) { |
5562 space = LO_SPACE; | 5549 // Allocate in large object space, retry space will be ignored. |
5563 } else if (space == OLD_POINTER_SPACE && | |
5564 size > Page::kMaxNonCodeHeapObjectSize) { | |
5565 // Too big for old pointer space. | |
5566 space = LO_SPACE; | 5550 space = LO_SPACE; |
5567 } | 5551 } |
5568 | 5552 |
5569 AllocationSpace retry_space = | |
5570 (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE; | |
5571 | |
5572 return AllocateRaw(size, space, retry_space); | 5553 return AllocateRaw(size, space, retry_space); |
5573 } | 5554 } |
5574 | 5555 |
5575 | 5556 |
5576 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( | 5557 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( |
5577 Heap* heap, | 5558 Heap* heap, |
5578 int length, | 5559 int length, |
5579 PretenureFlag pretenure, | 5560 PretenureFlag pretenure, |
5580 Object* filler) { | 5561 Object* filler) { |
5581 ASSERT(length >= 0); | 5562 ASSERT(length >= 0); |
(...skipping 97 matching lines...) | |
5679 elements->set_length(length); | 5660 elements->set_length(length); |
5680 return elements; | 5661 return elements; |
5681 } | 5662 } |
5682 | 5663 |
5683 | 5664 |
5684 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, | 5665 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, |
5685 PretenureFlag pretenure) { | 5666 PretenureFlag pretenure) { |
5686 if (length < 0 || length > FixedDoubleArray::kMaxLength) { | 5667 if (length < 0 || length > FixedDoubleArray::kMaxLength) { |
5687 return Failure::OutOfMemoryException(0xf); | 5668 return Failure::OutOfMemoryException(0xf); |
5688 } | 5669 } |
5689 | 5670 |
Michael Starzinger
2013/07/22 19:27:50
nit: Let's drop this empty new-line for consistency.
Hannes Payer (out of office)
2013/07/23 20:00:51
Done.
| |
5690 AllocationSpace space = | 5671 AllocationSpace space = |
5691 (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 5672 (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; |
Michael Starzinger
2013/07/22 19:27:50
nit: Should fit into one line.
Hannes Payer (out of office)
2013/07/23 20:00:51
Done.
| |
5673 AllocationSpace retry_space = OLD_DATA_SPACE; | |
5674 | |
5692 int size = FixedDoubleArray::SizeFor(length); | 5675 int size = FixedDoubleArray::SizeFor(length); |
Michael Starzinger
2013/07/22 19:27:50
nit: Let's move this line (i.e. the size calculation).
Hannes Payer (out of office)
2013/07/23 20:00:51
Done.
| |
5693 | 5676 |
5694 #ifndef V8_HOST_ARCH_64_BIT | 5677 #ifndef V8_HOST_ARCH_64_BIT |
5695 size += kPointerSize; | 5678 size += kPointerSize; |
5696 #endif | 5679 #endif |
5697 | 5680 |
5698 if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) { | 5681 if (size > Page::kMaxNonCodeHeapObjectSize) { |
5699 // Too big for new space. | 5682 // Allocate in large object space, retry space will be ignored. |
5700 space = LO_SPACE; | |
5701 } else if (space == OLD_DATA_SPACE && | |
5702 size > Page::kMaxNonCodeHeapObjectSize) { | |
5703 // Too big for old data space. | |
5704 space = LO_SPACE; | 5683 space = LO_SPACE; |
5705 } | 5684 } |
5706 | 5685 |
5707 AllocationSpace retry_space = | |
5708 (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE; | |
5709 | |
5710 HeapObject* object; | 5686 HeapObject* object; |
5711 { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space); | 5687 { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space); |
5712 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; | 5688 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
5713 } | 5689 } |
5714 | 5690 |
5715 return EnsureDoubleAligned(this, object, size); | 5691 return EnsureDoubleAligned(this, object, size); |
5716 } | 5692 } |
5717 | 5693 |
5718 | 5694 |
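The `#ifndef V8_HOST_ARCH_64_BIT` branch above reserves one extra pointer-sized word on 32-bit hosts, where the allocator only guarantees word (4-byte) alignment, so that EnsureDoubleAligned can shift the payload to an 8-byte boundary for the double elements. A small arithmetic sketch of that alignment step, using stand-in constants rather than the real V8 declarations, is shown below:

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative stand-in; not the real V8 declarations.
static const uintptr_t kDoubleAlignment = 8;

// Round an address up to the next 8-byte boundary.
static uintptr_t AlignUpToDouble(uintptr_t address) {
  return (address + kDoubleAlignment - 1) & ~(kDoubleAlignment - 1);
}

int main() {
  // A 4-byte-aligned block of (size + 4) bytes always contains an
  // 8-byte-aligned block of the original size; the spare word becomes filler.
  uintptr_t raw = 0x1004;  // word aligned, but not double aligned
  uintptr_t payload = AlignUpToDouble(raw);
  std::printf("raw=%#lx payload=%#lx pad=%lu bytes\n",
              static_cast<unsigned long>(raw),
              static_cast<unsigned long>(payload),
              static_cast<unsigned long>(payload - raw));
  return 0;
}
```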
5719 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { | 5695 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { |
(...skipping 2455 matching lines...) | |
8175 if (FLAG_parallel_recompilation) { | 8151 if (FLAG_parallel_recompilation) { |
8176 heap_->relocation_mutex_->Lock(); | 8152 heap_->relocation_mutex_->Lock(); |
8177 #ifdef DEBUG | 8153 #ifdef DEBUG |
8178 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 8154 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
8179 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 8155 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
8180 #endif // DEBUG | 8156 #endif // DEBUG |
8181 } | 8157 } |
8182 } | 8158 } |
8183 | 8159 |
8184 } } // namespace v8::internal | 8160 } } // namespace v8::internal |