Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(395)

Side by Side Diff: src/heap.cc

Issue 22925004: Introduce Heap::SelectSpace helper for allocations. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix comment. Created 7 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« src/heap.h ('K') | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 2823 matching lines...) Expand 10 before | Expand all | Expand 10 after
2834 2834
2835 ASSERT(!InNewSpace(empty_fixed_array())); 2835 ASSERT(!InNewSpace(empty_fixed_array()));
2836 return true; 2836 return true;
2837 } 2837 }
2838 2838
2839 2839
2840 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { 2840 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
2841 // Statically ensure that it is safe to allocate heap numbers in paged 2841 // Statically ensure that it is safe to allocate heap numbers in paged
2842 // spaces. 2842 // spaces.
2843 STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize); 2843 STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize);
2844 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 2844 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
Hannes Payer (out of office) 2013/08/20 09:49:19 You can also use the select function here.
Michael Starzinger 2013/09/23 11:35:05 Done.
2845 2845
2846 Object* result; 2846 Object* result;
2847 { MaybeObject* maybe_result = 2847 { MaybeObject* maybe_result =
2848 AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE); 2848 AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
2849 if (!maybe_result->ToObject(&result)) return maybe_result; 2849 if (!maybe_result->ToObject(&result)) return maybe_result;
2850 } 2850 }
2851 2851
2852 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); 2852 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
2853 HeapNumber::cast(result)->set_value(value); 2853 HeapNumber::cast(result)->set_value(value);
2854 return result; 2854 return result;
(...skipping 1147 matching lines...) Expand 10 before | Expand all | Expand 10 after
4002 String* answer = String::cast(result); 4002 String* answer = String::cast(result);
4003 answer->Set(0, code); 4003 answer->Set(0, code);
4004 return answer; 4004 return answer;
4005 } 4005 }
4006 4006
4007 4007
4008 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { 4008 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
4009 if (length < 0 || length > ByteArray::kMaxLength) { 4009 if (length < 0 || length > ByteArray::kMaxLength) {
4010 return Failure::OutOfMemoryException(0x7); 4010 return Failure::OutOfMemoryException(0x7);
4011 } 4011 }
4012 if (pretenure == NOT_TENURED) {
4013 return AllocateByteArray(length);
4014 }
4015 int size = ByteArray::SizeFor(length); 4012 int size = ByteArray::SizeFor(length);
4016 AllocationSpace space = 4013 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
4017 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_DATA_SPACE;
4018 Object* result;
4019 { MaybeObject* maybe_result = AllocateRaw(size, space, space);
4020 if (!maybe_result->ToObject(&result)) return maybe_result;
4021 }
4022
4023 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
4024 byte_array_map());
4025 reinterpret_cast<ByteArray*>(result)->set_length(length);
4026 return result;
4027 }
4028
4029
4030 MaybeObject* Heap::AllocateByteArray(int length) {
4031 if (length < 0 || length > ByteArray::kMaxLength) {
4032 return Failure::OutOfMemoryException(0x8);
4033 }
4034 int size = ByteArray::SizeFor(length);
4035 AllocationSpace space =
4036 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE;
4037 Object* result; 4014 Object* result;
4038 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); 4015 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
4039 if (!maybe_result->ToObject(&result)) return maybe_result; 4016 if (!maybe_result->ToObject(&result)) return maybe_result;
4040 } 4017 }
4041 4018
4042 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier( 4019 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
4043 byte_array_map()); 4020 byte_array_map());
4044 reinterpret_cast<ByteArray*>(result)->set_length(length); 4021 reinterpret_cast<ByteArray*>(result)->set_length(length);
4045 return result; 4022 return result;
4046 } 4023 }
(...skipping 10 matching lines...) Expand all
4057 filler->set_map_no_write_barrier(free_space_map()); 4034 filler->set_map_no_write_barrier(free_space_map());
4058 FreeSpace::cast(filler)->set_size(size); 4035 FreeSpace::cast(filler)->set_size(size);
4059 } 4036 }
4060 } 4037 }
4061 4038
4062 4039
4063 MaybeObject* Heap::AllocateExternalArray(int length, 4040 MaybeObject* Heap::AllocateExternalArray(int length,
4064 ExternalArrayType array_type, 4041 ExternalArrayType array_type,
4065 void* external_pointer, 4042 void* external_pointer,
4066 PretenureFlag pretenure) { 4043 PretenureFlag pretenure) {
4067 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 4044 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
Hannes Payer (out of office) 2013/08/20 09:49:19 You can also use the select function here.
Michael Starzinger 2013/09/23 11:35:05 Done.
4068 Object* result; 4045 Object* result;
4069 { MaybeObject* maybe_result = AllocateRaw(ExternalArray::kAlignedSize, 4046 { MaybeObject* maybe_result = AllocateRaw(ExternalArray::kAlignedSize,
4070 space, 4047 space,
4071 OLD_DATA_SPACE); 4048 OLD_DATA_SPACE);
4072 if (!maybe_result->ToObject(&result)) return maybe_result; 4049 if (!maybe_result->ToObject(&result)) return maybe_result;
4073 } 4050 }
4074 4051
4075 reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier( 4052 reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier(
4076 MapForExternalArrayType(array_type)); 4053 MapForExternalArrayType(array_type));
4077 reinterpret_cast<ExternalArray*>(result)->set_length(length); 4054 reinterpret_cast<ExternalArray*>(result)->set_length(length);
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after
4157 code->Verify(); 4134 code->Verify();
4158 } 4135 }
4159 #endif 4136 #endif
4160 return code; 4137 return code;
4161 } 4138 }
4162 4139
4163 4140
4164 MaybeObject* Heap::CopyCode(Code* code) { 4141 MaybeObject* Heap::CopyCode(Code* code) {
4165 // Allocate an object the same size as the code object. 4142 // Allocate an object the same size as the code object.
4166 int obj_size = code->Size(); 4143 int obj_size = code->Size();
4167 MaybeObject* maybe_result; 4144 MaybeObject* maybe_result;
Hannes Payer (out of office) 2013/08/20 09:49:19 We could also clean up the code allocation with a select function.
Michael Starzinger 2013/09/23 11:35:05 As discussed offline: Let's do this in a separate CL.
4168 if (obj_size > code_space()->AreaSize()) { 4145 if (obj_size > code_space()->AreaSize()) {
4169 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); 4146 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
4170 } else { 4147 } else {
4171 maybe_result = code_space_->AllocateRaw(obj_size); 4148 maybe_result = code_space_->AllocateRaw(obj_size);
4172 } 4149 }
4173 4150
4174 Object* result; 4151 Object* result;
4175 if (!maybe_result->ToObject(&result)) return maybe_result; 4152 if (!maybe_result->ToObject(&result)) return maybe_result;
4176 4153
4177 // Copy code object. 4154 // Copy code object.
(...skipping 192 matching lines...) Expand 10 before | Expand all | Expand 10 after
4370 // duplicate the check here. 4347 // duplicate the check here.
4371 ASSERT(AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); 4348 ASSERT(AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
4372 4349
4373 // Check that the size of the boilerplate matches our 4350 // Check that the size of the boilerplate matches our
4374 // expectations. The ArgumentsAccessStub::GenerateNewObject relies 4351 // expectations. The ArgumentsAccessStub::GenerateNewObject relies
4375 // on the size being a known constant. 4352 // on the size being a known constant.
4376 ASSERT(arguments_object_size == boilerplate->map()->instance_size()); 4353 ASSERT(arguments_object_size == boilerplate->map()->instance_size());
4377 4354
4378 // Do the allocation. 4355 // Do the allocation.
4379 Object* result; 4356 Object* result;
4380 { MaybeObject* maybe_result = 4357 { MaybeObject* maybe_result =
Hannes Payer (out of office) 2013/08/20 09:49:19 We could do a proper space selection here as well.
Michael Starzinger 2013/09/23 11:35:05 As discussed offline: Let's do this in a separate CL.
4381 AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE); 4358 AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE);
4382 if (!maybe_result->ToObject(&result)) return maybe_result; 4359 if (!maybe_result->ToObject(&result)) return maybe_result;
4383 } 4360 }
4384 4361
4385 // Copy the content. The arguments boilerplate doesn't have any 4362 // Copy the content. The arguments boilerplate doesn't have any
4386 // fields that point to new space so it's safe to skip the write 4363 // fields that point to new space so it's safe to skip the write
4387 // barrier here. 4364 // barrier here.
4388 CopyBlock(HeapObject::cast(result)->address(), 4365 CopyBlock(HeapObject::cast(result)->address(),
4389 boilerplate->address(), 4366 boilerplate->address(),
4390 JSObject::kHeaderSize); 4367 JSObject::kHeaderSize);
(...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after
4498 int prop_size = map->InitialPropertiesLength(); 4475 int prop_size = map->InitialPropertiesLength();
4499 ASSERT(prop_size >= 0); 4476 ASSERT(prop_size >= 0);
4500 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); 4477 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure);
4501 if (!maybe_properties->To(&properties)) return maybe_properties; 4478 if (!maybe_properties->To(&properties)) return maybe_properties;
4502 } 4479 }
4503 } else { 4480 } else {
4504 properties = empty_fixed_array(); 4481 properties = empty_fixed_array();
4505 } 4482 }
4506 4483
4507 // Allocate the JSObject. 4484 // Allocate the JSObject.
4508 AllocationSpace space = 4485 int size = map->instance_size();
4509 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; 4486 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
4510 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
4511 Object* obj; 4487 Object* obj;
4512 MaybeObject* maybe_obj = Allocate(map, space); 4488 MaybeObject* maybe_obj = Allocate(map, space);
4513 if (!maybe_obj->To(&obj)) return maybe_obj; 4489 if (!maybe_obj->To(&obj)) return maybe_obj;
4514 4490
4515 // Initialize the JSObject. 4491 // Initialize the JSObject.
4516 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); 4492 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map);
4517 ASSERT(JSObject::cast(obj)->HasFastElements() || 4493 ASSERT(JSObject::cast(obj)->HasFastElements() ||
4518 JSObject::cast(obj)->HasExternalArrayElements()); 4494 JSObject::cast(obj)->HasExternalArrayElements());
4519 return obj; 4495 return obj;
4520 } 4496 }
(...skipping 12 matching lines...) Expand all
4533 4509
4534 // Allocate the backing storage for the properties. 4510 // Allocate the backing storage for the properties.
4535 int prop_size = map->InitialPropertiesLength(); 4511 int prop_size = map->InitialPropertiesLength();
4536 ASSERT(prop_size >= 0); 4512 ASSERT(prop_size >= 0);
4537 FixedArray* properties; 4513 FixedArray* properties;
4538 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size); 4514 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size);
4539 if (!maybe_properties->To(&properties)) return maybe_properties; 4515 if (!maybe_properties->To(&properties)) return maybe_properties;
4540 } 4516 }
4541 4517
4542 // Allocate the JSObject. 4518 // Allocate the JSObject.
4543 AllocationSpace space = NEW_SPACE; 4519 int size = map->instance_size();
4544 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; 4520 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, NOT_TENURED);
4545 Object* obj; 4521 Object* obj;
4546 MaybeObject* maybe_obj = 4522 MaybeObject* maybe_obj =
4547 AllocateWithAllocationSite(map, space, allocation_site); 4523 AllocateWithAllocationSite(map, space, allocation_site);
4548 if (!maybe_obj->To(&obj)) return maybe_obj; 4524 if (!maybe_obj->To(&obj)) return maybe_obj;
4549 4525
4550 // Initialize the JSObject. 4526 // Initialize the JSObject.
4551 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); 4527 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map);
4552 ASSERT(JSObject::cast(obj)->HasFastElements()); 4528 ASSERT(JSObject::cast(obj)->HasFastElements());
4553 return obj; 4529 return obj;
4554 } 4530 }
(...skipping 332 matching lines...) Expand 10 before | Expand all | Expand 10 after
4887 4863
4888 // Make the clone. 4864 // Make the clone.
4889 Map* map = source->map(); 4865 Map* map = source->map();
4890 int object_size = map->instance_size(); 4866 int object_size = map->instance_size();
4891 Object* clone; 4867 Object* clone;
4892 4868
4893 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; 4869 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
4894 4870
4895 // If we're forced to always allocate, we use the general allocation 4871 // If we're forced to always allocate, we use the general allocation
4896 // functions which may leave us with an object in old space. 4872 // functions which may leave us with an object in old space.
4897 if (always_allocate()) { 4873 if (always_allocate()) {
Hannes Payer (out of office) 2013/08/20 09:49:19 You could also use the select function here... but
4898 { MaybeObject* maybe_clone = 4874 { MaybeObject* maybe_clone =
4899 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); 4875 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
4900 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 4876 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4901 } 4877 }
4902 Address clone_address = HeapObject::cast(clone)->address(); 4878 Address clone_address = HeapObject::cast(clone)->address();
4903 CopyBlock(clone_address, 4879 CopyBlock(clone_address,
4904 source->address(), 4880 source->address(),
4905 object_size); 4881 object_size);
4906 // Update write barrier for all fields that lie beyond the header. 4882 // Update write barrier for all fields that lie beyond the header.
4907 RecordWrites(clone_address, 4883 RecordWrites(clone_address,
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after
4965 int object_size = map->instance_size(); 4941 int object_size = map->instance_size();
4966 Object* clone; 4942 Object* clone;
4967 4943
4968 ASSERT(map->CanTrackAllocationSite()); 4944 ASSERT(map->CanTrackAllocationSite());
4969 ASSERT(map->instance_type() == JS_ARRAY_TYPE); 4945 ASSERT(map->instance_type() == JS_ARRAY_TYPE);
4970 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; 4946 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
4971 4947
4972 // If we're forced to always allocate, we use the general allocation 4948 // If we're forced to always allocate, we use the general allocation
4973 // functions which may leave us with an object in old space. 4949 // functions which may leave us with an object in old space.
4974 int adjusted_object_size = object_size; 4950 int adjusted_object_size = object_size;
4975 if (always_allocate()) { 4951 if (always_allocate()) {
Hannes Payer (out of office) 2013/08/20 09:49:19 Likewise.
4976 // We'll only track origin if we are certain to allocate in new space 4952 // We'll only track origin if we are certain to allocate in new space
4977 const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4; 4953 const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4;
4978 if ((object_size + AllocationMemento::kSize) < kMinFreeNewSpaceAfterGC) { 4954 if ((object_size + AllocationMemento::kSize) < kMinFreeNewSpaceAfterGC) {
4979 adjusted_object_size += AllocationMemento::kSize; 4955 adjusted_object_size += AllocationMemento::kSize;
4980 } 4956 }
4981 4957
4982 { MaybeObject* maybe_clone = 4958 { MaybeObject* maybe_clone =
4983 AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE); 4959 AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE);
4984 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 4960 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
4985 } 4961 }
(...skipping 325 matching lines...) Expand 10 before | Expand all | Expand 10 after
5311 } 5287 }
5312 map = ascii_internalized_string_map(); 5288 map = ascii_internalized_string_map();
5313 size = SeqOneByteString::SizeFor(chars); 5289 size = SeqOneByteString::SizeFor(chars);
5314 } else { 5290 } else {
5315 if (chars > SeqTwoByteString::kMaxLength) { 5291 if (chars > SeqTwoByteString::kMaxLength) {
5316 return Failure::OutOfMemoryException(0xa); 5292 return Failure::OutOfMemoryException(0xa);
5317 } 5293 }
5318 map = internalized_string_map(); 5294 map = internalized_string_map();
5319 size = SeqTwoByteString::SizeFor(chars); 5295 size = SeqTwoByteString::SizeFor(chars);
5320 } 5296 }
5297 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
5321 5298
5322 // Allocate string. 5299 // Allocate string.
5323 Object* result; 5300 Object* result;
5324 { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize) 5301 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
5325 ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
5326 : old_data_space_->AllocateRaw(size);
5327 if (!maybe_result->ToObject(&result)) return maybe_result; 5302 if (!maybe_result->ToObject(&result)) return maybe_result;
5328 } 5303 }
5329 5304
5330 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map); 5305 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
5331 // Set length and hash fields of the allocated string. 5306 // Set length and hash fields of the allocated string.
5332 String* answer = String::cast(result); 5307 String* answer = String::cast(result);
5333 answer->set_length(chars); 5308 answer->set_length(chars);
5334 answer->set_hash_field(hash_field); 5309 answer->set_hash_field(hash_field);
5335 5310
5336 ASSERT_EQ(size, answer->Size()); 5311 ASSERT_EQ(size, answer->Size());
(...skipping 18 matching lines...) Expand all
5355 Vector<const char>, int, uint32_t); 5330 Vector<const char>, int, uint32_t);
5356 5331
5357 5332
5358 MaybeObject* Heap::AllocateRawOneByteString(int length, 5333 MaybeObject* Heap::AllocateRawOneByteString(int length,
5359 PretenureFlag pretenure) { 5334 PretenureFlag pretenure) {
5360 if (length < 0 || length > SeqOneByteString::kMaxLength) { 5335 if (length < 0 || length > SeqOneByteString::kMaxLength) {
5361 return Failure::OutOfMemoryException(0xb); 5336 return Failure::OutOfMemoryException(0xb);
5362 } 5337 }
5363 int size = SeqOneByteString::SizeFor(length); 5338 int size = SeqOneByteString::SizeFor(length);
5364 ASSERT(size <= SeqOneByteString::kMaxSize); 5339 ASSERT(size <= SeqOneByteString::kMaxSize);
5365 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 5340 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
5366 AllocationSpace retry_space = OLD_DATA_SPACE;
5367
5368 if (size > Page::kMaxNonCodeHeapObjectSize) {
5369 // Allocate in large object space, retry space will be ignored.
5370 space = LO_SPACE;
5371 }
5372 5341
5373 Object* result; 5342 Object* result;
5374 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); 5343 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
5375 if (!maybe_result->ToObject(&result)) return maybe_result; 5344 if (!maybe_result->ToObject(&result)) return maybe_result;
5376 } 5345 }
5377 5346
5378 // Partially initialize the object. 5347 // Partially initialize the object.
5379 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); 5348 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map());
5380 String::cast(result)->set_length(length); 5349 String::cast(result)->set_length(length);
5381 String::cast(result)->set_hash_field(String::kEmptyHashField); 5350 String::cast(result)->set_hash_field(String::kEmptyHashField);
5382 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 5351 ASSERT_EQ(size, HeapObject::cast(result)->Size());
5383 5352
5384 return result; 5353 return result;
5385 } 5354 }
5386 5355
5387 5356
5388 MaybeObject* Heap::AllocateRawTwoByteString(int length, 5357 MaybeObject* Heap::AllocateRawTwoByteString(int length,
5389 PretenureFlag pretenure) { 5358 PretenureFlag pretenure) {
5390 if (length < 0 || length > SeqTwoByteString::kMaxLength) { 5359 if (length < 0 || length > SeqTwoByteString::kMaxLength) {
5391 return Failure::OutOfMemoryException(0xc); 5360 return Failure::OutOfMemoryException(0xc);
5392 } 5361 }
5393 int size = SeqTwoByteString::SizeFor(length); 5362 int size = SeqTwoByteString::SizeFor(length);
5394 ASSERT(size <= SeqTwoByteString::kMaxSize); 5363 ASSERT(size <= SeqTwoByteString::kMaxSize);
5395 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; 5364 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
5396 AllocationSpace retry_space = OLD_DATA_SPACE;
5397
5398 if (size > Page::kMaxNonCodeHeapObjectSize) {
5399 // Allocate in large object space, retry space will be ignored.
5400 space = LO_SPACE;
5401 }
5402 5365
5403 Object* result; 5366 Object* result;
5404 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); 5367 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
5405 if (!maybe_result->ToObject(&result)) return maybe_result; 5368 if (!maybe_result->ToObject(&result)) return maybe_result;
5406 } 5369 }
5407 5370
5408 // Partially initialize the object. 5371 // Partially initialize the object.
5409 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); 5372 HeapObject::cast(result)->set_map_no_write_barrier(string_map());
5410 String::cast(result)->set_length(length); 5373 String::cast(result)->set_length(length);
5411 String::cast(result)->set_hash_field(String::kEmptyHashField); 5374 String::cast(result)->set_hash_field(String::kEmptyHashField);
5412 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 5375 ASSERT_EQ(size, HeapObject::cast(result)->Size());
5413 return result; 5376 return result;
5414 } 5377 }
(...skipping 26 matching lines...) Expand all
5441 } 5404 }
5442 } 5405 }
5443 return AllocateJSObjectFromMapWithAllocationSite(map, allocation_site); 5406 return AllocateJSObjectFromMapWithAllocationSite(map, allocation_site);
5444 } 5407 }
5445 5408
5446 5409
5447 MaybeObject* Heap::AllocateEmptyFixedArray() { 5410 MaybeObject* Heap::AllocateEmptyFixedArray() {
5448 int size = FixedArray::SizeFor(0); 5411 int size = FixedArray::SizeFor(0);
5449 Object* result; 5412 Object* result;
5450 { MaybeObject* maybe_result = 5413 { MaybeObject* maybe_result =
5451 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); 5414 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
Hannes Payer (out of office) 2013/08/20 09:49:19 For cases like that we could add an AllocateRaw(size, space) wrapper.
Michael Starzinger 2013/09/23 11:35:05 As discussed offline: Let's do this in a separate CL.
5452 if (!maybe_result->ToObject(&result)) return maybe_result; 5415 if (!maybe_result->ToObject(&result)) return maybe_result;
5453 } 5416 }
5454 // Initialize the object. 5417 // Initialize the object.
5455 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier( 5418 reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier(
5456 fixed_array_map()); 5419 fixed_array_map());
5457 reinterpret_cast<FixedArray*>(result)->set_length(0); 5420 reinterpret_cast<FixedArray*>(result)->set_length(0);
5458 return result; 5421 return result;
5459 } 5422 }
5460 5423
5461 5424
5462 MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) { 5425 MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) {
5463 return AllocateExternalArray(0, array_type, NULL, TENURED); 5426 return AllocateExternalArray(0, array_type, NULL, TENURED);
5464 } 5427 }
5465 5428
5466 5429
5467 MaybeObject* Heap::AllocateRawFixedArray(int length) { 5430 MaybeObject* Heap::AllocateRawFixedArray(int length) {
5468 if (length < 0 || length > FixedArray::kMaxLength) { 5431 if (length < 0 || length > FixedArray::kMaxLength) {
5469 return Failure::OutOfMemoryException(0xd); 5432 return Failure::OutOfMemoryException(0xd);
5470 } 5433 }
5471 ASSERT(length > 0); 5434 ASSERT(length > 0);
5472 // Use the general function if we're forced to always allocate. 5435 // Use the general function if we're forced to always allocate.
5473 if (always_allocate()) return AllocateFixedArray(length, TENURED); 5436 if (always_allocate()) return AllocateFixedArray(length, TENURED);
5474 // Allocate the raw data for a fixed array. 5437 // Allocate the raw data for a fixed array.
5475 int size = FixedArray::SizeFor(length); 5438 int size = FixedArray::SizeFor(length);
5476 return size <= Page::kMaxNonCodeHeapObjectSize 5439 return size <= Page::kMaxNonCodeHeapObjectSize
Hannes Payer (out of office) 2013/08/20 09:49:19 Can we merge this function with AllocateRawFixedArray(int length, PretenureFlag pretenure)?
Michael Starzinger 2013/09/23 11:35:05 As discussed offline: Let's do this in a separate CL.
5477 ? new_space_.AllocateRaw(size) 5440 ? new_space_.AllocateRaw(size)
5478 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE); 5441 : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
5479 } 5442 }
5480 5443
5481 5444
5482 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { 5445 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
5483 int len = src->length(); 5446 int len = src->length();
5484 Object* obj; 5447 Object* obj;
5485 { MaybeObject* maybe_obj = AllocateRawFixedArray(len); 5448 { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
5486 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 5449 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
5538 MemsetPointer(array->data_start(), undefined_value(), length); 5501 MemsetPointer(array->data_start(), undefined_value(), length);
5539 return result; 5502 return result;
5540 } 5503 }
5541 5504
5542 5505
5543 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { 5506 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
5544 if (length < 0 || length > FixedArray::kMaxLength) { 5507 if (length < 0 || length > FixedArray::kMaxLength) {
5545 return Failure::OutOfMemoryException(0xe); 5508 return Failure::OutOfMemoryException(0xe);
5546 } 5509 }
5547 int size = FixedArray::SizeFor(length); 5510 int size = FixedArray::SizeFor(length);
5548 AllocationSpace space = 5511 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
5549 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
5550 AllocationSpace retry_space = OLD_POINTER_SPACE;
5551 5512
5552 if (size > Page::kMaxNonCodeHeapObjectSize) { 5513 return AllocateRaw(size, space, OLD_POINTER_SPACE);
5553 // Allocate in large object space, retry space will be ignored.
5554 space = LO_SPACE;
5555 }
5556
5557 return AllocateRaw(size, space, retry_space);
5558 } 5514 }
5559 5515
5560 5516
5561 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( 5517 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
5562 Heap* heap, 5518 Heap* heap,
5563 int length, 5519 int length,
5564 PretenureFlag pretenure, 5520 PretenureFlag pretenure,
5565 Object* filler) { 5521 Object* filler) {
5566 ASSERT(length >= 0); 5522 ASSERT(length >= 0);
5567 ASSERT(heap->empty_fixed_array()->IsFixedArray()); 5523 ASSERT(heap->empty_fixed_array()->IsFixedArray());
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after
5610 fixed_array_map()); 5566 fixed_array_map());
5611 FixedArray::cast(obj)->set_length(length); 5567 FixedArray::cast(obj)->set_length(length);
5612 return obj; 5568 return obj;
5613 } 5569 }
5614 5570
5615 5571
5616 MaybeObject* Heap::AllocateEmptyFixedDoubleArray() { 5572 MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
5617 int size = FixedDoubleArray::SizeFor(0); 5573 int size = FixedDoubleArray::SizeFor(0);
5618 Object* result; 5574 Object* result;
5619 { MaybeObject* maybe_result = 5575 { MaybeObject* maybe_result =
5620 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); 5576 AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
Hannes Payer (out of office) 2013/08/20 09:49:19 here we could use the AllocateRaw(size, space) wrapper.
Michael Starzinger 2013/09/23 11:35:05 As discussed offline: Let's do this in a separate CL.
5621 if (!maybe_result->ToObject(&result)) return maybe_result; 5577 if (!maybe_result->ToObject(&result)) return maybe_result;
5622 } 5578 }
5623 // Initialize the object. 5579 // Initialize the object.
5624 reinterpret_cast<FixedDoubleArray*>(result)->set_map_no_write_barrier( 5580 reinterpret_cast<FixedDoubleArray*>(result)->set_map_no_write_barrier(
5625 fixed_double_array_map()); 5581 fixed_double_array_map());
5626 reinterpret_cast<FixedDoubleArray*>(result)->set_length(0); 5582 reinterpret_cast<FixedDoubleArray*>(result)->set_length(0);
5627 return result; 5583 return result;
5628 } 5584 }
5629 5585
5630 5586
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
5665 return elements; 5621 return elements;
5666 } 5622 }
5667 5623
5668 5624
5669 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, 5625 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
5670 PretenureFlag pretenure) { 5626 PretenureFlag pretenure) {
5671 if (length < 0 || length > FixedDoubleArray::kMaxLength) { 5627 if (length < 0 || length > FixedDoubleArray::kMaxLength) {
5672 return Failure::OutOfMemoryException(0xf); 5628 return Failure::OutOfMemoryException(0xf);
5673 } 5629 }
5674 int size = FixedDoubleArray::SizeFor(length); 5630 int size = FixedDoubleArray::SizeFor(length);
5675 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
5676 AllocationSpace retry_space = OLD_DATA_SPACE;
5677
5678 #ifndef V8_HOST_ARCH_64_BIT 5631 #ifndef V8_HOST_ARCH_64_BIT
5679 size += kPointerSize; 5632 size += kPointerSize;
5680 #endif 5633 #endif
5681 5634 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
5682 if (size > Page::kMaxNonCodeHeapObjectSize) {
5683 // Allocate in large object space, retry space will be ignored.
5684 space = LO_SPACE;
5685 }
5686 5635
5687 HeapObject* object; 5636 HeapObject* object;
5688 { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space); 5637 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE);
5689 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; 5638 if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
5690 } 5639 }
5691 5640
5692 return EnsureDoubleAligned(this, object, size); 5641 return EnsureDoubleAligned(this, object, size);
5693 } 5642 }
5694 5643
5695 5644
5696 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { 5645 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
5697 Object* result; 5646 Object* result;
5698 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); 5647 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
(...skipping 183 matching lines...) Expand 10 before | Expand all | Expand 10 after
5882 switch (type) { 5831 switch (type) {
5883 #define MAKE_CASE(NAME, Name, name) \ 5832 #define MAKE_CASE(NAME, Name, name) \
5884 case NAME##_TYPE: map = name##_map(); break; 5833 case NAME##_TYPE: map = name##_map(); break;
5885 STRUCT_LIST(MAKE_CASE) 5834 STRUCT_LIST(MAKE_CASE)
5886 #undef MAKE_CASE 5835 #undef MAKE_CASE
5887 default: 5836 default:
5888 UNREACHABLE(); 5837 UNREACHABLE();
5889 return Failure::InternalError(); 5838 return Failure::InternalError();
5890 } 5839 }
5891 int size = map->instance_size(); 5840 int size = map->instance_size();
5892 AllocationSpace space = 5841 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
5893 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE;
5894 Object* result; 5842 Object* result;
5895 { MaybeObject* maybe_result = Allocate(map, space); 5843 { MaybeObject* maybe_result = Allocate(map, space);
5896 if (!maybe_result->ToObject(&result)) return maybe_result; 5844 if (!maybe_result->ToObject(&result)) return maybe_result;
5897 } 5845 }
5898 Struct::cast(result)->InitializeBody(size); 5846 Struct::cast(result)->InitializeBody(size);
5899 return result; 5847 return result;
5900 } 5848 }
5901 5849
5902 5850
5903 bool Heap::IsHeapIterable() { 5851 bool Heap::IsHeapIterable() {
(...skipping 2141 matching lines...) Expand 10 before | Expand all | Expand 10 after
8045 if (FLAG_parallel_recompilation) { 7993 if (FLAG_parallel_recompilation) {
8046 heap_->relocation_mutex_->Lock(); 7994 heap_->relocation_mutex_->Lock();
8047 #ifdef DEBUG 7995 #ifdef DEBUG
8048 heap_->relocation_mutex_locked_by_optimizer_thread_ = 7996 heap_->relocation_mutex_locked_by_optimizer_thread_ =
8049 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); 7997 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread();
8050 #endif // DEBUG 7998 #endif // DEBUG
8051 } 7999 }
8052 } 8000 }
8053 8001
8054 } } // namespace v8::internal 8002 } } // namespace v8::internal
OLDNEW
« src/heap.h ('K') | « src/heap.h ('k') | src/heap-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698