OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_INL_H_ | 5 #ifndef V8_HEAP_INL_H_ |
6 #define V8_HEAP_INL_H_ | 6 #define V8_HEAP_INL_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 | 9 |
10 #include "heap.h" | 10 #include "heap.h" |
(...skipping 58 matching lines...) | |
69 return chars == str.length(); | 69 return chars == str.length(); |
70 } | 70 } |
71 | 71 |
72 | 72 |
73 template<> | 73 template<> |
74 bool inline Heap::IsOneByte(String* str, int chars) { | 74 bool inline Heap::IsOneByte(String* str, int chars) { |
75 return str->IsOneByteRepresentation(); | 75 return str->IsOneByteRepresentation(); |
76 } | 76 } |
77 | 77 |
78 | 78 |
79 MaybeObject* Heap::AllocateInternalizedStringFromUtf8( | 79 AllocationResult Heap::AllocateInternalizedStringFromUtf8( |
80 Vector<const char> str, int chars, uint32_t hash_field) { | 80 Vector<const char> str, int chars, uint32_t hash_field) { |
81 if (IsOneByte(str, chars)) { | 81 if (IsOneByte(str, chars)) { |
82 return AllocateOneByteInternalizedString( | 82 return AllocateOneByteInternalizedString( |
83 Vector<const uint8_t>::cast(str), hash_field); | 83 Vector<const uint8_t>::cast(str), hash_field); |
84 } | 84 } |
85 return AllocateInternalizedStringImpl<false>(str, chars, hash_field); | 85 return AllocateInternalizedStringImpl<false>(str, chars, hash_field); |
86 } | 86 } |
87 | 87 |
88 | 88 |
89 template<typename T> | 89 template<typename T> |
90 MaybeObject* Heap::AllocateInternalizedStringImpl( | 90 AllocationResult Heap::AllocateInternalizedStringImpl( |
91 T t, int chars, uint32_t hash_field) { | 91 T t, int chars, uint32_t hash_field) { |
92 if (IsOneByte(t, chars)) { | 92 if (IsOneByte(t, chars)) { |
93 return AllocateInternalizedStringImpl<true>(t, chars, hash_field); | 93 return AllocateInternalizedStringImpl<true>(t, chars, hash_field); |
94 } | 94 } |
95 return AllocateInternalizedStringImpl<false>(t, chars, hash_field); | 95 return AllocateInternalizedStringImpl<false>(t, chars, hash_field); |
96 } | 96 } |
97 | 97 |
98 | 98 |
99 MaybeObject* Heap::AllocateOneByteInternalizedString(Vector<const uint8_t> str, | 99 AllocationResult Heap::AllocateOneByteInternalizedString( |
100 uint32_t hash_field) { | 100 Vector<const uint8_t> str, |
101 uint32_t hash_field) { | |
101 if (str.length() > String::kMaxLength) { | 102 if (str.length() > String::kMaxLength) { |
102 return isolate()->ThrowInvalidStringLength(); | 103 return isolate()->ThrowInvalidStringLength(); |
103 } | 104 } |
104 // Compute map and object size. | 105 // Compute map and object size. |
105 Map* map = ascii_internalized_string_map(); | 106 Map* map = ascii_internalized_string_map(); |
106 int size = SeqOneByteString::SizeFor(str.length()); | 107 int size = SeqOneByteString::SizeFor(str.length()); |
107 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); | 108 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); |
108 | 109 |
109 // Allocate string. | 110 // Allocate string. |
110 Object* result; | 111 HeapObject* result; |
111 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); | 112 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
112 if (!maybe_result->ToObject(&result)) return maybe_result; | 113 if (!allocation.To(&result)) return allocation; |
113 } | 114 } |
114 | 115 |
115 // String maps are all immortal immovable objects. | 116 // String maps are all immortal immovable objects. |
116 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map); | 117 result->set_map_no_write_barrier(map); |
117 // Set length and hash fields of the allocated string. | 118 // Set length and hash fields of the allocated string. |
118 String* answer = String::cast(result); | 119 String* answer = String::cast(result); |
119 answer->set_length(str.length()); | 120 answer->set_length(str.length()); |
120 answer->set_hash_field(hash_field); | 121 answer->set_hash_field(hash_field); |
121 | 122 |
122 ASSERT_EQ(size, answer->Size()); | 123 ASSERT_EQ(size, answer->Size()); |
123 | 124 |
124 // Fill in the characters. | 125 // Fill in the characters. |
125 OS::MemCopy(answer->address() + SeqOneByteString::kHeaderSize, | 126 OS::MemCopy(answer->address() + SeqOneByteString::kHeaderSize, |
126 str.start(), str.length()); | 127 str.start(), str.length()); |
127 | 128 |
128 return answer; | 129 return answer; |
129 } | 130 } |
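The `if (!allocation.To(&result)) return allocation;` idiom above is the heart of this conversion: on success, To() stores the HeapObject* and returns true; on failure, the retry value propagates unchanged to the caller. A minimal caller-side sketch, using only members exercised in this file (the wrapper function and its name are hypothetical, not part of this CL):

    // Hypothetical caller; AllocateOneByteInternalizedString is the real
    // function above, the wrapper itself is illustrative only.
    AllocationResult MakeInternalizedName(Heap* heap,
                                          Vector<const uint8_t> chars,
                                          uint32_t hash_field) {
      HeapObject* obj;
      AllocationResult allocation =
          heap->AllocateOneByteInternalizedString(chars, hash_field);
      if (!allocation.To(&obj)) return allocation;  // still a retry: pass it up
      return String::cast(obj);  // implicit Object* -> AllocationResult
    }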
130 | 131 |
131 | 132 |
132 MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str, | 133 AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str, |
133 uint32_t hash_field) { | 134 uint32_t hash_field) { |
Hannes Payer (out of office) 2014/04/30 07:07:07
indent
| |
134 if (str.length() > String::kMaxLength) { | 135 if (str.length() > String::kMaxLength) { |
135 return isolate()->ThrowInvalidStringLength(); | 136 return isolate()->ThrowInvalidStringLength(); |
136 } | 137 } |
137 // Compute map and object size. | 138 // Compute map and object size. |
138 Map* map = internalized_string_map(); | 139 Map* map = internalized_string_map(); |
139 int size = SeqTwoByteString::SizeFor(str.length()); | 140 int size = SeqTwoByteString::SizeFor(str.length()); |
140 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); | 141 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); |
141 | 142 |
142 // Allocate string. | 143 // Allocate string. |
143 Object* result; | 144 HeapObject* result; |
144 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); | 145 { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); |
145 if (!maybe_result->ToObject(&result)) return maybe_result; | 146 if (!allocation.To(&result)) return allocation; |
146 } | 147 } |
147 | 148 |
148 reinterpret_cast<HeapObject*>(result)->set_map(map); | 149 result->set_map(map); |
149 // Set length and hash fields of the allocated string. | 150 // Set length and hash fields of the allocated string. |
150 String* answer = String::cast(result); | 151 String* answer = String::cast(result); |
151 answer->set_length(str.length()); | 152 answer->set_length(str.length()); |
152 answer->set_hash_field(hash_field); | 153 answer->set_hash_field(hash_field); |
153 | 154 |
154 ASSERT_EQ(size, answer->Size()); | 155 ASSERT_EQ(size, answer->Size()); |
155 | 156 |
156 // Fill in the characters. | 157 // Fill in the characters. |
157 OS::MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, | 158 OS::MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, |
158 str.start(), str.length() * kUC16Size); | 159 str.start(), str.length() * kUC16Size); |
159 | 160 |
160 return answer; | 161 return answer; |
161 } | 162 } |
162 | 163 |
163 MaybeObject* Heap::CopyFixedArray(FixedArray* src) { | 164 AllocationResult Heap::CopyFixedArray(FixedArray* src) { |
164 if (src->length() == 0) return src; | 165 if (src->length() == 0) return src; |
165 return CopyFixedArrayWithMap(src, src->map()); | 166 return CopyFixedArrayWithMap(src, src->map()); |
166 } | 167 } |
167 | 168 |
168 | 169 |
169 MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { | 170 AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { |
170 if (src->length() == 0) return src; | 171 if (src->length() == 0) return src; |
171 return CopyFixedDoubleArrayWithMap(src, src->map()); | 172 return CopyFixedDoubleArrayWithMap(src, src->map()); |
172 } | 173 } |
173 | 174 |
174 | 175 |
175 MaybeObject* Heap::CopyConstantPoolArray(ConstantPoolArray* src) { | 176 AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { |
176 if (src->length() == 0) return src; | 177 if (src->length() == 0) return src; |
177 return CopyConstantPoolArrayWithMap(src, src->map()); | 178 return CopyConstantPoolArrayWithMap(src, src->map()); |
178 } | 179 } |
179 | 180 |
180 | 181 |
181 MaybeObject* Heap::AllocateRaw(int size_in_bytes, | 182 AllocationResult Heap::AllocateRaw(int size_in_bytes, |
182 AllocationSpace space, | 183 AllocationSpace space, |
183 AllocationSpace retry_space) { | 184 AllocationSpace retry_space) { |
184 ASSERT(AllowHandleAllocation::IsAllowed()); | 185 ASSERT(AllowHandleAllocation::IsAllowed()); |
185 ASSERT(AllowHeapAllocation::IsAllowed()); | 186 ASSERT(AllowHeapAllocation::IsAllowed()); |
186 ASSERT(gc_state_ == NOT_IN_GC); | 187 ASSERT(gc_state_ == NOT_IN_GC); |
187 HeapProfiler* profiler = isolate_->heap_profiler(); | 188 HeapProfiler* profiler = isolate_->heap_profiler(); |
188 #ifdef DEBUG | 189 #ifdef DEBUG |
189 if (FLAG_gc_interval >= 0 && | 190 if (FLAG_gc_interval >= 0 && |
190 AllowAllocationFailure::IsAllowed(isolate_) && | 191 AllowAllocationFailure::IsAllowed(isolate_) && |
191 Heap::allocation_timeout_-- <= 0) { | 192 Heap::allocation_timeout_-- <= 0) { |
192 return Failure::RetryAfterGC(space); | 193 return AllocationResult::Retry(space); |
193 } | 194 } |
194 isolate_->counters()->objs_since_last_full()->Increment(); | 195 isolate_->counters()->objs_since_last_full()->Increment(); |
195 isolate_->counters()->objs_since_last_young()->Increment(); | 196 isolate_->counters()->objs_since_last_young()->Increment(); |
196 #endif | 197 #endif |
197 | 198 |
198 HeapObject* object; | 199 HeapObject* object; |
199 MaybeObject* result; | 200 AllocationResult allocation; |
200 if (NEW_SPACE == space) { | 201 if (NEW_SPACE == space) { |
201 result = new_space_.AllocateRaw(size_in_bytes); | 202 allocation = new_space_.AllocateRaw(size_in_bytes); |
202 if (always_allocate() && result->IsFailure() && retry_space != NEW_SPACE) { | 203 if (always_allocate() && |
204 allocation.IsRetry() && | |
205 retry_space != NEW_SPACE) { | |
203 space = retry_space; | 206 space = retry_space; |
204 } else { | 207 } else { |
205 if (profiler->is_tracking_allocations() && result->To(&object)) { | 208 if (profiler->is_tracking_allocations() && allocation.To(&object)) { |
206 profiler->AllocationEvent(object->address(), size_in_bytes); | 209 profiler->AllocationEvent(object->address(), size_in_bytes); |
207 } | 210 } |
208 return result; | 211 return allocation; |
209 } | 212 } |
210 } | 213 } |
211 | 214 |
212 if (OLD_POINTER_SPACE == space) { | 215 if (OLD_POINTER_SPACE == space) { |
213 result = old_pointer_space_->AllocateRaw(size_in_bytes); | 216 allocation = old_pointer_space_->AllocateRaw(size_in_bytes); |
214 } else if (OLD_DATA_SPACE == space) { | 217 } else if (OLD_DATA_SPACE == space) { |
215 result = old_data_space_->AllocateRaw(size_in_bytes); | 218 allocation = old_data_space_->AllocateRaw(size_in_bytes); |
216 } else if (CODE_SPACE == space) { | 219 } else if (CODE_SPACE == space) { |
217 result = code_space_->AllocateRaw(size_in_bytes); | 220 allocation = code_space_->AllocateRaw(size_in_bytes); |
218 } else if (LO_SPACE == space) { | 221 } else if (LO_SPACE == space) { |
219 result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); | 222 allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); |
220 } else if (CELL_SPACE == space) { | 223 } else if (CELL_SPACE == space) { |
221 result = cell_space_->AllocateRaw(size_in_bytes); | 224 allocation = cell_space_->AllocateRaw(size_in_bytes); |
222 } else if (PROPERTY_CELL_SPACE == space) { | 225 } else if (PROPERTY_CELL_SPACE == space) { |
223 result = property_cell_space_->AllocateRaw(size_in_bytes); | 226 allocation = property_cell_space_->AllocateRaw(size_in_bytes); |
224 } else { | 227 } else { |
225 ASSERT(MAP_SPACE == space); | 228 ASSERT(MAP_SPACE == space); |
226 result = map_space_->AllocateRaw(size_in_bytes); | 229 allocation = map_space_->AllocateRaw(size_in_bytes); |
227 } | 230 } |
228 if (result->IsFailure()) old_gen_exhausted_ = true; | 231 if (allocation.IsRetry()) old_gen_exhausted_ = true; |
229 if (profiler->is_tracking_allocations() && result->To(&object)) { | 232 if (profiler->is_tracking_allocations() && allocation.To(&object)) { |
230 profiler->AllocationEvent(object->address(), size_in_bytes); | 233 profiler->AllocationEvent(object->address(), size_in_bytes); |
231 } | 234 } |
232 return result; | 235 return allocation; |
233 } | 236 } |
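Nothing in this diff shows AllocationResult's own definition, but AllocateRaw and its callers exercise its whole surface: implicit construction from Object* (which is what lets the Copy* helpers above simply `return src;`), the static Retry(space) constructor, IsRetry(), RetrySpace(), To(), and ToObjectChecked(). A rough reconstruction under those assumptions, inferred from usage here rather than copied from heap.h:

    // Illustrative reconstruction only; the real class lives in heap.h and
    // may differ in detail.
    class AllocationResult {
     public:
      // Implicit, so that "return answer;" compiles for any Object*.
      AllocationResult(Object* object)  // NOLINT
          : object_(object), retry_space_(INVALID_SPACE) {}

      static AllocationResult Retry(AllocationSpace space) {
        return AllocationResult(space);
      }

      bool IsRetry() const { return retry_space_ != INVALID_SPACE; }

      AllocationSpace RetrySpace() const {
        ASSERT(IsRetry());
        return retry_space_;
      }

      // On success stores the object and returns true; on retry returns false.
      template<typename T>
      bool To(T** obj) const {
        if (IsRetry()) return false;
        *obj = T::cast(object_);
        return true;
      }

      Object* ToObjectChecked() const {
        CHECK(!IsRetry());
        return object_;
      }

     private:
      explicit AllocationResult(AllocationSpace space)
          : object_(NULL), retry_space_(space) {}

      Object* object_;
      AllocationSpace retry_space_;
    };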
234 | 237 |
235 | 238 |
236 MaybeObject* Heap::NumberFromUint32( | |
237 uint32_t value, PretenureFlag pretenure) { | |
238 if (static_cast<int32_t>(value) >= 0 && | |
239 Smi::IsValid(static_cast<int32_t>(value))) { | |
240 return Smi::FromInt(static_cast<int32_t>(value)); | |
241 } | |
242 // Bypass NumberFromDouble to avoid various redundant checks. | |
243 return AllocateHeapNumber(FastUI2D(value), pretenure); | |
244 } | |
245 | |
246 | |
247 void Heap::FinalizeExternalString(String* string) { | 239 void Heap::FinalizeExternalString(String* string) { |
248 ASSERT(string->IsExternalString()); | 240 ASSERT(string->IsExternalString()); |
249 v8::String::ExternalStringResourceBase** resource_addr = | 241 v8::String::ExternalStringResourceBase** resource_addr = |
250 reinterpret_cast<v8::String::ExternalStringResourceBase**>( | 242 reinterpret_cast<v8::String::ExternalStringResourceBase**>( |
251 reinterpret_cast<byte*>(string) + | 243 reinterpret_cast<byte*>(string) + |
252 ExternalString::kResourceOffset - | 244 ExternalString::kResourceOffset - |
253 kHeapObjectTag); | 245 kHeapObjectTag); |
254 | 246 |
255 // Dispose of the C++ object if it has not already been disposed. | 247 // Dispose of the C++ object if it has not already been disposed. |
256 if (*resource_addr != NULL) { | 248 if (*resource_addr != NULL) { |
(...skipping 145 matching lines...) | |
402 (obj->IsExternalString() && ExternalString::cast(obj)->is_short())); | 394 (obj->IsExternalString() && ExternalString::cast(obj)->is_short())); |
403 case OLD_DATA_SPACE: | 395 case OLD_DATA_SPACE: |
404 return dst == src && dst == TargetSpaceId(type); | 396 return dst == src && dst == TargetSpaceId(type); |
405 case CODE_SPACE: | 397 case CODE_SPACE: |
406 return dst == src && type == CODE_TYPE; | 398 return dst == src && type == CODE_TYPE; |
407 case MAP_SPACE: | 399 case MAP_SPACE: |
408 case CELL_SPACE: | 400 case CELL_SPACE: |
409 case PROPERTY_CELL_SPACE: | 401 case PROPERTY_CELL_SPACE: |
410 case LO_SPACE: | 402 case LO_SPACE: |
411 return false; | 403 return false; |
404 case INVALID_SPACE: | |
405 UNREACHABLE(); | |
406 return false; | |
Hannes Payer (out of office) 2014/04/30 07:07:07
Is this necessary here? This case is caught by the UNREACHABLE() below.
Yang 2014/04/30 12:25:34
The compiler would otherwise complain that there is an unhandled enum value in the switch.
Sven Panne 2014/04/30 12:30:51
Already too late, but nevertheless: Please don't u…
| |
412 } | 407 } |
413 UNREACHABLE(); | 408 UNREACHABLE(); |
414 return false; | 409 return false; |
415 } | 410 } |
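On the INVALID_SPACE exchange above: with no default arm in the switch, GCC's and Clang's -Wswitch warning flags any enumerator that lacks a case, so the new arm keeps the build warning-clean while UNREACHABLE() guards the impossible path at runtime. A toy illustration of the pattern (not V8 code, though it borrows the UNREACHABLE() macro used in this file):

    // Toy example: a fully enumerated switch with no default arm.
    enum ToySpace { TOY_NEW, TOY_OLD, TOY_INVALID };

    static bool IsYoung(ToySpace space) {
      switch (space) {
        case TOY_NEW:
          return true;
        case TOY_OLD:
          return false;
        case TOY_INVALID:  // drop this arm and -Wswitch warns, since no
          UNREACHABLE();   // default exists to swallow the enumerator
          return false;
      }
      UNREACHABLE();       // some compilers still demand a tail return path
      return false;
    }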
416 | 411 |
417 | 412 |
418 void Heap::CopyBlock(Address dst, Address src, int byte_size) { | 413 void Heap::CopyBlock(Address dst, Address src, int byte_size) { |
419 CopyWords(reinterpret_cast<Object**>(dst), | 414 CopyWords(reinterpret_cast<Object**>(dst), |
420 reinterpret_cast<Object**>(src), | 415 reinterpret_cast<Object**>(src), |
421 static_cast<size_t>(byte_size / kPointerSize)); | 416 static_cast<size_t>(byte_size / kPointerSize)); |
(...skipping 161 matching lines...) | |
583 | 578 |
584 | 579 |
585 // Calls the FUNCTION_CALL function and retries it up to three times | 580 // Calls the FUNCTION_CALL function and retries it up to three times |
586 // to guarantee that any allocations performed during the call will | 581 // to guarantee that any allocations performed during the call will |
587 // succeed if there's enough memory. | 582 // succeed if there's enough memory. |
588 | 583 |
589 // Warning: Do not use the identifiers __object__, __maybe_object__ or | 584 // Warning: Do not use the identifiers __object__, __allocation__ or |
590 // __scope__ in a call to this macro. | 585 // __scope__ in a call to this macro. |
591 | 586 |
592 #define RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ | 587 #define RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ |
593 if (__maybe_object__->ToObject(&__object__)) { \ | 588 if (!__allocation__.IsRetry()) { \ |
589 __object__ = __allocation__.ToObjectChecked(); \ | |
594 if (__object__ == (ISOLATE)->heap()->exception()) { RETURN_EMPTY; } \ | 590 if (__object__ == (ISOLATE)->heap()->exception()) { RETURN_EMPTY; } \ |
595 RETURN_VALUE; \ | 591 RETURN_VALUE; \ |
596 } | 592 } |
597 | 593 |
598 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ | 594 #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ |
599 do { \ | 595 do { \ |
600 MaybeObject* __maybe_object__ = FUNCTION_CALL; \ | 596 AllocationResult __allocation__ = FUNCTION_CALL; \ |
601 Object* __object__ = NULL; \ | 597 Object* __object__ = NULL; \ |
602 RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ | 598 RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ |
603 ASSERT(__maybe_object__->IsRetryAfterGC()); \ | 599 (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \ |
604 (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \ | 600 "allocation failure"); \ |
605 allocation_space(), \ | 601 __allocation__ = FUNCTION_CALL; \ |
606 "allocation failure"); \ | |
607 __maybe_object__ = FUNCTION_CALL; \ | |
608 RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ | 602 RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ |
609 ASSERT(__maybe_object__->IsRetryAfterGC()); \ | |
610 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ | 603 (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ |
611 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ | 604 (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ |
612 { \ | 605 { \ |
613 AlwaysAllocateScope __scope__(ISOLATE); \ | 606 AlwaysAllocateScope __scope__(ISOLATE); \ |
614 __maybe_object__ = FUNCTION_CALL; \ | 607 __allocation__ = FUNCTION_CALL; \ |
615 } \ | 608 } \ |
616 RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ | 609 RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ |
617 ASSERT(__maybe_object__->IsRetryAfterGC()); \ | |
618 /* TODO(1181417): Fix this. */ \ | 610 /* TODO(1181417): Fix this. */ \ |
619 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \ | 611 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \ |
620 RETURN_EMPTY; \ | 612 RETURN_EMPTY; \ |
621 } while (false) | 613 } while (false) |
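The retry ladder above is: allocate, collect the failing space and allocate again, then a last-resort full GC under AlwaysAllocateScope before dying. Call sites normally reach it through a handle-returning wrapper along these lines (modeled on V8's CALL_HEAP_FUNCTION, reconstructed rather than quoted, so details may differ). Note that RETURN_VALUE may legitimately name __object__, since the macro itself binds it:

    // Sketch of the usual wrapper (modeled on CALL_HEAP_FUNCTION; not
    // verbatim from the tree). FUNCTION_CALL may run up to three times.
    #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)            \
      CALL_AND_RETRY_OR_DIE(ISOLATE,                                    \
                            FUNCTION_CALL,                              \
                            return Handle<TYPE>(TYPE::cast(__object__), \
                                                ISOLATE),               \
                            return Handle<TYPE>())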
622 | 614 |
623 #define CALL_AND_RETRY_OR_DIE( \ | 615 #define CALL_AND_RETRY_OR_DIE( \ |
624 ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ | 616 ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ |
625 CALL_AND_RETRY( \ | 617 CALL_AND_RETRY( \ |
626 ISOLATE, \ | 618 ISOLATE, \ |
627 FUNCTION_CALL, \ | 619 FUNCTION_CALL, \ |
(...skipping 149 matching lines...) | |
777 | 769 |
778 | 770 |
779 double GCTracer::SizeOfHeapObjects() { | 771 double GCTracer::SizeOfHeapObjects() { |
780 return (static_cast<double>(heap_->SizeOfObjects())) / MB; | 772 return (static_cast<double>(heap_->SizeOfObjects())) / MB; |
781 } | 773 } |
782 | 774 |
783 | 775 |
784 } } // namespace v8::internal | 776 } } // namespace v8::internal |
785 | 777 |
786 #endif // V8_HEAP_INL_H_ | 778 #endif // V8_HEAP_INL_H_ |