Chromium Code Reviews
Diff: src/heap-inl.h

Issue 3970005: Make Failure inherit from MaybeObject instead of Object. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 10 years, 2 months ago
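The patch changes the allocation entry points in heap-inl.h to return MaybeObject* rather than Object*, and makes Failure a MaybeObject rather than an Object, so a raw allocation result can no longer be treated as an Object until it has been explicitly unwrapped. Below is a minimal, self-contained sketch of that calling convention; the names mirror the ones used in the diff (MaybeObject, Object, Failure, ToObject, IsFailure), but the real V8 types in objects.h are tagged pointers rather than virtual classes, so this is only an illustration of the contract, not the actual implementation.

// Illustration only: hypothetical stand-ins for the V8 types this patch
// touches.  The contract is what matters: a MaybeObject* is either a real
// Object or a Failure, and ToObject() is the only way to get the Object out.
class Object;

class MaybeObject {
 public:
  virtual ~MaybeObject() {}
  virtual bool IsFailure() const { return false; }
  // Returns true and stores the value if the allocation succeeded,
  // false if this is a Failure (retry-after-GC, out-of-memory, ...).
  bool ToObject(Object** out);
};

class Object : public MaybeObject {};   // a successfully allocated value

class Failure : public MaybeObject {    // a failure marker, no longer an Object
 public:
  virtual bool IsFailure() const { return true; }
};

bool MaybeObject::ToObject(Object** out) {
  if (IsFailure()) return false;
  *out = static_cast<Object*>(this);
  return true;
}

// Caller side: the unwrap-then-use pattern CALL_AND_RETRY applies below.
bool UseAllocation(MaybeObject* maybe) {
  Object* obj = NULL;
  if (!maybe->ToObject(&obj)) return false;  // propagate the failure
  // ... obj is safe to use here ...
  return true;
}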
 // Copyright 2006-2008 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 22 matching lines...)
 #include "v8-counters.h"

 namespace v8 {
 namespace internal {

 int Heap::MaxObjectSizeInPagedSpace() {
   return Page::kMaxHeapObjectSize;
 }


-Object* Heap::AllocateSymbol(Vector<const char> str,
+MaybeObject* Heap::AllocateSymbol(Vector<const char> str,
                                   int chars,
                                   uint32_t hash_field) {
   unibrow::Utf8InputBuffer<> buffer(str.start(),
                                     static_cast<unsigned>(str.length()));
   return AllocateInternalSymbol(&buffer, chars, hash_field);
 }


-Object* Heap::CopyFixedArray(FixedArray* src) {
+MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
   return CopyFixedArrayWithMap(src, src->map());
 }


-Object* Heap::AllocateRaw(int size_in_bytes,
+MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                                AllocationSpace space,
                                AllocationSpace retry_space) {
   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
   ASSERT(space != NEW_SPACE ||
          retry_space == OLD_POINTER_SPACE ||
          retry_space == OLD_DATA_SPACE ||
          retry_space == LO_SPACE);
 #ifdef DEBUG
   if (FLAG_gc_interval >= 0 &&
       !disallow_allocation_failure_ &&
       Heap::allocation_timeout_-- <= 0) {
     return Failure::RetryAfterGC(space);
   }
   Counters::objs_since_last_full.Increment();
   Counters::objs_since_last_young.Increment();
 #endif
-  Object* result;
+  MaybeObject* result;
   if (NEW_SPACE == space) {
     result = new_space_.AllocateRaw(size_in_bytes);
     if (always_allocate() && result->IsFailure()) {
       space = retry_space;
     } else {
       return result;
     }
   }

   if (OLD_POINTER_SPACE == space) {
     result = old_pointer_space_->AllocateRaw(size_in_bytes);
   } else if (OLD_DATA_SPACE == space) {
     result = old_data_space_->AllocateRaw(size_in_bytes);
   } else if (CODE_SPACE == space) {
     result = code_space_->AllocateRaw(size_in_bytes);
   } else if (LO_SPACE == space) {
     result = lo_space_->AllocateRaw(size_in_bytes);
   } else if (CELL_SPACE == space) {
     result = cell_space_->AllocateRaw(size_in_bytes);
   } else {
     ASSERT(MAP_SPACE == space);
     result = map_space_->AllocateRaw(size_in_bytes);
   }
   if (result->IsFailure()) old_gen_exhausted_ = true;
   return result;
 }


-Object* Heap::NumberFromInt32(int32_t value) {
+MaybeObject* Heap::NumberFromInt32(int32_t value) {
   if (Smi::IsValid(value)) return Smi::FromInt(value);
   // Bypass NumberFromDouble to avoid various redundant checks.
   return AllocateHeapNumber(FastI2D(value));
 }


-Object* Heap::NumberFromUint32(uint32_t value) {
+MaybeObject* Heap::NumberFromUint32(uint32_t value) {
   if ((int32_t)value >= 0 && Smi::IsValid((int32_t)value)) {
     return Smi::FromInt((int32_t)value);
   }
   // Bypass NumberFromDouble to avoid various redundant checks.
   return AllocateHeapNumber(FastUI2D(value));
 }


 void Heap::FinalizeExternalString(String* string) {
   ASSERT(string->IsExternalString());
   v8::String::ExternalStringResourceBase** resource_addr =
       reinterpret_cast<v8::String::ExternalStringResourceBase**>(
           reinterpret_cast<byte*>(string) +
           ExternalString::kResourceOffset -
           kHeapObjectTag);

   // Dispose of the C++ object if it has not already been disposed.
   if (*resource_addr != NULL) {
     (*resource_addr)->Dispose();
   }

   // Clear the resource pointer in the string.
   *resource_addr = NULL;
 }


-Object* Heap::AllocateRawMap() {
+MaybeObject* Heap::AllocateRawMap() {
 #ifdef DEBUG
   Counters::objs_since_last_full.Increment();
   Counters::objs_since_last_young.Increment();
 #endif
-  Object* result = map_space_->AllocateRaw(Map::kSize);
+  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
   if (result->IsFailure()) old_gen_exhausted_ = true;
 #ifdef DEBUG
   if (!result->IsFailure()) {
     // Maps have their own alignment.
     CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
           static_cast<intptr_t>(kHeapObjectTag));
   }
 #endif
   return result;
 }


-Object* Heap::AllocateRawCell() {
+MaybeObject* Heap::AllocateRawCell() {
 #ifdef DEBUG
   Counters::objs_since_last_full.Increment();
   Counters::objs_since_last_young.Increment();
 #endif
-  Object* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
+  MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
   if (result->IsFailure()) old_gen_exhausted_ = true;
   return result;
 }


 bool Heap::InNewSpace(Object* object) {
   bool result = new_space_.Contains(object);
   ASSERT(!result ||                  // Either not in new space
          gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
          InToSpace(object));         // ... or in to-space (where we allocate).
(...skipping 152 matching lines...)
   if (first_word.IsForwardingAddress()) {
     *p = first_word.ToForwardingAddress();
     return;
   }

   // Call the slow part of scavenge object.
   return ScavengeObjectSlow(p, object);
 }


-Object* Heap::PrepareForCompare(String* str) {
+MaybeObject* Heap::PrepareForCompare(String* str) {
   // Always flatten small strings and force flattening of long strings
   // after we have accumulated a certain amount we failed to flatten.
   static const int kMaxAlwaysFlattenLength = 32;
   static const int kFlattenLongThreshold = 16*KB;

   const int length = str->length();
-  Object* obj = str->TryFlatten();
+  MaybeObject* obj = str->TryFlatten();
   if (length <= kMaxAlwaysFlattenLength ||
       unflattened_strings_length_ >= kFlattenLongThreshold) {
     return obj;
   }
   if (obj->IsFailure()) {
     unflattened_strings_length_ += length;
   }
   return str;
 }

(...skipping 33 matching lines...)
   if (FLAG_gc_greedy) v8::internal::Heap::GarbageCollectionGreedyCheck()
 #else
 #define GC_GREEDY_CHECK() { }
 #endif


 // Calls the FUNCTION_CALL function and retries it up to three times
 // to guarantee that any allocations performed during the call will
 // succeed if there's enough memory.

-// Warning: Do not use the identifiers __object__ or __scope__ in a
-// call to this macro.
+// Warning: Do not use the identifiers __object__, __maybe_object__ or
+// __scope__ in a call to this macro.

 #define CALL_AND_RETRY(FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
   do { \
     GC_GREEDY_CHECK(); \
-    Object* __object__ = FUNCTION_CALL; \
-    if (!__object__->IsFailure()) RETURN_VALUE; \
-    if (__object__->IsOutOfMemoryFailure()) { \
+    MaybeObject* __maybe_object__ = FUNCTION_CALL; \
+    Object* __object__ = NULL; \
+    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
+    if (__maybe_object__->IsOutOfMemory()) { \
       v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true); \
     } \
-    if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
-    Heap::CollectGarbage(Failure::cast(__object__)->allocation_space()); \
-    __object__ = FUNCTION_CALL; \
-    if (!__object__->IsFailure()) RETURN_VALUE; \
-    if (__object__->IsOutOfMemoryFailure()) { \
+    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
+    Heap::CollectGarbage(Failure::cast(__maybe_object__)-> \
+                         allocation_space()); \
+    __maybe_object__ = FUNCTION_CALL; \
+    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
+    if (__maybe_object__->IsOutOfMemory()) { \
       v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true); \
     } \
-    if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
+    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
     Counters::gc_last_resort_from_handles.Increment(); \
     Heap::CollectAllGarbage(false); \
     { \
       AlwaysAllocateScope __scope__; \
-      __object__ = FUNCTION_CALL; \
+      __maybe_object__ = FUNCTION_CALL; \
     } \
-    if (!__object__->IsFailure()) RETURN_VALUE; \
-    if (__object__->IsOutOfMemoryFailure() || \
-        __object__->IsRetryAfterGC()) { \
+    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
+    if (__maybe_object__->IsOutOfMemory() || \
+        __maybe_object__->IsRetryAfterGC()) { \
       /* TODO(1181417): Fix this. */ \
       v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true); \
     } \
     RETURN_EMPTY; \
   } while (false)


 #define CALL_HEAP_FUNCTION(FUNCTION_CALL, TYPE) \
   CALL_AND_RETRY(FUNCTION_CALL, \
                  return Handle<TYPE>(TYPE::cast(__object__)), \
(...skipping 61 matching lines...)


 void ExternalStringTable::ShrinkNewStrings(int position) {
   new_space_strings_.Rewind(position);
   Verify();
 }

 } }  // namespace v8::internal

 #endif  // V8_HEAP_INL_H_
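The CALL_AND_RETRY macro keeps the same recovery strategy it had before the patch: try the allocation, on retry-after-GC collect the failing space and try again, then as a last resort collect all garbage inside an AlwaysAllocateScope and try a third time, and only then give up or abort on out-of-memory. What changes is only the unwrapping: success is now detected with ToObject() on the MaybeObject* instead of !IsFailure() on an Object*. CALL_HEAP_FUNCTION layers on top of that, converting a successful raw result into a Handle<TYPE>. A hypothetical wrapper in the style of V8's factory functions (the function name here is invented purely for illustration) shows how the macro is meant to be invoked:

// Hypothetical illustration of a handle-returning wrapper; the real factory
// functions live in factory.cc.  The heap call yields a MaybeObject*,
// CALL_AND_RETRY retries it across GCs, and the caller only ever sees a
// Handle<Object> once the allocation has actually succeeded.
Handle<Object> NewNumberForIllustration(int32_t value) {
  CALL_HEAP_FUNCTION(Heap::NumberFromInt32(value), Object);
}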