OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 208 matching lines...)
219 // to guarantee that any allocations performed during the call will | 219 // to guarantee that any allocations performed during the call will |
220 // succeed if there's enough memory. | 220 // succeed if there's enough memory. |
221 | 221 |
222 // Warning: Do not use the identifiers __object__ or __scope__ in a | 222 // Warning: Do not use the identifiers __object__ or __scope__ in a |
223 // call to this macro. | 223 // call to this macro. |
224 | 224 |
225 #define CALL_AND_RETRY(FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ | 225 #define CALL_AND_RETRY(FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ |
226 do { \ | 226 do { \ |
227 GC_GREEDY_CHECK(); \ | 227 GC_GREEDY_CHECK(); \ |
228 Object* __object__ = FUNCTION_CALL; \ | 228 Object* __object__ = FUNCTION_CALL; \ |
229 if (!__object__->IsFailure()) return RETURN_VALUE; \ | 229 if (!__object__->IsFailure()) RETURN_VALUE; \ |
230 if (__object__->IsOutOfMemoryFailure()) { \ | 230 if (__object__->IsOutOfMemoryFailure()) { \ |
231 v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0"); \ | 231 v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0"); \ |
232 } \ | 232 } \ |
233 if (!__object__->IsRetryAfterGC()) return RETURN_EMPTY; \ | 233 if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \ |
234 Heap::CollectGarbage(Failure::cast(__object__)->requested(), \ | 234 Heap::CollectGarbage(Failure::cast(__object__)->requested(), \ |
235 Failure::cast(__object__)->allocation_space()); \ | 235 Failure::cast(__object__)->allocation_space()); \ |
236 __object__ = FUNCTION_CALL; \ | 236 __object__ = FUNCTION_CALL; \ |
237 if (!__object__->IsFailure()) return RETURN_VALUE; \ | 237 if (!__object__->IsFailure()) RETURN_VALUE; \ |
238 if (__object__->IsOutOfMemoryFailure()) { \ | 238 if (__object__->IsOutOfMemoryFailure()) { \ |
239 v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1"); \ | 239 v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1"); \ |
240 } \ | 240 } \ |
241 if (!__object__->IsRetryAfterGC()) return RETURN_EMPTY; \ | 241 if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \ |
242 Counters::gc_last_resort_from_handles.Increment(); \ | 242 Counters::gc_last_resort_from_handles.Increment(); \ |
243 Heap::CollectAllGarbage(); \ | 243 Heap::CollectAllGarbage(); \ |
244 { \ | 244 { \ |
245 AlwaysAllocateScope __scope__; \ | 245 AlwaysAllocateScope __scope__; \ |
246 __object__ = FUNCTION_CALL; \ | 246 __object__ = FUNCTION_CALL; \ |
247 } \ | 247 } \ |
248 if (!__object__->IsFailure()) return RETURN_VALUE; \ | 248 if (!__object__->IsFailure()) RETURN_VALUE; \ |
249 if (__object__->IsOutOfMemoryFailure()) { \ | 249 if (__object__->IsOutOfMemoryFailure()) { \ |
250 /* TODO(1181417): Fix this. */ \ | 250 /* TODO(1181417): Fix this. */ \ |
251 v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2"); \ | 251 v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2"); \ |
252 } \ | 252 } \ |
253 ASSERT(!__object__->IsRetryAfterGC()); \ | 253 ASSERT(!__object__->IsRetryAfterGC()); \ |
254 return RETURN_EMPTY; \ | 254 RETURN_EMPTY; \ |
255 } while (false) | 255 } while (false) |
256 | 256 |
257 | 257 |
258 #define CALL_HEAP_FUNCTION(FUNCTION_CALL, TYPE) \ | 258 #define CALL_HEAP_FUNCTION(FUNCTION_CALL, TYPE) \ |
259 CALL_AND_RETRY(FUNCTION_CALL, \ | 259 CALL_AND_RETRY(FUNCTION_CALL, \ |
260 Handle<TYPE>(TYPE::cast(__object__)), \ | 260 return Handle<TYPE>(TYPE::cast(__object__)), \ |
261 Handle<TYPE>()) | 261 return Handle<TYPE>()) |
262 | 262 |
263 | 263 |
264 #define CALL_HEAP_FUNCTION_VOID(FUNCTION_CALL) \ | 264 #define CALL_HEAP_FUNCTION_VOID(FUNCTION_CALL) \ |
265 CALL_AND_RETRY(FUNCTION_CALL, ;, ;) // NOLINT | 265 CALL_AND_RETRY(FUNCTION_CALL, return, return) |
266 | 266 |
267 | 267 |
268 #ifdef DEBUG | 268 #ifdef DEBUG |
269 | 269 |
270 inline bool Heap::allow_allocation(bool new_state) { | 270 inline bool Heap::allow_allocation(bool new_state) { |
271 bool old = allocation_allowed_; | 271 bool old = allocation_allowed_; |
272 allocation_allowed_ = new_state; | 272 allocation_allowed_ = new_state; |
273 return old; | 273 return old; |
274 } | 274 } |
275 | 275 |
276 #endif | 276 #endif |
277 | 277 |
278 | 278 |
279 } } // namespace v8::internal | 279 } } // namespace v8::internal |
280 | 280 |
281 #endif // V8_HEAP_INL_H_ | 281 #endif // V8_HEAP_INL_H_ |
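
Note on usage (not part of the patch): with this change the `return` keyword moves out of CALL_AND_RETRY itself and into its RETURN_VALUE and RETURN_EMPTY arguments, so the macro no longer forces a return and the caller decides what happens on success or permanent failure. Below is a minimal sketch of how the reworked wrappers read at a call site; the helper names and the allocator calls are assumed for illustration only and are not part of this CL.

// Hypothetical handle-returning helper. CALL_HEAP_FUNCTION now passes
// "return Handle<String>(String::cast(__object__))" on success and
// "return Handle<String>()" on permanent failure through the macro
// arguments, so the function body is a single statement.
Handle<String> NewRawAsciiStringHandle(int length) {
  CALL_HEAP_FUNCTION(Heap::AllocateRawAsciiString(length), String);
}

// Hypothetical void wrapper. CALL_HEAP_FUNCTION_VOID supplies a bare
// "return" for both outcomes, so the enclosing function simply returns
// once the call has succeeded or given up after the GC retries.
void PerformSomeAllocation(Handle<JSObject> object) {
  CALL_HEAP_FUNCTION_VOID(SomeAllocatingCall(*object));  // SomeAllocatingCall is assumed.
}

Every path through the expanded macro either returns or calls FatalProcessOutOfMemory, so a wrapper like the one above needs no statement after the macro.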