| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 11 matching lines...) |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #ifndef V8_HEAP_INL_H_ | 28 #ifndef V8_HEAP_INL_H_ |
| 29 #define V8_HEAP_INL_H_ | 29 #define V8_HEAP_INL_H_ |
| 30 | 30 |
| 31 #include "heap.h" | 31 #include "heap.h" |
| | 32 #include "heap-profiler.h" |
| 32 #include "isolate.h" | 33 #include "isolate.h" |
| 33 #include "list-inl.h" | 34 #include "list-inl.h" |
| 34 #include "objects.h" | 35 #include "objects.h" |
| 35 #include "platform.h" | 36 #include "platform.h" |
| 36 #include "v8-counters.h" | 37 #include "v8-counters.h" |
| 37 #include "store-buffer.h" | 38 #include "store-buffer.h" |
| 38 #include "store-buffer-inl.h" | 39 #include "store-buffer-inl.h" |
| 39 | 40 |
| 40 namespace v8 { | 41 namespace v8 { |
| 41 namespace internal { | 42 namespace internal { |
| (...skipping 601 matching lines...) |
| 643 { \ | 644 { \ |
| 644 AlwaysAllocateScope __scope__; \ | 645 AlwaysAllocateScope __scope__; \ |
| 645 __maybe_object__ = FUNCTION_CALL; \ | 646 __maybe_object__ = FUNCTION_CALL; \ |
| 646 } \ | 647 } \ |
| 647 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ | 648 if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ |
| 648 if (__maybe_object__->IsOutOfMemory()) { \ | 649 if (__maybe_object__->IsOutOfMemory()) { \ |
| 649 OOM; \ | 650 OOM; \ |
| 650 } \ | 651 } \ |
| 651 if (__maybe_object__->IsRetryAfterGC()) { \ | 652 if (__maybe_object__->IsRetryAfterGC()) { \ |
| 652 /* TODO(1181417): Fix this. */ \ | 653 /* TODO(1181417): Fix this. */ \ |
| 653 v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \ | 654 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);\ |
| 654 } \ | 655 } \ |
| 655 RETURN_EMPTY; \ | 656 RETURN_EMPTY; \ |
| 656 } while (false) | 657 } while (false) |
| 657 | 658 |
| 658 #define CALL_AND_RETRY_OR_DIE( \ | 659 #define CALL_AND_RETRY_OR_DIE( \ |
| 659 ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ | 660 ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ |
| 660 CALL_AND_RETRY( \ | 661 CALL_AND_RETRY( \ |
| 661 ISOLATE, \ | 662 ISOLATE, \ |
| 662 FUNCTION_CALL, \ | 663 FUNCTION_CALL, \ |
| 663 RETURN_VALUE, \ | 664 RETURN_VALUE, \ |
| 664 RETURN_EMPTY, \ | 665 RETURN_EMPTY, \ |
| 665 v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY", true)) | 666 v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY", true)) |
| 666 | 667 |
| 667 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \ | 668 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \ |
| 668 CALL_AND_RETRY_OR_DIE(ISOLATE, \ | 669 CALL_AND_RETRY_OR_DIE(ISOLATE, \ |
| 669 FUNCTION_CALL, \ | 670 FUNCTION_CALL, \ |
| 670 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \ | 671 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \ |
| 671 return Handle<TYPE>()) \ | 672 return Handle<TYPE>()) \ |
| 672 | 673 |
| 673 | 674 |
| 674 #define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \ | 675 #define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \ |
| 675 CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return) | 676 CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return) |
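For context, CALL_HEAP_FUNCTION is the macro through which V8's handle-returning allocation helpers funnel; a minimal sketch of a typical call site (modeled on the factory code of this era, with illustrative method names) looks like this:

```cpp
// Sketch of a typical CALL_HEAP_FUNCTION call site; names are illustrative
// and assume the surrounding V8 Factory/Heap context.
Handle<FixedArray> Factory::NewFixedArray(int size, PretenureFlag pretenure) {
  // Expands to the CALL_AND_RETRY pattern above: try the raw allocation,
  // retry after GC if needed, and on persistent failure fall through to
  // Heap::FatalProcessOutOfMemory.
  CALL_HEAP_FUNCTION(
      isolate(),
      isolate()->heap()->AllocateFixedArray(size, pretenure),
      FixedArray);
}
```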
| (...skipping 141 matching lines...) |
| 817 #ifdef DEBUG | 818 #ifdef DEBUG |
| 818 Isolate* isolate = Isolate::Current(); | 819 Isolate* isolate = Isolate::Current(); |
| 819 isolate->heap()->disallow_allocation_failure_ = old_state_; | 820 isolate->heap()->disallow_allocation_failure_ = old_state_; |
| 820 #endif | 821 #endif |
| 821 } | 822 } |
| 822 | 823 |
| 823 | 824 |
| 824 } } // namespace v8::internal | 825 } } // namespace v8::internal |
| 825 | 826 |
| 826 #endif // V8_HEAP_INL_H_ | 827 #endif // V8_HEAP_INL_H_ |