| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/factory.h" | 5 #include "src/factory.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/allocation-site-scopes.h" | 8 #include "src/allocation-site-scopes.h" |
| 9 #include "src/ast/ast.h" | 9 #include "src/ast/ast.h" |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 16 matching lines...) Expand all Loading... |
| 27 // | 27 // |
| 28 // Warning: Do not use the identifiers __object__, __maybe_object__, | 28 // Warning: Do not use the identifiers __object__, __maybe_object__, |
| 29 // __allocation__ or __scope__ in a call to this macro. | 29 // __allocation__ or __scope__ in a call to this macro. |
| 30 | 30 |
| 31 #define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE) \ | 31 #define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE) \ |
| 32 if (__allocation__.To(&__object__)) { \ | 32 if (__allocation__.To(&__object__)) { \ |
| 33 DCHECK(__object__ != (ISOLATE)->heap()->exception()); \ | 33 DCHECK(__object__ != (ISOLATE)->heap()->exception()); \ |
| 34 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE); \ | 34 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE); \ |
| 35 } | 35 } |
| 36 | 36 |
| 37 // TODO(jkummerow): Handlify more of heap.cc to maintain the rule that |
| 38 // "there are no raw pointers anywhere on the stack when an allocation |
| 39 // is happening", and then move the ZapHeapPointersInCppFrames() call |
| 40 // into Heap::AllocateRaw. |
| 41 #if DEBUG |
| 42 #define MAYBE_ZAP_STACK(isolate) \ |
| 43 if (FLAG_zap_cpp_pointers) { \ |
| 44 ZapHeapPointersInCppFrames(isolate); \ |
| 45 } |
| 46 #else |
| 47 #define MAYBE_ZAP_STACK(isolate) |
| 48 #endif |
| 49 |
| 37 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \ | 50 #define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \ |
| 38 do { \ | 51 do { \ |
| 52 MAYBE_ZAP_STACK(ISOLATE); \ |
| 39 AllocationResult __allocation__ = FUNCTION_CALL; \ | 53 AllocationResult __allocation__ = FUNCTION_CALL; \ |
| 40 Object* __object__ = NULL; \ | 54 Object* __object__ = NULL; \ |
| 41 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE) \ | 55 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE) \ |
| 42 /* Two GCs before panicking. In new space will almost always succeed. */ \ | 56 /* Two GCs before panicking. In new space will almost always succeed. */ \ |
| 43 for (int __i__ = 0; __i__ < 2; __i__++) { \ | 57 for (int __i__ = 0; __i__ < 2; __i__++) { \ |
| 44 (ISOLATE)->heap()->CollectGarbage( \ | 58 (ISOLATE)->heap()->CollectGarbage( \ |
| 45 __allocation__.RetrySpace(), \ | 59 __allocation__.RetrySpace(), \ |
| 46 GarbageCollectionReason::kAllocationFailure); \ | 60 GarbageCollectionReason::kAllocationFailure); \ |
| 47 __allocation__ = FUNCTION_CALL; \ | 61 __allocation__ = FUNCTION_CALL; \ |
| 48 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE) \ | 62 RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE) \ |
| (...skipping 2897 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2946 Handle<AccessorInfo> prototype = | 2960 Handle<AccessorInfo> prototype = |
| 2947 Accessors::FunctionPrototypeInfo(isolate(), rw_attribs); | 2961 Accessors::FunctionPrototypeInfo(isolate(), rw_attribs); |
| 2948 Descriptor d = Descriptor::AccessorConstant( | 2962 Descriptor d = Descriptor::AccessorConstant( |
| 2949 Handle<Name>(Name::cast(prototype->name())), prototype, rw_attribs); | 2963 Handle<Name>(Name::cast(prototype->name())), prototype, rw_attribs); |
| 2950 map->AppendDescriptor(&d); | 2964 map->AppendDescriptor(&d); |
| 2951 } | 2965 } |
| 2952 } | 2966 } |
| 2953 | 2967 |
| 2954 } // namespace internal | 2968 } // namespace internal |
| 2955 } // namespace v8 | 2969 } // namespace v8 |
| OLD | NEW |