OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 1054 matching lines...) |
1065 #if COMPILER(CLANG) && !defined(ADDRESS_SANITIZER) | 1065 #if COMPILER(CLANG) && !defined(ADDRESS_SANITIZER) |
1066 #define STACK_ALLOCATED() \ | 1066 #define STACK_ALLOCATED() \ |
1067 private: \ | 1067 private: \ |
1068 __attribute__((annotate("blink_stack_allocated"))) \ | 1068 __attribute__((annotate("blink_stack_allocated"))) \ |
1069 void* operator new(size_t) = delete; \ | 1069 void* operator new(size_t) = delete; \ |
1070 void* operator new(size_t, NotNullTag, void*) = delete; \ | 1070 void* operator new(size_t, NotNullTag, void*) = delete; \ |
1071 void* operator new(size_t, void*) = delete; | 1071 void* operator new(size_t, void*) = delete; |
1072 | 1072 |
1073 #define GC_PLUGIN_IGNORE(bug) \ | 1073 #define GC_PLUGIN_IGNORE(bug) \ |
1074 __attribute__((annotate("blink_gc_plugin_ignore"))) | 1074 __attribute__((annotate("blink_gc_plugin_ignore"))) |
| 1075 |
| 1076 #define GC_PLUGIN_IGNORE_CYCLE \ |
| 1077 __attribute__((annotate("blink_gc_plugin_ignore_cycle"))) |
1075 #else | 1078 #else |
1076 #define STACK_ALLOCATED() DISALLOW_ALLOCATION() | 1079 #define STACK_ALLOCATED() DISALLOW_ALLOCATION() |
1077 #define GC_PLUGIN_IGNORE(bug) | 1080 #define GC_PLUGIN_IGNORE(bug) |
| 1081 #define GC_PLUGIN_IGNORE_CYCLE |
1078 #endif | 1082 #endif |
1079 | 1083 |
1080 NO_SANITIZE_ADDRESS | 1084 NO_SANITIZE_ADDRESS |
1081 void HeapObjectHeader::checkHeader() const | 1085 void HeapObjectHeader::checkHeader() const |
1082 { | 1086 { |
1083 ASSERT(m_magic == magic); | 1087 ASSERT(m_magic == magic); |
1084 } | 1088 } |
1085 | 1089 |
1086 Address HeapObjectHeader::payload() | 1090 Address HeapObjectHeader::payload() |
1087 { | 1091 { |
(...skipping 646 matching lines...) |
1734 // to export. This forces it to export all the methods from ThreadHeap. | 1738 // to export. This forces it to export all the methods from ThreadHeap. |
1735 template<> void ThreadHeap<FinalizedHeapObjectHeader>::addPageToHeap(const GCInfo*); | 1739 template<> void ThreadHeap<FinalizedHeapObjectHeader>::addPageToHeap(const GCInfo*); |
1736 template<> void ThreadHeap<HeapObjectHeader>::addPageToHeap(const GCInfo*); | 1740 template<> void ThreadHeap<HeapObjectHeader>::addPageToHeap(const GCInfo*); |
1737 extern template class HEAP_EXPORT ThreadHeap<FinalizedHeapObjectHeader>; | 1741 extern template class HEAP_EXPORT ThreadHeap<FinalizedHeapObjectHeader>; |
1738 extern template class HEAP_EXPORT ThreadHeap<HeapObjectHeader>; | 1742 extern template class HEAP_EXPORT ThreadHeap<HeapObjectHeader>; |
1739 #endif | 1743 #endif |
1740 | 1744 |
1741 } | 1745 } |
1742 | 1746 |
1743 #endif // Heap_h | 1747 #endif // Heap_h |
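
The NEW column adds GC_PLUGIN_IGNORE_CYCLE next to the existing GC_PLUGIN_IGNORE, expanding to a clang annotate attribute under COMPILER(CLANG) and to nothing otherwise. Below is a minimal standalone sketch of how such an annotation-only macro is typically attached to a declaration so the GC plugin can see it; the Parent/Child classes and field names are hypothetical illustrations, not Blink code, and only the macro definition mirrors the patch.

```cpp
#include <cstddef>

#if defined(__clang__) && !defined(ADDRESS_SANITIZER)
// Mirrors the NEW-side definition: an annotation the Blink GC clang plugin can
// read from the AST; it has no effect on code generation.
#define GC_PLUGIN_IGNORE_CYCLE \
    __attribute__((annotate("blink_gc_plugin_ignore_cycle")))
#else
// Non-clang (or ASan) builds compile the macro away, as in the patch.
#define GC_PLUGIN_IGNORE_CYCLE
#endif

// Hypothetical pair of classes that point at each other; the annotation marks
// the declaration so a plugin could skip cycle reporting for this edge.
class Child;

class Parent {
public:
    GC_PLUGIN_IGNORE_CYCLE
    Child* m_child = nullptr; // the back edge a cycle checker would flag
};

class Child {
public:
    Parent* m_parent = nullptr;
};

int main()
{
    Parent p;
    Child c;
    p.m_child = &c;
    c.m_parent = &p;
    return 0;
}
```

The sketch compiles with or without clang because the fallback branch defines the macro to nothing, matching how the patch keeps non-clang builds unaffected.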