| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1864 matching lines...) |
| 1875 } else if (type == FIXED_ARRAY_TYPE) { | 1875 } else if (type == FIXED_ARRAY_TYPE) { |
| 1876 ASSERT(sub_type <= LAST_FIXED_ARRAY_SUB_TYPE); | 1876 ASSERT(sub_type <= LAST_FIXED_ARRAY_SUB_TYPE); |
| 1877 object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type]++; | 1877 object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type]++; |
| 1878 object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type] += size; | 1878 object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type] += size; |
| 1879 } | 1879 } |
| 1880 } | 1880 } |
| 1881 } | 1881 } |
| 1882 | 1882 |
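(Side note on the branch at lines 1875-1878: FIXED_ARRAY_TYPE sub-types are folded into the same flat object_counts_/object_sizes_ arrays by offsetting the index with FIRST_FIXED_ARRAY_SUB_TYPE. A minimal standalone sketch of that bucketing pattern is below; the names RecordObject, kFirstFixedArraySubType and the bucket sizes are illustrative placeholders, not V8's actual constants.)

#include <cassert>
#include <cstddef>

// Illustrative constants; in heap.h these roles are played by LAST_TYPE,
// LAST_FIXED_ARRAY_SUB_TYPE and FIRST_FIXED_ARRAY_SUB_TYPE.
const int kObjectTypeCount = 16;
const int kFixedArraySubTypeCount = 8;
const int kFirstFixedArraySubType = kObjectTypeCount;
const int kBucketCount = kObjectTypeCount + kFixedArraySubTypeCount;

static int object_counts[kBucketCount];
static std::size_t object_sizes[kBucketCount];

// Records one object; a negative sub_type means "no FixedArray sub-type".
void RecordObject(int type, int sub_type, std::size_t size) {
  int bucket = type;
  if (sub_type >= 0) {
    assert(sub_type < kFixedArraySubTypeCount);
    bucket = kFirstFixedArraySubType + sub_type;  // sub-types share the arrays
  }
  object_counts[bucket]++;
  object_sizes[bucket] += size;
}

(Keeping sub-types in one flat index space means a checkpointing routine such as CheckpointObjectStats() only has to walk a single pair of arrays.)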
| 1883 void CheckpointObjectStats(); | 1883 void CheckpointObjectStats(); |
| 1884 | 1884 |
| 1885 // We don't use a ScopedLock here since we want to lock the heap | 1885 // We don't use a LockGuard here since we want to lock the heap |
| 1886 // only when FLAG_concurrent_recompilation is true. | 1886 // only when FLAG_concurrent_recompilation is true. |
| 1887 class RelocationLock { | 1887 class RelocationLock { |
| 1888 public: | 1888 public: |
| 1889 explicit RelocationLock(Heap* heap); | 1889 explicit RelocationLock(Heap* heap); |
| 1890 | 1890 |
| 1891 ~RelocationLock() { | 1891 ~RelocationLock() { |
| 1892 if (FLAG_concurrent_recompilation) { | 1892 if (FLAG_concurrent_recompilation) { |
| 1893 #ifdef DEBUG | 1893 #ifdef DEBUG |
| 1894 heap_->relocation_mutex_locked_by_optimizer_thread_ = false; | 1894 heap_->relocation_mutex_locked_by_optimizer_thread_ = false; |
| 1895 #endif // DEBUG | 1895 #endif // DEBUG |
| (...skipping 1163 matching lines...) |
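(Side note on the RelocationLock fragment at lines 1885-1895: the comment describes a conditional RAII lock, where the relocation mutex is taken only when FLAG_concurrent_recompilation is set, which a plain LockGuard cannot express. A minimal sketch of that pattern using std::mutex follows; ConditionalLock and the flag variable here are illustrative stand-ins, not V8's actual types.)

#include <mutex>

bool FLAG_concurrent_recompilation = false;  // stand-in for the real V8 flag

// Illustrative RAII helper: locks only when the flag is set and remembers
// that decision so the destructor releases exactly what was acquired.
class ConditionalLock {
 public:
  explicit ConditionalLock(std::mutex* mutex)
      : mutex_(mutex), locked_(FLAG_concurrent_recompilation) {
    if (locked_) mutex_->lock();
  }
  ~ConditionalLock() {
    if (locked_) mutex_->unlock();
  }
 private:
  std::mutex* mutex_;
  bool locked_;
};

(The real RelocationLock re-checks the flag in its destructor, at line 1892, which is equivalent as long as the flag is fixed at startup; caching the decision, as above, also stays correct if the flag could change in between.)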
| 3059 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 3059 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
| 3060 | 3060 |
| 3061 private: | 3061 private: |
| 3062 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 3062 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 3063 }; | 3063 }; |
| 3064 #endif // DEBUG | 3064 #endif // DEBUG |
| 3065 | 3065 |
| 3066 } } // namespace v8::internal | 3066 } } // namespace v8::internal |
| 3067 | 3067 |
| 3068 #endif // V8_HEAP_H_ | 3068 #endif // V8_HEAP_H_ |