| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 84 matching lines...) |
| 95 | 95 |
| 96 | 96 |
| 97 // We keep the lowest and highest addresses mapped as a quick way of | 97 // We keep the lowest and highest addresses mapped as a quick way of |
| 98 // determining that pointers are outside the heap (used mostly in assertions | 98 // determining that pointers are outside the heap (used mostly in assertions |
| 99 // and verification). The estimate is conservative, ie, not all addresses in | 99 // and verification). The estimate is conservative, ie, not all addresses in |
| 100 // 'allocated' space are actually allocated to our heap. The range is | 100 // 'allocated' space are actually allocated to our heap. The range is |
| 101 // [lowest, highest), inclusive on the low end and exclusive on the high end. | 101 // [lowest, highest), inclusive on the low end and exclusive on the high end. |
| 102 static void* lowest_ever_allocated = reinterpret_cast<void*>(-1); | 102 static void* lowest_ever_allocated = reinterpret_cast<void*>(-1); |
| 103 static void* highest_ever_allocated = reinterpret_cast<void*>(0); | 103 static void* highest_ever_allocated = reinterpret_cast<void*>(0); |
| 104 | 104 |
| | 105 static MutexLockAdapter heap_limits_lock(OS::CreateMutex()); |
| 105 | 106 |
| 106 static void UpdateAllocatedSpaceLimits(void* address, int size) { | 107 static void UpdateAllocatedSpaceLimits(void* address, int size) { |
| | 108 V8SharedStateLocker heap_limits_locker(&heap_limits_lock); |
| 107 lowest_ever_allocated = Min(lowest_ever_allocated, address); | 109 lowest_ever_allocated = Min(lowest_ever_allocated, address); |
| 108 highest_ever_allocated = | 110 highest_ever_allocated = |
| 109 Max(highest_ever_allocated, | 111 Max(highest_ever_allocated, |
| 110 reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size)); | 112 reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size)); |
| 111 } | 113 } |
| 112 | 114 |
| 113 | 115 |
| 114 bool OS::IsOutsideAllocatedSpace(void* address) { | 116 bool OS::IsOutsideAllocatedSpace(void* address) { |
| | 117 V8SharedStateLocker heap_limits_locker(&heap_limits_lock); |
| 115 return address < lowest_ever_allocated || address >= highest_ever_allocated; | 118 return address < lowest_ever_allocated || address >= highest_ever_allocated; |
| 116 } | 119 } |
| 117 | 120 |
| 118 | 121 |
| 119 size_t OS::AllocateAlignment() { | 122 size_t OS::AllocateAlignment() { |
| 120 return getpagesize(); | 123 return getpagesize(); |
| 121 } | 124 } |
| 122 | 125 |
| 123 | 126 |
| 124 void* OS::Allocate(const size_t requested, | 127 void* OS::Allocate(const size_t requested, |
| (...skipping 463 matching lines...) |
| 588 } | 591 } |
| 589 | 592 |
| 590 // This sampler is no longer the active sampler. | 593 // This sampler is no longer the active sampler. |
| 591 active_sampler_ = NULL; | 594 active_sampler_ = NULL; |
| 592 active_ = false; | 595 active_ = false; |
| 593 } | 596 } |
| 594 | 597 |
| 595 #endif // ENABLE_LOGGING_AND_PROFILING | 598 #endif // ENABLE_LOGGING_AND_PROFILING |
| 596 | 599 |
| 597 } } // namespace v8::internal | 600 } } // namespace v8::internal |
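
For reference, the first hunk above amounts to serializing the lowest_ever_allocated / highest_ever_allocated bookkeeping behind a single lock, taken in both the writer (UpdateAllocatedSpaceLimits) and the reader (IsOutsideAllocatedSpace). Below is a minimal standalone sketch of that pattern; it uses std::mutex and std::lock_guard as stand-ins for the OS::CreateMutex() / MutexLockAdapter / V8SharedStateLocker machinery in the patch, and uintptr_t arithmetic instead of the void* Min/Max helpers.

```cpp
#include <cstddef>
#include <cstdint>
#include <mutex>

namespace {

// Conservative bounds on every address ever handed out: the tracked range is
// [lowest, highest), inclusive at the low end and exclusive at the high end.
uintptr_t lowest_ever_allocated = UINTPTR_MAX;
uintptr_t highest_ever_allocated = 0;

// Stand-in for heap_limits_lock in the patch.
std::mutex heap_limits_mutex;

}  // namespace

// Widen the tracked range so that it covers [address, address + size).
void UpdateAllocatedSpaceLimits(void* address, size_t size) {
  std::lock_guard<std::mutex> guard(heap_limits_mutex);
  uintptr_t start = reinterpret_cast<uintptr_t>(address);
  if (start < lowest_ever_allocated) lowest_ever_allocated = start;
  if (start + size > highest_ever_allocated) highest_ever_allocated = start + size;
}

// Conservative check: returns true only if the pointer cannot be in the heap.
bool IsOutsideAllocatedSpace(void* address) {
  std::lock_guard<std::mutex> guard(heap_limits_mutex);
  uintptr_t p = reinterpret_cast<uintptr_t>(address);
  return p < lowest_ever_allocated || p >= highest_ever_allocated;
}
```

Locking the read path as well as the write path is what makes the check safe to call from other threads; the cost is one lock acquisition per IsOutsideAllocatedSpace call, which matters mostly for debug-mode assertions and verification.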
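
The second hunk only shows AllocateAlignment() returning getpagesize() and the start of the OS::Allocate signature; the Allocate body falls inside the skipped region. As a rough illustration of the usual POSIX shape of such an allocator (round the request up to the page size, then map anonymous memory), here is a hypothetical sketch; the function name AllocatePages and the allocated out-parameter are illustrative, not taken from the diff:

```cpp
#include <sys/mman.h>
#include <unistd.h>
#include <cstddef>

// Stand-in for OS::AllocateAlignment() from the diff.
size_t AllocateAlignment() {
  return static_cast<size_t>(getpagesize());
}

// Hypothetical page-aligned allocator: rounds `requested` up to a whole
// number of pages and maps anonymous, readable/writable memory.
void* AllocatePages(size_t requested, size_t* allocated) {
  const size_t page = AllocateAlignment();
  const size_t size = ((requested + page - 1) / page) * page;  // round up
  void* mem = mmap(nullptr, size, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (mem == MAP_FAILED) return nullptr;
  *allocated = size;
  // A real implementation would call UpdateAllocatedSpaceLimits(mem, size)
  // here so that IsOutsideAllocatedSpace() stays conservative.
  return mem;
}
```

Rounding up to a whole number of pages is what lets the returned block honor the alignment that AllocateAlignment() advertises to callers.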