| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 // Platform-specific code for FreeBSD goes here. For the POSIX-compatible | 5 // Platform-specific code for FreeBSD goes here. For the POSIX-compatible |
| 6 // parts, the implementation is in platform-posix.cc. | 6 // parts, the implementation is in platform-posix.cc. |
| 7 | 7 |
| 8 #include <pthread.h> | 8 #include <pthread.h> |
| 9 #include <semaphore.h> | 9 #include <semaphore.h> |
| 10 #include <signal.h> | 10 #include <signal.h> |
| (...skipping 164 matching lines...) |
| 175 | 175 |
| 176 VirtualMemory::VirtualMemory() : address_(NULL), size_(0) { } | 176 VirtualMemory::VirtualMemory() : address_(NULL), size_(0) { } |
| 177 | 177 |
| 178 | 178 |
| 179 VirtualMemory::VirtualMemory(size_t size) | 179 VirtualMemory::VirtualMemory(size_t size) |
| 180 : address_(ReserveRegion(size)), size_(size) { } | 180 : address_(ReserveRegion(size)), size_(size) { } |
| 181 | 181 |
| 182 | 182 |
| 183 VirtualMemory::VirtualMemory(size_t size, size_t alignment) | 183 VirtualMemory::VirtualMemory(size_t size, size_t alignment) |
| 184 : address_(NULL), size_(0) { | 184 : address_(NULL), size_(0) { |
| 185 ASSERT(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment()))); | 185 DCHECK(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment()))); |
| 186 size_t request_size = RoundUp(size + alignment, | 186 size_t request_size = RoundUp(size + alignment, |
| 187 static_cast<intptr_t>(OS::AllocateAlignment())); | 187 static_cast<intptr_t>(OS::AllocateAlignment())); |
| 188 void* reservation = mmap(OS::GetRandomMmapAddr(), | 188 void* reservation = mmap(OS::GetRandomMmapAddr(), |
| 189 request_size, | 189 request_size, |
| 190 PROT_NONE, | 190 PROT_NONE, |
| 191 MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, | 191 MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, |
| 192 kMmapFd, | 192 kMmapFd, |
| 193 kMmapFdOffset); | 193 kMmapFdOffset); |
| 194 if (reservation == MAP_FAILED) return; | 194 if (reservation == MAP_FAILED) return; |
| 195 | 195 |
| 196 uint8_t* base = static_cast<uint8_t*>(reservation); | 196 uint8_t* base = static_cast<uint8_t*>(reservation); |
| 197 uint8_t* aligned_base = RoundUp(base, alignment); | 197 uint8_t* aligned_base = RoundUp(base, alignment); |
| 198 ASSERT_LE(base, aligned_base); | 198 DCHECK_LE(base, aligned_base); |
| 199 | 199 |
| 200 // Unmap extra memory reserved before and after the desired block. | 200 // Unmap extra memory reserved before and after the desired block. |
| 201 if (aligned_base != base) { | 201 if (aligned_base != base) { |
| 202 size_t prefix_size = static_cast<size_t>(aligned_base - base); | 202 size_t prefix_size = static_cast<size_t>(aligned_base - base); |
| 203 OS::Free(base, prefix_size); | 203 OS::Free(base, prefix_size); |
| 204 request_size -= prefix_size; | 204 request_size -= prefix_size; |
| 205 } | 205 } |
| 206 | 206 |
| 207 size_t aligned_size = RoundUp(size, OS::AllocateAlignment()); | 207 size_t aligned_size = RoundUp(size, OS::AllocateAlignment()); |
| 208 ASSERT_LE(aligned_size, request_size); | 208 DCHECK_LE(aligned_size, request_size); |
| 209 | 209 |
| 210 if (aligned_size != request_size) { | 210 if (aligned_size != request_size) { |
| 211 size_t suffix_size = request_size - aligned_size; | 211 size_t suffix_size = request_size - aligned_size; |
| 212 OS::Free(aligned_base + aligned_size, suffix_size); | 212 OS::Free(aligned_base + aligned_size, suffix_size); |
| 213 request_size -= suffix_size; | 213 request_size -= suffix_size; |
| 214 } | 214 } |
| 215 | 215 |
| 216 ASSERT(aligned_size == request_size); | 216 DCHECK(aligned_size == request_size); |
| 217 | 217 |
| 218 address_ = static_cast<void*>(aligned_base); | 218 address_ = static_cast<void*>(aligned_base); |
| 219 size_ = aligned_size; | 219 size_ = aligned_size; |
| 220 } | 220 } |
| 221 | 221 |
| 222 | 222 |
| 223 VirtualMemory::~VirtualMemory() { | 223 VirtualMemory::~VirtualMemory() { |
| 224 if (IsReserved()) { | 224 if (IsReserved()) { |
| 225 bool result = ReleaseRegion(address(), size()); | 225 bool result = ReleaseRegion(address(), size()); |
| 226 ASSERT(result); | 226 DCHECK(result); |
| 227 USE(result); | 227 USE(result); |
| 228 } | 228 } |
| 229 } | 229 } |
| 230 | 230 |
| 231 | 231 |
| 232 bool VirtualMemory::IsReserved() { | 232 bool VirtualMemory::IsReserved() { |
| 233 return address_ != NULL; | 233 return address_ != NULL; |
| 234 } | 234 } |
| 235 | 235 |
| 236 | 236 |
| (...skipping 61 matching lines...) |
| 298 return munmap(base, size) == 0; | 298 return munmap(base, size) == 0; |
| 299 } | 299 } |
| 300 | 300 |
| 301 | 301 |
| 302 bool VirtualMemory::HasLazyCommits() { | 302 bool VirtualMemory::HasLazyCommits() { |
| 303 // TODO(alph): implement for the platform. | 303 // TODO(alph): implement for the platform. |
| 304 return false; | 304 return false; |
| 305 } | 305 } |
| 306 | 306 |
| 307 } } // namespace v8::base | 307 } } // namespace v8::base |
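
For context, a minimal standalone sketch (not V8 code; names such as `ReserveAligned` are hypothetical) of the over-reserve-and-trim scheme the aligned `VirtualMemory` constructor above uses: reserve `size + alignment` bytes as an inaccessible `PROT_NONE` mapping, round the base up to the requested alignment, then `munmap` the unused prefix and suffix. It assumes `alignment` is a power of two that is at least the page size.

```cpp
// Sketch of over-reserve-and-trim aligned reservation (illustrative only).
#include <sys/mman.h>
#include <unistd.h>
#include <cstddef>
#include <cstdint>
#include <cstdio>

static void* ReserveAligned(size_t size, size_t alignment) {
  const size_t page = static_cast<size_t>(sysconf(_SC_PAGESIZE));
  // Round the usable size up to whole pages.
  const size_t aligned_size = (size + page - 1) & ~(page - 1);
  // Over-reserve so an aligned sub-range of aligned_size is guaranteed to fit.
  const size_t request_size = aligned_size + alignment;
  void* reservation = mmap(nullptr, request_size, PROT_NONE,
                           MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, -1, 0);
  if (reservation == MAP_FAILED) return nullptr;

  const uintptr_t base = reinterpret_cast<uintptr_t>(reservation);
  const uintptr_t mask = static_cast<uintptr_t>(alignment) - 1;
  const uintptr_t aligned_base = (base + mask) & ~mask;

  // Unmap the unused prefix before the aligned block.
  if (aligned_base != base) {
    munmap(reinterpret_cast<void*>(base), aligned_base - base);
  }
  // Unmap the unused suffix after the aligned block.
  const uintptr_t end = base + request_size;
  if (aligned_base + aligned_size != end) {
    munmap(reinterpret_cast<void*>(aligned_base + aligned_size),
           end - (aligned_base + aligned_size));
  }
  return reinterpret_cast<void*>(aligned_base);
}

int main() {
  void* p = ReserveAligned(64 * 1024, 1 << 20);  // 64 KiB, 1 MiB aligned.
  std::printf("reserved at %p\n", p);
  if (p != nullptr) munmap(p, 64 * 1024);
  return 0;
}
```

Because the reservation is made with `PROT_NONE` and `MAP_NORESERVE`, it only stakes out address space; access permissions are granted later when the region is committed (handled elsewhere in this file, in the skipped portion above).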