OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
11 // with the distribution. | 11 // with the distribution. |
(...skipping 315 matching lines...)
327 // Make sure line termination is in place. | 327 // Make sure line termination is in place. |
328 frames[i].text[kStackWalkMaxTextLen - 1] = '\0'; | 328 frames[i].text[kStackWalkMaxTextLen - 1] = '\0'; |
329 } | 329 } |
330 | 330 |
331 free(symbols); | 331 free(symbols); |
332 | 332 |
333 return frames_count; | 333 return frames_count; |
334 } | 334 } |
335 | 335 |
336 | 336 |
337 VirtualMemory::VirtualMemory(size_t size) { | 337 VirtualMemory::VirtualMemory() : address_(NULL), size_(0) { } |
338 address_ = ReserveRegion(size); | 338 |
| 339 |
| 340 VirtualMemory::VirtualMemory(size_t size) |
| 341 : address_(ReserveRegion(size)), size_(size) { } |
| 342 |
| 343 |
| 344 VirtualMemory::VirtualMemory(size_t size, size_t alignment) |
| 345 : address_(NULL), size_(0) { |
| 346 ASSERT(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment()))); |
| 347 size_t request_size = RoundUp(size + alignment, |
| 348 static_cast<intptr_t>(OS::AllocateAlignment())); |
| 349 void* reservation = mmap(GetRandomMmapAddr(), |
| 350 request_size, |
| 351 PROT_NONE, |
| 352 MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, |
| 353 kMmapFd, |
| 354 kMmapFdOffset); |
| 355 if (reservation == MAP_FAILED) return; |
| 356 Address base = static_cast<Address>(reservation); |
| 357 Address aligned_base = RoundUp(base, alignment); |
| 358 ASSERT(base <= aligned_base); |
| 359 |
| 360 // Unmap extra memory reserved before and after the desired block. |
| 361 size_t bytes_prior = static_cast<size_t>(aligned_base - base); |
| 362 if (bytes_prior > 0) { |
| 363 munmap(base, bytes_prior); |
| 364 } |
 | 365   if (bytes_prior < request_size - size) { |
| 366 munmap(aligned_base + size, request_size - size - bytes_prior); |
| 367 } |
| 368 |
| 369 address_ = static_cast<void*>(aligned_base); |
339 size_ = size; | 370 size_ = size; |
340 } | 371 } |
341 | 372 |
342 | 373 |
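
The new aligned constructor above over-reserves by alignment extra bytes, rounds the returned base up, and then hands the unused head and tail of the reservation back to the kernel. A standalone POSIX sketch of that same over-reserve-and-trim idea (not this patch's code; the helper name and the Linux-style MAP_ANONYMOUS flag are assumptions, and the alignment is taken to be a power of two and a multiple of the page size):

// Sketch only: reserve size + alignment bytes, round the base up to the
// requested alignment, then unmap the slack before and after the block.
#include <stdint.h>
#include <stdio.h>
#include <sys/mman.h>

static void* ReserveAligned(size_t size, size_t alignment) {
  // Assumes alignment is a power of two and both values are page multiples.
  size_t request_size = size + alignment;
  void* reservation = mmap(NULL, request_size, PROT_NONE,
                           MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
                           -1, 0);
  if (reservation == MAP_FAILED) return NULL;

  uintptr_t base = reinterpret_cast<uintptr_t>(reservation);
  uintptr_t aligned_base = (base + alignment - 1) & ~(alignment - 1);
  size_t prefix = static_cast<size_t>(aligned_base - base);  // slack before
  size_t suffix = request_size - prefix - size;              // slack after

  if (prefix > 0) munmap(reinterpret_cast<void*>(base), prefix);
  if (suffix > 0) munmap(reinterpret_cast<void*>(aligned_base + size), suffix);
  return reinterpret_cast<void*>(aligned_base);
}

int main() {
  void* p = ReserveAligned(1 << 20, 1 << 16);  // 1 MB block, 64 KB aligned
  printf("aligned reservation at %p\n", p);
  if (p != NULL) munmap(p, 1 << 20);
  return 0;
}
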
| 374 VirtualMemory::~VirtualMemory() { |
| 375 if (IsReserved()) { |
| 376 bool result = ReleaseRegion(address(), size()); |
| 377 ASSERT(result); |
| 378 USE(result); |
| 379 } |
| 380 } |
| 381 |
| 382 |
| 383 void VirtualMemory::Reset() { |
| 384 address_ = NULL; |
| 385 size_ = 0; |
| 386 } |
| 387 |
| 388 |
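
Taken together, the new constructor/destructor pair plus Reset() give the class clear ownership: whoever holds a reserved VirtualMemory releases the region on destruction unless Reset() has detached it first. A usage sketch against the interface visible in this patch (it assumes the V8-internal platform.h declarations and the MB constant, so it only compiles inside the tree):

// Sketch only: exercises the interface this patch touches.
#include "platform.h"

namespace v8 { namespace internal {

static bool ReserveAndCommitOnePage() {
  VirtualMemory reservation(1 * MB);  // reserves address space, commits nothing
  if (!reservation.IsReserved()) return false;  // the underlying mmap failed

  // Commit one allocation granule read/write (last argument: not executable).
  size_t page = OS::AllocateAlignment();
  if (!reservation.Commit(reservation.address(), page, false)) return false;

  // The destructor calls ReleaseRegion(); call reservation.Reset() first if
  // ownership of the range has been handed off elsewhere.
  return true;
}

} }  // namespace v8::internal
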
343 void* VirtualMemory::ReserveRegion(size_t size) { | 389 void* VirtualMemory::ReserveRegion(size_t size) { |
344 void* result = mmap(NULL, | 390 void* result = mmap(NULL, |
345 size, | 391 size, |
346 PROT_NONE, | 392 PROT_NONE, |
347 MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, | 393 MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, |
348 kMmapFd, | 394 kMmapFd, |
349 kMmapFdOffset); | 395 kMmapFdOffset); |
350 | 396 |
351 if (result == MAP_FAILED) return NULL; | 397 if (result == MAP_FAILED) return NULL; |
352 | 398 |
353 return result; | 399 return result; |
354 } | 400 } |
355 | 401 |
356 | 402 |
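
ReserveRegion() only claims address space: PROT_NONE means no access and MAP_NORESERVE means no swap is charged, so nothing is committed until the protection is changed later. A standalone POSIX sketch of that reserve-then-commit split (it commits with mprotect; the patch's CommitRegion body falls outside this hunk and may achieve the same with a MAP_FIXED remap):

// Sketch only: reserve a range with no access, then commit the first page.
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>

int main() {
  const size_t kPage = 4096;            // assumes 4 KB pages
  const size_t kReserved = 16 * kPage;

  void* region = mmap(NULL, kReserved, PROT_NONE,
                      MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, -1, 0);
  if (region == MAP_FAILED) return 1;

  // Commit the first page by making it readable and writable.
  if (mprotect(region, kPage, PROT_READ | PROT_WRITE) != 0) return 1;
  memset(region, 0xAB, kPage);          // safe: this page is now committed
  printf("first byte: 0x%02x\n", static_cast<unsigned char*>(region)[0]);

  munmap(region, kReserved);            // release the whole reservation
  return 0;
}
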
357 VirtualMemory::~VirtualMemory() { | |
358 if (IsReserved()) { | |
359 if (ReleaseRegion(address_, size_)) address_ = MAP_FAILED; | |
360 } | |
361 } | |
362 | |
363 | |
364 bool VirtualMemory::IsReserved() { | 403 bool VirtualMemory::IsReserved() { |
365 return address_ != MAP_FAILED; | 404 return address_ != NULL; |
366 } | 405 } |
367 | 406 |
368 | 407 |
369 bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) { | 408 bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) { |
370 return CommitRegion(address, size, is_executable); | 409 return CommitRegion(address, size, is_executable); |
371 } | 410 } |
372 | 411 |
373 | 412 |
374 bool VirtualMemory::CommitRegion(void* address, | 413 bool VirtualMemory::CommitRegion(void* address, |
375 size_t size, | 414 size_t size, |
(...skipping 455 matching lines...)
831 | 870 |
832 | 871 |
833 void Sampler::Stop() { | 872 void Sampler::Stop() { |
834 ASSERT(IsActive()); | 873 ASSERT(IsActive()); |
835 SamplerThread::RemoveActiveSampler(this); | 874 SamplerThread::RemoveActiveSampler(this); |
836 SetActive(false); | 875 SetActive(false); |
837 } | 876 } |
838 | 877 |
839 | 878 |
840 } } // namespace v8::internal | 879 } } // namespace v8::internal |