OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 70 matching lines...)
81 static void* GetRandomMmapAddr() { | 81 static void* GetRandomMmapAddr() { |
82 Isolate* isolate = Isolate::UncheckedCurrent(); | 82 Isolate* isolate = Isolate::UncheckedCurrent(); |
83 // Note that the current isolate isn't set up in a call path via | 83 // Note that the current isolate isn't set up in a call path via |
84 // CpuFeatures::Probe. We don't care about randomization in this case because | 84 // CpuFeatures::Probe. We don't care about randomization in this case because |
85 // the code page is immediately freed. | 85 // the code page is immediately freed. |
86 if (isolate != NULL) { | 86 if (isolate != NULL) { |
87 #ifdef V8_TARGET_ARCH_X64 | 87 #ifdef V8_TARGET_ARCH_X64 |
88 uint64_t rnd1 = V8::RandomPrivate(isolate); | 88 uint64_t rnd1 = V8::RandomPrivate(isolate); |
89 uint64_t rnd2 = V8::RandomPrivate(isolate); | 89 uint64_t rnd2 = V8::RandomPrivate(isolate); |
90 uint64_t raw_addr = (rnd1 << 32) ^ rnd2; | 90 uint64_t raw_addr = (rnd1 << 32) ^ rnd2; |
| 91 // Currently available CPUs have 48 bits of virtual addressing. Truncate |
| 92 // the hint address to 46 bits to give the kernel a fighting chance of |
| 93 // fulfilling our placement request. |
91 raw_addr &= V8_UINT64_C(0x3ffffffff000); | 94 raw_addr &= V8_UINT64_C(0x3ffffffff000); |
92 #else | 95 #else |
93 uint32_t raw_addr = V8::RandomPrivate(isolate); | 96 uint32_t raw_addr = V8::RandomPrivate(isolate); |
94 // The range 0x20000000 - 0x60000000 is relatively unpopulated across a | 97 // The range 0x20000000 - 0x60000000 is relatively unpopulated across a |
95 // variety of ASLR modes (PAE kernel, NX compat mode, etc). | 98 // variety of ASLR modes (PAE kernel, NX compat mode, etc). |
96 raw_addr &= 0x3ffff000; | 99 raw_addr &= 0x3ffff000; |
97 raw_addr += 0x20000000; | 100 raw_addr += 0x20000000; |
98 #endif | 101 #endif |
99 return reinterpret_cast<void*>(raw_addr); | 102 return reinterpret_cast<void*>(raw_addr); |
100 } | 103 } |
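The hint arithmetic above is worth unpacking. A minimal standalone sketch (the helper names are hypothetical; only the masks and shifts come from the code above):

    #include <stdint.h>

    // x64: combine two 32-bit random draws into a page-aligned 46-bit hint.
    // 0x3ffffffff000 keeps bits 12..45: the low 12 bits are cleared for 4K
    // page alignment, and bits 46+ are cleared so the hint stays well inside
    // the 48-bit virtual address space.
    uint64_t HintX64(uint32_t rnd1, uint32_t rnd2) {
      uint64_t raw = (static_cast<uint64_t>(rnd1) << 32) ^ rnd2;
      return raw & 0x3ffffffff000ULL;
    }

    // ia32: a page-aligned offset below 1GB, shifted into the relatively
    // unpopulated range [0x20000000, 0x60000000) noted in the comment.
    uint32_t HintIA32(uint32_t rnd) {
      return (rnd & 0x3ffff000) + 0x20000000;
    }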
(...skipping 273 matching lines...)
374 | 377 |
375 | 378 |
376 size_t OS::AllocateAlignment() { | 379 size_t OS::AllocateAlignment() { |
377 return sysconf(_SC_PAGESIZE); | 380 return sysconf(_SC_PAGESIZE); |
378 } | 381 } |
379 | 382 |
380 | 383 |
381 void* OS::Allocate(const size_t requested, | 384 void* OS::Allocate(const size_t requested, |
382 size_t* allocated, | 385 size_t* allocated, |
383 bool is_executable) { | 386 bool is_executable) { |
384 const size_t msize = RoundUp(requested, sysconf(_SC_PAGESIZE)); | 387 const size_t msize = RoundUp(requested, AllocateAlignment()); |
385 int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0); | 388 int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0); |
386 void* addr = GetRandomMmapAddr(); | 389 void* addr = GetRandomMmapAddr(); |
387 void* mbase = mmap(addr, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); | 390 void* mbase = mmap(addr, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); |
388 if (mbase == MAP_FAILED) { | 391 if (mbase == MAP_FAILED) { |
389 LOG(i::Isolate::Current(), | 392 LOG(i::Isolate::Current(), |
390 StringEvent("OS::Allocate", "mmap failed")); | 393 StringEvent("OS::Allocate", "mmap failed")); |
391 return NULL; | 394 return NULL; |
392 } | 395 } |
393 *allocated = msize; | 396 *allocated = msize; |
394 UpdateAllocatedSpaceLimits(mbase, msize); | 397 UpdateAllocatedSpaceLimits(mbase, msize); |
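The contract here: requested is rounded up to a whole number of pages, and the true reservation size is reported back through allocated. A hedged usage sketch (OS::Allocate and OS::Free are V8 platform-layer calls; error handling trimmed):

    size_t allocated = 0;
    void* mem = OS::Allocate(4000, &allocated, false);  // not executable
    if (mem != NULL) {
      // On a 4K-page system, allocated is now 4096, not 4000.
      OS::Free(mem, allocated);
    }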
(...skipping 196 matching lines...)
591 #else // ndef __GLIBC__ | 594 #else // ndef __GLIBC__ |
592 return 0; | 595 return 0; |
593 #endif // ndef __GLIBC__ | 596 #endif // ndef __GLIBC__ |
594 } | 597 } |
595 | 598 |
596 | 599 |
597 // Constants used for mmap. | 600 // Constants used for mmap. |
598 static const int kMmapFd = -1; | 601 static const int kMmapFd = -1; |
599 static const int kMmapFdOffset = 0; | 602 static const int kMmapFdOffset = 0; |
600 | 603 |
| 604 VirtualMemory::VirtualMemory() : address_(NULL), size_(0) { } |
601 | 605 |
602 VirtualMemory::VirtualMemory(size_t size) { | 606 VirtualMemory::VirtualMemory(size_t size) { |
603 address_ = mmap(GetRandomMmapAddr(), size, PROT_NONE, | 607 address_ = ReserveRegion(size); |
604 MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, | 608 size_ = size; |
605 kMmapFd, kMmapFdOffset); | 609 } |
| 610 |
| 611 |
| 612 VirtualMemory::VirtualMemory(size_t size, size_t alignment) |
| 613 : address_(NULL), size_(0) { |
| 614 ASSERT(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment()))); |
| 615 size_t request_size = RoundUp(size + alignment, |
| 616 static_cast<intptr_t>(OS::AllocateAlignment())); |
| 617 void* reservation = mmap(GetRandomMmapAddr(), |
| 618 request_size, |
| 619 PROT_NONE, |
| 620 MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, |
| 621 kMmapFd, |
| 622 kMmapFdOffset); |
| 623 if (reservation == MAP_FAILED) return; |
| 624 Address base = static_cast<Address>(reservation); |
| 625 Address aligned_base = RoundUp(base, alignment); |
| 626 ASSERT(base <= aligned_base); |
| 627 |
| 628 // Unmap extra memory reserved before and after the desired block. |
| 629 size_t bytes_prior = static_cast<size_t>(aligned_base - base); |
| 630 if (bytes_prior > 0) { |
| 631 munmap(base, bytes_prior); |
| 632 } |
| 633   if (bytes_prior < request_size - size) {
| 634 munmap(aligned_base + size, request_size - size - bytes_prior); |
| 635 } |
| 636 |
| 637 address_ = static_cast<void*>(aligned_base); |
606 size_ = size; | 638 size_ = size; |
607 } | 639 } |
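A worked example of the trimming above, assuming 4K pages: for size = 8K and alignment = 64K, request_size = RoundUp(8K + 64K, 4K) = 72K. If mmap returns a hypothetical base = 0x...5000, then aligned_base = 0x...10000, so bytes_prior = 0xB000 (44K) is unmapped in front, and the tail munmap removes 72K - 8K - 44K = 20K starting at aligned_base + size, leaving exactly the aligned 8K block reserved. Note this relies on size itself being page-aligned, so that aligned_base + size is a legal munmap start address.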
608 | 640 |
609 | 641 |
610 VirtualMemory::~VirtualMemory() { | 642 VirtualMemory::~VirtualMemory() { |
611 if (IsReserved()) { | 643 if (IsReserved()) { |
612 if (0 == munmap(address(), size())) address_ = MAP_FAILED; | 644 bool result = ReleaseRegion(address(), size()); |
| 645 ASSERT(result); |
| 646 USE(result); |
613 } | 647 } |
614 } | 648 } |
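The ASSERT/USE pairing keeps release builds warning-free: ASSERT compiles to nothing when DEBUG is unset, which would otherwise leave result unused. A simplified sketch of the pattern (V8's real macros live in its checks/globals headers):

    #include <stdlib.h>

    #ifdef DEBUG
    #define ASSERT(condition) do { if (!(condition)) abort(); } while (false)
    #else
    #define ASSERT(condition) ((void) 0)
    #endif

    // Marks a value as intentionally unused in all build modes.
    template <typename T>
    static inline void USE(T) { }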
615 | 649 |
616 | 650 |
617 bool VirtualMemory::IsReserved() { | 651 bool VirtualMemory::IsReserved() { |
618 return address_ != MAP_FAILED; | 652 return address_ != NULL; |
| 653 } |
| 654 |
| 655 |
| 656 void VirtualMemory::Reset() { |
| 657 address_ = NULL; |
| 658 size_ = 0; |
619 } | 659 } |
620 | 660 |
621 | 661 |
622 bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) { | 662 bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) { |
| 663 return CommitRegion(address, size, is_executable); |
| 664 } |
| 665 |
| 666 |
| 667 bool VirtualMemory::Uncommit(void* address, size_t size) { |
| 668 return UncommitRegion(address, size); |
| 669 } |
| 670 |
| 671 |
| 672 void* VirtualMemory::ReserveRegion(size_t size) { |
| 673 void* result = mmap(GetRandomMmapAddr(), |
| 674 size, |
| 675 PROT_NONE, |
| 676 MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, |
| 677 kMmapFd, |
| 678 kMmapFdOffset); |
| 679 |
| 680 if (result == MAP_FAILED) return NULL; |
| 681 |
| 682 return result; |
| 683 } |
| 684 |
| 685 |
| 686 bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) { |
623 int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0); | 687 int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0); |
624 if (MAP_FAILED == mmap(address, size, prot, | 688 if (MAP_FAILED == mmap(base, |
| 689 size, |
| 690 prot, |
625 MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, | 691 MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, |
626 kMmapFd, kMmapFdOffset)) { | 692 kMmapFd, |
| 693 kMmapFdOffset)) { |
627 return false; | 694 return false; |
628 } | 695 } |
629 | 696 |
630 UpdateAllocatedSpaceLimits(address, size); | 697 UpdateAllocatedSpaceLimits(base, size); |
631 return true; | 698 return true; |
632 } | 699 } |
633 | 700 |
634 | 701 |
635 bool VirtualMemory::Uncommit(void* address, size_t size) { | 702 bool VirtualMemory::UncommitRegion(void* base, size_t size) { |
636 return mmap(address, size, PROT_NONE, | 703 return mmap(base, |
| 704 size, |
| 705 PROT_NONE, |
637 MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE | MAP_FIXED, | 706 MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE | MAP_FIXED, |
638 kMmapFd, kMmapFdOffset) != MAP_FAILED; | 707 kMmapFd, |
| 708 kMmapFdOffset) != MAP_FAILED; |
| 709 } |
| 710 |
| 711 |
| 712 bool VirtualMemory::ReleaseRegion(void* base, size_t size) { |
| 713 return munmap(base, size) == 0; |
639 } | 714 } |
640 | 715 |
641 | 716 |
642 class Thread::PlatformData : public Malloced { | 717 class Thread::PlatformData : public Malloced { |
643 public: | 718 public: |
644 PlatformData() : thread_(kNoThread) {} | 719 PlatformData() : thread_(kNoThread) {} |
645 | 720 |
646 pthread_t thread_; // Thread handle for pthread. | 721 pthread_t thread_; // Thread handle for pthread. |
647 }; | 722 }; |
648 | 723 |
(...skipping 488 matching lines...)
1137 | 1212 |
1138 | 1213 |
1139 void Sampler::Stop() { | 1214 void Sampler::Stop() { |
1140 ASSERT(IsActive()); | 1215 ASSERT(IsActive()); |
1141 SignalSender::RemoveActiveSampler(this); | 1216 SignalSender::RemoveActiveSampler(this); |
1142 SetActive(false); | 1217 SetActive(false); |
1143 } | 1218 } |
1144 | 1219 |
1145 | 1220 |
1146 } } // namespace v8::internal | 1221 } } // namespace v8::internal |