OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 46 matching lines...)
57 #include "vm-state-inl.h" | 57 #include "vm-state-inl.h" |
58 | 58 |
59 | 59 |
60 namespace v8 { | 60 namespace v8 { |
61 namespace internal { | 61 namespace internal { |
62 | 62 |
63 | 63 |
64 static Mutex* limit_mutex = NULL; | 64 static Mutex* limit_mutex = NULL; |
65 | 65 |
66 | 66 |
67 static void* GetRandomMmapAddr() { | |
68 Isolate* isolate = Isolate::UncheckedCurrent(); | |
69 // Note that the current isolate isn't set up in a call path via | |
70 // CpuFeatures::Probe. We don't care about randomization in this case because | |
71 // the code page is immediately freed. | |
72 if (isolate != NULL) { | |
73 #if V8_TARGET_ARCH_X64 | |
74 uint64_t rnd1 = V8::RandomPrivate(isolate); | |
75 uint64_t rnd2 = V8::RandomPrivate(isolate); | |
76 uint64_t raw_addr = (rnd1 << 32) ^ rnd2; | |
77 // Currently available CPUs have 48 bits of virtual addressing. Truncate | |
78 // the hint address to 46 bits to give the kernel a fighting chance of | |
79 // fulfilling our placement request. | |
80 raw_addr &= V8_UINT64_C(0x3ffffffff000); | |
81 #else | |
82 uint32_t raw_addr = V8::RandomPrivate(isolate); | |
83 // The range 0x20000000 - 0x60000000 is relatively unpopulated across a | |
84 // variety of ASLR modes (PAE kernel, NX compat mode, etc.). | |
85 raw_addr &= 0x3ffff000; | |
86 raw_addr += 0x20000000; | |
87 #endif | |
88 return reinterpret_cast<void*>(raw_addr); | |
89 } | |
90 return NULL; | |
91 } | |
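
Note: the deleted helper above seeds mmap with a randomized, page-aligned
hint so code pages land at unpredictable addresses. A minimal standalone
sketch of the same technique, with std::mt19937_64 standing in for
V8::RandomPrivate (which is internal to V8 and not reproduced here):

    #include <cstdint>
    #include <random>

    // Sketch only: std::mt19937_64 replaces V8::RandomPrivate. The kernel
    // treats the returned address purely as a placement hint.
    void* GetRandomMmapHint() {
      static std::mt19937_64 rng{std::random_device{}()};
    #if defined(__x86_64__)
      // 48-bit virtual addressing: truncate the hint to 46 bits, keeping
      // it page-aligned, so the kernel can usually honor it.
      uintptr_t raw = rng() & UINT64_C(0x3ffffffff000);
    #else
      // 32-bit: aim at the relatively unpopulated 0x20000000-0x60000000 range.
      uintptr_t raw = (static_cast<uint32_t>(rng()) & 0x3ffff000u) + 0x20000000u;
    #endif
      return reinterpret_cast<void*>(raw);
    }
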
92 | |
93 | |
94 int OS::ActivationFrameAlignment() { | |
95 // With gcc 4.4 the tree vectorization optimizer can generate code | |
96 // that requires 16 byte alignment such as movdqa on x86. | |
97 return 16; | |
98 } | |
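
Note: movdqa is an aligned SSE load/store; it faults when its operand is not
16-byte aligned, which is why the function above reports 16. A tiny sketch of
the same guarantee expressed at the C++ level (illustrative only):

    #include <cstdint>

    // alignas(16) gives the compiler the alignment movdqa requires.
    struct alignas(16) VectorSlot { uint8_t bytes[16]; };
    static_assert(alignof(VectorSlot) == 16, "movdqa needs 16-byte alignment");
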
99 | |
100 | |
101 const char* OS::LocalTimezone(double time) { | 67 const char* OS::LocalTimezone(double time) { |
102 if (std::isnan(time)) return ""; | 68 if (std::isnan(time)) return ""; |
103 time_t tv = static_cast<time_t>(floor(time/msPerSecond)); | 69 time_t tv = static_cast<time_t>(floor(time/msPerSecond)); |
104 struct tm* t = localtime(&tv); | 70 struct tm* t = localtime(&tv); |
105 if (NULL == t) return ""; | 71 if (NULL == t) return ""; |
106 return t->tm_zone; | 72 return t->tm_zone; |
107 } | 73 } |
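
Note: tm_zone is a BSD extension to struct tm (available on this platform and
on glibc), not portable C. A self-contained sketch of the function above,
assuming V8's msPerSecond constant is 1000 as elsewhere in the codebase:

    #include <cmath>
    #include <cstdio>
    #include <ctime>

    // Sketch of OS::LocalTimezone outside the class. localtime() is not
    // thread-safe; the original accepts that trade-off too.
    const char* LocalTimezoneSketch(double time_ms) {
      if (std::isnan(time_ms)) return "";
      time_t tv = static_cast<time_t>(std::floor(time_ms / 1000.0));
      struct tm* t = localtime(&tv);
      if (t == NULL) return "";
      return t->tm_zone;  // BSD/glibc extension field
    }

    int main() {
      std::printf("%s\n", LocalTimezoneSketch(0));  // zone name at the epoch
    }
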
108 | 74 |
109 | 75 |
110 double OS::LocalTimeOffset() { | 76 double OS::LocalTimeOffset() { |
(...skipping 28 matching lines...)
139 bool OS::IsOutsideAllocatedSpace(void* address) { | 105 bool OS::IsOutsideAllocatedSpace(void* address) { |
140 return address < lowest_ever_allocated || address >= highest_ever_allocated; | 106 return address < lowest_ever_allocated || address >= highest_ever_allocated; |
141 } | 107 } |
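
Note: the lowest_ever_allocated / highest_ever_allocated bounds read here are
maintained by UpdateAllocatedSpaceLimits (called from OS::Allocate below)
under limit_mutex. A hedged sketch of that bookkeeping, with std::mutex
standing in for V8's platform Mutex:

    #include <cstddef>
    #include <mutex>

    // Names mirror the diff; std::mutex is an assumption standing in for
    // V8's Mutex. The bounds only ever widen, so the predicate above can
    // read them without locking and stay conservatively correct.
    static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
    static void* highest_ever_allocated = NULL;
    static std::mutex limit_mutex_sketch;

    static void UpdateAllocatedSpaceLimits(void* address, size_t size) {
      std::lock_guard<std::mutex> lock(limit_mutex_sketch);
      if (address < lowest_ever_allocated) lowest_ever_allocated = address;
      void* end = static_cast<char*>(address) + size;
      if (end > highest_ever_allocated) highest_ever_allocated = end;
    }
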
142 | 108 |
143 | 109 |
144 void* OS::Allocate(const size_t requested, | 110 void* OS::Allocate(const size_t requested, |
145 size_t* allocated, | 111 size_t* allocated, |
146 bool is_executable) { | 112 bool is_executable) { |
147 const size_t msize = RoundUp(requested, AllocateAlignment()); | 113 const size_t msize = RoundUp(requested, AllocateAlignment()); |
148 int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0); | 114 int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0); |
149 void* addr = GetRandomMmapAddr(); | 115 void* addr = OS::GetRandomMmapAddr(); |
150 void* mbase = mmap(addr, msize, prot, MAP_PRIVATE | MAP_ANON, -1, 0); | 116 void* mbase = mmap(addr, msize, prot, MAP_PRIVATE | MAP_ANON, -1, 0); |
151 if (mbase == MAP_FAILED) { | 117 if (mbase == MAP_FAILED) { |
152 LOG(i::Isolate::Current(), | 118 LOG(i::Isolate::Current(), |
153 StringEvent("OS::Allocate", "mmap failed")); | 119 StringEvent("OS::Allocate", "mmap failed")); |
154 return NULL; | 120 return NULL; |
155 } | 121 } |
156 *allocated = msize; | 122 *allocated = msize; |
157 UpdateAllocatedSpaceLimits(mbase, msize); | 123 UpdateAllocatedSpaceLimits(mbase, msize); |
158 return mbase; | 124 return mbase; |
159 } | 125 } |
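
Note: only the call site changes in this hunk (the static helper becomes
OS::GetRandomMmapAddr). For readers new to the pattern, a stripped-down
sketch of this anonymous-mmap allocation path, with getpagesize() replacing
V8's RoundUp/AllocateAlignment and perror replacing its LOG call:

    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstddef>
    #include <cstdio>

    // Sketch of OS::Allocate without V8's logging or limit bookkeeping.
    void* AllocateSketch(size_t requested, size_t* allocated,
                         bool is_executable) {
      size_t page = static_cast<size_t>(getpagesize());
      size_t msize = (requested + page - 1) & ~(page - 1);  // round up
      int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
      void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANON, -1, 0);
      if (mbase == MAP_FAILED) {
        std::perror("mmap");
        return NULL;
      }
      *allocated = msize;
      return mbase;
    }
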
(...skipping 174 matching lines...)
334 | 300 |
335 VirtualMemory::VirtualMemory(size_t size) | 301 VirtualMemory::VirtualMemory(size_t size) |
336 : address_(ReserveRegion(size)), size_(size) { } | 302 : address_(ReserveRegion(size)), size_(size) { } |
337 | 303 |
338 | 304 |
339 VirtualMemory::VirtualMemory(size_t size, size_t alignment) | 305 VirtualMemory::VirtualMemory(size_t size, size_t alignment) |
340 : address_(NULL), size_(0) { | 306 : address_(NULL), size_(0) { |
341 ASSERT(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment()))); | 307 ASSERT(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment()))); |
342 size_t request_size = RoundUp(size + alignment, | 308 size_t request_size = RoundUp(size + alignment, |
343 static_cast<intptr_t>(OS::AllocateAlignment())); | 309 static_cast<intptr_t>(OS::AllocateAlignment())); |
344 void* reservation = mmap(GetRandomMmapAddr(), | 310 void* reservation = mmap(OS::GetRandomMmapAddr(), |
345 request_size, | 311 request_size, |
346 PROT_NONE, | 312 PROT_NONE, |
347 MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, | 313 MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, |
348 kMmapFd, | 314 kMmapFd, |
349 kMmapFdOffset); | 315 kMmapFdOffset); |
350 if (reservation == MAP_FAILED) return; | 316 if (reservation == MAP_FAILED) return; |
351 | 317 |
352 Address base = static_cast<Address>(reservation); | 318 Address base = static_cast<Address>(reservation); |
353 Address aligned_base = RoundUp(base, alignment); | 319 Address aligned_base = RoundUp(base, alignment); |
354 ASSERT_LE(base, aligned_base); | 320 ASSERT_LE(base, aligned_base); |
(...skipping 51 matching lines...)
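
Note: the constructor above uses the standard over-reserve-and-trim trick:
reserve size + alignment bytes of PROT_NONE address space, round the base up
to the alignment, then release the excess with munmap in the lines elided
here (the exact trimming code is an assumption based on that pattern). A
self-contained sketch:

    #include <sys/mman.h>
    #include <cstddef>
    #include <cstdint>

    // Assumes size and alignment are page multiples and alignment is a
    // power of two, as the ASSERTs in the diff effectively require.
    void* ReserveAligned(size_t size, size_t alignment) {
      size_t request = size + alignment;
      void* reservation = mmap(NULL, request, PROT_NONE,
                               MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, -1, 0);
      if (reservation == MAP_FAILED) return NULL;
      char* base = static_cast<char*>(reservation);
      char* aligned = reinterpret_cast<char*>(
          (reinterpret_cast<uintptr_t>(base) + alignment - 1) &
          ~(alignment - 1));
      if (aligned > base) munmap(base, aligned - base);      // trim head
      size_t tail = request - static_cast<size_t>(aligned - base) - size;
      if (tail > 0) munmap(aligned + size, tail);            // trim tail
      return aligned;
    }
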
406 } | 372 } |
407 | 373 |
408 | 374 |
409 bool VirtualMemory::Guard(void* address) { | 375 bool VirtualMemory::Guard(void* address) { |
410 OS::Guard(address, OS::CommitPageSize()); | 376 OS::Guard(address, OS::CommitPageSize()); |
411 return true; | 377 return true; |
412 } | 378 } |
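
Note: OS::Guard itself is outside this diff. On V8's POSIX ports a guard page
is conventionally a page remapped PROT_NONE so that any access faults
immediately; a sketch under that assumption:

    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstddef>

    // Assumes OS::Guard wraps mprotect, as the POSIX ports typically do.
    // Touching the page afterwards raises SIGSEGV.
    bool GuardPage(void* address) {
      size_t page = static_cast<size_t>(getpagesize());
      return mprotect(address, page, PROT_NONE) == 0;
    }
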
413 | 379 |
414 | 380 |
415 void* VirtualMemory::ReserveRegion(size_t size) { | 381 void* VirtualMemory::ReserveRegion(size_t size) { |
416 void* result = mmap(GetRandomMmapAddr(), | 382 void* result = mmap(OS::GetRandomMmapAddr(), |
417 size, | 383 size, |
418 PROT_NONE, | 384 PROT_NONE, |
419 MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, | 385 MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, |
420 kMmapFd, | 386 kMmapFd, |
421 kMmapFdOffset); | 387 kMmapFdOffset); |
422 | 388 |
423 if (result == MAP_FAILED) return NULL; | 389 if (result == MAP_FAILED) return NULL; |
424 | 390 |
425 return result; | 391 return result; |
426 } | 392 } |
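
Note: ReserveRegion maps address space PROT_NONE with MAP_NORESERVE, so no
memory is committed until a later commit step flips the protection. The
matching CommitRegion is in the lines skipped below; a hedged sketch of that
counterpart, assuming the MAP_FIXED remap V8's other POSIX ports use:

    #include <sys/mman.h>
    #include <cstddef>

    // Sketch: remap part of a PROT_NONE reservation as usable memory.
    // MAP_FIXED is safe here only because the range is already reserved.
    bool CommitSketch(void* base, size_t size, bool is_executable) {
      int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
      return mmap(base, size, prot,
                  MAP_PRIVATE | MAP_ANON | MAP_FIXED, -1, 0) != MAP_FAILED;
    }
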
(...skipping 112 matching lines...)
539 limit_mutex = CreateMutex(); | 505 limit_mutex = CreateMutex(); |
540 } | 506 } |
541 | 507 |
542 | 508 |
543 void OS::TearDown() { | 509 void OS::TearDown() { |
544 delete limit_mutex; | 510 delete limit_mutex; |
545 } | 511 } |
546 | 512 |
547 | 513 |
548 } } // namespace v8::internal | 514 } } // namespace v8::internal |