Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <memory> | 5 #include <memory> |
| 6 | 6 |
| 7 #include "src/base/atomic-utils.h" | 7 #include "src/base/atomic-utils.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 | 9 |
| 10 #include "src/macro-assembler.h" | 10 #include "src/macro-assembler.h" |
| (...skipping 96 matching lines...) | |
| 107 void ReplaceReferenceInCode(Handle<Code> code, Handle<Object> old_ref, | 107 void ReplaceReferenceInCode(Handle<Code> code, Handle<Object> old_ref, |
| 108 Handle<Object> new_ref) { | 108 Handle<Object> new_ref) { |
| 109 for (RelocIterator it(*code, 1 << RelocInfo::EMBEDDED_OBJECT); !it.done(); | 109 for (RelocIterator it(*code, 1 << RelocInfo::EMBEDDED_OBJECT); !it.done(); |
| 110 it.next()) { | 110 it.next()) { |
| 111 if (it.rinfo()->target_object() == *old_ref) { | 111 if (it.rinfo()->target_object() == *old_ref) { |
| 112 it.rinfo()->set_target_object(*new_ref); | 112 it.rinfo()->set_target_object(*new_ref); |
| 113 } | 113 } |
| 114 } | 114 } |
| 115 } | 115 } |
| 116 | 116 |
| 117 Handle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size) { | 117 static void MemoryFinalizer(const v8::WeakCallbackInfo<void>& data) { |
| 118 if (size > (WasmModule::kV8MaxPages * WasmModule::kPageSize)) { | 118 JSArrayBuffer** p = reinterpret_cast<JSArrayBuffer**>(data.GetParameter()); |
| 119 // TODO(titzer): lift restriction on maximum memory allocated here. | 119 JSArrayBuffer* buffer = *p; |
| 120 return Handle<JSArrayBuffer>::null(); | |
| 121 } | |
| 122 void* memory = isolate->array_buffer_allocator()->Allocate(size); | |
| 123 if (memory == nullptr) { | |
| 124 return Handle<JSArrayBuffer>::null(); | |
| 125 } | |
| 126 | 120 |
| 127 #if DEBUG | 121 void* memory = buffer->backing_store(); |
| 128 // Double check the API allocator actually zero-initialized the memory. | 122 base::OS::Free(memory, |
| 129 const byte* bytes = reinterpret_cast<const byte*>(memory); | 123 RoundUp(kWasmMaxHeapOffset, base::OS::CommitPageSize())); |
| 130 for (size_t i = 0; i < size; ++i) { | 124 |
| 131 DCHECK_EQ(0, bytes[i]); | 125 data.GetIsolate()->AdjustAmountOfExternalAllocatedMemory( |
| 132 } | 126 -buffer->byte_length()->Number()); |
| 127 | |
| 128 GlobalHandles::Destroy(reinterpret_cast<Object**>(p)); | |
| 129 } | |
| 130 | |
| 131 #if V8_TARGET_ARCH_64_BIT | |
| 132 const bool kGuardRegionsSupported = true; | |
| 133 #else | |
| 134 const bool kGuardRegionsSupported = false; | |
| 133 #endif | 135 #endif |
| 134 | 136 |
| 135 Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer(); | 137 bool EnableGuardRegions() { |
| 136 JSArrayBuffer::Setup(buffer, isolate, false, memory, static_cast<int>(size)); | 138 return FLAG_wasm_guard_pages && kGuardRegionsSupported; |
| 137 buffer->set_is_neuterable(false); | 139 } |
| 138 return buffer; | 140 |
| 141 void* TryAllocateBackingStore(Isolate* isolate, size_t size, | |
| 142 bool enable_guard_regions, bool& is_external) { | |
| 143 is_external = false; | |
| 144 // TODO(eholk): Right now enable_guard_regions has no effect on 32-bit | |
| 145 // systems. It may be safer to fail instead, given that other code might do | |
| 146 // things that would be unsafe if they expected guard pages where there | |
| 147 // weren't any. | |
| 148 if (enable_guard_regions && kGuardRegionsSupported) { | |
| 149 void* memory; | |
| 150 // TODO(eholk): On Windows we want to make sure we don't commit the guard | |
| 151 // pages yet. | |
| 152 | |
| 153 // We always allocate the largest possible offset into the heap, so the | |
| 154 // addressable memory after the guard page can be made inaccessible. | |
| 155 const size_t alloc_size = | |
| 156 RoundUp(kWasmMaxHeapOffset, base::OS::CommitPageSize()); | |
| 157 DCHECK_EQ(0, size % base::OS::CommitPageSize()); | |
| 158 | |
| 159 size_t allocated_size = 0; | |
| 160 const bool is_executable = false; | |
| 161 memory = base::OS::Allocate(alloc_size, &allocated_size, is_executable); | |
| 162 if (allocated_size < alloc_size) { | |
| 163 base::OS::Free(memory, allocated_size); | |
| 164 return nullptr; | |
| 165 } | |
| 166 | |
| 167 if (memory == nullptr) { | |
| 168 return nullptr; | |
| 169 } | |
| 170 | |
| 171 byte* bytes = reinterpret_cast<byte*>(memory); | |
| 172 base::OS::Guard(bytes + size, alloc_size - size); | |
| 173 | |
| 174 reinterpret_cast<v8::Isolate*>(isolate) | |
| 175 ->AdjustAmountOfExternalAllocatedMemory(size); | |
| 176 | |
| 177 is_external = true; | |
| 178 return memory; | |
| 179 } else { | |
| 180 void* memory = isolate->array_buffer_allocator()->Allocate(size); | |
| 181 return memory; | |
| 182 } | |
| 139 } | 183 } |
| 140 | 184 |
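The guard-region path in TryAllocateBackingStore above reserves the whole addressable wasm heap range up front and then makes everything beyond the requested size inaccessible, so an out-of-bounds access traps instead of touching neighbouring memory. Below is a minimal standalone sketch of that idea using POSIX mmap/mprotect; the helper name and the raw system calls are illustrative assumptions, while the patch itself goes through base::OS::Allocate and base::OS::Guard.

```cpp
#include <sys/mman.h>
#include <cstddef>

// Reserve `reserve_size` bytes, keep the first `size` bytes usable, and turn
// the rest into an inaccessible guard region. Sketch only: error handling is
// minimal and the real code uses base::OS::Allocate / base::OS::Guard.
void* AllocateWithGuardRegion(std::size_t size, std::size_t reserve_size) {
  void* mem = mmap(nullptr, reserve_size, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (mem == MAP_FAILED) return nullptr;
  // Everything past the accessible prefix faults on access.
  if (mprotect(static_cast<char*>(mem) + size, reserve_size - size,
               PROT_NONE) != 0) {
    munmap(mem, reserve_size);
    return nullptr;
  }
  return mem;
}
```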
| 141 void RelocateMemoryReferencesInCode(Handle<FixedArray> code_table, | 185 void RelocateMemoryReferencesInCode(Handle<FixedArray> code_table, |
| 142 Address old_start, Address start, | 186 Address old_start, Address start, |
| 143 uint32_t prev_size, uint32_t new_size) { | 187 uint32_t prev_size, uint32_t new_size) { |
| 144 for (int i = 0; i < code_table->length(); ++i) { | 188 for (int i = 0; i < code_table->length(); ++i) { |
| 145 DCHECK(code_table->get(i)->IsCode()); | 189 DCHECK(code_table->get(i)->IsCode()); |
| 146 Handle<Code> code = Handle<Code>(Code::cast(code_table->get(i))); | 190 Handle<Code> code = Handle<Code>(Code::cast(code_table->get(i))); |
| 147 AllowDeferredHandleDereference embedding_raw_address; | 191 AllowDeferredHandleDereference embedding_raw_address; |
| 148 int mask = (1 << RelocInfo::WASM_MEMORY_REFERENCE) | | 192 int mask = (1 << RelocInfo::WASM_MEMORY_REFERENCE) | |
| (...skipping 475 matching lines...) | |
| 624 TRACE_CHAIN(WasmCompiledModule::cast(wasm_module->GetInternalField(0))); | 668 TRACE_CHAIN(WasmCompiledModule::cast(wasm_module->GetInternalField(0))); |
| 625 TRACE("}\n"); | 669 TRACE("}\n"); |
| 626 } | 670 } |
| 627 compiled_module->reset_weak_owning_instance(); | 671 compiled_module->reset_weak_owning_instance(); |
| 628 GlobalHandles::Destroy(reinterpret_cast<Object**>(p)); | 672 GlobalHandles::Destroy(reinterpret_cast<Object**>(p)); |
| 629 TRACE("}\n"); | 673 TRACE("}\n"); |
| 630 } | 674 } |
| 631 | 675 |
| 632 } // namespace | 676 } // namespace |
| 633 | 677 |
| 678 Handle<JSArrayBuffer> wasm::NewArrayBuffer(Isolate* isolate, size_t size, | |
| 679 bool enable_guard_regions) { | |
| 680 if (size > (WasmModule::kV8MaxPages * WasmModule::kPageSize)) { | |
| 681 // TODO(titzer): lift restriction on maximum memory allocated here. | |
| 682 return Handle<JSArrayBuffer>::null(); | |
| 683 } | |
| 684 | |
| 685 enable_guard_regions = enable_guard_regions && kGuardRegionsSupported; | |
| 686 | |
| 687 bool is_external; // Set by TryAllocateBackingStore | |
| 688 void* memory = | |
| 689 TryAllocateBackingStore(isolate, size, enable_guard_regions, is_external); | |
| 690 | |
| 691 if (memory == nullptr) { | |
| 692 return Handle<JSArrayBuffer>::null(); | |
| 693 } | |
| 694 | |
| 695 #if DEBUG | |
| 696 // Double check the API allocator actually zero-initialized the memory. | |
| 697 const byte* bytes = reinterpret_cast<const byte*>(memory); | |
| 698 for (size_t i = 0; i < size; ++i) { | |
| 699 DCHECK_EQ(0, bytes[i]); | |
| 700 } | |
| 701 #endif | |
| 702 | |
| 703 Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer(); | |
| 704 JSArrayBuffer::Setup(buffer, isolate, is_external, memory, | |
| 705 static_cast<int>(size)); | |
| 706 buffer->set_is_neuterable(false); | |
| 707 buffer->set_has_guard_region(enable_guard_regions); | |
| 708 | |
| 709 if (is_external) { | |
| 710 // We mark the buffer as external if we allocated it here with guard | |
| 711 // pages. That means we need to arrange for it to be freed. | |
| 712 | |
| 713 // TODO(eholk): Finalizers may not run when the main thread is shutting | |
| 714 // down, which means we may leak memory here. | |
titzer 2016/11/09 19:23:47:
To fix this, we probably need to do something at i
Eric Holk 2016/11/09 20:02:59:
Agreed. At this point I'd prefer to address this i
| 715 Handle<Object> global_handle = isolate->global_handles()->Create(*buffer); | |
| 716 GlobalHandles::MakeWeak(global_handle.location(), global_handle.location(), | |
| 717 &MemoryFinalizer, v8::WeakCallbackType::kFinalizer); | |
| 718 } | |
| 719 | |
| 720 return buffer; | |
| 721 } | |
| 722 | |
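Because the guard-region backing store is allocated outside the ArrayBuffer allocator and the buffer is marked external, the patch attaches a weak global handle with MemoryFinalizer so the reservation is released when the buffer is garbage-collected. The sketch below shows the same pattern against the public v8::Global/SetWeak API; the holder type, function names, and munmap call are assumptions for illustration, not the internal GlobalHandles interface used in the patch.

```cpp
#include <sys/mman.h>
#include <cstddef>
#include <v8.h>

// Hypothetical holder tying the raw reservation to the buffer's lifetime.
struct BackingStoreHolder {
  void* memory = nullptr;
  std::size_t reserved = 0;
  v8::Global<v8::ArrayBuffer> handle;
};

// Weak callback: free the reservation once the ArrayBuffer is unreachable.
void FreeBackingStore(const v8::WeakCallbackInfo<BackingStoreHolder>& data) {
  BackingStoreHolder* holder = data.GetParameter();
  munmap(holder->memory, holder->reserved);  // the patch uses base::OS::Free
  holder->handle.Reset();
  delete holder;
}

void RegisterFinalizer(v8::Isolate* isolate, v8::Local<v8::ArrayBuffer> buffer,
                       void* memory, std::size_t reserved) {
  auto* holder = new BackingStoreHolder();
  holder->memory = memory;
  holder->reserved = reserved;
  holder->handle.Reset(isolate, buffer);
  holder->handle.SetWeak(holder, FreeBackingStore,
                         v8::WeakCallbackType::kFinalizer);
}
```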
| 634 const char* wasm::SectionName(WasmSectionCode code) { | 723 const char* wasm::SectionName(WasmSectionCode code) { |
| 635 switch (code) { | 724 switch (code) { |
| 636 case kUnknownSectionCode: | 725 case kUnknownSectionCode: |
| 637 return "Unknown"; | 726 return "Unknown"; |
| 638 case kTypeSectionCode: | 727 case kTypeSectionCode: |
| 639 return "Type"; | 728 return "Type"; |
| 640 case kImportSectionCode: | 729 case kImportSectionCode: |
| 641 return "Import"; | 730 return "Import"; |
| 642 case kFunctionSectionCode: | 731 case kFunctionSectionCode: |
| 643 return "Function"; | 732 return "Function"; |
| (...skipping 426 matching lines...) | |
| 1070 Handle<JSObject> instance = factory->NewJSObjectFromMap(map, TENURED); | 1159 Handle<JSObject> instance = factory->NewJSObjectFromMap(map, TENURED); |
| 1071 instance->SetInternalField(kWasmMemObject, | 1160 instance->SetInternalField(kWasmMemObject, |
| 1072 isolate_->heap()->undefined_value()); | 1161 isolate_->heap()->undefined_value()); |
| 1073 | 1162 |
| 1074 //-------------------------------------------------------------------------- | 1163 //-------------------------------------------------------------------------- |
| 1075 // Set up the globals for the new instance. | 1164 // Set up the globals for the new instance. |
| 1076 //-------------------------------------------------------------------------- | 1165 //-------------------------------------------------------------------------- |
| 1077 MaybeHandle<JSArrayBuffer> old_globals; | 1166 MaybeHandle<JSArrayBuffer> old_globals; |
| 1078 uint32_t globals_size = module_->globals_size; | 1167 uint32_t globals_size = module_->globals_size; |
| 1079 if (globals_size > 0) { | 1168 if (globals_size > 0) { |
| 1169 const bool enable_guard_regions = false; | |
| 1080 Handle<JSArrayBuffer> global_buffer = | 1170 Handle<JSArrayBuffer> global_buffer = |
| 1081 NewArrayBuffer(isolate_, globals_size); | 1171 NewArrayBuffer(isolate_, globals_size, enable_guard_regions); |
| 1082 globals_ = global_buffer; | 1172 globals_ = global_buffer; |
| 1083 if (globals_.is_null()) { | 1173 if (globals_.is_null()) { |
| 1084 thrower_->RangeError("Out of memory: wasm globals"); | 1174 thrower_->RangeError("Out of memory: wasm globals"); |
| 1085 return nothing; | 1175 return nothing; |
| 1086 } | 1176 } |
| 1087 Address old_address = owner.is_null() | 1177 Address old_address = owner.is_null() |
| 1088 ? nullptr | 1178 ? nullptr |
| 1089 : GetGlobalStartAddressFromCodeTemplate( | 1179 : GetGlobalStartAddressFromCodeTemplate( |
| 1090 isolate_->heap()->undefined_value(), | 1180 isolate_->heap()->undefined_value(), |
| 1091 JSObject::cast(*owner.ToHandleChecked())); | 1181 JSObject::cast(*owner.ToHandleChecked())); |
| (...skipping 29 matching lines...) | |
| 1121 // Set up the memory for the new instance. | 1211 // Set up the memory for the new instance. |
| 1122 //-------------------------------------------------------------------------- | 1212 //-------------------------------------------------------------------------- |
| 1123 MaybeHandle<JSArrayBuffer> old_memory; | 1213 MaybeHandle<JSArrayBuffer> old_memory; |
| 1124 | 1214 |
| 1125 uint32_t min_mem_pages = module_->min_mem_pages; | 1215 uint32_t min_mem_pages = module_->min_mem_pages; |
| 1126 isolate_->counters()->wasm_min_mem_pages_count()->AddSample(min_mem_pages); | 1216 isolate_->counters()->wasm_min_mem_pages_count()->AddSample(min_mem_pages); |
| 1127 | 1217 |
| 1128 if (!memory_.is_null()) { | 1218 if (!memory_.is_null()) { |
| 1129 // Set externally passed ArrayBuffer non neuterable. | 1219 // Set externally passed ArrayBuffer non neuterable. |
| 1130 memory_->set_is_neuterable(false); | 1220 memory_->set_is_neuterable(false); |
| 1221 | |
| 1222 DCHECK_IMPLIES(EnableGuardRegions(), module_->origin == kAsmJsOrigin || | |
| 1223 memory_->has_guard_region()); | |
| 1131 } else if (min_mem_pages > 0) { | 1224 } else if (min_mem_pages > 0) { |
| 1132 memory_ = AllocateMemory(min_mem_pages); | 1225 memory_ = AllocateMemory(min_mem_pages); |
| 1133 if (memory_.is_null()) return nothing; // failed to allocate memory | 1226 if (memory_.is_null()) return nothing; // failed to allocate memory |
| 1134 } | 1227 } |
| 1135 | 1228 |
| 1136 if (!memory_.is_null()) { | 1229 if (!memory_.is_null()) { |
| 1137 instance->SetInternalField(kWasmMemArrayBuffer, *memory_); | 1230 instance->SetInternalField(kWasmMemArrayBuffer, *memory_); |
| 1138 Address mem_start = static_cast<Address>(memory_->backing_store()); | 1231 Address mem_start = static_cast<Address>(memory_->backing_store()); |
| 1139 uint32_t mem_size = | 1232 uint32_t mem_size = |
| 1140 static_cast<uint32_t>(memory_->byte_length()->Number()); | 1233 static_cast<uint32_t>(memory_->byte_length()->Number()); |
| (...skipping 437 matching lines...) | |
| 1578 } | 1671 } |
| 1579 } | 1672 } |
| 1580 } | 1673 } |
| 1581 | 1674 |
| 1582 // Allocate memory for a module instance as a new JSArrayBuffer. | 1675 // Allocate memory for a module instance as a new JSArrayBuffer. |
| 1583 Handle<JSArrayBuffer> AllocateMemory(uint32_t min_mem_pages) { | 1676 Handle<JSArrayBuffer> AllocateMemory(uint32_t min_mem_pages) { |
| 1584 if (min_mem_pages > WasmModule::kV8MaxPages) { | 1677 if (min_mem_pages > WasmModule::kV8MaxPages) { |
| 1585 thrower_->RangeError("Out of memory: wasm memory too large"); | 1678 thrower_->RangeError("Out of memory: wasm memory too large"); |
| 1586 return Handle<JSArrayBuffer>::null(); | 1679 return Handle<JSArrayBuffer>::null(); |
| 1587 } | 1680 } |
| 1588 Handle<JSArrayBuffer> mem_buffer = | 1681 const bool enable_guard_regions = EnableGuardRegions(); |
| 1589 NewArrayBuffer(isolate_, min_mem_pages * WasmModule::kPageSize); | 1682 Handle<JSArrayBuffer> mem_buffer = NewArrayBuffer( |
| 1683 isolate_, min_mem_pages * WasmModule::kPageSize, enable_guard_regions); | |
| 1590 | 1684 |
| 1591 if (mem_buffer.is_null()) { | 1685 if (mem_buffer.is_null()) { |
| 1592 thrower_->RangeError("Out of memory: wasm memory"); | 1686 thrower_->RangeError("Out of memory: wasm memory"); |
| 1593 } | 1687 } |
| 1594 return mem_buffer; | 1688 return mem_buffer; |
| 1595 } | 1689 } |
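AllocateMemory sizes the buffer in wasm pages: with the standard 64 KiB page (kPageSize = 0x10000 bytes), a module declaring min_mem_pages = 16, for example, asks NewArrayBuffer for 16 × 65 536 = 1 048 576 bytes (1 MiB), and the kV8MaxPages check above rejects oversized requests before any allocation is attempted.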
| 1596 | 1690 |
| 1597 // Process the exports, creating wrappers for functions, tables, memories, | 1691 // Process the exports, creating wrappers for functions, tables, memories, |
| 1598 // and globals. | 1692 // and globals. |
| 1599 void ProcessExports(Handle<FixedArray> code_table, | 1693 void ProcessExports(Handle<FixedArray> code_table, |
| (...skipping 548 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2148 DCHECK_NOT_NULL(old_mem_start); | 2242 DCHECK_NOT_NULL(old_mem_start); |
| 2149 DCHECK(old_size + pages * WasmModule::kPageSize <= | 2243 DCHECK(old_size + pages * WasmModule::kPageSize <= |
| 2150 std::numeric_limits<uint32_t>::max()); | 2244 std::numeric_limits<uint32_t>::max()); |
| 2151 new_size = old_size + pages * WasmModule::kPageSize; | 2245 new_size = old_size + pages * WasmModule::kPageSize; |
| 2152 } | 2246 } |
| 2153 | 2247 |
| 2154 if (new_size <= old_size || max_pages * WasmModule::kPageSize < new_size || | 2248 if (new_size <= old_size || max_pages * WasmModule::kPageSize < new_size || |
| 2155 WasmModule::kV8MaxPages * WasmModule::kPageSize < new_size) { | 2249 WasmModule::kV8MaxPages * WasmModule::kPageSize < new_size) { |
| 2156 return -1; | 2250 return -1; |
| 2157 } | 2251 } |
| 2158 Handle<JSArrayBuffer> buffer = NewArrayBuffer(isolate, new_size); | 2252 |
| 2159 if (buffer.is_null()) return -1; | 2253 Handle<JSArrayBuffer> buffer; |
| 2160 Address new_mem_start = static_cast<Address>(buffer->backing_store()); | 2254 |
| 2161 if (old_size != 0) { | 2255 if (!old_buffer.is_null() && old_buffer->has_guard_region()) { |
| 2162 memcpy(new_mem_start, old_mem_start, old_size); | 2256 // We don't move the backing store, we simply change the protection to make |
| 2257 // more of it accessible. | |
| 2258 base::OS::Unprotect(old_buffer->backing_store(), new_size); | |
| 2259 reinterpret_cast<v8::Isolate*>(isolate) | |
| 2260 ->AdjustAmountOfExternalAllocatedMemory(pages * WasmModule::kPageSize); | |
| 2261 Handle<Object> new_size_object = | |
| 2262 isolate->factory()->NewNumberFromSize(new_size); | |
| 2263 old_buffer->set_byte_length(*new_size_object); | |
| 2264 | |
| 2265 SetInstanceMemory(instance, *old_buffer); | |
| 2266 Handle<FixedArray> code_table = GetCompiledModule(*instance)->code_table(); | |
| 2267 RelocateMemoryReferencesInCode(code_table, old_mem_start, old_mem_start, | |
| 2268 old_size, new_size); | |
| 2269 buffer = old_buffer; | |
| 2270 } else { | |
| 2271 const bool enable_guard_regions = EnableGuardRegions(); | |
titzer 2016/11/09 19:23:47:
Should this always be false? I.e. if the memory be
Eric Holk 2016/11/09 20:02:59:
You are correct. Done.
| 2272 buffer = NewArrayBuffer(isolate, new_size, enable_guard_regions); | |
| 2273 if (buffer.is_null()) return -1; | |
| 2274 Address new_mem_start = static_cast<Address>(buffer->backing_store()); | |
| 2275 if (old_size != 0) { | |
| 2276 memcpy(new_mem_start, old_mem_start, old_size); | |
| 2277 } | |
| 2278 SetInstanceMemory(instance, *buffer); | |
| 2279 Handle<FixedArray> code_table = GetCompiledModule(*instance)->code_table(); | |
| 2280 RelocateMemoryReferencesInCode(code_table, old_mem_start, new_mem_start, | |
| 2281 old_size, new_size); | |
| 2163 } | 2282 } |
| 2164 SetInstanceMemory(instance, *buffer); | 2283 |
| 2165 Handle<FixedArray> code_table = GetCompiledModule(*instance)->code_table(); | |
| 2166 RelocateMemoryReferencesInCode(code_table, old_mem_start, new_mem_start, | |
| 2167 old_size, new_size); | |
| 2168 Handle<Object> memory_object(instance->GetInternalField(kWasmMemObject), | 2284 Handle<Object> memory_object(instance->GetInternalField(kWasmMemObject), |
| 2169 isolate); | 2285 isolate); |
| 2170 if (!memory_object->IsUndefined(isolate)) { | 2286 if (!memory_object->IsUndefined(isolate)) { |
| 2171 WasmJs::SetWasmMemoryArrayBuffer(isolate, memory_object, buffer); | 2287 WasmJs::SetWasmMemoryArrayBuffer(isolate, memory_object, buffer); |
| 2172 } | 2288 } |
| 2173 | 2289 |
| 2174 DCHECK(old_size % WasmModule::kPageSize == 0); | 2290 DCHECK(old_size % WasmModule::kPageSize == 0); |
| 2175 return (old_size / WasmModule::kPageSize); | 2291 return (old_size / WasmModule::kPageSize); |
| 2176 } | 2292 } |
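The grow path now has two strategies: when the old buffer has a guard region, the backing store stays where it is and more of the existing reservation is simply unprotected, so only the byte length and the sizes embedded in code need updating; otherwise a new buffer is allocated, the old contents are copied, and every code object is relocated to the new start address. A condensed sketch of the two strategies follows; the helper names and the raw mprotect/new calls are illustrative assumptions, not the V8 internals.

```cpp
#include <sys/mman.h>
#include <cstddef>
#include <cstdint>
#include <cstring>

// Guard-region buffers grow in place: the full range is already reserved,
// so we only make more of it accessible. The base address (and any address
// baked into generated code) stays valid.
uint8_t* GrowInPlace(uint8_t* base, std::size_t old_size, std::size_t new_size) {
  mprotect(base + old_size, new_size - old_size, PROT_READ | PROT_WRITE);
  return base;
}

// Without a guard region the buffer is reallocated and copied, after which
// every embedded memory reference must be relocated to the new start.
uint8_t* CopyGrow(const uint8_t* old_base, std::size_t old_size,
                  std::size_t new_size) {
  uint8_t* new_base = new uint8_t[new_size]();  // zero-initialized
  std::memcpy(new_base, old_base, old_size);
  return new_base;
}
```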
| 2177 | 2293 |
| (...skipping 68 matching lines...) | |
| 2246 CHECK_NOT_NULL(result.val); | 2362 CHECK_NOT_NULL(result.val); |
| 2247 module = const_cast<WasmModule*>(result.val); | 2363 module = const_cast<WasmModule*>(result.val); |
| 2248 } | 2364 } |
| 2249 | 2365 |
| 2250 Handle<WasmModuleWrapper> module_wrapper = | 2366 Handle<WasmModuleWrapper> module_wrapper = |
| 2251 WasmModuleWrapper::New(isolate, module); | 2367 WasmModuleWrapper::New(isolate, module); |
| 2252 | 2368 |
| 2253 compiled_module->set_module_wrapper(module_wrapper); | 2369 compiled_module->set_module_wrapper(module_wrapper); |
| 2254 DCHECK(WasmCompiledModule::IsWasmCompiledModule(*compiled_module)); | 2370 DCHECK(WasmCompiledModule::IsWasmCompiledModule(*compiled_module)); |
| 2255 } | 2371 } |