Chromium Code Reviews

Unified Diff: src/wasm/wasm-module.cc

Issue 2396433008: [wasm] Add guard regions to end of WebAssembly.Memory buffers (Closed)
Patch Set: Merging with master (created 4 years, 1 month ago)
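The change relies on reserving a large stretch of virtual address space directly after each wasm heap and leaving everything past the requested size inaccessible, so an out-of-bounds load or store faults instead of touching neighboring memory. As a rough illustration of the reserve-then-unprotect pattern the base::OS helpers are expected to wrap, here is a minimal POSIX sketch (illustrative only, not V8 code; the names and the mmap/mprotect usage are assumptions):

#include <sys/mman.h>
#include <cstddef>

// Hypothetical stand-ins for base::OS::AllocateGuarded / base::OS::Unprotect.
void* AllocateGuarded(size_t reservation_size) {
  // Reserve address space with no access rights; any touch traps with SIGSEGV.
  void* region = mmap(nullptr, reservation_size, PROT_NONE,
                      MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  return region == MAP_FAILED ? nullptr : region;
}

bool Unprotect(void* region, size_t accessible_size) {
  // Make only the first accessible_size bytes usable; the rest of the
  // reservation stays as an inaccessible guard region.
  return mprotect(region, accessible_size, PROT_READ | PROT_WRITE) == 0;
}

In the patch below, TryAllocateBackingStore always reserves the largest possible heap offset (kWasmMaxHeapOffset), the idea being that any address a wasm memory access can compute still falls inside the reservation.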
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <memory>

#include "src/base/atomic-utils.h"
#include "src/code-stubs.h"

#include "src/macro-assembler.h"
(...skipping 50 matching lines...)
void ReplaceReferenceInCode(Handle<Code> code, Handle<Object> old_ref,
                            Handle<Object> new_ref) {
  for (RelocIterator it(*code, 1 << RelocInfo::EMBEDDED_OBJECT); !it.done();
       it.next()) {
    if (it.rinfo()->target_object() == *old_ref) {
      it.rinfo()->set_target_object(*new_ref);
    }
  }
}

-Handle<JSArrayBuffer> NewArrayBuffer(Isolate* isolate, size_t size) {
-  if (size > (WasmModule::kV8MaxPages * WasmModule::kPageSize)) {
-    // TODO(titzer): lift restriction on maximum memory allocated here.
-    return Handle<JSArrayBuffer>::null();
-  }
-  void* memory = isolate->array_buffer_allocator()->Allocate(size);
-  if (memory == nullptr) {
-    return Handle<JSArrayBuffer>::null();
-  }
-
-#if DEBUG
-  // Double check the API allocator actually zero-initialized the memory.
-  const byte* bytes = reinterpret_cast<const byte*>(memory);
-  for (size_t i = 0; i < size; ++i) {
-    DCHECK_EQ(0, bytes[i]);
-  }
-#endif
-
-  Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
-  JSArrayBuffer::Setup(buffer, isolate, false, memory, static_cast<int>(size));
-  buffer->set_is_neuterable(false);
-  return buffer;
-}
+static void MemoryFinalizer(const v8::WeakCallbackInfo<void>& data) {
+  JSArrayBuffer** p = reinterpret_cast<JSArrayBuffer**>(data.GetParameter());
+  JSArrayBuffer* buffer = *p;
+
+  void* memory = buffer->backing_store();
+  base::OS::Free(memory,
+                 RoundUp(kWasmMaxHeapOffset, base::OS::CommitPageSize()));
+
+  data.GetIsolate()->AdjustAmountOfExternalAllocatedMemory(
+      -buffer->byte_length()->Number());
+
+  GlobalHandles::Destroy(reinterpret_cast<Object**>(p));
+}
+
+#if V8_TARGET_ARCH_64_BIT
+const bool kGuardRegionsSupported = true;
+#else
+const bool kGuardRegionsSupported = false;
+#endif
+
+bool EnableGuardRegions() {
+  return FLAG_wasm_guard_pages && kGuardRegionsSupported;
+}
+
+void* TryAllocateBackingStore(Isolate* isolate, size_t size,
+                              bool enable_guard_regions, bool& is_external) {
+  is_external = false;
+  // TODO(eholk): Right now enable_guard_regions has no effect on 32-bit
+  // systems. It may be safer to fail instead, given that other code might do
+  // things that would be unsafe if they expected guard pages where there
+  // weren't any.
+  if (enable_guard_regions && kGuardRegionsSupported) {
+    // TODO(eholk): On Windows we want to make sure we don't commit the guard
+    // pages yet.
+
+    // We always allocate the largest possible offset into the heap, so the
+    // addressable memory after the guard page can be made inaccessible.
+    const size_t alloc_size =
+        RoundUp(kWasmMaxHeapOffset, base::OS::CommitPageSize());
+    DCHECK_EQ(0, size % base::OS::CommitPageSize());
+
+    // AllocateGuarded makes the whole region inaccessible by default.
+    void* memory = base::OS::AllocateGuarded(alloc_size);
+    if (memory == nullptr) {
+      return nullptr;
+    }
+
+    // Make the part we care about accessible.
+    base::OS::Unprotect(memory, size);
+
+    reinterpret_cast<v8::Isolate*>(isolate)
+        ->AdjustAmountOfExternalAllocatedMemory(size);
+
+    is_external = true;
+    return memory;
+  } else {
+    void* memory = isolate->array_buffer_allocator()->Allocate(size);
+    return memory;
+  }
+}

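The comment above captures the key sizing decision: only the requested size is made accessible, while the reservation is always RoundUp(kWasmMaxHeapOffset, CommitPageSize()) bytes so the tail can stay protected. A small standalone sketch of that layout arithmetic follows (the constants here are placeholders, not V8's actual values):

#include <cstddef>
#include <cstdio>

// RoundUp as used in the patch, valid for power-of-two alignments.
constexpr size_t RoundUp(size_t value, size_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

int main() {
  const size_t kPageSize = 4096;                 // assumed commit page size
  const size_t kWasmMaxHeapOffset = 1ull << 33;  // placeholder, not V8's value
  const size_t heap_size = 16 * 65536;           // 16 wasm pages of 64 KiB

  const size_t alloc_size = RoundUp(kWasmMaxHeapOffset, kPageSize);
  std::printf("reserve %zu bytes, unprotect the first %zu, guard the rest\n",
              alloc_size, heap_size);
}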
94 131
95 void RelocateMemoryReferencesInCode(Handle<FixedArray> code_table, 132 void RelocateMemoryReferencesInCode(Handle<FixedArray> code_table,
96 Address old_start, Address start, 133 Address old_start, Address start,
97 uint32_t prev_size, uint32_t new_size) { 134 uint32_t prev_size, uint32_t new_size) {
98 for (int i = 0; i < code_table->length(); ++i) { 135 for (int i = 0; i < code_table->length(); ++i) {
99 DCHECK(code_table->get(i)->IsCode()); 136 DCHECK(code_table->get(i)->IsCode());
100 Handle<Code> code = Handle<Code>(Code::cast(code_table->get(i))); 137 Handle<Code> code = Handle<Code>(Code::cast(code_table->get(i)));
101 AllowDeferredHandleDereference embedding_raw_address; 138 AllowDeferredHandleDereference embedding_raw_address;
102 int mask = (1 << RelocInfo::WASM_MEMORY_REFERENCE) | 139 int mask = (1 << RelocInfo::WASM_MEMORY_REFERENCE) |
(...skipping 475 matching lines...)
    TRACE_CHAIN(WasmCompiledModule::cast(wasm_module->GetInternalField(0)));
    TRACE("}\n");
  }
  compiled_module->reset_weak_owning_instance();
  GlobalHandles::Destroy(reinterpret_cast<Object**>(p));
  TRACE("}\n");
}

}  // namespace

+Handle<JSArrayBuffer> wasm::NewArrayBuffer(Isolate* isolate, size_t size,
+                                           bool enable_guard_regions) {
+  if (size > (WasmModule::kV8MaxPages * WasmModule::kPageSize)) {
+    // TODO(titzer): lift restriction on maximum memory allocated here.
+    return Handle<JSArrayBuffer>::null();
+  }
+
+  enable_guard_regions = enable_guard_regions && kGuardRegionsSupported;
+
+  bool is_external;  // Set by TryAllocateBackingStore
+  void* memory =
+      TryAllocateBackingStore(isolate, size, enable_guard_regions, is_external);
+
+  if (memory == nullptr) {
+    return Handle<JSArrayBuffer>::null();
+  }
+
+#if DEBUG
+  // Double check the API allocator actually zero-initialized the memory.
+  const byte* bytes = reinterpret_cast<const byte*>(memory);
+  for (size_t i = 0; i < size; ++i) {
+    DCHECK_EQ(0, bytes[i]);
+  }
+#endif
+
+  Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
+  JSArrayBuffer::Setup(buffer, isolate, is_external, memory,
+                       static_cast<int>(size));
+  buffer->set_is_neuterable(false);
+  buffer->set_has_guard_region(enable_guard_regions);
+
+  if (is_external) {
+    // We mark the buffer as external if we allocated it here with guard
+    // pages. That means we need to arrange for it to be freed.
+
+    // TODO(eholk): Finalizers may not run when the main thread is shutting
+    // down, which means we may leak memory here.
+    Handle<Object> global_handle = isolate->global_handles()->Create(*buffer);
+    GlobalHandles::MakeWeak(global_handle.location(), global_handle.location(),
+                            &MemoryFinalizer, v8::WeakCallbackType::kFinalizer);
+  }
+
+  return buffer;
+}
+
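When guard regions are used, the buffer is registered as external, so the embedder's array_buffer_allocator never owns the backing store; MemoryFinalizer above is the only release path, and as the TODO notes that finalizer may never run while the main thread is shutting down. One hypothetical way to bound such a leak, sketched here with invented names and not part of this patch, is to track live guarded mappings and release whatever remains at isolate teardown:

#include <mutex>
#include <unordered_map>

// Hypothetical bookkeeping for guarded wasm backing stores (illustrative only).
class GuardedRegionRegistry {
 public:
  void Register(void* base, size_t reservation_size) {
    std::lock_guard<std::mutex> lock(mutex_);
    regions_[base] = reservation_size;
  }
  void Unregister(void* base) {  // would be called from the finalizer
    std::lock_guard<std::mutex> lock(mutex_);
    regions_.erase(base);
  }
  template <typename FreeFn>
  void ReleaseAll(FreeFn free_fn) {  // would be called at isolate teardown
    std::lock_guard<std::mutex> lock(mutex_);
    for (auto& entry : regions_) free_fn(entry.first, entry.second);
    regions_.clear();
  }

 private:
  std::mutex mutex_;
  std::unordered_map<void*, size_t> regions_;
};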
const char* wasm::SectionName(WasmSectionCode code) {
  switch (code) {
    case kUnknownSectionCode:
      return "Unknown";
    case kTypeSectionCode:
      return "Type";
    case kImportSectionCode:
      return "Import";
    case kFunctionSectionCode:
      return "Function";
(...skipping 402 matching lines...)
    //--------------------------------------------------------------------------
    Handle<WasmInstanceObject> instance =
        WasmInstanceObject::New(isolate_, compiled_module_);

    //--------------------------------------------------------------------------
    // Set up the globals for the new instance.
    //--------------------------------------------------------------------------
    MaybeHandle<JSArrayBuffer> old_globals;
    uint32_t globals_size = module_->globals_size;
    if (globals_size > 0) {
+      const bool enable_guard_regions = false;
      Handle<JSArrayBuffer> global_buffer =
-          NewArrayBuffer(isolate_, globals_size);
+          NewArrayBuffer(isolate_, globals_size, enable_guard_regions);
      globals_ = global_buffer;
      if (globals_.is_null()) {
        thrower_->RangeError("Out of memory: wasm globals");
        return nothing;
      }
      Address old_address =
          owner.is_null() ? nullptr : GetGlobalStartAddressFromCodeTemplate(
                                          isolate_->heap()->undefined_value(),
                                          *owner.ToHandleChecked());
      RelocateGlobals(code_table, old_address,
(...skipping 28 matching lines...)
    // Set up the memory for the new instance.
    //--------------------------------------------------------------------------
    MaybeHandle<JSArrayBuffer> old_memory;

    uint32_t min_mem_pages = module_->min_mem_pages;
    isolate_->counters()->wasm_min_mem_pages_count()->AddSample(min_mem_pages);

    if (!memory_.is_null()) {
      // Set externally passed ArrayBuffer non neuterable.
      memory_->set_is_neuterable(false);
+
+      DCHECK_IMPLIES(EnableGuardRegions(), module_->origin == kAsmJsOrigin ||
+                                               memory_->has_guard_region());
    } else if (min_mem_pages > 0) {
      memory_ = AllocateMemory(min_mem_pages);
      if (memory_.is_null()) return nothing;  // failed to allocate memory
    }

    if (!memory_.is_null()) {
      instance->set_memory_buffer(*memory_);
      Address mem_start = static_cast<Address>(memory_->backing_store());
      uint32_t mem_size =
          static_cast<uint32_t>(memory_->byte_length()->Number());
(...skipping 452 matching lines...)
      }
    }
  }

  // Allocate memory for a module instance as a new JSArrayBuffer.
  Handle<JSArrayBuffer> AllocateMemory(uint32_t min_mem_pages) {
    if (min_mem_pages > WasmModule::kV8MaxPages) {
      thrower_->RangeError("Out of memory: wasm memory too large");
      return Handle<JSArrayBuffer>::null();
    }
-    Handle<JSArrayBuffer> mem_buffer =
-        NewArrayBuffer(isolate_, min_mem_pages * WasmModule::kPageSize);
+    const bool enable_guard_regions = EnableGuardRegions();
+    Handle<JSArrayBuffer> mem_buffer = NewArrayBuffer(
+        isolate_, min_mem_pages * WasmModule::kPageSize, enable_guard_regions);

    if (mem_buffer.is_null()) {
      thrower_->RangeError("Out of memory: wasm memory");
    }
    return mem_buffer;
  }

  // Process the exports, creating wrappers for functions, tables, memories,
  // and globals.
  void ProcessExports(Handle<FixedArray> code_table,
(...skipping 464 matching lines...)
    DCHECK_NOT_NULL(old_mem_start);
    DCHECK(old_size + pages * WasmModule::kPageSize <=
           std::numeric_limits<uint32_t>::max());
    new_size = old_size + pages * WasmModule::kPageSize;
  }

  if (new_size <= old_size || max_pages * WasmModule::kPageSize < new_size ||
      WasmModule::kV8MaxPages * WasmModule::kPageSize < new_size) {
    return -1;
  }
-  Handle<JSArrayBuffer> buffer = NewArrayBuffer(isolate, new_size);
-  if (buffer.is_null()) return -1;
-  Address new_mem_start = static_cast<Address>(buffer->backing_store());
-  if (old_size != 0) {
-    memcpy(new_mem_start, old_mem_start, old_size);
+
+  Handle<JSArrayBuffer> buffer;
+
+  if (!old_buffer.is_null() && old_buffer->has_guard_region()) {
+    // We don't move the backing store, we simply change the protection to make
+    // more of it accessible.
+    base::OS::Unprotect(old_buffer->backing_store(), new_size);
+    reinterpret_cast<v8::Isolate*>(isolate)
+        ->AdjustAmountOfExternalAllocatedMemory(pages * WasmModule::kPageSize);
+    Handle<Object> new_size_object =
+        isolate->factory()->NewNumberFromSize(new_size);
+    old_buffer->set_byte_length(*new_size_object);
+
+    SetInstanceMemory(instance, *old_buffer);
+    Handle<FixedArray> code_table = GetCompiledModule(*instance)->code_table();
+    RelocateMemoryReferencesInCode(code_table, old_mem_start, old_mem_start,
+                                   old_size, new_size);
+    buffer = old_buffer;
+  } else {
+    const bool enable_guard_regions = false;
+    buffer = NewArrayBuffer(isolate, new_size, enable_guard_regions);
+    if (buffer.is_null()) return -1;
+    Address new_mem_start = static_cast<Address>(buffer->backing_store());
+    if (old_size != 0) {
+      memcpy(new_mem_start, old_mem_start, old_size);
+    }
+    SetInstanceMemory(instance, *buffer);
+    Handle<FixedArray> code_table = GetCompiledModule(*instance)->code_table();
+    RelocateMemoryReferencesInCode(code_table, old_mem_start, new_mem_start,
+                                   old_size, new_size);
  }
+
  SetInstanceMemory(instance, *buffer);
-  Handle<FixedArray> code_table = instance->get_compiled_module()->code_table();
-  RelocateMemoryReferencesInCode(code_table, old_mem_start, new_mem_start,
-                                 old_size, new_size);
  if (instance->has_memory_object()) {
    instance->get_memory_object()->set_buffer(*buffer);
  }

  DCHECK(old_size % WasmModule::kPageSize == 0);
  return (old_size / WasmModule::kPageSize);
}
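As before the patch, the function returns the previous memory size in pages, matching grow_memory semantics; only the way the new space is obtained changes when a guard region is present (unprotect in place instead of allocate and copy). A small worked example of the size arithmetic (standalone sketch, using the 64 KiB wasm page size):

#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t kPageSize = 0x10000;  // 64 KiB wasm page
  uint32_t old_size = 1 * kPageSize;   // instance currently has one page
  uint32_t pages = 2;                  // grow_memory(2)

  uint32_t new_size = old_size + pages * kPageSize;
  std::printf("new_size = %u bytes, result = %u pages\n", new_size,
              old_size / kPageSize);   // prints: new_size = 196608, result = 1
}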

void testing::ValidateInstancesChain(Isolate* isolate,
                                     Handle<JSObject> wasm_module,
(...skipping 66 matching lines...)
    CHECK_NOT_NULL(result.val);
    module = const_cast<WasmModule*>(result.val);
  }

  Handle<WasmModuleWrapper> module_wrapper =
      WasmModuleWrapper::New(isolate, module);

  compiled_module->set_module_wrapper(module_wrapper);
  DCHECK(WasmCompiledModule::IsWasmCompiledModule(*compiled_module));
}