Index: src/deoptimizer.cc
===================================================================
--- src/deoptimizer.cc	(revision 13214)
+++ src/deoptimizer.cc	(working copy)
@@ -44,8 +44,23 @@
   eager_deoptimization_entry_code_entries_ = -1;
   lazy_deoptimization_entry_code_entries_ = -1;
   size_t deopt_table_size = Deoptimizer::GetMaxDeoptTableSize();
+#if defined(V8_TARGET_ARCH_X64)
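+  // The table is only reserved here; it is committed on first use in
+  // EnsureCodeForDeoptimizationEntry. Reserving inside the code range
+  // keeps the entries within a 32-bit offset of generated code.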
+  CodeRange* code_range = Isolate::Current()->code_range();
+  eager_deoptimization_entry_start_ =
+      code_range->ReserveChunk(deopt_table_size,
+                               &eager_deoptimization_reserved_size_);
+  eager_deoptimization_entry_code_ = NULL;
+  lazy_deoptimization_entry_start_ =
+      code_range->ReserveChunk(deopt_table_size,
+                               &lazy_deoptimization_reserved_size_);
+  lazy_deoptimization_entry_code_ = NULL;
+#else
   eager_deoptimization_entry_code_ = new VirtualMemory(deopt_table_size);
   lazy_deoptimization_entry_code_ = new VirtualMemory(deopt_table_size);
+#endif
   current_ = NULL;
   deoptimizing_code_list_ = NULL;
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -55,10 +70,25 @@
 
 
 DeoptimizerData::~DeoptimizerData() {
+#if defined(V8_TARGET_ARCH_X64)
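+  // On x64 the tables are MemoryChunks owned by the memory allocator,
+  // so they are returned through it rather than deleted directly.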
+  if (eager_deoptimization_entry_code_ != NULL) {
+    Isolate::Current()->memory_allocator()->Free(
+        eager_deoptimization_entry_code_);
+    eager_deoptimization_entry_code_ = NULL;
+  }
+  if (lazy_deoptimization_entry_code_ != NULL) {
+    Isolate::Current()->memory_allocator()->Free(
+        lazy_deoptimization_entry_code_);
+    lazy_deoptimization_entry_code_ = NULL;
+  }
+#else
   delete eager_deoptimization_entry_code_;
   eager_deoptimization_entry_code_ = NULL;
   delete lazy_deoptimization_entry_code_;
   lazy_deoptimization_entry_code_ = NULL;
+#endif
 
   DeoptimizingCodeListNode* current = deoptimizing_code_list_;
   while (current != NULL) {
@@ -478,24 +508,62 @@
                                             GetEntryMode mode) {
   ASSERT(id >= 0);
   if (id >= kMaxNumberOfEntries) return NULL;
+#if defined(V8_TARGET_ARCH_X64)
+  Address base = 0;
+#else
   VirtualMemory* base = NULL;
+#endif
   if (mode == ENSURE_ENTRY_CODE) {
     EnsureCodeForDeoptimizationEntry(type, id);
   } else {
     ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
   }
   DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+#if defined(V8_TARGET_ARCH_X64)
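+  // Entries live in the reserved chunk; CodePageAreaStartOffset() skips
+  // the code-page header at the start of the chunk.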
   if (type == EAGER) {
+    base = data->eager_deoptimization_entry_start_ +
+        MemoryAllocator::CodePageAreaStartOffset();
+  } else {
+    base = data->lazy_deoptimization_entry_start_ +
+        MemoryAllocator::CodePageAreaStartOffset();
+  }
+  return base + (id * table_entry_size_);
+#else
+  if (type == EAGER) {
     base = data->eager_deoptimization_entry_code_;
   } else {
     base = data->lazy_deoptimization_entry_code_;
   }
   return
       static_cast<Address>(base->address()) + (id * table_entry_size_);
+#endif
 }
 
 
 int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) {
+#if defined(V8_TARGET_ARCH_X64)
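+  // Reverse of the address computation in GetDeoptimizationEntry: map an
+  // address inside the reserved table back to its entry id.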
+  Address base = 0;
+  DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
+  if (type == EAGER) {
+    base = data->eager_deoptimization_entry_start_ +
+        MemoryAllocator::CodePageAreaStartOffset();
+  } else {
+    base = data->lazy_deoptimization_entry_start_ +
+        MemoryAllocator::CodePageAreaStartOffset();
+  }
+  if (base == 0 ||
+      addr < base ||
+      addr >= base + (kMaxNumberOfEntries * table_entry_size_)) {
+    return kNotDeoptimizationEntry;
+  }
+  ASSERT_EQ(0,
+            static_cast<int>(addr - base) % table_entry_size_);
+  return static_cast<int>(addr - base) / table_entry_size_;
+#else
   VirtualMemory* base = NULL;
   DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
   if (type == EAGER) {
@@ -512,6 +580,7 @@
   ASSERT_EQ(0,
             static_cast<int>(addr - base_casted) % table_entry_size_);
   return static_cast<int>(addr - base_casted) / table_entry_size_;
+#endif
 }
 
 
@@ -1421,14 +1490,37 @@
   masm.GetCode(&desc);
   ASSERT(desc.reloc_size == 0);
 
+  size_t table_size = Deoptimizer::GetMaxDeoptTableSize();
+  ASSERT(static_cast<int>(table_size) >= desc.instr_size);
+#if defined(V8_TARGET_ARCH_X64)
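+  // Commit the chunk that was reserved in the DeoptimizerData
+  // constructor and copy the freshly generated entry code into it.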
+  MemoryAllocator* allocator = Isolate::Current()->memory_allocator();
+  Address base = type == EAGER
+      ? data->eager_deoptimization_entry_start_
+      : data->lazy_deoptimization_entry_start_;
+  size_t reserved_size = type == EAGER
+      ? data->eager_deoptimization_reserved_size_
+      : data->lazy_deoptimization_reserved_size_;
+  MemoryChunk** chunk = type == EAGER
+      ? &data->eager_deoptimization_entry_code_
+      : &data->lazy_deoptimization_entry_code_;
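+  // CommitChunkInCodeRange makes the reserved range usable as a
+  // MemoryChunk; the chunk is kept so the destructor can free it.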
+  *chunk = allocator->CommitChunkInCodeRange(base, table_size, reserved_size);
+  if (*chunk == NULL) {
+    V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table");
+  }
+  memcpy((*chunk)->area_start(), desc.buffer, desc.instr_size);
+  CPU::FlushICache((*chunk)->area_start(), desc.instr_size);
+#else
   VirtualMemory* memory = type == EAGER
       ? data->eager_deoptimization_entry_code_
       : data->lazy_deoptimization_entry_code_;
-  size_t table_size = Deoptimizer::GetMaxDeoptTableSize();
-  ASSERT(static_cast<int>(table_size) >= desc.instr_size);
   memory->Commit(memory->address(), table_size, true);
   memcpy(memory->address(), desc.buffer, desc.instr_size);
   CPU::FlushICache(memory->address(), desc.instr_size);
+#endif
 
   if (type == EAGER) {
     data->eager_deoptimization_entry_code_entries_ = entry_count;