Index: src/deoptimizer.cc
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index 9d16211f767a8f10b3e253526f273c7dce6e225e..a259329c8d2cd520e8c09dd7df4f895c15dca2c2 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -78,7 +78,6 @@ void DeoptimizerData::Iterate(ObjectVisitor* v) {
 }
 #endif
-
 
 // We rely on this function not causing a GC. It is called from generated code
 // without having a real stack frame in place.
 Deoptimizer* Deoptimizer::New(JSFunction* function,
@@ -410,7 +409,14 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
            reinterpret_cast<intptr_t>(from),
            fp_to_sp_delta - (2 * kPointerSize));
   }
-  function->shared()->increment_deopt_count();
+  // For COMPILED_STUBs called from builtins, the function pointer
+  // is a SMI indicating an internal frame.
+  if (function->IsSmi()) {
+    function = NULL;
+  }
+  if (function != NULL && function->IsOptimized()) {
+    function->shared()->increment_deopt_count();
+  }
   // Find the optimized code.
   if (type == EAGER) {
     ASSERT(from == NULL);
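Why a bare IsSmi() test is enough above: the slot that normally holds the JSFunction is a tagged word, and, as the added comment says, builtins that invoke a COMPILED_STUB leave a Smi frame-type marker there instead of a heap pointer. The snippet below is a minimal standalone illustration of that tagging idea, with made-up constants and mock helpers rather than V8's real Object/Smi classes:

#include <cstdint>
#include <cstdio>

// Assumed V8-style tagging: Smis keep the low bit clear, heap object
// pointers keep it set, so a single bit test separates a Smi frame
// marker from a real JSFunction pointer.
static const intptr_t kSmiTagMask = 1;
static const intptr_t kSmiTag = 0;

static bool IsSmi(intptr_t tagged_word) {
  return (tagged_word & kSmiTagMask) == kSmiTag;
}

int main() {
  intptr_t frame_marker = 8 << 1;       // Smi-encoded marker (value made up)
  intptr_t heap_pointer = 0x10000 | 1;  // tagged heap pointer (value made up)
  std::printf("marker  -> IsSmi: %d\n", IsSmi(frame_marker));  // prints 1
  std::printf("pointer -> IsSmi: %d\n", IsSmi(heap_pointer));  // prints 0
  return 0;
}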
@@ -437,6 +443,10 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
     }
   } else if (type == LAZY) {
     optimized_code_ = FindDeoptimizingCodeFromAddress(from);
+    if (optimized_code_ == NULL) {
+      optimized_code_ =
+          static_cast<Code*>(isolate->heap()->FindCodeObject(from));
+    }
     ASSERT(optimized_code_ != NULL);
   } else if (type == OSR) {
     // The function has already been optimized and we're transitioning
@@ -618,6 +628,9 @@ void Deoptimizer::DoComputeOutputFrames() {
       case Translation::SETTER_STUB_FRAME:
         DoComputeAccessorStubFrame(&iterator, i, true);
         break;
+      case Translation::COMPILED_STUB_PSEUDO_FRAME:
+        DoCompiledStubPseudoFrame(&iterator, i);
+        break;
       case Translation::BEGIN:
       case Translation::REGISTER:
       case Translation::INT32_REGISTER:
@@ -630,6 +643,7 @@ void Deoptimizer::DoComputeOutputFrames() {
       case Translation::LITERAL:
      case Translation::ARGUMENTS_OBJECT:
      case Translation::DUPLICATE:
+      default:
        UNREACHABLE();
        break;
    }
@@ -809,6 +823,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
     case Translation::CONSTRUCT_STUB_FRAME:
     case Translation::GETTER_STUB_FRAME:
     case Translation::SETTER_STUB_FRAME:
+    case Translation::COMPILED_STUB_PSEUDO_FRAME:
     case Translation::DUPLICATE:
       UNREACHABLE();
       return;
@@ -1117,6 +1132,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
     case Translation::CONSTRUCT_STUB_FRAME:
     case Translation::GETTER_STUB_FRAME:
     case Translation::SETTER_STUB_FRAME:
+    case Translation::COMPILED_STUB_PSEUDO_FRAME:
     case Translation::DUPLICATE:
       UNREACHABLE();  // Malformed input.
       return false;
@@ -1326,6 +1342,9 @@ void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code,
 
 
 unsigned Deoptimizer::ComputeInputFrameSize() const {
+  if (optimized_code_->kind() == Code::COMPILED_STUB) {
+    return 0;
+  }
   unsigned fixed_size = ComputeFixedSize(function_);
   // The fp-to-sp delta already takes the context and the function
   // into account so we have to avoid double counting them (-2).
@@ -1338,7 +1357,8 @@ unsigned Deoptimizer::ComputeInputFrameSize() const {
     // the DoComputeOsrOutputFrame function for now.
   } else {
     unsigned stack_slots = optimized_code_->stack_slots();
-    unsigned outgoing_size = ComputeOutgoingArgumentSize();
+    unsigned outgoing_size = optimized_code_->kind() == Code::COMPILED_STUB
+        ? 0 : ComputeOutgoingArgumentSize();
     ASSERT(result == fixed_size + (stack_slots * kPointerSize) + outgoing_size);
   }
 #endif
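For a COMPILED_STUB the deoptimizer now treats the input frame as empty and skips the outgoing-argument accounting; for ordinary optimized code the size relation checked in the DEBUG block above still has to hold. Below is a small self-contained sanity check of that arithmetic, with made-up slot counts and a made-up decomposition of the fp-to-sp delta, meant only to show why the -2 * kPointerSize correction cancels out:

#include <cassert>

int main() {
  const unsigned kPointerSize = 8;            // 64-bit target, assumed
  unsigned fixed_size = 4 * kPointerSize;     // stand-in for ComputeFixedSize()
  unsigned stack_slots = 6;                   // spill slots, made up
  unsigned outgoing_size = 2 * kPointerSize;  // outgoing arguments, made up
  // The fp-to-sp delta covers the spill slots and outgoing arguments plus
  // the context and function slots, which fixed_size also counts; hence
  // the -2 * kPointerSize correction in ComputeInputFrameSize().
  unsigned fp_to_sp_delta =
      (2 + stack_slots) * kPointerSize + outgoing_size;
  unsigned result = fixed_size + fp_to_sp_delta - 2 * kPointerSize;
  assert(result == fixed_size + stack_slots * kPointerSize + outgoing_size);
  return 0;
}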
@@ -1403,8 +1423,6 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(BailoutType type,
   // cause us to emit relocation information for the external
   // references. This is fine because the deoptimizer's code section
   // isn't meant to be serialized at all.
-  ASSERT(!Serializer::enabled());
-
   ASSERT(type == EAGER || type == LAZY);
   DeoptimizerData* data = Isolate::Current()->deoptimizer_data();
   int entry_count = (type == EAGER)
@@ -1419,7 +1437,6 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(BailoutType type,
   GenerateDeoptimizationEntries(&masm, entry_count, type);
   CodeDesc desc;
   masm.GetCode(&desc);
-  ASSERT(desc.reloc_size == 0);
 
   VirtualMemory* memory = type == EAGER
       ? data->eager_deoptimization_entry_code_
@@ -1681,6 +1698,12 @@ void Translation::BeginJSFrame(BailoutId node_id,
 }
 
 
+void Translation::BeginCompiledStubPseudoFrame(Code::Kind stub_kind) {
+  buffer_->Add(COMPILED_STUB_PSEUDO_FRAME, zone());
+  buffer_->Add(stub_kind, zone());
+}
+
+
 void Translation::StoreRegister(Register reg) {
   buffer_->Add(REGISTER, zone());
   buffer_->Add(reg.code(), zone());
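The new translation command is two words in the buffer: the opcode followed by a single operand, the stub's Code::Kind, which is why NumberOfOperandsFor reports 1 for it in the next hunk. A standalone mock of that layout (mock types and made-up enum values, not V8's Translation/TranslationBuffer):

#include <cstdio>
#include <vector>

// Mock stand-ins, just to show the two-word encoding:
// opcode, then the stub kind as the single operand.
enum Opcode { COMPILED_STUB_PSEUDO_FRAME = 14 };  // value made up
enum CodeKind { KEYED_LOAD_IC = 7 };              // value made up

struct TranslationBuffer {
  std::vector<int> contents;
  void Add(int value) { contents.push_back(value); }
};

struct Translation {
  explicit Translation(TranslationBuffer* buffer) : buffer_(buffer) {}
  void BeginCompiledStubPseudoFrame(CodeKind stub_kind) {
    buffer_->Add(COMPILED_STUB_PSEUDO_FRAME);  // opcode
    buffer_->Add(stub_kind);                   // operand #1: the stub kind
  }
  TranslationBuffer* buffer_;
};

int main() {
  TranslationBuffer buffer;
  Translation translation(&buffer);
  translation.BeginCompiledStubPseudoFrame(KEYED_LOAD_IC);
  std::printf("words emitted: %zu\n", buffer.contents.size());  // prints 2
  return 0;
}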
@@ -1762,6 +1785,7 @@ int Translation::NumberOfOperandsFor(Opcode opcode) {
     case UINT32_STACK_SLOT:
     case DOUBLE_STACK_SLOT:
     case LITERAL:
+    case COMPILED_STUB_PSEUDO_FRAME:
       return 1;
     case BEGIN:
     case ARGUMENTS_ADAPTOR_FRAME:
@@ -1792,6 +1816,8 @@ const char* Translation::StringFor(Opcode opcode) {
       return "GETTER_STUB_FRAME";
     case SETTER_STUB_FRAME:
       return "SETTER_STUB_FRAME";
+    case COMPILED_STUB_PSEUDO_FRAME:
+      return "COMPILED_STUB_PSEUDO_FRAME";
     case REGISTER:
       return "REGISTER";
     case INT32_REGISTER:
@@ -1899,6 +1925,10 @@ SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
       int literal_index = iterator->Next();
       return SlotRef(data->LiteralArray()->get(literal_index));
     }
+
+    case Translation::COMPILED_STUB_PSEUDO_FRAME:
+      UNREACHABLE();
+      break;
   }
 
   UNREACHABLE();