Index: src/mips/full-codegen-mips.cc
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index df3f4170b1ae7fa3b0355e6f52ce18a9d73638d0..853ee08963e0a5a15c51b4250d040444f2d29d79 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -1653,13 +1653,11 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
   __ li(a0, Operand(Smi::FromInt(flags)));
   int properties_count = constant_properties->length() / 2;
   if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
-      expr->depth() > 1) {
-    __ Push(a3, a2, a1, a0);
-    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
-  } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
+      expr->depth() > 1 || Serializer::enabled() ||
+      flags != ObjectLiteral::kFastElements ||
       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
     __ Push(a3, a2, a1, a0);
-    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
+    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   } else {
     FastCloneShallowObjectStub stub(properties_count);
     __ CallStub(&stub);
@@ -4926,6 +4924,89 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
 
 #undef __
 
+
+// This structure comes from FullCodeGenerator::EmitBackEdgeBookkeeping.
+// The back edge bookkeeping code matches the pattern:
+//
+// sltu at, sp, t0 / slt at, a3, zero_reg (in case of count based interrupts)
+// beq at, zero_reg, ok
+// lui t9, <interrupt stub address> upper
+// ori t9, <interrupt stub address> lower
+// jalr t9
+// nop
+// ok-label ----- pc_after points here
+//
+// We patch the code to the following form:
+//
+// addiu at, zero_reg, 1
+// beq at, zero_reg, ok ;; Not changed
+// lui t9, <on-stack replacement address> upper
+// ori t9, <on-stack replacement address> lower
+// jalr t9 ;; Not changed
+// nop ;; Not changed
+// ok-label ----- pc_after points here
+
+void BackEdgeTable::PatchAt(Code* unoptimized_code,
+                            Address pc_after,
+                            Code* replacement_code) {
+  static const int kInstrSize = Assembler::kInstrSize;
+  // Replace the sltu instruction with load-imm 1 to at, so beq is not taken.
+  CodePatcher patcher(pc_after - 6 * kInstrSize, 1);
+  patcher.masm()->addiu(at, zero_reg, 1);
+  // Replace the stack check address in the load-immediate (lui/ori pair)
+  // with the entry address of the replacement code.
+  Assembler::set_target_address_at(pc_after - 4 * kInstrSize,
+                                   replacement_code->entry());
+
+  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, pc_after - 4 * kInstrSize, replacement_code);
+}
+
+
+void BackEdgeTable::RevertAt(Code* unoptimized_code,
+                             Address pc_after,
+                             Code* interrupt_code) {
+  static const int kInstrSize = Assembler::kInstrSize;
+  // Restore the sltu instruction so beq can be taken again.
+  CodePatcher patcher(pc_after - 6 * kInstrSize, 1);
+  patcher.masm()->slt(at, a3, zero_reg);
+  // Restore the original call address.
+  Assembler::set_target_address_at(pc_after - 4 * kInstrSize,
+                                   interrupt_code->entry());
+
+  interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+      unoptimized_code, pc_after - 4 * kInstrSize, interrupt_code);
+}
+
+
+#ifdef DEBUG
+BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
+    Isolate* isolate,
+    Code* unoptimized_code,
+    Address pc_after) {
+  static const int kInstrSize = Assembler::kInstrSize;
+  ASSERT(Assembler::IsBeq(Assembler::instr_at(pc_after - 5 * kInstrSize)));
+  if (Assembler::IsAddImmediate(
+      Assembler::instr_at(pc_after - 6 * kInstrSize))) {
+    Code* osr_builtin =
+        isolate->builtins()->builtin(Builtins::kOnStackReplacement);
+    ASSERT(reinterpret_cast<uint32_t>(
+        Assembler::target_address_at(pc_after - 4 * kInstrSize)) ==
+        reinterpret_cast<uint32_t>(osr_builtin->entry()));
+    return ON_STACK_REPLACEMENT;
+  } else {
+    // Get the interrupt stub code object to match against from cache.
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
+    ASSERT(reinterpret_cast<uint32_t>(
+        Assembler::target_address_at(pc_after - 4 * kInstrSize)) ==
+        reinterpret_cast<uint32_t>(interrupt_builtin->entry()));
+    return INTERRUPT;
+  }
+}
+#endif // DEBUG
+
+
 } } // namespace v8::internal
 
 #endif // V8_TARGET_ARCH_MIPS
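
Note on the patched back edge sequence (not part of the patch above): the comment block added to BackEdgeTable describes a six-instruction sequence whose first word is overwritten with addiu at, zero_reg, 1 so that the beq at, zero_reg, ok branch is never taken, and whose lui/ori pair is rewritten with the 32-bit entry address of the replacement code. The standalone C++ sketch below mimics that rewrite on a plain word buffer using hand-assembled MIPS32 encodings; the helper names (PatchBackEdge, SetTargetAddress) and the example addresses are illustrative only and do not come from V8.

// backedge_patch_sketch.cc -- illustrative only; build with: g++ -std=c++11 backedge_patch_sketch.cc
#include <cstdint>
#include <cstdio>

namespace {

// Hand-assembled MIPS32 encoding of "addiu at, zero_reg, 1":
// opcode 0x09, rs = $zero (0), rt = $at (1), immediate = 1.
const uint32_t kAddiuAtZero1 = (0x09u << 26) | (0u << 21) | (1u << 16) | 1u;

// Rewrite the 16-bit immediates of a lui/ori pair that loads t9 with a
// 32-bit call target, keeping the opcode/register bits untouched.
void SetTargetAddress(uint32_t* lui, uint32_t* ori, uint32_t target) {
  *lui = (*lui & 0xFFFF0000u) | (target >> 16);
  *ori = (*ori & 0xFFFF0000u) | (target & 0xFFFFu);
}

// Mimics what BackEdgeTable::PatchAt does, on a plain word buffer:
// pc_after points just past the six-instruction back edge sequence.
void PatchBackEdge(uint32_t* pc_after, uint32_t osr_entry) {
  uint32_t* seq = pc_after - 6;    // word at pc_after - 6 * kInstrSize
  seq[0] = kAddiuAtZero1;          // at is now always 1, so the beq to "ok" is never taken
  SetTargetAddress(&seq[2], &seq[3], osr_entry);  // lui/ori at pc_after - 4 * kInstrSize
}

}  // namespace

int main() {
  // Fake back edge sequence: slt at, a3, zero_reg / beq / lui t9 / ori t9 / jalr t9 / nop.
  uint32_t code[6] = {0x00E0082Au, 0x10200004u, 0x3C190000u,
                      0x37390000u, 0x0320F809u, 0x00000000u};
  PatchBackEdge(code + 6, 0x40302010u);  // pretend on-stack replacement entry address
  for (int i = 0; i < 6; ++i) {
    std::printf("%08x\n", static_cast<unsigned>(code[i]));
  }
  // Prints: 24010001 10200004 3c194030 37392010 0320f809 00000000 (one word per line)
  return 0;
}

Reverting is the mirror image: write the slt encoding (0x00e0082a in this example) back into the first word and restore the interrupt stub address in the lui/ori pair, which is what RevertAt does via CodePatcher and Assembler::set_target_address_at.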