Index: src/x64/lithium-codegen-x64.cc
===================================================================
--- src/x64/lithium-codegen-x64.cc (revision 6340)
+++ src/x64/lithium-codegen-x64.cc (working copy)
@@ -338,8 +338,9 @@
 
 
 bool LCodeGen::GenerateSafepointTable() {
-  Abort("Unimplemented: %s", "GeneratePrologue");
-  return false;
+  ASSERT(is_done());
+  safepoints_.Emit(masm(), StackSlotCount());
+  return !is_aborted();
 }
 
 
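The hunk above replaces the Abort() stub with real safepoint-table emission: during code generation every call site records a safepoint describing which stack slots hold tagged pointers, and once generation is done (is_done()) the accumulated entries are serialized after the instruction stream so the GC can walk optimized frames. The standalone C++ model below sketches that record-then-emit pattern; the names (SafepointEntry, SafepointTableModel, DefineSafepoint) are illustrative stand-ins, not the real V8 API.

    // Minimal standalone model of a safepoint table (illustrative only; the
    // real V8 SafepointTableBuilder differs in detail).
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct SafepointEntry {
      uint32_t pc_offset;     // code offset of the call site
      uint32_t tagged_slots;  // bitmap: which stack slots hold tagged pointers
      int deopt_index;        // -1 means no deoptimization at this site
    };

    class SafepointTableModel {
     public:
      // Called once per call site while code is being generated.
      void DefineSafepoint(uint32_t pc_offset, uint32_t tagged_slots,
                           int deopt_index) {
        entries_.push_back({pc_offset, tagged_slots, deopt_index});
      }
      // Mirrors safepoints_.Emit(masm(), StackSlotCount()): after code
      // generation, serialize every recorded entry in one table.
      void Emit(int stack_slot_count) const {
        std::printf("safepoint table (%d stack slots):\n", stack_slot_count);
        for (const SafepointEntry& e : entries_) {
          std::printf("  pc=%u slots=%#x deopt=%d\n",
                      (unsigned)e.pc_offset, (unsigned)e.tagged_slots,
                      e.deopt_index);
        }
      }
     private:
      std::vector<SafepointEntry> entries_;
    };

    int main() {
      SafepointTableModel safepoints;
      safepoints.DefineSafepoint(0x40, 0x5, -1);  // two tagged slots, no deopt
      safepoints.DefineSafepoint(0x7c, 0x1, 3);   // one tagged slot, deopt #3
      safepoints.Emit(4);
    }
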
@@ -492,7 +493,24 @@
 void LCodeGen::CallCode(Handle<Code> code,
                         RelocInfo::Mode mode,
                         LInstruction* instr) {
-  Abort("Unimplemented: %s", "CallCode");
+  if (instr != NULL) {
+    LPointerMap* pointers = instr->pointer_map();
+    RecordPosition(pointers->position());
+    __ call(code, mode);
+    RegisterLazyDeoptimization(instr);
+  } else {
+    LPointerMap no_pointers(0);
+    RecordPosition(no_pointers.position());
+    __ call(code, mode);
+    RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
+  }
+
+  // Signal that we don't inline smi code before these stubs in the
+  // optimizing code generator.
+  if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
+      code->kind() == Code::COMPARE_IC) {
+    __ nop();
+  }
 }
 
 
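CallCode now leaves GC and deoptimization metadata behind every call: with an LInstruction it records the instruction's pointer map and registers a lazy deoptimization environment, and without one it records a safepoint with an empty pointer map and kNoDeoptimizationIndex. The trailing nop appears to serve the IC patching protocol, which inspects the code around binary-op and compare IC call sites to decide whether smi operations were inlined; the nop marks that this optimized code inlined none. Below is a standalone sketch of the two call paths; every name in it (PointerMap, EmitCall, CallCode) is a simplified stand-in for the V8 types, not the real API.

    // Standalone sketch of the two CallCode paths (illustrative names only).
    #include <cstdio>

    constexpr int kNoDeoptimizationIndex = -1;

    struct PointerMap { int position; };

    void RecordPosition(int position) {
      std::printf("source position %d\n", position);
    }

    void EmitCall(const char* target) { std::printf("call %s\n", target); }

    void RecordSafepoint(const PointerMap* pointers, int deopt_index) {
      std::printf("safepoint at pos %d, deopt %d\n",
                  pointers->position, deopt_index);
    }

    // With an instruction we can register a lazy deoptimization environment;
    // without one we still need a safepoint, but with an empty pointer map.
    void CallCode(const char* target, const PointerMap* instr_pointers) {
      if (instr_pointers != nullptr) {
        RecordPosition(instr_pointers->position);
        EmitCall(target);
        std::printf("register lazy deopt\n");
      } else {
        PointerMap no_pointers{0};
        RecordPosition(no_pointers.position);
        EmitCall(target);
        RecordSafepoint(&no_pointers, kNoDeoptimizationIndex);
      }
    }

    int main() {
      PointerMap pointers{42};
      CallCode("BinaryOpIC", &pointers);  // call tied to an instruction
      CallCode("StackCheck", nullptr);    // runtime call, no instruction
    }
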
@@ -521,7 +539,10 @@
 
 
 void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
-  Abort("Unimplemented: %s", "RegisterEnvironmentForDeoptimization");
+  // TODO(ricow).
+  // This method is intentionally left unimplemented and simply returns
+  // until we have deoptimization support in place.
+  return;
 }
 
 
@@ -859,7 +880,19 @@
 
 
 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
-  Abort("Unimplemented: %s", "EmitGoto");
+  block = chunk_->LookupDestination(block);
+  int next_block = GetNextEmittedBlock(current_block_);
+  if (block != next_block) {
+    // Perform stack overflow check if this goto needs it before jumping.
+    if (deferred_stack_check != NULL) {
+      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+      __ j(above_equal, chunk_->GetAssemblyLabel(block));
+      __ jmp(deferred_stack_check->entry());
+      deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
+    } else {
+      __ jmp(chunk_->GetAssemblyLabel(block));
+    }
+  }
 }
 
 
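EmitGoto combines jump threading with fallthrough elision, plus the loop stack check. LookupDestination resolves the target through empty blocks, and when the threaded target is the very next block the code generator will emit, the jmp is skipped entirely. When a deferred stack check is attached, rsp is compared against the stack-limit root: above the limit jumps straight to the target, otherwise execution falls into the deferred stack-check code, whose exit is wired back to the target label. The sketch below models only the threading/fallthrough decision; LookupDestination's forwarding rule here is invented for illustration.

    // Sketch of jump threading plus fallthrough elision (standalone,
    // illustrative only).
    #include <cstdio>

    int LookupDestination(int block) {
      // Model of jump threading: pretend empty block 2 forwards to block 3.
      return block == 2 ? 3 : block;
    }

    void EmitGoto(int block, int next_emitted_block) {
      block = LookupDestination(block);
      if (block == next_emitted_block) {
        // Target is emitted immediately after this block: no jmp needed.
        std::printf("fall through to B%d\n", block);
      } else {
        std::printf("jmp B%d\n", block);
      }
    }

    int main() {
      EmitGoto(2, 3);  // threads to B3, which is next: falls through
      EmitGoto(5, 3);  // distinct target: explicit jump
    }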