| Index: src/x64/lithium-codegen-x64.cc
|
| ===================================================================
|
| --- src/x64/lithium-codegen-x64.cc (revision 6955)
|
| +++ src/x64/lithium-codegen-x64.cc (working copy)
|
| @@ -73,12 +73,69 @@
|
|
|
|
|
| void LCodeGen::Comment(const char* format, ...) {
|
| - Abort("Unimplemented: %s", "Comment");
|
| + if (!FLAG_code_comments) return;
|
| + char buffer[4 * KB];
|
| + StringBuilder builder(buffer, ARRAY_SIZE(buffer));
|
| + va_list arguments;
|
| + va_start(arguments, format);
|
| + builder.AddFormattedList(format, arguments);
|
| + va_end(arguments);
|
| +
|
| + // Copy the string before recording it in the assembler to avoid
|
| + // issues when the stack allocated buffer goes out of scope.
|
| + int length = builder.position();
|
| + Vector<char> copy = Vector<char>::New(length + 1);
|
| + memcpy(copy.start(), builder.Finalize(), copy.length());
|
| + masm()->RecordComment(copy.start());
|
| }
|
|
|
|
|
| bool LCodeGen::GeneratePrologue() {
|
| - Abort("Unimplemented: %s", "GeneratePrologue");
|
| + ASSERT(is_generating());
|
| +
|
| +#ifdef DEBUG
|
| + if (strlen(FLAG_stop_at) > 0 &&
|
| + info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
|
| + __ int3();
|
| + }
|
| +#endif
|
| +
|
| + __ push(rbp); // Caller's frame pointer.
|
| + __ movq(rbp, rsp);
|
| + __ push(rsi); // Callee's context.
|
| + __ push(rdi); // Callee's JS function.
|
| +
|
| + // Reserve space for the stack slots needed by the code.
|
| + int slots = StackSlotCount();
|
| + if (slots > 0) {
|
| + if (FLAG_debug_code) {
|
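| + // Fill the reserved slots with a recognizable zap value so that reads of
|
| + // uninitialized spill slots are easy to spot when debugging.
|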
| + __ movl(rax, Immediate(slots));
|
| + __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
|
| + Label loop;
|
| + __ bind(&loop);
|
| + __ push(kScratchRegister);
|
| + __ decl(rax);
|
| + __ j(not_zero, &loop);
|
| + } else {
|
| + __ subq(rsp, Immediate(slots * kPointerSize));
|
| +#ifdef _MSC_VER
|
| + // On Windows, you may not access the stack more than one page below
|
| + // the most recently mapped page. To make the allocated area randomly
|
| + // accessible, we write to each page in turn (the value is irrelevant).
|
| + const int kPageSize = 4 * KB;
|
| + for (int offset = slots * kPointerSize - kPageSize;
|
| + offset > 0;
|
| + offset -= kPageSize) {
|
| + __ movq(Operand(rsp, offset), rax);
|
| + }
|
| +#endif
|
| + }
|
| + }
|
| +
|
| + // Trace the call.
|
| + if (FLAG_trace) {
|
| + __ CallRuntime(Runtime::kTraceEnter, 0);
|
| + }
|
| return !is_aborted();
|
| }
|
|
|
| @@ -130,7 +187,8 @@
|
|
|
|
|
| bool LCodeGen::GenerateSafepointTable() {
|
| - Abort("Unimplemented: %s", "GeneratePrologue");
|
| + ASSERT(is_done());
|
| + safepoints_.Emit(masm(), StackSlotCount());
|
| return !is_aborted();
|
| }
|
|
|
| @@ -157,6 +215,18 @@
|
| }
|
|
|
|
|
| +bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
|
| + return op->IsConstantOperand() &&
|
| + chunk_->LookupLiteralRepresentation(op).IsInteger32();
|
| +}
|
| +
|
| +
|
| +bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const {
|
| + return op->IsConstantOperand() &&
|
| + chunk_->LookupLiteralRepresentation(op).IsTagged();
|
| +}
|
| +
|
| +
|
| int LCodeGen::ToInteger32(LConstantOperand* op) const {
|
| Handle<Object> value = chunk_->LookupLiteral(op);
|
| ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
|
| @@ -166,6 +236,14 @@
|
| }
|
|
|
|
|
| +Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
|
| + Handle<Object> literal = chunk_->LookupLiteral(op);
|
| + Representation r = chunk_->LookupLiteralRepresentation(op);
|
| + ASSERT(r.IsTagged());
|
| + return literal;
|
| +}
|
| +
|
| +
|
| Operand LCodeGen::ToOperand(LOperand* op) const {
|
| // Does not handle registers. In X64 assembler, plain registers are not
|
| // representable as an Operand.
|
| @@ -264,7 +342,24 @@
|
| void LCodeGen::CallCode(Handle<Code> code,
|
| RelocInfo::Mode mode,
|
| LInstruction* instr) {
|
| - Abort("Unimplemented: %s", "CallCode");
|
| + if (instr != NULL) {
|
| + LPointerMap* pointers = instr->pointer_map();
|
| + RecordPosition(pointers->position());
|
| + __ call(code, mode);
|
| + RegisterLazyDeoptimization(instr);
|
| + } else {
|
| + LPointerMap no_pointers(0);
|
| + RecordPosition(no_pointers.position());
|
| + __ call(code, mode);
|
| + RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
|
| + }
|
| +
|
| + // Signal that we don't inline smi code before these stubs in the
|
| + // optimizing code generator.
|
| + if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
|
| + code->kind() == Code::COMPARE_IC) {
|
| + __ nop();
|
| + }
|
| }
|
|
|
|
|
| @@ -293,12 +388,52 @@
|
|
|
|
|
| void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
|
| - Abort("Unimplemented: %s", "RegisterEnvironmentForDeoptimization");
|
| + if (!environment->HasBeenRegistered()) {
|
| + // Physical stack frame layout:
|
| + // -x ............. -4 0 ..................................... y
|
| + // [incoming arguments] [spill slots] [pushed outgoing arguments]
|
| +
|
| + // Layout of the environment:
|
| + // 0 ..................................................... size-1
|
| + // [parameters] [locals] [expression stack including arguments]
|
| +
|
| + // Layout of the translation:
|
| + // 0 ........................................................ size - 1 + 4
|
| + // [expression stack including arguments] [locals] [4 words] [parameters]
|
| + // |>------------ translation_size ------------<|
|
| +
|
| + int frame_count = 0;
|
| + for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
|
| + ++frame_count;
|
| + }
|
| + Translation translation(&translations_, frame_count);
|
| + WriteTranslation(environment, &translation);
|
| + int deoptimization_index = deoptimizations_.length();
|
| + environment->Register(deoptimization_index, translation.index());
|
| + deoptimizations_.Add(environment);
|
| + }
|
| }
|
|
|
|
|
| void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
|
| - Abort("Unimplemented: %s", "Deoptimiz");
|
| + RegisterEnvironmentForDeoptimization(environment);
|
| + ASSERT(environment->HasBeenRegistered());
|
| + int id = environment->deoptimization_index();
|
| + Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
|
| + ASSERT(entry != NULL);
|
| + if (entry == NULL) {
|
| + Abort("bailout was not prepared");
|
| + return;
|
| + }
|
| +
|
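| + // Jump to the deoptimization entry, either unconditionally or only when
|
| + // the condition cc holds.
|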
| + if (cc == no_condition) {
|
| + __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
|
| + } else {
|
| + NearLabel done;
|
| + __ j(NegateCondition(cc), &done);
|
| + __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
|
| + __ bind(&done);
|
| + }
|
| }
|
|
|
|
|
| @@ -413,7 +548,7 @@
|
|
|
|
|
| void LCodeGen::DoParallelMove(LParallelMove* move) {
|
| - Abort("Unimplemented: %s", "DoParallelMove");
|
| + resolver_.Resolve(move);
|
| }
|
|
|
|
|
| @@ -472,22 +607,57 @@
|
|
|
|
|
| void LCodeGen::DoSubI(LSubI* instr) {
|
| - Abort("Unimplemented: %s", "DoSubI");
|
| + LOperand* left = instr->InputAt(0);
|
| + LOperand* right = instr->InputAt(1);
|
| + ASSERT(left->Equals(instr->result()));
|
| +
|
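| + // Use 32-bit subtraction so the overflow flag reflects int32 overflow.
|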
| + if (right->IsConstantOperand()) {
|
| + __ subl(ToRegister(left),
|
| + Immediate(ToInteger32(LConstantOperand::cast(right))));
|
| + } else if (right->IsRegister()) {
|
| + __ subl(ToRegister(left), ToRegister(right));
|
| + } else {
|
| + __ subl(ToRegister(left), ToOperand(right));
|
| + }
|
| +
|
| + if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
|
| + DeoptimizeIf(overflow, instr->environment());
|
| + }
|
| }
|
|
|
|
|
| void LCodeGen::DoConstantI(LConstantI* instr) {
|
| - Abort("Unimplemented: %s", "DoConstantI");
|
| + ASSERT(instr->result()->IsRegister());
|
| + __ movl(ToRegister(instr->result()), Immediate(instr->value()));
|
| }
|
|
|
|
|
| void LCodeGen::DoConstantD(LConstantD* instr) {
|
| - Abort("Unimplemented: %s", "DoConstantI");
|
| + ASSERT(instr->result()->IsDoubleRegister());
|
| + XMMRegister res = ToDoubleRegister(instr->result());
|
| + double v = instr->value();
|
| + // Use xor to produce +0.0 in a fast and compact way, but avoid doing
|
| + // so if the constant is -0.0.
|
| + if (BitCast<uint64_t, double>(v) == 0) {
|
| + __ xorpd(res, res);
|
| + } else {
|
| + Register tmp = ToRegister(instr->TempAt(0));
|
| + int32_t v_int32 = static_cast<int32_t>(v);
|
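| + // Prefer converting from a 32-bit immediate when the value round-trips
|
| + // through int32; otherwise load the raw 64-bit bit pattern.
|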
| + if (static_cast<double>(v_int32) == v) {
|
| + __ movl(tmp, Immediate(v_int32));
|
| + __ cvtlsi2sd(res, tmp);
|
| + } else {
|
| + uint64_t int_val = BitCast<uint64_t, double>(v);
|
| + __ Set(tmp, int_val);
|
| + __ movq(res, tmp);
|
| + }
|
| + }
|
| }
|
|
|
|
|
| void LCodeGen::DoConstantT(LConstantT* instr) {
|
| - Abort("Unimplemented: %s", "DoConstantT");
|
| + ASSERT(instr->result()->IsRegister());
|
| + __ Move(ToRegister(instr->result()), instr->value());
|
| }
|
|
|
|
|
| @@ -517,7 +687,22 @@
|
|
|
|
|
| void LCodeGen::DoAddI(LAddI* instr) {
|
| - Abort("Unimplemented: %s", "DoAddI");
|
| + LOperand* left = instr->InputAt(0);
|
| + LOperand* right = instr->InputAt(1);
|
| + ASSERT(left->Equals(instr->result()));
|
| +
|
| + if (right->IsConstantOperand()) {
|
| + __ addl(ToRegister(left),
|
| + Immediate(ToInteger32(LConstantOperand::cast(right))));
|
| + } else if (right->IsRegister()) {
|
| + __ addl(ToRegister(left), ToRegister(right));
|
| + } else {
|
| + __ addl(ToRegister(left), ToOperand(right));
|
| + }
|
| +
|
| + if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
|
| + DeoptimizeIf(overflow, instr->environment());
|
| + }
|
| }
|
|
|
|
|
| @@ -527,7 +712,13 @@
|
|
|
|
|
| void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
|
| - Abort("Unimplemented: %s", "DoArithmeticT");
|
| + ASSERT(ToRegister(instr->InputAt(0)).is(rdx));
|
| + ASSERT(ToRegister(instr->InputAt(1)).is(rax));
|
| + ASSERT(ToRegister(instr->result()).is(rax));
|
| +
|
| + GenericBinaryOpStub stub(instr->op(), NO_OVERWRITE, NO_GENERIC_BINARY_FLAGS);
|
| + stub.SetArgsInRegisters();
|
| + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
| }
|
|
|
|
|
| @@ -541,17 +732,105 @@
|
|
|
|
|
| void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
|
| - Abort("Unimplemented: %s", "EmitBranch");
|
| + int next_block = GetNextEmittedBlock(current_block_);
|
| + right_block = chunk_->LookupDestination(right_block);
|
| + left_block = chunk_->LookupDestination(left_block);
|
| +
|
| + if (right_block == left_block) {
|
| + EmitGoto(left_block);
|
| + } else if (left_block == next_block) {
|
| + __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
|
| + } else if (right_block == next_block) {
|
| + __ j(cc, chunk_->GetAssemblyLabel(left_block));
|
| + } else {
|
| + __ j(cc, chunk_->GetAssemblyLabel(left_block));
|
| + if (cc != always) {
|
| + __ jmp(chunk_->GetAssemblyLabel(right_block));
|
| + }
|
| + }
|
| }
|
|
|
|
|
| void LCodeGen::DoBranch(LBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoBranch");
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| +
|
| + Representation r = instr->hydrogen()->representation();
|
| + if (r.IsInteger32()) {
|
| + Register reg = ToRegister(instr->InputAt(0));
|
| + __ testl(reg, reg);
|
| + EmitBranch(true_block, false_block, not_zero);
|
| + } else if (r.IsDouble()) {
|
| + XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
|
| + __ xorpd(xmm0, xmm0);
|
| + __ ucomisd(reg, xmm0);
|
| + EmitBranch(true_block, false_block, not_equal);
|
| + } else {
|
| + ASSERT(r.IsTagged());
|
| + Register reg = ToRegister(instr->InputAt(0));
|
| + HType type = instr->hydrogen()->type();
|
| + if (type.IsBoolean()) {
|
| + __ Cmp(reg, FACTORY->true_value());
|
| + EmitBranch(true_block, false_block, equal);
|
| + } else if (type.IsSmi()) {
|
| + __ SmiCompare(reg, Smi::FromInt(0));
|
| + EmitBranch(true_block, false_block, not_equal);
|
| + } else {
|
| + Label* true_label = chunk_->GetAssemblyLabel(true_block);
|
| + Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
| +
|
| + __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
|
| + __ j(equal, false_label);
|
| + __ CompareRoot(reg, Heap::kTrueValueRootIndex);
|
| + __ j(equal, true_label);
|
| + __ CompareRoot(reg, Heap::kFalseValueRootIndex);
|
| + __ j(equal, false_label);
|
| + __ SmiCompare(reg, Smi::FromInt(0));
|
| + __ j(equal, false_label);
|
| + __ JumpIfSmi(reg, true_label);
|
| +
|
| + // Test for double values. Plus/minus zero and NaN are false.
|
| + NearLabel call_stub;
|
| + __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
|
| + Heap::kHeapNumberMapRootIndex);
|
| + __ j(not_equal, &call_stub);
|
| +
|
| + // HeapNumber => false iff +0, -0, or NaN. These three cases set the
|
| + // zero flag when compared to zero using ucomisd.
|
| + __ xorpd(xmm0, xmm0);
|
| + __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
|
| + __ j(zero, false_label);
|
| + __ jmp(true_label);
|
| +
|
| + // The conversion stub doesn't cause garbage collections so it's
|
| + // safe to not record a safepoint after the call.
|
| + __ bind(&call_stub);
|
| + ToBooleanStub stub;
|
| + __ Pushad();
|
| + __ push(reg);
|
| + __ CallStub(&stub);
|
| + __ testq(rax, rax);
|
| + __ Popad();
|
| + EmitBranch(true_block, false_block, not_zero);
|
| + }
|
| + }
|
| }
|
|
|
|
|
| void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
|
| - Abort("Unimplemented: %s", "EmitGoto");
|
| + block = chunk_->LookupDestination(block);
|
| + int next_block = GetNextEmittedBlock(current_block_);
|
| + if (block != next_block) {
|
| + // Perform stack overflow check if this goto needs it before jumping.
|
| + if (deferred_stack_check != NULL) {
|
| + __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
|
| + __ j(above_equal, chunk_->GetAssemblyLabel(block));
|
| + __ jmp(deferred_stack_check->entry());
|
| + deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
|
| + } else {
|
| + __ jmp(chunk_->GetAssemblyLabel(block));
|
| + }
|
| + }
|
| }
|
|
|
|
|
| @@ -561,11 +840,24 @@
|
|
|
|
|
| void LCodeGen::DoGoto(LGoto* instr) {
|
| - Abort("Unimplemented: %s", "DoGoto");
|
| + class DeferredStackCheck: public LDeferredCode {
|
| + public:
|
| + DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
|
| + : LDeferredCode(codegen), instr_(instr) { }
|
| + virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
|
| + private:
|
| + LGoto* instr_;
|
| + };
|
| +
|
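| + // Gotos that include a stack check get deferred code for the check so
|
| + // that the common path remains a single jump.
|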
| + DeferredStackCheck* deferred = NULL;
|
| + if (instr->include_stack_check()) {
|
| + deferred = new DeferredStackCheck(this, instr);
|
| + }
|
| + EmitGoto(instr->block_id(), deferred);
|
| }
|
|
|
|
|
| -Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
|
| +inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
|
| Condition cond = no_condition;
|
| switch (op) {
|
| case Token::EQ:
|
| @@ -594,83 +886,277 @@
|
|
|
|
|
| void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
|
| - Abort("Unimplemented: %s", "EmitCmpI");
|
| + if (right->IsConstantOperand()) {
|
| + int32_t value = ToInteger32(LConstantOperand::cast(right));
|
| + if (left->IsRegister()) {
|
| + __ cmpl(ToRegister(left), Immediate(value));
|
| + } else {
|
| + __ cmpl(ToOperand(left), Immediate(value));
|
| + }
|
| + } else if (right->IsRegister()) {
|
| + __ cmpq(ToRegister(left), ToRegister(right));
|
| + } else {
|
| + __ cmpq(ToRegister(left), ToOperand(right));
|
| + }
|
| }
|
|
|
|
|
| void LCodeGen::DoCmpID(LCmpID* instr) {
|
| - Abort("Unimplemented: %s", "DoCmpID");
|
| + LOperand* left = instr->InputAt(0);
|
| + LOperand* right = instr->InputAt(1);
|
| + LOperand* result = instr->result();
|
| +
|
| + NearLabel unordered;
|
| + if (instr->is_double()) {
|
| + // Don't base result on EFLAGS when a NaN is involved. Instead
|
| + // jump to the unordered case, which produces a false value.
|
| + __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
|
| + __ j(parity_even, &unordered);
|
| + } else {
|
| + EmitCmpI(left, right);
|
| + }
|
| +
|
| + NearLabel done;
|
| + Condition cc = TokenToCondition(instr->op(), instr->is_double());
|
| + __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
|
| + __ j(cc, &done);
|
| +
|
| + __ bind(&unordered);
|
| + __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
|
| + __ bind(&done);
|
| }
|
|
|
|
|
| void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoCmpIDAndBranch");
|
| + LOperand* left = instr->InputAt(0);
|
| + LOperand* right = instr->InputAt(1);
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| +
|
| + if (instr->is_double()) {
|
| + // Don't base result on EFLAGS when a NaN is involved. Instead
|
| + // jump to the false block.
|
| + __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
|
| + __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
|
| + } else {
|
| + EmitCmpI(left, right);
|
| + }
|
| +
|
| + Condition cc = TokenToCondition(instr->op(), instr->is_double());
|
| + EmitBranch(true_block, false_block, cc);
|
| }
|
|
|
|
|
| void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
|
| - Abort("Unimplemented: %s", "DoCmpJSObjectEq");
|
| + Register left = ToRegister(instr->InputAt(0));
|
| + Register right = ToRegister(instr->InputAt(1));
|
| + Register result = ToRegister(instr->result());
|
| +
|
| + NearLabel different, done;
|
| + __ cmpq(left, right);
|
| + __ j(not_equal, &different);
|
| + __ LoadRoot(result, Heap::kTrueValueRootIndex);
|
| + __ jmp(&done);
|
| + __ bind(&different);
|
| + __ LoadRoot(result, Heap::kFalseValueRootIndex);
|
| + __ bind(&done);
|
| }
|
|
|
|
|
| void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoCmpJSObjectAndBranch");
|
| + Register left = ToRegister(instr->InputAt(0));
|
| + Register right = ToRegister(instr->InputAt(1));
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| +
|
| + __ cmpq(left, right);
|
| + EmitBranch(true_block, false_block, equal);
|
| }
|
|
|
|
|
| void LCodeGen::DoIsNull(LIsNull* instr) {
|
| - Abort("Unimplemented: %s", "DoIsNull");
|
| + Register reg = ToRegister(instr->InputAt(0));
|
| + Register result = ToRegister(instr->result());
|
| +
|
| + // If the expression is known to be a smi, then it's
|
| + // definitely not null. Materialize false.
|
| + // Consider adding other type and representation tests too.
|
| + if (instr->hydrogen()->value()->type().IsSmi()) {
|
| + __ LoadRoot(result, Heap::kFalseValueRootIndex);
|
| + return;
|
| + }
|
| +
|
| + __ CompareRoot(reg, Heap::kNullValueRootIndex);
|
| + if (instr->is_strict()) {
|
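| + // Materialize the root-list index of the answer in result, then load the
|
| + // corresponding root value through kRootRegister.
|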
| + __ movl(result, Immediate(Heap::kTrueValueRootIndex));
|
| + NearLabel load;
|
| + __ j(equal, &load);
|
| + __ movl(result, Immediate(Heap::kFalseValueRootIndex));
|
| + __ bind(&load);
|
| + __ movq(result, Operand(kRootRegister, result, times_pointer_size, 0));
|
| + } else {
|
| + NearLabel true_value, false_value, done;
|
| + __ j(equal, &true_value);
|
| + __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
|
| + __ j(equal, &true_value);
|
| + __ JumpIfSmi(reg, &false_value);
|
| + // Check for undetectable objects by looking in the bit field in
|
| + // the map. The object has already been smi checked.
|
| + Register scratch = result;
|
| + __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
|
| + __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
|
| + Immediate(1 << Map::kIsUndetectable));
|
| + __ j(not_zero, &true_value);
|
| + __ bind(&false_value);
|
| + __ LoadRoot(result, Heap::kFalseValueRootIndex);
|
| + __ jmp(&done);
|
| + __ bind(&true_value);
|
| + __ LoadRoot(result, Heap::kTrueValueRootIndex);
|
| + __ bind(&done);
|
| + }
|
| }
|
|
|
|
|
| void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoIsNullAndBranch");
|
| + Register reg = ToRegister(instr->InputAt(0));
|
| +
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| +
|
| + if (instr->hydrogen()->representation().IsSpecialization() ||
|
| + instr->hydrogen()->type().IsSmi()) {
|
| + // If the expression is known to be untagged or a smi, then it's
|
| + // definitely not null, and it can't be an undetectable object.
|
| + // Jump directly to the false block.
|
| + EmitGoto(false_block);
|
| + return;
|
| + }
|
| +
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| +
|
| + __ Cmp(reg, FACTORY->null_value());
|
| + if (instr->is_strict()) {
|
| + EmitBranch(true_block, false_block, equal);
|
| + } else {
|
| + Label* true_label = chunk_->GetAssemblyLabel(true_block);
|
| + Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
| + __ j(equal, true_label);
|
| + __ Cmp(reg, FACTORY->undefined_value());
|
| + __ j(equal, true_label);
|
| + __ JumpIfSmi(reg, false_label);
|
| + // Check for undetectable objects by looking in the bit field in
|
| + // the map. The object has already been smi checked.
|
| + Register scratch = ToRegister(instr->TempAt(0));
|
| + __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
|
| + __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
|
| + Immediate(1 << Map::kIsUndetectable));
|
| + EmitBranch(true_block, false_block, not_zero);
|
| + }
|
| }
|
|
|
|
|
| Condition LCodeGen::EmitIsObject(Register input,
|
| - Register temp1,
|
| - Register temp2,
|
| Label* is_not_object,
|
| Label* is_object) {
|
| - Abort("Unimplemented: %s", "EmitIsObject");
|
| + ASSERT(!input.is(kScratchRegister));
|
| +
|
| + __ JumpIfSmi(input, is_not_object);
|
| +
|
| + __ CompareRoot(input, Heap::kNullValueRootIndex);
|
| + __ j(equal, is_object);
|
| +
|
| + __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
|
| + // Undetectable objects behave like undefined.
|
| + __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
|
| + Immediate(1 << Map::kIsUndetectable));
|
| + __ j(not_zero, is_not_object);
|
| +
|
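| + // JS objects have instance types in the range
|
| + // [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE].
|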
| + __ movzxbl(kScratchRegister,
|
| + FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
|
| + __ cmpb(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
|
| + __ j(below, is_not_object);
|
| + __ cmpb(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
|
| return below_equal;
|
| }
|
|
|
|
|
| void LCodeGen::DoIsObject(LIsObject* instr) {
|
| - Abort("Unimplemented: %s", "DoIsObject");
|
| + Register reg = ToRegister(instr->InputAt(0));
|
| + Register result = ToRegister(instr->result());
|
| + Label is_false, is_true, done;
|
| +
|
| + Condition true_cond = EmitIsObject(reg, &is_false, &is_true);
|
| + __ j(true_cond, &is_true);
|
| +
|
| + __ bind(&is_false);
|
| + __ LoadRoot(result, Heap::kFalseValueRootIndex);
|
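| + // Count this environment and all outer (inlined) environments; each one
|
| + // contributes a frame to the translation.
|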
| + __ jmp(&done);
|
| +
|
| + __ bind(&is_true);
|
| + __ LoadRoot(result, Heap::kTrueValueRootIndex);
|
| +
|
| + __ bind(&done);
|
| }
|
|
|
|
|
| void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoIsObjectAndBranch");
|
| + Register reg = ToRegister(instr->InputAt(0));
|
| +
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| + Label* true_label = chunk_->GetAssemblyLabel(true_block);
|
| + Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
| +
|
| + Condition true_cond = EmitIsObject(reg, false_label, true_label);
|
| +
|
| + EmitBranch(true_block, false_block, true_cond);
|
| }
|
|
|
|
|
| void LCodeGen::DoIsSmi(LIsSmi* instr) {
|
| - Abort("Unimplemented: %s", "DoIsSmi");
|
| + LOperand* input_operand = instr->InputAt(0);
|
| + Register result = ToRegister(instr->result());
|
| + if (input_operand->IsRegister()) {
|
| + Register input = ToRegister(input_operand);
|
| + __ CheckSmiToIndicator(result, input);
|
| + } else {
|
| + Operand input = ToOperand(instr->InputAt(0));
|
| + __ CheckSmiToIndicator(result, input);
|
| + }
|
| + // result is zero if input is a smi, and one otherwise.
|
| + ASSERT(Heap::kFalseValueRootIndex == Heap::kTrueValueRootIndex + 1);
|
| + __ movq(result, Operand(kRootRegister, result, times_pointer_size,
|
| + Heap::kTrueValueRootIndex * kPointerSize));
|
| }
|
|
|
|
|
| void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoIsSmiAndBranch");
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| +
|
| + Condition is_smi;
|
| + if (instr->InputAt(0)->IsRegister()) {
|
| + Register input = ToRegister(instr->InputAt(0));
|
| + is_smi = masm()->CheckSmi(input);
|
| + } else {
|
| + Operand input = ToOperand(instr->InputAt(0));
|
| + is_smi = masm()->CheckSmi(input);
|
| + }
|
| + EmitBranch(true_block, false_block, is_smi);
|
| }
|
|
|
|
|
| -InstanceType LHasInstanceType::TestType() {
|
| - InstanceType from = hydrogen()->from();
|
| - InstanceType to = hydrogen()->to();
|
| +static InstanceType TestType(HHasInstanceType* instr) {
|
| + InstanceType from = instr->from();
|
| + InstanceType to = instr->to();
|
| if (from == FIRST_TYPE) return to;
|
| ASSERT(from == to || to == LAST_TYPE);
|
| return from;
|
| }
|
|
|
|
|
| -
|
| -Condition LHasInstanceType::BranchCondition() {
|
| - InstanceType from = hydrogen()->from();
|
| - InstanceType to = hydrogen()->to();
|
| +static Condition BranchCondition(HHasInstanceType* instr) {
|
| + InstanceType from = instr->from();
|
| + InstanceType to = instr->to();
|
| if (from == to) return equal;
|
| if (to == LAST_TYPE) return above_equal;
|
| if (from == FIRST_TYPE) return below_equal;
|
| @@ -685,7 +1171,17 @@
|
|
|
|
|
| void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoHasInstanceTypeAndBranch");
|
| + Register input = ToRegister(instr->InputAt(0));
|
| +
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| +
|
| + Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
| +
|
| + __ JumpIfSmi(input, false_label);
|
| +
|
| + __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
|
| + EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
|
| }
|
|
|
|
|
| @@ -696,34 +1192,118 @@
|
|
|
| void LCodeGen::DoHasCachedArrayIndexAndBranch(
|
| LHasCachedArrayIndexAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoHasCachedArrayIndexAndBranch");
|
| + Register input = ToRegister(instr->InputAt(0));
|
| +
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| +
|
| + __ testl(FieldOperand(input, String::kHashFieldOffset),
|
| + Immediate(String::kContainsCachedArrayIndexMask));
|
| + EmitBranch(true_block, false_block, not_equal);
|
| }
|
|
|
|
|
| -// Branches to a label or falls through with the answer in the z flag. Trashes
|
| -// the temp registers, but not the input. Only input and temp2 may alias.
|
| +// Branches to a label or falls through with the answer in the z flag.
|
| +// Trashes the temp register and possibly input (if it and temp are aliased).
|
| void LCodeGen::EmitClassOfTest(Label* is_true,
|
| Label* is_false,
|
| - Handle<String>class_name,
|
| + Handle<String> class_name,
|
| Register input,
|
| - Register temp,
|
| - Register temp2) {
|
| - Abort("Unimplemented: %s", "EmitClassOfTest");
|
| + Register temp) {
|
| + __ JumpIfSmi(input, is_false);
|
| + __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
|
| + __ j(below, is_false);
|
| +
|
| + // Map is now in temp.
|
| + // Functions have class 'Function'.
|
| + __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
|
| + if (class_name->IsEqualTo(CStrVector("Function"))) {
|
| + __ j(equal, is_true);
|
| + } else {
|
| + __ j(equal, is_false);
|
| + }
|
| +
|
| + // Check if the constructor in the map is a function.
|
| + __ movq(temp, FieldOperand(temp, Map::kConstructorOffset));
|
| +
|
| + // As long as JS_FUNCTION_TYPE is the last instance type and it is
|
| + // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
|
| + // LAST_JS_OBJECT_TYPE.
|
| + ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
|
| + ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
|
| +
|
| + // Objects with a non-function constructor have class 'Object'.
|
| + __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
|
| + if (class_name->IsEqualTo(CStrVector("Object"))) {
|
| + __ j(not_equal, is_true);
|
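| + // Emit as few jumps as possible by falling through whenever one of the
|
| + // targets is the next block to be emitted.
|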
| + } else {
|
| + __ j(not_equal, is_false);
|
| + }
|
| +
|
| + // temp now contains the constructor function. Grab the
|
| + // instance class name from there.
|
| + __ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
|
| + __ movq(temp, FieldOperand(temp,
|
| + SharedFunctionInfo::kInstanceClassNameOffset));
|
| + // The class name we are testing against is a symbol because it's a literal.
|
| + // The name in the constructor is a symbol because of the way the context is
|
| + // booted. This routine isn't expected to work for random API-created
|
| + // classes and it doesn't have to because you can't access it with natives
|
| + // syntax. Since both sides are symbols it is sufficient to use an identity
|
| + // comparison.
|
| + ASSERT(class_name->IsSymbol());
|
| + __ Cmp(temp, class_name);
|
| + // End with the answer in the z flag.
|
| }
|
|
|
|
|
| void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
|
| - Abort("Unimplemented: %s", "DoClassOfTest");
|
| + Register input = ToRegister(instr->InputAt(0));
|
| + Register result = ToRegister(instr->result());
|
| + ASSERT(input.is(result));
|
| + Register temp = ToRegister(instr->TempAt(0));
|
| + Handle<String> class_name = instr->hydrogen()->class_name();
|
| + NearLabel done;
|
| + Label is_true, is_false;
|
| +
|
| + EmitClassOfTest(&is_true, &is_false, class_name, input, temp);
|
| +
|
| + __ j(not_equal, &is_false);
|
| +
|
| + __ bind(&is_true);
|
| + __ LoadRoot(result, Heap::kTrueValueRootIndex);
|
| + __ jmp(&done);
|
| +
|
| + __ bind(&is_false);
|
| + __ LoadRoot(result, Heap::kFalseValueRootIndex);
|
| + __ bind(&done);
|
| }
|
|
|
|
|
| void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoClassOfTestAndBranch");
|
| + Register input = ToRegister(instr->InputAt(0));
|
| + Register temp = ToRegister(instr->TempAt(0));
|
| + Handle<String> class_name = instr->hydrogen()->class_name();
|
| +
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| +
|
| + Label* true_label = chunk_->GetAssemblyLabel(true_block);
|
| + Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
| +
|
| + EmitClassOfTest(true_label, false_label, class_name, input, temp);
|
| +
|
| + EmitBranch(true_block, false_block, equal);
|
| }
|
|
|
|
|
| void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoCmpMapAndBranch");
|
| + Register reg = ToRegister(instr->InputAt(0));
|
| + int true_block = instr->true_block_id();
|
| + int false_block = instr->false_block_id();
|
| +
|
| + __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
|
| + EmitBranch(true_block, false_block, equal);
|
| }
|
|
|
|
|
| @@ -733,7 +1313,13 @@
|
|
|
|
|
| void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoInstanceOfAndBranch");
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| +
|
| + InstanceofStub stub(InstanceofStub::kArgsInRegisters);
|
| + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
| + __ testq(rax, rax);
|
| + EmitBranch(true_block, false_block, zero);
|
| }
|
|
|
|
|
| @@ -749,17 +1335,55 @@
|
|
|
|
|
| void LCodeGen::DoCmpT(LCmpT* instr) {
|
| - Abort("Unimplemented: %s", "DoCmpT");
|
| + Token::Value op = instr->op();
|
| +
|
| + Handle<Code> ic = CompareIC::GetUninitialized(op);
|
| + CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| +
|
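| + // As in DoCmpTAndBranch below, the compare stub expects the condition and
|
| + // the input operands to be reversed for GT and LTE.
|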
| + Condition condition = TokenToCondition(op, false);
|
| + if (op == Token::GT || op == Token::LTE) {
|
| + condition = ReverseCondition(condition);
|
| + }
|
| + NearLabel true_value, done;
|
| + __ testq(rax, rax);
|
| + __ j(condition, &true_value);
|
| + __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
|
| + __ jmp(&done);
|
| + __ bind(&true_value);
|
| + __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
|
| + __ bind(&done);
|
| }
|
|
|
|
|
| void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoCmpTAndBranch");
|
| + Token::Value op = instr->op();
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| +
|
| + Handle<Code> ic = CompareIC::GetUninitialized(op);
|
| + CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
| +
|
| + // The compare stub expects the condition and the input operands to be
|
| + // reversed for GT and LTE.
|
| + Condition condition = TokenToCondition(op, false);
|
| + if (op == Token::GT || op == Token::LTE) {
|
| + condition = ReverseCondition(condition);
|
| + }
|
| + __ testq(rax, rax);
|
| + EmitBranch(true_block, false_block, condition);
|
| }
|
|
|
|
|
| void LCodeGen::DoReturn(LReturn* instr) {
|
| - Abort("Unimplemented: %s", "DoReturn");
|
| + if (FLAG_trace) {
|
| + // Preserve the return value on the stack and rely on the runtime
|
| + // call to return the value in the same register.
|
| + __ push(rax);
|
| + __ CallRuntime(Runtime::kTraceExit, 1);
|
| + }
|
| + __ movq(rsp, rbp);
|
| + __ pop(rbp);
|
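| + // Drop the parameters and the receiver (hence the + 1) from the stack.
|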
| + __ ret((ParameterCount() + 1) * kPointerSize);
|
| }
|
|
|
|
|
| @@ -773,6 +1397,11 @@
|
| }
|
|
|
|
|
| +void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
|
| + Abort("Unimplemented: %s", "DoLoadContextSlot");
|
| +}
|
| +
|
| +
|
| void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
|
| Abort("Unimplemented: %s", "DoLoadNamedField");
|
| }
|
| @@ -829,7 +1458,8 @@
|
|
|
|
|
| void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
|
| - Abort("Unimplemented: %s", "DoGlobalObject");
|
| + Register result = ToRegister(instr->result());
|
| + __ movq(result, GlobalObjectOperand());
|
| }
|
|
|
|
|
| @@ -966,27 +1596,63 @@
|
|
|
|
|
| void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
|
| - Abort("Unimplemented: %s", "DoInteger32ToDouble");
|
| + LOperand* input = instr->InputAt(0);
|
| + ASSERT(input->IsRegister() || input->IsStackSlot());
|
| + LOperand* output = instr->result();
|
| + ASSERT(output->IsDoubleRegister());
|
| + __ cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
|
| }
|
|
|
|
|
| void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
|
| - Abort("Unimplemented: %s", "DoNumberTagI");
|
| + LOperand* input = instr->InputAt(0);
|
| + ASSERT(input->IsRegister() && input->Equals(instr->result()));
|
| + Register reg = ToRegister(input);
|
| +
|
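| + // On x64 every int32 fits in a smi, so tagging cannot overflow and no
|
| + // deferred code is needed.
|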
| + __ Integer32ToSmi(reg, reg);
|
| }
|
|
|
|
|
| -void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
|
| - Abort("Unimplemented: %s", "DoDeferredNumberTagI");
|
| -}
|
| +void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
|
| + class DeferredNumberTagD: public LDeferredCode {
|
| + public:
|
| + DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
|
| + : LDeferredCode(codegen), instr_(instr) { }
|
| + virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
|
| + private:
|
| + LNumberTagD* instr_;
|
| + };
|
|
|
| + XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
|
| + Register reg = ToRegister(instr->result());
|
| + Register tmp = ToRegister(instr->TempAt(0));
|
|
|
| -void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
|
| - Abort("Unimplemented: %s", "DoNumberTagD");
|
| + DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
|
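| + // Try to allocate the heap number inline; the deferred code calls the
|
| + // runtime when inline allocation fails or is disabled.
|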
| + if (FLAG_inline_new) {
|
| + __ AllocateHeapNumber(reg, tmp, deferred->entry());
|
| + } else {
|
| + __ jmp(deferred->entry());
|
| + }
|
| + __ bind(deferred->exit());
|
| + __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
|
| }
|
|
|
|
|
| void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
|
| - Abort("Unimplemented: %s", "DoDeferredNumberTagD");
|
| + // TODO(3095996): Get rid of this. For now, we need to make the
|
| + // result register contain a valid pointer because it is already
|
| + // contained in the register pointer map.
|
| + Register reg = ToRegister(instr->result());
|
| + __ Move(reg, Smi::FromInt(0));
|
| +
|
| + __ PushSafepointRegisters();
|
| + __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
|
| + RecordSafepointWithRegisters(
|
| + instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
|
| + // Ensure that value in rax survives popping registers.
|
| + __ movq(kScratchRegister, rax);
|
| + __ PopSafepointRegisters();
|
| + __ movq(reg, kScratchRegister);
|
| }
|
|
|
|
|
| @@ -1003,7 +1669,34 @@
|
| void LCodeGen::EmitNumberUntagD(Register input_reg,
|
| XMMRegister result_reg,
|
| LEnvironment* env) {
|
| - Abort("Unimplemented: %s", "EmitNumberUntagD");
|
| + NearLabel load_smi, heap_number, done;
|
| +
|
| + // Smi check.
|
| + __ JumpIfSmi(input_reg, &load_smi);
|
| +
|
| + // Heap number map check.
|
| + __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
|
| + Heap::kHeapNumberMapRootIndex);
|
| + __ j(equal, &heap_number);
|
| +
|
| + __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
|
| + DeoptimizeIf(not_equal, env);
|
| +
|
| + // Convert undefined to NaN. Compute NaN as 0/0.
|
| + __ xorpd(result_reg, result_reg);
|
| + __ divsd(result_reg, result_reg);
|
| + __ jmp(&done);
|
| +
|
| + // Heap number to XMM conversion.
|
| + __ bind(&heap_number);
|
| + __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
|
| + __ jmp(&done);
|
| +
|
| + // Smi to XMM conversion.
|
| + __ bind(&load_smi);
|
| + __ SmiToInteger32(kScratchRegister, input_reg); // Untag smi first.
|
| + __ cvtlsi2sd(result_reg, kScratchRegister);
|
| + __ bind(&done);
|
| }
|
|
|
|
|
| @@ -1047,8 +1740,8 @@
|
| }
|
|
|
|
|
| -void LCodeGen::LoadPrototype(Register result, Handle<JSObject> prototype) {
|
| - Abort("Unimplemented: %s", "LoadPrototype");
|
| +void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
|
| + Abort("Unimplemented: %s", "LoadHeapObject");
|
| }
|
|
|
|
|
| @@ -1088,7 +1781,18 @@
|
|
|
|
|
| void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
|
| - Abort("Unimplemented: %s", "DoTypeofIsAndBranch");
|
| + Register input = ToRegister(instr->InputAt(0));
|
| + int true_block = chunk_->LookupDestination(instr->true_block_id());
|
| + int false_block = chunk_->LookupDestination(instr->false_block_id());
|
| + Label* true_label = chunk_->GetAssemblyLabel(true_block);
|
| + Label* false_label = chunk_->GetAssemblyLabel(false_block);
|
| +
|
| + Condition final_branch_condition = EmitTypeofIs(true_label,
|
| + false_label,
|
| + input,
|
| + instr->type_literal());
|
| +
|
| + EmitBranch(true_block, false_block, final_branch_condition);
|
| }
|
|
|
|
|
| @@ -1096,8 +1800,63 @@
|
| Label* false_label,
|
| Register input,
|
| Handle<String> type_name) {
|
| - Abort("Unimplemented: %s", "EmitTypeofIs");
|
| - return no_condition;
|
| + Condition final_branch_condition = no_condition;
|
| + if (type_name->Equals(HEAP->number_symbol())) {
|
| + __ JumpIfSmi(input, true_label);
|
| + __ Cmp(FieldOperand(input, HeapObject::kMapOffset),
|
| + FACTORY->heap_number_map());
|
| + final_branch_condition = equal;
|
| +
|
| + } else if (type_name->Equals(HEAP->string_symbol())) {
|
| + __ JumpIfSmi(input, false_label);
|
| + __ movq(input, FieldOperand(input, HeapObject::kMapOffset));
|
| + __ testb(FieldOperand(input, Map::kBitFieldOffset),
|
| + Immediate(1 << Map::kIsUndetectable));
|
| + __ j(not_zero, false_label);
|
| + __ CmpInstanceType(input, FIRST_NONSTRING_TYPE);
|
| + final_branch_condition = below;
|
| +
|
| + } else if (type_name->Equals(HEAP->boolean_symbol())) {
|
| + __ CompareRoot(input, Heap::kTrueValueRootIndex);
|
| + __ j(equal, true_label);
|
| + __ CompareRoot(input, Heap::kFalseValueRootIndex);
|
| + final_branch_condition = equal;
|
| +
|
| + } else if (type_name->Equals(HEAP->undefined_symbol())) {
|
| + __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
|
| + __ j(equal, true_label);
|
| + __ JumpIfSmi(input, false_label);
|
| + // Check for undetectable objects => true.
|
| + __ movq(input, FieldOperand(input, HeapObject::kMapOffset));
|
| + __ testb(FieldOperand(input, Map::kBitFieldOffset),
|
| + Immediate(1 << Map::kIsUndetectable));
|
| + final_branch_condition = not_zero;
|
| +
|
| + } else if (type_name->Equals(HEAP->function_symbol())) {
|
| + __ JumpIfSmi(input, false_label);
|
| + __ CmpObjectType(input, FIRST_FUNCTION_CLASS_TYPE, input);
|
| + final_branch_condition = above_equal;
|
| +
|
| + } else if (type_name->Equals(HEAP->object_symbol())) {
|
| + __ JumpIfSmi(input, false_label);
|
| + __ Cmp(input, FACTORY->null_value());
|
| + __ j(equal, true_label);
|
| + // Check for undetectable objects => false.
|
| + __ testb(FieldOperand(input, Map::kBitFieldOffset),
|
| + Immediate(1 << Map::kIsUndetectable));
|
| + __ j(not_zero, false_label);
|
| + // Check for JS objects that are not RegExp or Function => true.
|
| + __ CmpInstanceType(input, FIRST_JS_OBJECT_TYPE);
|
| + __ j(below, false_label);
|
| + __ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
|
| + final_branch_condition = below_equal;
|
| +
|
| + } else {
|
| + final_branch_condition = never;
|
| + __ jmp(false_label);
|
| + }
|
| +
|
| + return final_branch_condition;
|
| }
|
|
|
|
|
| @@ -1118,7 +1877,15 @@
|
|
|
|
|
| void LCodeGen::DoStackCheck(LStackCheck* instr) {
|
| - Abort("Unimplemented: %s", "DoStackCheck");
|
| + // Perform stack overflow check.
|
| + NearLabel done;
|
| + __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
|
| + __ j(above_equal, &done);
|
| +
|
| + StackCheckStub stub;
|
| + CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
| + __ bind(&done);
|
| }
|
|
|
|
|
|
|