Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 8868c7a88adc6c9136d66e29ddce79e837a05ac7..05bdd26858984f42f4f2ced09cb43fd58bf6fd51 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -222,7 +222,18 @@ void ToBooleanStub::InitializeInterfaceDescriptor(
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(ToBooleanIC_Miss);
   descriptor->SetMissHandler(
-      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
+      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
+}
+
+
+void UnaryOpStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rax };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(UnaryOpIC_Miss);
 }


@@ -643,259 +654,6 @@ void IntegerConvert(MacroAssembler* masm,
 }


-void UnaryOpStub::Generate(MacroAssembler* masm) {
-  switch (operand_type_) {
-    case UnaryOpIC::UNINITIALIZED:
-      GenerateTypeTransition(masm);
-      break;
-    case UnaryOpIC::SMI:
-      GenerateSmiStub(masm);
-      break;
-    case UnaryOpIC::NUMBER:
-      GenerateNumberStub(masm);
-      break;
-    case UnaryOpIC::GENERIC:
-      GenerateGenericStub(masm);
-      break;
-  }
-}
-
-
-void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
-  __ pop(rcx);  // Save return address.
-
-  __ push(rax);  // the operand
-  __ Push(Smi::FromInt(op_));
-  __ Push(Smi::FromInt(mode_));
-  __ Push(Smi::FromInt(operand_type_));
-
-  __ push(rcx);  // Push return address.
-
-  // Patch the caller to an appropriate specialized stub and return the
-  // operation result to the caller of the stub.
-  __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateSmiStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateSmiStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
-  Label slow;
-  GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
-  Label non_smi;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
-                                     Label* non_smi,
-                                     Label* slow,
-                                     Label::Distance non_smi_near,
-                                     Label::Distance slow_near) {
-  Label done;
-  __ JumpIfNotSmi(rax, non_smi, non_smi_near);
-  __ SmiNeg(rax, rax, &done, Label::kNear);
-  __ jmp(slow, slow_near);
-  __ bind(&done);
-  __ ret(0);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
-                                        Label* non_smi,
-                                        Label::Distance non_smi_near) {
-  __ JumpIfNotSmi(rax, non_smi, non_smi_near);
-  __ SmiNot(rax, rax);
-  __ ret(0);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateNumberStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateNumberStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
-  Label non_smi, slow, call_builtin;
-  GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-  __ bind(&call_builtin);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateNumberStubBitNot(
-    MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
-                                            Label* slow) {
-  // Check if the operand is a heap number.
-  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
-                 Heap::kHeapNumberMapRootIndex);
-  __ j(not_equal, slow);
-
-  // Operand is a float, negate its value by flipping the sign bit.
-  if (mode_ == UNARY_OVERWRITE) {
-    __ Set(kScratchRegister, 0x01);
-    __ shl(kScratchRegister, Immediate(63));
-    __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister);
-  } else {
-    // Allocate a heap number before calculating the answer,
-    // so we don't have an untagged double around during GC.
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ push(rax);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ movq(rcx, rax);
-      __ pop(rax);
-    }
-    __ bind(&heapnumber_allocated);
-    // rcx: allocated 'empty' number
-
-    // Copy the double value to the new heap number, flipping the sign.
-    __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
-    __ Set(kScratchRegister, 0x01);
-    __ shl(kScratchRegister, Immediate(63));
-    __ xor_(rdx, kScratchRegister);  // Flip sign.
-    __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
-    __ movq(rax, rcx);
-  }
-  __ ret(0);
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
-                                               Label* slow) {
-  // Check if the operand is a heap number.
-  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
-                 Heap::kHeapNumberMapRootIndex);
-  __ j(not_equal, slow);
-
-  // Convert the heap number in rax to an untagged integer in rcx.
-  IntegerConvert(masm, rax, rax);
-
-  // Do the bitwise operation and smi tag the result.
-  __ notl(rax);
-  __ Integer32ToSmi(rax, rax);
-  __ ret(0);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateGenericStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateGenericStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
-  // Handle the slow case by jumping to the JavaScript builtin.
-  __ pop(rcx);  // pop return address
-  __ push(rax);
-  __ push(rcx);  // push return address
-  switch (op_) {
-    case Token::SUB:
-      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
-      break;
-    case Token::BIT_NOT:
-      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::PrintName(StringStream* stream) {
-  const char* op_name = Token::Name(op_);
-  const char* overwrite_name = NULL;  // Make g++ happy.
-  switch (mode_) {
-    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
-    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
-  }
-  stream->Add("UnaryOpStub_%s_%s_%s",
-              op_name,
-              overwrite_name,
-              UnaryOpIC::GetName(operand_type_));
-}
-
-
 void BinaryOpStub::Initialize() {}

