Index: src/arm/code-stubs-arm.cc
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 6af5ccea38e40a293c83da83d5f1ade653cb47b8..b8e8cb3d779ea34f280070aad340d1cc119dd225 100755
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -226,8 +226,20 @@ void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
 }
 
 
+void UnaryOpStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r0 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
 #define __ ACCESS_MASM(masm)
 
+
 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                           Label* slow,
                                           Condition cond);
@@ -1289,277 +1301,6 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
 }
 
 
-void UnaryOpStub::PrintName(StringStream* stream) {
-  const char* op_name = Token::Name(op_);
-  const char* overwrite_name = NULL;  // Make g++ happy.
-  switch (mode_) {
-    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
-    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
-  }
-  stream->Add("UnaryOpStub_%s_%s_%s",
-              op_name,
-              overwrite_name,
-              UnaryOpIC::GetName(operand_type_));
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::Generate(MacroAssembler* masm) {
-  switch (operand_type_) {
-    case UnaryOpIC::UNINITIALIZED:
-      GenerateTypeTransition(masm);
-      break;
-    case UnaryOpIC::SMI:
-      GenerateSmiStub(masm);
-      break;
-    case UnaryOpIC::NUMBER:
-      GenerateNumberStub(masm);
-      break;
-    case UnaryOpIC::GENERIC:
-      GenerateGenericStub(masm);
-      break;
-  }
-}
-
-
-void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
-  __ mov(r3, Operand(r0));  // the operand
-  __ mov(r2, Operand(Smi::FromInt(op_)));
-  __ mov(r1, Operand(Smi::FromInt(mode_)));
-  __ mov(r0, Operand(Smi::FromInt(operand_type_)));
-  __ Push(r3, r2, r1, r0);
-
-  __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateSmiStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateSmiStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow);
-  __ bind(&non_smi);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
-  Label non_smi;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
-                                     Label* non_smi,
-                                     Label* slow) {
-  __ JumpIfNotSmi(r0, non_smi);
-
-  // The result of negating zero or the smallest negative smi is not a smi.
-  __ bic(ip, r0, Operand(0x80000000), SetCC);
-  __ b(eq, slow);
-
-  // Return '0 - value'.
-  __ rsb(r0, r0, Operand::Zero());
-  __ Ret();
-}
-
-
-void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
-                                        Label* non_smi) {
-  __ JumpIfNotSmi(r0, non_smi);
-
-  // Flip bits and revert inverted smi-tag.
-  __ mvn(r0, Operand(r0));
-  __ bic(r0, r0, Operand(kSmiTagMask));
-  __ Ret();
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateNumberStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateNumberStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
-  Label non_smi, slow, call_builtin;
-  GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-  __ bind(&call_builtin);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateNumberStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
-                                            Label* slow) {
-  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
-  // r0 is a heap number.  Get a new heap number in r1.
-  if (mode_ == UNARY_OVERWRITE) {
-    __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
-    __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
-    __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
-  } else {
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ push(r0);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ mov(r1, Operand(r0));
-      __ pop(r0);
-    }
-
-    __ bind(&heapnumber_allocated);
-    __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
-    __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
-    __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
-    __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
-    __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
-    __ mov(r0, Operand(r1));
-  }
-  __ Ret();
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
-                                               Label* slow) {
-  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
-
-  // Convert the heap number in r0 to an untagged integer in r1.
-  __ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset));
-  __ ECMAToInt32(r1, d0, r2, r3, r4, d1);
-
-  // Do the bitwise operation and check if the result fits in a smi.
-  Label try_float;
-  __ mvn(r1, Operand(r1));
-  __ cmn(r1, Operand(0x40000000));
-  __ b(mi, &try_float);
-
-  // Tag the result as a smi and we're done.
-  __ SmiTag(r0, r1);
-  __ Ret();
-
-  // Try to store the result in a heap number.
-  __ bind(&try_float);
-  if (mode_ == UNARY_NO_OVERWRITE) {
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(r0, r3, r4, r6, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      // Push the lower bit of the result (left shifted to look like a smi).
-      __ mov(r2, Operand(r1, LSL, 31));
-      // Push the 31 high bits (bit 0 cleared to look like a smi).
-      __ bic(r1, r1, Operand(1));
-      __ Push(r2, r1);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ Pop(r2, r1);  // Restore the result.
-      __ orr(r1, r1, Operand(r2, LSR, 31));
-    }
-    __ bind(&heapnumber_allocated);
-  }
-
-  __ vmov(s0, r1);
-  __ vcvt_f64_s32(d0, s0);
-  __ vstr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset));
-  __ Ret();
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateGenericStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateGenericStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
-  // Handle the slow case by jumping to the JavaScript builtin.
-  __ push(r0);
-  switch (op_) {
-    case Token::SUB:
-      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
-      break;
-    case Token::BIT_NOT:
-      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
 // Generates code to call a C function to do a double operation.
 // This code never falls through, but returns with a heap number containing
 // the result in r0.
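
A note on the smi fast paths removed above: with V8's 32-bit smi encoding, a smi stores its value shifted left by one with the tag bit (bit 0) clear, so GenerateSmiCodeSub and GenerateSmiCodeBitNot can work on the tagged word directly. The `bic ip, r0, #0x80000000, SetCC` check branches to the slow path exactly when the operand is 0 (whose negation, -0, is not a smi) or the smallest negative smi (tagged as 0x80000000, whose negation overflows the smi range). Below is a minimal C++ sketch of both fast paths, assuming that encoding; the helper names are illustrative, not part of the stub:

#include <cassert>
#include <cstdint>

// Illustrative constant: bit 0 is the tag, 0 for smis, 1 for heap pointers.
const uint32_t kTagMask = 1;

// Mirror of GenerateSmiCodeSub's fast path; returns false where the stub
// branches to its slow-path label.
bool SmiNegate(uint32_t tagged, uint32_t* result) {
  assert((tagged & kTagMask) == 0);
  // bic ip, r0, #0x80000000, SetCC: sets eq iff tagged is 0 or 0x80000000,
  // i.e. the operand is 0 or the smallest negative smi.
  if ((tagged & ~0x80000000u) == 0) return false;
  *result = 0u - tagged;  // rsb r0, r0, #0: negates value and tag together
  return true;
}

// Mirror of GenerateSmiCodeBitNot: mvn flips every bit, including the tag
// bit, so the bic afterwards restores a clear smi tag.
uint32_t SmiBitNot(uint32_t tagged) {
  assert((tagged & kTagMask) == 0);
  uint32_t flipped = ~tagged;    // mvn r0, r0
  return flipped & ~kTagMask;    // bic r0, r0, Operand(kSmiTagMask)
}

SmiNegate(0, ...) and SmiNegate(0x80000000u, ...) both report the slow path, matching the stub's `b(eq, slow)`; every other smi negates correctly with the tag intact.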
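GenerateHeapNumberCodeSub likewise avoids floating-point arithmetic entirely: a heap number holds an IEEE 754 double, so negation is a single XOR of HeapNumber::kSignMask into the word containing the sign and exponent (done in place under UNARY_OVERWRITE, or into a freshly allocated heap number otherwise). A sketch of the same idea on a plain double, assuming IEEE 754 layout; the function name is illustrative:

#include <cstdint>
#include <cstring>

// Negate a double by flipping its IEEE 754 sign bit, the same effect as
// eor-ing HeapNumber::kSignMask into the exponent word of a heap number.
double NegateViaSignBit(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);
  bits ^= uint64_t{1} << 63;  // the sign bit
  std::memcpy(&x, &bits, sizeof bits);
  return x;
}

Flipping the bit rather than computing 0 - x also maps +0.0 to -0.0 and leaves NaNs as NaNs, which is the behavior unary minus needs.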
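The subtlest piece of GenerateHeapNumberCodeBitNot is keeping the untagged 32-bit result alive across Runtime::kNumberAlloc, which can trigger GC: every stack slot must scan as a tagged value, so the integer is split into two words that each look like a smi (bit 0 clear), pushed around the call, and reassembled with `orr` afterwards. A sketch of that split, with hypothetical names:

#include <cstdint>

// Hypothetical helpers mirroring the push/pop dance around
// Runtime::kNumberAlloc: both halves have bit 0 clear, so the GC treats
// them as smis rather than as heap pointers.
struct SmiSafePair {
  uint32_t low_bit;    // mov r2, Operand(r1, LSL, 31): bit 0 moved to bit 31
  uint32_t high_bits;  // bic r1, r1, Operand(1): bits 31..1, low bit cleared
};

inline SmiSafePair Split(uint32_t value) {
  return SmiSafePair{value << 31, value & ~1u};
}

inline uint32_t Join(const SmiSafePair& p) {
  return p.high_bits | (p.low_bit >> 31);  // orr r1, r1, Operand(r2, LSR, 31)
}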