| Index: src/x64/codegen-x64.cc
|
| ===================================================================
|
| --- src/x64/codegen-x64.cc (revision 3935)
|
| +++ src/x64/codegen-x64.cc (working copy)
|
| @@ -277,7 +277,7 @@
|
| }
|
|
|
|
|
| -void CodeGenerator::Generate(CompilationInfo* info, Mode mode) {
|
| +void CodeGenerator::Generate(CompilationInfo* info) {
|
| // Record the position for debugging purposes.
|
| CodeForFunctionPosition(info->function());
|
|
|
| @@ -316,7 +316,7 @@
|
| // rsi: callee's context
|
| allocator_->Initialize();
|
|
|
| - if (mode == PRIMARY) {
|
| + if (info->mode() == CompilationInfo::PRIMARY) {
|
| frame_->Enter();
|
|
|
| // Allocate space for locals and initialize them.
|
| @@ -407,6 +407,12 @@
|
| // frame to match this state.
|
| frame_->Adjust(3);
|
| allocator_->Unuse(rdi);
|
| +
|
| + // Bind all the bailout labels to the beginning of the function.
|
| + List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
|
| + for (int i = 0; i < bailouts->length(); i++) {
|
| + __ bind(bailouts->at(i)->label());
|
| + }
|
| }
|
|
|
| // Initialize the function return target after the locals are set
|
| @@ -1221,7 +1227,7 @@
|
| // Compare and branch to the body if true or the next test if
|
| // false. Prefer the next test as a fall through.
|
| ControlDestination dest(clause->body_target(), &next_test, false);
|
| - Comparison(equal, true, &dest);
|
| + Comparison(node, equal, true, &dest);
|
|
|
| // If the comparison fell through to the true target, jump to the
|
| // actual body.
|
| @@ -2218,8 +2224,7 @@
|
| // Spill everything, even constants, to the frame.
|
| frame_->SpillAll();
|
|
|
| - DebuggerStatementStub ces;
|
| - frame_->CallStub(&ces, 0);
|
| + frame_->DebugBreak();
|
| // Ignore the return value.
|
| #endif
|
| }
|
| @@ -2496,17 +2501,19 @@
|
| // Load the literals array of the function.
|
| __ movq(literals.reg(),
|
| FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
|
| - // Literal array.
|
| +
|
| frame_->Push(&literals);
|
| - // Literal index.
|
| frame_->Push(Smi::FromInt(node->literal_index()));
|
| - // Constant elements.
|
| frame_->Push(node->constant_elements());
|
| + int length = node->values()->length();
|
| Result clone;
|
| if (node->depth() > 1) {
|
| clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
|
| + } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
|
| + clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
|
| } else {
|
| - clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
|
| + FastCloneShallowArrayStub stub(length);
|
| + clone = frame_->CallStub(&stub, 3);
|
| }
|
| frame_->Push(&clone);
|
|
|
| @@ -2756,9 +2763,6 @@
|
| // JavaScript example: 'foo(1, 2, 3)' // foo is global
|
| // ----------------------------------
|
|
|
| - // Push the name of the function and the receiver onto the stack.
|
| - frame_->Push(var->name());
|
| -
|
| // Pass the global object as the receiver and let the IC stub
|
| // patch the stack to use the global proxy as 'this' in the
|
| // invoked function.
|
| @@ -2770,6 +2774,9 @@
|
| Load(args->at(i));
|
| }
|
|
|
| + // Push the name of the function on the frame.
|
| + frame_->Push(var->name());
|
| +
|
| // Call the IC initialization code.
|
| CodeForSourcePosition(node->position());
|
| Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
|
| @@ -2777,7 +2784,7 @@
|
| loop_nesting());
|
| frame_->RestoreContextRegister();
|
| // Replace the function on the stack with the result.
|
| - frame_->SetElementAt(0, &result);
|
| + frame_->Push(&result);
|
|
|
| } else if (var != NULL && var->slot() != NULL &&
|
| var->slot()->type() == Slot::LOOKUP) {
|
| @@ -2830,8 +2837,7 @@
|
| node->position());
|
|
|
| } else {
|
| - // Push the name of the function and the receiver onto the stack.
|
| - frame_->Push(name);
|
| + // Push the receiver onto the frame.
|
| Load(property->obj());
|
|
|
| // Load the arguments.
|
| @@ -2840,14 +2846,16 @@
|
| Load(args->at(i));
|
| }
|
|
|
| + // Push the name of the function onto the frame.
|
| + frame_->Push(name);
|
| +
|
| // Call the IC initialization code.
|
| CodeForSourcePosition(node->position());
|
| Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET,
|
| arg_count,
|
| loop_nesting());
|
| frame_->RestoreContextRegister();
|
| - // Replace the function on the stack with the result.
|
| - frame_->SetElementAt(0, &result);
|
| + frame_->Push(&result);
|
| }
|
|
|
| } else {
|
| @@ -2938,8 +2946,6 @@
|
| Runtime::Function* function = node->function();
|
|
|
| if (function == NULL) {
|
| - // Prepare stack for calling JS runtime function.
|
| - frame_->Push(node->name());
|
| // Push the builtins object found in the current global object.
|
| Result temp = allocator()->Allocate();
|
| ASSERT(temp.is_valid());
|
| @@ -2957,11 +2963,12 @@
|
|
|
| if (function == NULL) {
|
| // Call the JS runtime function.
|
| + frame_->Push(node->name());
|
| Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
|
| arg_count,
|
| loop_nesting_);
|
| frame_->RestoreContextRegister();
|
| - frame_->SetElementAt(0, &answer);
|
| + frame_->Push(&answer);
|
| } else {
|
| // Call the C runtime function.
|
| Result answer = frame_->CallRuntime(function, arg_count);
|
| @@ -3070,7 +3077,6 @@
|
|
|
| case Token::SUB: {
|
| GenericUnaryOpStub stub(Token::SUB, overwrite);
|
| - // TODO(1222589): remove dependency of TOS being cached inside stub
|
| Result operand = frame_->Pop();
|
| Result answer = frame_->CallStub(&stub, &operand);
|
| frame_->Push(&answer);
|
| @@ -3586,7 +3592,7 @@
|
| }
|
| Load(left);
|
| Load(right);
|
| - Comparison(cc, strict, destination());
|
| + Comparison(node, cc, strict, destination());
|
| }
|
|
|
|
|
| @@ -3627,6 +3633,22 @@
|
| }
|
|
|
|
|
| +void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
|
| + ASSERT(args->length() == 1);
|
| + Load(args->at(0));
|
| + Result value = frame_->Pop();
|
| + value.ToRegister();
|
| + ASSERT(value.is_valid());
|
| + Condition is_smi = masm_->CheckSmi(value.reg());
|
| + destination()->false_target()->Branch(is_smi);
|
| + // It is a heap object - get map.
|
| + // Check if the object is a regexp.
|
| + __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, kScratchRegister);
|
| + value.Unuse();
|
| + destination()->Split(equal);
|
| +}
|
| +
|
| +
|
| void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
|
| // This generates a fast version of:
|
| // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
|
| @@ -3971,6 +3993,35 @@
|
| }
|
|
|
|
|
| +void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
|
| + ASSERT_EQ(args->length(), 1);
|
| +
|
| + // Load the argument on the stack and jump to the runtime.
|
| + Load(args->at(0));
|
| +
|
| + Result answer = frame_->CallRuntime(Runtime::kNumberToString, 1);
|
| + frame_->Push(&answer);
|
| +}
|
| +
|
| +
|
| +void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
|
| + ASSERT_EQ(args->length(), 1);
|
| + // Load the argument on the stack and jump to the runtime.
|
| + Load(args->at(0));
|
| + Result answer = frame_->CallRuntime(Runtime::kMath_sin, 1);
|
| + frame_->Push(&answer);
|
| +}
|
| +
|
| +
|
| +void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
|
| + ASSERT_EQ(args->length(), 1);
|
| + // Load the argument on the stack and jump to the runtime.
|
| + Load(args->at(0));
|
| + Result answer = frame_->CallRuntime(Runtime::kMath_cos, 1);
|
| + frame_->Push(&answer);
|
| +}
|
| +
|
| +
|
| void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
|
| ASSERT_EQ(2, args->length());
|
|
|
| @@ -4260,34 +4311,52 @@
|
| // The value to convert should be popped from the frame.
|
| Result value = frame_->Pop();
|
| value.ToRegister();
|
| - // Fast case checks.
|
|
|
| - // 'false' => false.
|
| - __ CompareRoot(value.reg(), Heap::kFalseValueRootIndex);
|
| - dest->false_target()->Branch(equal);
|
| + if (value.is_number()) {
|
| + Comment cmnt(masm_, "ONLY_NUMBER");
|
| + // Fast case if NumberInfo indicates only numbers.
|
| + if (FLAG_debug_code) {
|
| + __ AbortIfNotNumber(value.reg(), "ToBoolean operand is not a number.");
|
| + }
|
| + // Smi => false iff zero.
|
| + __ SmiCompare(value.reg(), Smi::FromInt(0));
|
| + dest->false_target()->Branch(equal);
|
| + Condition is_smi = masm_->CheckSmi(value.reg());
|
| + dest->true_target()->Branch(is_smi);
|
| + __ fldz();
|
| + __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
|
| + __ FCmp();
|
| + value.Unuse();
|
| + dest->Split(not_zero);
|
| + } else {
|
| + // Fast case checks.
|
| + // 'false' => false.
|
| + __ CompareRoot(value.reg(), Heap::kFalseValueRootIndex);
|
| + dest->false_target()->Branch(equal);
|
|
|
| - // 'true' => true.
|
| - __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex);
|
| - dest->true_target()->Branch(equal);
|
| + // 'true' => true.
|
| + __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex);
|
| + dest->true_target()->Branch(equal);
|
|
|
| - // 'undefined' => false.
|
| - __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex);
|
| - dest->false_target()->Branch(equal);
|
| + // 'undefined' => false.
|
| + __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex);
|
| + dest->false_target()->Branch(equal);
|
|
|
| - // Smi => false iff zero.
|
| - __ SmiCompare(value.reg(), Smi::FromInt(0));
|
| - dest->false_target()->Branch(equal);
|
| - Condition is_smi = masm_->CheckSmi(value.reg());
|
| - dest->true_target()->Branch(is_smi);
|
| + // Smi => false iff zero.
|
| + __ SmiCompare(value.reg(), Smi::FromInt(0));
|
| + dest->false_target()->Branch(equal);
|
| + Condition is_smi = masm_->CheckSmi(value.reg());
|
| + dest->true_target()->Branch(is_smi);
|
|
|
| - // Call the stub for all other cases.
|
| - frame_->Push(&value); // Undo the Pop() from above.
|
| - ToBooleanStub stub;
|
| - Result temp = frame_->CallStub(&stub, 1);
|
| - // Convert the result to a condition code.
|
| - __ testq(temp.reg(), temp.reg());
|
| - temp.Unuse();
|
| - dest->Split(not_equal);
|
| + // Call the stub for all other cases.
|
| + frame_->Push(&value); // Undo the Pop() from above.
|
| + ToBooleanStub stub;
|
| + Result temp = frame_->CallStub(&stub, 1);
|
| + // Convert the result to a condition code.
|
| + __ testq(temp.reg(), temp.reg());
|
| + temp.Unuse();
|
| + dest->Split(not_equal);
|
| + }
|
| }
|
|
|
|
|
| @@ -4868,7 +4937,8 @@
|
| }
|
|
|
|
|
| -void CodeGenerator::Comparison(Condition cc,
|
| +void CodeGenerator::Comparison(AstNode* node,
|
| + Condition cc,
|
| bool strict,
|
| ControlDestination* dest) {
|
| // Strict only makes sense for equality comparisons.
|
| @@ -4915,7 +4985,8 @@
|
| default:
|
| UNREACHABLE();
|
| }
|
| - } else { // Only one side is a constant Smi.
|
| + } else {
|
| + // Only one side is a constant Smi.
|
| // If left side is a constant Smi, reverse the operands.
|
| // Since one side is a constant Smi, conversion order does not matter.
|
| if (left_side_constant_smi) {
|
| @@ -4929,6 +5000,8 @@
|
| // Implement comparison against a constant Smi, inlining the case
|
| // where both sides are Smis.
|
| left_side.ToRegister();
|
| + Register left_reg = left_side.reg();
|
| + Handle<Object> right_val = right_side.handle();
|
|
|
| // Here we split control flow to the stub call and inlined cases
|
| // before finally splitting it to the control destination. We use
|
| @@ -4936,12 +5009,48 @@
|
| // the first split. We manually handle the off-frame references
|
| // by reconstituting them on the non-fall-through path.
|
| JumpTarget is_smi;
|
| - Register left_reg = left_side.reg();
|
| - Handle<Object> right_val = right_side.handle();
|
|
|
| Condition left_is_smi = masm_->CheckSmi(left_side.reg());
|
| is_smi.Branch(left_is_smi);
|
|
|
| + bool is_for_loop_compare = (node->AsCompareOperation() != NULL)
|
| + && node->AsCompareOperation()->is_for_loop_condition();
|
| + if (!is_for_loop_compare && right_val->IsSmi()) {
|
| + // Right side is a constant smi and left side has been checked
|
| + // not to be a smi.
|
| + JumpTarget not_number;
|
| + __ Cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
|
| + Factory::heap_number_map());
|
| + not_number.Branch(not_equal, &left_side);
|
| + __ movsd(xmm1,
|
| + FieldOperand(left_reg, HeapNumber::kValueOffset));
|
| + int value = Smi::cast(*right_val)->value();
|
| + if (value == 0) {
|
| + __ xorpd(xmm0, xmm0);
|
| + } else {
|
| + Result temp = allocator()->Allocate();
|
| + __ movl(temp.reg(), Immediate(value));
|
| + __ cvtlsi2sd(xmm0, temp.reg());
|
| + temp.Unuse();
|
| + }
|
| + __ ucomisd(xmm1, xmm0);
|
| + // Jump to builtin for NaN.
|
| + not_number.Branch(parity_even, &left_side);
|
| + left_side.Unuse();
|
| + Condition double_cc = cc;
|
| + switch (cc) {
|
| + case less: double_cc = below; break;
|
| + case equal: double_cc = equal; break;
|
| + case less_equal: double_cc = below_equal; break;
|
| + case greater: double_cc = above; break;
|
| + case greater_equal: double_cc = above_equal; break;
|
| + default: UNREACHABLE();
|
| + }
|
| + dest->true_target()->Branch(double_cc);
|
| + dest->false_target()->Jump();
|
| + not_number.Bind(&left_side);
|
| + }
|
| +
|
| // Setup and call the compare stub.
|
| CompareStub stub(cc, strict);
|
| Result result = frame_->CallStub(&stub, &left_side, &right_side);
|
| @@ -5114,26 +5223,34 @@
|
| // Neither operand is known to be a string.
|
| }
|
|
|
| - bool left_is_smi = left.is_constant() && left.handle()->IsSmi();
|
| - bool left_is_non_smi = left.is_constant() && !left.handle()->IsSmi();
|
| - bool right_is_smi = right.is_constant() && right.handle()->IsSmi();
|
| - bool right_is_non_smi = right.is_constant() && !right.handle()->IsSmi();
|
| + bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
|
| + bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
|
| + bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
|
| + bool right_is_non_smi_constant =
|
| + right.is_constant() && !right.handle()->IsSmi();
|
|
|
| - if (left_is_smi && right_is_smi) {
|
| + if (left_is_smi_constant && right_is_smi_constant) {
|
| // Compute the constant result at compile time, and leave it on the frame.
|
| int left_int = Smi::cast(*left.handle())->value();
|
| int right_int = Smi::cast(*right.handle())->value();
|
| if (FoldConstantSmis(op, left_int, right_int)) return;
|
| }
|
|
|
| + // Get number type of left and right sub-expressions.
|
| + NumberInfo::Type operands_type =
|
| + NumberInfo::Combine(left.number_info(), right.number_info());
|
| +
|
| Result answer;
|
| - if (left_is_non_smi || right_is_non_smi) {
|
| - GenericBinaryOpStub stub(op, overwrite_mode, NO_SMI_CODE_IN_STUB);
|
| + if (left_is_non_smi_constant || right_is_non_smi_constant) {
|
| + GenericBinaryOpStub stub(op,
|
| + overwrite_mode,
|
| + NO_SMI_CODE_IN_STUB,
|
| + operands_type);
|
| answer = stub.GenerateCall(masm_, frame_, &left, &right);
|
| - } else if (right_is_smi) {
|
| + } else if (right_is_smi_constant) {
|
| answer = ConstantSmiBinaryOperation(op, &left, right.handle(),
|
| type, false, overwrite_mode);
|
| - } else if (left_is_smi) {
|
| + } else if (left_is_smi_constant) {
|
| answer = ConstantSmiBinaryOperation(op, &right, left.handle(),
|
| type, true, overwrite_mode);
|
| } else {
|
| @@ -5145,10 +5262,62 @@
|
| if (loop_nesting() > 0 && (Token::IsBitOp(op) || type->IsLikelySmi())) {
|
| answer = LikelySmiBinaryOperation(op, &left, &right, overwrite_mode);
|
| } else {
|
| - GenericBinaryOpStub stub(op, overwrite_mode, NO_GENERIC_BINARY_FLAGS);
|
| + GenericBinaryOpStub stub(op,
|
| + overwrite_mode,
|
| + NO_GENERIC_BINARY_FLAGS,
|
| + operands_type);
|
| answer = stub.GenerateCall(masm_, frame_, &left, &right);
|
| }
|
| }
|
| +
|
| + // Set NumberInfo of result according to the operation performed.
|
| + // We rely on the fact that smis have a 32 bit payload on x64.
|
| + ASSERT(kSmiValueSize == 32);
|
| + NumberInfo::Type result_type = NumberInfo::kUnknown;
|
| + switch (op) {
|
| + case Token::COMMA:
|
| + result_type = right.number_info();
|
| + break;
|
| + case Token::OR:
|
| + case Token::AND:
|
| + // Result type can be either of the two input types.
|
| + result_type = operands_type;
|
| + break;
|
| + case Token::BIT_OR:
|
| + case Token::BIT_XOR:
|
| + case Token::BIT_AND:
|
| + // Result is always a smi.
|
| + result_type = NumberInfo::kSmi;
|
| + break;
|
| + case Token::SAR:
|
| + case Token::SHL:
|
| + // Result is always a smi.
|
| + result_type = NumberInfo::kSmi;
|
| + break;
|
| + case Token::SHR:
|
| + // Result of x >>> y is always a smi if y >= 1, otherwise a number.
|
| + result_type = (right.is_constant() && right.handle()->IsSmi()
|
| + && Smi::cast(*right.handle())->value() >= 1)
|
| + ? NumberInfo::kSmi
|
| + : NumberInfo::kNumber;
|
| + break;
|
| + case Token::ADD:
|
| + // Result could be a string or a number. Check types of inputs.
|
| + result_type = NumberInfo::IsNumber(operands_type)
|
| + ? NumberInfo::kNumber
|
| + : NumberInfo::kUnknown;
|
| + break;
|
| + case Token::SUB:
|
| + case Token::MUL:
|
| + case Token::DIV:
|
| + case Token::MOD:
|
| + // Result is always a number.
|
| + result_type = NumberInfo::kNumber;
|
| + break;
|
| + default:
|
| + UNREACHABLE();
|
| + }
|
| + answer.set_number_info(result_type);
|
| frame_->Push(&answer);
|
| }
|
|
|
| @@ -6221,6 +6390,63 @@
|
| }
|
|
|
|
|
| +void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
|
| + // Stack layout on entry:
|
| + //
|
| + // [rsp + kPointerSize]: constant elements.
|
| + // [rsp + (2 * kPointerSize)]: literal index.
|
| + // [rsp + (3 * kPointerSize)]: literals array.
|
| +
|
| + // All sizes here are multiples of kPointerSize.
|
| + int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
|
| + int size = JSArray::kSize + elements_size;
|
| +
|
| + // Load boilerplate object into rcx and check if we need to create a
|
| + // boilerplate.
|
| + Label slow_case;
|
| + __ movq(rcx, Operand(rsp, 3 * kPointerSize));
|
| + __ movq(rax, Operand(rsp, 2 * kPointerSize));
|
| + SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
|
| + __ movq(rcx,
|
| + FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
|
| + __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
|
| + __ j(equal, &slow_case);
|
| +
|
| + // Allocate both the JS array and the elements array in one big
|
| + // allocation. This avoids multiple limit checks.
|
| + __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
|
| +
|
| + // Copy the JS array part.
|
| + for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
|
| + if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
|
| + __ movq(rbx, FieldOperand(rcx, i));
|
| + __ movq(FieldOperand(rax, i), rbx);
|
| + }
|
| + }
|
| +
|
| + if (length_ > 0) {
|
| + // Get hold of the elements array of the boilerplate and setup the
|
| + // elements pointer in the resulting object.
|
| + __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
|
| + __ lea(rdx, Operand(rax, JSArray::kSize));
|
| + __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
|
| +
|
| + // Copy the elements array.
|
| + for (int i = 0; i < elements_size; i += kPointerSize) {
|
| + __ movq(rbx, FieldOperand(rcx, i));
|
| + __ movq(FieldOperand(rdx, i), rbx);
|
| + }
|
| + }
|
| +
|
| + // Return and remove the on-stack parameters.
|
| + __ ret(3 * kPointerSize);
|
| +
|
| + __ bind(&slow_case);
|
| + ExternalReference runtime(Runtime::kCreateArrayLiteralShallow);
|
| + __ TailCallRuntime(runtime, 3, 1);
|
| +}
|
| +
|
| +
|
| void ToBooleanStub::Generate(MacroAssembler* masm) {
|
| Label false_result, true_result, not_string;
|
| __ movq(rax, Operand(rsp, 1 * kPointerSize));
|
| @@ -7234,30 +7460,107 @@
|
|
|
|
|
| void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
|
| + // rsp[0] : return address
|
| + // rsp[8] : number of parameters
|
| + // rsp[16] : receiver displacement
|
| + // rsp[24] : function
|
| +
|
| // The displacement is used for skipping the return address and the
|
| // frame pointer on the stack. It is the offset of the last
|
| // parameter (if any) relative to the frame pointer.
|
| static const int kDisplacement = 2 * kPointerSize;
|
|
|
| // Check if the calling frame is an arguments adaptor frame.
|
| - Label runtime;
|
| + Label adaptor_frame, try_allocate, runtime;
|
| __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
|
| __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
|
| Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
|
| - __ j(not_equal, &runtime);
|
| - // Value in rcx is Smi encoded.
|
| + __ j(equal, &adaptor_frame);
|
|
|
| + // Get the length from the frame.
|
| + __ movq(rcx, Operand(rsp, 1 * kPointerSize));
|
| + __ jmp(&try_allocate);
|
| +
|
| // Patch the arguments.length and the parameters pointer.
|
| + __ bind(&adaptor_frame);
|
| __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
|
| __ movq(Operand(rsp, 1 * kPointerSize), rcx);
|
| - SmiIndex index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2);
|
| + // Do not clobber the length index for the indexing operation since
|
| + // it is used to compute the size for allocation later.
|
| + SmiIndex index = masm->SmiToIndex(rbx, rcx, kPointerSizeLog2);
|
| __ lea(rdx, Operand(rdx, index.reg, index.scale, kDisplacement));
|
| __ movq(Operand(rsp, 2 * kPointerSize), rdx);
|
|
|
| + // Try the new space allocation. Start out with computing the size of
|
| + // the arguments object and the elements array.
|
| + Label add_arguments_object;
|
| + __ bind(&try_allocate);
|
| + __ testq(rcx, rcx);
|
| + __ j(zero, &add_arguments_object);
|
| + index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2);
|
| + __ lea(rcx, Operand(index.reg, index.scale, FixedArray::kHeaderSize));
|
| + __ bind(&add_arguments_object);
|
| + __ addq(rcx, Immediate(Heap::kArgumentsObjectSize));
|
| +
|
| + // Do the allocation of both objects in one go.
|
| + __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
|
| +
|
| + // Get the arguments boilerplate from the current (global) context.
|
| + int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
|
| + __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
|
| + __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
|
| + __ movq(rdi, Operand(rdi, offset));
|
| +
|
| + // Copy the JS object part.
|
| + for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
|
| + __ movq(kScratchRegister, FieldOperand(rdi, i));
|
| + __ movq(FieldOperand(rax, i), kScratchRegister);
|
| + }
|
| +
|
| + // Setup the callee in-object property.
|
| + ASSERT(Heap::arguments_callee_index == 0);
|
| + __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
|
| + __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister);
|
| +
|
| + // Get the length (smi tagged) and set that as an in-object property too.
|
| + ASSERT(Heap::arguments_length_index == 1);
|
| + __ movq(rcx, Operand(rsp, 1 * kPointerSize));
|
| + __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx);
|
| +
|
| + // If there are no actual arguments, we're done.
|
| + Label done;
|
| + __ testq(rcx, rcx);
|
| + __ j(zero, &done);
|
| +
|
| + // Get the parameters pointer from the stack and untag the length.
|
| + __ movq(rdx, Operand(rsp, 2 * kPointerSize));
|
| + __ SmiToInteger32(rcx, rcx);
|
| +
|
| + // Setup the elements pointer in the allocated arguments object and
|
| + // initialize the header in the elements fixed array.
|
| + __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
|
| + __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
|
| + __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
|
| + __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
|
| + __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
|
| +
|
| + // Copy the fixed array slots.
|
| + Label loop;
|
| + __ bind(&loop);
|
| + __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
|
| + __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
|
| + __ addq(rdi, Immediate(kPointerSize));
|
| + __ subq(rdx, Immediate(kPointerSize));
|
| + __ decq(rcx);
|
| + __ j(not_zero, &loop);
|
| +
|
| + // Return and remove the on-stack parameters.
|
| + __ bind(&done);
|
| + __ ret(3 * kPointerSize);
|
| +
|
| // Do the runtime call to allocate the arguments object.
|
| __ bind(&runtime);
|
| - Runtime::Function* f = Runtime::FunctionForId(Runtime::kNewArgumentsFast);
|
| - __ TailCallRuntime(ExternalReference(f), 3, f->result_size);
|
| + __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3, 1);
|
| }
|
|
|
|
|
| @@ -7592,6 +7895,9 @@
|
|
|
| // Slow-case: Non-function called.
|
| __ bind(&slow);
|
| + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
|
| + // of the original receiver from the call site).
|
| + __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
|
| __ Set(rax, argc_);
|
| __ Set(rbx, 0);
|
| __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
|
| @@ -7980,13 +8286,14 @@
|
| }
|
|
|
| OS::SNPrintF(Vector<char>(name_, len),
|
| - "GenericBinaryOpStub_%s_%s%s_%s%s_%s",
|
| + "GenericBinaryOpStub_%s_%s%s_%s%s_%s%s",
|
| op_name,
|
| overwrite_name,
|
| (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
|
| args_in_registers_ ? "RegArgs" : "StackArgs",
|
| args_reversed_ ? "_R" : "",
|
| - use_sse3_ ? "SSE3" : "SSE2");
|
| + use_sse3_ ? "SSE3" : "SSE2",
|
| + NumberInfo::ToString(operands_type_));
|
| return name_;
|
| }
|
|
|
| @@ -8012,6 +8319,8 @@
|
| }
|
| } else if (left.is(left_arg)) {
|
| __ movq(right_arg, right);
|
| + } else if (right.is(right_arg)) {
|
| + __ movq(left_arg, left);
|
| } else if (left.is(right_arg)) {
|
| if (IsOperationCommutative()) {
|
| __ movq(left_arg, right);
|
| @@ -8030,8 +8339,6 @@
|
| __ movq(right_arg, right);
|
| __ movq(left_arg, left);
|
| }
|
| - } else if (right.is(right_arg)) {
|
| - __ movq(left_arg, left);
|
| } else {
|
| // Order of moves is not important.
|
| __ movq(left_arg, left);
|
| @@ -8067,6 +8374,10 @@
|
| __ Move(left_arg, right);
|
| SetArgsReversed();
|
| } else {
|
| + // For non-commutative operations, left and right_arg might be
|
| + // the same register. Therefore, the order of the moves is
|
| + // important here in order to not overwrite left before moving
|
| + // it to left_arg.
|
| __ movq(left_arg, left);
|
| __ Move(right_arg, right);
|
| }
|
| @@ -8099,8 +8410,12 @@
|
| __ Move(right_arg, left);
|
| SetArgsReversed();
|
| } else {
|
| + // For non-commutative operations, right and left_arg might be
|
| + // the same register. Therefore, the order of the moves is
|
| + // important here in order to not overwrite right before moving
|
| + // it to right_arg.
|
| + __ movq(right_arg, right);
|
| __ Move(left_arg, left);
|
| - __ movq(right_arg, right);
|
| }
|
| // Update flags to indicate that arguments are in registers.
|
| SetArgsInRegisters();
|
| @@ -8302,7 +8617,15 @@
|
| case Token::DIV: {
|
| // rax: y
|
| // rdx: x
|
| - FloatingPointHelper::CheckNumberOperands(masm, &call_runtime);
|
| + if (NumberInfo::IsNumber(operands_type_)) {
|
| + if (FLAG_debug_code) {
|
| + // Assert at runtime that inputs are only numbers.
|
| + __ AbortIfNotNumber(rdx, "GenericBinaryOpStub operand not a number.");
|
| + __ AbortIfNotNumber(rax, "GenericBinaryOpStub operand not a number.");
|
| + }
|
| + } else {
|
| + FloatingPointHelper::CheckNumberOperands(masm, &call_runtime);
|
| + }
|
| // Fast-case: Both operands are numbers.
|
| // xmm4 and xmm5 are volatile XMM registers.
|
| FloatingPointHelper::LoadFloatOperands(masm, xmm4, xmm5);
|
|
|