| Index: src/x64/codegen-x64.cc
|
| diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
|
| index 8e6dbef2d16186720abee8dc4b0ea81299b48c8a..9a50a6015a6cd27ea51d6081194e3f3e1b339f8e 100644
|
| --- a/src/x64/codegen-x64.cc
|
| +++ b/src/x64/codegen-x64.cc
|
| @@ -54,6 +54,7 @@ void DeferredCode::SaveRegisters() {
|
| }
|
| }
|
|
|
| +
|
| void DeferredCode::RestoreRegisters() {
|
| // Restore registers in reverse order due to the stack.
|
| for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
|
| @@ -237,8 +238,8 @@ class FloatingPointHelper : public AllStatic {
|
| // Test if operands are smi or number objects (fp). Requirements:
|
| // operand_1 in rax, operand_2 in rdx; falls through on float or smi
|
| // operands, jumps to the non_float label otherwise.
|
| - static void CheckFloatOperands(MacroAssembler* masm,
|
| - Label* non_float);
|
| + static void CheckNumberOperands(MacroAssembler* masm,
|
| + Label* non_float);
|
|
|
| // Allocate a heap number in new space with undefined value.
|
| // Returns tagged pointer in result, or jumps to need_gc if new space is full.
|
| @@ -278,7 +279,7 @@ void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
|
| __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT);
|
| frame_->EmitPush(kScratchRegister);
|
| frame_->EmitPush(rsi); // The context is the second argument.
|
| - frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
|
| + frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0));
|
| Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
|
| // Return value is ignored.
|
| }
|
| @@ -767,8 +768,8 @@ void CodeGenerator::CallApplyLazy(Property* apply,
|
| // adaptor frame below it.
|
| Label invoke, adapted;
|
| __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
|
| - __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
|
| - __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| + __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
|
| + Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
|
| __ j(equal, &adapted);
|
|
|
| // No arguments adaptor frame. Copy fixed number of arguments.
|
| @@ -793,12 +794,12 @@ void CodeGenerator::CallApplyLazy(Property* apply,
|
| // have to worry about getting rid of the elements from the virtual
|
| // frame.
|
| Label loop;
|
| - __ bind(&loop);
|
| __ testl(rcx, rcx);
|
| __ j(zero, &invoke);
|
| + __ bind(&loop);
|
| __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize));
|
| __ decl(rcx);
|
| - __ jmp(&loop);
|
| + __ j(not_zero, &loop);
|
|
|
| // Invoke the function. The virtual frame knows about the receiver
|
| // so make sure to forget that explicitly.
|
| @@ -933,7 +934,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
|
| // Declaration nodes are always introduced in one of two modes.
|
| ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
|
| PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
|
| - frame_->EmitPush(Immediate(Smi::FromInt(attr)));
|
| + frame_->EmitPush(Smi::FromInt(attr));
|
| // Push initial value, if any.
|
| // Note: For variables we must not push an initial value (such as
|
| // 'undefined') because we may have a (legal) redeclaration and we
|
| @@ -943,7 +944,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
|
| } else if (node->fun() != NULL) {
|
| Load(node->fun());
|
| } else {
|
| - frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value!
|
| + frame_->EmitPush(Smi::FromInt(0)); // no initial value!
|
| }
|
| Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
|
| // Ignore the return value (declarations are statements).
|
| @@ -1700,19 +1701,19 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
|
| __ movl(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
|
| __ Integer32ToSmi(rax, rax);
|
| frame_->EmitPush(rax); // <- slot 1
|
| - frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
|
| + frame_->EmitPush(Smi::FromInt(0)); // <- slot 0
|
| entry.Jump();
|
|
|
| fixed_array.Bind();
|
| // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
|
| - frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3
|
| + frame_->EmitPush(Smi::FromInt(0)); // <- slot 3
|
| frame_->EmitPush(rax); // <- slot 2
|
|
|
| // Push the length of the array and the initial index onto the stack.
|
| __ movl(rax, FieldOperand(rax, FixedArray::kLengthOffset));
|
| __ Integer32ToSmi(rax, rax);
|
| frame_->EmitPush(rax); // <- slot 1
|
| - frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
|
| + frame_->EmitPush(Smi::FromInt(0)); // <- slot 0
|
|
|
| // Condition.
|
| entry.Bind();
|
| @@ -1722,8 +1723,8 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
|
| node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
|
|
|
| __ movq(rax, frame_->ElementAt(0)); // load the current count
|
| - __ cmpl(rax, frame_->ElementAt(1)); // compare to the array length
|
| - node->break_target()->Branch(above_equal);
|
| + __ SmiCompare(frame_->ElementAt(1), rax); // compare to the array length
|
| + node->break_target()->Branch(below_equal);
|
|
|
| // Get the i'th entry of the array.
|
| __ movq(rdx, frame_->ElementAt(2));
|
| @@ -1796,7 +1797,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
|
| node->continue_target()->Bind();
|
| frame_->SpillAll();
|
| frame_->EmitPop(rax);
|
| - __ addq(rax, Immediate(Smi::FromInt(1)));
|
| + __ SmiAddConstant(rax, rax, Smi::FromInt(1));
|
| frame_->EmitPush(rax);
|
| entry.Jump();
|
|
|
| @@ -1969,7 +1970,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
|
|
|
| frame_->EmitPush(rax);
|
| // In case of thrown exceptions, this is where we continue.
|
| - __ movq(rcx, Immediate(Smi::FromInt(THROWING)));
|
| + __ Move(rcx, Smi::FromInt(THROWING));
|
| finally_block.Jump();
|
|
|
| // --- Try block ---
|
| @@ -2028,7 +2029,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
|
| // Fake a top of stack value (unneeded when FALLING) and set the
|
| // state in ecx, then jump around the unlink blocks if any.
|
| frame_->EmitPush(Heap::kUndefinedValueRootIndex);
|
| - __ movq(rcx, Immediate(Smi::FromInt(FALLING)));
|
| + __ Move(rcx, Smi::FromInt(FALLING));
|
| if (nof_unlinks > 0) {
|
| finally_block.Jump();
|
| }
|
| @@ -2074,7 +2075,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
|
| // Fake TOS for targets that shadowed breaks and continues.
|
| frame_->EmitPush(Heap::kUndefinedValueRootIndex);
|
| }
|
| - __ movq(rcx, Immediate(Smi::FromInt(JUMPING + i)));
|
| + __ Move(rcx, Smi::FromInt(JUMPING + i));
|
| if (--nof_unlinks > 0) {
|
| // If this is not the last unlink block, jump around the next.
|
| finally_block.Jump();
|
| @@ -2105,7 +2106,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
|
| for (int i = 0; i < shadows.length(); i++) {
|
| if (has_valid_frame() && shadows[i]->is_bound()) {
|
| BreakTarget* original = shadows[i]->other_target();
|
| - __ cmpq(rcx, Immediate(Smi::FromInt(JUMPING + i)));
|
| + __ SmiCompare(rcx, Smi::FromInt(JUMPING + i));
|
| if (i == kReturnShadowIndex) {
|
| // The return value is (already) in rax.
|
| Result return_value = allocator_->Allocate(rax);
|
| @@ -2130,7 +2131,7 @@ void CodeGenerator::VisitTryFinally(TryFinally* node) {
|
| if (has_valid_frame()) {
|
| // Check if we need to rethrow the exception.
|
| JumpTarget exit;
|
| - __ cmpq(rcx, Immediate(Smi::FromInt(THROWING)));
|
| + __ SmiCompare(rcx, Smi::FromInt(THROWING));
|
| exit.Branch(not_equal);
|
|
|
| // Rethrow exception.
|
| @@ -2278,7 +2279,7 @@ void DeferredRegExpLiteral::Generate() {
|
| // Literal array (0).
|
| __ push(literals_);
|
| // Literal index (1).
|
| - __ push(Immediate(Smi::FromInt(node_->literal_index())));
|
| + __ Push(Smi::FromInt(node_->literal_index()));
|
| // RegExp pattern (2).
|
| __ Push(node_->pattern());
|
| // RegExp flags (3).
|
| @@ -2351,7 +2352,7 @@ void DeferredObjectLiteral::Generate() {
|
| // Literal array (0).
|
| __ push(literals_);
|
| // Literal index (1).
|
| - __ push(Immediate(Smi::FromInt(node_->literal_index())));
|
| + __ Push(Smi::FromInt(node_->literal_index()));
|
| // Constant properties (2).
|
| __ Push(node_->constant_properties());
|
| __ CallRuntime(Runtime::kCreateObjectLiteralBoilerplate, 3);
|
| @@ -2484,7 +2485,7 @@ void DeferredArrayLiteral::Generate() {
|
| // Literal array (0).
|
| __ push(literals_);
|
| // Literal index (1).
|
| - __ push(Immediate(Smi::FromInt(node_->literal_index())));
|
| + __ Push(Smi::FromInt(node_->literal_index()));
|
| // Constant properties (2).
|
| __ Push(node_->literals());
|
| __ CallRuntime(Runtime::kCreateArrayLiteralBoilerplate, 3);
|
| @@ -3151,7 +3152,7 @@ void DeferredPrefixCountOperation::Generate() {
|
| __ push(dst_);
|
| __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
|
| __ push(rax);
|
| - __ push(Immediate(Smi::FromInt(1)));
|
| + __ Push(Smi::FromInt(1));
|
| if (is_increment_) {
|
| __ CallRuntime(Runtime::kNumberAdd, 2);
|
| } else {
|
| @@ -3191,7 +3192,7 @@ void DeferredPostfixCountOperation::Generate() {
|
|
|
| // Call the runtime for the addition or subtraction.
|
| __ push(rax);
|
| - __ push(Immediate(Smi::FromInt(1)));
|
| + __ Push(Smi::FromInt(1));
|
| if (is_increment_) {
|
| __ CallRuntime(Runtime::kNumberAdd, 2);
|
| } else {
|
| @@ -3249,15 +3250,18 @@ void CodeGenerator::VisitCountOperation(CountOperation* node) {
|
| is_increment);
|
| }
|
|
|
| - __ movq(kScratchRegister, new_value.reg());
|
| + __ JumpIfNotSmi(new_value.reg(), deferred->entry_label());
|
| if (is_increment) {
|
| - __ addl(kScratchRegister, Immediate(Smi::FromInt(1)));
|
| + __ SmiAddConstant(kScratchRegister,
|
| + new_value.reg(),
|
| + Smi::FromInt(1),
|
| + deferred->entry_label());
|
| } else {
|
| - __ subl(kScratchRegister, Immediate(Smi::FromInt(1)));
|
| + __ SmiSubConstant(kScratchRegister,
|
| + new_value.reg(),
|
| + Smi::FromInt(1),
|
| + deferred->entry_label());
|
| }
|
| - // Smi test.
|
| - deferred->Branch(overflow);
|
| - __ JumpIfNotSmi(kScratchRegister, deferred->entry_label());
|
| __ movq(new_value.reg(), kScratchRegister);
|
| deferred->BindExit();
|
|
|
| @@ -3634,15 +3638,15 @@ void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
|
|
|
| // Skip the arguments adaptor frame if it exists.
|
| Label check_frame_marker;
|
| - __ cmpq(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
|
| - Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| + __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
|
| + Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
|
| __ j(not_equal, &check_frame_marker);
|
| __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
|
|
|
| // Check the marker in the calling frame.
|
| __ bind(&check_frame_marker);
|
| - __ cmpq(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
|
| - Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
|
| + __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
|
| + Smi::FromInt(StackFrame::CONSTRUCT));
|
| fp.Unuse();
|
| destination()->Split(equal);
|
| }
|
| @@ -3878,7 +3882,7 @@ void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
|
| void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
|
| ASSERT(args->length() == 0);
|
| - // RBP value is aligned, so it should be tagged as a smi (without necesarily
|
| - // being padded as a smi).
|
| + // RBP value is aligned, so it should be tagged as a smi (without necessarily
|
| + // being padded as a smi), but it should not be treated as a smi.
|
| ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
|
| Result rbp_as_smi = allocator_->Allocate();
|
| ASSERT(rbp_as_smi.is_valid());
|
| @@ -4262,8 +4266,8 @@ void CodeGenerator::ToBoolean(ControlDestination* dest) {
|
| dest->false_target()->Branch(equal);
|
|
|
| // Smi => false iff zero.
|
| - Condition equals = masm_->CheckSmiEqualsConstant(value.reg(), 0);
|
| - dest->false_target()->Branch(equals);
|
| + __ SmiCompare(value.reg(), Smi::FromInt(0));
|
| + dest->false_target()->Branch(equal);
|
| Condition is_smi = masm_->CheckSmi(value.reg());
|
| dest->true_target()->Branch(is_smi);
|
|
|
| @@ -4945,7 +4949,7 @@ void CodeGenerator::Comparison(Condition cc,
|
| right_side = Result(right_val);
|
| // Test smi equality and comparison by signed int comparison.
|
| - // Both sides are smis, so we can use an Immediate.
|
| + // Both sides are smis, so we can compare them directly.
|
| - __ cmpl(left_side.reg(), Immediate(Smi::cast(*right_side.handle())));
|
| + __ SmiCompare(left_side.reg(), Smi::cast(*right_side.handle()));
|
| left_side.Unuse();
|
| right_side.Unuse();
|
| dest->Split(cc);
|
| @@ -4978,7 +4982,7 @@ void CodeGenerator::Comparison(Condition cc,
|
| Result temp = allocator()->Allocate();
|
| ASSERT(temp.is_valid());
|
| __ movq(temp.reg(),
|
| - FieldOperand(operand.reg(), HeapObject::kMapOffset));
|
| + FieldOperand(operand.reg(), HeapObject::kMapOffset));
|
| __ testb(FieldOperand(temp.reg(), Map::kBitFieldOffset),
|
| Immediate(1 << Map::kIsUndetectable));
|
| temp.Unuse();
|
| @@ -4998,7 +5002,7 @@ void CodeGenerator::Comparison(Condition cc,
|
| CompareStub stub(cc, strict);
|
| Result answer = frame_->CallStub(&stub, &left_side, &right_side);
|
| // The result is a Smi, which is negative, zero, or positive.
|
| - __ testl(answer.reg(), answer.reg()); // Both zero and sign flag right.
|
| + __ SmiTest(answer.reg()); // Sets both zero and sign flags.
|
| answer.Unuse();
|
| dest->Split(cc);
|
| } else {
|
| @@ -5016,7 +5020,7 @@ void CodeGenerator::Comparison(Condition cc,
|
| // When non-smi, call out to the compare stub.
|
| CompareStub stub(cc, strict);
|
| Result answer = frame_->CallStub(&stub, &left_side, &right_side);
|
| - __ testl(answer.reg(), answer.reg()); // Sets both zero and sign flags.
|
| + __ SmiTest(answer.reg()); // Sets both zero and sign flags.
|
| answer.Unuse();
|
| dest->true_target()->Branch(cc);
|
| dest->false_target()->Jump();
|
| @@ -5024,7 +5028,7 @@ void CodeGenerator::Comparison(Condition cc,
|
| is_smi.Bind();
|
| left_side = Result(left_reg);
|
| right_side = Result(right_reg);
|
| - __ cmpl(left_side.reg(), right_side.reg());
|
| + __ SmiCompare(left_side.reg(), right_side.reg());
|
| right_side.Unuse();
|
| left_side.Unuse();
|
| dest->Split(cc);
|
| @@ -5221,7 +5225,7 @@ void DeferredReferenceGetNamedValue::Generate() {
|
|
|
| void DeferredInlineSmiAdd::Generate() {
|
| __ push(dst_);
|
| - __ push(Immediate(value_));
|
| + __ Push(value_);
|
| GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, SMI_CODE_INLINED);
|
| __ CallStub(&igostub);
|
| if (!dst_.is(rax)) __ movq(dst_, rax);
|
| @@ -5229,7 +5233,7 @@ void DeferredInlineSmiAdd::Generate() {
|
|
|
|
|
| void DeferredInlineSmiAddReversed::Generate() {
|
| - __ push(Immediate(value_)); // Note: sign extended.
|
| + __ Push(value_);
|
| __ push(dst_);
|
| GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, SMI_CODE_INLINED);
|
| __ CallStub(&igostub);
|
| @@ -5239,7 +5243,7 @@ void DeferredInlineSmiAddReversed::Generate() {
|
|
|
| void DeferredInlineSmiSub::Generate() {
|
| __ push(dst_);
|
| - __ push(Immediate(value_)); // Note: sign extended.
|
| + __ Push(value_);
|
| GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, SMI_CODE_INLINED);
|
| __ CallStub(&igostub);
|
| if (!dst_.is(rax)) __ movq(dst_, rax);
|
| @@ -5248,7 +5252,7 @@ void DeferredInlineSmiSub::Generate() {
|
|
|
| void DeferredInlineSmiOperation::Generate() {
|
| __ push(src_);
|
| - __ push(Immediate(value_)); // Note: sign extended.
|
| + __ Push(value_);
|
| // For mod we don't generate all the Smi code inline.
|
| GenericBinaryOpStub stub(
|
| op_,
|
| @@ -5306,7 +5310,7 @@ void CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
|
| __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
|
| __ SmiAddConstant(operand->reg(),
|
| operand->reg(),
|
| - int_value,
|
| + smi_value,
|
| deferred->entry_label());
|
| deferred->BindExit();
|
| frame_->Push(operand);
|
| @@ -5328,7 +5332,7 @@ void CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
|
| // A smi currently fits in a 32-bit Immediate.
|
| __ SmiSubConstant(operand->reg(),
|
| operand->reg(),
|
| - int_value,
|
| + smi_value,
|
| deferred->entry_label());
|
| deferred->BindExit();
|
| frame_->Push(operand);
|
| @@ -5382,9 +5386,9 @@ void CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
|
| overwrite_mode);
|
| __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
|
| __ SmiShiftLogicalRightConstant(answer.reg(),
|
| - operand->reg(),
|
| - shift_value,
|
| - deferred->entry_label());
|
| + operand->reg(),
|
| + shift_value,
|
| + deferred->entry_label());
|
| deferred->BindExit();
|
| operand->Unuse();
|
| frame_->Push(&answer);
|
| @@ -5453,15 +5457,15 @@ void CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
|
| overwrite_mode);
|
| __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
|
| if (op == Token::BIT_AND) {
|
| - __ SmiAndConstant(operand->reg(), operand->reg(), int_value);
|
| + __ SmiAndConstant(operand->reg(), operand->reg(), smi_value);
|
| } else if (op == Token::BIT_XOR) {
|
| if (int_value != 0) {
|
| - __ SmiXorConstant(operand->reg(), operand->reg(), int_value);
|
| + __ SmiXorConstant(operand->reg(), operand->reg(), smi_value);
|
| }
|
| } else {
|
| ASSERT(op == Token::BIT_OR);
|
| if (int_value != 0) {
|
| - __ SmiOrConstant(operand->reg(), operand->reg(), int_value);
|
| + __ SmiOrConstant(operand->reg(), operand->reg(), smi_value);
|
| }
|
| }
|
| deferred->BindExit();
|
| @@ -5476,18 +5480,21 @@ void CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
|
| (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
|
| operand->ToRegister();
|
| frame_->Spill(operand->reg());
|
| - DeferredCode* deferred = new DeferredInlineSmiOperation(op,
|
| - operand->reg(),
|
| - operand->reg(),
|
| - smi_value,
|
| - overwrite_mode);
|
| + DeferredCode* deferred =
|
| + new DeferredInlineSmiOperation(op,
|
| + operand->reg(),
|
| + operand->reg(),
|
| + smi_value,
|
| + overwrite_mode);
|
| // Check for negative or non-Smi left hand side.
|
| __ JumpIfNotPositiveSmi(operand->reg(), deferred->entry_label());
|
| if (int_value < 0) int_value = -int_value;
|
| if (int_value == 1) {
|
| - __ movl(operand->reg(), Immediate(Smi::FromInt(0)));
|
| + __ Move(operand->reg(), Smi::FromInt(0));
|
| } else {
|
| - __ SmiAndConstant(operand->reg(), operand->reg(), int_value - 1);
|
| + __ SmiAndConstant(operand->reg(),
|
| + operand->reg(),
|
| + Smi::FromInt(int_value - 1));
|
| }
|
| deferred->BindExit();
|
| frame_->Push(operand);
|
| @@ -6085,8 +6092,6 @@ void Reference::SetValue(InitState init_state) {
|
|
|
| // Check that the key is a non-negative smi.
|
| __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label());
|
| - // Ensure that the smi is zero-extended. This is not guaranteed.
|
| - __ movl(key.reg(), key.reg());
|
|
|
| // Check that the receiver is not a smi.
|
| __ JumpIfSmi(receiver.reg(), deferred->entry_label());
|
| @@ -6096,10 +6101,10 @@ void Reference::SetValue(InitState init_state) {
|
| deferred->Branch(not_equal);
|
|
|
| // Check that the key is within bounds. Both the key and the
|
| - // length of the JSArray are smis, so compare only low 32 bits.
|
| - __ cmpl(key.reg(),
|
| - FieldOperand(receiver.reg(), JSArray::kLengthOffset));
|
| - deferred->Branch(greater_equal);
|
| + // length of the JSArray are smis.
|
| + __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
|
| + key.reg());
|
| + deferred->Branch(less_equal);
|
|
|
| // Get the elements array from the receiver and check that it
|
| // is a flat array (not a dictionary).
|
| @@ -6303,22 +6308,17 @@ void UnarySubStub::Generate(MacroAssembler* masm) {
|
| Label slow;
|
| Label done;
|
| Label try_float;
|
| - Label special;
|
| // Check whether the value is a smi.
|
| __ JumpIfNotSmi(rax, &try_float);
|
|
|
| // Enter runtime system if the value of the smi is zero
|
| // to make sure that we switch between 0 and -0.
|
| - // Also enter it if the value of the smi is Smi::kMinValue
|
| - __ testl(rax, Immediate(0x7FFFFFFE));
|
| - __ j(zero, &special);
|
| - __ negl(rax);
|
| - __ jmp(&done);
|
| + // Also enter it if the value of the smi is Smi::kMinValue.
|
| + __ SmiNeg(rax, rax, &done);
|
|
|
| - __ bind(&special);
|
| - // Either zero or -0x4000000, neither of which become a smi when negated.
|
| - __ testl(rax, rax);
|
| - __ j(not_zero, &slow);
|
| + // Either zero or Smi::kMinValue, neither of which become a smi when negated.
|
| + __ SmiCompare(rax, Smi::FromInt(0));
|
| + __ j(not_equal, &slow);
|
| __ Move(rax, Factory::minus_zero_value());
|
| __ jmp(&done);
|
|
|
| @@ -6470,7 +6470,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
|
| // Call builtin if operands are not floating point or smi.
|
| Label check_for_symbols;
|
| // Push arguments on stack, for helper functions.
|
| - FloatingPointHelper::CheckFloatOperands(masm, &check_for_symbols);
|
| + FloatingPointHelper::CheckNumberOperands(masm, &check_for_symbols);
|
| FloatingPointHelper::LoadFloatOperands(masm, rax, rdx);
|
| __ FCmp();
|
|
|
| @@ -6527,7 +6527,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
|
| ASSERT(cc_ == greater || cc_ == greater_equal); // remaining cases
|
| ncr = LESS;
|
| }
|
| - __ push(Immediate(Smi::FromInt(ncr)));
|
| + __ Push(Smi::FromInt(ncr));
|
| }
|
|
|
| // Restore return address on the stack.
|
| @@ -6626,7 +6626,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
|
| __ ret(2 * kPointerSize);
|
|
|
| __ bind(&is_not_instance);
|
| - __ movq(rax, Immediate(Smi::FromInt(1)));
|
| + __ Move(rax, Smi::FromInt(1));
|
| __ ret(2 * kPointerSize);
|
|
|
| // Slow-case: Go through the JavaScript implementation.
|
| @@ -6644,8 +6644,8 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
|
| // Check if the calling frame is an arguments adaptor frame.
|
| Label runtime;
|
| __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
|
| - __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
|
| - __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| + __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
|
| + Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
|
| __ j(not_equal, &runtime);
|
| // Value in rcx is Smi encoded.
|
|
|
| @@ -6678,8 +6678,8 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
|
| // Check if the calling frame is an arguments adaptor frame.
|
| Label adaptor;
|
| __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
|
| - __ movq(rcx, Operand(rbx, StandardFrameConstants::kContextOffset));
|
| - __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| + __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
|
| + Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
|
| __ j(equal, &adaptor);
|
|
|
| // Check index against formal parameters count limit passed in
|
| @@ -6726,8 +6726,8 @@ void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) {
|
| // Check if the calling frame is an arguments adaptor frame.
|
| Label adaptor;
|
| __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
|
| - __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
|
| - __ cmpq(rcx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| + __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
|
| + Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
|
| __ j(equal, &adaptor);
|
|
|
| // Nothing to do: The formal number of parameters has already been
|
| @@ -7069,8 +7069,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
|
|
| // Push the stack frame type marker twice.
|
| int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
|
| - __ push(Immediate(Smi::FromInt(marker))); // context slot
|
| - __ push(Immediate(Smi::FromInt(marker))); // function slot
|
| + __ Push(Smi::FromInt(marker)); // context slot
|
| + __ Push(Smi::FromInt(marker)); // function slot
|
| // Save callee-saved registers (X64 calling conventions).
|
| __ push(r12);
|
| __ push(r13);
|
| @@ -7182,7 +7182,7 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
|
| // must be inserted below the return address on the stack so we
|
| // temporarily store that in a register.
|
| __ pop(rax);
|
| - __ push(Immediate(Smi::FromInt(0)));
|
| + __ Push(Smi::FromInt(0));
|
| __ push(rax);
|
|
|
| // Do tail-call to runtime routine.
|
| @@ -7321,8 +7321,8 @@ void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
|
| }
|
|
|
|
|
| -void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
|
| - Label* non_float) {
|
| +void FloatingPointHelper::CheckNumberOperands(MacroAssembler* masm,
|
| + Label* non_float) {
|
| Label test_other, done;
|
| // Test if both operands are numbers (heap_numbers or smis).
|
| // If not, jump to label non_float.
|
| @@ -7403,17 +7403,17 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
|
| case Token::SHR:
|
| case Token::SAR:
|
| // Move the second operand into register ecx.
|
| - __ movl(rcx, rbx);
|
| + __ movq(rcx, rbx);
|
| // Perform the operation.
|
| switch (op_) {
|
| case Token::SAR:
|
| - __ SmiShiftArithmeticRight(rax, rax, rbx);
|
| + __ SmiShiftArithmeticRight(rax, rax, rcx);
|
| break;
|
| case Token::SHR:
|
| - __ SmiShiftLogicalRight(rax, rax, rbx, slow);
|
| + __ SmiShiftLogicalRight(rax, rax, rcx, slow);
|
| break;
|
| case Token::SHL:
|
| - __ SmiShiftLeft(rax, rax, rbx, slow);
|
| + __ SmiShiftLeft(rax, rax, rcx, slow);
|
| break;
|
| default:
|
| UNREACHABLE();
|
| @@ -7454,7 +7454,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
|
| case Token::DIV: {
|
| // rax: y
|
| // rdx: x
|
| - FloatingPointHelper::CheckFloatOperands(masm, &call_runtime);
|
| + FloatingPointHelper::CheckNumberOperands(masm, &call_runtime);
|
| // Fast-case: Both operands are numbers.
|
| // Allocate a heap number, if needed.
|
| Label skip_allocation;
|
| @@ -7499,7 +7499,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
|
| case Token::SAR:
|
| case Token::SHL:
|
| case Token::SHR: {
|
| - FloatingPointHelper::CheckFloatOperands(masm, &call_runtime);
|
| + FloatingPointHelper::CheckNumberOperands(masm, &call_runtime);
|
| // TODO(X64): Don't convert a Smi to float and then back to int32
|
| // afterwards.
|
| FloatingPointHelper::LoadFloatOperands(masm);
|
| @@ -7553,29 +7553,27 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
|
| __ pop(rcx);
|
| __ pop(rax);
|
| switch (op_) {
|
| - case Token::BIT_OR: __ or_(rax, rcx); break;
|
| - case Token::BIT_AND: __ and_(rax, rcx); break;
|
| - case Token::BIT_XOR: __ xor_(rax, rcx); break;
|
| + case Token::BIT_OR: __ orl(rax, rcx); break;
|
| + case Token::BIT_AND: __ andl(rax, rcx); break;
|
| + case Token::BIT_XOR: __ xorl(rax, rcx); break;
|
| case Token::SAR: __ sarl(rax); break;
|
| case Token::SHL: __ shll(rax); break;
|
| case Token::SHR: __ shrl(rax); break;
|
| default: UNREACHABLE();
|
| }
|
| if (op_ == Token::SHR) {
|
| - // Check if result is non-negative and fits in a smi.
|
| - __ testl(rax, Immediate(0xc0000000));
|
| - __ j(not_zero, &non_smi_result);
|
| - } else {
|
| - // Check if result fits in a smi.
|
| - __ cmpl(rax, Immediate(0xc0000000));
|
| + // Check that the result is non-negative. A negative result can only
|
| + // arise from a shift by zero, which also doesn't update the sign flag.
|
| + __ testl(rax, rax);
|
| __ j(negative, &non_smi_result);
|
| }
|
| - // Tag smi result and return.
|
| + __ JumpIfNotValidSmiValue(rax, &non_smi_result);
|
| + // Tag smi result, if possible, and return.
|
| __ Integer32ToSmi(rax, rax);
|
| __ ret(2 * kPointerSize);
|
|
|
| // All ops except SHR return a signed int32 that we load in a HeapNumber.
|
| - if (op_ != Token::SHR) {
|
| + if (op_ != Token::SHR && non_smi_result.is_linked()) {
|
| __ bind(&non_smi_result);
|
| // Allocate a heap number if needed.
|
| __ movsxlq(rbx, rax); // rbx: sign extended 32-bit result
|
|
|