Chromium Code Reviews

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 47533002: Use nearlabel AFAP in lithium codegen (Closed) | Base URL: git://github.com/v8/v8.git@master
Patch Set: fixed x64 assert fail | Created 7 years, 1 month ago
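
The point of this CL, for context: hinting a branch with Label::kNear lets the assembler emit the 8-bit-displacement jump forms (jcc rel8 and jmp rel8, 2 bytes each) instead of the default 32-bit forms (6 and 5 bytes), so code shrinks wherever the target label is bound nearby. A minimal sketch of the pattern, built only from macro-assembler calls that appear in this diff (the byte counts are standard x64 encodings, not something the patch states):

    Label done;
    __ j(not_equal, &done, Label::kNear);  // jcc rel8: 2 bytes instead of 6
    // ... anything bound within the signed 8-bit reach ...
    __ jmp(&done, Label::kNear);           // jmp rel8: 2 bytes instead of 5
    __ bind(&done);                        // must land within -128..+127 bytes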
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 631 matching lines...)
642 ASSERT(environment->HasBeenRegistered()); 642 ASSERT(environment->HasBeenRegistered());
643 int id = environment->deoptimization_index(); 643 int id = environment->deoptimization_index();
644 ASSERT(info()->IsOptimizing() || info()->IsStub()); 644 ASSERT(info()->IsOptimizing() || info()->IsStub());
645 Address entry = 645 Address entry =
646 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); 646 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
647 if (entry == NULL) { 647 if (entry == NULL) {
648 Abort(kBailoutWasNotPrepared); 648 Abort(kBailoutWasNotPrepared);
649 return; 649 return;
650 } 650 }
651 651
652 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) { 652 if (DeoptEveryNTimes()) {
653 ExternalReference count = ExternalReference::stress_deopt_count(isolate()); 653 ExternalReference count = ExternalReference::stress_deopt_count(isolate());
654 Label no_deopt; 654 Label no_deopt;
655 __ pushfq(); 655 __ pushfq();
656 __ push(rax); 656 __ push(rax);
657 Operand count_operand = masm()->ExternalOperand(count, kScratchRegister); 657 Operand count_operand = masm()->ExternalOperand(count, kScratchRegister);
658 __ movl(rax, count_operand); 658 __ movl(rax, count_operand);
659 __ subl(rax, Immediate(1)); 659 __ subl(rax, Immediate(1));
660 __ j(not_zero, &no_deopt, Label::kNear); 660 __ j(not_zero, &no_deopt, Label::kNear);
661 if (FLAG_trap_on_deopt) __ int3(); 661 if (FLAG_trap_on_deopt) __ int3();
662 __ movl(rax, Immediate(FLAG_deopt_every_n_times)); 662 __ movl(rax, Immediate(FLAG_deopt_every_n_times));
(...skipping 969 matching lines...)
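
The left-hand side of line 652 shows the condition that the new DeoptEveryNTimes() predicate replaces, so the helper presumably reduces to the sketch below; the actual declaration lives in src/x64/lithium-codegen-x64.h, the previous file in this issue:

    // Hypothetical body, inferred from the inline condition it replaces:
    bool LCodeGen::DeoptEveryNTimes() {
      return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
    }

Factoring this out matters beyond tidiness: later chunks use the same predicate to decide whether a jump may be near at all.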
1632 } else { 1632 } else {
1633 if (index->value() < JSDate::kFirstUncachedField) { 1633 if (index->value() < JSDate::kFirstUncachedField) {
1634 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 1634 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1635 Operand stamp_operand = __ ExternalOperand(stamp); 1635 Operand stamp_operand = __ ExternalOperand(stamp);
1636 __ movq(kScratchRegister, stamp_operand); 1636 __ movq(kScratchRegister, stamp_operand);
1637 __ cmpq(kScratchRegister, FieldOperand(object, 1637 __ cmpq(kScratchRegister, FieldOperand(object,
1638 JSDate::kCacheStampOffset)); 1638 JSDate::kCacheStampOffset));
1639 __ j(not_equal, &runtime, Label::kNear); 1639 __ j(not_equal, &runtime, Label::kNear);
1640 __ movq(result, FieldOperand(object, JSDate::kValueOffset + 1640 __ movq(result, FieldOperand(object, JSDate::kValueOffset +
1641 kPointerSize * index->value())); 1641 kPointerSize * index->value()));
1642 __ jmp(&done); 1642 __ jmp(&done, Label::kNear);
1643 } 1643 }
1644 __ bind(&runtime); 1644 __ bind(&runtime);
1645 __ PrepareCallCFunction(2); 1645 __ PrepareCallCFunction(2);
1646 __ movq(arg_reg_1, object); 1646 __ movq(arg_reg_1, object);
1647 __ movq(arg_reg_2, index, RelocInfo::NONE64); 1647 __ movq(arg_reg_2, index, RelocInfo::NONE64);
1648 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 1648 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1649 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 1649 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1650 __ bind(&done); 1650 __ bind(&done);
1651 } 1651 }
1652 } 1652 }
(...skipping 855 matching lines...)
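
DoDateField illustrates the safe case for the hint: everything between the jmp at line 1642 and the bind of &done is the short runtime fallback, comfortably inside the signed 8-bit displacement. Condensed from the chunk above:

    __ jmp(&done, Label::kNear);   // skips only the short C-call fallback
    __ bind(&runtime);
    __ PrepareCallCFunction(2);
    // ... a handful of instructions, well under 127 bytes ...
    __ bind(&done);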
2508 }; 2508 };
2509 2509
2510 2510
2511 DeferredInstanceOfKnownGlobal* deferred; 2511 DeferredInstanceOfKnownGlobal* deferred;
2512 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); 2512 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
2513 2513
2514 Label done, false_result; 2514 Label done, false_result;
2515 Register object = ToRegister(instr->value()); 2515 Register object = ToRegister(instr->value());
2516 2516
2517 // A Smi is not an instance of anything. 2517 // A Smi is not an instance of anything.
2518 __ JumpIfSmi(object, &false_result); 2518 __ JumpIfSmi(object, &false_result, Label::kNear);
2519 2519
2520 // This is the inlined call site instanceof cache. The two occurrences of the 2520 // This is the inlined call site instanceof cache. The two occurrences of the
2521 // hole value will be patched to the last map/result pair generated by the 2521 // hole value will be patched to the last map/result pair generated by the
2522 // instanceof stub. 2522 // instanceof stub.
2523 Label cache_miss; 2523 Label cache_miss;
2524 // Use a temp register to avoid memory operands with variable lengths. 2524 // Use a temp register to avoid memory operands with variable lengths.
2525 Register map = ToRegister(instr->temp()); 2525 Register map = ToRegister(instr->temp());
2526 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); 2526 __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
2527 __ bind(deferred->map_check()); // Label for calculating code patching. 2527 __ bind(deferred->map_check()); // Label for calculating code patching.
2528 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); 2528 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
2529 __ movq(kScratchRegister, cache_cell, RelocInfo::CELL); 2529 __ movq(kScratchRegister, cache_cell, RelocInfo::CELL);
2530 __ cmpq(map, Operand(kScratchRegister, 0)); 2530 __ cmpq(map, Operand(kScratchRegister, 0));
2531 __ j(not_equal, &cache_miss, Label::kNear); 2531 __ j(not_equal, &cache_miss, Label::kNear);
2532 // Patched to load either true or false. 2532 // Patched to load either true or false.
2533 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); 2533 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
2534 #ifdef DEBUG 2534 #ifdef DEBUG
2535 // Check that the code size between patch label and patch sites is invariant. 2535 // Check that the code size between patch label and patch sites is invariant.
2536 Label end_of_patched_code; 2536 Label end_of_patched_code;
2537 __ bind(&end_of_patched_code); 2537 __ bind(&end_of_patched_code);
2538 ASSERT(true); 2538 ASSERT(true);
2539 #endif 2539 #endif
2540 __ jmp(&done); 2540 __ jmp(&done, Label::kNear);
2541 2541
2542 // The inlined call site cache did not match. Check for null and string 2542 // The inlined call site cache did not match. Check for null and string
2543 // before calling the deferred code. 2543 // before calling the deferred code.
2544 __ bind(&cache_miss); // Null is not an instance of anything. 2544 __ bind(&cache_miss); // Null is not an instance of anything.
2545 __ CompareRoot(object, Heap::kNullValueRootIndex); 2545 __ CompareRoot(object, Heap::kNullValueRootIndex);
2546 __ j(equal, &false_result, Label::kNear); 2546 __ j(equal, &false_result, Label::kNear);
2547 2547
2548 // String values are not instances of anything. 2548 // String values are not instances of anything.
2549 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); 2549 __ JumpIfNotString(object, kScratchRegister, deferred->entry());
2550 2550
(...skipping 34 matching lines...)
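
Note which jumps the patch deliberately leaves far: branches into deferred code, such as the JumpIfNotString at line 2549 targeting deferred->entry(). Deferred blocks are emitted out of line after the main instruction stream, so (an inference, not something the CL states) a rel8 displacement generally could not reach them:

    // Stays at the default far distance: the target is out-of-line deferred code.
    __ JumpIfNotString(object, kScratchRegister, deferred->entry());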
2585 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); 2585 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
2586 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); 2586 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2587 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 2587 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2588 // Move result to a register that survives the end of the 2588 // Move result to a register that survives the end of the
2589 // PushSafepointRegisterScope. 2589 // PushSafepointRegisterScope.
2590 __ movq(kScratchRegister, rax); 2590 __ movq(kScratchRegister, rax);
2591 } 2591 }
2592 __ testq(kScratchRegister, kScratchRegister); 2592 __ testq(kScratchRegister, kScratchRegister);
2593 Label load_false; 2593 Label load_false;
2594 Label done; 2594 Label done;
2595 __ j(not_zero, &load_false); 2595 __ j(not_zero, &load_false, Label::kNear);
2596 __ LoadRoot(rax, Heap::kTrueValueRootIndex); 2596 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2597 __ jmp(&done); 2597 __ jmp(&done, Label::kNear);
2598 __ bind(&load_false); 2598 __ bind(&load_false);
2599 __ LoadRoot(rax, Heap::kFalseValueRootIndex); 2599 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2600 __ bind(&done); 2600 __ bind(&done);
2601 } 2601 }
2602 2602
2603 2603
2604 void LCodeGen::DoCmpT(LCmpT* instr) { 2604 void LCodeGen::DoCmpT(LCmpT* instr) {
2605 Token::Value op = instr->op(); 2605 Token::Value op = instr->op();
2606 2606
2607 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 2607 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
(...skipping 544 matching lines...)
3152 3152
3153 3153
3154 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { 3154 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
3155 Register receiver = ToRegister(instr->receiver()); 3155 Register receiver = ToRegister(instr->receiver());
3156 Register function = ToRegister(instr->function()); 3156 Register function = ToRegister(instr->function());
3157 3157
3158 // If the receiver is null or undefined, we have to pass the global 3158 // If the receiver is null or undefined, we have to pass the global
3159 // object as a receiver to normal functions. Values have to be 3159 // object as a receiver to normal functions. Values have to be
3160 // passed unchanged to builtins and strict-mode functions. 3160 // passed unchanged to builtins and strict-mode functions.
3161 Label global_object, receiver_ok; 3161 Label global_object, receiver_ok;
3162 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
3162 3163
3163 // Do not transform the receiver to object for strict mode 3164 // Do not transform the receiver to object for strict mode
3164 // functions. 3165 // functions.
3165 __ movq(kScratchRegister, 3166 __ movq(kScratchRegister,
3166 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); 3167 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3167 __ testb(FieldOperand(kScratchRegister, 3168 __ testb(FieldOperand(kScratchRegister,
3168 SharedFunctionInfo::kStrictModeByteOffset), 3169 SharedFunctionInfo::kStrictModeByteOffset),
3169 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 3170 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
3170 __ j(not_equal, &receiver_ok, Label::kNear); 3171 __ j(not_equal, &receiver_ok, dist);
3171 3172
3172 // Do not transform the receiver to object for builtins. 3173 // Do not transform the receiver to object for builtins.
3173 __ testb(FieldOperand(kScratchRegister, 3174 __ testb(FieldOperand(kScratchRegister,
3174 SharedFunctionInfo::kNativeByteOffset), 3175 SharedFunctionInfo::kNativeByteOffset),
3175 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 3176 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
3176 __ j(not_equal, &receiver_ok, Label::kNear); 3177 __ j(not_equal, &receiver_ok, dist);
3177 3178
3178 // Normal function. Replace undefined or null with global receiver. 3179 // Normal function. Replace undefined or null with global receiver.
3179 __ CompareRoot(receiver, Heap::kNullValueRootIndex); 3180 __ CompareRoot(receiver, Heap::kNullValueRootIndex);
3180 __ j(equal, &global_object, Label::kNear); 3181 __ j(equal, &global_object, Label::kNear);
3181 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); 3182 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
3182 __ j(equal, &global_object, Label::kNear); 3183 __ j(equal, &global_object, Label::kNear);
3183 3184
3184 // The receiver should be a JS object. 3185 // The receiver should be a JS object.
3185 Condition is_smi = __ CheckSmi(receiver); 3186 Condition is_smi = __ CheckSmi(receiver);
3186 DeoptimizeIf(is_smi, instr->environment()); 3187 DeoptimizeIf(is_smi, instr->environment());
(...skipping 301 matching lines...)
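
DoWrapReceiver is presumably where the "fixed x64 assert fail" in this patch set's title comes from: the branches to &receiver_ok jump over DeoptimizeIf calls, and under --deopt-every-n-times each DeoptimizeIf emits the stress-counter sequence shown in the first chunk (pushfq, push rax, the ExternalOperand load, and so on), which can push the label past rel8 reach. Hence the distance is computed instead of hard-coded:

    Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
    __ j(not_equal, &receiver_ok, dist);  // near only when no stress code sits in between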
3488 __ j(below, &negative_sign, Label::kNear); 3489 __ j(below, &negative_sign, Label::kNear);
3489 3490
3490 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 3491 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3491 // Check for negative zero. 3492 // Check for negative zero.
3492 Label positive_sign; 3493 Label positive_sign;
3493 __ j(above, &positive_sign, Label::kNear); 3494 __ j(above, &positive_sign, Label::kNear);
3494 __ movmskpd(output_reg, input_reg); 3495 __ movmskpd(output_reg, input_reg);
3495 __ testq(output_reg, Immediate(1)); 3496 __ testq(output_reg, Immediate(1));
3496 DeoptimizeIf(not_zero, instr->environment()); 3497 DeoptimizeIf(not_zero, instr->environment());
3497 __ Set(output_reg, 0); 3498 __ Set(output_reg, 0);
3498 __ jmp(&done); 3499 __ jmp(&done, Label::kNear);
3499 __ bind(&positive_sign); 3500 __ bind(&positive_sign);
3500 } 3501 }
3501 3502
3502 // Use truncating instruction (OK because input is positive). 3503 // Use truncating instruction (OK because input is positive).
3503 __ cvttsd2si(output_reg, input_reg); 3504 __ cvttsd2si(output_reg, input_reg);
3504 // Overflow is signalled with minint. 3505 // Overflow is signalled with minint.
3505 __ cmpl(output_reg, Immediate(0x80000000)); 3506 __ cmpl(output_reg, Immediate(0x80000000));
3506 DeoptimizeIf(equal, instr->environment()); 3507 DeoptimizeIf(equal, instr->environment());
3507 __ jmp(&done, Label::kNear); 3508 __ jmp(&done, Label::kNear);
3508 3509
(...skipping 13 matching lines...)
3522 3523
3523 3524
3524 void LCodeGen::DoMathRound(LMathRound* instr) { 3525 void LCodeGen::DoMathRound(LMathRound* instr) {
3525 const XMMRegister xmm_scratch = double_scratch0(); 3526 const XMMRegister xmm_scratch = double_scratch0();
3526 Register output_reg = ToRegister(instr->result()); 3527 Register output_reg = ToRegister(instr->result());
3527 XMMRegister input_reg = ToDoubleRegister(instr->value()); 3528 XMMRegister input_reg = ToDoubleRegister(instr->value());
3528 static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5 3529 static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5
3529 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5 3530 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5
3530 3531
3531 Label done, round_to_zero, below_one_half, do_not_compensate, restore; 3532 Label done, round_to_zero, below_one_half, do_not_compensate, restore;
3533 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
3532 __ movq(kScratchRegister, one_half); 3534 __ movq(kScratchRegister, one_half);
3533 __ movq(xmm_scratch, kScratchRegister); 3535 __ movq(xmm_scratch, kScratchRegister);
3534 __ ucomisd(xmm_scratch, input_reg); 3536 __ ucomisd(xmm_scratch, input_reg);
3535 __ j(above, &below_one_half); 3537 __ j(above, &below_one_half, Label::kNear);
3536 3538
3537 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x). 3539 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x).
3538 __ addsd(xmm_scratch, input_reg); 3540 __ addsd(xmm_scratch, input_reg);
3539 __ cvttsd2si(output_reg, xmm_scratch); 3541 __ cvttsd2si(output_reg, xmm_scratch);
3540 // Overflow is signalled with minint. 3542 // Overflow is signalled with minint.
3541 __ cmpl(output_reg, Immediate(0x80000000)); 3543 __ cmpl(output_reg, Immediate(0x80000000));
3542 __ RecordComment("D2I conversion overflow"); 3544 __ RecordComment("D2I conversion overflow");
3543 DeoptimizeIf(equal, instr->environment()); 3545 DeoptimizeIf(equal, instr->environment());
3544 __ jmp(&done); 3546 __ jmp(&done, dist);
3545 3547
3546 __ bind(&below_one_half); 3548 __ bind(&below_one_half);
3547 __ movq(kScratchRegister, minus_one_half); 3549 __ movq(kScratchRegister, minus_one_half);
3548 __ movq(xmm_scratch, kScratchRegister); 3550 __ movq(xmm_scratch, kScratchRegister);
3549 __ ucomisd(xmm_scratch, input_reg); 3551 __ ucomisd(xmm_scratch, input_reg);
3550 __ j(below_equal, &round_to_zero); 3552 __ j(below_equal, &round_to_zero, Label::kNear);
3551 3553
3552 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then 3554 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then
3553 // compare and compensate. 3555 // compare and compensate.
3554 __ movq(kScratchRegister, input_reg); // Back up input_reg. 3556 __ movq(kScratchRegister, input_reg); // Back up input_reg.
3555 __ subsd(input_reg, xmm_scratch); 3557 __ subsd(input_reg, xmm_scratch);
3556 __ cvttsd2si(output_reg, input_reg); 3558 __ cvttsd2si(output_reg, input_reg);
3557 // Catch minint due to overflow, and to prevent overflow when compensating. 3559 // Catch minint due to overflow, and to prevent overflow when compensating.
3558 __ cmpl(output_reg, Immediate(0x80000000)); 3560 __ cmpl(output_reg, Immediate(0x80000000));
3559 __ RecordComment("D2I conversion overflow"); 3561 __ RecordComment("D2I conversion overflow");
3560 DeoptimizeIf(equal, instr->environment()); 3562 DeoptimizeIf(equal, instr->environment());
3561 3563
3562 __ Cvtlsi2sd(xmm_scratch, output_reg); 3564 __ Cvtlsi2sd(xmm_scratch, output_reg);
3563 __ ucomisd(input_reg, xmm_scratch); 3565 __ ucomisd(input_reg, xmm_scratch);
3564 __ j(equal, &restore, Label::kNear); 3566 __ j(equal, &restore, Label::kNear);
3565 __ subl(output_reg, Immediate(1)); 3567 __ subl(output_reg, Immediate(1));
3566 // No overflow because we already ruled out minint. 3568 // No overflow because we already ruled out minint.
3567 __ bind(&restore); 3569 __ bind(&restore);
3568 __ movq(input_reg, kScratchRegister); // Restore input_reg. 3570 __ movq(input_reg, kScratchRegister); // Restore input_reg.
3569 __ jmp(&done); 3571 __ jmp(&done, dist);
3570 3572
3571 __ bind(&round_to_zero); 3573 __ bind(&round_to_zero);
3572 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if 3574 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
3573 // we can ignore the difference between a result of -0 and +0. 3575 // we can ignore the difference between a result of -0 and +0.
3574 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 3576 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3575 __ movq(output_reg, input_reg); 3577 __ movq(output_reg, input_reg);
3576 __ testq(output_reg, output_reg); 3578 __ testq(output_reg, output_reg);
3577 __ RecordComment("Minus zero"); 3579 __ RecordComment("Minus zero");
3578 DeoptimizeIf(negative, instr->environment()); 3580 DeoptimizeIf(negative, instr->environment());
3579 } 3581 }
(...skipping 52 matching lines...)
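
DoMathRound shows how fine-grained that choice is: the long jumps to &done (lines 3546 and 3571) span several DeoptimizeIf sites and use the computed dist, while the short hops to &below_one_half, &round_to_zero, and &restore are hinted kNear outright; evidently those spans stay within rel8 reach even with a stress sequence in the middle. In outline:

    Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
    __ j(above, &below_one_half, Label::kNear);  // short span: always near
    // ...
    __ jmp(&done, dist);                         // long span across deopt sites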
3632 ASSERT(!instr->right()->IsDoubleRegister() || 3634 ASSERT(!instr->right()->IsDoubleRegister() ||
3633 ToDoubleRegister(instr->right()).is(xmm1)); 3635 ToDoubleRegister(instr->right()).is(xmm1));
3634 ASSERT(ToDoubleRegister(instr->left()).is(xmm2)); 3636 ASSERT(ToDoubleRegister(instr->left()).is(xmm2));
3635 ASSERT(ToDoubleRegister(instr->result()).is(xmm3)); 3637 ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
3636 3638
3637 if (exponent_type.IsSmi()) { 3639 if (exponent_type.IsSmi()) {
3638 MathPowStub stub(MathPowStub::TAGGED); 3640 MathPowStub stub(MathPowStub::TAGGED);
3639 __ CallStub(&stub); 3641 __ CallStub(&stub);
3640 } else if (exponent_type.IsTagged()) { 3642 } else if (exponent_type.IsTagged()) {
3641 Label no_deopt; 3643 Label no_deopt;
3642 __ JumpIfSmi(exponent, &no_deopt); 3644 __ JumpIfSmi(exponent, &no_deopt, Label::kNear);
3643 __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx); 3645 __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx);
3644 DeoptimizeIf(not_equal, instr->environment()); 3646 DeoptimizeIf(not_equal, instr->environment());
3645 __ bind(&no_deopt); 3647 __ bind(&no_deopt);
3646 MathPowStub stub(MathPowStub::TAGGED); 3648 MathPowStub stub(MathPowStub::TAGGED);
3647 __ CallStub(&stub); 3649 __ CallStub(&stub);
3648 } else if (exponent_type.IsInteger32()) { 3650 } else if (exponent_type.IsInteger32()) {
3649 MathPowStub stub(MathPowStub::INTEGER); 3651 MathPowStub stub(MathPowStub::INTEGER);
3650 __ CallStub(&stub); 3652 __ CallStub(&stub);
3651 } else { 3653 } else {
3652 ASSERT(exponent_type.IsDouble()); 3654 ASSERT(exponent_type.IsDouble());
(...skipping 242 matching lines...)
3895 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); 3897 ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode);
3896 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 3898 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3897 } else if (instr->arity() == 1) { 3899 } else if (instr->arity() == 1) {
3898 Label done; 3900 Label done;
3899 if (IsFastPackedElementsKind(kind)) { 3901 if (IsFastPackedElementsKind(kind)) {
3900 Label packed_case; 3902 Label packed_case;
3901 // We might need a change here 3903 // We might need a change here
3902 // look at the first argument 3904 // look at the first argument
3903 __ movq(rcx, Operand(rsp, 0)); 3905 __ movq(rcx, Operand(rsp, 0));
3904 __ testq(rcx, rcx); 3906 __ testq(rcx, rcx);
3905 __ j(zero, &packed_case); 3907 __ j(zero, &packed_case, Label::kNear);
3906 3908
3907 ElementsKind holey_kind = GetHoleyElementsKind(kind); 3909 ElementsKind holey_kind = GetHoleyElementsKind(kind);
3908 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, 3910 ArraySingleArgumentConstructorStub stub(holey_kind, context_mode,
3909 override_mode); 3911 override_mode);
3910 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 3912 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3911 __ jmp(&done); 3913 __ jmp(&done, Label::kNear);
3912 __ bind(&packed_case); 3914 __ bind(&packed_case);
3913 } 3915 }
3914 3916
3915 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); 3917 ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode);
3916 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 3918 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3917 __ bind(&done); 3919 __ bind(&done);
3918 } else { 3920 } else {
3919 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); 3921 ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode);
3920 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 3922 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3921 } 3923 }
(...skipping 275 matching lines...)
4197 // Sign extend key because it could be a 32 bit negative value 4199 // Sign extend key because it could be a 32 bit negative value
4198 // and the dehoisted address computation happens in 64 bits 4200 // and the dehoisted address computation happens in 64 bits
4199 __ movsxlq(key_reg, key_reg); 4201 __ movsxlq(key_reg, key_reg);
4200 } 4202 }
4201 } 4203 }
4202 4204
4203 if (instr->NeedsCanonicalization()) { 4205 if (instr->NeedsCanonicalization()) {
4204 Label have_value; 4206 Label have_value;
4205 4207
4206 __ ucomisd(value, value); 4208 __ ucomisd(value, value);
4207 __ j(parity_odd, &have_value); // NaN. 4209 __ j(parity_odd, &have_value, Label::kNear); // NaN.
4208 4210
4209 __ Set(kScratchRegister, BitCast<uint64_t>( 4211 __ Set(kScratchRegister, BitCast<uint64_t>(
4210 FixedDoubleArray::canonical_not_the_hole_nan_as_double())); 4212 FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
4211 __ movq(value, kScratchRegister); 4213 __ movq(value, kScratchRegister);
4212 4214
4213 __ bind(&have_value); 4215 __ bind(&have_value);
4214 } 4216 }
4215 4217
4216 Operand double_store_operand = BuildFastArrayOperand( 4218 Operand double_store_operand = BuildFastArrayOperand(
4217 instr->elements(), 4219 instr->elements(),
(...skipping 448 matching lines...)
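
A side note on the NaN test whose branch this chunk hints near: ucomisd sets the parity flag only for an unordered comparison, and a double compares unordered against itself exactly when it is a NaN, so parity_odd (PF clear) skips the canonicalization for every ordinary value. Annotated; the flag semantics are standard SSE2 behavior, not something the patch states:

    __ ucomisd(value, value);                     // unordered (PF=1) iff value is NaN
    __ j(parity_odd, &have_value, Label::kNear);  // ordered: keep value unchanged
    // Fall through only for NaN: replace the payload with the canonical NaN.
    __ Set(kScratchRegister, BitCast<uint64_t>(
        FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
    __ movq(value, kScratchRegister);
    __ bind(&have_value);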
4666 4668
4667 // Heap number map check. 4669 // Heap number map check.
4668 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), 4670 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
4669 Heap::kHeapNumberMapRootIndex); 4671 Heap::kHeapNumberMapRootIndex);
4670 4672
4671 // On x64 it is safe to load at heap number offset before evaluating the map 4673 // On x64 it is safe to load at heap number offset before evaluating the map
4672 // check, since all heap objects are at least two words long. 4674 // check, since all heap objects are at least two words long.
4673 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); 4675 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
4674 4676
4675 if (can_convert_undefined_to_nan) { 4677 if (can_convert_undefined_to_nan) {
4676 __ j(not_equal, &convert); 4678 __ j(not_equal, &convert, Label::kNear);
4677 } else { 4679 } else {
4678 DeoptimizeIf(not_equal, env); 4680 DeoptimizeIf(not_equal, env);
4679 } 4681 }
4680 4682
4681 if (deoptimize_on_minus_zero) { 4683 if (deoptimize_on_minus_zero) {
4682 XMMRegister xmm_scratch = double_scratch0(); 4684 XMMRegister xmm_scratch = double_scratch0();
4683 __ xorps(xmm_scratch, xmm_scratch); 4685 __ xorps(xmm_scratch, xmm_scratch);
4684 __ ucomisd(xmm_scratch, result_reg); 4686 __ ucomisd(xmm_scratch, result_reg);
4685 __ j(not_equal, &done, Label::kNear); 4687 __ j(not_equal, &done, Label::kNear);
4686 __ movmskpd(kScratchRegister, result_reg); 4688 __ movmskpd(kScratchRegister, result_reg);
(...skipping 277 matching lines...)
4964 DeferredCheckMaps* deferred = NULL; 4966 DeferredCheckMaps* deferred = NULL;
4965 if (instr->hydrogen()->has_migration_target()) { 4967 if (instr->hydrogen()->has_migration_target()) {
4966 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); 4968 deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
4967 __ bind(deferred->check_maps()); 4969 __ bind(deferred->check_maps());
4968 } 4970 }
4969 4971
4970 UniqueSet<Map> map_set = instr->hydrogen()->map_set(); 4972 UniqueSet<Map> map_set = instr->hydrogen()->map_set();
4971 Label success; 4973 Label success;
4972 for (int i = 0; i < map_set.size() - 1; i++) { 4974 for (int i = 0; i < map_set.size() - 1; i++) {
4973 Handle<Map> map = map_set.at(i).handle(); 4975 Handle<Map> map = map_set.at(i).handle();
4974 __ CompareMap(reg, map, &success); 4976 __ CompareMap(reg, map);
4975 __ j(equal, &success); 4977 __ j(equal, &success, Label::kNear);
4976 } 4978 }
4977 4979
4978 Handle<Map> map = map_set.at(map_set.size() - 1).handle(); 4980 Handle<Map> map = map_set.at(map_set.size() - 1).handle();
4979 __ CompareMap(reg, map, &success); 4981 __ CompareMap(reg, map);
4980 if (instr->hydrogen()->has_migration_target()) { 4982 if (instr->hydrogen()->has_migration_target()) {
4981 __ j(not_equal, deferred->entry()); 4983 __ j(not_equal, deferred->entry());
4982 } else { 4984 } else {
4983 DeoptimizeIf(not_equal, instr->environment()); 4985 DeoptimizeIf(not_equal, instr->environment());
4984 } 4986 }
4985 4987
4986 __ bind(&success); 4988 __ bind(&success);
4987 } 4989 }
4988 4990
4989 4991
(...skipping 11 matching lines...)
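
The CheckMaps chunk also picks up the companion change from src/x64/macro-assembler-x64.h, the next file in this issue: CompareMap apparently loses its early-success label parameter, so the caller now emits the branch itself and can pick the distance. Presumably the old overload branched internally at a distance the caller could not control:

    __ CompareMap(reg, map);              // now only sets the flags
    __ j(equal, &success, Label::kNear);  // caller chooses the distance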
5001 __ ClampUint8(value_reg); 5003 __ ClampUint8(value_reg);
5002 } 5004 }
5003 5005
5004 5006
5005 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { 5007 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
5006 ASSERT(instr->unclamped()->Equals(instr->result())); 5008 ASSERT(instr->unclamped()->Equals(instr->result()));
5007 Register input_reg = ToRegister(instr->unclamped()); 5009 Register input_reg = ToRegister(instr->unclamped());
5008 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); 5010 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
5009 XMMRegister xmm_scratch = double_scratch0(); 5011 XMMRegister xmm_scratch = double_scratch0();
5010 Label is_smi, done, heap_number; 5012 Label is_smi, done, heap_number;
5011 5013 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
5012 __ JumpIfSmi(input_reg, &is_smi); 5014 __ JumpIfSmi(input_reg, &is_smi, dist);
5013 5015
5014 // Check for heap number 5016 // Check for heap number
5015 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 5017 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
5016 factory()->heap_number_map()); 5018 factory()->heap_number_map());
5017 __ j(equal, &heap_number, Label::kNear); 5019 __ j(equal, &heap_number, Label::kNear);
5018 5020
5019 // Check for undefined. Undefined is converted to zero for clamping 5021 // Check for undefined. Undefined is converted to zero for clamping
5020 // conversions. 5022 // conversions.
5021 __ Cmp(input_reg, factory()->undefined_value()); 5023 __ Cmp(input_reg, factory()->undefined_value());
5022 DeoptimizeIf(not_equal, instr->environment()); 5024 DeoptimizeIf(not_equal, instr->environment());
(...skipping 136 matching lines...)
5159 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); 5161 __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
5160 __ Push(instr->hydrogen()->pattern()); 5162 __ Push(instr->hydrogen()->pattern());
5161 __ Push(instr->hydrogen()->flags()); 5163 __ Push(instr->hydrogen()->flags());
5162 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); 5164 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
5163 __ movq(rbx, rax); 5165 __ movq(rbx, rax);
5164 5166
5165 __ bind(&materialized); 5167 __ bind(&materialized);
5166 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 5168 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
5167 Label allocated, runtime_allocate; 5169 Label allocated, runtime_allocate;
5168 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); 5170 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
5169 __ jmp(&allocated); 5171 __ jmp(&allocated, Label::kNear);
5170 5172
5171 __ bind(&runtime_allocate); 5173 __ bind(&runtime_allocate);
5172 __ push(rbx); 5174 __ push(rbx);
5173 __ Push(Smi::FromInt(size)); 5175 __ Push(Smi::FromInt(size));
5174 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 5176 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5175 __ pop(rbx); 5177 __ pop(rbx);
5176 5178
5177 __ bind(&allocated); 5179 __ bind(&allocated);
5178 // Copy the content into the newly allocated memory. 5180 // Copy the content into the newly allocated memory.
5179 // (Unroll copy loop once for better throughput). 5181 // (Unroll copy loop once for better throughput).
(...skipping 312 matching lines...)
5492 __ bind(&use_cache); 5494 __ bind(&use_cache);
5493 } 5495 }
5494 5496
5495 5497
5496 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { 5498 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5497 Register map = ToRegister(instr->map()); 5499 Register map = ToRegister(instr->map());
5498 Register result = ToRegister(instr->result()); 5500 Register result = ToRegister(instr->result());
5499 Label load_cache, done; 5501 Label load_cache, done;
5500 __ EnumLength(result, map); 5502 __ EnumLength(result, map);
5501 __ Cmp(result, Smi::FromInt(0)); 5503 __ Cmp(result, Smi::FromInt(0));
5502 __ j(not_equal, &load_cache); 5504 __ j(not_equal, &load_cache, Label::kNear);
5503 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex); 5505 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
5504 __ jmp(&done); 5506 __ jmp(&done, Label::kNear);
5505 __ bind(&load_cache); 5507 __ bind(&load_cache);
5506 __ LoadInstanceDescriptors(map, result); 5508 __ LoadInstanceDescriptors(map, result);
5507 __ movq(result, 5509 __ movq(result,
5508 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); 5510 FieldOperand(result, DescriptorArray::kEnumCacheOffset));
5509 __ movq(result, 5511 __ movq(result,
5510 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); 5512 FieldOperand(result, FixedArray::SizeFor(instr->idx())));
5511 __ bind(&done); 5513 __ bind(&done);
5512 Condition cc = masm()->CheckSmi(result); 5514 Condition cc = masm()->CheckSmi(result);
5513 DeoptimizeIf(cc, instr->environment()); 5515 DeoptimizeIf(cc, instr->environment());
5514 } 5516 }
5515 5517
5516 5518
5517 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { 5519 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5518 Register object = ToRegister(instr->value()); 5520 Register object = ToRegister(instr->value());
5519 __ cmpq(ToRegister(instr->map()), 5521 __ cmpq(ToRegister(instr->map()),
5520 FieldOperand(object, HeapObject::kMapOffset)); 5522 FieldOperand(object, HeapObject::kMapOffset));
5521 DeoptimizeIf(not_equal, instr->environment()); 5523 DeoptimizeIf(not_equal, instr->environment());
5522 } 5524 }
5523 5525
5524 5526
5525 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { 5527 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5526 Register object = ToRegister(instr->object()); 5528 Register object = ToRegister(instr->object());
5527 Register index = ToRegister(instr->index()); 5529 Register index = ToRegister(instr->index());
5528 5530
5529 Label out_of_object, done; 5531 Label out_of_object, done;
5530 __ SmiToInteger32(index, index); 5532 __ SmiToInteger32(index, index);
5531 __ cmpl(index, Immediate(0)); 5533 __ cmpl(index, Immediate(0));
5532 __ j(less, &out_of_object); 5534 __ j(less, &out_of_object, Label::kNear);
5533 __ movq(object, FieldOperand(object, 5535 __ movq(object, FieldOperand(object,
5534 index, 5536 index,
5535 times_pointer_size, 5537 times_pointer_size,
5536 JSObject::kHeaderSize)); 5538 JSObject::kHeaderSize));
5537 __ jmp(&done, Label::kNear); 5539 __ jmp(&done, Label::kNear);
5538 5540
5539 __ bind(&out_of_object); 5541 __ bind(&out_of_object);
5540 __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset)); 5542 __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset));
5541 __ negl(index); 5543 __ negl(index);
5542 // Index is now equal to out of object property index plus 1. 5544 // Index is now equal to out of object property index plus 1.
5543 __ movq(object, FieldOperand(object, 5545 __ movq(object, FieldOperand(object,
5544 index, 5546 index,
5545 times_pointer_size, 5547 times_pointer_size,
5546 FixedArray::kHeaderSize - kPointerSize)); 5548 FixedArray::kHeaderSize - kPointerSize));
5547 __ bind(&done); 5549 __ bind(&done);
5548 } 5550 }
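
The out-of-object path above encodes a small trick worth spelling out (my reading of the comment at line 5544): after negl, index holds the out-of-object property index plus one, and the -kPointerSize folded into the displacement cancels that plus-one, so a single movq addresses the backing store:

    __ negl(index);  // index = -(original); equals property index + 1
    __ movq(object, FieldOperand(object, index, times_pointer_size,
                                 FixedArray::kHeaderSize - kPointerSize));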
5549 5551
5550 5552
5551 #undef __ 5553 #undef __
5552 5554
5553 } } // namespace v8::internal 5555 } } // namespace v8::internal
5554 5556
5555 #endif // V8_TARGET_ARCH_X64 5557 #endif // V8_TARGET_ARCH_X64