Chromium Code Reviews

Diff: src/x64/lithium-codegen-x64.cc

Issue 47533002: Use nearlabel AFAP in lithium codegen (Closed) Base URL: git://github.com/v8/v8.git@master
Patch Set: Created 7 years, 1 month ago
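This CL replaces far jump-label emission with near labels wherever the bound label is close enough ("AFAP" = as far as possible). As a minimal sketch of the pattern, assuming the Label::kNear semantics of V8's x64 assembler: the hint makes j()/jmp() emit a short jump with an 8-bit displacement (2 bytes) rather than the default 32-bit form (5-6 bytes), and it is only valid while the label is bound within roughly ±127 bytes of the jump site.

    // Sketch only; the labels mirror those used in the hunks below.
    Label runtime, done;
    __ j(not_equal, &runtime, Label::kNear);  // short jcc: 2 bytes vs. 6
    __ jmp(&done, Label::kNear);              // short jmp: 2 bytes vs. 5
    // ... no more than ~127 bytes of generated code ...
    __ bind(&done);  // binding out of rel8 range asserts in debug builds

The saving is small per site, but this file emits these sequences for every compiled function, so the shorter encodings add up.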
   1  // Copyright 2013 the V8 project authors. All rights reserved.
   2  // Redistribution and use in source and binary forms, with or without
   3  // modification, are permitted provided that the following conditions are
   4  // met:
   5  //
   6  //     * Redistributions of source code must retain the above copyright
   7  //       notice, this list of conditions and the following disclaimer.
   8  //     * Redistributions in binary form must reproduce the above
   9  //       copyright notice, this list of conditions and the following
  10  //       disclaimer in the documentation and/or other materials provided
(...skipping 1601 matching lines...)
1612    } else {
1613      if (index->value() < JSDate::kFirstUncachedField) {
1614        ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1615        Operand stamp_operand = __ ExternalOperand(stamp);
1616        __ movq(kScratchRegister, stamp_operand);
1617        __ cmpq(kScratchRegister, FieldOperand(object,
1618                                               JSDate::kCacheStampOffset));
1619        __ j(not_equal, &runtime, Label::kNear);
1620        __ movq(result, FieldOperand(object, JSDate::kValueOffset +
1621                                             kPointerSize * index->value()));
1622 -      __ jmp(&done);
1622 +      __ jmp(&done, Label::kNear);
1623      }
1624      __ bind(&runtime);
1625      __ PrepareCallCFunction(2);
1626      __ movq(arg_reg_1, object);
1627      __ movq(arg_reg_2, index, RelocInfo::NONE64);
1628      __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1629      __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1630      __ bind(&done);
1631    }
1632  }
(...skipping 799 matching lines...)
2432    };
2433
2434
2435    DeferredInstanceOfKnownGlobal* deferred;
2436    deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
2437
2438    Label done, false_result;
2439    Register object = ToRegister(instr->value());
2440
2441    // A Smi is not an instance of anything.
2442 -  __ JumpIfSmi(object, &false_result);
2442 +  __ JumpIfSmi(object, &false_result, Label::kNear);
2443
2444    // This is the inlined call site instanceof cache. The two occurrences of the
2445    // hole value will be patched to the last map/result pair generated by the
2446    // instanceof stub.
2447    Label cache_miss;
2448    // Use a temp register to avoid memory operands with variable lengths.
2449    Register map = ToRegister(instr->temp());
2450    __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
2451    __ bind(deferred->map_check());  // Label for calculating code patching.
2452    Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
2453    __ movq(kScratchRegister, cache_cell, RelocInfo::CELL);
2454    __ cmpq(map, Operand(kScratchRegister, 0));
2455    __ j(not_equal, &cache_miss, Label::kNear);
2456    // Patched to load either true or false.
2457    __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
2458  #ifdef DEBUG
2459    // Check that the code size between patch label and patch sites is invariant.
2460    Label end_of_patched_code;
2461    __ bind(&end_of_patched_code);
2462    ASSERT(true);
2463  #endif
2464 -  __ jmp(&done);
2464 +  __ jmp(&done, Label::kNear);
2465
2466    // The inlined call site cache did not match. Check for null and string
2467    // before calling the deferred code.
2468    __ bind(&cache_miss);  // Null is not an instance of anything.
2469    __ CompareRoot(object, Heap::kNullValueRootIndex);
2470    __ j(equal, &false_result, Label::kNear);
2471
2472    // String values are not instances of anything.
2473    __ JumpIfNotString(object, kScratchRegister, deferred->entry());
2474
(...skipping 34 matching lines...)
2509      ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
2510      LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2511      safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2512      // Move result to a register that survives the end of the
2513      // PushSafepointRegisterScope.
2514      __ movq(kScratchRegister, rax);
2515    }
2516    __ testq(kScratchRegister, kScratchRegister);
2517    Label load_false;
2518    Label done;
2519 -  __ j(not_zero, &load_false);
2519 +  __ j(not_zero, &load_false, Label::kNear);
2520    __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2521 -  __ jmp(&done);
2521 +  __ jmp(&done, Label::kNear);
2522    __ bind(&load_false);
2523    __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2524    __ bind(&done);
2525  }
2526
2527
2528  void LCodeGen::DoCmpT(LCmpT* instr) {
2529    Token::Value op = instr->op();
2530
2531    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
(...skipping 880 matching lines...)
3412    __ j(below, &negative_sign, Label::kNear);
3413
3414    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3415      // Check for negative zero.
3416      Label positive_sign;
3417      __ j(above, &positive_sign, Label::kNear);
3418      __ movmskpd(output_reg, input_reg);
3419      __ testq(output_reg, Immediate(1));
3420      DeoptimizeIf(not_zero, instr->environment());
3421      __ Set(output_reg, 0);
3422 -    __ jmp(&done);
3422 +    __ jmp(&done, Label::kNear);
3423      __ bind(&positive_sign);
3424    }
3425
3426    // Use truncating instruction (OK because input is positive).
3427    __ cvttsd2si(output_reg, input_reg);
3428    // Overflow is signalled with minint.
3429    __ cmpl(output_reg, Immediate(0x80000000));
3430    DeoptimizeIf(equal, instr->environment());
3431    __ jmp(&done, Label::kNear);
(...skipping 16 matching lines...)
3449    const XMMRegister xmm_scratch = double_scratch0();
3450    Register output_reg = ToRegister(instr->result());
3451    XMMRegister input_reg = ToDoubleRegister(instr->value());
3452    static int64_t one_half = V8_INT64_C(0x3FE0000000000000);        // 0.5
3453    static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000);  // -0.5
3454
3455    Label done, round_to_zero, below_one_half, do_not_compensate, restore;
3456    __ movq(kScratchRegister, one_half, RelocInfo::NONE64);
3457    __ movq(xmm_scratch, kScratchRegister);
3458    __ ucomisd(xmm_scratch, input_reg);
3459 -  __ j(above, &below_one_half);
3459 +  __ j(above, &below_one_half, Label::kNear);
3460
3461    // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x).
3462    __ addsd(xmm_scratch, input_reg);
3463    __ cvttsd2si(output_reg, xmm_scratch);
3464    // Overflow is signalled with minint.
3465    __ cmpl(output_reg, Immediate(0x80000000));
3466    __ RecordComment("D2I conversion overflow");
3467    DeoptimizeIf(equal, instr->environment());
3468 -  __ jmp(&done);
3468 +  __ jmp(&done, Label::kNear);
3469
3470    __ bind(&below_one_half);
3471    __ movq(kScratchRegister, minus_one_half, RelocInfo::NONE64);
3472    __ movq(xmm_scratch, kScratchRegister);
3473    __ ucomisd(xmm_scratch, input_reg);
3474 -  __ j(below_equal, &round_to_zero);
3474 +  __ j(below_equal, &round_to_zero, Label::kNear);
3475
3476    // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then
3477    // compare and compensate.
3478    __ movq(kScratchRegister, input_reg);  // Back up input_reg.
3479    __ subsd(input_reg, xmm_scratch);
3480    __ cvttsd2si(output_reg, input_reg);
3481    // Catch minint due to overflow, and to prevent overflow when compensating.
3482    __ cmpl(output_reg, Immediate(0x80000000));
3483    __ RecordComment("D2I conversion overflow");
3484    DeoptimizeIf(equal, instr->environment());
3485
3486    __ Cvtlsi2sd(xmm_scratch, output_reg);
3487    __ ucomisd(input_reg, xmm_scratch);
3488    __ j(equal, &restore, Label::kNear);
3489    __ subl(output_reg, Immediate(1));
3490    // No overflow because we already ruled out minint.
3491    __ bind(&restore);
3492    __ movq(input_reg, kScratchRegister);  // Restore input_reg.
3493 -  __ jmp(&done);
3493 +  __ jmp(&done, Label::kNear);
3494
3495    __ bind(&round_to_zero);
3496    // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
3497    // we can ignore the difference between a result of -0 and +0.
3498    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3499      __ movq(output_reg, input_reg);
3500      __ testq(output_reg, output_reg);
3501      __ RecordComment("Minus zero");
3502      DeoptimizeIf(negative, instr->environment());
3503    }
(...skipping 52 matching lines...)
3556    ASSERT(!instr->right()->IsDoubleRegister() ||
3557           ToDoubleRegister(instr->right()).is(xmm1));
3558    ASSERT(ToDoubleRegister(instr->left()).is(xmm2));
3559    ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
3560
3561    if (exponent_type.IsSmi()) {
3562      MathPowStub stub(MathPowStub::TAGGED);
3563      __ CallStub(&stub);
3564    } else if (exponent_type.IsTagged()) {
3565      Label no_deopt;
3566 -    __ JumpIfSmi(exponent, &no_deopt);
3566 +    __ JumpIfSmi(exponent, &no_deopt, Label::kNear);
3567      __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx);
3568      DeoptimizeIf(not_equal, instr->environment());
3569      __ bind(&no_deopt);
3570      MathPowStub stub(MathPowStub::TAGGED);
3571      __ CallStub(&stub);
3572    } else if (exponent_type.IsInteger32()) {
3573      MathPowStub stub(MathPowStub::INTEGER);
3574      __ CallStub(&stub);
3575    } else {
3576      ASSERT(exponent_type.IsDouble());
(...skipping 243 matching lines...)
3820      ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode);
3821      CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3822    } else if (instr->arity() == 1) {
3823      Label done;
3824      if (IsFastPackedElementsKind(kind)) {
3825        Label packed_case;
3826        // We might need a change here
3827        // look at the first argument
3828        __ movq(rcx, Operand(rsp, 0));
3829        __ testq(rcx, rcx);
3830 -      __ j(zero, &packed_case);
3830 +      __ j(zero, &packed_case, Label::kNear);
3831
3832        ElementsKind holey_kind = GetHoleyElementsKind(kind);
3833        ArraySingleArgumentConstructorStub stub(holey_kind, context_mode,
3834                                                override_mode);
3835        CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3836 -      __ jmp(&done);
3836 +      __ jmp(&done, Label::kNear);
3837        __ bind(&packed_case);
3838      }
3839
3840      ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode);
3841      CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3842      __ bind(&done);
3843    } else {
3844      ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode);
3845      CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3846    }
(...skipping 275 matching lines...)
4122        // Sign extend key because it could be a 32 bit negative value
4123        // and the dehoisted address computation happens in 64 bits
4124        __ movsxlq(key_reg, key_reg);
4125      }
4126    }
4127
4128    if (instr->NeedsCanonicalization()) {
4129      Label have_value;
4130
4131      __ ucomisd(value, value);
4132 -    __ j(parity_odd, &have_value);  // NaN.
4132 +    __ j(parity_odd, &have_value, Label::kNear);  // NaN.
4133
4134      __ Set(kScratchRegister, BitCast<uint64_t>(
4135          FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
4136      __ movq(value, kScratchRegister);
4137
4138      __ bind(&have_value);
4139    }
4140
4141    Operand double_store_operand = BuildFastArrayOperand(
4142        instr->elements(),
(...skipping 432 matching lines...)
4575
4576    // Heap number map check.
4577    __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
4578                   Heap::kHeapNumberMapRootIndex);
4579
4580    // On x64 it is safe to load at heap number offset before evaluating the map
4581    // check, since all heap objects are at least two words long.
4582    __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
4583
4584    if (can_convert_undefined_to_nan) {
4585 -    __ j(not_equal, &convert);
4585 +    __ j(not_equal, &convert, Label::kNear);
4586    } else {
4587      DeoptimizeIf(not_equal, env);
4588    }
4589
4590    if (deoptimize_on_minus_zero) {
4591      XMMRegister xmm_scratch = double_scratch0();
4592      __ xorps(xmm_scratch, xmm_scratch);
4593      __ ucomisd(xmm_scratch, result_reg);
4594      __ j(not_equal, &done, Label::kNear);
4595      __ movmskpd(kScratchRegister, result_reg);
(...skipping 277 matching lines...)
4873    DeferredCheckMaps* deferred = NULL;
4874    if (instr->hydrogen()->has_migration_target()) {
4875      deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
4876      __ bind(deferred->check_maps());
4877    }
4878
4879    UniqueSet<Map> map_set = instr->hydrogen()->map_set();
4880    Label success;
4881    for (int i = 0; i < map_set.size() - 1; i++) {
4882      Handle<Map> map = map_set.at(i).handle();
4883 -    __ CompareMap(reg, map, &success);
4883 +    __ CompareMap(reg, map);
4884 -    __ j(equal, &success);
4884 +    __ j(equal, &success, Label::kNear);
4885    }
4886
4887    Handle<Map> map = map_set.at(map_set.size() - 1).handle();
4888 -  __ CompareMap(reg, map, &success);
4888 +  __ CompareMap(reg, map);
4889    if (instr->hydrogen()->has_migration_target()) {
4890      __ j(not_equal, deferred->entry());
4891    } else {
4892      DeoptimizeIf(not_equal, instr->environment());
4893    }
4894
4895    __ bind(&success);
4896  }
4897
4898
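Note on the CompareMap change above: the early-success label argument is dropped from the macro, and the call sites now emit the success jump themselves. A sketch of the before/after, with the rationale inferred from this diff: once the jump is explicit at the call site, lithium codegen can choose the near form, since &success is bound only a few instructions later.

    // Before: the macro took the label and could jump on success itself.
    __ CompareMap(reg, map, &success);
    // After: the macro only sets flags; the caller owns the (near) jump.
    __ CompareMap(reg, map);
    __ j(equal, &success, Label::kNear);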
(...skipping 12 matching lines...)
4911  }
4912
4913
4914  void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
4915    ASSERT(instr->unclamped()->Equals(instr->result()));
4916    Register input_reg = ToRegister(instr->unclamped());
4917    XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
4918    XMMRegister xmm_scratch = double_scratch0();
4919    Label is_smi, done, heap_number;
4920
4921 -  __ JumpIfSmi(input_reg, &is_smi);
4921 +  __ JumpIfSmi(input_reg, &is_smi, Label::kNear);
4922
4923    // Check for heap number
4924    __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
4925           factory()->heap_number_map());
4926    __ j(equal, &heap_number, Label::kNear);
4927
4928    // Check for undefined. Undefined is converted to zero for clamping
4929    // conversions.
4930    __ Cmp(input_reg, factory()->undefined_value());
4931    DeoptimizeIf(not_equal, instr->environment());
(...skipping 136 matching lines...)
5068    __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
5069    __ Push(instr->hydrogen()->pattern());
5070    __ Push(instr->hydrogen()->flags());
5071    CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
5072    __ movq(rbx, rax);
5073
5074    __ bind(&materialized);
5075    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
5076    Label allocated, runtime_allocate;
5077    __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
5078 -  __ jmp(&allocated);
5078 +  __ jmp(&allocated, Label::kNear);
5079
5080    __ bind(&runtime_allocate);
5081    __ push(rbx);
5082    __ Push(Smi::FromInt(size));
5083    CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5084    __ pop(rbx);
5085
5086    __ bind(&allocated);
5087    // Copy the content into the newly allocated memory.
5088    // (Unroll copy loop once for better throughput).
(...skipping 312 matching lines...)
5401    __ bind(&use_cache);
5402  }
5403
5404
5405  void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5406    Register map = ToRegister(instr->map());
5407    Register result = ToRegister(instr->result());
5408    Label load_cache, done;
5409    __ EnumLength(result, map);
5410    __ Cmp(result, Smi::FromInt(0));
5411 -  __ j(not_equal, &load_cache);
5411 +  __ j(not_equal, &load_cache, Label::kNear);
5412    __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
5413 -  __ jmp(&done);
5413 +  __ jmp(&done, Label::kNear);
5414    __ bind(&load_cache);
5415    __ LoadInstanceDescriptors(map, result);
5416    __ movq(result,
5417            FieldOperand(result, DescriptorArray::kEnumCacheOffset));
5418    __ movq(result,
5419            FieldOperand(result, FixedArray::SizeFor(instr->idx())));
5420    __ bind(&done);
5421    Condition cc = masm()->CheckSmi(result);
5422    DeoptimizeIf(cc, instr->environment());
5423  }
5424
5425
5426  void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5427    Register object = ToRegister(instr->value());
5428    __ cmpq(ToRegister(instr->map()),
5429            FieldOperand(object, HeapObject::kMapOffset));
5430    DeoptimizeIf(not_equal, instr->environment());
5431  }
5432
5433
5434  void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5435    Register object = ToRegister(instr->object());
5436    Register index = ToRegister(instr->index());
5437
5438    Label out_of_object, done;
5439    __ SmiToInteger32(index, index);
5440    __ cmpl(index, Immediate(0));
5441 -  __ j(less, &out_of_object);
5441 +  __ j(less, &out_of_object, Label::kNear);
5442    __ movq(object, FieldOperand(object,
5443                                 index,
5444                                 times_pointer_size,
5445                                 JSObject::kHeaderSize));
5446    __ jmp(&done, Label::kNear);
5447
5448    __ bind(&out_of_object);
5449    __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset));
5450    __ negl(index);
5451    // Index is now equal to out of object property index plus 1.
5452    __ movq(object, FieldOperand(object,
5453                                 index,
5454                                 times_pointer_size,
5455                                 FixedArray::kHeaderSize - kPointerSize));
5456    __ bind(&done);
5457  }
5458
5459
5460  #undef __
5461
5462  } }  // namespace v8::internal
5463
5464  #endif  // V8_TARGET_ARCH_X64