Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 215 matching lines...)
226 __ ret(3 * kPointerSize); 226 __ ret(3 * kPointerSize);
227 227
228 __ bind(&slow_case); 228 __ bind(&slow_case);
229 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); 229 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
230 } 230 }
231 231
232 232
233 // The stub expects its argument on the stack and returns its result in tos_: 233 // The stub expects its argument on the stack and returns its result in tos_:
234 // zero for false, and a non-zero value for true. 234 // zero for false, and a non-zero value for true.
235 void ToBooleanStub::Generate(MacroAssembler* masm) { 235 void ToBooleanStub::Generate(MacroAssembler* masm) {
236 // This stub overrides SometimesSetsUpAFrame() to return false. That means
237 // we cannot call anything that could cause a GC from this stub.
236 Label patch; 238 Label patch;
237 const Register argument = rax; 239 const Register argument = rax;
238 const Register map = rdx; 240 const Register map = rdx;
239 241
240 if (!types_.IsEmpty()) { 242 if (!types_.IsEmpty()) {
241 __ movq(argument, Operand(rsp, 1 * kPointerSize)); 243 __ movq(argument, Operand(rsp, 1 * kPointerSize));
242 } 244 }
243 245
244 // undefined -> false 246 // undefined -> false
245 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); 247 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
(...skipping 75 matching lines...)
321 __ Set(tos_, 0); 323 __ Set(tos_, 0);
322 __ ret(1 * kPointerSize); 324 __ ret(1 * kPointerSize);
323 __ bind(&not_heap_number); 325 __ bind(&not_heap_number);
324 } 326 }
325 327
326 __ bind(&patch); 328 __ bind(&patch);
327 GenerateTypeTransition(masm); 329 GenerateTypeTransition(masm);
328 } 330 }
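
ToBooleanStub::Generate above is a type-feedback stub: it takes its argument from the stack, returns the result in tos_ (zero for false, non-zero for true), and only emits inline checks for the value classes recorded in types_; any other input jumps to &patch so GenerateTypeTransition can regenerate the stub with the new class included. A minimal plain-C++ sketch of that dispatch shape (the names mirror the stub's Type enum, but the bit layout here is illustrative only, not V8's actual encoding):

    #include <cstdint>

    enum SeenType : uint8_t {
      kSeenUndefined  = 1 << 0,
      kSeenBool       = 1 << 1,
      kSeenSmi        = 1 << 2,
      kSeenHeapNumber = 1 << 3
    };

    // Returns true and fills |result| (0 = false, non-zero = true, matching
    // the tos_ convention) when the input's class has been seen before;
    // returns false to signal the analogue of jumping to &patch.
    bool TryToBoolean(uint8_t seen, SeenType cls, bool value_is_truthy,
                      int* result) {
      if ((seen & cls) == 0) return false;  // unseen class: type transition
      *result = value_is_truthy ? 1 : 0;
      return true;
    }

For example, TryToBoolean(kSeenSmi | kSeenBool, kSeenSmi, false, &r) sets r to 0, while asking about kSeenHeapNumber under that same mask returns false and the caller would have to "patch" first.
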
329 331
330 332
333 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
334 __ PushCallerSaved(save_doubles_);
335 const int argument_count = 1;
336 __ PrepareCallCFunction(argument_count);
337 #ifdef _WIN64
338 __ LoadAddress(rcx, ExternalReference::isolate_address());
339 #else
340 __ LoadAddress(rdi, ExternalReference::isolate_address());
341 #endif
342
343 AllowExternalCallThatCantCauseGC scope(masm);
344 __ CallCFunction(
345 ExternalReference::store_buffer_overflow_function(masm->isolate()),
346 argument_count);
347 __ PopCallerSaved(save_doubles_);
348 __ ret(0);
349 }
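
The new StoreBufferOverflowStub saves the caller-saved registers, then calls out to C with the isolate as its only argument; the #ifdef exists because the Windows x64 ABI passes the first integer argument in rcx while the System V AMD64 ABI uses rdi. A hedged sketch of the C side (the entry-point name and signature below are assumptions for illustration; the stub itself only sees the address through ExternalReference::store_buffer_overflow_function):

    class Isolate;  // opaque in this sketch
    extern "C" void StoreBufferOverflowEntry(Isolate* isolate);  // hypothetical name

    void CallStoreBufferOverflow(Isolate* isolate) {
      // An ordinary C++ call: the compiler materializes |isolate| in rcx
      // (Windows x64) or rdi (System V), the same choice the generated stub
      // has to spell out by hand before CallCFunction.
      StoreBufferOverflowEntry(isolate);
    }
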
350
351
331 void ToBooleanStub::CheckOddball(MacroAssembler* masm, 352 void ToBooleanStub::CheckOddball(MacroAssembler* masm,
332 Type type, 353 Type type,
333 Heap::RootListIndex value, 354 Heap::RootListIndex value,
334 bool result) { 355 bool result) {
335 const Register argument = rax; 356 const Register argument = rax;
336 if (types_.Contains(type)) { 357 if (types_.Contains(type)) {
337 // If we see an expected oddball, return its ToBoolean value tos_. 358 // If we see an expected oddball, return its ToBoolean value tos_.
338 Label different_value; 359 Label different_value;
339 __ CompareRoot(argument, value); 360 __ CompareRoot(argument, value);
340 __ j(not_equal, &different_value, Label::kNear); 361 __ j(not_equal, &different_value, Label::kNear);
(...skipping 274 matching lines...)
615 __ shl(kScratchRegister, Immediate(63)); 636 __ shl(kScratchRegister, Immediate(63));
616 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); 637 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister);
617 } else { 638 } else {
618 // Allocate a heap number before calculating the answer, 639 // Allocate a heap number before calculating the answer,
619 // so we don't have an untagged double around during GC. 640 // so we don't have an untagged double around during GC.
620 Label slow_allocate_heapnumber, heapnumber_allocated; 641 Label slow_allocate_heapnumber, heapnumber_allocated;
621 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); 642 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
622 __ jmp(&heapnumber_allocated); 643 __ jmp(&heapnumber_allocated);
623 644
624 __ bind(&slow_allocate_heapnumber); 645 __ bind(&slow_allocate_heapnumber);
625 __ EnterInternalFrame(); 646 {
626 __ push(rax); 647 FrameScope scope(masm, StackFrame::INTERNAL);
627 __ CallRuntime(Runtime::kNumberAlloc, 0); 648 __ push(rax);
628 __ movq(rcx, rax); 649 __ CallRuntime(Runtime::kNumberAlloc, 0);
629 __ pop(rax); 650 __ movq(rcx, rax);
630 __ LeaveInternalFrame(); 651 __ pop(rax);
652 }
631 __ bind(&heapnumber_allocated); 653 __ bind(&heapnumber_allocated);
632 // rcx: allocated 'empty' number 654 // rcx: allocated 'empty' number
633 655
634 // Copy the double value to the new heap number, flipping the sign. 656 // Copy the double value to the new heap number, flipping the sign.
635 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); 657 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
636 __ Set(kScratchRegister, 0x01); 658 __ Set(kScratchRegister, 0x01);
637 __ shl(kScratchRegister, Immediate(63)); 659 __ shl(kScratchRegister, Immediate(63));
638 __ xor_(rdx, kScratchRegister); // Flip sign. 660 __ xor_(rdx, kScratchRegister); // Flip sign.
639 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); 661 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
640 __ movq(rax, rcx); 662 __ movq(rax, rcx);
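
The unary-minus path above negates a heap number without touching the FPU: it loads the 64-bit payload, XORs in a mask with only bit 63 set (built by Set(kScratchRegister, 0x01) followed by shl by 63), and stores the result into the freshly allocated HeapNumber. A standalone C++ sketch of the same bit trick (illustrative only, not V8 code); because only the sign bit changes, it also handles -0.0 and NaN payloads correctly:

    #include <cstdint>
    #include <cstring>

    double NegateByFlippingSignBit(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);  // view the IEEE 754 bit pattern
      bits ^= uint64_t{1} << 63;                // flip only the sign bit
      std::memcpy(&value, &bits, sizeof bits);
      return value;
    }
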
(...skipping 103 matching lines...)
744 // operation result to the caller of the stub. 766 // operation result to the caller of the stub.
745 __ TailCallExternalReference( 767 __ TailCallExternalReference(
746 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), 768 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
747 masm->isolate()), 769 masm->isolate()),
748 5, 770 5,
749 1); 771 1);
750 } 772 }
751 773
752 774
753 void BinaryOpStub::Generate(MacroAssembler* masm) { 775 void BinaryOpStub::Generate(MacroAssembler* masm) {
776 // Explicitly allow generation of nested stubs. It is safe here because
777 // generation code does not use any raw pointers.
778 AllowStubCallsScope allow_stub_calls(masm, true);
779
754 switch (operands_type_) { 780 switch (operands_type_) {
755 case BinaryOpIC::UNINITIALIZED: 781 case BinaryOpIC::UNINITIALIZED:
756 GenerateTypeTransition(masm); 782 GenerateTypeTransition(masm);
757 break; 783 break;
758 case BinaryOpIC::SMI: 784 case BinaryOpIC::SMI:
759 GenerateSmiStub(masm); 785 GenerateSmiStub(masm);
760 break; 786 break;
761 case BinaryOpIC::INT32: 787 case BinaryOpIC::INT32:
762 UNREACHABLE(); 788 UNREACHABLE();
763 // The int32 case is identical to the Smi case. We avoid creating this 789 // The int32 case is identical to the Smi case. We avoid creating this
(...skipping 682 matching lines...)
1446 __ bind(&skip_cache); 1472 __ bind(&skip_cache);
1447 __ subq(rsp, Immediate(kDoubleSize)); 1473 __ subq(rsp, Immediate(kDoubleSize));
1448 __ movsd(Operand(rsp, 0), xmm1); 1474 __ movsd(Operand(rsp, 0), xmm1);
1449 __ fld_d(Operand(rsp, 0)); 1475 __ fld_d(Operand(rsp, 0));
1450 GenerateOperation(masm); 1476 GenerateOperation(masm);
1451 __ fstp_d(Operand(rsp, 0)); 1477 __ fstp_d(Operand(rsp, 0));
1452 __ movsd(xmm1, Operand(rsp, 0)); 1478 __ movsd(xmm1, Operand(rsp, 0));
1453 __ addq(rsp, Immediate(kDoubleSize)); 1479 __ addq(rsp, Immediate(kDoubleSize));
1454 // We return the value in xmm1 without adding it to the cache, but 1480 // We return the value in xmm1 without adding it to the cache, but
1455 // we cause a scavenging GC so that future allocations will succeed. 1481 // we cause a scavenging GC so that future allocations will succeed.
1456 __ EnterInternalFrame(); 1482 {
1457 // Allocate an unused object bigger than a HeapNumber. 1483 FrameScope scope(masm, StackFrame::INTERNAL);
1458 __ Push(Smi::FromInt(2 * kDoubleSize)); 1484 // Allocate an unused object bigger than a HeapNumber.
1459 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); 1485 __ Push(Smi::FromInt(2 * kDoubleSize));
1460 __ LeaveInternalFrame(); 1486 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
1487 }
1461 __ Ret(); 1488 __ Ret();
1462 } 1489 }
1463 1490
1464 // Call runtime, doing whatever allocation and cleanup is necessary. 1491 // Call runtime, doing whatever allocation and cleanup is necessary.
1465 if (tagged) { 1492 if (tagged) {
1466 __ bind(&runtime_call_clear_stack); 1493 __ bind(&runtime_call_clear_stack);
1467 __ fstp(0); 1494 __ fstp(0);
1468 __ bind(&runtime_call); 1495 __ bind(&runtime_call);
1469 __ TailCallExternalReference( 1496 __ TailCallExternalReference(
1470 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1); 1497 ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
1471 } else { // UNTAGGED. 1498 } else { // UNTAGGED.
1472 __ bind(&runtime_call_clear_stack); 1499 __ bind(&runtime_call_clear_stack);
1473 __ bind(&runtime_call); 1500 __ bind(&runtime_call);
1474 __ AllocateHeapNumber(rax, rdi, &skip_cache); 1501 __ AllocateHeapNumber(rax, rdi, &skip_cache);
1475 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); 1502 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
1476 __ EnterInternalFrame(); 1503 {
1477 __ push(rax); 1504 FrameScope scope(masm, StackFrame::INTERNAL);
1478 __ CallRuntime(RuntimeFunction(), 1); 1505 __ push(rax);
1479 __ LeaveInternalFrame(); 1506 __ CallRuntime(RuntimeFunction(), 1);
1507 }
1480 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); 1508 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
1481 __ Ret(); 1509 __ Ret();
1482 } 1510 }
1483 } 1511 }
1484 1512
1485 1513
1486 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { 1514 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
1487 switch (type_) { 1515 switch (type_) {
1488 // Add more cases when necessary. 1516 // Add more cases when necessary.
1489 case TranscendentalCache::SIN: return Runtime::kMath_sin; 1517 case TranscendentalCache::SIN: return Runtime::kMath_sin;
(...skipping 1173 matching lines...)
2663 2691
2664 // rbx: last_match_info backing store (FixedArray) 2692 // rbx: last_match_info backing store (FixedArray)
2665 // rdx: number of capture registers 2693 // rdx: number of capture registers
2666 // Store the capture count. 2694 // Store the capture count.
2667 __ Integer32ToSmi(kScratchRegister, rdx); 2695 __ Integer32ToSmi(kScratchRegister, rdx);
2668 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset), 2696 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
2669 kScratchRegister); 2697 kScratchRegister);
2670 // Store last subject and last input. 2698 // Store last subject and last input.
2671 __ movq(rax, Operand(rsp, kSubjectOffset)); 2699 __ movq(rax, Operand(rsp, kSubjectOffset));
2672 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); 2700 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
2673 __ movq(rcx, rbx); 2701 __ RecordWriteField(rbx,
2674 __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi); 2702 RegExpImpl::kLastSubjectOffset,
2703 rax,
2704 rdi,
2705 kDontSaveFPRegs);
2675 __ movq(rax, Operand(rsp, kSubjectOffset)); 2706 __ movq(rax, Operand(rsp, kSubjectOffset));
2676 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); 2707 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
2677 __ movq(rcx, rbx); 2708 __ RecordWriteField(rbx,
2678 __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi); 2709 RegExpImpl::kLastInputOffset,
2710 rax,
2711 rdi,
2712 kDontSaveFPRegs);
2679 2713
2680 // Get the static offsets vector filled by the native regexp code. 2714 // Get the static offsets vector filled by the native regexp code.
2681 __ LoadAddress(rcx, 2715 __ LoadAddress(rcx,
2682 ExternalReference::address_of_static_offsets_vector(isolate)); 2716 ExternalReference::address_of_static_offsets_vector(isolate));
2683 2717
2684 // rbx: last_match_info backing store (FixedArray) 2718 // rbx: last_match_info backing store (FixedArray)
2685 // rcx: offsets vector 2719 // rcx: offsets vector
2686 // rdx: number of capture registers 2720 // rdx: number of capture registers
2687 Label next_capture, done; 2721 Label next_capture, done;
2688 // Capture register counter starts from number of capture registers and 2722 // Capture register counter starts from number of capture registers and
(...skipping 535 matching lines...)
3224 __ testb(scratch, Immediate(kIsSymbolMask)); 3258 __ testb(scratch, Immediate(kIsSymbolMask));
3225 __ j(zero, label); 3259 __ j(zero, label);
3226 } 3260 }
3227 3261
3228 3262
3229 void StackCheckStub::Generate(MacroAssembler* masm) { 3263 void StackCheckStub::Generate(MacroAssembler* masm) {
3230 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); 3264 __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
3231 } 3265 }
3232 3266
3233 3267
3268 void CallFunctionStub::FinishCode(Code* code) {
3269 code->set_has_function_cache(false);
3270 }
3271
3272
3273 void CallFunctionStub::Clear(Heap* heap, Address address) {
3274 UNREACHABLE();
3275 }
3276
3277
3278 Object* CallFunctionStub::GetCachedValue(Address address) {
3279 UNREACHABLE();
3280 return NULL;
3281 }
3282
3283
3234 void CallFunctionStub::Generate(MacroAssembler* masm) { 3284 void CallFunctionStub::Generate(MacroAssembler* masm) {
3235 Label slow, non_function; 3285 Label slow, non_function;
3236 3286
3237 // The receiver might implicitly be the global object. This is 3287 // The receiver might implicitly be the global object. This is
3238 // indicated by passing the hole as the receiver to the call 3288 // indicated by passing the hole as the receiver to the call
3239 // function stub. 3289 // function stub.
3240 if (ReceiverMightBeImplicit()) { 3290 if (ReceiverMightBeImplicit()) {
3241 Label call; 3291 Label call;
3242 // Get the receiver from the stack. 3292 // Get the receiver from the stack.
3243 // +1 ~ return address 3293 // +1 ~ return address
(...skipping 68 matching lines...)
3312 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline(); 3362 Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
3313 __ Jump(adaptor, RelocInfo::CODE_TARGET); 3363 __ Jump(adaptor, RelocInfo::CODE_TARGET);
3314 } 3364 }
3315 3365
3316 3366
3317 bool CEntryStub::NeedsImmovableCode() { 3367 bool CEntryStub::NeedsImmovableCode() {
3318 return false; 3368 return false;
3319 } 3369 }
3320 3370
3321 3371
3372 bool CEntryStub::IsPregenerated() {
3373 #ifdef _WIN64
3374 return result_size_ == 1;
3375 #else
3376 return true;
3377 #endif
3378 }
3379
3380
3381 void CodeStub::GenerateStubsAheadOfTime() {
3382 CEntryStub::GenerateAheadOfTime();
3383 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
3384 // It is important that the store buffer overflow stubs are generated first.
3385 RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
3386 }
3387
3388
3389 void CodeStub::GenerateFPStubs() {
3390 }
3391
3392
3393 void CEntryStub::GenerateAheadOfTime() {
3394 CEntryStub stub(1, kDontSaveFPRegs);
3395 stub.GetCode()->set_is_pregenerated(true);
3396 CEntryStub save_doubles(1, kSaveFPRegs);
3397 save_doubles.GetCode()->set_is_pregenerated(true);
3398 }
3399
3400
3322 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { 3401 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
3323 // Throw exception in eax. 3402 // Throw exception in eax.
3324 __ Throw(rax); 3403 __ Throw(rax);
3325 } 3404 }
3326 3405
3327 3406
3328 void CEntryStub::GenerateCore(MacroAssembler* masm, 3407 void CEntryStub::GenerateCore(MacroAssembler* masm,
3329 Label* throw_normal_exception, 3408 Label* throw_normal_exception,
3330 Label* throw_termination_exception, 3409 Label* throw_termination_exception,
3331 Label* throw_out_of_memory_exception, 3410 Label* throw_out_of_memory_exception,
(...skipping 418 matching lines...)
3750 __ j(above, &slow); 3829 __ j(above, &slow);
3751 3830
3752 // Register mapping: 3831 // Register mapping:
3753 // rax is object map. 3832 // rax is object map.
3754 // rdx is function. 3833 // rdx is function.
3755 // rbx is function prototype. 3834 // rbx is function prototype.
3756 if (!HasCallSiteInlineCheck()) { 3835 if (!HasCallSiteInlineCheck()) {
3757 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 3836 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3758 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); 3837 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3759 } else { 3838 } else {
3839 // Get return address and delta to inlined map check.
3760 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 3840 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3761 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 3841 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3762 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax); 3842 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
3763 if (FLAG_debug_code) { 3843 if (FLAG_debug_code) {
3764 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); 3844 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
3765 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); 3845 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
3766 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); 3846 __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
3767 } 3847 }
3768 } 3848 }
3769 3849
(...skipping 14 matching lines...)
3784 __ jmp(&loop); 3864 __ jmp(&loop);
3785 3865
3786 __ bind(&is_instance); 3866 __ bind(&is_instance);
3787 if (!HasCallSiteInlineCheck()) { 3867 if (!HasCallSiteInlineCheck()) {
3788 __ xorl(rax, rax); 3868 __ xorl(rax, rax);
3789 // Store bitwise zero in the cache. This is a Smi in GC terms. 3869 // Store bitwise zero in the cache. This is a Smi in GC terms.
3790 STATIC_ASSERT(kSmiTag == 0); 3870 STATIC_ASSERT(kSmiTag == 0);
3791 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); 3871 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3792 } else { 3872 } else {
3793 // Store offset of true in the root array at the inline check site. 3873 // Store offset of true in the root array at the inline check site.
3794 ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias 3874 int true_offset = 0x100 +
3795 == 0xB0 - 0x100); 3875 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
3796 __ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize. 3876 // Assert it is a 1-byte signed value.
3877 ASSERT(true_offset >= 0 && true_offset < 0x100);
3878 __ movl(rax, Immediate(true_offset));
3797 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 3879 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3798 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 3880 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3799 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 3881 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3800 if (FLAG_debug_code) { 3882 if (FLAG_debug_code) {
3801 __ movl(rax, Immediate(kWordBeforeResultValue)); 3883 __ movl(rax, Immediate(kWordBeforeResultValue));
3802 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 3884 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3803 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); 3885 __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
3804 } 3886 }
3805 __ Set(rax, 0); 3887 __ Set(rax, 0);
3806 } 3888 }
3807 __ ret(2 * kPointerSize + extra_stack_space); 3889 __ ret(2 * kPointerSize + extra_stack_space);
3808 3890
3809 __ bind(&is_not_instance); 3891 __ bind(&is_not_instance);
3810 if (!HasCallSiteInlineCheck()) { 3892 if (!HasCallSiteInlineCheck()) {
3811 // We have to store a non-zero value in the cache. 3893 // We have to store a non-zero value in the cache.
3812 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); 3894 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
3813 } else { 3895 } else {
3814 // Store offset of false in the root array at the inline check site. 3896 // Store offset of false in the root array at the inline check site.
3815 ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias 3897 int false_offset = 0x100 +
3816 == 0xB8 - 0x100); 3898 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
3817 __ movl(rax, Immediate(0xB8)); // FalseValue is at -9 * kPointerSize. 3899 // Assert it is a 1-byte signed value.
3900 ASSERT(false_offset >= 0 && false_offset < 0x100);
3901 __ movl(rax, Immediate(false_offset));
3818 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); 3902 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
3819 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); 3903 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
3820 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 3904 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3821 if (FLAG_debug_code) { 3905 if (FLAG_debug_code) {
3822 __ movl(rax, Immediate(kWordBeforeResultValue)); 3906 __ movl(rax, Immediate(kWordBeforeResultValue));
3823 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 3907 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3824 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); 3908 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
3825 } 3909 }
3826 } 3910 }
3827 __ ret(2 * kPointerSize + extra_stack_space); 3911 __ ret(2 * kPointerSize + extra_stack_space);
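
In the inlined instanceof check, the removed code patched hard-coded bytes (0xB0 for true, 0xB8 for false) into the call site and asserted that they matched the root-array layout; per the removed comments these are -10 and -9 pointer sizes relative to the biased root pointer (0xB0 - 0x100 = -80, 0xB8 - 0x100 = -72). The new code computes the byte from the root index instead, so it keeps working when the root list changes. A small sketch of that arithmetic (the parameter names stand in for Heap::kTrueValueRootIndex / kFalseValueRootIndex, kPointerSizeLog2 and kRootRegisterBias; the rebasing by 0x100 matches the new code above):

    #include <cassert>

    int ComputePatchedByte(int root_index, int pointer_size_log2,
                           int root_register_bias) {
      int offset = 0x100 + (root_index << pointer_size_log2) - root_register_bias;
      assert(offset >= 0 && offset < 0x100);  // must fit in the single patched byte
      return offset;
    }
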
(...skipping 1436 matching lines...)
5264 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { 5348 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
5265 // Save the registers. 5349 // Save the registers.
5266 __ pop(rcx); 5350 __ pop(rcx);
5267 __ push(rdx); 5351 __ push(rdx);
5268 __ push(rax); 5352 __ push(rax);
5269 __ push(rcx); 5353 __ push(rcx);
5270 5354
5271 // Call the runtime system in a fresh internal frame. 5355 // Call the runtime system in a fresh internal frame.
5272 ExternalReference miss = 5356 ExternalReference miss =
5273 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); 5357 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
5274 __ EnterInternalFrame(); 5358 {
5275 __ push(rdx); 5359 FrameScope scope(masm, StackFrame::INTERNAL);
5276 __ push(rax); 5360 __ push(rdx);
5277 __ Push(Smi::FromInt(op_)); 5361 __ push(rax);
5278 __ CallExternalReference(miss, 3); 5362 __ Push(Smi::FromInt(op_));
5279 __ LeaveInternalFrame(); 5363 __ CallExternalReference(miss, 3);
5364 }
5280 5365
5281 // Compute the entry point of the rewritten stub. 5366 // Compute the entry point of the rewritten stub.
5282 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); 5367 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
5283 5368
5284 // Restore registers. 5369 // Restore registers.
5285 __ pop(rcx); 5370 __ pop(rcx);
5286 __ pop(rax); 5371 __ pop(rax);
5287 __ pop(rdx); 5372 __ pop(rdx);
5288 __ push(rcx); 5373 __ push(rcx);
5289 5374
(...skipping 110 matching lines...)
5400 __ push(r0); 5485 __ push(r0);
5401 __ CallStub(&stub); 5486 __ CallStub(&stub);
5402 5487
5403 __ testq(r0, r0); 5488 __ testq(r0, r0);
5404 __ j(zero, miss); 5489 __ j(zero, miss);
5405 __ jmp(done); 5490 __ jmp(done);
5406 } 5491 }
5407 5492
5408 5493
5409 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { 5494 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
5495 // This stub overrides SometimesSetsUpAFrame() to return false. That means
5496 // we cannot call anything that could cause a GC from this stub.
5410 // Stack frame on entry: 5497 // Stack frame on entry:
5411 // esp[0 * kPointerSize]: return address. 5498 // esp[0 * kPointerSize]: return address.
5412 // esp[1 * kPointerSize]: key's hash. 5499 // esp[1 * kPointerSize]: key's hash.
5413 // esp[2 * kPointerSize]: key. 5500 // esp[2 * kPointerSize]: key.
5414 // Registers: 5501 // Registers:
5415 // dictionary_: StringDictionary to probe. 5502 // dictionary_: StringDictionary to probe.
5416 // result_: used as scratch. 5503 // result_: used as scratch.
5417 // index_: will hold an index of entry if lookup is successful. 5504 // index_: will hold an index of entry if lookup is successful.
5418 // might alias with result_. 5505 // might alias with result_.
5419 // Returns: 5506 // Returns:
(...skipping 65 matching lines...)
5485 __ Drop(1); 5572 __ Drop(1);
5486 __ ret(2 * kPointerSize); 5573 __ ret(2 * kPointerSize);
5487 5574
5488 __ bind(&not_in_dictionary); 5575 __ bind(&not_in_dictionary);
5489 __ movq(scratch, Immediate(0)); 5576 __ movq(scratch, Immediate(0));
5490 __ Drop(1); 5577 __ Drop(1);
5491 __ ret(2 * kPointerSize); 5578 __ ret(2 * kPointerSize);
5492 } 5579 }
5493 5580
5494 5581
5582 struct AheadOfTimeWriteBarrierStubList {
5583 Register object, value, address;
5584 RememberedSetAction action;
5585 };
5586
5587
5588 struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
5589 // Used in RegExpExecStub.
5590 { rbx, rax, rdi, EMIT_REMEMBERED_SET },
5591 // Used in CompileArrayPushCall.
5592 { rbx, rcx, rdx, EMIT_REMEMBERED_SET },
5593 // Used in CompileStoreGlobal.
5594 { rbx, rcx, rdx, OMIT_REMEMBERED_SET },
5595 // Used in StoreStubCompiler::CompileStoreField and
5596 // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
5597 { rdx, rcx, rbx, EMIT_REMEMBERED_SET },
5598 // GenerateStoreField calls the stub with two different permutations of
5599 // registers. This is the second.
5600 { rbx, rcx, rdx, EMIT_REMEMBERED_SET },
5601 // StoreIC::GenerateNormal via GenerateDictionaryStore.
5602 { rbx, r8, r9, EMIT_REMEMBERED_SET },
5603 // KeyedStoreIC::GenerateGeneric.
5604 { rbx, rdx, rcx, EMIT_REMEMBERED_SET},
5605 // KeyedStoreStubCompiler::GenerateStoreFastElement.
5606 { rdi, rdx, rcx, EMIT_REMEMBERED_SET},
5607 // Null termination.
5608 { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
5609 };
5610
5611
5612 bool RecordWriteStub::IsPregenerated() {
5613 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
5614 !entry->object.is(no_reg);
5615 entry++) {
5616 if (object_.is(entry->object) &&
5617 value_.is(entry->value) &&
5618 address_.is(entry->address) &&
5619 remembered_set_action_ == entry->action &&
5620 save_fp_regs_mode_ == kDontSaveFPRegs) {
5621 return true;
5622 }
5623 }
5624 return false;
5625 }
5626
5627
5628 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
5629 StoreBufferOverflowStub stub1(kDontSaveFPRegs);
5630 stub1.GetCode()->set_is_pregenerated(true);
5631 StoreBufferOverflowStub stub2(kSaveFPRegs);
5632 stub2.GetCode()->set_is_pregenerated(true);
5633 }
5634
5635
5636 void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
5637 for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
5638 !entry->object.is(no_reg);
5639 entry++) {
5640 RecordWriteStub stub(entry->object,
5641 entry->value,
5642 entry->address,
5643 entry->action,
5644 kDontSaveFPRegs);
5645 stub.GetCode()->set_is_pregenerated(true);
5646 }
5647 }
5648
5649
5650 // Takes the input in 3 registers: address_ value_ and object_. A pointer to
5651 // the value has just been written into the object, now this stub makes sure
5652 // we keep the GC informed. The word in the object where the value has been
5653 // written is in the address register.
5654 void RecordWriteStub::Generate(MacroAssembler* masm) {
5655 Label skip_to_incremental_noncompacting;
5656 Label skip_to_incremental_compacting;
5657
5658 // The first two instructions are generated with labels so as to get the
5659 // offset fixed up correctly by the bind(Label*) call. We patch it back and
5660 // forth between a compare instruction (a nop in this position) and the
5661 // real branch when we start and stop incremental heap marking.
5662 // See RecordWriteStub::Patch for details.
5663 __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
5664 __ jmp(&skip_to_incremental_compacting, Label::kFar);
5665
5666 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
5667 __ RememberedSetHelper(object_,
5668 address_,
5669 value_,
5670 save_fp_regs_mode_,
5671 MacroAssembler::kReturnAtEnd);
5672 } else {
5673 __ ret(0);
5674 }
5675
5676 __ bind(&skip_to_incremental_noncompacting);
5677 GenerateIncremental(masm, INCREMENTAL);
5678
5679 __ bind(&skip_to_incremental_compacting);
5680 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
5681
5682 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
5683 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
5684 masm->set_byte_at(0, kTwoByteNopInstruction);
5685 masm->set_byte_at(2, kFiveByteNopInstruction);
5686 }
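
RecordWriteStub::Generate lays down two jumps at the very start of the stub and then immediately overwrites their first opcode bytes with same-length "nop" encodings (kTwoByteNopInstruction / kFiveByteNopInstruction, which the comment notes are really harmless compare opcodes), so a freshly generated stub runs in STORE_BUFFER_ONLY mode; incremental marking later toggles those bytes back into jumps. A hedged sketch of that patching idea, not the real RecordWriteStub::Patch; the opcode values are taken as parameters because the actual constants live in code-stubs-x64.h:

    #include <cstdint>

    enum Mode { STORE_BUFFER_ONLY, INCREMENTAL, INCREMENTAL_COMPACTION };

    // |code| points at the stub's first instruction byte.
    void PatchRecordWriteStub(uint8_t* code, Mode mode,
                              uint8_t two_byte_nop, uint8_t two_byte_jump,
                              uint8_t five_byte_nop, uint8_t five_byte_jump) {
      switch (mode) {
        case STORE_BUFFER_ONLY:       // both incremental paths disabled
          code[0] = two_byte_nop;
          code[2] = five_byte_nop;
          break;
        case INCREMENTAL:             // take the first (non-compacting) jump
          code[0] = two_byte_jump;
          code[2] = five_byte_nop;
          break;
        case INCREMENTAL_COMPACTION:  // fall through the first slot, take the second
          code[0] = two_byte_nop;
          code[2] = five_byte_jump;
          break;
      }
    }

Only the first opcode byte of each instruction is swapped, so the jump displacements emitted at generation time stay valid.
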
5687
5688
5689 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
5690 regs_.Save(masm);
5691
5692 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
5693 Label dont_need_remembered_set;
5694
5695 __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
5696 __ JumpIfNotInNewSpace(regs_.scratch0(),
5697 regs_.scratch0(),
5698 &dont_need_remembered_set);
5699
5700 __ CheckPageFlag(regs_.object(),
5701 regs_.scratch0(),
5702 1 << MemoryChunk::SCAN_ON_SCAVENGE,
5703 not_zero,
5704 &dont_need_remembered_set);
5705
5706 // First notify the incremental marker if necessary, then update the
5707 // remembered set.
5708 CheckNeedsToInformIncrementalMarker(
5709 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
5710 InformIncrementalMarker(masm, mode);
5711 regs_.Restore(masm);
5712 __ RememberedSetHelper(object_,
5713 address_,
5714 value_,
5715 save_fp_regs_mode_,
5716 MacroAssembler::kReturnAtEnd);
5717
5718 __ bind(&dont_need_remembered_set);
5719 }
5720
5721 CheckNeedsToInformIncrementalMarker(
5722 masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
5723 InformIncrementalMarker(masm, mode);
5724 regs_.Restore(masm);
5725 __ ret(0);
5726 }
5727
5728
5729 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
5730 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
5731 #ifdef _WIN64
5732 Register arg3 = r8;
5733 Register arg2 = rdx;
5734 Register arg1 = rcx;
5735 #else
5736 Register arg3 = rdx;
5737 Register arg2 = rsi;
5738 Register arg1 = rdi;
5739 #endif
5740 Register address =
5741 arg1.is(regs_.address()) ? kScratchRegister : regs_.address();
5742 ASSERT(!address.is(regs_.object()));
5743 ASSERT(!address.is(arg1));
5744 __ Move(address, regs_.address());
5745 __ Move(arg1, regs_.object());
5746 if (mode == INCREMENTAL_COMPACTION) {
5747 // TODO(gc) Can we just set address arg2 in the beginning?
5748 __ Move(arg2, address);
5749 } else {
5750 ASSERT(mode == INCREMENTAL);
5751 __ movq(arg2, Operand(address, 0));
5752 }
5753 __ LoadAddress(arg3, ExternalReference::isolate_address());
5754 int argument_count = 3;
5755
5756 AllowExternalCallThatCantCauseGC scope(masm);
5757 __ PrepareCallCFunction(argument_count);
5758 if (mode == INCREMENTAL_COMPACTION) {
5759 __ CallCFunction(
5760 ExternalReference::incremental_evacuation_record_write_function(
5761 masm->isolate()),
5762 argument_count);
5763 } else {
5764 ASSERT(mode == INCREMENTAL);
5765 __ CallCFunction(
5766 ExternalReference::incremental_marking_record_write_function(
5767 masm->isolate()),
5768 argument_count);
5769 }
5770 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
5771 }
5772
5773
5774 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
5775 MacroAssembler* masm,
5776 OnNoNeedToInformIncrementalMarker on_no_need,
5777 Mode mode) {
5778 Label on_black;
5779 Label need_incremental;
5780 Label need_incremental_pop_object;
5781
5782 // Let's look at the color of the object: If it is not black we don't have
5783 // to inform the incremental marker.
5784 __ JumpIfBlack(regs_.object(),
5785 regs_.scratch0(),
5786 regs_.scratch1(),
5787 &on_black,
5788 Label::kNear);
5789
5790 regs_.Restore(masm);
5791 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
5792 __ RememberedSetHelper(object_,
5793 address_,
5794 value_,
5795 save_fp_regs_mode_,
5796 MacroAssembler::kReturnAtEnd);
5797 } else {
5798 __ ret(0);
5799 }
5800
5801 __ bind(&on_black);
5802
5803 // Get the value from the slot.
5804 __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
5805
5806 if (mode == INCREMENTAL_COMPACTION) {
5807 Label ensure_not_white;
5808
5809 __ CheckPageFlag(regs_.scratch0(), // Contains value.
5810 regs_.scratch1(), // Scratch.
5811 MemoryChunk::kEvacuationCandidateMask,
5812 zero,
5813 &ensure_not_white,
5814 Label::kNear);
5815
5816 __ CheckPageFlag(regs_.object(),
5817 regs_.scratch1(), // Scratch.
5818 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
5819 zero,
5820 &need_incremental);
5821
5822 __ bind(&ensure_not_white);
5823 }
5824
5825 // We need an extra register for this, so we push the object register
5826 // temporarily.
5827 __ push(regs_.object());
5828 __ EnsureNotWhite(regs_.scratch0(), // The value.
5829 regs_.scratch1(), // Scratch.
5830 regs_.object(), // Scratch.
5831 &need_incremental_pop_object,
5832 Label::kNear);
5833 __ pop(regs_.object());
5834
5835 regs_.Restore(masm);
5836 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
5837 __ RememberedSetHelper(object_,
5838 address_,
5839 value_,
5840 save_fp_regs_mode_,
5841 MacroAssembler::kReturnAtEnd);
5842 } else {
5843 __ ret(0);
5844 }
5845
5846 __ bind(&need_incremental_pop_object);
5847 __ pop(regs_.object());
5848
5849 __ bind(&need_incremental);
5850
5851 // Fall through when we need to inform the incremental marker.
5852 }
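
CheckNeedsToInformIncrementalMarker encodes the usual incremental-marking write-barrier test: the slow path is only needed when a black (already scanned) object receives a white (unmarked) value, or, while compacting, when the value lives on an evacuation candidate page and the object's page has not opted out of slot recording. A boolean distillation of that decision (a sketch, not a V8 API; the flags correspond to the mark bits and MemoryChunk page flags the stub checks above):

    bool NeedsToInformIncrementalMarker(bool object_is_black,
                                        bool value_is_white,
                                        bool value_on_evacuation_candidate,
                                        bool object_skips_slot_recording,
                                        bool compacting) {
      if (!object_is_black) return false;  // object will be (re)scanned anyway
      if (compacting && value_on_evacuation_candidate &&
          !object_skips_slot_recording) {
        return true;                       // slot must be recorded for evacuation
      }
      return value_is_white;               // black object now points at a white value
    }
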
5853
5854
5495 #undef __ 5855 #undef __
5496 5856
5497 } } // namespace v8::internal 5857 } } // namespace v8::internal
5498 5858
5499 #endif // V8_TARGET_ARCH_X64 5859 #endif // V8_TARGET_ARCH_X64