Chromium Code Reviews

Diff: src/x64/code-stubs-x64.cc

Issue 7015043: Fix the GC branch so it compiles and runs on 64 bit. (Closed)
Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 7 months ago
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 282 matching lines...)
  }
  // R12 to r15 are callee save on all platforms.
  if (save_doubles_ == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    __ subq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
-  const int argument_count = 0;
+  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
-  __ CallCFunction(ExteranalReference::store_buffer_overflow_function(),
-                   argument_count);
+#ifdef _WIN64
+  __ LoadAddress(rcx, ExternalReference::isolate_address());
+#else
+  __ LoadAddress(rdi, ExternalReference::isolate_address());
+#endif
+  __ CallCFunction(
+      ExternalReference::store_buffer_overflow_function(masm->isolate()),
+      argument_count);
  if (save_doubles_ == kSaveFPRegs) {
    CpuFeatures::Scope scope(SSE2);
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    __ addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
  }
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    __ pop(saved_regs[i]);
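Note on the chunk above: the C++ store-buffer overflow handler is now given the current isolate, so argument_count changes from 0 to 1 and the isolate address has to be loaded into the first-argument register, which differs by calling convention (rcx under the Microsoft x64 ABI on Windows, rdi under the System V AMD64 ABI elsewhere). The following standalone C++ sketch is not V8 code; it only illustrates that register choice, and its output string is purely illustrative.

// Standalone illustration (not V8 code) of the ABI detail the patch encodes:
// the first integer argument is passed in rcx on Win64 and in rdi under the
// System V AMD64 ABI, so the stub loads the isolate address accordingly.
#include <cstdio>

int main() {
#ifdef _WIN64
  const char* first_arg_register = "rcx";  // Microsoft x64 calling convention
#else
  const char* first_arg_register = "rdi";  // System V AMD64 ABI
#endif
  std::printf("isolate address would be passed in %s\n", first_arg_register);
  return 0;
}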
(...skipping 3150 matching lines...)
  __ j(above, &slow);

  // Register mapping:
  // rax is object map.
  // rdx is function.
  // rbx is function prototype.
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
    __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
  } else {
+    // Get return address and delta to inlined map check.
    __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
    __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
    __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
    if (FLAG_debug_code) {
      __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
      __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
      __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
    }
  }

(...skipping 14 matching lines...)
  __ jmp(&loop);

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    __ xorl(rax, rax);
    // Store bitwise zero in the cache. This is a Smi in GC terms.
    STATIC_ASSERT(kSmiTag == 0);
    __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
  } else {
    // Store offset of true in the root array at the inline check site.
-    ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
-           == 0xB0 - 0x100);
-    __ movl(rax, Immediate(0xB0));  // TrueValue is at -10 * kPointerSize.
+    int true_offset = 0x100 +
+        (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
+    // Assert it is a 1-byte signed value.
+    ASSERT(true_offset >= 0 && true_offset < 0x100);
+    __ movl(rax, Immediate(true_offset));
    __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
    __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
    __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
    if (FLAG_debug_code) {
      __ movl(rax, Immediate(kWordBeforeResultValue));
      __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
      __ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
    }
    __ Set(rax, 0);
  }
  __ ret(2 * kPointerSize + extra_stack_space);

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    // We have to store a non-zero value in the cache.
    __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
  } else {
    // Store offset of false in the root array at the inline check site.
-    ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias
-           == 0xB8 - 0x100);
-    __ movl(rax, Immediate(0xB8));  // FalseValue is at -9 * kPointerSize.
+    int false_offset = 0x100 +
+        (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
+    // Assert it is a 1-byte signed value.
+    ASSERT(false_offset >= 0 && false_offset < 0x100);
+    __ movl(rax, Immediate(false_offset));
    __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
    __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
    __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
    if (FLAG_debug_code) {
      __ movl(rax, Immediate(kWordBeforeResultValue));
      __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
      __ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
    }
  }
  __ ret(2 * kPointerSize + extra_stack_space);
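Note on the true/false chunks above: instead of hard-coding 0xB0 and 0xB8, the patch computes the byte that movb patches into the inlined check site from the root-list index, the pointer size, and the root-register bias, and asserts that the rebased value still fits in a single byte. The worked sketch below uses assumed constants (kRootRegisterBias = 128, kTrueValueRootIndex = 6, 8-byte pointers) picked only so the arithmetic reproduces the old 0xB0 value; they may not match the real V8 constants.

// Worked example (assumed constants, for illustration only) of the offset
// computation that replaces the hard-coded immediates in InstanceofStub.
#include <cassert>
#include <cstdio>

int main() {
  const int kPointerSizeLog2 = 3;      // 8-byte pointers on x64
  const int kRootRegisterBias = 128;   // assumed root-register bias
  const int kTrueValueRootIndex = 6;   // assumed root-list index of TrueValue
  // Rebase by 0x100 so the (negative) biased offset encodes as one byte.
  int true_offset = 0x100 +
      (kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
  assert(true_offset >= 0 && true_offset < 0x100);
  // With the assumed constants this prints 0xB0, the old hard-coded value.
  std::printf("byte patched at the call site: 0x%x\n",
              static_cast<unsigned>(true_offset));
  return 0;
}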
(...skipping 1306 matching lines...)
  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}


#undef __

} } // namespace v8::internal

#endif // V8_TARGET_ARCH_X64
