Chromium Code Reviews

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 26 matching lines...)
37 #include "debug.h" 37 #include "debug.h"
38 #include "heap.h" 38 #include "heap.h"
39 39
40 namespace v8 { 40 namespace v8 {
41 namespace internal { 41 namespace internal {
42 42
43 MacroAssembler::MacroAssembler(void* buffer, int size) 43 MacroAssembler::MacroAssembler(void* buffer, int size)
44 : Assembler(buffer, size), 44 : Assembler(buffer, size),
45 generating_stub_(false), 45 generating_stub_(false),
46 allow_stub_calls_(true), 46 allow_stub_calls_(true),
47 code_object_(Heap::undefined_value()) { 47 code_object_(HEAP->undefined_value()) {
48 } 48 }
49 49
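The constructor change above (line 47) swaps the static Heap::undefined_value() for HEAP->undefined_value(), part of the isolate migration this branch merge pulls in: heap roots, the factory, and per-thread VM state are reached through the current Isolate instead of through process-wide statics. A minimal sketch of the accessor pattern, assuming the macros in isolate.h look roughly like this (the exact definitions may differ):

// Sketch only; the real definitions live in src/isolate.h.
#define HEAP (v8::internal::Isolate::Current()->heap())
#define FACTORY (v8::internal::Isolate::Current()->factory())
// code_object_(HEAP->undefined_value()) therefore reads the undefined root
// from the heap owned by the isolate running on the current thread.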
50 50
51 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { 51 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
52 movq(destination, Operand(kRootRegister, 52 movq(destination, Operand(kRootRegister,
53 (index << kPointerSizeLog2) - kRootRegisterBias)); 53 (index << kPointerSizeLog2) - kRootRegisterBias));
54 } 54 }
55 55
56 56
57 void MacroAssembler::LoadRootIndexed(Register destination, 57 void MacroAssembler::LoadRootIndexed(Register destination,
(...skipping 337 matching lines...)
395 Integer32ToSmi(index, hash); 395 Integer32ToSmi(index, hash);
396 } 396 }
397 397
398 398
399 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { 399 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
400 CallRuntime(Runtime::FunctionForId(id), num_arguments); 400 CallRuntime(Runtime::FunctionForId(id), num_arguments);
401 } 401 }
402 402
403 403
404 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { 404 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
405 Runtime::Function* function = Runtime::FunctionForId(id); 405 const Runtime::Function* function = Runtime::FunctionForId(id);
406 Set(rax, function->nargs); 406 Set(rax, function->nargs);
407 movq(rbx, ExternalReference(function)); 407 movq(rbx, ExternalReference(function));
408 CEntryStub ces(1); 408 CEntryStub ces(1);
409 ces.SaveDoubles(); 409 ces.SaveDoubles();
410 CallStub(&ces); 410 CallStub(&ces);
411 } 411 }
412 412
413 413
414 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, 414 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
415 int num_arguments) { 415 int num_arguments) {
416 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); 416 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
417 } 417 }
418 418
419 419
420 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { 420 void MacroAssembler::CallRuntime(const Runtime::Function* f,
421 int num_arguments) {
421 // If the expected number of arguments of the runtime function is 422 // If the expected number of arguments of the runtime function is
422 // constant, we check that the actual number of arguments match the 423 // constant, we check that the actual number of arguments match the
423 // expectation. 424 // expectation.
424 if (f->nargs >= 0 && f->nargs != num_arguments) { 425 if (f->nargs >= 0 && f->nargs != num_arguments) {
425 IllegalOperation(num_arguments); 426 IllegalOperation(num_arguments);
426 return; 427 return;
427 } 428 }
428 429
429 // TODO(1236192): Most runtime routines don't need the number of 430 // TODO(1236192): Most runtime routines don't need the number of
430 // arguments passed in because it is constant. At some point we 431 // arguments passed in because it is constant. At some point we
431 // should remove this need and make the runtime routine entry code 432 // should remove this need and make the runtime routine entry code
432 // smarter. 433 // smarter.
433 Set(rax, num_arguments); 434 Set(rax, num_arguments);
434 movq(rbx, ExternalReference(f)); 435 movq(rbx, ExternalReference(f));
435 CEntryStub ces(f->result_size); 436 CEntryStub ces(f->result_size);
436 CallStub(&ces); 437 CallStub(&ces);
437 } 438 }
438 439
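CallRuntime above validates the caller-supplied argument count against the Runtime::Function descriptor, puts the count in rax and the entry address in rbx, and calls through CEntryStub. A hedged usage sketch from a code generator's point of view; the runtime id is only an example and is assumed here to take a single argument:

// Illustrative call site (__ is the usual ACCESS_MASM shorthand).
__ push(rax);                        // the single runtime argument
__ CallRuntime(Runtime::kThrow, 1);  // the count must match f->nargs, otherwise
                                     // IllegalOperation(1) is generated instead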
439 440
440 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f, 441 MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
441 int num_arguments) { 442 int num_arguments) {
442 if (f->nargs >= 0 && f->nargs != num_arguments) { 443 if (f->nargs >= 0 && f->nargs != num_arguments) {
443 IllegalOperation(num_arguments); 444 IllegalOperation(num_arguments);
444 // Since we did not call the stub, there was no allocation failure. 445 // Since we did not call the stub, there was no allocation failure.
445 // Return some non-failure object. 446 // Return some non-failure object.
446 return Heap::undefined_value(); 447 return HEAP->undefined_value();
447 } 448 }
448 449
449 // TODO(1236192): Most runtime routines don't need the number of 450 // TODO(1236192): Most runtime routines don't need the number of
450 // arguments passed in because it is constant. At some point we 451 // arguments passed in because it is constant. At some point we
451 // should remove this need and make the runtime routine entry code 452 // should remove this need and make the runtime routine entry code
452 // smarter. 453 // smarter.
453 Set(rax, num_arguments); 454 Set(rax, num_arguments);
454 movq(rbx, ExternalReference(f)); 455 movq(rbx, ExternalReference(f));
455 CEntryStub ces(f->result_size); 456 CEntryStub ces(f->result_size);
456 return TryCallStub(&ces); 457 return TryCallStub(&ces);
(...skipping 134 matching lines...)
591 // No more valid handles (the result handle was the last one). Restore 592 // No more valid handles (the result handle was the last one). Restore
592 // previous handle scope. 593 // previous handle scope.
593 subl(Operand(base_reg, kLevelOffset), Immediate(1)); 594 subl(Operand(base_reg, kLevelOffset), Immediate(1));
594 movq(Operand(base_reg, kNextOffset), prev_next_address_reg); 595 movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
595 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset)); 596 cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
596 j(not_equal, &delete_allocated_handles); 597 j(not_equal, &delete_allocated_handles);
597 bind(&leave_exit_frame); 598 bind(&leave_exit_frame);
598 599
599 // Check if the function scheduled an exception. 600 // Check if the function scheduled an exception.
600 movq(rsi, scheduled_exception_address); 601 movq(rsi, scheduled_exception_address);
601 Cmp(Operand(rsi, 0), Factory::the_hole_value()); 602 Cmp(Operand(rsi, 0), FACTORY->the_hole_value());
602 j(not_equal, &promote_scheduled_exception); 603 j(not_equal, &promote_scheduled_exception);
603 604
604 LeaveApiExitFrame(); 605 LeaveApiExitFrame();
605 ret(stack_space * kPointerSize); 606 ret(stack_space * kPointerSize);
606 607
607 bind(&promote_scheduled_exception); 608 bind(&promote_scheduled_exception);
608 MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException, 609 MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
609 0, 1); 610 0, 1);
610 if (result->IsFailure()) { 611 if (result->IsFailure()) {
611 return result; 612 return result;
612 } 613 }
613 614
614 bind(&empty_result); 615 bind(&empty_result);
615 // It was zero; the result is undefined. 616 // It was zero; the result is undefined.
616 Move(rax, Factory::undefined_value()); 617 Move(rax, FACTORY->undefined_value());
617 jmp(&prologue); 618 jmp(&prologue);
618 619
619 // HandleScope limit has changed. Delete allocated extensions. 620 // HandleScope limit has changed. Delete allocated extensions.
620 bind(&delete_allocated_handles); 621 bind(&delete_allocated_handles);
621 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); 622 movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
622 movq(prev_limit_reg, rax); 623 movq(prev_limit_reg, rax);
624 #ifdef _WIN64
625 movq(rcx, ExternalReference::isolate_address());
626 #else
627 movq(rdi, ExternalReference::isolate_address());
628 #endif
623 movq(rax, ExternalReference::delete_handle_scope_extensions()); 629 movq(rax, ExternalReference::delete_handle_scope_extensions());
624 call(rax); 630 call(rax);
625 movq(rax, prev_limit_reg); 631 movq(rax, prev_limit_reg);
626 jmp(&leave_exit_frame); 632 jmp(&leave_exit_frame);
627 633
628 return result; 634 return result;
629 } 635 }
630 636
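The new #ifdef block near the end of this function (new lines 624-628) loads the isolate address into the first C-argument register before calling delete_handle_scope_extensions: rcx under the Windows x64 calling convention, rdi under the System V AMD64 convention used on Linux and Mac OS X. On the C++ side the callee then takes the isolate as an ordinary first parameter; a hedged sketch only, since the real declaration lives with the handle-scope code and may differ:

// Integer-argument registers by ABI, for reference:
//   Windows x64:  rcx, rdx, r8, r9
//   System V:     rdi, rsi, rdx, rcx, r8, r9
// Hypothetical receiving signature:
extern "C" void DeleteHandleScopeExtensions(Isolate* isolate);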
631 637
632 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, 638 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
(...skipping 980 matching lines...)
1613 push(rbp); 1619 push(rbp);
1614 } else { 1620 } else {
1615 ASSERT(try_location == IN_JS_ENTRY); 1621 ASSERT(try_location == IN_JS_ENTRY);
1616 // The frame pointer does not point to a JS frame so we save NULL 1622 // The frame pointer does not point to a JS frame so we save NULL
1617 // for rbp. We expect the code throwing an exception to check rbp 1623 // for rbp. We expect the code throwing an exception to check rbp
1618 // before dereferencing it to restore the context. 1624 // before dereferencing it to restore the context.
1619 push(Immediate(StackHandler::ENTRY)); 1625 push(Immediate(StackHandler::ENTRY));
1620 push(Immediate(0)); // NULL frame pointer. 1626 push(Immediate(0)); // NULL frame pointer.
1621 } 1627 }
1622 // Save the current handler. 1628 // Save the current handler.
1623 movq(kScratchRegister, ExternalReference(Top::k_handler_address)); 1629 movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
1624 push(Operand(kScratchRegister, 0)); 1630 push(Operand(kScratchRegister, 0));
1625 // Link this handler. 1631 // Link this handler.
1626 movq(Operand(kScratchRegister, 0), rsp); 1632 movq(Operand(kScratchRegister, 0), rsp);
1627 } 1633 }
1628 1634
1629 1635
1630 void MacroAssembler::PopTryHandler() { 1636 void MacroAssembler::PopTryHandler() {
1631 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); 1637 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1632 // Unlink this handler. 1638 // Unlink this handler.
1633 movq(kScratchRegister, ExternalReference(Top::k_handler_address)); 1639 movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
1634 pop(Operand(kScratchRegister, 0)); 1640 pop(Operand(kScratchRegister, 0));
1635 // Remove the remaining fields. 1641 // Remove the remaining fields.
1636 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); 1642 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1637 } 1643 }
1638 1644
1639 1645
1640 void MacroAssembler::Throw(Register value) { 1646 void MacroAssembler::Throw(Register value) {
1641 // Check that stack should contain next handler, frame pointer, state and 1647 // Check that stack should contain next handler, frame pointer, state and
1642 // return address in that order. 1648 // return address in that order.
1643 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == 1649 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1644 StackHandlerConstants::kStateOffset); 1650 StackHandlerConstants::kStateOffset);
1645 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == 1651 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1646 StackHandlerConstants::kPCOffset); 1652 StackHandlerConstants::kPCOffset);
1647 // Keep thrown value in rax. 1653 // Keep thrown value in rax.
1648 if (!value.is(rax)) { 1654 if (!value.is(rax)) {
1649 movq(rax, value); 1655 movq(rax, value);
1650 } 1656 }
1651 1657
1652 ExternalReference handler_address(Top::k_handler_address); 1658 ExternalReference handler_address(Isolate::k_handler_address);
1653 movq(kScratchRegister, handler_address); 1659 movq(kScratchRegister, handler_address);
1654 movq(rsp, Operand(kScratchRegister, 0)); 1660 movq(rsp, Operand(kScratchRegister, 0));
1655 // get next in chain 1661 // get next in chain
1656 pop(rcx); 1662 pop(rcx);
1657 movq(Operand(kScratchRegister, 0), rcx); 1663 movq(Operand(kScratchRegister, 0), rcx);
1658 pop(rbp); // pop frame pointer 1664 pop(rbp); // pop frame pointer
1659 pop(rdx); // remove state 1665 pop(rdx); // remove state
1660 1666
1661 // Before returning we restore the context from the frame pointer if not NULL. 1667 // Before returning we restore the context from the frame pointer if not NULL.
1662 // The frame pointer is NULL in the exception handler of a JS entry frame. 1668 // The frame pointer is NULL in the exception handler of a JS entry frame.
1663 Set(rsi, 0); // Tentatively set context pointer to NULL 1669 Set(rsi, 0); // Tentatively set context pointer to NULL
1664 NearLabel skip; 1670 NearLabel skip;
1665 cmpq(rbp, Immediate(0)); 1671 cmpq(rbp, Immediate(0));
1666 j(equal, &skip); 1672 j(equal, &skip);
1667 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 1673 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1668 bind(&skip); 1674 bind(&skip);
1669 ret(0); 1675 ret(0);
1670 } 1676 }
1671 1677
1672 1678
1673 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, 1679 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
1674 Register value) { 1680 Register value) {
1675 // Keep thrown value in rax. 1681 // Keep thrown value in rax.
1676 if (!value.is(rax)) { 1682 if (!value.is(rax)) {
1677 movq(rax, value); 1683 movq(rax, value);
1678 } 1684 }
1679 // Fetch top stack handler. 1685 // Fetch top stack handler.
1680 ExternalReference handler_address(Top::k_handler_address); 1686 ExternalReference handler_address(Isolate::k_handler_address);
1681 movq(kScratchRegister, handler_address); 1687 movq(kScratchRegister, handler_address);
1682 movq(rsp, Operand(kScratchRegister, 0)); 1688 movq(rsp, Operand(kScratchRegister, 0));
1683 1689
1684 // Unwind the handlers until the ENTRY handler is found. 1690 // Unwind the handlers until the ENTRY handler is found.
1685 NearLabel loop, done; 1691 NearLabel loop, done;
1686 bind(&loop); 1692 bind(&loop);
1687 // Load the type of the current stack handler. 1693 // Load the type of the current stack handler.
1688 const int kStateOffset = StackHandlerConstants::kStateOffset; 1694 const int kStateOffset = StackHandlerConstants::kStateOffset;
1689 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); 1695 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
1690 j(equal, &done); 1696 j(equal, &done);
1691 // Fetch the next handler in the list. 1697 // Fetch the next handler in the list.
1692 const int kNextOffset = StackHandlerConstants::kNextOffset; 1698 const int kNextOffset = StackHandlerConstants::kNextOffset;
1693 movq(rsp, Operand(rsp, kNextOffset)); 1699 movq(rsp, Operand(rsp, kNextOffset));
1694 jmp(&loop); 1700 jmp(&loop);
1695 bind(&done); 1701 bind(&done);
1696 1702
1697 // Set the top handler address to next handler past the current ENTRY handler. 1703 // Set the top handler address to next handler past the current ENTRY handler.
1698 movq(kScratchRegister, handler_address); 1704 movq(kScratchRegister, handler_address);
1699 pop(Operand(kScratchRegister, 0)); 1705 pop(Operand(kScratchRegister, 0));
1700 1706
1701 if (type == OUT_OF_MEMORY) { 1707 if (type == OUT_OF_MEMORY) {
1702 // Set external caught exception to false. 1708 // Set external caught exception to false.
1703 ExternalReference external_caught(Top::k_external_caught_exception_address); 1709 ExternalReference external_caught(
1710 Isolate::k_external_caught_exception_address);
1704 movq(rax, Immediate(false)); 1711 movq(rax, Immediate(false));
1705 store_rax(external_caught); 1712 store_rax(external_caught);
1706 1713
1707 // Set pending exception and rax to out of memory exception. 1714 // Set pending exception and rax to out of memory exception.
1708 ExternalReference pending_exception(Top::k_pending_exception_address); 1715 ExternalReference pending_exception(Isolate::k_pending_exception_address);
1709 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); 1716 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
1710 store_rax(pending_exception); 1717 store_rax(pending_exception);
1711 } 1718 }
1712 1719
1713 // Clear the context pointer. 1720 // Clear the context pointer.
1714 Set(rsi, 0); 1721 Set(rsi, 0);
1715 1722
1716 // Restore registers from handler. 1723 // Restore registers from handler.
1717 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize == 1724 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
1718 StackHandlerConstants::kFPOffset); 1725 StackHandlerConstants::kFPOffset);
(...skipping 55 matching lines...)
1774 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map); 1781 Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1775 j(not_equal, fail); 1782 j(not_equal, fail);
1776 } 1783 }
1777 1784
1778 1785
1779 void MacroAssembler::AbortIfNotNumber(Register object) { 1786 void MacroAssembler::AbortIfNotNumber(Register object) {
1780 NearLabel ok; 1787 NearLabel ok;
1781 Condition is_smi = CheckSmi(object); 1788 Condition is_smi = CheckSmi(object);
1782 j(is_smi, &ok); 1789 j(is_smi, &ok);
1783 Cmp(FieldOperand(object, HeapObject::kMapOffset), 1790 Cmp(FieldOperand(object, HeapObject::kMapOffset),
1784 Factory::heap_number_map()); 1791 FACTORY->heap_number_map());
1785 Assert(equal, "Operand not a number"); 1792 Assert(equal, "Operand not a number");
1786 bind(&ok); 1793 bind(&ok);
1787 } 1794 }
1788 1795
1789 1796
1790 void MacroAssembler::AbortIfSmi(Register object) { 1797 void MacroAssembler::AbortIfSmi(Register object) {
1791 NearLabel ok; 1798 NearLabel ok;
1792 Condition is_smi = CheckSmi(object); 1799 Condition is_smi = CheckSmi(object);
1793 Assert(NegateCondition(is_smi), "Operand is a smi"); 1800 Assert(NegateCondition(is_smi), "Operand is a smi");
1794 } 1801 }
(...skipping 238 matching lines...)
2033 2040
2034 void MacroAssembler::EnterFrame(StackFrame::Type type) { 2041 void MacroAssembler::EnterFrame(StackFrame::Type type) {
2035 push(rbp); 2042 push(rbp);
2036 movq(rbp, rsp); 2043 movq(rbp, rsp);
2037 push(rsi); // Context. 2044 push(rsi); // Context.
2038 Push(Smi::FromInt(type)); 2045 Push(Smi::FromInt(type));
2039 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 2046 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2040 push(kScratchRegister); 2047 push(kScratchRegister);
2041 if (emit_debug_code()) { 2048 if (emit_debug_code()) {
2042 movq(kScratchRegister, 2049 movq(kScratchRegister,
2043 Factory::undefined_value(), 2050 FACTORY->undefined_value(),
2044 RelocInfo::EMBEDDED_OBJECT); 2051 RelocInfo::EMBEDDED_OBJECT);
2045 cmpq(Operand(rsp, 0), kScratchRegister); 2052 cmpq(Operand(rsp, 0), kScratchRegister);
2046 Check(not_equal, "code object not properly patched"); 2053 Check(not_equal, "code object not properly patched");
2047 } 2054 }
2048 } 2055 }
2049 2056
2050 2057
2051 void MacroAssembler::LeaveFrame(StackFrame::Type type) { 2058 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2052 if (emit_debug_code()) { 2059 if (emit_debug_code()) {
2053 Move(kScratchRegister, Smi::FromInt(type)); 2060 Move(kScratchRegister, Smi::FromInt(type));
(...skipping 18 matching lines...)
2072 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); 2079 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
2073 push(Immediate(0)); // Saved entry sp, patched before call. 2080 push(Immediate(0)); // Saved entry sp, patched before call.
2074 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 2081 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2075 push(kScratchRegister); // Accessed from EditFrame::code_slot. 2082 push(kScratchRegister); // Accessed from EditFrame::code_slot.
2076 2083
2077 // Save the frame pointer and the context in top. 2084 // Save the frame pointer and the context in top.
2078 if (save_rax) { 2085 if (save_rax) {
2079 movq(r14, rax); // Backup rax in callee-save register. 2086 movq(r14, rax); // Backup rax in callee-save register.
2080 } 2087 }
2081 2088
2082 movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address)); 2089 movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address));
2083 movq(Operand(kScratchRegister, 0), rbp); 2090 movq(Operand(kScratchRegister, 0), rbp);
2084 2091
2085 movq(kScratchRegister, ExternalReference(Top::k_context_address)); 2092 movq(kScratchRegister, ExternalReference(Isolate::k_context_address));
2086 movq(Operand(kScratchRegister, 0), rsi); 2093 movq(Operand(kScratchRegister, 0), rsi);
2087 } 2094 }
2088 2095
2089 2096
2090 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, 2097 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
2091 bool save_doubles) { 2098 bool save_doubles) {
2092 #ifdef _WIN64 2099 #ifdef _WIN64
2093 const int kShadowSpace = 4; 2100 const int kShadowSpace = 4;
2094 arg_stack_space += kShadowSpace; 2101 arg_stack_space += kShadowSpace;
2095 #endif 2102 #endif
2096 // Optionally save all XMM registers. 2103 // Optionally save all XMM registers.
2097 if (save_doubles) { 2104 if (save_doubles) {
2098 CpuFeatures::Scope scope(SSE2); 2105 CpuFeatures::Scope scope(SSE2);
2099 int space = XMMRegister::kNumRegisters * kDoubleSize + 2106 int space = XMMRegister::kNumRegisters * kDoubleSize +
2100 arg_stack_space * kPointerSize; 2107 arg_stack_space * kPointerSize;
2101 subq(rsp, Immediate(space)); 2108 subq(rsp, Immediate(space));
2102 int offset = -2 * kPointerSize; 2109 int offset = -2 * kPointerSize;
2103 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) { 2110 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
2104 XMMRegister reg = XMMRegister::FromAllocationIndex(i); 2111 XMMRegister reg = XMMRegister::FromAllocationIndex(i);
2105 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg); 2112 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
2106 } 2113 }
2107 } else if (arg_stack_space > 0) { 2114 } else if (arg_stack_space > 0) {
2108 subq(rsp, Immediate(arg_stack_space * kPointerSize)); 2115 subq(rsp, Immediate(arg_stack_space * kPointerSize));
2109 } 2116 }
2110 2117
2111 // Get the required frame alignment for the OS. 2118 // Get the required frame alignment for the OS.
2112 static const int kFrameAlignment = OS::ActivationFrameAlignment(); 2119 const int kFrameAlignment = OS::ActivationFrameAlignment();
2113 if (kFrameAlignment > 0) { 2120 if (kFrameAlignment > 0) {
2114 ASSERT(IsPowerOf2(kFrameAlignment)); 2121 ASSERT(IsPowerOf2(kFrameAlignment));
2115 movq(kScratchRegister, Immediate(-kFrameAlignment)); 2122 movq(kScratchRegister, Immediate(-kFrameAlignment));
2116 and_(rsp, kScratchRegister); 2123 and_(rsp, kScratchRegister);
2117 } 2124 }
2118 2125
2119 // Patch the saved entry sp. 2126 // Patch the saved entry sp.
2120 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); 2127 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2121 } 2128 }
2122 2129
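The alignment step above works because OS::ActivationFrameAlignment() is a power of two, so and-ing rsp with its negation rounds the stack pointer down to the next aligned boundary. A small worked example, assuming a 16-byte alignment requirement:

//   rsp              = 0x7fff5fbff7e8
//   -kFrameAlignment = -16 = 0xfffffffffffffff0
//   rsp & -16        = 0x7fff5fbff7e0   (low four bits cleared)
// The mask can only move rsp downward, so it grows the region reserved by the
// subq above and never cuts into the saved XMM registers or argument slots.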
(...skipping 44 matching lines...)
2167 void MacroAssembler::LeaveApiExitFrame() { 2174 void MacroAssembler::LeaveApiExitFrame() {
2168 movq(rsp, rbp); 2175 movq(rsp, rbp);
2169 pop(rbp); 2176 pop(rbp);
2170 2177
2171 LeaveExitFrameEpilogue(); 2178 LeaveExitFrameEpilogue();
2172 } 2179 }
2173 2180
2174 2181
2175 void MacroAssembler::LeaveExitFrameEpilogue() { 2182 void MacroAssembler::LeaveExitFrameEpilogue() {
2176 // Restore current context from top and clear it in debug mode. 2183 // Restore current context from top and clear it in debug mode.
2177 ExternalReference context_address(Top::k_context_address); 2184 ExternalReference context_address(Isolate::k_context_address);
2178 movq(kScratchRegister, context_address); 2185 movq(kScratchRegister, context_address);
2179 movq(rsi, Operand(kScratchRegister, 0)); 2186 movq(rsi, Operand(kScratchRegister, 0));
2180 #ifdef DEBUG 2187 #ifdef DEBUG
2181 movq(Operand(kScratchRegister, 0), Immediate(0)); 2188 movq(Operand(kScratchRegister, 0), Immediate(0));
2182 #endif 2189 #endif
2183 2190
2184 // Clear the top frame. 2191 // Clear the top frame.
2185 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); 2192 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address);
2186 movq(kScratchRegister, c_entry_fp_address); 2193 movq(kScratchRegister, c_entry_fp_address);
2187 movq(Operand(kScratchRegister, 0), Immediate(0)); 2194 movq(Operand(kScratchRegister, 0), Immediate(0));
2188 } 2195 }
2189 2196
2190 2197
2191 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, 2198 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2192 Register scratch, 2199 Register scratch,
2193 Label* miss) { 2200 Label* miss) {
2194 Label same_contexts; 2201 Label same_contexts;
2195 2202
2196 ASSERT(!holder_reg.is(scratch)); 2203 ASSERT(!holder_reg.is(scratch));
2197 ASSERT(!scratch.is(kScratchRegister)); 2204 ASSERT(!scratch.is(kScratchRegister));
2198 // Load current lexical context from the stack frame. 2205 // Load current lexical context from the stack frame.
2199 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset)); 2206 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2200 2207
2201 // When generating debug code, make sure the lexical context is set. 2208 // When generating debug code, make sure the lexical context is set.
2202 if (emit_debug_code()) { 2209 if (emit_debug_code()) {
2203 cmpq(scratch, Immediate(0)); 2210 cmpq(scratch, Immediate(0));
2204 Check(not_equal, "we should not have an empty lexical context"); 2211 Check(not_equal, "we should not have an empty lexical context");
2205 } 2212 }
2206 // Load the global context of the current context. 2213 // Load the global context of the current context.
2207 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 2214 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2208 movq(scratch, FieldOperand(scratch, offset)); 2215 movq(scratch, FieldOperand(scratch, offset));
2209 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset)); 2216 movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2210 2217
2211 // Check the context is a global context. 2218 // Check the context is a global context.
2212 if (emit_debug_code()) { 2219 if (emit_debug_code()) {
2213 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), 2220 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2214 Factory::global_context_map()); 2221 FACTORY->global_context_map());
2215 Check(equal, "JSGlobalObject::global_context should be a global context."); 2222 Check(equal, "JSGlobalObject::global_context should be a global context.");
2216 } 2223 }
2217 2224
2218 // Check if both contexts are the same. 2225 // Check if both contexts are the same.
2219 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); 2226 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2220 j(equal, &same_contexts); 2227 j(equal, &same_contexts);
2221 2228
2222 // Compare security tokens. 2229 // Compare security tokens.
2223 // Check that the security token in the calling global object is 2230 // Check that the security token in the calling global object is
2224 // compatible with the security token in the receiving global 2231 // compatible with the security token in the receiving global
(...skipping 404 matching lines...)
2629 // (i.e., the static scope chain and runtime context chain do not agree). 2636 // (i.e., the static scope chain and runtime context chain do not agree).
2630 // A variable occurring in such a scope should have slot type LOOKUP and 2637 // A variable occurring in such a scope should have slot type LOOKUP and
2631 // not CONTEXT. 2638 // not CONTEXT.
2632 if (emit_debug_code()) { 2639 if (emit_debug_code()) {
2633 cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); 2640 cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2634 Check(equal, "Yo dawg, I heard you liked function contexts " 2641 Check(equal, "Yo dawg, I heard you liked function contexts "
2635 "so I put function contexts in all your contexts"); 2642 "so I put function contexts in all your contexts");
2636 } 2643 }
2637 } 2644 }
2638 2645
2646 #ifdef _WIN64
2647 static const int kRegisterPassedArguments = 4;
2648 #else
2649 static const int kRegisterPassedArguments = 6;
2650 #endif
2639 2651
2640 void MacroAssembler::LoadGlobalFunction(int index, Register function) { 2652 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2641 // Load the global or builtins object from the current context. 2653 // Load the global or builtins object from the current context.
2642 movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); 2654 movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2643 // Load the global context from the global or builtins object. 2655 // Load the global context from the global or builtins object.
2644 movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset)); 2656 movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
2645 // Load the function from the global context. 2657 // Load the function from the global context.
2646 movq(function, Operand(function, Context::SlotOffset(index))); 2658 movq(function, Operand(function, Context::SlotOffset(index)));
2647 } 2659 }
2648 2660
2649 2661
2650 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, 2662 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2651 Register map) { 2663 Register map) {
2652 // Load the initial map. The global functions all have initial maps. 2664 // Load the initial map. The global functions all have initial maps.
2653 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 2665 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2654 if (emit_debug_code()) { 2666 if (emit_debug_code()) {
2655 Label ok, fail; 2667 Label ok, fail;
2656 CheckMap(map, Factory::meta_map(), &fail, false); 2668 CheckMap(map, FACTORY->meta_map(), &fail, false);
2657 jmp(&ok); 2669 jmp(&ok);
2658 bind(&fail); 2670 bind(&fail);
2659 Abort("Global functions must have initial map"); 2671 Abort("Global functions must have initial map");
2660 bind(&ok); 2672 bind(&ok);
2661 } 2673 }
2662 } 2674 }
2663 2675
2664 2676
2665 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) { 2677 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2666 // On Windows 64 stack slots are reserved by the caller for all arguments 2678 // On Windows 64 stack slots are reserved by the caller for all arguments
2667 // including the ones passed in registers, and space is always allocated for 2679 // including the ones passed in registers, and space is always allocated for
2668 // the four register arguments even if the function takes fewer than four 2680 // the four register arguments even if the function takes fewer than four
2669 // arguments. 2681 // arguments.
2670 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers 2682 // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2671 // and the caller does not reserve stack slots for them. 2683 // and the caller does not reserve stack slots for them.
2672 ASSERT(num_arguments >= 0); 2684 ASSERT(num_arguments >= 0);
2673 #ifdef _WIN64 2685 #ifdef _WIN64
2674 static const int kMinimumStackSlots = 4; 2686 const int kMinimumStackSlots = kRegisterPassedArguments;
2675 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots; 2687 if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2676 return num_arguments; 2688 return num_arguments;
2677 #else 2689 #else
2678 static const int kRegisterPassedArguments = 6;
2679 if (num_arguments < kRegisterPassedArguments) return 0; 2690 if (num_arguments < kRegisterPassedArguments) return 0;
2680 return num_arguments - kRegisterPassedArguments; 2691 return num_arguments - kRegisterPassedArguments;
2681 #endif 2692 #endif
2682 } 2693 }
2683 2694
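A few concrete values for the computation above (kPointerSize is 8 on x64, so each slot is 8 bytes; the counts include any appended isolate pointer):

//   Win64:    ArgumentStackSlotsForCFunctionCall(4) == 4  // full shadow space, 32 bytes
//   Win64:    ArgumentStackSlotsForCFunctionCall(6) == 6  // 4 shadow slots + 2 stack args
//   System V: ArgumentStackSlotsForCFunctionCall(4) == 0  // rdi, rsi, rdx, rcx suffice
//   System V: ArgumentStackSlotsForCFunctionCall(8) == 2  // two arguments spill to the stack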
2684 2695
2685 void MacroAssembler::PrepareCallCFunction(int num_arguments) { 2696 void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2686 int frame_alignment = OS::ActivationFrameAlignment(); 2697 int frame_alignment = OS::ActivationFrameAlignment();
2687 ASSERT(frame_alignment != 0); 2698 ASSERT(frame_alignment != 0);
2688 ASSERT(num_arguments >= 0); 2699 ASSERT(num_arguments >= 0);
2700
2701 // Reserve space for Isolate address which is always passed as last parameter
2702 num_arguments += 1;
2703
2689 // Make stack end at alignment and allocate space for arguments and old rsp. 2704 // Make stack end at alignment and allocate space for arguments and old rsp.
2690 movq(kScratchRegister, rsp); 2705 movq(kScratchRegister, rsp);
2691 ASSERT(IsPowerOf2(frame_alignment)); 2706 ASSERT(IsPowerOf2(frame_alignment));
2692 int argument_slots_on_stack = 2707 int argument_slots_on_stack =
2693 ArgumentStackSlotsForCFunctionCall(num_arguments); 2708 ArgumentStackSlotsForCFunctionCall(num_arguments);
2694 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize)); 2709 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2695 and_(rsp, Immediate(-frame_alignment)); 2710 and_(rsp, Immediate(-frame_alignment));
2696 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister); 2711 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2697 } 2712 }
2698 2713
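After PrepareCallCFunction the stack is aligned and laid out so that the matching CallCFunction can undo everything with a single load. Roughly, with the extra argument for the isolate already counted:

//   [rsp + argument_slots_on_stack * kPointerSize]   saved caller rsp
//   [rsp + 0 .. argument_slots_on_stack - 1 slots]   outgoing stack arguments,
//                                                    including the appended isolate
//                                                    when it does not fit in a register
//                                                    (and the Win64 shadow space)
// CallCFunction later restores the caller's frame with a single
//   movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));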
2699 2714
2700 void MacroAssembler::CallCFunction(ExternalReference function, 2715 void MacroAssembler::CallCFunction(ExternalReference function,
2701 int num_arguments) { 2716 int num_arguments) {
2702 movq(rax, function); 2717 movq(rax, function);
2703 CallCFunction(rax, num_arguments); 2718 CallCFunction(rax, num_arguments);
2704 } 2719 }
2705 2720
2706 2721
2707 void MacroAssembler::CallCFunction(Register function, int num_arguments) { 2722 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2723 // Pass current isolate address as additional parameter.
2724 if (num_arguments < kRegisterPassedArguments) {
2725 #ifdef _WIN64
2726 // First four arguments are passed in registers on Windows.
2727 Register arg_to_reg[] = {rcx, rdx, r8, r9};
2728 #else
2729 // First six arguments are passed in registers on other platforms.
2730 Register arg_to_reg[] = {rdi, rsi, rdx, rcx, r8, r9};
2731 #endif
2732 Register reg = arg_to_reg[num_arguments];
2733 movq(reg, ExternalReference::isolate_address());
2734 } else {
2735 // Push Isolate pointer after all parameters.
2736 int argument_slots_on_stack =
2737 ArgumentStackSlotsForCFunctionCall(num_arguments);
2738 movq(kScratchRegister, ExternalReference::isolate_address());
2739 movq(Operand(rsp, argument_slots_on_stack * kPointerSize),
2740 kScratchRegister);
2741 }
2742
2708 // Check stack alignment. 2743 // Check stack alignment.
2709 if (emit_debug_code()) { 2744 if (emit_debug_code()) {
2710 CheckStackAlignment(); 2745 CheckStackAlignment();
2711 } 2746 }
2712 2747
2713 call(function); 2748 call(function);
2714 ASSERT(OS::ActivationFrameAlignment() != 0); 2749 ASSERT(OS::ActivationFrameAlignment() != 0);
2715 ASSERT(num_arguments >= 0); 2750 ASSERT(num_arguments >= 0);
2751 num_arguments += 1;
2716 int argument_slots_on_stack = 2752 int argument_slots_on_stack =
2717 ArgumentStackSlotsForCFunctionCall(num_arguments); 2753 ArgumentStackSlotsForCFunctionCall(num_arguments);
2718 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize)); 2754 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2719 } 2755 }
2720 2756
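Taken together with PrepareCallCFunction, the new code above appends the current isolate as one trailing argument to every C call: it rides in the next free argument register when one is available, otherwise in the first unused stack slot, and num_arguments is bumped afterwards so the teardown accounts for it. On the C side this means runtime helpers grow a trailing Isolate* parameter; a hedged sketch with a made-up helper, not a function from the tree:

// Hypothetical helper illustrating the shape of the C side after this change.
extern "C" int32_t HypotheticalHelper(int32_t a, int32_t b, Isolate* isolate) {
  // With two explicit arguments, the appended isolate arrives as the third
  // integer argument: rdx under System V, r8 under the Windows x64 ABI.
  USE(isolate);
  return a + b;
}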
2721 2757
2722 CodePatcher::CodePatcher(byte* address, int size) 2758 CodePatcher::CodePatcher(byte* address, int size)
2723 : address_(address), size_(size), masm_(address, size + Assembler::kGap) { 2759 : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2724 // Create a new macro assembler pointing to the address of the code to patch. 2760 // Create a new macro assembler pointing to the address of the code to patch.
2725 // The size is adjusted with kGap on order for the assembler to generate size 2761 // The size is adjusted with kGap on order for the assembler to generate size
2726 // bytes of instructions without failing with buffer size constraints. 2762 // bytes of instructions without failing with buffer size constraints.
2727 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2763 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2728 } 2764 }
2729 2765
2730 2766
2731 CodePatcher::~CodePatcher() { 2767 CodePatcher::~CodePatcher() {
2732 // Indicate that code has changed. 2768 // Indicate that code has changed.
2733 CPU::FlushICache(address_, size_); 2769 CPU::FlushICache(address_, size_);
2734 2770
2735 // Check that the code was patched as expected. 2771 // Check that the code was patched as expected.
2736 ASSERT(masm_.pc_ == address_ + size_); 2772 ASSERT(masm_.pc_ == address_ + size_);
2737 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2773 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2738 } 2774 }
2739 2775
2740 } } // namespace v8::internal 2776 } } // namespace v8::internal
2741 2777
2742 #endif // V8_TARGET_ARCH_X64 2778 #endif // V8_TARGET_ARCH_X64