Chromium Code Reviews

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 6716018: X64: Optimize access to external references. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 9 months ago
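
The heart of the change is the set of new helpers in the diff below (RootRegisterDelta, ExternalOperand, Load, Store, LoadAddress): when the serializer is disabled and the root array register is available, an external reference is addressed as kRootRegister plus a 32-bit displacement instead of materializing a full 64-bit address. The standalone C++ sketch below illustrates only that decision; the bias constant and the addresses in it are made-up values for illustration, not taken from the patch.

// Illustrative sketch, not part of the patch: mirrors the decision made by
// RootRegisterDelta()/ExternalOperand() in the diff. Bias and addresses are
// stand-ins for the real isolate layout.
#include <cstdint>
#include <cstdio>

namespace {

const int64_t kRootRegisterBias = 128;  // assumed bias added to the roots pointer

bool IsInt32(int64_t value) {
  return value == static_cast<int64_t>(static_cast<int32_t>(value));
}

// Delta between an external address and the value held in kRootRegister.
int64_t RootRegisterDelta(uint64_t external_address, uint64_t roots_address) {
  uint64_t root_register_value = roots_address + kRootRegisterBias;
  return static_cast<int64_t>(external_address - root_register_value);
}

}  // namespace

int main() {
  const uint64_t roots_address = 0x100000000ULL;  // hypothetical roots array address
  const uint64_t targets[] = {
      roots_address + 0x2000,        // reachable with a 32-bit displacement
      roots_address + (1ULL << 40),  // too far; needs the full 64-bit address
  };
  for (uint64_t target : targets) {
    int64_t delta = RootRegisterDelta(target, roots_address);
    if (IsInt32(delta)) {
      // Short form: Operand(kRootRegister, delta); no scratch register needed.
      std::printf("use [kRootRegister + %lld]\n", static_cast<long long>(delta));
    } else {
      // Fallback: 10-byte movq of the absolute address into a scratch register.
      std::printf("movq scratch, 0x%llx; use [scratch]\n",
                  static_cast<unsigned long long>(target));
    }
  }
  return 0;
}
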
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 26 matching lines...)
37 #include "debug.h" 37 #include "debug.h"
38 #include "heap.h" 38 #include "heap.h"
39 39
40 namespace v8 { 40 namespace v8 {
41 namespace internal { 41 namespace internal {
42 42
43 MacroAssembler::MacroAssembler(void* buffer, int size) 43 MacroAssembler::MacroAssembler(void* buffer, int size)
44 : Assembler(buffer, size), 44 : Assembler(buffer, size),
45 generating_stub_(false), 45 generating_stub_(false),
46 allow_stub_calls_(true), 46 allow_stub_calls_(true),
47 root_array_available_(true),
47 code_object_(HEAP->undefined_value()) { 48 code_object_(HEAP->undefined_value()) {
48 } 49 }
49 50
50 51
52 static intptr_t RootRegisterDelta(ExternalReference other) {
53 Address roots_register_value = kRootRegisterBias +
54 reinterpret_cast<Address>(Isolate::Current()->heap()->roots_address());
55 intptr_t delta = other.address() - roots_register_value;
56 return delta;
57 }
58
59
60 Operand MacroAssembler::ExternalOperand(ExternalReference target,
61 Register scratch) {
62 if (root_array_available_ && !Serializer::enabled()) {
63 intptr_t delta = RootRegisterDelta(target);
64 if (is_int32(delta)) {
65 Serializer::TooLateToEnableNow();
66 return Operand(kRootRegister, delta);
67 }
68 }
69 movq(scratch, target);
70 return Operand(scratch, 0);
71 }
72
73
74 void MacroAssembler::Load(Register destination, ExternalReference source) {
75 if (root_array_available_ && !Serializer::enabled()) {
76 intptr_t delta = RootRegisterDelta(source);
77 if (is_int32(delta)) {
78 Serializer::TooLateToEnableNow();
79 movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
80 return;
81 }
82 }
83 // Safe code.
84 if (destination.is(rax)) {
85 load_rax(source);
86 } else {
87 movq(kScratchRegister, source);
88 movq(destination, Operand(kScratchRegister, 0));
89 }
90 }
91
92
93 void MacroAssembler::Store(ExternalReference destination, Register source) {
94 if (root_array_available_ && !Serializer::enabled()) {
95 intptr_t delta = RootRegisterDelta(destination);
96 if (is_int32(delta)) {
97 Serializer::TooLateToEnableNow();
98 movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
99 return;
100 }
101 }
102 // Safe code.
103 if (source.is(rax)) {
104 store_rax(destination);
105 } else {
106 movq(kScratchRegister, destination);
107 movq(Operand(kScratchRegister, 0), source);
108 }
109 }
110
111
112 void MacroAssembler::LoadAddress(Register destination,
113 ExternalReference source) {
114 if (root_array_available_ && !Serializer::enabled()) {
115 intptr_t delta = RootRegisterDelta(source);
116 if (is_int32(delta)) {
117 Serializer::TooLateToEnableNow();
118 lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
119 return;
120 }
121 }
122 // Safe code.
123 movq(destination, source);
124 }
125
126
127 int MacroAssembler::LoadAddressSize(ExternalReference source) {
128 if (root_array_available_ && !Serializer::enabled()) {
129 // This calculation depends on the internals of LoadAddress.
130 // Its correctness is ensured by the asserts in the Call
131 // instruction below.
132 intptr_t delta = RootRegisterDelta(source);
133 if (is_int32(delta)) {
134 Serializer::TooLateToEnableNow();
135 // Operand is lea(scratch, Operand(kRootRegister, delta));
136 // Opcodes: REX.W 8D ModRM Disp8/Disp32 - 4 or 7 bytes.
137 int size = 4;
138 if (!is_int8(static_cast<int32_t>(delta))) {
139 size += 3; // Need full four-byte displacement in lea.
140 }
141 return size;
142 }
143 }
144 // Size of movq(destination, src);
145 return 10;
146 }
147
148
51 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { 149 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
150 ASSERT(root_array_available_);
52 movq(destination, Operand(kRootRegister, 151 movq(destination, Operand(kRootRegister,
53 (index << kPointerSizeLog2) - kRootRegisterBias)); 152 (index << kPointerSizeLog2) - kRootRegisterBias));
54 } 153 }
55 154
56 155
57 void MacroAssembler::LoadRootIndexed(Register destination, 156 void MacroAssembler::LoadRootIndexed(Register destination,
58 Register variable_offset, 157 Register variable_offset,
59 int fixed_offset) { 158 int fixed_offset) {
159 ASSERT(root_array_available_);
60 movq(destination, 160 movq(destination,
61 Operand(kRootRegister, 161 Operand(kRootRegister,
62 variable_offset, times_pointer_size, 162 variable_offset, times_pointer_size,
63 (fixed_offset << kPointerSizeLog2) - kRootRegisterBias)); 163 (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
64 } 164 }
65 165
66 166
67 void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) { 167 void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
168 ASSERT(root_array_available_);
68 movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias), 169 movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
69 source); 170 source);
70 } 171 }
71 172
72 173
73 void MacroAssembler::PushRoot(Heap::RootListIndex index) { 174 void MacroAssembler::PushRoot(Heap::RootListIndex index) {
175 ASSERT(root_array_available_);
74 push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias)); 176 push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
75 } 177 }
76 178
77 179
78 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) { 180 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
181 ASSERT(root_array_available_);
79 cmpq(with, Operand(kRootRegister, 182 cmpq(with, Operand(kRootRegister,
80 (index << kPointerSizeLog2) - kRootRegisterBias)); 183 (index << kPointerSizeLog2) - kRootRegisterBias));
81 } 184 }
82 185
83 186
84 void MacroAssembler::CompareRoot(const Operand& with, 187 void MacroAssembler::CompareRoot(const Operand& with,
85 Heap::RootListIndex index) { 188 Heap::RootListIndex index) {
189 ASSERT(root_array_available_);
86 ASSERT(!with.AddressUsesRegister(kScratchRegister)); 190 ASSERT(!with.AddressUsesRegister(kScratchRegister));
87 LoadRoot(kScratchRegister, index); 191 LoadRoot(kScratchRegister, index);
88 cmpq(with, kScratchRegister); 192 cmpq(with, kScratchRegister);
89 } 193 }
90 194
91 195
92 void MacroAssembler::RecordWriteHelper(Register object, 196 void MacroAssembler::RecordWriteHelper(Register object,
93 Register addr, 197 Register addr,
94 Register scratch) { 198 Register scratch) {
95 if (emit_debug_code()) { 199 if (emit_debug_code()) {
(...skipping 290 matching lines...)
386 490
387 491
388 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { 492 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
389 CallRuntime(Runtime::FunctionForId(id), num_arguments); 493 CallRuntime(Runtime::FunctionForId(id), num_arguments);
390 } 494 }
391 495
392 496
393 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { 497 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
394 const Runtime::Function* function = Runtime::FunctionForId(id); 498 const Runtime::Function* function = Runtime::FunctionForId(id);
395 Set(rax, function->nargs); 499 Set(rax, function->nargs);
396 movq(rbx, ExternalReference(function)); 500 LoadAddress(rbx, ExternalReference(function));
397 CEntryStub ces(1); 501 CEntryStub ces(1);
398 ces.SaveDoubles(); 502 ces.SaveDoubles();
399 CallStub(&ces); 503 CallStub(&ces);
400 } 504 }
401 505
402 506
403 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, 507 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
404 int num_arguments) { 508 int num_arguments) {
405 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); 509 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
406 } 510 }
407 511
408 512
409 void MacroAssembler::CallRuntime(const Runtime::Function* f, 513 void MacroAssembler::CallRuntime(const Runtime::Function* f,
410 int num_arguments) { 514 int num_arguments) {
411 // If the expected number of arguments of the runtime function is 515 // If the expected number of arguments of the runtime function is
412 // constant, we check that the actual number of arguments matches the 516 // constant, we check that the actual number of arguments matches the
413 // expectation. 517 // expectation.
414 if (f->nargs >= 0 && f->nargs != num_arguments) { 518 if (f->nargs >= 0 && f->nargs != num_arguments) {
415 IllegalOperation(num_arguments); 519 IllegalOperation(num_arguments);
416 return; 520 return;
417 } 521 }
418 522
419 // TODO(1236192): Most runtime routines don't need the number of 523 // TODO(1236192): Most runtime routines don't need the number of
420 // arguments passed in because it is constant. At some point we 524 // arguments passed in because it is constant. At some point we
421 // should remove this need and make the runtime routine entry code 525 // should remove this need and make the runtime routine entry code
422 // smarter. 526 // smarter.
423 Set(rax, num_arguments); 527 Set(rax, num_arguments);
424 movq(rbx, ExternalReference(f)); 528 LoadAddress(rbx, ExternalReference(f));
425 CEntryStub ces(f->result_size); 529 CEntryStub ces(f->result_size);
426 CallStub(&ces); 530 CallStub(&ces);
427 } 531 }
428 532
429 533
430 MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f, 534 MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
431 int num_arguments) { 535 int num_arguments) {
432 if (f->nargs >= 0 && f->nargs != num_arguments) { 536 if (f->nargs >= 0 && f->nargs != num_arguments) {
433 IllegalOperation(num_arguments); 537 IllegalOperation(num_arguments);
434 // Since we did not call the stub, there was no allocation failure. 538 // Since we did not call the stub, there was no allocation failure.
435 // Return some non-failure object. 539 // Return some non-failure object.
436 return HEAP->undefined_value(); 540 return HEAP->undefined_value();
437 } 541 }
438 542
439 // TODO(1236192): Most runtime routines don't need the number of 543 // TODO(1236192): Most runtime routines don't need the number of
440 // arguments passed in because it is constant. At some point we 544 // arguments passed in because it is constant. At some point we
441 // should remove this need and make the runtime routine entry code 545 // should remove this need and make the runtime routine entry code
442 // smarter. 546 // smarter.
443 Set(rax, num_arguments); 547 Set(rax, num_arguments);
444 movq(rbx, ExternalReference(f)); 548 LoadAddress(rbx, ExternalReference(f));
445 CEntryStub ces(f->result_size); 549 CEntryStub ces(f->result_size);
446 return TryCallStub(&ces); 550 return TryCallStub(&ces);
447 } 551 }
448 552
449 553
450 void MacroAssembler::CallExternalReference(const ExternalReference& ext, 554 void MacroAssembler::CallExternalReference(const ExternalReference& ext,
451 int num_arguments) { 555 int num_arguments) {
452 Set(rax, num_arguments); 556 Set(rax, num_arguments);
453 movq(rbx, ext); 557 LoadAddress(rbx, ext);
454 558
455 CEntryStub stub(1); 559 CEntryStub stub(1);
456 CallStub(&stub); 560 CallStub(&stub);
457 } 561 }
458 562
459 563
460 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext, 564 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
461 int num_arguments, 565 int num_arguments,
462 int result_size) { 566 int result_size) {
463 // ----------- S t a t e ------------- 567 // ----------- S t a t e -------------
(...skipping 140 matching lines...)
604 bind(&empty_result); 708 bind(&empty_result);
605 // It was zero; the result is undefined. 709 // It was zero; the result is undefined.
606 Move(rax, FACTORY->undefined_value()); 710 Move(rax, FACTORY->undefined_value());
607 jmp(&prologue); 711 jmp(&prologue);
608 712
609 // HandleScope limit has changed. Delete allocated extensions. 713 // HandleScope limit has changed. Delete allocated extensions.
610 bind(&delete_allocated_handles); 714 bind(&delete_allocated_handles);
611 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); 715 movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
612 movq(prev_limit_reg, rax); 716 movq(prev_limit_reg, rax);
613 #ifdef _WIN64 717 #ifdef _WIN64
614 movq(rcx, ExternalReference::isolate_address()); 718 LoadAddress(rcx, ExternalReference::isolate_address());
615 #else 719 #else
616 movq(rdi, ExternalReference::isolate_address()); 720 LoadAddress(rdi, ExternalReference::isolate_address());
617 #endif 721 #endif
618 movq(rax, ExternalReference::delete_handle_scope_extensions()); 722 LoadAddress(rax, ExternalReference::delete_handle_scope_extensions());
619 call(rax); 723 call(rax);
620 movq(rax, prev_limit_reg); 724 movq(rax, prev_limit_reg);
621 jmp(&leave_exit_frame); 725 jmp(&leave_exit_frame);
622 726
623 return result; 727 return result;
624 } 728 }
625 729
626 730
627 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, 731 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
628 int result_size) { 732 int result_size) {
629 // Set the entry point and jump to the C entry runtime stub. 733 // Set the entry point and jump to the C entry runtime stub.
630 movq(rbx, ext); 734 LoadAddress(rbx, ext);
631 CEntryStub ces(result_size); 735 CEntryStub ces(result_size);
632 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); 736 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
633 } 737 }
634 738
635 739
636 MaybeObject* MacroAssembler::TryJumpToExternalReference( 740 MaybeObject* MacroAssembler::TryJumpToExternalReference(
637 const ExternalReference& ext, int result_size) { 741 const ExternalReference& ext, int result_size) {
638 // Set the entry point and jump to the C entry runtime stub. 742 // Set the entry point and jump to the C entry runtime stub.
639 movq(rbx, ext); 743 LoadAddress(rbx, ext);
640 CEntryStub ces(result_size); 744 CEntryStub ces(result_size);
641 return TryTailCallStub(&ces); 745 return TryTailCallStub(&ces);
642 } 746 }
643 747
644 748
645 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, 749 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
646 InvokeFlag flag, 750 InvokeFlag flag,
647 CallWrapper* call_wrapper) { 751 CallWrapper* call_wrapper) {
648 // Calls are not allowed in some stubs. 752 // Calls are not allowed in some stubs.
649 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls()); 753 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
(...skipping 786 matching lines...)
1436 } 1540 }
1437 } 1541 }
1438 1542
1439 1543
1440 void MacroAssembler::Test(const Operand& src, Smi* source) { 1544 void MacroAssembler::Test(const Operand& src, Smi* source) {
1441 testl(Operand(src, kIntSize), Immediate(source->value())); 1545 testl(Operand(src, kIntSize), Immediate(source->value()));
1442 } 1546 }
1443 1547
1444 1548
1445 void MacroAssembler::Jump(ExternalReference ext) { 1549 void MacroAssembler::Jump(ExternalReference ext) {
1446 movq(kScratchRegister, ext); 1550 LoadAddress(kScratchRegister, ext);
1447 jmp(kScratchRegister); 1551 jmp(kScratchRegister);
1448 } 1552 }
1449 1553
1450 1554
1451 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { 1555 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1452 movq(kScratchRegister, destination, rmode); 1556 movq(kScratchRegister, destination, rmode);
1453 jmp(kScratchRegister); 1557 jmp(kScratchRegister);
1454 } 1558 }
1455 1559
1456 1560
1457 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) { 1561 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
1458 // TODO(X64): Inline this 1562 // TODO(X64): Inline this
1459 jmp(code_object, rmode); 1563 jmp(code_object, rmode);
1460 } 1564 }
1461 1565
1462 1566
1567 int MacroAssembler::CallSize(ExternalReference ext) {
1568 // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
1569 const int kCallInstructionSize = 3;
1570 return LoadAddressSize(ext) + kCallInstructionSize;
1571 }
1572
1573
1463 void MacroAssembler::Call(ExternalReference ext) { 1574 void MacroAssembler::Call(ExternalReference ext) {
1464 #ifdef DEBUG 1575 #ifdef DEBUG
1465 int pre_position = pc_offset(); 1576 int end_position = pc_offset() + CallSize(ext);
1466 #endif 1577 #endif
1467 movq(kScratchRegister, ext); 1578 LoadAddress(kScratchRegister, ext);
1468 call(kScratchRegister); 1579 call(kScratchRegister);
1469 #ifdef DEBUG 1580 #ifdef DEBUG
1470 int post_position = pc_offset(); 1581 CHECK_EQ(end_position, pc_offset());
1471 CHECK_EQ(pre_position + CallSize(ext), post_position);
1472 #endif 1582 #endif
1473 } 1583 }
1474 1584
1475 1585
1476 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { 1586 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1477 #ifdef DEBUG 1587 #ifdef DEBUG
1478 int pre_position = pc_offset(); 1588 int end_position = pc_offset() + CallSize(destination, rmode);
1479 #endif 1589 #endif
1480 movq(kScratchRegister, destination, rmode); 1590 movq(kScratchRegister, destination, rmode);
1481 call(kScratchRegister); 1591 call(kScratchRegister);
1482 #ifdef DEBUG 1592 #ifdef DEBUG
1483 int post_position = pc_offset(); 1593 CHECK_EQ(pc_offset(), end_position);
1484 CHECK_EQ(pre_position + CallSize(destination, rmode), post_position);
1485 #endif 1594 #endif
1486 } 1595 }
1487 1596
1488 1597
1489 void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) { 1598 void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1490 #ifdef DEBUG 1599 #ifdef DEBUG
1491 int pre_position = pc_offset(); 1600 int end_position = pc_offset() + CallSize(code_object);
1492 #endif 1601 #endif
1493 ASSERT(RelocInfo::IsCodeTarget(rmode)); 1602 ASSERT(RelocInfo::IsCodeTarget(rmode));
1494 call(code_object, rmode); 1603 call(code_object, rmode);
1495 #ifdef DEBUG 1604 #ifdef DEBUG
1496 int post_position = pc_offset(); 1605 CHECK_EQ(end_position, pc_offset());
1497 CHECK_EQ(pre_position + CallSize(code_object), post_position);
1498 #endif 1606 #endif
1499 } 1607 }
1500 1608
1501 1609
1502 void MacroAssembler::Pushad() { 1610 void MacroAssembler::Pushad() {
1503 push(rax); 1611 push(rax);
1504 push(rcx); 1612 push(rcx);
1505 push(rdx); 1613 push(rdx);
1506 push(rbx); 1614 push(rbx);
1507 // Not pushing rsp or rbp. 1615 // Not pushing rsp or rbp.
(...skipping 100 matching lines...)
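
For reference, here is a worked example of the size bookkeeping that the new CallSize()/LoadAddressSize() pair and the DEBUG end_position checks above rely on, using the byte counts quoted in the patch comments (4 or 7 bytes for a root-relative lea, 10 bytes for a movq of a 64-bit immediate, 3 bytes for the call through the scratch register). This is an illustrative sketch, not code from the patch.

// Illustrative size arithmetic only; it mirrors the comments in
// LoadAddressSize() and CallSize() rather than emitting any code.
#include <cstdint>
#include <cstdio>

int LoadAddressSizeSketch(int64_t delta, bool root_relative) {
  if (root_relative) {
    // lea reg, [kRootRegister + delta]: REX.W 8D ModRM, plus disp8 or disp32.
    return (delta >= -128 && delta <= 127) ? 4 : 7;
  }
  // movq reg, imm64 is 10 bytes.
  return 10;
}

int main() {
  const int kCallInstructionSize = 3;  // call through the scratch register
  struct Case { int64_t delta; bool root_relative; };
  const Case cases[] = {
      {0x10, true},     // short displacement: 4 + 3 = 7 bytes
      {0x10000, true},  // 32-bit displacement: 7 + 3 = 10 bytes
      {0, false},       // absolute 64-bit address: 10 + 3 = 13 bytes
  };
  for (const Case& c : cases) {
    int load_size = LoadAddressSizeSketch(c.delta, c.root_relative);
    std::printf("LoadAddressSize = %d, CallSize = %d\n",
                load_size, load_size + kCallInstructionSize);
  }
  return 0;
}
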
1608 push(rbp); 1716 push(rbp);
1609 } else { 1717 } else {
1610 ASSERT(try_location == IN_JS_ENTRY); 1718 ASSERT(try_location == IN_JS_ENTRY);
1611 // The frame pointer does not point to a JS frame so we save NULL 1719 // The frame pointer does not point to a JS frame so we save NULL
1612 // for rbp. We expect the code throwing an exception to check rbp 1720 // for rbp. We expect the code throwing an exception to check rbp
1613 // before dereferencing it to restore the context. 1721 // before dereferencing it to restore the context.
1614 push(Immediate(StackHandler::ENTRY)); 1722 push(Immediate(StackHandler::ENTRY));
1615 push(Immediate(0)); // NULL frame pointer. 1723 push(Immediate(0)); // NULL frame pointer.
1616 } 1724 }
1617 // Save the current handler. 1725 // Save the current handler.
1618 movq(kScratchRegister, ExternalReference(Isolate::k_handler_address)); 1726 Operand handler_operand =
1619 push(Operand(kScratchRegister, 0)); 1727 ExternalOperand(ExternalReference(Isolate::k_handler_address));
1728 push(handler_operand);
1620 // Link this handler. 1729 // Link this handler.
1621 movq(Operand(kScratchRegister, 0), rsp); 1730 movq(handler_operand, rsp);
1622 } 1731 }
1623 1732
1624 1733
1625 void MacroAssembler::PopTryHandler() { 1734 void MacroAssembler::PopTryHandler() {
1626 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); 1735 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1627 // Unlink this handler. 1736 // Unlink this handler.
1628 movq(kScratchRegister, ExternalReference(Isolate::k_handler_address)); 1737 Operand handler_operand =
1629 pop(Operand(kScratchRegister, 0)); 1738 ExternalOperand(ExternalReference(Isolate::k_handler_address));
1739 pop(handler_operand);
1630 // Remove the remaining fields. 1740 // Remove the remaining fields.
1631 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); 1741 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1632 } 1742 }
1633 1743
1634 1744
1635 void MacroAssembler::Throw(Register value) { 1745 void MacroAssembler::Throw(Register value) {
1636 // Check that stack should contain next handler, frame pointer, state and 1746 // Check that stack should contain next handler, frame pointer, state and
1637 // return address in that order. 1747 // return address in that order.
1638 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == 1748 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1639 StackHandlerConstants::kStateOffset); 1749 StackHandlerConstants::kStateOffset);
1640 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == 1750 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1641 StackHandlerConstants::kPCOffset); 1751 StackHandlerConstants::kPCOffset);
1642 // Keep thrown value in rax. 1752 // Keep thrown value in rax.
1643 if (!value.is(rax)) { 1753 if (!value.is(rax)) {
1644 movq(rax, value); 1754 movq(rax, value);
1645 } 1755 }
1646 1756
1647 ExternalReference handler_address(Isolate::k_handler_address); 1757 ExternalReference handler_address(Isolate::k_handler_address);
1648 movq(kScratchRegister, handler_address); 1758 Operand handler_operand = ExternalOperand(handler_address);
1649 movq(rsp, Operand(kScratchRegister, 0)); 1759 movq(rsp, handler_operand);
1650 // get next in chain 1760 // get next in chain
1651 pop(rcx); 1761 pop(handler_operand);
1652 movq(Operand(kScratchRegister, 0), rcx);
1653 pop(rbp); // pop frame pointer 1762 pop(rbp); // pop frame pointer
1654 pop(rdx); // remove state 1763 pop(rdx); // remove state
1655 1764
1656 // Before returning we restore the context from the frame pointer if not NULL. 1765 // Before returning we restore the context from the frame pointer if not NULL.
1657 // The frame pointer is NULL in the exception handler of a JS entry frame. 1766 // The frame pointer is NULL in the exception handler of a JS entry frame.
1658 Set(rsi, 0); // Tentatively set context pointer to NULL 1767 Set(rsi, 0); // Tentatively set context pointer to NULL
1659 NearLabel skip; 1768 NearLabel skip;
1660 cmpq(rbp, Immediate(0)); 1769 cmpq(rbp, Immediate(0));
1661 j(equal, &skip); 1770 j(equal, &skip);
1662 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 1771 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1663 bind(&skip); 1772 bind(&skip);
1664 ret(0); 1773 ret(0);
1665 } 1774 }
1666 1775
1667 1776
1668 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, 1777 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
1669 Register value) { 1778 Register value) {
1670 // Keep thrown value in rax. 1779 // Keep thrown value in rax.
1671 if (!value.is(rax)) { 1780 if (!value.is(rax)) {
1672 movq(rax, value); 1781 movq(rax, value);
1673 } 1782 }
1674 // Fetch top stack handler. 1783 // Fetch top stack handler.
1675 ExternalReference handler_address(Isolate::k_handler_address); 1784 ExternalReference handler_address(Isolate::k_handler_address);
1676 movq(kScratchRegister, handler_address); 1785 Load(rsp, handler_address);
1677 movq(rsp, Operand(kScratchRegister, 0));
1678 1786
1679 // Unwind the handlers until the ENTRY handler is found. 1787 // Unwind the handlers until the ENTRY handler is found.
1680 NearLabel loop, done; 1788 NearLabel loop, done;
1681 bind(&loop); 1789 bind(&loop);
1682 // Load the type of the current stack handler. 1790 // Load the type of the current stack handler.
1683 const int kStateOffset = StackHandlerConstants::kStateOffset; 1791 const int kStateOffset = StackHandlerConstants::kStateOffset;
1684 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); 1792 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
1685 j(equal, &done); 1793 j(equal, &done);
1686 // Fetch the next handler in the list. 1794 // Fetch the next handler in the list.
1687 const int kNextOffset = StackHandlerConstants::kNextOffset; 1795 const int kNextOffset = StackHandlerConstants::kNextOffset;
1688 movq(rsp, Operand(rsp, kNextOffset)); 1796 movq(rsp, Operand(rsp, kNextOffset));
1689 jmp(&loop); 1797 jmp(&loop);
1690 bind(&done); 1798 bind(&done);
1691 1799
1692 // Set the top handler address to next handler past the current ENTRY handler. 1800 // Set the top handler address to next handler past the current ENTRY handler.
1693 movq(kScratchRegister, handler_address); 1801 Operand handler_operand = ExternalOperand(handler_address);
1694 pop(Operand(kScratchRegister, 0)); 1802 pop(handler_operand);
1695 1803
1696 if (type == OUT_OF_MEMORY) { 1804 if (type == OUT_OF_MEMORY) {
1697 // Set external caught exception to false. 1805 // Set external caught exception to false.
1698 ExternalReference external_caught( 1806 ExternalReference external_caught(
1699 Isolate::k_external_caught_exception_address); 1807 Isolate::k_external_caught_exception_address);
1700 movq(rax, Immediate(false)); 1808 movq(rax, Immediate(false));
1701 store_rax(external_caught); 1809 Store(external_caught, rax);
1702 1810
1703 // Set pending exception and rax to out of memory exception. 1811 // Set pending exception and rax to out of memory exception.
1704 ExternalReference pending_exception(Isolate::k_pending_exception_address); 1812 ExternalReference pending_exception(Isolate::k_pending_exception_address);
1705 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); 1813 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
1706 store_rax(pending_exception); 1814 Store(pending_exception, rax);
1707 } 1815 }
1708 1816
1709 // Clear the context pointer. 1817 // Clear the context pointer.
1710 Set(rsi, 0); 1818 Set(rsi, 0);
1711 1819
1712 // Restore registers from handler. 1820 // Restore registers from handler.
1713 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize == 1821 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
1714 StackHandlerConstants::kFPOffset); 1822 StackHandlerConstants::kFPOffset);
1715 pop(rbp); // FP 1823 pop(rbp); // FP
1716 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == 1824 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1717 StackHandlerConstants::kStateOffset); 1825 StackHandlerConstants::kStateOffset);
1718 pop(rdx); // State 1826 pop(rdx); // State
1719 1827
1720 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == 1828 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1721 StackHandlerConstants::kPCOffset); 1829 StackHandlerConstants::kPCOffset);
1722 ret(0); 1830 ret(0);
1723 } 1831 }
1724 1832
1725 1833
1726 void MacroAssembler::Ret() { 1834 void MacroAssembler::Ret() {
1727 ret(0); 1835 ret(0);
1728 } 1836 }
1729 1837
1730 1838
1731 void MacroAssembler::Ret(int bytes_dropped, Register scratch) { 1839 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
(...skipping 144 matching lines...)
1876 bind(&non_instance); 1984 bind(&non_instance);
1877 movq(result, FieldOperand(result, Map::kConstructorOffset)); 1985 movq(result, FieldOperand(result, Map::kConstructorOffset));
1878 1986
1879 // All done. 1987 // All done.
1880 bind(&done); 1988 bind(&done);
1881 } 1989 }
1882 1990
1883 1991
1884 void MacroAssembler::SetCounter(StatsCounter* counter, int value) { 1992 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1885 if (FLAG_native_code_counters && counter->Enabled()) { 1993 if (FLAG_native_code_counters && counter->Enabled()) {
1886 movq(kScratchRegister, ExternalReference(counter)); 1994 Operand counter_operand = ExternalOperand(ExternalReference(counter));
1887 movl(Operand(kScratchRegister, 0), Immediate(value)); 1995 movq(counter_operand, Immediate(value));
1888 } 1996 }
1889 } 1997 }
1890 1998
1891 1999
1892 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) { 2000 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1893 ASSERT(value > 0); 2001 ASSERT(value > 0);
1894 if (FLAG_native_code_counters && counter->Enabled()) { 2002 if (FLAG_native_code_counters && counter->Enabled()) {
1895 movq(kScratchRegister, ExternalReference(counter)); 2003 Operand counter_operand = ExternalOperand(ExternalReference(counter));
1896 Operand operand(kScratchRegister, 0);
1897 if (value == 1) { 2004 if (value == 1) {
1898 incl(operand); 2005 incl(counter_operand);
1899 } else { 2006 } else {
1900 addl(operand, Immediate(value)); 2007 addl(counter_operand, Immediate(value));
1901 } 2008 }
1902 } 2009 }
1903 } 2010 }
1904 2011
1905 2012
1906 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) { 2013 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1907 ASSERT(value > 0); 2014 ASSERT(value > 0);
1908 if (FLAG_native_code_counters && counter->Enabled()) { 2015 if (FLAG_native_code_counters && counter->Enabled()) {
1909 movq(kScratchRegister, ExternalReference(counter)); 2016 Operand counter_operand = ExternalOperand(ExternalReference(counter));
1910 Operand operand(kScratchRegister, 0);
1911 if (value == 1) { 2017 if (value == 1) {
1912 decl(operand); 2018 decl(counter_operand);
1913 } else { 2019 } else {
1914 subl(operand, Immediate(value)); 2020 subl(counter_operand, Immediate(value));
1915 } 2021 }
1916 } 2022 }
1917 } 2023 }
1918 2024
1919 2025
1920 #ifdef ENABLE_DEBUGGER_SUPPORT 2026 #ifdef ENABLE_DEBUGGER_SUPPORT
1921 void MacroAssembler::DebugBreak() { 2027 void MacroAssembler::DebugBreak() {
1922 ASSERT(allow_stub_calls()); 2028 ASSERT(allow_stub_calls());
1923 Set(rax, 0); // No arguments. 2029 Set(rax, 0); // No arguments.
1924 movq(rbx, ExternalReference(Runtime::kDebugBreak)); 2030 LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak));
1925 CEntryStub ces(1); 2031 CEntryStub ces(1);
1926 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); 2032 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
1927 } 2033 }
1928 #endif // ENABLE_DEBUGGER_SUPPORT 2034 #endif // ENABLE_DEBUGGER_SUPPORT
1929 2035
1930 2036
1931 void MacroAssembler::InvokeCode(Register code, 2037 void MacroAssembler::InvokeCode(Register code,
1932 const ParameterCount& expected, 2038 const ParameterCount& expected,
1933 const ParameterCount& actual, 2039 const ParameterCount& actual,
1934 InvokeFlag flag, 2040 InvokeFlag flag,
(...skipping 133 matching lines...)
2068 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); 2174 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
2069 push(Immediate(0)); // Saved entry sp, patched before call. 2175 push(Immediate(0)); // Saved entry sp, patched before call.
2070 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 2176 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2071 push(kScratchRegister); // Accessed from ExitFrame::code_slot. 2177 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
2072 2178
2073 // Save the frame pointer and the context in top. 2179 // Save the frame pointer and the context in top.
2074 if (save_rax) { 2180 if (save_rax) {
2075 movq(r14, rax); // Backup rax in callee-save register. 2181 movq(r14, rax); // Backup rax in callee-save register.
2076 } 2182 }
2077 2183
2078 movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address)); 2184 Store(ExternalReference(Isolate::k_c_entry_fp_address), rbp);
2079 movq(Operand(kScratchRegister, 0), rbp); 2185 Store(ExternalReference(Isolate::k_context_address), rsi);
2080
2081 movq(kScratchRegister, ExternalReference(Isolate::k_context_address));
2082 movq(Operand(kScratchRegister, 0), rsi);
2083 } 2186 }
2084 2187
2085 2188
2086 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, 2189 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
2087 bool save_doubles) { 2190 bool save_doubles) {
2088 #ifdef _WIN64 2191 #ifdef _WIN64
2089 const int kShadowSpace = 4; 2192 const int kShadowSpace = 4;
2090 arg_stack_space += kShadowSpace; 2193 arg_stack_space += kShadowSpace;
2091 #endif 2194 #endif
2092 // Optionally save all XMM registers. 2195 // Optionally save all XMM registers.
(...skipping 71 matching lines...)
2164 movq(rsp, rbp); 2267 movq(rsp, rbp);
2165 pop(rbp); 2268 pop(rbp);
2166 2269
2167 LeaveExitFrameEpilogue(); 2270 LeaveExitFrameEpilogue();
2168 } 2271 }
2169 2272
2170 2273
2171 void MacroAssembler::LeaveExitFrameEpilogue() { 2274 void MacroAssembler::LeaveExitFrameEpilogue() {
2172 // Restore current context from top and clear it in debug mode. 2275 // Restore current context from top and clear it in debug mode.
2173 ExternalReference context_address(Isolate::k_context_address); 2276 ExternalReference context_address(Isolate::k_context_address);
2174 movq(kScratchRegister, context_address); 2277 Operand context_operand = ExternalOperand(context_address);
2175 movq(rsi, Operand(kScratchRegister, 0)); 2278 movq(rsi, context_operand);
2176 #ifdef DEBUG 2279 #ifdef DEBUG
2177 movq(Operand(kScratchRegister, 0), Immediate(0)); 2280 movq(context_operand, Immediate(0));
2178 #endif 2281 #endif
2179 2282
2180 // Clear the top frame. 2283 // Clear the top frame.
2181 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address); 2284 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address);
2182 movq(kScratchRegister, c_entry_fp_address); 2285 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
2183 movq(Operand(kScratchRegister, 0), Immediate(0)); 2286 movq(c_entry_fp_operand, Immediate(0));
2184 } 2287 }
2185 2288
2186 2289
2187 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, 2290 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2188 Register scratch, 2291 Register scratch,
2189 Label* miss) { 2292 Label* miss) {
2190 Label same_contexts; 2293 Label same_contexts;
2191 2294
2192 ASSERT(!holder_reg.is(scratch)); 2295 ASSERT(!holder_reg.is(scratch));
2193 ASSERT(!scratch.is(kScratchRegister)); 2296 ASSERT(!scratch.is(kScratchRegister));
(...skipping 58 matching lines...)
2252 AllocationFlags flags) { 2355 AllocationFlags flags) {
2253 ExternalReference new_space_allocation_top = 2356 ExternalReference new_space_allocation_top =
2254 ExternalReference::new_space_allocation_top_address(); 2357 ExternalReference::new_space_allocation_top_address();
2255 2358
2256 // Just return if allocation top is already known. 2359 // Just return if allocation top is already known.
2257 if ((flags & RESULT_CONTAINS_TOP) != 0) { 2360 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2258 // No use of scratch if allocation top is provided. 2361 // No use of scratch if allocation top is provided.
2259 ASSERT(!scratch.is_valid()); 2362 ASSERT(!scratch.is_valid());
2260 #ifdef DEBUG 2363 #ifdef DEBUG
2261 // Assert that result actually contains top on entry. 2364 // Assert that result actually contains top on entry.
2262 movq(kScratchRegister, new_space_allocation_top); 2365 Operand top_operand = ExternalOperand(new_space_allocation_top);
2263 cmpq(result, Operand(kScratchRegister, 0)); 2366 cmpq(result, top_operand);
2264 Check(equal, "Unexpected allocation top"); 2367 Check(equal, "Unexpected allocation top");
2265 #endif 2368 #endif
2266 return; 2369 return;
2267 } 2370 }
2268 2371
2269 // Move address of new object to result. Use scratch register if available, 2372 // Move address of new object to result. Use scratch register if available,
2270 // and keep address in scratch until call to UpdateAllocationTopHelper. 2373 // and keep address in scratch until call to UpdateAllocationTopHelper.
2271 if (scratch.is_valid()) { 2374 if (scratch.is_valid()) {
2272 movq(scratch, new_space_allocation_top); 2375 LoadAddress(scratch, new_space_allocation_top);
2273 movq(result, Operand(scratch, 0)); 2376 movq(result, Operand(scratch, 0));
2274 } else if (result.is(rax)) {
2275 load_rax(new_space_allocation_top);
2276 } else { 2377 } else {
2277 movq(kScratchRegister, new_space_allocation_top); 2378 Load(result, new_space_allocation_top);
2278 movq(result, Operand(kScratchRegister, 0));
2279 } 2379 }
2280 } 2380 }
2281 2381
2282 2382
2283 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, 2383 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2284 Register scratch) { 2384 Register scratch) {
2285 if (emit_debug_code()) { 2385 if (emit_debug_code()) {
2286 testq(result_end, Immediate(kObjectAlignmentMask)); 2386 testq(result_end, Immediate(kObjectAlignmentMask));
2287 Check(zero, "Unaligned allocation in new space"); 2387 Check(zero, "Unaligned allocation in new space");
2288 } 2388 }
2289 2389
2290 ExternalReference new_space_allocation_top = 2390 ExternalReference new_space_allocation_top =
2291 ExternalReference::new_space_allocation_top_address(); 2391 ExternalReference::new_space_allocation_top_address();
2292 2392
2293 // Update new top. 2393 // Update new top.
2294 if (result_end.is(rax)) { 2394 if (scratch.is_valid()) {
2295 // rax can be stored directly to a memory location. 2395 // Scratch already contains address of allocation top.
2296 store_rax(new_space_allocation_top); 2396 movq(Operand(scratch, 0), result_end);
2297 } else { 2397 } else {
2298 // Register required - use scratch provided if available. 2398 Store(new_space_allocation_top, result_end);
2299 if (scratch.is_valid()) {
2300 movq(Operand(scratch, 0), result_end);
2301 } else {
2302 movq(kScratchRegister, new_space_allocation_top);
2303 movq(Operand(kScratchRegister, 0), result_end);
2304 }
2305 } 2399 }
2306 } 2400 }
2307 2401
2308 2402
2309 void MacroAssembler::AllocateInNewSpace(int object_size, 2403 void MacroAssembler::AllocateInNewSpace(int object_size,
2310 Register result, 2404 Register result,
2311 Register result_end, 2405 Register result_end,
2312 Register scratch, 2406 Register scratch,
2313 Label* gc_required, 2407 Label* gc_required,
2314 AllocationFlags flags) { 2408 AllocationFlags flags) {
(...skipping 20 matching lines...)
2335 ExternalReference new_space_allocation_limit = 2429 ExternalReference new_space_allocation_limit =
2336 ExternalReference::new_space_allocation_limit_address(); 2430 ExternalReference::new_space_allocation_limit_address();
2337 2431
2338 Register top_reg = result_end.is_valid() ? result_end : result; 2432 Register top_reg = result_end.is_valid() ? result_end : result;
2339 2433
2340 if (!top_reg.is(result)) { 2434 if (!top_reg.is(result)) {
2341 movq(top_reg, result); 2435 movq(top_reg, result);
2342 } 2436 }
2343 addq(top_reg, Immediate(object_size)); 2437 addq(top_reg, Immediate(object_size));
2344 j(carry, gc_required); 2438 j(carry, gc_required);
2345 movq(kScratchRegister, new_space_allocation_limit); 2439 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2346 cmpq(top_reg, Operand(kScratchRegister, 0)); 2440 cmpq(top_reg, limit_operand);
2347 j(above, gc_required); 2441 j(above, gc_required);
2348 2442
2349 // Update allocation top. 2443 // Update allocation top.
2350 UpdateAllocationTopHelper(top_reg, scratch); 2444 UpdateAllocationTopHelper(top_reg, scratch);
2351 2445
2352 if (top_reg.is(result)) { 2446 if (top_reg.is(result)) {
2353 if ((flags & TAG_OBJECT) != 0) { 2447 if ((flags & TAG_OBJECT) != 0) {
2354 subq(result, Immediate(object_size - kHeapObjectTag)); 2448 subq(result, Immediate(object_size - kHeapObjectTag));
2355 } else { 2449 } else {
2356 subq(result, Immediate(object_size)); 2450 subq(result, Immediate(object_size));
(...skipping 33 matching lines...)
2390 2484
2391 // Calculate new top and bail out if new space is exhausted. 2485 // Calculate new top and bail out if new space is exhausted.
2392 ExternalReference new_space_allocation_limit = 2486 ExternalReference new_space_allocation_limit =
2393 ExternalReference::new_space_allocation_limit_address(); 2487 ExternalReference::new_space_allocation_limit_address();
2394 2488
2395 // We assume that element_count*element_size + header_size does not 2489 // We assume that element_count*element_size + header_size does not
2396 // overflow. 2490 // overflow.
2397 lea(result_end, Operand(element_count, element_size, header_size)); 2491 lea(result_end, Operand(element_count, element_size, header_size));
2398 addq(result_end, result); 2492 addq(result_end, result);
2399 j(carry, gc_required); 2493 j(carry, gc_required);
2400 movq(kScratchRegister, new_space_allocation_limit); 2494 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2401 cmpq(result_end, Operand(kScratchRegister, 0)); 2495 cmpq(result_end, limit_operand);
2402 j(above, gc_required); 2496 j(above, gc_required);
2403 2497
2404 // Update allocation top. 2498 // Update allocation top.
2405 UpdateAllocationTopHelper(result_end, scratch); 2499 UpdateAllocationTopHelper(result_end, scratch);
2406 2500
2407 // Tag the result if requested. 2501 // Tag the result if requested.
2408 if ((flags & TAG_OBJECT) != 0) { 2502 if ((flags & TAG_OBJECT) != 0) {
2409 addq(result, Immediate(kHeapObjectTag)); 2503 addq(result, Immediate(kHeapObjectTag));
2410 } 2504 }
2411 } 2505 }
(...skipping 24 matching lines...)
2436 LoadAllocationTopHelper(result, scratch, flags); 2530 LoadAllocationTopHelper(result, scratch, flags);
2437 2531
2438 // Calculate new top and bail out if new space is exhausted. 2532 // Calculate new top and bail out if new space is exhausted.
2439 ExternalReference new_space_allocation_limit = 2533 ExternalReference new_space_allocation_limit =
2440 ExternalReference::new_space_allocation_limit_address(); 2534 ExternalReference::new_space_allocation_limit_address();
2441 if (!object_size.is(result_end)) { 2535 if (!object_size.is(result_end)) {
2442 movq(result_end, object_size); 2536 movq(result_end, object_size);
2443 } 2537 }
2444 addq(result_end, result); 2538 addq(result_end, result);
2445 j(carry, gc_required); 2539 j(carry, gc_required);
2446 movq(kScratchRegister, new_space_allocation_limit); 2540 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2447 cmpq(result_end, Operand(kScratchRegister, 0)); 2541 cmpq(result_end, limit_operand);
2448 j(above, gc_required); 2542 j(above, gc_required);
2449 2543
2450 // Update allocation top. 2544 // Update allocation top.
2451 UpdateAllocationTopHelper(result_end, scratch); 2545 UpdateAllocationTopHelper(result_end, scratch);
2452 2546
2453 // Tag the result if requested. 2547 // Tag the result if requested.
2454 if ((flags & TAG_OBJECT) != 0) { 2548 if ((flags & TAG_OBJECT) != 0) {
2455 addq(result, Immediate(kHeapObjectTag)); 2549 addq(result, Immediate(kHeapObjectTag));
2456 } 2550 }
2457 } 2551 }
2458 2552
2459 2553
2460 void MacroAssembler::UndoAllocationInNewSpace(Register object) { 2554 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2461 ExternalReference new_space_allocation_top = 2555 ExternalReference new_space_allocation_top =
2462 ExternalReference::new_space_allocation_top_address(); 2556 ExternalReference::new_space_allocation_top_address();
2463 2557
2464 // Make sure the object has no tag before resetting top. 2558 // Make sure the object has no tag before resetting top.
2465 and_(object, Immediate(~kHeapObjectTagMask)); 2559 and_(object, Immediate(~kHeapObjectTagMask));
2466 movq(kScratchRegister, new_space_allocation_top); 2560 Operand top_operand = ExternalOperand(new_space_allocation_top);
2467 #ifdef DEBUG 2561 #ifdef DEBUG
2468 cmpq(object, Operand(kScratchRegister, 0)); 2562 cmpq(object, top_operand);
2469 Check(below, "Undo allocation of non allocated memory"); 2563 Check(below, "Undo allocation of non allocated memory");
2470 #endif 2564 #endif
2471 movq(Operand(kScratchRegister, 0), object); 2565 movq(top_operand, object);
2472 } 2566 }
2473 2567
2474 2568
2475 void MacroAssembler::AllocateHeapNumber(Register result, 2569 void MacroAssembler::AllocateHeapNumber(Register result,
2476 Register scratch, 2570 Register scratch,
2477 Label* gc_required) { 2571 Label* gc_required) {
2478 // Allocate heap number in new space. 2572 // Allocate heap number in new space.
2479 AllocateInNewSpace(HeapNumber::kSize, 2573 AllocateInNewSpace(HeapNumber::kSize,
2480 result, 2574 result,
2481 scratch, 2575 scratch,
(...skipping 214 matching lines...)
2696 int argument_slots_on_stack = 2790 int argument_slots_on_stack =
2697 ArgumentStackSlotsForCFunctionCall(num_arguments); 2791 ArgumentStackSlotsForCFunctionCall(num_arguments);
2698 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize)); 2792 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2699 and_(rsp, Immediate(-frame_alignment)); 2793 and_(rsp, Immediate(-frame_alignment));
2700 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister); 2794 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2701 } 2795 }
2702 2796
2703 2797
2704 void MacroAssembler::CallCFunction(ExternalReference function, 2798 void MacroAssembler::CallCFunction(ExternalReference function,
2705 int num_arguments) { 2799 int num_arguments) {
2706 movq(rax, function); 2800 LoadAddress(rax, function);
2707 CallCFunction(rax, num_arguments); 2801 CallCFunction(rax, num_arguments);
2708 } 2802 }
2709 2803
2710 2804
2711 void MacroAssembler::CallCFunction(Register function, int num_arguments) { 2805 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2712 // Pass current isolate address as additional parameter. 2806 // Pass current isolate address as additional parameter.
2713 if (num_arguments < kRegisterPassedArguments) { 2807 if (num_arguments < kRegisterPassedArguments) {
2714 #ifdef _WIN64 2808 #ifdef _WIN64
2715 // First four arguments are passed in registers on Windows. 2809 // First four arguments are passed in registers on Windows.
2716 Register arg_to_reg[] = {rcx, rdx, r8, r9}; 2810 Register arg_to_reg[] = {rcx, rdx, r8, r9};
2717 #else 2811 #else
2718 // First six arguments are passed in registers on other platforms. 2812 // First six arguments are passed in registers on other platforms.
2719 Register arg_to_reg[] = {rdi, rsi, rdx, rcx, r8, r9}; 2813 Register arg_to_reg[] = {rdi, rsi, rdx, rcx, r8, r9};
2720 #endif 2814 #endif
2721 Register reg = arg_to_reg[num_arguments]; 2815 Register reg = arg_to_reg[num_arguments];
2722 movq(reg, ExternalReference::isolate_address()); 2816 LoadAddress(reg, ExternalReference::isolate_address());
2723 } else { 2817 } else {
2724 // Push Isolate pointer after all parameters. 2818 // Push Isolate pointer after all parameters.
2725 int argument_slots_on_stack = 2819 int argument_slots_on_stack =
2726 ArgumentStackSlotsForCFunctionCall(num_arguments); 2820 ArgumentStackSlotsForCFunctionCall(num_arguments);
2727 movq(kScratchRegister, ExternalReference::isolate_address()); 2821 LoadAddress(kScratchRegister, ExternalReference::isolate_address());
2728 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), 2822 movq(Operand(rsp, argument_slots_on_stack * kPointerSize),
2729 kScratchRegister); 2823 kScratchRegister);
2730 } 2824 }
2731 2825
2732 // Check stack alignment. 2826 // Check stack alignment.
2733 if (emit_debug_code()) { 2827 if (emit_debug_code()) {
2734 CheckStackAlignment(); 2828 CheckStackAlignment();
2735 } 2829 }
2736 2830
2737 call(function); 2831 call(function);
(...skipping 20 matching lines...)
2758 CPU::FlushICache(address_, size_); 2852 CPU::FlushICache(address_, size_);
2759 2853
2760 // Check that the code was patched as expected. 2854 // Check that the code was patched as expected.
2761 ASSERT(masm_.pc_ == address_ + size_); 2855 ASSERT(masm_.pc_ == address_ + size_);
2762 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2856 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2763 } 2857 }
2764 2858
2765 } } // namespace v8::internal 2859 } } // namespace v8::internal
2766 2860
2767 #endif // V8_TARGET_ARCH_X64 2861 #endif // V8_TARGET_ARCH_X64