Chromium Code Reviews

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 6716018: X64: Optimize access to external references. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Merge with tip of bleeding edge. Created 9 years, 9 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 26 matching lines...)
37 #include "debug.h" 37 #include "debug.h"
38 #include "heap.h" 38 #include "heap.h"
39 39
40 namespace v8 { 40 namespace v8 {
41 namespace internal { 41 namespace internal {
42 42
43 MacroAssembler::MacroAssembler(void* buffer, int size) 43 MacroAssembler::MacroAssembler(void* buffer, int size)
44 : Assembler(buffer, size), 44 : Assembler(buffer, size),
45 generating_stub_(false), 45 generating_stub_(false),
46 allow_stub_calls_(true), 46 allow_stub_calls_(true),
47 root_array_available_(true),
47 code_object_(HEAP->undefined_value()) { 48 code_object_(HEAP->undefined_value()) {
48 } 49 }
49 50
50 51
52 static intptr_t RootRegisterDelta(ExternalReference other) {
53 Address roots_register_value = kRootRegisterBias +
54 reinterpret_cast<Address>(Isolate::Current()->heap()->roots_address());
55 intptr_t delta = other.address() - roots_register_value;
56 return delta;
57 }
58
59
60 Operand MacroAssembler::ExternalOperand(ExternalReference target,
61 Register scratch) {
62 if (root_array_available_ && !Serializer::enabled()) {
63 intptr_t delta = RootRegisterDelta(target);
64 if (is_int32(delta)) {
65 Serializer::TooLateToEnableNow();
66 return Operand(kRootRegister, delta);
67 }
68 }
69 movq(scratch, target);
70 return Operand(scratch, 0);
71 }
72
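The fast path above rests on one arithmetic fact: an external reference is reachable as Operand(kRootRegister, delta) only when its distance from the biased roots address fits in a signed 32-bit displacement; otherwise the code falls back to materializing the full 64-bit address in a scratch register. A minimal standalone sketch of that check (plain C++ with hypothetical names, not V8 code; the real bias constant lives in the x64 headers):

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for kRootRegisterBias.
constexpr intptr_t kBias = 128;

// Mirrors RootRegisterDelta() followed by is_int32(): true if `target` can be
// addressed with a 32-bit displacement off the value held in kRootRegister.
bool ReachableFromRootRegister(uintptr_t roots_address, uintptr_t target) {
  intptr_t root_value = static_cast<intptr_t>(roots_address) + kBias;
  intptr_t delta = static_cast<intptr_t>(target) - root_value;
  return delta == static_cast<intptr_t>(static_cast<int32_t>(delta));
}

int main() {
  uintptr_t roots = uintptr_t{1} << 32;
  std::printf("%d\n", ReachableFromRootRegister(roots, roots + 0x1000));                // 1
  std::printf("%d\n", ReachableFromRootRegister(roots, roots + (uintptr_t{1} << 40)));  // 0
}

When the delta does not fit, or when the serializer is enabled, Load, Store and LoadAddress below keep the old pattern: put the 64-bit address in a register and go through Operand(reg, 0).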
73
74 void MacroAssembler::Load(Register destination, ExternalReference source) {
75 if (root_array_available_ && !Serializer::enabled()) {
76 intptr_t delta = RootRegisterDelta(source);
77 if (is_int32(delta)) {
78 Serializer::TooLateToEnableNow();
79 movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
80 return;
81 }
82 }
83 // Safe code.
84 if (destination.is(rax)) {
85 load_rax(source);
86 } else {
87 movq(kScratchRegister, source);
88 movq(destination, Operand(kScratchRegister, 0));
89 }
90 }
91
92
93 void MacroAssembler::Store(ExternalReference destination, Register source) {
94 if (root_array_available_ && !Serializer::enabled()) {
95 intptr_t delta = RootRegisterDelta(destination);
96 if (is_int32(delta)) {
97 Serializer::TooLateToEnableNow();
98 movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
99 return;
100 }
101 }
102 // Safe code.
103 if (source.is(rax)) {
104 store_rax(destination);
105 } else {
106 movq(kScratchRegister, destination);
107 movq(Operand(kScratchRegister, 0), source);
108 }
109 }
110
111
112 void MacroAssembler::LoadAddress(Register destination,
113 ExternalReference source) {
114 if (root_array_available_ && !Serializer::enabled()) {
115 intptr_t delta = RootRegisterDelta(source);
116 if (is_int32(delta)) {
117 Serializer::TooLateToEnableNow();
118 lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
119 return;
120 }
121 }
122 // Safe code.
123 movq(destination, source);
124 }
125
126
127 int MacroAssembler::LoadAddressSize(ExternalReference source) {
128 if (root_array_available_ && !Serializer::enabled()) {
129 // This calculation depends on the internals of LoadAddress.
130 // Its correctness is ensured by the asserts in the Call
131 // instruction below.
132 intptr_t delta = RootRegisterDelta(source);
133 if (is_int32(delta)) {
134 Serializer::TooLateToEnableNow();
135 // Operand is lea(destination, Operand(kRootRegister, delta));
136 // Opcode: REX.W 8D ModRM Disp8/Disp32 - 4 or 7 bytes.
137 int size = 4;
138 if (!is_int8(static_cast<int32_t>(delta))) {
139 size += 3; // Need full four-byte displacement in lea.
140 }
141 return size;
142 }
143 }
144 // Size of movq(destination, src);
145 return 10;
146 }
147
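The 4, 7 and 10 in LoadAddressSize come directly from the x86-64 encodings: lea reg, [kRootRegister + disp] is REX.W 8D ModRM followed by a disp8 or disp32, while movq reg, imm64 is REX.W B8+r followed by an 8-byte immediate. A standalone sketch of the same arithmetic (not V8 code; assumes the base register needs no SIB byte, which holds for an r13-style base):

#include <cassert>
#include <cstdint>

// lea r64, [base + disp8]  : REX.W 8D ModRM disp8   -> 4 bytes
// lea r64, [base + disp32] : REX.W 8D ModRM disp32  -> 7 bytes
// movq r64, imm64          : REX.W B8+r imm64       -> 10 bytes
int LoadAddressSizeSketch(bool root_relative, int32_t delta) {
  if (!root_relative) return 10;  // movq with a full 64-bit immediate
  bool fits_in_disp8 = delta == static_cast<int8_t>(delta);  // is_int8(delta)
  return fits_in_disp8 ? 4 : 7;
}

int main() {
  assert(LoadAddressSizeSketch(true, 16) == 4);
  assert(LoadAddressSizeSketch(true, 1 << 20) == 7);
  assert(LoadAddressSizeSketch(false, 0) == 10);
}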
148
51 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { 149 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
150 ASSERT(root_array_available_);
52 movq(destination, Operand(kRootRegister, 151 movq(destination, Operand(kRootRegister,
53 (index << kPointerSizeLog2) - kRootRegisterBias)); 152 (index << kPointerSizeLog2) - kRootRegisterBias));
54 } 153 }
55 154
56 155
57 void MacroAssembler::LoadRootIndexed(Register destination, 156 void MacroAssembler::LoadRootIndexed(Register destination,
58 Register variable_offset, 157 Register variable_offset,
59 int fixed_offset) { 158 int fixed_offset) {
159 ASSERT(root_array_available_);
60 movq(destination, 160 movq(destination,
61 Operand(kRootRegister, 161 Operand(kRootRegister,
62 variable_offset, times_pointer_size, 162 variable_offset, times_pointer_size,
63 (fixed_offset << kPointerSizeLog2) - kRootRegisterBias)); 163 (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
64 } 164 }
65 165
66 166
67 void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) { 167 void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
168 ASSERT(root_array_available_);
68 movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias), 169 movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
69 source); 170 source);
70 } 171 }
71 172
72 173
73 void MacroAssembler::PushRoot(Heap::RootListIndex index) { 174 void MacroAssembler::PushRoot(Heap::RootListIndex index) {
175 ASSERT(root_array_available_);
74 push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias)); 176 push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
75 } 177 }
76 178
77 179
78 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) { 180 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
181 ASSERT(root_array_available_);
79 cmpq(with, Operand(kRootRegister, 182 cmpq(with, Operand(kRootRegister,
80 (index << kPointerSizeLog2) - kRootRegisterBias)); 183 (index << kPointerSizeLog2) - kRootRegisterBias));
81 } 184 }
82 185
83 186
84 void MacroAssembler::CompareRoot(const Operand& with, 187 void MacroAssembler::CompareRoot(const Operand& with,
85 Heap::RootListIndex index) { 188 Heap::RootListIndex index) {
189 ASSERT(root_array_available_);
86 ASSERT(!with.AddressUsesRegister(kScratchRegister)); 190 ASSERT(!with.AddressUsesRegister(kScratchRegister));
87 LoadRoot(kScratchRegister, index); 191 LoadRoot(kScratchRegister, index);
88 cmpq(with, kScratchRegister); 192 cmpq(with, kScratchRegister);
89 } 193 }
90 194
91 195
92 void MacroAssembler::RecordWriteHelper(Register object, 196 void MacroAssembler::RecordWriteHelper(Register object,
93 Register addr, 197 Register addr,
94 Register scratch) { 198 Register scratch) {
95 if (emit_debug_code()) { 199 if (emit_debug_code()) {
(...skipping 290 matching lines...)
386 490
387 491
388 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { 492 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
389 CallRuntime(Runtime::FunctionForId(id), num_arguments); 493 CallRuntime(Runtime::FunctionForId(id), num_arguments);
390 } 494 }
391 495
392 496
393 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { 497 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
394 const Runtime::Function* function = Runtime::FunctionForId(id); 498 const Runtime::Function* function = Runtime::FunctionForId(id);
395 Set(rax, function->nargs); 499 Set(rax, function->nargs);
396 movq(rbx, ExternalReference(function, isolate())); 500 LoadAddress(rbx, ExternalReference(function, isolate()));
397 CEntryStub ces(1); 501 CEntryStub ces(1);
398 ces.SaveDoubles(); 502 ces.SaveDoubles();
399 CallStub(&ces); 503 CallStub(&ces);
400 } 504 }
401 505
402 506
403 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, 507 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
404 int num_arguments) { 508 int num_arguments) {
405 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); 509 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
406 } 510 }
407 511
408 512
409 void MacroAssembler::CallRuntime(const Runtime::Function* f, 513 void MacroAssembler::CallRuntime(const Runtime::Function* f,
410 int num_arguments) { 514 int num_arguments) {
411 // If the expected number of arguments of the runtime function is 515 // If the expected number of arguments of the runtime function is
412 // constant, we check that the actual number of arguments match the 516 // constant, we check that the actual number of arguments match the
413 // expectation. 517 // expectation.
414 if (f->nargs >= 0 && f->nargs != num_arguments) { 518 if (f->nargs >= 0 && f->nargs != num_arguments) {
415 IllegalOperation(num_arguments); 519 IllegalOperation(num_arguments);
416 return; 520 return;
417 } 521 }
418 522
419 // TODO(1236192): Most runtime routines don't need the number of 523 // TODO(1236192): Most runtime routines don't need the number of
420 // arguments passed in because it is constant. At some point we 524 // arguments passed in because it is constant. At some point we
421 // should remove this need and make the runtime routine entry code 525 // should remove this need and make the runtime routine entry code
422 // smarter. 526 // smarter.
423 Set(rax, num_arguments); 527 Set(rax, num_arguments);
424 movq(rbx, ExternalReference(f, isolate())); 528 LoadAddress(rbx, ExternalReference(f, isolate()));
425 CEntryStub ces(f->result_size); 529 CEntryStub ces(f->result_size);
426 CallStub(&ces); 530 CallStub(&ces);
427 } 531 }
428 532
429 533
430 MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f, 534 MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
431 int num_arguments) { 535 int num_arguments) {
432 if (f->nargs >= 0 && f->nargs != num_arguments) { 536 if (f->nargs >= 0 && f->nargs != num_arguments) {
433 IllegalOperation(num_arguments); 537 IllegalOperation(num_arguments);
434 // Since we did not call the stub, there was no allocation failure. 538 // Since we did not call the stub, there was no allocation failure.
435 // Return some non-failure object. 539 // Return some non-failure object.
436 return HEAP->undefined_value(); 540 return HEAP->undefined_value();
437 } 541 }
438 542
439 // TODO(1236192): Most runtime routines don't need the number of 543 // TODO(1236192): Most runtime routines don't need the number of
440 // arguments passed in because it is constant. At some point we 544 // arguments passed in because it is constant. At some point we
441 // should remove this need and make the runtime routine entry code 545 // should remove this need and make the runtime routine entry code
442 // smarter. 546 // smarter.
443 Set(rax, num_arguments); 547 Set(rax, num_arguments);
444 movq(rbx, ExternalReference(f, isolate())); 548 LoadAddress(rbx, ExternalReference(f, isolate()));
445 CEntryStub ces(f->result_size); 549 CEntryStub ces(f->result_size);
446 return TryCallStub(&ces); 550 return TryCallStub(&ces);
447 } 551 }
448 552
449 553
450 void MacroAssembler::CallExternalReference(const ExternalReference& ext, 554 void MacroAssembler::CallExternalReference(const ExternalReference& ext,
451 int num_arguments) { 555 int num_arguments) {
452 Set(rax, num_arguments); 556 Set(rax, num_arguments);
453 movq(rbx, ext); 557 LoadAddress(rbx, ext);
454 558
455 CEntryStub stub(1); 559 CEntryStub stub(1);
456 CallStub(&stub); 560 CallStub(&stub);
457 } 561 }
458 562
459 563
460 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext, 564 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
461 int num_arguments, 565 int num_arguments,
462 int result_size) { 566 int result_size) {
463 // ----------- S t a t e ------------- 567 // ----------- S t a t e -------------
(...skipping 142 matching lines...)
606 bind(&empty_result); 710 bind(&empty_result);
607 // It was zero; the result is undefined. 711 // It was zero; the result is undefined.
608 Move(rax, FACTORY->undefined_value()); 712 Move(rax, FACTORY->undefined_value());
609 jmp(&prologue); 713 jmp(&prologue);
610 714
611 // HandleScope limit has changed. Delete allocated extensions. 715 // HandleScope limit has changed. Delete allocated extensions.
612 bind(&delete_allocated_handles); 716 bind(&delete_allocated_handles);
613 movq(Operand(base_reg, kLimitOffset), prev_limit_reg); 717 movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
614 movq(prev_limit_reg, rax); 718 movq(prev_limit_reg, rax);
615 #ifdef _WIN64 719 #ifdef _WIN64
616 movq(rcx, ExternalReference::isolate_address()); 720 LoadAddress(rcx, ExternalReference::isolate_address());
617 #else 721 #else
618 movq(rdi, ExternalReference::isolate_address()); 722 LoadAddress(rdi, ExternalReference::isolate_address());
619 #endif 723 #endif
620 movq(rax, ExternalReference::delete_handle_scope_extensions(isolate())); 724 LoadAddress(rax,
725 ExternalReference::delete_handle_scope_extensions(isolate()));
621 call(rax); 726 call(rax);
622 movq(rax, prev_limit_reg); 727 movq(rax, prev_limit_reg);
623 jmp(&leave_exit_frame); 728 jmp(&leave_exit_frame);
624 729
625 return result; 730 return result;
626 } 731 }
627 732
628 733
629 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, 734 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
630 int result_size) { 735 int result_size) {
631 // Set the entry point and jump to the C entry runtime stub. 736 // Set the entry point and jump to the C entry runtime stub.
632 movq(rbx, ext); 737 LoadAddress(rbx, ext);
633 CEntryStub ces(result_size); 738 CEntryStub ces(result_size);
634 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); 739 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
635 } 740 }
636 741
637 742
638 MaybeObject* MacroAssembler::TryJumpToExternalReference( 743 MaybeObject* MacroAssembler::TryJumpToExternalReference(
639 const ExternalReference& ext, int result_size) { 744 const ExternalReference& ext, int result_size) {
640 // Set the entry point and jump to the C entry runtime stub. 745 // Set the entry point and jump to the C entry runtime stub.
641 movq(rbx, ext); 746 LoadAddress(rbx, ext);
642 CEntryStub ces(result_size); 747 CEntryStub ces(result_size);
643 return TryTailCallStub(&ces); 748 return TryTailCallStub(&ces);
644 } 749 }
645 750
646 751
647 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, 752 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
648 InvokeFlag flag, 753 InvokeFlag flag,
649 CallWrapper* call_wrapper) { 754 CallWrapper* call_wrapper) {
650 // Calls are not allowed in some stubs. 755 // Calls are not allowed in some stubs.
651 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls()); 756 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
(...skipping 786 matching lines...)
1438 } 1543 }
1439 } 1544 }
1440 1545
1441 1546
1442 void MacroAssembler::Test(const Operand& src, Smi* source) { 1547 void MacroAssembler::Test(const Operand& src, Smi* source) {
1443 testl(Operand(src, kIntSize), Immediate(source->value())); 1548 testl(Operand(src, kIntSize), Immediate(source->value()));
1444 } 1549 }
1445 1550
1446 1551
1447 void MacroAssembler::Jump(ExternalReference ext) { 1552 void MacroAssembler::Jump(ExternalReference ext) {
1448 movq(kScratchRegister, ext); 1553 LoadAddress(kScratchRegister, ext);
1449 jmp(kScratchRegister); 1554 jmp(kScratchRegister);
1450 } 1555 }
1451 1556
1452 1557
1453 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { 1558 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1454 movq(kScratchRegister, destination, rmode); 1559 movq(kScratchRegister, destination, rmode);
1455 jmp(kScratchRegister); 1560 jmp(kScratchRegister);
1456 } 1561 }
1457 1562
1458 1563
1459 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) { 1564 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
1460 // TODO(X64): Inline this 1565 // TODO(X64): Inline this
1461 jmp(code_object, rmode); 1566 jmp(code_object, rmode);
1462 } 1567 }
1463 1568
1464 1569
1570 int MacroAssembler::CallSize(ExternalReference ext) {
1571 // Opcode for call kScratchRegister is: Rex.B FF D2 (three bytes).
1572 const int kCallInstructionSize = 3;
1573 return LoadAddressSize(ext) + kCallInstructionSize;
1574 }
1575
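The three-byte call size used by CallSize above is the register-direct FF /2 form with a REX prefix, i.e. 41 FF D2 when the target register is r10 (assumed here to be kScratchRegister, as in the x64 port). A throwaway sketch that rebuilds those bytes from the encoding rules (plain C++, not assembler output):

#include <cstdio>

int main() {
  const unsigned reg = 10;                          // assumed: kScratchRegister == r10
  unsigned rex = 0x40u | ((reg >> 3) & 1u);         // REX.B for r8-r15 -> 0x41
  unsigned opcode = 0xFFu;                          // group-5 opcode (call/jmp/push...)
  unsigned modrm = 0xC0u | (2u << 3) | (reg & 7u);  // mod=11, /2 = call, rm=r10 -> 0xD2
  std::printf("%02X %02X %02X\n", rex, opcode, modrm);  // prints 41 FF D2
  return 0;
}

Together with LoadAddressSize, this fixed size is what lets the DEBUG blocks below compare pc_offset() against a precomputed end position.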
1576
1465 void MacroAssembler::Call(ExternalReference ext) { 1577 void MacroAssembler::Call(ExternalReference ext) {
1466 #ifdef DEBUG 1578 #ifdef DEBUG
1467 int pre_position = pc_offset(); 1579 int end_position = pc_offset() + CallSize(ext);
1468 #endif 1580 #endif
1469 movq(kScratchRegister, ext); 1581 LoadAddress(kScratchRegister, ext);
1470 call(kScratchRegister); 1582 call(kScratchRegister);
1471 #ifdef DEBUG 1583 #ifdef DEBUG
1472 int post_position = pc_offset(); 1584 CHECK_EQ(end_position, pc_offset());
1473 CHECK_EQ(pre_position + CallSize(ext), post_position);
1474 #endif 1585 #endif
1475 } 1586 }
1476 1587
1477 1588
1478 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { 1589 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1479 #ifdef DEBUG 1590 #ifdef DEBUG
1480 int pre_position = pc_offset(); 1591 int end_position = pc_offset() + CallSize(destination, rmode);
1481 #endif 1592 #endif
1482 movq(kScratchRegister, destination, rmode); 1593 movq(kScratchRegister, destination, rmode);
1483 call(kScratchRegister); 1594 call(kScratchRegister);
1484 #ifdef DEBUG 1595 #ifdef DEBUG
1485 int post_position = pc_offset(); 1596 CHECK_EQ(pc_offset(), end_position);
1486 CHECK_EQ(pre_position + CallSize(destination, rmode), post_position);
1487 #endif 1597 #endif
1488 } 1598 }
1489 1599
1490 1600
1491 void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) { 1601 void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1492 #ifdef DEBUG 1602 #ifdef DEBUG
1493 int pre_position = pc_offset(); 1603 int end_position = pc_offset() + CallSize(code_object);
1494 #endif 1604 #endif
1495 ASSERT(RelocInfo::IsCodeTarget(rmode)); 1605 ASSERT(RelocInfo::IsCodeTarget(rmode));
1496 call(code_object, rmode); 1606 call(code_object, rmode);
1497 #ifdef DEBUG 1607 #ifdef DEBUG
1498 int post_position = pc_offset(); 1608 CHECK_EQ(end_position, pc_offset());
1499 CHECK_EQ(pre_position + CallSize(code_object), post_position);
1500 #endif 1609 #endif
1501 } 1610 }
1502 1611
1503 1612
1504 void MacroAssembler::Pushad() { 1613 void MacroAssembler::Pushad() {
1505 push(rax); 1614 push(rax);
1506 push(rcx); 1615 push(rcx);
1507 push(rdx); 1616 push(rdx);
1508 push(rbx); 1617 push(rbx);
1509 // Not pushing rsp or rbp. 1618 // Not pushing rsp or rbp.
(...skipping 100 matching lines...)
1610 push(rbp); 1719 push(rbp);
1611 } else { 1720 } else {
1612 ASSERT(try_location == IN_JS_ENTRY); 1721 ASSERT(try_location == IN_JS_ENTRY);
1613 // The frame pointer does not point to a JS frame so we save NULL 1722 // The frame pointer does not point to a JS frame so we save NULL
1614 // for rbp. We expect the code throwing an exception to check rbp 1723 // for rbp. We expect the code throwing an exception to check rbp
1615 // before dereferencing it to restore the context. 1724 // before dereferencing it to restore the context.
1616 push(Immediate(StackHandler::ENTRY)); 1725 push(Immediate(StackHandler::ENTRY));
1617 push(Immediate(0)); // NULL frame pointer. 1726 push(Immediate(0)); // NULL frame pointer.
1618 } 1727 }
1619 // Save the current handler. 1728 // Save the current handler.
1620 movq(kScratchRegister, 1729 Operand handler_operand =
1621 ExternalReference(Isolate::k_handler_address, isolate())); 1730 ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
1622 push(Operand(kScratchRegister, 0)); 1731 push(handler_operand);
1623 // Link this handler. 1732 // Link this handler.
1624 movq(Operand(kScratchRegister, 0), rsp); 1733 movq(handler_operand, rsp);
1625 } 1734 }
1626 1735
1627 1736
1628 void MacroAssembler::PopTryHandler() { 1737 void MacroAssembler::PopTryHandler() {
1629 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); 1738 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1630 // Unlink this handler. 1739 // Unlink this handler.
1631 movq(kScratchRegister, 1740 Operand handler_operand =
1632 ExternalReference(Isolate::k_handler_address, isolate())); 1741 ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
1633 pop(Operand(kScratchRegister, 0)); 1742 pop(handler_operand);
1634 // Remove the remaining fields. 1743 // Remove the remaining fields.
1635 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); 1744 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1636 } 1745 }
1637 1746
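For the handler bookkeeping above: the try-handler records form a singly linked list whose head lives in the cell behind Isolate::k_handler_address, so PushTryHandler links the record it just built on the machine stack and PopTryHandler unlinks it. A host-language analogue of that protocol (illustrative only, hypothetical types, not V8 code):

#include <cstddef>

struct Handler {
  Handler* next;  // previous list head, saved by push(handler_operand)
  // frame pointer, state and return address follow in the real stack record
};

Handler* handler_top = nullptr;  // stands in for the k_handler_address cell

void PushHandler(Handler* h) {
  h->next = handler_top;  // push(handler_operand): remember the old head
  handler_top = h;        // movq(handler_operand, rsp): publish the new record
}

void PopHandler() {
  handler_top = handler_top->next;  // pop(handler_operand): restore the old head
}

int main() {
  Handler outer = { nullptr };
  Handler inner = { nullptr };
  PushHandler(&outer);
  PushHandler(&inner);
  PopHandler();
  return handler_top == &outer ? 0 : 1;  // expect 0
}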
1638 1747
1639 void MacroAssembler::Throw(Register value) { 1748 void MacroAssembler::Throw(Register value) {
1640 // Check that stack should contain next handler, frame pointer, state and 1749 // Check that stack should contain next handler, frame pointer, state and
1641 // return address in that order. 1750 // return address in that order.
1642 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == 1751 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1643 StackHandlerConstants::kStateOffset); 1752 StackHandlerConstants::kStateOffset);
1644 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == 1753 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1645 StackHandlerConstants::kPCOffset); 1754 StackHandlerConstants::kPCOffset);
1646 // Keep thrown value in rax. 1755 // Keep thrown value in rax.
1647 if (!value.is(rax)) { 1756 if (!value.is(rax)) {
1648 movq(rax, value); 1757 movq(rax, value);
1649 } 1758 }
1650 1759
1651 ExternalReference handler_address(Isolate::k_handler_address, isolate()); 1760 ExternalReference handler_address(Isolate::k_handler_address, isolate());
1652 movq(kScratchRegister, handler_address); 1761 Operand handler_operand = ExternalOperand(handler_address);
1653 movq(rsp, Operand(kScratchRegister, 0)); 1762 movq(rsp, handler_operand);
1654 // get next in chain 1763 // get next in chain
1655 pop(rcx); 1764 pop(handler_operand);
1656 movq(Operand(kScratchRegister, 0), rcx);
1657 pop(rbp); // pop frame pointer 1765 pop(rbp); // pop frame pointer
1658 pop(rdx); // remove state 1766 pop(rdx); // remove state
1659 1767
1660 // Before returning we restore the context from the frame pointer if not NULL. 1768 // Before returning we restore the context from the frame pointer if not NULL.
1661 // The frame pointer is NULL in the exception handler of a JS entry frame. 1769 // The frame pointer is NULL in the exception handler of a JS entry frame.
1662 Set(rsi, 0); // Tentatively set context pointer to NULL 1770 Set(rsi, 0); // Tentatively set context pointer to NULL
1663 NearLabel skip; 1771 NearLabel skip;
1664 cmpq(rbp, Immediate(0)); 1772 cmpq(rbp, Immediate(0));
1665 j(equal, &skip); 1773 j(equal, &skip);
1666 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 1774 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1667 bind(&skip); 1775 bind(&skip);
1668 ret(0); 1776 ret(0);
1669 } 1777 }
1670 1778
1671 1779
1672 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, 1780 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
1673 Register value) { 1781 Register value) {
1674 // Keep thrown value in rax. 1782 // Keep thrown value in rax.
1675 if (!value.is(rax)) { 1783 if (!value.is(rax)) {
1676 movq(rax, value); 1784 movq(rax, value);
1677 } 1785 }
1678 // Fetch top stack handler. 1786 // Fetch top stack handler.
1679 ExternalReference handler_address(Isolate::k_handler_address, isolate()); 1787 ExternalReference handler_address(Isolate::k_handler_address, isolate());
1680 movq(kScratchRegister, handler_address); 1788 Load(rsp, handler_address);
1681 movq(rsp, Operand(kScratchRegister, 0));
1682 1789
1683 // Unwind the handlers until the ENTRY handler is found. 1790 // Unwind the handlers until the ENTRY handler is found.
1684 NearLabel loop, done; 1791 NearLabel loop, done;
1685 bind(&loop); 1792 bind(&loop);
1686 // Load the type of the current stack handler. 1793 // Load the type of the current stack handler.
1687 const int kStateOffset = StackHandlerConstants::kStateOffset; 1794 const int kStateOffset = StackHandlerConstants::kStateOffset;
1688 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); 1795 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
1689 j(equal, &done); 1796 j(equal, &done);
1690 // Fetch the next handler in the list. 1797 // Fetch the next handler in the list.
1691 const int kNextOffset = StackHandlerConstants::kNextOffset; 1798 const int kNextOffset = StackHandlerConstants::kNextOffset;
1692 movq(rsp, Operand(rsp, kNextOffset)); 1799 movq(rsp, Operand(rsp, kNextOffset));
1693 jmp(&loop); 1800 jmp(&loop);
1694 bind(&done); 1801 bind(&done);
1695 1802
1696 // Set the top handler address to next handler past the current ENTRY handler. 1803 // Set the top handler address to next handler past the current ENTRY handler.
1697 movq(kScratchRegister, handler_address); 1804 Operand handler_operand = ExternalOperand(handler_address);
1698 pop(Operand(kScratchRegister, 0)); 1805 pop(handler_operand);
1699 1806
1700 if (type == OUT_OF_MEMORY) { 1807 if (type == OUT_OF_MEMORY) {
1701 // Set external caught exception to false. 1808 // Set external caught exception to false.
1702 ExternalReference external_caught( 1809 ExternalReference external_caught(
1703 Isolate::k_external_caught_exception_address, isolate()); 1810 Isolate::k_external_caught_exception_address, isolate());
1704 movq(rax, Immediate(false)); 1811 movq(rax, Immediate(false));
1705 store_rax(external_caught); 1812 Store(external_caught, rax);
1706 1813
1707 // Set pending exception and rax to out of memory exception. 1814 // Set pending exception and rax to out of memory exception.
1708 ExternalReference pending_exception(Isolate::k_pending_exception_address, 1815 ExternalReference pending_exception(Isolate::k_pending_exception_address,
1709 isolate()); 1816 isolate());
1710 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); 1817 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
1711 store_rax(pending_exception); 1818 Store(pending_exception, rax);
1712 } 1819 }
1713 1820
1714 // Clear the context pointer. 1821 // Clear the context pointer.
1715 Set(rsi, 0); 1822 Set(rsi, 0);
1716 1823
1717 // Restore registers from handler. 1824 // Restore registers from handler.
1718 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize == 1825 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
1719 StackHandlerConstants::kFPOffset); 1826 StackHandlerConstants::kFPOffset);
1720 pop(rbp); // FP 1827 pop(rbp); // FP
1721 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == 1828 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1722 StackHandlerConstants::kStateOffset); 1829 StackHandlerConstants::kStateOffset);
1723 pop(rdx); // State 1830 pop(rdx); // State
1724 1831
1725 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == 1832 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1726 StackHandlerConstants::kPCOffset); 1833 StackHandlerConstants::kPCOffset);
1727 ret(0); 1834 ret(0);
1728 } 1835 }
1729 1836
1730 1837
1731 void MacroAssembler::Ret() { 1838 void MacroAssembler::Ret() {
1732 ret(0); 1839 ret(0);
1733 } 1840 }
1734 1841
1735 1842
1736 void MacroAssembler::Ret(int bytes_dropped, Register scratch) { 1843 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
(...skipping 144 matching lines...)
1881 bind(&non_instance); 1988 bind(&non_instance);
1882 movq(result, FieldOperand(result, Map::kConstructorOffset)); 1989 movq(result, FieldOperand(result, Map::kConstructorOffset));
1883 1990
1884 // All done. 1991 // All done.
1885 bind(&done); 1992 bind(&done);
1886 } 1993 }
1887 1994
1888 1995
1889 void MacroAssembler::SetCounter(StatsCounter* counter, int value) { 1996 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1890 if (FLAG_native_code_counters && counter->Enabled()) { 1997 if (FLAG_native_code_counters && counter->Enabled()) {
1891 movq(kScratchRegister, ExternalReference(counter)); 1998 Operand counter_operand = ExternalOperand(ExternalReference(counter));
1892 movl(Operand(kScratchRegister, 0), Immediate(value)); 1999 movq(counter_operand, Immediate(value));
1893 } 2000 }
1894 } 2001 }
1895 2002
1896 2003
1897 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) { 2004 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1898 ASSERT(value > 0); 2005 ASSERT(value > 0);
1899 if (FLAG_native_code_counters && counter->Enabled()) { 2006 if (FLAG_native_code_counters && counter->Enabled()) {
1900 movq(kScratchRegister, ExternalReference(counter)); 2007 Operand counter_operand = ExternalOperand(ExternalReference(counter));
1901 Operand operand(kScratchRegister, 0);
1902 if (value == 1) { 2008 if (value == 1) {
1903 incl(operand); 2009 incl(counter_operand);
1904 } else { 2010 } else {
1905 addl(operand, Immediate(value)); 2011 addl(counter_operand, Immediate(value));
1906 } 2012 }
1907 } 2013 }
1908 } 2014 }
1909 2015
1910 2016
1911 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) { 2017 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1912 ASSERT(value > 0); 2018 ASSERT(value > 0);
1913 if (FLAG_native_code_counters && counter->Enabled()) { 2019 if (FLAG_native_code_counters && counter->Enabled()) {
1914 movq(kScratchRegister, ExternalReference(counter)); 2020 Operand counter_operand = ExternalOperand(ExternalReference(counter));
1915 Operand operand(kScratchRegister, 0);
1916 if (value == 1) { 2021 if (value == 1) {
1917 decl(operand); 2022 decl(counter_operand);
1918 } else { 2023 } else {
1919 subl(operand, Immediate(value)); 2024 subl(counter_operand, Immediate(value));
1920 } 2025 }
1921 } 2026 }
1922 } 2027 }
1923 2028
1924 2029
1925 #ifdef ENABLE_DEBUGGER_SUPPORT 2030 #ifdef ENABLE_DEBUGGER_SUPPORT
1926 void MacroAssembler::DebugBreak() { 2031 void MacroAssembler::DebugBreak() {
1927 ASSERT(allow_stub_calls()); 2032 ASSERT(allow_stub_calls());
1928 Set(rax, 0); // No arguments. 2033 Set(rax, 0); // No arguments.
1929 movq(rbx, ExternalReference(Runtime::kDebugBreak, isolate())); 2034 LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
1930 CEntryStub ces(1); 2035 CEntryStub ces(1);
1931 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); 2036 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
1932 } 2037 }
1933 #endif // ENABLE_DEBUGGER_SUPPORT 2038 #endif // ENABLE_DEBUGGER_SUPPORT
1934 2039
1935 2040
1936 void MacroAssembler::InvokeCode(Register code, 2041 void MacroAssembler::InvokeCode(Register code,
1937 const ParameterCount& expected, 2042 const ParameterCount& expected,
1938 const ParameterCount& actual, 2043 const ParameterCount& actual,
1939 InvokeFlag flag, 2044 InvokeFlag flag,
(...skipping 133 matching lines...)
2073 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); 2178 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
2074 push(Immediate(0)); // Saved entry sp, patched before call. 2179 push(Immediate(0)); // Saved entry sp, patched before call.
2075 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 2180 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2076 push(kScratchRegister); // Accessed from ExitFrame::code_slot. 2181 push(kScratchRegister); // Accessed from ExitFrame::code_slot.
2077 2182
2078 // Save the frame pointer and the context in top. 2183 // Save the frame pointer and the context in top.
2079 if (save_rax) { 2184 if (save_rax) {
2080 movq(r14, rax); // Backup rax in callee-save register. 2185 movq(r14, rax); // Backup rax in callee-save register.
2081 } 2186 }
2082 2187
2083 movq(kScratchRegister, 2188 Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
2084 ExternalReference(Isolate::k_c_entry_fp_address, isolate())); 2189 Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
2085 movq(Operand(kScratchRegister, 0), rbp);
2086
2087 movq(kScratchRegister,
2088 ExternalReference(Isolate::k_context_address, isolate()));
2089 movq(Operand(kScratchRegister, 0), rsi);
2090 } 2190 }
2091 2191
2092 2192
2093 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, 2193 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
2094 bool save_doubles) { 2194 bool save_doubles) {
2095 #ifdef _WIN64 2195 #ifdef _WIN64
2096 const int kShadowSpace = 4; 2196 const int kShadowSpace = 4;
2097 arg_stack_space += kShadowSpace; 2197 arg_stack_space += kShadowSpace;
2098 #endif 2198 #endif
2099 // Optionally save all XMM registers. 2199 // Optionally save all XMM registers.
(...skipping 71 matching lines...)
2171 movq(rsp, rbp); 2271 movq(rsp, rbp);
2172 pop(rbp); 2272 pop(rbp);
2173 2273
2174 LeaveExitFrameEpilogue(); 2274 LeaveExitFrameEpilogue();
2175 } 2275 }
2176 2276
2177 2277
2178 void MacroAssembler::LeaveExitFrameEpilogue() { 2278 void MacroAssembler::LeaveExitFrameEpilogue() {
2179 // Restore current context from top and clear it in debug mode. 2279 // Restore current context from top and clear it in debug mode.
2180 ExternalReference context_address(Isolate::k_context_address, isolate()); 2280 ExternalReference context_address(Isolate::k_context_address, isolate());
2181 movq(kScratchRegister, context_address); 2281 Operand context_operand = ExternalOperand(context_address);
2182 movq(rsi, Operand(kScratchRegister, 0)); 2282 movq(rsi, context_operand);
2183 #ifdef DEBUG 2283 #ifdef DEBUG
2184 movq(Operand(kScratchRegister, 0), Immediate(0)); 2284 movq(context_operand, Immediate(0));
2185 #endif 2285 #endif
2186 2286
2187 // Clear the top frame. 2287 // Clear the top frame.
2188 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address, 2288 ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
2189 isolate()); 2289 isolate());
2190 movq(kScratchRegister, c_entry_fp_address); 2290 Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
2191 movq(Operand(kScratchRegister, 0), Immediate(0)); 2291 movq(c_entry_fp_operand, Immediate(0));
2192 } 2292 }
2193 2293
2194 2294
2195 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, 2295 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2196 Register scratch, 2296 Register scratch,
2197 Label* miss) { 2297 Label* miss) {
2198 Label same_contexts; 2298 Label same_contexts;
2199 2299
2200 ASSERT(!holder_reg.is(scratch)); 2300 ASSERT(!holder_reg.is(scratch));
2201 ASSERT(!scratch.is(kScratchRegister)); 2301 ASSERT(!scratch.is(kScratchRegister));
(...skipping 58 matching lines...)
2260 AllocationFlags flags) { 2360 AllocationFlags flags) {
2261 ExternalReference new_space_allocation_top = 2361 ExternalReference new_space_allocation_top =
2262 ExternalReference::new_space_allocation_top_address(isolate()); 2362 ExternalReference::new_space_allocation_top_address(isolate());
2263 2363
2264 // Just return if allocation top is already known. 2364 // Just return if allocation top is already known.
2265 if ((flags & RESULT_CONTAINS_TOP) != 0) { 2365 if ((flags & RESULT_CONTAINS_TOP) != 0) {
2266 // No use of scratch if allocation top is provided. 2366 // No use of scratch if allocation top is provided.
2267 ASSERT(!scratch.is_valid()); 2367 ASSERT(!scratch.is_valid());
2268 #ifdef DEBUG 2368 #ifdef DEBUG
2269 // Assert that result actually contains top on entry. 2369 // Assert that result actually contains top on entry.
2270 movq(kScratchRegister, new_space_allocation_top); 2370 Operand top_operand = ExternalOperand(new_space_allocation_top);
2271 cmpq(result, Operand(kScratchRegister, 0)); 2371 cmpq(result, top_operand);
2272 Check(equal, "Unexpected allocation top"); 2372 Check(equal, "Unexpected allocation top");
2273 #endif 2373 #endif
2274 return; 2374 return;
2275 } 2375 }
2276 2376
2277 // Move address of new object to result. Use scratch register if available, 2377 // Move address of new object to result. Use scratch register if available,
2278 // and keep address in scratch until call to UpdateAllocationTopHelper. 2378 // and keep address in scratch until call to UpdateAllocationTopHelper.
2279 if (scratch.is_valid()) { 2379 if (scratch.is_valid()) {
2280 movq(scratch, new_space_allocation_top); 2380 LoadAddress(scratch, new_space_allocation_top);
2281 movq(result, Operand(scratch, 0)); 2381 movq(result, Operand(scratch, 0));
2282 } else if (result.is(rax)) {
2283 load_rax(new_space_allocation_top);
2284 } else { 2382 } else {
2285 movq(kScratchRegister, new_space_allocation_top); 2383 Load(result, new_space_allocation_top);
2286 movq(result, Operand(kScratchRegister, 0));
2287 } 2384 }
2288 } 2385 }
2289 2386
2290 2387
2291 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, 2388 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2292 Register scratch) { 2389 Register scratch) {
2293 if (emit_debug_code()) { 2390 if (emit_debug_code()) {
2294 testq(result_end, Immediate(kObjectAlignmentMask)); 2391 testq(result_end, Immediate(kObjectAlignmentMask));
2295 Check(zero, "Unaligned allocation in new space"); 2392 Check(zero, "Unaligned allocation in new space");
2296 } 2393 }
2297 2394
2298 ExternalReference new_space_allocation_top = 2395 ExternalReference new_space_allocation_top =
2299 ExternalReference::new_space_allocation_top_address(isolate()); 2396 ExternalReference::new_space_allocation_top_address(isolate());
2300 2397
2301 // Update new top. 2398 // Update new top.
2302 if (result_end.is(rax)) { 2399 if (scratch.is_valid()) {
2303 // rax can be stored directly to a memory location. 2400 // Scratch already contains address of allocation top.
2304 store_rax(new_space_allocation_top); 2401 movq(Operand(scratch, 0), result_end);
2305 } else { 2402 } else {
2306 // Register required - use scratch provided if available. 2403 Store(new_space_allocation_top, result_end);
2307 if (scratch.is_valid()) {
2308 movq(Operand(scratch, 0), result_end);
2309 } else {
2310 movq(kScratchRegister, new_space_allocation_top);
2311 movq(Operand(kScratchRegister, 0), result_end);
2312 }
2313 } 2404 }
2314 } 2405 }
2315 2406
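The two helpers above and the AllocateInNewSpace variants below all follow the same bump-pointer protocol against the new-space top and limit cells; the patch only changes how those cells are addressed (ExternalOperand, Load and Store instead of always routing through kScratchRegister). A compact sketch of the protocol itself (standalone C++, hypothetical names, not V8 code):

#include <cstddef>
#include <cstdint>

struct NewSpace {
  uintptr_t top;    // plays the role of the allocation-top cell
  uintptr_t limit;  // plays the role of the allocation-limit cell
};

// Returns the untagged start of the new object, or 0 when the caller must take
// the gc_required path.
uintptr_t AllocateRaw(NewSpace* space, size_t object_size) {
  uintptr_t result = space->top;          // LoadAllocationTopHelper
  uintptr_t new_top = result + object_size;
  if (new_top < result) return 0;         // j(carry, gc_required)
  if (new_top > space->limit) return 0;   // cmpq against the limit; j(above, ...)
  space->top = new_top;                   // UpdateAllocationTopHelper
  return result;                          // caller may still add kHeapObjectTag
}

int main() {
  static unsigned char arena[1024];
  NewSpace space = { reinterpret_cast<uintptr_t>(arena),
                     reinterpret_cast<uintptr_t>(arena) + sizeof(arena) };
  return AllocateRaw(&space, 64) != 0 ? 0 : 1;  // expect 0
}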
2316 2407
2317 void MacroAssembler::AllocateInNewSpace(int object_size, 2408 void MacroAssembler::AllocateInNewSpace(int object_size,
2318 Register result, 2409 Register result,
2319 Register result_end, 2410 Register result_end,
2320 Register scratch, 2411 Register scratch,
2321 Label* gc_required, 2412 Label* gc_required,
2322 AllocationFlags flags) { 2413 AllocationFlags flags) {
(...skipping 20 matching lines...)
2343 ExternalReference new_space_allocation_limit = 2434 ExternalReference new_space_allocation_limit =
2344 ExternalReference::new_space_allocation_limit_address(isolate()); 2435 ExternalReference::new_space_allocation_limit_address(isolate());
2345 2436
2346 Register top_reg = result_end.is_valid() ? result_end : result; 2437 Register top_reg = result_end.is_valid() ? result_end : result;
2347 2438
2348 if (!top_reg.is(result)) { 2439 if (!top_reg.is(result)) {
2349 movq(top_reg, result); 2440 movq(top_reg, result);
2350 } 2441 }
2351 addq(top_reg, Immediate(object_size)); 2442 addq(top_reg, Immediate(object_size));
2352 j(carry, gc_required); 2443 j(carry, gc_required);
2353 movq(kScratchRegister, new_space_allocation_limit); 2444 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2354 cmpq(top_reg, Operand(kScratchRegister, 0)); 2445 cmpq(top_reg, limit_operand);
2355 j(above, gc_required); 2446 j(above, gc_required);
2356 2447
2357 // Update allocation top. 2448 // Update allocation top.
2358 UpdateAllocationTopHelper(top_reg, scratch); 2449 UpdateAllocationTopHelper(top_reg, scratch);
2359 2450
2360 if (top_reg.is(result)) { 2451 if (top_reg.is(result)) {
2361 if ((flags & TAG_OBJECT) != 0) { 2452 if ((flags & TAG_OBJECT) != 0) {
2362 subq(result, Immediate(object_size - kHeapObjectTag)); 2453 subq(result, Immediate(object_size - kHeapObjectTag));
2363 } else { 2454 } else {
2364 subq(result, Immediate(object_size)); 2455 subq(result, Immediate(object_size));
(...skipping 33 matching lines...)
2398 2489
2399 // Calculate new top and bail out if new space is exhausted. 2490 // Calculate new top and bail out if new space is exhausted.
2400 ExternalReference new_space_allocation_limit = 2491 ExternalReference new_space_allocation_limit =
2401 ExternalReference::new_space_allocation_limit_address(isolate()); 2492 ExternalReference::new_space_allocation_limit_address(isolate());
2402 2493
2403 // We assume that element_count*element_size + header_size does not 2494 // We assume that element_count*element_size + header_size does not
2404 // overflow. 2495 // overflow.
2405 lea(result_end, Operand(element_count, element_size, header_size)); 2496 lea(result_end, Operand(element_count, element_size, header_size));
2406 addq(result_end, result); 2497 addq(result_end, result);
2407 j(carry, gc_required); 2498 j(carry, gc_required);
2408 movq(kScratchRegister, new_space_allocation_limit); 2499 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2409 cmpq(result_end, Operand(kScratchRegister, 0)); 2500 cmpq(result_end, limit_operand);
2410 j(above, gc_required); 2501 j(above, gc_required);
2411 2502
2412 // Update allocation top. 2503 // Update allocation top.
2413 UpdateAllocationTopHelper(result_end, scratch); 2504 UpdateAllocationTopHelper(result_end, scratch);
2414 2505
2415 // Tag the result if requested. 2506 // Tag the result if requested.
2416 if ((flags & TAG_OBJECT) != 0) { 2507 if ((flags & TAG_OBJECT) != 0) {
2417 addq(result, Immediate(kHeapObjectTag)); 2508 addq(result, Immediate(kHeapObjectTag));
2418 } 2509 }
2419 } 2510 }
(...skipping 24 matching lines...)
2444 LoadAllocationTopHelper(result, scratch, flags); 2535 LoadAllocationTopHelper(result, scratch, flags);
2445 2536
2446 // Calculate new top and bail out if new space is exhausted. 2537 // Calculate new top and bail out if new space is exhausted.
2447 ExternalReference new_space_allocation_limit = 2538 ExternalReference new_space_allocation_limit =
2448 ExternalReference::new_space_allocation_limit_address(isolate()); 2539 ExternalReference::new_space_allocation_limit_address(isolate());
2449 if (!object_size.is(result_end)) { 2540 if (!object_size.is(result_end)) {
2450 movq(result_end, object_size); 2541 movq(result_end, object_size);
2451 } 2542 }
2452 addq(result_end, result); 2543 addq(result_end, result);
2453 j(carry, gc_required); 2544 j(carry, gc_required);
2454 movq(kScratchRegister, new_space_allocation_limit); 2545 Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2455 cmpq(result_end, Operand(kScratchRegister, 0)); 2546 cmpq(result_end, limit_operand);
2456 j(above, gc_required); 2547 j(above, gc_required);
2457 2548
2458 // Update allocation top. 2549 // Update allocation top.
2459 UpdateAllocationTopHelper(result_end, scratch); 2550 UpdateAllocationTopHelper(result_end, scratch);
2460 2551
2461 // Tag the result if requested. 2552 // Tag the result if requested.
2462 if ((flags & TAG_OBJECT) != 0) { 2553 if ((flags & TAG_OBJECT) != 0) {
2463 addq(result, Immediate(kHeapObjectTag)); 2554 addq(result, Immediate(kHeapObjectTag));
2464 } 2555 }
2465 } 2556 }
2466 2557
2467 2558
2468 void MacroAssembler::UndoAllocationInNewSpace(Register object) { 2559 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2469 ExternalReference new_space_allocation_top = 2560 ExternalReference new_space_allocation_top =
2470 ExternalReference::new_space_allocation_top_address(isolate()); 2561 ExternalReference::new_space_allocation_top_address(isolate());
2471 2562
2472 // Make sure the object has no tag before resetting top. 2563 // Make sure the object has no tag before resetting top.
2473 and_(object, Immediate(~kHeapObjectTagMask)); 2564 and_(object, Immediate(~kHeapObjectTagMask));
2474 movq(kScratchRegister, new_space_allocation_top); 2565 Operand top_operand = ExternalOperand(new_space_allocation_top);
2475 #ifdef DEBUG 2566 #ifdef DEBUG
2476 cmpq(object, Operand(kScratchRegister, 0)); 2567 cmpq(object, top_operand);
2477 Check(below, "Undo allocation of non allocated memory"); 2568 Check(below, "Undo allocation of non allocated memory");
2478 #endif 2569 #endif
2479 movq(Operand(kScratchRegister, 0), object); 2570 movq(top_operand, object);
2480 } 2571 }
2481 2572
2482 2573
2483 void MacroAssembler::AllocateHeapNumber(Register result, 2574 void MacroAssembler::AllocateHeapNumber(Register result,
2484 Register scratch, 2575 Register scratch,
2485 Label* gc_required) { 2576 Label* gc_required) {
2486 // Allocate heap number in new space. 2577 // Allocate heap number in new space.
2487 AllocateInNewSpace(HeapNumber::kSize, 2578 AllocateInNewSpace(HeapNumber::kSize,
2488 result, 2579 result,
2489 scratch, 2580 scratch,
(...skipping 214 matching lines...)
2704 int argument_slots_on_stack = 2795 int argument_slots_on_stack =
2705 ArgumentStackSlotsForCFunctionCall(num_arguments); 2796 ArgumentStackSlotsForCFunctionCall(num_arguments);
2706 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize)); 2797 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2707 and_(rsp, Immediate(-frame_alignment)); 2798 and_(rsp, Immediate(-frame_alignment));
2708 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister); 2799 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2709 } 2800 }
2710 2801
2711 2802
2712 void MacroAssembler::CallCFunction(ExternalReference function, 2803 void MacroAssembler::CallCFunction(ExternalReference function,
2713 int num_arguments) { 2804 int num_arguments) {
2714 movq(rax, function); 2805 LoadAddress(rax, function);
2715 CallCFunction(rax, num_arguments); 2806 CallCFunction(rax, num_arguments);
2716 } 2807 }
2717 2808
2718 2809
2719 void MacroAssembler::CallCFunction(Register function, int num_arguments) { 2810 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2720 // Pass current isolate address as additional parameter. 2811 // Pass current isolate address as additional parameter.
2721 if (num_arguments < kRegisterPassedArguments) { 2812 if (num_arguments < kRegisterPassedArguments) {
2722 #ifdef _WIN64 2813 #ifdef _WIN64
2723 // First four arguments are passed in registers on Windows. 2814 // First four arguments are passed in registers on Windows.
2724 Register arg_to_reg[] = {rcx, rdx, r8, r9}; 2815 Register arg_to_reg[] = {rcx, rdx, r8, r9};
2725 #else 2816 #else
2726 // First six arguments are passed in registers on other platforms. 2817 // First six arguments are passed in registers on other platforms.
2727 Register arg_to_reg[] = {rdi, rsi, rdx, rcx, r8, r9}; 2818 Register arg_to_reg[] = {rdi, rsi, rdx, rcx, r8, r9};
2728 #endif 2819 #endif
2729 Register reg = arg_to_reg[num_arguments]; 2820 Register reg = arg_to_reg[num_arguments];
2730 movq(reg, ExternalReference::isolate_address()); 2821 LoadAddress(reg, ExternalReference::isolate_address());
2731 } else { 2822 } else {
2732 // Push Isolate pointer after all parameters. 2823 // Push Isolate pointer after all parameters.
2733 int argument_slots_on_stack = 2824 int argument_slots_on_stack =
2734 ArgumentStackSlotsForCFunctionCall(num_arguments); 2825 ArgumentStackSlotsForCFunctionCall(num_arguments);
2735 movq(kScratchRegister, ExternalReference::isolate_address()); 2826 LoadAddress(kScratchRegister, ExternalReference::isolate_address());
2736 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), 2827 movq(Operand(rsp, argument_slots_on_stack * kPointerSize),
2737 kScratchRegister); 2828 kScratchRegister);
2738 } 2829 }
2739 2830
2740 // Check stack alignment. 2831 // Check stack alignment.
2741 if (emit_debug_code()) { 2832 if (emit_debug_code()) {
2742 CheckStackAlignment(); 2833 CheckStackAlignment();
2743 } 2834 }
2744 2835
2745 call(function); 2836 call(function);
(...skipping 20 matching lines...)
2766 CPU::FlushICache(address_, size_); 2857 CPU::FlushICache(address_, size_);
2767 2858
2768 // Check that the code was patched as expected. 2859 // Check that the code was patched as expected.
2769 ASSERT(masm_.pc_ == address_ + size_); 2860 ASSERT(masm_.pc_ == address_ + size_);
2770 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2861 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2771 } 2862 }
2772 2863
2773 } } // namespace v8::internal 2864 } } // namespace v8::internal
2774 2865
2775 #endif // V8_TARGET_ARCH_X64 2866 #endif // V8_TARGET_ARCH_X64