Index: src/x64/macro-assembler-x64.cc
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index a4d64b9fd5ad936f756a6246aee18024071ea67d..d2db523eb501b3f4eb429446b70c3d43a7802bef 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -27,10 +27,11 @@
 
 #include "v8.h"
 
-#if defined(V8_TARGET_ARCH_X64)
+#if V8_TARGET_ARCH_X64
 
 #include "bootstrapper.h"
 #include "codegen.h"
+#include "cpu-profiler.h"
 #include "assembler-x64.h"
 #include "macro-assembler-x64.h"
 #include "serialize.h"
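A note on the guard rewrite above: #if defined(X) passes whenever the macro is
defined, even as 0, while #if X treats an undefined macro as 0 and requires a
nonzero value. A minimal sketch, assuming the build system defines the macro on
the compiler command line (the -D values below are illustrative, not V8's
actual build configuration):

    // g++ -DV8_TARGET_ARCH_X64=1 guard.cc && ./a.out   -> prints "x64"
    // g++ -DV8_TARGET_ARCH_X64=0 guard.cc && ./a.out   -> prints "other";
    // the old "#if defined(...)" form would still take the x64 branch here.
    #include <cstdio>
    int main() {
    #if V8_TARGET_ARCH_X64
      std::puts("x64");
    #else
      std::puts("other");
    #endif
      return 0;
    }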
@@ -645,8 +646,8 @@ void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                                int num_arguments,
                                                int result_size) {
   // ----------- S t a t e -------------
-  // -- rsp[0] : return address
-  // -- rsp[8] : argument num_arguments - 1
+  // -- rsp[0]                 : return address
+  // -- rsp[8]                 : argument num_arguments - 1
   // ...
   // -- rsp[8 * num_arguments] : argument 0 (receiver)
   // -----------------------------------
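The realigned state comment describes the caller's frame at the point of the
tail call. As a worked instance of that layout (with num_arguments == 2 and
8-byte stack slots):

    // -- rsp[0]  : return address
    // -- rsp[8]  : argument 1            (argument num_arguments - 1)
    // -- rsp[16] : argument 0 (receiver), i.e. rsp[8 * num_arguments]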
@@ -971,6 +972,7 @@ void MacroAssembler::Set(Register dst, int64_t x) {
   }
 }
 
+
 void MacroAssembler::Set(const Operand& dst, int64_t x) {
   if (is_int32(x)) {
     movq(dst, Immediate(static_cast<int32_t>(x)));
@@ -1028,6 +1030,7 @@ Register MacroAssembler::GetSmiConstant(Smi* source) {
   return kScratchRegister;
 }
 
+
 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
   if (emit_debug_code()) {
     movq(dst,
@@ -3896,52 +3899,8 @@ void MacroAssembler::Allocate(int header_size,
                               Label* gc_required,
                               AllocationFlags flags) {
   ASSERT((flags & SIZE_IN_WORDS) == 0);
-  if (!FLAG_inline_new) {
-    if (emit_debug_code()) {
-      // Trash the registers to simulate an allocation failure.
-      movl(result, Immediate(0x7091));
-      movl(result_end, Immediate(0x7191));
-      if (scratch.is_valid()) {
-        movl(scratch, Immediate(0x7291));
-      }
-      // Register element_count is not modified by the function.
-    }
-    jmp(gc_required);
-    return;
-  }
-  ASSERT(!result.is(result_end));
-
-  // Load address of new object into result.
-  LoadAllocationTopHelper(result, scratch, flags);
-
-  // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
-  if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
-    testq(result, Immediate(kDoubleAlignmentMask));
-    Check(zero, "Allocation is not double aligned");
-  }
-
-  // Calculate new top and bail out if new space is exhausted.
-  ExternalReference allocation_limit =
-      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
-
-  // We assume that element_count*element_size + header_size does not
-  // overflow.
   lea(result_end, Operand(element_count, element_size, header_size));
-  addq(result_end, result);
-  j(carry, gc_required);
-  Operand limit_operand = ExternalOperand(allocation_limit);
-  cmpq(result_end, limit_operand);
-  j(above, gc_required);
-
-  // Update allocation top.
-  UpdateAllocationTopHelper(result_end, scratch, flags);
-
-  // Tag the result if requested.
-  if ((flags & TAG_OBJECT) != 0) {
-    ASSERT(kHeapObjectTag == 1);
-    incq(result);
-  }
+  Allocate(result_end, result, result_end, scratch, gc_required, flags);
 }
 
 
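The hunk above turns the element_count flavor of Allocate into a thin wrapper:
a single lea folds element_count * element_size + header_size into result_end,
and that register is then handed to the register-sized overload as the object
size. A condensed C++ sketch of the delegation shape (signature abbreviated
from the context lines above, not copied from the header):

    void MacroAssembler::Allocate(int header_size,
                                  ScaleFactor element_size,
                                  Register element_count,
                                  Register result,
                                  Register result_end,
                                  Register scratch,
                                  Label* gc_required,
                                  AllocationFlags flags) {
      ASSERT((flags & SIZE_IN_WORDS) == 0);
      // Size in bytes = element_count * element_size + header_size.
      lea(result_end, Operand(element_count, element_size, header_size));
      // result_end doubles as the object_size argument.
      Allocate(result_end, result, result_end, scratch, gc_required, flags);
    }

Passing result_end as both object_size and result_end relies on the callee
reading the size before it overwrites result_end with the new allocation top.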
@@ -3951,7 +3910,7 @@ void MacroAssembler::Allocate(Register object_size,
                               Register scratch,
                               Label* gc_required,
                               AllocationFlags flags) {
-  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
+  ASSERT((flags & SIZE_IN_WORDS) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
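The relaxed ASSERT looks like a prerequisite for the delegation introduced
above: the element_count wrapper guarantees only the absence of SIZE_IN_WORDS,
so the flags it forwards may legitimately include RESULT_CONTAINS_TOP, which
this overload previously rejected. A hypothetical call that only becomes valid
after this hunk (register choices and flag are illustrative, not taken from a
real call site):

    Label gc_required;
    // rbx: object size in bytes; rax: already holds the allocation top,
    // which is what RESULT_CONTAINS_TOP promises.
    masm->Allocate(rbx, rax, rcx, rdx, &gc_required, RESULT_CONTAINS_TOP);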
@@ -3970,6 +3929,13 @@ void MacroAssembler::Allocate(Register object_size,
   // Load address of new object into result.
   LoadAllocationTopHelper(result, scratch, flags);
 
+  // Align the next allocation. Storing the filler map without checking top is
+  // always safe because the limit of the heap is always aligned.
+  if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
+    testq(result, Immediate(kDoubleAlignmentMask));
+    Check(zero, "Allocation is not double aligned");
+  }
+
   // Calculate new top and bail out if new space is exhausted.
   ExternalReference allocation_limit =
       AllocationUtils::GetAllocationLimitReference(isolate(), flags);
@@ -3985,13 +3951,6 @@ void MacroAssembler::Allocate(Register object_size,
   // Update allocation top.
   UpdateAllocationTopHelper(result_end, scratch, flags);
 
-  // Align the next allocation. Storing the filler map without checking top is
-  // always safe because the limit of the heap is always aligned.
-  if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
-    testq(result, Immediate(kDoubleAlignmentMask));
-    Check(zero, "Allocation is not double aligned");
-  }
-
   // Tag the result if requested.
   if ((flags & TAG_OBJECT) != 0) {
     addq(result, Immediate(kHeapObjectTag));
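Taken together, these two hunks move the DOUBLE_ALIGNMENT debug check from
after UpdateAllocationTopHelper to immediately after the top is loaded. The
tested register, result, does not appear to be modified between the two
positions, so this reads as an ordering cleanup rather than a behavior change.
The resulting fast-path order, condensed from the surrounding context with the
size arithmetic elided:

    LoadAllocationTopHelper(result, scratch, flags);    // result <- top
    if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
      testq(result, Immediate(kDoubleAlignmentMask));   // object start
      Check(zero, "Allocation is not double aligned");
    }
    // ... compute result_end, compare against the allocation limit ...
    UpdateAllocationTopHelper(result_end, scratch, flags);
    // ... tag the result if requested ...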
@@ -4216,12 +4175,12 @@ void MacroAssembler::CopyBytes(Register destination,
   // we keep source aligned for the rep movs operation by copying the odd bytes
   // at the end of the ranges.
   movq(scratch, length);
-  shrl(length, Immediate(3));
+  shrl(length, Immediate(kPointerSizeLog2));
   repmovsq();
   // Move remaining bytes of length.
-  andl(scratch, Immediate(0x7));
-  movq(length, Operand(source, scratch, times_1, -8));
-  movq(Operand(destination, scratch, times_1, -8), length);
+  andl(scratch, Immediate(kPointerSize - 1));
+  movq(length, Operand(source, scratch, times_1, -kPointerSize));
+  movq(Operand(destination, scratch, times_1, -kPointerSize), length);
   addq(destination, scratch);
 
   if (min_length <= kLongStringLimit) {
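The CopyBytes hunk replaces the literals 3, 0x7, and -8 with constants derived
from kPointerSize; on x64 kPointerSize is 8 and kPointerSizeLog2 is 3, so the
emitted code is unchanged and only the intent becomes explicit. The arithmetic:
shifting the byte count right by kPointerSizeLog2 yields the qword count for
repmovsq, masking with kPointerSize - 1 leaves the 0..7 remainder, and the
final movq pair re-copies the last 8 bytes of the range so the remainder is
covered by one overlapping qword. A C-level sketch of the same strategy, for
illustration only (assumes length >= kPointerSize, as this code path does):

    #include <cstdint>
    #include <cstring>

    const int kPointerSize = 8;      // x64 word size
    const int kPointerSizeLog2 = 3;  // log2(kPointerSize)

    void CopyBytesSketch(char* dst, const char* src, std::size_t length) {
      std::size_t words = length >> kPointerSizeLog2;  // shrl: length / 8
      std::size_t rest = length & (kPointerSize - 1);  // andl: length % 8
      std::memcpy(dst, src, words * kPointerSize);     // repmovsq
      std::uint64_t tail;                              // overlapping tail:
      std::memcpy(&tail, src + words * kPointerSize + rest - kPointerSize,
                  sizeof tail);                        // movq load at -8
      std::memcpy(dst + words * kPointerSize + rest - kPointerSize, &tail,
                  sizeof tail);                        // movq store at -8
    }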