Index: src/x64/macro-assembler-x64.cc
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 5988b7d2a69ba058fac9abbed05ee5b943be0252..4c19fced69dd5b9fa7a20eb4ce28a27db78ddaf4 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -505,8 +505,17 @@ void MacroAssembler::NegativeZeroTest(Register result,
 
 
 void MacroAssembler::Abort(BailoutReason reason) {
-#ifdef DEBUG
+  // We want to pass the msg string as a smi to avoid GC problems;
+  // however, msg is not guaranteed to be properly aligned. Instead,
+  // we pass an aligned pointer that carries a proper v8 smi tag,
+  // and additionally pass the alignment difference from the real
+  // pointer as a second smi.
   const char* msg = GetBailoutReason(reason);
+  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
+  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
+  // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
+  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
 #ifdef DEBUG
   if (msg != NULL) {
     RecordComment("Abort message: ");
     RecordComment(msg);
@@ -519,7 +528,10 @@ void MacroAssembler::Abort(BailoutReason reason) {
 #endif
 
   push(rax);
-  Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)),
+  Move(kScratchRegister, reinterpret_cast<Smi*>(p0),
+       Assembler::RelocInfoNone());
+  push(kScratchRegister);
+  Move(kScratchRegister, Smi::FromInt(static_cast<int>(p1 - p0)),
        Assembler::RelocInfoNone());
   push(kScratchRegister);
 
@@ -527,9 +539,9 @@ void MacroAssembler::Abort(BailoutReason reason) {
     // We don't actually want to generate a pile of code for this, so just
     // claim there is a stack frame, without generating one.
     FrameScope scope(this, StackFrame::NONE);
-    CallRuntime(Runtime::kAbort, 1);
+    CallRuntime(Runtime::kAbort, 2);
   } else {
-    CallRuntime(Runtime::kAbort, 1);
+    CallRuntime(Runtime::kAbort, 2);
   }
   // Control will not return here.
   int3();
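
[Editor's note on the two-smi encoding above: assuming V8's usual tag values (kSmiTag == 0, kSmiTagMask == 1), rounding the message pointer down to an even address yields a value that passes the IsSmi() tag check, and the one-bit remainder travels separately as a genuine smi. A minimal standalone sketch of the round trip; the constants and the main() framing are illustrative assumptions, not the V8 code itself:

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Assumed to match V8's x64 defaults: tag value 0, tag mask 1.
    static const intptr_t kSmiTag = 0;
    static const intptr_t kSmiTagMask = 1;

    int main() {
      const char* msg = "example bailout message";
      // Sender side (mirrors Abort above): split the possibly unaligned
      // pointer into a tag-clean base and the alignment difference.
      intptr_t p1 = reinterpret_cast<intptr_t>(msg);
      intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // low bit cleared
      intptr_t delta = p1 - p0;                     // 0 or 1
      // Receiver side (what Runtime::kAbort can do with its two
      // arguments): add the delta back to recover the original char*.
      const char* rebuilt = reinterpret_cast<const char*>(p0) + delta;
      assert(rebuilt == msg);
      printf("abort: %s\n", rebuilt);
      return 0;
    }

This is also why CallRuntime now passes 2 arguments: the tagged base pointer and the alignment delta each occupy one argument slot.]
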
@@ -972,17 +984,12 @@ void MacroAssembler::Set(Register dst, int64_t x) {
 }
 
 
-void MacroAssembler::Set(const Operand& dst, intptr_t x) {
-  if (kPointerSize == kInt64Size) {
-    if (is_int32(x)) {
-      movp(dst, Immediate(static_cast<int32_t>(x)));
-    } else {
-      Set(kScratchRegister, x);
-      movp(dst, kScratchRegister);
-    }
+void MacroAssembler::Set(const Operand& dst, int64_t x) {
+  if (is_int32(x)) {
+    movq(dst, Immediate(static_cast<int32_t>(x)));
   } else {
-    ASSERT(kPointerSize == kInt32Size);
-    movp(dst, Immediate(static_cast<int32_t>(x)));
+    Set(kScratchRegister, x);
+    movq(dst, kScratchRegister);
   }
 }
 
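
[Editor's note: the rewritten Set(const Operand&, int64_t) keeps the split between a sign-extended 32-bit immediate and a move through kScratchRegister because x64 movq-to-memory only accepts an imm32, which the CPU sign-extends to 64 bits. A rough standalone restatement of the width test it relies on; V8's actual is_int32 lives in the assembler headers, so this version is an assumption-level sketch:

    #include <cstdint>

    // True when x survives a round trip through int32_t, i.e. it fits
    // in a sign-extended 32-bit immediate.
    static bool is_int32(int64_t x) {
      return x == static_cast<int64_t>(static_cast<int32_t>(x));
    }

For example, is_int32(-1) is true (encodable as an imm32), while is_int32(INT64_C(1) << 31) is false, so that value takes the scratch-register path.]
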
@@ -2585,17 +2592,6 @@ void MacroAssembler::Jump(ExternalReference ext) {
 }
 
 
-void MacroAssembler::Jump(const Operand& op) {
-  if (kPointerSize == kInt64Size) {
-    jmp(op);
-  } else {
-    ASSERT(kPointerSize == kInt32Size);
-    movp(kScratchRegister, op);
-    jmp(kScratchRegister);
-  }
-}
-
-
 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
   Move(kScratchRegister, destination, rmode);
   jmp(kScratchRegister);
@@ -2627,17 +2623,6 @@ void MacroAssembler::Call(ExternalReference ext) {
 }
 
 
-void MacroAssembler::Call(const Operand& op) {
-  if (kPointerSize == kInt64Size) {
-    call(op);
-  } else {
-    ASSERT(kPointerSize == kInt32Size);
-    movp(kScratchRegister, op);
-    call(kScratchRegister);
-  }
-}
-
-
 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
 #ifdef DEBUG
   int end_position = pc_offset() + CallSize(destination);
@@ -4530,6 +4515,30 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
 }
 
 
+void MacroAssembler::LoadInitialArrayMap(
+    Register function_in, Register scratch,
+    Register map_out, bool can_have_holes) {
+  ASSERT(!function_in.is(map_out));
+  Label done;
+  movp(map_out, FieldOperand(function_in,
+                             JSFunction::kPrototypeOrInitialMapOffset));
+  if (!FLAG_smi_only_arrays) {
+    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+                                        kind,
+                                        map_out,
+                                        scratch,
+                                        &done);
+  } else if (can_have_holes) {
+    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+                                        FAST_HOLEY_SMI_ELEMENTS,
+                                        map_out,
+                                        scratch,
+                                        &done);
+  }
+  bind(&done);
+}
+
 #ifdef _WIN64
 static const int kRegisterPassedArguments = 4;
 #else
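
[Editor's note: the new LoadInitialArrayMap helper picks the initial map for a JSArray from the constructor's initial (smi-only) map, transitioning it when smi-only arrays are disabled or the caller may produce holes. A rough C++ restatement of the decision the emitted code encodes; the enum mirrors V8's ElementsKind names, and the standalone function is an illustrative assumption, not V8 API:

    // Illustrative only: mirrors the branch structure of
    // MacroAssembler::LoadInitialArrayMap above.
    enum ElementsKind {
      FAST_SMI_ELEMENTS,
      FAST_HOLEY_SMI_ELEMENTS,
      FAST_ELEMENTS,
      FAST_HOLEY_ELEMENTS
    };

    ElementsKind InitialArrayElementsKind(bool smi_only_arrays,
                                          bool can_have_holes) {
      if (!smi_only_arrays) {
        // Smi-only arrays disabled: go straight to (holey) fast elements.
        return can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
      }
      // Smi-only arrays enabled: stay smi-only, allowing holes if needed.
      return can_have_holes ? FAST_HOLEY_SMI_ELEMENTS : FAST_SMI_ELEMENTS;
    }

The emitted code reaches the same result by loading the FAST_SMI_ELEMENTS initial map first and letting LoadTransitionedArrayMapConditional rewrite map_out only when a transitioned map actually exists.]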