Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 7f0ed3dbaeaafe00f3924c307a96a6e431d4d7d9..5682ae82df2fead77d59bd199d242cb78b8c5b1a 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -96,7 +96,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
 }
 
 
-void LChunkBuilder::Abort(const char* reason) {
+void LChunkBuilder::Abort(BailoutReason reason) {
   info()->set_bailout_reason(reason);
   status_ = ABORTED;
 }
@@ -120,6 +120,16 @@ void LCodeGen::Comment(const char* format, ...) {
 }
 
 
+#ifdef _MSC_VER
+void LCodeGen::MakeSureStackPagesMapped(int offset) {
+  const int kPageSize = 4 * KB;
+  for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
+    __ movq(Operand(rsp, offset), rax);
+  }
+}
+#endif
+
+
 bool LCodeGen::GeneratePrologue() {
   ASSERT(is_generating());
 
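
Not part of the patch: the helper added above factors out the page-touching loop that the second prologue hunk below deletes. The deleted comment gives the rationale: Windows faults if code touches the stack more than one page below the most recently mapped page, so after a large rsp adjustment every 4 KB page of the new area is written once, nearest the mapped pages first, to walk the guard page down (the value written is irrelevant). A minimal standalone C++ sketch of the same idea, with hypothetical names:

    // Touch one word in each 4 KB page of a freshly reserved stack area,
    // starting with the page closest to the already-mapped region, so the OS
    // commits every page before real stores land anywhere inside the area.
    static void TouchStackPages(volatile char* area_base, int size_in_bytes) {
      const int kPageSize = 4 * 1024;
      for (int offset = size_in_bytes - kPageSize; offset > 0;
           offset -= kPageSize) {
        area_base[offset] = 0;  // The value is irrelevant; the access is the point.
      }
    }
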
@@ -169,6 +179,9 @@ bool LCodeGen::GeneratePrologue() {
   if (slots > 0) {
     if (FLAG_debug_code) {
       __ subq(rsp, Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+      MakeSureStackPagesMapped(slots * kPointerSize);
+#endif
       __ push(rax);
       __ Set(rax, slots);
       __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE64);
@@ -182,15 +195,7 @@ bool LCodeGen::GeneratePrologue() {
     } else {
       __ subq(rsp, Immediate(slots * kPointerSize));
 #ifdef _MSC_VER
-      // On windows, you may not access the stack more than one page below
-      // the most recently mapped page. To make the allocated area randomly
-      // accessible, we write to each page in turn (the value is irrelevant).
-      const int kPageSize = 4 * KB;
-      for (int offset = slots * kPointerSize - kPageSize;
-           offset > 0;
-           offset -= kPageSize) {
-        __ movq(Operand(rsp, offset), rax);
-      }
+      MakeSureStackPagesMapped(slots * kPointerSize);
 #endif
     }
 
@@ -429,6 +434,13 @@ double LCodeGen::ToDouble(LConstantOperand* op) const {
 }
 
 
+ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const {
+  HConstant* constant = chunk_->LookupConstant(op);
+  ASSERT(constant->HasExternalReferenceValue());
+  return constant->ExternalReferenceValue();
+}
+
+
 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
   HConstant* constant = chunk_->LookupConstant(op);
   ASSERT(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
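
Not part of the patch: ToExternalReference() mirrors the ToDouble()/ToHandle() accessors around it (its declaration is assumed to sit next to theirs in lithium-codegen-x64.h, which this file does not show) and it feeds the external-memory fast paths added further down. Condensed from the DoLoadNamedField hunk below, the call pattern looks roughly like this:

    // Illustrative only; see the DoLoadNamedField / DoStoreNamedField hunks.
    if (instr->object()->IsConstantOperand()) {
      // The object operand is a known off-heap address.
      ExternalReference ref =
          ToExternalReference(LConstantOperand::cast(instr->object()));
      __ load_rax(ref);  // store_rax(ref) on the store path
    } else {
      Register object = ToRegister(instr->object());
      __ movq(result, MemOperand(object, offset));
    }
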
@@ -649,7 +661,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
   Address entry =
       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
   if (entry == NULL) {
-    Abort("bailout was not prepared");
+    Abort(kBailoutWasNotPrepared);
     return;
   }
 
@@ -1523,6 +1535,11 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
 }
 
 
+void LCodeGen::DoConstantE(LConstantE* instr) {
+  __ LoadAddress(ToRegister(instr->result()), instr->value());
+}
+
+
 void LCodeGen::DoConstantT(LConstantT* instr) {
   Handle<Object> value = instr->value();
   AllowDeferredHandleDereference smi_check;
@@ -1625,7 +1642,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
     __ cmpq(value, Immediate(encoding == String::ONE_BYTE_ENCODING
                              ? one_byte_seq_type : two_byte_seq_type));
-    __ Check(equal, "Unexpected string type");
+    __ Check(equal, kUnexpectedStringType);
     __ pop(value);
   }
 
@@ -2622,16 +2639,6 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
 }
 
 
-void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
-  Register object = ToRegister(instr->object());
-  ExternalReference sites_list_address = instr->GetReference(isolate());
-  __ Load(kScratchRegister, sites_list_address);
-  __ movq(FieldOperand(object, instr->hydrogen()->store_field().offset()),
-          kScratchRegister);
-  __ Store(sites_list_address, object);
-}
-
-
 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
   Register context = ToRegister(instr->context());
   Register result = ToRegister(instr->result());
@@ -2689,6 +2696,19 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
   HObjectAccess access = instr->hydrogen()->access();
   int offset = access.offset();
+
+  if (access.IsExternalMemory()) {
+    Register result = ToRegister(instr->result());
+    if (instr->object()->IsConstantOperand()) {
+      ASSERT(result.is(rax));
+      __ load_rax(ToExternalReference(LConstantOperand::cast(instr->object())));
+    } else {
+      Register object = ToRegister(instr->object());
+      __ movq(result, MemOperand(object, offset));
+    }
+    return;
+  }
+
   Register object = ToRegister(instr->object());
   if (FLAG_track_double_fields &&
       instr->hydrogen()->representation().IsDouble()) {
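
A note on the ASSERT(result.is(rax)) in the constant-object branch above (my reading, not text from the patch): load_rax() and store_rax() presumably emit the x64 accumulator-only MOV forms that take a 64-bit absolute address, which is what lets a load from a known external address be a single instruction:

    // Accumulator-only absolute moves assumed to back load_rax()/store_rax():
    //   REX.W A1 imm64   movq rax, [imm64]    (load_rax)
    //   REX.W A3 imm64   movq [imm64], rax    (store_rax)
    // Any other destination register would first need the address materialized
    // in a scratch register and then a regular movq through it.
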
@@ -2755,9 +2775,6 @@ static bool CompactEmit(SmallMapList* list,
                         int i,
                         Isolate* isolate) {
   Handle<Map> map = list->at(i);
-  // If the map has ElementsKind transitions, we will generate map checks
-  // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS).
-  if (map->HasElementsTransition()) return false;
   LookupResult lookup(isolate);
   map->LookupDescriptor(NULL, *name, &lookup);
   return lookup.IsField() || lookup.IsConstant();
@@ -3071,7 +3088,7 @@ Operand LCodeGen::BuildFastArrayOperand(
   if (key->IsConstantOperand()) {
     int constant_value = ToInteger32(LConstantOperand::cast(key));
     if (constant_value & 0xF0000000) {
-      Abort("array index constant value too big");
+      Abort(kArrayIndexConstantValueTooBig);
     }
     return Operand(elements_pointer_reg,
                    ((constant_value + additional_index) << shift_size)
@@ -3409,6 +3426,17 @@ void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
 }
 
 
+void LCodeGen::EmitInteger64MathAbs(LMathAbs* instr) {
+  Register input_reg = ToRegister(instr->value());
+  __ testq(input_reg, input_reg);
+  Label is_positive;
+  __ j(not_sign, &is_positive, Label::kNear);
+  __ neg(input_reg);  // Sets flags.
+  DeoptimizeIf(negative, instr->environment());
+  __ bind(&is_positive);
+}
+
+
 void LCodeGen::DoMathAbs(LMathAbs* instr) {
   // Class for deferred case.
   class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
@@ -3434,6 +3462,8 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) {
     __ andpd(input_reg, scratch);
   } else if (r.IsInteger32()) {
     EmitIntegerMathAbs(instr);
+  } else if (r.IsSmi()) {
+    EmitInteger64MathAbs(instr);
   } else {  // Tagged case.
     DeferredMathAbsTaggedHeapNumber* deferred =
         new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
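
Not part of the patch: a plain 64-bit neg suffices for the new Smi case because of the x64 Smi layout, where the 32-bit payload lives in the upper half of the tagged word and the low bits are a zero tag. Negating the whole word therefore negates the payload, and the single value with no positive counterpart is exactly what the DeoptimizeIf(negative, ...) after neg catches. A small illustration under that assumed encoding:

    #include <cstdint>
    // x64 Smi encoding: payload in the high 32 bits, zero tag bits below.
    static inline int64_t SmiFromInt(int32_t value) {
      return static_cast<int64_t>(value) << 32;
    }
    // -SmiFromInt(v) == SmiFromInt(-v) for every v except INT32_MIN;
    // SmiFromInt(INT32_MIN) == 0x8000000000000000 is its own negation, so the
    // sign flag stays set after neg and the code above deoptimizes.
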
@@ -3926,11 +3956,24 @@ void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
   Representation representation = instr->representation();
 
-  Register object = ToRegister(instr->object());
-
   HObjectAccess access = instr->hydrogen()->access();
   int offset = access.offset();
 
+  if (access.IsExternalMemory()) {
+    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+    Register value = ToRegister(instr->value());
+    if (instr->object()->IsConstantOperand()) {
+      ASSERT(value.is(rax));
+      LConstantOperand* object = LConstantOperand::cast(instr->object());
+      __ store_rax(ToExternalReference(object));
+    } else {
+      Register object = ToRegister(instr->object());
+      __ movq(MemOperand(object, offset), value);
+    }
+    return;
+  }
+
+  Register object = ToRegister(instr->object());
   Handle<Map> transition = instr->transition();
 
   if (FLAG_track_fields && representation.IsSmi()) {
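
Not part of the patch: on the store side, the ASSERT that no write barrier is needed is what keeps this path a single move; the destination is off-heap, reached via an ExternalReference or a raw base register, so there is no remembered set for the GC to update. The two fast paths in this patch line up as follows (condensed, illustrative only):

    //   load  (DoLoadNamedField):  result <- [external address]  via load_rax
    //                              result <- [object + offset]   via movq
    //   store (DoStoreNamedField): [external address] <- value   via store_rax
    //                              [object + offset]  <- value   via movq
    // Neither path emits RecordWrite, matching the NeedsWriteBarrier() ASSERT.
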
@@ -4420,13 +4463,6 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
 }
 
 
-void LCodeGen::DoStringLength(LStringLength* instr) {
-  Register string = ToRegister(instr->string());
-  Register result = ToRegister(instr->result());
-  __ movq(result, FieldOperand(string, String::kLengthOffset));
-}
-
-
 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
   LOperand* input = instr->value();
   ASSERT(input->IsRegister() || input->IsStackSlot());
@@ -5054,10 +5090,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
 
@@ -5109,10 +5147,12 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
     __ Push(Smi::FromInt(size));
   }
 
-  if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
-    ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
-  } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
     CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
   } else {
     CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);