Index: src/ia32/lithium-codegen-ia32.cc |
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc |
index 4ce7fd6067d106d92209b07a5f17d272bd15b292..6f7b9d892762fd4d0f7a079c7285374f78784cfc 100644 |
--- a/src/ia32/lithium-codegen-ia32.cc |
+++ b/src/ia32/lithium-codegen-ia32.cc |
@@ -113,7 +113,7 @@ void LCodeGen::FinishCode(Handle<Code> code) { |
} |
-void LCodeGen::Abort(const char* reason) { |
+void LCodeGen::Abort(BailoutReason reason) { |
info()->set_bailout_reason(reason); |
status_ = ABORTED; |
} |
@@ -137,6 +137,19 @@ void LCodeGen::Comment(const char* format, ...) { |
 } |
+#ifdef _MSC_VER |
+void LCodeGen::MakeSureStackPagesMapped(int offset) { |
+  // On windows, you may not access the stack more than one page below |
+  // the most recently mapped page. To make the allocated area randomly |
+  // accessible, we write to each page in turn (the value is irrelevant). |
+  const int kPageSize = 4 * KB; |
+  for (offset -= kPageSize; offset > 0; offset -= kPageSize) { |
+    __ mov(Operand(esp, offset), eax); |
+  } |
+} |
+#endif |
+ |
+ |
bool LCodeGen::GeneratePrologue() { |
ASSERT(is_generating()); |
@@ -210,7 +220,7 @@ bool LCodeGen::GeneratePrologue() { |
dynamic_frame_alignment_ && |
FLAG_debug_code) { |
__ test(esp, Immediate(kPointerSize)); |
- __ Assert(zero, "frame is expected to be aligned"); |
+ __ Assert(zero, kFrameIsExpectedToBeAligned); |
} |
// Reserve space for the stack slots needed by the code. |
@@ -226,6 +236,9 @@ bool LCodeGen::GeneratePrologue() { |
} else { |
if (FLAG_debug_code) { |
__ sub(Operand(esp), Immediate(slots * kPointerSize)); |
+#ifdef _MSC_VER |
+ MakeSureStackPagesMapped(slots * kPointerSize); |
+#endif |
__ push(eax); |
__ mov(Operand(eax), Immediate(slots)); |
Label loop; |
@@ -238,15 +251,7 @@ bool LCodeGen::GeneratePrologue() { |
} else { |
__ sub(Operand(esp), Immediate(slots * kPointerSize)); |
#ifdef _MSC_VER |
- // On windows, you may not access the stack more than one page below |
- // the most recently mapped page. To make the allocated area randomly |
- // accessible, we write to each page in turn (the value is irrelevant). |
- const int kPageSize = 4 * KB; |
- for (int offset = slots * kPointerSize - kPageSize; |
- offset > 0; |
- offset -= kPageSize) { |
- __ mov(Operand(esp, offset), eax); |
- } |
+ MakeSureStackPagesMapped(slots * kPointerSize); |
#endif |
} |
@@ -685,6 +690,13 @@ double LCodeGen::ToDouble(LConstantOperand* op) const { |
} |
+ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const { |
+ HConstant* constant = chunk_->LookupConstant(op); |
+ ASSERT(constant->HasExternalReferenceValue()); |
+ return constant->ExternalReferenceValue(); |
+} |
+ |
+ |
bool LCodeGen::IsInteger32(LConstantOperand* op) const { |
return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32(); |
} |
@@ -936,7 +948,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, |
Address entry = |
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
if (entry == NULL) { |
- Abort("bailout was not prepared"); |
+ Abort(kBailoutWasNotPrepared); |
return; |
} |
@@ -1764,7 +1776,9 @@ void LCodeGen::DoShiftI(LShiftI* instr) { |
if (shift_count != 0) { |
if (instr->hydrogen_value()->representation().IsSmi() && |
instr->can_deopt()) { |
- __ shl(ToRegister(left), shift_count - 1); |
+ if (shift_count != 1) { |
+ __ shl(ToRegister(left), shift_count - 1); |
+ } |
__ SmiTag(ToRegister(left)); |
DeoptimizeIf(overflow, instr->environment()); |
} else { |
@@ -1853,6 +1867,11 @@ void LCodeGen::DoConstantD(LConstantD* instr) { |
} |
+void LCodeGen::DoConstantE(LConstantE* instr) { |
+ __ lea(ToRegister(instr->result()), Operand::StaticVariable(instr->value())); |
+} |
+ |
+ |
void LCodeGen::DoConstantT(LConstantT* instr) { |
Register reg = ToRegister(instr->result()); |
Handle<Object> handle = instr->value(); |
@@ -1957,7 +1976,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) { |
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
__ cmp(value, Immediate(encoding == String::ONE_BYTE_ENCODING |
? one_byte_seq_type : two_byte_seq_type)); |
- __ Check(equal, "Unexpected string type"); |
+ __ Check(equal, kUnexpectedStringType); |
__ pop(value); |
} |
@@ -2844,7 +2863,7 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) { |
__ cmp(Operand(esp, |
(parameter_count + extra_value_count) * kPointerSize), |
Immediate(kAlignmentZapValue)); |
- __ Assert(equal, "expected alignment marker"); |
+ __ Assert(equal, kExpectedAlignmentMarker); |
} |
__ Ret((parameter_count + extra_value_count) * kPointerSize, ecx); |
} else { |
@@ -2857,7 +2876,7 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) { |
__ cmp(Operand(esp, reg, times_pointer_size, |
extra_value_count * kPointerSize), |
Immediate(kAlignmentZapValue)); |
- __ Assert(equal, "expected alignment marker"); |
+ __ Assert(equal, kExpectedAlignmentMarker); |
} |
// emit code to restore stack based on instr->parameter_count() |
@@ -2977,20 +2996,6 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { |
} |
-void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) { |
- Register object = ToRegister(instr->object()); |
- Register temp = ToRegister(instr->temp()); |
- ExternalReference sites_list_address = instr->GetReference(isolate()); |
- |
- __ mov(temp, Immediate(sites_list_address)); |
- __ mov(temp, Operand(temp, 0)); |
- __ mov(FieldOperand(object, instr->hydrogen()->store_field().offset()), |
- temp); |
- __ mov(temp, Immediate(sites_list_address)); |
- __ mov(Operand(temp, 0), object); |
-} |
- |
- |
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
Register context = ToRegister(instr->context()); |
Register result = ToRegister(instr->result()); |
@@ -3049,6 +3054,19 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
HObjectAccess access = instr->hydrogen()->access(); |
int offset = access.offset(); |
+ |
+ if (access.IsExternalMemory()) { |
+ Register result = ToRegister(instr->result()); |
+ if (instr->object()->IsConstantOperand()) { |
+ ExternalReference external_reference = ToExternalReference( |
+ LConstantOperand::cast(instr->object())); |
+ __ mov(result, MemOperand::StaticVariable(external_reference)); |
+ } else { |
+ __ mov(result, MemOperand(ToRegister(instr->object()), offset)); |
+ } |
+ return; |
+ } |
+ |
Register object = ToRegister(instr->object()); |
if (FLAG_track_double_fields && |
instr->hydrogen()->representation().IsDouble()) { |
@@ -3138,9 +3156,6 @@ static bool CompactEmit(SmallMapList* list, |
int i, |
Isolate* isolate) { |
Handle<Map> map = list->at(i); |
- // If the map has ElementsKind transitions, we will generate map checks |
- // for each kind in __ CompareMap(..., ALLOW_ELEMENTS_TRANSITION_MAPS). |
- if (map->HasElementsTransition()) return false; |
LookupResult lookup(isolate); |
map->LookupDescriptor(NULL, *name, &lookup); |
return lookup.IsField() || lookup.IsConstant(); |
@@ -3432,7 +3447,7 @@ Operand LCodeGen::BuildFastArrayOperand( |
if (key->IsConstantOperand()) { |
int constant_value = ToInteger32(LConstantOperand::cast(key)); |
if (constant_value & 0xF0000000) { |
- Abort("array index constant value too big"); |
+ Abort(kArrayIndexConstantValueTooBig); |
} |
return Operand(elements_pointer_reg, |
((constant_value + additional_index) << shift_size) |
@@ -3806,7 +3821,7 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) { |
__ xorps(scratch, scratch); |
__ subsd(scratch, input_reg); |
__ pand(input_reg, scratch); |
- } else if (r.IsInteger32()) { |
+ } else if (r.IsSmiOrInteger32()) { |
EmitIntegerMathAbs(instr); |
} else { // Tagged case. |
DeferredMathAbsTaggedHeapNumber* deferred = |
@@ -4328,10 +4343,26 @@ void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { |
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
Representation representation = instr->representation(); |
- Register object = ToRegister(instr->object()); |
HObjectAccess access = instr->hydrogen()->access(); |
int offset = access.offset(); |
+ if (access.IsExternalMemory()) { |
+ ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); |
+ MemOperand operand = instr->object()->IsConstantOperand() |
+ ? MemOperand::StaticVariable( |
+ ToExternalReference(LConstantOperand::cast(instr->object()))) |
+ : MemOperand(ToRegister(instr->object()), offset); |
+ if (instr->value()->IsConstantOperand()) { |
+ LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); |
+ __ mov(operand, Immediate(ToInteger32(operand_value))); |
+ } else { |
+ Register value = ToRegister(instr->value()); |
+ __ mov(operand, value); |
+ } |
+ return; |
+ } |
+ |
+ Register object = ToRegister(instr->object()); |
Handle<Map> transition = instr->transition(); |
if (FLAG_track_fields && representation.IsSmi()) { |
@@ -4396,8 +4427,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { |
Register write_register = object; |
if (!access.IsInobject()) { |
write_register = ToRegister(instr->temp()); |
- __ mov(write_register, |
- FieldOperand(object, JSObject::kPropertiesOffset)); |
+ __ mov(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); |
} |
if (instr->value()->IsConstantOperand()) { |
@@ -4837,13 +4867,6 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { |
} |
-void LCodeGen::DoStringLength(LStringLength* instr) { |
- Register string = ToRegister(instr->string()); |
- Register result = ToRegister(instr->result()); |
- __ mov(result, FieldOperand(string, String::kLengthOffset)); |
-} |
- |
- |
void LCodeGen::DoStringAdd(LStringAdd* instr) { |
EmitPushTaggedOperand(instr->left()); |
EmitPushTaggedOperand(instr->right()); |
@@ -6009,10 +6032,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) { |
if (instr->hydrogen()->MustAllocateDoubleAligned()) { |
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); |
} |
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { |
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace()); |
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); |
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) { |
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); |
} |
@@ -6064,11 +6089,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) { |
__ push(Immediate(Smi::FromInt(size))); |
} |
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) { |
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace()); |
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
CallRuntimeFromDeferred( |
Runtime::kAllocateInOldPointerSpace, 1, instr, instr->context()); |
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) { |
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
CallRuntimeFromDeferred( |
Runtime::kAllocateInOldDataSpace, 1, instr, instr->context()); |
} else { |