Chromium Code Reviews

Unified Diff: src/ia32/lithium-codegen-ia32.cc

Issue 6452001: Allow esi to be an allocatable register on IA32. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 10 months ago
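The gist of the change: once esi can be handed out by the register allocator, call sites can no longer assume it holds the current context, so the context is reloaded from its frame slot before calling out. CallCode and CallRuntime gain a bool `adjusted` parameter, and call sites that have not already put the context into esi pass `false`. Below is a simplified sketch of the helper, assembled from the diff that follows (not a verbatim excerpt of the file):

// Sketch of LCodeGen::CallCode with the new `adjusted` flag, following this
// patch. When the caller has not adjusted esi, the context is reloaded from
// the frame slot written by the prologue before the call is emitted.
void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr,
                        bool adjusted) {
  ASSERT(instr != NULL);
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  if (!adjusted) {
    // esi may hold an arbitrary allocated value at this point.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ call(code, mode);
  RegisterLazyDeoptimization(instr);
}

The same one-instruction reload is emitted inline before CallRuntimeSaveDoubles, InvokeBuiltin, and a few direct stub calls; it is presumably also the extra three bytes of code behind kAdditionalDelta growing from 13 to 16 in DoDeferredLInstanceOfKnownGlobal (mov esi, [ebp+disp8] encodes in 3 bytes on ia32). Conversely, RecordSafepoint no longer unconditionally records esi as a pointer register, and instructions that consume the context now carry an explicit context operand asserted to be in esi.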
Index: src/ia32/lithium-codegen-ia32.cc
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 50ed1048ba94343f0e8dac9d1e51c1c9327f4a0e..4c9c8e30717c12244cae568b4e4ae29f4971fe5d 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -157,6 +157,8 @@ bool LCodeGen::GeneratePrologue() {
// Trace the call.
if (FLAG_trace) {
+ // We have not executed any compiled code yet, so esi still holds the
+ // incoming context.
__ CallRuntime(Runtime::kTraceEnter, 0);
}
return !is_aborted();
@@ -367,10 +369,14 @@ void LCodeGen::AddToTranslation(Translation* translation,
void LCodeGen::CallCode(Handle<Code> code,
RelocInfo::Mode mode,
- LInstruction* instr) {
+ LInstruction* instr,
+ bool adjusted) {
ASSERT(instr != NULL);
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
+ if (!adjusted) {
fschneider 2011/02/09 13:41:53 Is there a convenient way to assert that "!adjus
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
__ call(code, mode);
RegisterLazyDeoptimization(instr);
@@ -383,15 +389,19 @@ void LCodeGen::CallCode(Handle<Code> code,
}
-void LCodeGen::CallRuntime(Runtime::Function* function,
- int num_arguments,
- LInstruction* instr) {
+void LCodeGen::CallRuntime(Runtime::Function* fun,
+ int argc,
+ LInstruction* instr,
+ bool adjusted) {
ASSERT(instr != NULL);
ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
- __ CallRuntime(function, num_arguments);
+ if (!adjusted) {
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
+ __ CallRuntime(fun, argc);
RegisterLazyDeoptimization(instr);
}
@@ -568,10 +578,6 @@ void LCodeGen::RecordSafepoint(
safepoint.DefinePointerRegister(ToRegister(pointer));
}
}
- if (kind & Safepoint::kWithRegisters) {
- // Register esi always contains a pointer to the context.
- safepoint.DefinePointerRegister(esi);
- }
}
@@ -635,6 +641,7 @@ void LCodeGen::DoParameter(LParameter* instr) {
void LCodeGen::DoCallStub(LCallStub* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->result()).is(eax));
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpConstructResult: {
@@ -1011,7 +1018,7 @@ void LCodeGen::DoBitNotI(LBitNotI* instr) {
void LCodeGen::DoThrow(LThrow* instr) {
__ push(ToOperand(instr->InputAt(0)));
- CallRuntime(Runtime::kThrow, 1, instr);
+ CallRuntime(Runtime::kThrow, 1, instr, false);
if (FLAG_debug_code) {
Comment("Unreachable code.");
@@ -1083,7 +1090,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
ASSERT(ToRegister(instr->result()).is(eax));
TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
@@ -1196,6 +1203,7 @@ void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
__ pushad();
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kStackGuard);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
@@ -1686,6 +1694,7 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
// Object and function are in fixed registers defined by the stub.
+ ASSERT(ToRegister(instr->context()).is(esi));
InstanceofStub stub(InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
@@ -1701,6 +1710,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1794,12 +1804,13 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Register temp = ToRegister(instr->TempAt(0));
ASSERT(temp.is(edi));
__ mov(InstanceofStub::right(), Immediate(instr->function()));
- static const int kAdditionalDelta = 13;
+ static const int kAdditionalDelta = 16;
int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta;
__ bind(&before_push_delta);
__ mov(temp, Immediate(delta));
__ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ call(stub.GetCode(), RelocInfo::CODE_TARGET);
ASSERT_EQ(kAdditionalDelta,
masm_->SizeOfCodeGeneratedSince(&before_push_delta));
@@ -1836,7 +1847,7 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
Condition condition = ComputeCompareCondition(op);
if (op == Token::GT || op == Token::LTE) {
@@ -1859,7 +1870,7 @@ void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
int false_block = chunk_->LookupDestination(instr->false_block_id());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
// The compare stub expects compare condition and the input operands
// reversed for GT and LTE.
@@ -1874,9 +1885,12 @@ void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
void LCodeGen::DoReturn(LReturn* instr) {
if (FLAG_trace) {
- // Preserve the return value on the stack and rely on the runtime
- // call to return the value in the same register.
+ // Preserve the return value on the stack and rely on the runtime call
+ // to return the value in the same register. We're leaving the code
+ // managed by the register allocator and tearing down the frame, so it's
+ // safe to write to the context register.
__ push(eax);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntime(Runtime::kTraceExit, 1);
}
__ mov(esp, ebp);
@@ -1945,6 +1959,7 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(eax));
ASSERT(ToRegister(instr->result()).is(eax));
@@ -2047,6 +2062,7 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(edx));
ASSERT(ToRegister(instr->key()).is(eax));
@@ -2115,7 +2131,11 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ cmp(receiver, Factory::undefined_value());
__ j(not_equal, &receiver_ok);
__ bind(&global_receiver);
- __ mov(receiver, GlobalObjectOperand());
+ // TODO(kmillikin): We have a hydrogen value for the global object. See
+ // if it's better to use it than to explicitly fetch it from the context
+ // here.
+ __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
+ __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
__ bind(&receiver_ok);
Register length = ToRegister(instr->length());
@@ -2156,9 +2176,6 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
ASSERT(receiver.is(eax));
v8::internal::ParameterCount actual(eax);
__ InvokeFunction(edi, actual, CALL_FUNCTION, &safepoint_generator);
-
- // Restore context.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
@@ -2174,7 +2191,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
void LCodeGen::DoContext(LContext* instr) {
Register result = ToRegister(instr->result());
- __ mov(result, esi);
+ __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
}
@@ -2210,6 +2227,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
(scope()->num_heap_slots() > 0);
if (change_context) {
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+ } else {
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
fschneider 2011/02/09 15:23:31 Should this function for now also take an addition
}
// Set eax to arguments count if adaption is not needed. Assumes that eax
@@ -2230,9 +2249,6 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
// Setup deoptimization.
RegisterLazyDeoptimization(instr);
-
- // Restore context.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
@@ -2275,6 +2291,7 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
// Slow case: Call the runtime system to do the number allocation.
__ bind(&slow);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
@@ -2486,7 +2503,7 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::LOG,
TranscendentalCacheStub::UNTAGGED);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
@@ -2494,7 +2511,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::COS,
TranscendentalCacheStub::UNTAGGED);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
@@ -2502,7 +2519,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::SIN,
TranscendentalCacheStub::UNTAGGED);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
@@ -2540,46 +2557,46 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
+ ASSERT(ToRegister(instr->key()).is(ecx));
ASSERT(ToRegister(instr->result()).is(eax));
- ASSERT(ToRegister(instr->InputAt(0)).is(ecx));
int arity = instr->arity();
Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
CallCode(ic, RelocInfo::CODE_TARGET, instr);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
void LCodeGen::DoCallNamed(LCallNamed* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
__ mov(ecx, instr->name());
CallCode(ic, RelocInfo::CODE_TARGET, instr);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
void LCodeGen::DoCallFunction(LCallFunction* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ Drop(1);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
Handle<Code> ic = StubCache::ComputeCallInitialize(arity, NOT_IN_LOOP);
__ mov(ecx, instr->name());
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
@@ -2591,7 +2608,8 @@ void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
void LCodeGen::DoCallNew(LCallNew* instr) {
- ASSERT(ToRegister(instr->InputAt(0)).is(edi));
+ ASSERT(ToRegister(instr->context()).is(esi));
+ ASSERT(ToRegister(instr->constructor()).is(edi));
ASSERT(ToRegister(instr->result()).is(eax));
Handle<Code> builtin(Builtins::builtin(Builtins::JSConstructCall));
@@ -2601,7 +2619,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
- CallRuntime(instr->function(), instr->arity(), instr);
+ CallRuntime(instr->function(), instr->arity(), instr, false);
}
@@ -2636,6 +2654,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(edx));
ASSERT(ToRegister(instr->value()).is(eax));
@@ -2684,6 +2703,7 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(edx));
ASSERT(ToRegister(instr->key()).is(ecx));
ASSERT(ToRegister(instr->value()).is(eax));
@@ -2819,6 +2839,7 @@ void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
__ SmiTag(index);
__ push(index);
}
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
RecordSafepointWithRegisters(
instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
@@ -2896,6 +2917,7 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
// integer value.
__ mov(Operand(esp, EspIndexForPushAll(reg) * kPointerSize), Immediate(0));
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
@@ -2943,6 +2965,7 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
__ Set(reg, Immediate(0));
__ PushSafepointRegisters();
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
@@ -3358,21 +3381,22 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
} else if (instr->hydrogen()->depth() > 1) {
- CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
+ CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
- CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
+ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false);
} else {
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
}
}
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
// Setup the parameters to the stub/runtime call.
__ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
@@ -3410,7 +3434,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
__ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
__ push(Immediate(instr->hydrogen()->pattern()));
__ push(Immediate(instr->hydrogen()->flags()));
- CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
+ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false);
__ mov(ebx, eax);
__ bind(&materialized);
@@ -3422,7 +3446,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
__ bind(&runtime_allocate);
__ push(ebx);
__ push(Immediate(Smi::FromInt(size)));
- CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
+ CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false);
__ pop(ebx);
__ bind(&allocated);
@@ -3449,14 +3473,14 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
if (shared_info->num_literals() == 0 && !pretenure) {
FastNewClosureStub stub;
__ push(Immediate(shared_info));
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
} else {
- __ push(esi);
+ __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
__ push(Immediate(shared_info));
__ push(Immediate(pretenure
? Factory::true_value()
: Factory::false_value()));
- CallRuntime(Runtime::kNewClosure, 3, instr);
+ CallRuntime(Runtime::kNewClosure, 3, instr, false);
}
}
@@ -3468,7 +3492,7 @@ void LCodeGen::DoTypeof(LTypeof* instr) {
} else {
__ push(ToOperand(input));
}
- CallRuntime(Runtime::kTypeof, 1, instr);
+ CallRuntime(Runtime::kTypeof, 1, instr, false);
}
@@ -3662,6 +3686,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
SafepointGenerator safepoint_generator(this,
pointers,
env->deoptimization_index());
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
}
@@ -3674,7 +3699,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ j(above_equal, &done);
StackCheckStub stub;
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
__ bind(&done);
}
