Index: src/x64/codegen-x64.cc |
=================================================================== |
--- src/x64/codegen-x64.cc (revision 2201) |
+++ src/x64/codegen-x64.cc (working copy) |
@@ -29,7 +29,9 @@ |
#include "v8.h" |
#include "macro-assembler.h" |
#include "register-allocator-inl.h" |
-#include "codegen.h" |
+#include "codegen-inl.h" |
+#include "codegen-x64-inl.h" |
+ |
// TEST |
#include "compiler.h" |
@@ -101,7 +103,7 @@ |
void CodeGenerator::TestCodeGenerator() { |
// Compile a function from a string, and run it. |
Handle<JSFunction> test_function = Compiler::Compile( |
- Factory::NewStringFromAscii(CStrVector("42")), |
+ Factory::NewStringFromAscii(CStrVector("39; 42;")), |
Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")), |
0, |
0, |
@@ -128,6 +130,7 @@ |
0, |
NULL, |
&pending_exceptions); |
+ // The function compiles and runs, but currently returns a JSFunction |
+ // object rather than the expected Smi. |
CHECK(result->IsSmi()); |
CHECK_EQ(42, Smi::cast(*result)->value()); |
} |
@@ -136,7 +139,7 @@ |
void CodeGenerator::GenCode(FunctionLiteral* function) { |
// Record the position for debugging purposes. |
CodeForFunctionPosition(function); |
- // ZoneList<Statement*>* body = fun->body(); |
+ ZoneList<Statement*>* body = function->body(); |
// Initialize state. |
ASSERT(scope_ == NULL); |
@@ -176,12 +179,37 @@ |
allocator_->Initialize(); |
frame_->Enter(); |
- Result return_register = allocator_->Allocate(rax); |
+ // Allocate space for locals and initialize them. |
+ frame_->AllocateStackSlots(); |
+ // Initialize the function return target after the locals are set |
+ // up, because it needs the expected frame height from the frame. |
+ function_return_.set_direction(JumpTarget::BIDIRECTIONAL); |
+ function_return_is_shadowed_ = false; |
- __ movq(return_register.reg(), Immediate(0x54)); // Smi 42 |
+ VisitStatements(body); |
+ } |
+ // Adjust for function-level loop nesting. |
+ loop_nesting_ -= function->loop_nesting(); |
- GenerateReturnSequence(&return_register); |
+ // Code generation state must be reset. |
+ ASSERT(state_ == NULL); |
+ ASSERT(loop_nesting() == 0); |
+ ASSERT(!function_return_is_shadowed_); |
+ function_return_.Unuse(); |
+ DeleteFrame(); |
+ |
+ // Process any deferred code using the register allocator. |
+ if (!HasStackOverflow()) { |
+ HistogramTimerScope deferred_timer(&Counters::deferred_code_generation); |
+ JumpTarget::set_compiling_deferred_code(true); |
+ ProcessDeferred(); |
+ JumpTarget::set_compiling_deferred_code(false); |
} |
+ |
+ // There is no need to delete the register allocator; it is a |
+ // stack-allocated local. |
+ allocator_ = NULL; |
+ scope_ = NULL; |
} |
void CodeGenerator::GenerateReturnSequence(Result* return_value) { |
@@ -221,10 +249,32 @@ |
UNIMPLEMENTED(); |
} |
-void CodeGenerator::VisitStatements(ZoneList<Statement*>* a) { |
- UNIMPLEMENTED(); |
+#ifdef DEBUG |
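+// Debug check that the register allocator's reference counts agree with |
+// the virtual frame's register usage for the registers listed below. |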
+bool CodeGenerator::HasValidEntryRegisters() { |
+ return (allocator()->count(rax) == (frame()->is_used(rax) ? 1 : 0)) |
+ && (allocator()->count(rbx) == (frame()->is_used(rbx) ? 1 : 0)) |
+ && (allocator()->count(rcx) == (frame()->is_used(rcx) ? 1 : 0)) |
+ && (allocator()->count(rdx) == (frame()->is_used(rdx) ? 1 : 0)) |
+ && (allocator()->count(rdi) == (frame()->is_used(rdi) ? 1 : 0)) |
+ && (allocator()->count(r8) == (frame()->is_used(r8) ? 1 : 0)) |
+ && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0)) |
+ && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0)) |
+ && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0)) |
+ && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0)) |
+ && (allocator()->count(r13) == (frame()->is_used(r13) ? 1 : 0)) |
+ && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0)); |
} |
+#endif |
+ |
+void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { |
+ ASSERT(!in_spilled_code()); |
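+ // Stop emitting statements as soon as the frame becomes invalid, e.g. |
+ // after a return statement has ended control flow in this block. |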
+ for (int i = 0; has_valid_frame() && i < statements->length(); i++) { |
+ Visit(statements->at(i)); |
+ } |
+} |
+ |
+ |
void CodeGenerator::VisitBlock(Block* a) { |
UNIMPLEMENTED(); |
} |
@@ -233,10 +283,19 @@ |
UNIMPLEMENTED(); |
} |
-void CodeGenerator::VisitExpressionStatement(ExpressionStatement* a) { |
- UNIMPLEMENTED(); |
+ |
+void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) { |
+ ASSERT(!in_spilled_code()); |
+ Comment cmnt(masm_, "[ ExpressionStatement"); |
+ CodeForStatementPosition(node); |
+ Expression* expression = node->expression(); |
+ expression->MarkAsStatement(); |
+ Load(expression); |
+ // Remove the lingering expression result from the top of stack. |
+ frame_->Drop(); |
} |
+ |
void CodeGenerator::VisitEmptyStatement(EmptyStatement* a) { |
UNIMPLEMENTED(); |
} |
@@ -253,10 +312,32 @@ |
UNIMPLEMENTED(); |
} |
-void CodeGenerator::VisitReturnStatement(ReturnStatement* a) { |
- UNIMPLEMENTED(); |
+ |
+void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { |
+ ASSERT(!in_spilled_code()); |
+ Comment cmnt(masm_, "[ ReturnStatement"); |
+ |
+ CodeForStatementPosition(node); |
+ Load(node->expression()); |
+ Result return_value = frame_->Pop(); |
+ /* if (function_return_is_shadowed_) { |
+ function_return_.Jump(&return_value); |
+ } else { |
+ frame_->PrepareForReturn(); |
+ if (function_return_.is_bound()) { |
+ // If the function return label is already bound we reuse the |
+ // code by jumping to the return site. |
+ function_return_.Jump(&return_value); |
+ } else { |
+ function_return_.Bind(&return_value); |
+ GenerateReturnSequence(&return_value); |
+ } |
+ } |
+ */ |
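+ // For now, every return statement emits a full return sequence directly |
+ // instead of sharing code through function_return_ as sketched above. |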
+ GenerateReturnSequence(&return_value); |
} |
+ |
void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* a) { |
UNIMPLEMENTED(); |
} |
@@ -302,18 +383,32 @@ |
UNIMPLEMENTED(); |
} |
-void CodeGenerator::VisitSlot(Slot* a) { |
- UNIMPLEMENTED(); |
+void CodeGenerator::VisitSlot(Slot* node) { |
+ Comment cmnt(masm_, "[ Slot"); |
+ LoadFromSlot(node, typeof_state()); |
} |
-void CodeGenerator::VisitVariableProxy(VariableProxy* a) { |
- UNIMPLEMENTED(); |
+ |
+void CodeGenerator::VisitVariableProxy(VariableProxy* node) { |
+ Comment cmnt(masm_, "[ VariableProxy"); |
+ Variable* var = node->var(); |
+ Expression* expr = var->rewrite(); |
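+ // If the variable has been rewritten (typically to a Slot), visiting |
+ // the rewrite loads its value onto the frame. |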
+ if (expr != NULL) { |
+ Visit(expr); |
+ } else { |
+ ASSERT(var->is_global()); |
+ Reference ref(this, node); |
+ // ref.GetValue(typeof_state()); |
+ } |
} |
-void CodeGenerator::VisitLiteral(Literal* a) { |
- UNIMPLEMENTED(); |
+ |
+void CodeGenerator::VisitLiteral(Literal* node) { |
+ Comment cmnt(masm_, "[ Literal"); |
+ frame_->Push(node->handle()); |
} |
+ |
void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* a) { |
UNIMPLEMENTED(); |
} |
@@ -330,12 +425,96 @@ |
UNIMPLEMENTED(); |
} |
-void CodeGenerator::VisitAssignment(Assignment* a) { |
- UNIMPLEMENTED(); |
+ |
+void CodeGenerator::VisitAssignment(Assignment* node) { |
+ Comment cmnt(masm_, "[ Assignment"); |
+ CodeForStatementPosition(node); |
+ |
+ { Reference target(this, node->target()); |
+ if (target.is_illegal()) { |
+ // Fool the virtual frame into thinking that we left the assignment's |
+ // value on the frame. |
+ frame_->Push(Smi::FromInt(0)); |
+ return; |
+ } |
+ Variable* var = node->target()->AsVariableProxy()->AsVariable(); |
+ |
+ if (node->starts_initialization_block()) { |
+ ASSERT(target.type() == Reference::NAMED || |
+ target.type() == Reference::KEYED); |
+ // Change to slow case in the beginning of an initialization |
+ // block to avoid the quadratic behavior of repeatedly adding |
+ // fast properties. |
+ |
+ // The receiver is the argument to the runtime call. It is the |
+ // first value pushed when the reference was loaded to the |
+ // frame. |
+ frame_->PushElementAt(target.size() - 1); |
+ // Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1); |
+ } |
+ if (node->op() == Token::ASSIGN || |
+ node->op() == Token::INIT_VAR || |
+ node->op() == Token::INIT_CONST) { |
+ Load(node->value()); |
+ |
+ } else { |
+ // TODO(X64): Make compound assignments work. |
+ /* |
+ Literal* literal = node->value()->AsLiteral(); |
+ bool overwrite_value = |
+ (node->value()->AsBinaryOperation() != NULL && |
+ node->value()->AsBinaryOperation()->ResultOverwriteAllowed()); |
+ Variable* right_var = node->value()->AsVariableProxy()->AsVariable(); |
+ // There are two cases where the target is not read in the right hand |
+ // side that are easy to test for: the right hand side is a literal, |
+ // or the right hand side is a different variable. TakeValue invalidates |
+ // the target, with an implicit promise that it will be written to again |
+ // before it is read. |
+ if (literal != NULL || (right_var != NULL && right_var != var)) { |
+ target.TakeValue(NOT_INSIDE_TYPEOF); |
+ } else { |
+ target.GetValue(NOT_INSIDE_TYPEOF); |
+ } |
+ */ |
+ Load(node->value()); |
+ /* |
+ GenericBinaryOperation(node->binary_op(), |
+ node->type(), |
+ overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE); |
+ */ |
+ } |
+ |
+ if (var != NULL && |
+ var->mode() == Variable::CONST && |
+ node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) { |
+ // Assignment ignored - leave the value on the stack. |
+ } else { |
+ CodeForSourcePosition(node->position()); |
+ if (node->op() == Token::INIT_CONST) { |
+ // Dynamic constant initializations must use the function context |
+ // and initialize the actual constant declared. Dynamic variable |
+ // initializations are simply assignments and use SetValue. |
+ target.SetValue(CONST_INIT); |
+ } else { |
+ target.SetValue(NOT_CONST_INIT); |
+ } |
+ if (node->ends_initialization_block()) { |
+ ASSERT(target.type() == Reference::NAMED || |
+ target.type() == Reference::KEYED); |
+ // End of initialization block. Revert to fast case. The |
+ // argument to the runtime call is the receiver, which is the |
+ // first value pushed as part of the reference, which is below |
+ // the lhs value. |
+ frame_->PushElementAt(target.size()); |
+ // Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1); |
+ } |
+ } |
+ } |
} |
+ |
void CodeGenerator::VisitThrow(Throw* a) { |
- UNIMPLEMENTED(); |
+ // UNIMPLEMENTED(); |
} |
void CodeGenerator::VisitProperty(Property* a) { |
@@ -426,7 +605,620 @@ |
UNIMPLEMENTED(); |
} |
+// ----------------------------------------------------------------------------- |
+// CodeGenerator implementation of Expressions |
+ |
+void CodeGenerator::Load(Expression* x, TypeofState typeof_state) { |
+#ifdef DEBUG |
+ int original_height = frame_->height(); |
+#endif |
+ ASSERT(!in_spilled_code()); |
+ JumpTarget true_target; |
+ JumpTarget false_target; |
+ ControlDestination dest(&true_target, &false_target, true); |
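+ // LoadCondition is called with force_control false, so the expression's |
+ // value may be left on the frame instead of being forced into branches. |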
+ LoadCondition(x, typeof_state, &dest, false); |
+ |
+ if (dest.false_was_fall_through()) { |
+ // The false target was just bound. |
+ JumpTarget loaded; |
+ frame_->Push(Factory::false_value()); |
+ // There may be dangling jumps to the true target. |
+ if (true_target.is_linked()) { |
+ loaded.Jump(); |
+ true_target.Bind(); |
+ frame_->Push(Factory::true_value()); |
+ loaded.Bind(); |
+ } |
+ |
+ } else if (dest.is_used()) { |
+ // There is true, and possibly false, control flow (with true as |
+ // the fall through). |
+ JumpTarget loaded; |
+ frame_->Push(Factory::true_value()); |
+ if (false_target.is_linked()) { |
+ loaded.Jump(); |
+ false_target.Bind(); |
+ frame_->Push(Factory::false_value()); |
+ loaded.Bind(); |
+ } |
+ |
+ } else { |
+ // We have a valid value on top of the frame, but we still may |
+ // have dangling jumps to the true and false targets from nested |
+ // subexpressions (e.g., the left subexpressions of the |
+ // short-circuited boolean operators). |
+ ASSERT(has_valid_frame()); |
+ if (true_target.is_linked() || false_target.is_linked()) { |
+ JumpTarget loaded; |
+ loaded.Jump(); // Don't lose the current TOS. |
+ if (true_target.is_linked()) { |
+ true_target.Bind(); |
+ frame_->Push(Factory::true_value()); |
+ if (false_target.is_linked()) { |
+ loaded.Jump(); |
+ } |
+ } |
+ if (false_target.is_linked()) { |
+ false_target.Bind(); |
+ frame_->Push(Factory::false_value()); |
+ } |
+ loaded.Bind(); |
+ } |
+ } |
+ |
+ ASSERT(has_valid_frame()); |
+ ASSERT(frame_->height() == original_height + 1); |
+} |
+ |
+ |
+// Emit code to load the value of an expression to the top of the |
+// frame. If the expression is boolean-valued it may be compiled (or |
+// partially compiled) into control flow to the control destination. |
+// If force_control is true, control flow is forced. |
+void CodeGenerator::LoadCondition(Expression* x, |
+ TypeofState typeof_state, |
+ ControlDestination* dest, |
+ bool force_control) { |
+ ASSERT(!in_spilled_code()); |
+ int original_height = frame_->height(); |
+ |
+ { CodeGenState new_state(this, typeof_state, dest); |
+ Visit(x); |
+ |
+ // If we hit a stack overflow, we may not have actually visited |
+ // the expression. In that case, we ensure that we have a |
+ // valid-looking frame state because we will continue to generate |
+ // code as we unwind the C++ stack. |
+ // |
+ // It's possible to have both a stack overflow and a valid frame |
+ // state (e.g., a subexpression overflowed, visiting it returned |
+ // with a dummied frame state, and visiting this expression |
+ // returned with a normal-looking state). |
+ if (HasStackOverflow() && |
+ !dest->is_used() && |
+ frame_->height() == original_height) { |
+ dest->Goto(true); |
+ } |
+ } |
+ |
+ if (force_control && !dest->is_used()) { |
+ // Convert the TOS value into flow to the control destination. |
+ // TODO(X64): Make control flow to control destinations work. |
+ // ToBoolean(dest); |
+ } |
+ |
+ ASSERT(!(force_control && !dest->is_used())); |
+ ASSERT(dest->is_used() || frame_->height() == original_height + 1); |
+} |
+ |
+ |
+void CodeGenerator::LoadUnsafeSmi(Register target, Handle<Object> value) { |
+ UNIMPLEMENTED(); |
+ // TODO(X64): Implement security policy for loads of smis. |
+} |
+ |
+ |
+bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) { |
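+ // Treat every smi as safe for now; LoadUnsafeSmi is not yet implemented |
+ // for X64. |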
+ return false; |
+} |
+ |
+// ----------------------------------------------------------------------------- |
+// CodeGenerator implementation of variables, lookups, and stores. |
+ |
+Reference::Reference(CodeGenerator* cgen, Expression* expression) |
+ : cgen_(cgen), expression_(expression), type_(ILLEGAL) { |
+ cgen->LoadReference(this); |
+} |
+ |
+ |
+Reference::~Reference() { |
+ cgen_->UnloadReference(this); |
+} |
+ |
+ |
+void CodeGenerator::LoadReference(Reference* ref) { |
+ // References are loaded from both spilled and unspilled code. Set the |
+ // state to unspilled to allow that (and explicitly spill after |
+ // construction at the construction sites). |
+ bool was_in_spilled_code = in_spilled_code_; |
+ in_spilled_code_ = false; |
+ |
+ Comment cmnt(masm_, "[ LoadReference"); |
+ Expression* e = ref->expression(); |
+ Property* property = e->AsProperty(); |
+ Variable* var = e->AsVariableProxy()->AsVariable(); |
+ |
+ if (property != NULL) { |
+ // The expression is either a property or a variable proxy that rewrites |
+ // to a property. |
+ Load(property->obj()); |
+ // We use a named reference if the key is a literal symbol, unless it is |
+ // a string that can be legally parsed as an integer. This is because |
+ // otherwise we will not get into the slow case code that handles [] on |
+ // String objects. |
+ Literal* literal = property->key()->AsLiteral(); |
+ uint32_t dummy; |
+ if (literal != NULL && |
+ literal->handle()->IsSymbol() && |
+ !String::cast(*(literal->handle()))->AsArrayIndex(&dummy)) { |
+ ref->set_type(Reference::NAMED); |
+ } else { |
+ Load(property->key()); |
+ ref->set_type(Reference::KEYED); |
+ } |
+ } else if (var != NULL) { |
+ // The expression is a variable proxy that does not rewrite to a |
+ // property. Global variables are treated as named property references. |
+ if (var->is_global()) { |
+ LoadGlobal(); |
+ ref->set_type(Reference::NAMED); |
+ } else { |
+ ASSERT(var->slot() != NULL); |
+ ref->set_type(Reference::SLOT); |
+ } |
+ } else { |
+ // Anything else is a runtime error. |
+ Load(e); |
+ // frame_->CallRuntime(Runtime::kThrowReferenceError, 1); |
+ } |
+ |
+ in_spilled_code_ = was_in_spilled_code; |
+} |
+ |
+ |
+void CodeGenerator::UnloadReference(Reference* ref) { |
+ // Pop a reference from the stack while preserving TOS. |
+ Comment cmnt(masm_, "[ UnloadReference"); |
+ frame_->Nip(ref->size()); |
+} |
+ |
+ |
+void Reference::SetValue(InitState init_state) { |
+ ASSERT(cgen_->HasValidEntryRegisters()); |
+ ASSERT(!is_illegal()); |
+ MacroAssembler* masm = cgen_->masm(); |
+ switch (type_) { |
+ case SLOT: { |
+ Comment cmnt(masm, "[ Store to Slot"); |
+ Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); |
+ ASSERT(slot != NULL); |
+ cgen_->StoreToSlot(slot, init_state); |
+ break; |
+ } |
+ // TODO(X64): Make cases other than SLOT work. |
+ /* |
+ case NAMED: { |
+ Comment cmnt(masm, "[ Store to named Property"); |
+ cgen_->frame()->Push(GetName()); |
+ Result answer = cgen_->frame()->CallStoreIC(); |
+ cgen_->frame()->Push(&answer); |
+ break; |
+ } |
+ |
+ case KEYED: { |
+ Comment cmnt(masm, "[ Store to keyed Property"); |
+ |
+ // Generate inlined version of the keyed store if the code is in |
+ // a loop and the key is likely to be a smi. |
+ Property* property = expression()->AsProperty(); |
+ ASSERT(property != NULL); |
+ SmiAnalysis* key_smi_analysis = property->key()->type(); |
+ |
+ if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) { |
+ Comment cmnt(masm, "[ Inlined store to keyed Property"); |
+ |
+ // Get the receiver, key and value into registers. |
+ Result value = cgen_->frame()->Pop(); |
+ Result key = cgen_->frame()->Pop(); |
+ Result receiver = cgen_->frame()->Pop(); |
+ |
+ Result tmp = cgen_->allocator_->Allocate(); |
+ ASSERT(tmp.is_valid()); |
+ |
+ // Determine whether the value is a constant before putting it |
+ // in a register. |
+ bool value_is_constant = value.is_constant(); |
+ |
+ // Make sure that value, key and receiver are in registers. |
+ value.ToRegister(); |
+ key.ToRegister(); |
+ receiver.ToRegister(); |
+ |
+ DeferredReferenceSetKeyedValue* deferred = |
+ new DeferredReferenceSetKeyedValue(value.reg(), |
+ key.reg(), |
+ receiver.reg()); |
+ |
+ // Check that the value is a smi if it is not a constant. We |
+ // can skip the write barrier for smis and constants. |
+ if (!value_is_constant) { |
+ __ test(value.reg(), Immediate(kSmiTagMask)); |
+ deferred->Branch(not_zero); |
+ } |
+ |
+ // Check that the key is a non-negative smi. |
+ __ test(key.reg(), Immediate(kSmiTagMask | 0x80000000)); |
+ deferred->Branch(not_zero); |
+ |
+ // Check that the receiver is not a smi. |
+ __ test(receiver.reg(), Immediate(kSmiTagMask)); |
+ deferred->Branch(zero); |
+ |
+ // Check that the receiver is a JSArray. |
+ __ mov(tmp.reg(), |
+ FieldOperand(receiver.reg(), HeapObject::kMapOffset)); |
+ __ movzx_b(tmp.reg(), |
+ FieldOperand(tmp.reg(), Map::kInstanceTypeOffset)); |
+ __ cmp(tmp.reg(), JS_ARRAY_TYPE); |
+ deferred->Branch(not_equal); |
+ |
+ // Check that the key is within bounds. Both the key and the |
+ // length of the JSArray are smis. |
+ __ cmp(key.reg(), |
+ FieldOperand(receiver.reg(), JSArray::kLengthOffset)); |
+ deferred->Branch(greater_equal); |
+ |
+ // Get the elements array from the receiver and check that it |
+ // is not a dictionary. |
+ __ mov(tmp.reg(), |
+ FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
+ // Bind the deferred code patch site to be able to locate the |
+ // fixed array map comparison. When debugging, we patch this |
+ // comparison to always fail so that we will hit the IC call |
+ // in the deferred code which will allow the debugger to |
+ // break for fast case stores. |
+ __ bind(deferred->patch_site()); |
+ __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset), |
+ Immediate(Factory::fixed_array_map())); |
+ deferred->Branch(not_equal); |
+ |
+ // Store the value. |
+ __ mov(Operand(tmp.reg(), |
+ key.reg(), |
+ times_2, |
+ Array::kHeaderSize - kHeapObjectTag), |
+ value.reg()); |
+ __ IncrementCounter(&Counters::keyed_store_inline, 1); |
+ |
+ deferred->BindExit(); |
+ |
+ cgen_->frame()->Push(&receiver); |
+ cgen_->frame()->Push(&key); |
+ cgen_->frame()->Push(&value); |
+ } else { |
+ Result answer = cgen_->frame()->CallKeyedStoreIC(); |
+ // Make sure that we do not have a test instruction after the |
+ // call. A test instruction after the call is used to |
+ // indicate that we have generated an inline version of the |
+ // keyed store. |
+ __ nop(); |
+ cgen_->frame()->Push(&answer); |
+ } |
+ break; |
+ } |
+ */ |
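+ // NAMED and KEYED stores are not ported yet, so any non-SLOT reference |
+ // reaches the default case below, which is marked UNREACHABLE. |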
+ default: |
+ UNREACHABLE(); |
+ } |
+} |
+ |
+ |
+Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { |
+ // Currently, this assertion will fail if we try to assign to |
+ // a constant variable that is constant because it is read-only |
+ // (such as the variable referring to a named function expression). |
+ // We need to implement assignments to read-only variables. |
+ // Ideally, we should do this during AST generation (by converting |
+ // such assignments into expression statements); however, in general |
+ // we may not be able to make the decision until past AST generation, |
+ // that is when the entire program is known. |
+ ASSERT(slot != NULL); |
+ int index = slot->index(); |
+ switch (slot->type()) { |
+ case Slot::PARAMETER: |
+ return frame_->ParameterAt(index); |
+ |
+ case Slot::LOCAL: |
+ return frame_->LocalAt(index); |
+ |
+ case Slot::CONTEXT: { |
+ // Follow the context chain if necessary. |
+ ASSERT(!tmp.is(rsi)); // do not overwrite context register |
+ Register context = rsi; |
+ int chain_length = scope()->ContextChainLength(slot->var()->scope()); |
+ for (int i = 0; i < chain_length; i++) { |
+ // Load the closure. |
+ // (All contexts, even 'with' contexts, have a closure, |
+ // and it is the same for all contexts inside a function. |
+ // There is no need to go to the function context first.) |
+ __ movq(tmp, ContextOperand(context, Context::CLOSURE_INDEX)); |
+ // Load the function context (which is the incoming, outer context). |
+ __ movq(tmp, FieldOperand(tmp, JSFunction::kContextOffset)); |
+ context = tmp; |
+ } |
+ // We may have a 'with' context now. Get the function context. |
+ // (In fact this mov may never be needed, since the scope analysis |
+ // may not permit a direct context access in this case and thus we are |
+ // always at a function context. However, it is safe to dereference |
+ // because the function context of a function context is itself. Before |
+ // deleting this mov we should try to create a counter-example first, |
+ // though...) |
+ __ movq(tmp, ContextOperand(context, Context::FCONTEXT_INDEX)); |
+ return ContextOperand(tmp, index); |
+ } |
+ |
+ default: |
+ UNREACHABLE(); |
+ return Operand(rsp, 0); |
+ } |
+} |
+ |
+ |
+Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot, |
+ Result tmp, |
+ JumpTarget* slow) { |
+ UNIMPLEMENTED(); |
+ return Operand(rsp, 0); |
+} |
+ |
+ |
+void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) { |
+ if (slot->type() == Slot::LOOKUP) { |
+ ASSERT(slot->var()->is_dynamic()); |
+ |
+ JumpTarget slow; |
+ JumpTarget done; |
+ Result value; |
+ |
+ // Generate fast-case code for variables that might be shadowed by |
+ // eval-introduced variables. Eval is used a lot without |
+ // introducing variables. In those cases, we do not want to |
+ // perform a runtime call for all variables in the scope |
+ // containing the eval. |
+ if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { |
+ value = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow); |
+ // If there was no control flow to slow, we can exit early. |
+ if (!slow.is_linked()) { |
+ frame_->Push(&value); |
+ return; |
+ } |
+ |
+ done.Jump(&value); |
+ |
+ } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { |
+ Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); |
+ // Only generate the fast case for locals that rewrite to slots. |
+ // This rules out argument loads. |
+ if (potential_slot != NULL) { |
+ // Allocate a fresh register to use as a temp in |
+ // ContextSlotOperandCheckExtensions and to hold the result |
+ // value. |
+ value = allocator_->Allocate(); |
+ ASSERT(value.is_valid()); |
+ __ movq(value.reg(), |
+ ContextSlotOperandCheckExtensions(potential_slot, |
+ value, |
+ &slow)); |
+ if (potential_slot->var()->mode() == Variable::CONST) { |
+ __ movq(kScratchRegister, Factory::the_hole_value(), |
+ RelocInfo::EMBEDDED_OBJECT); |
+ __ cmpq(value.reg(), kScratchRegister); |
+ done.Branch(not_equal, &value); |
+ __ movq(value.reg(), Factory::undefined_value(), |
+ RelocInfo::EMBEDDED_OBJECT); |
+ } |
+ // There is always control flow to slow from |
+ // ContextSlotOperandCheckExtensions so we have to jump around |
+ // it. |
+ done.Jump(&value); |
+ } |
+ } |
+ |
+ slow.Bind(); |
+ // A runtime call is inevitable. We eagerly sync frame elements |
+ // to memory so that we can push the arguments directly into place |
+ // on top of the frame. |
+ frame_->SyncRange(0, frame_->element_count() - 1); |
+ frame_->EmitPush(rsi); |
+ __ movq(kScratchRegister, slot->var()->name(), RelocInfo::EMBEDDED_OBJECT); |
+ frame_->EmitPush(kScratchRegister); |
+ if (typeof_state == INSIDE_TYPEOF) { |
+ // value = |
+ // frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
+ } else { |
+ // value = frame_->CallRuntime(Runtime::kLoadContextSlot, 2); |
+ } |
+ |
+ done.Bind(&value); |
+ frame_->Push(&value); |
+ |
+ } else if (slot->var()->mode() == Variable::CONST) { |
+ // Const slots may contain 'the hole' value (the constant hasn't been |
+ // initialized yet) which needs to be converted into the 'undefined' |
+ // value. |
+ // |
+ // We currently spill the virtual frame because constants use the |
+ // potentially unsafe direct-frame access of SlotOperand. |
+ VirtualFrame::SpilledScope spilled_scope; |
+ Comment cmnt(masm_, "[ Load const"); |
+ JumpTarget exit; |
+ __ movq(rcx, SlotOperand(slot, rcx)); |
+ __ movq(kScratchRegister, Factory::the_hole_value(), |
+ RelocInfo::EMBEDDED_OBJECT); |
+ __ cmpq(rcx, kScratchRegister); |
+ exit.Branch(not_equal); |
+ __ movq(rcx, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT); |
+ exit.Bind(); |
+ frame_->EmitPush(rcx); |
+ |
+ } else if (slot->type() == Slot::PARAMETER) { |
+ frame_->PushParameterAt(slot->index()); |
+ |
+ } else if (slot->type() == Slot::LOCAL) { |
+ frame_->PushLocalAt(slot->index()); |
+ |
+ } else { |
+ // The other remaining slot types (LOOKUP and GLOBAL) cannot reach |
+ // here. |
+ // |
+ // The use of SlotOperand below is safe for an unspilled frame |
+ // because it will always be a context slot. |
+ ASSERT(slot->type() == Slot::CONTEXT); |
+ Result temp = allocator_->Allocate(); |
+ ASSERT(temp.is_valid()); |
+ __ movq(temp.reg(), SlotOperand(slot, temp.reg())); |
+ frame_->Push(&temp); |
+ } |
+} |
+ |
+ |
+void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { |
+ // TODO(X64): Enable more types of slot. |
+ |
+ if (slot->type() == Slot::LOOKUP) { |
+ UNIMPLEMENTED(); |
+ /* |
+ ASSERT(slot->var()->is_dynamic()); |
+ |
+ // For now, just do a runtime call. Since the call is inevitable, |
+ // we eagerly sync the virtual frame so we can directly push the |
+ // arguments into place. |
+ frame_->SyncRange(0, frame_->element_count() - 1); |
+ |
+ frame_->EmitPush(esi); |
+ frame_->EmitPush(Immediate(slot->var()->name())); |
+ |
+ Result value; |
+ if (init_state == CONST_INIT) { |
+ // Same as the case for a normal store, but ignores attribute |
+ // (e.g. READ_ONLY) of context slot so that we can initialize const |
+ // properties (introduced via eval("const foo = (some expr);")). Also, |
+ // uses the current function context instead of the top context. |
+ // |
+ // Note that we must declare the foo upon entry of eval(), via a |
+ // context slot declaration, but we cannot initialize it at the same |
+ // time, because the const declaration may be at the end of the eval |
+ // code (sigh...) and the const variable may have been used before |
+ // (where its value is 'undefined'). Thus, we can only do the |
+ // initialization when we actually encounter the expression and when |
+ // the expression operands are defined and valid, and thus we need the |
+ // split into 2 operations: declaration of the context slot followed |
+ // by initialization. |
+ value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
+ } else { |
+ value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3); |
+ } |
+ // Storing a variable must keep the (new) value on the expression |
+ // stack. This is necessary for compiling chained assignment |
+ // expressions. |
+ frame_->Push(&value); |
+ */ |
+ } else { |
+ ASSERT(!slot->var()->is_dynamic()); |
+ |
+ JumpTarget exit; |
+ if (init_state == CONST_INIT) { |
+ ASSERT(slot->var()->mode() == Variable::CONST); |
+ // Only the first const initialization must be executed (the slot |
+ // still contains 'the hole' value). When the assignment is executed, |
+ // the code is identical to a normal store (see below). |
+ // |
+ // We spill the frame in the code below because the direct-frame |
+ // access of SlotOperand is potentially unsafe with an unspilled |
+ // frame. |
+ VirtualFrame::SpilledScope spilled_scope; |
+ Comment cmnt(masm_, "[ Init const"); |
+ __ movq(rcx, SlotOperand(slot, rcx)); |
+ __ movq(kScratchRegister, Factory::the_hole_value(), |
+ RelocInfo::EMBEDDED_OBJECT); |
+ __ cmpq(rcx, kScratchRegister); |
+ exit.Branch(not_equal); |
+ } |
+ |
+ // We must execute the store. Storing a variable must keep the (new) |
+ // value on the stack. This is necessary for compiling assignment |
+ // expressions. |
+ // |
+ // Note: We will reach here even with slot->var()->mode() == |
+ // Variable::CONST because of const declarations which will initialize |
+ // consts to 'the hole' value and by doing so, end up calling this code. |
+ if (slot->type() == Slot::PARAMETER) { |
+ frame_->StoreToParameterAt(slot->index()); |
+ } else if (slot->type() == Slot::LOCAL) { |
+ frame_->StoreToLocalAt(slot->index()); |
+ } else { |
+ // The other slot types (LOOKUP and GLOBAL) cannot reach here. |
+ // |
+ // The use of SlotOperand below is safe for an unspilled frame |
+ // because the slot is a context slot. |
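+ // The context-slot store below is disabled, so CONTEXT slots currently |
+ // emit no store at all. |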
+ /* |
+ ASSERT(slot->type() == Slot::CONTEXT); |
+ frame_->Dup(); |
+ Result value = frame_->Pop(); |
+ value.ToRegister(); |
+ Result start = allocator_->Allocate(); |
+ ASSERT(start.is_valid()); |
+ __ mov(SlotOperand(slot, start.reg()), value.reg()); |
+ // RecordWrite may destroy the value registers. |
+ // |
+ // TODO(204): Avoid actually spilling when the value is not |
+ // needed (probably the common case). |
+ frame_->Spill(value.reg()); |
+ int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; |
+ Result temp = allocator_->Allocate(); |
+ ASSERT(temp.is_valid()); |
+ __ RecordWrite(start.reg(), offset, value.reg(), temp.reg()); |
+ // The results start, value, and temp are unused by going out of |
+ // scope. |
+ */ |
+ } |
+ |
+ exit.Bind(); |
+ } |
+} |
+ |
+ |
+Result CodeGenerator::LoadFromGlobalSlotCheckExtensions( |
+ Slot* slot, |
+ TypeofState typeof_state, |
+ JumpTarget* slow) { |
+ UNIMPLEMENTED(); |
+ return Result(rax); |
+} |
+ |
+ |
+void CodeGenerator::LoadGlobal() { |
+ if (in_spilled_code()) { |
+ frame_->EmitPush(GlobalObject()); |
+ } else { |
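+ // In unspilled code, load the global object into a freshly allocated |
+ // register so the virtual frame can track it. |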
+ Result temp = allocator_->Allocate(); |
+ __ movq(temp.reg(), GlobalObject()); |
+ frame_->Push(&temp); |
+ } |
+} |
+ |
#undef __ |
+ |
// End of CodeGenerator implementation. |
// ----------------------------------------------------------------------------- |