Chromium Code Reviews

Unified Diff: src/codegen-arm.cc

Issue 6075: Move code for code generator static member functions, code generation... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 12 years, 3 months ago
Index: src/codegen-arm.cc
===================================================================
--- src/codegen-arm.cc (revision 408)
+++ src/codegen-arm.cc (working copy)
@@ -671,73 +671,6 @@
}
-#undef __
-#define __ masm->
-
-MemOperand ArmCodeGenerator::SlotOperand(CodeGenerator* cgen,
- Slot* slot,
- Register tmp) {
- // Currently, this assertion will fail if we try to assign to
- // a constant variable that is constant because it is read-only
- // (such as the variable referring to a named function expression).
- // We need to implement assignments to read-only variables.
- // Ideally, we should do this during AST generation (by converting
- // such assignments into expression statements); however, in general
- // we may not be able to make the decision until past AST generation,
- // that is when the entire program is known.
- ASSERT(slot != NULL);
- int index = slot->index();
- switch (slot->type()) {
- case Slot::PARAMETER:
- return ParameterOperand(cgen, index);
-
- case Slot::LOCAL: {
- ASSERT(0 <= index &&
- index < cgen->scope()->num_stack_slots() &&
- index >= 0);
- int local_offset = JavaScriptFrameConstants::kLocal0Offset -
- index * kPointerSize;
- return MemOperand(fp, local_offset);
- }
-
- case Slot::CONTEXT: {
- MacroAssembler* masm = cgen->masm();
- // Follow the context chain if necessary.
- ASSERT(!tmp.is(cp)); // do not overwrite context register
- Register context = cp;
- int chain_length =
- cgen->scope()->ContextChainLength(slot->var()->scope());
- for (int i = chain_length; i-- > 0;) {
- // Load the closure.
- // (All contexts, even 'with' contexts, have a closure,
- // and it is the same for all contexts inside a function.
- // There is no need to go to the function context first.)
- __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
- // Load the function context (which is the incoming, outer context).
- __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
- context = tmp;
- }
- // We may have a 'with' context now. Get the function context.
- // (In fact this mov may never be needed, since the scope analysis
- // may not permit a direct context access in this case and thus we are
- // always at a function context. However it is safe to dereference be-
- // cause the function context of a function context is itself. Before
- // deleting this mov we should try to create a counter-example first,
- // though...)
- __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
- return ContextOperand(tmp, index);
- }
-
- default:
- UNREACHABLE();
- return MemOperand(r0, 0);
- }
-}
-
-
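
Note: the three slot kinds above map to three addressing modes. As a rough illustration of the LOCAL case (a minimal standalone C++ sketch, not V8 code; the concrete value of kLocal0Offset and the frame layout are assumptions):

    #include <cassert>
    #include <cstdint>

    constexpr int kPointerSize = 4;                   // ARM32 pointer width
    constexpr int kLocal0Offset = -2 * kPointerSize;  // assumed frame layout

    // Byte address of local slot `index`, relative to the frame pointer:
    // locals grow downward from kLocal0Offset.
    uintptr_t LocalSlotAddress(uintptr_t fp, int index, int num_stack_slots) {
      assert(0 <= index && index < num_stack_slots);
      return fp + kLocal0Offset - index * kPointerSize;
    }

The CONTEXT case instead walks chain_length closure links before indexing into the function context.
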
-#undef __
-#define __ masm_->
-
// Loads a value on the stack. If it is a boolean value, the result may have
// been (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the condition
@@ -902,126 +835,6 @@
}
-#undef __
-#define __ masm->
-
-void Property::GenerateStoreCode(CodeGenerator* cgen,
- Reference* ref,
- InitState init_state) {
- MacroAssembler* masm = cgen->masm();
- Comment cmnt(masm, "[ Store to Property");
- __ RecordPosition(position());
- ArmCodeGenerator::SetReferenceProperty(cgen, ref, key());
-}
-
-
-void VariableProxy::GenerateStoreCode(CodeGenerator* cgen,
- Reference* ref,
- InitState init_state) {
- MacroAssembler* masm = cgen->masm();
- Comment cmnt(masm, "[ Store to VariableProxy");
- Variable* node = var();
-
- Expression* expr = node->rewrite();
- if (expr != NULL) {
- expr->GenerateStoreCode(cgen, ref, init_state);
- } else {
- ASSERT(node->is_global());
- if (node->AsProperty() != NULL) {
- __ RecordPosition(node->AsProperty()->position());
- }
- Expression* key = new Literal(node->name());
- ArmCodeGenerator::SetReferenceProperty(cgen, ref, key);
- }
-}
-
-
-void Slot::GenerateStoreCode(CodeGenerator* cgen,
- Reference* ref,
- InitState init_state) {
- MacroAssembler* masm = cgen->masm();
- Comment cmnt(masm, "[ Store to Slot");
-
- if (type() == Slot::LOOKUP) {
- ASSERT(var()->mode() == Variable::DYNAMIC);
-
- // For now, just do a runtime call.
- __ push(cp);
- __ mov(r0, Operand(var()->name()));
- __ push(r0);
-
- if (init_state == CONST_INIT) {
- // Same as the case for a normal store, but ignores attribute
- // (e.g. READ_ONLY) of context slot so that we can initialize const
- // properties (introduced via eval("const foo = (some expr);")). Also,
- // uses the current function context instead of the top context.
- //
- // Note that we must declare the foo upon entry of eval(), via a
- // context slot declaration, but we cannot initialize it at the same
- // time, because the const declaration may be at the end of the eval
- // code (sigh...) and the const variable may have been used before
- // (where its value is 'undefined'). Thus, we can only do the
- // initialization when we actually encounter the expression and when
- // the expression operands are defined and valid, and thus we need the
- // split into 2 operations: declaration of the context slot followed
- // by initialization.
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
- } else {
- __ CallRuntime(Runtime::kStoreContextSlot, 3);
- }
- // Storing a variable must keep the (new) value on the expression
- // stack. This is necessary for compiling assignment expressions.
- __ push(r0);
-
- } else {
- ASSERT(var()->mode() != Variable::DYNAMIC);
-
- Label exit;
- if (init_state == CONST_INIT) {
- ASSERT(var()->mode() == Variable::CONST);
- // Only the first const initialization must be executed (the slot
- // still contains 'the hole' value). When the assignment is executed,
- // the code is identical to a normal store (see below).
- Comment cmnt(masm, "[ Init const");
- __ ldr(r2, ArmCodeGenerator::SlotOperand(cgen, this, r2));
- __ cmp(r2, Operand(Factory::the_hole_value()));
- __ b(ne, &exit);
- }
-
- // We must execute the store.
- // r2 may be loaded with context; used below in RecordWrite.
- // Storing a variable must keep the (new) value on the stack. This is
- // necessary for compiling assignment expressions.
- //
- // Note: We will reach here even with var()->mode() == Variable::CONST
- // because of const declarations which will initialize consts to 'the
- // hole' value and by doing so, end up calling this code.
- __ pop(r0);
- __ str(r0, ArmCodeGenerator::SlotOperand(cgen, this, r2));
- __ push(r0);
-
- if (type() == Slot::CONTEXT) {
- // Skip write barrier if the written value is a smi.
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, &exit);
- // r2 is loaded with context when calling SlotOperand above.
- int offset = FixedArray::kHeaderSize + index() * kPointerSize;
- __ mov(r3, Operand(offset));
- __ RecordWrite(r2, r3, r1);
- }
- // If we definitely did not jump over the assignment, we do not need to
- // bind the exit label. Doing so can defeat peephole optimization.
- if (init_state == CONST_INIT || type() == Slot::CONTEXT) {
- __ bind(&exit);
- }
- }
-}
-
-
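
Note: a minimal sketch of the rule the CONST_INIT path enforces — the slot starts out holding a sentinel ('the hole'), and only a store that still sees the sentinel takes effect. Types and names here are illustrative, not V8's:

    #include <optional>

    struct ConstSlot {
      std::optional<int> value;  // std::nullopt plays the role of 'the hole'

      void InitConst(int v) {
        // Only the first initialization executes; later ones are no-ops,
        // mirroring the ldr/cmp/b(ne, &exit) sequence above.
        if (!value.has_value()) value = v;
      }
    };
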
-#undef __
-#define __ masm_->
-
// ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given
// register to a boolean in the condition code register. The code
// may jump to 'false_target' in case the register converts to 'false'.
@@ -1062,9 +875,6 @@
}
-#undef __
-#define __ masm->
-
class GetPropertyStub : public CodeStub {
public:
GetPropertyStub() { }
@@ -1078,61 +888,6 @@
};
-void GetPropertyStub::Generate(MacroAssembler* masm) {
- // sp[0]: key
- // sp[1]: receiver
- Label slow, fast;
- // Get the key and receiver object from the stack.
- __ ldm(ia, sp, r0.bit() | r1.bit());
- // Check that the key is a smi.
- __ tst(r0, Operand(kSmiTagMask));
- __ b(ne, &slow);
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
- // Check that the object isn't a smi.
- __ tst(r1, Operand(kSmiTagMask));
- __ b(eq, &slow);
-
- // Check that the object is some kind of JS object EXCEPT JS Value type.
- // In the case that the object is a value-wrapper object,
- // we enter the runtime system to make sure that indexing into string
- // objects works as intended.
- ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
- __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
- __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
- __ cmp(r2, Operand(JS_OBJECT_TYPE));
- __ b(lt, &slow);
-
- // Get the elements array of the object.
- __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
- // Check that the object is in fast mode (not dictionary).
- __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
- __ cmp(r3, Operand(Factory::hash_table_map()));
- __ b(eq, &slow);
- // Check that the key (index) is within bounds.
- __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
- __ cmp(r0, Operand(r3));
- __ b(lo, &fast);
-
- // Slow case: Push extra copies of the arguments (2).
- __ bind(&slow);
- __ ldm(ia, sp, r0.bit() | r1.bit());
- __ stm(db_w, sp, r0.bit() | r1.bit());
- // Do tail-call to runtime routine.
- __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);
-
- // Fast case: Do the load.
- __ bind(&fast);
- __ add(r3, r1, Operand(Array::kHeaderSize - kHeapObjectTag));
- __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
- __ cmp(r0, Operand(Factory::the_hole_value()));
- // In case the loaded value is the_hole we have to consult GetProperty
- // to ensure the prototype chain is searched.
- __ b(eq, &slow);
-
- masm->StubReturn(1);
-}
-
-
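
Note: the tag tests in this stub follow the usual V8 smi scheme (kSmiTag == 0, kSmiTagSize == 1): a small integer carries a zero low bit and a heap pointer a one, so a single mask test classifies a value and an arithmetic shift untags it. A standalone sketch of that scheme:

    #include <cstdint>

    constexpr int kSmiTagSize = 1;
    constexpr intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;  // low bit

    bool IsSmi(intptr_t value) { return (value & kSmiTagMask) == 0; }

    intptr_t SmiToInt(intptr_t value) { return value >> kSmiTagSize; }  // ASR

    intptr_t IntToSmi(int32_t value) {
      return static_cast<intptr_t>(value) << kSmiTagSize;
    }
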
class SetPropertyStub : public CodeStub {
public:
SetPropertyStub() { }
@@ -1146,116 +901,6 @@
};
-
-void SetPropertyStub::Generate(MacroAssembler* masm) {
- // r0 : value
- // sp[0] : key
- // sp[1] : receiver
-
- Label slow, fast, array, extra, exit;
- // Get the key and the object from the stack.
- __ ldm(ia, sp, r1.bit() | r3.bit()); // r1 = key, r3 = receiver
- // Check that the key is a smi.
- __ tst(r1, Operand(kSmiTagMask));
- __ b(ne, &slow);
- // Check that the object isn't a smi.
- __ tst(r3, Operand(kSmiTagMask));
- __ b(eq, &slow);
- // Get the type of the object from its map.
- __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
- // Check if the object is a JS array or not.
- __ cmp(r2, Operand(JS_ARRAY_TYPE));
- __ b(eq, &array);
- // Check that the object is some kind of JS object.
- __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
- __ b(lt, &slow);
-
-
- // Object case: Check key against length in the elements array.
- __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
- // Check that the object is in fast mode (not dictionary).
- __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ cmp(r2, Operand(Factory::hash_table_map()));
- __ b(eq, &slow);
- // Untag the key (for checking against untagged length in the fixed array).
- __ mov(r1, Operand(r1, ASR, kSmiTagSize));
- // Compute address to store into and check array bounds.
- __ add(r2, r3, Operand(Array::kHeaderSize - kHeapObjectTag));
- __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
- __ ldr(ip, FieldMemOperand(r3, Array::kLengthOffset));
- __ cmp(r1, Operand(ip));
- __ b(lo, &fast);
-
-
- // Slow case: Push extra copies of the arguments (3).
- __ bind(&slow);
- __ ldm(ia, sp, r1.bit() | r3.bit()); // r0 == value, r1 == key, r3 == object
- __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
- // Do tail-call to runtime routine.
- __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
-
-
- // Extra capacity case: Check if there is extra capacity to
- // perform the store and update the length. Used for adding one
- // element to the array by writing to array[array.length].
- // r0 == value, r1 == key, r2 == elements, r3 == object
- __ bind(&extra);
- __ b(ne, &slow); // do not leave holes in the array
- __ mov(r1, Operand(r1, ASR, kSmiTagSize)); // untag
- __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
- __ cmp(r1, Operand(ip));
- __ b(hs, &slow);
- __ mov(r1, Operand(r1, LSL, kSmiTagSize)); // restore tag
- __ add(r1, r1, Operand(1 << kSmiTagSize)); // and increment
- __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
- __ mov(r3, Operand(r2));
- // NOTE: Computing the address to store into must take the fact
- // that the key has been incremented into account.
- int displacement = Array::kHeaderSize - kHeapObjectTag -
- ((1 << kSmiTagSize) * 2);
- __ add(r2, r2, Operand(displacement));
- __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ b(&fast);
-
-
- // Array case: Get the length and the elements array from the JS
- // array. Check that the array is in fast mode; if it is the
- // length is always a smi.
- // r0 == value, r3 == object
- __ bind(&array);
- __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
- __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
- __ cmp(r1, Operand(Factory::hash_table_map()));
- __ b(eq, &slow);
-
- // Check the key against the length in the array, compute the
- // address to store into and fall through to fast case.
- __ ldr(r1, MemOperand(sp));
- // r0 == value, r1 == key, r2 == elements, r3 == object.
- __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
- __ cmp(r1, Operand(ip));
- __ b(hs, &extra);
- __ mov(r3, Operand(r2));
- __ add(r2, r2, Operand(Array::kHeaderSize - kHeapObjectTag));
- __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
-
-
- // Fast case: Do the store.
- // r0 == value, r2 == address to store into, r3 == elements
- __ bind(&fast);
- __ str(r0, MemOperand(r2));
- // Skip write barrier if the written value is a smi.
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, &exit);
- // Update write barrier for the elements array address.
- __ sub(r1, r2, Operand(r3));
- __ RecordWrite(r3, r1, r2);
- __ bind(&exit);
- masm->StubReturn(1);
-}
-
-
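
Note: the control flow above reduces to three store paths — an in-bounds fast store, a store at array[array.length] when the backing store has spare capacity (which bumps the length), and a slow-path runtime call for everything else. A plain C++ sketch of that decision, with field names assumed and the write barrier omitted:

    #include <cstddef>

    struct FastElements {
      int* backing;     // elements array (fast mode, no holes)
      size_t length;    // JS-visible array length
      size_t capacity;  // allocated slots in `backing`
    };

    enum class StorePath { kFast, kExtra, kSlow };

    StorePath StoreElement(FastElements& a, size_t key, int value) {
      if (key < a.length) {                       // fast case
        a.backing[key] = value;
        return StorePath::kFast;
      }
      if (key == a.length && key < a.capacity) {  // extra capacity case
        a.backing[key] = value;                   // do not leave holes
        a.length = key + 1;
        return StorePath::kExtra;
      }
      return StorePath::kSlow;  // dictionary mode or runtime call
    }
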
class GenericBinaryOpStub : public CodeStub {
public:
explicit GenericBinaryOpStub(Token::Value op) : op_(op) { }
@@ -1289,242 +934,6 @@
};
-void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
- // r1 : x
- // r0 : y
- // result : r0
-
- switch (op_) {
- case Token::ADD: {
- Label slow, exit;
- // fast path
- __ orr(r2, r1, Operand(r0)); // r2 = x | y;
- __ add(r0, r1, Operand(r0), SetCC); // add y optimistically
- // go slow-path in case of overflow
- __ b(vs, &slow);
- // go slow-path in case of non-smi operands
- ASSERT(kSmiTag == 0); // adjust code below
- __ tst(r2, Operand(kSmiTagMask));
- __ b(eq, &exit);
- // slow path
- __ bind(&slow);
- __ sub(r0, r0, Operand(r1)); // revert optimistic add
- __ push(r1);
- __ push(r0);
- __ mov(r0, Operand(1)); // set number of arguments
- __ InvokeBuiltin(Builtins::ADD, JUMP_JS);
- // done
- __ bind(&exit);
- break;
- }
-
- case Token::SUB: {
- Label slow, exit;
- // fast path
- __ orr(r2, r1, Operand(r0)); // r2 = x | y;
- __ sub(r3, r1, Operand(r0), SetCC); // subtract y optimistically
- // go slow-path in case of overflow
- __ b(vs, &slow);
- // go slow-path in case of non-smi operands
- ASSERT(kSmiTag == 0); // adjust code below
- __ tst(r2, Operand(kSmiTagMask));
- __ mov(r0, Operand(r3), LeaveCC, eq); // conditionally set r0 to result
- __ b(eq, &exit);
- // slow path
- __ bind(&slow);
- __ push(r1);
- __ push(r0);
- __ mov(r0, Operand(1)); // set number of arguments
- __ InvokeBuiltin(Builtins::SUB, JUMP_JS);
- // done
- __ bind(&exit);
- break;
- }
-
- case Token::MUL: {
- Label slow, exit;
- // tag check
- __ orr(r2, r1, Operand(r0)); // r2 = x | y;
- ASSERT(kSmiTag == 0); // adjust code below
- __ tst(r2, Operand(kSmiTagMask));
- __ b(ne, &slow);
- // remove tag from one operand (but keep sign), so that result is smi
- __ mov(ip, Operand(r0, ASR, kSmiTagSize));
- // do multiplication
- __ smull(r3, r2, r1, ip); // r3 = lower 32 bits of ip*r1
- // go slow on overflows (overflow bit is not set)
- __ mov(ip, Operand(r3, ASR, 31));
- __ cmp(ip, Operand(r2)); // no overflow if higher 33 bits are identical
- __ b(ne, &slow);
- // go slow on zero result to handle -0
- __ tst(r3, Operand(r3));
- __ mov(r0, Operand(r3), LeaveCC, ne);
- __ b(ne, &exit);
- // slow case
- __ bind(&slow);
- __ push(r1);
- __ push(r0);
- __ mov(r0, Operand(1)); // set number of arguments
- __ InvokeBuiltin(Builtins::MUL, JUMP_JS);
- // done
- __ bind(&exit);
- break;
- }
-
- case Token::BIT_OR:
- case Token::BIT_AND:
- case Token::BIT_XOR: {
- Label slow, exit;
- // tag check
- __ orr(r2, r1, Operand(r0)); // r2 = x | y;
- ASSERT(kSmiTag == 0); // adjust code below
- __ tst(r2, Operand(kSmiTagMask));
- __ b(ne, &slow);
- switch (op_) {
- case Token::BIT_OR: __ orr(r0, r0, Operand(r1)); break;
- case Token::BIT_AND: __ and_(r0, r0, Operand(r1)); break;
- case Token::BIT_XOR: __ eor(r0, r0, Operand(r1)); break;
- default: UNREACHABLE();
- }
- __ b(&exit);
- __ bind(&slow);
- __ push(r1); // restore stack
- __ push(r0);
- __ mov(r0, Operand(1)); // 1 argument (not counting receiver).
- switch (op_) {
- case Token::BIT_OR:
- __ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS);
- break;
- case Token::BIT_AND:
- __ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS);
- break;
- case Token::BIT_XOR:
- __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
- break;
- default:
- UNREACHABLE();
- }
- __ bind(&exit);
- break;
- }
-
- case Token::SHL:
- case Token::SHR:
- case Token::SAR: {
- Label slow, exit;
- // tag check
- __ orr(r2, r1, Operand(r0)); // r2 = x | y;
- ASSERT(kSmiTag == 0); // adjust code below
- __ tst(r2, Operand(kSmiTagMask));
- __ b(ne, &slow);
- // remove tags from operands (but keep sign)
- __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x
- __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // y
- // use only the 5 least significant bits of the shift count
- __ and_(r2, r2, Operand(0x1f));
- // perform operation
- switch (op_) {
- case Token::SAR:
- __ mov(r3, Operand(r3, ASR, r2));
- // no checks of result necessary
- break;
-
- case Token::SHR:
- __ mov(r3, Operand(r3, LSR, r2));
- // check that the *unsigned* result fits in a smi
- // neither of the two high-order bits can be set:
- // - 0x80000000: high bit would be lost when smi tagging
- // - 0x40000000: this number would convert to negative when
- // smi tagging; these two cases can only happen with shifts
- // by 0 or 1 when handed a valid smi
- __ and_(r2, r3, Operand(0xc0000000), SetCC);
- __ b(ne, &slow);
- break;
-
- case Token::SHL:
- __ mov(r3, Operand(r3, LSL, r2));
- // check that the *signed* result fits in a smi
- __ add(r2, r3, Operand(0x40000000), SetCC);
- __ b(mi, &slow);
- break;
-
- default: UNREACHABLE();
- }
- // tag result and store it in r0
- ASSERT(kSmiTag == 0); // adjust code below
- __ mov(r0, Operand(r3, LSL, kSmiTagSize));
- __ b(&exit);
- // slow case
- __ bind(&slow);
- __ push(r1); // restore stack
- __ push(r0);
- __ mov(r0, Operand(1)); // 1 argument (not counting receiver).
- switch (op_) {
- case Token::SAR: __ InvokeBuiltin(Builtins::SAR, JUMP_JS); break;
- case Token::SHR: __ InvokeBuiltin(Builtins::SHR, JUMP_JS); break;
- case Token::SHL: __ InvokeBuiltin(Builtins::SHL, JUMP_JS); break;
- default: UNREACHABLE();
- }
- __ bind(&exit);
- break;
- }
-
- default: UNREACHABLE();
- }
- __ Ret();
-}
-
-
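
Note: the ADD case above is the classic optimistic smi add — since smi tags are zero bits, adding the still-tagged operands yields the correctly tagged sum, and the stub only bails to the builtin on overflow (the vs branch) or a non-smi operand. A sketch using GCC/Clang's __builtin_add_overflow in place of ARM's SetCC/vs:

    #include <cstdint>

    constexpr int32_t kSmiTagMask = 1;

    // Returns true and writes the tagged sum on the fast path; false means
    // "revert and invoke the ADD builtin".
    bool TrySmiAdd(int32_t x, int32_t y, int32_t* result) {
      int32_t sum;
      if (__builtin_add_overflow(x, y, &sum)) return false;  // b(vs, &slow)
      if ((x | y) & kSmiTagMask) return false;               // non-smi operand
      *result = sum;  // tag bits were 0 + 0, so the sum is a valid smi
      return true;
    }
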
-void StackCheckStub::Generate(MacroAssembler* masm) {
- Label within_limit;
- __ mov(ip, Operand(ExternalReference::address_of_stack_guard_limit()));
- __ ldr(ip, MemOperand(ip));
- __ cmp(sp, Operand(ip));
- __ b(hs, &within_limit);
- // Do tail-call to runtime routine.
- __ push(r0);
- __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1);
- __ bind(&within_limit);
-
- masm->StubReturn(1);
-}
-
-
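
Note: the stack check itself is one unsigned comparison against a guard limit; only when sp has dipped below the limit does execution enter the runtime. Schematically:

    #include <cstdint>

    // True when the guard fires, i.e. the 'hs' (unsigned >=) branch above
    // is NOT taken and Runtime::kStackGuard gets called.
    bool StackGuardTriggered(uintptr_t sp, uintptr_t stack_guard_limit) {
      return sp < stack_guard_limit;
    }
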
-void UnarySubStub::Generate(MacroAssembler* masm) {
- Label undo;
- Label slow;
- Label done;
-
- // Enter runtime system if the value is not a smi.
- __ tst(r0, Operand(kSmiTagMask));
- __ b(ne, &slow);
-
- // Enter runtime system if the value of the expression is zero
- // to make sure that we switch between 0 and -0.
- __ cmp(r0, Operand(0));
- __ b(eq, &slow);
-
- // The value of the expression is a smi that is not zero. Try
- // optimistic subtraction '0 - value'.
- __ rsb(r1, r0, Operand(0), SetCC);
- __ b(vs, &slow);
-
- // If result is a smi we are done.
- __ tst(r1, Operand(kSmiTagMask));
- __ mov(r0, Operand(r1), LeaveCC, eq); // conditionally set r0 to result
- __ b(eq, &done);
-
- // Enter runtime system.
- __ bind(&slow);
- __ push(r0);
- __ mov(r0, Operand(0)); // set number of arguments
- __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
-
- __ bind(&done);
- masm->StubReturn(1);
-}
-
-
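
Note: the fast path of UnarySubStub mirrors this sketch — zero is rejected up front so the runtime can produce -0 (a heap number, not a smi), and the reverse-subtract's overflow check catches the one unrepresentable negation. GCC/Clang's __builtin_sub_overflow stands in for rsb ... SetCC / b(vs, &slow):

    #include <cstdint>

    bool TrySmiNegate(int32_t tagged, int32_t* result) {
      if (tagged & 1) return false;   // not a smi: go to the runtime
      if (tagged == 0) return false;  // result would be -0, not a smi
      if (__builtin_sub_overflow(int32_t{0}, tagged, result)) return false;
      return (*result & 1) == 0;      // result is still a tagged smi
    }
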
class InvokeBuiltinStub : public CodeStub {
public:
enum Kind { Inc, Dec, ToNumber };
@@ -1550,355 +959,6 @@
};
-void InvokeBuiltinStub::Generate(MacroAssembler* masm) {
- __ push(r0);
- __ mov(r0, Operand(0)); // set number of arguments
- switch (kind_) {
- case ToNumber: __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_JS); break;
- case Inc: __ InvokeBuiltin(Builtins::INC, JUMP_JS); break;
- case Dec: __ InvokeBuiltin(Builtins::DEC, JUMP_JS); break;
- default: UNREACHABLE();
- }
- masm->StubReturn(argc_);
-}
-
-
-void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
- // r0 holds exception
- ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize); // adjust this code
- __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
- __ ldr(sp, MemOperand(r3));
- __ pop(r2); // pop next in chain
- __ str(r2, MemOperand(r3));
- // restore parameter- and frame-pointer and pop state.
- __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
- // Before returning we restore the context from the frame pointer if not NULL.
- // The frame pointer is NULL in the exception handler of a JS entry frame.
- __ cmp(fp, Operand(0));
- // Set cp to NULL if fp is NULL.
- __ mov(cp, Operand(0), LeaveCC, eq);
- // Restore cp otherwise.
- __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
- if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
- __ pop(pc);
-}
-
-
-void CEntryStub::GenerateThrowOutOfMemory(MacroAssembler* masm) {
- // Fetch top stack handler.
- __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
- __ ldr(r3, MemOperand(r3));
-
- // Unwind the handlers until the ENTRY handler is found.
- Label loop, done;
- __ bind(&loop);
- // Load the type of the current stack handler.
- const int kStateOffset = StackHandlerConstants::kAddressDisplacement +
- StackHandlerConstants::kStateOffset;
- __ ldr(r2, MemOperand(r3, kStateOffset));
- __ cmp(r2, Operand(StackHandler::ENTRY));
- __ b(eq, &done);
- // Fetch the next handler in the list.
- const int kNextOffset = StackHandlerConstants::kAddressDisplacement +
- StackHandlerConstants::kNextOffset;
- __ ldr(r3, MemOperand(r3, kNextOffset));
- __ jmp(&loop);
- __ bind(&done);
-
- // Set the top handler address to next handler past the current ENTRY handler.
- __ ldr(r0, MemOperand(r3, kNextOffset));
- __ mov(r2, Operand(ExternalReference(Top::k_handler_address)));
- __ str(r0, MemOperand(r2));
-
- // Set external caught exception to false.
- __ mov(r0, Operand(false));
- ExternalReference external_caught(Top::k_external_caught_exception_address);
- __ mov(r2, Operand(external_caught));
- __ str(r0, MemOperand(r2));
-
- // Set pending exception and r0 to out of memory exception.
- Failure* out_of_memory = Failure::OutOfMemoryException();
- __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
- __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
- __ str(r0, MemOperand(r2));
-
- // Restore the stack to the address of the ENTRY handler
- __ mov(sp, Operand(r3));
-
- // restore parameter- and frame-pointer and pop state.
- __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
- // Before returning we restore the context from the frame pointer if not NULL.
- // The frame pointer is NULL in the exception handler of a JS entry frame.
- __ cmp(fp, Operand(0));
- // Set cp to NULL if fp is NULL.
- __ mov(cp, Operand(0), LeaveCC, eq);
- // Restore cp otherwise.
- __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
- if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
- __ pop(pc);
-}
-
-
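
Note: both throw helpers manipulate the same singly linked chain of stack handlers threaded through the stack — GenerateThrowTOS pops the innermost handler, while GenerateThrowOutOfMemory unwinds until it reaches the ENTRY handler and makes that handler's successor the new top. A structural sketch (field layout assumed, not V8's actual handler frames):

    struct StackHandler {
      enum State { ENTRY, TRY_CATCH, TRY_FINALLY };
      State state;
      StackHandler* next;  // older handler, further up the stack
    };

    // GenerateThrowTOS: unlink just the innermost handler.
    StackHandler* PopHandler(StackHandler* top) { return top->next; }

    // GenerateThrowOutOfMemory: unwind everything up to and including
    // the current ENTRY handler.
    StackHandler* UnwindToEntry(StackHandler* top) {
      while (top->state != StackHandler::ENTRY) top = top->next;
      return top->next;
    }
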
-void CEntryStub::GenerateCore(MacroAssembler* masm,
- Label* throw_normal_exception,
- Label* throw_out_of_memory_exception,
- StackFrame::Type frame_type,
- bool do_gc) {
- // r0: result parameter for PerformGC, if any
- // r4: number of arguments including receiver (C callee-saved)
- // r5: pointer to builtin function (C callee-saved)
- // r6: pointer to the first argument (C callee-saved)
-
- if (do_gc) {
- // Passing r0.
- __ Call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
- }
-
- // Call C built-in.
- // r0 = argc, r1 = argv
- __ mov(r0, Operand(r4));
- __ mov(r1, Operand(r6));
-
- // TODO(1242173): To let the GC traverse the return address of the exit
- // frames, we need to know where the return address is. Right now,
- // we push it on the stack to be able to find it again, but we never
- // restore from it in case of changes, which makes it impossible to
- // support moving the C entry code stub. This should be fixed, but currently
- // this is OK because the CEntryStub gets generated so early in the V8 boot
- // sequence that it never moves.
- __ add(lr, pc, Operand(4)); // compute return address: (pc + 8) + 4
- __ push(lr);
-#if !defined(__arm__)
- // Notify the simulator of the transition to C code.
- __ swi(assembler::arm::call_rt_r5);
-#else /* !defined(__arm__) */
- __ mov(pc, Operand(r5));
-#endif /* !defined(__arm__) */
- // result is in r0 or r0:r1 - do not destroy these registers!
-
- // check for failure result
- Label failure_returned;
- ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
- // Lower 2 bits of r2 are 0 iff r0 has failure tag.
- __ add(r2, r0, Operand(1));
- __ tst(r2, Operand(kFailureTagMask));
- __ b(eq, &failure_returned);
-
- // Exit C frame and return.
- // r0:r1: result
- // sp: stack pointer
- // fp: frame pointer
- // pp: caller's parameter pointer pp (restored as C callee-saved)
- __ LeaveExitFrame(frame_type);
-
- // check if we should retry or throw exception
- Label retry;
- __ bind(&failure_returned);
- ASSERT(Failure::RETRY_AFTER_GC == 0);
- __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
- __ b(eq, &retry);
-
- Label continue_exception;
- // If the returned failure is EXCEPTION then promote Top::pending_exception().
- __ cmp(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
- __ b(ne, &continue_exception);
-
- // Retrieve the pending exception and clear the variable.
- __ mov(ip, Operand(Factory::the_hole_value().location()));
- __ ldr(r3, MemOperand(ip));
- __ mov(ip, Operand(Top::pending_exception_address()));
- __ ldr(r0, MemOperand(ip));
- __ str(r3, MemOperand(ip));
-
- __ bind(&continue_exception);
- // Special handling of out of memory exception.
- Failure* out_of_memory = Failure::OutOfMemoryException();
- __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
- __ b(eq, throw_out_of_memory_exception);
-
- // Handle normal exception.
- __ jmp(throw_normal_exception);
-
- __ bind(&retry); // pass last failure (r0) as parameter (r0) when retrying
-}
-
-
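
Note: the "check for failure result" above relies on failure objects being tagged so that result + 1 has its low two bits clear. Assuming the usual V8 tag values (kFailureTag == 3, kFailureTagMask == 3), the test is:

    #include <cstdint>

    constexpr intptr_t kFailureTag = 3;      // assumed tag value
    constexpr intptr_t kFailureTagMask = 3;  // low two bits

    static_assert(((kFailureTag + 1) & kFailureTagMask) == 0,
                  "mirrors the ASSERT guarding the add/tst sequence");

    bool IsFailure(intptr_t result) {
      return ((result + 1) & kFailureTagMask) == 0;  // add r2, r0, #1; tst
    }
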
-void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
- // Called from JavaScript; parameters are on stack as if calling JS function
- // r0: number of arguments including receiver
- // r1: pointer to builtin function
- // fp: frame pointer (restored after C call)
- // sp: stack pointer (restored as callee's pp after C call)
- // cp: current context (C callee-saved)
- // pp: caller's parameter pointer pp (C callee-saved)
-
- // NOTE: Invocations of builtins may return failure objects
- // instead of a proper result. The builtin entry handles
- // this by performing a garbage collection and retrying the
- // builtin once.
-
- StackFrame::Type frame_type = is_debug_break
- ? StackFrame::EXIT_DEBUG
- : StackFrame::EXIT;
-
- // Enter the exit frame that transitions from JavaScript to C++.
- __ EnterExitFrame(frame_type);
-
- // r4: number of arguments (C callee-saved)
- // r5: pointer to builtin function (C callee-saved)
- // r6: pointer to first argument (C callee-saved)
-
- Label throw_out_of_memory_exception;
- Label throw_normal_exception;
-
-#ifdef DEBUG
- if (FLAG_gc_greedy) {
- Failure* failure = Failure::RetryAfterGC(0, NEW_SPACE);
- __ mov(r0, Operand(reinterpret_cast<intptr_t>(failure)));
- }
- GenerateCore(masm,
- &throw_normal_exception,
- &throw_out_of_memory_exception,
- frame_type,
- FLAG_gc_greedy);
-#else
- GenerateCore(masm,
- &throw_normal_exception,
- &throw_out_of_memory_exception,
- frame_type,
- false);
-#endif
- GenerateCore(masm,
- &throw_normal_exception,
- &throw_out_of_memory_exception,
- frame_type,
- true);
-
- __ bind(&throw_out_of_memory_exception);
- GenerateThrowOutOfMemory(masm);
- // control flow for the generated code will not return.
-
- __ bind(&throw_normal_exception);
- GenerateThrowTOS(masm);
-}
-
-
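
Note: the paired GenerateCore calls wire up a simple retry protocol — run the builtin without GC first, and only if it reports RETRY_AFTER_GC collect garbage and run it once more. Schematically (illustrative types, not V8's):

    enum class Result { kOk, kRetryAfterGC, kException };

    template <typename Builtin, typename CollectGarbage>
    Result CallWithRetry(Builtin builtin, CollectGarbage collect_garbage) {
      Result r = builtin();        // first GenerateCore call, do_gc == false
      if (r == Result::kRetryAfterGC) {
        collect_garbage();         // second call passes do_gc == true
        r = builtin();
      }
      return r;
    }
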
-void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
- // r0: code entry
- // r1: function
- // r2: receiver
- // r3: argc
- // [sp+0]: argv
-
- Label invoke, exit;
-
- // Called from C, so do not pop argc and args on exit (preserve sp)
- // No need to save register-passed args
- // Save callee-saved registers (incl. cp, pp, and fp), sp, and lr
- __ stm(db_w, sp, kCalleeSaved | lr.bit());
-
- // Get address of argv, see stm above.
- // r0: code entry
- // r1: function
- // r2: receiver
- // r3: argc
- __ add(r4, sp, Operand((kNumCalleeSaved + 1)*kPointerSize));
- __ ldr(r4, MemOperand(r4)); // argv
-
- // Push a frame with special values setup to mark it as an entry frame.
- // r0: code entry
- // r1: function
- // r2: receiver
- // r3: argc
- // r4: argv
- int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
- __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used.
- __ mov(r7, Operand(~ArgumentsAdaptorFrame::SENTINEL));
- __ mov(r6, Operand(Smi::FromInt(marker)));
- __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
- __ ldr(r5, MemOperand(r5));
- __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | r8.bit());
-
- // Setup frame pointer for the frame to be pushed.
- __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
-
- // Call a faked try-block that does the invoke.
- __ bl(&invoke);
-
- // Caught exception: Store result (exception) in the pending
- // exception field in the JSEnv and return a failure sentinel.
- // Coming in here the fp will be invalid because the PushTryHandler below
- // sets it to 0 to signal the existence of the JSEntry frame.
- __ mov(ip, Operand(Top::pending_exception_address()));
- __ str(r0, MemOperand(ip));
- __ mov(r0, Operand(Handle<Failure>(Failure::Exception())));
- __ b(&exit);
-
- // Invoke: Link this frame into the handler chain.
- __ bind(&invoke);
- // Must preserve r0-r4, r5-r7 are available.
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
- // If an exception not caught by another handler occurs, this handler returns
- // control to the code after the bl(&invoke) above, which restores all
- // kCalleeSaved registers (including cp, pp and fp) to their saved values
- // before returning a failure to C.
-
- // Clear any pending exceptions.
- __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
- __ ldr(r5, MemOperand(ip));
- __ mov(ip, Operand(Top::pending_exception_address()));
- __ str(r5, MemOperand(ip));
-
- // Invoke the function by calling through JS entry trampoline builtin.
- // Notice that we cannot store a reference to the trampoline code directly in
- // this stub, because runtime stubs are not traversed when doing GC.
-
- // Expected registers by Builtins::JSEntryTrampoline
- // r0: code entry
- // r1: function
- // r2: receiver
- // r3: argc
- // r4: argv
- if (is_construct) {
- ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
- __ mov(ip, Operand(construct_entry));
- } else {
- ExternalReference entry(Builtins::JSEntryTrampoline);
- __ mov(ip, Operand(entry));
- }
- __ ldr(ip, MemOperand(ip)); // deref address
-
- // Branch and link to JSEntryTrampoline
- __ mov(lr, Operand(pc));
- __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
-
- // Unlink this frame from the handler chain. When reading the
- // address of the next handler, there is no need to use the address
- // displacement since the current stack pointer (sp) points directly
- // to the stack handler.
- __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
- __ mov(ip, Operand(ExternalReference(Top::k_handler_address)));
- __ str(r3, MemOperand(ip));
- // No need to restore registers
- __ add(sp, sp, Operand(StackHandlerConstants::kSize));
-
- __ bind(&exit); // r0 holds result
- // Restore the top frame descriptors from the stack.
- __ pop(r3);
- __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
- __ str(r3, MemOperand(ip));
-
- // Reset the stack to the callee saved registers.
- __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
-
- // Restore callee-saved registers and return.
-#ifdef DEBUG
- if (FLAG_debug_code) __ mov(lr, Operand(pc));
-#endif
- __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
-}
-
-
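
Note: conceptually, the faked try-block plus ENTRY handler make the JS entry behave like a setjmp point at the C++/JS boundary — a throw that no JS handler catches returns control here with a failure sentinel instead of unwinding into C++ frames. A loose analogy only, not how V8 implements it:

    #include <csetjmp>
    #include <cstdint>

    static std::jmp_buf entry_handler;

    intptr_t CallJS(intptr_t (*trampoline)()) {
      if (setjmp(entry_handler) != 0) {
        // An uncaught JS throw would std::longjmp(entry_handler, 1) here.
        return -1;  // failure sentinel; the pending exception was recorded
      }
      return trampoline();  // normal completion path
    }
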
class ArgumentsAccessStub: public CodeStub {
public:
explicit ArgumentsAccessStub(bool is_length) : is_length_(is_length) { }
@@ -1921,91 +981,6 @@
};
-void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- r0: formal number of parameters for the calling function
- // -- r1: key (if value access)
- // -- lr: return address
- // -----------------------------------
-
- // Check that the key is a smi for non-length accesses.
- Label slow;
- if (!is_length_) {
- __ tst(r1, Operand(kSmiTagMask));
- __ b(ne, &slow);
- }
-
- // Check if the calling frame is an arguments adaptor frame.
- // r0: formal number of parameters
- // r1: key (if access)
- Label adaptor;
- __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
- __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL));
- __ b(eq, &adaptor);
-
- static const int kParamDisplacement =
- StandardFrameConstants::kCallerSPOffset - kPointerSize;
-
- if (is_length_) {
- // Nothing to do: the formal length of parameters has been passed in r0
- // by the calling function.
- } else {
- // Check index against formal parameter count. Use unsigned comparison to
- // get the negative check for free.
- // r0: formal number of parameters
- // r1: index
- __ cmp(r1, r0);
- __ b(cs, &slow);
-
- // Read the argument from the current frame.
- __ sub(r3, r0, r1);
- __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ ldr(r0, MemOperand(r3, kParamDisplacement));
- }
-
- // Return to the calling function.
- __ mov(pc, lr);
-
- // An arguments adaptor frame is present. Find the length or the actual
- // argument in the calling frame.
- // r0: formal number of parameters
- // r1: key
- // r2: adaptor frame pointer
- __ bind(&adaptor);
- // Read the arguments length from the adaptor frame. This is the result if
- // only accessing the length, otherwise it is used in accessing the value
- __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
-
- if (!is_length_) {
- // Check index against actual arguments count. Use unsigned comparison to
- // get the negative check for free.
- // r0: actual number of parameters
- // r1: index
- // r2: adaptor frame pointer
- __ cmp(r1, r0);
- __ b(cs, &slow);
-
- // Read the argument from the adaptor frame.
- __ sub(r3, r0, r1);
- __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ ldr(r0, MemOperand(r3, kParamDisplacement));
- }
-
- // Return to the calling function.
- __ mov(pc, lr);
-
- if (!is_length_) {
- __ bind(&slow);
- __ push(r1);
- __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
- }
-}
-
-
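
Note: untangled from smi keys into plain integers, the value access computes the argument's address from the count and the index; arguments sit above the frame pointer with the last one closest to it (kParamDisplacement as defined above):

    #include <cstdint>

    constexpr int kPointerSize = 4;

    uintptr_t ArgumentAddress(uintptr_t frame_pointer, int argc, int index,
                              int param_displacement) {
      // Matches: sub(r3, r0, r1); add(r3, fp, r3 << ...); ldr with
      // kParamDisplacement.
      return frame_pointer + (argc - index) * kPointerSize + param_displacement;
    }
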
-#undef __
-#define __ masm_->
-
void ArmCodeGenerator::GetReferenceProperty(Expression* key) {
ASSERT(!ref()->is_illegal());
Reference::Type type = ref()->type();
@@ -2044,42 +1019,6 @@
}
-#undef __
-#define __ masm->
-
-void ArmCodeGenerator::SetReferenceProperty(CodeGenerator* cgen,
- Reference* ref,
- Expression* key) {
- ASSERT(!ref->is_illegal());
- MacroAssembler* masm = cgen->masm();
-
- if (ref->type() == Reference::NAMED) {
- // Compute the name of the property.
- Literal* literal = key->AsLiteral();
- Handle<String> name(String::cast(*literal->handle()));
-
- // Call the appropriate IC code.
- masm->pop(r0); // value
- // Setup the name register.
- masm->mov(r2, Operand(name));
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
- masm->Call(ic, RelocInfo::CODE_TARGET);
-
- } else {
- // Access keyed property.
- ASSERT(ref->type() == Reference::KEYED);
-
- masm->pop(r0); // value
- SetPropertyStub stub;
- masm->CallStub(&stub);
- }
- masm->push(r0);
-}
-
-
-#undef __
-#define __ masm_->
-
void ArmCodeGenerator::GenericBinaryOperation(Token::Value op) {
// sp[0] : y
// sp[1] : x
@@ -2422,34 +1361,6 @@
};
-void CallFunctionStub::Generate(MacroAssembler* masm) {
- Label slow;
- // Get the function to call from the stack.
- // function, receiver [, arguments]
- masm->ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));
-
- // Check that the function is really a JavaScript function.
- // r1: pushed function (to be verified)
- masm->tst(r1, Operand(kSmiTagMask));
- masm->b(eq, &slow);
- // Get the map of the function object.
- masm->ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
- masm->ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
- masm->cmp(r2, Operand(JS_FUNCTION_TYPE));
- masm->b(ne, &slow);
-
- // Fast-case: Invoke the function now.
- // r1: pushed function
- ParameterCount actual(argc_);
- masm->InvokeFunction(r1, actual, JUMP_FUNCTION);
-
- // Slow-case: Non-function called.
- masm->bind(&slow);
- masm->mov(r0, Operand(argc_)); // Setup the number of arguments.
- masm->InvokeBuiltin(Builtins::CALL_NON_FUNCTION, JUMP_JS);
-}
-
-
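
Note: the dispatch in CallFunctionStub reduces to — invoke directly only when the callee is a heap object of JS function type, otherwise fall back to the CALL_NON_FUNCTION builtin. A sketch with illustrative types:

    enum class CalleeKind { kSmi, kJSFunction, kOther };

    template <typename Invoke, typename NonFunction>
    void DispatchCall(CalleeKind callee, Invoke invoke, NonFunction slow) {
      if (callee == CalleeKind::kJSFunction) {
        invoke();  // fast case: InvokeFunction(r1, actual, JUMP_FUNCTION)
      } else {
        slow();    // slow case: CALL_NON_FUNCTION
      }
    }
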
// Call the function on the stack with the given arguments.
void ArmCodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
int position) {
@@ -4532,8 +3443,1074 @@
#undef __
+#define __ masm->
+MemOperand ArmCodeGenerator::SlotOperand(CodeGenerator* cgen,
+ Slot* slot,
+ Register tmp) {
+ // Currently, this assertion will fail if we try to assign to
+ // a constant variable that is constant because it is read-only
+ // (such as the variable referring to a named function expression).
+ // We need to implement assignments to read-only variables.
+ // Ideally, we should do this during AST generation (by converting
+ // such assignments into expression statements); however, in general
+ // we may not be able to make the decision until past AST generation,
+ // that is when the entire program is known.
+ ASSERT(slot != NULL);
+ int index = slot->index();
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ return ParameterOperand(cgen, index);
+ case Slot::LOCAL: {
+ ASSERT(0 <= index &&
+ index < cgen->scope()->num_stack_slots() &&
+ index >= 0);
+ int local_offset = JavaScriptFrameConstants::kLocal0Offset -
+ index * kPointerSize;
+ return MemOperand(fp, local_offset);
+ }
+
+ case Slot::CONTEXT: {
+ MacroAssembler* masm = cgen->masm();
+ // Follow the context chain if necessary.
+ ASSERT(!tmp.is(cp)); // do not overwrite context register
+ Register context = cp;
+ int chain_length =
+ cgen->scope()->ContextChainLength(slot->var()->scope());
+ for (int i = chain_length; i-- > 0;) {
+ // Load the closure.
+ // (All contexts, even 'with' contexts, have a closure,
+ // and it is the same for all contexts inside a function.
+ // There is no need to go to the function context first.)
+ __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
+ // Load the function context (which is the incoming, outer context).
+ __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
+ context = tmp;
+ }
+ // We may have a 'with' context now. Get the function context.
+ // (In fact this mov may never be needed, since the scope analysis
+ // may not permit a direct context access in this case and thus we are
+ // always at a function context. However it is safe to dereference be-
+ // cause the function context of a function context is itself. Before
+ // deleting this mov we should try to create a counter-example first,
+ // though...)
+ __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
+ return ContextOperand(tmp, index);
+ }
+
+ default:
+ UNREACHABLE();
+ return MemOperand(r0, 0);
+ }
+}
+
+
+void Property::GenerateStoreCode(CodeGenerator* cgen,
+ Reference* ref,
+ InitState init_state) {
+ MacroAssembler* masm = cgen->masm();
+ Comment cmnt(masm, "[ Store to Property");
+ __ RecordPosition(position());
+ ArmCodeGenerator::SetReferenceProperty(cgen, ref, key());
+}
+
+
+void VariableProxy::GenerateStoreCode(CodeGenerator* cgen,
+ Reference* ref,
+ InitState init_state) {
+ MacroAssembler* masm = cgen->masm();
+ Comment cmnt(masm, "[ Store to VariableProxy");
+ Variable* node = var();
+
+ Expression* expr = node->rewrite();
+ if (expr != NULL) {
+ expr->GenerateStoreCode(cgen, ref, init_state);
+ } else {
+ ASSERT(node->is_global());
+ if (node->AsProperty() != NULL) {
+ __ RecordPosition(node->AsProperty()->position());
+ }
+ Expression* key = new Literal(node->name());
+ ArmCodeGenerator::SetReferenceProperty(cgen, ref, key);
+ }
+}
+
+
+void Slot::GenerateStoreCode(CodeGenerator* cgen,
+ Reference* ref,
+ InitState init_state) {
+ MacroAssembler* masm = cgen->masm();
+ Comment cmnt(masm, "[ Store to Slot");
+
+ if (type() == Slot::LOOKUP) {
+ ASSERT(var()->mode() == Variable::DYNAMIC);
+
+ // For now, just do a runtime call.
+ __ push(cp);
+ __ mov(r0, Operand(var()->name()));
+ __ push(r0);
+
+ if (init_state == CONST_INIT) {
+ // Same as the case for a normal store, but ignores attribute
+ // (e.g. READ_ONLY) of context slot so that we can initialize const
+ // properties (introduced via eval("const foo = (some expr);")). Also,
+ // uses the current function context instead of the top context.
+ //
+ // Note that we must declare the foo upon entry of eval(), via a
+ // context slot declaration, but we cannot initialize it at the same
+ // time, because the const declaration may be at the end of the eval
+ // code (sigh...) and the const variable may have been used before
+ // (where its value is 'undefined'). Thus, we can only do the
+ // initialization when we actually encounter the expression and when
+ // the expression operands are defined and valid, and thus we need the
+ // split into 2 operations: declaration of the context slot followed
+ // by initialization.
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
+ } else {
+ __ CallRuntime(Runtime::kStoreContextSlot, 3);
+ }
+ // Storing a variable must keep the (new) value on the expression
+ // stack. This is necessary for compiling assignment expressions.
+ __ push(r0);
+
+ } else {
+ ASSERT(var()->mode() != Variable::DYNAMIC);
+
+ Label exit;
+ if (init_state == CONST_INIT) {
+ ASSERT(var()->mode() == Variable::CONST);
+ // Only the first const initialization must be executed (the slot
+ // still contains 'the hole' value). When the assignment is executed,
+ // the code is identical to a normal store (see below).
+ Comment cmnt(masm, "[ Init const");
+ __ ldr(r2, ArmCodeGenerator::SlotOperand(cgen, this, r2));
+ __ cmp(r2, Operand(Factory::the_hole_value()));
+ __ b(ne, &exit);
+ }
+
+ // We must execute the store.
+ // r2 may be loaded with context; used below in RecordWrite.
+ // Storing a variable must keep the (new) value on the stack. This is
+ // necessary for compiling assignment expressions.
+ //
+ // Note: We will reach here even with var()->mode() == Variable::CONST
+ // because of const declarations which will initialize consts to 'the
+ // hole' value and by doing so, end up calling this code.
+ __ pop(r0);
+ __ str(r0, ArmCodeGenerator::SlotOperand(cgen, this, r2));
+ __ push(r0);
+
+ if (type() == Slot::CONTEXT) {
+ // Skip write barrier if the written value is a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &exit);
+ // r2 is loaded with context when calling SlotOperand above.
+ int offset = FixedArray::kHeaderSize + index() * kPointerSize;
+ __ mov(r3, Operand(offset));
+ __ RecordWrite(r2, r3, r1);
+ }
+ // If we definitely did not jump over the assignment, we do not need to
+ // bind the exit label. Doing so can defeat peephole optimization.
+ if (init_state == CONST_INIT || type() == Slot::CONTEXT) {
+ __ bind(&exit);
+ }
+ }
+}
+
+
+void GetPropertyStub::Generate(MacroAssembler* masm) {
+ // sp[0]: key
+ // sp[1]: receiver
+ Label slow, fast;
+ // Get the key and receiver object from the stack.
+ __ ldm(ia, sp, r0.bit() | r1.bit());
+ // Check that the key is a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+ __ mov(r0, Operand(r0, ASR, kSmiTagSize));
+ // Check that the object isn't a smi.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &slow);
+
+ // Check that the object is some kind of JS object EXCEPT JS Value type.
+ // In the case that the object is a value-wrapper object,
+ // we enter the runtime system to make sure that indexing into string
+ // objects works as intended.
+ ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
+ __ cmp(r2, Operand(JS_OBJECT_TYPE));
+ __ b(lt, &slow);
+
+ // Get the elements array of the object.
+ __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
+ // Check that the object is in fast mode (not dictionary).
+ __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ cmp(r3, Operand(Factory::hash_table_map()));
+ __ b(eq, &slow);
+ // Check that the key (index) is within bounds.
+ __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
+ __ cmp(r0, Operand(r3));
+ __ b(lo, &fast);
+
+ // Slow case: Push extra copies of the arguments (2).
+ __ bind(&slow);
+ __ ldm(ia, sp, r0.bit() | r1.bit());
+ __ stm(db_w, sp, r0.bit() | r1.bit());
+ // Do tail-call to runtime routine.
+ __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);
+
+ // Fast case: Do the load.
+ __ bind(&fast);
+ __ add(r3, r1, Operand(Array::kHeaderSize - kHeapObjectTag));
+ __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
+ __ cmp(r0, Operand(Factory::the_hole_value()));
+ // In case the loaded value is the_hole we have to consult GetProperty
+ // to ensure the prototype chain is searched.
+ __ b(eq, &slow);
+
+ __ StubReturn(1);
+}
+
+
+void SetPropertyStub::Generate(MacroAssembler* masm) {
+ // r0 : value
+ // sp[0] : key
+ // sp[1] : receiver
+
+ Label slow, fast, array, extra, exit;
+ // Get the key and the object from the stack.
+ __ ldm(ia, sp, r1.bit() | r3.bit()); // r1 = key, r3 = receiver
+ // Check that the key is a smi.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+ // Check that the object isn't a smi.
+ __ tst(r3, Operand(kSmiTagMask));
+ __ b(eq, &slow);
+ // Get the type of the object from its map.
+ __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
+ __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
+ // Check if the object is a JS array or not.
+ __ cmp(r2, Operand(JS_ARRAY_TYPE));
+ __ b(eq, &array);
+ // Check that the object is some kind of JS object.
+ __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
+ __ b(lt, &slow);
+
+
+ // Object case: Check key against length in the elements array.
+ __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
+ // Check that the object is in fast mode (not dictionary).
+ __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
+ __ cmp(r2, Operand(Factory::hash_table_map()));
+ __ b(eq, &slow);
+ // Untag the key (for checking against untagged length in the fixed array).
+ __ mov(r1, Operand(r1, ASR, kSmiTagSize));
+ // Compute address to store into and check array bounds.
+ __ add(r2, r3, Operand(Array::kHeaderSize - kHeapObjectTag));
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
+ __ ldr(ip, FieldMemOperand(r3, Array::kLengthOffset));
+ __ cmp(r1, Operand(ip));
+ __ b(lo, &fast);
+
+
+ // Slow case: Push extra copies of the arguments (3).
+ __ bind(&slow);
+ __ ldm(ia, sp, r1.bit() | r3.bit()); // r0 == value, r1 == key, r3 == object
+ __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
+ // Do tail-call to runtime routine.
+ __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
+
+
+ // Extra capacity case: Check if there is extra capacity to
+ // perform the store and update the length. Used for adding one
+ // element to the array by writing to array[array.length].
+ // r0 == value, r1 == key, r2 == elements, r3 == object
+ __ bind(&extra);
+ __ b(ne, &slow); // do not leave holes in the array
+ __ mov(r1, Operand(r1, ASR, kSmiTagSize)); // untag
+ __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
+ __ cmp(r1, Operand(ip));
+ __ b(hs, &slow);
+ __ mov(r1, Operand(r1, LSL, kSmiTagSize)); // restore tag
+ __ add(r1, r1, Operand(1 << kSmiTagSize)); // and increment
+ __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
+ __ mov(r3, Operand(r2));
+ // NOTE: Computing the address to store into must take the fact
+ // that the key has been incremented into account.
+ int displacement = Array::kHeaderSize - kHeapObjectTag -
+ ((1 << kSmiTagSize) * 2);
+ __ add(r2, r2, Operand(displacement));
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ b(&fast);
+
+
+ // Array case: Get the length and the elements array from the JS
+ // array. Check that the array is in fast mode; if it is the
+ // length is always a smi.
+ // r0 == value, r3 == object
+ __ bind(&array);
+ __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
+ __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ cmp(r1, Operand(Factory::hash_table_map()));
+ __ b(eq, &slow);
+
+ // Check the key against the length in the array, compute the
+ // address to store into and fall through to fast case.
+ __ ldr(r1, MemOperand(sp));
+ // r0 == value, r1 == key, r2 == elements, r3 == object.
+ __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
+ __ cmp(r1, Operand(ip));
+ __ b(hs, &extra);
+ __ mov(r3, Operand(r2));
+ __ add(r2, r2, Operand(Array::kHeaderSize - kHeapObjectTag));
+ __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+
+ // Fast case: Do the store.
+ // r0 == value, r2 == address to store into, r3 == elements
+ __ bind(&fast);
+ __ str(r0, MemOperand(r2));
+ // Skip write barrier if the written value is a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &exit);
+ // Update write barrier for the elements array address.
+ __ sub(r1, r2, Operand(r3));
+ __ RecordWrite(r3, r1, r2);
+ __ bind(&exit);
+ __ StubReturn(1);
+}
+
+
+void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
+ // r1 : x
+ // r0 : y
+ // result : r0
+
+ switch (op_) {
+ case Token::ADD: {
+ Label slow, exit;
+ // fast path
+ __ orr(r2, r1, Operand(r0)); // r2 = x | y;
+ __ add(r0, r1, Operand(r0), SetCC); // add y optimistically
+ // go slow-path in case of overflow
+ __ b(vs, &slow);
+ // go slow-path in case of non-smi operands
+ ASSERT(kSmiTag == 0); // adjust code below
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(eq, &exit);
+ // slow path
+ __ bind(&slow);
+ __ sub(r0, r0, Operand(r1)); // revert optimistic add
+ __ push(r1);
+ __ push(r0);
+ __ mov(r0, Operand(1)); // set number of arguments
+ __ InvokeBuiltin(Builtins::ADD, JUMP_JS);
+ // done
+ __ bind(&exit);
+ break;
+ }
+
+ case Token::SUB: {
+ Label slow, exit;
+ // fast path
+ __ orr(r2, r1, Operand(r0)); // r2 = x | y;
+ __ sub(r3, r1, Operand(r0), SetCC); // subtract y optimistically
+ // go slow-path in case of overflow
+ __ b(vs, &slow);
+ // go slow-path in case of non-smi operands
+ ASSERT(kSmiTag == 0); // adjust code below
+ __ tst(r2, Operand(kSmiTagMask));
+ __ mov(r0, Operand(r3), LeaveCC, eq); // conditionally set r0 to result
+ __ b(eq, &exit);
+ // slow path
+ __ bind(&slow);
+ __ push(r1);
+ __ push(r0);
+ __ mov(r0, Operand(1)); // set number of arguments
+ __ InvokeBuiltin(Builtins::SUB, JUMP_JS);
+ // done
+ __ bind(&exit);
+ break;
+ }
+
+ case Token::MUL: {
+ Label slow, exit;
+ // tag check
+ __ orr(r2, r1, Operand(r0)); // r2 = x | y;
+ ASSERT(kSmiTag == 0); // adjust code below
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+ // remove tag from one operand (but keep sign), so that result is smi
+ __ mov(ip, Operand(r0, ASR, kSmiTagSize));
+ // do multiplication
+ __ smull(r3, r2, r1, ip); // r3 = lower 32 bits of ip*r1
+ // go slow on overflows (overflow bit is not set)
+ __ mov(ip, Operand(r3, ASR, 31));
+ __ cmp(ip, Operand(r2)); // no overflow if higher 33 bits are identical
+ __ b(ne, &slow);
+ // go slow on zero result to handle -0
+ __ tst(r3, Operand(r3));
+ __ mov(r0, Operand(r3), LeaveCC, ne);
+ __ b(ne, &exit);
+ // slow case
+ __ bind(&slow);
+ __ push(r1);
+ __ push(r0);
+ __ mov(r0, Operand(1)); // set number of arguments
+ __ InvokeBuiltin(Builtins::MUL, JUMP_JS);
+ // done
+ __ bind(&exit);
+ break;
+ }
+
+ case Token::BIT_OR:
+ case Token::BIT_AND:
+ case Token::BIT_XOR: {
+ Label slow, exit;
+ // tag check
+ __ orr(r2, r1, Operand(r0)); // r2 = x | y;
+ ASSERT(kSmiTag == 0); // adjust code below
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+ switch (op_) {
+ case Token::BIT_OR: __ orr(r0, r0, Operand(r1)); break;
+ case Token::BIT_AND: __ and_(r0, r0, Operand(r1)); break;
+ case Token::BIT_XOR: __ eor(r0, r0, Operand(r1)); break;
+ default: UNREACHABLE();
+ }
+ __ b(&exit);
+ __ bind(&slow);
+ __ push(r1); // restore stack
+ __ push(r0);
+ __ mov(r0, Operand(1)); // 1 argument (not counting receiver).
+ switch (op_) {
+ case Token::BIT_OR:
+ __ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS);
+ break;
+ case Token::BIT_AND:
+ __ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS);
+ break;
+ case Token::BIT_XOR:
+ __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
+ break;
+ default:
+ UNREACHABLE();
+ }
+ __ bind(&exit);
+ break;
+ }
+
+ case Token::SHL:
+ case Token::SHR:
+ case Token::SAR: {
+ Label slow, exit;
+ // tag check
+ __ orr(r2, r1, Operand(r0)); // r2 = x | y;
+ ASSERT(kSmiTag == 0); // adjust code below
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+ // remove tags from operands (but keep sign)
+ __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x
+ __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // y
+ // use only the 5 least significant bits of the shift count
+ __ and_(r2, r2, Operand(0x1f));
+ // perform operation
+ switch (op_) {
+ case Token::SAR:
+ __ mov(r3, Operand(r3, ASR, r2));
+ // no checks of result necessary
+ break;
+
+ case Token::SHR:
+ __ mov(r3, Operand(r3, LSR, r2));
+ // check that the *unsigned* result fits in a smi
+ // neither of the two high-order bits can be set:
+ // - 0x80000000: high bit would be lost when smi tagging
+ // - 0x40000000: this number would convert to negative when
+ // smi tagging; these two cases can only happen with shifts
+ // by 0 or 1 when handed a valid smi
+ __ and_(r2, r3, Operand(0xc0000000), SetCC);
+ __ b(ne, &slow);
+ break;
+
+ case Token::SHL:
+ __ mov(r3, Operand(r3, LSL, r2));
+ // check that the *signed* result fits in a smi
+ __ add(r2, r3, Operand(0x40000000), SetCC);
+ __ b(mi, &slow);
+ break;
+
+ default: UNREACHABLE();
+ }
+ // tag result and store it in r0
+ ASSERT(kSmiTag == 0); // adjust code below
+ __ mov(r0, Operand(r3, LSL, kSmiTagSize));
+ __ b(&exit);
+ // slow case
+ __ bind(&slow);
+ __ push(r1); // restore stack
+ __ push(r0);
+ __ mov(r0, Operand(1)); // 1 argument (not counting receiver).
+ switch (op_) {
+ case Token::SAR: __ InvokeBuiltin(Builtins::SAR, JUMP_JS); break;
+ case Token::SHR: __ InvokeBuiltin(Builtins::SHR, JUMP_JS); break;
+ case Token::SHL: __ InvokeBuiltin(Builtins::SHL, JUMP_JS); break;
+ default: UNREACHABLE();
+ }
+ __ bind(&exit);
+ break;
+ }
+
+ default: UNREACHABLE();
+ }
+ __ Ret();
+}
+
+
+void StackCheckStub::Generate(MacroAssembler* masm) {
+ Label within_limit;
+ __ mov(ip, Operand(ExternalReference::address_of_stack_guard_limit()));
+ __ ldr(ip, MemOperand(ip));
+ __ cmp(sp, Operand(ip));
+ __ b(hs, &within_limit);
+ // Do tail-call to runtime routine.
+ __ push(r0);
+ __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1);
+ __ bind(&within_limit);
+
+ __ StubReturn(1);
+}
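The fast path of this stub is a single unsigned comparison; only when sp has dropped below the guard limit does control enter the runtime. A hypothetical host-side model of the test (the real limit lives behind ExternalReference::address_of_stack_guard_limit()):

#include <cstdint>

// hs is an unsigned >=; stacks grow downward, so a larger sp means
// more headroom. The slow path tail-calls Runtime::kStackGuard.
bool StackWithinLimit(uintptr_t sp, uintptr_t stack_guard_limit) {
  return sp >= stack_guard_limit;
}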
+
+
+void UnarySubStub::Generate(MacroAssembler* masm) {
+ Label undo;
+ Label slow;
+ Label done;
+
+ // Enter runtime system if the value is not a smi.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+
+ // Enter runtime system if the value of the expression is zero
+ // to make sure that we switch between 0 and -0.
+ __ cmp(r0, Operand(0));
+ __ b(eq, &slow);
+
+ // The value of the expression is a smi that is not zero. Try
+ // optimistic subtraction '0 - value'.
+ __ rsb(r1, r0, Operand(0), SetCC);
+ __ b(vs, &slow);
+
+ // If result is a smi we are done.
+ __ tst(r1, Operand(kSmiTagMask));
+ __ mov(r0, Operand(r1), LeaveCC, eq); // conditionally set r0 to result
+ __ b(eq, &done);
+
+ // Enter runtime system.
+ __ bind(&slow);
+ __ push(r0);
+ __ mov(r0, Operand(0)); // set number of arguments
+ __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
+
+ __ bind(&done);
+ __ StubReturn(1);
+}
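Two subtleties force the slow path here even for smi inputs: negating zero must produce -0, which exists only as a heap number, and negating the most negative smi overflows (the rsb with SetCC raises V, caught by the vs branch). A hedged host-side sketch of both bail-out conditions:

#include <cstdint>

// Models the checks guarding the optimistic '0 - value' above.
bool SmiNegationNeedsSlowPath(int32_t value) {
  if (value == 0) return true;  // 0 -> -0, not representable as a smi
  int64_t negated = -static_cast<int64_t>(value);
  return negated != static_cast<int32_t>(negated);  // rsb overflow (vs)
}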
+
+
+void InvokeBuiltinStub::Generate(MacroAssembler* masm) {
+ __ push(r0);
+ __ mov(r0, Operand(0)); // set number of arguments
+ switch (kind_) {
+ case ToNumber: __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_JS); break;
+ case Inc: __ InvokeBuiltin(Builtins::INC, JUMP_JS); break;
+ case Dec: __ InvokeBuiltin(Builtins::DEC, JUMP_JS); break;
+ default: UNREACHABLE();
+ }
+ __ StubReturn(argc_);
+}
+
+
+void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
+ // r0 holds exception
+ ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize); // adjust this code
+ __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
+ __ ldr(sp, MemOperand(r3));
+ __ pop(r2); // pop next in chain
+ __ str(r2, MemOperand(r3));
+ // restore parameter- and frame-pointer and pop state.
+ __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
+ // Before returning we restore the context from the frame pointer if not NULL.
+ // The frame pointer is NULL in the exception handler of a JS entry frame.
+ __ cmp(fp, Operand(0));
+ // Set cp to NULL if fp is NULL.
+ __ mov(cp, Operand(0), LeaveCC, eq);
+ // Restore cp otherwise.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+ if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
+ __ pop(pc);
+}
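The unwind works because handlers form a linked list threaded through the stack, with the chain head stored at Top::k_handler_address. Below is a hypothetical mirror of the pop performed above; the field order of the real record is defined by StackHandlerConstants, not by this sketch:

// Six words per handler per the ASSERT above; only 'next' is modeled.
struct StackHandlerSketch {
  StackHandlerSketch* next;  // popped into r2, stored back as the head
  // state word and saved pp, fp, pc follow in the actual layout
};

StackHandlerSketch* PopTopHandler(StackHandlerSketch** chain_head) {
  StackHandlerSketch* handler = *chain_head;  // ldr(sp, MemOperand(r3))
  *chain_head = handler->next;                // pop next in chain
  return handler;  // caller restores pp/fp/pc from the record
}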
+
+
+void CEntryStub::GenerateThrowOutOfMemory(MacroAssembler* masm) {
+ // Fetch top stack handler.
+ __ mov(r3, Operand(ExternalReference(Top::k_handler_address)));
+ __ ldr(r3, MemOperand(r3));
+
+ // Unwind the handlers until the ENTRY handler is found.
+ Label loop, done;
+ __ bind(&loop);
+ // Load the type of the current stack handler.
+ const int kStateOffset = StackHandlerConstants::kAddressDisplacement +
+ StackHandlerConstants::kStateOffset;
+ __ ldr(r2, MemOperand(r3, kStateOffset));
+ __ cmp(r2, Operand(StackHandler::ENTRY));
+ __ b(eq, &done);
+ // Fetch the next handler in the list.
+ const int kNextOffset = StackHandlerConstants::kAddressDisplacement +
+ StackHandlerConstants::kNextOffset;
+ __ ldr(r3, MemOperand(r3, kNextOffset));
+ __ jmp(&loop);
+ __ bind(&done);
+
+ // Set the top handler address to the next handler past the current
+ // ENTRY handler.
+ __ ldr(r0, MemOperand(r3, kNextOffset));
+ __ mov(r2, Operand(ExternalReference(Top::k_handler_address)));
+ __ str(r0, MemOperand(r2));
+
+ // Set external caught exception to false.
+ __ mov(r0, Operand(false));
+ ExternalReference external_caught(Top::k_external_caught_exception_address);
+ __ mov(r2, Operand(external_caught));
+ __ str(r0, MemOperand(r2));
+
+ // Set pending exception and r0 to out of memory exception.
+ Failure* out_of_memory = Failure::OutOfMemoryException();
+ __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+ __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address)));
+ __ str(r0, MemOperand(r2));
+
+ // Restore the stack to the address of the ENTRY handler
+ __ mov(sp, Operand(r3));
+
+ // restore parameter- and frame-pointer and pop state.
+ __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit());
+ // Before returning we restore the context from the frame pointer if not NULL.
+ // The frame pointer is NULL in the exception handler of a JS entry frame.
+ __ cmp(fp, Operand(0));
+ // Set cp to NULL if fp is NULL.
+ __ mov(cp, Operand(0), LeaveCC, eq);
+ // Restore cp otherwise.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+ if (kDebug && FLAG_debug_code) __ mov(lr, Operand(pc));
+ __ pop(pc);
+}
+
+
+void CEntryStub::GenerateCore(MacroAssembler* masm,
+ Label* throw_normal_exception,
+ Label* throw_out_of_memory_exception,
+ StackFrame::Type frame_type,
+ bool do_gc) {
+ // r0: result parameter for PerformGC, if any
+ // r4: number of arguments including receiver (C callee-saved)
+ // r5: pointer to builtin function (C callee-saved)
+ // r6: pointer to the first argument (C callee-saved)
+
+ if (do_gc) {
+ // Passing r0.
+ __ Call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
+ }
+
+ // Call C built-in.
+ // r0 = argc, r1 = argv
+ __ mov(r0, Operand(r4));
+ __ mov(r1, Operand(r6));
+
+ // TODO(1242173): To let the GC traverse the return address of the exit
+ // frames, we need to know where the return address is. Right now,
+ // we push it on the stack to be able to find it again, but we never
+ // restore from it in case of changes, which makes it impossible to
+ // support moving the C entry code stub. This should be fixed, but currently
+ // this is OK because the CEntryStub is generated so early in the V8 boot
+ // sequence that it never moves.
+ __ add(lr, pc, Operand(4)); // compute return address: (pc + 8) + 4
+ __ push(lr);
+#if !defined(__arm__)
+ // Notify the simulator of the transition to C code.
+ __ swi(assembler::arm::call_rt_r5);
+#else /* !defined(__arm__) */
+ __ mov(pc, Operand(r5));
+#endif /* !defined(__arm__) */
+ // result is in r0 or r0:r1 - do not destroy these registers!
+
+ // check for failure result
+ Label failure_returned;
+ ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
+ // Lower 2 bits of r2 are 0 iff r0 has failure tag.
+ __ add(r2, r0, Operand(1));
+ __ tst(r2, Operand(kFailureTagMask));
+ __ b(eq, &failure_returned);
+
+ // Exit C frame and return.
+ // r0:r1: result
+ // sp: stack pointer
+ // fp: frame pointer
+ // pp: caller's parameter pointer pp (restored as C callee-saved)
+ __ LeaveExitFrame(frame_type);
+
+ // check if we should retry or throw exception
+ Label retry;
+ __ bind(&failure_returned);
+ ASSERT(Failure::RETRY_AFTER_GC == 0);
+ __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
+ __ b(eq, &retry);
+
+ Label continue_exception;
+ // If the returned failure is EXCEPTION then promote Top::pending_exception().
+ __ cmp(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
+ __ b(ne, &continue_exception);
+
+ // Retrieve the pending exception and clear the variable.
+ __ mov(ip, Operand(Factory::the_hole_value().location()));
+ __ ldr(r3, MemOperand(ip));
+ __ mov(ip, Operand(Top::pending_exception_address()));
+ __ ldr(r0, MemOperand(ip));
+ __ str(r3, MemOperand(ip));
+
+ __ bind(&continue_exception);
+ // Special handling of out of memory exception.
+ Failure* out_of_memory = Failure::OutOfMemoryException();
+ __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+ __ b(eq, throw_out_of_memory_exception);
+
+ // Handle normal exception.
+ __ jmp(throw_normal_exception);
+
+ __ bind(&retry); // pass last failure (r0) as parameter (r0) when retrying
+}
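The add-then-test on r2 is a tag trick: failure objects carry a failure tag in their low bits, and the ASSERT above guarantees that adding 1 clears those bits exactly for failures. A host-side sketch, assuming the two-bit failure tag value of 3 used by this era's tagging scheme:

#include <cstdint>

// Mirrors: add(r2, r0, Operand(1)); tst(r2, Operand(kFailureTagMask));
bool LooksLikeFailure(uint32_t word) {
  const uint32_t kFailureTagMask = 3;  // assumed (1 << kFailureTagSize) - 1
  return ((word + 1) & kFailureTagMask) == 0;
}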
+
+
+void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
+ // Called from JavaScript; parameters are on stack as if calling JS function
+ // r0: number of arguments including receiver
+ // r1: pointer to builtin function
+ // fp: frame pointer (restored after C call)
+ // sp: stack pointer (restored as callee's pp after C call)
+ // cp: current context (C callee-saved)
+ // pp: caller's parameter pointer pp (C callee-saved)
+
+ // NOTE: Invocations of builtins may return failure objects
+ // instead of a proper result. The builtin entry handles
+ // this by performing a garbage collection and retrying the
+ // builtin once.
+
+ StackFrame::Type frame_type = is_debug_break
+ ? StackFrame::EXIT_DEBUG
+ : StackFrame::EXIT;
+
+ // Enter the exit frame that transitions from JavaScript to C++.
+ __ EnterExitFrame(frame_type);
+
+ // r4: number of arguments (C callee-saved)
+ // r5: pointer to builtin function (C callee-saved)
+ // r6: pointer to first argument (C callee-saved)
+
+ Label throw_out_of_memory_exception;
+ Label throw_normal_exception;
+
+#ifdef DEBUG
+ if (FLAG_gc_greedy) {
+ Failure* failure = Failure::RetryAfterGC(0, NEW_SPACE);
+ __ mov(r0, Operand(reinterpret_cast<intptr_t>(failure)));
+ }
+ GenerateCore(masm,
+ &throw_normal_exception,
+ &throw_out_of_memory_exception,
+ frame_type,
+ FLAG_gc_greedy);
+#else
+ GenerateCore(masm,
+ &throw_normal_exception,
+ &throw_out_of_memory_exception,
+ frame_type,
+ false);
+#endif
+ GenerateCore(masm,
+ &throw_normal_exception,
+ &throw_out_of_memory_exception,
+ frame_type,
+ true);
+
+ __ bind(&throw_out_of_memory_exception);
+ GenerateThrowOutOfMemory(masm);
+ // The code generated by GenerateThrowOutOfMemory does not return.
+
+ __ bind(&throw_normal_exception);
+ GenerateThrowTOS(masm);
+}
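The two straight-line calls to GenerateCore encode a one-shot retry protocol: the first copy runs the builtin without garbage collection, and a RETRY_AFTER_GC failure falls through its retry label into the second copy, which performs a GC before trying again. A simplified runnable model with placeholder types:

enum class Outcome { kSuccess, kRetryAfterGC, kException };

template <typename Builtin, typename Gc>
Outcome RunWithOneRetry(Builtin builtin, Gc perform_gc) {
  Outcome result = builtin();  // first GenerateCore, do_gc == false
  if (result == Outcome::kRetryAfterGC) {
    perform_gc();              // second GenerateCore, do_gc == true
    result = builtin();
  }
  return result;  // exceptions are routed to the throw labels
}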
+
+
+void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
+ // r0: code entry
+ // r1: function
+ // r2: receiver
+ // r3: argc
+ // [sp+0]: argv
+
+ Label invoke, exit;
+
+ // Called from C, so do not pop argc and args on exit (preserve sp)
+ // No need to save register-passed args
+ // Save callee-saved registers (incl. cp, pp, and fp), sp, and lr
+ __ stm(db_w, sp, kCalleeSaved | lr.bit());
+
+ // Get address of argv, see stm above.
+ // r0: code entry
+ // r1: function
+ // r2: receiver
+ // r3: argc
+ __ add(r4, sp, Operand((kNumCalleeSaved + 1) * kPointerSize));
+ __ ldr(r4, MemOperand(r4)); // argv
+
+ // Push a frame with special values setup to mark it as an entry frame.
+ // r0: code entry
+ // r1: function
+ // r2: receiver
+ // r3: argc
+ // r4: argv
+ int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
+ __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used.
+ __ mov(r7, Operand(~ArgumentsAdaptorFrame::SENTINEL));
+ __ mov(r6, Operand(Smi::FromInt(marker)));
+ __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
+ __ ldr(r5, MemOperand(r5));
+ __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | r8.bit());
+
+ // Setup frame pointer for the frame to be pushed.
+ __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
+
+ // Call a faked try-block that does the invoke.
+ __ bl(&invoke);
+
+ // Caught exception: Store result (exception) in the pending
+ // exception field in the JSEnv and return a failure sentinel.
+ // When control reaches this point, fp is invalid because the
+ // PushTryHandler below sets it to 0 to signal the existence of the
+ // JSEntry frame.
+ __ mov(ip, Operand(Top::pending_exception_address()));
+ __ str(r0, MemOperand(ip));
+ __ mov(r0, Operand(Handle<Failure>(Failure::Exception())));
+ __ b(&exit);
+
+ // Invoke: Link this frame into the handler chain.
+ __ bind(&invoke);
+ // Must preserve r0-r4, r5-r7 are available.
+ __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
+ // If an exception not caught by another handler occurs, this handler returns
+ // control to the code after the bl(&invoke) above, which restores all
+ // kCalleeSaved registers (including cp, pp and fp) to their saved values
+ // before returning a failure to C.
+
+ // Clear any pending exceptions.
+ __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
+ __ ldr(r5, MemOperand(ip));
+ __ mov(ip, Operand(Top::pending_exception_address()));
+ __ str(r5, MemOperand(ip));
+
+ // Invoke the function by calling through JS entry trampoline builtin.
+ // Notice that we cannot store a reference to the trampoline code directly in
+ // this stub, because runtime stubs are not traversed when doing GC.
+
+ // Expected registers by Builtins::JSEntryTrampoline
+ // r0: code entry
+ // r1: function
+ // r2: receiver
+ // r3: argc
+ // r4: argv
+ if (is_construct) {
+ ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
+ __ mov(ip, Operand(construct_entry));
+ } else {
+ ExternalReference entry(Builtins::JSEntryTrampoline);
+ __ mov(ip, Operand(entry));
+ }
+ __ ldr(ip, MemOperand(ip)); // deref address
+
+ // Branch and link to JSEntryTrampoline
+ __ mov(lr, Operand(pc));
+ __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+
+ // Unlink this frame from the handler chain. When reading the
+ // address of the next handler, there is no need to use the address
+ // displacement since the current stack pointer (sp) points directly
+ // to the stack handler.
+ __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
+ __ mov(ip, Operand(ExternalReference(Top::k_handler_address)));
+ __ str(r3, MemOperand(ip));
+ // No need to restore registers
+ __ add(sp, sp, Operand(StackHandlerConstants::kSize));
+
+ __ bind(&exit); // r0 holds result
+ // Restore the top frame descriptors from the stack.
+ __ pop(r3);
+ __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
+ __ str(r3, MemOperand(ip));
+
+ // Reset the stack to the callee saved registers.
+ __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
+
+ // Restore callee-saved registers and return.
+#ifdef DEBUG
+ if (FLAG_debug_code) __ mov(lr, Operand(pc));
+#endif
+ __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
+}
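The call into the trampoline uses the classic ARM branch-and-link idiom: reading pc yields the address of the current instruction plus 8, so 'mov lr, pc' immediately before the computed jump leaves lr pointing at the instruction after the jump. A small model of that arithmetic, assuming the usual ARM pipeline semantics:

#include <cstdint>

// If 'mov lr, pc' sits at address A, pc reads as A + 8, and the
// instruction following the jump at A + 4 is exactly A + 8.
uintptr_t ReturnAddressAfterJump(uintptr_t mov_lr_pc_address) {
  const uintptr_t kInstrSize = 4;  // ARM instructions are 4 bytes
  return mov_lr_pc_address + 2 * kInstrSize;
}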
+
+
+void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r0: formal number of parameters for the calling function
+ // -- r1: key (if value access)
+ // -- lr: return address
+ // -----------------------------------
+
+ // Check that the key is a smi for non-length accesses.
+ Label slow;
+ if (!is_length_) {
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(ne, &slow);
+ }
+
+ // Check if the calling frame is an arguments adaptor frame.
+ // r0: formal number of parameters
+ // r1: key (if access)
+ Label adaptor;
+ __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
+ __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL));
+ __ b(eq, &adaptor);
+
+ static const int kParamDisplacement =
+ StandardFrameConstants::kCallerSPOffset - kPointerSize;
+
+ if (is_length_) {
+ // Nothing to do: the formal length of parameters has been passed in r0
+ // by the calling function.
+ } else {
+ // Check index against formal parameter count. Use unsigned comparison to
+ // get the negative check for free.
+ // r0: formal number of parameters
+ // r1: index
+ __ cmp(r1, r0);
+ __ b(cs, &slow);
+
+ // Read the argument from the current frame.
+ __ sub(r3, r0, r1);
+ __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ ldr(r0, MemOperand(r3, kParamDisplacement));
+ }
+
+ // Return to the calling function.
+ __ mov(pc, lr);
+
+ // An arguments adaptor frame is present. Find the length or the actual
+ // argument in the calling frame.
+ // r0: formal number of parameters
+ // r1: key
+ // r2: adaptor frame pointer
+ __ bind(&adaptor);
+ // Read the arguments length from the adaptor frame. This is the result
+ // if only the length is accessed; otherwise it is used to access the value.
+ __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
+
+ if (!is_length_) {
+ // Check index against actual arguments count. Use unsigned comparison to
+ // get the negative check for free.
+ // r0: actual number of parameters
+ // r1: index
+ // r2: adaptor frame pointer
+ __ cmp(r1, r0);
+ __ b(cs, &slow);
+
+ // Read the argument from the adaptor frame.
+ __ sub(r3, r0, r1);
+ __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ ldr(r0, MemOperand(r3, kParamDisplacement));
+ }
+
+ // Return to the calling function.
+ __ mov(pc, lr);
+
+ if (!is_length_) {
+ __ bind(&slow);
+ __ push(r1);
+ __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
+ }
+}
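The address arithmetic in this stub is compact because both the count and the key stay smi-tagged: their difference is still a smi, and the shift by kPointerSizeLog2 - kSmiTagSize untags it and scales it to bytes in one step. A host-side sketch assuming 32-bit pointers and a one-bit smi tag:

#include <cstdint>

// Mirrors: sub(r3, count, key); add(r3, fp, Operand(r3, LSL, 1));
//          ldr(r0, MemOperand(r3, kParamDisplacement));
uintptr_t ArgumentSlotAddress(uintptr_t fp, int32_t tagged_count,
                              int32_t tagged_key, int32_t param_disp) {
  int32_t tagged_diff = tagged_count - tagged_key;       // still a smi
  return fp + (static_cast<uint32_t>(tagged_diff) << 1)  // untag + scale
            + param_disp;
}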
+
+
+void ArmCodeGenerator::SetReferenceProperty(CodeGenerator* cgen,
+ Reference* ref,
+ Expression* key) {
+ ASSERT(!ref->is_illegal());
+ MacroAssembler* masm = cgen->masm();
+
+ if (ref->type() == Reference::NAMED) {
+ // Compute the name of the property.
+ Literal* literal = key->AsLiteral();
+ Handle<String> name(String::cast(*literal->handle()));
+
+ // Call the appropriate IC code.
+ __ pop(r0); // value
+ // Setup the name register.
+ __ mov(r2, Operand(name));
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
+ __ Call(ic, RelocInfo::CODE_TARGET);
+
+ } else {
+ // Access keyed property.
+ ASSERT(ref->type() == Reference::KEYED);
+
+ __ pop(r0); // value
+ SetPropertyStub stub;
+ __ CallStub(&stub);
+ }
+ __ push(r0);
+}
+
+
+void CallFunctionStub::Generate(MacroAssembler* masm) {
+ Label slow;
+ // Get the function to call from the stack.
+ // function, receiver [, arguments]
+ __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));
+
+ // Check that the function is really a JavaScript function.
+ // r1: pushed function (to be verified)
+ __ tst(r1, Operand(kSmiTagMask));
+ __ b(eq, &slow);
+ // Get the map of the function object.
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
+ __ cmp(r2, Operand(JS_FUNCTION_TYPE));
+ __ b(ne, &slow);
+
+ // Fast-case: Invoke the function now.
+ // r1: pushed function
+ ParameterCount actual(argc_);
+ __ InvokeFunction(r1, actual, JUMP_FUNCTION);
+
+ // Slow-case: Non-function called.
+ __ bind(&slow);
+ __ mov(r0, Operand(argc_)); // Setup the number of arguments.
+ __ InvokeBuiltin(Builtins::CALL_NON_FUNCTION, JUMP_JS);
+}
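The fast/slow split mirrors the call semantics: only heap objects whose map reports JS_FUNCTION_TYPE are invoked directly, while smis and all other heap objects fall through to CALL_NON_FUNCTION, which raises the appropriate error. A hedged host-side sketch of the guard, with placeholder types:

#include <cstdint>

struct MapSketch { uint8_t instance_type; };
struct HeapObjectSketch { MapSketch* map; };

// Assumes a zero smi tag and kHeapObjectTag == 1, as in this codebase.
bool IsCallableFunction(uintptr_t value, uint8_t js_function_type) {
  const uintptr_t kSmiTagMask = 1;
  if ((value & kSmiTagMask) == 0) return false;  // smi: never callable
  auto* object = reinterpret_cast<HeapObjectSketch*>(value - 1);  // untag
  return object->map->instance_type == js_function_type;
}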
+
+
+#undef __
+
// -----------------------------------------------------------------------------
// CodeGenerator interface