| Index: src/ppc/full-codegen-ppc.cc
|
| diff --git a/src/arm/full-codegen-arm.cc b/src/ppc/full-codegen-ppc.cc
|
| similarity index 71%
|
| copy from src/arm/full-codegen-arm.cc
|
| copy to src/ppc/full-codegen-ppc.cc
|
| index 09459e4e35bece6c6870c904683d5f2683d9ebe6..f038157c428af0f95d4b7196d78e8070da7661f2 100644
|
| --- a/src/arm/full-codegen-arm.cc
|
| +++ b/src/ppc/full-codegen-ppc.cc
|
| @@ -1,10 +1,13 @@
|
| // Copyright 2012 the V8 project authors. All rights reserved.
|
| +//
|
| +// Copyright IBM Corp. 2012, 2013. All rights reserved.
|
| +//
|
| // Use of this source code is governed by a BSD-style license that can be
|
| // found in the LICENSE file.
|
|
|
| #include "src/v8.h"
|
|
|
| -#if V8_TARGET_ARCH_ARM
|
| +#if V8_TARGET_ARCH_PPC
|
|
|
| #include "src/code-stubs.h"
|
| #include "src/codegen.h"
|
| @@ -16,21 +19,21 @@
|
| #include "src/scopes.h"
|
| #include "src/stub-cache.h"
|
|
|
| -#include "src/arm/code-stubs-arm.h"
|
| -#include "src/arm/macro-assembler-arm.h"
|
| +#include "src/ppc/code-stubs-ppc.h"
|
| +#include "src/ppc/macro-assembler-ppc.h"
|
|
|
| namespace v8 {
|
| namespace internal {
|
|
|
| #define __ ACCESS_MASM(masm_)
|
|
|
| -
|
| // A patch site is a location in the code which it is possible to patch. This
|
| // class has a number of methods to emit the code which is patchable and the
|
| // method EmitPatchInfo to record a marker back to the patchable code. This
|
| -// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
|
| +// marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16-bit
|
| // immediate value is used) is the delta from the pc to the first instruction of
|
| // the patchable code.
|
| +// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
|
| class JumpPatchSite BASE_EMBEDDED {
|
| public:
|
| explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
|
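The comment above encodes a pc-delta into a single `cmpi rx, #yyy` instruction. As a worked example (a minimal sketch; kOff16Mask is assumed to equal 0xffff, and the delta value is invented for illustration):

    // Encode a hypothetical delta of 74564 into the cmpi marker:
    int x   = 74564 / 0xffff;  // 1    -> becomes the register code rx
    int yyy = 74564 % 0xffff;  // 9029 -> becomes the 16-bit immediate
    // The patcher recovers the delta as x * 0xffff + yyy == 74564.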
| @@ -47,30 +50,29 @@ class JumpPatchSite BASE_EMBEDDED {
|
| // the inlined smi code.
|
| void EmitJumpIfNotSmi(Register reg, Label* target) {
|
| DCHECK(!patch_site_.is_bound() && !info_emitted_);
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| + Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
|
| __ bind(&patch_site_);
|
| - __ cmp(reg, Operand(reg));
|
| - __ b(eq, target); // Always taken before patched.
|
| + __ cmp(reg, reg, cr0);
|
| + __ beq(target, cr0); // Always taken before patched.
|
| }
|
|
|
| // When initially emitting this ensure that a jump is never generated to skip
|
| // the inlined smi code.
|
| void EmitJumpIfSmi(Register reg, Label* target) {
|
| + Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
|
| DCHECK(!patch_site_.is_bound() && !info_emitted_);
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| __ bind(&patch_site_);
|
| - __ cmp(reg, Operand(reg));
|
| - __ b(ne, target); // Never taken before patched.
|
| + __ cmp(reg, reg, cr0);
|
| + __ bne(target, cr0); // Never taken before patched.
|
| }
|
|
|
| void EmitPatchInfo() {
|
| - // Block literal pool emission whilst recording patch site information.
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| if (patch_site_.is_bound()) {
|
| int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
|
| Register reg;
|
| - reg.set_code(delta_to_patch_site / kOff12Mask);
|
| - __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
|
| + // reg's code carries the high bits of the offset; the immediate the rest.
|
| + reg.set_code(delta_to_patch_site / kOff16Mask);
|
| + __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
|
| #ifdef DEBUG
|
| info_emitted_ = true;
|
| #endif
|
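Before patching, the compare emitted by EmitJumpIfNotSmi and EmitJumpIfSmi is deliberately degenerate: `cmp reg, reg` always sets the eq bit in cr0, so the beq is always taken and the bne never is. PatchInlinedSmiCode in ic-ppc.cc presumably rewrites it into a real smi test; a sketch of the assumed post-patch shape:

    // Assumed rewrite: replace the dummy compare with a smi test so the
    // condition in cr0 becomes meaningful (andi. always records to cr0).
    //   andi. scratch, reg, kSmiTagMask
    //   beq/bne target, cr0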
| @@ -94,15 +96,14 @@ class JumpPatchSite BASE_EMBEDDED {
|
| // function.
|
| //
|
| // The live registers are:
|
| -// o r1: the JS function object being called (i.e., ourselves)
|
| +// o r4: the JS function object being called (i.e., ourselves)
|
| // o cp: our context
|
| -// o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool)
|
| -// o fp: our caller's frame pointer
|
| +// o fp: our caller's frame pointer (aka r31)
|
| // o sp: stack pointer
|
| -// o lr: return address
|
| + // o lr: return address (held in PPC's special-purpose link register, not a GPR)
|
| //
|
| // The function builds a JS frame. Please see JavaScriptFrameConstants in
|
| -// frames-arm.h for its layout.
|
| +// frames-ppc.h for its layout.
|
| void FullCodeGenerator::Generate() {
|
| CompilationInfo* info = info_;
|
| handler_table_ =
|
| @@ -128,14 +129,14 @@ void FullCodeGenerator::Generate() {
|
| if (info->strict_mode() == SLOPPY && !info->is_native()) {
|
| Label ok;
|
| int receiver_offset = info->scope()->num_parameters() * kPointerSize;
|
| - __ ldr(r2, MemOperand(sp, receiver_offset));
|
| - __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
|
| - __ b(ne, &ok);
|
| + __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
|
| + __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
|
| + __ bne(&ok);
|
|
|
| - __ ldr(r2, GlobalObjectOperand());
|
| - __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));
|
| + __ LoadP(r5, GlobalObjectOperand());
|
| + __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
|
|
|
| - __ str(r2, MemOperand(sp, receiver_offset));
|
| + __ StoreP(r5, MemOperand(sp, receiver_offset), r0);
|
|
|
| __ bind(&ok);
|
| }
|
| @@ -156,32 +157,32 @@ void FullCodeGenerator::Generate() {
|
| if (locals_count > 0) {
|
| if (locals_count >= 128) {
|
| Label ok;
|
| - __ sub(r9, sp, Operand(locals_count * kPointerSize));
|
| - __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
|
| - __ cmp(r9, Operand(r2));
|
| - __ b(hs, &ok);
|
| + __ Add(ip, sp, -(locals_count * kPointerSize), r0);
|
| + __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
|
| + __ cmpl(ip, r5);
|
| + __ bc_short(ge, &ok);
|
| __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
|
| __ bind(&ok);
|
| }
|
| - __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
|
| + __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
|
| int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
|
| if (locals_count >= kMaxPushes) {
|
| int loop_iterations = locals_count / kMaxPushes;
|
| - __ mov(r2, Operand(loop_iterations));
|
| + __ mov(r5, Operand(loop_iterations));
|
| + __ mtctr(r5);
|
| Label loop_header;
|
| __ bind(&loop_header);
|
| // Do pushes.
|
| for (int i = 0; i < kMaxPushes; i++) {
|
| - __ push(r9);
|
| + __ push(ip);
|
| }
|
| // Continue loop if not done.
|
| - __ sub(r2, r2, Operand(1), SetCC);
|
| - __ b(&loop_header, ne);
|
| + __ bdnz(&loop_header);
|
| }
|
| int remaining = locals_count % kMaxPushes;
|
| // Emit the remaining pushes.
|
| for (int i = 0; i < remaining; i++) {
|
| - __ push(r9);
|
| + __ push(ip);
|
| }
|
| }
|
| }
|
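The locals-initialization loop above uses a PPC idiom in place of ARM's explicit decrement-and-branch pair: the iteration count is moved into the count register with mtctr, and bdnz both decrements CTR and branches while it is non-zero. The emitted shape, sketched in mnemonics:

    //   mov   r5, loop_iterations
    //   mtctr r5                  // CTR <- iteration count
    // loop_header:
    //   ...kMaxPushes pushes of ip...
    //   bdnz  loop_header         // CTR--; branch while CTR != 0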
| @@ -191,11 +192,11 @@ void FullCodeGenerator::Generate() {
|
| // Possibly allocate a local context.
|
| int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
|
| if (heap_slots > 0) {
|
| - // Argument to NewContext is the function, which is still in r1.
|
| + // Argument to NewContext is the function, which is still in r4.
|
| Comment cmnt(masm_, "[ Allocate context");
|
| bool need_write_barrier = true;
|
| if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
|
| - __ push(r1);
|
| + __ push(r4);
|
| __ Push(info->scope()->GetScopeInfo());
|
| __ CallRuntime(Runtime::kNewGlobalContext, 2);
|
| } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
|
| @@ -204,14 +205,14 @@ void FullCodeGenerator::Generate() {
|
| // Result of FastNewContextStub is always in new space.
|
| need_write_barrier = false;
|
| } else {
|
| - __ push(r1);
|
| + __ push(r4);
|
| __ CallRuntime(Runtime::kNewFunctionContext, 1);
|
| }
|
| function_in_register = false;
|
| - // Context is returned in r0. It replaces the context passed to us.
|
| + // Context is returned in r3. It replaces the context passed to us.
|
| // It's saved in the stack and kept live in cp.
|
| - __ mov(cp, r0);
|
| - __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + __ mr(cp, r3);
|
| + __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| // Copy any necessary parameters into the context.
|
| int num_parameters = info->scope()->num_parameters();
|
| for (int i = 0; i < num_parameters; i++) {
|
| @@ -220,18 +221,18 @@ void FullCodeGenerator::Generate() {
|
| int parameter_offset = StandardFrameConstants::kCallerSPOffset +
|
| (num_parameters - 1 - i) * kPointerSize;
|
| // Load parameter from stack.
|
| - __ ldr(r0, MemOperand(fp, parameter_offset));
|
| + __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
|
| // Store it in the context.
|
| MemOperand target = ContextOperand(cp, var->index());
|
| - __ str(r0, target);
|
| + __ StoreP(r3, target, r0);
|
|
|
| // Update the write barrier.
|
| if (need_write_barrier) {
|
| __ RecordWriteContextSlot(
|
| - cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
|
| + cp, target.offset(), r3, r6, kLRHasBeenSaved, kDontSaveFPRegs);
|
| } else if (FLAG_debug_code) {
|
| Label done;
|
| - __ JumpIfInNewSpace(cp, r0, &done);
|
| + __ JumpIfInNewSpace(cp, r3, &done);
|
| __ Abort(kExpectedNewSpaceObject);
|
| __ bind(&done);
|
| }
|
| @@ -245,17 +246,17 @@ void FullCodeGenerator::Generate() {
|
| Comment cmnt(masm_, "[ Allocate arguments object");
|
| if (!function_in_register) {
|
| // Load this again, if it's used by the local context below.
|
| - __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| + __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| } else {
|
| - __ mov(r3, r1);
|
| + __ mr(r6, r4);
|
| }
|
| // Receiver is just before the parameters on the caller's stack.
|
| int num_parameters = info->scope()->num_parameters();
|
| int offset = num_parameters * kPointerSize;
|
| - __ add(r2, fp,
|
| - Operand(StandardFrameConstants::kCallerSPOffset + offset));
|
| - __ mov(r1, Operand(Smi::FromInt(num_parameters)));
|
| - __ Push(r3, r2, r1);
|
| + __ addi(r5, fp,
|
| + Operand(StandardFrameConstants::kCallerSPOffset + offset));
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
|
| + __ Push(r6, r5, r4);
|
|
|
| // Arguments to ArgumentsAccessStub:
|
| // function, receiver address, parameter count.
|
| @@ -272,7 +273,7 @@ void FullCodeGenerator::Generate() {
|
| ArgumentsAccessStub stub(isolate(), type);
|
| __ CallStub(&stub);
|
|
|
| - SetVar(arguments, r0, r1, r2);
|
| + SetVar(arguments, r3, r4, r5);
|
| }
|
|
|
| if (FLAG_trace) {
|
| @@ -304,12 +305,9 @@ void FullCodeGenerator::Generate() {
|
| PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
|
| Label ok;
|
| __ LoadRoot(ip, Heap::kStackLimitRootIndex);
|
| - __ cmp(sp, Operand(ip));
|
| - __ b(hs, &ok);
|
| - Handle<Code> stack_check = isolate()->builtins()->StackCheck();
|
| - PredictableCodeSizeScope predictable(masm_,
|
| - masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
|
| - __ Call(stack_check, RelocInfo::CODE_TARGET);
|
| + __ cmpl(sp, ip);
|
| + __ bc_short(ge, &ok);
|
| + __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
|
| __ bind(&ok);
|
| }
|
|
|
| @@ -323,60 +321,40 @@ void FullCodeGenerator::Generate() {
|
| // Always emit a 'return undefined' in case control fell off the end of
|
| // the body.
|
| { Comment cmnt(masm_, "[ return <undefined>;");
|
| - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
|
| + __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
|
| }
|
| EmitReturnSequence();
|
| -
|
| - // Force emit the constant pool, so it doesn't get emitted in the middle
|
| - // of the back edge table.
|
| - masm()->CheckConstPool(true, false);
|
| }
|
|
|
|
|
| void FullCodeGenerator::ClearAccumulator() {
|
| - __ mov(r0, Operand(Smi::FromInt(0)));
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(0));
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
|
| - __ mov(r2, Operand(profiling_counter_));
|
| - __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
|
| - __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
|
| - __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
|
| + __ mov(r5, Operand(profiling_counter_));
|
| + __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
|
| + __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
|
| + __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
|
| }
|
|
|
|
|
| -static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
|
| -
|
| -
|
| void FullCodeGenerator::EmitProfilingCounterReset() {
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| - PredictableCodeSizeScope predictable_code_size_scope(
|
| - masm_, kProfileCounterResetSequenceLength);
|
| - Label start;
|
| - __ bind(&start);
|
| int reset_value = FLAG_interrupt_budget;
|
| if (info_->is_debug()) {
|
| // Detect debug break requests as soon as possible.
|
| reset_value = FLAG_interrupt_budget >> 4;
|
| }
|
| - __ mov(r2, Operand(profiling_counter_));
|
| - // The mov instruction above can be either 1, 2 or 3 instructions depending
|
| - // upon whether it is an extended constant pool - insert nop to compensate.
|
| - DCHECK(masm_->InstructionsGeneratedSince(&start) <= 3);
|
| - while (masm_->InstructionsGeneratedSince(&start) != 3) {
|
| - __ nop();
|
| - }
|
| - __ mov(r3, Operand(Smi::FromInt(reset_value)));
|
| - __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
|
| + __ mov(r5, Operand(profiling_counter_));
|
| + __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
|
| + __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
|
| Label* back_edge_target) {
|
| Comment cmnt(masm_, "[ Back edge bookkeeping");
|
| - // Block literal pools whilst emitting back edge code.
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| Label ok;
|
|
|
| DCHECK(back_edge_target->is_bound());
|
| @@ -384,14 +362,17 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
|
| int weight = Min(kMaxBackEdgeWeight,
|
| Max(1, distance / kCodeSizeMultiplier));
|
| EmitProfilingCounterDecrement(weight);
|
| - __ b(pl, &ok);
|
| - __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
|
| -
|
| - // Record a mapping of this PC offset to the OSR id. This is used to find
|
| - // the AST id from the unoptimized code in order to use it as a key into
|
| - // the deoptimization input data found in the optimized code.
|
| - RecordBackEdge(stmt->OsrEntryId());
|
| -
|
| + { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
|
| + // BackEdgeTable::PatchAt manipulates this sequence.
|
| + __ cmpi(r6, Operand::Zero());
|
| + __ bc_short(ge, &ok);
|
| + __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
|
| +
|
| + // Record a mapping of this PC offset to the OSR id. This is used to find
|
| + // the AST id from the unoptimized code in order to use it as a key into
|
| + // the deoptimization input data found in the optimized code.
|
| + RecordBackEdge(stmt->OsrEntryId());
|
| + }
|
| EmitProfilingCounterReset();
|
|
|
| __ bind(&ok);
|
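The trampoline-pool block keeps the patched sequence a fixed length, since BackEdgeTable::PatchAt rewrites it in place (per the comment in the code). A sketch of the emitted shape; that the call target is later redirected to on-stack-replacement code is an assumption based on the other ports:

    //   cmpi r6, 0           // profiling counter, decremented above
    //   bge  ok              // short form: one instruction, never relaxed
    //   call InterruptCheck  // presumably retargeted when the edge is hot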
| @@ -411,8 +392,8 @@ void FullCodeGenerator::EmitReturnSequence() {
|
| __ bind(&return_label_);
|
| if (FLAG_trace) {
|
| // Push the return value on the stack as the parameter.
|
| - // Runtime::TraceExit returns its parameter in r0.
|
| - __ push(r0);
|
| + // Runtime::TraceExit returns its parameter in r3.
|
| + __ push(r3);
|
| __ CallRuntime(Runtime::kTraceExit, 1);
|
| }
|
| // Pretend that the exit is a backwards jump to the entry.
|
| @@ -426,11 +407,12 @@ void FullCodeGenerator::EmitReturnSequence() {
|
| }
|
| EmitProfilingCounterDecrement(weight);
|
| Label ok;
|
| - __ b(pl, &ok);
|
| - __ push(r0);
|
| + __ cmpi(r6, Operand::Zero());
|
| + __ bge(&ok);
|
| + __ push(r3);
|
| __ Call(isolate()->builtins()->InterruptCheck(),
|
| RelocInfo::CODE_TARGET);
|
| - __ pop(r0);
|
| + __ pop(r3);
|
| EmitProfilingCounterReset();
|
| __ bind(&ok);
|
|
|
| @@ -441,16 +423,22 @@ void FullCodeGenerator::EmitReturnSequence() {
|
| #endif
|
| // Make sure that the constant pool is not emitted inside of the return
|
| // sequence.
|
| - { Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| + { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
|
| +#if V8_OOL_CONSTANT_POOL
|
| + ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
|
| +#endif
|
| int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
|
| CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
|
| - // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
|
| - PredictableCodeSizeScope predictable(masm_, -1);
|
| __ RecordJSReturn();
|
| int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
|
| - __ add(sp, sp, Operand(sp_delta));
|
| - __ Jump(lr);
|
| + __ Add(sp, sp, sp_delta, r0);
|
| + __ blr();
|
| info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
|
| +#if V8_TARGET_ARCH_PPC64 && !V8_OOL_CONSTANT_POOL
|
| + // With 64-bit targets we need a nop() instruction to ensure we have
|
| + // enough space for SetDebugBreakAtReturn()
|
| + masm_->nop();
|
| +#endif
|
| }
|
|
|
| #ifdef DEBUG
|
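Taken together, the PPC return sequence emitted above is roughly the following (the trailing nop only on 64-bit targets without an out-of-line constant pool, as guarded in the code):

    //   ...RecordJSReturn, LeaveFrame...
    //   addi sp, sp, sp_delta  // drop receiver and parameters
    //   blr                    // return via the link register
    //   nop                    // padding for SetDebugBreakAtReturn()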
| @@ -592,7 +580,7 @@ void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
|
| Register reg) const {
|
| DCHECK(count > 0);
|
| if (count > 1) __ Drop(count - 1);
|
| - __ str(reg, MemOperand(sp, 0));
|
| + __ StoreP(reg, MemOperand(sp, 0));
|
| }
|
|
|
|
|
| @@ -620,7 +608,7 @@ void FullCodeGenerator::AccumulatorValueContext::Plug(
|
| Label done;
|
| __ bind(materialize_true);
|
| __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
| __ bind(materialize_false);
|
| __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
|
| __ bind(&done);
|
| @@ -633,7 +621,7 @@ void FullCodeGenerator::StackValueContext::Plug(
|
| Label done;
|
| __ bind(materialize_true);
|
| __ LoadRoot(ip, Heap::kTrueValueRootIndex);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
| __ bind(materialize_false);
|
| __ LoadRoot(ip, Heap::kFalseValueRootIndex);
|
| __ bind(&done);
|
| @@ -686,7 +674,7 @@ void FullCodeGenerator::DoTest(Expression* condition,
|
| Label* fall_through) {
|
| Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
|
| CallIC(ic, condition->test_id());
|
| - __ tst(result_register(), result_register());
|
| + __ cmpi(result_register(), Operand::Zero());
|
| Split(ne, if_true, if_false, fall_through);
|
| }
|
|
|
| @@ -694,13 +682,14 @@ void FullCodeGenerator::DoTest(Expression* condition,
|
| void FullCodeGenerator::Split(Condition cond,
|
| Label* if_true,
|
| Label* if_false,
|
| - Label* fall_through) {
|
| + Label* fall_through,
|
| + CRegister cr) {
|
| if (if_false == fall_through) {
|
| - __ b(cond, if_true);
|
| + __ b(cond, if_true, cr);
|
| } else if (if_true == fall_through) {
|
| - __ b(NegateCondition(cond), if_false);
|
| + __ b(NegateCondition(cond), if_false, cr);
|
| } else {
|
| - __ b(cond, if_true);
|
| + __ b(cond, if_true, cr);
|
| __ b(if_false);
|
| }
|
| }
|
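Split gains a CRegister parameter on PPC because comparison results are recorded in one of eight condition-register fields (cr0-cr7) rather than in global flags. A hypothetical call site pairing it with a cr0-recording compare:

    __ cmp(reg, reg, cr0);  // result recorded in cr0
    Split(eq, if_true, if_false, fall_through, cr0);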
| @@ -735,7 +724,7 @@ MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
|
| void FullCodeGenerator::GetVar(Register dest, Variable* var) {
|
| // Use destination as scratch.
|
| MemOperand location = VarOperand(var, dest);
|
| - __ ldr(dest, location);
|
| + __ LoadP(dest, location, r0);
|
| }
|
|
|
|
|
| @@ -748,7 +737,7 @@ void FullCodeGenerator::SetVar(Variable* var,
|
| DCHECK(!scratch0.is(scratch1));
|
| DCHECK(!scratch1.is(src));
|
| MemOperand location = VarOperand(var, scratch0);
|
| - __ str(src, location);
|
| + __ StoreP(src, location, r0);
|
|
|
| // Emit the write barrier code if the location is in the heap.
|
| if (var->IsContextSlot()) {
|
| @@ -776,7 +765,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
|
| PrepareForBailout(expr, TOS_REG);
|
| if (should_normalize) {
|
| __ LoadRoot(ip, Heap::kTrueValueRootIndex);
|
| - __ cmp(r0, ip);
|
| + __ cmp(r3, ip);
|
| Split(eq, if_true, if_false, NULL);
|
| __ bind(&skip);
|
| }
|
| @@ -789,10 +778,10 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
|
| DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
|
| if (generate_debug_code_) {
|
| // Check that we're not inside a with or catch context.
|
| - __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
|
| - __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
|
| + __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
|
| + __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
|
| __ Check(ne, kDeclarationInWithContext);
|
| - __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
|
| + __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
|
| __ Check(ne, kDeclarationInCatchContext);
|
| }
|
| }
|
| @@ -821,7 +810,7 @@ void FullCodeGenerator::VisitVariableDeclaration(
|
| if (hole_init) {
|
| Comment cmnt(masm_, "[ VariableDeclaration");
|
| __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
|
| - __ str(ip, StackOperand(variable));
|
| + __ StoreP(ip, StackOperand(variable));
|
| }
|
| break;
|
|
|
| @@ -830,7 +819,7 @@ void FullCodeGenerator::VisitVariableDeclaration(
|
| Comment cmnt(masm_, "[ VariableDeclaration");
|
| EmitDebugCheckDeclarationContext(variable);
|
| __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
|
| - __ str(ip, ContextOperand(cp, variable->index()));
|
| + __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
|
| // No write barrier since the_hole_value is in old space.
|
| PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
|
| }
|
| @@ -838,22 +827,22 @@ void FullCodeGenerator::VisitVariableDeclaration(
|
|
|
| case Variable::LOOKUP: {
|
| Comment cmnt(masm_, "[ VariableDeclaration");
|
| - __ mov(r2, Operand(variable->name()));
|
| + __ mov(r5, Operand(variable->name()));
|
| // Declaration nodes are always introduced in one of four modes.
|
| DCHECK(IsDeclaredVariableMode(mode));
|
| PropertyAttributes attr =
|
| IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
|
| - __ mov(r1, Operand(Smi::FromInt(attr)));
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(attr));
|
| // Push initial value, if any.
|
| // Note: For variables we must not push an initial value (such as
|
| // 'undefined') because we may have a (legal) redeclaration and we
|
| // must not destroy the current value.
|
| if (hole_init) {
|
| - __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
|
| - __ Push(cp, r2, r1, r0);
|
| + __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
|
| + __ Push(cp, r5, r4, r3);
|
| } else {
|
| - __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
|
| - __ Push(cp, r2, r1, r0);
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value.
|
| + __ Push(cp, r5, r4, r3);
|
| }
|
| __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
|
| break;
|
| @@ -881,7 +870,7 @@ void FullCodeGenerator::VisitFunctionDeclaration(
|
| case Variable::LOCAL: {
|
| Comment cmnt(masm_, "[ FunctionDeclaration");
|
| VisitForAccumulatorValue(declaration->fun());
|
| - __ str(result_register(), StackOperand(variable));
|
| + __ StoreP(result_register(), StackOperand(variable));
|
| break;
|
| }
|
|
|
| @@ -889,13 +878,14 @@ void FullCodeGenerator::VisitFunctionDeclaration(
|
| Comment cmnt(masm_, "[ FunctionDeclaration");
|
| EmitDebugCheckDeclarationContext(variable);
|
| VisitForAccumulatorValue(declaration->fun());
|
| - __ str(result_register(), ContextOperand(cp, variable->index()));
|
| + __ StoreP(result_register(),
|
| + ContextOperand(cp, variable->index()), r0);
|
| int offset = Context::SlotOffset(variable->index());
|
| // We know that we have written a function, which is not a smi.
|
| __ RecordWriteContextSlot(cp,
|
| offset,
|
| result_register(),
|
| - r2,
|
| + r5,
|
| kLRHasBeenSaved,
|
| kDontSaveFPRegs,
|
| EMIT_REMEMBERED_SET,
|
| @@ -906,9 +896,9 @@ void FullCodeGenerator::VisitFunctionDeclaration(
|
|
|
| case Variable::LOOKUP: {
|
| Comment cmnt(masm_, "[ FunctionDeclaration");
|
| - __ mov(r2, Operand(variable->name()));
|
| - __ mov(r1, Operand(Smi::FromInt(NONE)));
|
| - __ Push(cp, r2, r1);
|
| + __ mov(r5, Operand(variable->name()));
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(NONE));
|
| + __ Push(cp, r5, r4);
|
| // Push initial value for function declaration.
|
| VisitForStackValue(declaration->fun());
|
| __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
|
| @@ -927,17 +917,17 @@ void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
|
| EmitDebugCheckDeclarationContext(variable);
|
|
|
| // Load instance object.
|
| - __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope()));
|
| - __ ldr(r1, ContextOperand(r1, variable->interface()->Index()));
|
| - __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX));
|
| + __ LoadContext(r4, scope_->ContextChainLength(scope_->GlobalScope()));
|
| + __ LoadP(r4, ContextOperand(r4, variable->interface()->Index()));
|
| + __ LoadP(r4, ContextOperand(r4, Context::EXTENSION_INDEX));
|
|
|
| // Assign it.
|
| - __ str(r1, ContextOperand(cp, variable->index()));
|
| + __ StoreP(r4, ContextOperand(cp, variable->index()), r0);
|
| // We know that we have written a module, which is not a smi.
|
| __ RecordWriteContextSlot(cp,
|
| Context::SlotOffset(variable->index()),
|
| - r1,
|
| - r3,
|
| + r4,
|
| + r6,
|
| kLRHasBeenSaved,
|
| kDontSaveFPRegs,
|
| EMIT_REMEMBERED_SET,
|
| @@ -980,9 +970,9 @@ void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
|
| void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
|
| // Call the runtime to declare the globals.
|
| // The context is the first argument.
|
| - __ mov(r1, Operand(pairs));
|
| - __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
|
| - __ Push(cp, r1, r0);
|
| + __ mov(r4, Operand(pairs));
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
|
| + __ Push(cp, r4, r3);
|
| __ CallRuntime(Runtime::kDeclareGlobals, 3);
|
| // Return value is ignored.
|
| }
|
| @@ -1028,16 +1018,16 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
|
| VisitForAccumulatorValue(clause->label());
|
|
|
| // Perform the comparison as if via '==='.
|
| - __ ldr(r1, MemOperand(sp, 0)); // Switch value.
|
| + __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
|
| bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
|
| JumpPatchSite patch_site(masm_);
|
| if (inline_smi_code) {
|
| Label slow_case;
|
| - __ orr(r2, r1, r0);
|
| - patch_site.EmitJumpIfNotSmi(r2, &slow_case);
|
| + __ orx(r5, r4, r3);
|
| + patch_site.EmitJumpIfNotSmi(r5, &slow_case);
|
|
|
| - __ cmp(r1, r0);
|
| - __ b(ne, &next_test);
|
| + __ cmp(r4, r3);
|
| + __ bne(&next_test);
|
| __ Drop(1); // Switch value is no longer needed.
|
| __ b(clause->body_target());
|
| __ bind(&slow_case);
|
| @@ -1053,14 +1043,14 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
|
| __ b(&skip);
|
| PrepareForBailout(clause, TOS_REG);
|
| __ LoadRoot(ip, Heap::kTrueValueRootIndex);
|
| - __ cmp(r0, ip);
|
| - __ b(ne, &next_test);
|
| + __ cmp(r3, ip);
|
| + __ bne(&next_test);
|
| __ Drop(1);
|
| - __ jmp(clause->body_target());
|
| + __ b(clause->body_target());
|
| __ bind(&skip);
|
|
|
| - __ cmp(r0, Operand::Zero());
|
| - __ b(ne, &next_test);
|
| + __ cmpi(r3, Operand::Zero());
|
| + __ bne(&next_test);
|
| __ Drop(1); // Switch value is no longer needed.
|
| __ b(clause->body_target());
|
| }
|
| @@ -1102,31 +1092,31 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| // over the loop. See ECMA-262 version 5, section 12.6.4.
|
| VisitForAccumulatorValue(stmt->enumerable());
|
| __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
|
| - __ cmp(r0, ip);
|
| - __ b(eq, &exit);
|
| - Register null_value = r5;
|
| + __ cmp(r3, ip);
|
| + __ beq(&exit);
|
| + Register null_value = r7;
|
| __ LoadRoot(null_value, Heap::kNullValueRootIndex);
|
| - __ cmp(r0, null_value);
|
| - __ b(eq, &exit);
|
| + __ cmp(r3, null_value);
|
| + __ beq(&exit);
|
|
|
| PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
|
|
|
| // Convert the object to a JS object.
|
| Label convert, done_convert;
|
| - __ JumpIfSmi(r0, &convert);
|
| - __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
|
| - __ b(ge, &done_convert);
|
| + __ JumpIfSmi(r3, &convert);
|
| + __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
|
| + __ bge(&done_convert);
|
| __ bind(&convert);
|
| - __ push(r0);
|
| + __ push(r3);
|
| __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
|
| __ bind(&done_convert);
|
| - __ push(r0);
|
| + __ push(r3);
|
|
|
| // Check for proxies.
|
| Label call_runtime;
|
| STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
|
| - __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
|
| - __ b(le, &call_runtime);
|
| + __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
|
| + __ ble(&call_runtime);
|
|
|
| // Check cache validity in generated code. This is a fast case for
|
| // the JSObject::IsSimpleEnum cache validity checks. If we cannot
|
| @@ -1137,109 +1127,112 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| // The enum cache is valid. Load the map of the object being
|
| // iterated over and use the cache for the iteration.
|
| Label use_cache;
|
| - __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| + __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
|
| __ b(&use_cache);
|
|
|
| // Get the set of properties to enumerate.
|
| __ bind(&call_runtime);
|
| - __ push(r0); // Duplicate the enumerable object on the stack.
|
| + __ push(r3); // Duplicate the enumerable object on the stack.
|
| __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
|
|
|
| // If we got a map from the runtime call, we can do a fast
|
| // modification check. Otherwise, we got a fixed array, and we have
|
| // to do a slow check.
|
| Label fixed_array;
|
| - __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| + __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
|
| __ LoadRoot(ip, Heap::kMetaMapRootIndex);
|
| - __ cmp(r2, ip);
|
| - __ b(ne, &fixed_array);
|
| + __ cmp(r5, ip);
|
| + __ bne(&fixed_array);
|
|
|
| - // We got a map in register r0. Get the enumeration cache from it.
|
| + // We got a map in register r3. Get the enumeration cache from it.
|
| Label no_descriptors;
|
| __ bind(&use_cache);
|
|
|
| - __ EnumLength(r1, r0);
|
| - __ cmp(r1, Operand(Smi::FromInt(0)));
|
| - __ b(eq, &no_descriptors);
|
| + __ EnumLength(r4, r3);
|
| + __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
|
| + __ beq(&no_descriptors);
|
|
|
| - __ LoadInstanceDescriptors(r0, r2);
|
| - __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
|
| - __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
|
| + __ LoadInstanceDescriptors(r3, r5);
|
| + __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
|
| + __ LoadP(r5,
|
| + FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
|
|
|
| // Set up the four remaining stack slots.
|
| - __ push(r0); // Map.
|
| - __ mov(r0, Operand(Smi::FromInt(0)));
|
| + __ push(r3); // Map.
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(0));
|
| // Push enumeration cache, enumeration cache length (as smi) and zero.
|
| - __ Push(r2, r1, r0);
|
| - __ jmp(&loop);
|
| + __ Push(r5, r4, r3);
|
| + __ b(&loop);
|
|
|
| __ bind(&no_descriptors);
|
| __ Drop(1);
|
| - __ jmp(&exit);
|
| + __ b(&exit);
|
|
|
| - // We got a fixed array in register r0. Iterate through that.
|
| + // We got a fixed array in register r3. Iterate through that.
|
| Label non_proxy;
|
| __ bind(&fixed_array);
|
|
|
| - __ Move(r1, FeedbackVector());
|
| - __ mov(r2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
|
| - __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot)));
|
| + __ Move(r4, FeedbackVector());
|
| + __ mov(r5, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
|
| + __ StoreP(r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(slot)), r0);
|
|
|
| - __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
|
| - __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi indicates slow check
|
| + __ LoadP(r5, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
|
| STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
|
| - __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
|
| - __ b(gt, &non_proxy);
|
| - __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
|
| + __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
|
| + __ bgt(&non_proxy);
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(0)); // Zero indicates proxy
|
| __ bind(&non_proxy);
|
| - __ Push(r1, r0); // Smi and array
|
| - __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
|
| - __ mov(r0, Operand(Smi::FromInt(0)));
|
| - __ Push(r1, r0); // Fixed array length (as smi) and initial index.
|
| + __ Push(r4, r3); // Smi and array
|
| + __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(0));
|
| + __ Push(r4, r3); // Fixed array length (as smi) and initial index.
|
|
|
| // Generate code for doing the condition check.
|
| PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
|
| __ bind(&loop);
|
| - // Load the current count to r0, load the length to r1.
|
| - __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
|
| - __ cmp(r0, r1); // Compare to the array length.
|
| - __ b(hs, loop_statement.break_label());
|
| -
|
| - // Get the current entry of the array into register r3.
|
| - __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
|
| - __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| - __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));
|
| + // Load the current count to r3, load the length to r4.
|
| + __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
|
| + __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
|
| + __ cmpl(r3, r4); // Compare to the array length.
|
| + __ bge(loop_statement.break_label());
|
| +
|
| + // Get the current entry of the array into register r6.
|
| + __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
|
| + __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| + __ SmiToPtrArrayOffset(r6, r3);
|
| + __ LoadPX(r6, MemOperand(r6, r5));
|
|
|
| // Get the expected map from the stack or a smi in the
|
| - // permanent slow case into register r2.
|
| - __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
|
| + // permanent slow case into register r5.
|
| + __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
|
|
|
| // Check if the expected map still matches that of the enumerable.
|
| // If not, we may have to filter the key.
|
| Label update_each;
|
| - __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
|
| - __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
|
| - __ cmp(r4, Operand(r2));
|
| - __ b(eq, &update_each);
|
| + __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
|
| + __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
|
| + __ cmp(r7, r5);
|
| + __ beq(&update_each);
|
|
|
| // For proxies, no filtering is done.
|
| // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
|
| - __ cmp(r2, Operand(Smi::FromInt(0)));
|
| - __ b(eq, &update_each);
|
| + __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
|
| + __ beq(&update_each);
|
|
|
| // Convert the entry to a string or (smi) 0 if it isn't a property
|
| // any more. If the property has been removed while iterating, we
|
| // just skip it.
|
| - __ push(r1); // Enumerable.
|
| - __ push(r3); // Current entry.
|
| + __ Push(r4, r6); // Enumerable and current entry.
|
| __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
|
| - __ mov(r3, Operand(r0), SetCC);
|
| - __ b(eq, loop_statement.continue_label());
|
| + __ mr(r6, r3);
|
| + __ cmpi(r6, Operand::Zero());
|
| + __ beq(loop_statement.continue_label());
|
|
|
| // Update the 'each' property or variable from the possibly filtered
|
| - // entry in register r3.
|
| + // entry in register r6.
|
| __ bind(&update_each);
|
| - __ mov(result_register(), r3);
|
| + __ mr(result_register(), r6);
|
| // Perform the assignment as if via '='.
|
| { EffectContext context(this);
|
| EmitAssignment(stmt->each());
|
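The entry load in the loop above (SmiToPtrArrayOffset followed by LoadPX) turns a smi index into an indexed address. In effect (a sketch; the exact shift performed by SmiToPtrArrayOffset depends on the smi representation, e.g. upper-32-bit smis on PPC64):

    // entry = *(array + (FixedArray::kHeaderSize - kHeapObjectTag)
    //                 + untag(index_smi) * kPointerSize)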
| @@ -1251,9 +1244,9 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| // Generate code for the going to the next element by incrementing
|
| // the index (smi) stored on top of the stack.
|
| __ bind(loop_statement.continue_label());
|
| - __ pop(r0);
|
| - __ add(r0, r0, Operand(Smi::FromInt(1)));
|
| - __ push(r0);
|
| + __ pop(r3);
|
| + __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
|
| + __ push(r3);
|
|
|
| EmitBackEdgeBookkeeping(stmt, &loop);
|
| __ b(&loop);
|
| @@ -1302,7 +1295,7 @@ void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
|
| // Check stack before looping.
|
| PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
|
| EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
|
| - __ jmp(loop_statement.continue_label());
|
| + __ b(loop_statement.continue_label());
|
|
|
| // Exit and decrement the loop depth.
|
| PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
|
| @@ -1327,16 +1320,16 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
|
| FastNewClosureStub stub(isolate(),
|
| info->strict_mode(),
|
| info->is_generator());
|
| - __ mov(r2, Operand(info));
|
| + __ mov(r5, Operand(info));
|
| __ CallStub(&stub);
|
| } else {
|
| - __ mov(r0, Operand(info));
|
| - __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
|
| + __ mov(r3, Operand(info));
|
| + __ LoadRoot(r4, pretenure ? Heap::kTrueValueRootIndex
|
| : Heap::kFalseValueRootIndex);
|
| - __ Push(cp, r0, r1);
|
| + __ Push(cp, r3, r4);
|
| __ CallRuntime(Runtime::kNewClosure, 3);
|
| }
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -1350,20 +1343,20 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
|
| TypeofState typeof_state,
|
| Label* slow) {
|
| Register current = cp;
|
| - Register next = r1;
|
| - Register temp = r2;
|
| + Register next = r4;
|
| + Register temp = r5;
|
|
|
| Scope* s = scope();
|
| while (s != NULL) {
|
| if (s->num_heap_slots() > 0) {
|
| if (s->calls_sloppy_eval()) {
|
| // Check that extension is NULL.
|
| - __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
|
| - __ tst(temp, temp);
|
| - __ b(ne, slow);
|
| + __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
|
| + __ cmpi(temp, Operand::Zero());
|
| + __ bne(slow);
|
| }
|
| // Load next context in chain.
|
| - __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
|
| + __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
|
| // Walk the rest of the chain without clobbering cp.
|
| current = next;
|
| }
|
| @@ -1380,27 +1373,26 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
|
| }
|
| __ bind(&loop);
|
| // Terminate at native context.
|
| - __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
|
| + __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
|
| __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
|
| __ cmp(temp, ip);
|
| - __ b(eq, &fast);
|
| + __ beq(&fast);
|
| // Check that extension is NULL.
|
| - __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
|
| - __ tst(temp, temp);
|
| - __ b(ne, slow);
|
| + __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
|
| + __ cmpi(temp, Operand::Zero());
|
| + __ bne(slow);
|
| // Load next context in chain.
|
| - __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
|
| + __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
|
| __ b(&loop);
|
| __ bind(&fast);
|
| }
|
|
|
| - __ ldr(LoadIC::ReceiverRegister(), GlobalObjectOperand());
|
| + __ LoadP(LoadIC::ReceiverRegister(), GlobalObjectOperand());
|
| __ mov(LoadIC::NameRegister(), Operand(proxy->var()->name()));
|
| if (FLAG_vector_ics) {
|
| __ mov(LoadIC::SlotRegister(),
|
| Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
|
| }
|
| -
|
| ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
|
| ? NOT_CONTEXTUAL
|
| : CONTEXTUAL;
|
| @@ -1412,26 +1404,26 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
|
| Label* slow) {
|
| DCHECK(var->IsContextSlot());
|
| Register context = cp;
|
| - Register next = r3;
|
| - Register temp = r4;
|
| + Register next = r6;
|
| + Register temp = r7;
|
|
|
| for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
|
| if (s->num_heap_slots() > 0) {
|
| if (s->calls_sloppy_eval()) {
|
| // Check that extension is NULL.
|
| - __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
|
| - __ tst(temp, temp);
|
| - __ b(ne, slow);
|
| + __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
|
| + __ cmpi(temp, Operand::Zero());
|
| + __ bne(slow);
|
| }
|
| - __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
|
| + __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
|
| // Walk the rest of the chain without clobbering cp.
|
| context = next;
|
| }
|
| }
|
| // Check that last extension is NULL.
|
| - __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
|
| - __ tst(temp, temp);
|
| - __ b(ne, slow);
|
| + __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
|
| + __ cmpi(temp, Operand::Zero());
|
| + __ bne(slow);
|
|
|
| // This function is used only for loads, not stores, so it's safe to
|
| // return an cp-based operand (the write barrier cannot be allowed to
|
| @@ -1452,23 +1444,23 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
|
| Variable* var = proxy->var();
|
| if (var->mode() == DYNAMIC_GLOBAL) {
|
| EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
|
| - __ jmp(done);
|
| + __ b(done);
|
| } else if (var->mode() == DYNAMIC_LOCAL) {
|
| Variable* local = var->local_if_not_shadowed();
|
| - __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
|
| + __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
|
| if (local->mode() == LET || local->mode() == CONST ||
|
| local->mode() == CONST_LEGACY) {
|
| - __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
|
| + __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
|
| + __ bne(done);
|
| if (local->mode() == CONST_LEGACY) {
|
| - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
|
| + __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
|
| } else { // LET || CONST
|
| - __ b(ne, done);
|
| - __ mov(r0, Operand(var->name()));
|
| - __ push(r0);
|
| + __ mov(r3, Operand(var->name()));
|
| + __ push(r3);
|
| __ CallRuntime(Runtime::kThrowReferenceError, 1);
|
| }
|
| }
|
| - __ jmp(done);
|
| + __ b(done);
|
| }
|
| }
|
|
|
| @@ -1483,14 +1475,16 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
|
| switch (var->location()) {
|
| case Variable::UNALLOCATED: {
|
| Comment cmnt(masm_, "[ Global variable");
|
| - __ ldr(LoadIC::ReceiverRegister(), GlobalObjectOperand());
|
| + // Use inline caching. Variable name is passed in r5 and the global
|
| + // object (receiver) in r3.
|
| + __ LoadP(LoadIC::ReceiverRegister(), GlobalObjectOperand());
|
| __ mov(LoadIC::NameRegister(), Operand(var->name()));
|
| if (FLAG_vector_ics) {
|
| __ mov(LoadIC::SlotRegister(),
|
| Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
|
| }
|
| CallLoadIC(CONTEXTUAL);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| break;
|
| }
|
|
|
| @@ -1535,24 +1529,24 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
|
| }
|
|
|
| if (!skip_init_check) {
|
| + Label done;
|
| // Let and const need a read barrier.
|
| - GetVar(r0, var);
|
| - __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
|
| + GetVar(r3, var);
|
| + __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
|
| + __ bne(&done);
|
| if (var->mode() == LET || var->mode() == CONST) {
|
| // Throw a reference error when using an uninitialized let/const
|
| // binding in harmony mode.
|
| - Label done;
|
| - __ b(ne, &done);
|
| - __ mov(r0, Operand(var->name()));
|
| - __ push(r0);
|
| + __ mov(r3, Operand(var->name()));
|
| + __ push(r3);
|
| __ CallRuntime(Runtime::kThrowReferenceError, 1);
|
| - __ bind(&done);
|
| } else {
|
| // Uninitalized const bindings outside of harmony mode are unholed.
|
| DCHECK(var->mode() == CONST_LEGACY);
|
| - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
|
| + __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
|
| }
|
| - context()->Plug(r0);
|
| + __ bind(&done);
|
| + context()->Plug(r3);
|
| break;
|
| }
|
| }
|
| @@ -1567,11 +1561,11 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
|
| // by eval-introduced variables.
|
| EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
|
| __ bind(&slow);
|
| - __ mov(r1, Operand(var->name()));
|
| - __ Push(cp, r1); // Context and name.
|
| + __ mov(r4, Operand(var->name()));
|
| + __ Push(cp, r4); // Context and name.
|
| __ CallRuntime(Runtime::kLoadLookupSlot, 2);
|
| __ bind(&done);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| }
|
| }
|
| @@ -1581,56 +1575,56 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
|
| Comment cmnt(masm_, "[ RegExpLiteral");
|
| Label materialized;
|
| // Registers will be used as follows:
|
| - // r5 = materialized value (RegExp literal)
|
| - // r4 = JS function, literals array
|
| - // r3 = literal index
|
| - // r2 = RegExp pattern
|
| - // r1 = RegExp flags
|
| - // r0 = RegExp literal clone
|
| - __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| - __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
|
| + // r8 = materialized value (RegExp literal)
|
| + // r7 = JS function, literals array
|
| + // r6 = literal index
|
| + // r5 = RegExp pattern
|
| + // r4 = RegExp flags
|
| + // r3 = RegExp literal clone
|
| + __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| + __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
|
| int literal_offset =
|
| FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
|
| - __ ldr(r5, FieldMemOperand(r4, literal_offset));
|
| + __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
|
| __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
|
| - __ cmp(r5, ip);
|
| - __ b(ne, &materialized);
|
| + __ cmp(r8, ip);
|
| + __ bne(&materialized);
|
|
|
| // Create regexp literal using runtime function.
|
| - // Result will be in r0.
|
| - __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
|
| - __ mov(r2, Operand(expr->pattern()));
|
| - __ mov(r1, Operand(expr->flags()));
|
| - __ Push(r4, r3, r2, r1);
|
| + // Result will be in r3.
|
| + __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
|
| + __ mov(r5, Operand(expr->pattern()));
|
| + __ mov(r4, Operand(expr->flags()));
|
| + __ Push(r7, r6, r5, r4);
|
| __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
|
| - __ mov(r5, r0);
|
| + __ mr(r8, r3);
|
|
|
| __ bind(&materialized);
|
| int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
|
| Label allocated, runtime_allocate;
|
| - __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
|
| - __ jmp(&allocated);
|
| + __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
|
| + __ b(&allocated);
|
|
|
| __ bind(&runtime_allocate);
|
| - __ mov(r0, Operand(Smi::FromInt(size)));
|
| - __ Push(r5, r0);
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(size));
|
| + __ Push(r8, r3);
|
| __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
|
| - __ pop(r5);
|
| + __ pop(r8);
|
|
|
| __ bind(&allocated);
|
| // After this, registers are used as follows:
|
| - // r0: Newly allocated regexp.
|
| - // r5: Materialized regexp.
|
| - // r2: temp.
|
| - __ CopyFields(r0, r5, d0, size / kPointerSize);
|
| - context()->Plug(r0);
|
| + // r3: Newly allocated regexp.
|
| + // r8: Materialized regexp.
|
| + // r5: temp.
|
| + __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitAccessor(Expression* expression) {
|
| if (expression == NULL) {
|
| - __ LoadRoot(r1, Heap::kNullValueRootIndex);
|
| - __ push(r1);
|
| + __ LoadRoot(r4, Heap::kNullValueRootIndex);
|
| + __ push(r4);
|
| } else {
|
| VisitForStackValue(expression);
|
| }
|
| @@ -1642,22 +1636,22 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
|
|
|
| expr->BuildConstantProperties(isolate());
|
| Handle<FixedArray> constant_properties = expr->constant_properties();
|
| - __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| - __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
|
| - __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
|
| - __ mov(r1, Operand(constant_properties));
|
| + __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| + __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
|
| + __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
|
| + __ mov(r4, Operand(constant_properties));
|
| int flags = expr->fast_elements()
|
| ? ObjectLiteral::kFastElements
|
| : ObjectLiteral::kNoFlags;
|
| flags |= expr->has_function()
|
| ? ObjectLiteral::kHasFunction
|
| : ObjectLiteral::kNoFlags;
|
| - __ mov(r0, Operand(Smi::FromInt(flags)));
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(flags));
|
| int properties_count = constant_properties->length() / 2;
|
| if (expr->may_store_doubles() || expr->depth() > 1 ||
|
| masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
|
| properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
|
| - __ Push(r3, r2, r1, r0);
|
| + __ Push(r6, r5, r4, r3);
|
| __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
|
| } else {
|
| FastCloneShallowObjectStub stub(isolate(), properties_count);
|
| @@ -1665,7 +1659,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
|
| }
|
|
|
| // If result_saved is true the result is on top of the stack. If
|
| - // result_saved is false the result is in r0.
|
| + // result_saved is false the result is in r3.
|
| bool result_saved = false;
|
|
|
| // Mark all computed expressions that are bound to a key that
|
| @@ -1681,7 +1675,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
|
| Literal* key = property->key();
|
| Expression* value = property->value();
|
| if (!result_saved) {
|
| - __ push(r0); // Save result on stack
|
| + __ push(r3); // Save result on stack
|
| result_saved = true;
|
| }
|
| switch (property->kind()) {
|
| @@ -1694,9 +1688,9 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
|
| if (key->value()->IsInternalizedString()) {
|
| if (property->emit_store()) {
|
| VisitForAccumulatorValue(value);
|
| - DCHECK(StoreIC::ValueRegister().is(r0));
|
| + DCHECK(StoreIC::ValueRegister().is(r3));
|
| __ mov(StoreIC::NameRegister(), Operand(key->value()));
|
| - __ ldr(StoreIC::ReceiverRegister(), MemOperand(sp));
|
| + __ LoadP(StoreIC::ReceiverRegister(), MemOperand(sp));
|
| CallStoreIC(key->LiteralFeedbackId());
|
| PrepareForBailoutForId(key->id(), NO_REGISTERS);
|
| } else {
|
| @@ -1705,13 +1699,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
|
| break;
|
| }
|
| // Duplicate receiver on stack.
|
| - __ ldr(r0, MemOperand(sp));
|
| - __ push(r0);
|
| + __ LoadP(r3, MemOperand(sp));
|
| + __ push(r3);
|
| VisitForStackValue(key);
|
| VisitForStackValue(value);
|
| if (property->emit_store()) {
|
| - __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes
|
| - __ push(r0);
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
|
| + __ push(r3);
|
| __ CallRuntime(Runtime::kSetProperty, 4);
|
| } else {
|
| __ Drop(3);
|
| @@ -1719,8 +1713,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
|
| break;
|
| case ObjectLiteral::Property::PROTOTYPE:
|
| // Duplicate receiver on stack.
|
| - __ ldr(r0, MemOperand(sp));
|
| - __ push(r0);
|
| + __ LoadP(r3, MemOperand(sp));
|
| + __ push(r3);
|
| VisitForStackValue(value);
|
| if (property->emit_store()) {
|
| __ CallRuntime(Runtime::kSetPrototype, 2);
|
| @@ -1728,7 +1722,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
|
| __ Drop(2);
|
| }
|
| break;
|
| -
|
| case ObjectLiteral::Property::GETTER:
|
| accessor_table.lookup(key)->second->getter = value;
|
| break;
|
| @@ -1743,27 +1736,27 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
|
| for (AccessorTable::Iterator it = accessor_table.begin();
|
| it != accessor_table.end();
|
| ++it) {
|
| - __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
|
| - __ push(r0);
|
| + __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
|
| + __ push(r3);
|
| VisitForStackValue(it->first);
|
| EmitAccessor(it->second->getter);
|
| EmitAccessor(it->second->setter);
|
| - __ mov(r0, Operand(Smi::FromInt(NONE)));
|
| - __ push(r0);
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
|
| + __ push(r3);
|
| __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
|
| }
|
|
|
| if (expr->has_function()) {
|
| DCHECK(result_saved);
|
| - __ ldr(r0, MemOperand(sp));
|
| - __ push(r0);
|
| + __ LoadP(r3, MemOperand(sp));
|
| + __ push(r3);
|
| __ CallRuntime(Runtime::kToFastProperties, 1);
|
| }
|
|
|
| if (result_saved) {
|
| context()->PlugTOS();
|
| } else {
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| }
|
|
|
| @@ -1793,13 +1786,13 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
|
| allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
|
| }
|
|
|
| - __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| - __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
|
| - __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
|
| - __ mov(r1, Operand(constant_elements));
|
| + __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| + __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
|
| + __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
|
| + __ mov(r4, Operand(constant_elements));
|
| if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
|
| - __ mov(r0, Operand(Smi::FromInt(flags)));
|
| - __ Push(r3, r2, r1, r0);
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(flags));
|
| + __ Push(r6, r5, r4, r3);
|
| __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
|
| } else {
|
| FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
|
| @@ -1817,7 +1810,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
|
| if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
|
|
|
| if (!result_saved) {
|
| - __ push(r0);
|
| + __ push(r3);
|
| __ Push(Smi::FromInt(expr->literal_index()));
|
| result_saved = true;
|
| }
|
| @@ -1825,15 +1818,15 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
|
|
|
| if (IsFastObjectElementsKind(constant_elements_kind)) {
|
| int offset = FixedArray::kHeaderSize + (i * kPointerSize);
|
| - __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal.
|
| - __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
|
| - __ str(result_register(), FieldMemOperand(r1, offset));
|
| + __ LoadP(r8, MemOperand(sp, kPointerSize)); // Copy of array literal.
|
| + __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
|
| + __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
|
| // Update the write barrier for the array store.
|
| - __ RecordWriteField(r1, offset, result_register(), r2,
|
| + __ RecordWriteField(r4, offset, result_register(), r5,
|
| kLRHasBeenSaved, kDontSaveFPRegs,
|
| EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
|
| } else {
|
| - __ mov(r3, Operand(Smi::FromInt(i)));
|
| + __ LoadSmiLiteral(r6, Smi::FromInt(i));
|
| StoreArrayLiteralElementStub stub(isolate());
|
| __ CallStub(&stub);
|
| }
|
| @@ -1845,7 +1838,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
|
| __ pop(); // literal index
|
| context()->PlugTOS();
|
| } else {
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| }
|
|
|
| @@ -1875,7 +1868,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
|
| if (expr->is_compound()) {
|
| // We need the receiver both on the stack and in the register.
|
| VisitForStackValue(property->obj());
|
| - __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
|
| + __ LoadP(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
|
| } else {
|
| VisitForStackValue(property->obj());
|
| }
|
| @@ -1884,8 +1877,8 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
|
| if (expr->is_compound()) {
|
| VisitForStackValue(property->obj());
|
| VisitForStackValue(property->key());
|
| - __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize));
|
| - __ ldr(LoadIC::NameRegister(), MemOperand(sp, 0));
|
| + __ LoadP(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize));
|
| + __ LoadP(LoadIC::NameRegister(), MemOperand(sp, 0));
|
| } else {
|
| VisitForStackValue(property->obj());
|
| VisitForStackValue(property->key());
|
| @@ -1914,7 +1907,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
|
| }
|
|
|
| Token::Value op = expr->binary_op();
|
| - __ push(r0); // Left operand goes on the stack.
|
| + __ push(r3); // Left operand goes on the stack.
|
| VisitForAccumulatorValue(expr->value());
|
|
|
| OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
|
| @@ -1947,7 +1940,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
|
| EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
|
| expr->op());
|
| PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| break;
|
| case NAMED_PROPERTY:
|
| EmitNamedPropertyAssignment(expr);
|
| @@ -1974,26 +1967,27 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
|
| case Yield::INITIAL: {
|
| Label suspend, continuation, post_runtime, resume;
|
|
|
| - __ jmp(&suspend);
|
| + __ b(&suspend);
|
|
|
| __ bind(&continuation);
|
| - __ jmp(&resume);
|
| + __ b(&resume);
|
|
|
| __ bind(&suspend);
|
| VisitForAccumulatorValue(expr->generator_object());
|
| DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
|
| - __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
|
| - __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
|
| - __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
|
| - __ mov(r1, cp);
|
| - __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
|
| + __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
|
| + r0);
|
| + __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
|
| + __ mr(r4, cp);
|
| + __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
|
| kLRHasBeenSaved, kDontSaveFPRegs);
|
| - __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
|
| - __ cmp(sp, r1);
|
| - __ b(eq, &post_runtime);
|
| - __ push(r0); // generator object
|
| + __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
|
| + __ cmp(sp, r4);
|
| + __ beq(&post_runtime);
|
| + __ push(r3); // generator object
|
| __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
|
| - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| __ bind(&post_runtime);
|
| __ pop(result_register());
|
| EmitReturnSequence();
|
| @@ -2005,9 +1999,10 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
|
|
|
| case Yield::FINAL: {
|
| VisitForAccumulatorValue(expr->generator_object());
|
| - __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
|
| - __ str(r1, FieldMemOperand(result_register(),
|
| - JSGeneratorObject::kContinuationOffset));
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
|
| + __ StoreP(r4, FieldMemOperand(result_register(),
|
| + JSGeneratorObject::kContinuationOffset),
|
| + r0);
|
| // Pop value from top-of-stack slot, box result into result register.
|
| EmitCreateIteratorResult(true);
|
| EmitUnwindBeforeReturn();
|
| @@ -2023,79 +2018,79 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
|
| // [sp + 0 * kPointerSize] g
|
|
|
| Label l_catch, l_try, l_suspend, l_continuation, l_resume;
|
| - Label l_next, l_call, l_loop;
|
| + Label l_next, l_call;
|
| Register load_receiver = LoadIC::ReceiverRegister();
|
| Register load_name = LoadIC::NameRegister();
|
|
|
| // Initial send value is undefined.
|
| - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
|
| + __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
|
| __ b(&l_next);
|
|
|
| // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
|
| __ bind(&l_catch);
|
| handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
|
| - __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
|
| - __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
|
| - __ Push(load_name, r3, r0); // "throw", iter, except
|
| - __ jmp(&l_call);
|
| + __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
|
| + __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
|
| + __ Push(load_name, r6, r3); // "throw", iter, except
|
| + __ b(&l_call);
|
|
|
| // try { received = %yield result }
|
| // Shuffle the received result above a try handler and yield it without
|
| // re-boxing.
|
| __ bind(&l_try);
|
| - __ pop(r0); // result
|
| + __ pop(r3); // result
|
| __ PushTryHandler(StackHandler::CATCH, expr->index());
|
| const int handler_size = StackHandlerConstants::kSize;
|
| - __ push(r0); // result
|
| - __ jmp(&l_suspend);
|
| + __ push(r3); // result
|
| + __ b(&l_suspend);
|
| __ bind(&l_continuation);
|
| - __ jmp(&l_resume);
|
| + __ b(&l_resume);
|
| __ bind(&l_suspend);
|
| const int generator_object_depth = kPointerSize + handler_size;
|
| - __ ldr(r0, MemOperand(sp, generator_object_depth));
|
| - __ push(r0); // g
|
| + __ LoadP(r3, MemOperand(sp, generator_object_depth));
|
| + __ push(r3); // g
|
| DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
|
| - __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
|
| - __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
|
| - __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
|
| - __ mov(r1, cp);
|
| - __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
|
| + __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
|
| + r0);
|
| + __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
|
| + __ mr(r4, cp);
|
| + __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
|
| kLRHasBeenSaved, kDontSaveFPRegs);
|
| __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
|
| - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| - __ pop(r0); // result
|
| + __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + __ pop(r3); // result
|
| EmitReturnSequence();
|
| - __ bind(&l_resume); // received in r0
|
| + __ bind(&l_resume); // received in r3
|
| __ PopTryHandler();
|
|
|
| // receiver = iter; f = 'next'; arg = received;
|
| __ bind(&l_next);
|
|
|
| __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
|
| - __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
|
| - __ Push(load_name, r3, r0); // "next", iter, received
|
| + __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
|
| + __ Push(load_name, r6, r3); // "next", iter, received
|
|
|
| // result = receiver[f](arg);
|
| __ bind(&l_call);
|
| - __ ldr(load_receiver, MemOperand(sp, kPointerSize));
|
| - __ ldr(load_name, MemOperand(sp, 2 * kPointerSize));
|
| + __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
|
| + __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
|
| if (FLAG_vector_ics) {
|
| __ mov(LoadIC::SlotRegister(),
|
| Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
|
| }
|
| Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
|
| CallIC(ic, TypeFeedbackId::None());
|
| - __ mov(r1, r0);
|
| - __ str(r1, MemOperand(sp, 2 * kPointerSize));
|
| + __ mr(r4, r3);
|
| + __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
|
| CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
|
| __ CallStub(&stub);
|
|
|
| - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| __ Drop(1); // The function is still on the stack; drop it.
|
|
|
| // if (!result.done) goto l_try;
|
| - __ bind(&l_loop);
|
| - __ Move(load_receiver, r0);
|
| + __ Move(load_receiver, r3);
|
|
|
| __ push(load_receiver); // save result
|
| __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
|
| @@ -2106,8 +2101,8 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
|
| -    CallLoadIC(NOT_CONTEXTUAL);                           // r0=result.done
|
| +    CallLoadIC(NOT_CONTEXTUAL);                           // r3=result.done
|
| Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
|
| CallIC(bool_ic);
|
| - __ cmp(r0, Operand(0));
|
| - __ b(eq, &l_try);
|
| + __ cmpi(r3, Operand::Zero());
|
| + __ beq(&l_try);
|
|
|
| // result.value
|
| __ pop(load_receiver); // result
|
| @@ -2116,8 +2111,8 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
|
| __ mov(LoadIC::SlotRegister(),
|
| Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
|
| }
|
| - CallLoadIC(NOT_CONTEXTUAL); // r0=result.value
|
| - context()->DropAndPlug(2, r0); // drop iter and g
|
| + CallLoadIC(NOT_CONTEXTUAL); // r3=result.value
|
| + context()->DropAndPlug(2, r3); // drop iter and g
|
| break;
|
| }
|
| }
|
| @@ -2127,100 +2122,109 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
|
| void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
|
| Expression *value,
|
| JSGeneratorObject::ResumeMode resume_mode) {
|
| - // The value stays in r0, and is ultimately read by the resumed generator, as
|
| + // The value stays in r3, and is ultimately read by the resumed generator, as
|
| // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
|
| // is read to throw the value when the resumed generator is already closed.
|
| - // r1 will hold the generator object until the activation has been resumed.
|
| + // r4 will hold the generator object until the activation has been resumed.
|
| VisitForStackValue(generator);
|
| VisitForAccumulatorValue(value);
|
| - __ pop(r1);
|
| + __ pop(r4);
|
|
|
| // Check generator state.
|
| Label wrong_state, closed_state, done;
|
| - __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
|
| + __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
|
| STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
|
| STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
|
| - __ cmp(r3, Operand(Smi::FromInt(0)));
|
| - __ b(eq, &closed_state);
|
| - __ b(lt, &wrong_state);
|
| + __ CmpSmiLiteral(r6, Smi::FromInt(0), r0);
|
| + __ beq(&closed_state);
|
| + __ blt(&wrong_state);
|
|
|
| // Load suspended function and context.
|
| - __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
|
| - __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
|
| + __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
|
| + __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
|
|
|
| // Load receiver and store as the first argument.
|
| - __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
|
| - __ push(r2);
|
| + __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
|
| + __ push(r5);
|
|
|
| // Push holes for the rest of the arguments to the generator function.
|
| - __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
|
| - __ ldr(r3,
|
| - FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
|
| - __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
|
| - Label push_argument_holes, push_frame;
|
| - __ bind(&push_argument_holes);
|
| - __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
|
| - __ b(mi, &push_frame);
|
| - __ push(r2);
|
| - __ jmp(&push_argument_holes);
|
| + __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
|
| + __ LoadWordArith(r6,
|
| + FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
|
| + __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
|
| + Label argument_loop, push_frame;
|
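| +  // Note: on 64-bit targets the load above already yields the untagged
|
| +  // count (the smi's value occupies one 32-bit half of the slot); on
|
| +  // 32-bit targets it is a tagged smi and must be untagged below.
|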
| +#if V8_TARGET_ARCH_PPC64
|
| + __ cmpi(r6, Operand::Zero());
|
| + __ beq(&push_frame);
|
| +#else
|
| + __ SmiUntag(r6, SetRC);
|
| + __ beq(&push_frame, cr0);
|
| +#endif
|
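| +  // CTR loop: mtctr loads the count register and bdnz decrements it,
|
| +  // branching back while it is non-zero.
|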
| + __ mtctr(r6);
|
| + __ bind(&argument_loop);
|
| + __ push(r5);
|
| + __ bdnz(&argument_loop);
|
|
|
| // Enter a new JavaScript frame, and initialize its slots as they were when
|
| // the generator was suspended.
|
| Label resume_frame;
|
| __ bind(&push_frame);
|
| - __ bl(&resume_frame);
|
| - __ jmp(&done);
|
| + __ b(&resume_frame, SetLK);
|
| + __ b(&done);
|
| __ bind(&resume_frame);
|
| // lr = return address.
|
| // fp = caller's frame pointer.
|
| - // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
|
| // cp = callee's context,
|
| - // r4 = callee's JS function.
|
| - __ PushFixedFrame(r4);
|
| + // r7 = callee's JS function.
|
| + __ PushFixedFrame(r7);
|
| // Adjust FP to point to saved FP.
|
| - __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
|
| + __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
|
|
|
| // Load the operand stack size.
|
| - __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
|
| - __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
|
| - __ SmiUntag(r3);
|
| + __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
|
| + __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
|
| + __ SmiUntag(r6, SetRC);
|
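| +  // SetRC leaves "length == 0" in cr0 for the branches below.
|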
|
|
| // If we are sending a value and there is no operand stack, we can jump back
|
| // in directly.
|
| + Label call_resume;
|
| if (resume_mode == JSGeneratorObject::NEXT) {
|
| Label slow_resume;
|
| - __ cmp(r3, Operand(0));
|
| - __ b(ne, &slow_resume);
|
| - __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
|
| -
|
| + __ bne(&slow_resume, cr0);
|
| + __ LoadP(r6, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
|
| +#if V8_OOL_CONSTANT_POOL
|
| { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
|
| - if (FLAG_enable_ool_constant_pool) {
|
| - // Load the new code object's constant pool pointer.
|
| - __ ldr(pp,
|
| - MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize));
|
| - }
|
| -
|
| - __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
|
| - __ SmiUntag(r2);
|
| - __ add(r3, r3, r2);
|
| - __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
|
| - __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
|
| - __ Jump(r3);
|
| + // Load the new code object's constant pool pointer.
|
| + __ LoadP(kConstantPoolRegister,
|
| + MemOperand(r6, Code::kConstantPoolOffset - Code::kHeaderSize));
|
| +#endif
|
| + __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
|
| + __ SmiUntag(r5);
|
| + __ add(r6, r6, r5);
|
| + __ LoadSmiLiteral(r5,
|
| + Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
|
| + __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
|
| + r0);
|
| + __ Jump(r6);
|
| + __ bind(&slow_resume);
|
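| +    // The brace below closes the ConstantPoolUnavailableScope opened
|
| +    // under the matching #if above.
|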
| +#if V8_OOL_CONSTANT_POOL
|
| }
|
| - __ bind(&slow_resume);
|
| +#endif
|
| + } else {
|
| + __ beq(&call_resume, cr0);
|
| }
|
|
|
| // Otherwise, we push holes for the operand stack and call the runtime to fix
|
| // up the stack and the handlers.
|
| - Label push_operand_holes, call_resume;
|
| - __ bind(&push_operand_holes);
|
| - __ sub(r3, r3, Operand(1), SetCC);
|
| - __ b(mi, &call_resume);
|
| - __ push(r2);
|
| - __ b(&push_operand_holes);
|
| + Label operand_loop;
|
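| +  // r6 is known non-zero here (the zero case branched off above), so
|
| +  // mtctr/bdnz cannot wrap around.
|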
| + __ mtctr(r6);
|
| + __ bind(&operand_loop);
|
| + __ push(r5);
|
| + __ bdnz(&operand_loop);
|
| +
|
| __ bind(&call_resume);
|
| - DCHECK(!result_register().is(r1));
|
| - __ Push(r1, result_register());
|
| + DCHECK(!result_register().is(r4));
|
| + __ Push(r4, result_register());
|
| __ Push(Smi::FromInt(resume_mode));
|
| __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
|
| // Not reached: the runtime call returns elsewhere.
|
| @@ -2230,20 +2234,20 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
|
| __ bind(&closed_state);
|
| if (resume_mode == JSGeneratorObject::NEXT) {
|
| // Return completed iterator result when generator is closed.
|
| - __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
|
| - __ push(r2);
|
| + __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
|
| + __ push(r5);
|
| // Pop value from top-of-stack slot; box result into result register.
|
| EmitCreateIteratorResult(true);
|
| } else {
|
| // Throw the provided value.
|
| - __ push(r0);
|
| + __ push(r3);
|
| __ CallRuntime(Runtime::kThrow, 1);
|
| }
|
| - __ jmp(&done);
|
| + __ b(&done);
|
|
|
| // Throw error if we attempt to operate on a running generator.
|
| __ bind(&wrong_state);
|
| - __ push(r1);
|
| + __ push(r4);
|
| __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
|
|
|
| __ bind(&done);
|
| @@ -2257,33 +2261,35 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
|
|
|
| Handle<Map> map(isolate()->native_context()->iterator_result_map());
|
|
|
| - __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
|
| - __ jmp(&allocated);
|
| + __ Allocate(map->instance_size(), r3, r5, r6, &gc_required, TAG_OBJECT);
|
| + __ b(&allocated);
|
|
|
| __ bind(&gc_required);
|
| __ Push(Smi::FromInt(map->instance_size()));
|
| __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
|
| - __ ldr(context_register(),
|
| - MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + __ LoadP(context_register(),
|
| + MemOperand(fp, StandardFrameConstants::kContextOffset));
|
|
|
| __ bind(&allocated);
|
| - __ mov(r1, Operand(map));
|
| - __ pop(r2);
|
| - __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
|
| - __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
|
| + __ mov(r4, Operand(map));
|
| + __ pop(r5);
|
| + __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
|
| + __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
|
| DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
|
| - __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| - __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
|
| - __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
|
| - __ str(r2,
|
| - FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
|
| - __ str(r3,
|
| - FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
|
| + __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
|
| + __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
|
| + __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
|
| + __ StoreP(r5,
|
| + FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
|
| + r0);
|
| + __ StoreP(r6,
|
| + FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
|
| + r0);
|
|
|
| // Only the value field needs a write barrier, as the other values are in the
|
| // root set.
|
| - __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
|
| - r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
|
| + __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset,
|
| + r5, r6, kLRHasBeenSaved, kDontSaveFPRegs);
|
| }
|
|
|
|
|
| @@ -2296,7 +2302,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
|
| Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
|
| CallLoadIC(NOT_CONTEXTUAL);
|
| } else {
|
| - CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
|
| + CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
|
| }
|
| }
|
|
|
| @@ -2309,7 +2315,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
|
| Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
|
| CallIC(ic);
|
| } else {
|
| - CallIC(ic, prop->PropertyFeedbackId());
|
| + CallIC(ic, prop->PropertyFeedbackId());
|
| }
|
| }
|
|
|
| @@ -2321,16 +2327,16 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
|
| Expression* right_expr) {
|
| Label done, smi_case, stub_call;
|
|
|
| - Register scratch1 = r2;
|
| - Register scratch2 = r3;
|
| + Register scratch1 = r5;
|
| + Register scratch2 = r6;
|
|
|
| // Get the arguments.
|
| - Register left = r1;
|
| - Register right = r0;
|
| + Register left = r4;
|
| + Register right = r3;
|
| __ pop(left);
|
|
|
| // Perform combined smi check on both operands.
|
| - __ orr(scratch1, left, Operand(right));
|
| + __ orx(scratch1, left, right);
|
| STATIC_ASSERT(kSmiTag == 0);
|
| JumpPatchSite patch_site(masm_);
|
| patch_site.EmitJumpIfSmi(scratch1, &smi_case);
|
| @@ -2339,84 +2345,116 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
|
| BinaryOpICStub stub(isolate(), op, mode);
|
| CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
|
| patch_site.EmitPatchInfo();
|
| - __ jmp(&done);
|
| + __ b(&done);
|
|
|
| __ bind(&smi_case);
|
| // Smi case. This code works the same way as the smi-smi case in the type
|
| - // recording binary operation stub, see
|
| + // recording binary operation stub.
|
| switch (op) {
|
| case Token::SAR:
|
| __ GetLeastBitsFromSmi(scratch1, right, 5);
|
| - __ mov(right, Operand(left, ASR, scratch1));
|
| - __ bic(right, right, Operand(kSmiTagMask));
|
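| +      // Arithmetically shift the tagged value, then clear the tag bits so
|
| +      // the result is a valid smi again.
|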
| + __ ShiftRightArith(right, left, scratch1);
|
| + __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
|
| break;
|
| case Token::SHL: {
|
| - __ SmiUntag(scratch1, left);
|
| __ GetLeastBitsFromSmi(scratch2, right, 5);
|
| - __ mov(scratch1, Operand(scratch1, LSL, scratch2));
|
| - __ TrySmiTag(right, scratch1, &stub_call);
|
| +#if V8_TARGET_ARCH_PPC64
|
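| +      // With the smi value held in the upper 32 bits, shifting the tagged
|
| +      // word left truncates to int32 automatically; no overflow check needed.
|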
| + __ ShiftLeft(right, left, scratch2);
|
| +#else
|
| + __ SmiUntag(scratch1, left);
|
| + __ ShiftLeft(scratch1, scratch1, scratch2);
|
| + // Check that the *signed* result fits in a smi
|
| + __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
|
| + __ SmiTag(right, scratch1);
|
| +#endif
|
| break;
|
| }
|
| case Token::SHR: {
|
| __ SmiUntag(scratch1, left);
|
| __ GetLeastBitsFromSmi(scratch2, right, 5);
|
| - __ mov(scratch1, Operand(scratch1, LSR, scratch2));
|
| - __ tst(scratch1, Operand(0xc0000000));
|
| - __ b(ne, &stub_call);
|
| + __ srw(scratch1, scratch1, scratch2);
|
| + // Unsigned shift is not allowed to produce a negative number.
|
| + __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
|
| __ SmiTag(right, scratch1);
|
| break;
|
| }
|
| - case Token::ADD:
|
| - __ add(scratch1, left, Operand(right), SetCC);
|
| - __ b(vs, &stub_call);
|
| - __ mov(right, scratch1);
|
| + case Token::ADD: {
|
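| +      // The overflow check lands in cr0 (replacing ARM's adds + bvs idiom).
|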
| + __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
|
| + __ bne(&stub_call, cr0);
|
| + __ mr(right, scratch1);
|
| break;
|
| - case Token::SUB:
|
| - __ sub(scratch1, left, Operand(right), SetCC);
|
| - __ b(vs, &stub_call);
|
| - __ mov(right, scratch1);
|
| + }
|
| + case Token::SUB: {
|
| + __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
|
| + __ bne(&stub_call, cr0);
|
| + __ mr(right, scratch1);
|
| break;
|
| + }
|
| case Token::MUL: {
|
| + Label mul_zero;
|
| +#if V8_TARGET_ARCH_PPC64
|
| + // Remove tag from both operands.
|
| + __ SmiUntag(ip, right);
|
| + __ SmiUntag(r0, left);
|
| + __ Mul(scratch1, r0, ip);
|
| + // Check for overflowing the smi range - no overflow if higher 33 bits of
|
| + // the result are identical.
|
| + __ TestIfInt32(scratch1, scratch2, ip);
|
| + __ bne(&stub_call);
|
| +#else
|
| __ SmiUntag(ip, right);
|
| - __ smull(scratch1, scratch2, left, ip);
|
| - __ mov(ip, Operand(scratch1, ASR, 31));
|
| - __ cmp(ip, Operand(scratch2));
|
| - __ b(ne, &stub_call);
|
| - __ cmp(scratch1, Operand::Zero());
|
| - __ mov(right, Operand(scratch1), LeaveCC, ne);
|
| - __ b(ne, &done);
|
| - __ add(scratch2, right, Operand(left), SetCC);
|
| - __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
|
| - __ b(mi, &stub_call);
|
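| +      // mullw/mulhw give the low and high 32 bits of the 64-bit product.
|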
| + __ mullw(scratch1, left, ip);
|
| + __ mulhw(scratch2, left, ip);
|
| + // Check for overflowing the smi range - no overflow if higher 33 bits of
|
| + // the result are identical.
|
| + __ TestIfInt32(scratch2, scratch1, ip);
|
| + __ bne(&stub_call);
|
| +#endif
|
| + // Go slow on zero result to handle -0.
|
| + __ cmpi(scratch1, Operand::Zero());
|
| + __ beq(&mul_zero);
|
| +#if V8_TARGET_ARCH_PPC64
|
| + __ SmiTag(right, scratch1);
|
| +#else
|
| + __ mr(right, scratch1);
|
| +#endif
|
| + __ b(&done);
|
| +      // The result was zero: return -0 (via the stub) if either operand
|
| +      // was negative; we know one of them was zero.
|
| + __ bind(&mul_zero);
|
| + __ add(scratch2, right, left);
|
| + __ cmpi(scratch2, Operand::Zero());
|
| + __ blt(&stub_call);
|
| + __ LoadSmiLiteral(right, Smi::FromInt(0));
|
| break;
|
| }
|
| case Token::BIT_OR:
|
| - __ orr(right, left, Operand(right));
|
| + __ orx(right, left, right);
|
| break;
|
| case Token::BIT_AND:
|
| - __ and_(right, left, Operand(right));
|
| + __ and_(right, left, right);
|
| break;
|
| case Token::BIT_XOR:
|
| - __ eor(right, left, Operand(right));
|
| + __ xor_(right, left, right);
|
| break;
|
| default:
|
| UNREACHABLE();
|
| }
|
|
|
| __ bind(&done);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
|
| Token::Value op,
|
| OverwriteMode mode) {
|
| - __ pop(r1);
|
| + __ pop(r4);
|
| BinaryOpICStub stub(isolate(), op, mode);
|
| JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
|
| CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
|
| patch_site.EmitPatchInfo();
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -2442,9 +2480,9 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
|
| break;
|
| }
|
| case NAMED_PROPERTY: {
|
| - __ push(r0); // Preserve value.
|
| + __ push(r3); // Preserve value.
|
| VisitForAccumulatorValue(prop->obj());
|
| - __ Move(StoreIC::ReceiverRegister(), r0);
|
| + __ Move(StoreIC::ReceiverRegister(), r3);
|
| __ pop(StoreIC::ValueRegister()); // Restore value.
|
| __ mov(StoreIC::NameRegister(),
|
| Operand(prop->key()->AsLiteral()->value()));
|
| @@ -2452,10 +2490,10 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
|
| break;
|
| }
|
| case KEYED_PROPERTY: {
|
| - __ push(r0); // Preserve value.
|
| + __ push(r3); // Preserve value.
|
| VisitForStackValue(prop->obj());
|
| VisitForAccumulatorValue(prop->key());
|
| - __ Move(KeyedStoreIC::NameRegister(), r0);
|
| + __ Move(KeyedStoreIC::NameRegister(), r3);
|
| __ Pop(KeyedStoreIC::ValueRegister(), KeyedStoreIC::ReceiverRegister());
|
| Handle<Code> ic = strict_mode() == SLOPPY
|
| ? isolate()->builtins()->KeyedStoreIC_Initialize()
|
| @@ -2464,19 +2502,19 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
|
| break;
|
| }
|
| }
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
|
| Variable* var, MemOperand location) {
|
| - __ str(result_register(), location);
|
| + __ StoreP(result_register(), location, r0);
|
| if (var->IsContextSlot()) {
|
| // RecordWrite may destroy all its register arguments.
|
| - __ mov(r3, result_register());
|
| + __ mr(r6, result_register());
|
| int offset = Context::SlotOffset(var->index());
|
| __ RecordWriteContextSlot(
|
| - r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
|
| + r4, offset, r6, r5, kLRHasBeenSaved, kDontSaveFPRegs);
|
| }
|
| }
|
|
|
| @@ -2485,24 +2523,24 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
|
| if (var->IsUnallocated()) {
|
| // Global var, const, or let.
|
| __ mov(StoreIC::NameRegister(), Operand(var->name()));
|
| - __ ldr(StoreIC::ReceiverRegister(), GlobalObjectOperand());
|
| + __ LoadP(StoreIC::ReceiverRegister(), GlobalObjectOperand());
|
| CallStoreIC();
|
|
|
| } else if (op == Token::INIT_CONST_LEGACY) {
|
| // Const initializers need a write barrier.
|
| DCHECK(!var->IsParameter()); // No const parameters.
|
| if (var->IsLookupSlot()) {
|
| - __ push(r0);
|
| - __ mov(r0, Operand(var->name()));
|
| - __ Push(cp, r0); // Context and name.
|
| + __ push(r3);
|
| + __ mov(r3, Operand(var->name()));
|
| + __ Push(cp, r3); // Context and name.
|
| __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
|
| } else {
|
| DCHECK(var->IsStackAllocated() || var->IsContextSlot());
|
| Label skip;
|
| - MemOperand location = VarOperand(var, r1);
|
| - __ ldr(r2, location);
|
| - __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
|
| - __ b(ne, &skip);
|
| + MemOperand location = VarOperand(var, r4);
|
| + __ LoadP(r5, location);
|
| + __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
|
| + __ bne(&skip);
|
| EmitStoreToStackLocalOrContextSlot(var, location);
|
| __ bind(&skip);
|
| }
|
| @@ -2512,12 +2550,12 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
|
| DCHECK(!var->IsLookupSlot());
|
| DCHECK(var->IsStackAllocated() || var->IsContextSlot());
|
| Label assign;
|
| - MemOperand location = VarOperand(var, r1);
|
| - __ ldr(r3, location);
|
| - __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
|
| - __ b(ne, &assign);
|
| - __ mov(r3, Operand(var->name()));
|
| - __ push(r3);
|
| + MemOperand location = VarOperand(var, r4);
|
| + __ LoadP(r6, location);
|
| + __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
|
| + __ bne(&assign);
|
| + __ mov(r6, Operand(var->name()));
|
| + __ push(r6);
|
| __ CallRuntime(Runtime::kThrowReferenceError, 1);
|
| // Perform the assignment.
|
| __ bind(&assign);
|
| @@ -2526,20 +2564,20 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
|
| } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
|
| if (var->IsLookupSlot()) {
|
| // Assignment to var.
|
| - __ push(r0); // Value.
|
| - __ mov(r1, Operand(var->name()));
|
| - __ mov(r0, Operand(Smi::FromInt(strict_mode())));
|
| - __ Push(cp, r1, r0); // Context, name, strict mode.
|
| + __ push(r3); // Value.
|
| + __ mov(r4, Operand(var->name()));
|
| + __ mov(r3, Operand(Smi::FromInt(strict_mode())));
|
| + __ Push(cp, r4, r3); // Context, name, strict mode.
|
| __ CallRuntime(Runtime::kStoreLookupSlot, 4);
|
| } else {
|
| // Assignment to var or initializing assignment to let/const in harmony
|
| // mode.
|
| DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
|
| - MemOperand location = VarOperand(var, r1);
|
| + MemOperand location = VarOperand(var, r4);
|
| if (generate_debug_code_ && op == Token::INIT_LET) {
|
| // Check for an uninitialized let binding.
|
| - __ ldr(r2, location);
|
| - __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
|
| + __ LoadP(r5, location);
|
| + __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
|
| __ Check(eq, kLetBindingReInitialization);
|
| }
|
| EmitStoreToStackLocalOrContextSlot(var, location);
|
| @@ -2562,7 +2600,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
|
| CallStoreIC(expr->AssignmentFeedbackId());
|
|
|
| PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -2572,7 +2610,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
|
| // Record source code position before IC call.
|
| SetSourcePosition(expr->position());
|
| __ Pop(KeyedStoreIC::ReceiverRegister(), KeyedStoreIC::NameRegister());
|
| - DCHECK(KeyedStoreIC::ValueRegister().is(r0));
|
| + DCHECK(KeyedStoreIC::ValueRegister().is(r3));
|
|
|
| Handle<Code> ic = strict_mode() == SLOPPY
|
| ? isolate()->builtins()->KeyedStoreIC_Initialize()
|
| @@ -2580,7 +2618,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
|
| CallIC(ic, expr->AssignmentFeedbackId());
|
|
|
| PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -2590,17 +2628,17 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
|
|
|
| if (key->IsPropertyName()) {
|
| VisitForAccumulatorValue(expr->obj());
|
| - __ Move(LoadIC::ReceiverRegister(), r0);
|
| + __ Move(LoadIC::ReceiverRegister(), r3);
|
| EmitNamedPropertyLoad(expr);
|
| PrepareForBailoutForId(expr->LoadId(), TOS_REG);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| } else {
|
| VisitForStackValue(expr->obj());
|
| VisitForAccumulatorValue(expr->key());
|
| - __ Move(LoadIC::NameRegister(), r0);
|
| + __ Move(LoadIC::NameRegister(), r3);
|
| __ pop(LoadIC::ReceiverRegister());
|
| EmitKeyedPropertyLoad(expr);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| }
|
|
|
| @@ -2608,10 +2646,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
|
| void FullCodeGenerator::CallIC(Handle<Code> code,
|
| TypeFeedbackId ast_id) {
|
| ic_total_count_++;
|
| - // All calls must have a predictable size in full-codegen code to ensure that
|
| - // the debugger can patch them correctly.
|
| - __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
|
| - NEVER_INLINE_TARGET_ADDRESS);
|
| + __ Call(code, RelocInfo::CODE_TARGET, ast_id);
|
| }
|
|
|
|
|
| @@ -2635,13 +2670,13 @@ void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
|
| } else {
|
| // Load the function from the receiver.
|
| DCHECK(callee->IsProperty());
|
| - __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
|
| + __ LoadP(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
|
| EmitNamedPropertyLoad(callee->AsProperty());
|
| PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
|
| // Push the target function under the receiver.
|
| - __ ldr(ip, MemOperand(sp, 0));
|
| + __ LoadP(ip, MemOperand(sp, 0));
|
| __ push(ip);
|
| - __ str(r0, MemOperand(sp, kPointerSize));
|
| + __ StoreP(r3, MemOperand(sp, kPointerSize));
|
| }
|
|
|
| EmitCall(expr, call_type);
|
| @@ -2658,15 +2693,15 @@ void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
|
|
|
| // Load the function from the receiver.
|
| DCHECK(callee->IsProperty());
|
| - __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
|
| - __ Move(LoadIC::NameRegister(), r0);
|
| + __ LoadP(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
|
| + __ Move(LoadIC::NameRegister(), r3);
|
| EmitKeyedPropertyLoad(callee->AsProperty());
|
| PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
|
|
|
| // Push the target function under the receiver.
|
| - __ ldr(ip, MemOperand(sp, 0));
|
| + __ LoadP(ip, MemOperand(sp, 0));
|
| __ push(ip);
|
| - __ str(r0, MemOperand(sp, kPointerSize));
|
| + __ StoreP(r3, MemOperand(sp, kPointerSize));
|
|
|
| EmitCall(expr, CallIC::METHOD);
|
| }
|
| @@ -2686,39 +2721,39 @@ void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
|
| SetSourcePosition(expr->position());
|
| Handle<Code> ic = CallIC::initialize_stub(
|
| isolate(), arg_count, call_type);
|
| - __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
|
| - __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
|
| + __ LoadSmiLiteral(r6, Smi::FromInt(expr->CallFeedbackSlot()));
|
| + __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
|
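| +  // r0 scratches LoadP in case the stack offset exceeds the 16-bit
|
| +  // displacement range for very large argument counts.
|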
| // Don't assign a type feedback id to the IC, since type feedback is provided
|
| // by the vector above.
|
| CallIC(ic);
|
|
|
| RecordJSReturnSite(expr);
|
| // Restore context register.
|
| - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| - context()->DropAndPlug(1, r0);
|
| + __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + context()->DropAndPlug(1, r3);
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
|
| - // r4: copy of the first argument or undefined if it doesn't exist.
|
| + // r7: copy of the first argument or undefined if it doesn't exist.
|
| if (arg_count > 0) {
|
| - __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
|
| + __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
|
| } else {
|
| - __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
|
| + __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
|
| }
|
|
|
| - // r3: the receiver of the enclosing function.
|
| + // r6: the receiver of the enclosing function.
|
| int receiver_offset = 2 + info_->scope()->num_parameters();
|
| - __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
|
| + __ LoadP(r6, MemOperand(fp, receiver_offset * kPointerSize), r0);
|
|
|
| - // r2: strict mode.
|
| - __ mov(r2, Operand(Smi::FromInt(strict_mode())));
|
| + // r5: strict mode.
|
| + __ LoadSmiLiteral(r5, Smi::FromInt(strict_mode()));
|
|
|
| - // r1: the start position of the scope the calls resides in.
|
| - __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
|
| +  // r4: the start position of the scope the call resides in.
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
|
|
|
| // Do the runtime call.
|
| - __ Push(r4, r3, r2, r1);
|
| + __ Push(r7, r6, r5, r4);
|
| __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
|
| }
|
|
|
| @@ -2744,8 +2779,8 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
|
|
| { PreservePositionScope pos_scope(masm()->positions_recorder());
|
| VisitForStackValue(callee);
|
| - __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
|
| - __ push(r2); // Reserved receiver slot.
|
| + __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
|
| + __ push(r5); // Reserved receiver slot.
|
|
|
| // Push the arguments.
|
| for (int i = 0; i < arg_count; i++) {
|
| @@ -2754,25 +2789,25 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
|
|
| // Push a copy of the function (found below the arguments) and
|
| // resolve eval.
|
| - __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
|
| - __ push(r1);
|
| + __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
|
| + __ push(r4);
|
| EmitResolvePossiblyDirectEval(arg_count);
|
|
|
| - // The runtime call returns a pair of values in r0 (function) and
|
| - // r1 (receiver). Touch up the stack with the right values.
|
| - __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
|
| - __ str(r1, MemOperand(sp, arg_count * kPointerSize));
|
| + // The runtime call returns a pair of values in r3 (function) and
|
| + // r4 (receiver). Touch up the stack with the right values.
|
| + __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
|
| + __ StoreP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
|
| }
|
|
|
| // Record source position for debugger.
|
| SetSourcePosition(expr->position());
|
| CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
|
| - __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
|
| + __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
|
| __ CallStub(&stub);
|
| RecordJSReturnSite(expr);
|
| // Restore context register.
|
| - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| - context()->DropAndPlug(1, r0);
|
| + __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + context()->DropAndPlug(1, r3);
|
| } else if (call_type == Call::GLOBAL_CALL) {
|
| EmitCallWithLoadIC(expr);
|
|
|
| @@ -2788,13 +2823,13 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
| }
|
|
|
| __ bind(&slow);
|
| - // Call the runtime to find the function to call (returned in r0)
|
| + // Call the runtime to find the function to call (returned in r3)
|
| -      // and the object holding it (returned in edx).
|
| +      // and the object holding it (returned in r4).
|
| - DCHECK(!context_register().is(r2));
|
| - __ mov(r2, Operand(proxy->name()));
|
| - __ Push(context_register(), r2);
|
| + DCHECK(!context_register().is(r5));
|
| + __ mov(r5, Operand(proxy->name()));
|
| + __ Push(context_register(), r5);
|
| __ CallRuntime(Runtime::kLoadLookupSlot, 2);
|
| - __ Push(r0, r1); // Function, receiver.
|
| + __ Push(r3, r4); // Function, receiver.
|
|
|
| // If fast case code has been generated, emit code to push the
|
| // function and receiver and have the slow path jump around this
|
| @@ -2804,11 +2839,11 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
| __ b(&call);
|
| __ bind(&done);
|
| // Push function.
|
| - __ push(r0);
|
| + __ push(r3);
|
| // The receiver is implicitly the global receiver. Indicate this
|
| // by passing the hole to the call function stub.
|
| - __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
|
| - __ push(r1);
|
| + __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
|
| + __ push(r4);
|
| __ bind(&call);
|
| }
|
|
|
| @@ -2831,8 +2866,8 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
| { PreservePositionScope scope(masm()->positions_recorder());
|
| VisitForStackValue(callee);
|
| }
|
| - __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
|
| - __ push(r1);
|
| + __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
|
| + __ push(r4);
|
| // Emit function call.
|
| EmitCall(expr);
|
| }
|
| @@ -2866,9 +2901,9 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
|
| // constructor invocation.
|
| SetSourcePosition(expr->position());
|
|
|
| - // Load function and argument count into r1 and r0.
|
| - __ mov(r0, Operand(arg_count));
|
| - __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
|
| + // Load function and argument count into r4 and r3.
|
| + __ mov(r3, Operand(arg_count));
|
| + __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
|
|
|
| // Record call targets in unoptimized code.
|
| if (FLAG_pretenuring_call_new) {
|
| @@ -2877,13 +2912,13 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
|
| expr->CallNewFeedbackSlot() + 1);
|
| }
|
|
|
| - __ Move(r2, FeedbackVector());
|
| - __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
|
| + __ Move(r5, FeedbackVector());
|
| + __ LoadSmiLiteral(r6, Smi::FromInt(expr->CallNewFeedbackSlot()));
|
|
|
| CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
|
| __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
|
| PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -2901,8 +2936,8 @@ void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
|
| &if_true, &if_false, &fall_through);
|
|
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| - __ SmiTst(r0);
|
| - Split(eq, if_true, if_false, fall_through);
|
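| +  // TestIfSmi presumably uses a record-form andi, so the smi bit test
|
| +  // lands in cr0; Split must name cr0 explicitly.
|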
| + __ TestIfSmi(r3, r0);
|
| + Split(eq, if_true, if_false, fall_through, cr0);
|
|
|
| context()->Plug(if_true, if_false);
|
| }
|
| @@ -2922,8 +2957,8 @@ void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
|
| &if_true, &if_false, &fall_through);
|
|
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| - __ NonNegativeSmiTst(r0);
|
| - Split(eq, if_true, if_false, fall_through);
|
| + __ TestIfPositiveSmi(r3, r0);
|
| + Split(eq, if_true, if_false, fall_through, cr0);
|
|
|
| context()->Plug(if_true, if_false);
|
| }
|
| @@ -2942,19 +2977,19 @@ void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ JumpIfSmi(r0, if_false);
|
| + __ JumpIfSmi(r3, if_false);
|
| __ LoadRoot(ip, Heap::kNullValueRootIndex);
|
| - __ cmp(r0, ip);
|
| - __ b(eq, if_true);
|
| - __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| + __ cmp(r3, ip);
|
| + __ beq(if_true);
|
| + __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
|
| // Undetectable objects behave like undefined when tested with typeof.
|
| - __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
|
| - __ tst(r1, Operand(1 << Map::kIsUndetectable));
|
| - __ b(ne, if_false);
|
| - __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
|
| - __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
|
| - __ b(lt, if_false);
|
| - __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
|
| + __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
|
| + __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
|
| + __ bne(if_false, cr0);
|
| + __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
|
| + __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
|
| + __ blt(if_false);
|
| + __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(le, if_true, if_false, fall_through);
|
|
|
| @@ -2975,8 +3010,8 @@ void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
|
| + __ JumpIfSmi(r3, if_false);
|
| + __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(ge, if_true, if_false, fall_through);
|
|
|
| @@ -2997,12 +3032,12 @@ void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ JumpIfSmi(r0, if_false);
|
| - __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| - __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
|
| - __ tst(r1, Operand(1 << Map::kIsUndetectable));
|
| + __ JumpIfSmi(r3, if_false);
|
| + __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
|
| + __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
|
| + __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| - Split(ne, if_true, if_false, fall_through);
|
| + Split(ne, if_true, if_false, fall_through, cr0);
|
|
|
| context()->Plug(if_true, if_false);
|
| }
|
| @@ -3022,19 +3057,19 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ AssertNotSmi(r0);
|
| + __ AssertNotSmi(r3);
|
|
|
| - __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| - __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
|
| - __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
|
| - __ b(ne, &skip_lookup);
|
| + __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
|
| + __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
|
| + __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
|
| + __ bne(&skip_lookup, cr0);
|
|
|
| // Check for fast case object. Generate false result for slow case object.
|
| - __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
|
| - __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
|
| + __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
|
| + __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
|
| __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
|
| - __ cmp(r2, ip);
|
| - __ b(eq, if_false);
|
| + __ cmp(r5, ip);
|
| + __ beq(if_false);
|
|
|
| // Look for valueOf name in the descriptor array, and indicate false if
|
| // found. Since we omit an enumeration index check, if it is added via a
|
| @@ -3042,54 +3077,56 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
|
| Label entry, loop, done;
|
|
|
| // Skip loop if no descriptors are valid.
|
| - __ NumberOfOwnDescriptors(r3, r1);
|
| - __ cmp(r3, Operand::Zero());
|
| - __ b(eq, &done);
|
| + __ NumberOfOwnDescriptors(r6, r4);
|
| + __ cmpi(r6, Operand::Zero());
|
| + __ beq(&done);
|
|
|
| - __ LoadInstanceDescriptors(r1, r4);
|
| - // r4: descriptor array.
|
| - // r3: valid entries in the descriptor array.
|
| + __ LoadInstanceDescriptors(r4, r7);
|
| + // r7: descriptor array.
|
| + // r6: valid entries in the descriptor array.
|
| __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
|
| - __ mul(r3, r3, ip);
|
| + __ Mul(r6, r6, ip);
|
| // Calculate location of the first key name.
|
| - __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
|
| + __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
|
| // Calculate the end of the descriptor array.
|
| - __ mov(r2, r4);
|
| - __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
|
| + __ mr(r5, r7);
|
| + __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
|
| + __ add(r5, r5, ip);
|
|
|
| // Loop through all the keys in the descriptor array. If one of these is the
|
| // string "valueOf" the result is false.
|
| // The use of ip to store the valueOf string assumes that it is not otherwise
|
| // used in the loop below.
|
| __ mov(ip, Operand(isolate()->factory()->value_of_string()));
|
| - __ jmp(&entry);
|
| + __ b(&entry);
|
| __ bind(&loop);
|
| - __ ldr(r3, MemOperand(r4, 0));
|
| - __ cmp(r3, ip);
|
| - __ b(eq, if_false);
|
| - __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
|
| + __ LoadP(r6, MemOperand(r7, 0));
|
| + __ cmp(r6, ip);
|
| + __ beq(if_false);
|
| + __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
|
| __ bind(&entry);
|
| - __ cmp(r4, Operand(r2));
|
| - __ b(ne, &loop);
|
| + __ cmp(r7, r5);
|
| + __ bne(&loop);
|
|
|
| __ bind(&done);
|
|
|
| // Set the bit in the map to indicate that there is no local valueOf field.
|
| - __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
|
| - __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
|
| - __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
|
| + __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
|
| + __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
|
| + __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
|
|
|
| __ bind(&skip_lookup);
|
|
|
| // If a valueOf property is not found on the object check that its
|
| // prototype is the un-modified String prototype. If not result is false.
|
| - __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
|
| - __ JumpIfSmi(r2, if_false);
|
| - __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
|
| - __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
|
| - __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
|
| - __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
|
| - __ cmp(r2, r3);
|
| + __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
|
| + __ JumpIfSmi(r5, if_false);
|
| + __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
|
| + __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
|
| + __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
|
| + __ LoadP(r6, ContextOperand(r6,
|
| + Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
|
| + __ cmp(r5, r6);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -3110,8 +3147,8 @@ void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
|
| + __ JumpIfSmi(r3, if_false);
|
| + __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -3132,11 +3169,22 @@ void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
|
| - __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
|
| - __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
|
| - __ cmp(r2, Operand(0x80000000));
|
| - __ cmp(r1, Operand(0x00000000), eq);
|
| + __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
|
| +#if V8_TARGET_ARCH_PPC64
|
| + __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
|
| + __ li(r5, Operand(1));
|
| + __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
|
| + __ cmp(r4, r5);
|
| +#else
|
| + __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
|
| + __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
|
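| +  // ARM used a predicated second compare; PPC branches around it, so the
|
| +  // final CR value means exponent == 0x80000000 && mantissa == 0.
|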
| + Label skip;
|
| + __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
|
| + __ cmp(r5, r0);
|
| + __ bne(&skip);
|
| + __ cmpi(r4, Operand::Zero());
|
| + __ bind(&skip);
|
| +#endif
|
|
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
| @@ -3158,8 +3206,8 @@ void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
|
| + __ JumpIfSmi(r3, if_false);
|
| + __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -3180,8 +3228,8 @@ void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
|
| + __ JumpIfSmi(r3, if_false);
|
| + __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -3201,16 +3249,20 @@ void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
|
| &if_true, &if_false, &fall_through);
|
|
|
| // Get the frame pointer for the calling frame.
|
| - __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
| + __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
|
|
| // Skip the arguments adaptor frame if it exists.
|
| - __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
|
| - __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| - __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
|
| + Label check_frame_marker;
|
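| +  // ARM loaded the caller FP conditionally; PPC makes the skip explicit.
|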
| + __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
|
| + __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
|
| + __ bne(&check_frame_marker);
|
| + __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
|
|
|
| // Check the marker in the calling frame.
|
| - __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
|
| - __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
|
| + __ bind(&check_frame_marker);
|
| + __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
|
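| +  // The assert (presumably) keeps the tagged marker within a 16-bit
|
| +  // signed immediate for CmpSmiLiteral on the 32-bit build.
|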
| + STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
|
| + __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -3233,8 +3285,8 @@ void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ pop(r1);
|
| - __ cmp(r0, r1);
|
| + __ pop(r4);
|
| + __ cmp(r3, r4);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -3247,32 +3299,34 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
|
| DCHECK(args->length() == 1);
|
|
|
| -  // ArgumentsAccessStub expects the key in edx and the formal
|
| -  // parameter count in r0.
|
| +  // ArgumentsAccessStub expects the key in r4 and the formal
|
| +  // parameter count in r3.
|
| VisitForAccumulatorValue(args->at(0));
|
| - __ mov(r1, r0);
|
| - __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
|
| + __ mr(r4, r3);
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
|
| ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
|
| __ CallStub(&stub);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
|
| DCHECK(expr->arguments()->length() == 0);
|
| -
|
| + Label exit;
|
| // Get the number of formal parameters.
|
| - __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
|
|
|
| // Check if the calling frame is an arguments adaptor frame.
|
| - __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
| - __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
|
| - __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| + __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
| + __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
|
| + __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
|
| + __ bne(&exit);
|
|
|
| // Arguments adaptor case: Read the arguments length from the
|
| // adaptor frame.
|
| - __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq);
|
| + __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
|
|
|
| - context()->Plug(r0);
|
| + __ bind(&exit);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3284,56 +3338,57 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
|
| VisitForAccumulatorValue(args->at(0));
|
|
|
| // If the object is a smi, we return null.
|
| - __ JumpIfSmi(r0, &null);
|
| + __ JumpIfSmi(r3, &null);
|
|
|
| // Check that the object is a JS object but take special care of JS
|
| // functions to make sure they have 'Function' as their class.
|
| // Assume that there are only two callable types, and one of them is at
|
| // either end of the type range for JS object types. Saves extra comparisons.
|
| STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
|
| - __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
|
| - // Map is now in r0.
|
| - __ b(lt, &null);
|
| + __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
|
| + // Map is now in r3.
|
| + __ blt(&null);
|
| STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
|
| FIRST_SPEC_OBJECT_TYPE + 1);
|
| - __ b(eq, &function);
|
| + __ beq(&function);
|
|
|
| - __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
|
| + __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
|
| STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
|
| LAST_SPEC_OBJECT_TYPE - 1);
|
| - __ b(eq, &function);
|
| + __ beq(&function);
|
| // Assume that there is no larger type.
|
| STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
|
|
|
| // Check if the constructor in the map is a JS function.
|
| - __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
|
| - __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
|
| - __ b(ne, &non_function_constructor);
|
| + __ LoadP(r3, FieldMemOperand(r3, Map::kConstructorOffset));
|
| + __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
|
| + __ bne(&non_function_constructor);
|
|
|
| - // r0 now contains the constructor function. Grab the
|
| + // r3 now contains the constructor function. Grab the
|
| // instance class name from there.
|
| - __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
|
| - __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
|
| + __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
|
| + __ LoadP(r3,
|
| + FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
|
| __ b(&done);
|
|
|
| // Functions have class 'Function'.
|
| __ bind(&function);
|
| - __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex);
|
| - __ jmp(&done);
|
| + __ LoadRoot(r3, Heap::kfunction_class_stringRootIndex);
|
| + __ b(&done);
|
|
|
| // Objects with a non-function constructor have class 'Object'.
|
| __ bind(&non_function_constructor);
|
| - __ LoadRoot(r0, Heap::kObject_stringRootIndex);
|
| - __ jmp(&done);
|
| + __ LoadRoot(r3, Heap::kObject_stringRootIndex);
|
| + __ b(&done);
|
|
|
| // Non-JS objects have class null.
|
| __ bind(&null);
|
| - __ LoadRoot(r0, Heap::kNullValueRootIndex);
|
| + __ LoadRoot(r3, Heap::kNullValueRootIndex);
|
|
|
| // All done.
|
| __ bind(&done);
|
|
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3346,7 +3401,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
|
| VisitForStackValue(args->at(1));
|
| VisitForStackValue(args->at(2));
|
| __ CallStub(&stub);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3360,7 +3415,7 @@ void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
|
| VisitForStackValue(args->at(2));
|
| VisitForStackValue(args->at(3));
|
| __ CallStub(&stub);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3371,13 +3426,14 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
|
|
|
| Label done;
|
| // If the object is a smi return the object.
|
| - __ JumpIfSmi(r0, &done);
|
| + __ JumpIfSmi(r3, &done);
|
| // If the object is not a value type, return the object.
|
| - __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
|
| - __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq);
|
| + __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
|
| + __ bne(&done);
|
| + __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
|
|
|
| __ bind(&done);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3390,41 +3446,42 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
|
| VisitForAccumulatorValue(args->at(0)); // Load the object.
|
|
|
| Label runtime, done, not_date_object;
|
| - Register object = r0;
|
| - Register result = r0;
|
| - Register scratch0 = r9;
|
| - Register scratch1 = r1;
|
| + Register object = r3;
|
| + Register result = r3;
|
| + Register scratch0 = r11;
|
| + Register scratch1 = r4;
|
|
|
| __ JumpIfSmi(object, ¬_date_object);
|
| __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
|
| - __ b(ne, ¬_date_object);
|
| + __ bne(¬_date_object);
|
|
|
| if (index->value() == 0) {
|
| - __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
|
| - __ jmp(&done);
|
| + __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
|
| + __ b(&done);
|
| } else {
|
| if (index->value() < JSDate::kFirstUncachedField) {
|
| ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
|
| __ mov(scratch1, Operand(stamp));
|
| - __ ldr(scratch1, MemOperand(scratch1));
|
| - __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
|
| + __ LoadP(scratch1, MemOperand(scratch1));
|
| + __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
|
| __ cmp(scratch1, scratch0);
|
| - __ b(ne, &runtime);
|
| - __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
|
| - kPointerSize * index->value()));
|
| - __ jmp(&done);
|
| + __ bne(&runtime);
|
| + __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset +
|
| + kPointerSize * index->value()),
|
| + scratch0);
|
| + __ b(&done);
|
| }
|
| __ bind(&runtime);
|
| __ PrepareCallCFunction(2, scratch1);
|
| - __ mov(r1, Operand(index));
|
| + __ LoadSmiLiteral(r4, index);
|
| __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
| }
|
|
|
| __ bind(¬_date_object);
|
| __ CallRuntime(Runtime::kThrowNotDateError, 0);
|
| __ bind(&done);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
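The fast path in EmitDateField hinges on the date cache stamp: an object's cached date fields are valid only while its stamp matches the isolate-wide stamp, which changes whenever the date cache is reset, so a mismatch falls through to the runtime call. Roughly, as a sketch (the struct and function names here are invented for illustration):

    // date_stamp.cc - standalone sketch, not part of the patch.
    #include <cassert>
    #include <cstdint>

    struct DateCache { int64_t stamp; };
    struct JSDateLike {
      int64_t cache_stamp;
      int64_t cached_field;  // e.g. a cached year/month/day component
    };

    // Cached fields are usable only while the stamps still agree.
    bool CachedFieldValid(const JSDateLike& date, const DateCache& cache) {
      return date.cache_stamp == cache.stamp;
    }

    int main() {
      DateCache cache{42};
      JSDateLike d{42, 2014};
      assert(CachedFieldValid(d, cache));
      cache.stamp++;  // e.g. the timezone changed; caches are invalidated
      assert(!CachedFieldValid(d, cache));
      return 0;
    }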
|
|
|
|
|
| @@ -3432,9 +3489,9 @@ void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
|
| ZoneList<Expression*>* args = expr->arguments();
|
| DCHECK_EQ(3, args->length());
|
|
|
| - Register string = r0;
|
| - Register index = r1;
|
| - Register value = r2;
|
| + Register string = r3;
|
| + Register index = r4;
|
| + Register value = r5;
|
|
|
| VisitForStackValue(args->at(1)); // index
|
| VisitForStackValue(args->at(2)); // value
|
| @@ -3442,21 +3499,22 @@ void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
|
| __ Pop(index, value);
|
|
|
| if (FLAG_debug_code) {
|
| - __ SmiTst(value);
|
| - __ Check(eq, kNonSmiValue);
|
| - __ SmiTst(index);
|
| - __ Check(eq, kNonSmiIndex);
|
| + __ TestIfSmi(value, r0);
|
| + __ Check(eq, kNonSmiValue, cr0);
|
| + __ TestIfSmi(index, r0);
|
| + __ Check(eq, kNonSmiIndex, cr0);
|
| __ SmiUntag(index, index);
|
| static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
|
| __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
|
| __ SmiTag(index, index);
|
| }
|
|
|
| - __ SmiUntag(value, value);
|
| - __ add(ip,
|
| - string,
|
| - Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| - __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
|
| + __ SmiUntag(value);
|
| + __ addi(ip,
|
| + string,
|
| + Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| + __ SmiToByteArrayOffset(r0, index);
|
| + __ stbx(value, MemOperand(ip, r0));
|
| context()->Plug(string);
|
| }
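SmiToByteArrayOffset (and its two-byte sibling in the next function) stands in for ARM's shifted-register addressing: it turns a smi index into a raw element byte offset with a single arithmetic shift whose width depends on the smi layout. A small sketch of the arithmetic as I read it, with the shift constants stated as assumptions (1 on 32-bit, 32 on 64-bit):

    // smi_offsets.cc - standalone sketch, not part of the patch.
    #include <cassert>
    #include <cstdint>

    #if UINTPTR_MAX == 0xffffffffffffffffULL
    const int kSmiShift = 32;  // assumed 64-bit layout: payload in the upper word
    #else
    const int kSmiShift = 1;   // assumed 32-bit layout: payload shifted left by 1
    #endif

    intptr_t SmiTag(intptr_t value) { return value << kSmiShift; }

    // Index scaled to a byte offset: just the untagged value.
    intptr_t SmiToByteArrayOffset(intptr_t smi) { return smi >> kSmiShift; }

    // Index scaled to a 16-bit-element offset: the value times two.
    intptr_t SmiToShortArrayOffset(intptr_t smi) { return smi >> (kSmiShift - 1); }

    int main() {
      intptr_t smi = SmiTag(7);
      assert(SmiToByteArrayOffset(smi) == 7);
      assert(SmiToShortArrayOffset(smi) == 14);
      return 0;
    }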
|
|
|
| @@ -3465,9 +3523,9 @@ void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
|
| ZoneList<Expression*>* args = expr->arguments();
|
| DCHECK_EQ(3, args->length());
|
|
|
| - Register string = r0;
|
| - Register index = r1;
|
| - Register value = r2;
|
| + Register string = r3;
|
| + Register index = r4;
|
| + Register value = r5;
|
|
|
| VisitForStackValue(args->at(1)); // index
|
| VisitForStackValue(args->at(2)); // value
|
| @@ -3475,27 +3533,26 @@ void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
|
| __ Pop(index, value);
|
|
|
| if (FLAG_debug_code) {
|
| - __ SmiTst(value);
|
| - __ Check(eq, kNonSmiValue);
|
| - __ SmiTst(index);
|
| - __ Check(eq, kNonSmiIndex);
|
| + __ TestIfSmi(value, r0);
|
| + __ Check(eq, kNonSmiValue, cr0);
|
| + __ TestIfSmi(index, r0);
|
| + __ Check(eq, kNonSmiIndex, cr0);
|
| __ SmiUntag(index, index);
|
| static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
|
| __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
|
| __ SmiTag(index, index);
|
| }
|
|
|
| - __ SmiUntag(value, value);
|
| - __ add(ip,
|
| - string,
|
| - Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
|
| - STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
|
| - __ strh(value, MemOperand(ip, index));
|
| + __ SmiUntag(value);
|
| + __ addi(ip,
|
| + string,
|
| + Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
|
| + __ SmiToShortArrayOffset(r0, index);
|
| + __ sthx(value, MemOperand(ip, r0));
|
| context()->Plug(string);
|
| }
|
|
|
|
|
| -
|
| void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
|
| // Load the arguments on the stack and call the runtime function.
|
| ZoneList<Expression*>* args = expr->arguments();
|
| @@ -3504,7 +3561,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
|
| VisitForStackValue(args->at(1));
|
| MathPowStub stub(isolate(), MathPowStub::ON_STACK);
|
| __ CallStub(&stub);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3513,38 +3570,38 @@ void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
|
| DCHECK(args->length() == 2);
|
| VisitForStackValue(args->at(0)); // Load the object.
|
| VisitForAccumulatorValue(args->at(1)); // Load the value.
|
| - __ pop(r1); // r0 = value. r1 = object.
|
| + __ pop(r4); // r3 = value. r4 = object.
|
|
|
| Label done;
|
| // If the object is a smi, return the value.
|
| - __ JumpIfSmi(r1, &done);
|
| + __ JumpIfSmi(r4, &done);
|
|
|
| // If the object is not a value type, return the value.
|
| - __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
|
| - __ b(ne, &done);
|
| + __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
|
| + __ bne(&done);
|
|
|
| // Store the value.
|
| - __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
|
| + __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
|
| // Update the write barrier. Save the value as it will be
|
| // overwritten by the write barrier code and is needed afterward.
|
| - __ mov(r2, r0);
|
| + __ mr(r5, r3);
|
| __ RecordWriteField(
|
| - r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
|
| + r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved, kDontSaveFPRegs);
|
|
|
| __ bind(&done);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
|
| ZoneList<Expression*>* args = expr->arguments();
|
| DCHECK_EQ(args->length(), 1);
|
| - // Load the argument into r0 and call the stub.
|
| + // Load the argument into r3 and call the stub.
|
| VisitForAccumulatorValue(args->at(0));
|
|
|
| NumberToStringStub stub(isolate());
|
| __ CallStub(&stub);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3554,15 +3611,15 @@ void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
|
| VisitForAccumulatorValue(args->at(0));
|
|
|
| Label done;
|
| - StringCharFromCodeGenerator generator(r0, r1);
|
| + StringCharFromCodeGenerator generator(r3, r4);
|
| generator.GenerateFast(masm_);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
|
|
| NopRuntimeCallHelper call_helper;
|
| generator.GenerateSlow(masm_, call_helper);
|
|
|
| __ bind(&done);
|
| - context()->Plug(r1);
|
| + context()->Plug(r4);
|
| }
|
|
|
|
|
| @@ -3572,9 +3629,9 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
|
| VisitForStackValue(args->at(0));
|
| VisitForAccumulatorValue(args->at(1));
|
|
|
| - Register object = r1;
|
| - Register index = r0;
|
| - Register result = r3;
|
| + Register object = r4;
|
| + Register index = r3;
|
| + Register result = r6;
|
|
|
| __ pop(object);
|
|
|
| @@ -3589,19 +3646,19 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
|
| &index_out_of_range,
|
| STRING_INDEX_IS_NUMBER);
|
| generator.GenerateFast(masm_);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
|
|
| __ bind(&index_out_of_range);
|
| // When the index is out of range, the spec requires us to return
|
| // NaN.
|
| __ LoadRoot(result, Heap::kNanValueRootIndex);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
|
|
| __ bind(&need_conversion);
|
| // Load the undefined value into the result register, which will
|
| // trigger conversion.
|
| __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
|
|
| NopRuntimeCallHelper call_helper;
|
| generator.GenerateSlow(masm_, call_helper);
|
| @@ -3617,10 +3674,10 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
|
| VisitForStackValue(args->at(0));
|
| VisitForAccumulatorValue(args->at(1));
|
|
|
| - Register object = r1;
|
| - Register index = r0;
|
| - Register scratch = r3;
|
| - Register result = r0;
|
| + Register object = r4;
|
| + Register index = r3;
|
| + Register scratch = r6;
|
| + Register result = r3;
|
|
|
| __ pop(object);
|
|
|
| @@ -3636,19 +3693,19 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
|
| &index_out_of_range,
|
| STRING_INDEX_IS_NUMBER);
|
| generator.GenerateFast(masm_);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
|
|
| __ bind(&index_out_of_range);
|
| // When the index is out of range, the spec requires us to return
|
| // the empty string.
|
| __ LoadRoot(result, Heap::kempty_stringRootIndex);
|
| - __ jmp(&done);
|
| + __ b(&done);
|
|
|
| __ bind(&need_conversion);
|
| // Move smi zero into the result register, which will trigger
|
| // conversion.
|
| - __ mov(result, Operand(Smi::FromInt(0)));
|
| - __ jmp(&done);
|
| + __ LoadSmiLiteral(result, Smi::FromInt(0));
|
| + __ b(&done);
|
|
|
| NopRuntimeCallHelper call_helper;
|
| generator.GenerateSlow(masm_, call_helper);
|
| @@ -3664,10 +3721,10 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
|
| VisitForStackValue(args->at(0));
|
| VisitForAccumulatorValue(args->at(1));
|
|
|
| - __ pop(r1);
|
| + __ pop(r4);
|
| StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
|
| __ CallStub(&stub);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3679,7 +3736,7 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
|
|
|
| StringCompareStub stub(isolate());
|
| __ CallStub(&stub);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3695,23 +3752,23 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
|
|
|
| Label runtime, done;
|
| // Check for non-function argument (including proxy).
|
| - __ JumpIfSmi(r0, &runtime);
|
| - __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
|
| - __ b(ne, &runtime);
|
| + __ JumpIfSmi(r3, &runtime);
|
| + __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
|
| + __ bne(&runtime);
|
|
|
| - // InvokeFunction requires the function in r1. Move it in there.
|
| - __ mov(r1, result_register());
|
| + // InvokeFunction requires the function in r4. Move it in there.
|
| + __ mr(r4, result_register());
|
| ParameterCount count(arg_count);
|
| - __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
|
| - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| - __ jmp(&done);
|
| + __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
|
| + __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + __ b(&done);
|
|
|
| __ bind(&runtime);
|
| - __ push(r0);
|
| + __ push(r3);
|
| __ CallRuntime(Runtime::kCall, args->length());
|
| __ bind(&done);
|
|
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3722,10 +3779,9 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
|
| VisitForStackValue(args->at(0));
|
| VisitForStackValue(args->at(1));
|
| VisitForAccumulatorValue(args->at(2));
|
| - __ pop(r1);
|
| - __ pop(r2);
|
| + __ Pop(r5, r4);
|
| __ CallStub(&stub);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3739,33 +3795,34 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
| isolate()->native_context()->jsfunction_result_caches());
|
| if (jsfunction_result_caches->length() <= cache_id) {
|
| __ Abort(kAttemptToUseUndefinedCache);
|
| - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
|
| - context()->Plug(r0);
|
| + __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
|
| + context()->Plug(r3);
|
| return;
|
| }
|
|
|
| VisitForAccumulatorValue(args->at(1));
|
|
|
| - Register key = r0;
|
| - Register cache = r1;
|
| - __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
|
| - __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
|
| - __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
|
| - __ ldr(cache,
|
| - FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
|
| -
|
| + Register key = r3;
|
| + Register cache = r4;
|
| + __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
|
| + __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
|
| + __ LoadP(cache,
|
| + ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
|
| + __ LoadP(cache,
|
| + FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
|
|
|
| Label done, not_found;
|
| - __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
|
| - // r2 now holds finger offset as a smi.
|
| - __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| - // r3 now points to the start of fixed array elements.
|
| - __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
|
| - // Note side effect of PreIndex: r3 now points to the key of the pair.
|
| - __ cmp(key, r2);
|
| - __ b(ne, ¬_found);
|
| -
|
| - __ ldr(r0, MemOperand(r3, kPointerSize));
|
| + __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
|
| + // r5 now holds finger offset as a smi.
|
| + __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| + // r6 now points to the start of fixed array elements.
|
| + __ SmiToPtrArrayOffset(r5, r5);
|
| + __ LoadPUX(r5, MemOperand(r6, r5));
|
| + // LoadPUX updates the base register, so r6 now points to the key of the pair.
|
| + __ cmp(key, r5);
|
| + __ bne(¬_found);
|
| +
|
| + __ LoadP(r3, MemOperand(r6, kPointerSize));
|
| __ b(&done);
|
|
|
| __ bind(¬_found);
|
| @@ -3774,7 +3831,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
| __ CallRuntime(Runtime::kGetFromCache, 2);
|
|
|
| __ bind(&done);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -3789,8 +3846,11 @@ void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
|
| - __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
|
| + __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
|
| + // The mask is wider than andi.'s 16-bit immediate; build it in ip (free here).
|
| + __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
|
| + __ and_(r0, r3, ip);
|
| + __ cmpi(r0, Operand::Zero());
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -3803,18 +3863,19 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
|
| DCHECK(args->length() == 1);
|
| VisitForAccumulatorValue(args->at(0));
|
|
|
| - __ AssertString(r0);
|
| + __ AssertString(r3);
|
|
|
| - __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
|
| - __ IndexFromHash(r0, r0);
|
| + __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
|
| + __ IndexFromHash(r3, r3);
|
|
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| - Label bailout, done, one_char_separator, long_separator, non_trivial_array,
|
| - not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
|
| + Label bailout, done, one_char_separator, long_separator,
|
| + non_trivial_array, not_size_one_array, loop,
|
| + empty_separator_loop, one_char_separator_loop,
|
| one_char_separator_loop_entry, long_separator_loop;
|
| ZoneList<Expression*>* args = expr->arguments();
|
| DCHECK(args->length() == 2);
|
| @@ -3822,49 +3883,52 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| VisitForAccumulatorValue(args->at(0));
|
|
|
| // All aliases of the same register have disjoint lifetimes.
|
| - Register array = r0;
|
| - Register elements = no_reg; // Will be r0.
|
| - Register result = no_reg; // Will be r0.
|
| - Register separator = r1;
|
| - Register array_length = r2;
|
| - Register result_pos = no_reg; // Will be r2
|
| - Register string_length = r3;
|
| - Register string = r4;
|
| - Register element = r5;
|
| - Register elements_end = r6;
|
| - Register scratch = r9;
|
| + Register array = r3;
|
| + Register elements = no_reg; // Will be r3.
|
| + Register result = no_reg; // Will be r3.
|
| + Register separator = r4;
|
| + Register array_length = r5;
|
| + Register result_pos = no_reg; // Will be r5
|
| + Register string_length = r6;
|
| + Register string = r7;
|
| + Register element = r8;
|
| + Register elements_end = r9;
|
| + Register scratch1 = r10;
|
| + Register scratch2 = r11;
|
|
|
| // Separator operand is on the stack.
|
| __ pop(separator);
|
|
|
| // Check that the array is a JSArray.
|
| __ JumpIfSmi(array, &bailout);
|
| - __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
|
| - __ b(ne, &bailout);
|
| + __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
|
| + __ bne(&bailout);
|
|
|
| // Check that the array has fast elements.
|
| - __ CheckFastElements(scratch, array_length, &bailout);
|
| + __ CheckFastElements(scratch1, scratch2, &bailout);
|
|
|
| // If the array has length zero, return the empty string.
|
| - __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
|
| - __ SmiUntag(array_length, SetCC);
|
| - __ b(ne, &non_trivial_array);
|
| - __ LoadRoot(r0, Heap::kempty_stringRootIndex);
|
| + __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
|
| + __ SmiUntag(array_length);
|
| + __ cmpi(array_length, Operand::Zero());
|
| + __ bne(&non_trivial_array);
|
| + __ LoadRoot(r3, Heap::kempty_stringRootIndex);
|
| __ b(&done);
|
|
|
| __ bind(&non_trivial_array);
|
|
|
| // Get the FixedArray containing array's elements.
|
| elements = array;
|
| - __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
|
| + __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
|
| array = no_reg; // End of array's live range.
|
|
|
| // Check that all array elements are sequential ASCII strings, and
|
| // accumulate the sum of their lengths, as a smi-encoded value.
|
| - __ mov(string_length, Operand::Zero());
|
| - __ add(element,
|
| - elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| - __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
|
| + __ li(string_length, Operand::Zero());
|
| + __ addi(element,
|
| + elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| + __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
|
| + __ add(elements_end, element, elements_end);
|
| // Loop condition: while (element < elements_end).
|
| // Live values in registers:
|
| // elements: Fixed array of strings.
|
| @@ -3874,25 +3938,29 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // element: Current array element.
|
| // elements_end: Array end.
|
| if (generate_debug_code_) {
|
| - __ cmp(array_length, Operand::Zero());
|
| + __ cmpi(array_length, Operand::Zero());
|
| __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
|
| }
|
| __ bind(&loop);
|
| - __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
|
| + __ LoadP(string, MemOperand(element));
|
| + __ addi(element, element, Operand(kPointerSize));
|
| __ JumpIfSmi(string, &bailout);
|
| - __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
|
| - __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
|
| - __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
|
| - __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
|
| - __ add(string_length, string_length, Operand(scratch), SetCC);
|
| - __ b(vs, &bailout);
|
| + __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
|
| + __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
|
| + __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
|
| + __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
|
| +
|
| + __ AddAndCheckForOverflow(string_length, string_length, scratch1,
|
| + scratch2, r0);
|
| + __ BranchOnOverflow(&bailout);
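PPC has no flags-setting add equivalent to ARM's add-with-SetCC followed by b(vs), so the port funnels the smi length accumulation through AddAndCheckForOverflow/BranchOnOverflow. A plausible shape for such a check, sketched as portable C++ (this is the classic xor trick, not a quote of the macro assembler):

    // add_overflow.cc - standalone sketch, not part of the patch.
    #include <cassert>
    #include <cstdint>

    // Signed overflow of left + right occurred iff both operands differ in
    // sign from the sum: (left ^ sum) & (right ^ sum) has its sign bit set.
    bool AddOverflows(int32_t left, int32_t right, int32_t* sum) {
      uint32_t s = static_cast<uint32_t>(left) + static_cast<uint32_t>(right);
      *sum = static_cast<int32_t>(s);  // wrapping add, no undefined behavior
      return (((static_cast<uint32_t>(left) ^ s) &
               (static_cast<uint32_t>(right) ^ s)) >> 31) != 0;
    }

    int main() {
      int32_t sum;
      assert(!AddOverflows(1, 2, &sum) && sum == 3);
      assert(AddOverflows(INT32_MAX, 1, &sum));
      assert(AddOverflows(INT32_MIN, -1, &sum));
      return 0;
    }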
|
| +
|
| __ cmp(element, elements_end);
|
| - __ b(lt, &loop);
|
| + __ blt(&loop);
|
|
|
| // If array_length is 1, return elements[0], a string.
|
| - __ cmp(array_length, Operand(1));
|
| - __ b(ne, ¬_size_one_array);
|
| - __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
|
| + __ cmpi(array_length, Operand(1));
|
| + __ bne(¬_size_one_array);
|
| + __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
|
| __ b(&done);
|
|
|
| __ bind(¬_size_one_array);
|
| @@ -3905,30 +3973,44 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
|
|
| // Check that the separator is a flat ASCII string.
|
| __ JumpIfSmi(separator, &bailout);
|
| - __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
|
| - __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
|
| - __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
|
| + __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
|
| + __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
|
| + __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
|
|
|
| // Add (separator length times array_length) - separator length to the
|
| - // string_length to get the length of the result string. array_length is not
|
| - // smi but the other values are, so the result is a smi
|
| - __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
|
| - __ sub(string_length, string_length, Operand(scratch));
|
| - __ smull(scratch, ip, array_length, scratch);
|
| + // string_length to get the length of the result string.
|
| + __ LoadP(scratch1,
|
| + FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
|
| + __ sub(string_length, string_length, scratch1);
|
| +#if V8_TARGET_ARCH_PPC64
|
| + __ SmiUntag(scratch1, scratch1);
|
| + __ Mul(scratch2, array_length, scratch1);
|
| // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
|
| // zero.
|
| - __ cmp(ip, Operand::Zero());
|
| - __ b(ne, &bailout);
|
| - __ tst(scratch, Operand(0x80000000));
|
| - __ b(ne, &bailout);
|
| - __ add(string_length, string_length, Operand(scratch), SetCC);
|
| - __ b(vs, &bailout);
|
| + __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
|
| + __ bne(&bailout, cr0);
|
| + __ SmiTag(scratch2, scratch2);
|
| +#else
|
| + // array_length is not a smi but the other values are, so the result is a smi.
|
| + __ mullw(scratch2, array_length, scratch1);
|
| + __ mulhw(ip, array_length, scratch1);
|
| + // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
|
| + // zero.
|
| + __ cmpi(ip, Operand::Zero());
|
| + __ bne(&bailout);
|
| + __ cmpwi(scratch2, Operand::Zero());
|
| + __ blt(&bailout);
|
| +#endif
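Both arms of the #if above enforce the same condition: the separator-length times array-length product must be a non-negative smi, i.e. the upper 33 bits of the 64-bit product must be zero (mulhw yields the high word, mullw the low). The same test in portable form, as a sketch:

    // smi_product.cc - standalone sketch, not part of the patch.
    #include <cassert>
    #include <cstdint>

    // The product fits a non-negative smi exactly when the upper 33 bits of
    // the 64-bit result are zero: high word (mulhw) == 0, low word (mullw) >= 0.
    bool SmiProductOk(int32_t count, int32_t length, int32_t* low) {
      int64_t product = static_cast<int64_t>(count) * length;
      int32_t hi = static_cast<int32_t>(product >> 32);
      int32_t lo = static_cast<int32_t>(product);
      *low = lo;
      return hi == 0 && lo >= 0;
    }

    int main() {
      int32_t lo;
      assert(SmiProductOk(1000, 1000, &lo) && lo == 1000000);
      assert(!SmiProductOk(1 << 16, 1 << 16, &lo));  // 2^32 sets the high word
      return 0;
    }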
|
| +
|
| + __ AddAndCheckForOverflow(string_length, string_length, scratch2,
|
| + scratch1, r0);
|
| + __ BranchOnOverflow(&bailout);
|
| __ SmiUntag(string_length);
|
|
|
| // Get first element in the array to free up the elements register to be used
|
| // for the result.
|
| - __ add(element,
|
| - elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| + __ addi(element,
|
| + elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| result = elements; // End of live range for elements.
|
| elements = no_reg;
|
| // Live values in registers:
|
| @@ -3938,25 +4020,27 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // array_length: Length of the array.
|
| __ AllocateAsciiString(result,
|
| string_length,
|
| - scratch,
|
| - string, // used as scratch
|
| - elements_end, // used as scratch
|
| + scratch1,
|
| + scratch2,
|
| + elements_end,
|
| &bailout);
|
| // Prepare for looping. Set up elements_end to end of the array. Set
|
| // result_pos to the position of the result where to write the first
|
| // character.
|
| - __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
|
| + __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
|
| + __ add(elements_end, element, elements_end);
|
| result_pos = array_length; // End of live range for array_length.
|
| array_length = no_reg;
|
| - __ add(result_pos,
|
| - result,
|
| - Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| + __ addi(result_pos,
|
| + result,
|
| + Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
|
|
| // Check the length of the separator.
|
| - __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
|
| - __ cmp(scratch, Operand(Smi::FromInt(1)));
|
| - __ b(eq, &one_char_separator);
|
| - __ b(gt, &long_separator);
|
| + __ LoadP(scratch1,
|
| + FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
|
| + __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
|
| + __ beq(&one_char_separator);
|
| + __ bgt(&long_separator);
|
|
|
| // Empty separator case
|
| __ bind(&empty_separator_loop);
|
| @@ -3966,25 +4050,25 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // elements_end: Array end.
|
|
|
| // Copy next array element to the result.
|
| - __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
|
| - __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
|
| + __ LoadP(string, MemOperand(element));
|
| + __ addi(element, element, Operand(kPointerSize));
|
| + __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
|
| __ SmiUntag(string_length);
|
| - __ add(string,
|
| - string,
|
| - Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| - __ CopyBytes(string, result_pos, string_length, scratch);
|
| + __ addi(string, string,
|
| + Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| + __ CopyBytes(string, result_pos, string_length, scratch1);
|
| __ cmp(element, elements_end);
|
| - __ b(lt, &empty_separator_loop); // End while (element < elements_end).
|
| - DCHECK(result.is(r0));
|
| + __ blt(&empty_separator_loop); // End while (element < elements_end).
|
| + DCHECK(result.is(r3));
|
| __ b(&done);
|
|
|
| // One-character separator case
|
| __ bind(&one_char_separator);
|
| // Replace separator with its ASCII character value.
|
| - __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
|
| + __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
|
| // Jump into the loop after the code that copies the separator, so the first
|
| // element is not preceded by a separator
|
| - __ jmp(&one_char_separator_loop_entry);
|
| + __ b(&one_char_separator_loop_entry);
|
|
|
| __ bind(&one_char_separator_loop);
|
| // Live values in registers:
|
| @@ -3994,20 +4078,21 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // separator: Single separator ASCII char (in lower byte).
|
|
|
| // Copy the separator character to the result.
|
| - __ strb(separator, MemOperand(result_pos, 1, PostIndex));
|
| + __ stb(separator, MemOperand(result_pos));
|
| + __ addi(result_pos, result_pos, Operand(1));
|
|
|
| // Copy next array element to the result.
|
| __ bind(&one_char_separator_loop_entry);
|
| - __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
|
| - __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
|
| + __ LoadP(string, MemOperand(element));
|
| + __ addi(element, element, Operand(kPointerSize));
|
| + __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
|
| __ SmiUntag(string_length);
|
| - __ add(string,
|
| - string,
|
| - Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| - __ CopyBytes(string, result_pos, string_length, scratch);
|
| - __ cmp(element, elements_end);
|
| - __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
|
| - DCHECK(result.is(r0));
|
| + __ addi(string, string,
|
| + Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| + __ CopyBytes(string, result_pos, string_length, scratch1);
|
| + __ cmpl(element, elements_end);
|
| + __ blt(&one_char_separator_loop); // End while (element < elements_end).
|
| + DCHECK(result.is(r3));
|
| __ b(&done);
|
|
|
| // Long separator case (separator is more than one character). Entry is at the
|
| @@ -4020,30 +4105,30 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // separator: Separator string.
|
|
|
| // Copy the separator to the result.
|
| - __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
|
| + __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
|
| __ SmiUntag(string_length);
|
| - __ add(string,
|
| - separator,
|
| - Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| - __ CopyBytes(string, result_pos, string_length, scratch);
|
| + __ addi(string,
|
| + separator,
|
| + Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| + __ CopyBytes(string, result_pos, string_length, scratch1);
|
|
|
| __ bind(&long_separator);
|
| - __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
|
| - __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
|
| + __ LoadP(string, MemOperand(element));
|
| + __ addi(element, element, Operand(kPointerSize));
|
| + __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
|
| __ SmiUntag(string_length);
|
| - __ add(string,
|
| - string,
|
| - Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| - __ CopyBytes(string, result_pos, string_length, scratch);
|
| - __ cmp(element, elements_end);
|
| - __ b(lt, &long_separator_loop); // End while (element < elements_end).
|
| - DCHECK(result.is(r0));
|
| + __ addi(string, string,
|
| + Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
|
| + __ CopyBytes(string, result_pos, string_length, scratch1);
|
| + __ cmpl(element, elements_end);
|
| + __ blt(&long_separator_loop); // End while (element < elements_end).
|
| + DCHECK(result.is(r3));
|
| __ b(&done);
|
|
|
| __ bind(&bailout);
|
| - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
|
| + __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
|
| __ bind(&done);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -4052,9 +4137,9 @@ void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
|
| ExternalReference debug_is_active =
|
| ExternalReference::debug_is_active_address(isolate());
|
| __ mov(ip, Operand(debug_is_active));
|
| - __ ldrb(r0, MemOperand(ip));
|
| - __ SmiTag(r0);
|
| - context()->Plug(r0);
|
| + __ lbz(r3, MemOperand(ip));
|
| + __ SmiTag(r3);
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| @@ -4073,8 +4158,9 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
|
| if (expr->is_jsruntime()) {
|
| // Push the builtins object as the receiver.
|
| Register receiver = LoadIC::ReceiverRegister();
|
| - __ ldr(receiver, GlobalObjectOperand());
|
| - __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
|
| + __ LoadP(receiver, GlobalObjectOperand());
|
| + __ LoadP(receiver,
|
| + FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
|
| __ push(receiver);
|
|
|
| // Load the function from the receiver.
|
| @@ -4084,13 +4170,13 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
|
| Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
|
| CallLoadIC(NOT_CONTEXTUAL);
|
| } else {
|
| - CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
|
| + CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
|
| }
|
|
|
| // Push the target function under the receiver.
|
| - __ ldr(ip, MemOperand(sp, 0));
|
| + __ LoadP(ip, MemOperand(sp, 0));
|
| __ push(ip);
|
| - __ str(r0, MemOperand(sp, kPointerSize));
|
| + __ StoreP(r3, MemOperand(sp, kPointerSize));
|
|
|
| // Push the arguments ("left-to-right").
|
| int arg_count = args->length();
|
| @@ -4101,13 +4187,13 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
|
| // Record source position of the IC call.
|
| SetSourcePosition(expr->position());
|
| CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
|
| - __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
|
| + __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
|
| __ CallStub(&stub);
|
|
|
| // Restore context register.
|
| - __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
|
|
| - context()->DropAndPlug(1, r0);
|
| + context()->DropAndPlug(1, r3);
|
| } else {
|
| // Push the arguments ("left-to-right").
|
| for (int i = 0; i < arg_count; i++) {
|
| @@ -4116,7 +4202,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
|
|
|
| // Call the C runtime function.
|
| __ CallRuntime(expr->function(), arg_count);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| }
|
|
|
| @@ -4131,22 +4217,22 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
|
| if (property != NULL) {
|
| VisitForStackValue(property->obj());
|
| VisitForStackValue(property->key());
|
| - __ mov(r1, Operand(Smi::FromInt(strict_mode())));
|
| - __ push(r1);
|
| + __ LoadSmiLiteral(r4, Smi::FromInt(strict_mode()));
|
| + __ push(r4);
|
| __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| } else if (proxy != NULL) {
|
| Variable* var = proxy->var();
|
| // Delete of an unqualified identifier is disallowed in strict mode
|
| // but "delete this" is allowed.
|
| DCHECK(strict_mode() == SLOPPY || var->is_this());
|
| if (var->IsUnallocated()) {
|
| - __ ldr(r2, GlobalObjectOperand());
|
| - __ mov(r1, Operand(var->name()));
|
| - __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
|
| - __ Push(r2, r1, r0);
|
| + __ LoadP(r5, GlobalObjectOperand());
|
| + __ mov(r4, Operand(var->name()));
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
|
| + __ Push(r5, r4, r3);
|
| __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| } else if (var->IsStackAllocated() || var->IsContextSlot()) {
|
| // Result of deleting non-global, non-dynamic variables is false.
|
| // The subexpression does not have side effects.
|
| @@ -4154,11 +4240,11 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
|
| } else {
|
| // Non-global variable. Call the runtime to try to delete from the
|
| // context where the variable was introduced.
|
| - DCHECK(!context_register().is(r2));
|
| - __ mov(r2, Operand(var->name()));
|
| - __ Push(context_register(), r2);
|
| + DCHECK(!context_register().is(r5));
|
| + __ mov(r5, Operand(var->name()));
|
| + __ Push(context_register(), r5);
|
| __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| } else {
|
| // Result of deleting non-property, non-variable reference is true.
|
| @@ -4203,13 +4289,13 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
|
| &materialize_true);
|
| __ bind(&materialize_true);
|
| PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
|
| - __ LoadRoot(r0, Heap::kTrueValueRootIndex);
|
| - if (context()->IsStackValue()) __ push(r0);
|
| - __ jmp(&done);
|
| + __ LoadRoot(r3, Heap::kTrueValueRootIndex);
|
| + if (context()->IsStackValue()) __ push(r3);
|
| + __ b(&done);
|
| __ bind(&materialize_false);
|
| PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
|
| - __ LoadRoot(r0, Heap::kFalseValueRootIndex);
|
| - if (context()->IsStackValue()) __ push(r0);
|
| + __ LoadRoot(r3, Heap::kFalseValueRootIndex);
|
| + if (context()->IsStackValue()) __ push(r3);
|
| __ bind(&done);
|
| }
|
| break;
|
| @@ -4221,7 +4307,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
|
| VisitForTypeofValue(expr->expression());
|
| }
|
| __ CallRuntime(Runtime::kTypeof, 1);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| break;
|
| }
|
|
|
| @@ -4257,19 +4343,19 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| } else {
|
| // Reserve space for result of postfix operation.
|
| if (expr->is_postfix() && !context()->IsEffect()) {
|
| - __ mov(ip, Operand(Smi::FromInt(0)));
|
| + __ LoadSmiLiteral(ip, Smi::FromInt(0));
|
| __ push(ip);
|
| }
|
| if (assign_type == NAMED_PROPERTY) {
|
| // Put the object both on the stack and in the register.
|
| VisitForStackValue(prop->obj());
|
| - __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
|
| + __ LoadP(LoadIC::ReceiverRegister(), MemOperand(sp, 0));
|
| EmitNamedPropertyLoad(prop);
|
| } else {
|
| VisitForStackValue(prop->obj());
|
| VisitForStackValue(prop->key());
|
| - __ ldr(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize));
|
| - __ ldr(LoadIC::NameRegister(), MemOperand(sp, 0));
|
| + __ LoadP(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize));
|
| + __ LoadP(LoadIC::NameRegister(), MemOperand(sp, 0));
|
| EmitKeyedPropertyLoad(prop);
|
| }
|
| }
|
| @@ -4289,7 +4375,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| int count_value = expr->op() == Token::INC ? 1 : -1;
|
| if (ShouldInlineSmiCase(expr->op())) {
|
| Label slow;
|
| - patch_site.EmitJumpIfNotSmi(r0, &slow);
|
| + patch_site.EmitJumpIfNotSmi(r3, &slow);
|
|
|
| // Save result for postfix expressions.
|
| if (expr->is_postfix()) {
|
| @@ -4299,23 +4385,26 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| // of the stack.
|
| switch (assign_type) {
|
| case VARIABLE:
|
| - __ push(r0);
|
| + __ push(r3);
|
| break;
|
| case NAMED_PROPERTY:
|
| - __ str(r0, MemOperand(sp, kPointerSize));
|
| + __ StoreP(r3, MemOperand(sp, kPointerSize));
|
| break;
|
| case KEYED_PROPERTY:
|
| - __ str(r0, MemOperand(sp, 2 * kPointerSize));
|
| + __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
|
| break;
|
| }
|
| }
|
| }
|
|
|
| - __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
|
| - __ b(vc, &done);
|
| + Register scratch1 = r4;
|
| + Register scratch2 = r5;
|
| + __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
|
| + __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
|
| + __ BranchOnNoOverflow(&done);
|
| // Call stub. Undo operation first.
|
| - __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
|
| - __ jmp(&stub_call);
|
| + __ sub(r3, r3, scratch1);
|
| + __ b(&stub_call);
|
| __ bind(&slow);
|
| }
|
| ToNumberStub convert_stub(isolate());
|
| @@ -4329,22 +4418,21 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| // of the stack.
|
| switch (assign_type) {
|
| case VARIABLE:
|
| - __ push(r0);
|
| + __ push(r3);
|
| break;
|
| case NAMED_PROPERTY:
|
| - __ str(r0, MemOperand(sp, kPointerSize));
|
| + __ StoreP(r3, MemOperand(sp, kPointerSize));
|
| break;
|
| case KEYED_PROPERTY:
|
| - __ str(r0, MemOperand(sp, 2 * kPointerSize));
|
| + __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
|
| break;
|
| }
|
| }
|
| }
|
|
|
| -
|
| __ bind(&stub_call);
|
| - __ mov(r1, r0);
|
| - __ mov(r0, Operand(Smi::FromInt(count_value)));
|
| + __ mr(r4, r3);
|
| + __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
|
|
|
| // Record position before stub call.
|
| SetSourcePosition(expr->position());
|
| @@ -4354,7 +4442,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| patch_site.EmitPatchInfo();
|
| __ bind(&done);
|
|
|
| - // Store the value returned in r0.
|
| + // Store the value returned in r3.
|
| switch (assign_type) {
|
| case VARIABLE:
|
| if (expr->is_postfix()) {
|
| @@ -4362,7 +4450,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
|
| Token::ASSIGN);
|
| PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
|
| - context.Plug(r0);
|
| + context.Plug(r3);
|
| }
|
| // For all contexts except EffectContext, we have the result on
|
| // top of the stack.
|
| @@ -4373,7 +4461,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
|
| Token::ASSIGN);
|
| PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| break;
|
| case NAMED_PROPERTY: {
|
| @@ -4387,7 +4475,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| context()->PlugTOS();
|
| }
|
| } else {
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| break;
|
| }
|
| @@ -4403,7 +4491,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
| context()->PlugTOS();
|
| }
|
| } else {
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| }
|
| break;
|
| }
|
| @@ -4417,7 +4505,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
|
| VariableProxy* proxy = expr->AsVariableProxy();
|
| if (proxy != NULL && proxy->var()->IsUnallocated()) {
|
| Comment cmnt(masm_, "[ Global variable");
|
| - __ ldr(LoadIC::ReceiverRegister(), GlobalObjectOperand());
|
| + __ LoadP(LoadIC::ReceiverRegister(), GlobalObjectOperand());
|
| __ mov(LoadIC::NameRegister(), Operand(proxy->name()));
|
| if (FLAG_vector_ics) {
|
| __ mov(LoadIC::SlotRegister(),
|
| @@ -4427,7 +4515,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
|
| // error.
|
| CallLoadIC(NOT_CONTEXTUAL);
|
| PrepareForBailout(expr, TOS_REG);
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
|
| Comment cmnt(masm_, "[ Lookup slot");
|
| Label done, slow;
|
| @@ -4437,13 +4525,13 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
|
| EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
|
|
|
| __ bind(&slow);
|
| - __ mov(r0, Operand(proxy->name()));
|
| - __ Push(cp, r0);
|
| + __ mov(r3, Operand(proxy->name()));
|
| + __ Push(cp, r3);
|
| __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
|
| PrepareForBailout(expr, TOS_REG);
|
| __ bind(&done);
|
|
|
| - context()->Plug(r0);
|
| + context()->Plug(r3);
|
| } else {
|
| // This expression cannot throw a reference error at the top level.
|
| VisitInDuplicateContext(expr);
|
| @@ -4468,60 +4556,61 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
|
|
|
| Factory* factory = isolate()->factory();
|
| if (String::Equals(check, factory->number_string())) {
|
| - __ JumpIfSmi(r0, if_true);
|
| - __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| + __ JumpIfSmi(r3, if_true);
|
| + __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
|
| __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
|
| - __ cmp(r0, ip);
|
| + __ cmp(r3, ip);
|
| Split(eq, if_true, if_false, fall_through);
|
| } else if (String::Equals(check, factory->string_string())) {
|
| - __ JumpIfSmi(r0, if_false);
|
| + __ JumpIfSmi(r3, if_false);
|
| // Check for undetectable objects => false.
|
| - __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
|
| - __ b(ge, if_false);
|
| - __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
|
| - __ tst(r1, Operand(1 << Map::kIsUndetectable));
|
| - Split(eq, if_true, if_false, fall_through);
|
| + __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
|
| + __ bge(if_false);
|
| + __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
|
| + STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
|
| + __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
|
| + Split(eq, if_true, if_false, fall_through, cr0);
|
| } else if (String::Equals(check, factory->symbol_string())) {
|
| - __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
|
| + __ JumpIfSmi(r3, if_false);
|
| + __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
|
| Split(eq, if_true, if_false, fall_through);
|
| } else if (String::Equals(check, factory->boolean_string())) {
|
| - __ CompareRoot(r0, Heap::kTrueValueRootIndex);
|
| - __ b(eq, if_true);
|
| - __ CompareRoot(r0, Heap::kFalseValueRootIndex);
|
| + __ CompareRoot(r3, Heap::kTrueValueRootIndex);
|
| + __ beq(if_true);
|
| + __ CompareRoot(r3, Heap::kFalseValueRootIndex);
|
| Split(eq, if_true, if_false, fall_through);
|
| } else if (String::Equals(check, factory->undefined_string())) {
|
| - __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
|
| - __ b(eq, if_true);
|
| - __ JumpIfSmi(r0, if_false);
|
| + __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
|
| + __ beq(if_true);
|
| + __ JumpIfSmi(r3, if_false);
|
| // Check for undetectable objects => true.
|
| - __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| - __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
|
| - __ tst(r1, Operand(1 << Map::kIsUndetectable));
|
| - Split(ne, if_true, if_false, fall_through);
|
| + __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
|
| + __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
|
| + __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
|
| + Split(ne, if_true, if_false, fall_through, cr0);
|
|
|
| } else if (String::Equals(check, factory->function_string())) {
|
| - __ JumpIfSmi(r0, if_false);
|
| + __ JumpIfSmi(r3, if_false);
|
| STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
|
| - __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
|
| - __ b(eq, if_true);
|
| - __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
|
| + __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
|
| + __ beq(if_true);
|
| + __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
|
| Split(eq, if_true, if_false, fall_through);
|
| } else if (String::Equals(check, factory->object_string())) {
|
| - __ JumpIfSmi(r0, if_false);
|
| - __ CompareRoot(r0, Heap::kNullValueRootIndex);
|
| - __ b(eq, if_true);
|
| + __ JumpIfSmi(r3, if_false);
|
| + __ CompareRoot(r3, Heap::kNullValueRootIndex);
|
| + __ beq(if_true);
|
| // Check for JS objects => true.
|
| - __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
|
| - __ b(lt, if_false);
|
| - __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
|
| - __ b(gt, if_false);
|
| + __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
|
| + __ blt(if_false);
|
| + __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
|
| + __ bgt(if_false);
|
| // Check for undetectable objects => false.
|
| - __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
|
| - __ tst(r1, Operand(1 << Map::kIsUndetectable));
|
| - Split(eq, if_true, if_false, fall_through);
|
| + __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
|
| + __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
|
| + Split(eq, if_true, if_false, fall_through, cr0);
|
| } else {
|
| - if (if_false != fall_through) __ jmp(if_false);
|
| + if (if_false != fall_through) __ b(if_false);
|
| }
|
| context()->Plug(if_true, if_false);
|
| }
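The andi-based undetectable checks in this function route their result through cr0 because andi. is a recording instruction that always updates CR0, and andi. only encodes a 16-bit unsigned immediate; that is what the STATIC_ASSERT on kIsUndetectable pins down. A tiny sketch of the mask-width constraint (the bit position is illustrative, not the real Map layout):

    // andi_mask.cc - standalone sketch, not part of the patch.
    #include <cassert>
    #include <cstdint>

    // andi. encodes a 16-bit unsigned immediate, so any mask tested this way
    // must fit in the low 16 bits.
    bool TestBit(uint8_t bit_field, uint32_t mask) {
      assert(mask <= 0xffffu);          // immediate field limit
      return (bit_field & mask) != 0;   // on PPC the result lands in CR0
    }

    int main() {
      const uint32_t kIsUndetectableMask = 1u << 4;  // illustrative bit position
      assert(TestBit(0x10, kIsUndetectableMask));
      assert(!TestBit(0x00, kIsUndetectableMask));
      return 0;
    }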
|
| @@ -4552,7 +4641,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
|
| __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
|
| PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
|
| __ LoadRoot(ip, Heap::kTrueValueRootIndex);
|
| - __ cmp(r0, ip);
|
| + __ cmp(r3, ip);
|
| Split(eq, if_true, if_false, fall_through);
|
| break;
|
|
|
| @@ -4562,7 +4651,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
|
| __ CallStub(&stub);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| // The stub returns 0 for true.
|
| - __ tst(r0, r0);
|
| + __ cmpi(r3, Operand::Zero());
|
| Split(eq, if_true, if_false, fall_through);
|
| break;
|
| }
|
| @@ -4570,15 +4659,15 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
|
| default: {
|
| VisitForAccumulatorValue(expr->right());
|
| Condition cond = CompareIC::ComputeCondition(op);
|
| - __ pop(r1);
|
| + __ pop(r4);
|
|
|
| bool inline_smi_code = ShouldInlineSmiCase(op);
|
| JumpPatchSite patch_site(masm_);
|
| if (inline_smi_code) {
|
| Label slow_case;
|
| - __ orr(r2, r0, Operand(r1));
|
| - patch_site.EmitJumpIfNotSmi(r2, &slow_case);
|
| - __ cmp(r1, r0);
|
| + __ orx(r5, r3, r4);
|
| + patch_site.EmitJumpIfNotSmi(r5, &slow_case);
|
| + __ cmp(r4, r3);
|
| Split(cond, if_true, if_false, NULL);
|
| __ bind(&slow_case);
|
| }
|
| @@ -4589,7 +4678,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
|
| CallIC(ic, expr->CompareOperationFeedbackId());
|
| patch_site.EmitPatchInfo();
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| - __ cmp(r0, Operand::Zero());
|
| + __ cmpi(r3, Operand::Zero());
|
| Split(cond, if_true, if_false, fall_through);
|
| }
|
| }
|
| @@ -4614,15 +4703,15 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| if (expr->op() == Token::EQ_STRICT) {
|
| Heap::RootListIndex nil_value = nil == kNullValue ?
|
| - Heap::kNullValueRootIndex :
|
| - Heap::kUndefinedValueRootIndex;
|
| - __ LoadRoot(r1, nil_value);
|
| - __ cmp(r0, r1);
|
| + Heap::kNullValueRootIndex :
|
| + Heap::kUndefinedValueRootIndex;
|
| + __ LoadRoot(r4, nil_value);
|
| + __ cmp(r3, r4);
|
| Split(eq, if_true, if_false, fall_through);
|
| } else {
|
| Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
|
| CallIC(ic, expr->CompareOperationFeedbackId());
|
| - __ cmp(r0, Operand(0));
|
| + __ cmpi(r3, Operand::Zero());
|
| Split(ne, if_true, if_false, fall_through);
|
| }
|
| context()->Plug(if_true, if_false);
|
| @@ -4630,13 +4719,13 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
|
|
|
|
|
| void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
|
| - __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| - context()->Plug(r0);
|
| + __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| + context()->Plug(r3);
|
| }
|
|
|
|
|
| Register FullCodeGenerator::result_register() {
|
| - return r0;
|
| + return r3;
|
| }
|
|
|
|
|
| @@ -4646,13 +4735,13 @@ Register FullCodeGenerator::context_register() {
|
|
|
|
|
| void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
|
| - DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
|
| - __ str(value, MemOperand(fp, frame_offset));
|
| + DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
|
| + __ StoreP(value, MemOperand(fp, frame_offset), r0);
|
| }
|
|
|
|
|
| void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
|
| - __ ldr(dst, ContextOperand(cp, context_index));
|
| + __ LoadP(dst, ContextOperand(cp, context_index), r0);
|
| }
|
|
|
|
|
| @@ -4664,15 +4753,15 @@ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
|
| // as their closure, not the anonymous closure containing the global
|
| // code. Pass a smi sentinel and let the runtime look up the empty
|
| // function.
|
| - __ mov(ip, Operand(Smi::FromInt(0)));
|
| + __ LoadSmiLiteral(ip, Smi::FromInt(0));
|
| } else if (declaration_scope->is_eval_scope()) {
|
| // Contexts created by a call to eval have the same closure as the
|
| // context calling eval, not the anonymous closure containing the eval
|
| // code. Fetch it from the context.
|
| - __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
|
| + __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
|
| } else {
|
| DCHECK(declaration_scope->is_function_scope());
|
| - __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| + __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| }
|
| __ push(ip);
|
| }
|
| @@ -4682,69 +4771,72 @@ void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
|
| // Non-local control flow support.
|
|
|
| void FullCodeGenerator::EnterFinallyBlock() {
|
| - DCHECK(!result_register().is(r1));
|
| + DCHECK(!result_register().is(r4));
|
| // Store result register while executing finally block.
|
| __ push(result_register());
|
| // Cook return address in link register to stack (smi encoded Code* delta)
|
| - __ sub(r1, lr, Operand(masm_->CodeObject()));
|
| - __ SmiTag(r1);
|
| + __ mflr(r4);
|
| + __ mov(ip, Operand(masm_->CodeObject()));
|
| + __ sub(r4, r4, ip);
|
| + __ SmiTag(r4);
|
|
|
|   // Store the cooked return address while executing the finally block.
|
| - __ push(r1);
|
| + __ push(r4);
|
|
|
| // Store pending message while executing finally block.
|
| ExternalReference pending_message_obj =
|
| ExternalReference::address_of_pending_message_obj(isolate());
|
| __ mov(ip, Operand(pending_message_obj));
|
| - __ ldr(r1, MemOperand(ip));
|
| - __ push(r1);
|
| + __ LoadP(r4, MemOperand(ip));
|
| + __ push(r4);
|
|
|
| ExternalReference has_pending_message =
|
| ExternalReference::address_of_has_pending_message(isolate());
|
| __ mov(ip, Operand(has_pending_message));
|
| - STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
|
| - __ ldrb(r1, MemOperand(ip));
|
| - __ SmiTag(r1);
|
| - __ push(r1);
|
| + __ lbz(r4, MemOperand(ip));
|
| + __ SmiTag(r4);
|
| + __ push(r4);
|
|
|
| ExternalReference pending_message_script =
|
| ExternalReference::address_of_pending_message_script(isolate());
|
| __ mov(ip, Operand(pending_message_script));
|
| - __ ldr(r1, MemOperand(ip));
|
| - __ push(r1);
|
| + __ LoadP(r4, MemOperand(ip));
|
| + __ push(r4);
|
| }
|
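`EnterFinallyBlock` saves five words in a fixed order: the result register, the cooked return-address delta, and the three pending-message values. `ExitFinallyBlock` below must pop them in exactly the reverse order, which is easy to get wrong while renaming registers; a toy model of the invariant:

    #include <cassert>
    #include <vector>

    // Illustrative model, not V8 code: the finally-block save area is a
    // plain LIFO, so restore order is the mirror image of save order.
    enum Slot { RESULT, COOKED_RETURN, PENDING_MSG_OBJ, HAS_PENDING_MSG,
                PENDING_MSG_SCRIPT };

    int main() {
      std::vector<Slot> stack;
      for (Slot s : {RESULT, COOKED_RETURN, PENDING_MSG_OBJ, HAS_PENDING_MSG,
                     PENDING_MSG_SCRIPT})
        stack.push_back(s);                        // EnterFinallyBlock order
      const Slot restore_order[] = {PENDING_MSG_SCRIPT, HAS_PENDING_MSG,
                                    PENDING_MSG_OBJ, COOKED_RETURN, RESULT};
      for (Slot s : restore_order) {               // ExitFinallyBlock order
        assert(stack.back() == s);
        stack.pop_back();
      }
      return 0;
    }
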
|
|
|
|
| void FullCodeGenerator::ExitFinallyBlock() {
|
| - DCHECK(!result_register().is(r1));
|
| + DCHECK(!result_register().is(r4));
|
| // Restore pending message from stack.
|
| - __ pop(r1);
|
| + __ pop(r4);
|
| ExternalReference pending_message_script =
|
| ExternalReference::address_of_pending_message_script(isolate());
|
| __ mov(ip, Operand(pending_message_script));
|
| - __ str(r1, MemOperand(ip));
|
| + __ StoreP(r4, MemOperand(ip));
|
|
|
| - __ pop(r1);
|
| - __ SmiUntag(r1);
|
| + __ pop(r4);
|
| + __ SmiUntag(r4);
|
| ExternalReference has_pending_message =
|
| ExternalReference::address_of_has_pending_message(isolate());
|
| __ mov(ip, Operand(has_pending_message));
|
| - STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof)
|
| - __ strb(r1, MemOperand(ip));
|
| + __ stb(r4, MemOperand(ip));
|
|
|
| - __ pop(r1);
|
| + __ pop(r4);
|
| ExternalReference pending_message_obj =
|
| ExternalReference::address_of_pending_message_obj(isolate());
|
| __ mov(ip, Operand(pending_message_obj));
|
| - __ str(r1, MemOperand(ip));
|
| + __ StoreP(r4, MemOperand(ip));
|
|
|
|   // Pop the cooked return address from the stack.
|
| - __ pop(r1);
|
| + __ pop(r4);
|
|
|
|   // Restore the result register, then uncook the return address and return.
|
| __ pop(result_register());
|
| - __ SmiUntag(r1);
|
| - __ add(pc, r1, Operand(masm_->CodeObject()));
|
| + __ SmiUntag(r4);
|
| + __ mov(ip, Operand(masm_->CodeObject()));
|
| + __ add(ip, ip, r4);
|
| + __ mtctr(ip);
|
| + __ bctr();
|
| }
|
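Two details of the exit path are worth noting. First, the "cooked" value is a smi-tagged delta from the code object's start, so no raw instruction pointer ever sits on the stack where the GC could misread it. Second, PowerPC has no architecturally writable program counter, so ARM's `add pc, r1, Operand(masm_->CodeObject())` return becomes an add into ip followed by `mtctr`/`bctr`. A sketch of the cook/uncook arithmetic, using a one-bit smi tag for brevity (64-bit V8 actually shifts further):

    #include <cassert>
    #include <cstdint>

    // Illustrative, not V8 code: store the return address as a tagged delta
    // and reconstruct it on exit.
    static intptr_t Cook(intptr_t return_address, intptr_t code_start) {
      return (return_address - code_start) << 1;  // smi tag: low bit zero
    }

    static intptr_t Uncook(intptr_t cooked, intptr_t code_start) {
      return (cooked >> 1) + code_start;          // SmiUntag, then re-base
    }

    int main() {
      const intptr_t code_start = 0x10000, ret = 0x10234;
      assert(Uncook(Cook(ret, code_start), code_start) == ret);
      return 0;
    }
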
|
|
|
|
| @@ -4764,94 +4856,65 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
|
| __ Drop(*stack_depth); // Down to the handler block.
|
| if (*context_length > 0) {
|
| // Restore the context to its dedicated register and the stack.
|
| - __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
|
| - __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| + __ LoadP(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
|
| + __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| }
|
| __ PopTryHandler();
|
| - __ bl(finally_entry_);
|
| + __ b(finally_entry_, SetLK);
|
|
|
| *stack_depth = 0;
|
| *context_length = 0;
|
| return previous_;
|
| }
|
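`__ b(finally_entry_, SetLK)` is the PowerPC counterpart of ARM's `__ bl(finally_entry_)`: every PPC branch encoding carries an LK bit, and when it is set the address of the next sequential instruction is deposited in the link register. A toy model of that semantics:

    #include <cstdint>
    #include <cstdio>

    // Illustrative, not V8 code: the LK bit turns any branch into a call.
    struct Cpu { uint64_t pc; uint64_t lr; };

    static void Branch(Cpu* cpu, uint64_t target, bool set_lk) {
      if (set_lk) cpu->lr = cpu->pc + 4;  // PPC instructions are 4 bytes
      cpu->pc = target;
    }

    int main() {
      Cpu cpu = {0x1000, 0};
      Branch(&cpu, 0x2000, /*set_lk=*/true);
      printf("pc=%llx lr=%llx\n", (unsigned long long)cpu.pc,
             (unsigned long long)cpu.lr);  // pc=2000 lr=1004
      return 0;
    }
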
|
|
| -
|
| #undef __
|
|
|
|
|
| -static Address GetInterruptImmediateLoadAddress(Address pc) {
|
| - Address load_address = pc - 2 * Assembler::kInstrSize;
|
| - if (!FLAG_enable_ool_constant_pool) {
|
| - DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
|
| - } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
|
| - // This is an extended constant pool lookup.
|
| - load_address -= 2 * Assembler::kInstrSize;
|
| - DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
|
| - DCHECK(Assembler::IsMovT(
|
| - Memory::int32_at(load_address + Assembler::kInstrSize)));
|
| - } else if (Assembler::IsMovT(Memory::int32_at(load_address))) {
|
| - // This is a movw_movt immediate load.
|
| - load_address -= Assembler::kInstrSize;
|
| - DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
|
| - } else {
|
| - // This is a small constant pool lookup.
|
| - DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
|
| - }
|
| - return load_address;
|
| -}
|
| -
|
| -
|
| void BackEdgeTable::PatchAt(Code* unoptimized_code,
|
| Address pc,
|
| BackEdgeState target_state,
|
| Code* replacement_code) {
|
| - Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
|
| - Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
|
| - CodePatcher patcher(branch_address, 1);
|
| + Address mov_address = Assembler::target_address_from_return_address(pc);
|
| + Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
|
| + CodePatcher patcher(cmp_address, 1);
|
| +
|
| switch (target_state) {
|
| case INTERRUPT:
|
| {
|
| // <decrement profiling counter>
|
| - // bpl ok
|
| - // ; load interrupt stub address into ip - either of:
|
| - // ; <small cp load> | <extended cp load> | <immediate load>
|
| - // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
|
| - // | movt ip, #imm> | movw ip, #imm
|
| - // | ldr ip, [pp, ip]
|
| - // blx ip
|
| + // cmpi r6, 0
|
| + // bge <ok> ;; not changed
|
| + // mov r12, <interrupt stub address>
|
| + // mtlr r12
|
| + // blrl
|
| // <reset profiling counter>
|
| // ok-label
|
| -
|
| - // Calculate branch offset to the ok-label - this is the difference
|
| - // between the branch address and |pc| (which points at <blx ip>) plus
|
| - // kProfileCounterResetSequence instructions
|
| - int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
|
| - kProfileCounterResetSequenceLength;
|
| - patcher.masm()->b(branch_offset, pl);
|
| + patcher.masm()->cmpi(r6, Operand::Zero());
|
| break;
|
| }
|
| case ON_STACK_REPLACEMENT:
|
| case OSR_AFTER_STACK_CHECK:
|
| // <decrement profiling counter>
|
| - // mov r0, r0 (NOP)
|
| - // ; load on-stack replacement address into ip - either of:
|
| - // ; <small cp load> | <extended cp load> | <immediate load>
|
| - // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm
|
| - // | movt ip, #imm> | movw ip, #imm
|
| - // | ldr ip, [pp, ip]
|
| - // blx ip
|
| + // crset
|
| + // bge <ok> ;; not changed
|
| + // mov r12, <on-stack replacement address>
|
| + // mtlr r12
|
| + // blrl
|
| // <reset profiling counter>
|
| - // ok-label
|
| - patcher.masm()->nop();
|
| + // ok-label ----- pc_after points here
|
| +
|
| + // Set the LT bit such that bge is a NOP
|
| + patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
|
| break;
|
| }
|
|
|
| - // Replace the call address.
|
| - Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
|
| - replacement_code->entry());
|
| + // Replace the stack check address in the mov sequence with the
|
| + // entry address of the replacement code.
|
| + Assembler::set_target_address_at(mov_address, unoptimized_code,
|
| + replacement_code->entry());
|
|
|
| unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
|
| - unoptimized_code, pc_immediate_load_address, replacement_code);
|
| + unoptimized_code, mov_address, replacement_code);
|
| }
|
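The patch toggles a single instruction at cmp_address. In the INTERRUPT state, `cmpi r6, 0` compares the profiling counter, so the following `bge ok` skips the stub call until the counter goes negative; in the OSR states, `crset` forces the LT bit on (apparently in cr7, given the `encode_crbit(cr7, CR_LT)` encoding), so the `bge` can never be taken and every back edge falls through into the stub. The call target itself is swapped separately via `set_target_address_at`. A behavioral model of the branch decision:

    #include <cstdio>

    // Illustrative model, not V8 code: whether a back edge enters the stub.
    enum State { INTERRUPT, ON_STACK_REPLACEMENT };

    static bool EntersStub(State s, int profiling_counter) {
      // crset forces LT on; cmpi sets LT only for a negative counter.
      const bool lt = (s == ON_STACK_REPLACEMENT) || (profiling_counter < 0);
      return lt;  // "bge ok" skips the call only when LT is clear
    }

    int main() {
      printf("%d\n", EntersStub(INTERRUPT, 10));             // 0: keep running
      printf("%d\n", EntersStub(INTERRUPT, -1));             // 1: interrupt check
      printf("%d\n", EntersStub(ON_STACK_REPLACEMENT, 10));  // 1: always enter OSR
      return 0;
    }
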
|
|
|
|
| @@ -4859,20 +4922,18 @@ BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
|
| Isolate* isolate,
|
| Code* unoptimized_code,
|
| Address pc) {
|
| - DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));
|
| -
|
| - Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
|
| - Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
|
| - Address interrupt_address = Assembler::target_address_at(
|
| - pc_immediate_load_address, unoptimized_code);
|
| + Address mov_address = Assembler::target_address_from_return_address(pc);
|
| + Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
|
| + Address interrupt_address = Assembler::target_address_at(mov_address,
|
| + unoptimized_code);
|
|
|
| - if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
|
| + if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
|
| DCHECK(interrupt_address ==
|
| isolate->builtins()->InterruptCheck()->entry());
|
| return INTERRUPT;
|
| }
|
|
|
| - DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));
|
| + DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
|
|
|
| if (interrupt_address ==
|
| isolate->builtins()->OnStackReplacement()->entry()) {
|
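State recovery is the mirror of the patch: the instruction still present at cmp_address distinguishes INTERRUPT (a compare-immediate) from the OSR states (a crset), and the two OSR states are then told apart by which builtin entry the mov sequence currently targets. A compact model of that decoding, with the predicates as assumed inputs:

    #include <cstdio>

    // Illustrative model, not V8 code: recovering the back-edge state.
    enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

    static BackEdgeState Decode(bool is_cmp_immediate, bool targets_osr_builtin) {
      if (is_cmp_immediate) return INTERRUPT;  // cmpi r6, 0 is still in place
      return targets_osr_builtin ? ON_STACK_REPLACEMENT    // crset + OSR entry
                                 : OSR_AFTER_STACK_CHECK;  // crset + other entry
    }

    int main() {
      printf("%d %d %d\n", Decode(true, false), Decode(false, true),
             Decode(false, false));
      return 0;
    }
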
| @@ -4887,4 +4948,4 @@ BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
|
|
|
| } } // namespace v8::internal
|
|
|
| -#endif // V8_TARGET_ARCH_ARM
|
| +#endif // V8_TARGET_ARCH_PPC
|
|
|