| Index: src/sh4/full-codegen-sh4.cc
|
| diff --git a/src/arm/full-codegen-arm.cc b/src/sh4/full-codegen-sh4.cc
|
| similarity index 93%
|
| copy from src/arm/full-codegen-arm.cc
|
| copy to src/sh4/full-codegen-sh4.cc
|
| index be8228377aecbad5f0a6cd63db55b9a37f53e3d4..d67eaccf0bdc8f335497b4ce7d62aa20e9846ce6 100644
|
| --- a/src/arm/full-codegen-arm.cc
|
| +++ b/src/sh4/full-codegen-sh4.cc
|
| @@ -1,4 +1,4 @@
|
| -// Copyright 2012 the V8 project authors. All rights reserved.
|
| +// Copyright 2011-2012 the V8 project authors. All rights reserved.
|
| // Redistribution and use in source and binary forms, with or without
|
| // modification, are permitted provided that the following conditions are
|
| // met:
|
| @@ -27,7 +27,7 @@
|
|
|
| #include "v8.h"
|
|
|
| -#if defined(V8_TARGET_ARCH_ARM)
|
| +#if defined(V8_TARGET_ARCH_SH4)
|
|
|
| #include "code-stubs.h"
|
| #include "codegen.h"
|
| @@ -39,26 +39,31 @@
|
| #include "scopes.h"
|
| #include "stub-cache.h"
|
|
|
| -#include "arm/code-stubs-arm.h"
|
| -#include "arm/macro-assembler-arm.h"
|
| +#include "sh4/code-stubs-sh4.h"
|
| +#include "sh4/macro-assembler-sh4.h"
|
|
|
| namespace v8 {
|
| namespace internal {
|
|
|
| #define __ ACCESS_MASM(masm_)
|
|
|
| +#include "map-sh4.h" // For ARM -> SH4 register mapping
|
| +
|
|
|
| // A patch site is a location in the code which it is possible to patch. This
|
| // class has a number of methods to emit the code which is patchable and the
|
| -// method EmitPatchInfo to record a marker back to the patchable code. This
|
| -// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
|
| -// immediate value is used) is the delta from the pc to the first instruction of
|
| -// the patchable code.
|
| +// method EmitPatchInfo to record a marker back to the patchable code.
|
| +// On SH4 this marker is a cmp #ii, r0 operation, which limits the range
|
| +// of #ii to -128..+127 instructions for the distance between the patch and
|
| +// the label.
|
| +// The #ii (8 bits signed value) is the delta from the pc to
|
| +// the first instruction of the patchable code.
|
| class JumpPatchSite BASE_EMBEDDED {
|
| public:
|
| explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
|
| #ifdef DEBUG
|
| info_emitted_ = false;
|
| + ASSERT(!patch_site_.is_bound());
|
| #endif
|
| }
|
|
|
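The comment above constrains the patch-site delta to what a cmp #imm8, r0 can encode. A minimal standalone sketch of that range check, assuming the usual signed 8-bit immediate of the SH4 cmp/eq #imm, r0 form (the helper name here is illustrative, not the port's actual API):

    #include <cstdint>
    #include <cassert>

    // The patch marker stores the pc-to-patch-site delta in the 8-bit
    // immediate of cmp/eq #imm, r0, so the delta must fit that range.
    bool fits_cmp_imm8(int32_t delta_in_instructions) {
      return delta_in_instructions >= -128 && delta_in_instructions <= 127;
    }

    int main() {
      assert(fits_cmp_imm8(42));    // typical inline smi sequence: fits
      assert(!fits_cmp_imm8(300));  // patch site too far away: cannot encode
      return 0;
    }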
| @@ -70,9 +75,20 @@ class JumpPatchSite BASE_EMBEDDED {
|
| // the inlined smi code.
|
| void EmitJumpIfNotSmi(Register reg, Label* target) {
|
| ASSERT(!patch_site_.is_bound() && !info_emitted_);
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| + // For the current unbound branch sequence (in assembler_sh4.cc)
|
| + // to be simple to patch, we force the alignment now, such that the
|
| + // first instruction of the sequence after the cmp is a branch.
|
| + __ align();
|
| + __ mov(sh4_ip, Operand(kSmiTagMask));
|
| __ bind(&patch_site_);
|
| - __ cmp(reg, Operand(reg));
|
| + __ cmp(reg, reg);
|
| + // Don't use b(al, ...) as that might emit the constant pool right after the
|
| + // branch. After patching, when the branch is no longer unconditional,
|
| + // execution can continue into the constant pool.
|
| + // Also for the later patch in PatchInlinedSmiCode, we require
|
| + // that the target is not bound yet.
|
| + ASSERT(!target->is_bound());
|
| + ASSERT(masm_->pc_offset() % 4 == 0);
|
| __ b(eq, target); // Always taken before patched.
|
| }
|
|
|
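Before patching, the cmp reg, reg above always sets T, so the following b(eq, ...) is unconditionally taken and the inline smi path is skipped; PatchInlinedSmiCode later rewrites the site into a real tag test. A rough model of the two states (a sketch of the branch predicate only, not the actual patching code):

    #include <cstdint>

    const int32_t kSmiTagMask = 1;  // 32-bit smis carry tag 0 in bit 0

    // Unpatched EmitJumpIfNotSmi site: reg == reg always holds, branch taken.
    bool taken_before_patch(int32_t) { return true; }

    // Patched site: branch only when the value is not a smi.
    bool taken_after_patch(int32_t reg) { return (reg & kSmiTagMask) != 0; }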
| @@ -80,28 +96,31 @@ class JumpPatchSite BASE_EMBEDDED {
|
| // the inlined smi code.
|
| void EmitJumpIfSmi(Register reg, Label* target) {
|
| ASSERT(!patch_site_.is_bound() && !info_emitted_);
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| + __ align();
|
| + __ mov(sh4_ip, Operand(kSmiTagMask));
|
| __ bind(&patch_site_);
|
| - __ cmp(reg, Operand(reg));
|
| - __ b(ne, target); // Never taken before patched.
|
| + __ cmp(reg, reg);
|
| + ASSERT(!target->is_bound());
|
| + ASSERT(masm_->pc_offset() % 4 == 0);
|
| + __ bf(target); // Never taken before patched.
|
| }
|
|
|
| void EmitPatchInfo() {
|
| - // Block literal pool emission whilst recording patch site information.
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| if (patch_site_.is_bound()) {
|
| int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
|
| - Register reg;
|
| - reg.set_code(delta_to_patch_site / kOff12Mask);
|
| - __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
|
| + ASSERT(delta_to_patch_site >= 0);
|
| + // Ensure that the delta fits into the raw immediate.
|
| + ASSERT(masm_->fits_cmp_unsigned_imm(delta_to_patch_site));
|
| + __ cmpeq_r0_unsigned_imm(delta_to_patch_site);
|
| #ifdef DEBUG
|
| info_emitted_ = true;
|
| #endif
|
| } else {
|
| - __ nop(); // Signals no inlined code.
|
| }
|
| }
|
|
|
| + bool is_bound() const { return patch_site_.is_bound(); }
|
| +
|
| private:
|
| MacroAssembler* masm_;
|
| Label patch_site_;
|
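The marker encodings differ between the two ports: the ARM original packed the delta as register-code * 0xfff plus a raw 12-bit immediate (per the comment removed above), while the SH4 version stores the whole delta in a single cmp/eq immediate. A sketch of the two decodings, assuming kOff12Mask is 0xfff as in the ARM code:

    #include <cstdint>

    const int32_t kOff12Mask = 0xfff;

    // ARM marker: cmp rx, #yyy  decodes as  delta = x * 0xfff + yyy.
    int32_t arm_marker_delta(int reg_code, int32_t imm12) {
      return reg_code * kOff12Mask + imm12;
    }

    // SH4 marker: cmp/eq #ii, r0  decodes as  delta = ii.
    int32_t sh4_marker_delta(int32_t imm) { return imm; }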
| @@ -121,7 +140,7 @@ class JumpPatchSite BASE_EMBEDDED {
|
| // o cp: our context
|
| // o fp: our caller's frame pointer
|
| // o sp: stack pointer
|
| -// o lr: return address
|
| +// o pr: return address
|
| //
|
| // The function builds a JS frame. Please see JavaScriptFrameConstants in
|
| // frames-arm.h for its layout.
|
| @@ -150,7 +169,7 @@ void FullCodeGenerator::Generate() {
|
| if (!info->is_classic_mode() || info->is_native()) {
|
| Label ok;
|
| __ cmp(r5, Operand(0));
|
| - __ b(eq, &ok);
|
| + __ b(eq, &ok, Label::kNear);
|
| int receiver_offset = info->scope()->num_parameters() * kPointerSize;
|
| __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
|
| __ str(r2, MemOperand(sp, receiver_offset));
|
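The Label::kNear hint (and the *_near emitters used throughout this port) exists because SH4 conditional branches carry only a short displacement; far targets need a longer inverted-branch-plus-jump sequence. A sketch of the reachability test this implies, assuming the standard SH4 bt/bf encoding (a signed 8-bit displacement counted in 2-byte instruction slots):

    #include <cstdint>

    // bt/bf reach is a signed 8-bit count of 2-byte slots from the branch.
    bool fits_near_branch(int32_t byte_offset) {
      int32_t slots = byte_offset / 2;
      return slots >= -128 && slots <= 127;
    }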
| @@ -164,7 +183,7 @@ void FullCodeGenerator::Generate() {
|
|
|
| int locals_count = info->scope()->num_stack_slots();
|
|
|
| - __ Push(lr, fp, cp, r1);
|
| + __ Push(pr, fp, cp, r1);
|
| if (locals_count > 0) {
|
| // Load undefined value here, so the value is ready for the loop
|
| // below.
|
| @@ -285,9 +304,8 @@ void FullCodeGenerator::Generate() {
|
| PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
|
| Label ok;
|
| __ LoadRoot(ip, Heap::kStackLimitRootIndex);
|
| - __ cmp(sp, Operand(ip));
|
| - __ b(hs, &ok);
|
| - PredictableCodeSizeScope predictable(masm_);
|
| + __ cmphs(sp, ip);
|
| + __ bt_near(&ok);
|
| StackCheckStub stub;
|
| __ CallStub(&stub);
|
| __ bind(&ok);
|
| @@ -307,9 +325,10 @@ void FullCodeGenerator::Generate() {
|
| }
|
| EmitReturnSequence();
|
|
|
| + // TODO(stm): implement this when const pools are active
|
| // Force emit the constant pool, so it doesn't get emitted in the middle
|
| // of the stack check table.
|
| - masm()->CheckConstPool(true, false);
|
| + // masm()->CheckConstPool(true, false);
|
| }
|
|
|
|
|
| @@ -321,7 +340,8 @@ void FullCodeGenerator::ClearAccumulator() {
|
| void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
|
| __ mov(r2, Operand(profiling_counter_));
|
| __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
|
| - __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
|
| + __ sub(r3, r3, Operand(Smi::FromInt(delta)));
|
| + __ cmpge(r3, Operand(0));
|
| __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
|
| }
|
|
|
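ARM folded the sign check into the subtract (SetCC plus a pl branch); SH4 has only the single T flag, so the port subtracts and then compares explicitly. An illustrative model of the replacement sequence, with t standing for the T bit (not the port's actual emitters):

    #include <cstdint>

    // ARM:  sub r3, r3, #delta, SetCC ; b pl, &ok
    // SH4:  sub r3, r3, #delta ; cmp/ge r3, #0 ; bt &ok
    bool counter_not_exhausted(int32_t counter, int32_t delta) {
      int32_t r3 = counter - delta;  // plain subtract, no flags on SH4
      bool t = (r3 >= 0);            // explicit compare against zero sets T
      return t;                      // bt branches when T is set
    }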
| @@ -345,8 +365,6 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
|
| void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
|
| Label* back_edge_target) {
|
| Comment cmnt(masm_, "[ Stack check");
|
| - // Block literal pools whilst emitting stack check code.
|
| - Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| Label ok;
|
|
|
| if (FLAG_count_based_interrupts) {
|
| @@ -358,14 +376,13 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
|
| Max(1, distance / kBackEdgeDistanceUnit));
|
| }
|
| EmitProfilingCounterDecrement(weight);
|
| - __ b(pl, &ok);
|
| + __ bt(&ok); // TODO(STM): ??
|
| InterruptStub stub;
|
| __ CallStub(&stub);
|
| } else {
|
| __ LoadRoot(ip, Heap::kStackLimitRootIndex);
|
| - __ cmp(sp, Operand(ip));
|
| - __ b(hs, &ok);
|
| - PredictableCodeSizeScope predictable(masm_);
|
| + __ cmphs(sp, ip);
|
| + __ bt(&ok);
|
| StackCheckStub stub;
|
| __ CallStub(&stub);
|
| }
|
| @@ -412,7 +429,7 @@ void FullCodeGenerator::EmitReturnSequence() {
|
| }
|
| EmitProfilingCounterDecrement(weight);
|
| Label ok;
|
| - __ b(pl, &ok);
|
| + __ bt(&ok); // TODO(STM): ??
|
| __ push(r0);
|
| if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
|
| __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
|
| @@ -434,25 +451,26 @@ void FullCodeGenerator::EmitReturnSequence() {
|
| #endif
|
| // Make sure that the constant pool is not emitted inside of the return
|
| // sequence.
|
| - { Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| + {
|
| + // SH4: removed
|
| + // Assembler::BlockConstPoolScope block_const_pool(masm_);
|
| // Here we use masm_-> instead of the __ macro to avoid the code coverage
|
| // tool from instrumenting as we rely on the code size here.
|
| int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
|
| CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
|
| - PredictableCodeSizeScope predictable(masm_);
|
| __ RecordJSReturn();
|
| masm_->mov(sp, fp);
|
| - masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
|
| + masm_->Pop(lr, fp);
|
| masm_->add(sp, sp, Operand(sp_delta));
|
| - masm_->Jump(lr);
|
| + masm_->Ret();
|
| }
|
|
|
| -#ifdef DEBUG
|
| - // Check that the size of the code used for returning is large enough
|
| - // for the debugger's requirements.
|
| - ASSERT(Assembler::kJSReturnSequenceInstructions <=
|
| - masm_->InstructionsGeneratedSince(&check_exit_codesize));
|
| -#endif
|
| +// #ifdef DEBUG
|
| +// // Check that the size of the code used for returning is large enough
|
| +// // for the debugger's requirements.
|
| +// ASSERT(Assembler::kJSReturnSequenceInstructions <=
|
| +// masm_->InstructionsGeneratedSince(&check_exit_codesize));
|
| +// #endif
|
| }
|
| }
|
|
|
| @@ -614,7 +632,7 @@ void FullCodeGenerator::AccumulatorValueContext::Plug(
|
| Label done;
|
| __ bind(materialize_true);
|
| __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
|
| - __ jmp(&done);
|
| + __ jmp_near(&done);
|
| __ bind(materialize_false);
|
| __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
|
| __ bind(&done);
|
| @@ -628,7 +646,7 @@ void FullCodeGenerator::StackValueContext::Plug(
|
| __ bind(materialize_true);
|
| __ LoadRoot(ip, Heap::kTrueValueRootIndex);
|
| __ push(ip);
|
| - __ jmp(&done);
|
| + __ jmp_near(&done);
|
| __ bind(materialize_false);
|
| __ LoadRoot(ip, Heap::kFalseValueRootIndex);
|
| __ push(ip);
|
| @@ -679,9 +697,19 @@ void FullCodeGenerator::DoTest(Expression* condition,
|
| Label* if_true,
|
| Label* if_false,
|
| Label* fall_through) {
|
| - ToBooleanStub stub(result_register());
|
| - __ CallStub(&stub);
|
| - __ tst(result_register(), result_register());
|
| + // TODO(STM): can be removed!
|
| + if (CpuFeatures::IsSupported(FPU)) {
|
| + ToBooleanStub stub(result_register());
|
| + __ CallStub(&stub);
|
| + __ tst(result_register(), result_register());
|
| + } else {
|
| + // Call the runtime to find the boolean value of the source and then
|
| + // translate it into control flow to the pair of labels.
|
| + __ push(result_register());
|
| + __ CallRuntime(Runtime::kToBool, 1);
|
| + __ LoadRoot(ip, Heap::kFalseValueRootIndex);
|
| + __ cmp(r0, ip);
|
| + }
|
| Split(ne, if_true, if_false, fall_through);
|
| }
|
|
|
| @@ -690,11 +718,14 @@ void FullCodeGenerator::Split(Condition cond,
|
| Label* if_true,
|
| Label* if_false,
|
| Label* fall_through) {
|
| + // Only eq and ne are used here, so NegateCondition simply flips between them.
|
| + ASSERT(cond == ne || cond == eq);
|
| if (if_false == fall_through) {
|
| __ b(cond, if_true);
|
| } else if (if_true == fall_through) {
|
| __ b(NegateCondition(cond), if_false);
|
| } else {
|
| + // TODO(stm): add a special case for two jumps in a row
|
| __ b(cond, if_true);
|
| __ b(if_false);
|
| }
|
| @@ -744,7 +775,6 @@ void FullCodeGenerator::SetVar(Variable* var,
|
| ASSERT(!scratch1.is(src));
|
| MemOperand location = VarOperand(var, scratch0);
|
| __ str(src, location);
|
| -
|
| // Emit the write barrier code if the location is in the heap.
|
| if (var->IsContextSlot()) {
|
| __ RecordWriteContextSlot(scratch0,
|
| @@ -1090,10 +1120,11 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
|
|
|
| // Convert the object to a JS object.
|
| - Label convert, done_convert;
|
| - __ JumpIfSmi(r0, &convert);
|
| - __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
|
| - __ b(ge, &done_convert);
|
| + Label convert;
|
| + Label done_convert;
|
| + __ JumpIfSmi(r0, &convert, Label::kNear);
|
| + __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE, ge);
|
| + __ bt_near(&done_convert);
|
| __ bind(&convert);
|
| __ push(r0);
|
| __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
|
| @@ -1103,8 +1134,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| // Check for proxies.
|
| Label call_runtime;
|
| STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
|
| - __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
|
| - __ b(le, &call_runtime);
|
| + __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE, gt);
|
| + __ bf(&call_runtime);
|
|
|
| // Check cache validity in generated code. This is a fast case for
|
| // the JSObject::IsSimpleEnum cache validity checks. If we cannot
|
| @@ -1116,7 +1147,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| // iterated over and use the cache for the iteration.
|
| Label use_cache;
|
| __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| - __ b(&use_cache);
|
| + __ b_near(&use_cache);
|
|
|
| // Get the set of properties to enumerate.
|
| __ bind(&call_runtime);
|
| @@ -1129,8 +1160,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| Label fixed_array;
|
| __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| __ LoadRoot(ip, Heap::kMetaMapRootIndex);
|
| - __ cmp(r2, ip);
|
| - __ b(ne, &fixed_array);
|
| + __ cmpeq(r2, ip);
|
| + __ bf_near(&fixed_array);
|
|
|
| // We got a map in register r0. Get the enumeration cache from it.
|
| Label no_descriptors;
|
| @@ -1140,7 +1171,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| __ cmp(r1, Operand(Smi::FromInt(0)));
|
| __ b(eq, &no_descriptors);
|
|
|
| - __ LoadInstanceDescriptors(r0, r2);
|
| + __ LoadInstanceDescriptors(r0, r2, r4);
|
| __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
|
| __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
|
|
|
| @@ -1149,7 +1180,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| __ mov(r0, Operand(Smi::FromInt(0)));
|
| // Push enumeration cache, enumeration cache length (as smi) and zero.
|
| __ Push(r2, r1, r0);
|
| - __ jmp(&loop);
|
| + __ jmp_near(&loop);
|
|
|
| __ bind(&no_descriptors);
|
| __ Drop(1);
|
| @@ -1171,8 +1202,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
|
| __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
|
| STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
|
| - __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
|
| - __ b(gt, &non_proxy);
|
| + __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE, gt);
|
| + __ bt(&non_proxy);
|
| __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
|
| __ bind(&non_proxy);
|
| __ Push(r1, r0); // Smi and array
|
| @@ -1185,15 +1216,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| __ bind(&loop);
|
| // Load the current count to r0, load the length to r1.
|
| __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
|
| - __ cmp(r0, r1); // Compare to the array length.
|
| - __ b(hs, loop_statement.break_label());
|
| + __ cmphs(r0, r1); // Compare to the array length.
|
| + __ bt(loop_statement.break_label());
|
|
|
| // Get the current entry of the array into register r3.
|
| __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
|
| __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| - __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
|
| + __ lsl(r3, r0, Operand(kPointerSizeLog2 - kSmiTagSize));
|
| + __ ldr(r3, MemOperand(r2, r3));
|
|
|
| - // Get the expected map from the stack or a smi in the
|
| + // Get the expected map from the stack or a zero map in the
|
| // permanent slow case into register r2.
|
| __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
|
|
|
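The lsl/ldr pair above replaces ARM's scaled-index addressing. The shift count works because a smi is the index already shifted left by kSmiTagSize, so only the remaining shift up to a byte offset is needed. A worked version of the arithmetic, assuming 32-bit values (kPointerSizeLog2 == 2, kSmiTagSize == 1):

    #include <cstdint>

    // smi = index << 1, byte offset = index << 2, hence one extra shift.
    int32_t smi_index_to_byte_offset(int32_t smi) {
      return smi << (2 - 1);  // kPointerSizeLog2 - kSmiTagSize
    }
    // index 3  ->  smi 6  ->  offset 12 == 3 * sizeof(void*)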
| @@ -1202,8 +1234,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| Label update_each;
|
| __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
|
| __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
|
| - __ cmp(r4, Operand(r2));
|
| - __ b(eq, &update_each);
|
| + __ cmpeq(r4, r2);
|
| + __ bt_near(&update_each);
|
|
|
| // For proxies, no filtering is done.
|
| // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
|
| @@ -1216,7 +1248,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
| __ push(r1); // Enumerable.
|
| __ push(r3); // Current entry.
|
| __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
|
| - __ mov(r3, Operand(r0), SetCC);
|
| + __ mov(r3, r0);
|
| + __ tst(r3, r3);
|
| __ b(eq, loop_statement.continue_label());
|
|
|
| // Update the 'each' property or variable from the possibly filtered
|
| @@ -1323,7 +1356,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
|
| __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
|
| __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
|
| __ cmp(temp, ip);
|
| - __ b(eq, &fast);
|
| + __ b(eq, &fast, Label::kNear);
|
| // Check that extension is NULL.
|
| __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
|
| __ tst(temp, temp);
|
| @@ -1396,7 +1429,10 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
|
| local->mode() == LET) {
|
| __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
|
| if (local->mode() == CONST) {
|
| - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
|
| + Label skip;
|
| + __ bf_near(&skip);
|
| + __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
|
| + __ bind(&skip);
|
| } else { // LET || CONST_HARMONY
|
| __ b(ne, done);
|
| __ mov(r0, Operand(var->name()));
|
| @@ -1409,6 +1445,9 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
|
| }
|
|
|
|
|
| +// clobbers: r0, r1, r3
|
| +// live-in: fp, sp, cp
|
| +// live-out: fp, sp, cp
|
| void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
|
| // Record position before possible IC call.
|
| SetSourcePosition(proxy->position());
|
| @@ -1486,7 +1525,10 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
|
| } else {
|
| // Uninitialized const bindings outside of harmony mode are unholed.
|
| ASSERT(var->mode() == CONST);
|
| - __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
|
| + Label skip;
|
| + __ bf(&skip);
|
| + __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
|
| + __ bind(&skip);
|
| }
|
| context()->Plug(r0);
|
| break;
|
| @@ -1529,8 +1571,8 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
|
| FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
|
| __ ldr(r5, FieldMemOperand(r4, literal_offset));
|
| __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
|
| - __ cmp(r5, ip);
|
| - __ b(ne, &materialized);
|
| + __ cmpeq(r5, ip);
|
| + __ bf_near(&materialized);
|
|
|
| // Create regexp literal using runtime function.
|
| // Result will be in r0.
|
| @@ -1545,7 +1587,7 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
|
| int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
|
| Label allocated, runtime_allocate;
|
| __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
|
| - __ jmp(&allocated);
|
| + __ jmp_near(&allocated);
|
|
|
| __ bind(&runtime_allocate);
|
| __ push(r5);
|
| @@ -1930,7 +1972,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
|
| __ pop(left);
|
|
|
| // Perform combined smi check on both operands.
|
| - __ orr(scratch1, left, Operand(right));
|
| + __ orr(scratch1, left, right);
|
| STATIC_ASSERT(kSmiTag == 0);
|
| JumpPatchSite patch_site(masm_);
|
| patch_site.EmitJumpIfSmi(scratch1, &smi_case);
|
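The single orr above still performs the classic combined smi check: with tag 0 in bit 0, OR-ing the operands leaves bit 0 clear only if both are smis. A standalone statement of the invariant:

    #include <cstdint>

    // (left | right) & 1 == 0  iff  both values carry the smi tag (0).
    bool both_are_smis(int32_t left, int32_t right) {
      return ((left | right) & 1) == 0;
    }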
| @@ -1950,16 +1992,17 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
|
| case Token::SAR:
|
| __ b(&stub_call);
|
| __ GetLeastBitsFromSmi(scratch1, right, 5);
|
| - __ mov(right, Operand(left, ASR, scratch1));
|
| + __ asr(right, left, scratch1);
|
| __ bic(right, right, Operand(kSmiTagMask));
|
| break;
|
| case Token::SHL: {
|
| __ b(&stub_call);
|
| __ SmiUntag(scratch1, left);
|
| __ GetLeastBitsFromSmi(scratch2, right, 5);
|
| - __ mov(scratch1, Operand(scratch1, LSL, scratch2));
|
| - __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
|
| - __ b(mi, &stub_call);
|
| + __ lsl(scratch1, scratch1, scratch2);
|
| + __ add(scratch2, scratch1, Operand(0x40000000));
|
| + __ cmpge(scratch2, Operand(0));
|
| + __ b(f, &stub_call);
|
| __ SmiTag(right, scratch1);
|
| break;
|
| }
|
| @@ -1967,44 +2010,45 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
|
| __ b(&stub_call);
|
| __ SmiUntag(scratch1, left);
|
| __ GetLeastBitsFromSmi(scratch2, right, 5);
|
| - __ mov(scratch1, Operand(scratch1, LSR, scratch2));
|
| + __ lsr(scratch1, scratch1, scratch2);
|
| __ tst(scratch1, Operand(0xc0000000));
|
| __ b(ne, &stub_call);
|
| __ SmiTag(right, scratch1);
|
| break;
|
| }
|
| case Token::ADD:
|
| - __ add(scratch1, left, Operand(right), SetCC);
|
| - __ b(vs, &stub_call);
|
| + __ addv(scratch1, left, right);
|
| + __ b(t, &stub_call);
|
| __ mov(right, scratch1);
|
| break;
|
| case Token::SUB:
|
| - __ sub(scratch1, left, Operand(right), SetCC);
|
| - __ b(vs, &stub_call);
|
| + __ subv(scratch1, left, right);
|
| + __ b(t, &stub_call);
|
| __ mov(right, scratch1);
|
| break;
|
| case Token::MUL: {
|
| __ SmiUntag(ip, right);
|
| - __ smull(scratch1, scratch2, left, ip);
|
| - __ mov(ip, Operand(scratch1, ASR, 31));
|
| - __ cmp(ip, Operand(scratch2));
|
| + __ dmuls(scratch1, scratch2, left, ip);
|
| + __ asr(ip, scratch1, Operand(31));
|
| + __ cmp(ip, scratch2);
|
| __ b(ne, &stub_call);
|
| - __ cmp(scratch1, Operand(0));
|
| - __ mov(right, Operand(scratch1), LeaveCC, ne);
|
| + __ tst(scratch1, scratch1);
|
| + __ mov(right, scratch1, ne);
|
| __ b(ne, &done);
|
| - __ add(scratch2, right, Operand(left), SetCC);
|
| - __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
|
| - __ b(mi, &stub_call);
|
| + __ add(scratch2, right, left);
|
| + __ cmpge(scratch2, Operand(0));
|
| + __ mov(right, Operand(Smi::FromInt(0)), t);
|
| + __ bf(&stub_call);
|
| break;
|
| }
|
| case Token::BIT_OR:
|
| - __ orr(right, left, Operand(right));
|
| + __ orr(right, left, right);
|
| break;
|
| case Token::BIT_AND:
|
| - __ and_(right, left, Operand(right));
|
| + __ land(right, left, right);
|
| break;
|
| case Token::BIT_XOR:
|
| - __ eor(right, left, Operand(right));
|
| + __ eor(right, left, right);
|
| break;
|
| default:
|
| UNREACHABLE();
|
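Two SH4 idioms in this hunk deserve a note: addv/subv set T on signed overflow (replacing ARM's V flag), and the MUL smi case checks that the 64-bit dmuls product fits 32 bits by comparing the high word with the sign-extension of the low word. An illustrative C++ model of both checks:

    #include <cstdint>

    // addv analogue: T is set iff the signed 32-bit add overflows.
    bool addv_sets_t(int32_t a, int32_t b, int32_t* result) {
      int64_t wide = (int64_t)a + (int64_t)b;
      *result = (int32_t)wide;
      return wide != (int64_t)*result;
    }

    // dmuls fit check: the product fits a signed 32-bit value iff the
    // high word equals the arithmetic sign-extension of the low word.
    bool dmuls_product_fits(int32_t left, int32_t right) {
      int64_t product = (int64_t)left * (int64_t)right;
      int32_t lo = (int32_t)product;
      int32_t hi = (int32_t)(product >> 32);
      return hi == (lo >> 31);
    }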
| @@ -2102,7 +2146,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
|
| Label skip;
|
| __ ldr(r1, StackOperand(var));
|
| __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
|
| - __ b(ne, &skip);
|
| + __ b(ne, &skip, Label::kNear);
|
| __ str(result_register(), StackOperand(var));
|
| __ bind(&skip);
|
| } else {
|
| @@ -2132,7 +2176,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
|
| MemOperand location = VarOperand(var, r1);
|
| __ ldr(r3, location);
|
| __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
|
| - __ b(ne, &assign);
|
| + __ b(ne, &assign, Label::kNear);
|
| __ mov(r3, Operand(var->name()));
|
| __ push(r3);
|
| __ CallRuntime(Runtime::kThrowReferenceError, 1);
|
| @@ -2242,9 +2286,7 @@ void FullCodeGenerator::CallIC(Handle<Code> code,
|
| RelocInfo::Mode rmode,
|
| TypeFeedbackId ast_id) {
|
| ic_total_count_++;
|
| - // All calls must have a predictable size in full-codegen code to ensure that
|
| - // the debugger can patch them correctly.
|
| - __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS);
|
| + __ Call(code, rmode, ast_id);
|
| }
|
|
|
| void FullCodeGenerator::EmitCallWithIC(Call* expr,
|
| @@ -2442,7 +2484,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
| // code.
|
| if (done.is_linked()) {
|
| Label call;
|
| - __ b(&call);
|
| + __ b_near(&call);
|
| __ bind(&done);
|
| // Push function.
|
| __ push(r0);
|
| @@ -2594,11 +2636,11 @@ void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
|
| __ tst(r1, Operand(1 << Map::kIsUndetectable));
|
| __ b(ne, if_false);
|
| __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
|
| - __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
|
| - __ b(lt, if_false);
|
| - __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
|
| + __ cmpge(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
|
| + __ bf(if_false);
|
| + __ cmpgt(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| - Split(le, if_true, if_false, fall_through);
|
| + Split(ne, if_true, if_false, fall_through);
|
|
|
| context()->Plug(if_true, if_false);
|
| }
|
| @@ -2618,9 +2660,9 @@ void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
|
| &if_true, &if_false, &fall_through);
|
|
|
| __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
|
| + __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE, ge);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| - Split(ge, if_true, if_false, fall_through);
|
| + Split(t, if_true, if_false, fall_through);
|
|
|
| context()->Plug(if_true, if_false);
|
| }
|
| @@ -2664,7 +2706,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
|
| context()->PrepareTest(&materialize_true, &materialize_false,
|
| &if_true, &if_false, &fall_through);
|
|
|
| - __ AssertNotSmi(r0);
|
| + if (generate_debug_code_) __ AbortIfSmi(r0);
|
|
|
| __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
|
| @@ -2688,7 +2730,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
|
| __ cmp(r3, Operand(0));
|
| __ b(eq, &done);
|
|
|
| - __ LoadInstanceDescriptors(r1, r4);
|
| + __ LoadInstanceDescriptors(r1, r4, r2);
|
| // r4: descriptor array.
|
| // r3: valid entries in the descriptor array.
|
| STATIC_ASSERT(kSmiTag == 0);
|
| @@ -2697,24 +2739,23 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
|
| __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
|
| __ mul(r3, r3, ip);
|
| // Calculate location of the first key name.
|
| - __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
|
| - // Calculate the end of the descriptor array.
|
| - __ mov(r2, r4);
|
| - __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
|
| -
|
| + __ add(r4,
|
| + r4,
|
| + Operand(FixedArray::kHeaderSize - kHeapObjectTag +
|
| + DescriptorArray::kFirstIndex * kPointerSize));
|
| // Loop through all the keys in the descriptor array. If one of these is the
|
| // symbol valueOf the result is false.
|
| // The use of ip to store the valueOf symbol assumes that it is not otherwise
|
| // used in the loop below.
|
| __ mov(ip, Operand(FACTORY->value_of_symbol()));
|
| - __ jmp(&entry);
|
| + __ jmp_near(&entry);
|
| __ bind(&loop);
|
| __ ldr(r3, MemOperand(r4, 0));
|
| __ cmp(r3, ip);
|
| __ b(eq, if_false);
|
| __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
|
| __ bind(&entry);
|
| - __ cmp(r4, Operand(r2));
|
| + __ cmp(r4, r2);
|
| __ b(ne, &loop);
|
|
|
| __ bind(&done);
|
| @@ -2755,7 +2796,7 @@ void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
|
| &if_true, &if_false, &fall_through);
|
|
|
| __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
|
| + __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE, eq);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -2777,7 +2818,7 @@ void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
|
| &if_true, &if_false, &fall_through);
|
|
|
| __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
|
| + __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE, eq);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -2799,7 +2840,7 @@ void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
|
| &if_true, &if_false, &fall_through);
|
|
|
| __ JumpIfSmi(r0, if_false);
|
| - __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
|
| + __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE, eq);
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| Split(eq, if_true, if_false, fall_through);
|
|
|
| @@ -2825,7 +2866,7 @@ void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
|
| Label check_frame_marker;
|
| __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
|
| __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| - __ b(ne, &check_frame_marker);
|
| + __ b(ne, &check_frame_marker, Label::kNear);
|
| __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
|
|
|
| // Check the marker in the calling frame.
|
| @@ -2888,7 +2929,7 @@ void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
|
| __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
| __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
|
| __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
|
| - __ b(ne, &exit);
|
| + __ b(ne, &exit, Label::kNear);
|
|
|
| // Arguments adaptor case: Read the arguments length from the
|
| // adaptor frame.
|
| @@ -2907,16 +2948,16 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
|
| VisitForAccumulatorValue(args->at(0));
|
|
|
| // If the object is a smi, we return null.
|
| - __ JumpIfSmi(r0, &null);
|
| + __ JumpIfSmi(r0, &null, Label::kNear);
|
|
|
| // Check that the object is a JS object but take special care of JS
|
| // functions to make sure they have 'Function' as their class.
|
| // Assume that there are only two callable types, and one of them is at
|
| // either end of the type range for JS object types. Saves extra comparisons.
|
| STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
|
| - __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
|
| + __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE, ge);
|
| // Map is now in r0.
|
| - __ b(lt, &null);
|
| + __ bf_near(&null);
|
| STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
|
| FIRST_SPEC_OBJECT_TYPE + 1);
|
| __ b(eq, &function);
|
| @@ -2930,24 +2971,24 @@ void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
|
|
|
| // Check if the constructor in the map is a JS function.
|
| __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
|
| - __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
|
| - __ b(ne, &non_function_constructor);
|
| + __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE, eq);
|
| + __ b(ne, &non_function_constructor, Label::kNear);
|
|
|
| // r0 now contains the constructor function. Grab the
|
| // instance class name from there.
|
| __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
|
| __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
|
| - __ b(&done);
|
| + __ b_near(&done);
|
|
|
| // Functions have class 'Function'.
|
| __ bind(&function);
|
| __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
|
| - __ jmp(&done);
|
| + __ jmp_near(&done);
|
|
|
| // Objects with a non-function constructor have class 'Object'.
|
| __ bind(&non_function_constructor);
|
| __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
|
| - __ jmp(&done);
|
| + __ jmp_near(&done);
|
|
|
| // Non-JS objects have class null.
|
| __ bind(&null);
|
| @@ -2989,46 +3030,46 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
|
|
|
| __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
|
| __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
|
| - __ jmp(&heapnumber_allocated);
|
| + __ jmp_near(&heapnumber_allocated);
|
|
|
| __ bind(&slow_allocate_heapnumber);
|
| // Allocate a heap number.
|
| __ CallRuntime(Runtime::kNumberAlloc, 0);
|
| - __ mov(r4, Operand(r0));
|
| + __ mov(r4, r0);
|
|
|
| __ bind(&heapnumber_allocated);
|
|
|
| // Convert 32 random bits in r0 to 0.(32 random bits) in a double
|
| // by computing:
|
| // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
|
| - if (CpuFeatures::IsSupported(VFP2)) {
|
| + if (CpuFeatures::IsSupported(FPU)) {
|
| + __ push(r4);
|
| __ PrepareCallCFunction(1, r0);
|
| - __ ldr(r0,
|
| + __ ldr(r4,
|
| ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
|
| - __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
|
| + __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
|
| __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
|
| + __ pop(r4);
|
|
|
| - CpuFeatures::Scope scope(VFP2);
|
| // 0x41300000 is the top half of 1.0 x 2^20 as a double.
|
| // Create this constant using mov/orr to avoid PC relative load.
|
| __ mov(r1, Operand(0x41000000));
|
| __ orr(r1, r1, Operand(0x300000));
|
| // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
|
| - __ vmov(d7, r0, r1);
|
| + __ movd(dr2, r0, r1);
|
| // Move 0x4130000000000000 to VFP.
|
| __ mov(r0, Operand(0, RelocInfo::NONE));
|
| - __ vmov(d8, r0, r1);
|
| + __ movd(dr4, r0, r1);
|
| // Subtract and store the result in the heap number.
|
| - __ vsub(d7, d7, d8);
|
| + __ fsub(dr2, dr4);
|
| __ sub(r0, r4, Operand(kHeapObjectTag));
|
| - __ vstr(d7, r0, HeapNumber::kValueOffset);
|
| + __ dstr(dr2, MemOperand(r0, HeapNumber::kValueOffset));
|
| __ mov(r0, r4);
|
| } else {
|
| __ PrepareCallCFunction(2, r0);
|
| - __ ldr(r1,
|
| + __ ldr(r5,
|
| ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
|
| - __ mov(r0, Operand(r4));
|
| - __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset));
|
| + __ ldr(r5, FieldMemOperand(r5, GlobalObject::kNativeContextOffset));
|
| __ CallCFunction(
|
| ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
|
| }
|
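The constant 0x41300000_00000000 used here is the IEEE-754 bit pattern of 1.0 x 2^20; putting 32 random bits in the low mantissa word yields 2^20 + r/2^32, so the subtraction leaves exactly 0.(32 random bits). A standalone sketch of the trick:

    #include <cstdint>
    #include <cstring>

    double fraction_from_bits(uint32_t r) {
      uint64_t bits = (0x41300000ull << 32) | r;  // 1.(20 zeros)(r) x 2^20
      uint64_t base = 0x4130000000000000ull;      // exactly 1.0 x 2^20
      double x, two_pow_20;
      std::memcpy(&x, &bits, sizeof x);
      std::memcpy(&two_pow_20, &base, sizeof two_pow_20);
      return x - two_pow_20;                      // r / 2^32, in [0, 1)
    }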
| @@ -3071,10 +3112,10 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
|
|
|
| Label done;
|
| // If the object is a smi return the object.
|
| - __ JumpIfSmi(r0, &done);
|
| + __ JumpIfSmi(r0, &done, Label::kNear);
|
| // If the object is not a value type, return the object.
|
| - __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
|
| - __ b(ne, &done);
|
| + __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE, eq);
|
| + __ bf_near(&done);
|
| __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
|
|
|
| __ bind(&done);
|
| @@ -3097,8 +3138,8 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
|
| Register scratch1 = r1;
|
|
|
| __ JumpIfSmi(object, ¬_date_object);
|
| - __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
|
| - __ b(ne, ¬_date_object);
|
| + __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE, eq);
|
| + __ bf(¬_date_object);
|
|
|
| if (index->value() == 0) {
|
| __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
|
| @@ -3116,8 +3157,10 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
|
| __ jmp(&done);
|
| }
|
| __ bind(&runtime);
|
| + // TODO(STM): take care of the ABI
|
| __ PrepareCallCFunction(2, scratch1);
|
| - __ mov(r1, Operand(index));
|
| + __ mov(r5, Operand(index));
|
| + __ mov(r4, r0);
|
| __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
|
| __ jmp(&done);
|
| }
|
| @@ -3135,7 +3178,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
|
| ASSERT(args->length() == 2);
|
| VisitForStackValue(args->at(0));
|
| VisitForStackValue(args->at(1));
|
| - if (CpuFeatures::IsSupported(VFP2)) {
|
| + if (CpuFeatures::IsSupported(FPU)) {
|
| MathPowStub stub(MathPowStub::ON_STACK);
|
| __ CallStub(&stub);
|
| } else {
|
| @@ -3157,7 +3200,7 @@ void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
|
| __ JumpIfSmi(r1, &done);
|
|
|
| // If the object is not a value type, return the value.
|
| - __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
|
| + __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE, eq);
|
| __ b(ne, &done);
|
|
|
| // Store the value.
|
| @@ -3188,6 +3231,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
|
| void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
|
| ZoneList<Expression*>* args = expr->arguments();
|
| ASSERT(args->length() == 1);
|
| +
|
| VisitForAccumulatorValue(args->at(0));
|
|
|
| Label done;
|
| @@ -3390,8 +3434,8 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
|
| Label runtime, done;
|
| // Check for non-function argument (including proxy).
|
| __ JumpIfSmi(r0, &runtime);
|
| - __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
|
| - __ b(ne, &runtime);
|
| + __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE, eq);
|
| + __ bf(&runtime);
|
|
|
| // InvokeFunction requires the function in r1. Move it in there.
|
| __ mov(r1, result_register());
|
| @@ -3455,13 +3499,15 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
| // r2 now holds finger offset as a smi.
|
| __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| // r3 now points to the start of fixed array elements.
|
| - __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
|
| + __ lsl(r2, r2, Operand(kPointerSizeLog2 - kSmiTagSize));
|
| + __ add(r3, r3, r2);
|
| + __ ldr(r2, MemOperand(r3));
|
| // Note side effect of PreIndex: r3 now points to the key of the pair.
|
| __ cmp(key, r2);
|
| - __ b(ne, ¬_found);
|
| + __ b(ne, ¬_found, Label::kNear);
|
|
|
| __ ldr(r0, MemOperand(r3, kPointerSize));
|
| - __ b(&done);
|
| + __ b_near(&done);
|
|
|
| __ bind(¬_found);
|
| // Call runtime to perform the lookup.
|
| @@ -3487,25 +3533,25 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
|
| __ pop(left);
|
|
|
| Label done, fail, ok;
|
| - __ cmp(left, Operand(right));
|
| - __ b(eq, &ok);
|
| + __ cmp(left, right);
|
| + __ b(eq, &ok, Label::kNear);
|
| // Fail if either is a non-HeapObject.
|
| - __ and_(tmp, left, Operand(right));
|
| - __ JumpIfSmi(tmp, &fail);
|
| + __ land(tmp, left, right);
|
| + __ JumpIfSmi(tmp, &fail, Label::kNear);
|
| __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
|
| __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
|
| __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
|
| - __ b(ne, &fail);
|
| + __ b(ne, &fail, Label::kNear);
|
| __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
|
| - __ cmp(tmp, Operand(tmp2));
|
| - __ b(ne, &fail);
|
| + __ cmp(tmp, tmp2);
|
| + __ b(ne, &fail, Label::kNear);
|
| __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
|
| __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
|
| __ cmp(tmp, tmp2);
|
| - __ b(eq, &ok);
|
| + __ b(eq, &ok, Label::kNear);
|
| __ bind(&fail);
|
| __ LoadRoot(r0, Heap::kFalseValueRootIndex);
|
| - __ jmp(&done);
|
| + __ jmp_near(&done);
|
| __ bind(&ok);
|
| __ LoadRoot(r0, Heap::kTrueValueRootIndex);
|
| __ bind(&done);
|
| @@ -3539,7 +3585,8 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
|
| ASSERT(args->length() == 1);
|
| VisitForAccumulatorValue(args->at(0));
|
|
|
| - __ AssertString(r0);
|
| + __ AbortIfNotString(r0);
|
| +
|
|
|
| __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
|
| __ IndexFromHash(r0, r0);
|
| @@ -3577,7 +3624,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
|
|
| // Check that the array is a JSArray.
|
| __ JumpIfSmi(array, &bailout);
|
| - __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
|
| + __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE, eq);
|
| __ b(ne, &bailout);
|
|
|
| // Check that the array has fast elements.
|
| @@ -3585,8 +3632,9 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
|
|
| // If the array has length zero, return the empty string.
|
| __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
|
| - __ SmiUntag(array_length, SetCC);
|
| - __ b(ne, &non_trivial_array);
|
| + __ SmiUntag(array_length);
|
| + __ tst(array_length, array_length);
|
| + __ b(ne, &non_trivial_array, Label::kNear);
|
| __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
|
| __ b(&done);
|
|
|
| @@ -3602,7 +3650,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| __ mov(string_length, Operand(0));
|
| __ add(element,
|
| elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
|
| - __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
|
| + __ lsl(elements_end, array_length, Operand(kPointerSizeLog2));
|
| + __ add(elements_end, element, elements_end);
|
| // Loop condition: while (element < elements_end).
|
| // Live values in registers:
|
| // elements: Fixed array of strings.
|
| @@ -3612,8 +3661,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // element: Current array element.
|
| // elements_end: Array end.
|
| if (generate_debug_code_) {
|
| - __ cmp(array_length, Operand(0));
|
| - __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
|
| + __ cmpgt(array_length, Operand(0));
|
| + __ Assert(eq, "No empty arrays here in EmitFastAsciiArrayJoin");
|
| }
|
| __ bind(&loop);
|
| __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
|
| @@ -3622,14 +3671,14 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
|
| __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
|
| __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
|
| - __ add(string_length, string_length, Operand(scratch1), SetCC);
|
| - __ b(vs, &bailout);
|
| - __ cmp(element, elements_end);
|
| - __ b(lt, &loop);
|
| + __ addv(string_length, string_length, scratch1);
|
| + __ b(t, &bailout);
|
| + __ cmpge(element, elements_end);
|
| + __ bf(&loop);
|
|
|
| // If array_length is 1, return elements[0], a string.
|
| __ cmp(array_length, Operand(1));
|
| - __ b(ne, ¬_size_one_array);
|
| + __ b(ne, ¬_size_one_array, Label::kNear);
|
| __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
|
| __ b(&done);
|
|
|
| @@ -3651,16 +3700,16 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // string_length to get the length of the result string. array_length is not
|
| // smi but the other values are, so the result is a smi
|
| __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
|
| - __ sub(string_length, string_length, Operand(scratch1));
|
| - __ smull(scratch2, ip, array_length, scratch1);
|
| + __ sub(string_length, string_length, scratch1);
|
| + __ dmuls(scratch2, ip, array_length, scratch1);
|
| // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
|
| // zero.
|
| __ cmp(ip, Operand(0));
|
| __ b(ne, &bailout);
|
| __ tst(scratch2, Operand(0x80000000));
|
| __ b(ne, &bailout);
|
| - __ add(string_length, string_length, Operand(scratch2), SetCC);
|
| - __ b(vs, &bailout);
|
| + __ addv(string_length, string_length, scratch2);
|
| + __ b(t, &bailout);
|
| __ SmiUntag(string_length);
|
|
|
| // Get first element in the array to free up the elements register to be used
|
| @@ -3683,7 +3732,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // Prepare for looping. Set up elements_end to end of the array. Set
|
| // result_pos to the position of the result where to write the first
|
| // character.
|
| - __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
|
| + __ lsl(elements_end, array_length, Operand(kPointerSizeLog2));
|
| + __ add(elements_end, element, elements_end);
|
| result_pos = array_length; // End of live range for array_length.
|
| array_length = no_reg;
|
| __ add(result_pos,
|
| @@ -3692,9 +3742,10 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
|
|
| // Check the length of the separator.
|
| __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
|
| - __ cmp(scratch1, Operand(Smi::FromInt(1)));
|
| - __ b(eq, &one_char_separator);
|
| - __ b(gt, &long_separator);
|
| + __ cmpeq(scratch1, Operand(Smi::FromInt(1)));
|
| + __ bt_near(&one_char_separator);
|
| + __ cmpgt(scratch1, Operand(Smi::FromInt(1)));
|
| + __ bt(&long_separator);
|
|
|
| // Empty separator case
|
| __ bind(&empty_separator_loop);
|
| @@ -3709,8 +3760,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| __ SmiUntag(string_length);
|
| __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
|
| __ CopyBytes(string, result_pos, string_length, scratch1);
|
| - __ cmp(element, elements_end);
|
| - __ b(lt, &empty_separator_loop); // End while (element < elements_end).
|
| + __ cmpge(element, elements_end);
|
| + __ bf(&empty_separator_loop); // End while (element < elements_end).
|
| ASSERT(result.is(r0));
|
| __ b(&done);
|
|
|
| @@ -3720,7 +3771,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
|
| // Jump into the loop after the code that copies the separator, so the first
|
| // element is not preceded by a separator
|
| - __ jmp(&one_char_separator_loop_entry);
|
| + __ jmp_near(&one_char_separator_loop_entry);
|
|
|
| __ bind(&one_char_separator_loop);
|
| // Live values in registers:
|
| @@ -3730,7 +3781,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| // separator: Single separator ASCII char (in lower byte).
|
|
|
| // Copy the separator character to the result.
|
| - __ strb(separator, MemOperand(result_pos, 1, PostIndex));
|
| + __ strb(separator, MemOperand(result_pos));
|
| + __ add(result_pos, result_pos, Operand(1));
|
|
|
| // Copy next array element to the result.
|
| __ bind(&one_char_separator_loop_entry);
|
| @@ -3739,8 +3791,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| __ SmiUntag(string_length);
|
| __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
|
| __ CopyBytes(string, result_pos, string_length, scratch1);
|
| - __ cmp(element, elements_end);
|
| - __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
|
| + __ cmpge(element, elements_end);
|
| + __ bf(&one_char_separator_loop); // End while (element < elements_end).
|
| ASSERT(result.is(r0));
|
| __ b(&done);
|
|
|
| @@ -3767,8 +3819,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
| __ SmiUntag(string_length);
|
| __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
|
| __ CopyBytes(string, result_pos, string_length, scratch1);
|
| - __ cmp(element, elements_end);
|
| - __ b(lt, &long_separator_loop); // End while (element < elements_end).
|
| + __ cmpge(element, elements_end);
|
| + __ bf(&long_separator_loop); // End while (element < elements_end).
|
| ASSERT(result.is(r0));
|
| __ b(&done);
|
|
|
| @@ -4060,8 +4112,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
|
|
|
| int count_value = expr->op() == Token::INC ? 1 : -1;
|
| if (ShouldInlineSmiCase(expr->op())) {
|
| - __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
|
| - __ b(vs, &stub_call);
|
| + __ addv(r0, r0, Operand(Smi::FromInt(count_value)));
|
| + __ b(t, &stub_call);
|
| // We could eliminate this smi check if we split the code at
|
| // the first smi check before calling ToNumber.
|
| patch_site.EmitJumpIfSmi(r0, &done);
|
| @@ -4200,8 +4252,8 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
|
| } else if (check->Equals(isolate()->heap()->string_symbol())) {
|
| __ JumpIfSmi(r0, if_false);
|
| // Check for undetectable objects => false.
|
| - __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
|
| - __ b(ge, if_false);
|
| + __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE, ge);
|
| + __ bt(if_false);
|
| __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
|
| __ tst(r1, Operand(1 << Map::kIsUndetectable));
|
| Split(eq, if_true, if_false, fall_through);
|
| @@ -4227,7 +4279,7 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
|
| } else if (check->Equals(isolate()->heap()->function_symbol())) {
|
| __ JumpIfSmi(r0, if_false);
|
| STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
|
| - __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
|
| + __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE, eq);
|
| __ b(eq, if_true);
|
| __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
|
| Split(eq, if_true, if_false, fall_through);
|
| @@ -4238,10 +4290,10 @@ void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
|
| __ b(eq, if_true);
|
| }
|
| // Check for JS objects => true.
|
| - __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
|
| - __ b(lt, if_false);
|
| - __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
|
| - __ b(gt, if_false);
|
| + __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, ge);
|
| + __ bf(if_false);
|
| + __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE, gt);
|
| + __ bt(if_false);
|
| // Check for undetectable objects => false.
|
| __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
|
| __ tst(r1, Operand(1 << Map::kIsUndetectable));
|
| @@ -4324,10 +4376,11 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
|
| JumpPatchSite patch_site(masm_);
|
| if (inline_smi_code) {
|
| Label slow_case;
|
| - __ orr(r2, r0, Operand(r1));
|
| + __ orr(r2, r0, r1);
|
| patch_site.EmitJumpIfNotSmi(r2, &slow_case);
|
| - __ cmp(r1, r0);
|
| - Split(cond, if_true, if_false, NULL);
|
| + Condition tmp_cond = cond;
|
| + __ cmp(&tmp_cond, r1, r0);
|
| + Split(tmp_cond, if_true, if_false, NULL);
|
| __ bind(&slow_case);
|
| }
|
|
|
| @@ -4337,7 +4390,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
|
| CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
|
| patch_site.EmitPatchInfo();
|
| PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
|
| - __ cmp(r0, Operand(0));
|
| + __ cmp(&cond, r0, Operand(0));
|
| Split(cond, if_true, if_false, fall_through);
|
| }
|
| }
|
| @@ -4379,7 +4432,7 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
|
| // It can be an undetectable object.
|
| __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
|
| __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
|
| - __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
|
| + __ land(r1, r1, Operand(1 << Map::kIsUndetectable));
|
| __ cmp(r1, Operand(1 << Map::kIsUndetectable));
|
| Split(eq, if_true, if_false, fall_through);
|
| }
|
| @@ -4444,10 +4497,11 @@ void FullCodeGenerator::EnterFinallyBlock() {
|
| // Store result register while executing finally block.
|
| __ push(result_register());
|
| // Cook return address in link register to stack (smi encoded Code* delta)
|
| - __ sub(r1, lr, Operand(masm_->CodeObject()));
|
| + __ strpr(r1);
|
| + __ sub(r1, r1, Operand(masm_->CodeObject()));
|
| ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
|
| STATIC_ASSERT(kSmiTag == 0);
|
| - __ add(r1, r1, Operand(r1)); // Convert to smi.
|
| + __ add(r1, r1, r1); // Convert to smi.
|
|
|
| // Store result register while executing finally block.
|
| __ push(r1);
|
| @@ -4498,12 +4552,12 @@ void FullCodeGenerator::ExitFinallyBlock() {
|
|
|
| // Restore result register from stack.
|
| __ pop(r1);
|
| -
|
| // Uncook return address and return.
|
| __ pop(result_register());
|
| ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
|
| - __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
|
| - __ add(pc, r1, Operand(masm_->CodeObject()));
|
| + __ asr(r1, r1, Operand(1)); // Un-smi-tag value.
|
| + __ add(r1, r1, Operand(masm_->CodeObject()));
|
| + __ jmp(r1);
|
| }
|
|
|
|
|
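Cooking turns the return address into a GC-safe smi: the offset from the code object is doubled (the add r1, r1, r1 in EnterFinallyBlock is the tag shift), and uncooking reverses it with an arithmetic shift before rebasing. A sketch of the round trip, treating the addresses as plain integers:

    #include <cstdint>
    #include <cassert>

    int32_t cook(int32_t return_addr, int32_t code_object) {
      int32_t offset = return_addr - code_object;
      return offset + offset;              // smi-tag: offset << 1
    }

    int32_t uncook(int32_t cooked, int32_t code_object) {
      return (cooked >> 1) + code_object;  // un-smi-tag, then rebase
    }

    int main() {
      assert(uncook(cook(0x1240, 0x1000), 0x1000) == 0x1240);
      return 0;
    }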
| @@ -4527,7 +4581,7 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
|
| __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
| }
|
| __ PopTryHandler();
|
| - __ bl(finally_entry_);
|
| + __ jsr(finally_entry_);
|
|
|
| *stack_depth = 0;
|
| *context_length = 0;
|
| @@ -4539,4 +4593,4 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
|
|
|
| } } // namespace v8::internal
|
|
|
| -#endif // V8_TARGET_ARCH_ARM
|
| +#endif // V8_TARGET_ARCH_SH4
|
|
|