Index: src/sh4/ic-sh4.cc
diff --git a/src/arm/ic-arm.cc b/src/sh4/ic-sh4.cc
similarity index 91%
copy from src/arm/ic-arm.cc
copy to src/sh4/ic-sh4.cc
index 48395897da826b34adad426cabf92407376d1075..95f16c6f5497984f54cfdaf5abb5f7d7abe34c12 100644
--- a/src/arm/ic-arm.cc
+++ b/src/sh4/ic-sh4.cc
@@ -1,4 +1,4 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
+// Copyright 2011-2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -27,9 +27,9 @@
 
 #include "v8.h"
 
-#if defined(V8_TARGET_ARCH_ARM)
+#if defined(V8_TARGET_ARCH_SH4)
 
-#include "assembler-arm.h"
+#include "assembler-sh4.h"
 #include "code-stubs.h"
 #include "codegen.h"
 #include "disasm.h"
@@ -47,18 +47,20 @@ namespace internal {
 
 #define __ ACCESS_MASM(masm)
 
+// ARM to SH4 mapping
+#include "map-sh4.h"
 
 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                             Register type,
                                             Label* global_object) {
   // Register usage:
   //   type: holds the receiver instance type on entry.
-  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
-  __ b(eq, global_object);
-  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
-  __ b(eq, global_object);
-  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
-  __ b(eq, global_object);
+  __ cmpeq(type, Operand(JS_GLOBAL_OBJECT_TYPE));
+  __ bt(global_object);
+  __ cmpeq(type, Operand(JS_BUILTINS_OBJECT_TYPE));
+  __ bt(global_object);
+  __ cmpeq(type, Operand(JS_GLOBAL_PROXY_TYPE));
+  __ bt(global_object);
 }
 
 
@@ -82,8 +84,8 @@ static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
   __ JumpIfSmi(receiver, miss);
 
   // Check that the receiver is a valid JS object.
-  __ CompareObjectType(receiver, t0, t1, FIRST_SPEC_OBJECT_TYPE);
-  __ b(lt, miss);
+  __ CompareObjectType(receiver, t0, t1, FIRST_SPEC_OBJECT_TYPE, ge);
+  __ bf(miss);
 
   // If this assert fails, we have to check upper bound too.
   STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
@@ -94,13 +96,13 @@ static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
   __ ldrb(t1, FieldMemOperand(t0, Map::kBitFieldOffset));
   __ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
                      (1 << Map::kHasNamedInterceptor)));
-  __ b(ne, miss);
+  __ bf(miss);
 
   __ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   __ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset));
   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
-  __ cmp(t1, ip);
-  __ b(ne, miss);
+  __ cmpeq(t1, ip);
+  __ bf(miss);
 }
 
 
@@ -199,7 +201,7 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
        PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
   __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
   __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
-  __ b(ne, miss);
+  __ bf(miss);
 
   // Store the value at the masked, scaled index and return.
   const int kValueOffset = kElementsStartOffset + kPointerSize;
@@ -283,8 +285,8 @@ static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
   // objects work as intended.
   ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
   __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
-  __ cmp(scratch, Operand(JS_OBJECT_TYPE));
-  __ b(lt, slow);
+  __ cmpge(scratch, Operand(JS_OBJECT_TYPE));
+  __ bf(slow);
 }
 
 
@@ -333,14 +335,14 @@ static void GenerateFastArrayLoad(MacroAssembler* masm,
   }
   // Check that the key (index) is within bounds.
   __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
-  __ cmp(key, Operand(scratch1));
-  __ b(hs, out_of_range);
+  __ cmphs(key, scratch1);
+  __ bt(out_of_range);
   // Fast case: Do the load.
   __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   // The key is a smi.
   STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
-  __ ldr(scratch2,
-         MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
+  __ lsl(scratch2, key, Operand(kPointerSizeLog2 - kSmiTagSize));
+  __ ldr(scratch2, MemOperand(scratch1, scratch2));
   __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
   __ cmp(scratch2, ip);
   // In case the loaded value is the_hole we have to consult GetProperty
@@ -360,8 +362,8 @@ static void GenerateKeyStringCheck(MacroAssembler* masm,
                                    Label* not_symbol) {
   // The key is not a smi.
   // Is it a string?
-  __ CompareObjectType(key, map, hash, FIRST_NONSTRING_TYPE);
-  __ b(ge, not_symbol);
+  __ CompareObjectType(key, map, hash, FIRST_NONSTRING_TYPE, ge);
+  __ bt(not_symbol);
 
   // Is the string an array index, with cached numeric value?
   __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset));
@@ -407,9 +409,9 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
   // to probe.
   //
   // Check for number.
-  __ JumpIfSmi(r1, &number);
-  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
-  __ b(ne, &non_number);
+  __ JumpIfSmi(r1, &number, Label::kNear);
+  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE, eq);
+  __ bf_near(&non_number);
   __ bind(&number);
   StubCompiler::GenerateLoadGlobalFunctionPrototype(
       masm, Context::NUMBER_FUNCTION_INDEX, r1);
@@ -417,8 +419,8 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
 
   // Check for string.
   __ bind(&non_number);
-  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
-  __ b(hs, &non_string);
+  __ cmphs(r3, Operand(FIRST_NONSTRING_TYPE));
+  __ bt_near(&non_string);
   StubCompiler::GenerateLoadGlobalFunctionPrototype(
       masm, Context::STRING_FUNCTION_INDEX, r1);
   __ b(&probe);
@@ -454,8 +456,8 @@ static void GenerateFunctionTailCall(MacroAssembler* masm,
   __ JumpIfSmi(r1, miss);
 
   // Check that the value is a JSFunction.
-  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
-  __ b(ne, miss);
+  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE, eq);
+  __ bf(miss);
 
   // Invoke the function.
   ParameterCount actual(argc);
@@ -519,7 +521,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm,
     __ CallStub(&stub);
 
     // Move result to r1 and leave the internal frame.
-    __ mov(r1, Operand(r0));
+    __ mov(r1, r0);
   }
 
   // Check if the receiver is a global object of some sort.
@@ -527,11 +529,11 @@ void CallICBase::GenerateMiss(MacroAssembler* masm,
   if (id == IC::kCallIC_Miss) {
     Label invoke, global;
     __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
-    __ JumpIfSmi(r2, &invoke);
-    __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
-    __ b(eq, &global);
-    __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
-    __ b(ne, &invoke);
+    __ JumpIfSmi(r2, &invoke, Label::kNear);
+    __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE, eq);
+    __ bt_near(&global);
+    __ cmpeq(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
+    __ bf_near(&invoke);
 
     // Patch the receiver on the stack.
     __ bind(&global);
@@ -609,7 +611,7 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
   __ cmp(r3, ip);
   __ b(ne, &slow_load);
-  __ mov(r0, Operand(r2, ASR, kSmiTagSize));
+  __ asr(r0, r2, Operand(kSmiTagSize));
   // r0: untagged index
   __ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
   __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
@@ -770,8 +772,8 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
   // requires access checks.
   __ JumpIfSmi(object, slow_case);
   // Check that the object is some kind of JSObject.
-  __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
-  __ b(lt, slow_case);
+  __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE, ge);
+  __ bf(slow_case);
 
   // Check that the key is a positive smi.
   __ tst(key, Operand(0x80000001));
@@ -786,8 +788,8 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
   // to the unmapped lookup with the parameter map in scratch1.
   __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
   __ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
-  __ cmp(key, Operand(scratch2));
-  __ b(cs, unmapped_case);
+  __ cmphs(key, scratch2);
+  __ b(t, unmapped_case);
 
   // Load element index and check whether it is the hole.
   const int kOffset =
@@ -829,8 +831,8 @@ static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
   __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
               DONT_DO_SMI_CHECK);
   __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
-  __ cmp(key, Operand(scratch));
-  __ b(cs, slow_case);
+  __ cmphs(key, scratch);
+  __ b(t, slow_case);
   __ mov(scratch, Operand(kPointerSize >> 1));
   __ mul(scratch, key, scratch);
   __ add(scratch,
@@ -999,9 +1001,9 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // r3: elements map
   // r4: elements
   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
-  __ cmp(r3, ip);
-  __ b(ne, &slow);
-  __ mov(r2, Operand(r0, ASR, kSmiTagSize));
+  __ cmpeq(r3, ip);
+  __ bf(&slow);
+  __ asr(r2, r0, Operand(kSmiTagSize));
   __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
   __ Ret();
 
@@ -1023,16 +1025,17 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
   __ cmp(r4, ip);
-  __ b(eq, &probe_dictionary);
+  __ bt(&probe_dictionary);
 
   // Load the map of the receiver, compute the keyed lookup cache hash
   // based on 32 bits of the map pointer and the string hash.
   __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
-  __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
+  __ asr(r3, r2, Operand(KeyedLookupCache::kMapHashShift));
   __ ldr(r4, FieldMemOperand(r0, String::kHashFieldOffset));
-  __ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
+  __ asr(r4, r4, Operand(String::kHashShift));
+  __ eor(r3, r3, r4);
   int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
-  __ And(r3, r3, Operand(mask));
+  __ land(r3, r3, Operand(mask));
 
   // Load the key (consisting of map and symbol) from the cache and
   // check for match.
@@ -1043,7 +1046,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
       ExternalReference::keyed_lookup_cache_keys(isolate);
 
   __ mov(r4, Operand(cache_keys));
-  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
+  __ lsl(r5, r3, Operand(kPointerSizeLog2 + 1));
+  __ add(r4, r4, r5);
 
   for (int i = 0; i < kEntriesPerBucket - 1; i++) {
     Label try_next_entry;
@@ -1062,8 +1066,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ cmp(r2, r5);
   __ b(ne, &slow);
   __ ldr(r5, MemOperand(r4));
-  __ cmp(r0, r5);
-  __ b(ne, &slow);
+  __ cmpeq(r0, r5);
+  __ bf(&slow);
 
   // Get field offset.
   // r0     : key
@@ -1080,10 +1084,12 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
     if (i != 0) {
       __ add(r3, r3, Operand(i));
     }
-    __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
+    __ lsl(r6, r3, Operand(kPointerSizeLog2));
+    __ ldr(r5, MemOperand(r4, r6));
     __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
-    __ sub(r5, r5, r6, SetCC);
-    __ b(ge, &property_array_property);
+    __ sub(r5, r5, r6);
+    __ cmpge(r5, Operand(0));
+    __ bt(&property_array_property);
     if (i != 0) {
       __ jmp(&load_in_object_property);
     }
@@ -1094,7 +1100,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
   __ add(r6, r6, r5);  // Index from start of object.
   __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
-  __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
+  __ lsl(r0, r6, Operand(kPointerSizeLog2));
+  __ ldr(r0, MemOperand(r1, r0));
   __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                       1, r2, r3);
   __ Ret();
@@ -1103,7 +1110,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   __ bind(&property_array_property);
   __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
   __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
+  __ lsl(r0, r5, Operand(kPointerSizeLog2));
+  __ ldr(r0, MemOperand(r1, r0));
   __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                       1, r2, r3);
   __ Ret();
@@ -1171,11 +1179,11 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
   Label slow;
 
   // Check that the receiver isn't a smi.
-  __ JumpIfSmi(r1, &slow);
+  __ JumpIfSmi(r1, &slow, Label::kNear);
 
   // Check that the key is an array index, that is Uint32.
   __ tst(r0, Operand(kSmiTagMask | kSmiSignMask));
-  __ b(ne, &slow);
+  __ bf_near(&slow);
 
   // Get the map of the receiver.
   __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -1183,9 +1191,9 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
   // Check that it has indexed interceptor and access checks
   // are not enabled for this object.
   __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
-  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
-  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
-  __ b(ne, &slow);
+  __ land(r3, r3, Operand(kSlowCaseBitFieldMask));
+  __ cmpeq(r3, Operand(1 << Map::kHasIndexedInterceptor));
+  __ bf_near(&slow);
 
   // Everything is fine, call runtime.
   __ Push(r1, r0);  // Receiver, key.
@@ -1339,7 +1347,8 @@ static void KeyedStoreGenerateGenericHelper(
   }
   // It's irrelevant whether array is smi-only or not when writing a smi.
   __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+  __ lsl(scratch_value, key, Operand(kPointerSizeLog2 - kSmiTagSize));
+  __ add(address, address, scratch_value);
   __ str(value, MemOperand(address));
   __ Ret();
 
@@ -1356,7 +1365,8 @@ static void KeyedStoreGenerateGenericHelper(
     __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
   }
   __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
+  __ lsl(scratch_value, key, Operand(kPointerSizeLog2 - kSmiTagSize));
+  __ add(address, address, scratch_value);
   __ str(value, MemOperand(address));
   // Update write barrier for the elements array address.
   __ mov(scratch_value, value);  // Preserve the value which is returned.
@@ -1461,30 +1471,30 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   // r4 and r5 are used as general scratch registers.
 
   // Check that the key is a smi.
-  __ JumpIfNotSmi(key, &slow);
+  __ JumpIfNotSmi(key, &slow, Label::kNear);
   // Check that the object isn't a smi.
-  __ JumpIfSmi(receiver, &slow);
+  __ JumpIfSmi(receiver, &slow, Label::kNear);
   // Get the map of the object.
   __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   // Check that the receiver does not require access checks.  We need
   // to do this because this generic stub does not perform map checks.
   __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
   __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
-  __ b(ne, &slow);
+  __ bf_near(&slow);
   // Check if the object is a JS array or not.
   __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
   __ cmp(r4, Operand(JS_ARRAY_TYPE));
   __ b(eq, &array);
   // Check that the object is some kind of JSObject.
-  __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
-  __ b(lt, &slow);
+  __ cmpge(r4, Operand(FIRST_JS_OBJECT_TYPE));
+  __ bf_near(&slow);
 
   // Object case: Check key against length in the elements array.
   __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
   // Check array bounds. Both the key and the length of FixedArray are smis.
   __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
-  __ cmp(key, Operand(ip));
-  __ b(lo, &fast_object);
+  __ cmphs(key, ip);
+  __ bf(&fast_object);
 
   // Slow case, handle jump to runtime.
   __ bind(&slow);
@@ -1499,12 +1509,14 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
   // element to the array by writing to array[array.length].
   __ bind(&extra);
-  // Condition code from comparing key and array length is still available.
-  __ b(ne, &slow);  // Only support writing to writing to array[array.length].
+  // Redo the key/length comparison; its result is no longer available here.
+  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
+  __ cmpeq(key, ip);
+  __ bf(&slow);  // Only support writing to array[array.length].
   // Check for room in the elements backing store.
   // Both the key and the length of FixedArray are smis.
   __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
-  __ cmp(key, Operand(ip));
-  __ b(hs, &slow);
+  __ cmphs(key, ip);
+  __ bt(&slow);
   __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
   __ cmp(elements_map,
          Operand(masm->isolate()->factory()->fixed_array_map()));
@@ -1525,8 +1537,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
 
   // Check the key against the length in the array.
   __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
-  __ cmp(key, Operand(ip));
-  __ b(hs, &extra);
+  __ cmphs(key, ip);
+  __ bt(&extra);
+  // Fall through to fast case.
 
   KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                   &slow, kCheckMap, kDontIncrementLength,
@@ -1600,14 +1613,14 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
   __ JumpIfSmi(receiver, &miss);
 
   // Check that the object is a JS array.
-  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
-  __ b(ne, &miss);
+  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE, eq);
+  __ bf(&miss);
 
   // Check that elements are FixedArray.
   // We rely on StoreIC_ArrayLength below to deal with all types of
   // fast elements (including COW).
   __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
-  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
+  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE, eq);
   __ b(ne, &miss);
 
   // Check that the array has fast properties, otherwise the length
@@ -1734,9 +1747,9 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
 
 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
   Address cmp_instruction_address =
-      Assembler::return_address_from_call_start(address);
+      address + Assembler::kCallTargetAddressOffset;
 
-  // If the instruction following the call is not a cmp rx, #yyy, nothing
+  // If the instruction following the call is not a cmp #ii, rx, nothing
   // was inlined.
   Instr instr = Assembler::instr_at(cmp_instruction_address);
   if (!Assembler::IsCmpImmediate(instr)) {
@@ -1745,10 +1758,11 @@ void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
 
   // The delta to the start of the map check instruction and the
   // condition code uses at the patched jump.
-  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
-  delta +=
-      Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
-  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
+  int delta = Assembler::GetCmpImmediateAsUnsigned(instr);
+  // TODO(stm): is this needed for ST40?
+  // delta += Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask
+
+  // If the delta is 0 the instruction is cmp #0, r0 which also signals that
   // nothing was inlined.
   if (delta == 0) {
     return;
@@ -1762,36 +1776,57 @@ void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
 #endif
 
   Address patch_address =
-      cmp_instruction_address - delta * Instruction::kInstrSize;
+      cmp_instruction_address - delta * Assembler::kInstrSize;
   Instr instr_at_patch = Assembler::instr_at(patch_address);
+#ifdef DEBUG
+  Instr instr_before_patch =
+      Assembler::instr_at(patch_address - Assembler::kInstrSize);
+#endif
   Instr branch_instr =
-      Assembler::instr_at(patch_address + Instruction::kInstrSize);
-  // This is patching a conditional "jump if not smi/jump if smi" site.
-  // Enabling by changing from
-  //   cmp rx, rx
-  //   b eq/ne, <target>
-  // to
-  //   tst rx, #kSmiTagMask
-  //   b ne/eq, <target>
-  // and vice-versa to be disabled again.
-  CodePatcher patcher(patch_address, 2);
-  Register reg = Assembler::GetRn(instr_at_patch);
-  if (check == ENABLE_INLINED_SMI_CHECK) {
-    ASSERT(Assembler::IsCmpRegister(instr_at_patch));
-    ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
-              Assembler::GetRm(instr_at_patch).code());
-    patcher.masm()->tst(reg, Operand(kSmiTagMask));
-  } else {
-    ASSERT(check == DISABLE_INLINED_SMI_CHECK);
-    ASSERT(Assembler::IsTstImmediate(instr_at_patch));
-    patcher.masm()->cmp(reg, reg);
-  }
+      Assembler::instr_at(patch_address + Assembler::kInstrSize);
+  ASSERT(Assembler::IsCmpRegister(instr_at_patch));
+  ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
+            Assembler::GetRm(instr_at_patch).code());
+  ASSERT(Assembler::IsMovImmediate(instr_before_patch));
+  ASSERT_EQ(Assembler::GetRn(instr_before_patch).code(), sh4_ip.code());
   ASSERT(Assembler::IsBranch(branch_instr));
-  if (Assembler::GetCondition(branch_instr) == eq) {
-    patcher.EmitCondition(ne);
+  if (Assembler::GetCondition(branch_instr) == f) {
+    // This is patching a "jump if not smi" site to be active.
+    // Changing
+    //   mov #kSmiTagMask, sh4_ip
+    //   cmp rx, rx
+    //   bf <skip>        // actually a bt <target>
+    //   ...
+    //   bra <target>
+    //   skip:
+    // to
+    //   mov #kSmiTagMask, sh4_ip
+    //   tst rx, sh4_ip
+    //   bt <skip>       // actually implements a bf <target>
+    //   ...
+    CodePatcher patcher(patch_address, 2);
+    Register reg = Assembler::GetRn(instr_at_patch);
+    patcher.masm()->tst(reg, sh4_ip);
+    patcher.EmitCondition(t);
   } else {
-    ASSERT(Assembler::GetCondition(branch_instr) == ne);
-    patcher.EmitCondition(eq);
+    ASSERT(Assembler::GetCondition(branch_instr) == t);
+    // This is patching a "jump if smi" site to be active.
+    // Changing
+    //   mov #kSmiTagMask, sh4_ip
+    //   cmp rx, rx
+    //   bt <skip>        // actually a bf <target>
+    //   ...
+    //   bra <target>
+    //   skip:
+    // to
+    //   mov #kSmiTagMask, sh4_ip
+    //   tst rx, sh4_ip
+    //   bf <target>
+    //   ...
+    CodePatcher patcher(patch_address, 2);
+    Register reg = Assembler::GetRn(instr_at_patch);
+    patcher.masm()->tst(reg, sh4_ip);
+    patcher.EmitCondition(f);
   }
 }
 
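Note on the two recurring rewrites in this port (a reviewer's sketch, not part of the patch; the register and label names below are placeholders): ARM pairs a flag-setting cmp with a condition-coded branch, whereas SH4 has a single T bit. Each SH4 compare (cmpeq, cmpge, cmphs) evaluates one fixed predicate into T, and bt/bf branch on T being true or false; the bt_near/bf_near variants and Label::kNear evidently select the short-displacement branch encoding when the target is close. Separately, ARM's scaled-index addressing has no SH4 counterpart, so the index shift is materialized through a scratch register. Both patterns, shown with the MacroAssembler calls that appear in this diff:

  // ARM: the compare sets the flags; the branch tests a condition code.
  //   __ cmp(reg, Operand(JS_OBJECT_TYPE));
  //   __ b(lt, slow);                           // jump if reg < JS_OBJECT_TYPE
  // SH4: the predicate is baked into the compare and only T survives, so
  // "branch if lower" becomes "compare greater-or-equal, branch if false":
  //   __ cmpge(reg, Operand(JS_OBJECT_TYPE));   // T = (reg >= JS_OBJECT_TYPE)
  //   __ bf(slow);                              // taken when T is false

  // ARM: the shift is folded into the addressing mode of the load.
  //   __ ldr(dst, MemOperand(base, index, LSL, kPointerSizeLog2));
  // SH4: no scaled addressing mode; shift into a scratch register first.
  //   __ lsl(scratch, index, Operand(kPointerSizeLog2));
  //   __ ldr(dst, MemOperand(base, scratch));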