Index: src/ic/ppc/stub-cache-ppc.cc
diff --git a/src/ic/arm/stub-cache-arm.cc b/src/ic/ppc/stub-cache-ppc.cc
similarity index 67%
copy from src/ic/arm/stub-cache-arm.cc
copy to src/ic/ppc/stub-cache-ppc.cc
index bc8b0fba84ffe7586b312adf0e75c72c41446475..816a2ae649f85294efeb3db0239fd5628e3a65d7 100644
--- a/src/ic/arm/stub-cache-arm.cc
+++ b/src/ic/ppc/stub-cache-ppc.cc
@@ -1,10 +1,10 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
+// Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

-#if V8_TARGET_ARCH_ARM
+#if V8_TARGET_ARCH_PPC

 #include "src/codegen.h"
 #include "src/ic/stub-cache.h"
@@ -25,9 +25,10 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
   ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
   ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

-  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
-  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
-  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
+  uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address());
+  uintptr_t value_off_addr =
+      reinterpret_cast<uintptr_t>(value_offset.address());
+  uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address());

   // Check the relative positions of the address fields.
   DCHECK(value_off_addr > key_off_addr);
@@ -42,52 +43,56 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
   scratch = no_reg;

   // Multiply by 3 because there are 3 fields per entry (name, code, map).
-  __ add(offset_scratch, offset, Operand(offset, LSL, 1));
+  __ ShiftLeftImm(offset_scratch, offset, Operand(1));
+  __ add(offset_scratch, offset, offset_scratch);

   // Calculate the base address of the entry.
   __ mov(base_addr, Operand(key_offset));
-  __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));
+  __ ShiftLeftImm(scratch2, offset_scratch, Operand(kPointerSizeLog2));
+  __ add(base_addr, base_addr, scratch2);

   // Check that the key in the entry matches the name.
-  __ ldr(ip, MemOperand(base_addr, 0));
+  __ LoadP(ip, MemOperand(base_addr, 0));
   __ cmp(name, ip);
-  __ b(ne, &miss);
+  __ bne(&miss);

   // Check the map matches.
-  __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
-  __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
+  __ LoadP(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
+  __ LoadP(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
   __ cmp(ip, scratch2);
-  __ b(ne, &miss);
+  __ bne(&miss);

   // Get the code entry from the cache.
   Register code = scratch2;
   scratch2 = no_reg;
-  __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));
+  __ LoadP(code, MemOperand(base_addr, value_off_addr - key_off_addr));

   // Check that the flags match what we're looking for.
   Register flags_reg = base_addr;
   base_addr = no_reg;
-  __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
-  // It's a nice optimization if this constant is encodable in the bic insn.
+  __ lwz(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));

-  uint32_t mask = Code::kFlagsNotUsedInLookup;
-  DCHECK(__ ImmediateFitsAddrMode1Instruction(mask));
-  __ bic(flags_reg, flags_reg, Operand(mask));
-  __ cmp(flags_reg, Operand(flags));
-  __ b(ne, &miss);
+  DCHECK(!r0.is(flags_reg));
+  __ li(r0, Operand(Code::kFlagsNotUsedInLookup));
+  __ andc(flags_reg, flags_reg, r0);
+  __ mov(r0, Operand(flags));
+  __ cmpl(flags_reg, r0);
+  __ bne(&miss);

 #ifdef DEBUG
   if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
-    __ jmp(&miss);
+    __ b(&miss);
   } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
-    __ jmp(&miss);
+    __ b(&miss);
   }
 #endif

   if (leave_frame) __ LeaveFrame(StackFrame::INTERNAL);

   // Jump to the first instruction in the code stub.
-  __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ addi(r0, code, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ mtctr(r0);
+  __ bctr();

   // Miss: fall through.
   __ bind(&miss);
@@ -101,9 +106,15 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
   Isolate* isolate = masm->isolate();
   Label miss;

+#if V8_TARGET_ARCH_PPC64
+  // Make sure that code is valid. The multiplying code relies on the
+  // entry size being 24.
+  DCHECK(sizeof(Entry) == 24);
+#else
   // Make sure that code is valid. The multiplying code relies on the
   // entry size being 12.
   DCHECK(sizeof(Entry) == 12);
+#endif

   // Make sure the flags does not name a specific type.
   DCHECK(Code::ExtractTypeFromFlags(flags) == 0);
@@ -133,28 +144,33 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
   __ JumpIfSmi(receiver, &miss);

   // Get the map of the receiver and compute the hash.
-  __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
-  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  __ add(scratch, scratch, Operand(ip));
+  __ lwz(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
+  __ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
+  __ add(scratch, scratch, ip);
+#if V8_TARGET_ARCH_PPC64
+  // Use only the low 32 bits of the map pointer.
+  __ rldicl(scratch, scratch, 0, 32);
+#endif
   uint32_t mask = kPrimaryTableSize - 1;
   // We shift out the last two bits because they are not part of the hash and
   // they are always 01 for maps.
-  __ mov(scratch, Operand(scratch, LSR, kCacheIndexShift));
+  __ ShiftRightImm(scratch, scratch, Operand(kCacheIndexShift));
   // Mask down the eor argument to the minimum to keep the immediate
-  // ARM-encodable.
-  __ eor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
+  // encodable.
+  __ xori(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
   // Prefer and_ to ubfx here because ubfx takes 2 cycles.
-  __ and_(scratch, scratch, Operand(mask));
+  __ andi(scratch, scratch, Operand(mask));

   // Probe the primary table.
   ProbeTable(isolate, masm, flags, leave_frame, kPrimary, receiver, name,
              scratch, extra, extra2, extra3);

   // Primary miss: Compute hash for secondary probe.
-  __ sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift));
+  __ ShiftRightImm(extra, name, Operand(kCacheIndexShift));
+  __ sub(scratch, scratch, extra);
   uint32_t mask2 = kSecondaryTableSize - 1;
-  __ add(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
-  __ and_(scratch, scratch, Operand(mask2));
+  __ addi(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
+  __ andi(scratch, scratch, Operand(mask2));

   // Probe the secondary table.
   ProbeTable(isolate, masm, flags, leave_frame, kSecondary, receiver, name,
@@ -172,4 +188,4 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
 }
 }  // namespace v8::internal

-#endif  // V8_TARGET_ARCH_ARM
+#endif  // V8_TARGET_ARCH_PPC
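
For reference, the table-index computation that the probe code above ports to PPC can be sketched in plain C++. This is a minimal illustration, not code from the patch: the function names are made up, and the values assumed for kCacheIndexShift, kPrimaryTableSize and kSecondaryTableSize are only stand-ins for the real constants in src/ic/stub-cache.h.

#include <cstdint>

// Assumed values for illustration only.
constexpr uint32_t kCacheIndexShift = 2;      // low two bits of a map are always 01
constexpr uint32_t kPrimaryTableSize = 2048;
constexpr uint32_t kSecondaryTableSize = 512;

// Primary probe: name hash field plus the (low 32 bits of the) map pointer,
// shifted, xor-folded with the code flags, then masked to the table size.
// This mirrors the lwz/LoadP/add/ShiftRightImm/xori/andi sequence above.
uint32_t PrimaryIndex(uint32_t name_hash_field, uintptr_t map_ptr,
                      uint32_t flags) {
  uint32_t scratch = name_hash_field + static_cast<uint32_t>(map_ptr);
  scratch >>= kCacheIndexShift;
  scratch ^= (flags >> kCacheIndexShift) & (kPrimaryTableSize - 1);
  return scratch & (kPrimaryTableSize - 1);
}

// Secondary probe after a primary miss: subtract the shifted name pointer,
// add the masked flags, mask to the smaller table (ShiftRightImm/sub/addi/andi).
uint32_t SecondaryIndex(uint32_t primary_index, uintptr_t name_ptr,
                        uint32_t flags) {
  uint32_t scratch =
      primary_index - static_cast<uint32_t>(name_ptr >> kCacheIndexShift);
  scratch += (flags >> kCacheIndexShift) & (kSecondaryTableSize - 1);
  return scratch & (kSecondaryTableSize - 1);
}

Each cache entry holds three pointer-sized fields (key, code, map), which is why ProbeTable multiplies the index by 3 before scaling: on PPC this becomes a ShiftLeftImm by 1 plus an add (index * 3), then a ShiftLeftImm by kPointerSizeLog2, matching the DCHECK that sizeof(Entry) is 12 on 32-bit and 24 on 64-bit targets.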