Index: src/mips64/stub-cache-mips64.cc
diff --git a/src/mips/stub-cache-mips.cc b/src/mips64/stub-cache-mips64.cc
similarity index 90%
copy from src/mips/stub-cache-mips.cc
copy to src/mips64/stub-cache-mips64.cc
index 3611a28c1ba8134664d595f1672461d3467f3a3a..6c46321aae03a8cad7a22c5bd88b40345bee8767 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips64/stub-cache-mips64.cc
@@ -4,7 +4,7 @@
 
 #include "src/v8.h"
 
-#if V8_TARGET_ARCH_MIPS
+#if V8_TARGET_ARCH_MIPS64
 
 #include "src/codegen.h"
 #include "src/ic-inl.h"
@@ -31,9 +31,9 @@ static void ProbeTable(Isolate* isolate,
   ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
   ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
 
-  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
-  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
-  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
+  uint64_t key_off_addr = reinterpret_cast<uint64_t>(key_offset.address());
+  uint64_t value_off_addr = reinterpret_cast<uint64_t>(value_offset.address());
+  uint64_t map_off_addr = reinterpret_cast<uint64_t>(map_offset.address());
 
   // Check the relative positions of the address fields.
   ASSERT(value_off_addr > key_off_addr);
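
The uint64_t widening above matters beyond the casts themselves: the deltas
derived from these addresses are used as MemOperand() displacements further
down, so they must be computed at full pointer width. For orientation, each
stub cache entry groups three pointers reachable from one base register via
those constant deltas (an illustrative sketch, not v8's actual declaration):

  struct Entry {  // one stub cache entry, sizeof == 3 * kPointerSize
    Name* key;    // at base_addr
    Code* value;  // at base_addr + (value_off_addr - key_off_addr)
    Map* map;     // at base_addr + (map_off_addr - key_off_addr)
  };
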
@@ -48,27 +48,27 @@ static void ProbeTable(Isolate* isolate,
   scratch = no_reg;
 
   // Multiply by 3 because there are 3 fields per entry (name, code, map).
-  __ sll(offset_scratch, offset, 1);
-  __ Addu(offset_scratch, offset_scratch, offset);
+  __ dsll(offset_scratch, offset, 1);
+  __ Daddu(offset_scratch, offset_scratch, offset);
 
   // Calculate the base address of the entry.
   __ li(base_addr, Operand(key_offset));
-  __ sll(at, offset_scratch, kPointerSizeLog2);
-  __ Addu(base_addr, base_addr, at);
+  __ dsll(at, offset_scratch, kPointerSizeLog2);
+  __ Daddu(base_addr, base_addr, at);
 
   // Check that the key in the entry matches the name.
-  __ lw(at, MemOperand(base_addr, 0));
+  __ ld(at, MemOperand(base_addr, 0));
   __ Branch(&miss, ne, name, Operand(at));
 
   // Check the map matches.
-  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
-  __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
+  __ ld(at, MemOperand(base_addr, map_off_addr - key_off_addr));
+  __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
   __ Branch(&miss, ne, at, Operand(scratch2));
 
   // Get the code entry from the cache.
   Register code = scratch2;
   scratch2 = no_reg;
-  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));
+  __ ld(code, MemOperand(base_addr, value_off_addr - key_off_addr));
 
   // Check that the flags match what we're looking for.
   Register flags_reg = base_addr;
@@ -86,7 +86,7 @@ static void ProbeTable(Isolate* isolate,
 #endif
 
   // Jump to the first instruction in the code stub.
-  __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ Daddu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ Jump(at);
 
   // Miss: fall through.
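
The substitutions in the hunk above set the pattern for the rest of the
patch: every pointer-sized memory access and every piece of pointer
arithmetic moves to the MIPS64 doubleword form. Roughly:

  // 32-bit MIPS          MIPS64 counterpart     effect after the port
  // lw   / sw            ld    / sd             8-byte load / store
  // Addu / Subu          Daddu / Dsubu          64-bit add / subtract
  // sll  / srl           dsll  / dsrl           64-bit shift left / right
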
@@ -113,7 +113,7 @@ void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
 
   // Bail out if the receiver has a named interceptor or requires access checks.
   Register map = scratch1;
-  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
+  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
   __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
   __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
   __ Branch(miss_label, ne, scratch0, Operand(zero_reg));
@@ -124,15 +124,15 @@ void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
 
   // Load properties array.
   Register properties = scratch0;
-  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   // Check that the properties array is a dictionary.
-  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
+  __ ld(map, FieldMemOperand(properties, HeapObject::kMapOffset));
   Register tmp = properties;
   __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
   __ Branch(miss_label, ne, map, Operand(tmp));
 
   // Restore the temporarily used register.
-  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
+  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
 
 
   NameDictionaryLookupStub::GenerateNegativeLookup(masm,
@@ -160,7 +160,8 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
 
   // Make sure that code is valid. The multiplying code relies on the
   // entry size being 12.
-  ASSERT(sizeof(Entry) == 12);
+  // ASSERT(sizeof(Entry) == 12);
+  // ASSERT(sizeof(Entry) == 3 * kPointerSize);
 
   // Make sure the flags does not name a specific type.
   ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
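
Note that the port disables both asserts, which leaves the "entry size
being 12" comment above stale: with kPointerSize == 8 the multiplying code
in ProbeTable actually relies on sizeof(Entry) == 3 * kPointerSize == 24.
The scaling works out as follows (a worked sketch, not part of the patch):

  // offset_scratch = offset * 3                 (dsll by 1, then Daddu)
  // byte offset    = offset_scratch << kPointerSizeLog2    (i.e. * 8)
  //                = offset * 24 == offset * sizeof(Entry)
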
@@ -190,13 +191,13 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
   __ JumpIfSmi(receiver, &miss);
 
   // Get the map of the receiver and compute the hash.
-  __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
-  __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  __ Addu(scratch, scratch, at);
-  uint32_t mask = kPrimaryTableSize - 1;
+  __ ld(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
+  __ ld(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
+  __ Daddu(scratch, scratch, at);
+  uint64_t mask = kPrimaryTableSize - 1;
   // We shift out the last two bits because they are not part of the hash and
   // they are always 01 for maps.
-  __ srl(scratch, scratch, kHeapObjectTagSize);
+  __ dsrl(scratch, scratch, kHeapObjectTagSize);
   __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
   __ And(scratch, scratch, Operand(mask));
 
@@ -213,10 +214,10 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
              extra3);
 
   // Primary miss: Compute hash for secondary probe.
-  __ srl(at, name, kHeapObjectTagSize);
-  __ Subu(scratch, scratch, at);
-  uint32_t mask2 = kSecondaryTableSize - 1;
-  __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
+  __ dsrl(at, name, kHeapObjectTagSize);
+  __ Dsubu(scratch, scratch, at);
+  uint64_t mask2 = kSecondaryTableSize - 1;
+  __ Daddu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
   __ And(scratch, scratch, Operand(mask2));
 
   // Probe the secondary table.
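
Written out as plain C++, the two probe computations above amount to
roughly the following (a sketch with illustrative parameter names that
mirrors the generated code, assuming the surrounding StubCache constants
are in scope; it is not a quote of v8's own helpers):

  static inline uint64_t PrimaryIndex(uint64_t hash_field, uint64_t map_word,
                                      uint64_t flags) {
    // Mix the name's hash with the receiver's map word, drop the tag bits,
    // fold in the code flags, and mask down to the primary table size.
    return (((hash_field + map_word) >> kHeapObjectTagSize) ^
            (flags >> kHeapObjectTagSize)) & (kPrimaryTableSize - 1);
  }

  static inline uint64_t SecondaryIndex(uint64_t primary, uint64_t name_word,
                                        uint64_t flags) {
    // On a primary miss: remove the name, re-mix the flags, mask again.
    return ((primary - (name_word >> kHeapObjectTagSize)) +
            (flags >> kHeapObjectTagSize)) & (kSecondaryTableSize - 1);
  }
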
@@ -243,18 +244,18 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                        int index,
                                                        Register prototype) {
   // Load the global or builtins object from the current context.
-  __ lw(prototype,
+  __ ld(prototype,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   // Load the native context from the global or builtins object.
-  __ lw(prototype,
+  __ ld(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
   // Load the function from the native context.
-  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
+  __ ld(prototype, MemOperand(prototype, Context::SlotOffset(index)));
   // Load the initial map. The global functions all have initial maps.
-  __ lw(prototype,
+  __ ld(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
   // Load the prototype from the initial map.
-  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
+  __ ld(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
 }
 
 
@@ -271,16 +272,16 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
   // Check we're still in the same context.
   Register scratch = prototype;
   const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
-  __ lw(scratch, MemOperand(cp, offset));
-  __ lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
-  __ lw(scratch, MemOperand(scratch, Context::SlotOffset(index)));
+  __ ld(scratch, MemOperand(cp, offset));
+  __ ld(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
+  __ ld(scratch, MemOperand(scratch, Context::SlotOffset(index)));
   __ li(at, function);
   __ Branch(miss, ne, at, Operand(scratch));
 
   // Load its initial map. The global functions all have initial maps.
   __ li(prototype, Handle<Map>(function->initial_map()));
   // Load the prototype from the initial map.
-  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
+  __ ld(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
 }
 
 
@@ -295,10 +296,10 @@ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
   if (!inobject) {
     // Calculate the offset into the properties array.
     offset = offset + FixedArray::kHeaderSize;
-    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
+    __ ld(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
     src = dst;
   }
-  __ lw(dst, FieldMemOperand(src, offset));
+  __ ld(dst, FieldMemOperand(src, offset));
 }
 
 
@@ -315,7 +316,7 @@ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
 
   // Load length directly from the JS array.
   __ Ret(USE_DELAY_SLOT);
-  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+  __ ld(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
 }
 
 
@@ -338,7 +339,7 @@ void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
   Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
   ASSERT(cell->value()->IsTheHole());
   __ li(scratch, Operand(cell));
-  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
+  __ ld(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
   __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
   __ Branch(miss, ne, scratch, Operand(at));
 }
@@ -397,7 +398,7 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     HeapType::Iterator<Map> it = field_type->Classes();
     Handle<Map> current;
     if (!it.Done()) {
-      __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
+      __ ld(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
       Label do_store;
       while (true) {
         // Do the CompareMap() directly within the Branch() functions.
@@ -413,9 +414,8 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
   } else if (representation.IsDouble()) {
     Label do_store, heap_number;
-    __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex);
-    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow,
-                          TAG_RESULT, MUTABLE);
+    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
+    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
 
     __ JumpIfNotSmi(value_reg, &heap_number);
     __ SmiUntag(scratch1, value_reg);
@@ -453,7 +453,7 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
 
   // Update the map of the object.
   __ li(scratch1, Operand(transition));
-  __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+  __ sd(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
 
   // Update the write barrier for the map field.
   __ RecordWriteField(receiver_reg,
@@ -487,9 +487,9 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
     if (representation.IsDouble()) {
-      __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
+      __ sd(storage_reg, FieldMemOperand(receiver_reg, offset));
     } else {
-      __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
+      __ sd(value_reg, FieldMemOperand(receiver_reg, offset));
     }
 
     if (!representation.IsSmi()) {
@@ -510,12 +510,12 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     // Write to the properties array.
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array
-    __ lw(scratch1,
+    __ ld(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
     if (representation.IsDouble()) {
-      __ sw(storage_reg, FieldMemOperand(scratch1, offset));
+      __ sd(storage_reg, FieldMemOperand(scratch1, offset));
     } else {
-      __ sw(value_reg, FieldMemOperand(scratch1, offset));
+      __ sd(value_reg, FieldMemOperand(scratch1, offset));
     }
 
     if (!representation.IsSmi()) {
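
The in-object and properties-array stores above differ only in where the
field lives; with the mips64 value kPointerSize == 8 the byte offsets work
out as follows (a worked example, not patch content — note the in-object
path here uses a negative index, so the offset lands inside the object):

  // In-object field:        offset = instance_size + index * kPointerSize
  // Properties-array field: offset = FixedArray::kHeaderSize
  //                                  + index * kPointerSize
  // e.g. index 2 in the properties array on a 64-bit heap:
  //      16-byte header (map + length) + 2 * 8 = 32 bytes in.
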
@@ -573,7 +573,7 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
     HeapType* field_type = lookup->GetFieldType();
     HeapType::Iterator<Map> it = field_type->Classes();
     if (!it.Done()) {
-      __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
+      __ ld(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
       Label do_store;
       Handle<Map> current;
       while (true) {
@@ -591,11 +591,11 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
   } else if (representation.IsDouble()) {
     // Load the double storage.
     if (index.is_inobject()) {
-      __ lw(scratch1, FieldMemOperand(receiver_reg, index.offset()));
+      __ ld(scratch1, FieldMemOperand(receiver_reg, index.offset()));
     } else {
-      __ lw(scratch1,
+      __ ld(scratch1,
             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
-      __ lw(scratch1, FieldMemOperand(scratch1, index.offset()));
+      __ ld(scratch1, FieldMemOperand(scratch1, index.offset()));
     }
 
     // Store the value into the storage.
@@ -625,7 +625,7 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
       ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
   if (index.is_inobject()) {
     // Set the property straight into the object.
-    __ sw(value_reg, FieldMemOperand(receiver_reg, index.offset()));
+    __ sd(value_reg, FieldMemOperand(receiver_reg, index.offset()));
 
     if (!representation.IsSmi()) {
       // Skip updating write barrier if storing a smi.
@@ -646,9 +646,9 @@ void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
   } else {
     // Write to the properties array.
    // Get the properties array.
-    __ lw(scratch1,
+    __ ld(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ sw(value_reg, FieldMemOperand(scratch1, index.offset()));
+    __ sd(value_reg, FieldMemOperand(scratch1, index.offset()));
 
     if (!representation.IsSmi()) {
       // Skip updating write barrier if storing a smi.
@@ -730,20 +730,20 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                        Register* values) {
   ASSERT(!receiver.is(scratch_in));
   // Preparing to push, adjust sp.
-  __ Subu(sp, sp, Operand((argc + 1) * kPointerSize));
-  __ sw(receiver, MemOperand(sp, argc * kPointerSize));  // Push receiver.
+  __ Dsubu(sp, sp, Operand((argc + 1) * kPointerSize));
+  __ sd(receiver, MemOperand(sp, argc * kPointerSize));  // Push receiver.
   // Write the arguments to stack frame.
   for (int i = 0; i < argc; i++) {
     Register arg = values[argc-1-i];
     ASSERT(!receiver.is(arg));
     ASSERT(!scratch_in.is(arg));
-    __ sw(arg, MemOperand(sp, (argc-1-i) * kPointerSize));  // Push arg.
+    __ sd(arg, MemOperand(sp, (argc-1-i) * kPointerSize));  // Push arg.
   }
   ASSERT(optimization.is_simple_api_call());
 
   // Abi for CallApiFunctionStub.
   Register callee = a0;
-  Register call_data = t0;
+  Register call_data = a4;
   Register holder = a2;
   Register api_function_address = a1;
 
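
The register rename here (t0 becoming a4, and t0/t1 becoming a4/a5 in the
register tables further down) is an ABI change, not a whim: under the MIPS
n64 calling convention the encodings $8-$11, which O32 names t0-t3, serve
as the additional argument registers a4-a7, so the mips64 assembler exposes
them under the new names. Roughly:

  // encoding    O32 name    n64 name    n64 role
  // $8-$11      t0-t3       a4-a7       argument registers 5 through 8
  // $12-$15     t4-t7       t0-t3       caller-saved temporaries
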
@@ -776,7 +776,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
   // Put call_data in place.
   if (isolate->heap()->InNewSpace(*call_data_obj)) {
     __ li(call_data, api_call_info);
-    __ lw(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
+    __ ld(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
   } else if (call_data_obj->IsUndefined()) {
     call_data_undefined = true;
     __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
@@ -860,16 +860,16 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
         GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                          scratch1, scratch2);
 
-        __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
+        __ ld(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
         reg = holder_reg;  // From now on the object will be in holder_reg.
-        __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
+        __ ld(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
       } else {
         Register map_reg = scratch1;
         if (depth != 1 || check == CHECK_ALL_MAPS) {
           // CheckMap implicitly loads the map of |reg| into |map_reg|.
           __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
         } else {
-          __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
+          __ ld(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
         }
 
         // Check access rights to the global object. This has to happen after
@@ -888,7 +888,7 @@ Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
       if (heap()->InNewSpace(*prototype)) {
         // The prototype is in new space; we cannot store a reference to it
         // in the code. Load it from the map.
-        __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
+        __ ld(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
       } else {
         // The prototype is in old space; load it directly.
         __ li(reg, Operand(prototype));
@@ -959,7 +959,7 @@ Register LoadStubCompiler::CallbackHandlerFrontend(
 
   // Load the properties dictionary.
   Register dictionary = scratch4();
-  __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
+  __ ld(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
 
   // Probe the dictionary.
   Label probe_done;
@@ -978,7 +978,7 @@ Register LoadStubCompiler::CallbackHandlerFrontend(
   const int kElementsStartOffset = NameDictionary::kHeaderSize +
       NameDictionary::kElementsStartIndex * kPointerSize;
   const int kValueOffset = kElementsStartOffset + kPointerSize;
-  __ lw(scratch2(), FieldMemOperand(pointer, kValueOffset));
+  __ ld(scratch2(), FieldMemOperand(pointer, kValueOffset));
   __ Branch(&miss, ne, scratch2(), Operand(callback));
 }
 
@@ -1027,22 +1027,22 @@ void LoadStubCompiler::GenerateLoadCallback(
   __ push(receiver());
   if (heap()->InNewSpace(callback->data())) {
     __ li(scratch3(), callback);
-    __ lw(scratch3(), FieldMemOperand(scratch3(),
+    __ ld(scratch3(), FieldMemOperand(scratch3(),
                                       ExecutableAccessorInfo::kDataOffset));
   } else {
     __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
   }
-  __ Subu(sp, sp, 6 * kPointerSize);
-  __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
+  __ Dsubu(sp, sp, 6 * kPointerSize);
+  __ sd(scratch3(), MemOperand(sp, 5 * kPointerSize));
   __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
-  __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
-  __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
+  __ sd(scratch3(), MemOperand(sp, 4 * kPointerSize));
+  __ sd(scratch3(), MemOperand(sp, 3 * kPointerSize));
   __ li(scratch4(),
        Operand(ExternalReference::isolate_address(isolate())));
-  __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
-  __ sw(reg, MemOperand(sp, 1 * kPointerSize));
-  __ sw(name(), MemOperand(sp, 0 * kPointerSize));
-  __ Addu(scratch2(), sp, 1 * kPointerSize);
+  __ sd(scratch4(), MemOperand(sp, 2 * kPointerSize));
+  __ sd(reg, MemOperand(sp, 1 * kPointerSize));
+  __ sd(name(), MemOperand(sp, 0 * kPointerSize));
+  __ Daddu(scratch2(), sp, 1 * kPointerSize);
 
   __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
   // Abi for CallApiGetter.
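
For reference, the six doubleword slots assembled above (Dsubu drops sp by
6 * 8 = 48 bytes) form the argument block handed to CallApiGetter; reading
the stores back gives this layout (the slot labels are annotation based on
v8's property callback arguments, not text from the patch):

  // sp[5]: callback data           (scratch3())
  // sp[4]: return value            (undefined)
  // sp[3]: return value default    (undefined)
  // sp[2]: isolate                 (scratch4())
  // sp[1]: holder                  (reg)      <- scratch2() points here
  // sp[0]: property name           (name())
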
@@ -1194,7 +1194,7 @@ void StoreStubCompiler::GenerateStoreViaSetter(
     // Call the JavaScript setter with receiver and value on the stack.
     if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
       // Swap in the global receiver.
-      __ lw(receiver,
+      __ ld(receiver,
             FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
     }
     __ Push(receiver, value());
@@ -1212,7 +1212,7 @@ void StoreStubCompiler::GenerateStoreViaSetter(
     __ pop(v0);
 
     // Restore context register.
-    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   }
   __ Ret();
 }
@@ -1255,7 +1255,7 @@ Register* LoadStubCompiler::registers() {
   // receiver, name, scratch1, scratch2, scratch3, scratch4.
   Register receiver = LoadIC::ReceiverRegister();
   Register name = LoadIC::NameRegister();
-  static Register registers[] = { receiver, name, a3, a0, t0, t1 };
+  static Register registers[] = { receiver, name, a3, a0, a4, a5 };
   return registers;
 }
 
@@ -1264,7 +1264,7 @@ Register* KeyedLoadStubCompiler::registers() {
   // receiver, name, scratch1, scratch2, scratch3, scratch4.
   Register receiver = LoadIC::ReceiverRegister();
   Register name = LoadIC::NameRegister();
-  static Register registers[] = { receiver, name, a3, a0, t0, t1 };
+  static Register registers[] = { receiver, name, a3, a0, a4, a5 };
   return registers;
 }
 
@@ -1276,14 +1276,14 @@ Register StoreStubCompiler::value() {
 
 Register* StoreStubCompiler::registers() {
   // receiver, name, scratch1, scratch2, scratch3.
-  static Register registers[] = { a1, a2, a3, t0, t1 };
+  static Register registers[] = { a1, a2, a3, a4, a5 };
   return registers;
 }
 
 
 Register* KeyedStoreStubCompiler::registers() {
   // receiver, name, scratch1, scratch2, scratch3.
-  static Register registers[] = { a2, a1, a3, t0, t1 };
+  static Register registers[] = { a2, a1, a3, a4, a5 };
   return registers;
 }
 
@@ -1308,7 +1308,7 @@ void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
     // Call the JavaScript getter with the receiver on the stack.
     if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
       // Swap in the global receiver.
-      __ lw(receiver,
+      __ ld(receiver,
             FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
     }
     __ push(receiver);
@@ -1323,7 +1323,7 @@ void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
     }
 
     // Restore context register.
-    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   }
   __ Ret();
 }
@@ -1345,18 +1345,18 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
 
   // Get the value from the cell.
   __ li(a3, Operand(cell));
-  __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));
+  __ ld(a4, FieldMemOperand(a3, Cell::kValueOffset));
 
   // Check for deleted property if property can actually be deleted.
   if (!is_dont_delete) {
     __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
-    __ Branch(&miss, eq, t0, Operand(at));
+    __ Branch(&miss, eq, a4, Operand(at));
   }
 
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
   __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, t0);
+  __ mov(v0, a4);
 
   HandlerFrontendFooter(name, &miss);
 
@@ -1387,7 +1387,7 @@ Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
 
   int receiver_count = types->length();
   int number_of_handled_maps = 0;
-  __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
+  __ ld(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
   for (int current = 0; current < receiver_count; ++current) {
     Handle<HeapType> type = types->at(current);
     Handle<Map> map = IC::TypeToMap(*type, isolate());
@@ -1395,7 +1395,7 @@ Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
       number_of_handled_maps++;
       // Check map and tail call if there's a match.
       // Separate compare from branch, to provide path for above JumpIfSmi().
-      __ Subu(match, map_reg, Operand(map));
+      __ Dsubu(match, map_reg, Operand(map));
       if (type->Is(HeapType::Number())) {
         ASSERT(!number_case.is_unused());
         __ bind(&number_case);
@@ -1435,7 +1435,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
   __ JumpIfSmi(receiver(), &miss);
 
   int receiver_count = receiver_maps->length();
-  __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
+  __ ld(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
   for (int i = 0; i < receiver_count; ++i) {
     if (transitioned_maps->at(i).is_null()) {
       __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
@@ -1464,7 +1464,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
 
 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
     MacroAssembler* masm) {
-  // The return address is in ra.
+  // The return address is in ra
   Label slow, miss;
 
   Register key = LoadIC::NameRegister();
@@ -1472,9 +1472,10 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
   ASSERT(receiver.is(a1));
   ASSERT(key.is(a2));
 
-  __ UntagAndJumpIfNotSmi(t2, key, &miss);
-  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
-  __ LoadFromNumberDictionary(&slow, t0, key, v0, t2, a3, t1);
+  __ UntagAndJumpIfNotSmi(a6, key, &miss);
+  __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset));
+  ASSERT(kSmiTagSize + kSmiShiftSize == 32);
+  __ LoadFromNumberDictionary(&slow, a4, key, v0, a6, a3, a5);
   __ Ret();
 
   // Slow case, key and receiver still unmodified.
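
The new ASSERT documents the 64-bit smi encoding this path depends on: with
kSmiTagSize == 1 and kSmiShiftSize == 31, a smi keeps its 32-bit payload in
the upper word, so untagging is a single shift by 32. Schematically (an
annotation, not patch content):

  // 64-bit smi layout (kSmiTagSize == 1, kSmiShiftSize == 31):
  //
  //   63             32 31            1   0
  //  [ 32-bit payload ][ 31 zero bits  ] [0]   <- tag bit 0 means "smi"
  //
  //   untag: value = raw >> (kSmiShiftSize + kSmiTagSize)   // >> 32
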
@@ -1496,4 +1497,4 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
 
 } }  // namespace v8::internal
 
-#endif  // V8_TARGET_ARCH_MIPS
+#endif  // V8_TARGET_ARCH_MIPS64
|