Index: src/mips/stub-cache-mips.cc
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index b3f93d5a3188382bc72b92b80e9404805deb91a0..80ab31a5e46124d236573fe2f325d52571214e95 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -307,11 +307,13 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
 }
 
 
-void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm,
-                                              Register dst,
-                                              Register src,
-                                              bool inobject,
-                                              int index) {
+void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
+                                            Register dst,
+                                            Register src,
+                                            bool inobject,
+                                            int index,
+                                            Representation representation) {
+  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
   int offset = index * kPointerSize;
   if (!inobject) {
     // Calculate the offset into the properties array.
@@ -442,8 +444,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                            Register value_reg,
                                            Register scratch1,
                                            Register scratch2,
+                                           Register scratch3,
                                            Label* miss_label,
-                                           Label* miss_restore_name) {
+                                           Label* miss_restore_name,
+                                           Label* slow) {
   // a0 : value.
   Label exit;
 
@@ -465,16 +469,6 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   // Ensure no transitions to deprecated maps are followed.
   __ CheckMapDeprecated(transition, scratch1, miss_label);
 
-  if (FLAG_track_fields && representation.IsSmi()) {
-    __ JumpIfNotSmi(value_reg, miss_label);
-  } else if (FLAG_track_double_fields && representation.IsDouble()) {
-    Label do_store;
-    __ JumpIfSmi(value_reg, &do_store);
-    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
-                miss_label, DONT_DO_SMI_CHECK);
-    __ bind(&do_store);
-  }
-
   // Check that we are allowed to write this.
   if (object->GetPrototype()->IsJSObject()) {
     JSObject* holder;
@@ -490,7 +484,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
     Register holder_reg = CheckPrototypes(
         object, receiver_reg, Handle<JSObject>(holder), name_reg,
-        scratch1, scratch2, name, miss_restore_name);
+        scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER);
     // If no property was found, and the holder (the last object in the
     // prototype chain) is in slow mode, we need to do a negative lookup on the
     // holder.
@@ -509,6 +503,30 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
   }
 
+  Register storage_reg = name_reg;
+
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_restore_name);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    Label do_store, heap_number;
+    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
+    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
+
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiUntag(scratch1, value_reg);
+    __ mtc1(scratch1, f6);
+    __ cvt_d_w(f4, f6);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
+                miss_restore_name, DONT_DO_SMI_CHECK);
+    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+
+    __ bind(&do_store);
+    __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
+  }
+
   // Stub never generated for non-global objects that require access
   // checks.
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
@@ -536,7 +554,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   __ RecordWriteField(receiver_reg,
                       HeapObject::kMapOffset,
                       scratch1,
-                      name_reg,
+                      scratch2,
                       kRAHasNotBeenSaved,
                       kDontSaveFPRegs,
                       OMIT_REMEMBERED_SET,
@@ -554,7 +572,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
-    __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
+    } else {
+      __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
+    }
 
     if (!FLAG_track_fields || !representation.IsSmi()) {
       // Skip updating write barrier if storing a smi.
@@ -562,7 +584,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
 
       // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
-      __ mov(name_reg, value_reg);
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
       __ RecordWriteField(receiver_reg,
                           offset,
                           name_reg,
@@ -576,7 +602,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     // Get the properties array
     __ lw(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ sw(value_reg, FieldMemOperand(scratch1, offset));
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ sw(storage_reg, FieldMemOperand(scratch1, offset));
+    } else {
+      __ sw(value_reg, FieldMemOperand(scratch1, offset));
+    }
 
     if (!FLAG_track_fields || !representation.IsSmi()) {
       // Skip updating write barrier if storing a smi.
@@ -584,6 +614,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
 
       // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
-      __ mov(name_reg, value_reg);
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      } else {
+        ASSERT(storage_reg.is(name_reg));
+      }
       __ RecordWriteField(scratch1,
                           offset,
@@ -643,11 +677,37 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
   if (FLAG_track_fields && representation.IsSmi()) {
     __ JumpIfNotSmi(value_reg, miss_label);
   } else if (FLAG_track_double_fields && representation.IsDouble()) {
-    Label do_store;
-    __ JumpIfSmi(value_reg, &do_store);
-    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
+    // Load the double storage.
+    if (index < 0) {
+      int offset = object->map()->instance_size() + (index * kPointerSize);
+      __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
+    } else {
+      __ lw(scratch1,
+            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
+      int offset = index * kPointerSize + FixedArray::kHeaderSize;
+      __ lw(scratch1, FieldMemOperand(scratch1, offset));
+    }
+
+    // Store the value into the storage.
+    Label do_store, heap_number;
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    __ SmiUntag(scratch2, value_reg);
+    __ mtc1(scratch2, f6);
+    __ cvt_d_w(f4, f6);
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                 miss_label, DONT_DO_SMI_CHECK);
+    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+
     __ bind(&do_store);
+    __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
+    // Return the value (register v0).
+    ASSERT(value_reg.is(a0));
+    __ mov(v0, a0);
+    __ Ret();
+    return;
   }
 
   // TODO(verwaest): Share this code as a code stub.
@@ -1308,15 +1368,18 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend(
 
 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
                                              Handle<JSObject> holder,
-                                             PropertyIndex field) {
+                                             PropertyIndex field,
+                                             Representation representation) {
   if (!reg.is(receiver())) __ mov(receiver(), reg);
   if (kind() == Code::LOAD_IC) {
     LoadFieldStub stub(field.is_inobject(holder),
-                       field.translate(holder));
+                       field.translate(holder),
+                       representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   } else {
     KeyedLoadFieldStub stub(field.is_inobject(holder),
-                            field.translate(holder));
+                            field.translate(holder),
+                            representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   }
 }
@@ -1548,7 +1611,8 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 
   // Do the right check and compute the holder register.
   Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
-  GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
+  GenerateFastPropertyLoad(masm(), a1, reg, index.is_inobject(holder),
+                           index.translate(holder), Representation::Tagged());
 
   GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
 
|