Chromium Code Reviews

Index: src/ia32/stub-cache-ia32.cc
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 18a032f9a241e920976639d89fa139f5cda68e4c..0bf728c68e87bdf45c93f937af059f1bc1706c41 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -369,11 +369,13 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
 }
-void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm,
-                                              Register dst,
-                                              Register src,
-                                              bool inobject,
-                                              int index) {
+void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
+                                            Register dst,
+                                            Register src,
+                                            bool inobject,
+                                            int index,
+                                            Representation representation) {
+  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
   int offset = index * kPointerSize;
   if (!inobject) {
     // Calculate the offset into the properties array.
@@ -764,7 +766,8 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss_label,
-                                           Label* miss_restore_name) {
+                                           Label* miss_restore_name,
+                                           Label* slow) {
   // Check that the map of the object hasn't changed.
   __ CheckMap(receiver_reg, Handle<Map>(object->map()),
               miss_label, DO_SMI_CHECK, REQUIRE_EXACT_MAP);
@@ -783,16 +786,6 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   // Ensure no transitions to deprecated maps are followed.
   __ CheckMapDeprecated(transition, scratch1, miss_label);
-  if (FLAG_track_fields && representation.IsSmi()) {
-    __ JumpIfNotSmi(value_reg, miss_label);
-  } else if (FLAG_track_double_fields && representation.IsDouble()) {
-    Label do_store;
-    __ JumpIfSmi(value_reg, &do_store);
-    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
-                miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP);
-    __ bind(&do_store);
-  }
-
   // Check that we are allowed to write this.
   if (object->GetPrototype()->IsJSObject()) {
     JSObject* holder;
@@ -809,7 +802,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     // We need an extra register, push
     Register holder_reg = CheckPrototypes(
        object, receiver_reg, Handle<JSObject>(holder), name_reg,
-       scratch1, scratch2, name, miss_restore_name);
+       scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER);
     // If no property was found, and the holder (the last object in the
     // prototype chain) is in slow mode, we need to do a negative lookup on the
     // holder.
@@ -828,6 +821,45 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     }
   }
+  if (FLAG_track_fields && representation.IsSmi()) {
+    __ JumpIfNotSmi(value_reg, miss_restore_name);
+  } else if (FLAG_track_double_fields && representation.IsDouble()) {
+    // If we allocate a new double holder, store it in name_reg.
+    // Label do_store, heap_number;
+    Label do_store, heap_number;
+    __ JumpIfNotSmi(value_reg, &heap_number);
+
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ AllocateHeapNumber(name_reg, scratch1, scratch2, slow);
+      __ SmiUntag(value_reg);
+      __ cvtsi2sd(xmm0, value_reg);
+      __ SmiTag(value_reg);
+    } else {
+      UNREACHABLE();
+    }
+    __ jmp(&do_store);
+
+    __ bind(&heap_number);
+    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
+                miss_restore_name, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ AllocateHeapNumber(name_reg, scratch1, scratch2, slow);
+      __ movdbl(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
+    } else {
+      UNREACHABLE();
+    }
+
+    __ bind(&do_store);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ movdbl(FieldOperand(name_reg, HeapNumber::kValueOffset), xmm0);
+    } else {
+      UNREACHABLE();
+    }
+  }
+
   // Stub never generated for non-global objects that require access
   // checks.
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
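(Aside on the block added above, for readers who do not write ia32 macro-assembler every day: because the receiver does not yet own a heap number for the freshly transitioned double field, the stub always allocates one, bailing out to the slow path if allocation fails, then fills it either from the untagged smi via cvtsi2sd or from the incoming heap number's payload, and leaves the box in name_reg so the later field store writes the box instead of the raw value. The C++ below is only a simplified model of that flow under those assumptions; TaggedValue, BoxedDouble and PrepareDoubleForTransitionStore are invented names, not V8 API.)

#include <cstdint>
#include <optional>

struct BoxedDouble { double value; };     // stands in for a HeapNumber

struct TaggedValue {
  bool is_smi = false;
  int32_t smi_value = 0;                  // meaningful when is_smi
  BoxedDouble* heap_number = nullptr;     // meaningful when !is_smi
};

// Returns the freshly boxed double (the role of name_reg above), or nullopt
// to model the jump to miss_restore_name when the value is neither a smi nor
// a heap number.
std::optional<BoxedDouble*> PrepareDoubleForTransitionStore(const TaggedValue& v) {
  double unboxed;
  if (v.is_smi) {
    unboxed = static_cast<double>(v.smi_value);   // SmiUntag + cvtsi2sd
  } else if (v.heap_number != nullptr) {
    unboxed = v.heap_number->value;               // movdbl from kValueOffset
  } else {
    return std::nullopt;                          // CheckMap on heap_number_map failed
  }
  return new BoxedDouble{unboxed};                // AllocateHeapNumber (|slow| on failure)
}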
@@ -839,7 +871,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     __ pop(scratch1);  // Return address.
     __ push(receiver_reg);
     __ push(Immediate(transition));
-    __ push(eax);
+    __ push(value_reg);
     __ push(scratch1);
     __ TailCallExternalReference(
         ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
@@ -853,12 +885,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   __ mov(scratch1, Immediate(transition));
   __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
-  // Update the write barrier for the map field and pass the now unused
-  // name_reg as scratch register.
+  // Update the write barrier for the map field.
   __ RecordWriteField(receiver_reg,
                       HeapObject::kMapOffset,
                       scratch1,
-                      name_reg,
+                      scratch2,
                       kDontSaveFPRegs,
                       OMIT_REMEMBERED_SET,
                       OMIT_SMI_CHECK);
@@ -875,12 +906,18 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
-    __ mov(FieldOperand(receiver_reg, offset), value_reg);
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ mov(FieldOperand(receiver_reg, offset), name_reg);

danno, 2013/05/07 13:04:47: Give this name_reg guy another name?
Toon Verwaest, 2013/05/07 15:08:52: Done.

+    } else {
+      __ mov(FieldOperand(receiver_reg, offset), value_reg);
+    }
     if (!FLAG_track_fields || !representation.IsSmi()) {
       // Update the write barrier for the array address.
       // Pass the value being stored in the now unused name_reg.
-      __ mov(name_reg, value_reg);
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      }
       __ RecordWriteField(receiver_reg,
                           offset,
                           name_reg,
@@ -892,12 +929,18 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm,
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array (optimistically).
     __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ mov(FieldOperand(scratch1, offset), eax);
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ mov(FieldOperand(scratch1, offset), name_reg);
+    } else {
+      __ mov(FieldOperand(scratch1, offset), value_reg);
+    }
     if (!FLAG_track_fields || !representation.IsSmi()) {
       // Update the write barrier for the array address.
       // Pass the value being stored in the now unused name_reg.
-      __ mov(name_reg, value_reg);
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      }
       __ RecordWriteField(scratch1,
                           offset,
                           name_reg,
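(On the two store hunks above, which handle the in-object and the properties-array case symmetrically: when the new representation is smi the write barrier is skipped entirely, since a smi is never a heap pointer, and when it is double the barrier runs with name_reg still holding the freshly allocated heap number rather than a copy of value_reg. The sketch below restates that decision in plain C++ under those assumptions; Rep, BarrierPlan and PlanFieldWriteBarrier are illustrative names, not V8 code.)

#include <cassert>

enum class Rep { kSmi, kDouble, kTagged };

struct BarrierPlan {
  bool emit_write_barrier;   // call RecordWriteField at all?
  bool barrier_sees_box;     // name_reg already holds the new HeapNumber
};

// Mirrors the FLAG_track_fields / FLAG_track_double_fields checks above.
BarrierPlan PlanFieldWriteBarrier(Rep rep, bool track_fields, bool track_double_fields) {
  if (track_fields && rep == Rep::kSmi) return {false, false};
  if (track_double_fields && rep == Rep::kDouble) return {true, true};
  return {true, false};      // tagged: copy value_reg into name_reg, then barrier
}

int main() {
  assert(!PlanFieldWriteBarrier(Rep::kSmi, true, true).emit_write_barrier);
  assert(PlanFieldWriteBarrier(Rep::kDouble, true, true).barrier_sees_box);
}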
@@ -948,23 +991,67 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
   if (FLAG_track_fields && representation.IsSmi()) {
     __ JumpIfNotSmi(value_reg, miss_label);
   } else if (FLAG_track_double_fields && representation.IsDouble()) {
-    Label do_store;
-    __ JumpIfSmi(value_reg, &do_store);
+    // Load the double storage.
+    if (index < 0) {
+      int offset = object->map()->instance_size() + (index * kPointerSize);
+      __ mov(scratch1, FieldOperand(receiver_reg, offset));
+    } else {
+      __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
+      int offset = index * kPointerSize + FixedArray::kHeaderSize;
+      __ mov(scratch1, FieldOperand(scratch1, offset));
+    }
+
+    // If we allocate a new double holder, store it in name_reg.
+    // Label do_store, heap_number;
+    Label do_store, heap_number;
+    __ JumpIfNotSmi(value_reg, &heap_number);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ SmiUntag(value_reg);
+      __ cvtsi2sd(xmm0, value_reg);
+      __ SmiTag(value_reg);
+    } else {
+      UNREACHABLE();
+    }
+    __ jmp(&do_store);
+    __ bind(&heap_number);
     __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                 miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ movdbl(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
+    } else {
+      UNREACHABLE();
+    }
     __ bind(&do_store);
+    if (CpuFeatures::IsSupported(SSE2)) {
+      CpuFeatureScope use_sse2(masm, SSE2);
+      __ movdbl(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
+    } else {
+      UNREACHABLE();
+    }
+    // Return the value (register eax).
+    ASSERT(value_reg.is(eax));
+    __ ret(0);
+    return;
   }
   // TODO(verwaest): Share this code as a code stub.
   if (index < 0) {
     // Set the property straight into the object.
     int offset = object->map()->instance_size() + (index * kPointerSize);
-    __ mov(FieldOperand(receiver_reg, offset), value_reg);
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ mov(FieldOperand(receiver_reg, offset), name_reg);
+    } else {
+      __ mov(FieldOperand(receiver_reg, offset), value_reg);
+    }
     if (!FLAG_track_fields || !representation.IsSmi()) {
       // Update the write barrier for the array address.
       // Pass the value being stored in the now unused name_reg.
-      __ mov(name_reg, value_reg);
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      }
       __ RecordWriteField(receiver_reg,
                           offset,
                           name_reg,
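(The GenerateStoreField hunk above differs from the transition case in one important way: the field already owns a mutable heap number, loaded into scratch1 at the top of the hunk, so the stub overwrites that box's payload in place and returns straight away, never reaching the write-barrier code that follows. A simplified C++ model of that path with invented names; StoreDoubleFieldInPlace and the surrounding types are not V8 API.)

#include <cstdint>

struct BoxedDouble { double value; };    // the HeapNumber already stored in the field

struct TaggedValue {
  bool is_smi = false;
  int32_t smi_value = 0;
  BoxedDouble* heap_number = nullptr;    // only inspected when !is_smi
};

// |storage| plays the role of scratch1 after the loads at the top of the
// hunk; returning false models the jump to miss_label when CheckMap fails.
bool StoreDoubleFieldInPlace(BoxedDouble* storage, const TaggedValue& v) {
  if (v.is_smi) {
    storage->value = static_cast<double>(v.smi_value);  // SmiUntag + cvtsi2sd
  } else if (v.heap_number != nullptr) {
    storage->value = v.heap_number->value;              // movdbl from kValueOffset
  } else {
    return false;                                       // not a heap number: miss
  }
  // The stub then returns value_reg (eax) untouched, i.e. "__ ret(0)".
  return true;
}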
@@ -976,12 +1063,18 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
     int offset = index * kPointerSize + FixedArray::kHeaderSize;
     // Get the properties array (optimistically).
     __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
-    __ mov(FieldOperand(scratch1, offset), eax);
+    if (FLAG_track_double_fields && representation.IsDouble()) {
+      __ mov(FieldOperand(scratch1, offset), name_reg);
+    } else {
+      __ mov(FieldOperand(scratch1, offset), value_reg);
+    }
     if (!FLAG_track_fields || !representation.IsSmi()) {
       // Update the write barrier for the array address.
       // Pass the value being stored in the now unused name_reg.
-      __ mov(name_reg, value_reg);
+      if (!FLAG_track_double_fields || !representation.IsDouble()) {
+        __ mov(name_reg, value_reg);
+      }
       __ RecordWriteField(scratch1,

danno, 2013/05/07 13:04:47: Remove the write barrier in the double case
Toon Verwaest, 2013/05/07 15:08:52: Done. This was already done by returning in case o

                           offset,
                           name_reg,
@@ -1236,15 +1329,18 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend(
 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
                                              Handle<JSObject> holder,
-                                             PropertyIndex field) {
+                                             PropertyIndex field,
+                                             Representation representation) {
   if (!reg.is(receiver())) __ mov(receiver(), reg);
   if (kind() == Code::LOAD_IC) {
     LoadFieldStub stub(field.is_inobject(holder),
-                       field.translate(holder));
+                       field.translate(holder),
+                       representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   } else {
     KeyedLoadFieldStub stub(field.is_inobject(holder),
-                            field.translate(holder));
+                            field.translate(holder),
+                            representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   }
 }
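(The change above only threads the field's Representation through to the field-load stubs; the assertion added to GenerateFastPropertyLoad at the top of this CL guarantees the fast load helper itself never sees an unboxed double. A loose, assumption-laden model of that contract, with made-up types (FastObject, FastPropertyLoad) rather than real V8 classes:)

#include <cassert>
#include <cstdint>
#include <vector>

enum class Rep { kSmi, kDouble, kTagged };

// Very rough stand-in for a fast-mode object: a block of in-object slots plus
// an out-of-line properties array.
struct FastObject {
  std::vector<uintptr_t> inobject_slots;
  std::vector<uintptr_t> properties;
};

// Mirrors GenerateFastPropertyLoad: pick the in-object slot or the properties
// array entry, and assert that the double case is handled elsewhere (see the
// ASSERT(!FLAG_track_double_fields || !representation.IsDouble()) above).
uintptr_t FastPropertyLoad(const FastObject& obj, bool inobject, int index, Rep rep) {
  assert(rep != Rep::kDouble);
  return inobject ? obj.inobject_slots[index] : obj.properties[index];
}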
@@ -1501,7 +1597,9 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
   Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi,
                                  name, &miss);
-  GenerateFastPropertyLoad(masm(), edi, reg, holder, index);
+  GenerateFastPropertyLoad(
+      masm(), edi, reg, index.is_inobject(holder),
+      index.translate(holder), Representation::Tagged());
   // Check that the function really is a function.
   __ JumpIfSmi(edi, &miss);