Index: src/ia32/fast-codegen-ia32.cc
diff --git a/src/ia32/fast-codegen-ia32.cc b/src/ia32/fast-codegen-ia32.cc
index b93f1d8574ce0406e5db3be58edb75daa09dc59f..c3b9b31c3918792117d5b65e2ef77572bd25b69b 100644
--- a/src/ia32/fast-codegen-ia32.cc
+++ b/src/ia32/fast-codegen-ia32.cc
@@ -61,6 +61,9 @@ void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
     __ cmp(destination(), Factory::the_hole_value());
     __ Check(not_equal, "DontDelete cells can't contain the hole");
   }
+
+  // The loaded value is not known to be a smi.
+  clear_as_smi(destination());
 }
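
For readers unfamiliar with the is_smi()/set_as_smi()/clear_as_smi() calls used throughout this patch: they track, per register, whether the value currently held is statically known to be a smi, and any load whose result cannot be proven to be a smi clears that knowledge. The following standalone C++ sketch is my illustration only, not the V8 implementation (the integer register-code parameter stands in for V8's Register type); it shows the kind of per-register bookkeeping such helpers imply.

// Hypothetical sketch of per-register smi knowledge tracking (not V8 code).
// Bit i is set when the register with code i is known to hold a smi.
class SmiTracker {
 public:
  bool is_smi(int reg_code) const {
    return (known_smi_bits_ & (1u << reg_code)) != 0;
  }
  void set_as_smi(int reg_code) {
    known_smi_bits_ |= (1u << reg_code);
  }
  void clear_as_smi(int reg_code) {
    // Called after any operation whose result is not known to be a smi.
    known_smi_bits_ &= ~(1u << reg_code);
  }
 private:
  unsigned known_smi_bits_ = 0;
};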
@@ -74,26 +77,46 @@ void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
   int index = lookup.GetFieldIndex() - map->inobject_properties();
   int offset = index * kPointerSize;
-  // Negative offsets are inobject properties.
+  // We will emit the write barrier unless the stored value is statically
+  // known to be a smi.
+  bool needs_write_barrier = !is_smi(accumulator0());
+
+  // Perform the store. Negative offsets are inobject properties.
   if (offset < 0) {
     offset += map->instance_size();
-    __ mov(scratch0(), receiver_reg());  // Copy receiver for write barrier.
+    __ mov(FieldOperand(receiver_reg(), offset), accumulator0());
+    if (needs_write_barrier) {
+      // Preserve receiver from write barrier.
+      __ mov(scratch0(), receiver_reg());
+    }
   } else {
     offset += FixedArray::kHeaderSize;
     __ mov(scratch0(),
            FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
+    __ mov(FieldOperand(scratch0(), offset), accumulator0());
   }
-  // Perform the store.
-  __ mov(FieldOperand(scratch0(), offset), accumulator0());
-  if (destination().is(no_reg)) {
-    __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
-  } else {
-    // Copy the value to the other accumulator to preserve a copy from the
-    // write barrier. One of the accumulators is available as a scratch
-    // register.
+
+  if (needs_write_barrier) {
+    if (destination().is(no_reg)) {
+      // After RecordWrite, accumulator0 is only accidentally a smi, but it is
+      // already marked as not known to be one.
+      __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
+    } else {
+      // Copy the value to the other accumulator to preserve a copy from the
+      // write barrier. One of the accumulators is available as a scratch
+      // register. Neither is a smi.
+      __ mov(accumulator1(), accumulator0());
+      clear_as_smi(accumulator1());
+      Register value_scratch = other_accumulator(destination());
+      __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
+    }
+  } else if (destination().is(accumulator1())) {
     __ mov(accumulator1(), accumulator0());
-    Register value_scratch = other_accumulator(destination());
-    __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
+    if (is_smi(accumulator0())) {

fschneider, 2010/02/10 15:14:33:
This condition should be always true, since there

Kevin Millikin (Chromium), 2010/02/11 08:30:09:
Good catch. I was trying to be too general and no

+      set_as_smi(accumulator1());
+    } else {
+      clear_as_smi(accumulator1());
+    }
   }
 }
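
The key point of this hunk is that a smi is an immediate (a tagged integer), not a heap pointer, so storing one into an object can never create a reference the garbage collector needs to track, and RecordWrite can be skipped. A standalone sketch of that tagging argument, assuming the 32-bit tagging scheme (kSmiTag == 0, kSmiTagMask == 1); the helper names below are mine, not V8's.

#include <cstdint>

// Assumed ia32 tagging constants: smis have the low bit clear,
// heap object pointers have it set.
const intptr_t kSmiTagMask = 1;
const intptr_t kSmiTag = 0;

bool IsSmi(intptr_t tagged_value) {
  return (tagged_value & kSmiTagMask) == kSmiTag;
}

// Hypothetical helper: a store only needs the write barrier when the stored
// value is a heap object pointer, because only those create references the
// collector has to record.
bool NeedsWriteBarrier(intptr_t stored_value) {
  return !IsSmi(stored_value);
}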
@@ -119,36 +142,46 @@ void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
            FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
     __ mov(destination(), FieldOperand(scratch0(), offset));
   }
+
+  // The loaded value is not known to be a smi.
+  clear_as_smi(destination());
 }
 void FastCodeGenerator::EmitBitOr() {
-  Register copied;  // One operand is copied to a scratch register.
-  Register other;   // The other is not modified by the operation.
-  Register check;   // A register is used for the smi check/operation.
-  if (destination().is(no_reg)) {
-    copied = accumulator1();  // Arbitrary choice of operand to copy.
-    other = accumulator0();
-    check = scratch0();  // Do not clobber either operand register.
-  } else {
-    copied = destination();
-    other = other_accumulator(destination());
-    check = destination();
-  }
-  __ mov(scratch0(), copied);
-  __ or_(check, Operand(other));
-  __ test(check, Immediate(kSmiTagMask));
-
-  // Restore the clobbered operand if necessary.
-  if (destination().is(no_reg)) {
+  if (is_smi(accumulator0()) && is_smi(accumulator1())) {
+    // If both operands are known to be a smi then there is no need to check
+    // the operands or result. There is no need to perform the operation in
+    // an effect context.
+    if (!destination().is(no_reg)) {
+      // Leave the result in the destination register. Bitwise or is
+      // commutative.
+      __ or_(destination(), Operand(other_accumulator(destination())));
+    }
+  } else if (destination().is(no_reg)) {
+    // Result is not needed but do not clobber the operands in case of
+    // bailout.
+    __ mov(scratch0(), accumulator1());
+    __ or_(scratch0(), Operand(accumulator0()));
+    __ test(scratch0(), Immediate(kSmiTagMask));
     __ j(not_zero, bailout(), not_taken);
   } else {
+    // Preserve the destination operand in a scratch register in case of
+    // bailout.
     Label done;
+    __ mov(scratch0(), destination());
+    __ or_(destination(), Operand(other_accumulator(destination())));
+    __ test(destination(), Immediate(kSmiTagMask));
     __ j(zero, &done, taken);
-    __ mov(copied, scratch0());
+    __ mov(destination(), scratch0());
     __ jmp(bailout());
     __ bind(&done);
   }
+
+  // If we didn't bail out, the result (in fact, both inputs too) is known to
+  // be a smi.
+  set_as_smi(accumulator0());
+  set_as_smi(accumulator1());
 }
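
Two properties make the rewritten EmitBitOr work: with a zero smi tag in the low bit, or-ing both operands and testing kSmiTagMask checks them in a single instruction, and the bitwise OR of two tagged smis is already the correctly tagged smi of the OR of the untagged integers, so no retagging or result check is needed. A small standalone check of that arithmetic follows; it is my example, assuming a one-bit tag of zero, not V8 source.

#include <cassert>
#include <cstdint>

const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = 1;

intptr_t ToSmi(intptr_t n) { return n << kSmiTagSize; }

// One test covers both operands: if either low tag bit were set,
// the OR of the two tagged values would have it set as well.
bool BothSmis(intptr_t a, intptr_t b) {
  return ((a | b) & kSmiTagMask) == 0;
}

int main() {
  intptr_t a = ToSmi(12);
  intptr_t b = ToSmi(10);
  assert(BothSmis(a, b));
  // (x << 1) | (y << 1) == (x | y) << 1, so the tagged OR is already
  // the correctly tagged smi result.
  assert((a | b) == ToSmi(12 | 10));
  return 0;
}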