| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 225 matching lines...) |
| 236 ASSERT(!AreAliased(object, address, value, t9)); | 236 ASSERT(!AreAliased(object, address, value, t9)); |
| 237 // The compiled code assumes that record write doesn't change the | 237 // The compiled code assumes that record write doesn't change the |
| 238 // context register, so we check that none of the clobbered | 238 // context register, so we check that none of the clobbered |
| 239 // registers are cp. | 239 // registers are cp. |
| 240 ASSERT(!address.is(cp) && !value.is(cp)); | 240 ASSERT(!address.is(cp) && !value.is(cp)); |
| 241 | 241 |
| 242 Label done; | 242 Label done; |
| 243 | 243 |
| 244 if (smi_check == INLINE_SMI_CHECK) { | 244 if (smi_check == INLINE_SMI_CHECK) { |
| 245 ASSERT_EQ(0, kSmiTag); | 245 ASSERT_EQ(0, kSmiTag); |
| 246 And(t8, value, Operand(kSmiTagMask)); | 246 JumpIfSmi(value, &done); |
| 247 Branch(&done, eq, t8, Operand(zero_reg)); | |
| 248 } | 247 } |
| 249 | 248 |
| 250 CheckPageFlag(value, | 249 CheckPageFlag(value, |
| 251 value, // Used as scratch. | 250 value, // Used as scratch. |
| 252 MemoryChunk::kPointersToHereAreInterestingMask, | 251 MemoryChunk::kPointersToHereAreInterestingMask, |
| 253 eq, | 252 eq, |
| 254 &done); | 253 &done); |
| 255 CheckPageFlag(object, | 254 CheckPageFlag(object, |
| 256 value, // Used as scratch. | 255 value, // Used as scratch. |
| 257 MemoryChunk::kPointersFromHereAreInterestingMask, | 256 MemoryChunk::kPointersFromHereAreInterestingMask, |
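Note on the hunk above: the two-instruction inline smi check (`And` with `kSmiTagMask`, then `Branch` on zero) is folded into a single `JumpIfSmi(value, &done)` call. A minimal sketch of what that helper is assumed to emit on MIPS follows, mirroring the sequence it replaces; the optional scratch parameter and its default are assumptions, not taken from this CL.

    // Sketch only: assumed body of the JumpIfSmi helper in
    // macro-assembler-mips.cc, given kSmiTag == 0 (asserted in the hunk).
    void MacroAssembler::JumpIfSmi(Register value,
                                   Label* smi_label,
                                   Register scratch) {
      ASSERT_EQ(0, kSmiTag);
      andi(scratch, value, kSmiTagMask);                  // Isolate the smi tag bit.
      Branch(smi_label, eq, scratch, Operand(zero_reg));  // Tag bit clear => smi.
    }

With that expansion the change is behavior-preserving except for the choice of scratch register: the old code used t8 explicitly, while the helper presumably picks its own default.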
| (...skipping 4215 matching lines...) |
| 4473 Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg)); | 4472 Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg)); |
| 4474 } | 4473 } |
| 4475 | 4474 |
| 4476 | 4475 |
| 4477 void MacroAssembler::JumpIfNotBothSmi(Register reg1, | 4476 void MacroAssembler::JumpIfNotBothSmi(Register reg1, |
| 4478 Register reg2, | 4477 Register reg2, |
| 4479 Label* on_not_both_smi) { | 4478 Label* on_not_both_smi) { |
| 4480 STATIC_ASSERT(kSmiTag == 0); | 4479 STATIC_ASSERT(kSmiTag == 0); |
| 4481 ASSERT_EQ(1, kSmiTagMask); | 4480 ASSERT_EQ(1, kSmiTagMask); |
| 4482 or_(at, reg1, reg2); | 4481 or_(at, reg1, reg2); |
| 4483 andi(at, at, kSmiTagMask); | 4482 JumpIfNotSmi(at, on_not_both_smi); |
| 4484 Branch(on_not_both_smi, ne, at, Operand(zero_reg)); | |
| 4485 } | 4483 } |
| 4486 | 4484 |
| 4487 | 4485 |
| 4488 void MacroAssembler::JumpIfEitherSmi(Register reg1, | 4486 void MacroAssembler::JumpIfEitherSmi(Register reg1, |
| 4489 Register reg2, | 4487 Register reg2, |
| 4490 Label* on_either_smi) { | 4488 Label* on_either_smi) { |
| 4491 STATIC_ASSERT(kSmiTag == 0); | 4489 STATIC_ASSERT(kSmiTag == 0); |
| 4492 ASSERT_EQ(1, kSmiTagMask); | 4490 ASSERT_EQ(1, kSmiTagMask); |
| 4493 // Both Smi tags must be 1 (not Smi). | 4491 // Both Smi tags must be 1 (not Smi). |
| 4494 and_(at, reg1, reg2); | 4492 and_(at, reg1, reg2); |
| 4495 andi(at, at, kSmiTagMask); | 4493 JumpIfSmi(at, on_either_smi); |
| 4496 Branch(on_either_smi, eq, at, Operand(zero_reg)); | |
| 4497 } | 4494 } |
| 4498 | 4495 |
| 4499 | 4496 |
| 4500 void MacroAssembler::AbortIfSmi(Register object) { | 4497 void MacroAssembler::AbortIfSmi(Register object) { |
| 4501 STATIC_ASSERT(kSmiTag == 0); | 4498 STATIC_ASSERT(kSmiTag == 0); |
| 4502 andi(at, object, kSmiTagMask); | 4499 andi(at, object, kSmiTagMask); |
| 4503 Assert(ne, "Operand is a smi", at, Operand(zero_reg)); | 4500 Assert(ne, "Operand is a smi", at, Operand(zero_reg)); |
| 4504 } | 4501 } |
| 4505 | 4502 |
| 4506 | 4503 |
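A side note on the tag-combining trick used by JumpIfNotBothSmi and JumpIfEitherSmi above. With kSmiTag == 0 and kSmiTagMask == 1 (both asserted in the hunk), OR-ing two values leaves the tag bit set iff at least one operand is a HeapObject, while AND-ing leaves it clear iff at least one operand is a smi, so the combined register can be handed straight to JumpIfNotSmi / JumpIfSmi. A tiny self-contained check of that bit logic, illustrative only and not V8 code:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t kSmiTagMask = 1;    // Tag bit 0: clear => smi, set => HeapObject.
      uint32_t smi      = 0x10;          // Tag bit clear: a smi.
      uint32_t heap_obj = 0x21;          // Tag bit set: a heap object.
      // OR: tag bit set iff either operand is a heap object, i.e. not both smis.
      assert(((smi | heap_obj) & kSmiTagMask) != 0);
      // AND: tag bit clear iff either operand is a smi.
      assert(((smi & heap_obj) & kSmiTagMask) == 0);
      return 0;
    }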
| (...skipping 57 matching lines...) |
| 4564 | 4561 |
| 4565 | 4562 |
| 4566 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first, | 4563 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first, |
| 4567 Register second, | 4564 Register second, |
| 4568 Register scratch1, | 4565 Register scratch1, |
| 4569 Register scratch2, | 4566 Register scratch2, |
| 4570 Label* failure) { | 4567 Label* failure) { |
| 4571 // Check that neither is a smi. | 4568 // Check that neither is a smi. |
| 4572 STATIC_ASSERT(kSmiTag == 0); | 4569 STATIC_ASSERT(kSmiTag == 0); |
| 4573 And(scratch1, first, Operand(second)); | 4570 And(scratch1, first, Operand(second)); |
| 4574 And(scratch1, scratch1, Operand(kSmiTagMask)); | 4571 JumpIfSmi(scratch1, failure); |
| 4575 Branch(failure, eq, scratch1, Operand(zero_reg)); | |
| 4576 JumpIfNonSmisNotBothSequentialAsciiStrings(first, | 4572 JumpIfNonSmisNotBothSequentialAsciiStrings(first, |
| 4577 second, | 4573 second, |
| 4578 scratch1, | 4574 scratch1, |
| 4579 scratch2, | 4575 scratch2, |
| 4580 failure); | 4576 failure); |
| 4581 } | 4577 } |
| 4582 | 4578 |
| 4583 | 4579 |
| 4584 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii( | 4580 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii( |
| 4585 Register first, | 4581 Register first, |
| (...skipping 494 matching lines...) |
| 5080 opcode == BGTZL); | 5076 opcode == BGTZL); |
| 5081 opcode = (cond == eq) ? BEQ : BNE; | 5077 opcode = (cond == eq) ? BEQ : BNE; |
| 5082 instr = (instr & ~kOpcodeMask) | opcode; | 5078 instr = (instr & ~kOpcodeMask) | opcode; |
| 5083 masm_.emit(instr); | 5079 masm_.emit(instr); |
| 5084 } | 5080 } |
| 5085 | 5081 |
| 5086 | 5082 |
| 5087 } } // namespace v8::internal | 5083 } } // namespace v8::internal |
| 5088 | 5084 |
| 5089 #endif // V8_TARGET_ARCH_MIPS | 5085 #endif // V8_TARGET_ARCH_MIPS |
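One more illustrative sketch, for the unchanged branch-condition patching code in the last hunk (the code that rewrites instr's opcode field and re-emits it via masm_, presumably in the CodePatcher). This assumes the standard MIPS layout where the primary opcode occupies bits 31..26 and that V8's BEQ/BNE constants are stored pre-shifted into that field; neither assumption is visible in the excerpt.

    #include <cassert>
    #include <cstdint>

    int main() {
      // Assumed encoding constants (standard MIPS I-type layout).
      const uint32_t kOpcodeShift = 26;
      const uint32_t kOpcodeMask  = 0x3Fu << kOpcodeShift;
      const uint32_t BEQ          = 0x04u << kOpcodeShift;
      const uint32_t BNE          = 0x05u << kOpcodeShift;

      uint32_t instr = BNE | 0x00430005u;    // Some bne with arbitrary rs/rt/offset bits.
      // Same transformation as the hunk: keep the operand bits, swap the opcode.
      instr = (instr & ~kOpcodeMask) | BEQ;
      assert((instr & kOpcodeMask) == BEQ);              // Now a beq...
      assert((instr & ~kOpcodeMask) == 0x00430005u);     // ...with identical operands.
      return 0;
    }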