| OLD | NEW | 
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 234 matching lines...) | (...skipping 234 matching lines...) | 
| 245                                  SaveFPRegsMode fp_mode, | 245                                  SaveFPRegsMode fp_mode, | 
| 246                                  RememberedSetAction remembered_set_action, | 246                                  RememberedSetAction remembered_set_action, | 
| 247                                  SmiCheck smi_check) { | 247                                  SmiCheck smi_check) { | 
| 248   ASSERT(!AreAliased(object, address, value, t8)); | 248   ASSERT(!AreAliased(object, address, value, t8)); | 
| 249   ASSERT(!AreAliased(object, address, value, t9)); | 249   ASSERT(!AreAliased(object, address, value, t9)); | 
| 250   // The compiled code assumes that record write doesn't change the | 250   // The compiled code assumes that record write doesn't change the | 
| 251   // context register, so we check that none of the clobbered | 251   // context register, so we check that none of the clobbered | 
| 252   // registers are cp. | 252   // registers are cp. | 
| 253   ASSERT(!address.is(cp) && !value.is(cp)); | 253   ASSERT(!address.is(cp) && !value.is(cp)); | 
| 254 | 254 | 
|  | 255   if (FLAG_debug_code) { | 
|  | 256     lw(at, MemOperand(address)); | 
|  | 257     Assert(eq, "Wrong address or value passed to RecordWrite", | 
|  | 258         at, Operand(value)); | 
|  | 259   } | 
|  | 260 | 
| 255   Label done; | 261   Label done; | 
| 256 | 262 | 
| 257   if (smi_check == INLINE_SMI_CHECK) { | 263   if (smi_check == INLINE_SMI_CHECK) { | 
| 258     ASSERT_EQ(0, kSmiTag); | 264     ASSERT_EQ(0, kSmiTag); | 
| 259     JumpIfSmi(value, &done); | 265     JumpIfSmi(value, &done); | 
| 260   } | 266   } | 
| 261 | 267 | 
| 262   CheckPageFlag(value, | 268   CheckPageFlag(value, | 
| 263                 value,  // Used as scratch. | 269                 value,  // Used as scratch. | 
| 264                 MemoryChunk::kPointersToHereAreInterestingMask, | 270                 MemoryChunk::kPointersToHereAreInterestingMask, | 
| (...skipping 4762 matching lines...) | (...skipping 4762 matching lines...) | 
| 5027        opcode == BGTZL); | 5033        opcode == BGTZL); | 
| 5028   opcode = (cond == eq) ? BEQ : BNE; | 5034   opcode = (cond == eq) ? BEQ : BNE; | 
| 5029   instr = (instr & ~kOpcodeMask) | opcode; | 5035   instr = (instr & ~kOpcodeMask) | opcode; | 
| 5030   masm_.emit(instr); | 5036   masm_.emit(instr); | 
| 5031 } | 5037 } | 
| 5032 | 5038 | 
| 5033 | 5039 | 
| 5034 } }  // namespace v8::internal | 5040 } }  // namespace v8::internal | 
| 5035 | 5041 | 
| 5036 #endif  // V8_TARGET_ARCH_MIPS | 5042 #endif  // V8_TARGET_ARCH_MIPS | 
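
The hunk at lines 255-259 of the NEW column adds a debug-only sanity check to `RecordWrite`: when `FLAG_debug_code` is set, it loads the word at `address` into the `at` scratch register and asserts it equals `value`, catching callers that pass a mismatched (address, value) pair before the remembered-set update runs. Below is a minimal, hedged C++ sketch of that same pattern outside V8; the types and the `RecordWrite` signature here are illustrative stand-ins, not V8's actual API.

```cpp
// Minimal sketch (not V8 code) of the debug check added in this patch:
// in debug builds, verify that the slot really holds the claimed value
// before doing any write-barrier bookkeeping.
#include <cassert>

// Hypothetical stand-ins for heap types, used only for illustration.
struct HeapObject;
using Slot = HeapObject**;

void RecordWrite(HeapObject* object, Slot slot, HeapObject* value,
                 bool debug_code) {
  if (debug_code) {
    // Counterpart of `lw at, MemOperand(address)` followed by the Assert:
    // abort early if the caller passed the wrong address or value.
    assert(*slot == value && "Wrong address or value passed to RecordWrite");
  }
  // ... smi check, page-flag checks, and remembered-set update would follow ...
  (void)object;
}
```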