| OLD | NEW |
| 1 // Protocol Buffers - Google's data interchange format | 1 // Protocol Buffers - Google's data interchange format |
| 2 // Copyright 2012 Google Inc. All rights reserved. | 2 // Copyright 2012 Google Inc. All rights reserved. |
| 3 // http://code.google.com/p/protobuf/ | 3 // https://developers.google.com/protocol-buffers/ |
| 4 // | 4 // |
| 5 // Redistribution and use in source and binary forms, with or without | 5 // Redistribution and use in source and binary forms, with or without |
| 6 // modification, are permitted provided that the following conditions are | 6 // modification, are permitted provided that the following conditions are |
| 7 // met: | 7 // met: |
| 8 // | 8 // |
| 9 // * Redistributions of source code must retain the above copyright | 9 // * Redistributions of source code must retain the above copyright |
| 10 // notice, this list of conditions and the following disclaimer. | 10 // notice, this list of conditions and the following disclaimer. |
| 11 // * Redistributions in binary form must reproduce the above | 11 // * Redistributions in binary form must reproduce the above |
| 12 // copyright notice, this list of conditions and the following disclaimer | 12 // copyright notice, this list of conditions and the following disclaimer |
| 13 // in the documentation and/or other materials provided with the | 13 // in the documentation and/or other materials provided with the |
| (...skipping 44 matching lines...) | (...skipping 44 matching lines...) |
| 58 "1:\n" | 58 "1:\n" |
| 59 "ll %0, %5\n" // prev = *ptr | 59 "ll %0, %5\n" // prev = *ptr |
| 60 "bne %0, %3, 2f\n" // if (prev != old_value) goto 2 | 60 "bne %0, %3, 2f\n" // if (prev != old_value) goto 2 |
| 61 "move %2, %4\n" // tmp = new_value | 61 "move %2, %4\n" // tmp = new_value |
| 62 "sc %2, %1\n" // *ptr = tmp (with atomic check) | 62 "sc %2, %1\n" // *ptr = tmp (with atomic check) |
| 63 "beqz %2, 1b\n" // start again on atomic error | 63 "beqz %2, 1b\n" // start again on atomic error |
| 64 "nop\n" // delay slot nop | 64 "nop\n" // delay slot nop |
| 65 "2:\n" | 65 "2:\n" |
| 66 ".set pop\n" | 66 ".set pop\n" |
| 67 : "=&r" (prev), "=m" (*ptr), "=&r" (tmp) | 67 : "=&r" (prev), "=m" (*ptr), "=&r" (tmp) |
| 68 : "Ir" (old_value), "r" (new_value), "m" (*ptr) | 68 : "r" (old_value), "r" (new_value), "m" (*ptr) |
| 69 : "memory"); | 69 : "memory"); |
| 70 return prev; | 70 return prev; |
| 71 } | 71 } |
| 72 | 72 |
| 73 // Atomically store new_value into *ptr, returning the previous value held in | 73 // Atomically store new_value into *ptr, returning the previous value held in |
| 74 // *ptr. This routine implies no memory barriers. | 74 // *ptr. This routine implies no memory barriers. |
| 75 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, | 75 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, |
| 76 Atomic32 new_value) { | 76 Atomic32 new_value) { |
| 77 Atomic32 temp, old; | 77 Atomic32 temp, old; |
| 78 __asm__ __volatile__(".set push\n" | 78 __asm__ __volatile__(".set push\n" |
| (...skipping 111 matching lines...) | (...skipping 111 matching lines...) |
| 190 "1:\n" | 190 "1:\n" |
| 191 "lld %0, %5\n" // prev = *ptr | 191 "lld %0, %5\n" // prev = *ptr |
| 192 "bne %0, %3, 2f\n" // if (prev != old_value) goto 2 | 192 "bne %0, %3, 2f\n" // if (prev != old_value) goto 2 |
| 193 "move %2, %4\n" // tmp = new_value | 193 "move %2, %4\n" // tmp = new_value |
| 194 "scd %2, %1\n" // *ptr = tmp (with atomic check) | 194 "scd %2, %1\n" // *ptr = tmp (with atomic check) |
| 195 "beqz %2, 1b\n" // start again on atomic error | 195 "beqz %2, 1b\n" // start again on atomic error |
| 196 "nop\n" // delay slot nop | 196 "nop\n" // delay slot nop |
| 197 "2:\n" | 197 "2:\n" |
| 198 ".set pop\n" | 198 ".set pop\n" |
| 199 : "=&r" (prev), "=m" (*ptr), "=&r" (tmp) | 199 : "=&r" (prev), "=m" (*ptr), "=&r" (tmp) |
| 200 : "Ir" (old_value), "r" (new_value), "m" (*ptr) | 200 : "r" (old_value), "r" (new_value), "m" (*ptr) |
| 201 : "memory"); | 201 : "memory"); |
| 202 return prev; | 202 return prev; |
| 203 } | 203 } |
| 204 | 204 |
| 205 // Atomically store new_value into *ptr, returning the previous value held in | 205 // Atomically store new_value into *ptr, returning the previous value held in |
| 206 // *ptr. This routine implies no memory barriers. | 206 // *ptr. This routine implies no memory barriers. |
| 207 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr, | 207 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr, |
| 208 Atomic64 new_value) { | 208 Atomic64 new_value) { |
| 209 Atomic64 temp, old; | 209 Atomic64 temp, old; |
| 210 __asm__ __volatile__(".set push\n" | 210 __asm__ __volatile__(".set push\n" |
| (...skipping 93 matching lines...) | (...skipping 93 matching lines...) |
| 304 } | 304 } |
| 305 #endif | 305 #endif |
| 306 | 306 |
| 307 } // namespace internal | 307 } // namespace internal |
| 308 } // namespace protobuf | 308 } // namespace protobuf |
| 309 } // namespace google | 309 } // namespace google |
| 310 | 310 |
| 311 #undef ATOMICOPS_COMPILER_BARRIER | 311 #undef ATOMICOPS_COMPILER_BARRIER |
| 312 | 312 |
| 313 #endif // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_MIPS_GCC_H_ | 313 #endif // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_MIPS_GCC_H_ |
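
Note: the functional change in this patch is the constraint string for the `old_value` operand, relaxed from `"Ir"` (immediate or register) to `"r"` in both the 32-bit `ll`/`sc` and the 64-bit `lld`/`scd` compare-and-swap routines, presumably so that the compiler always materializes `old_value` in a register; the `bne %0, %3, 2f` in the asm template is written for two register operands. As a reference for the contract these routines implement, here is a minimal sketch using the GCC `__atomic` builtins at relaxed ordering, which matches the "implies no memory barriers" comment. The `_Sketch` name and the local `Atomic32` typedef are illustrative only, not part of the patch:

```cpp
#include <stdint.h>

typedef int32_t Atomic32;  // stand-in for the protobuf Atomic32 type

// Illustrative equivalent of NoBarrier_CompareAndSwap: atomically replace
// *ptr with new_value only if it currently equals old_value, and return the
// value that was actually observed.  Relaxed ordering mirrors the
// "no memory barriers" contract of the hand-written ll/sc loop.
inline Atomic32 NoBarrier_CompareAndSwap_Sketch(volatile Atomic32* ptr,
                                                Atomic32 old_value,
                                                Atomic32 new_value) {
  Atomic32 expected = old_value;
  // On failure 'expected' is overwritten with the value actually seen, so in
  // either case it ends up holding the previous contents of *ptr.
  __atomic_compare_exchange_n(ptr, &expected, new_value,
                              /*weak=*/false,
                              __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return expected;
}
```

The Atomic64 variant shown above is identical apart from the 64-bit type and the `lld`/`scd` instruction pair.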
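
The NoBarrier_AtomicExchange routines (their bodies are elided in this excerpt) unconditionally swap `new_value` into `*ptr` and return the previous contents. A corresponding sketch, again illustrative only and using the same relaxed-ordering builtins:

```cpp
#include <stdint.h>

typedef int32_t Atomic32;  // stand-in for the protobuf Atomic32 type

// Illustrative equivalent of NoBarrier_AtomicExchange: store new_value into
// *ptr and return whatever was there before, with no ordering guarantees.
inline Atomic32 NoBarrier_AtomicExchange_Sketch(volatile Atomic32* ptr,
                                                Atomic32 new_value) {
  return __atomic_exchange_n(ptr, new_value, __ATOMIC_RELAXED);
}
```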