OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 466 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
477 | 477 |
478 #define WRITE_DOUBLE_FIELD(p, offset, value) \ | 478 #define WRITE_DOUBLE_FIELD(p, offset, value) \ |
479 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) | 479 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) |
480 | 480 |
481 #define READ_INT_FIELD(p, offset) \ | 481 #define READ_INT_FIELD(p, offset) \ |
482 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset))) | 482 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset))) |
483 | 483 |
484 #define WRITE_INT_FIELD(p, offset, value) \ | 484 #define WRITE_INT_FIELD(p, offset, value) \ |
485 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value) | 485 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value) |
486 | 486 |
| 487 #define READ_UINT32_FIELD(p, offset) \ |
| 488 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) |
| 489 |
| 490 #define WRITE_UINT32_FIELD(p, offset, value) \ |
| 491 (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value) |
| 492 |
487 #define READ_SHORT_FIELD(p, offset) \ | 493 #define READ_SHORT_FIELD(p, offset) \ |
488 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset))) | 494 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset))) |
489 | 495 |
490 #define WRITE_SHORT_FIELD(p, offset, value) \ | 496 #define WRITE_SHORT_FIELD(p, offset, value) \ |
491 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value) | 497 (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value) |
492 | 498 |
493 #define READ_BYTE_FIELD(p, offset) \ | 499 #define READ_BYTE_FIELD(p, offset) \ |
494 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset))) | 500 (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset))) |
495 | 501 |
496 #define WRITE_BYTE_FIELD(p, offset, value) \ | 502 #define WRITE_BYTE_FIELD(p, offset, value) \ |
(...skipping 700 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1197 case kLongStringTag: | 1203 case kLongStringTag: |
1198 WRITE_INT_FIELD(this, kLengthOffset, value << kLongLengthShift); | 1204 WRITE_INT_FIELD(this, kLengthOffset, value << kLongLengthShift); |
1199 break; | 1205 break; |
1200 default: | 1206 default: |
1201 UNREACHABLE(); | 1207 UNREACHABLE(); |
1202 break; | 1208 break; |
1203 } | 1209 } |
1204 } | 1210 } |
1205 | 1211 |
1206 | 1212 |
1207 int String::length_field() { | 1213 uint32_t String::length_field() { |
1208 return READ_INT_FIELD(this, kLengthOffset); | 1214 return READ_UINT32_FIELD(this, kLengthOffset); |
1209 } | 1215 } |
1210 | 1216 |
1211 | 1217 |
1212 void String::set_length_field(int value) { | 1218 void String::set_length_field(uint32_t value) { |
1213 WRITE_INT_FIELD(this, kLengthOffset, value); | 1219 WRITE_UINT32_FIELD(this, kLengthOffset, value); |
1214 } | 1220 } |
1215 | 1221 |
1216 | 1222 |
1217 void String::TryFlatten() { | 1223 void String::TryFlatten() { |
1218 // We don't need to flatten strings that are already flat. Since this code | 1224 // We don't need to flatten strings that are already flat. Since this code |
1219 // is inlined, it can be helpful in the flat case to not call out to Flatten. | 1225 // is inlined, it can be helpful in the flat case to not call out to Flatten. |
1220 StringRepresentationTag str_type = representation_tag(); | 1226 StringRepresentationTag str_type = representation_tag(); |
1221 if (str_type != kSeqStringTag && str_type != kExternalStringTag) { | 1227 if (str_type != kSeqStringTag && str_type != kExternalStringTag) { |
1222 Flatten(); | 1228 Flatten(); |
1223 } | 1229 } |
(...skipping 839 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2063 } | 2069 } |
2064 | 2070 |
2065 | 2071 |
2066 bool String::HasHashCode() { | 2072 bool String::HasHashCode() { |
2067 return (length_field() & kHashComputedMask) != 0; | 2073 return (length_field() & kHashComputedMask) != 0; |
2068 } | 2074 } |
2069 | 2075 |
2070 | 2076 |
2071 uint32_t String::Hash() { | 2077 uint32_t String::Hash() { |
2072 // Fast case: has hash code already been computed? | 2078 // Fast case: has hash code already been computed? |
2073 int hash = length_field(); | 2079 uint32_t field = length_field(); |
2074 if (hash & kHashComputedMask) return hash; | 2080 if (field & kHashComputedMask) return field >> kHashShift; |
2075 // Slow case: compute hash code and set it.. | 2081 // Slow case: compute hash code and set it. |
2076 return ComputeAndSetHash(); | 2082 return ComputeAndSetHash(); |
2077 } | 2083 } |
2078 | 2084 |
2079 | 2085 |
2080 bool String::AsArrayIndex(uint32_t* index) { | 2086 bool String::AsArrayIndex(uint32_t* index) { |
2081 int hash = length_field(); | 2087 uint32_t field = length_field(); |
2082 if ((hash & kHashComputedMask) && !(hash & kIsArrayIndexMask)) return false; | 2088 if ((field & kHashComputedMask) && !(field & kIsArrayIndexMask)) return false; |
2083 return SlowAsArrayIndex(index); | 2089 return SlowAsArrayIndex(index); |
2084 } | 2090 } |
2085 | 2091 |
2086 | 2092 |
2087 Object* JSObject::GetPrototype() { | 2093 Object* JSObject::GetPrototype() { |
2088 return JSObject::cast(this)->map()->prototype(); | 2094 return JSObject::cast(this)->map()->prototype(); |
2089 } | 2095 } |
2090 | 2096 |
2091 | 2097 |
2092 PropertyAttributes JSObject::GetPropertyAttribute(String* key) { | 2098 PropertyAttributes JSObject::GetPropertyAttribute(String* key) { |
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2168 #undef WRITE_INT_FIELD | 2174 #undef WRITE_INT_FIELD |
 | 2175 #undef READ_UINT32_FIELD |
 | 2176 #undef WRITE_UINT32_FIELD |
2169 #undef READ_SHORT_FIELD | 2177 #undef READ_SHORT_FIELD |
2170 #undef WRITE_SHORT_FIELD | 2178 #undef WRITE_SHORT_FIELD |
2171 #undef READ_BYTE_FIELD | 2179 #undef READ_BYTE_FIELD |
2172 #undef WRITE_BYTE_FIELD | 2180 #undef WRITE_BYTE_FIELD |
2173 | 2181 |
2174 | 2182 |
2175 } } // namespace v8::internal | 2183 } } // namespace v8::internal |
2176 | 2184 |
2177 #endif // V8_OBJECTS_INL_H_ | 2185 #endif // V8_OBJECTS_INL_H_ |
OLD | NEW |