| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 109 matching lines...) |
| 120 // --- | 120 // --- |
| 121 | 121 |
| 122 static Object* ComputeKeyedStoreField(String* name, | 122 static Object* ComputeKeyedStoreField(String* name, |
| 123 JSObject* receiver, | 123 JSObject* receiver, |
| 124 int field_index, | 124 int field_index, |
| 125 Map* transition = NULL); | 125 Map* transition = NULL); |
| 126 | 126 |
| 127 // --- | 127 // --- |
| 128 | 128 |
| 129 static Object* ComputeCallField(int argc, | 129 static Object* ComputeCallField(int argc, |
| | 130 InlineCacheInLoop in_loop, |
| 130 String* name, | 131 String* name, |
| 131 Object* object, | 132 Object* object, |
| 132 JSObject* holder, | 133 JSObject* holder, |
| 133 int index); | 134 int index); |
| 134 | 135 |
| 135 static Object* ComputeCallConstant(int argc, | 136 static Object* ComputeCallConstant(int argc, |
| | 137 InlineCacheInLoop in_loop, |
| 136 String* name, | 138 String* name, |
| 137 Object* object, | 139 Object* object, |
| 138 JSObject* holder, | 140 JSObject* holder, |
| 139 JSFunction* function); | 141 JSFunction* function); |
| 140 | 142 |
| 141 static Object* ComputeCallNormal(int argc, String* name, JSObject* receiver); | 143 static Object* ComputeCallNormal(int argc, |
| | 144 InlineCacheInLoop in_loop, |
| | 145 String* name, |
| | 146 JSObject* receiver); |
| 142 | 147 |
| 143 static Object* ComputeCallInterceptor(int argc, | 148 static Object* ComputeCallInterceptor(int argc, |
| 144 String* name, | 149 String* name, |
| 145 Object* object, | 150 Object* object, |
| 146 JSObject* holder); | 151 JSObject* holder); |
| 147 | 152 |
| 148 // --- | 153 // --- |
| 149 | 154 |
| 150 static Object* ComputeCallInitialize(int argc); | 155 static Object* ComputeCallInitialize(int argc, InlineCacheInLoop in_loop); |
| 151 static Object* ComputeCallInitializeInLoop(int argc); | 156 static Object* ComputeCallPreMonomorphic(int argc, InlineCacheInLoop in_loop); |
| 152 static Object* ComputeCallPreMonomorphic(int argc); | 157 static Object* ComputeCallNormal(int argc, InlineCacheInLoop in_loop); |
| 153 static Object* ComputeCallNormal(int argc); | 158 static Object* ComputeCallMegamorphic(int argc, InlineCacheInLoop in_loop); |
| 154 static Object* ComputeCallMegamorphic(int argc); | |
| 155 static Object* ComputeCallMiss(int argc); | 159 static Object* ComputeCallMiss(int argc); |
| 156 | 160 |
| 157 // Finds the Code object stored in the Heap::non_monomorphic_cache(). | 161 // Finds the Code object stored in the Heap::non_monomorphic_cache(). |
| 158 static Code* FindCallInitialize(int argc); | 162 static Code* FindCallInitialize(int argc, InlineCacheInLoop in_loop); |
| 159 | 163 |
| 160 #ifdef ENABLE_DEBUGGER_SUPPORT | 164 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 161 static Object* ComputeCallDebugBreak(int argc); | 165 static Object* ComputeCallDebugBreak(int argc); |
| 162 static Object* ComputeCallDebugPrepareStepIn(int argc); | 166 static Object* ComputeCallDebugPrepareStepIn(int argc); |
| 163 #endif | 167 #endif |
| 164 | 168 |
| 165 static Object* ComputeLazyCompile(int argc); | 169 static Object* ComputeLazyCompile(int argc); |
| 166 | 170 |
| 167 | 171 |
| 168 // Update cache for entry hash(name, map). | 172 // Update cache for entry hash(name, map). |
| (...skipping 32 matching lines...) |
| 201 // code. | 205 // code. |
| 202 ASSERT(kHeapObjectTagSize == String::kHashShift); | 206 ASSERT(kHeapObjectTagSize == String::kHashShift); |
| 203 // Compute the hash of the name (use entire length field). | 207 // Compute the hash of the name (use entire length field). |
| 204 ASSERT(name->HasHashCode()); | 208 ASSERT(name->HasHashCode()); |
| 205 uint32_t field = name->length_field(); | 209 uint32_t field = name->length_field(); |
| 206 // Using only the low bits in 64-bit mode is unlikely to increase the | 210 // Using only the low bits in 64-bit mode is unlikely to increase the |
| 207 // risk of collision even if the heap is spread over an area larger than | 211 // risk of collision even if the heap is spread over an area larger than |
| 208 // 4Gb (and not at all if it isn't). | 212 // 4Gb (and not at all if it isn't). |
| 209 uint32_t map_low32bits = | 213 uint32_t map_low32bits = |
| 210 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)); | 214 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)); |
| | 215 // We always set the in_loop bit to zero when generating the lookup code |
| | 216 // so do it here too so the hash codes match. |
| | 217 uint32_t iflags = |
| | 218 (static_cast<uint32_t>(flags) & ~Code::kFlagsICInLoopMask); |
| 211 // Base the offset on a simple combination of name, flags, and map. | 219 // Base the offset on a simple combination of name, flags, and map. |
| 212 uint32_t key = (map_low32bits + field) ^ flags; | 220 uint32_t key = (map_low32bits + field) ^ iflags; |
| 213 return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize); | 221 return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize); |
| 214 } | 222 } |
| 215 | 223 |
| 216 static int SecondaryOffset(String* name, Code::Flags flags, int seed) { | 224 static int SecondaryOffset(String* name, Code::Flags flags, int seed) { |
| 217 // Use the seed from the primary cache in the secondary cache. | 225 // Use the seed from the primary cache in the secondary cache. |
| 218 uint32_t string_low32bits = | 226 uint32_t string_low32bits = |
| 219 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)); | 227 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)); |
| 220 uint32_t key = seed - string_low32bits + flags; | 228 // We always set the in_loop bit to zero when generating the lookup code |
| 229 // so do it here too so the hash codes match. |
| 230 uint32_t iflags = |
| 231 (static_cast<uint32_t>(flags) & ~Code::kFlagsICInLoopMask); |
| 232 uint32_t key = seed - string_low32bits + iflags; |
| 221 return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize); | 233 return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize); |
| 222 } | 234 } |
| 223 | 235 |
| 224 // Compute the entry for a given offset in exactly the same way as | 236 // Compute the entry for a given offset in exactly the same way as |
| 225 // we do in generated code. This makes it a lot easier to avoid | 237 // we do in generated code. This makes it a lot easier to avoid |
| 226 // making mistakes in the hashed offset computations. | 238 // making mistakes in the hashed offset computations. |
| 227 static Entry* entry(Entry* table, int offset) { | 239 static Entry* entry(Entry* table, int offset) { |
| 228 return reinterpret_cast<Entry*>( | 240 return reinterpret_cast<Entry*>( |
| 229 reinterpret_cast<Address>(table) + (offset << 1)); | 241 reinterpret_cast<Address>(table) + (offset << 1)); |
| 230 } | 242 } |
| (...skipping 230 matching lines...) |
| 461 }; | 473 }; |
| 462 | 474 |
| 463 | 475 |
| 464 class CallStubCompiler: public StubCompiler { | 476 class CallStubCompiler: public StubCompiler { |
| 465 public: | 477 public: |
| 466 explicit CallStubCompiler(int argc) : arguments_(argc) { } | 478 explicit CallStubCompiler(int argc) : arguments_(argc) { } |
| 467 | 479 |
| 468 Object* CompileCallField(Object* object, | 480 Object* CompileCallField(Object* object, |
| 469 JSObject* holder, | 481 JSObject* holder, |
| 470 int index, | 482 int index, |
| 471 String* name); | 483 String* name, |
| | 484 Code::Flags flags); |
| 472 Object* CompileCallConstant(Object* object, | 485 Object* CompileCallConstant(Object* object, |
| 473 JSObject* holder, | 486 JSObject* holder, |
| 474 JSFunction* function, | 487 JSFunction* function, |
| 475 CheckType check); | 488 CheckType check, |
| | 489 Code::Flags flags); |
| 476 Object* CompileCallInterceptor(Object* object, | 490 Object* CompileCallInterceptor(Object* object, |
| 477 JSObject* holder, | 491 JSObject* holder, |
| 478 String* name); | 492 String* name); |
| 479 | 493 |
| 480 private: | 494 private: |
| 481 const ParameterCount arguments_; | 495 const ParameterCount arguments_; |
| 482 | 496 |
| 483 const ParameterCount& arguments() { return arguments_; } | 497 const ParameterCount& arguments() { return arguments_; } |
| 484 | 498 |
| 485 Object* GetCode(PropertyType type, String* name); | 499 Object* GetCode(PropertyType type, String* name); |
| 486 }; | 500 }; |
| 487 | 501 |
| 488 | 502 |
| 489 } } // namespace v8::internal | 503 } } // namespace v8::internal |
| 490 | 504 |
| 491 #endif // V8_STUB_CACHE_H_ | 505 #endif // V8_STUB_CACHE_H_ |
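To make the hash-offset change in this patch concrete, below is a minimal standalone sketch (not the V8 sources) of why both `PrimaryOffset` and `SecondaryOffset` clear the in-loop bit before hashing: the generated lookup code always hashes with that bit set to zero, so a stub compiled inside a loop and one compiled outside it must land in the same cache slot. The constant values (`kFlagsICInLoopMask`, `kPrimaryTableSize`, `kHeapObjectTagSize`) and the simplified signature are assumptions for illustration only, not V8's actual flag encoding.

```cpp
#include <cassert>
#include <cstdint>

namespace sketch {

// Assumed values for illustration; V8's real layout differs.
const uint32_t kFlagsICInLoopMask = 1u << 2;  // assumed position of the in-loop bit
const int kPrimaryTableSize = 2048;           // assumed primary table size
const int kHeapObjectTagSize = 2;             // assumed heap-pointer tag width

// Mirrors the shape of StubCache::PrimaryOffset after this patch: the
// in-loop bit is masked out of the flags before they are mixed into the
// key, so flag values that differ only in that bit hash identically.
int PrimaryOffset(uint32_t name_hash_field,
                  uintptr_t map_address,
                  uint32_t flags) {
  uint32_t map_low32bits = static_cast<uint32_t>(map_address);
  uint32_t iflags = flags & ~kFlagsICInLoopMask;  // drop the in-loop bit
  uint32_t key = (map_low32bits + name_hash_field) ^ iflags;
  return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
}

}  // namespace sketch

int main() {
  const uint32_t flags = 0x40;  // arbitrary example flags
  // With the mask applied, the in-loop variant maps to the same slot.
  assert(sketch::PrimaryOffset(0x1234, 0xdeadbeef, flags) ==
         sketch::PrimaryOffset(0x1234, 0xdeadbeef,
                               flags | sketch::kFlagsICInLoopMask));
  return 0;
}
```

Doing the masking in the C++ hash keeps it in lockstep with the generated lookup stubs, which is exactly what the new in-line comments in the patch point out.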