| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 13 matching lines...) |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #include "codegen-inl.h" | 30 #include "codegen-inl.h" |
| 31 #include "ic-inl.h" | 31 #include "ic-inl.h" |
| 32 #include "runtime.h" | 32 #include "runtime.h" |
| 33 #include "stub-cache.h" | 33 #include "stub-cache.h" |
| | 34 #include "utils.h" |
| 34 | 35 |
| 35 namespace v8 { | 36 namespace v8 { |
| 36 namespace internal { | 37 namespace internal { |
| 37 | 38 |
| 38 // ---------------------------------------------------------------------------- | 39 // ---------------------------------------------------------------------------- |
| 39 // Static IC stub generators. | 40 // Static IC stub generators. |
| 40 // | 41 // |
| 41 | 42 |
| 42 #define __ ACCESS_MASM(masm) | 43 #define __ ACCESS_MASM(masm) |
| 43 | 44 |
| (...skipping 56 matching lines...) |
| 100 | 101 |
| 101 // Generate an unrolled loop that performs a few probes before | 102 // Generate an unrolled loop that performs a few probes before |
| 102 // giving up. Measurements done on Gmail indicate that 2 probes | 103 // giving up. Measurements done on Gmail indicate that 2 probes |
| 103 // cover ~93% of loads from dictionaries. | 104 // cover ~93% of loads from dictionaries. |
| 104 static const int kProbes = 4; | 105 static const int kProbes = 4; |
| 105 const int kElementsStartOffset = | 106 const int kElementsStartOffset = |
| 106 StringDictionary::kHeaderSize + | 107 StringDictionary::kHeaderSize + |
| 107 StringDictionary::kElementsStartIndex * kPointerSize; | 108 StringDictionary::kElementsStartIndex * kPointerSize; |
| 108 for (int i = 0; i < kProbes; i++) { | 109 for (int i = 0; i < kProbes; i++) { |
| 109 // Compute the masked index: (hash + i + i * i) & mask. | 110 // Compute the masked index: (hash + i + i * i) & mask. |
| 110 __ movl(r1, FieldOperand(name, String::kLengthOffset)); | 111 __ movl(r1, FieldOperand(name, String::kHashFieldOffset)); |
| 111 __ shrl(r1, Immediate(String::kHashShift)); | 112 __ shrl(r1, Immediate(String::kHashShift)); |
| 112 if (i > 0) { | 113 if (i > 0) { |
| 113 __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i))); | 114 __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i))); |
| 114 } | 115 } |
| 115 __ and_(r1, r2); | 116 __ and_(r1, r2); |
| 116 | 117 |
| 117 // Scale the index by multiplying by the entry size. | 118 // Scale the index by multiplying by the entry size. |
| 118 ASSERT(StringDictionary::kEntrySize == 3); | 119 ASSERT(StringDictionary::kEntrySize == 3); |
| 119 __ lea(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 | 120 __ lea(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 |
| 120 | 121 |
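
The probe computation above is easier to follow in plain C++. The sketch below is illustrative only, not V8 code: `GetProbeOffset`, the entry size of 3, and the capacity mask are assumptions standing in for `StringDictionary::GetProbeOffset`, `StringDictionary::kEntrySize`, and the dictionary's real capacity; it only shows the `(hash + i + i*i) & mask` step named in the comment and the times-three scaling that the `lea` performs.

```cpp
// Illustrative sketch of the probe index computation (not V8 code).
#include <cstdint>
#include <cstdio>

static const int kEntrySize = 3;   // assumed: key, value, details per entry
static const int kProbes = 4;      // unrolled probes before falling back

// Assumed stand-in for StringDictionary::GetProbeOffset(i): the quadratic
// sequence i + i*i named in the comment above (0, 2, 6, 12, ...).
static unsigned GetProbeOffset(unsigned i) { return i + i * i; }

// Word index of the i-th probed entry. `hash` plays the role of the string's
// hash (the hash field shifted right by kHashShift in the stub).
static unsigned ProbeEntryIndex(unsigned hash, unsigned capacity_mask,
                                unsigned i) {
  unsigned masked = (hash + (i > 0 ? GetProbeOffset(i) : 0)) & capacity_mask;
  return masked * kEntrySize;      // the lea above computes masked + masked*2
}

int main() {
  const unsigned mask = 32 - 1;    // assumed capacity of 32 entries
  for (unsigned i = 0; i < kProbes; i++) {
    printf("probe %u -> entry index %u\n", i, ProbeEntryIndex(0x5bcdu, mask, i));
  }
  return 0;
}
```
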
| (...skipping 111 matching lines...) |
| 232 __ pop(rbx); | 233 __ pop(rbx); |
| 233 __ push(rcx); // receiver | 234 __ push(rcx); // receiver |
| 234 __ push(rax); // name | 235 __ push(rax); // name |
| 235 __ push(rbx); // return address | 236 __ push(rbx); // return address |
| 236 | 237 |
| 237 // Perform tail call to the entry. | 238 // Perform tail call to the entry. |
| 238 __ TailCallRuntime(f, 2, 1); | 239 __ TailCallRuntime(f, 2, 1); |
| 239 } | 240 } |
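
For reference, the stack shuffle before `TailCallRuntime` can be pictured with an ordinary container. This toy sketch is not V8 code; it assumes the incoming stack holds the receiver, the name, and the return address on top (as the state comments in this file document), and only visualizes which slots end up where after the pops and pushes.

```cpp
// Toy visualization (not V8 code) of the argument shuffle above.
#include <cstdio>
#include <string>
#include <vector>

int main() {
  // The back of the vector plays the role of rsp[0].
  std::vector<std::string> stack = {"receiver", "name", "return address"};

  std::string ret = stack.back();   // __ pop(rbx)
  stack.pop_back();
  stack.push_back("receiver");      // __ push(rcx)  receiver argument
  stack.push_back("name");          // __ push(rax)  name argument
  stack.push_back(ret);             // __ push(rbx)  return address back on top

  // The runtime entry now sees two arguments under a normal return address,
  // so it can return straight to the IC's caller.
  for (size_t i = stack.size(); i-- > 0;) {
    printf("rsp[%zu]: %s\n", (stack.size() - 1 - i) * 8, stack[i].c_str());
  }
  return 0;
}
```
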
| 240 | 241 |
| 241 | 242 |
| 242 #ifdef DEBUG | |
| 243 // For use in assert below. | |
| 244 static int TenToThe(int exponent) { | |
| 245 ASSERT(exponent <= 9); | |
| 246 ASSERT(exponent >= 1); | |
| 247 int answer = 10; | |
| 248 for (int i = 1; i < exponent; i++) answer *= 10; | |
| 249 return answer; | |
| 250 } | |
| 251 #endif | |
| 252 | |
| 253 | |
| 254 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { | 243 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { |
| 255 // ----------- S t a t e ------------- | 244 // ----------- S t a t e ------------- |
| 256 // -- rsp[0] : return address | 245 // -- rsp[0] : return address |
| 257 // -- rsp[8] : name | 246 // -- rsp[8] : name |
| 258 // -- rsp[16] : receiver | 247 // -- rsp[16] : receiver |
| 259 // ----------------------------------- | 248 // ----------------------------------- |
| 260 Label slow, check_string, index_int, index_string, check_pixel_array; | 249 Label slow, check_string, index_int, index_string, check_pixel_array; |
| 261 | 250 |
| 262 // Load name and receiver. | 251 // Load name and receiver. |
| 263 __ movq(rax, Operand(rsp, kPointerSize)); | 252 __ movq(rax, Operand(rsp, kPointerSize)); |
| (...skipping 56 matching lines...) |
| 320 // Slow case: Load name and receiver from stack and jump to runtime. | 309 // Slow case: Load name and receiver from stack and jump to runtime. |
| 321 __ bind(&slow); | 310 __ bind(&slow); |
| 322 __ IncrementCounter(&Counters::keyed_load_generic_slow, 1); | 311 __ IncrementCounter(&Counters::keyed_load_generic_slow, 1); |
| 323 Generate(masm, ExternalReference(Runtime::kKeyedGetProperty)); | 312 Generate(masm, ExternalReference(Runtime::kKeyedGetProperty)); |
| 324 __ bind(&check_string); | 313 __ bind(&check_string); |
| 325 // The key is not a smi. | 314 // The key is not a smi. |
| 326 // Is it a string? | 315 // Is it a string? |
| 327 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); | 316 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); |
| 328 __ j(above_equal, &slow); | 317 __ j(above_equal, &slow); |
| 329 // Is the string an array index, with cached numeric value? | 318 // Is the string an array index, with cached numeric value? |
| 330 __ movl(rbx, FieldOperand(rax, String::kLengthOffset)); | 319 __ movl(rbx, FieldOperand(rax, String::kHashFieldOffset)); |
| 331 __ testl(rbx, Immediate(String::kIsArrayIndexMask)); | 320 __ testl(rbx, Immediate(String::kIsArrayIndexMask)); |
| 332 | 321 |
| 333 // If the string is a symbol, do a quick inline probe of the receiver's | 322 // If the string is a symbol, do a quick inline probe of the receiver's |
| 334 // dictionary, if it exists. | 323 // dictionary, if it exists. |
| 335 __ j(not_zero, &index_string); // The value in rbx is used at jump target. | 324 __ j(not_zero, &index_string); // The value in rbx is used at jump target. |
| 336 __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset), | 325 __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset), |
| 337 Immediate(kIsSymbolMask)); | 326 Immediate(kIsSymbolMask)); |
| 338 __ j(zero, &slow); | 327 __ j(zero, &slow); |
| 339 // Probe the dictionary leaving result in rcx. | 328 // Probe the dictionary leaving result in rcx. |
| 340 GenerateDictionaryLoad(masm, &slow, rbx, rcx, rdx, rax); | 329 GenerateDictionaryLoad(masm, &slow, rbx, rcx, rdx, rax); |
| 341 GenerateCheckNonObjectOrLoaded(masm, &slow, rcx); | 330 GenerateCheckNonObjectOrLoaded(masm, &slow, rcx); |
| 342 __ movq(rax, rcx); | 331 __ movq(rax, rcx); |
| 343 __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1); | 332 __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1); |
| 344 __ ret(0); | 333 __ ret(0); |
| 345 // Array index string: If short enough use cache in length/hash field (rbx). | 334 // If the hash field contains an array index pick it out. The assert checks |
| 346 // We assert that there are enough bits in an int32_t after the hash shift | 335 // that the constants for the maximum number of digits for an array index |
| 347 // bits have been subtracted to allow space for the length and the cached | 336 // cached in the hash field and the number of bits reserved for it does not |
| 348 // array index. | 337 // conflict. |
| 349 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < | 338 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < |
| 350 (1 << (String::kShortLengthShift - String::kHashShift))); | 339 (1 << String::kArrayIndexValueBits)); |
| 351 __ bind(&index_string); | 340 __ bind(&index_string); |
| 352 const int kLengthFieldLimit = | |
| 353 (String::kMaxCachedArrayIndexLength + 1) << String::kShortLengthShift; | |
| 354 __ cmpl(rbx, Immediate(kLengthFieldLimit)); | |
| 355 __ j(above_equal, &slow); | |
| 356 __ movl(rax, rbx); | 341 __ movl(rax, rbx); |
| 357 __ and_(rax, Immediate((1 << String::kShortLengthShift) - 1)); | 342 __ and_(rax, Immediate(String::kArrayIndexHashMask)); |
| 358 __ shrl(rax, Immediate(String::kLongLengthShift)); | 343 __ shrl(rax, Immediate(String::kHashShift)); |
| 359 __ jmp(&index_int); | 344 __ jmp(&index_int); |
| 360 } | 345 } |
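
The `index_string` path reads the cached array index straight out of the hash field with one mask and one shift, which the new code makes explicit. The sketch below is not V8 code: the bit layout, the flag bit, and the constant values are assumptions standing in for `String::kIsArrayIndexMask`, `String::kArrayIndexHashMask`, `String::kHashShift`, and the digit/bit limits, chosen only to show the mask-then-shift extraction and the digits-versus-bits check that the ASSERT documents.

```cpp
// Illustrative sketch of reading a cached array index from a hash field
// (not V8 code; the field layout below is assumed for the example).
#include <cassert>
#include <cstdio>

static const int kHashShift = 2;                     // assumed low flag bits
static const unsigned kIsArrayIndexMask = 1u << 1;   // assumed "is index" bit
static const int kArrayIndexValueBits = 24;          // assumed index width
static const int kMaxCachedArrayIndexLength = 7;     // assumed digit limit
static const unsigned kArrayIndexHashMask =
    ((1u << kArrayIndexValueBits) - 1) << kHashShift;

// Same helper this file previously defined locally before it moved out.
static int TenToThe(int exponent) {
  int answer = 10;
  for (int i = 1; i < exponent; i++) answer *= 10;
  return answer;
}

// Mirrors the stub: if the flag is set, mask then shift to recover the index.
static bool TryGetCachedIndex(unsigned hash_field, unsigned* index) {
  if ((hash_field & kIsArrayIndexMask) == 0) return false;
  *index = (hash_field & kArrayIndexHashMask) >> kHashShift;
  return true;
}

int main() {
  // The ASSERT above: every index with at most kMaxCachedArrayIndexLength
  // decimal digits must fit in the bits reserved for the cached value.
  assert(TenToThe(kMaxCachedArrayIndexLength) < (1 << kArrayIndexValueBits));

  unsigned field = (42u << kHashShift) | kIsArrayIndexMask;  // encodes 42
  unsigned index = 0;
  if (TryGetCachedIndex(field, &index)) printf("cached index: %u\n", index);
  return 0;
}
```
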
| 361 | 346 |
| 362 | 347 |
| 363 void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm, | 348 void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm, |
| 364 ExternalArrayType array_type) { | 349 ExternalArrayType array_type) { |
| 365 // ----------- S t a t e ------------- | 350 // ----------- S t a t e ------------- |
| 366 // -- rsp[0] : return address | 351 // -- rsp[0] : return address |
| 367 // -- rsp[8] : name | 352 // -- rsp[8] : name |
| 368 // -- rsp[16] : receiver | 353 // -- rsp[16] : receiver |
| (...skipping 958 matching lines...) |
| 1327 | 1312 |
| 1328 // Cache miss: Jump to runtime. | 1313 // Cache miss: Jump to runtime. |
| 1329 Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss))); | 1314 Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss))); |
| 1330 } | 1315 } |
| 1331 | 1316 |
| 1332 | 1317 |
| 1333 #undef __ | 1318 #undef __ |
| 1334 | 1319 |
| 1335 | 1320 |
| 1336 } } // namespace v8::internal | 1321 } } // namespace v8::internal |