OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 287 matching lines...)
298 __ lw(scratch2, MemOperand(at)); | 298 __ lw(scratch2, MemOperand(at)); |
299 | 299 |
300 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 300 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
301 // In case the loaded value is the_hole we have to consult GetProperty | 301 // In case the loaded value is the_hole we have to consult GetProperty |
302 // to ensure the prototype chain is searched. | 302 // to ensure the prototype chain is searched. |
303 __ Branch(out_of_range, eq, scratch2, Operand(at)); | 303 __ Branch(out_of_range, eq, scratch2, Operand(at)); |
304 __ mov(result, scratch2); | 304 __ mov(result, scratch2); |
305 } | 305 } |
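For context, the hole check above has roughly the following shape when written as plain C++; the function name and the out-parameter are illustrative stand-ins, not part of V8.

    // Illustrative model of the fall-through above. A hole in the elements
    // backing store means the element is absent on the receiver itself, so
    // GetProperty must be consulted to search the prototype chain.
    const void* LoadFastElementOrBail(const void* loaded_value,
                                      const void* the_hole_marker,
                                      bool* take_out_of_range_path) {
      // Branch(out_of_range, eq, scratch2, Operand(at))
      if (loaded_value == the_hole_marker) {
        *take_out_of_range_path = true;
        return nullptr;
      }
      *take_out_of_range_path = false;
      return loaded_value;  // mov(result, scratch2)
    }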
306 | 306 |
307 | 307 |
308 // Checks whether a key is an array index string or a symbol string. | 308 // Checks whether a key is an array index string or an internalized string. |
309 // Falls through if a key is a symbol. | 309 // Falls through if a key is an internalized string. |
310 static void GenerateKeyStringCheck(MacroAssembler* masm, | 310 static void GenerateKeyStringCheck(MacroAssembler* masm, |
311 Register key, | 311 Register key, |
312 Register map, | 312 Register map, |
313 Register hash, | 313 Register hash, |
314 Label* index_string, | 314 Label* index_string, |
315 Label* not_symbol) { | 315 Label* not_internalized) { |
316 // The key is not a smi. | 316 // The key is not a smi. |
317 // Is it a string? | 317 // Is it a string? |
318 __ GetObjectType(key, map, hash); | 318 __ GetObjectType(key, map, hash); |
319 __ Branch(not_symbol, ge, hash, Operand(FIRST_NONSTRING_TYPE)); | 319 __ Branch(not_internalized, ge, hash, Operand(FIRST_NONSTRING_TYPE)); |
320 | 320 |
321 // Is the string an array index, with cached numeric value? | 321 // Is the string an array index, with cached numeric value? |
322 __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset)); | 322 __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset)); |
323 __ And(at, hash, Operand(String::kContainsCachedArrayIndexMask)); | 323 __ And(at, hash, Operand(String::kContainsCachedArrayIndexMask)); |
324 __ Branch(index_string, eq, at, Operand(zero_reg)); | 324 __ Branch(index_string, eq, at, Operand(zero_reg)); |
325 | 325 |
326 // Is the string a symbol? | 326 // Is the string internalized? |
327 // map: key map | 327 // map: key map |
328 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 328 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
329 STATIC_ASSERT(kSymbolTag != 0); | 329 STATIC_ASSERT(kInternalizedTag != 0); |
330 __ And(at, hash, Operand(kIsSymbolMask)); | 330 __ And(at, hash, Operand(kIsInternalizedMask)); |
331 __ Branch(not_symbol, eq, at, Operand(zero_reg)); | 331 __ Branch(not_internalized, eq, at, Operand(zero_reg)); |
332 } | 332 } |
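The fall-through structure of this check is easier to read as plain C++. Below is a standalone sketch of the same classification; the enum, the function name, and the constant values are illustrative placeholders, and only the mask names echo the V8 code.

    // Standalone sketch of the branches emitted by GenerateKeyStringCheck.
    // Constant values are placeholders, not the real V8 definitions.
    #include <cstdint>

    enum KeyKind { kIndexString, kInternalizedString, kNotInternalized };

    constexpr uint32_t kFirstNonstringType = 0x80;             // placeholder
    constexpr uint32_t kIsInternalizedMask = 0x10;             // placeholder
    constexpr uint32_t kContainsCachedArrayIndexMask = 0x02;   // placeholder

    KeyKind ClassifyNonSmiKey(uint32_t instance_type, uint32_t hash_field) {
      // Branch(not_internalized, ge, hash, Operand(FIRST_NONSTRING_TYPE)):
      // anything that is not a string bails out immediately.
      if (instance_type >= kFirstNonstringType) return kNotInternalized;
      // A zero AND result means the hash field caches an array index, so the
      // caller can jump straight to the index path (index_string label).
      if ((hash_field & kContainsCachedArrayIndexMask) == 0) return kIndexString;
      // The internalized bit must be set for the assembly to fall through.
      if ((instance_type & kIsInternalizedMask) == 0) return kNotInternalized;
      return kInternalizedString;  // fall-through case
    }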
333 | 333 |
334 | 334 |
335 // Defined in ic.cc. | 335 // Defined in ic.cc. |
336 Object* CallIC_Miss(Arguments args); | 336 Object* CallIC_Miss(Arguments args); |
337 | 337 |
338 // The generated code does not accept smi keys. | 338 // The generated code does not accept smi keys. |
339 // The generated code falls through if both probes miss. | 339 // The generated code falls through if both probes miss. |
340 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, | 340 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, |
341 int argc, | 341 int argc, |
(...skipping 234 matching lines...)
576 __ Push(a1, a2); // Pass the receiver and the key. | 576 __ Push(a1, a2); // Pass the receiver and the key. |
577 __ CallRuntime(Runtime::kKeyedGetProperty, 2); | 577 __ CallRuntime(Runtime::kKeyedGetProperty, 2); |
578 __ pop(a2); // Restore the key. | 578 __ pop(a2); // Restore the key. |
579 } | 579 } |
580 __ mov(a1, v0); | 580 __ mov(a1, v0); |
581 __ jmp(&do_call); | 581 __ jmp(&do_call); |
582 | 582 |
583 __ bind(&check_string); | 583 __ bind(&check_string); |
584 GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call); | 584 GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call); |
585 | 585 |
586 // The key is known to be a symbol. | 586 // The key is known to be internalized. |
587 // If the receiver is a regular JS object with slow properties then do | 587 // If the receiver is a regular JS object with slow properties then do |
588 // a quick inline probe of the receiver's dictionary. | 588 // a quick inline probe of the receiver's dictionary. |
589 // Otherwise do the monomorphic cache probe. | 589 // Otherwise do the monomorphic cache probe. |
590 GenerateKeyedLoadReceiverCheck( | 590 GenerateKeyedLoadReceiverCheck( |
591 masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache); | 591 masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache); |
592 | 592 |
593 __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset)); | 593 __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset)); |
594 __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset)); | 594 __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset)); |
595 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 595 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
596 __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at)); | 596 __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at)); |
597 | 597 |
598 GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0); | 598 GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0); |
599 __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3); | 599 __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3); |
600 __ jmp(&do_call); | 600 __ jmp(&do_call); |
601 | 601 |
602 __ bind(&lookup_monomorphic_cache); | 602 __ bind(&lookup_monomorphic_cache); |
603 __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3); | 603 __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3); |
604 GenerateMonomorphicCacheProbe(masm, | 604 GenerateMonomorphicCacheProbe(masm, |
605 argc, | 605 argc, |
606 Code::KEYED_CALL_IC, | 606 Code::KEYED_CALL_IC, |
607 Code::kNoExtraICState); | 607 Code::kNoExtraICState); |
608 // Fall through on miss. | 608 // Fall through on miss. |
609 | 609 |
610 __ bind(&slow_call); | 610 __ bind(&slow_call); |
611 // This branch is taken if: | 611 // This branch is taken if: |
612 // - the receiver requires boxing or access check, | 612 // - the receiver requires boxing or access check, |
613 // - the key is neither smi nor symbol, | 613 // - the key is neither smi nor an internalized string, |
614 // - the value loaded is not a function, | 614 // - the value loaded is not a function, |
615 // - there is hope that the runtime will create a monomorphic call stub, | 615 // - there is hope that the runtime will create a monomorphic call stub, |
616 // that will get fetched next time. | 616 // that will get fetched next time. |
617 __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3); | 617 __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3); |
618 GenerateMiss(masm, argc); | 618 GenerateMiss(masm, argc); |
619 | 619 |
620 __ bind(&index_string); | 620 __ bind(&index_string); |
621 __ IndexFromHash(a3, a2); | 621 __ IndexFromHash(a3, a2); |
622 // Now jump to the place where smi keys are handled. | 622 // Now jump to the place where smi keys are handled. |
623 __ jmp(&index_smi); | 623 __ jmp(&index_smi); |
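Between check_string and slow_call, the code above chooses between an inline dictionary probe and the monomorphic stub cache. Here is a rough standalone model of that choice; the struct fields and names are illustrative, and since GenerateKeyedLoadReceiverCheck itself is outside this excerpt, the "special handling" condition is an assumption based on the Map::kHasNamedInterceptor argument passed to it.

    // Rough model of the path selection once the key is known to be an
    // internalized string. Names are stand-ins, not V8 declarations.
    enum LookupPath { kInlineDictionaryLoad, kMonomorphicCacheProbe };

    struct ReceiverInfo {
      bool needs_special_handling;     // e.g. Map::kHasNamedInterceptor is set
      bool properties_are_hash_table;  // properties map == hash table map root
    };

    LookupPath ChoosePath(const ReceiverInfo& receiver) {
      // Receivers that need interceptors or other special handling go to the
      // monomorphic cache probe (the lookup_monomorphic_cache label).
      if (receiver.needs_special_handling) return kMonomorphicCacheProbe;
      // Only dictionary-mode (slow properties) receivers are probed inline
      // with GenerateDictionaryLoad; fast-properties objects are not.
      if (!receiver.properties_are_hash_table) return kMonomorphicCacheProbe;
      return kInlineDictionaryLoad;
    }

On a dictionary hit the generated code jumps back to do_call; a monomorphic-cache miss falls through to slow_call, which ends in GenerateMiss.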
(...skipping 360 matching lines...)
984 // Load the map of the receiver, compute the keyed lookup cache hash | 984 // Load the map of the receiver, compute the keyed lookup cache hash |
985 // based on 32 bits of the map pointer and the string hash. | 985 // based on 32 bits of the map pointer and the string hash. |
986 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset)); | 986 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset)); |
987 __ sra(a3, a2, KeyedLookupCache::kMapHashShift); | 987 __ sra(a3, a2, KeyedLookupCache::kMapHashShift); |
988 __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset)); | 988 __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset)); |
989 __ sra(at, t0, String::kHashShift); | 989 __ sra(at, t0, String::kHashShift); |
990 __ xor_(a3, a3, at); | 990 __ xor_(a3, a3, at); |
991 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask; | 991 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask; |
992 __ And(a3, a3, Operand(mask)); | 992 __ And(a3, a3, Operand(mask)); |
993 | 993 |
994 // Load the key (consisting of map and symbol) from the cache and | 994 // Load the key (consisting of map and internalized string) from the cache and |
995 // check for match. | 995 // check for match. |
996 Label load_in_object_property; | 996 Label load_in_object_property; |
997 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; | 997 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; |
998 Label hit_on_nth_entry[kEntriesPerBucket]; | 998 Label hit_on_nth_entry[kEntriesPerBucket]; |
999 ExternalReference cache_keys = | 999 ExternalReference cache_keys = |
1000 ExternalReference::keyed_lookup_cache_keys(isolate); | 1000 ExternalReference::keyed_lookup_cache_keys(isolate); |
1001 __ li(t0, Operand(cache_keys)); | 1001 __ li(t0, Operand(cache_keys)); |
1002 __ sll(at, a3, kPointerSizeLog2 + 1); | 1002 __ sll(at, a3, kPointerSizeLog2 + 1); |
1003 __ addu(t0, t0, at); | 1003 __ addu(t0, t0, at); |
1004 | 1004 |
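The hash computed above selects the first entry of a bucket in the keyed lookup cache, and the sll by kPointerSizeLog2 + 1 scales that index by one two-pointer (map, name) entry. The following standalone sketch shows the addressing with placeholder constants; the entry-comparison loop is inferred from the hit_on_nth_entry labels and is not part of this excerpt. (The assembly uses arithmetic shifts; for hashing the distinction does not matter here.)

    // Sketch of the keyed lookup cache probe set up above. Constant values
    // and the Entry layout are placeholders; the hash mirrors the
    // sra / xor_ / And sequence, and each cache key is a (map, name) pair.
    #include <cstdint>

    constexpr int kMapHashShift = 2;        // KeyedLookupCache::kMapHashShift (placeholder)
    constexpr int kStringHashShift = 2;     // String::kHashShift (placeholder)
    constexpr int kEntriesPerBucket = 4;    // KeyedLookupCache::kEntriesPerBucket (placeholder)
    constexpr uint32_t kBucketMask = 0x3c;  // kCapacityMask & kHashMask (placeholder)

    // Two pointers per entry, hence the shift by kPointerSizeLog2 + 1.
    struct Entry { const void* map; const void* name; };

    uint32_t FirstEntryIndex(uint32_t map_word, uint32_t hash_field) {
      uint32_t hash = (map_word >> kMapHashShift) ^ (hash_field >> kStringHashShift);
      return hash & kBucketMask;
    }

    // Inferred from the hit_on_nth_entry labels: the generated code compares
    // the receiver map and the internalized key against each entry in turn.
    int ProbeBucket(const Entry* cache_keys, uint32_t first,
                    const void* map, const void* name) {
      for (int i = 0; i < kEntriesPerBucket; i++) {
        const Entry& e = cache_keys[first + i];
        if (e.map == map && e.name == name) return static_cast<int>(first) + i;
      }
      return -1;  // miss
    }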
(...skipping 667 matching lines...)
1672 } else { | 1672 } else { |
1673 ASSERT(Assembler::IsBne(branch_instr)); | 1673 ASSERT(Assembler::IsBne(branch_instr)); |
1674 patcher.ChangeBranchCondition(eq); | 1674 patcher.ChangeBranchCondition(eq); |
1675 } | 1675 } |
1676 } | 1676 } |
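The else-branch above rewrites a bne into a beq; presumably the elided if-branch performs the symmetric rewrite. The toggle itself is trivial, as this stand-in sketch shows (the Condition enum here is not the real V8 type):

    // Minimal illustration of flipping the sense of the patched branch.
    enum Condition { eq, ne };

    Condition ToggleBranchCondition(Condition current) {
      // Patching the inlined check amounts to inverting the guard branch:
      // bne <-> beq.
      return current == eq ? ne : eq;
    }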
1677 | 1677 |
1678 | 1678 |
1679 } } // namespace v8::internal | 1679 } } // namespace v8::internal |
1680 | 1680 |
1681 #endif // V8_TARGET_ARCH_MIPS | 1681 #endif // V8_TARGET_ARCH_MIPS |