OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 43 matching lines...)
54 // Register usage: | 54 // Register usage: |
55 // type: holds the receiver instance type on entry. | 55 // type: holds the receiver instance type on entry. |
56 __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE)); | 56 __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE)); |
57 __ Branch(global_object, eq, type, Operand(JS_BUILTINS_OBJECT_TYPE)); | 57 __ Branch(global_object, eq, type, Operand(JS_BUILTINS_OBJECT_TYPE)); |
58 __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE)); | 58 __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE)); |
59 } | 59 } |
60 | 60 |
61 | 61 |
62 // Generated code falls through if the receiver is a regular non-global | 62 // Generated code falls through if the receiver is a regular non-global |
63 // JS object with slow properties and no interceptors. | 63 // JS object with slow properties and no interceptors. |
64 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm, | 64 static void GenerateNameDictionaryReceiverCheck(MacroAssembler* masm, |
65 Register receiver, | 65 Register receiver, |
66 Register elements, | 66 Register elements, |
67 Register scratch0, | 67 Register scratch0, |
68 Register scratch1, | 68 Register scratch1, |
69 Label* miss) { | 69 Label* miss) { |
70 // Register usage: | 70 // Register usage: |
71 // receiver: holds the receiver on entry and is unchanged. | 71 // receiver: holds the receiver on entry and is unchanged. |
72 // elements: holds the property dictionary on fall through. | 72 // elements: holds the property dictionary on fall through. |
73 // Scratch registers: | 73 // Scratch registers: |
74 // scratch0: used to hold the receiver map. | 74 // scratch0: used to hold the receiver map. |
75 // scratch1: used to hold the receiver instance type, receiver bit mask | 75 // scratch1: used to hold the receiver instance type, receiver bit mask |
76 // and elements map. | 76 // and elements map. |
77 | 77 |
78 // Check that the receiver isn't a smi. | 78 // Check that the receiver isn't a smi. |
79 __ JumpIfSmi(receiver, miss); | 79 __ JumpIfSmi(receiver, miss); |
(...skipping 42 matching lines...)
122 Register result, | 122 Register result, |
123 Register scratch1, | 123 Register scratch1, |
124 Register scratch2) { | 124 Register scratch2) { |
125 // Main use of the scratch registers. | 125 // Main use of the scratch registers. |
126 // scratch1: Used as temporary and to hold the capacity of the property | 126 // scratch1: Used as temporary and to hold the capacity of the property |
127 // dictionary. | 127 // dictionary. |
128 // scratch2: Used as temporary. | 128 // scratch2: Used as temporary. |
129 Label done; | 129 Label done; |
130 | 130 |
131 // Probe the dictionary. | 131 // Probe the dictionary. |
132 StringDictionaryLookupStub::GeneratePositiveLookup(masm, | 132 NameDictionaryLookupStub::GeneratePositiveLookup(masm, |
133 miss, | 133 miss, |
134 &done, | 134 &done, |
135 elements, | 135 elements, |
136 name, | 136 name, |
137 scratch1, | 137 scratch1, |
138 scratch2); | 138 scratch2); |
139 | 139 |
140 // If probing finds an entry, check that the value is a normal | 140 // If probing finds an entry, check that the value is a normal |
141 // property. | 141 // property. |
142 __ bind(&done); // scratch2 == elements + 4 * index. | 142 __ bind(&done); // scratch2 == elements + 4 * index. |
143 const int kElementsStartOffset = StringDictionary::kHeaderSize + | 143 const int kElementsStartOffset = NameDictionary::kHeaderSize + |
144 StringDictionary::kElementsStartIndex * kPointerSize; | 144 NameDictionary::kElementsStartIndex * kPointerSize; |
145 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; | 145 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; |
146 __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset)); | 146 __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset)); |
147 __ And(at, | 147 __ And(at, |
148 scratch1, | 148 scratch1, |
149 Operand(PropertyDetails::TypeField::kMask << kSmiTagSize)); | 149 Operand(PropertyDetails::TypeField::kMask << kSmiTagSize)); |
150 __ Branch(miss, ne, at, Operand(zero_reg)); | 150 __ Branch(miss, ne, at, Operand(zero_reg)); |
151 | 151 |
152 // Get the value at the masked, scaled index and return. | 152 // Get the value at the masked, scaled index and return. |
153 __ lw(result, | 153 __ lw(result, |
154 FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize)); | 154 FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize)); |
(...skipping 20 matching lines...)
175 Register value, | 175 Register value, |
176 Register scratch1, | 176 Register scratch1, |
177 Register scratch2) { | 177 Register scratch2) { |
178 // Main use of the scratch registers. | 178 // Main use of the scratch registers. |
179 // scratch1: Used as temporary and to hold the capacity of the property | 179 // scratch1: Used as temporary and to hold the capacity of the property |
180 // dictionary. | 180 // dictionary. |
181 // scratch2: Used as temporary. | 181 // scratch2: Used as temporary. |
182 Label done; | 182 Label done; |
183 | 183 |
184 // Probe the dictionary. | 184 // Probe the dictionary. |
185 StringDictionaryLookupStub::GeneratePositiveLookup(masm, | 185 NameDictionaryLookupStub::GeneratePositiveLookup(masm, |
186 miss, | 186 miss, |
187 &done, | 187 &done, |
188 elements, | 188 elements, |
189 name, | 189 name, |
190 scratch1, | 190 scratch1, |
191 scratch2); | 191 scratch2); |
192 | 192 |
193 // If probing finds an entry in the dictionary, check that the value | 193 // If probing finds an entry in the dictionary, check that the value |
194 // is a normal property that is not read only. | 194 // is a normal property that is not read only. |
195 __ bind(&done); // scratch2 == elements + 4 * index. | 195 __ bind(&done); // scratch2 == elements + 4 * index. |
196 const int kElementsStartOffset = StringDictionary::kHeaderSize + | 196 const int kElementsStartOffset = NameDictionary::kHeaderSize + |
197 StringDictionary::kElementsStartIndex * kPointerSize; | 197 NameDictionary::kElementsStartIndex * kPointerSize; |
198 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; | 198 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; |
199 const int kTypeAndReadOnlyMask = | 199 const int kTypeAndReadOnlyMask = |
200 (PropertyDetails::TypeField::kMask | | 200 (PropertyDetails::TypeField::kMask | |
201 PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize; | 201 PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize; |
202 __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset)); | 202 __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset)); |
203 __ And(at, scratch1, Operand(kTypeAndReadOnlyMask)); | 203 __ And(at, scratch1, Operand(kTypeAndReadOnlyMask)); |
204 __ Branch(miss, ne, at, Operand(zero_reg)); | 204 __ Branch(miss, ne, at, Operand(zero_reg)); |
205 | 205 |
206 // Store the value at the masked, scaled index and return. | 206 // Store the value at the masked, scaled index and return. |
207 const int kValueOffset = kElementsStartOffset + kPointerSize; | 207 const int kValueOffset = kElementsStartOffset + kPointerSize; |
(...skipping 90 matching lines...)
298 __ lw(scratch2, MemOperand(at)); | 298 __ lw(scratch2, MemOperand(at)); |
299 | 299 |
300 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 300 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
301 // In case the loaded value is the_hole we have to consult GetProperty | 301 // In case the loaded value is the_hole we have to consult GetProperty |
302 // to ensure the prototype chain is searched. | 302 // to ensure the prototype chain is searched. |
303 __ Branch(out_of_range, eq, scratch2, Operand(at)); | 303 __ Branch(out_of_range, eq, scratch2, Operand(at)); |
304 __ mov(result, scratch2); | 304 __ mov(result, scratch2); |
305 } | 305 } |
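The tail of the fast array load above has one subtlety: a loaded the_hole value must not be returned as the result, because the property may still live on the prototype chain. A hypothetical helper (illustration only, not a V8 API) showing the same logic:

  static Object* CheckFastElement(Object* element, Object* the_hole) {
    // the_hole means "absent here": take the out_of_range path so the
    // caller ends up in GetProperty and the prototype chain is searched.
    if (element == the_hole) return NULL;   // Branch(out_of_range, eq, ...)
    return element;                          // __ mov(result, scratch2)
  }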
306 | 306 |
307 | 307 |
308 // Checks whether a key is an array index string or an internalized string. | 308 // Checks whether a key is an array index string or a unique name. |
309 // Falls through if a key is an internalized string. | 309 // Falls through if a key is a unique name. |
310 static void GenerateKeyStringCheck(MacroAssembler* masm, | 310 static void GenerateKeyNameCheck(MacroAssembler* masm, |
311 Register key, | 311 Register key, |
312 Register map, | 312 Register map, |
313 Register hash, | 313 Register hash, |
314 Label* index_string, | 314 Label* index_string, |
315 Label* not_internalized) { | 315 Label* not_unique) { |
316 // The key is not a smi. | 316 // The key is not a smi. |
317 // Is it a string? | 317 Label unique; |
| 318 // Is it a name? |
318 __ GetObjectType(key, map, hash); | 319 __ GetObjectType(key, map, hash); |
319 __ Branch(not_internalized, ge, hash, Operand(FIRST_NONSTRING_TYPE)); | 320 __ Branch(not_unique, hi, hash, Operand(LAST_UNIQUE_NAME_TYPE)); |
| 321 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE); |
| 322 __ Branch(&unique, eq, hash, Operand(LAST_UNIQUE_NAME_TYPE)); |
320 | 323 |
321 // Is the string an array index, with cached numeric value? | 324 // Is the string an array index, with cached numeric value? |
322 __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset)); | 325 __ lw(hash, FieldMemOperand(key, Name::kHashFieldOffset)); |
323 __ And(at, hash, Operand(String::kContainsCachedArrayIndexMask)); | 326 __ And(at, hash, Operand(Name::kContainsCachedArrayIndexMask)); |
324 __ Branch(index_string, eq, at, Operand(zero_reg)); | 327 __ Branch(index_string, eq, at, Operand(zero_reg)); |
325 | 328 |
326 // Is the string internalized? | 329 // Is the string internalized? |
327 // map: key map | 330 // map: key map |
328 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 331 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
329 STATIC_ASSERT(kInternalizedTag != 0); | 332 STATIC_ASSERT(kInternalizedTag != 0); |
330 __ And(at, hash, Operand(kIsInternalizedMask)); | 333 __ And(at, hash, Operand(kIsInternalizedMask)); |
331 __ Branch(not_internalized, eq, at, Operand(zero_reg)); | 334 __ Branch(not_unique, eq, at, Operand(zero_reg)); |
| 335 |
| 336 __ bind(&unique); |
332 } | 337 } |
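GenerateKeyNameCheck replaces the old internalized-string test with a unique-name test, so symbols now pass as well. A sketch of the classification using the constants from the diff (the ClassifyKey helper and KeyKind enum are hypothetical, for illustration only):

  enum KeyKind { UNIQUE_NAME, ARRAY_INDEX, NOT_UNIQUE };

  static KeyKind ClassifyKey(InstanceType type, uint32_t hash_field) {
    if (type > LAST_UNIQUE_NAME_TYPE) return NOT_UNIQUE;
    if (type == LAST_UNIQUE_NAME_TYPE) return UNIQUE_NAME;  // a Symbol
    // The key is a string; array-index strings take the smi key path.
    if ((hash_field & Name::kContainsCachedArrayIndexMask) == 0)
      return ARRAY_INDEX;
    // Only internalized strings count as unique names here.
    if ((type & kIsInternalizedMask) == 0) return NOT_UNIQUE;
    return UNIQUE_NAME;
  }

Callers treat UNIQUE_NAME as fall-through, ARRAY_INDEX as a jump to the index_name label, and NOT_UNIQUE as the slow path.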
333 | 338 |
334 | 339 |
335 // Defined in ic.cc. | 340 // Defined in ic.cc. |
336 Object* CallIC_Miss(Arguments args); | 341 Object* CallIC_Miss(Arguments args); |
337 | 342 |
338 // The generated code does not accept smi keys. | 343 // The generated code does not accept smi keys. |
339 // The generated code falls through if both probes miss. | 344 // The generated code falls through if both probes miss. |
340 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, | 345 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm, |
341 int argc, | 346 int argc, |
(...skipping 77 matching lines...)
419 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { | 424 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) { |
420 // ----------- S t a t e ------------- | 425 // ----------- S t a t e ------------- |
421 // -- a2 : name | 426 // -- a2 : name |
422 // -- ra : return address | 427 // -- ra : return address |
423 // ----------------------------------- | 428 // ----------------------------------- |
424 Label miss; | 429 Label miss; |
425 | 430 |
426 // Get the receiver of the function from the stack into a1. | 431 // Get the receiver of the function from the stack into a1. |
427 __ lw(a1, MemOperand(sp, argc * kPointerSize)); | 432 __ lw(a1, MemOperand(sp, argc * kPointerSize)); |
428 | 433 |
429 GenerateStringDictionaryReceiverCheck(masm, a1, a0, a3, t0, &miss); | 434 GenerateNameDictionaryReceiverCheck(masm, a1, a0, a3, t0, &miss); |
430 | 435 |
431 // a0: elements | 436 // a0: elements |
432 // Search the dictionary - put result in register a1. | 437 // Search the dictionary - put result in register a1. |
433 GenerateDictionaryLoad(masm, &miss, a0, a2, a1, a3, t0); | 438 GenerateDictionaryLoad(masm, &miss, a0, a2, a1, a3, t0); |
434 | 439 |
435 GenerateFunctionTailCall(masm, argc, &miss, t0); | 440 GenerateFunctionTailCall(masm, argc, &miss, t0); |
436 | 441 |
437 // Cache miss: Jump to runtime. | 442 // Cache miss: Jump to runtime. |
438 __ bind(&miss); | 443 __ bind(&miss); |
439 } | 444 } |
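GenerateNormal only succeeds when GenerateDictionaryLoad finds a plain data property; anything else bails out to |miss|. The details-word test performed at the kDetailsOffset load shown further up boils down to this check on the smi-encoded details word (hypothetical helper, constants as in the diff):

  static bool IsNormalProperty(uint32_t details_smi) {
    // NORMAL properties have zero type bits; the extra kSmiTagSize shift
    // accounts for the details value being stored as a smi.
    return (details_smi &
            (PropertyDetails::TypeField::kMask << kSmiTagSize)) == 0;
  }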
(...skipping 82 matching lines...)
522 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { | 527 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { |
523 // ----------- S t a t e ------------- | 528 // ----------- S t a t e ------------- |
524 // -- a2 : name | 529 // -- a2 : name |
525 // -- ra : return address | 530 // -- ra : return address |
526 // ----------------------------------- | 531 // ----------------------------------- |
527 | 532 |
528 // Get the receiver of the function from the stack into a1. | 533 // Get the receiver of the function from the stack into a1. |
529 __ lw(a1, MemOperand(sp, argc * kPointerSize)); | 534 __ lw(a1, MemOperand(sp, argc * kPointerSize)); |
530 | 535 |
531 Label do_call, slow_call, slow_load, slow_reload_receiver; | 536 Label do_call, slow_call, slow_load, slow_reload_receiver; |
532 Label check_number_dictionary, check_string, lookup_monomorphic_cache; | 537 Label check_number_dictionary, check_name, lookup_monomorphic_cache; |
533 Label index_smi, index_string; | 538 Label index_smi, index_name; |
534 | 539 |
535 // Check that the key is a smi. | 540 // Check that the key is a smi. |
536 __ JumpIfNotSmi(a2, &check_string); | 541 __ JumpIfNotSmi(a2, &check_name); |
537 __ bind(&index_smi); | 542 __ bind(&index_smi); |
538 // Now the key is known to be a smi. This place is also jumped to from below | 543 // Now the key is known to be a smi. This place is also jumped to from below |
539 // where a numeric string is converted to a smi. | 544 // where a numeric string is converted to a smi. |
540 | 545 |
541 GenerateKeyedLoadReceiverCheck( | 546 GenerateKeyedLoadReceiverCheck( |
542 masm, a1, a0, a3, Map::kHasIndexedInterceptor, &slow_call); | 547 masm, a1, a0, a3, Map::kHasIndexedInterceptor, &slow_call); |
543 | 548 |
544 GenerateFastArrayLoad( | 549 GenerateFastArrayLoad( |
545 masm, a1, a2, t0, a3, a0, a1, &check_number_dictionary, &slow_load); | 550 masm, a1, a2, t0, a3, a0, a1, &check_number_dictionary, &slow_load); |
546 Counters* counters = masm->isolate()->counters(); | 551 Counters* counters = masm->isolate()->counters(); |
(...skipping 26 matching lines...)
573 { | 578 { |
574 FrameScope scope(masm, StackFrame::INTERNAL); | 579 FrameScope scope(masm, StackFrame::INTERNAL); |
575 __ push(a2); // Save the key. | 580 __ push(a2); // Save the key. |
576 __ Push(a1, a2); // Pass the receiver and the key. | 581 __ Push(a1, a2); // Pass the receiver and the key. |
577 __ CallRuntime(Runtime::kKeyedGetProperty, 2); | 582 __ CallRuntime(Runtime::kKeyedGetProperty, 2); |
578 __ pop(a2); // Restore the key. | 583 __ pop(a2); // Restore the key. |
579 } | 584 } |
580 __ mov(a1, v0); | 585 __ mov(a1, v0); |
581 __ jmp(&do_call); | 586 __ jmp(&do_call); |
582 | 587 |
583 __ bind(&check_string); | 588 __ bind(&check_name); |
584 GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call); | 589 GenerateKeyNameCheck(masm, a2, a0, a3, &index_name, &slow_call); |
585 | 590 |
586 // The key is known to be internalized. | 591 // The key is known to be a unique name. |
587 // If the receiver is a regular JS object with slow properties then do | 592 // If the receiver is a regular JS object with slow properties then do |
588 // a quick inline probe of the receiver's dictionary. | 593 // a quick inline probe of the receiver's dictionary. |
589 // Otherwise do the monomorphic cache probe. | 594 // Otherwise do the monomorphic cache probe. |
590 GenerateKeyedLoadReceiverCheck( | 595 GenerateKeyedLoadReceiverCheck( |
591 masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache); | 596 masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache); |
592 | 597 |
593 __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset)); | 598 __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset)); |
594 __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset)); | 599 __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset)); |
595 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 600 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
596 __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at)); | 601 __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at)); |
597 | 602 |
598 GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0); | 603 GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0); |
599 __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3); | 604 __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3); |
600 __ jmp(&do_call); | 605 __ jmp(&do_call); |
601 | 606 |
602 __ bind(&lookup_monomorphic_cache); | 607 __ bind(&lookup_monomorphic_cache); |
603 __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3); | 608 __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3); |
604 GenerateMonomorphicCacheProbe(masm, | 609 GenerateMonomorphicCacheProbe(masm, |
605 argc, | 610 argc, |
606 Code::KEYED_CALL_IC, | 611 Code::KEYED_CALL_IC, |
607 Code::kNoExtraICState); | 612 Code::kNoExtraICState); |
608 // Fall through on miss. | 613 // Fall through on miss. |
609 | 614 |
610 __ bind(&slow_call); | 615 __ bind(&slow_call); |
611 // This branch is taken if: | 616 // This branch is taken if: |
612 // - the receiver requires boxing or access check, | 617 // - the receiver requires boxing or access check, |
613 // - the key is neither smi nor an internalized string, | 618 // - the key is neither smi nor a unique name, |
614 // - the value loaded is not a function, | 619 // - the value loaded is not a function, |
615 // - there is hope that the runtime will create a monomorphic call stub, | 620 // - there is hope that the runtime will create a monomorphic call stub, |
616 // that will get fetched next time. | 621 // that will get fetched next time. |
617 __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3); | 622 __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3); |
618 GenerateMiss(masm, argc); | 623 GenerateMiss(masm, argc); |
619 | 624 |
620 __ bind(&index_string); | 625 __ bind(&index_name); |
621 __ IndexFromHash(a3, a2); | 626 __ IndexFromHash(a3, a2); |
622 // Now jump to the place where smi keys are handled. | 627 // Now jump to the place where smi keys are handled. |
623 __ jmp(&index_smi); | 628 __ jmp(&index_smi); |
624 } | 629 } |
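Putting the pieces together, the megamorphic keyed call stub dispatches on the kind of key. Illustration only, with ordinary C++ standing in for the generated MIPS code; ClassifyKey, TypeOf and HashFieldOf are the hypothetical helpers from the sketch after GenerateKeyNameCheck:

  static void KeyedCallDispatchSketch(Object* receiver, Object* key) {
    if (key->IsSmi()) {
      // index_smi: fast elements, then the number dictionary,
      // then Runtime::kKeyedGetProperty as a last resort.
    } else switch (ClassifyKey(TypeOf(key), HashFieldOf(key))) {
      case ARRAY_INDEX:
        // index_name: extract the index from the hash field
        // (IndexFromHash) and re-enter the smi path above.
        break;
      case UNIQUE_NAME:
        // Probe the property dictionary when the receiver has slow
        // properties, otherwise probe the monomorphic stub cache.
        break;
      case NOT_UNIQUE:
        // slow_call: the runtime may install a monomorphic call stub
        // that gets hit the next time around.
        break;
    }
  }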
625 | 630 |
626 | 631 |
627 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) { | 632 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) { |
628 // ----------- S t a t e ------------- | 633 // ----------- S t a t e ------------- |
629 // -- a2 : name | 634 // -- a2 : name |
630 // -- ra : return address | 635 // -- ra : return address |
631 // ----------------------------------- | 636 // ----------------------------------- |
632 | 637 |
633 // Check if the name is a string. | 638 // Check if the name is really a name. |
634 Label miss; | 639 Label miss; |
635 __ JumpIfSmi(a2, &miss); | 640 __ JumpIfSmi(a2, &miss); |
636 __ IsObjectJSStringType(a2, a0, &miss); | 641 __ IsObjectNameType(a2, a0, &miss); |
637 | 642 |
638 CallICBase::GenerateNormal(masm, argc); | 643 CallICBase::GenerateNormal(masm, argc); |
639 __ bind(&miss); | 644 __ bind(&miss); |
640 GenerateMiss(masm, argc); | 645 GenerateMiss(masm, argc); |
641 } | 646 } |
642 | 647 |
643 | 648 |
644 // Defined in ic.cc. | 649 // Defined in ic.cc. |
645 Object* LoadIC_Miss(Arguments args); | 650 Object* LoadIC_Miss(Arguments args); |
646 | 651 |
(...skipping 18 matching lines...)
665 | 670 |
666 void LoadIC::GenerateNormal(MacroAssembler* masm) { | 671 void LoadIC::GenerateNormal(MacroAssembler* masm) { |
667 // ----------- S t a t e ------------- | 672 // ----------- S t a t e ------------- |
668 // -- a2 : name | 673 // -- a2 : name |
669 // -- lr : return address | 674 // -- lr : return address |
670 // -- a0 : receiver | 675 // -- a0 : receiver |
671 // -- sp[0] : receiver | 676 // -- sp[0] : receiver |
672 // ----------------------------------- | 677 // ----------------------------------- |
673 Label miss; | 678 Label miss; |
674 | 679 |
675 GenerateStringDictionaryReceiverCheck(masm, a0, a1, a3, t0, &miss); | 680 GenerateNameDictionaryReceiverCheck(masm, a0, a1, a3, t0, &miss); |
676 | 681 |
677 // a1: elements | 682 // a1: elements |
678 GenerateDictionaryLoad(masm, &miss, a1, a2, v0, a3, t0); | 683 GenerateDictionaryLoad(masm, &miss, a1, a2, v0, a3, t0); |
679 __ Ret(); | 684 __ Ret(); |
680 | 685 |
681 // Cache miss: Jump to runtime. | 686 // Cache miss: Jump to runtime. |
682 __ bind(&miss); | 687 __ bind(&miss); |
683 GenerateMiss(masm); | 688 GenerateMiss(masm); |
684 } | 689 } |
685 | 690 |
(...skipping 227 matching lines...)
913 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 918 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); |
914 } | 919 } |
915 | 920 |
916 | 921 |
917 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { | 922 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { |
918 // ---------- S t a t e -------------- | 923 // ---------- S t a t e -------------- |
919 // -- ra : return address | 924 // -- ra : return address |
920 // -- a0 : key | 925 // -- a0 : key |
921 // -- a1 : receiver | 926 // -- a1 : receiver |
922 // ----------------------------------- | 927 // ----------------------------------- |
923 Label slow, check_string, index_smi, index_string, property_array_property; | 928 Label slow, check_name, index_smi, index_name, property_array_property; |
924 Label probe_dictionary, check_number_dictionary; | 929 Label probe_dictionary, check_number_dictionary; |
925 | 930 |
926 Register key = a0; | 931 Register key = a0; |
927 Register receiver = a1; | 932 Register receiver = a1; |
928 | 933 |
929 Isolate* isolate = masm->isolate(); | 934 Isolate* isolate = masm->isolate(); |
930 | 935 |
931 // Check that the key is a smi. | 936 // Check that the key is a smi. |
932 __ JumpIfNotSmi(key, &check_string); | 937 __ JumpIfNotSmi(key, &check_name); |
933 __ bind(&index_smi); | 938 __ bind(&index_smi); |
934 // Now the key is known to be a smi. This place is also jumped to from below | 939 // Now the key is known to be a smi. This place is also jumped to from below |
935 // where a numeric string is converted to a smi. | 940 // where a numeric string is converted to a smi. |
936 | 941 |
937 GenerateKeyedLoadReceiverCheck( | 942 GenerateKeyedLoadReceiverCheck( |
938 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow); | 943 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow); |
939 | 944 |
940 // Check the receiver's map to see if it has fast elements. | 945 // Check the receiver's map to see if it has fast elements. |
941 __ CheckFastElements(a2, a3, &check_number_dictionary); | 946 __ CheckFastElements(a2, a3, &check_number_dictionary); |
942 | 947 |
(...skipping 18 matching lines...)
961 __ Ret(); | 966 __ Ret(); |
962 | 967 |
963 // Slow case, key and receiver still in a0 and a1. | 968 // Slow case, key and receiver still in a0 and a1. |
964 __ bind(&slow); | 969 __ bind(&slow); |
965 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), | 970 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), |
966 1, | 971 1, |
967 a2, | 972 a2, |
968 a3); | 973 a3); |
969 GenerateRuntimeGetProperty(masm); | 974 GenerateRuntimeGetProperty(masm); |
970 | 975 |
971 __ bind(&check_string); | 976 __ bind(&check_name); |
972 GenerateKeyStringCheck(masm, key, a2, a3, &index_string, &slow); | 977 GenerateKeyNameCheck(masm, key, a2, a3, &index_name, &slow); |
973 | 978 |
974 GenerateKeyedLoadReceiverCheck( | 979 GenerateKeyedLoadReceiverCheck( |
975 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow); | 980 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow); |
976 | 981 |
977 | 982 |
978 // If the receiver is a fast-case object, check the keyed lookup | 983 // If the receiver is a fast-case object, check the keyed lookup |
979 // cache. Otherwise probe the dictionary. | 984 // cache. Otherwise probe the dictionary. |
980 __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset)); | 985 __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset)); |
981 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset)); | 986 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset)); |
982 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 987 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
983 __ Branch(&probe_dictionary, eq, t0, Operand(at)); | 988 __ Branch(&probe_dictionary, eq, t0, Operand(at)); |
984 | 989 |
985 // Load the map of the receiver, compute the keyed lookup cache hash | 990 // Load the map of the receiver, compute the keyed lookup cache hash |
986 // based on 32 bits of the map pointer and the string hash. | 991 // based on 32 bits of the map pointer and the name hash. |
987 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset)); | 992 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset)); |
988 __ sra(a3, a2, KeyedLookupCache::kMapHashShift); | 993 __ sra(a3, a2, KeyedLookupCache::kMapHashShift); |
989 __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset)); | 994 __ lw(t0, FieldMemOperand(a0, Name::kHashFieldOffset)); |
990 __ sra(at, t0, String::kHashShift); | 995 __ sra(at, t0, Name::kHashShift); |
991 __ xor_(a3, a3, at); | 996 __ xor_(a3, a3, at); |
992 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask; | 997 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask; |
993 __ And(a3, a3, Operand(mask)); | 998 __ And(a3, a3, Operand(mask)); |
994 | 999 |
995 // Load the key (consisting of map and internalized string) from the cache and | 1000 // Load the key (consisting of map and unique name) from the cache and |
996 // check for match. | 1001 // check for match. |
997 Label load_in_object_property; | 1002 Label load_in_object_property; |
998 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; | 1003 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket; |
999 Label hit_on_nth_entry[kEntriesPerBucket]; | 1004 Label hit_on_nth_entry[kEntriesPerBucket]; |
1000 ExternalReference cache_keys = | 1005 ExternalReference cache_keys = |
1001 ExternalReference::keyed_lookup_cache_keys(isolate); | 1006 ExternalReference::keyed_lookup_cache_keys(isolate); |
1002 __ li(t0, Operand(cache_keys)); | 1007 __ li(t0, Operand(cache_keys)); |
1003 __ sll(at, a3, kPointerSizeLog2 + 1); | 1008 __ sll(at, a3, kPointerSizeLog2 + 1); |
1004 __ addu(t0, t0, at); | 1009 __ addu(t0, t0, at); |
1005 | 1010 |
(...skipping 72 matching lines...)
1078 __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset)); | 1083 __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset)); |
1079 GenerateGlobalInstanceTypeCheck(masm, a2, &slow); | 1084 GenerateGlobalInstanceTypeCheck(masm, a2, &slow); |
1080 // Load the property to v0. | 1085 // Load the property to v0. |
1081 GenerateDictionaryLoad(masm, &slow, a3, a0, v0, a2, t0); | 1086 GenerateDictionaryLoad(masm, &slow, a3, a0, v0, a2, t0); |
1082 __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(), | 1087 __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(), |
1083 1, | 1088 1, |
1084 a2, | 1089 a2, |
1085 a3); | 1090 a3); |
1086 __ Ret(); | 1091 __ Ret(); |
1087 | 1092 |
1088 __ bind(&index_string); | 1093 __ bind(&index_name); |
1089 __ IndexFromHash(a3, key); | 1094 __ IndexFromHash(a3, key); |
1090 // Now jump to the place where smi keys are handled. | 1095 // Now jump to the place where smi keys are handled. |
1091 __ Branch(&index_smi); | 1096 __ Branch(&index_smi); |
1092 } | 1097 } |
1093 | 1098 |
1094 | 1099 |
1095 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { | 1100 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { |
1096 // ---------- S t a t e -------------- | 1101 // ---------- S t a t e -------------- |
1097 // -- ra : return address | 1102 // -- ra : return address |
1098 // -- a0 : key (index) | 1103 // -- a0 : key (index) |
(...skipping 439 matching lines...)
1538 | 1543 |
1539 void StoreIC::GenerateNormal(MacroAssembler* masm) { | 1544 void StoreIC::GenerateNormal(MacroAssembler* masm) { |
1540 // ----------- S t a t e ------------- | 1545 // ----------- S t a t e ------------- |
1541 // -- a0 : value | 1546 // -- a0 : value |
1542 // -- a1 : receiver | 1547 // -- a1 : receiver |
1543 // -- a2 : name | 1548 // -- a2 : name |
1544 // -- ra : return address | 1549 // -- ra : return address |
1545 // ----------------------------------- | 1550 // ----------------------------------- |
1546 Label miss; | 1551 Label miss; |
1547 | 1552 |
1548 GenerateStringDictionaryReceiverCheck(masm, a1, a3, t0, t1, &miss); | 1553 GenerateNameDictionaryReceiverCheck(masm, a1, a3, t0, t1, &miss); |
1549 | 1554 |
1550 GenerateDictionaryStore(masm, &miss, a3, a2, a0, t0, t1); | 1555 GenerateDictionaryStore(masm, &miss, a3, a2, a0, t0, t1); |
1551 Counters* counters = masm->isolate()->counters(); | 1556 Counters* counters = masm->isolate()->counters(); |
1552 __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1); | 1557 __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1); |
1553 __ Ret(); | 1558 __ Ret(); |
1554 | 1559 |
1555 __ bind(&miss); | 1560 __ bind(&miss); |
1556 __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1); | 1561 __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1); |
1557 GenerateMiss(masm); | 1562 GenerateMiss(masm); |
1558 } | 1563 } |
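The store path is stricter than the load path: GenerateDictionaryStore (see the earlier hunk with kTypeAndReadOnlyMask) rejects the entry unless it is a NORMAL property that is also writable, and both conditions are checked with a single mask over the smi-encoded details word. A plain C++ restatement (hypothetical helper, constants as in the diff):

  static bool IsWritableNormalProperty(uint32_t details_smi) {
    const uint32_t kTypeAndReadOnlyMask =
        (PropertyDetails::TypeField::kMask |
         PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
    return (details_smi & kTypeAndReadOnlyMask) == 0;
  }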
(...skipping 114 matching lines...)
1673 } else { | 1678 } else { |
1674 ASSERT(Assembler::IsBne(branch_instr)); | 1679 ASSERT(Assembler::IsBne(branch_instr)); |
1675 patcher.ChangeBranchCondition(eq); | 1680 patcher.ChangeBranchCondition(eq); |
1676 } | 1681 } |
1677 } | 1682 } |
1678 | 1683 |
1679 | 1684 |
1680 } } // namespace v8::internal | 1685 } } // namespace v8::internal |
1681 | 1686 |
1682 #endif // V8_TARGET_ARCH_MIPS | 1687 #endif // V8_TARGET_ARCH_MIPS |