| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 211 matching lines...) |
| 222 Register map, | 222 Register map, |
| 223 Register scratch, | 223 Register scratch, |
| 224 int interceptor_bit, | 224 int interceptor_bit, |
| 225 Label* slow) { | 225 Label* slow) { |
| 226 // Check that the object isn't a smi. | 226 // Check that the object isn't a smi. |
| 227 __ JumpIfSmi(receiver, slow); | 227 __ JumpIfSmi(receiver, slow); |
| 228 // Get the map of the receiver. | 228 // Get the map of the receiver. |
| 229 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 229 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 230 // Check bit field. | 230 // Check bit field. |
| 231 __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset)); | 231 __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset)); |
| 232 __ And(at, scratch, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); | 232 __ And(at, scratch, |
| | 233 Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit))); |
| 233 __ Branch(slow, ne, at, Operand(zero_reg)); | 234 __ Branch(slow, ne, at, Operand(zero_reg)); |
| 234 // Check that the object is some kind of JS object EXCEPT JS Value type. | 235 // Check that the object is some kind of JS object EXCEPT JS Value type. |
| 235 // In the case that the object is a value-wrapper object, | 236 // In the case that the object is a value-wrapper object, |
| 236 // we enter the runtime system to make sure that indexing into string | 237 // we enter the runtime system to make sure that indexing into string |
| 237 // objects works as intended. | 238 // objects works as intended. |
| 238 ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE); | 239 ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE); |
| 239 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 240 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
| 240 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); | 241 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); |
| 241 } | 242 } |
| 242 | 243 |
| (...skipping 399 matching lines...) |
| 642 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), | 643 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), |
| 643 1, | 644 1, |
| 644 a2, | 645 a2, |
| 645 a3); | 646 a3); |
| 646 GenerateRuntimeGetProperty(masm); | 647 GenerateRuntimeGetProperty(masm); |
| 647 | 648 |
| 648 __ bind(&check_name); | 649 __ bind(&check_name); |
| 649 GenerateKeyNameCheck(masm, key, a2, a3, &index_name, &slow); | 650 GenerateKeyNameCheck(masm, key, a2, a3, &index_name, &slow); |
| 650 | 651 |
| 651 GenerateKeyedLoadReceiverCheck( | 652 GenerateKeyedLoadReceiverCheck( |
| 652 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow); | 653 masm, receiver, a2, a3, Map::kHasNamedInterceptor, &slow); |
| 653 | 654 |
| 654 | 655 |
| 655 // If the receiver is a fast-case object, check the keyed lookup | 656 // If the receiver is a fast-case object, check the keyed lookup |
| 656 // cache. Otherwise probe the dictionary. | 657 // cache. Otherwise probe the dictionary. |
| 657 __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset)); | 658 __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset)); |
| 658 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset)); | 659 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset)); |
| 659 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 660 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
| 660 __ Branch(&probe_dictionary, eq, t0, Operand(at)); | 661 __ Branch(&probe_dictionary, eq, t0, Operand(at)); |
| 661 | 662 |
| 662 // Load the map of the receiver, compute the keyed lookup cache hash | 663 // Load the map of the receiver, compute the keyed lookup cache hash |
| (...skipping 689 matching lines...) |
| 1352 } else { | 1353 } else { |
| 1353 ASSERT(Assembler::IsBne(branch_instr)); | 1354 ASSERT(Assembler::IsBne(branch_instr)); |
| 1354 patcher.ChangeBranchCondition(eq); | 1355 patcher.ChangeBranchCondition(eq); |
| 1355 } | 1356 } |
| 1356 } | 1357 } |
| 1357 | 1358 |
| 1358 | 1359 |
| 1359 } } // namespace v8::internal | 1360 } } // namespace v8::internal |
| 1360 | 1361 |
| 1361 #endif // V8_TARGET_ARCH_MIPS | 1362 #endif // V8_TARGET_ARCH_MIPS |
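A note on what the two hunks above change, plus a small standalone sketch that is not part of the CL: the receiver check at new lines 232-233 now derives its mask from the interceptor_bit argument instead of the fixed KeyedLoadIC::kSlowCaseBitFieldMask, and the call site at new line 653 passes Map::kHasNamedInterceptor for the named-key path. The C++ below is a minimal illustration of that mask test only; the bit positions are made-up placeholders, not the real values of V8's Map bit-field constants.

// Standalone illustration of the bit-field test emitted by
// GenerateKeyedLoadReceiverCheck. Bit positions are hypothetical
// placeholders, not the actual constants from V8's Map class.
#include <cstdint>
#include <cstdio>

constexpr int kIsAccessCheckNeeded = 1;   // placeholder bit position
constexpr int kHasNamedInterceptor = 2;   // placeholder bit position
constexpr int kHasIndexedInterceptor = 3; // placeholder bit position

// Mirrors the emitted sequence: And(at, bit_field, mask) followed by
// Branch(slow, ne, at, zero_reg). Returns true when the receiver must
// take the slow path.
bool NeedsSlowPath(uint8_t bit_field, int interceptor_bit) {
  uint8_t mask = (1 << kIsAccessCheckNeeded) | (1 << interceptor_bit);
  return (bit_field & mask) != 0;
}

int main() {
  uint8_t named_only = 1 << kHasNamedInterceptor;
  // The element path only tests the indexed-interceptor bit, so a map
  // with just a named interceptor still passes its fast check...
  std::printf("%d\n", NeedsSlowPath(named_only, kHasIndexedInterceptor));  // 0
  // ...while the named-property path (the call site changed above)
  // now bails out to the slow case as intended.
  std::printf("%d\n", NeedsSlowPath(named_only, kHasNamedInterceptor));    // 1
  std::printf("%d\n", NeedsSlowPath(0, kHasNamedInterceptor));             // 0
  return 0;
}

Run as an ordinary program, the first call shows why the element path is unaffected by the change, and the second shows the named-property path rejecting maps that carry a named interceptor.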