OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5955 matching lines...) |
5966 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, | 5966 void DirectCEntryStub::GenerateCall(MacroAssembler* masm, |
5967 Register target) { | 5967 Register target) { |
5968 __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()), | 5968 __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()), |
5969 RelocInfo::CODE_TARGET)); | 5969 RelocInfo::CODE_TARGET)); |
5970 // Push return address (accessible to GC through exit frame pc). | 5970 // Push return address (accessible to GC through exit frame pc). |
5971 __ str(pc, MemOperand(sp, 0)); | 5971 __ str(pc, MemOperand(sp, 0)); |
5972 __ Jump(target); // Call the C++ function. | 5972 __ Jump(target); // Call the C++ function. |
5973 } | 5973 } |
5974 | 5974 |
5975 | 5975 |
| 5976 void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, |
| 5977 Label* miss, |
| 5978 Label* done, |
| 5979 Register receiver, |
| 5980 Register properties, |
| 5981 String* name, |
| 5982 Register scratch0) { |
| 5983 // If the names of the slots in the range from 1 to kProbes - 1 for the hash |
| 5984 // value are not equal to the name, and the kProbes-th slot is not used (its |
| 5985 // name is the undefined value), then the hash table is guaranteed not to |
| 5986 // contain the property. This holds even if some slots represent deleted |
| 5987 // properties (their names are the null value). |
| 5988 for (int i = 0; i < kInlinedProbes; i++) { |
| 5989 // scratch0 points to properties hash. |
| 5990 // Compute the masked index: (hash + i + i * i) & mask. |
| 5991 Register index = scratch0; |
| 5992 // Capacity is smi 2^n. |
| 5993 __ ldr(index, FieldMemOperand(properties, kCapacityOffset)); |
| 5994 __ sub(index, index, Operand(1)); |
| 5995 __ and_(index, index, Operand( |
| 5996 Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i)))); |
| 5997 |
| 5998 // Scale the index by multiplying by the entry size. |
| 5999 ASSERT(StringDictionary::kEntrySize == 3); |
| 6000 __ add(index, index, Operand(index, LSL, 1)); // index *= 3. |
| 6001 |
| 6002 Register entity_name = scratch0; |
| 6003 // Finding undefined at this index means the name is not in the table. |
| 6004 ASSERT_EQ(kSmiTagSize, 1); |
| 6005 Register tmp = properties; |
| 6006 __ add(tmp, properties, Operand(index, LSL, 1)); |
| 6007 __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); |
| 6008 |
| 6009 ASSERT(!tmp.is(entity_name)); |
| 6010 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); |
| 6011 __ cmp(entity_name, tmp); |
| 6012 __ b(eq, done); |
| 6013 |
| 6014 if (i != kInlinedProbes - 1) { |
| 6015 // Stop if we found the property. |
| 6016 __ cmp(entity_name, Operand(Handle<String>(name))); |
| 6017 __ b(eq, miss); |
| 6018 |
| 6019 // Check if the entry name is not a symbol. |
| 6020 __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); |
| 6021 __ ldrb(entity_name, |
| 6022 FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); |
| 6023 __ tst(entity_name, Operand(kIsSymbolMask)); |
| 6024 __ b(eq, miss); |
| 6025 |
| 6026 // Restore the properties. |
| 6027 __ ldr(properties, |
| 6028 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
| 6029 } |
| 6030 } |
| 6031 |
| 6032 const int spill_mask = |
| 6033 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() | |
| 6034 r2.bit() | r1.bit() | r0.bit()); |
| 6035 |
| 6036 __ stm(db_w, sp, spill_mask); |
| 6037 __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
| 6038 __ mov(r1, Operand(Handle<String>(name))); |
| 6039 StringDictionaryLookupStub stub(NEGATIVE_LOOKUP); |
| 6040 __ CallStub(&stub); |
| 6041 __ tst(r0, Operand(r0)); |
| 6042 __ ldm(ia_w, sp, spill_mask); |
| 6043 |
| 6044 __ b(eq, done); |
| 6045 __ b(ne, miss); |
| 6046 } |
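For readers who do not follow ARM assembly, the snippet below is a rough host-side C++ model of the inlined probe loop emitted above, assuming the (hash + i + i * i) & mask probing described in the comments. ProbeOutcome, Entry, and the empty-string stand-in for the undefined sentinel are illustrative inventions, not V8 types.

    // Illustrative sketch only -- not V8 code. An undefined slot proves absence,
    // while a matching key or a non-symbol key makes the inlined fast path give
    // up and take the miss label, exactly like the tst/b(eq, miss) pair above.
    #include <cstdint>
    #include <string>
    #include <vector>

    enum class ProbeOutcome {
      kProvablyAbsent,  // branch to the |done| label
      kGiveUp,          // branch to the |miss| label
      kCallStub         // fall through to the out-of-line stub call
    };

    struct Entry {            // hypothetical stand-in for a dictionary slot
      std::string key;        // "" plays the role of the undefined sentinel
      bool is_symbol = true;  // mirrors the kIsSymbolMask check
    };

    ProbeOutcome NegativeLookupProbes(const std::vector<Entry>& slots,
                                      uint32_t hash, const std::string& name,
                                      int inlined_probes) {
      uint32_t mask = static_cast<uint32_t>(slots.size()) - 1;  // capacity is 2^n
      for (int i = 0; i < inlined_probes; i++) {
        const Entry& e = slots[(hash + i + i * i) & mask];  // masked probe index
        if (e.key.empty()) return ProbeOutcome::kProvablyAbsent;  // undefined slot
        if (i != inlined_probes - 1) {
          if (e.key == name) return ProbeOutcome::kGiveUp;   // property is present
          if (!e.is_symbol) return ProbeOutcome::kGiveUp;    // non-symbol key
        }
      }
      return ProbeOutcome::kCallStub;  // inconclusive: call the lookup stub
    }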
| 6047 |
| 6048 |
| 6049 // Probe the string dictionary in the |elements| register. Jump to the |
| 6050 // |done| label if a property with the given name is found. Jump to |
| 6051 // the |miss| label otherwise. |
| 6052 // If lookup was successful |scratch2| will be equal to elements + 4 * index. |
| 6053 void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, |
| 6054 Label* miss, |
| 6055 Label* done, |
| 6056 Register elements, |
| 6057 Register name, |
| 6058 Register scratch1, |
| 6059 Register scratch2) { |
| 6060 // Assert that name contains a string. |
| 6061 if (FLAG_debug_code) __ AbortIfNotString(name); |
| 6062 |
| 6063 // Compute the capacity mask. |
| 6064 __ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset)); |
| 6065 __ mov(scratch1, Operand(scratch1, ASR, kSmiTagSize)); // convert smi to int |
| 6066 __ sub(scratch1, scratch1, Operand(1)); |
| 6067 |
| 6068 // Generate an unrolled loop that performs a few probes before |
| 6069 // giving up. Measurements done on Gmail indicate that 2 probes |
| 6070 // cover ~93% of loads from dictionaries. |
| 6071 for (int i = 0; i < kInlinedProbes; i++) { |
| 6072 // Compute the masked index: (hash + i + i * i) & mask. |
| 6073 __ ldr(scratch2, FieldMemOperand(name, String::kHashFieldOffset)); |
| 6074 if (i > 0) { |
| 6075 // Add the probe offset (i + i * i) shifted left so that the hash does not |
| 6076 // have to be right-shifted in a separate instruction; the value |
| 6077 // hash + i + i * i is right-shifted in the following 'and' instruction. |
| 6078 ASSERT(StringDictionary::GetProbeOffset(i) < |
| 6079 1 << (32 - String::kHashFieldOffset)); |
| 6080 __ add(scratch2, scratch2, Operand( |
| 6081 StringDictionary::GetProbeOffset(i) << String::kHashShift)); |
| 6082 } |
| 6083 __ and_(scratch2, scratch1, Operand(scratch2, LSR, String::kHashShift)); |
| 6084 |
| 6085 // Scale the index by multiplying by the element size. |
| 6086 ASSERT(StringDictionary::kEntrySize == 3); |
| 6087 // scratch2 = scratch2 * 3. |
| 6088 __ add(scratch2, scratch2, Operand(scratch2, LSL, 1)); |
| 6089 |
| 6090 // Check if the key is identical to the name. |
| 6091 __ add(scratch2, elements, Operand(scratch2, LSL, 2)); |
| 6092 __ ldr(ip, FieldMemOperand(scratch2, kElementsStartOffset)); |
| 6093 __ cmp(name, Operand(ip)); |
| 6094 __ b(eq, done); |
| 6095 } |
| 6096 |
| 6097 const int spill_mask = |
| 6098 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | |
| 6099 r3.bit() | r2.bit() | r1.bit() | r0.bit()) & |
| 6100 ~(scratch1.bit() | scratch2.bit()); |
| 6101 |
| 6102 __ stm(db_w, sp, spill_mask); |
| 6103 __ Move(r0, elements); |
| 6104 __ Move(r1, name); |
| 6105 StringDictionaryLookupStub stub(POSITIVE_LOOKUP); |
| 6106 __ CallStub(&stub); |
| 6107 __ tst(r0, Operand(r0)); |
| 6108 __ mov(scratch2, Operand(r2)); |
| 6109 __ ldm(ia_w, sp, spill_mask); |
| 6110 |
| 6111 __ b(ne, done); |
| 6112 __ b(eq, miss); |
| 6113 } |
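The comment at 6075-6077 (and again in Generate() below) relies on a small arithmetic identity: adding the probe offset pre-shifted by the hash shift, then masking the right-shifted sum, yields the same index as shifting the hash first. The following standalone check illustrates it; kAssumedHashShift and the mask are made-up values for illustration, not the real String::kHashShift or table capacity.

    // Illustrative sketch only -- verifies the pre-shifted probe-offset trick.
    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t kAssumedHashShift = 2;  // stand-in for String::kHashShift
      const uint32_t mask = 0x7F;            // capacity - 1 for a 128-entry table
      const uint32_t hash_field = 0xBEEFu << kAssumedHashShift;  // raw hash field
      for (uint32_t i = 0; i < 8; i++) {
        uint32_t probe = i + i * i;          // probe offset from the comment
        // Fused form used by the generated code: one add, then a single AND
        // with a right-shifted operand (the LSR is folded into the and_).
        uint32_t fused = ((hash_field + (probe << kAssumedHashShift))
                          >> kAssumedHashShift) & mask;
        // Straightforward form it replaces: shift the hash, then add and mask.
        uint32_t plain = ((hash_field >> kAssumedHashShift) + probe) & mask;
        assert(fused == plain);  // holds as long as the addition does not overflow
      }
      return 0;
    }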
| 6114 |
| 6115 |
| 6116 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
| 6117 // Registers: |
| 6118 //  result (r0): the StringDictionary to probe. |
| 6119 //  key (r1): the name to look up. |
| 6120 //  index (r2): will hold the index of the matching entry (scaled by the |
| 6121 //              entry size) if the lookup is successful; it may be |
| 6122 //              clobbered otherwise. |
| 6123 // Returns: |
| 6124 //  result (r0) is zero if the lookup failed, non-zero otherwise. |
| 6125 |
| 6126 Register result = r0; |
| 6127 Register dictionary = r0; |
| 6128 Register key = r1; |
| 6129 Register index = r2; |
| 6130 Register mask = r3; |
| 6131 Register hash = r4; |
| 6132 Register undefined = r5; |
| 6133 Register entry_key = r6; |
| 6134 |
| 6135 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; |
| 6136 |
| 6137 __ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset)); |
| 6138 __ mov(mask, Operand(mask, ASR, kSmiTagSize)); |
| 6139 __ sub(mask, mask, Operand(1)); |
| 6140 |
| 6141 __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset)); |
| 6142 |
| 6143 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); |
| 6144 |
| 6145 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
| 6146 // Compute the masked index: (hash + i + i * i) & mask. |
| 6147 // Capacity is smi 2^n. |
| 6148 if (i > 0) { |
| 6149 // Add the probe offset (i + i * i) shifted left so that the hash does not |
| 6150 // have to be right-shifted in a separate instruction; the value |
| 6151 // hash + i + i * i is right-shifted in the following 'and' instruction. |
| 6152 ASSERT(StringDictionary::GetProbeOffset(i) < |
| 6153 1 << (32 - String::kHashFieldOffset)); |
| 6154 __ add(index, hash, Operand( |
| 6155 StringDictionary::GetProbeOffset(i) << String::kHashShift)); |
| 6156 } else { |
| 6157 __ mov(index, Operand(hash)); |
| 6158 } |
| 6159 __ and_(index, mask, Operand(index, LSR, String::kHashShift)); |
| 6160 |
| 6161 // Scale the index by multiplying by the entry size. |
| 6162 ASSERT(StringDictionary::kEntrySize == 3); |
| 6163 __ add(index, index, Operand(index, LSL, 1)); // index *= 3. |
| 6164 |
| 6165 ASSERT_EQ(kSmiTagSize, 1); |
| 6166 __ add(index, dictionary, Operand(index, LSL, 2)); |
| 6167 __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset)); |
| 6168 |
| 6169 // Finding undefined at this index means the name is not in the table. |
| 6170 __ cmp(entry_key, Operand(undefined)); |
| 6171 __ b(eq, &not_in_dictionary); |
| 6172 |
| 6173 // Stop if we found the property. |
| 6174 __ cmp(entry_key, Operand(key)); |
| 6175 __ b(eq, &in_dictionary); |
| 6176 |
| 6177 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
| 6178 // Check if the entry name is not a symbol. |
| 6179 __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); |
| 6180 __ ldrb(entry_key, |
| 6181 FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); |
| 6182 __ tst(entry_key, Operand(kIsSymbolMask)); |
| 6183 __ b(eq, &maybe_in_dictionary); |
| 6184 } |
| 6185 } |
| 6186 |
| 6187 __ bind(&maybe_in_dictionary); |
| 6188 // If we are doing a negative lookup, then a probing failure should be |
| 6189 // treated as a lookup success. For a positive lookup, a probing failure |
| 6190 // should be treated as a lookup failure. |
| 6191 if (mode_ == POSITIVE_LOOKUP) { |
| 6192 __ mov(result, Operand(0)); |
| 6193 __ Ret(); |
| 6194 } |
| 6195 |
| 6196 __ bind(&in_dictionary); |
| 6197 __ mov(result, Operand(1)); |
| 6198 __ Ret(); |
| 6199 |
| 6200 __ bind(&not_in_dictionary); |
| 6201 __ mov(result, Operand(0)); |
| 6202 __ Ret(); |
| 6203 |
| 6204 } |
| 6205 |
| 6206 |
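To summarize how the two entry points above consume the stub's result register, here is a hypothetical helper (InterpretLookupResult is not a V8 function) restating their tst/branch sequences in plain C++: GenerateNegativeLookup takes |done| on a zero result (the name is provably absent), while GeneratePositiveLookup takes |done| on a non-zero result (the entry was found, and r2 then addresses the matching entry).

    // Illustrative sketch only -- not part of the stub's interface.
    #include <cstdint>

    enum LookupMode { NEGATIVE_LOOKUP, POSITIVE_LOOKUP };

    // Returns true when the caller should branch to |done|, false for |miss|.
    bool InterpretLookupResult(LookupMode mode, uint32_t r0) {
      return mode == NEGATIVE_LOOKUP ? (r0 == 0) : (r0 != 0);
    }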
5976 #undef __ | 6207 #undef __ |
5977 | 6208 |
5978 } } // namespace v8::internal | 6209 } } // namespace v8::internal |
5979 | 6210 |
5980 #endif // V8_TARGET_ARCH_ARM | 6211 #endif // V8_TARGET_ARCH_ARM |