OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 23 matching lines...) |
34 #include "stub-cache.h" | 34 #include "stub-cache.h" |
35 | 35 |
36 namespace v8 { | 36 namespace v8 { |
37 namespace internal { | 37 namespace internal { |
38 | 38 |
39 #define __ ACCESS_MASM(masm) | 39 #define __ ACCESS_MASM(masm) |
40 | 40 |
41 | 41 |
42 static void ProbeTable(Isolate* isolate, | 42 static void ProbeTable(Isolate* isolate, |
43 MacroAssembler* masm, | 43 MacroAssembler* masm, |
44 Code::Flags flags, | 44 Register flags, |
45 StubCache::Table table, | 45 StubCache::Table table, |
46 Register name, | 46 Register name, |
47 Register offset, | 47 Register offset, |
48 Register scratch, | 48 Register scratch, |
49 Register scratch2) { | 49 Register scratch2) { |
50 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | 50 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
51 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); | 51 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); |
52 | 52 |
53 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); | 53 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); |
54 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); | 54 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); |
55 | 55 |
56 // Check the relative positions of the address fields. | 56 // Check the relative positions of the address fields. |
57 ASSERT(value_off_addr > key_off_addr); | 57 ASSERT(value_off_addr > key_off_addr); |
58 ASSERT((value_off_addr - key_off_addr) % 4 == 0); | 58 ASSERT((value_off_addr - key_off_addr) % 4 == 0); |
59 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); | 59 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); |
60 | 60 |
61 Label miss; | 61 Label miss; |
62 Register offsets_base_addr = scratch; | 62 Register offsets_base_addr = scratch; |
63 | 63 |
64 // Check that the key in the entry matches the name. | 64 // Check that the key in the entry matches the name. |
65 __ mov(offsets_base_addr, Operand(key_offset)); | 65 __ mov(offsets_base_addr, Operand(key_offset)); |
66 __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1)); | 66 __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1, PreIndex)); |
67 __ cmp(name, ip); | 67 __ cmp(name, ip); |
68 __ b(ne, &miss); | 68 __ b(ne, &miss); |
69 | 69 |
70 // Get the code entry from the cache. | 70 // Get the code entry from the cache. |
71 __ add(offsets_base_addr, offsets_base_addr, | 71 __ ldr(scratch2, MemOperand(offsets_base_addr, |
72 Operand(value_off_addr - key_off_addr)); | 72 value_off_addr - key_off_addr)); |
73 __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1)); | |
74 | 73 |
75 // Check that the flags match what we're looking for. | 74 // Check that the flags match what we're looking for. |
76 __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset)); | 75 __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset)); |
77 __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup)); | 76 __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup)); |
78 __ cmp(scratch2, Operand(flags)); | 77 __ cmp(scratch2, Operand(flags)); |
79 __ b(ne, &miss); | 78 __ b(ne, &miss); |
80 | 79 |
81 // Re-load code entry from cache. | 80 // Re-load code entry from cache. |
82 __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1)); | 81 __ ldr(offset, MemOperand(offsets_base_addr, value_off_addr - key_off_addr)); |
83 | 82 |
84 // Jump to the first instruction in the code stub. | 83 // Jump to the first instruction in the code stub. |
85 __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag)); | 84 __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag)); |
86 __ Jump(offset); | 85 __ Jump(offset); |
87 | 86 |
88 // Miss: fall through. | 87 // Miss: fall through. |
89 __ bind(&miss); | 88 __ bind(&miss); |
90 } | 89 } |
91 | 90 |
92 | 91 |
(...skipping 61 matching lines...) |
154 return result; | 153 return result; |
155 } | 154 } |
156 | 155 |
157 | 156 |
158 void StubCache::GenerateProbe(MacroAssembler* masm, | 157 void StubCache::GenerateProbe(MacroAssembler* masm, |
159 Code::Flags flags, | 158 Code::Flags flags, |
160 Register receiver, | 159 Register receiver, |
161 Register name, | 160 Register name, |
162 Register scratch, | 161 Register scratch, |
163 Register extra, | 162 Register extra, |
164 Register extra2) { | 163 Register extra2, |
| 164 Register extra3) { |
165 Isolate* isolate = masm->isolate(); | 165 Isolate* isolate = masm->isolate(); |
166 Label miss; | 166 Label miss; |
167 | 167 |
168 // Make sure that code is valid. The shifting code relies on the | 168 // Make sure that code is valid. The shifting code relies on the |
169 // entry size being 8. | 169 // entry size being 8. |
170 ASSERT(sizeof(Entry) == 8); | 170 ASSERT(sizeof(Entry) == 8); |
171 | 171 |
172 // Make sure the flags does not name a specific type. | 172 // Make sure the flags does not name a specific type. |
173 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); | 173 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); |
174 | 174 |
175 // Make sure that there are no register conflicts. | 175 // Make sure that there are no register conflicts and registers are valid. |
176 ASSERT(!scratch.is(receiver)); | 176 ASSERT(NumRegs(receiver.bit() | name.bit() | scratch.bit() | extra.bit() | |
177 ASSERT(!scratch.is(name)); | 177 extra2.bit() | extra3.bit()) == 6); |
178 ASSERT(!extra.is(receiver)); | |
179 ASSERT(!extra.is(name)); | |
180 ASSERT(!extra.is(scratch)); | |
181 ASSERT(!extra2.is(receiver)); | |
182 ASSERT(!extra2.is(name)); | |
183 ASSERT(!extra2.is(scratch)); | |
184 ASSERT(!extra2.is(extra)); | |
185 | |
186 // Check scratch, extra and extra2 registers are valid. | |
187 ASSERT(!scratch.is(no_reg)); | |
188 ASSERT(!extra.is(no_reg)); | |
189 ASSERT(!extra2.is(no_reg)); | |
190 | 178 |
191 // Check that the receiver isn't a smi. | 179 // Check that the receiver isn't a smi. |
192 __ JumpIfSmi(receiver, &miss); | 180 __ JumpIfSmi(receiver, &miss); |
193 | 181 |
| 182 // Copy flags into a register. |
| 183 __ mov(extra3, Operand(flags)); |
| 184 |
194 // Get the map of the receiver and compute the hash. | 185 // Get the map of the receiver and compute the hash. |
195 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); | 186 __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset)); |
196 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 187 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
197 __ add(scratch, scratch, Operand(ip)); | 188 __ add(scratch, scratch, Operand(ip)); |
198 __ eor(scratch, scratch, Operand(flags)); | 189 __ eor(scratch, scratch, Operand(extra3)); |
199 __ and_(scratch, | 190 __ and_(scratch, |
200 scratch, | 191 scratch, |
201 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); | 192 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize)); |
202 | 193 |
203 // Probe the primary table. | 194 // Probe the primary table. |
204 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2); | 195 ProbeTable(isolate, masm, extra3, kPrimary, name, scratch, extra, extra2); |
205 | 196 |
206 // Primary miss: Compute hash for secondary probe. | 197 // Primary miss: Compute hash for secondary probe. |
207 __ sub(scratch, scratch, Operand(name)); | 198 __ sub(scratch, scratch, Operand(name)); |
208 __ add(scratch, scratch, Operand(flags)); | 199 __ add(scratch, scratch, Operand(extra3)); |
209 __ and_(scratch, | 200 __ and_(scratch, |
210 scratch, | 201 scratch, |
211 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); | 202 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize)); |
212 | 203 |
213 // Probe the secondary table. | 204 // Probe the secondary table. |
214 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2); | 205 ProbeTable(isolate, masm, extra3, kSecondary, name, scratch, extra, extra2); |
215 | 206 |
216 // Cache miss: Fall-through and let caller handle the miss by | 207 // Cache miss: Fall-through and let caller handle the miss by |
217 // entering the runtime system. | 208 // entering the runtime system. |
218 __ bind(&miss); | 209 __ bind(&miss); |
219 } | 210 } |
220 | 211 |
221 | 212 |
222 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, | 213 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, |
223 int index, | 214 int index, |
224 Register prototype) { | 215 Register prototype) { |
(...skipping 4194 matching lines...) |
4419 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); | 4410 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); |
4420 __ Jump(ic, RelocInfo::CODE_TARGET); | 4411 __ Jump(ic, RelocInfo::CODE_TARGET); |
4421 } | 4412 } |
4422 | 4413 |
4423 | 4414 |
4424 #undef __ | 4415 #undef __ |
4425 | 4416 |
4426 } } // namespace v8::internal | 4417 } } // namespace v8::internal |
4427 | 4418 |
4428 #endif // V8_TARGET_ARCH_ARM | 4419 #endif // V8_TARGET_ARCH_ARM |