OLD | NEW |
| (Empty) |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | |
2 // Redistribution and use in source and binary forms, with or without | |
3 // modification, are permitted provided that the following conditions are | |
4 // met: | |
5 // | |
6 // * Redistributions of source code must retain the above copyright | |
7 // notice, this list of conditions and the following disclaimer. | |
8 // * Redistributions in binary form must reproduce the above | |
9 // copyright notice, this list of conditions and the following | |
10 // disclaimer in the documentation and/or other materials provided | |
11 // with the distribution. | |
12 // * Neither the name of Google Inc. nor the names of its | |
13 // contributors may be used to endorse or promote products derived | |
14 // from this software without specific prior written permission. | |
15 // | |
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
27 | |
28 #include "v8.h" | |
29 | |
30 #include "codegen-inl.h" | |
31 #include "ic-inl.h" | |
32 #include "runtime.h" | |
33 #include "stub-cache.h" | |
34 | |
35 namespace v8 { namespace internal { | |
36 | |
37 | |
38 // ---------------------------------------------------------------------------- | |
39 // Static IC stub generators. | |
40 // | |
41 | |
42 #define __ ACCESS_MASM(masm) | |
43 | |
44 | |
45 // Helper function used from LoadIC/CallIC GenerateNormal. | |
46 static void GenerateDictionaryLoad(MacroAssembler* masm, | |
47 Label* miss, | |
48 Register t0, | |
49 Register t1) { | |
50 // Register use: | |
51 // | |
52 // t0 - used to hold the property dictionary. | |
53 // | |
54 // t1 - initially the receiver | |
55 // - used for the index into the property dictionary | |
56 // - holds the result on exit. | |
57 // | |
58 // r3 - used as temporary and to hold the capacity of the property | |
59 // dictionary. | |
60 // | |
61 // r2 - holds the name of the property and is unchanges. | |
62 | |
63 Label done; | |
64 | |
65 // Check for the absence of an interceptor. | |
66 // Load the map into t0. | |
67 __ ldr(t0, FieldMemOperand(t1, JSObject::kMapOffset)); | |
68 // Test the has_named_interceptor bit in the map. | |
69 __ ldr(t0, FieldMemOperand(t1, Map::kInstanceAttributesOffset)); | |
70 __ tst(t0, Operand(1 << (Map::kHasNamedInterceptor + (3 * 8)))); | |
71 // Jump to miss if the interceptor bit is set. | |
72 __ b(ne, miss); | |
73 | |
74 | |
75 // Check that the properties array is a dictionary. | |
76 __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset)); | |
77 __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset)); | |
78 __ cmp(r3, Operand(Factory::hash_table_map())); | |
79 __ b(ne, miss); | |
80 | |
81 // Compute the capacity mask. | |
82 const int kCapacityOffset = | |
83 Array::kHeaderSize + Dictionary::kCapacityIndex * kPointerSize; | |
84 __ ldr(r3, FieldMemOperand(t0, kCapacityOffset)); | |
85 __ mov(r3, Operand(r3, ASR, kSmiTagSize)); // convert smi to int | |
86 __ sub(r3, r3, Operand(1)); | |
87 | |
88 const int kElementsStartOffset = | |
89 Array::kHeaderSize + Dictionary::kElementsStartIndex * kPointerSize; | |
90 | |
91 // Generate an unrolled loop that performs a few probes before | |
92 // giving up. Measurements done on Gmail indicate that 2 probes | |
93 // cover ~93% of loads from dictionaries. | |
94 static const int kProbes = 4; | |
95 for (int i = 0; i < kProbes; i++) { | |
96 // Compute the masked index: (hash + i + i * i) & mask. | |
97 __ ldr(t1, FieldMemOperand(r2, String::kLengthOffset)); | |
98 __ mov(t1, Operand(t1, LSR, String::kHashShift)); | |
99 if (i > 0) { | |
100 __ add(t1, t1, Operand(Dictionary::GetProbeOffset(i))); | |
101 } | |
102 __ and_(t1, t1, Operand(r3)); | |
103 | |
104 // Scale the index by multiplying by the element size. | |
105 ASSERT(Dictionary::kElementSize == 3); | |
106 __ add(t1, t1, Operand(t1, LSL, 1)); // t1 = t1 * 3 | |
107 | |
108 // Check if the key is identical to the name. | |
109 __ add(t1, t0, Operand(t1, LSL, 2)); | |
110 __ ldr(ip, FieldMemOperand(t1, kElementsStartOffset)); | |
111 __ cmp(r2, Operand(ip)); | |
112 if (i != kProbes - 1) { | |
113 __ b(eq, &done); | |
114 } else { | |
115 __ b(ne, miss); | |
116 } | |
117 } | |
118 | |
119 // Check that the value is a normal property. | |
120 __ bind(&done); // t1 == t0 + 4*index | |
121 __ ldr(r3, FieldMemOperand(t1, kElementsStartOffset + 2 * kPointerSize)); | |
122 __ tst(r3, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize)); | |
123 __ b(ne, miss); | |
124 | |
125 // Get the value at the masked, scaled index and return. | |
126 __ ldr(t1, FieldMemOperand(t1, kElementsStartOffset + 1 * kPointerSize)); | |
127 } | |
128 | |
129 | |
// Helper function used to check that a value is either not a function
// or is loaded if it is a function.  Jumps to |miss| only when |value|
// is a JSFunction whose lazy-load data is still present (i.e. the
// function has not been loaded yet); all other values fall through.
// |scratch| is clobbered; |value| is preserved.
static void GenerateCheckNonFunctionOrLoaded(MacroAssembler* masm,
                                             Label* miss,
                                             Register value,
                                             Register scratch) {
  Label done;
  // Check if the value is a Smi.  A smi is never a function.
  __ tst(value, Operand(kSmiTagMask));
  __ b(eq, &done);
  // Check if the value is a function.
  __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(JS_FUNCTION_TYPE));
  __ b(ne, &done);
  // Check if the function has been loaded: the lazy-load data slot is
  // undefined once loading is complete.
  __ ldr(scratch,
         FieldMemOperand(value, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(scratch,
         FieldMemOperand(scratch, SharedFunctionInfo::kLazyLoadDataOffset));
  __ cmp(scratch, Operand(Factory::undefined_value()));
  __ b(ne, miss);
  __ bind(&done);
}
154 | |
155 | |
// Specialized LoadIC stub for loading the "length" property of a
// JSArray receiver; falls back to the generic LoadIC miss handler
// for any other receiver.
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  Label miss;

  // Load the receiver from the stack.
  __ ldr(r0, MemOperand(sp, 0));

  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
171 | |
172 | |
// Specialized LoadIC stub for loading the "length" property of a
// string receiver; falls back to the generic LoadIC miss handler
// for any other receiver.
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack.
  __ ldr(r0, MemOperand(sp, 0));

  StubCompiler::GenerateLoadStringLength2(masm, r0, r1, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
188 | |
189 | |
// LoadIC stub for loading the "prototype" property of a function.
// On ARM this is not yet optimized: it unconditionally jumps to the
// LoadIC miss builtin.
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  // NOTE: Right now, this code always misses on ARM which is
  // sub-optimal. We should port the fast case code from IA-32.

  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);
}
203 | |
204 | |
// Defined in ic.cc.
Object* CallIC_Miss(Arguments args);

// Megamorphic CallIC stub: probes the global stub cache for a call
// target.  If the receiver is a value (number, string, boolean), the
// prototype of the corresponding wrapper JSValue is used for a second
// probe.  On a complete miss, control goes to the CallIC_Miss runtime
// entry via Generate().
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function from the stack; 1 ~ receiver.
  __ ldr(r2, MemOperand(sp, (argc + 1) * kPointerSize));

  // Probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::CALL_IC, MONOMORPHIC, NORMAL, argc);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &number);
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  __ cmp(r3, Operand(HEAP_NUMBER_TYPE));
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.
  __ bind(&non_string);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ cmp(r1, Operand(Factory::true_value()));
  __ b(eq, &boolean);
  __ cmp(r1, Operand(Factory::false_value()));
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}
267 | |
268 | |
// Helper for CallIC::GenerateNormal.  Looks the callee up in the
// receiver's property dictionary (receiver in r1, name in r2), checks
// that the result is a loaded JSFunction, optionally patches the
// receiver on the stack with the global proxy, and tail-calls the
// function.  Jumps to |miss| on any failed check; does not return on
// success (the invoke is a jump).
static void GenerateNormalHelper(MacroAssembler* masm,
                                 int argc,
                                 bool is_global_object,
                                 Label* miss) {
  // Search dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, miss, r0, r1);

  // Check that the value isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the value is a JSFunction.
  __ ldr(r0, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
  __ cmp(r0, Operand(JS_FUNCTION_TYPE));
  __ b(ne, miss);

  // Check that the function has been loaded (lazy-load data slot is
  // undefined once loading is complete).
  __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kLazyLoadDataOffset));
  __ cmp(r0, Operand(Factory::undefined_value()));
  __ b(ne, miss);

  // Patch the receiver with the global proxy if necessary.
  if (is_global_object) {
    __ ldr(r2, MemOperand(sp, argc * kPointerSize));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
    __ str(r2, MemOperand(sp, argc * kPointerSize));
  }

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}
303 | |
304 | |
// CallIC stub for receivers whose properties live in a dictionary
// (normal objects, global objects and global proxies).  Validates the
// receiver, performs required access checks, then dispatches through
// GenerateNormalHelper; misses go to the CallIC_Miss runtime entry.
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------

  Label miss, global_object, non_global_object;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function from the stack; 1 ~ receiver.
  __ ldr(r2, MemOperand(sp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  __ cmp(r0, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &miss);

  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object.
  __ cmp(r0, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, &global_object);
  __ cmp(r0, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &non_global_object);

  // Accessing global object: Load and invoke.
  __ bind(&global_object);
  // Check that the global object does not require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  // Receiver on the stack is patched with the global proxy.
  GenerateNormalHelper(masm, argc, true, &miss);

  // Accessing non-global object: Check for access to global proxy.
  Label global_proxy, invoke;
  __ bind(&non_global_object);
  __ cmp(r0, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global_proxy);
  // Check that the non-global, non-global-proxy object does not
  // require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  __ bind(&invoke);
  GenerateNormalHelper(masm, argc, false, &miss);

  // Global object access: Check access rights.
  __ bind(&global_proxy);
  __ CheckAccessGlobalProxy(r1, r0, &miss);
  __ b(&invoke);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}
366 | |
367 | |
// Slow-path CallIC: calls the runtime entry |f| (e.g. CallIC_Miss)
// with the receiver and name to resolve the call target, patches a
// global receiver on the stack with the global proxy if needed, and
// tail-calls the resolved function.
void CallIC::Generate(MacroAssembler* masm,
                      int argc,
                      const ExternalReference& f) {
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------

  // Get the receiver of the function from the stack.
  __ ldr(r2, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function to call from the stack.
  __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));

  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
  __ stm(db_w, sp, r1.bit() | r2.bit());

  // Call the entry: r0 holds the argument count (2), r1 the entry.
  __ mov(r0, Operand(2));
  __ mov(r1, Operand(f));

  CEntryStub stub;
  __ CallStub(&stub);

  // Move result (the function to call) to r1 and leave the internal frame.
  __ mov(r1, Operand(r0));
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  Label invoke, global;
  __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &invoke);
  __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  __ cmp(r3, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, &global);
  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &invoke);

  // Patch the receiver on the stack with the global proxy.
  __ bind(&global);
  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
  __ str(r2, MemOperand(sp, argc * kPointerSize));

  // Invoke the function.
  ParameterCount actual(argc);
  __ bind(&invoke);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}
418 | |
419 | |
// Defined in ic.cc.
Object* LoadIC_Miss(Arguments args);

// Megamorphic LoadIC stub: probes the global stub cache for a load
// handler matching the receiver/name pair; falls through to the
// LoadIC_Miss runtime entry on a miss.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r0, MemOperand(sp, 0));
  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r0, r2, r3);

  // Cache miss: Jump to runtime.
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
438 | |
439 | |
// LoadIC stub for receivers whose properties live in a dictionary.
// Validates the receiver, performs access checks (handling the global
// proxy specially), loads the property from the dictionary, and
// returns it in r0; misses go to the LoadIC_Miss runtime entry.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  Label miss, probe, global;

  __ ldr(r0, MemOperand(sp, 0));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.
  __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r1, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &miss);
  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object (unlikely).
  __ cmp(r1, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global);

  // Check for non-global object that requires access check.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);

  __ bind(&probe);
  GenerateDictionaryLoad(masm, &miss, r1, r0);
  // Functions must be loaded before they can be returned.
  GenerateCheckNonFunctionOrLoaded(masm, &miss, r0, r1);
  __ Ret();

  // Global object access: Check access rights.
  __ bind(&global);
  __ CheckAccessGlobalProxy(r0, r1, &miss);
  __ b(&probe);

  // Cache miss: Restore receiver from stack and jump to runtime.
  __ bind(&miss);
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
485 | |
486 | |
// Unconditional miss stub: forwards to the LoadIC_Miss runtime entry.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
490 | |
491 | |
// Slow-path LoadIC: pushes the receiver and name and tail-calls the
// runtime entry |f| with two arguments.
void LoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp, 0));
  // Push name and receiver as arguments for the runtime call.
  __ stm(db_w, sp, r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(f, 2);
}
505 | |
506 | |
// TODO(181): Implement map patching once loop nesting is tracked on
// the ARM platform so we can generate inlined fast-case code for
// array indexing in loops.
//
// There is no inlined keyed-load fast case on ARM yet, so these
// inline-cache patching hooks are no-ops.
bool KeyedLoadIC::HasInlinedVersion(Address address) { return false; }
void KeyedLoadIC::ClearInlinedVersion(Address address) { }
void KeyedLoadIC::PatchInlinedMapCheck(Address address, Object* value) { }
513 | |
514 | |
// Defined in ic.cc.
Object* KeyedLoadIC_Miss(Arguments args);


// Unconditional miss stub: forwards to the KeyedLoadIC_Miss runtime entry.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  Generate(masm, ExternalReference(IC_Utility(kKeyedLoadIC_Miss)));
}
521 | |
522 | |
// Slow-path KeyedLoadIC: re-pushes key and receiver and tail-calls the
// runtime entry |f| with two arguments.
void KeyedLoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  // Load key/receiver and push copies as runtime-call arguments.
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r2.bit() | r3.bit());

  __ TailCallRuntime(f, 2);
}
533 | |
534 | |
// Generic keyed load: handles smi keys into fast-elements JS objects
// inline; everything else (non-smi key, dictionary elements, access
// checks, out-of-bounds, holes) goes to the Runtime::kGetProperty
// slow path.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  Label slow, fast;

  // Get the key and receiver object from the stack.
  __ ldm(ia, sp, r0.bit() | r1.bit());
  // Check that the key is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &slow);
  // Untag the key to get the element index.
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to check this explicitly since this generic stub does not perform
  // map checks.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects work as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Get the elements array of the object.
  __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);
  // Check that the key (index) is within bounds.
  __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
  __ cmp(r0, Operand(r3));
  __ b(lo, &fast);

  // Slow case: Push extra copies of the arguments (2).
  __ bind(&slow);
  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r0, r1);
  __ ldm(ia, sp, r0.bit() | r1.bit());
  __ stm(db_w, sp, r0.bit() | r1.bit());
  // Do tail-call to runtime routine.
  __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);

  // Fast case: Do the load.
  __ bind(&fast);
  __ add(r3, r1, Operand(Array::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
  __ cmp(r0, Operand(Factory::the_hole_value()));
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, &slow);

  __ Ret();
}
599 | |
600 | |
// Slow-path KeyedStoreIC: pushes value, key and receiver and
// tail-calls the runtime entry |f| with three arguments.
void KeyedStoreIC::Generate(MacroAssembler* masm,
                            const ExternalReference& f) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver

  // Load key/receiver and push value, key, receiver as arguments.
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  __ TailCallRuntime(f, 3);
}
614 | |
615 | |
// Generic keyed store: handles smi keys into fast-elements JS objects
// and JS arrays inline, including the array-growth-by-one case
// (store to array[array.length]).  All other cases go to the
// Runtime::kSetProperty slow path.  Writes trigger the write barrier
// unless the stored value is a smi.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  Label slow, fast, array, extra, exit;
  // Get the key and the object from the stack.
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r1 = key, r3 = receiver
  // Check that the key is a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(ne, &slow);
  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &slow);
  // Get the map of the object.
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_ARRAY_TYPE));
  // r1 == key.
  __ b(eq, &array);
  // Check that the object is some kind of JS object.
  __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);


  // Object case: Check key against length in the elements array.
  __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary).
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ cmp(r2, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);
  // Untag the key (for checking against untagged length in the fixed array).
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));
  // Compute address to store into and check array bounds.
  __ add(r2, r3, Operand(Array::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
  __ ldr(ip, FieldMemOperand(r3, Array::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(lo, &fast);


  // Slow case: Push extra copies of the arguments (3).
  __ bind(&slow);
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r0 == value, r1 == key, r3 == object
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
  // Do tail-call to runtime routine.
  __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  // r0 == value, r1 == key, r2 == elements, r3 == object
  // NOTE: entered with the condition flags of the key/length compare
  // from the array case below still live; ne means key > length.
  __ bind(&extra);
  __ b(ne, &slow);  // do not leave holes in the array
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));  // untag
  __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &slow);
  __ mov(r1, Operand(r1, LSL, kSmiTagSize));  // restore tag
  __ add(r1, r1, Operand(1 << kSmiTagSize));  // and increment
  __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ mov(r3, Operand(r2));
  // NOTE: Computing the address to store into must take the fact
  // that the key has been incremented into account.
  int displacement = Array::kHeaderSize - kHeapObjectTag -
      ((1 << kSmiTagSize) * 2);
  __ add(r2, r2, Operand(displacement));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ b(&fast);


  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode; if it is the
  // length is always a smi.
  // r0 == value, r3 == object
  __ bind(&array);
  __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ cmp(r1, Operand(Factory::hash_table_map()));
  __ b(eq, &slow);

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ ldr(r1, MemOperand(sp));  // restore key
  // r0 == value, r1 == key, r2 == elements, r3 == object.
  __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &extra);
  __ mov(r3, Operand(r2));
  __ add(r2, r2, Operand(Array::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));


  // Fast case: Do the store.
  // r0 == value, r2 == address to store into, r3 == elements
  __ bind(&fast);
  __ str(r0, MemOperand(r2));
  // Skip write barrier if the written value is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &exit);
  // Update write barrier for the elements array address.
  __ sub(r1, r2, Operand(r3));
  __ RecordWrite(r3, r1, r2);

  __ bind(&exit);
  __ Ret();
}
730 | |
731 | |
// Stub invoked when a keyed store needs the receiver's backing
// storage extended: pushes value, key and receiver and tail-calls the
// shared ExtendStorage runtime entry.
void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  // ----------- S t a t e -------------

  // Load key/receiver and push value, key, receiver as arguments.
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(
      ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
}
747 | |
748 | |
// Megamorphic StoreIC stub: probes the global stub cache for a store
// handler matching the receiver/name pair; falls through to the
// StoreIC_Miss runtime entry on a miss.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  __ ldr(r1, MemOperand(sp));
  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3);

  // Cache miss: Jump to runtime.
  Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
}
765 | |
766 | |
// Stub invoked when a named store needs the receiver's backing
// storage extended: pushes value, name and receiver and tail-calls
// the shared ExtendStorage runtime entry.
void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp));  // copy receiver
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(
      ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
}
782 | |
783 | |
// Slow-path StoreIC: pushes value, name and receiver and tail-calls
// the runtime entry |f| with three arguments.
void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp));  // copy receiver
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(f, 3);
}
798 | |
799 | |
800 #undef __ | |
801 | |
802 | |
803 } } // namespace v8::internal | |
OLD | NEW |