// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_A64

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

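// Generate code that proves 'name' is not an own property of 'receiver':
// the code bails out to 'miss_label' unless the receiver is a plain
// dictionary-mode JSObject whose property dictionary provably lacks 'name'.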
void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(!AreAliased(receiver, scratch0, scratch1));
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  Register map = scratch1;
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ Tst(scratch0, kInterceptorOrAccessCheckNeededMask);
  __ B(ne, miss_label);

  // Check that receiver is a JSObject.
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Cmp(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ B(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ Ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ Ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  __ JumpIfNotRoot(map, Heap::kHashTableMapRootIndex, miss_label);

  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ Bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}

// Probe the primary or secondary table.
// If the entry is found in the cache, the generated code jumps to the first
// instruction of the stub in the cache.
// If there is a miss, the code falls through.
//
// 'receiver', 'name' and 'offset' registers are preserved on miss.
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register scratch3) {
  // Some code below relies on the fact that the Entry struct contains
  // 3 pointers (name, code, map).
  STATIC_ASSERT(sizeof(StubCache::Entry) == (3 * kPointerSize));

  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address());
  uintptr_t value_off_addr =
      reinterpret_cast<uintptr_t>(value_offset.address());
  uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address());

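  // The map and code fields of an entry are read below at fixed byte deltas
  // from the key field, so only the key address needs to be materialized in
  // a register; the raw addresses captured above provide those deltas.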
  Label miss;

  ASSERT(!AreAliased(name, offset, scratch, scratch2, scratch3));

  // Multiply by 3 because there are 3 fields per entry.
  __ Add(scratch3, offset, Operand(offset, LSL, 1));

  // Calculate the base address of the entry.
  __ Mov(scratch, key_offset);
  __ Add(scratch, scratch, Operand(scratch3, LSL, kPointerSizeLog2));

  // Check that the key in the entry matches the name.
  __ Ldr(scratch2, MemOperand(scratch));
  __ Cmp(name, scratch2);
  __ B(ne, &miss);

  // Check the map matches.
  __ Ldr(scratch2, MemOperand(scratch, map_off_addr - key_off_addr));
  __ Ldr(scratch3, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Cmp(scratch2, scratch3);
  __ B(ne, &miss);

  // Get the code entry from the cache.
  __ Ldr(scratch, MemOperand(scratch, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  __ Ldr(scratch2.W(), FieldMemOperand(scratch, Code::kFlagsOffset));
  __ Bic(scratch2.W(), scratch2.W(), Code::kFlagsNotUsedInLookup);
  __ Cmp(scratch2.W(), flags);
  __ B(ne, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ B(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ B(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ Add(scratch, scratch, Code::kHeaderSize - kHeapObjectTag);
  __ Br(scratch);

  // Miss: fall through.
  __ Bind(&miss);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!AreAliased(receiver, name, scratch, extra, extra2, extra3));

  // Make sure the extra registers are valid.
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Compute the hash for the primary table.
  __ Ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ Ldr(extra, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Add(scratch, scratch, extra);
  __ Eor(scratch, scratch, flags);
  // We shift out the last two bits because they are not part of the hash.
  __ Ubfx(scratch, scratch, kHeapObjectTagSize,
          CountTrailingZeros(kPrimaryTableSize, 64));
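  // In effect, scratch now holds the primary table index:
  //   ((hash_field + map) ^ flags) >> kHeapObjectTagSize,
  // truncated to log2(kPrimaryTableSize) bits by the Ubfx above.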

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name,
             scratch, extra, extra2, extra3);

  // Primary miss: compute the hash for the secondary table.
  __ Sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
  __ Add(scratch, scratch, flags >> kHeapObjectTagSize);
  __ And(scratch, scratch, kSecondaryTableSize - 1);
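  // In effect: secondary_index =
  //   (primary_index - (name >> kHeapObjectTagSize)
  //    + (flags >> kHeapObjectTagSize)) & (kSecondaryTableSize - 1).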

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name,
             scratch, extra, extra2, extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ Bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ Ldr(prototype, GlobalObjectMemOperand());
  // Load the native context from the global or builtins object.
  __ Ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ Ldr(prototype, ContextMemOperand(prototype, index));
  // Load the initial map. The global functions all have initial maps.
  __ Ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  __ Ldr(scratch, GlobalObjectMemOperand());
  __ Ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
  __ Ldr(scratch, ContextMemOperand(scratch, index));
  __ Cmp(scratch, Operand(function));
  __ B(ne, miss);

  // Load its initial map. The global functions all have initial maps.
  __ Mov(prototype, Operand(Handle<Map>(function->initial_map())));
  // Load the prototype from the initial map.
  __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  USE(representation);
  if (inobject) {
    int offset = index * kPointerSize;
    __ Ldr(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ Ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ Ldr(dst, FieldMemOperand(dst, offset));
  }
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  ASSERT(!AreAliased(receiver, scratch));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ JumpIfNotObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE,
                         miss_label);

  // Load length directly from the JS array.
  __ Ldr(x0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // TryGetFunctionPrototype can't put the result directly in x0 because its
  // three input registers can't alias, and we call this function from
  // LoadIC::GenerateFunctionPrototype, where the receiver is x0. So we
  // explicitly move the result into x0.
  __ Mov(x0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ Mov(scratch, Operand(cell));
  __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, miss);
}

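// Prove that 'name' is absent from 'holder'. Global objects are checked
// through the (hole-valued) property cell for 'name'; other slow-mode
// holders get a dictionary negative lookup. Fast-mode non-global holders
// need no check here: their map, verified elsewhere, fixes their layout.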
void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Generate StoreTransition code; the value is passed in the x0 register.
// When leaving generated code after success, the receiver_reg and storage_reg
// may be clobbered. Upon branch to miss_label, the receiver and name registers
// have their original values.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* miss_label,
                                                Label* slow) {
  Label exit;

  ASSERT(!AreAliased(receiver_reg, storage_reg, value_reg,
                     scratch1, scratch2, scratch3));

  // We don't need scratch3.
  scratch3 = NoReg;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ LoadObject(scratch1, constant);
    __ Cmp(value_reg, scratch1);
    __ B(ne, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2);

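    // Convert the incoming value to a double in fp_scratch: a smi is
    // untagged and converted directly; anything else must be a heap number,
    // whose value is loaded. Either way, the result is stored into the
    // HeapNumber freshly allocated in storage_reg.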
    // TODO(jbramley): Is fp_scratch the most appropriate FP scratch register?
    // It's only used in Fcmp, but it's not really safe to use it like this.
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntagToDouble(fp_scratch, value_reg);
    __ B(&do_store);

    __ Bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ Ldr(fp_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ Bind(&do_store);
    __ Str(fp_scratch, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((details.type() == FIELD) &&
      (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ Mov(scratch1, Operand(transition));
    __ Push(receiver_reg, scratch1, value_reg);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ Mov(scratch1, Operand(transition));
  __ Str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(x0));
    __ Ret();
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  Register prop_reg = representation.IsDouble() ? storage_reg : value_reg;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ Str(prop_reg, FieldMemOperand(receiver_reg, offset));

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ Mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ Ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ Str(prop_reg, FieldMemOperand(scratch1, offset));

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ Mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  __ Bind(&exit);
  // Return the value (register x0).
  ASSERT(value_reg.is(x0));
  __ Ret();
}


// Generate StoreField code; the value is passed in the x0 register.
// When leaving generated code after success, the receiver_reg and name_reg may
// be clobbered. Upon branch to miss_label, the receiver and name registers
// have their original values.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // x0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = (index * kPointerSize) + object->map()->instance_size();
      __ Ldr(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
      __ Ldr(scratch1,
             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      __ Ldr(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
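    // As in GenerateStoreTransition, convert the value to a double in
    // fp_scratch (untagging a smi, or loading from a heap number) before
    // storing it into the existing double storage now held in scratch1.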
    // TODO(jbramley): Is fp_scratch the most appropriate FP scratch register?
    // It's only used in Fcmp, but it's not really safe to use it like this.
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntagToDouble(fp_scratch, value_reg);
    __ B(&do_store);

    __ Bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ Ldr(fp_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ Bind(&do_store);
    __ Str(fp_scratch, FieldMemOperand(scratch1, HeapNumber::kValueOffset));

    // Return the value (register x0).
    ASSERT(value_reg.is(x0));
    __ Ret();
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ Str(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ Mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ Ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ Str(value_reg, FieldMemOperand(scratch1, offset));

    if (!representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ Mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  __ Bind(&exit);
  // Return the value (register x0).
  ASSERT(value_reg.is(x0));
  __ Ret();
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ Bind(label);
    __ Mov(this->name(), Operand(name));
  }
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);

  __ Push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ Mov(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


// Generate call to api function.
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  ASSERT(!AreAliased(receiver, scratch));

  MacroAssembler::PushPopQueue queue(masm);
  queue.Queue(receiver);
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!AreAliased(receiver, scratch, arg));
    queue.Queue(arg);
  }
  queue.PushQueued();
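  // PushQueued() lets the MacroAssembler combine the queued pushes (using
  // paired stores where possible) instead of pushing one register at a time.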

  ASSERT(optimization.is_simple_api_call());

  // ABI for CallApiFunctionStub.
  Register callee = x0;
  Register call_data = x4;
  Register holder = x2;
  Register api_function_address = x1;

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder =
      optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Mov(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ LoadObject(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ LoadObject(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ LoadObject(call_data, api_call_info);
    __ Ldr(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ LoadObject(call_data, call_data_obj);
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            masm->isolate());
  __ Mov(api_function_address, ref);

  // Jump to stub.
  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // object_reg and holder_reg registers can alias.
  ASSERT(!AreAliased(object_reg, scratch1, scratch2));
  ASSERT(!AreAliased(holder_reg, scratch1, scratch2));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

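  // Invariant for the chain walk below: 'reg' holds the object whose map is
  // currently being checked ('current_map'); after the first iteration the
  // current object always lives in holder_reg.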
  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) {
    current = Handle<JSObject>::cast(type->AsConstant());
  }
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             (current->property_dictionary()->FindEntry(*name) ==
              NameDictionary::kNotFound));

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ Ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ Ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool need_map = (depth != 1 || check == CHECK_ALL_MAPS) ||
                      heap()->InNewSpace(*prototype);
      Register map_reg = NoReg;
      if (need_map) {
        map_reg = scratch1;
        __ Ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object. This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        UseScratchRegisterScope temps(masm());
        __ CheckAccessGlobalProxy(reg, scratch2, temps.AcquireX(), miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code. Load it from the map.
        __ Ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ Mov(reg, Operand(prototype));
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  if (depth != 0 || check == CHECK_ALL_MAPS) {
    __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ B(&success);

    __ Bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ B(&success);

    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(Handle<HeapType> type,
                                                   Register object_reg,
                                                   Handle<JSObject> holder,
                                                   Handle<Name> name,
                                                   Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
  // HandlerFrontendHeader can return its result into scratch1() so do not
  // use it.
  Register scratch2 = this->scratch2();
  Register scratch3 = this->scratch3();
  Register dictionary = this->scratch4();
  ASSERT(!AreAliased(reg, scratch2, scratch3, dictionary));

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    // Load the properties dictionary.
    __ Ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2,
                                                     scratch3);
    __ Bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // pointer into the dictionary. Check that the value is the callback.
    Register pointer = scratch3;
    const int kElementsStartOffset = NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ Ldr(scratch2, FieldMemOperand(pointer, kValueOffset));
    __ Cmp(scratch2, Operand(callback));
    __ B(ne, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  __ Mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(x0, value);
  __ Ret();
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  ASSERT(!AreAliased(scratch2(), scratch3(), scratch4(), reg));

  // Build ExecutableAccessorInfo::args_ list on the stack and push property
  // name below the exit frame to make GC aware of them and store pointers to
  // them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);

  __ Push(receiver());

  if (heap()->InNewSpace(callback->data())) {
    __ Mov(scratch3(), Operand(callback));
    __ Ldr(scratch3(), FieldMemOperand(scratch3(),
                                       ExecutableAccessorInfo::kDataOffset));
  } else {
    __ Mov(scratch3(), Operand(Handle<Object>(callback->data(), isolate())));
  }
  // TODO(jbramley): Find another scratch register and combine the pushes
  // together. Can we use scratch1() here?
  __ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
  __ Push(scratch3(), scratch4());
  __ Mov(scratch3(), ExternalReference::isolate_address(isolate()));
  __ Push(scratch4(), scratch3(), reg, name());

  Register args_addr = scratch2();
  __ Add(args_addr, __ StackPointer(), kPointerSize);

  // Stack at this point:
  //              sp[40] callback data
  //              sp[32] undefined
  //              sp[24] undefined
  //              sp[16] isolate
  // args_addr -> sp[8]  reg
  //              sp[0]  name

  // ABI for CallApiGetter.
  Register getter_address_reg = x2;

  // Set up the call.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ Mov(getter_address_reg, ref);

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(!AreAliased(receiver(), this->name(),
                     scratch1(), scratch2(), scratch3()));
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so inline only them; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++
    // code; the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().Is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
      if (must_preserve_receiver_reg) {
        __ Push(receiver(), holder_reg, this->name());
      } else {
        __ Push(holder_reg, this->name());
      }
      // Invoke an interceptor. Note: map checks from receiver to
      // interceptor's holder have been compiled before (see a caller
      // of this method).
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check whether the interceptor provided a value for the property. If
      // so, return immediately.
      Label interceptor_failed;
      __ JumpIfRoot(x0,
                    Heap::kNoInterceptorResultSentinelRootIndex,
                    &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ Ret();

      __ Bind(&interceptor_failed);
      if (must_preserve_receiver_reg) {
        __ Pop(this->name(), holder_reg, receiver());
      } else {
        __ Pop(this->name(), holder_reg);
      }
      // Leave the internal frame.
    }
    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    PushInterceptorArguments(
        masm(), receiver(), holder_reg, this->name(), interceptor_holder);

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
                          isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  UseScratchRegisterScope temps(masm());
  // Check that the object is a boolean.
  Register true_root = temps.AcquireX();
  Register false_root = temps.AcquireX();
  ASSERT(!AreAliased(object, true_root, false_root));
  __ LoadTrueFalseRoots(true_root, false_root);
  __ Cmp(object, true_root);
  __ Ccmp(object, false_root, ZFlag, ne);
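  // The Cmp/Ccmp pair combines the two equality checks: if the object equals
  // true_root, the Ccmp condition ('ne') fails and the flags are forced to
  // ZFlag (equal); otherwise the object is compared against false_root. The
  // branch below is therefore only taken if the object is neither boolean.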
  __ B(ne, miss);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  ASM_LOCATION("StoreStubCompiler::CompileStoreCallback");
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  // Stub never generated for non-global objects that require access checks.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  // receiver() and holder_reg can alias.
  ASSERT(!AreAliased(receiver(), scratch1(), scratch2(), value()));
  ASSERT(!AreAliased(holder_reg, scratch1(), scratch2(), value()));
  __ Mov(scratch1(), Operand(callback));
  __ Mov(scratch2(), Operand(name));
  __ Push(receiver(), holder_reg, scratch1(), scratch2(), value());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)


void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ Push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ Ldr(receiver,
               FieldMemOperand(
                   receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ Pop(x0);

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  Label miss;

  ASM_LOCATION("StoreStubCompiler::CompileStoreInterceptor");

  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


// TODO(all): The so-called scratch registers are significant in some cases.
// For example, KeyedStoreStubCompiler::registers()[3] (x3) is actually used
// for KeyedStoreStubCompiler::transition_map(). We should verify which
// registers are actually scratch registers, and which are important. For now,
// we use the same assignments as ARM to remain on the safe side.

Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { x0, x2, x3, x1, x4, x5 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name/key, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { x1, x0, x2, x3, x4, x5 };
  return registers;
}


Register StoreStubCompiler::value() {
  return x0;
}


Register* StoreStubCompiler::registers() {
  // receiver, value, scratch1, scratch2, scratch3.
  static Register registers[] = { x1, x2, x3, x4, x5 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { x2, x1, x3, x4, x5 };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)

void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ Ldr(receiver,
               FieldMemOperand(
                   receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;
  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ Mov(x3, Operand(cell));
  __ Ldr(x4, FieldMemOperand(x3, Cell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, x1, x3);
  __ Mov(x0, x4);
  __ Ret();

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ CompareAndBranch(this->name(), Operand(name), ne, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);
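  // A smi receiver is only handled when one of the requested types is
  // Number; it then shares the Number handler bound at number_case below.
  // Otherwise it goes straight to the miss handler.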

  Register map_reg = scratch1();
  __ Ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      Label try_next;
      __ Cmp(map_reg, Operand(map));
      __ B(ne, &try_next);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ Bind(&number_case);
      }
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET);
      __ Bind(&try_next);
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ Bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      (number_of_handled_maps > 1) ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver(), value());

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;

  ASM_LOCATION("KeyedStoreStubCompiler::CompileStorePolymorphic");

  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ Ldr(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; i++) {
    __ Cmp(scratch1(), Operand(receiver_maps->at(i)));

    Label skip;
    __ B(&skip, ne);
    if (!transitioned_maps->at(i).is_null()) {
      // This argument is used by the handler stub. For example, see
      // ElementsTransitionGenerator::GenerateMapChangeElementsTransition.
      __ Mov(transition_map(), Operand(transitioned_maps->at(i)));
    }
    __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
    __ Bind(&skip);
  }

  __ Bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


#undef __
#define __ ACCESS_MASM(masm)

void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- x0    : key
  //  -- x1    : receiver
  // -----------------------------------
  Label slow, miss;

  Register result = x0;
  Register key = x0;
  Register receiver = x1;

  __ JumpIfNotSmi(key, &miss);
  __ Ldr(x4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ LoadFromNumberDictionary(&slow, x4, key, result, x2, x3, x5, x6);
  __ Ret();

  __ Bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(), 1, x2, x3);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ Bind(&miss);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_A64