Chromium Code Reviews

Side by Side Diff: src/a64/stub-cache-a64.cc

Issue 181453002: Reset trunk to 3.24.35.4 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 6 years, 9 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if V8_TARGET_ARCH_A64
31
32 #include "ic-inl.h"
33 #include "codegen.h"
34 #include "stub-cache.h"
35
36 namespace v8 {
37 namespace internal {
38
39
40 #define __ ACCESS_MASM(masm)
41
42
43 void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
44 Label* miss_label,
45 Register receiver,
46 Handle<Name> name,
47 Register scratch0,
48 Register scratch1) {
49 ASSERT(!AreAliased(receiver, scratch0, scratch1));
50 ASSERT(name->IsUniqueName());
51 Counters* counters = masm->isolate()->counters();
52 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
53 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
54
55 Label done;
56
57 const int kInterceptorOrAccessCheckNeededMask =
58 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
59
60 // Bail out if the receiver has a named interceptor or requires access checks.
61 Register map = scratch1;
62 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
63 __ Ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
64 __ Tst(scratch0, kInterceptorOrAccessCheckNeededMask);
65 __ B(ne, miss_label);
66
67 // Check that the receiver is a JSObject.
68 __ Ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
69 __ Cmp(scratch0, FIRST_SPEC_OBJECT_TYPE);
70 __ B(lt, miss_label);
71
72 // Load properties array.
73 Register properties = scratch0;
74 __ Ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
75 // Check that the properties array is a dictionary.
76 __ Ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
77 __ JumpIfNotRoot(map, Heap::kHashTableMapRootIndex, miss_label);
78
79 NameDictionaryLookupStub::GenerateNegativeLookup(masm,
80 miss_label,
81 &done,
82 receiver,
83 properties,
84 name,
85 scratch1);
86 __ Bind(&done);
87 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
88 }
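
The bail-out above tests two map bit-field bits with a single Tst. A minimal host-side sketch of that test, assuming illustrative bit positions (the real values are defined on Map):

#include <cstdint>

// Bit positions are assumptions for illustration; V8 defines the real
// values on Map.
constexpr int kHasNamedInterceptor = 2;
constexpr int kIsAccessCheckNeeded = 5;

// Mirrors the Tst/B(ne) pair above: true means "branch to the miss label".
bool NeedsSlowPath(uint8_t map_bit_field) {
  const uint8_t mask =
      (1 << kHasNamedInterceptor) | (1 << kIsAccessCheckNeeded);
  return (map_bit_field & mask) != 0;
}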
89
90
91 // Probe the primary or secondary table.
92 // If the entry is found in the cache, the generated code jumps to the first
93 // instruction of the stub in the cache.
94 // If there is a miss, the code falls through.
95 //
96 // 'receiver', 'name' and 'offset' registers are preserved on miss.
97 static void ProbeTable(Isolate* isolate,
98 MacroAssembler* masm,
99 Code::Flags flags,
100 StubCache::Table table,
101 Register receiver,
102 Register name,
103 Register offset,
104 Register scratch,
105 Register scratch2,
106 Register scratch3) {
107 // Some code below relies on the fact that the Entry struct contains
108 // 3 pointers (name, code, map).
109 STATIC_ASSERT(sizeof(StubCache::Entry) == (3 * kPointerSize));
110
111 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
112 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
113 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
114
115 uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address());
116 uintptr_t value_off_addr =
117 reinterpret_cast<uintptr_t>(value_offset.address());
118 uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address());
119
120 Label miss;
121
122 ASSERT(!AreAliased(name, offset, scratch, scratch2, scratch3));
123
124 // Multiply by 3 because there are 3 fields per entry.
125 __ Add(scratch3, offset, Operand(offset, LSL, 1));
126
127 // Calculate the base address of the entry.
128 __ Mov(scratch, Operand(key_offset));
129 __ Add(scratch, scratch, Operand(scratch3, LSL, kPointerSizeLog2));
130
131 // Check that the key in the entry matches the name.
132 __ Ldr(scratch2, MemOperand(scratch));
133 __ Cmp(name, scratch2);
134 __ B(ne, &miss);
135
136 // Check the map matches.
137 __ Ldr(scratch2, MemOperand(scratch, map_off_addr - key_off_addr));
138 __ Ldr(scratch3, FieldMemOperand(receiver, HeapObject::kMapOffset));
139 __ Cmp(scratch2, scratch3);
140 __ B(ne, &miss);
141
142 // Get the code entry from the cache.
143 __ Ldr(scratch, MemOperand(scratch, value_off_addr - key_off_addr));
144
145 // Check that the flags match what we're looking for.
146 __ Ldr(scratch2.W(), FieldMemOperand(scratch, Code::kFlagsOffset));
147 __ Bic(scratch2.W(), scratch2.W(), Code::kFlagsNotUsedInLookup);
148 __ Cmp(scratch2.W(), flags);
149 __ B(ne, &miss);
150
151 #ifdef DEBUG
152 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
153 __ B(&miss);
154 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
155 __ B(&miss);
156 }
157 #endif
158
159 // Jump to the first instruction in the code stub.
160 __ Add(scratch, scratch, Code::kHeaderSize - kHeapObjectTag);
161 __ Br(scratch);
162
163 // Miss: fall through.
164 __ Bind(&miss);
165 }
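
For readers less familiar with the A64 sequence, here is a host-side C++ sketch of the same probe, with a hypothetical Entry mirroring the 3-pointer layout asserted above (the flags check against Code::kFlagsOffset is omitted for brevity); returning nullptr corresponds to the fall-through miss:

#include <cstddef>

// Hypothetical mirror of StubCache::Entry (name, code, map).
struct Entry {
  const void* name;
  const void* code;
  const void* map;
};

// The generated code forms the entry address as base + offset * 3 pointers
// (the shift-and-add above); indexing an array of Entry does the same.
const void* Probe(const Entry* table, size_t offset,
                  const void* name, const void* map) {
  const Entry& entry = table[offset];
  if (entry.name != name) return nullptr;  // key mismatch: miss
  if (entry.map != map) return nullptr;    // map mismatch: miss
  return entry.code;                       // hit: the jump target
}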
166
167
168 void StubCache::GenerateProbe(MacroAssembler* masm,
169 Code::Flags flags,
170 Register receiver,
171 Register name,
172 Register scratch,
173 Register extra,
174 Register extra2,
175 Register extra3) {
176 Isolate* isolate = masm->isolate();
177 Label miss;
178
179 // Make sure the flags do not name a specific type.
180 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
181
182 // Make sure that there are no register conflicts.
183 ASSERT(!AreAliased(receiver, name, scratch, extra, extra2, extra3));
184
185 // Make sure the extra, extra2 and extra3 registers are valid.
186 ASSERT(!extra.is(no_reg));
187 ASSERT(!extra2.is(no_reg));
188 ASSERT(!extra3.is(no_reg));
189
190 Counters* counters = masm->isolate()->counters();
191 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
192 extra2, extra3);
193
194 // Check that the receiver isn't a smi.
195 __ JumpIfSmi(receiver, &miss);
196
197 // Compute the hash for the primary table.
198 __ Ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
199 __ Ldr(extra, FieldMemOperand(receiver, HeapObject::kMapOffset));
200 __ Add(scratch, scratch, extra);
201 __ Eor(scratch, scratch, flags);
202 // We shift out the last two bits because they are not part of the hash.
203 __ Ubfx(scratch, scratch, kHeapObjectTagSize,
204 CountTrailingZeros(kPrimaryTableSize, 64));
205
206 // Probe the primary table.
207 ProbeTable(isolate, masm, flags, kPrimary, receiver, name,
208 scratch, extra, extra2, extra3);
209
210 // Primary miss: Compute the hash for the secondary table.
211 __ Sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
212 __ Add(scratch, scratch, flags >> kHeapObjectTagSize);
213 __ And(scratch, scratch, kSecondaryTableSize - 1);
214
215 // Probe the secondary table.
216 ProbeTable(isolate, masm, flags, kSecondary, receiver, name,
217 scratch, extra, extra2, extra3);
218
219 // Cache miss: Fall-through and let caller handle the miss by
220 // entering the runtime system.
221 __ Bind(&miss);
222 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
223 extra2, extra3);
224 }
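
The two hash computations above reduce to a handful of integer operations. A hedged sketch, with the table sizes as assumptions (the real constants are StubCache::kPrimaryTableSize and kSecondaryTableSize):

#include <cstdint>

constexpr uint32_t kPrimaryTableSize = 2048;   // assumption for illustration
constexpr uint32_t kSecondaryTableSize = 512;  // assumption for illustration
constexpr uint32_t kHeapObjectTagSize = 2;     // the tag bits shifted out

// Primary: (hash_field + map) ^ flags, then drop the tag bits and keep
// log2(kPrimaryTableSize) bits, as the Ubfx above does.
uint32_t PrimaryOffset(uint32_t hash_field, uint32_t map_bits,
                       uint32_t flags) {
  uint32_t key = (hash_field + map_bits) ^ flags;
  return (key >> kHeapObjectTagSize) & (kPrimaryTableSize - 1);
}

// Secondary: subtract the name, add the flags, and mask to the table size,
// matching the Sub/Add/And sequence above.
uint32_t SecondaryOffset(uint32_t primary, uint32_t name_bits,
                         uint32_t flags) {
  uint32_t key = primary - (name_bits >> kHeapObjectTagSize) +
                 (flags >> kHeapObjectTagSize);
  return key & (kSecondaryTableSize - 1);
}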
225
226
227 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
228 int index,
229 Register prototype) {
230 // Load the global or builtins object from the current context.
231 __ Ldr(prototype, GlobalObjectMemOperand());
232 // Load the native context from the global or builtins object.
233 __ Ldr(prototype,
234 FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
235 // Load the function from the native context.
236 __ Ldr(prototype, ContextMemOperand(prototype, index));
237 // Load the initial map. The global functions all have initial maps.
238 __ Ldr(prototype,
239 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
240 // Load the prototype from the initial map.
241 __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
242 }
243
244
245 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
246 MacroAssembler* masm,
247 int index,
248 Register prototype,
249 Label* miss) {
250 Isolate* isolate = masm->isolate();
251 // Get the global function with the given index.
252 Handle<JSFunction> function(
253 JSFunction::cast(isolate->native_context()->get(index)));
254
255 // Check we're still in the same context.
256 Register scratch = prototype;
257 __ Ldr(scratch, GlobalObjectMemOperand());
258 __ Ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
259 __ Ldr(scratch, ContextMemOperand(scratch, index));
260 __ Cmp(scratch, Operand(function));
261 __ B(ne, miss);
262
263 // Load its initial map. The global functions all have initial maps.
264 __ Mov(prototype, Operand(Handle<Map>(function->initial_map())));
265 // Load the prototype from the initial map.
266 __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
267 }
268
269
270 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
271 Register dst,
272 Register src,
273 bool inobject,
274 int index,
275 Representation representation) {
276 ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
277 USE(representation);
278 if (inobject) {
279 int offset = index * kPointerSize;
280 __ Ldr(dst, FieldMemOperand(src, offset));
281 } else {
282 // Calculate the offset into the properties array.
283 int offset = index * kPointerSize + FixedArray::kHeaderSize;
284 __ Ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
285 __ Ldr(dst, FieldMemOperand(dst, offset));
286 }
287 }
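
The offset arithmetic above, as a small sketch; the constant values are assumptions for a 64-bit build (the real ones are V8's kPointerSize and FixedArray::kHeaderSize):

// Assumed values for a 64-bit build; the real constants come from V8.
constexpr int kPointerSize = 8;
constexpr int kFixedArrayHeaderSize = 16;

// In-object fields live directly in the object; out-of-object fields live
// in the properties FixedArray, after its header.
int FastPropertyOffset(bool inobject, int index) {
  return inobject ? index * kPointerSize
                  : index * kPointerSize + kFixedArrayHeaderSize;
}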
288
289
290 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
291 Register receiver,
292 Register scratch,
293 Label* miss_label) {
294 ASSERT(!AreAliased(receiver, scratch));
295
296 // Check that the receiver isn't a smi.
297 __ JumpIfSmi(receiver, miss_label);
298
299 // Check that the object is a JS array.
300 __ JumpIfNotObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE,
301 miss_label);
302
303 // Load length directly from the JS array.
304 __ Ldr(x0, FieldMemOperand(receiver, JSArray::kLengthOffset));
305 __ Ret();
306 }
307
308
309 // Generate code to check if an object is a string. If the object is a
310 // heap object, its map's instance type is left in the scratch1 register.
311 static void GenerateStringCheck(MacroAssembler* masm,
312 Register receiver,
313 Register scratch1,
314 Label* smi,
315 Label* non_string_object) {
316 // Check that the receiver isn't a smi.
317 __ JumpIfSmi(receiver, smi);
318
319 // Get the object's instance type field.
320 __ Ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
321 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
322 // Check if the "not string" bit is set.
323 __ Tbnz(scratch1, MaskToBit(kNotStringTag), non_string_object);
324 }
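
The single-bit test above works because string instance types have the "not string" bit clear. A sketch, with the mask value as an assumption:

#include <cstdint>

constexpr uint8_t kNotStringTag = 0x80;  // assumption for illustration

// Mirrors the Tbnz above: a set bit means "not a string".
bool IsStringInstanceType(uint8_t instance_type) {
  return (instance_type & kNotStringTag) == 0;
}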
325
326
327 // Generate code to load the length from a string object and return it.
328 // If the receiver object is not a string or a wrapped string object,
329 // execution continues at the miss label. The register containing the
330 // receiver is not clobbered if the receiver is not a string.
331 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
332 Register receiver,
333 Register scratch1,
334 Register scratch2,
335 Label* miss) {
336 // Input registers can't alias because we don't want to clobber the
337 // receiver register if the object is not a string.
338 ASSERT(!AreAliased(receiver, scratch1, scratch2));
339
340 Label check_wrapper;
341
342 // Check if the object is a string leaving the instance type in the
343 // scratch1 register.
344 GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);
345
346 // Load length directly from the string.
347 __ Ldr(x0, FieldMemOperand(receiver, String::kLengthOffset));
348 __ Ret();
349
350 // Check if the object is a JSValue wrapper.
351 __ Bind(&check_wrapper);
352 __ Cmp(scratch1, Operand(JS_VALUE_TYPE));
353 __ B(ne, miss);
354
355 // Unwrap the value and check if the wrapped value is a string.
356 __ Ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
357 GenerateStringCheck(masm, scratch1, scratch2, miss, miss);
358 __ Ldr(x0, FieldMemOperand(scratch1, String::kLengthOffset));
359 __ Ret();
360 }
361
362
363 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
364 Register receiver,
365 Register scratch1,
366 Register scratch2,
367 Label* miss_label) {
368 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
369 // TryGetFunctionPrototype can't put the result directly in x0 because the
370 // three input registers can't alias, and we call this function from
371 // LoadIC::GenerateFunctionPrototype, where the receiver is x0. So we
372 // explicitly move the result into x0.
373 __ Mov(x0, scratch1);
374 __ Ret();
375 }
376
377
378 // Generate code to check that a global property cell is empty. Create
379 // the property cell at compilation time if no cell exists for the
380 // property.
381 void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
382 Handle<JSGlobalObject> global,
383 Handle<Name> name,
384 Register scratch,
385 Label* miss) {
386 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
387 ASSERT(cell->value()->IsTheHole());
388 __ Mov(scratch, Operand(cell));
389 __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
390 __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, miss);
391 }
392
393
394 void StoreStubCompiler::GenerateNegativeHolderLookup(
395 MacroAssembler* masm,
396 Handle<JSObject> holder,
397 Register holder_reg,
398 Handle<Name> name,
399 Label* miss) {
400 if (holder->IsJSGlobalObject()) {
401 GenerateCheckPropertyCell(
402 masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
403 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
404 GenerateDictionaryNegativeLookup(
405 masm, miss, holder_reg, name, scratch1(), scratch2());
406 }
407 }
408
409
410 // Generate StoreTransition code; the value is passed in the x0 register.
411 // When leaving generated code after success, the receiver_reg and storage_reg
412 // may be clobbered. Upon branch to miss_label, the receiver and name registers
413 // have their original values.
414 void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
415 Handle<JSObject> object,
416 LookupResult* lookup,
417 Handle<Map> transition,
418 Handle<Name> name,
419 Register receiver_reg,
420 Register storage_reg,
421 Register value_reg,
422 Register scratch1,
423 Register scratch2,
424 Register scratch3,
425 Label* miss_label,
426 Label* slow) {
427 Label exit;
428
429 ASSERT(!AreAliased(receiver_reg, storage_reg, value_reg,
430 scratch1, scratch2, scratch3));
431
432 // We don't need scratch3.
433 scratch3 = NoReg;
434
435 int descriptor = transition->LastAdded();
436 DescriptorArray* descriptors = transition->instance_descriptors();
437 PropertyDetails details = descriptors->GetDetails(descriptor);
438 Representation representation = details.representation();
439 ASSERT(!representation.IsNone());
440
441 if (details.type() == CONSTANT) {
442 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
443 __ LoadObject(scratch1, constant);
444 __ Cmp(value_reg, scratch1);
445 __ B(ne, miss_label);
446 } else if (FLAG_track_fields && representation.IsSmi()) {
447 __ JumpIfNotSmi(value_reg, miss_label);
448 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
449 __ JumpIfSmi(value_reg, miss_label);
450 } else if (FLAG_track_double_fields && representation.IsDouble()) {
451 Label do_store, heap_number;
452 __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2);
453
454 // TODO(jbramley): Is fp_scratch the most appropriate FP scratch register?
455 // It's only used in Fcmp, but it's not really safe to use it like this.
456 __ JumpIfNotSmi(value_reg, &heap_number);
457 __ SmiUntagToDouble(fp_scratch, value_reg);
458 __ B(&do_store);
459
460 __ Bind(&heap_number);
461 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
462 miss_label, DONT_DO_SMI_CHECK);
463 __ Ldr(fp_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
464
465 __ Bind(&do_store);
466 __ Str(fp_scratch, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
467 }
468
469 // Stub never generated for non-global objects that require access checks.
470 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
471
472 // Perform map transition for the receiver if necessary.
473 if ((details.type() == FIELD) &&
474 (object->map()->unused_property_fields() == 0)) {
475 // The properties must be extended before we can store the value.
476 // We jump to a runtime call that extends the properties array.
477 __ Mov(scratch1, Operand(transition));
478 __ Push(receiver_reg, scratch1, value_reg);
479 __ TailCallExternalReference(
480 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
481 masm->isolate()),
482 3,
483 1);
484 return;
485 }
486
487 // Update the map of the object.
488 __ Mov(scratch1, Operand(transition));
489 __ Str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
490
491 // Update the write barrier for the map field.
492 __ RecordWriteField(receiver_reg,
493 HeapObject::kMapOffset,
494 scratch1,
495 scratch2,
496 kLRHasNotBeenSaved,
497 kDontSaveFPRegs,
498 OMIT_REMEMBERED_SET,
499 OMIT_SMI_CHECK);
500
501 if (details.type() == CONSTANT) {
502 ASSERT(value_reg.is(x0));
503 __ Ret();
504 return;
505 }
506
507 int index = transition->instance_descriptors()->GetFieldIndex(
508 transition->LastAdded());
509
510 // Adjust for the number of properties stored in the object. Even in the
511 // face of a transition we can use the old map here because the size of the
512 // object and the number of in-object properties are not going to change.
513 index -= object->map()->inobject_properties();
514
515 // TODO(verwaest): Share this code as a code stub.
516 SmiCheck smi_check = representation.IsTagged()
517 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
518 if (index < 0) {
519 // Set the property straight into the object.
520 int offset = object->map()->instance_size() + (index * kPointerSize);
521 // TODO(jbramley): This construct appears in several places in this
522 // function. Try to clean it up, perhaps using a result_reg.
523 if (FLAG_track_double_fields && representation.IsDouble()) {
524 __ Str(storage_reg, FieldMemOperand(receiver_reg, offset));
525 } else {
526 __ Str(value_reg, FieldMemOperand(receiver_reg, offset));
527 }
528
529 if (!FLAG_track_fields || !representation.IsSmi()) {
530 // Update the write barrier for the array address.
531 if (!FLAG_track_double_fields || !representation.IsDouble()) {
532 __ Mov(storage_reg, value_reg);
533 }
534 __ RecordWriteField(receiver_reg,
535 offset,
536 storage_reg,
537 scratch1,
538 kLRHasNotBeenSaved,
539 kDontSaveFPRegs,
540 EMIT_REMEMBERED_SET,
541 smi_check);
542 }
543 } else {
544 // Write to the properties array.
545 int offset = index * kPointerSize + FixedArray::kHeaderSize;
546 // Get the properties array.
547 __ Ldr(scratch1,
548 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
549 if (FLAG_track_double_fields && representation.IsDouble()) {
550 __ Str(storage_reg, FieldMemOperand(scratch1, offset));
551 } else {
552 __ Str(value_reg, FieldMemOperand(scratch1, offset));
553 }
554
555 if (!FLAG_track_fields || !representation.IsSmi()) {
556 // Update the write barrier for the array address.
557 if (!FLAG_track_double_fields || !representation.IsDouble()) {
558 __ Mov(storage_reg, value_reg);
559 }
560 __ RecordWriteField(scratch1,
561 offset,
562 storage_reg,
563 receiver_reg,
564 kLRHasNotBeenSaved,
565 kDontSaveFPRegs,
566 EMIT_REMEMBERED_SET,
567 smi_check);
568 }
569 }
570
571 __ Bind(&exit);
572 // Return the value (register x0).
573 ASSERT(value_reg.is(x0));
574 __ Ret();
575 }
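
GenerateStoreTransition above and GenerateStoreField below both rely on the same index bias: once the in-object property count is subtracted, a negative index selects in-object storage and a non-negative one selects the properties array. A worked sketch with assumed byte sizes:

constexpr int kPointerSize = 8;            // assumption: 64-bit build
constexpr int kFixedArrayHeaderSize = 16;  // assumption for illustration

// 'index' has already been biased by -inobject_properties().
int StoreOffset(int instance_size, int index) {
  return (index < 0)
      ? instance_size + index * kPointerSize           // in-object slot
      : index * kPointerSize + kFixedArrayHeaderSize;  // properties array
}

// Example: instance_size = 64 with 4 in-object slots; field index 2 gives
// index = 2 - 4 = -2, so the slot is at 64 - 16 = 48 bytes into the object.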
576
577
578 // Generate StoreField code; the value is passed in the x0 register.
579 // When leaving generated code after success, the receiver_reg and name_reg may
580 // be clobbered. Upon branch to miss_label, the receiver and name registers have
581 // their original values.
582 void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
583 Handle<JSObject> object,
584 LookupResult* lookup,
585 Register receiver_reg,
586 Register name_reg,
587 Register value_reg,
588 Register scratch1,
589 Register scratch2,
590 Label* miss_label) {
591 // x0 : value
592 Label exit;
593
594 // Stub never generated for non-global objects that require access
595 // checks.
596 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
597
598 int index = lookup->GetFieldIndex().field_index();
599
600 // Adjust for the number of properties stored in the object. Even in the
601 // face of a transition we can use the old map here because the size of the
602 // object and the number of in-object properties are not going to change.
603 index -= object->map()->inobject_properties();
604
605 Representation representation = lookup->representation();
606 ASSERT(!representation.IsNone());
607 if (FLAG_track_fields && representation.IsSmi()) {
608 __ JumpIfNotSmi(value_reg, miss_label);
609 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
610 __ JumpIfSmi(value_reg, miss_label);
611 } else if (FLAG_track_double_fields && representation.IsDouble()) {
612 // Load the double storage.
613 if (index < 0) {
614 int offset = (index * kPointerSize) + object->map()->instance_size();
615 __ Ldr(scratch1, FieldMemOperand(receiver_reg, offset));
616 } else {
617 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
618 __ Ldr(scratch1,
619 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
620 __ Ldr(scratch1, FieldMemOperand(scratch1, offset));
621 }
622
623 // Store the value into the storage.
624 Label do_store, heap_number;
625 // TODO(jbramley): Is fp_scratch the most appropriate FP scratch register?
626 // It's only used in Fcmp, but it's not really safe to use it like this.
627 __ JumpIfNotSmi(value_reg, &heap_number);
628 __ SmiUntagToDouble(fp_scratch, value_reg);
629 __ B(&do_store);
630
631 __ Bind(&heap_number);
632 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
633 miss_label, DONT_DO_SMI_CHECK);
634 __ Ldr(fp_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
635
636 __ Bind(&do_store);
637 __ Str(fp_scratch, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
638
639 // Return the value (register x0).
640 ASSERT(value_reg.is(x0));
641 __ Ret();
642 return;
643 }
644
645 // TODO(verwaest): Share this code as a code stub.
646 SmiCheck smi_check = representation.IsTagged()
647 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
648 if (index < 0) {
649 // Set the property straight into the object.
650 int offset = object->map()->instance_size() + (index * kPointerSize);
651 __ Str(value_reg, FieldMemOperand(receiver_reg, offset));
652
653 if (!FLAG_track_fields || !representation.IsSmi()) {
654 // Skip updating write barrier if storing a smi.
655 __ JumpIfSmi(value_reg, &exit);
656
657 // Update the write barrier for the array address.
658 // Pass the now unused name_reg as a scratch register.
659 __ Mov(name_reg, value_reg);
660 __ RecordWriteField(receiver_reg,
661 offset,
662 name_reg,
663 scratch1,
664 kLRHasNotBeenSaved,
665 kDontSaveFPRegs,
666 EMIT_REMEMBERED_SET,
667 smi_check);
668 }
669 } else {
670 // Write to the properties array.
671 int offset = index * kPointerSize + FixedArray::kHeaderSize;
672 // Get the properties array.
673 __ Ldr(scratch1,
674 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
675 __ Str(value_reg, FieldMemOperand(scratch1, offset));
676
677 if (!FLAG_track_fields || !representation.IsSmi()) {
678 // Skip updating write barrier if storing a smi.
679 __ JumpIfSmi(value_reg, &exit);
680
681 // Update the write barrier for the array address.
682 // Ok to clobber receiver_reg and name_reg, since we return.
683 __ Mov(name_reg, value_reg);
684 __ RecordWriteField(scratch1,
685 offset,
686 name_reg,
687 receiver_reg,
688 kLRHasNotBeenSaved,
689 kDontSaveFPRegs,
690 EMIT_REMEMBERED_SET,
691 smi_check);
692 }
693 }
694
695 __ Bind(&exit);
696 // Return the value (register x0).
697 ASSERT(value_reg.is(x0));
698 __ Ret();
699 }
700
701
702 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
703 Label* label,
704 Handle<Name> name) {
705 if (!label->is_unused()) {
706 __ Bind(label);
707 __ Mov(this->name(), Operand(name));
708 }
709 }
710
711
712 static void PushInterceptorArguments(MacroAssembler* masm,
713 Register receiver,
714 Register holder,
715 Register name,
716 Handle<JSObject> holder_obj) {
717 STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
718 STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
719 STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
720 STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
721 STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
722
723 __ Push(name);
724 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
725 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
726 Register scratch = name;
727 __ Mov(scratch, Operand(interceptor));
728 __ Push(scratch, receiver, holder);
729 }
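
The STATIC_ASSERTs pin down the argument order the pushes above must produce. A host-side sketch of that layout (index 0 is the name, which is pushed first):

// Fills a four-slot argument block in the STATIC_ASSERTed order.
void BuildInterceptorArgs(const void* args[4], const void* name,
                          const void* info, const void* receiver,
                          const void* holder) {
  args[0] = name;      // kInterceptorArgsNameIndex
  args[1] = info;      // kInterceptorArgsInfoIndex
  args[2] = receiver;  // kInterceptorArgsThisIndex
  args[3] = holder;    // kInterceptorArgsHolderIndex
}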
730
731
732 static void CompileCallLoadPropertyWithInterceptor(
733 MacroAssembler* masm,
734 Register receiver,
735 Register holder,
736 Register name,
737 Handle<JSObject> holder_obj,
738 IC::UtilityId id) {
739 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
740
741 __ CallExternalReference(
742 ExternalReference(IC_Utility(id), masm->isolate()),
743 StubCache::kInterceptorArgsLength);
744 }
745
746
747 // Generate a call to an API function.
748 void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
749 const CallOptimization& optimization,
750 Handle<Map> receiver_map,
751 Register receiver,
752 Register scratch,
753 bool is_store,
754 int argc,
755 Register* values) {
756 ASSERT(!AreAliased(receiver, scratch));
757
758 MacroAssembler::PushPopQueue queue(masm);
759 queue.Queue(receiver);
760 // Write the arguments to the stack frame.
761 for (int i = 0; i < argc; i++) {
762 Register arg = values[argc-1-i];
763 ASSERT(!AreAliased(receiver, scratch, arg));
764 queue.Queue(arg);
765 }
766 queue.PushQueued();
767
768 ASSERT(optimization.is_simple_api_call());
769
770 // ABI for CallApiFunctionStub.
771 Register callee = x0;
772 Register call_data = x4;
773 Register holder = x2;
774 Register api_function_address = x1;
775
776 // Put holder in place.
777 CallOptimization::HolderLookup holder_lookup;
778 Handle<JSObject> api_holder =
779 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
780 switch (holder_lookup) {
781 case CallOptimization::kHolderIsReceiver:
782 __ Mov(holder, receiver);
783 break;
784 case CallOptimization::kHolderFound:
785 __ LoadObject(holder, api_holder);
786 break;
787 case CallOptimization::kHolderNotFound:
788 UNREACHABLE();
789 break;
790 }
791
792 Isolate* isolate = masm->isolate();
793 Handle<JSFunction> function = optimization.constant_function();
794 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
795 Handle<Object> call_data_obj(api_call_info->data(), isolate);
796
797 // Put callee in place.
798 __ LoadObject(callee, function);
799
800 bool call_data_undefined = false;
801 // Put call_data in place.
802 if (isolate->heap()->InNewSpace(*call_data_obj)) {
803 __ LoadObject(call_data, api_call_info);
804 __ Ldr(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
805 } else if (call_data_obj->IsUndefined()) {
806 call_data_undefined = true;
807 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
808 } else {
809 __ LoadObject(call_data, call_data_obj);
810 }
811
812 // Put api_function_address in place.
813 Address function_address = v8::ToCData<Address>(api_call_info->callback());
814 ApiFunction fun(function_address);
815 ExternalReference ref = ExternalReference(&fun,
816 ExternalReference::DIRECT_API_CALL,
817 masm->isolate());
818 __ Mov(api_function_address, Operand(ref));
819
820 // Jump to stub.
821 CallApiFunctionStub stub(is_store, call_data_undefined, argc);
822 __ TailCallStub(&stub);
823 }
824
825
826 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
827 __ Jump(code, RelocInfo::CODE_TARGET);
828 }
829
830
831 #undef __
832 #define __ ACCESS_MASM(masm())
833
834
835 Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
836 Register object_reg,
837 Handle<JSObject> holder,
838 Register holder_reg,
839 Register scratch1,
840 Register scratch2,
841 Handle<Name> name,
842 Label* miss,
843 PrototypeCheckType check) {
844 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
845
846 // object_reg and holder_reg registers can alias.
847 ASSERT(!AreAliased(object_reg, scratch1, scratch2));
848 ASSERT(!AreAliased(holder_reg, scratch1, scratch2));
849
850 // Keep track of the current object in register reg.
851 Register reg = object_reg;
852 int depth = 0;
853
854 Handle<JSObject> current = Handle<JSObject>::null();
855 if (type->IsConstant()) {
856 current = Handle<JSObject>::cast(type->AsConstant());
857 }
858 Handle<JSObject> prototype = Handle<JSObject>::null();
859 Handle<Map> current_map = receiver_map;
860 Handle<Map> holder_map(holder->map());
861 // Traverse the prototype chain and check the maps in the prototype chain
862 // for fast and global objects, or do a negative lookup for normal objects.
863 while (!current_map.is_identical_to(holder_map)) {
864 ++depth;
865
866 // Only global objects and objects that do not require access
867 // checks are allowed in stubs.
868 ASSERT(current_map->IsJSGlobalProxyMap() ||
869 !current_map->is_access_check_needed());
870
871 prototype = handle(JSObject::cast(current_map->prototype()));
872 if (current_map->is_dictionary_map() &&
873 !current_map->IsJSGlobalObjectMap() &&
874 !current_map->IsJSGlobalProxyMap()) {
875 if (!name->IsUniqueName()) {
876 ASSERT(name->IsString());
877 name = factory()->InternalizeString(Handle<String>::cast(name));
878 }
879 ASSERT(current.is_null() ||
880 (current->property_dictionary()->FindEntry(*name) ==
881 NameDictionary::kNotFound));
882
883 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
884 scratch1, scratch2);
885
886 __ Ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
887 reg = holder_reg; // From now on the object will be in holder_reg.
888 __ Ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
889 } else {
890 bool need_map = (depth != 1 || check == CHECK_ALL_MAPS) ||
891 heap()->InNewSpace(*prototype);
892 Register map_reg = NoReg;
893 if (need_map) {
894 map_reg = scratch1;
895 __ Ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
896 }
897
898 if (depth != 1 || check == CHECK_ALL_MAPS) {
899 __ CheckMap(map_reg, current_map, miss, DONT_DO_SMI_CHECK);
900 }
901
902 // Check access rights to the global object. This has to happen after
903 // the map check so that we know that the object is actually a global
904 // object.
905 if (current_map->IsJSGlobalProxyMap()) {
906 __ CheckAccessGlobalProxy(reg, scratch2, miss);
907 } else if (current_map->IsJSGlobalObjectMap()) {
908 GenerateCheckPropertyCell(
909 masm(), Handle<JSGlobalObject>::cast(current), name,
910 scratch2, miss);
911 }
912
913 reg = holder_reg; // From now on the object will be in holder_reg.
914
915 if (heap()->InNewSpace(*prototype)) {
916 // The prototype is in new space; we cannot store a reference to it
917 // in the code. Load it from the map.
918 __ Ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
919 } else {
920 // The prototype is in old space; load it directly.
921 __ Mov(reg, Operand(prototype));
922 }
923 }
924
925 // Go to the next object in the prototype chain.
926 current = prototype;
927 current_map = handle(current->map());
928 }
929
930 // Log the check depth.
931 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
932
933 // Check the holder map.
934 if (depth != 0 || check == CHECK_ALL_MAPS) {
935 // Check the holder map.
936 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
937 }
938
939 // Perform security check for access to the global object.
940 ASSERT(current_map->IsJSGlobalProxyMap() ||
941 !current_map->is_access_check_needed());
942 if (current_map->IsJSGlobalProxyMap()) {
943 __ CheckAccessGlobalProxy(reg, scratch1, miss);
944 }
945
946 // Return the register containing the holder.
947 return reg;
948 }
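
Conceptually, the checks emitted above walk the prototype chain and compare each object's map against the one recorded at compile time. A hedged host-side model (the types are illustrative, not V8's):

struct Obj {
  const Obj* prototype;
  const void* map;
};

// Returns true when every object from 'receiver' up to 'holder' still has
// the map recorded at compile time in 'expected' (n entries).
bool CheckPrototypeChain(const Obj* receiver, const Obj* holder,
                         const void* const* expected, int n) {
  const Obj* current = receiver;
  for (int depth = 0; current != holder; ++depth) {
    if (depth >= n || current->map != expected[depth]) return false;
    current = current->prototype;
  }
  return true;
}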
949
950
951 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
952 if (!miss->is_unused()) {
953 Label success;
954 __ B(&success);
955
956 __ Bind(miss);
957 TailCallBuiltin(masm(), MissBuiltin(kind()));
958
959 __ Bind(&success);
960 }
961 }
962
963
964 void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
965 if (!miss->is_unused()) {
966 Label success;
967 __ B(&success);
968
969 GenerateRestoreName(masm(), miss, name);
970 TailCallBuiltin(masm(), MissBuiltin(kind()));
971
972 __ Bind(&success);
973 }
974 }
975
976
977 Register LoadStubCompiler::CallbackHandlerFrontend(Handle<HeapType> type,
978 Register object_reg,
979 Handle<JSObject> holder,
980 Handle<Name> name,
981 Handle<Object> callback) {
982 Label miss;
983
984 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
985
986 // TODO(jbramley): HandlerFrontendHeader returns its result in scratch1(), so
987 // we can't use it below, but that isn't very obvious. Is there a better way
988 // of handling this?
989
990 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
991 ASSERT(!AreAliased(reg, scratch2(), scratch3(), scratch4()));
992
993 // Load the properties dictionary.
994 Register dictionary = scratch4();
995 __ Ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
996
997 // Probe the dictionary.
998 Label probe_done;
999 NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
1000 &miss,
1001 &probe_done,
1002 dictionary,
1003 this->name(),
1004 scratch2(),
1005 scratch3());
1006 __ Bind(&probe_done);
1007
1008 // If probing finds an entry in the dictionary, scratch3 contains the
1009 // pointer into the dictionary. Check that the value is the callback.
1010 Register pointer = scratch3();
1011 const int kElementsStartOffset = NameDictionary::kHeaderSize +
1012 NameDictionary::kElementsStartIndex * kPointerSize;
1013 const int kValueOffset = kElementsStartOffset + kPointerSize;
1014 __ Ldr(scratch2(), FieldMemOperand(pointer, kValueOffset));
1015 __ Cmp(scratch2(), Operand(callback));
1016 __ B(ne, &miss);
1017 }
1018
1019 HandlerFrontendFooter(name, &miss);
1020 return reg;
1021 }
1022
1023
1024 void LoadStubCompiler::GenerateLoadField(Register reg,
1025 Handle<JSObject> holder,
1026 PropertyIndex field,
1027 Representation representation) {
1028 __ Mov(receiver(), reg);
1029 if (kind() == Code::LOAD_IC) {
1030 LoadFieldStub stub(field.is_inobject(holder),
1031 field.translate(holder),
1032 representation);
1033 GenerateTailCall(masm(), stub.GetCode(isolate()));
1034 } else {
1035 KeyedLoadFieldStub stub(field.is_inobject(holder),
1036 field.translate(holder),
1037 representation);
1038 GenerateTailCall(masm(), stub.GetCode(isolate()));
1039 }
1040 }
1041
1042
1043 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
1044 // Return the constant value.
1045 __ LoadObject(x0, value);
1046 __ Ret();
1047 }
1048
1049
1050 void LoadStubCompiler::GenerateLoadCallback(
1051 Register reg,
1052 Handle<ExecutableAccessorInfo> callback) {
1053 ASSERT(!AreAliased(scratch2(), scratch3(), scratch4(), reg));
1054
1055 // Build the ExecutableAccessorInfo::args_ list on the stack and push the
1056 // property name below the exit frame to make the GC aware of them and
1057 // store pointers to them.
1058 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
1059 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
1060 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
1061 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
1062 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
1063 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
1064 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
1065
1066 __ Push(receiver());
1067
1068 if (heap()->InNewSpace(callback->data())) {
1069 __ Mov(scratch3(), Operand(callback));
1070 __ Ldr(scratch3(), FieldMemOperand(scratch3(),
1071 ExecutableAccessorInfo::kDataOffset));
1072 } else {
1073 __ Mov(scratch3(), Operand(Handle<Object>(callback->data(), isolate())));
1074 }
1075 // TODO(jbramley): Find another scratch register and combine the pushes
1076 // together. Can we use scratch1() here?
1077 __ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
1078 __ Push(scratch3(), scratch4());
1079 __ Mov(scratch3(), Operand(ExternalReference::isolate_address(isolate())));
1080 __ Push(scratch4(), scratch3(), reg, name());
1081
1082 Register args_addr = scratch2();
1083 __ Add(args_addr, __ StackPointer(), kPointerSize);
1084
1085 // Stack at this point:
1086 // sp[40] callback data
1087 // sp[32] undefined
1088 // sp[24] undefined
1089 // sp[16] isolate
1090 // args_addr -> sp[8] reg
1091 // sp[0] name
1092
1093 // ABI for CallApiGetter.
1094 Register getter_address_reg = x2;
1095
1096 // Set up the call.
1097 Address getter_address = v8::ToCData<Address>(callback->getter());
1098 ApiFunction fun(getter_address);
1099 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
1100 ExternalReference ref = ExternalReference(&fun, type, isolate());
1101 __ Mov(getter_address_reg, Operand(ref));
1102
1103 CallApiGetterStub stub;
1104 __ TailCallStub(&stub);
1105 }
1106
1107
1108 void LoadStubCompiler::GenerateLoadInterceptor(
1109 Register holder_reg,
1110 Handle<Object> object,
1111 Handle<JSObject> interceptor_holder,
1112 LookupResult* lookup,
1113 Handle<Name> name) {
1114 ASSERT(!AreAliased(receiver(), this->name(),
1115 scratch1(), scratch2(), scratch3()));
1116 ASSERT(interceptor_holder->HasNamedInterceptor());
1117 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1118
1119 // So far the most popular follow-ups for interceptor loads are FIELD
1120 // and CALLBACKS, so inline only them; other cases may be added later.
1121 bool compile_followup_inline = false;
1122 if (lookup->IsFound() && lookup->IsCacheable()) {
1123 if (lookup->IsField()) {
1124 compile_followup_inline = true;
1125 } else if (lookup->type() == CALLBACKS &&
1126 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
1127 ExecutableAccessorInfo* callback =
1128 ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1129 compile_followup_inline = callback->getter() != NULL &&
1130 callback->IsCompatibleReceiver(*object);
1131 }
1132 }
1133
1134 if (compile_followup_inline) {
1135 // Compile the interceptor call, followed by inline code to load the
1136 // property from further up the prototype chain if the call fails.
1137 // Check that the maps haven't changed.
1138 ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1139
1140 // Preserve the receiver register explicitly whenever it is different from
1141 // the holder and it is needed should the interceptor return without any
1142 // result. The CALLBACKS case needs the receiver to be passed into C++ code;
1143 // the FIELD case might cause a miss during the prototype check.
1144 bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
1145 bool must_preserve_receiver_reg = !receiver().Is(holder_reg) &&
1146 (lookup->type() == CALLBACKS || must_perform_prototype_check);
1147
1148 // Save necessary data before invoking an interceptor.
1149 // Requires a frame to make GC aware of pushed pointers.
1150 {
1151 FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1152 if (must_preserve_receiver_reg) {
1153 __ Push(receiver(), holder_reg, this->name());
1154 } else {
1155 __ Push(holder_reg, this->name());
1156 }
1157 // Invoke an interceptor. Note: map checks from the receiver to the
1158 // interceptor's holder have been compiled before (see a caller of
1159 // this method).
1160 CompileCallLoadPropertyWithInterceptor(
1161 masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1162 IC::kLoadPropertyWithInterceptorOnly);
1163
1164 // Check if the interceptor provided a value for the property. If so,
1165 // return immediately.
1166 Label interceptor_failed;
1167 __ JumpIfRoot(x0,
1168 Heap::kNoInterceptorResultSentinelRootIndex,
1169 &interceptor_failed);
1170 frame_scope.GenerateLeaveFrame();
1171 __ Ret();
1172
1173 __ Bind(&interceptor_failed);
1174 if (must_preserve_receiver_reg) {
1175 __ Pop(this->name(), holder_reg, receiver());
1176 } else {
1177 __ Pop(this->name(), holder_reg);
1178 }
1179 // Leave the internal frame.
1180 }
1181 GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1182 } else { // !compile_followup_inline
1183 // Call the runtime system to load the interceptor.
1184 // Check that the maps haven't changed.
1185 PushInterceptorArguments(
1186 masm(), receiver(), holder_reg, this->name(), interceptor_holder);
1187
1188 ExternalReference ref =
1189 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1190 isolate());
1191 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
1192 }
1193 }
1194
1195
1196 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
1197 Label success;
1198 // Check that the object is a boolean.
1199 // TODO(all): Optimize this like LCodeGen::DoDeferredTaggedToI.
1200 __ JumpIfRoot(object, Heap::kTrueValueRootIndex, &success);
1201 __ JumpIfNotRoot(object, Heap::kFalseValueRootIndex, miss);
1202 __ Bind(&success);
1203 }
1204
1205
1206 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1207 Handle<JSObject> object,
1208 Handle<JSObject> holder,
1209 Handle<Name> name,
1210 Handle<ExecutableAccessorInfo> callback) {
1211 ASM_LOCATION("StoreStubCompiler::CompileStoreCallback");
1212 Register holder_reg = HandlerFrontend(
1213 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);
1214
1215 // Stub never generated for non-global objects that require access checks.
1216 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1217
1218 // TODO(jbramley): Make Push take more than four arguments and combine these
1219 // two calls.
1220 __ Push(receiver(), holder_reg);
1221 __ Mov(scratch1(), Operand(callback));
1222 __ Mov(scratch2(), Operand(name));
1223 __ Push(scratch1(), scratch2(), value());
1224
1225 // Do tail-call to the runtime system.
1226 ExternalReference store_callback_property =
1227 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
1228 __ TailCallExternalReference(store_callback_property, 5, 1);
1229
1230 // Return the generated code.
1231 return GetCode(kind(), Code::FAST, name);
1232 }
1233
1234
1235 #undef __
1236 #define __ ACCESS_MASM(masm)
1237
1238
1239 void StoreStubCompiler::GenerateStoreViaSetter(
1240 MacroAssembler* masm,
1241 Handle<HeapType> type,
1242 Handle<JSFunction> setter) {
1243 // ----------- S t a t e -------------
1244 // -- x0 : value
1245 // -- x1 : receiver
1246 // -- x2 : name
1247 // -- lr : return address
1248 // -----------------------------------
1249 Register value = x0;
1250 Register receiver = x1;
1251 Label miss;
1252
1253 {
1254 FrameScope scope(masm, StackFrame::INTERNAL);
1255
1256 // Save value register, so we can restore it later.
1257 __ Push(value);
1258
1259 if (!setter.is_null()) {
1260 // Call the JavaScript setter with receiver and value on the stack.
1261 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1262 // Swap in the global receiver.
1263 __ Ldr(receiver,
1264 FieldMemOperand(
1265 receiver, JSGlobalObject::kGlobalReceiverOffset));
1266 }
1267 __ Push(receiver, value);
1268 ParameterCount actual(1);
1269 ParameterCount expected(setter);
1270 __ InvokeFunction(setter, expected, actual,
1271 CALL_FUNCTION, NullCallWrapper());
1272 } else {
1273 // If we generate a global code snippet for deoptimization only, remember
1274 // the place to continue after deoptimization.
1275 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
1276 }
1277
1278 // We have to return the passed value, not the return value of the setter.
1279 __ Pop(value);
1280
1281 // Restore context register.
1282 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1283 }
1284 __ Ret();
1285 }
1286
1287
1288 #undef __
1289 #define __ ACCESS_MASM(masm())
1290
1291
1292 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
1293 Handle<JSObject> object,
1294 Handle<Name> name) {
1295 Label miss;
1296
1297 ASM_LOCATION("StoreStubCompiler::CompileStoreInterceptor");
1298
1299 __ Push(receiver(), this->name(), value());
1300
1301 // Do tail-call to the runtime system.
1302 ExternalReference store_ic_property =
1303 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
1304 __ TailCallExternalReference(store_ic_property, 3, 1);
1305
1306 // Return the generated code.
1307 return GetCode(kind(), Code::FAST, name);
1308 }
1309
1310
1311 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
1312 Handle<JSObject> last,
1313 Handle<Name> name) {
1314 NonexistentHandlerFrontend(type, last, name);
1315
1316 // Return undefined if the maps of the full prototype chain are still the
1317 // same and no global property with this name contains a value.
1318 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1319 __ Ret();
1320
1321 // Return the generated code.
1322 return GetCode(kind(), Code::FAST, name);
1323 }
1324
1325
1326 // TODO(all): The so-called scratch registers are significant in some cases. For
1327 // example, KeyedStoreStubCompiler::registers()[3] (x3) is actually used for
1328 // KeyedStoreCompiler::transition_map(). We should verify which registers are
1329 // actually scratch registers, and which are important. For now, we use the same
1330 // assignments as ARM to remain on the safe side.
1331
1332 Register* LoadStubCompiler::registers() {
1333 // receiver, name, scratch1, scratch2, scratch3, scratch4.
1334 static Register registers[] = { x0, x2, x3, x1, x4, x5 };
1335 return registers;
1336 }
1337
1338
1339 Register* KeyedLoadStubCompiler::registers() {
1340 // receiver, name/key, scratch1, scratch2, scratch3, scratch4.
1341 static Register registers[] = { x1, x0, x2, x3, x4, x5 };
1342 return registers;
1343 }
1344
1345
1346 Register* StoreStubCompiler::registers() {
1347 // receiver, name, value, scratch1, scratch2, scratch3.
1348 static Register registers[] = { x1, x2, x0, x3, x4, x5 };
1349 return registers;
1350 }
1351
1352
1353 Register* KeyedStoreStubCompiler::registers() {
1354 // receiver, name, value, scratch1, scratch2, scratch3.
1355 static Register registers[] = { x2, x1, x0, x3, x4, x5 };
1356 return registers;
1357 }
1358
1359
1360 #undef __
1361 #define __ ACCESS_MASM(masm)
1362
1363 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
1364 Handle<HeapType> type,
1365 Register receiver,
1366 Handle<JSFunction> getter) {
1367 {
1368 FrameScope scope(masm, StackFrame::INTERNAL);
1369
1370 if (!getter.is_null()) {
1371 // Call the JavaScript getter with the receiver on the stack.
1372 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1373 // Swap in the global receiver.
1374 __ Ldr(receiver,
1375 FieldMemOperand(
1376 receiver, JSGlobalObject::kGlobalReceiverOffset));
1377 }
1378 __ Push(receiver);
1379 ParameterCount actual(0);
1380 ParameterCount expected(getter);
1381 __ InvokeFunction(getter, expected, actual,
1382 CALL_FUNCTION, NullCallWrapper());
1383 } else {
1384 // If we generate a global code snippet for deoptimization only, remember
1385 // the place to continue after deoptimization.
1386 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
1387 }
1388
1389 // Restore context register.
1390 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1391 }
1392 __ Ret();
1393 }
1394
1395
1396 #undef __
1397 #define __ ACCESS_MASM(masm())
1398
1399
1400 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
1401 Handle<HeapType> type,
1402 Handle<GlobalObject> global,
1403 Handle<PropertyCell> cell,
1404 Handle<Name> name,
1405 bool is_dont_delete) {
1406 Label miss;
1407 HandlerFrontendHeader(type, receiver(), global, name, &miss);
1408
1409 // Get the value from the cell.
1410 __ Mov(x3, Operand(cell));
1411 __ Ldr(x4, FieldMemOperand(x3, Cell::kValueOffset));
1412
1413 // Check for deleted property if property can actually be deleted.
1414 if (!is_dont_delete) {
1415 __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &miss);
1416 }
1417
1418 Counters* counters = isolate()->counters();
1419 __ IncrementCounter(counters->named_load_global_stub(), 1, x1, x3);
1420 __ Mov(x0, x4);
1421 __ Ret();
1422
1423 HandlerFrontendFooter(name, &miss);
1424
1425 // Return the generated code.
1426 return GetCode(kind(), Code::NORMAL, name);
1427 }
1428
1429
1430 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
1431 TypeHandleList* types,
1432 CodeHandleList* handlers,
1433 Handle<Name> name,
1434 Code::StubType type,
1435 IcCheckType check) {
1436 Label miss;
1437
1438 if (check == PROPERTY &&
1439 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
1440 __ CompareAndBranch(this->name(), Operand(name), ne, &miss);
1441 }
1442
1443 Label number_case;
1444 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1445 __ JumpIfSmi(receiver(), smi_target);
1446
1447 Register map_reg = scratch1();
1448 __ Ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
1449 int receiver_count = types->length();
1450 int number_of_handled_maps = 0;
1451 for (int current = 0; current < receiver_count; ++current) {
1452 Handle<HeapType> type = types->at(current);
1453 Handle<Map> map = IC::TypeToMap(*type, isolate());
1454 if (!map->is_deprecated()) {
1455 number_of_handled_maps++;
1456 Label try_next;
1457 __ Cmp(map_reg, Operand(map));
1458 __ B(ne, &try_next);
1459 if (type->Is(HeapType::Number())) {
1460 ASSERT(!number_case.is_unused());
1461 __ Bind(&number_case);
1462 }
1463 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET);
1464 __ Bind(&try_next);
1465 }
1466 }
1467 ASSERT(number_of_handled_maps != 0);
1468
1469 __ Bind(&miss);
1470 TailCallBuiltin(masm(), MissBuiltin(kind()));
1471
1472 // Return the generated code.
1473 InlineCacheState state =
1474 (number_of_handled_maps > 1) ? POLYMORPHIC : MONOMORPHIC;
1475 return GetICCode(kind(), type, name, state);
1476 }
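
The dispatch emitted above amounts to a linear map-to-handler search, with deprecated maps skipped at compile time. A minimal sketch (Handler stands in for the generated jump targets):

#include <cstddef>

using Handler = void (*)();

// Compare the receiver's map against each known map and run the matching
// handler; unmatched receivers take the miss path, as in the code above.
void PolymorphicDispatch(const void* receiver_map,
                         const void* const* maps, const Handler* handlers,
                         size_t n, Handler miss) {
  for (size_t i = 0; i < n; ++i) {
    if (receiver_map == maps[i]) {
      handlers[i]();
      return;
    }
  }
  miss();
}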
1477
1478
1479 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
1480 MapHandleList* receiver_maps,
1481 CodeHandleList* handler_stubs,
1482 MapHandleList* transitioned_maps) {
1483 Label miss;
1484
1485 ASM_LOCATION("KeyedStoreStubCompiler::CompileStorePolymorphic");
1486
1487 __ JumpIfSmi(receiver(), &miss);
1488
1489 int receiver_count = receiver_maps->length();
1490 __ Ldr(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
1491 for (int i = 0; i < receiver_count; i++) {
1492 __ Cmp(scratch1(), Operand(receiver_maps->at(i)));
1493
1494 Label skip;
1495 __ B(&skip, ne);
1496 if (!transitioned_maps->at(i).is_null()) {
1497 // This argument is used by the handler stub. For example, see
1498 // ElementsTransitionGenerator::GenerateMapChangeElementsTransition.
1499 __ Mov(transition_map(), Operand(transitioned_maps->at(i)));
1500 }
1501 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
1502 __ Bind(&skip);
1503 }
1504
1505 __ Bind(&miss);
1506 TailCallBuiltin(masm(), MissBuiltin(kind()));
1507
1508 return GetICCode(
1509 kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
1510 }
1511
1512
1513 #undef __
1514 #define __ ACCESS_MASM(masm)
1515
1516 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
1517 MacroAssembler* masm) {
1518 // ---------- S t a t e --------------
1519 // -- lr : return address
1520 // -- x0 : key
1521 // -- x1 : receiver
1522 // -----------------------------------
1523 Label slow, miss;
1524
1525 Register result = x0;
1526 Register key = x0;
1527 Register receiver = x1;
1528
1529 __ JumpIfNotSmi(key, &miss);
1530 __ Ldr(x4, FieldMemOperand(receiver, JSObject::kElementsOffset));
1531 __ LoadFromNumberDictionary(&slow, x4, key, result, x2, x3, x5, x6);
1532 __ Ret();
1533
1534 __ Bind(&slow);
1535 __ IncrementCounter(
1536 masm->isolate()->counters()->keyed_load_external_array_slow(), 1, x2, x3);
1537 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
1538
1539 // Miss case, call the runtime.
1540 __ Bind(&miss);
1541 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1542 }
1543
1544
1545 } } // namespace v8::internal
1546
1547 #endif // V8_TARGET_ARCH_A64