Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(7)

Side by Side Diff: src/x64/ic-x64.cc

Issue 483683005: Move IC code into a subdir and move ic-compilation related code from stub-cache into ic-compiler (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix BUILD.gn Created 6 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/full-codegen-x64.cc ('k') | src/x64/lithium-codegen-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #if V8_TARGET_ARCH_X64
8
9 #include "src/codegen.h"
10 #include "src/ic-inl.h"
11 #include "src/runtime.h"
12 #include "src/stub-cache.h"
13
14 namespace v8 {
15 namespace internal {
16
17 // ----------------------------------------------------------------------------
18 // Static IC stub generators.
19 //
20
21 #define __ ACCESS_MASM(masm)
22
23
24 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
25 Register type,
26 Label* global_object) {
27 // Register usage:
28 // type: holds the receiver instance type on entry.
29 __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
30 __ j(equal, global_object);
31 __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
32 __ j(equal, global_object);
33 __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
34 __ j(equal, global_object);
35 }
36
37
38 // Helper function used to load a property from a dictionary backing storage.
39 // This function may return false negatives, so miss_label
40 // must always call a backup property load that is complete.
41 // This function is safe to call if name is not an internalized string,
42 // and will jump to the miss_label in that case.
43 // The generated code assumes that the receiver has slow properties,
44 // is not a global object and does not have interceptors.
45 static void GenerateDictionaryLoad(MacroAssembler* masm,
46 Label* miss_label,
47 Register elements,
48 Register name,
49 Register r0,
50 Register r1,
51 Register result) {
52 // Register use:
53 //
54 // elements - holds the property dictionary on entry and is unchanged.
55 //
56 // name - holds the name of the property on entry and is unchanged.
57 //
58 // r0 - used to hold the capacity of the property dictionary.
59 //
60 // r1 - used to hold the index into the property dictionary.
61 //
62 // result - holds the result on exit if the load succeeded.
63
64 Label done;
65
66 // Probe the dictionary.
67 NameDictionaryLookupStub::GeneratePositiveLookup(masm,
68 miss_label,
69 &done,
70 elements,
71 name,
72 r0,
73 r1);
74
75 // If probing finds an entry in the dictionary, r1 contains the
76 // index into the dictionary. Check that the value is a normal
77 // property.
78 __ bind(&done);
79 const int kElementsStartOffset =
80 NameDictionary::kHeaderSize +
81 NameDictionary::kElementsStartIndex * kPointerSize;
82 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
83 __ Test(Operand(elements, r1, times_pointer_size,
84 kDetailsOffset - kHeapObjectTag),
85 Smi::FromInt(PropertyDetails::TypeField::kMask));
86 __ j(not_zero, miss_label);
87
88 // Get the value at the masked, scaled index.
89 const int kValueOffset = kElementsStartOffset + kPointerSize;
90 __ movp(result,
91 Operand(elements, r1, times_pointer_size,
92 kValueOffset - kHeapObjectTag));
93 }
94
95
96 // Helper function used to store a property to a dictionary backing
97 // storage. This function may fail to store a property even though it
98 // is in the dictionary, so code at miss_label must always call a
99 // backup property store that is complete. This function is safe to
100 // call if name is not an internalized string, and will jump to the miss_label
101 // in that case. The generated code assumes that the receiver has slow
102 // properties, is not a global object and does not have interceptors.
103 static void GenerateDictionaryStore(MacroAssembler* masm,
104 Label* miss_label,
105 Register elements,
106 Register name,
107 Register value,
108 Register scratch0,
109 Register scratch1) {
110 // Register use:
111 //
112 // elements - holds the property dictionary on entry and is clobbered.
113 //
114 // name - holds the name of the property on entry and is unchanged.
115 //
116 // value - holds the value to store and is unchanged.
117 //
118 // scratch0 - used during the positive dictionary lookup and is clobbered.
119 //
120 // scratch1 - used for index into the property dictionary and is clobbered.
121 Label done;
122
123 // Probe the dictionary.
124 NameDictionaryLookupStub::GeneratePositiveLookup(masm,
125 miss_label,
126 &done,
127 elements,
128 name,
129 scratch0,
130 scratch1);
131
132 // If probing finds an entry in the dictionary, scratch0 contains the
133 // index into the dictionary. Check that the value is a normal
134 // property that is not read only.
135 __ bind(&done);
136 const int kElementsStartOffset =
137 NameDictionary::kHeaderSize +
138 NameDictionary::kElementsStartIndex * kPointerSize;
139 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
140 const int kTypeAndReadOnlyMask =
141 (PropertyDetails::TypeField::kMask |
142 PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
143 __ Test(Operand(elements,
144 scratch1,
145 times_pointer_size,
146 kDetailsOffset - kHeapObjectTag),
147 Smi::FromInt(kTypeAndReadOnlyMask));
148 __ j(not_zero, miss_label);
149
150 // Store the value at the masked, scaled index.
151 const int kValueOffset = kElementsStartOffset + kPointerSize;
152 __ leap(scratch1, Operand(elements,
153 scratch1,
154 times_pointer_size,
155 kValueOffset - kHeapObjectTag));
156 __ movp(Operand(scratch1, 0), value);
157
158 // Update write barrier. Make sure not to clobber the value.
159 __ movp(scratch0, value);
160 __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
161 }
162
163
164 // Checks the receiver for special cases (value type, slow case bits).
165 // Falls through for regular JS object.
166 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
167 Register receiver,
168 Register map,
169 int interceptor_bit,
170 Label* slow) {
171 // Register use:
172 // receiver - holds the receiver and is unchanged.
173 // Scratch registers:
174 // map - used to hold the map of the receiver.
175
176 // Check that the object isn't a smi.
177 __ JumpIfSmi(receiver, slow);
178
179 // Check that the object is some kind of JS object EXCEPT JS Value type.
180 // In the case that the object is a value-wrapper object,
181 // we enter the runtime system to make sure that indexing
182 // into string objects work as intended.
183 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);
184 __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
185 __ j(below, slow);
186
187 // Check bit field.
188 __ testb(FieldOperand(map, Map::kBitFieldOffset),
189 Immediate((1 << Map::kIsAccessCheckNeeded) |
190 (1 << interceptor_bit)));
191 __ j(not_zero, slow);
192 }
193
194
195 // Loads an indexed element from a fast case array.
196 // If not_fast_array is NULL, doesn't perform the elements map check.
197 static void GenerateFastArrayLoad(MacroAssembler* masm,
198 Register receiver,
199 Register key,
200 Register elements,
201 Register scratch,
202 Register result,
203 Label* not_fast_array,
204 Label* out_of_range) {
205 // Register use:
206 //
207 // receiver - holds the receiver on entry.
208 // Unchanged unless 'result' is the same register.
209 //
210 // key - holds the smi key on entry.
211 // Unchanged unless 'result' is the same register.
212 //
213 // elements - holds the elements of the receiver on exit.
214 //
215 // result - holds the result on exit if the load succeeded.
216 // Allowed to be the the same as 'receiver' or 'key'.
217 // Unchanged on bailout so 'receiver' and 'key' can be safely
218 // used by further computation.
219 //
220 // Scratch registers:
221 //
222 // scratch - used to hold elements of the receiver and the loaded value.
223
224 __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset));
225 if (not_fast_array != NULL) {
226 // Check that the object is in fast mode and writable.
227 __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
228 Heap::kFixedArrayMapRootIndex);
229 __ j(not_equal, not_fast_array);
230 } else {
231 __ AssertFastElements(elements);
232 }
233 // Check that the key (index) is within bounds.
234 __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
235 // Unsigned comparison rejects negative indices.
236 __ j(above_equal, out_of_range);
237 // Fast case: Do the load.
238 SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
239 __ movp(scratch, FieldOperand(elements,
240 index.reg,
241 index.scale,
242 FixedArray::kHeaderSize));
243 __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
244 // In case the loaded value is the_hole we have to consult GetProperty
245 // to ensure the prototype chain is searched.
246 __ j(equal, out_of_range);
247 if (!result.is(scratch)) {
248 __ movp(result, scratch);
249 }
250 }
251
252
253 // Checks whether a key is an array index string or a unique name.
254 // Falls through if the key is a unique name.
255 static void GenerateKeyNameCheck(MacroAssembler* masm,
256 Register key,
257 Register map,
258 Register hash,
259 Label* index_string,
260 Label* not_unique) {
261 // Register use:
262 // key - holds the key and is unchanged. Assumed to be non-smi.
263 // Scratch registers:
264 // map - used to hold the map of the key.
265 // hash - used to hold the hash of the key.
266 Label unique;
267 __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
268 __ j(above, not_unique);
269 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
270 __ j(equal, &unique);
271
272 // Is the string an array index, with cached numeric value?
273 __ movl(hash, FieldOperand(key, Name::kHashFieldOffset));
274 __ testl(hash, Immediate(Name::kContainsCachedArrayIndexMask));
275 __ j(zero, index_string); // The value in hash is used at jump target.
276
277 // Is the string internalized? We already know it's a string so a single
278 // bit test is enough.
279 STATIC_ASSERT(kNotInternalizedTag != 0);
280 __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
281 Immediate(kIsNotInternalizedMask));
282 __ j(not_zero, not_unique);
283
284 __ bind(&unique);
285 }
286
287
288
289 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
290 // The return address is on the stack.
291 Label slow, check_name, index_smi, index_name, property_array_property;
292 Label probe_dictionary, check_number_dictionary;
293
294 Register receiver = ReceiverRegister();
295 Register key = NameRegister();
296 DCHECK(receiver.is(rdx));
297 DCHECK(key.is(rcx));
298
299 // Check that the key is a smi.
300 __ JumpIfNotSmi(key, &check_name);
301 __ bind(&index_smi);
302 // Now the key is known to be a smi. This place is also jumped to from below
303 // where a numeric string is converted to a smi.
304
305 GenerateKeyedLoadReceiverCheck(
306 masm, receiver, rax, Map::kHasIndexedInterceptor, &slow);
307
308 // Check the receiver's map to see if it has fast elements.
309 __ CheckFastElements(rax, &check_number_dictionary);
310
311 GenerateFastArrayLoad(masm,
312 receiver,
313 key,
314 rax,
315 rbx,
316 rax,
317 NULL,
318 &slow);
319 Counters* counters = masm->isolate()->counters();
320 __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
321 __ ret(0);
322
323 __ bind(&check_number_dictionary);
324 __ SmiToInteger32(rbx, key);
325 __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset));
326
327 // Check whether the elements is a number dictionary.
328 // rbx: key as untagged int32
329 // rax: elements
330 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
331 Heap::kHashTableMapRootIndex);
332 __ j(not_equal, &slow);
333 __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax);
334 __ ret(0);
335
336 __ bind(&slow);
337 // Slow case: Jump to runtime.
338 __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
339 GenerateRuntimeGetProperty(masm);
340
341 __ bind(&check_name);
342 GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow);
343
344 GenerateKeyedLoadReceiverCheck(
345 masm, receiver, rax, Map::kHasNamedInterceptor, &slow);
346
347 // If the receiver is a fast-case object, check the keyed lookup
348 // cache. Otherwise probe the dictionary leaving result in key.
349 __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
350 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
351 Heap::kHashTableMapRootIndex);
352 __ j(equal, &probe_dictionary);
353
354 // Load the map of the receiver, compute the keyed lookup cache hash
355 // based on 32 bits of the map pointer and the string hash.
356 __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
357 __ movl(rax, rbx);
358 __ shrl(rax, Immediate(KeyedLookupCache::kMapHashShift));
359 __ movl(rdi, FieldOperand(key, String::kHashFieldOffset));
360 __ shrl(rdi, Immediate(String::kHashShift));
361 __ xorp(rax, rdi);
362 int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
363 __ andp(rax, Immediate(mask));
364
365 // Load the key (consisting of map and internalized string) from the cache and
366 // check for match.
367 Label load_in_object_property;
368 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
369 Label hit_on_nth_entry[kEntriesPerBucket];
370 ExternalReference cache_keys
371 = ExternalReference::keyed_lookup_cache_keys(masm->isolate());
372
373 for (int i = 0; i < kEntriesPerBucket - 1; i++) {
374 Label try_next_entry;
375 __ movp(rdi, rax);
376 __ shlp(rdi, Immediate(kPointerSizeLog2 + 1));
377 __ LoadAddress(kScratchRegister, cache_keys);
378 int off = kPointerSize * i * 2;
379 __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
380 __ j(not_equal, &try_next_entry);
381 __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
382 __ j(equal, &hit_on_nth_entry[i]);
383 __ bind(&try_next_entry);
384 }
385
386 int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
387 __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
388 __ j(not_equal, &slow);
389 __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
390 __ j(not_equal, &slow);
391
392 // Get field offset, which is a 32-bit integer.
393 ExternalReference cache_field_offsets
394 = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
395
396 // Hit on nth entry.
397 for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
398 __ bind(&hit_on_nth_entry[i]);
399 if (i != 0) {
400 __ addl(rax, Immediate(i));
401 }
402 __ LoadAddress(kScratchRegister, cache_field_offsets);
403 __ movl(rdi, Operand(kScratchRegister, rax, times_4, 0));
404 __ movzxbp(rax, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
405 __ subp(rdi, rax);
406 __ j(above_equal, &property_array_property);
407 if (i != 0) {
408 __ jmp(&load_in_object_property);
409 }
410 }
411
412 // Load in-object property.
413 __ bind(&load_in_object_property);
414 __ movzxbp(rax, FieldOperand(rbx, Map::kInstanceSizeOffset));
415 __ addp(rax, rdi);
416 __ movp(rax, FieldOperand(receiver, rax, times_pointer_size, 0));
417 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
418 __ ret(0);
419
420 // Load property array property.
421 __ bind(&property_array_property);
422 __ movp(rax, FieldOperand(receiver, JSObject::kPropertiesOffset));
423 __ movp(rax, FieldOperand(rax, rdi, times_pointer_size,
424 FixedArray::kHeaderSize));
425 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
426 __ ret(0);
427
428 // Do a quick inline probe of the receiver's dictionary, if it
429 // exists.
430 __ bind(&probe_dictionary);
431 // rbx: elements
432
433 __ movp(rax, FieldOperand(receiver, JSObject::kMapOffset));
434 __ movb(rax, FieldOperand(rax, Map::kInstanceTypeOffset));
435 GenerateGlobalInstanceTypeCheck(masm, rax, &slow);
436
437 GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax);
438 __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
439 __ ret(0);
440
441 __ bind(&index_name);
442 __ IndexFromHash(rbx, key);
443 __ jmp(&index_smi);
444 }
445
446
447 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
448 // Return address is on the stack.
449 Label miss;
450
451 Register receiver = ReceiverRegister();
452 Register index = NameRegister();
453 Register scratch = rbx;
454 Register result = rax;
455 DCHECK(!scratch.is(receiver) && !scratch.is(index));
456
457 StringCharAtGenerator char_at_generator(receiver,
458 index,
459 scratch,
460 result,
461 &miss, // When not a string.
462 &miss, // When not a number.
463 &miss, // When index out of range.
464 STRING_INDEX_IS_ARRAY_INDEX);
465 char_at_generator.GenerateFast(masm);
466 __ ret(0);
467
468 StubRuntimeCallHelper call_helper;
469 char_at_generator.GenerateSlow(masm, call_helper);
470
471 __ bind(&miss);
472 GenerateMiss(masm);
473 }
474
475
476 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
477 // Return address is on the stack.
478 Label slow;
479
480 Register receiver = ReceiverRegister();
481 Register key = NameRegister();
482 Register scratch = rax;
483 DCHECK(!scratch.is(receiver) && !scratch.is(key));
484
485 // Check that the receiver isn't a smi.
486 __ JumpIfSmi(receiver, &slow);
487
488 // Check that the key is an array index, that is Uint32.
489 STATIC_ASSERT(kSmiValueSize <= 32);
490 __ JumpUnlessNonNegativeSmi(key, &slow);
491
492 // Get the map of the receiver.
493 __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
494
495 // Check that it has indexed interceptor and access checks
496 // are not enabled for this object.
497 __ movb(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
498 __ andb(scratch, Immediate(kSlowCaseBitFieldMask));
499 __ cmpb(scratch, Immediate(1 << Map::kHasIndexedInterceptor));
500 __ j(not_zero, &slow);
501
502 // Everything is fine, call runtime.
503 __ PopReturnAddressTo(scratch);
504 __ Push(receiver); // receiver
505 __ Push(key); // key
506 __ PushReturnAddressFrom(scratch);
507
508 // Perform tail call to the entry.
509 __ TailCallExternalReference(
510 ExternalReference(IC_Utility(kLoadElementWithInterceptor),
511 masm->isolate()),
512 2, 1);
513
514 __ bind(&slow);
515 GenerateMiss(masm);
516 }
517
518
519 static void KeyedStoreGenerateGenericHelper(
520 MacroAssembler* masm,
521 Label* fast_object,
522 Label* fast_double,
523 Label* slow,
524 KeyedStoreCheckMap check_map,
525 KeyedStoreIncrementLength increment_length) {
526 Label transition_smi_elements;
527 Label finish_object_store, non_double_value, transition_double_elements;
528 Label fast_double_without_map_check;
529 Register receiver = KeyedStoreIC::ReceiverRegister();
530 Register key = KeyedStoreIC::NameRegister();
531 Register value = KeyedStoreIC::ValueRegister();
532 DCHECK(receiver.is(rdx));
533 DCHECK(key.is(rcx));
534 DCHECK(value.is(rax));
535 // Fast case: Do the store, could be either Object or double.
536 __ bind(fast_object);
537 // rbx: receiver's elements array (a FixedArray)
538 // receiver is a JSArray.
539 // r9: map of receiver
540 if (check_map == kCheckMap) {
541 __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
542 __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
543 __ j(not_equal, fast_double);
544 }
545
546 // HOLECHECK: guards "A[i] = V"
547 // We have to go to the runtime if the current value is the hole because
548 // there may be a callback on the element
549 Label holecheck_passed1;
550 __ movp(kScratchRegister, FieldOperand(rbx,
551 key,
552 times_pointer_size,
553 FixedArray::kHeaderSize));
554 __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
555 __ j(not_equal, &holecheck_passed1);
556 __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);
557
558 __ bind(&holecheck_passed1);
559
560 // Smi stores don't require further checks.
561 Label non_smi_value;
562 __ JumpIfNotSmi(value, &non_smi_value);
563 if (increment_length == kIncrementLength) {
564 // Add 1 to receiver->length.
565 __ leal(rdi, Operand(key, 1));
566 __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
567 }
568 // It's irrelevant whether array is smi-only or not when writing a smi.
569 __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
570 value);
571 __ ret(0);
572
573 __ bind(&non_smi_value);
574 // Writing a non-smi, check whether array allows non-smi elements.
575 // r9: receiver's map
576 __ CheckFastObjectElements(r9, &transition_smi_elements);
577
578 __ bind(&finish_object_store);
579 if (increment_length == kIncrementLength) {
580 // Add 1 to receiver->length.
581 __ leal(rdi, Operand(key, 1));
582 __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
583 }
584 __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
585 value);
586 __ movp(rdx, value); // Preserve the value which is returned.
587 __ RecordWriteArray(
588 rbx, rdx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
589 __ ret(0);
590
591 __ bind(fast_double);
592 if (check_map == kCheckMap) {
593 // Check for fast double array case. If this fails, call through to the
594 // runtime.
595 // rdi: elements array's map
596 __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
597 __ j(not_equal, slow);
598 }
599
600 // HOLECHECK: guards "A[i] double hole?"
601 // We have to see if the double version of the hole is present. If so
602 // go to the runtime.
603 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
604 __ cmpl(FieldOperand(rbx, key, times_8, offset), Immediate(kHoleNanUpper32));
605 __ j(not_equal, &fast_double_without_map_check);
606 __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);
607
608 __ bind(&fast_double_without_map_check);
609 __ StoreNumberToDoubleElements(value, rbx, key, xmm0,
610 &transition_double_elements);
611 if (increment_length == kIncrementLength) {
612 // Add 1 to receiver->length.
613 __ leal(rdi, Operand(key, 1));
614 __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
615 }
616 __ ret(0);
617
618 __ bind(&transition_smi_elements);
619 __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
620
621 // Transition the array appropriately depending on the value type.
622 __ movp(r9, FieldOperand(value, HeapObject::kMapOffset));
623 __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
624 __ j(not_equal, &non_double_value);
625
626 // Value is a double. Transition FAST_SMI_ELEMENTS ->
627 // FAST_DOUBLE_ELEMENTS and complete the store.
628 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
629 FAST_DOUBLE_ELEMENTS,
630 rbx,
631 rdi,
632 slow);
633 AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS,
634 FAST_DOUBLE_ELEMENTS);
635 ElementsTransitionGenerator::GenerateSmiToDouble(
636 masm, receiver, key, value, rbx, mode, slow);
637 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
638 __ jmp(&fast_double_without_map_check);
639
640 __ bind(&non_double_value);
641 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
642 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
643 FAST_ELEMENTS,
644 rbx,
645 rdi,
646 slow);
647 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
648 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
649 masm, receiver, key, value, rbx, mode, slow);
650 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
651 __ jmp(&finish_object_store);
652
653 __ bind(&transition_double_elements);
654 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
655 // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
656 // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
657 __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
658 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
659 FAST_ELEMENTS,
660 rbx,
661 rdi,
662 slow);
663 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
664 ElementsTransitionGenerator::GenerateDoubleToObject(
665 masm, receiver, key, value, rbx, mode, slow);
666 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
667 __ jmp(&finish_object_store);
668 }
669
670
671 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
672 StrictMode strict_mode) {
673 // Return address is on the stack.
674 Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
675 Label fast_double, fast_double_grow;
676 Label array, extra, check_if_double_array;
677 Register receiver = ReceiverRegister();
678 Register key = NameRegister();
679 DCHECK(receiver.is(rdx));
680 DCHECK(key.is(rcx));
681
682 // Check that the object isn't a smi.
683 __ JumpIfSmi(receiver, &slow_with_tagged_index);
684 // Get the map from the receiver.
685 __ movp(r9, FieldOperand(receiver, HeapObject::kMapOffset));
686 // Check that the receiver does not require access checks and is not observed.
687 // The generic stub does not perform map checks or handle observed objects.
688 __ testb(FieldOperand(r9, Map::kBitFieldOffset),
689 Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
690 __ j(not_zero, &slow_with_tagged_index);
691 // Check that the key is a smi.
692 __ JumpIfNotSmi(key, &slow_with_tagged_index);
693 __ SmiToInteger32(key, key);
694
695 __ CmpInstanceType(r9, JS_ARRAY_TYPE);
696 __ j(equal, &array);
697 // Check that the object is some kind of JSObject.
698 __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
699 __ j(below, &slow);
700
701 // Object case: Check key against length in the elements array.
702 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
703 // Check array bounds.
704 __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
705 // rbx: FixedArray
706 __ j(above, &fast_object);
707
708 // Slow case: call runtime.
709 __ bind(&slow);
710 __ Integer32ToSmi(key, key);
711 __ bind(&slow_with_tagged_index);
712 GenerateRuntimeSetProperty(masm, strict_mode);
713 // Never returns to here.
714
715 // Extra capacity case: Check if there is extra capacity to
716 // perform the store and update the length. Used for adding one
717 // element to the array by writing to array[array.length].
718 __ bind(&extra);
719 // receiver is a JSArray.
720 // rbx: receiver's elements array (a FixedArray)
721 // flags: smicompare (receiver.length(), rbx)
722 __ j(not_equal, &slow); // do not leave holes in the array
723 __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
724 __ j(below_equal, &slow);
725 // Increment index to get new length.
726 __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
727 __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
728 __ j(not_equal, &check_if_double_array);
729 __ jmp(&fast_object_grow);
730
731 __ bind(&check_if_double_array);
732 // rdi: elements array's map
733 __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
734 __ j(not_equal, &slow);
735 __ jmp(&fast_double_grow);
736
737 // Array case: Get the length and the elements array from the JS
738 // array. Check that the array is in fast mode (and writable); if it
739 // is the length is always a smi.
740 __ bind(&array);
741 // receiver is a JSArray.
742 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
743
744 // Check the key against the length in the array, compute the
745 // address to store into and fall through to fast case.
746 __ SmiCompareInteger32(FieldOperand(receiver, JSArray::kLengthOffset), key);
747 __ j(below_equal, &extra);
748
749 KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
750 &slow, kCheckMap, kDontIncrementLength);
751 KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
752 &slow, kDontCheckMap, kIncrementLength);
753 }
754
755
756 static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
757 Register object,
758 Register key,
759 Register scratch1,
760 Register scratch2,
761 Register scratch3,
762 Label* unmapped_case,
763 Label* slow_case) {
764 Heap* heap = masm->isolate()->heap();
765
766 // Check that the receiver is a JSObject. Because of the elements
767 // map check later, we do not need to check for interceptors or
768 // whether it requires access checks.
769 __ JumpIfSmi(object, slow_case);
770 // Check that the object is some kind of JSObject.
771 __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
772 __ j(below, slow_case);
773
774 // Check that the key is a positive smi.
775 Condition check = masm->CheckNonNegativeSmi(key);
776 __ j(NegateCondition(check), slow_case);
777
778 // Load the elements into scratch1 and check its map. If not, jump
779 // to the unmapped lookup with the parameter map in scratch1.
780 Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
781 __ movp(scratch1, FieldOperand(object, JSObject::kElementsOffset));
782 __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
783
784 // Check if element is in the range of mapped arguments.
785 __ movp(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
786 __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
787 __ cmpp(key, scratch2);
788 __ j(greater_equal, unmapped_case);
789
790 // Load element index and check whether it is the hole.
791 const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
792 __ SmiToInteger64(scratch3, key);
793 __ movp(scratch2, FieldOperand(scratch1,
794 scratch3,
795 times_pointer_size,
796 kHeaderSize));
797 __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
798 __ j(equal, unmapped_case);
799
800 // Load value from context and return it. We can reuse scratch1 because
801 // we do not jump to the unmapped lookup (which requires the parameter
802 // map in scratch1).
803 __ movp(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
804 __ SmiToInteger64(scratch3, scratch2);
805 return FieldOperand(scratch1,
806 scratch3,
807 times_pointer_size,
808 Context::kHeaderSize);
809 }
810
811
812 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
813 Register key,
814 Register parameter_map,
815 Register scratch,
816 Label* slow_case) {
817 // Element is in arguments backing store, which is referenced by the
818 // second element of the parameter_map. The parameter_map register
819 // must be loaded with the parameter map of the arguments object and is
820 // overwritten.
821 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
822 Register backing_store = parameter_map;
823 __ movp(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
824 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
825 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
826 __ movp(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
827 __ cmpp(key, scratch);
828 __ j(greater_equal, slow_case);
829 __ SmiToInteger64(scratch, key);
830 return FieldOperand(backing_store,
831 scratch,
832 times_pointer_size,
833 FixedArray::kHeaderSize);
834 }
835
836
// Keyed load from a sloppy-mode arguments object: first consult the
// parameter map (aliased formal parameters live in the context), then the
// arguments backing store, and finally fall back to the miss handler.
void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // The return address is on the stack.
  Register receiver = ReceiverRegister();
  Register key = NameRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));

  Label slow, notin;
  // Mapped case: the returned Operand addresses the context slot holding
  // the parameter's current value. Result is returned in rax.
  Operand mapped_location =
      GenerateMappedArgumentsLookup(
          masm, receiver, key, rbx, rax, rdi, &notin, &slow);
  __ movp(rax, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, key, rbx, rax, &slow);
  // The hole in the backing store means there is no element there; defer
  // to the miss handler rather than returning the hole.
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movp(rax, unmapped_location);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm);
}
861
862
863 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
864 // The return address is on the stack.
865 Label slow, notin;
866 Register receiver = ReceiverRegister();
867 Register name = NameRegister();
868 Register value = ValueRegister();
869 DCHECK(receiver.is(rdx));
870 DCHECK(name.is(rcx));
871 DCHECK(value.is(rax));
872
873 Operand mapped_location = GenerateMappedArgumentsLookup(
874 masm, receiver, name, rbx, rdi, r8, &notin, &slow);
875 __ movp(mapped_location, value);
876 __ leap(r9, mapped_location);
877 __ movp(r8, value);
878 __ RecordWrite(rbx,
879 r9,
880 r8,
881 kDontSaveFPRegs,
882 EMIT_REMEMBERED_SET,
883 INLINE_SMI_CHECK);
884 __ Ret();
885 __ bind(&notin);
886 // The unmapped lookup expects that the parameter map is in rbx.
887 Operand unmapped_location =
888 GenerateUnmappedArgumentsLookup(masm, name, rbx, rdi, &slow);
889 __ movp(unmapped_location, value);
890 __ leap(r9, unmapped_location);
891 __ movp(r8, value);
892 __ RecordWrite(rbx,
893 r9,
894 r8,
895 kDontSaveFPRegs,
896 EMIT_REMEMBERED_SET,
897 INLINE_SMI_CHECK);
898 __ Ret();
899 __ bind(&slow);
900 GenerateMiss(masm);
901 }
902
903
// Megamorphic load: probe the global stub cache for a handler keyed on
// (receiver map, name). On a miss the probe falls through and the generic
// miss handler is invoked.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // The return address is on the stack.
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(name.is(rcx));

  // Probe the stub cache. The flags select LOAD_IC handlers; type and
  // holder information is stripped because the cache is keyed without it.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, receiver, name, rbx, rax);

  // Cache miss: jump to the runtime via the miss handler.
  GenerateMiss(masm);
}
919
920
// Load a property from a receiver whose properties live in a normalized
// (dictionary-mode) property store. The result is returned in rax.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = rax;
  // rax doubles as the result register, so it must not alias the inputs.
  DCHECK(!dictionary.is(ReceiverRegister()));
  DCHECK(!dictionary.is(NameRegister()));

  Label slow;

  __ movp(dictionary,
          FieldOperand(ReceiverRegister(), JSObject::kPropertiesOffset));
  // On success the loaded value ends up in rax and we return directly.
  GenerateDictionaryLoad(masm, &slow, dictionary, NameRegister(), rbx, rdi,
                         rax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);
}
938
939
940 // A register that isn't one of the parameters to the load ic.
941 static const Register LoadIC_TempRegister() { return rbx; }
942
943
944 static const Register KeyedLoadIC_TempRegister() {
945 return rbx;
946 }
947
948
// Miss handler trampoline: push (receiver, name) as runtime arguments and
// tail-call the LoadIC_Miss C++ entry.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  // Pop the return address so the two arguments can be pushed beneath it,
  // then restore it on top for the tail call.
  __ PopReturnAddressTo(LoadIC_TempRegister());
  __ Push(ReceiverRegister());  // receiver
  __ Push(NameRegister());  // name
  __ PushReturnAddressFrom(LoadIC_TempRegister());

  // Perform tail call to the entry (2 arguments, 1 result).
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
965
966
// Slow path that is not a miss: push (receiver, name) and tail-call the
// generic Runtime::kGetProperty function.
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is on the stack.

  // Re-insert the return address above the two pushed arguments.
  __ PopReturnAddressTo(LoadIC_TempRegister());
  __ Push(ReceiverRegister());  // receiver
  __ Push(NameRegister());  // name
  __ PushReturnAddressFrom(LoadIC_TempRegister());

  // Perform tail call to the entry (2 arguments, 1 result).
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
978
979
// Miss handler trampoline for keyed loads: push (receiver, name) and
// tail-call the KeyedLoadIC_Miss C++ entry.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  // Re-insert the return address above the two pushed arguments.
  __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
  __ Push(ReceiverRegister());  // receiver
  __ Push(NameRegister());  // name
  __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());

  // Perform tail call to the entry (2 arguments, 1 result).
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
995
996
// IC register specifications: the fixed calling convention between
// compiled code and the load IC stubs on x64.
const Register LoadIC::ReceiverRegister() { return rdx; }
const Register LoadIC::NameRegister() { return rcx; }
1000
1001
// Register carrying the feedback-vector slot; only meaningful when
// vector-based ICs are enabled.
const Register LoadIC::SlotRegister() {
  DCHECK(FLAG_vector_ics);
  return rax;
}
1006
1007
// Register carrying the type-feedback vector; only meaningful when
// vector-based ICs are enabled.
const Register LoadIC::VectorRegister() {
  DCHECK(FLAG_vector_ics);
  return rbx;
}
1012
1013
// Calling convention for the store ICs: receiver in rdx, property name in
// rcx, value to store in rax (which is also the stub's result register).
const Register StoreIC::ReceiverRegister() { return rdx; }
const Register StoreIC::NameRegister() { return rcx; }
const Register StoreIC::ValueRegister() { return rax; }
1017
1018
// Register used by keyed-store stubs to hold a map during dispatch.
const Register KeyedStoreIC::MapRegister() {
  return rbx;
}
1022
1023
// Slow path that is not a miss: push (receiver, name) and tail-call the
// generic Runtime::kKeyedGetProperty function.
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is on the stack.

  // Re-insert the return address above the two pushed arguments.
  __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
  __ Push(ReceiverRegister());  // receiver
  __ Push(NameRegister());  // name
  __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());

  // Perform tail call to the entry (2 arguments, 1 result).
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
1035
1036
// Megamorphic store: probe the global stub cache for a STORE_IC handler
// keyed on (receiver map, name); on a miss, fall through to the generic
// miss handler.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // The return address is on the stack.

  // Get the receiver from the stack and probe the stub cache. Type and
  // holder information is stripped because the cache is keyed without it.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, ReceiverRegister(), NameRegister(), rbx, no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
1049
1050
// Pushes the three store-IC arguments (receiver, name, value) beneath the
// return address, as expected by the store miss/slow runtime entries.
static void StoreIC_PushArgs(MacroAssembler* masm) {
  Register receiver = StoreIC::ReceiverRegister();
  Register name = StoreIC::NameRegister();
  Register value = StoreIC::ValueRegister();

  // rbx is used to park the return address, so it must not alias any of
  // the argument registers.
  DCHECK(!rbx.is(receiver) && !rbx.is(name) && !rbx.is(value));

  __ PopReturnAddressTo(rbx);
  __ Push(receiver);
  __ Push(name);
  __ Push(value);
  __ PushReturnAddressFrom(rbx);
}
1064
1065
// Miss handler trampoline: push (receiver, name, value) and tail-call the
// StoreIC_Miss C++ entry.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry (3 arguments, 1 result).
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1075
1076
// Store to a receiver whose properties live in a normalized
// (dictionary-mode) property store; falls back to the miss handler when
// the dictionary store cannot be performed inline.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  Register value = ValueRegister();
  Register dictionary = rbx;

  Label miss;

  __ movp(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
  // r8/r9 are scratch registers for the dictionary store.
  GenerateDictionaryStore(masm, &miss, dictionary, name, value, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  // The stored value (rax) is the stub's result.
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}
1095
1096
// Tail-calls Runtime::kSetProperty with (receiver, name, value,
// strict_mode). |strict_mode| is baked into the generated code as a smi.
void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                         StrictMode strict_mode) {
  // Return address is on the stack. rbx parks it while the arguments are
  // pushed, so it must not alias any of the argument registers.
  DCHECK(!rbx.is(ReceiverRegister()) && !rbx.is(NameRegister()) &&
         !rbx.is(ValueRegister()));

  __ PopReturnAddressTo(rbx);
  __ Push(ReceiverRegister());
  __ Push(NameRegister());
  __ Push(ValueRegister());
  __ Push(Smi::FromInt(strict_mode));
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine (4 arguments, 1 result).
  __ TailCallRuntime(Runtime::kSetProperty, 4, 1);
}
1113
1114
// Keyed-store variant of GenerateRuntimeSetProperty: identical argument
// layout, also tail-calling Runtime::kSetProperty with the strict mode
// baked in as a smi.
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictMode strict_mode) {
  // Return address is on the stack. rbx parks it while the arguments are
  // pushed, so it must not alias any of the argument registers.
  DCHECK(!rbx.is(ReceiverRegister()) && !rbx.is(NameRegister()) &&
         !rbx.is(ValueRegister()));

  __ PopReturnAddressTo(rbx);
  __ Push(ReceiverRegister());
  __ Push(NameRegister());
  __ Push(ValueRegister());
  __ Push(Smi::FromInt(strict_mode));  // Strict mode.
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine (4 arguments, 1 result).
  __ TailCallRuntime(Runtime::kSetProperty, 4, 1);
}
1131
1132
// Slow-path trampoline: push (receiver, name, value) and tail-call the
// StoreIC_Slow C++ entry (slow but not a miss, so no IC state update).
void StoreIC::GenerateSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine (3 arguments, 1 result).
  ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1141
1142
// Slow-path trampoline: push (receiver, name, value) and tail-call the
// KeyedStoreIC_Slow C++ entry (slow but not a miss).
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine (3 arguments, 1 result).
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1151
1152
// Miss handler trampoline: push (receiver, name, value) and tail-call the
// KeyedStoreIC_Miss C++ entry.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine (3 arguments, 1 result).
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1162
1163
1164 #undef __
1165
1166
1167 Condition CompareIC::ComputeCondition(Token::Value op) {
1168 switch (op) {
1169 case Token::EQ_STRICT:
1170 case Token::EQ:
1171 return equal;
1172 case Token::LT:
1173 return less;
1174 case Token::GT:
1175 return greater;
1176 case Token::LTE:
1177 return less_equal;
1178 case Token::GTE:
1179 return greater_equal;
1180 default:
1181 UNREACHABLE();
1182 return no_condition;
1183 }
1184 }
1185
1186
1187 bool CompareIC::HasInlinedSmiCode(Address address) {
1188 // The address of the instruction following the call.
1189 Address test_instruction_address =
1190 address + Assembler::kCallTargetAddressOffset;
1191
1192 // If the instruction following the call is not a test al, nothing
1193 // was inlined.
1194 return *test_instruction_address == Assembler::kTestAlByte;
1195 }
1196
1197
// Enables or disables the inlined smi check associated with the IC call
// at |address| by patching the opcode of a short conditional jump.
//
// The code after the IC call contains a "test al, <delta>" marker whose
// immediate byte encodes the distance back to the jump to patch; a nop
// byte instead of "test al" means there is no inlined smi code at this
// site and nothing is patched.
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    DCHECK(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, test=%p, delta=%d\n",
           address, test_instruction_address, delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that. Only the single condition byte of the jcc
  // instruction is rewritten; the jump target is unchanged.
  Address jmp_address = test_instruction_address - delta;
  // Sanity-check the current opcode matches the direction we are patching
  // in: carry-based jumps when enabling, zero-based jumps when disabling.
  DCHECK((check == ENABLE_INLINED_SMI_CHECK)
         ? (*jmp_address == Assembler::kJncShortOpcode ||
            *jmp_address == Assembler::kJcShortOpcode)
         : (*jmp_address == Assembler::kJnzShortOpcode ||
            *jmp_address == Assembler::kJzShortOpcode));
  // Preserve the polarity (jnc<->jnz, jc<->jz) while swapping the tested
  // flag between carry and zero.
  Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
      ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
      : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
1233
1234
1235 } } // namespace v8::internal
1236
1237 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/full-codegen-x64.cc ('k') | src/x64/lithium-codegen-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698