Chromium Code Reviews
Unified Diff: src/x64/ic-x64.cc

Issue 338963003: KeyedLoadIC should have same register spec as LoadIC. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Last comment response. Created 6 years, 5 months ago
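What the change amounts to: before this patch, LoadIC expected its receiver in rax and its name in rcx, while KeyedLoadIC used rdx for the receiver and rax for the key; afterwards both ICs take the receiver in rdx and the name/key in rcx (see the "// IC register specifications" hunk below), and the sibling changes to src/x64/full-codegen-x64.cc and src/x64/lithium-codegen-x64.cc in this CL update the callers accordingly. Below is a minimal standalone C++ sketch of the resulting convention, with illustrative names only, not the actual V8 classes:

// Standalone sketch, not V8 source: models the register convention this
// patch establishes on x64. After the change KeyedLoadIC simply reuses
// LoadIC's specification instead of defining its own.
#include <cassert>

enum class Reg { rax, rbx, rcx, rdx };

struct LoadICSpec {
  static Reg ReceiverRegister() { return Reg::rdx; }  // was rax before this patch
  static Reg NameRegister() { return Reg::rcx; }
};

// Previously KeyedLoadIC declared its own pair (rdx/rax); now it shares LoadIC's.
using KeyedLoadICSpec = LoadICSpec;

int main() {
  assert(LoadICSpec::ReceiverRegister() == KeyedLoadICSpec::ReceiverRegister());
  assert(LoadICSpec::NameRegister() == KeyedLoadICSpec::NameRegister());
  return 0;
}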
@@ -1,10 +1,10 @@
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_X64

 #include "src/codegen.h"
 #include "src/ic-inl.h"
(...skipping 309 matching lines...)
@@ -320,220 +320,213 @@
   __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
            Immediate(kIsNotInternalizedMask));
   __ j(not_zero, not_unique);

   __ bind(&unique);
 }



 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  // -- rax : key
-  // -- rdx : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  ASSERT(rdx.is(ReceiverRegister()));
-  ASSERT(rax.is(NameRegister()));
+  // The return address is on the stack.
   Label slow, check_name, index_smi, index_name, property_array_property;
   Label probe_dictionary, check_number_dictionary;

+  Register receiver = ReceiverRegister();
+  Register key = NameRegister();
+  ASSERT(receiver.is(rdx));
+  ASSERT(key.is(rcx));
+
   // Check that the key is a smi.
-  __ JumpIfNotSmi(rax, &check_name);
+  __ JumpIfNotSmi(key, &check_name);
   __ bind(&index_smi);
   // Now the key is known to be a smi. This place is also jumped to from below
   // where a numeric string is converted to a smi.

   GenerateKeyedLoadReceiverCheck(
-      masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);
+      masm, receiver, rax, Map::kHasIndexedInterceptor, &slow);

   // Check the receiver's map to see if it has fast elements.
-  __ CheckFastElements(rcx, &check_number_dictionary);
+  __ CheckFastElements(rax, &check_number_dictionary);

   GenerateFastArrayLoad(masm,
-                        rdx,
+                        receiver,
+                        key,
                         rax,
-                        rcx,
                         rbx,
                         rax,
                         NULL,
                         &slow);
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
   __ ret(0);

   __ bind(&check_number_dictionary);
-  __ SmiToInteger32(rbx, rax);
-  __ movp(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
+  __ SmiToInteger32(rbx, key);
+  __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset));

   // Check whether the elements is a number dictionary.
-  // rdx: receiver
-  // rax: key
   // rbx: key as untagged int32
-  // rcx: elements
-  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
+  // rax: elements
+  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                  Heap::kHashTableMapRootIndex);
   __ j(not_equal, &slow);
-  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
+  __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax);
   __ ret(0);

   __ bind(&slow);
   // Slow case: Jump to runtime.
-  // rdx: receiver
-  // rax: key
   __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
   GenerateRuntimeGetProperty(masm);

   __ bind(&check_name);
-  GenerateKeyNameCheck(masm, rax, rcx, rbx, &index_name, &slow);
+  GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow);

   GenerateKeyedLoadReceiverCheck(
-      masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);
+      masm, receiver, rax, Map::kHasNamedInterceptor, &slow);

   // If the receiver is a fast-case object, check the keyed lookup
-  // cache. Otherwise probe the dictionary leaving result in rcx.
-  __ movp(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
+  // cache. Otherwise probe the dictionary leaving result in key.
+  __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
   __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                  Heap::kHashTableMapRootIndex);
   __ j(equal, &probe_dictionary);

   // Load the map of the receiver, compute the keyed lookup cache hash
   // based on 32 bits of the map pointer and the string hash.
-  __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
-  __ movl(rcx, rbx);
-  __ shrl(rcx, Immediate(KeyedLookupCache::kMapHashShift));
-  __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
+  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
+  __ movl(rax, rbx);
+  __ shrl(rax, Immediate(KeyedLookupCache::kMapHashShift));
+  __ movl(rdi, FieldOperand(key, String::kHashFieldOffset));
   __ shrl(rdi, Immediate(String::kHashShift));
-  __ xorp(rcx, rdi);
+  __ xorp(rax, rdi);
   int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
-  __ andp(rcx, Immediate(mask));
+  __ andp(rax, Immediate(mask));

   // Load the key (consisting of map and internalized string) from the cache and
   // check for match.
   Label load_in_object_property;
   static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
   Label hit_on_nth_entry[kEntriesPerBucket];
   ExternalReference cache_keys
       = ExternalReference::keyed_lookup_cache_keys(masm->isolate());

   for (int i = 0; i < kEntriesPerBucket - 1; i++) {
     Label try_next_entry;
-    __ movp(rdi, rcx);
+    __ movp(rdi, rax);
     __ shlp(rdi, Immediate(kPointerSizeLog2 + 1));
     __ LoadAddress(kScratchRegister, cache_keys);
     int off = kPointerSize * i * 2;
     __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
     __ j(not_equal, &try_next_entry);
-    __ cmpp(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
+    __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
     __ j(equal, &hit_on_nth_entry[i]);
     __ bind(&try_next_entry);
   }

   int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
   __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
   __ j(not_equal, &slow);
-  __ cmpp(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
+  __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
   __ j(not_equal, &slow);

   // Get field offset, which is a 32-bit integer.
   ExternalReference cache_field_offsets
       = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

   // Hit on nth entry.
   for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
     __ bind(&hit_on_nth_entry[i]);
     if (i != 0) {
-      __ addl(rcx, Immediate(i));
+      __ addl(rax, Immediate(i));
     }
     __ LoadAddress(kScratchRegister, cache_field_offsets);
-    __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
-    __ movzxbp(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
-    __ subp(rdi, rcx);
+    __ movl(rdi, Operand(kScratchRegister, rax, times_4, 0));
+    __ movzxbp(rax, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
+    __ subp(rdi, rax);
     __ j(above_equal, &property_array_property);
     if (i != 0) {
       __ jmp(&load_in_object_property);
     }
   }

   // Load in-object property.
   __ bind(&load_in_object_property);
-  __ movzxbp(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
-  __ addp(rcx, rdi);
-  __ movp(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
+  __ movzxbp(rax, FieldOperand(rbx, Map::kInstanceSizeOffset));
+  __ addp(rax, rdi);
+  __ movp(rax, FieldOperand(receiver, rax, times_pointer_size, 0));
   __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
   __ ret(0);

   // Load property array property.
   __ bind(&property_array_property);
-  __ movp(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
+  __ movp(rax, FieldOperand(receiver, JSObject::kPropertiesOffset));
   __ movp(rax, FieldOperand(rax, rdi, times_pointer_size,
                             FixedArray::kHeaderSize));
   __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
   __ ret(0);

   // Do a quick inline probe of the receiver's dictionary, if it
   // exists.
   __ bind(&probe_dictionary);
-  // rdx: receiver
-  // rax: key
   // rbx: elements

-  __ movp(rcx, FieldOperand(rdx, JSObject::kMapOffset));
-  __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
-  GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
+  __ movp(rax, FieldOperand(receiver, JSObject::kMapOffset));
+  __ movb(rax, FieldOperand(rax, Map::kInstanceTypeOffset));
+  GenerateGlobalInstanceTypeCheck(masm, rax, &slow);

-  GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
+  GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax);
   __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
   __ ret(0);

   __ bind(&index_name);
-  __ IndexFromHash(rbx, rax);
+  __ IndexFromHash(rbx, key);
   __ jmp(&index_smi);
 }


 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
   // Return address is on the stack.
   Label miss;

   Register receiver = ReceiverRegister();
   Register index = NameRegister();
-  Register scratch = rcx;
+  Register scratch = rbx;
   Register result = rax;
   ASSERT(!scratch.is(receiver) && !scratch.is(index));

   StringCharAtGenerator char_at_generator(receiver,
                                           index,
                                           scratch,
                                           result,
                                           &miss,  // When not a string.
                                           &miss,  // When not a number.
                                           &miss,  // When index out of range.
                                           STRING_INDEX_IS_ARRAY_INDEX);
   char_at_generator.GenerateFast(masm);
   __ ret(0);

   StubRuntimeCallHelper call_helper;
   char_at_generator.GenerateSlow(masm, call_helper);

   __ bind(&miss);
   GenerateMiss(masm);
 }


 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
   // Return address is on the stack.
   Label slow;

   Register receiver = ReceiverRegister();
   Register key = NameRegister();
-  Register scratch = rcx;
+  Register scratch = rax;
   ASSERT(!scratch.is(receiver) && !scratch.is(key));

   // Check that the receiver isn't a smi.
   __ JumpIfSmi(receiver, &slow);

   // Check that the key is an array index, that is Uint32.
   STATIC_ASSERT(kSmiValueSize <= 32);
   __ JumpUnlessNonNegativeSmi(key, &slow);

   // Get the map of the receiver.
(...skipping 340 matching lines...)
@@ -880,37 +873,36 @@
   __ j(greater_equal, slow_case);
   __ SmiToInteger64(scratch, key);
   return FieldOperand(backing_store,
                       scratch,
                       times_pointer_size,
                       FixedArray::kHeaderSize);
 }


 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  // -- rax : key
-  // -- rdx : receiver
-  // -- rsp[0] : return address
-  // -----------------------------------
-  ASSERT(rdx.is(ReceiverRegister()));
-  ASSERT(rax.is(NameRegister()));
+  // The return address is on the stack.
+  Register receiver = ReceiverRegister();
+  Register key = NameRegister();
+  ASSERT(receiver.is(rdx));
+  ASSERT(key.is(rcx));
+
   Label slow, notin;
   Operand mapped_location =
       GenerateMappedArgumentsLookup(
-          masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
+          masm, receiver, key, rbx, rax, rdi, &notin, &slow);
   __ movp(rax, mapped_location);
   __ Ret();
   __ bind(&notin);
   // The unmapped lookup expects that the parameter map is in rbx.
   Operand unmapped_location =
-      GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
+      GenerateUnmappedArgumentsLookup(masm, key, rbx, rax, &slow);
   __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
   __ j(equal, &slow);
   __ movp(rax, unmapped_location);
   __ Ret();
   __ bind(&slow);
   GenerateMiss(masm);
 }


 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
(...skipping 29 matching lines...)
@@ -946,52 +938,50 @@
                  kDontSaveFPRegs,
                  EMIT_REMEMBERED_SET,
                  INLINE_SMI_CHECK);
   __ Ret();
   __ bind(&slow);
   GenerateMiss(masm);
 }


 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  // -- rax : receiver
-  // -- rcx : name
-  // -- rsp[0] : return address
-  // -----------------------------------
-  ASSERT(rax.is(ReceiverRegister()));
-  ASSERT(rcx.is(NameRegister()));
+  // The return address is on the stack.
+  Register receiver = ReceiverRegister();
+  Register name = NameRegister();
+  ASSERT(receiver.is(rdx));
+  ASSERT(name.is(rcx));

   // Probe the stub cache.
   Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC);
   masm->isolate()->stub_cache()->GenerateProbe(
-      masm, flags, rax, rcx, rbx, rdx);
+      masm, flags, receiver, name, rbx, rax);

   GenerateMiss(masm);
 }


 void LoadIC::GenerateNormal(MacroAssembler* masm) {
   // ----------- S t a t e -------------
-  // -- rax : receiver
+  // -- rdx : receiver
   // -- rcx : name
   // -- rsp[0] : return address
   // -----------------------------------
-  ASSERT(rax.is(ReceiverRegister()));
+  ASSERT(rdx.is(ReceiverRegister()));
   ASSERT(rcx.is(NameRegister()));
   Label miss, slow;

-  GenerateNameDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);
+  GenerateNameDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);

-  // rdx: elements
+  // rax: elements
   // Search the dictionary placing the result in rax.
-  GenerateDictionaryLoad(masm, &slow, rdx, rcx, rbx, rdi, rax);
+  GenerateDictionaryLoad(masm, &slow, rax, rcx, rbx, rdi, rax);
   __ ret(0);

   // Dictionary load failed, go slow (but don't miss).
   __ bind(&slow);
   GenerateRuntimeGetProperty(masm);

   // Cache miss: Jump to runtime.
   __ bind(&miss);
   GenerateMiss(masm);
 }
(...skipping 50 matching lines...)
@@ -1048,24 +1038,22 @@
   __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());

   // Perform tail call to the entry.
   ExternalReference ref =
       ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
   __ TailCallExternalReference(ref, 2, 1);
 }


 // IC register specifications
-const Register LoadIC::ReceiverRegister() { return rax; }
+const Register LoadIC::ReceiverRegister() { return rdx; }
 const Register LoadIC::NameRegister() { return rcx; }
-const Register KeyedLoadIC::ReceiverRegister() { return rdx; }
-const Register KeyedLoadIC::NameRegister() { return rax; }


 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
   // The return address is on the stack.

   __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
   __ Push(ReceiverRegister());  // receiver
   __ Push(NameRegister());  // name
   __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());

(...skipping 235 matching lines...)
@@ -1307,10 +1295,10 @@
   Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
       ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
       : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
   *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
 }


 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64