Chromium Code Reviews

Side by Side Diff: src/ia32/ic-ia32.cc

Issue 8086021: Clean up the x86 assembler API. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 2 months ago
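The pattern throughout this diff is mechanical: call sites that used to wrap a plain register in Operand(...) now pass the Register directly (cmp, add, sub, and_, xor_, mov). A minimal sketch of why such a cleanup works at the declaration level follows; the class and method shapes below are illustrative assumptions, not the actual V8 assembler headers.

// Sketch only: hypothetical declarations showing why the explicit Operand()
// wrapper can be dropped at call sites. Names mirror V8 style, but the exact
// signatures are assumptions, not copied from the real assembler.
struct Register { int code; };
struct Immediate { int value; };

class Operand {
 public:
  // A non-explicit constructor (or dedicated Register overloads on the
  // emitters) lets cmp(reg, imm) compile without writing Operand(reg).
  Operand(Register reg) : reg_(reg) {}
 private:
  Register reg_;
};

class Assembler {
 public:
  void cmp(const Operand& dst, const Immediate& src) {}  // old-style call site
  void cmp(Register dst, const Immediate& src) {}        // new-style call site
};

int main() {
  Assembler masm;
  Register scratch{0};
  masm.cmp(Operand(scratch), Immediate{0});  // before this change
  masm.cmp(scratch, Immediate{0});           // after this change
  return 0;
}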
1   // Copyright 2011 the V8 project authors. All rights reserved.
2   // Redistribution and use in source and binary forms, with or without
3   // modification, are permitted provided that the following conditions are
4   // met:
5   //
6   // * Redistributions of source code must retain the above copyright
7   // notice, this list of conditions and the following disclaimer.
8   // * Redistributions in binary form must reproduce the above
9   // copyright notice, this list of conditions and the following
10  // disclaimer in the documentation and/or other materials provided
(...skipping 308 matching lines...)
319   DONT_DO_SMI_CHECK);
320   } else {
321   __ AssertFastElements(scratch);
322   }
323   // Check that the key (index) is within bounds.
324   __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
325   __ j(above_equal, out_of_range);
326   // Fast case: Do the load.
327   STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
328   __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
329 - __ cmp(Operand(scratch), Immediate(FACTORY->the_hole_value()));
329 + __ cmp(scratch, Immediate(FACTORY->the_hole_value()));
330   // In case the loaded value is the_hole we have to consult GetProperty
331   // to ensure the prototype chain is searched.
332   __ j(equal, out_of_range);
333   if (!result.is(scratch)) {
334   __ mov(result, scratch);
335   }
336   }
337
338
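The times_2 scale in the fast-case load above works because of the constants pinned by the STATIC_ASSERT: with kSmiTag == 0 and kSmiTagSize == 1 the tagged key is already twice the index, so scaling it by two gives the byte offset of a 4-byte element. A small self-contained check of that arithmetic (the SmiTag helper here is illustrative, not the V8 one):

#include <cassert>
#include <cstdint>

// A smi stores the integer shifted left by one, so SmiTag(index) == 2 * index.
// FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize) therefore
// addresses element `index` of an array of kPointerSize == 4 byte slots.
int32_t SmiTag(int32_t value) { return value << 1; }  // illustrative helper

int main() {
  const int32_t kPointerSize = 4;
  for (int32_t index = 0; index < 1000; index++) {
    assert(SmiTag(index) * 2 == index * kPointerSize);
  }
  return 0;
}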
339   // Checks whether a key is an array index string or a symbol string.
(...skipping 47 matching lines...)
387   __ j(not_zero, slow_case);
388
389   // Load the elements into scratch1 and check its map.
390   Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
391   __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
392   __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
393
394   // Check if element is in the range of mapped arguments. If not, jump
395   // to the unmapped lookup with the parameter map in scratch1.
396   __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
397 - __ sub(Operand(scratch2), Immediate(Smi::FromInt(2)));
397 + __ sub(scratch2, Immediate(Smi::FromInt(2)));
398 - __ cmp(key, Operand(scratch2));
398 + __ cmp(key, scratch2);
399   __ j(greater_equal, unmapped_case);
400
401   // Load element index and check whether it is the hole.
402   const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
403   __ mov(scratch2, FieldOperand(scratch1,
404   key,
405   times_half_pointer_size,
406   kHeaderSize));
407   __ cmp(scratch2, factory->the_hole_value());
408   __ j(equal, unmapped_case);
(...skipping 16 matching lines...)
425   Register scratch,
426   Label* slow_case) {
427   // Element is in arguments backing store, which is referenced by the
428   // second element of the parameter_map.
429   const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
430   Register backing_store = parameter_map;
431   __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
432   Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
433   __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
434   __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
435 - __ cmp(key, Operand(scratch));
435 + __ cmp(key, scratch);
436   __ j(greater_equal, slow_case);
437   return FieldOperand(backing_store,
438   key,
439   times_half_pointer_size,
440   FixedArray::kHeaderSize);
441   }
442
443
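The two helpers above both index into the non-strict arguments "parameter map". The offsets they use (length minus Smi::FromInt(2), a header offset of FixedArray::kHeaderSize + 2 * kPointerSize, and a backing store at FixedArray::kHeaderSize + kPointerSize) are consistent with a FixedArray whose first two elements are bookkeeping slots, the second being the arguments backing store. A small sketch of those offsets on ia32; the 4-byte pointer size and two-word FixedArray header are assumptions here:

#include <cstdio>

// Illustrative offsets only; real values come from FixedArray::kHeaderSize
// and kPointerSize in V8.
const int kPointerSize = 4;                           // ia32
const int kFixedArrayHeaderSize = 2 * kPointerSize;   // map + length words

// Slot 1 of the parameter map holds the arguments backing store, matching
// kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize above.
int BackingStoreOffset() { return kFixedArrayHeaderSize + 1 * kPointerSize; }

// Mapped parameter slots start after the two bookkeeping slots, matching
// kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize in the mapped
// lookup and explaining why the range check subtracts 2 from the length.
int MappedSlotOffset(int parameter_index) {
  return kFixedArrayHeaderSize + (2 + parameter_index) * kPointerSize;
}

int main() {
  std::printf("backing store at +%d, parameter 0 at +%d\n",
              BackingStoreOffset(), MappedSlotOffset(0));
  return 0;
}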
444   void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
445   // ----------- S t a t e -------------
(...skipping 81 matching lines...)
527   Immediate(isolate->factory()->hash_table_map()));
528   __ j(equal, &probe_dictionary);
529
530   // Load the map of the receiver, compute the keyed lookup cache hash
531   // based on 32 bits of the map pointer and the string hash.
532   __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
533   __ mov(ecx, ebx);
534   __ shr(ecx, KeyedLookupCache::kMapHashShift);
535   __ mov(edi, FieldOperand(eax, String::kHashFieldOffset));
536   __ shr(edi, String::kHashShift);
537 - __ xor_(ecx, Operand(edi));
537 + __ xor_(ecx, edi);
538   __ and_(ecx, KeyedLookupCache::kCapacityMask);
539
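Expressed in plain C++, the hash computed above mixes the receiver's map word with the key string's hash field and masks the result to the cache capacity. A sketch with the shift and mask amounts left as parameters, since their concrete values are not visible in this diff:

#include <cstdint>

// Mirrors the instruction sequence above: shr ecx, kMapHashShift;
// shr edi, kHashShift; xor_(ecx, edi); and_(ecx, kCapacityMask).
uint32_t KeyedLookupCacheIndex(uint32_t map_word, uint32_t hash_field,
                               uint32_t map_hash_shift, uint32_t hash_shift,
                               uint32_t capacity_mask) {
  uint32_t map_part = map_word >> map_hash_shift;
  uint32_t name_part = hash_field >> hash_shift;
  return (map_part ^ name_part) & capacity_mask;
}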
540   // Load the key (consisting of map and symbol) from the cache and
541   // check for match.
542   ExternalReference cache_keys =
543   ExternalReference::keyed_lookup_cache_keys(masm->isolate());
544   __ mov(edi, ecx);
545   __ shl(edi, kPointerSizeLog2 + 1);
546   __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
547   __ j(not_equal, &slow);
548 - __ add(Operand(edi), Immediate(kPointerSize));
548 + __ add(edi, Immediate(kPointerSize));
549   __ cmp(eax, Operand::StaticArray(edi, times_1, cache_keys));
550   __ j(not_equal, &slow);
551
552   // Get field offset.
553   // edx     : receiver
554   // ebx     : receiver's map
555   // eax     : key
556   // ecx     : lookup cache index
557   ExternalReference cache_field_offsets =
558   ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
559   __ mov(edi,
560   Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets));
561   __ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
562 - __ sub(edi, Operand(ecx));
562 + __ sub(edi, ecx);
563   __ j(above_equal, &property_array_property);
564
565   // Load in-object property.
566   __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
567 - __ add(ecx, Operand(edi));
567 + __ add(ecx, edi);
568   __ mov(eax, FieldOperand(edx, ecx, times_pointer_size, 0));
569   __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
570   __ ret(0);
571
572   // Load property array property.
573   __ bind(&property_array_property);
574   __ mov(eax, FieldOperand(edx, JSObject::kPropertiesOffset));
575   __ mov(eax, FieldOperand(eax, edi, times_pointer_size,
576   FixedArray::kHeaderSize));
577   __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
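The two load paths above split on whether the cached field offset falls inside the receiver's in-object property count: smaller offsets address a slot at the end of the object itself, larger ones index the separate properties FixedArray. A small sketch of that decision with made-up numbers:

#include <cstdio>

// Illustrative only: mirrors `sub edi, ecx` / `j(above_equal, ...)` and the
// later `add ecx, edi` for the in-object case.  All numbers are made up.
void DescribeFieldLocation(int field_offset, int in_object_properties,
                           int instance_size_in_words) {
  int delta = field_offset - in_object_properties;
  if (delta >= 0) {
    // Out-of-object: element `delta` of the properties FixedArray.
    std::printf("properties array element %d\n", delta);
  } else {
    // In-object: such properties sit at the end of the object, so the slot is
    // instance_size_in_words + delta words from the object start.
    std::printf("in-object word %d\n", instance_size_in_words + delta);
  }
}

int main() {
  DescribeFieldLocation(1, 4, 10);  // in-object word 7
  DescribeFieldLocation(6, 4, 10);  // properties array element 2
  return 0;
}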
(...skipping 66 matching lines...)
644   // Check that the key is an array index, that is Uint32.
645   __ test(eax, Immediate(kSmiTagMask | kSmiSignMask));
646   __ j(not_zero, &slow);
647
648   // Get the map of the receiver.
649   __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
650
651   // Check that it has indexed interceptor and access checks
652   // are not enabled for this object.
653   __ movzx_b(ecx, FieldOperand(ecx, Map::kBitFieldOffset));
654 - __ and_(Operand(ecx), Immediate(kSlowCaseBitFieldMask));
654 + __ and_(ecx, Immediate(kSlowCaseBitFieldMask));
655 - __ cmp(Operand(ecx), Immediate(1 << Map::kHasIndexedInterceptor));
655 + __ cmp(ecx, Immediate(1 << Map::kHasIndexedInterceptor));
656   __ j(not_zero, &slow);
657
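The masked comparison above only falls through when the indexed-interceptor bit is the sole slow-case bit set in the map's bit field, i.e. the object has an indexed interceptor and none of the other slow-case flags (such as access checks). A minimal restatement of that test; the bit values are placeholders, not the real Map constants:

#include <cstdint>

// (bit_field & kSlowCaseBitFieldMask) must equal exactly
// (1 << Map::kHasIndexedInterceptor) for the fast path to proceed.
bool TakesInterceptorFastPath(uint8_t bit_field, uint8_t slow_case_mask,
                              uint8_t indexed_interceptor_bit) {
  return (bit_field & slow_case_mask) == indexed_interceptor_bit;
}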
658   // Everything is fine, call runtime.
659   __ pop(ecx);
660   __ push(edx); // receiver
661   __ push(eax); // key
662   __ push(ecx); // return address
663
664   // Perform tail call to the entry.
665   ExternalReference ref =
(...skipping 173 matching lines...)
839   __ bind(&non_smi_value);
840   if (FLAG_smi_only_arrays) {
841   // Escape to slow case when writing non-smi into smi-only array.
842   __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
843   __ CheckFastObjectElements(edi, &slow, Label::kNear);
844   }
845
846   // Fast elements array, store the value to the elements backing store.
847   __ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
848   // Update write barrier for the elements array address.
849 - __ mov(edx, Operand(eax)); // Preserve the value which is returned.
849 + __ mov(edx, eax); // Preserve the value which is returned.
850   __ RecordWriteArray(
851   ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
852   __ ret(0);
853
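When FLAG_smi_only_arrays is on, the store above may not write a non-smi value straight into an array whose elements are tracked as smi-only: CheckFastObjectElements bails to the slow path, which can transition the elements kind first. A rough sketch of that gate; the enum and its values are illustrative, not the actual ElementsKind constants:

// Sketch only (names are not the real V8 ElementsKind constants): storing a
// non-smi value directly requires the receiver's elements to already be
// tracked as holding arbitrary objects; otherwise escape to the runtime.
enum class SketchElementsKind { kFastSmiOnly, kFastObject };

bool CanStoreNonSmiDirectly(SketchElementsKind kind) {
  return kind == SketchElementsKind::kFastObject;
}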
854   __ bind(&fast_double_with_map_check);
855   // Check for fast double array case. If this fails, call through to the
856   // runtime.
857   __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
858   __ j(not_equal, &slow);
859   __ bind(&fast_double_without_map_check);
(...skipping 822 matching lines...)
1682   Condition cc = *jmp_address == Assembler::kJncShortOpcode
1683   ? not_zero
1684   : zero;
1685   *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
1686   }
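The patch above rewrites only the opcode byte of an already-emitted short conditional jump: on ia32 a short Jcc is encoded as one opcode byte, 0x70 plus the condition code, followed by an 8-bit displacement, which is what Assembler::kJccShortPrefix | cc reassembles. A tiny sketch of that byte math; the 0x70 value is standard x86 encoding, not something shown in this diff:

#include <cstdint>

// Short Jcc encoding on ia32: opcode = 0x70 | condition, then a rel8
// displacement.  Overwriting just the opcode byte keeps the displacement and
// flips which flag the existing jump tests.
uint8_t ShortJccOpcode(uint8_t condition_code) {
  const uint8_t kJccShortPrefix = 0x70;  // assumed to match Assembler's value
  return static_cast<uint8_t>(kJccShortPrefix | condition_code);
}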
1687
1688
1689   } } // namespace v8::internal
1690
1691   #endif // V8_TARGET_ARCH_IA32