Chromium Code Reviews

Side by Side Diff: src/x64/ic-x64.cc

Issue 2441002: ARM: Add more logic to the generic keyed load stub... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 10 years, 6 months ago
 // Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 335 matching lines...)
 }


 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rsp[0]  : return address
   //  -- rsp[8]  : name
   //  -- rsp[16] : receiver
   // -----------------------------------
   Label slow, check_string, index_smi, index_string;
-  Label check_pixel_array, probe_dictionary;
-  Label check_number_dictionary;
+  Label check_pixel_array, probe_dictionary, check_number_dictionary;

   // Load name and receiver.
   __ movq(rax, Operand(rsp, kPointerSize));
   __ movq(rcx, Operand(rsp, 2 * kPointerSize));

   // Check that the object isn't a smi.
   __ JumpIfSmi(rcx, &slow);

   // Check that the object is some kind of JS object EXCEPT JS Value type.
   // In the case that the object is a value-wrapper object,
   // we enter the runtime system to make sure that indexing
   // into string objects work as intended.
   ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
   __ CmpObjectType(rcx, JS_OBJECT_TYPE, rdx);
   __ j(below, &slow);

   // Check bit field.
   __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
            Immediate(kSlowCaseBitFieldMask));
   __ j(not_zero, &slow);

   // Check that the key is a smi.
   __ JumpIfNotSmi(rax, &check_string);
-
-  // Get the elements array of the object.
   __ bind(&index_smi);
+  // Now the key is known to be a smi. This place is also jumped to from below
+  // where a numeric string is converted to a smi.
   __ movq(rcx, FieldOperand(rcx, JSObject::kElementsOffset));
   // Check that the object is in fast mode (not dictionary).
   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                  Heap::kFixedArrayMapRootIndex);
   __ j(not_equal, &check_pixel_array);
   // Check that the key (index) is within bounds.
   __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
   __ j(above_equal, &slow);  // Unsigned comparison rejects negative indices.
   // Fast case: Do the load.
   SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
(...skipping 41 matching lines...)
   __ bind(&check_string);
   // The key is not a smi.
   // Is it a string?
   __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
   __ j(above_equal, &slow);
   // Is the string an array index, with cached numeric value?
   __ movl(rbx, FieldOperand(rax, String::kHashFieldOffset));
   __ testl(rbx, Immediate(String::kIsArrayIndexMask));

   // Is the string a symbol?
+  // rcx: key map.
   __ j(not_zero, &index_string);  // The value in rbx is used at jump target.
   ASSERT(kSymbolTag != 0);
   __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset),
            Immediate(kIsSymbolMask));
   __ j(zero, &slow);

   // If the receiver is a fast-case object, check the keyed lookup
   // cache. Otherwise probe the dictionary leaving result in rcx.
   __ movq(rbx, FieldOperand(rcx, JSObject::kPropertiesOffset));
   __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), Factory::hash_table_map());
(...skipping 30 matching lines...)
   __ movl(rax, Operand(kScratchRegister, rdx, times_4, 0));
   __ movzxbq(rdx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
   __ cmpq(rax, rdx);
   __ j(above_equal, &slow);

   // Load in-object property.
   __ subq(rax, rdx);
   __ movzxbq(rdx, FieldOperand(rbx, Map::kInstanceSizeOffset));
   __ addq(rax, rdx);
   __ movq(rax, FieldOperand(rcx, rax, times_pointer_size, 0));
+  __ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
   __ ret(0);

   // Do a quick inline probe of the receiver's dictionary, if it
   // exists.
   __ bind(&probe_dictionary);
   GenerateDictionaryLoad(masm,
                          &slow,
                          rbx,
                          rcx,
                          rdx,
                          rax,
                          rdi,
                          DICTIONARY_CHECK_DONE);
   __ movq(rax, rcx);
   __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
   __ ret(0);
   // If the hash field contains an array index pick it out. The assert checks
   // that the constants for the maximum number of digits for an array index
   // cached in the hash field and the number of bits reserved for it does not
   // conflict.
   ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
          (1 << String::kArrayIndexValueBits));
   __ bind(&index_string);
   // We want the smi-tagged index in rax.
+  // rax: key (string).
+  // rbx: hash field.
+  // rdx: receiver.
   __ and_(rbx, Immediate(String::kArrayIndexValueMask));
   __ shr(rbx, Immediate(String::kHashShift));
+  // Here we actually clobber the key (rax) which will be used if calling into
+  // runtime later. However as the new key is the numeric value of a string key
+  // there is no difference in using either key.
   __ Integer32ToSmi(rax, rbx);
+  // Now jump to the place where smi keys are handled.
   __ jmp(&index_smi);
 }


 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rsp[0]  : return address
   //  -- rsp[8]  : name (index)
   //  -- rsp[16] : receiver
   // -----------------------------------
(...skipping 1040 matching lines...)
   GenerateMiss(masm);
 }


 #undef __


 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
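As a rough aid to reading the diff above: for string keys, the generic keyed load stub first checks whether the string caches a valid array index, reroutes such keys to the smi-index path (the &index_smi label), and otherwise probes the keyed lookup cache or the property dictionary, falling back to the slow path on failure. The standalone C++ below is only an illustrative sketch of that dispatch order; the Object struct, KeyedLoad, and the use of strtol are placeholders, not V8 code.

// Minimal sketch, not V8 internals: models the key dispatch of the generic
// keyed load stub. A numeric string key is converted to an index and takes
// the same path as a smi key; other string keys probe the dictionary.
#include <cstdlib>
#include <iostream>
#include <optional>
#include <string>
#include <unordered_map>
#include <vector>

struct Object {
  std::vector<int> fast_elements;                   // fast-mode elements array
  std::unordered_map<std::string, int> dictionary;  // slow properties
};

std::optional<int> KeyedLoad(const Object& obj, const std::string& key) {
  // index_string: does the key parse as an array index?
  char* end = nullptr;
  long index = std::strtol(key.c_str(), &end, 10);
  if (end != key.c_str() && *end == '\0' && index >= 0) {
    // index_smi: bounds-checked load from the fast elements array.
    if (static_cast<size_t>(index) < obj.fast_elements.size())
      return obj.fast_elements[static_cast<size_t>(index)];
    return std::nullopt;  // slow: out of bounds, defer to the runtime
  }
  // probe_dictionary: non-index string keys go to the property dictionary.
  auto it = obj.dictionary.find(key);
  if (it != obj.dictionary.end()) return it->second;
  return std::nullopt;  // slow
}

int main() {
  Object obj{{10, 20, 30}, {{"foo", 42}}};
  std::cout << KeyedLoad(obj, "1").value() << "\n";    // numeric string key -> 20
  std::cout << KeyedLoad(obj, "foo").value() << "\n";  // plain string key -> 42
}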