OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 7447 matching lines...)
7458 intptr_t loc = | 7458 intptr_t loc = |
7459 reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location()); | 7459 reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location()); |
7460 masm->li(ra, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE); | 7460 masm->li(ra, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE); |
7461 // Call the function. | 7461 // Call the function. |
7462 masm->Jump(t9); | 7462 masm->Jump(t9); |
7463 // Make sure the stored 'ra' points to this position. | 7463 // Make sure the stored 'ra' points to this position. |
7464 ASSERT_EQ(kNumInstructionsToJump, masm->InstructionsGeneratedSince(&find_ra)); | 7464 ASSERT_EQ(kNumInstructionsToJump, masm->InstructionsGeneratedSince(&find_ra)); |
7465 } | 7465 } |
7466 | 7466 |
7467 | 7467 |
7468 void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, | 7468 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, |
7469 Label* miss, | 7469 Label* miss, |
7470 Label* done, | 7470 Label* done, |
7471 Register receiver, | 7471 Register receiver, |
7472 Register properties, | 7472 Register properties, |
7473 Handle<String> name, | 7473 Handle<Name> name, |
7474 Register scratch0) { | 7474 Register scratch0) { |
| 7475 ASSERT(name->IsUniqueName()); |
7475 // If the names of the slots in the range from 1 to kProbes - 1 for the | 7476 // If the names of the slots in the range from 1 to kProbes - 1 for the |
7476 // hash value are not equal to the name and the kProbes-th slot is not | 7477 // hash value are not equal to the name and the kProbes-th slot is not |
7477 // used (its name is the undefined value), the hash table is guaranteed | 7478 // used (its name is the undefined value), the hash table is guaranteed |
7478 // not to contain the property. This holds even if some slots represent | 7479 // not to contain the property. This holds even if some slots represent |
7479 // deleted properties (their names are the hole value). | 7480 // deleted properties (their names are the hole value). |
7480 for (int i = 0; i < kInlinedProbes; i++) { | 7481 for (int i = 0; i < kInlinedProbes; i++) { |
7481 // scratch0 points to properties hash. | 7482 // scratch0 points to properties hash. |
7482 // Compute the masked index: (hash + i + i * i) & mask. | 7483 // Compute the masked index: (hash + i + i * i) & mask. |
7483 Register index = scratch0; | 7484 Register index = scratch0; |
7484 // Capacity is smi 2^n. | 7485 // Capacity is smi 2^n. |
7485 __ lw(index, FieldMemOperand(properties, kCapacityOffset)); | 7486 __ lw(index, FieldMemOperand(properties, kCapacityOffset)); |
7486 __ Subu(index, index, Operand(1)); | 7487 __ Subu(index, index, Operand(1)); |
7487 __ And(index, index, Operand( | 7488 __ And(index, index, Operand( |
7488 Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i)))); | 7489 Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i)))); |
7489 | 7490 |
7490 // Scale the index by multiplying by the entry size. | 7491 // Scale the index by multiplying by the entry size. |
7491 ASSERT(StringDictionary::kEntrySize == 3); | 7492 ASSERT(NameDictionary::kEntrySize == 3); |
7492 __ sll(at, index, 1); | 7493 __ sll(at, index, 1); |
7493 __ Addu(index, index, at); | 7494 __ Addu(index, index, at); |
7494 | 7495 |
7495 Register entity_name = scratch0; | 7496 Register entity_name = scratch0; |
7496 // Finding undefined here means the name is not in the dictionary. | 7497 // Finding undefined here means the name is not in the dictionary. |
7497 ASSERT_EQ(kSmiTagSize, 1); | 7498 ASSERT_EQ(kSmiTagSize, 1); |
7498 Register tmp = properties; | 7499 Register tmp = properties; |
7499 __ sll(scratch0, index, 1); | 7500 __ sll(scratch0, index, 1); |
7500 __ Addu(tmp, properties, scratch0); | 7501 __ Addu(tmp, properties, scratch0); |
7501 __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); | 7502 __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); |
7502 | 7503 |
7503 ASSERT(!tmp.is(entity_name)); | 7504 ASSERT(!tmp.is(entity_name)); |
7504 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); | 7505 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); |
7505 __ Branch(done, eq, entity_name, Operand(tmp)); | 7506 __ Branch(done, eq, entity_name, Operand(tmp)); |
7506 | 7507 |
7507 if (i != kInlinedProbes - 1) { | 7508 if (i != kInlinedProbes - 1) { |
7508 // Load the hole ready for use below: | 7509 // Load the hole ready for use below: |
7509 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); | 7510 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); |
7510 | 7511 |
7511 // Stop if we found the property. | 7512 // Stop if we found the property. |
7512 __ Branch(miss, eq, entity_name, Operand(Handle<String>(name))); | 7513 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name))); |
7513 | 7514 |
7514 Label the_hole; | 7515 Label good; |
7515 __ Branch(&the_hole, eq, entity_name, Operand(tmp)); | 7516 __ Branch(&good, eq, entity_name, Operand(tmp)); |
7516 | 7517 |
7517 // Check if the entry name is not an internalized string. | 7518 // Check if the entry name is not a unique name. |
7518 __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); | 7519 __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); |
7519 __ lbu(entity_name, | 7520 __ lbu(entity_name, |
7520 FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); | 7521 FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); |
7521 __ And(scratch0, entity_name, Operand(kIsInternalizedMask)); | 7522 __ And(scratch0, entity_name, Operand(kIsInternalizedMask)); |
7522 __ Branch(miss, eq, scratch0, Operand(zero_reg)); | 7523 __ Branch(&good, ne, scratch0, Operand(zero_reg)); |
| 7524 __ Branch(miss, ne, entity_name, Operand(SYMBOL_TYPE)); |
7523 | 7525 |
7524 __ bind(&the_hole); | 7526 __ bind(&good); |
7525 | 7527 |
7526 // Restore the properties. | 7528 // Restore the properties. |
7527 __ lw(properties, | 7529 __ lw(properties, |
7528 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 7530 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
7529 } | 7531 } |
7530 } | 7532 } |
7531 | 7533 |
7532 const int spill_mask = | 7534 const int spill_mask = |
7533 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() | | 7535 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() | |
7534 a2.bit() | a1.bit() | a0.bit() | v0.bit()); | 7536 a2.bit() | a1.bit() | a0.bit() | v0.bit()); |
7535 | 7537 |
7536 __ MultiPush(spill_mask); | 7538 __ MultiPush(spill_mask); |
7537 __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 7539 __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
7538 __ li(a1, Operand(Handle<String>(name))); | 7540 __ li(a1, Operand(Handle<Name>(name))); |
7539 StringDictionaryLookupStub stub(NEGATIVE_LOOKUP); | 7541 NameDictionaryLookupStub stub(NEGATIVE_LOOKUP); |
7540 __ CallStub(&stub); | 7542 __ CallStub(&stub); |
7541 __ mov(at, v0); | 7543 __ mov(at, v0); |
7542 __ MultiPop(spill_mask); | 7544 __ MultiPop(spill_mask); |
7543 | 7545 |
7544 __ Branch(done, eq, at, Operand(zero_reg)); | 7546 __ Branch(done, eq, at, Operand(zero_reg)); |
7545 __ Branch(miss, ne, at, Operand(zero_reg)); | 7547 __ Branch(miss, ne, at, Operand(zero_reg)); |
7546 } | 7548 } |
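
A minimal standalone C++ sketch of the probing argument stated in the comment at the top of GenerateNegativeLookup (not V8 code; the slot layout and sentinel handling are simplified assumptions). Capacity is a power of two, each probe inspects (hash + i + i*i) & mask, and hitting an undefined slot before any match proves the name is absent even when holes mark deleted entries:

    #include <cstdint>
    #include <string>
    #include <vector>

    // A slot holds a name, the undefined sentinel (never used), or the hole
    // sentinel (deleted entry).
    struct Slot { enum Tag { kUndefined, kHole, kName } tag; std::string name; };

    // True when the name is provably absent: a probe hit an undefined slot
    // before any probe matched the name.  Mirrors the inlined probe loop.
    bool ProvablyAbsent(const std::vector<Slot>& slots,
                        const std::string& name, uint32_t hash, int probes) {
      uint32_t mask = static_cast<uint32_t>(slots.size()) - 1;  // size is 2^n
      for (int i = 0; i < probes; ++i) {
        const Slot& s = slots[(hash + i + i * i) & mask];  // masked index
        if (s.tag == Slot::kUndefined) return true;  // free slot: not present
        if (s.tag == Slot::kName && s.name == name) return false;  // found
        // Holes and non-matching unique names: keep probing.
      }
      return false;  // inconclusive; the caller falls back to the full lookup
    }
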
7547 | 7549 |
7548 | 7550 |
7549 // Probe the string dictionary in the |elements| register. Jump to the | 7551 // Probe the name dictionary in the |elements| register. Jump to the |
7550 // |done| label if a property with the given name is found. Jump to | 7552 // |done| label if a property with the given name is found. Jump to |
7551 // the |miss| label otherwise. | 7553 // the |miss| label otherwise. |
7552 // If lookup was successful |scratch2| will be equal to elements + 4 * index. | 7554 // If lookup was successful |scratch2| will be equal to elements + 4 * index. |
7553 void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, | 7555 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, |
7554 Label* miss, | 7556 Label* miss, |
7555 Label* done, | 7557 Label* done, |
7556 Register elements, | 7558 Register elements, |
7557 Register name, | 7559 Register name, |
7558 Register scratch1, | 7560 Register scratch1, |
7559 Register scratch2) { | 7561 Register scratch2) { |
7560 ASSERT(!elements.is(scratch1)); | 7562 ASSERT(!elements.is(scratch1)); |
7561 ASSERT(!elements.is(scratch2)); | 7563 ASSERT(!elements.is(scratch2)); |
7562 ASSERT(!name.is(scratch1)); | 7564 ASSERT(!name.is(scratch1)); |
7563 ASSERT(!name.is(scratch2)); | 7565 ASSERT(!name.is(scratch2)); |
7564 | 7566 |
7565 __ AssertString(name); | 7567 __ AssertName(name); |
7566 | 7568 |
7567 // Compute the capacity mask. | 7569 // Compute the capacity mask. |
7568 __ lw(scratch1, FieldMemOperand(elements, kCapacityOffset)); | 7570 __ lw(scratch1, FieldMemOperand(elements, kCapacityOffset)); |
7569 __ sra(scratch1, scratch1, kSmiTagSize); // convert smi to int | 7571 __ sra(scratch1, scratch1, kSmiTagSize); // convert smi to int |
7570 __ Subu(scratch1, scratch1, Operand(1)); | 7572 __ Subu(scratch1, scratch1, Operand(1)); |
7571 | 7573 |
7572 // Generate an unrolled loop that performs a few probes before | 7574 // Generate an unrolled loop that performs a few probes before |
7573 // giving up. Measurements done on Gmail indicate that 2 probes | 7575 // giving up. Measurements done on Gmail indicate that 2 probes |
7574 // cover ~93% of loads from dictionaries. | 7576 // cover ~93% of loads from dictionaries. |
7575 for (int i = 0; i < kInlinedProbes; i++) { | 7577 for (int i = 0; i < kInlinedProbes; i++) { |
7576 // Compute the masked index: (hash + i + i * i) & mask. | 7578 // Compute the masked index: (hash + i + i * i) & mask. |
7577 __ lw(scratch2, FieldMemOperand(name, String::kHashFieldOffset)); | 7579 __ lw(scratch2, FieldMemOperand(name, Name::kHashFieldOffset)); |
7578 if (i > 0) { | 7580 if (i > 0) { |
7579 // Add the probe offset (i + i * i) left shifted to avoid right shifting | 7581 // Add the probe offset (i + i * i) left shifted to avoid right shifting |
7580 // the hash in a separate instruction. The value hash + i + i * i is right | 7582 // the hash in a separate instruction. The value hash + i + i * i is right |
7581 // shifted in the following And instruction. | 7583 // shifted in the following And instruction. |
7582 ASSERT(StringDictionary::GetProbeOffset(i) < | 7584 ASSERT(NameDictionary::GetProbeOffset(i) < |
7583 1 << (32 - String::kHashFieldOffset)); | 7585 1 << (32 - Name::kHashFieldOffset)); |
7584 __ Addu(scratch2, scratch2, Operand( | 7586 __ Addu(scratch2, scratch2, Operand( |
7585 StringDictionary::GetProbeOffset(i) << String::kHashShift)); | 7587 NameDictionary::GetProbeOffset(i) << Name::kHashShift)); |
7586 } | 7588 } |
7587 __ srl(scratch2, scratch2, String::kHashShift); | 7589 __ srl(scratch2, scratch2, Name::kHashShift); |
7588 __ And(scratch2, scratch1, scratch2); | 7590 __ And(scratch2, scratch1, scratch2); |
7589 | 7591 |
7590 // Scale the index by multiplying by the element size. | 7592 // Scale the index by multiplying by the element size. |
7591 ASSERT(StringDictionary::kEntrySize == 3); | 7593 ASSERT(NameDictionary::kEntrySize == 3); |
7592 // scratch2 = scratch2 * 3. | 7594 // scratch2 = scratch2 * 3. |
7593 | 7595 |
7594 __ sll(at, scratch2, 1); | 7596 __ sll(at, scratch2, 1); |
7595 __ Addu(scratch2, scratch2, at); | 7597 __ Addu(scratch2, scratch2, at); |
7596 | 7598 |
7597 // Check if the key is identical to the name. | 7599 // Check if the key is identical to the name. |
7598 __ sll(at, scratch2, 2); | 7600 __ sll(at, scratch2, 2); |
7599 __ Addu(scratch2, elements, at); | 7601 __ Addu(scratch2, elements, at); |
7600 __ lw(at, FieldMemOperand(scratch2, kElementsStartOffset)); | 7602 __ lw(at, FieldMemOperand(scratch2, kElementsStartOffset)); |
7601 __ Branch(done, eq, name, Operand(at)); | 7603 __ Branch(done, eq, name, Operand(at)); |
7602 } | 7604 } |
7603 | 7605 |
7604 const int spill_mask = | 7606 const int spill_mask = |
7605 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | | 7607 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | |
7606 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) & | 7608 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) & |
7607 ~(scratch1.bit() | scratch2.bit()); | 7609 ~(scratch1.bit() | scratch2.bit()); |
7608 | 7610 |
7609 __ MultiPush(spill_mask); | 7611 __ MultiPush(spill_mask); |
7610 if (name.is(a0)) { | 7612 if (name.is(a0)) { |
7611 ASSERT(!elements.is(a1)); | 7613 ASSERT(!elements.is(a1)); |
7612 __ Move(a1, name); | 7614 __ Move(a1, name); |
7613 __ Move(a0, elements); | 7615 __ Move(a0, elements); |
7614 } else { | 7616 } else { |
7615 __ Move(a0, elements); | 7617 __ Move(a0, elements); |
7616 __ Move(a1, name); | 7618 __ Move(a1, name); |
7617 } | 7619 } |
7618 StringDictionaryLookupStub stub(POSITIVE_LOOKUP); | 7620 NameDictionaryLookupStub stub(POSITIVE_LOOKUP); |
7619 __ CallStub(&stub); | 7621 __ CallStub(&stub); |
7620 __ mov(scratch2, a2); | 7622 __ mov(scratch2, a2); |
7621 __ mov(at, v0); | 7623 __ mov(at, v0); |
7622 __ MultiPop(spill_mask); | 7624 __ MultiPop(spill_mask); |
7623 | 7625 |
7624 __ Branch(done, ne, at, Operand(zero_reg)); | 7626 __ Branch(done, ne, at, Operand(zero_reg)); |
7625 __ Branch(miss, eq, at, Operand(zero_reg)); | 7627 __ Branch(miss, eq, at, Operand(zero_reg)); |
7626 } | 7628 } |
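
The per-probe address arithmetic the unrolled loop above emits, collected as plain C++ (a sketch, not V8 code; kHashShift == 2 is an assumption, the 3-word entry size matches the assert, and probe_offset stands in for NameDictionary::GetProbeOffset(i)). The returned byte offset is what makes |scratch2| equal elements + 4 * index on a hit:

    #include <cstdint>

    uint32_t ProbeByteOffset(uint32_t smi_capacity, uint32_t hash_field,
                             uint32_t probe_offset) {
      const int kSmiTagSize = 1;  // smis carry the value shifted left by one
      const int kHashShift = 2;   // assumed value of Name::kHashShift
      uint32_t mask = (smi_capacity >> kSmiTagSize) - 1;       // sra + Subu
      uint32_t h = hash_field + (probe_offset << kHashShift);  // Addu (i > 0)
      uint32_t index = (h >> kHashShift) & mask;               // srl + And
      index += index << 1;                                     // index *= 3
      return index << 2;                                       // * 4 bytes
    }
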
7627 | 7629 |
7628 | 7630 |
7629 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 7631 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { |
7630 // This stub overrides SometimesSetsUpAFrame() to return false. That means | 7632 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
7631 // we cannot call anything that could cause a GC from this stub. | 7633 // we cannot call anything that could cause a GC from this stub. |
7632 // Registers: | 7634 // Registers: |
7633 // result: StringDictionary to probe | 7635 // result: NameDictionary to probe |
7634 // a1: key | 7636 // a1: key |
7635 // : StringDictionary to probe. | 7637 // dictionary: NameDictionary to probe. |
7636 //  index_: will hold the index of the entry if lookup is successful. | 7638 //  index: will hold the index of the entry if lookup is successful. |
7637 // might alias with result_. | 7639 // might alias with result_. |
7638 // Returns: | 7640 // Returns: |
7639 //  result_ is zero if lookup failed, non-zero otherwise. | 7641 //  result_ is zero if lookup failed, non-zero otherwise. |
7640 | 7642 |
7641 Register result = v0; | 7643 Register result = v0; |
7642 Register dictionary = a0; | 7644 Register dictionary = a0; |
7643 Register key = a1; | 7645 Register key = a1; |
7644 Register index = a2; | 7646 Register index = a2; |
7645 Register mask = a3; | 7647 Register mask = a3; |
7646 Register hash = t0; | 7648 Register hash = t0; |
7647 Register undefined = t1; | 7649 Register undefined = t1; |
7648 Register entry_key = t2; | 7650 Register entry_key = t2; |
7649 | 7651 |
7650 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; | 7652 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; |
7651 | 7653 |
7652 __ lw(mask, FieldMemOperand(dictionary, kCapacityOffset)); | 7654 __ lw(mask, FieldMemOperand(dictionary, kCapacityOffset)); |
7653 __ sra(mask, mask, kSmiTagSize); | 7655 __ sra(mask, mask, kSmiTagSize); |
7654 __ Subu(mask, mask, Operand(1)); | 7656 __ Subu(mask, mask, Operand(1)); |
7655 | 7657 |
7656 __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset)); | 7658 __ lw(hash, FieldMemOperand(key, Name::kHashFieldOffset)); |
7657 | 7659 |
7658 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); | 7660 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); |
7659 | 7661 |
7660 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 7662 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
7661 // Compute the masked index: (hash + i + i * i) & mask. | 7663 // Compute the masked index: (hash + i + i * i) & mask. |
7662 // Capacity is smi 2^n. | 7664 // Capacity is smi 2^n. |
7663 if (i > 0) { | 7665 if (i > 0) { |
7664 // Add the probe offset (i + i * i) left shifted to avoid right shifting | 7666 // Add the probe offset (i + i * i) left shifted to avoid right shifting |
7665 // the hash in a separate instruction. The value hash + i + i * i is right | 7667 // the hash in a separate instruction. The value hash + i + i * i is right |
7666 // shifted in the following And instruction. | 7668 // shifted in the following And instruction. |
7667 ASSERT(StringDictionary::GetProbeOffset(i) < | 7669 ASSERT(NameDictionary::GetProbeOffset(i) < |
7668 1 << (32 - String::kHashFieldOffset)); | 7670 1 << (32 - Name::kHashFieldOffset)); |
7669 __ Addu(index, hash, Operand( | 7671 __ Addu(index, hash, Operand( |
7670 StringDictionary::GetProbeOffset(i) << String::kHashShift)); | 7672 NameDictionary::GetProbeOffset(i) << Name::kHashShift)); |
7671 } else { | 7673 } else { |
7672 __ mov(index, hash); | 7674 __ mov(index, hash); |
7673 } | 7675 } |
7674 __ srl(index, index, String::kHashShift); | 7676 __ srl(index, index, Name::kHashShift); |
7675 __ And(index, mask, index); | 7677 __ And(index, mask, index); |
7676 | 7678 |
7677 // Scale the index by multiplying by the entry size. | 7679 // Scale the index by multiplying by the entry size. |
7678 ASSERT(StringDictionary::kEntrySize == 3); | 7680 ASSERT(NameDictionary::kEntrySize == 3); |
7679 // index *= 3. | 7681 // index *= 3. |
7680 __ mov(at, index); | 7682 __ mov(at, index); |
7681 __ sll(index, index, 1); | 7683 __ sll(index, index, 1); |
7682 __ Addu(index, index, at); | 7684 __ Addu(index, index, at); |
7683 | 7685 |
7684 | 7686 |
7685 ASSERT_EQ(kSmiTagSize, 1); | 7687 ASSERT_EQ(kSmiTagSize, 1); |
7686 __ sll(index, index, 2); | 7688 __ sll(index, index, 2); |
7687 __ Addu(index, index, dictionary); | 7689 __ Addu(index, index, dictionary); |
7688 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset)); | 7690 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset)); |
7689 | 7691 |
7690 // Finding undefined here means the name is not in the dictionary. | 7692 // Finding undefined here means the name is not in the dictionary. |
7691 __ Branch(¬_in_dictionary, eq, entry_key, Operand(undefined)); | 7693 __ Branch(¬_in_dictionary, eq, entry_key, Operand(undefined)); |
7692 | 7694 |
7693 // Stop if we found the property. | 7695 // Stop if we found the property. |
7694 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); | 7696 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); |
7695 | 7697 |
7696 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 7698 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
7697 // Check if the entry name is not an internalized string. | 7699 // Check if the entry name is not a unique name. |
| 7700 Label cont; |
7698 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); | 7701 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); |
7699 __ lbu(entry_key, | 7702 __ lbu(entry_key, |
7700 FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); | 7703 FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); |
7701 __ And(result, entry_key, Operand(kIsInternalizedMask)); | 7704 __ And(result, entry_key, Operand(kIsInternalizedMask)); |
7702 __ Branch(&maybe_in_dictionary, eq, result, Operand(zero_reg)); | 7705 __ Branch(&cont, ne, result, Operand(zero_reg)); |
| 7706 __ Branch(&maybe_in_dictionary, ne, entry_key, Operand(SYMBOL_TYPE)); |
| 7707 __ bind(&cont); |
7703 } | 7708 } |
7704 } | 7709 } |
7705 | 7710 |
7706 __ bind(&maybe_in_dictionary); | 7711 __ bind(&maybe_in_dictionary); |
7707 // If we are doing negative lookup then probing failure should be | 7712 // If we are doing negative lookup then probing failure should be |
7708 // treated as a lookup success. For positive lookup probing failure | 7713 // treated as a lookup success. For positive lookup probing failure |
7709 // should be treated as lookup failure. | 7714 // should be treated as lookup failure. |
7710 if (mode_ == POSITIVE_LOOKUP) { | 7715 if (mode_ == POSITIVE_LOOKUP) { |
7711 __ Ret(USE_DELAY_SLOT); | 7716 __ Ret(USE_DELAY_SLOT); |
7712 __ mov(result, zero_reg); | 7717 __ mov(result, zero_reg); |
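
Two behaviors of the full-probe stub above, restated as a C++ sketch (hypothetical helpers; the constant values are placeholders, only the names follow V8). During NEGATIVE_LOOKUP a non-matching entry may be skipped only if its name is unique, that is, an internalized string or a Symbol; and per the comment before maybe_in_dictionary, exhausting all probes yields a non-zero result (treated as found) for negative lookups but zero for positive ones; the negative fall-through itself is in the elided lines below.

    #include <cstdint>

    const uint32_t kIsInternalizedMask = 1u << 6;  // placeholder bit value
    const uint32_t SYMBOL_TYPE = 0x80;             // placeholder type value

    // NEGATIVE_LOOKUP filter: only unique names can be skipped safely;
    // anything else might still equal the key, so probing must give up.
    bool IsUniqueName(uint32_t instance_type) {
      if ((instance_type & kIsInternalizedMask) != 0) return true;
      return instance_type == SYMBOL_TYPE;
    }

    enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
    enum ProbeOutcome { kFoundKey, kHitUndefined, kProbesExhausted };

    // Result convention in v0: zero means definitely absent, non-zero means
    // found or (negative mode) not provably absent.
    int StubResult(LookupMode mode, ProbeOutcome outcome) {
      switch (outcome) {
        case kFoundKey:        return 1;  // in_dictionary
        case kHitUndefined:    return 0;  // not_in_dictionary
        case kProbesExhausted: return mode == NEGATIVE_LOOKUP ? 1 : 0;
      }
      return 0;
    }
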
(...skipping 436 matching lines...)
8149 __ Pop(ra, t1, a1); | 8154 __ Pop(ra, t1, a1); |
8150 __ Ret(); | 8155 __ Ret(); |
8151 } | 8156 } |
8152 | 8157 |
8153 | 8158 |
8154 #undef __ | 8159 #undef __ |
8155 | 8160 |
8156 } } // namespace v8::internal | 8161 } } // namespace v8::internal |
8157 | 8162 |
8158 #endif // V8_TARGET_ARCH_MIPS | 8163 #endif // V8_TARGET_ARCH_MIPS |