OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 7307 matching lines...)
7318 Label start; | 7318 Label start; |
7319 __ bind(&start); | 7319 __ bind(&start); |
7320 __ add(ip, pc, Operand(Assembler::kInstrSize)); | 7320 __ add(ip, pc, Operand(Assembler::kInstrSize)); |
7321 __ str(ip, MemOperand(sp, 0)); | 7321 __ str(ip, MemOperand(sp, 0)); |
7322 __ Jump(target); // Call the C++ function. | 7322 __ Jump(target); // Call the C++ function. |
7323 ASSERT_EQ(Assembler::kInstrSize + Assembler::kPcLoadDelta, | 7323 ASSERT_EQ(Assembler::kInstrSize + Assembler::kPcLoadDelta, |
7324 masm->SizeOfCodeGeneratedSince(&start)); | 7324 masm->SizeOfCodeGeneratedSince(&start)); |
7325 } | 7325 } |
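Editor's note: the snippet above builds a return address by hand. On ARM, reading pc yields the address of the current instruction plus 8 bytes (what V8 calls Assembler::kPcLoadDelta), so ip ends up pointing just past the three emitted instructions, which is exactly what the ASSERT_EQ checks. A minimal stand-alone sketch of that arithmetic, assuming the usual values kInstrSize = 4 and kPcLoadDelta = 8 and a made-up start address purely for illustration:

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kInstrSize = 4;    // every ARM instruction is 4 bytes
  const uint32_t kPcLoadDelta = 8;  // pc reads as "current instruction + 8"

  uint32_t start = 0x1000;                      // address of 'add ip, pc, #4'
  uint32_t pc_as_read = start + kPcLoadDelta;   // what the add instruction sees in pc
  uint32_t return_address = pc_as_read + kInstrSize;  // value stored at [sp]

  // Three instructions are emitted after 'start' (add, str, branch), so the
  // first byte after them is start + 3 * kInstrSize; the stored return
  // address lands exactly there, i.e. right after the jump.
  uint32_t end_of_generated_code = start + 3 * kInstrSize;
  assert(return_address == end_of_generated_code);
  assert(end_of_generated_code - start == kInstrSize + kPcLoadDelta);  // the ASSERT_EQ above
  return 0;
}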
7326 | 7326 |
7327 | 7327 |
7328 void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, | 7328 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, |
7329 Label* miss, | 7329 Label* miss, |
7330 Label* done, | 7330 Label* done, |
7331 Register receiver, | 7331 Register receiver, |
7332 Register properties, | 7332 Register properties, |
7333 Handle<String> name, | 7333 Handle<Name> name, |
7334 Register scratch0) { | 7334 Register scratch0) { |
| 7335 ASSERT(name->IsUniqueName()); |
7335 // If names of slots in range from 1 to kProbes - 1 for the hash value are | 7336 // If names of slots in range from 1 to kProbes - 1 for the hash value are |
7336 // not equal to the name and kProbes-th slot is not used (its name is the | 7337 // not equal to the name and kProbes-th slot is not used (its name is the |
7337 // undefined value), it guarantees the hash table doesn't contain the | 7338 // undefined value), it guarantees the hash table doesn't contain the |
7338 // property. It's true even if some slots represent deleted properties | 7339 // property. It's true even if some slots represent deleted properties |
7339 // (their names are the hole value). | 7340 // (their names are the hole value). |
7340 for (int i = 0; i < kInlinedProbes; i++) { | 7341 for (int i = 0; i < kInlinedProbes; i++) { |
7341 // scratch0 points to properties hash. | 7342 // scratch0 points to properties hash. |
7342 // Compute the masked index: (hash + i + i * i) & mask. | 7343 // Compute the masked index: (hash + i + i * i) & mask. |
7343 Register index = scratch0; | 7344 Register index = scratch0; |
7344 // Capacity is smi 2^n. | 7345 // Capacity is smi 2^n. |
7345 __ ldr(index, FieldMemOperand(properties, kCapacityOffset)); | 7346 __ ldr(index, FieldMemOperand(properties, kCapacityOffset)); |
7346 __ sub(index, index, Operand(1)); | 7347 __ sub(index, index, Operand(1)); |
7347 __ and_(index, index, Operand( | 7348 __ and_(index, index, Operand( |
7348 Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i)))); | 7349 Smi::FromInt(name->Hash() + NameDictionary::GetProbeOffset(i)))); |
7349 | 7350 |
7350 // Scale the index by multiplying by the entry size. | 7351 // Scale the index by multiplying by the entry size. |
7351 ASSERT(StringDictionary::kEntrySize == 3); | 7352 ASSERT(NameDictionary::kEntrySize == 3); |
7352 __ add(index, index, Operand(index, LSL, 1)); // index *= 3. | 7353 __ add(index, index, Operand(index, LSL, 1)); // index *= 3. |
7353 | 7354 |
7354 Register entity_name = scratch0; | 7355 Register entity_name = scratch0; |
7355 // Having undefined at this place means the name is not contained. | 7356 // Having undefined at this place means the name is not contained. |
7356 ASSERT_EQ(kSmiTagSize, 1); | 7357 ASSERT_EQ(kSmiTagSize, 1); |
7357 Register tmp = properties; | 7358 Register tmp = properties; |
7358 __ add(tmp, properties, Operand(index, LSL, 1)); | 7359 __ add(tmp, properties, Operand(index, LSL, 1)); |
7359 __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); | 7360 __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); |
7360 | 7361 |
7361 ASSERT(!tmp.is(entity_name)); | 7362 ASSERT(!tmp.is(entity_name)); |
7362 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); | 7363 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); |
7363 __ cmp(entity_name, tmp); | 7364 __ cmp(entity_name, tmp); |
7364 __ b(eq, done); | 7365 __ b(eq, done); |
7365 | 7366 |
7366 if (i != kInlinedProbes - 1) { | 7367 if (i != kInlinedProbes - 1) { |
7367 // Load the hole ready for use below: | 7368 // Load the hole ready for use below: |
7368 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); | 7369 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); |
7369 | 7370 |
7370 // Stop if found the property. | 7371 // Stop if found the property. |
7371 __ cmp(entity_name, Operand(Handle<String>(name))); | 7372 __ cmp(entity_name, Operand(Handle<Name>(name))); |
7372 __ b(eq, miss); | 7373 __ b(eq, miss); |
7373 | 7374 |
7374 Label the_hole; | 7375 Label good; |
7375 __ cmp(entity_name, tmp); | 7376 __ cmp(entity_name, tmp); |
7376 __ b(eq, &the_hole); | 7377 __ b(eq, &good); |
7377 | 7378 |
7378 // Check if the entry name is not an internalized string. | 7379 // Check if the entry name is not a unique name. |
7379 __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); | 7380 __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); |
7380 __ ldrb(entity_name, | 7381 __ ldrb(entity_name, |
7381 FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); | 7382 FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); |
7382 __ tst(entity_name, Operand(kIsInternalizedMask)); | 7383 __ tst(entity_name, Operand(kIsInternalizedMask)); |
7383 __ b(eq, miss); | 7384 __ b(ne, &good); |
| 7385 __ cmp(entity_name, Operand(SYMBOL_TYPE)); |
| 7386 __ b(ne, miss); |
7384 | 7387 |
7385 __ bind(&the_hole); | 7388 __ bind(&good); |
7386 | 7389 |
7387 // Restore the properties. | 7390 // Restore the properties. |
7388 __ ldr(properties, | 7391 __ ldr(properties, |
7389 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 7392 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
7390 } | 7393 } |
7391 } | 7394 } |
7392 | 7395 |
7393 const int spill_mask = | 7396 const int spill_mask = |
7394 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() | | 7397 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | r3.bit() | |
7395 r2.bit() | r1.bit() | r0.bit()); | 7398 r2.bit() | r1.bit() | r0.bit()); |
7396 | 7399 |
7397 __ stm(db_w, sp, spill_mask); | 7400 __ stm(db_w, sp, spill_mask); |
7398 __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 7401 __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
7399 __ mov(r1, Operand(Handle<String>(name))); | 7402 __ mov(r1, Operand(Handle<Name>(name))); |
7400 StringDictionaryLookupStub stub(NEGATIVE_LOOKUP); | 7403 NameDictionaryLookupStub stub(NEGATIVE_LOOKUP); |
7401 __ CallStub(&stub); | 7404 __ CallStub(&stub); |
7402 __ cmp(r0, Operand::Zero()); | 7405 __ cmp(r0, Operand::Zero()); |
7403 __ ldm(ia_w, sp, spill_mask); | 7406 __ ldm(ia_w, sp, spill_mask); |
7404 | 7407 |
7405 __ b(eq, done); | 7408 __ b(eq, done); |
7406 __ b(ne, miss); | 7409 __ b(ne, miss); |
7407 } | 7410 } |
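Editor's note: the comment at the top of GenerateNegativeLookup states the invariant the inlined probes rely on: a probed slot holding undefined proves the name is absent, while a slot holding the hole (a deleted entry) proves nothing and probing must continue. Below is a minimal stand-alone model of that probe sequence over a plain C++ table, following the "(hash + i + i * i) & mask" comment in the stub; the Slot/Entry types and NegativeLookup name are invented for illustration and are not V8 API.

#include <cstdint>
#include <string>
#include <vector>

// Each slot is never-used (undefined), deleted (the hole), or holds a key.
enum class Slot { kUndefined, kHole, kKey };

struct Entry {
  Slot state;
  std::string key;  // meaningful only when state == Slot::kKey
};

// Returns true when the table provably does NOT contain |name| after at most
// |probes| probes (mirroring kInlinedProbes); false means "found or inconclusive".
bool NegativeLookup(const std::vector<Entry>& table,
                    const std::string& name, uint32_t hash, int probes) {
  uint32_t mask = static_cast<uint32_t>(table.size()) - 1;  // capacity is 2^n
  for (int i = 0; i < probes; i++) {
    uint32_t index = (hash + i + i * i) & mask;  // quadratic probing, as in the comment
    const Entry& e = table[index];
    if (e.state == Slot::kUndefined) return true;              // free slot: name absent
    if (e.state == Slot::kKey && e.key == name) return false;  // found: negative lookup misses
    // A hole (deleted entry) proves nothing; keep probing.
  }
  return false;  // inconclusive; the stub falls through to NameDictionaryLookupStub
}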
7408 | 7411 |
7409 | 7412 |
7410 // Probe the string dictionary in the |elements| register. Jump to the | 7413 // Probe the name dictionary in the |elements| register. Jump to the |
7411 // |done| label if a property with the given name is found. Jump to | 7414 // |done| label if a property with the given name is found. Jump to |
7412 // the |miss| label otherwise. | 7415 // the |miss| label otherwise. |
7413 // If lookup was successful |scratch2| will be equal to elements + 4 * index. | 7416 // If lookup was successful |scratch2| will be equal to elements + 4 * index. |
7414 void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, | 7417 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, |
7415 Label* miss, | 7418 Label* miss, |
7416 Label* done, | 7419 Label* done, |
7417 Register elements, | 7420 Register elements, |
7418 Register name, | 7421 Register name, |
7419 Register scratch1, | 7422 Register scratch1, |
7420 Register scratch2) { | 7423 Register scratch2) { |
7421 ASSERT(!elements.is(scratch1)); | 7424 ASSERT(!elements.is(scratch1)); |
7422 ASSERT(!elements.is(scratch2)); | 7425 ASSERT(!elements.is(scratch2)); |
7423 ASSERT(!name.is(scratch1)); | 7426 ASSERT(!name.is(scratch1)); |
7424 ASSERT(!name.is(scratch2)); | 7427 ASSERT(!name.is(scratch2)); |
7425 | 7428 |
7426 __ AssertString(name); | 7429 __ AssertName(name); |
7427 | 7430 |
7428 // Compute the capacity mask. | 7431 // Compute the capacity mask. |
7429 __ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset)); | 7432 __ ldr(scratch1, FieldMemOperand(elements, kCapacityOffset)); |
7430 __ mov(scratch1, Operand(scratch1, ASR, kSmiTagSize)); // convert smi to int | 7433 __ mov(scratch1, Operand(scratch1, ASR, kSmiTagSize)); // convert smi to int |
7431 __ sub(scratch1, scratch1, Operand(1)); | 7434 __ sub(scratch1, scratch1, Operand(1)); |
7432 | 7435 |
7433 // Generate an unrolled loop that performs a few probes before | 7436 // Generate an unrolled loop that performs a few probes before |
7434 // giving up. Measurements done on Gmail indicate that 2 probes | 7437 // giving up. Measurements done on Gmail indicate that 2 probes |
7435 // cover ~93% of loads from dictionaries. | 7438 // cover ~93% of loads from dictionaries. |
7436 for (int i = 0; i < kInlinedProbes; i++) { | 7439 for (int i = 0; i < kInlinedProbes; i++) { |
7437 // Compute the masked index: (hash + i + i * i) & mask. | 7440 // Compute the masked index: (hash + i + i * i) & mask. |
7438 __ ldr(scratch2, FieldMemOperand(name, String::kHashFieldOffset)); | 7441 __ ldr(scratch2, FieldMemOperand(name, Name::kHashFieldOffset)); |
7439 if (i > 0) { | 7442 if (i > 0) { |
7440 // Add the probe offset (i + i * i) left shifted to avoid right shifting | 7443 // Add the probe offset (i + i * i) left shifted to avoid right shifting |
7441 // the hash in a separate instruction. The value hash + i + i * i is right | 7444 // the hash in a separate instruction. The value hash + i + i * i is right |
7442 // shifted in the following and instruction. | 7445 // shifted in the following and instruction. |
7443 ASSERT(StringDictionary::GetProbeOffset(i) < | 7446 ASSERT(NameDictionary::GetProbeOffset(i) < |
7444 1 << (32 - String::kHashFieldOffset)); | 7447 1 << (32 - Name::kHashFieldOffset)); |
7445 __ add(scratch2, scratch2, Operand( | 7448 __ add(scratch2, scratch2, Operand( |
7446 StringDictionary::GetProbeOffset(i) << String::kHashShift)); | 7449 NameDictionary::GetProbeOffset(i) << Name::kHashShift)); |
7447 } | 7450 } |
7448 __ and_(scratch2, scratch1, Operand(scratch2, LSR, String::kHashShift)); | 7451 __ and_(scratch2, scratch1, Operand(scratch2, LSR, Name::kHashShift)); |
7449 | 7452 |
7450 // Scale the index by multiplying by the element size. | 7453 // Scale the index by multiplying by the element size. |
7451 ASSERT(StringDictionary::kEntrySize == 3); | 7454 ASSERT(NameDictionary::kEntrySize == 3); |
7452 // scratch2 = scratch2 * 3. | 7455 // scratch2 = scratch2 * 3. |
7453 __ add(scratch2, scratch2, Operand(scratch2, LSL, 1)); | 7456 __ add(scratch2, scratch2, Operand(scratch2, LSL, 1)); |
7454 | 7457 |
7455 // Check if the key is identical to the name. | 7458 // Check if the key is identical to the name. |
7456 __ add(scratch2, elements, Operand(scratch2, LSL, 2)); | 7459 __ add(scratch2, elements, Operand(scratch2, LSL, 2)); |
7457 __ ldr(ip, FieldMemOperand(scratch2, kElementsStartOffset)); | 7460 __ ldr(ip, FieldMemOperand(scratch2, kElementsStartOffset)); |
7458 __ cmp(name, Operand(ip)); | 7461 __ cmp(name, Operand(ip)); |
7459 __ b(eq, done); | 7462 __ b(eq, done); |
7460 } | 7463 } |
7461 | 7464 |
7462 const int spill_mask = | 7465 const int spill_mask = |
7463 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | | 7466 (lr.bit() | r6.bit() | r5.bit() | r4.bit() | |
7464 r3.bit() | r2.bit() | r1.bit() | r0.bit()) & | 7467 r3.bit() | r2.bit() | r1.bit() | r0.bit()) & |
7465 ~(scratch1.bit() | scratch2.bit()); | 7468 ~(scratch1.bit() | scratch2.bit()); |
7466 | 7469 |
7467 __ stm(db_w, sp, spill_mask); | 7470 __ stm(db_w, sp, spill_mask); |
7468 if (name.is(r0)) { | 7471 if (name.is(r0)) { |
7469 ASSERT(!elements.is(r1)); | 7472 ASSERT(!elements.is(r1)); |
7470 __ Move(r1, name); | 7473 __ Move(r1, name); |
7471 __ Move(r0, elements); | 7474 __ Move(r0, elements); |
7472 } else { | 7475 } else { |
7473 __ Move(r0, elements); | 7476 __ Move(r0, elements); |
7474 __ Move(r1, name); | 7477 __ Move(r1, name); |
7475 } | 7478 } |
7476 StringDictionaryLookupStub stub(POSITIVE_LOOKUP); | 7479 NameDictionaryLookupStub stub(POSITIVE_LOOKUP); |
7477 __ CallStub(&stub); | 7480 __ CallStub(&stub); |
7478 __ cmp(r0, Operand::Zero()); | 7481 __ cmp(r0, Operand::Zero()); |
7479 __ mov(scratch2, Operand(r2)); | 7482 __ mov(scratch2, Operand(r2)); |
7480 __ ldm(ia_w, sp, spill_mask); | 7483 __ ldm(ia_w, sp, spill_mask); |
7481 | 7484 |
7482 __ b(ne, done); | 7485 __ b(ne, done); |
7483 __ b(eq, miss); | 7486 __ b(eq, miss); |
7484 } | 7487 } |
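Editor's note: in the positive-lookup probes above, the hash sits in the upper bits of the hash field, and the stub adds the probe offset pre-shifted left by the hash shift so that a single LSR folded into the AND produces the masked index; the entry offset is then obtained with one add (index + (index << 1)) because the entry size is 3. A small sketch of those two identities, using placeholder constants (the shift value and field contents here are illustrative, not the real Name constants):

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kHashShift = 2;       // stand-in for Name::kHashShift
  uint32_t hash_field = 0x12345678u;   // hash stored in the upper bits
  uint32_t mask = 0xFF;                // capacity - 1
  uint32_t probe_offset = 5;           // i + i * i for some probe i

  // Straightforward form: shift the hash down first, then add the offset.
  uint32_t a = ((hash_field >> kHashShift) + probe_offset) & mask;
  // Stub's form: pre-shift the offset so one LSR inside the AND suffices.
  uint32_t b = ((hash_field + (probe_offset << kHashShift)) >> kHashShift) & mask;
  assert(a == b);  // exact, provided probe_offset << kHashShift does not overflow

  // Entry size is 3, so "index *= 3" is emitted as index + (index << 1).
  assert(a * 3 == a + (a << 1));
  return 0;
}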
7485 | 7488 |
7486 | 7489 |
7487 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 7490 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { |
7488 // This stub overrides SometimesSetsUpAFrame() to return false. That means | 7491 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
7489 // we cannot call anything that could cause a GC from this stub. | 7492 // we cannot call anything that could cause a GC from this stub. |
7490 // Registers: | 7493 // Registers: |
7491 // result: StringDictionary to probe | 7494 // result: NameDictionary to probe |
7492 // r1: key | 7495 // r1: key |
7493 // : StringDictionary to probe. | 7496 // dictionary : NameDictionary to probe. |
7494 // index_: will hold an index of entry if lookup is successful. | 7497 // index: will hold an index of entry if lookup is successful. |
7495 // might alias with result_. | 7498 // might alias with result_. |
7496 // Returns: | 7499 // Returns: |
7497 // result_ is zero if lookup failed, non zero otherwise. | 7500 // result_ is zero if lookup failed, non zero otherwise. |
7498 | 7501 |
7499 Register result = r0; | 7502 Register result = r0; |
7500 Register dictionary = r0; | 7503 Register dictionary = r0; |
7501 Register key = r1; | 7504 Register key = r1; |
7502 Register index = r2; | 7505 Register index = r2; |
7503 Register mask = r3; | 7506 Register mask = r3; |
7504 Register hash = r4; | 7507 Register hash = r4; |
7505 Register undefined = r5; | 7508 Register undefined = r5; |
7506 Register entry_key = r6; | 7509 Register entry_key = r6; |
7507 | 7510 |
7508 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; | 7511 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; |
7509 | 7512 |
7510 __ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset)); | 7513 __ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset)); |
7511 __ mov(mask, Operand(mask, ASR, kSmiTagSize)); | 7514 __ mov(mask, Operand(mask, ASR, kSmiTagSize)); |
7512 __ sub(mask, mask, Operand(1)); | 7515 __ sub(mask, mask, Operand(1)); |
7513 | 7516 |
7514 __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset)); | 7517 __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset)); |
7515 | 7518 |
7516 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); | 7519 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); |
7517 | 7520 |
7518 for (int i = kInlinedProbes; i < kTotalProbes; i++) { | 7521 for (int i = kInlinedProbes; i < kTotalProbes; i++) { |
7519 // Compute the masked index: (hash + i + i * i) & mask. | 7522 // Compute the masked index: (hash + i + i * i) & mask. |
7520 // Capacity is smi 2^n. | 7523 // Capacity is smi 2^n. |
7521 if (i > 0) { | 7524 if (i > 0) { |
7522 // Add the probe offset (i + i * i) left shifted to avoid right shifting | 7525 // Add the probe offset (i + i * i) left shifted to avoid right shifting |
7523 // the hash in a separate instruction. The value hash + i + i * i is right | 7526 // the hash in a separate instruction. The value hash + i + i * i is right |
7524 // shifted in the following and instruction. | 7527 // shifted in the following and instruction. |
7525 ASSERT(StringDictionary::GetProbeOffset(i) < | 7528 ASSERT(NameDictionary::GetProbeOffset(i) < |
7526 1 << (32 - String::kHashFieldOffset)); | 7529 1 << (32 - Name::kHashFieldOffset)); |
7527 __ add(index, hash, Operand( | 7530 __ add(index, hash, Operand( |
7528 StringDictionary::GetProbeOffset(i) << String::kHashShift)); | 7531 NameDictionary::GetProbeOffset(i) << Name::kHashShift)); |
7529 } else { | 7532 } else { |
7530 __ mov(index, Operand(hash)); | 7533 __ mov(index, Operand(hash)); |
7531 } | 7534 } |
7532 __ and_(index, mask, Operand(index, LSR, String::kHashShift)); | 7535 __ and_(index, mask, Operand(index, LSR, Name::kHashShift)); |
7533 | 7536 |
7534 // Scale the index by multiplying by the entry size. | 7537 // Scale the index by multiplying by the entry size. |
7535 ASSERT(StringDictionary::kEntrySize == 3); | 7538 ASSERT(NameDictionary::kEntrySize == 3); |
7536 __ add(index, index, Operand(index, LSL, 1)); // index *= 3. | 7539 __ add(index, index, Operand(index, LSL, 1)); // index *= 3. |
7537 | 7540 |
7538 ASSERT_EQ(kSmiTagSize, 1); | 7541 ASSERT_EQ(kSmiTagSize, 1); |
7539 __ add(index, dictionary, Operand(index, LSL, 2)); | 7542 __ add(index, dictionary, Operand(index, LSL, 2)); |
7540 __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset)); | 7543 __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset)); |
7541 | 7544 |
7542 // Having undefined at this place means the name is not contained. | 7545 // Having undefined at this place means the name is not contained. |
7543 __ cmp(entry_key, Operand(undefined)); | 7546 __ cmp(entry_key, Operand(undefined)); |
7544 __ b(eq, ¬_in_dictionary); | 7547 __ b(eq, ¬_in_dictionary); |
7545 | 7548 |
7546 // Stop if found the property. | 7549 // Stop if found the property. |
7547 __ cmp(entry_key, Operand(key)); | 7550 __ cmp(entry_key, Operand(key)); |
7548 __ b(eq, &in_dictionary); | 7551 __ b(eq, &in_dictionary); |
7549 | 7552 |
7550 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { | 7553 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { |
7551 // Check if the entry name is not an internalized string. | 7554 // Check if the entry name is not a unique name. |
| 7555 Label cont; |
7552 __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); | 7556 __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); |
7553 __ ldrb(entry_key, | 7557 __ ldrb(entry_key, |
7554 FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); | 7558 FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); |
7555 __ tst(entry_key, Operand(kIsInternalizedMask)); | 7559 __ tst(entry_key, Operand(kIsInternalizedMask)); |
7556 __ b(eq, &maybe_in_dictionary); | 7560 __ b(ne, &cont); |
| 7561 __ cmp(entry_key, Operand(SYMBOL_TYPE)); |
| 7562 __ b(ne, &maybe_in_dictionary); |
| 7563 __ bind(&cont); |
7557 } | 7564 } |
7558 } | 7565 } |
7559 | 7566 |
7560 __ bind(&maybe_in_dictionary); | 7567 __ bind(&maybe_in_dictionary); |
7561 // If we are doing negative lookup then probing failure should be | 7568 // If we are doing negative lookup then probing failure should be |
7562 // treated as a lookup success. For positive lookup probing failure | 7569 // treated as a lookup success. For positive lookup probing failure |
7563 // should be treated as lookup failure. | 7570 // should be treated as lookup failure. |
7564 if (mode_ == POSITIVE_LOOKUP) { | 7571 if (mode_ == POSITIVE_LOOKUP) { |
7565 __ mov(result, Operand::Zero()); | 7572 __ mov(result, Operand::Zero()); |
7566 __ Ret(); | 7573 __ Ret(); |
(...skipping 433 matching lines...)
8000 | 8007 |
8001 __ Pop(lr, r5, r1); | 8008 __ Pop(lr, r5, r1); |
8002 __ Ret(); | 8009 __ Ret(); |
8003 } | 8010 } |
8004 | 8011 |
8005 #undef __ | 8012 #undef __ |
8006 | 8013 |
8007 } } // namespace v8::internal | 8014 } } // namespace v8::internal |
8008 | 8015 |
8009 #endif // V8_TARGET_ARCH_ARM | 8016 #endif // V8_TARGET_ARCH_ARM |
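Editor's note: the behavioral change in both probe loops is the bail-out test. The old code gave up (jumped to miss / maybe_in_dictionary) whenever a probed key was not an internalized string; the new code stays on the fast path for any unique name, i.e. an internalized string or a Symbol. A minimal sketch of the new predicate, with stand-in constants rather than the real V8 instance-type values:

#include <cstdint>

const uint32_t kIsInternalizedMask = 1u << 6;  // placeholder bit mask
const uint32_t SYMBOL_TYPE = 0x80;             // placeholder instance type

// Mirrors the emitted check: tst with the internalized bit, and if that
// fails, compare the instance type against SYMBOL_TYPE.
bool IsUniqueName(uint32_t instance_type) {
  if (instance_type & kIsInternalizedMask) return true;  // internalized string
  return instance_type == SYMBOL_TYPE;                   // symbol
}

int main() {
  // Internalized strings and symbols pass; an ordinary (non-unique) type does not.
  bool ok = IsUniqueName(kIsInternalizedMask) && IsUniqueName(SYMBOL_TYPE) &&
            !IsUniqueName(0x05);
  return ok ? 0 : 1;
}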