Index: src/x64/macro-assembler-x64.cc
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 2fb0f74b0b5bbd833089c4b659425473012a05ba..96fa4fc49630424093eaa29874c118e95c34a115 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -2264,6 +2264,97 @@ void MacroAssembler::Test(const Operand& src, Smi* source) {
 // ----------------------------------------------------------------------------
+void MacroAssembler::LookupNumberStringCache(Register object,
+                                             Register result,
+                                             Register scratch1,
+                                             Register scratch2,
+                                             Label* not_found) {
+  // Register usage: result is used as a temporary.
+  Register number_string_cache = result;
+  Register mask = scratch1;
+  Register scratch = scratch2;
+
+  // Load the number string cache.
+  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
+
+  // Make the hash mask from the length of the number string cache. It
+  // contains two elements (number and string) for each cache entry.
+  SmiToInteger32(
+      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
+  shrl(mask, Immediate(1));
+  subq(mask, Immediate(1));  // Make mask.
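+  // E.g. a cache of length 128 holds 64 {number, string} entries,
+  // so the mask is 63.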
+
+  // Calculate the entry in the number string cache. The hash value in the
+  // number string cache for smis is just the smi value, and the hash for
+  // doubles is the xor of the upper and lower words. See
+  // Heap::GetNumberStringCache.
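+  // E.g. the smi 7 hashes to 7, and 1.5 (bit pattern
+  // 0x3FF8000000000000) hashes to 0x3FF80000 ^ 0x00000000.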
+  Label is_smi;
+  Label load_result_from_cache;
+  JumpIfSmi(object, &is_smi);
+  CheckMap(object,
+           isolate()->factory()->heap_number_map(),
+           not_found,
+           DONT_DO_SMI_CHECK);
+
+  STATIC_ASSERT(8 == kDoubleSize);
+  movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
+  xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
+  and_(scratch, mask);
+  // Each entry in the string cache consists of two pointer-sized fields,
+  // but the times_twice_pointer_size scale factor (multiplication by 16)
+  // is not supported by the x64 addressing modes, so the entry index has
+  // to be premultiplied before the lookup.
+  shl(scratch, Immediate(kPointerSizeLog2 + 1));
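+  // E.g. entry index 3 becomes byte offset 3 << 4 == 48.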
+
+  Register index = scratch;
+  Register probe = mask;
+  movq(probe,
+       FieldOperand(number_string_cache,
+                    index,
+                    times_1,
+                    FixedArray::kHeaderSize));
+  JumpIfSmi(probe, not_found);
+  movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
+  ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
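+  // ucomisd leaves the parity flag set for an unordered result, i.e.
+  // when either operand is NaN.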
+  j(parity_even, not_found);  // Bail out if NaN is involved.
+  j(not_equal, not_found);    // The cache did not contain this value.
+  jmp(&load_result_from_cache);
+
+  bind(&is_smi);
+  SmiToInteger32(scratch, object);
+  and_(scratch, mask);
+  // Each entry in the string cache consists of two pointer-sized fields,
+  // but the times_twice_pointer_size scale factor (multiplication by 16)
+  // is not supported by the x64 addressing modes, so the entry index has
+  // to be premultiplied before the lookup.
+  shl(scratch, Immediate(kPointerSizeLog2 + 1));
+
+  // Check if the entry is the smi we are looking for.
+  cmpq(object,
+       FieldOperand(number_string_cache,
+                    index,
+                    times_1,
+                    FixedArray::kHeaderSize));
+  j(not_equal, not_found);
+
+  // Get the result from the cache.
+  bind(&load_result_from_cache);
+  movq(result,
+       FieldOperand(number_string_cache,
+                    index,
+                    times_1,
+                    FixedArray::kHeaderSize + kPointerSize));
+  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
+}
+
+
 void MacroAssembler::JumpIfNotString(Register object,
                                      Register object_map,
                                      Label* not_string,
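
For reference, the lookup above is easier to follow in plain C++. The sketch
below mirrors the same structure under stated assumptions: the class name
NumberStringCache, its Entry layout, and the Lookup/Insert/HashDouble names
are inventions for this illustration, not V8 API; the entry count is a power
of two, which the mask computation in the assembly relies on; and the smi
path is collapsed away, since for a small integer the hash is simply the
value itself.

// number_string_cache_sketch.cc -- a hedged, stand-alone illustration only.
#include <cstdint>
#include <cstring>
#include <iostream>
#include <string>
#include <vector>

class NumberStringCache {
 public:
  // The entry count must be a power of two so that "hash & mask" selects
  // a slot, mirroring the shrl/subq mask computation in the assembly.
  explicit NumberStringCache(size_t entries) : entries_(entries) {}

  // Doubles hash to the xor of the upper and lower 32-bit halves of their
  // bit pattern (the movl/xor_ pair in the assembly).
  static uint32_t HashDouble(double d) {
    uint64_t bits;
    std::memcpy(&bits, &d, sizeof bits);
    return static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
  }

  // Returns the cached string, or nullptr for the "not_found" path.
  const std::string* Lookup(double number) const {
    uint32_t mask = static_cast<uint32_t>(entries_.size()) - 1;
    const Entry& e = entries_[HashDouble(number) & mask];
    // "e.key != number" is also true when either side is NaN, which is
    // the case the j(parity_even, not_found) branch handles above.
    if (!e.used || e.key != number) return nullptr;
    return &e.value;
  }

  void Insert(double number, std::string value) {
    uint32_t mask = static_cast<uint32_t>(entries_.size()) - 1;
    entries_[HashDouble(number) & mask] = Entry{number, std::move(value), true};
  }

 private:
  struct Entry {
    double key = 0;
    std::string value;
    bool used = false;
  };
  std::vector<Entry> entries_;
};

int main() {
  NumberStringCache cache(64);  // 64 entries -> mask 63
  cache.Insert(1.5, "1.5");
  const std::string* hit = cache.Lookup(1.5);
  std::cout << (hit ? *hit : "not found") << "\n";  // prints "1.5"
}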