OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3079 matching lines...)
3090 Register heap_number_map, | 3090 Register heap_number_map, |
3091 Register scratch, | 3091 Register scratch, |
3092 Label* on_not_heap_number) { | 3092 Label* on_not_heap_number) { |
3093 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 3093 ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); |
3094 AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 3094 AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
3095 cmp(scratch, heap_number_map); | 3095 cmp(scratch, heap_number_map); |
3096 b(ne, on_not_heap_number); | 3096 b(ne, on_not_heap_number); |
3097 } | 3097 } |
3098 | 3098 |
3099 | 3099 |
| 3100 void MacroAssembler::LookupNumberStringCache(Register object, |
| 3101 Register result, |
| 3102 Register scratch1, |
| 3103 Register scratch2, |
| 3104 Register scratch3, |
| 3105 Label* not_found) { |
| 3106 // Use of registers. Register result is used as a temporary. |
| 3107 Register number_string_cache = result; |
| 3108 Register mask = scratch3; |
| 3109 |
| 3110 // Load the number string cache. |
| 3111 LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); |
| 3112 |
| 3113 // Make the hash mask from the length of the number string cache. It |
| 3114 // contains two elements (number and string) for each cache entry. |
| 3115 ldr(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset)); |
| 3116 // Divide length by two (length is a smi). |
| 3117 mov(mask, Operand(mask, ASR, kSmiTagSize + 1)); |
| 3118 sub(mask, mask, Operand(1)); // Make mask. |
| 3119 |
| 3120 // Calculate the entry in the number string cache. The hash value in the |
| 3121 // number string cache for smis is just the smi value, and the hash for |
| 3122 // doubles is the xor of the upper and lower words. See |
| 3123 // Heap::GetNumberStringCache. |
| 3124 Label is_smi; |
| 3125 Label load_result_from_cache; |
| 3126 JumpIfSmi(object, &is_smi); |
| 3127 CheckMap(object, |
| 3128 scratch1, |
| 3129 Heap::kHeapNumberMapRootIndex, |
| 3130 not_found, |
| 3131 DONT_DO_SMI_CHECK); |
| 3132 |
| 3133 STATIC_ASSERT(8 == kDoubleSize); |
| 3134 add(scratch1, |
| 3135 object, |
| 3136 Operand(HeapNumber::kValueOffset - kHeapObjectTag)); |
| 3137 ldm(ia, scratch1, scratch1.bit() | scratch2.bit()); |
| 3138 eor(scratch1, scratch1, Operand(scratch2)); |
| 3139 and_(scratch1, scratch1, Operand(mask)); |
| 3140 |
| 3141 // Calculate address of entry in string cache: each entry consists |
| 3142 // of two pointer sized fields. |
| 3143 add(scratch1, |
| 3144 number_string_cache, |
| 3145 Operand(scratch1, LSL, kPointerSizeLog2 + 1)); |
| 3146 |
| 3147 Register probe = mask; |
| 3148 ldr(probe, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); |
| 3149 JumpIfSmi(probe, not_found); |
| 3150 sub(scratch2, object, Operand(kHeapObjectTag)); |
| 3151 vldr(d0, scratch2, HeapNumber::kValueOffset); |
| 3152 sub(probe, probe, Operand(kHeapObjectTag)); |
| 3153 vldr(d1, probe, HeapNumber::kValueOffset); |
| 3154 VFPCompareAndSetFlags(d0, d1); |
| 3155 b(ne, not_found); // The cache did not contain this value. |
| 3156 b(&load_result_from_cache); |
| 3157 |
| 3158 bind(&is_smi); |
| 3159 Register scratch = scratch1; |
| 3160 and_(scratch, mask, Operand(object, ASR, 1)); |
| 3161 // Calculate address of entry in string cache: each entry consists |
| 3162 // of two pointer sized fields. |
| 3163 add(scratch, |
| 3164 number_string_cache, |
| 3165 Operand(scratch, LSL, kPointerSizeLog2 + 1)); |
| 3166 |
| 3167 // Check if the entry is the smi we are looking for. |
| 3168 ldr(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize)); |
| 3169 cmp(object, probe); |
| 3170 b(ne, not_found); |
| 3171 |
| 3172 // Get the result from the cache. |
| 3173 bind(&load_result_from_cache); |
| 3174 ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize)); |
| 3175 IncrementCounter(isolate()->counters()->number_to_string_native(), |
| 3176 1, |
| 3177 scratch1, |
| 3178 scratch2); |
| 3179 } |
| 3180 |
| 3181 |
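For readers unfamiliar with the number string cache, here is a minimal standalone C++ sketch of the lookup the new LookupNumberStringCache code emits. The types and names below (Entry, NumberStringCacheLookup) are illustrative assumptions; the real cache is a FixedArray of tagged (number, string) pairs, and the comparisons are done on tagged smis and VFP registers rather than plain doubles.

    #include <cstdint>
    #include <cstring>
    #include <cstddef>

    struct Entry { double number; const char* string; };  // illustrative pair layout

    const char* NumberStringCacheLookup(const Entry* cache, size_t array_length,
                                        double value, bool is_smi) {
      // The backing array holds two fields per entry, so halve its length to get
      // the number of entries; the entry count is assumed to be a power of two,
      // which makes (entries - 1) a valid hash mask.
      size_t mask = (array_length / 2) - 1;

      size_t index;
      if (is_smi) {
        // For smis the hash is simply the (untagged) integer value.
        index = static_cast<uint32_t>(static_cast<int32_t>(value)) & mask;
      } else {
        // For heap numbers the hash is the xor of the upper and lower 32-bit
        // words of the double's bit pattern.
        uint64_t bits;
        std::memcpy(&bits, &value, sizeof(bits));
        index = (static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32)) & mask;
      }

      // Probe the single entry the hash selects; a hit requires the cached
      // number to compare equal to the value being looked up.
      const Entry& probe = cache[index];
      if (probe.number == value) return probe.string;
      return nullptr;  // miss: the caller takes the not_found path
    }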
3100 void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings( | 3182 void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings( |
3101 Register first, | 3183 Register first, |
3102 Register second, | 3184 Register second, |
3103 Register scratch1, | 3185 Register scratch1, |
3104 Register scratch2, | 3186 Register scratch2, |
3105 Label* failure) { | 3187 Label* failure) { |
3106 // Test that both first and second are sequential ASCII strings. | 3188 // Test that both first and second are sequential ASCII strings. |
3107 // Assume that they are non-smis. | 3189 // Assume that they are non-smis. |
3108 ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset)); | 3190 ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset)); |
3109 ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset)); | 3191 ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset)); |
(...skipping 792 matching lines...)
3902 void CodePatcher::EmitCondition(Condition cond) { | 3984 void CodePatcher::EmitCondition(Condition cond) { |
3903 Instr instr = Assembler::instr_at(masm_.pc_); | 3985 Instr instr = Assembler::instr_at(masm_.pc_); |
3904 instr = (instr & ~kCondMask) | cond; | 3986 instr = (instr & ~kCondMask) | cond; |
3905 masm_.emit(instr); | 3987 masm_.emit(instr); |
3906 } | 3988 } |
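For context, EmitCondition rewrites only the condition field of the instruction at the current patch position. A small illustrative sketch, assuming the standard ARM encoding where the condition code occupies the top four bits (kCondMask below is an assumed stand-in for V8's constant):

    #include <cstdint>

    typedef uint32_t Instr;
    const Instr kCondMask = 0xF0000000u;  // bits 28-31 hold the condition code

    // Clear the old condition bits, then install the new condition, leaving the
    // rest of the instruction encoding untouched.
    Instr PatchCondition(Instr instr, Instr cond) {
      return (instr & ~kCondMask) | cond;
    }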
3907 | 3989 |
3908 | 3990 |
3909 } } // namespace v8::internal | 3991 } } // namespace v8::internal |
3910 | 3992 |
3911 #endif // V8_TARGET_ARCH_ARM | 3993 #endif // V8_TARGET_ARCH_ARM |