OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4150 matching lines...)
4161 __ mov(r0, r4); | 4161 __ mov(r0, r4); |
4162 __ Ret(); | 4162 __ Ret(); |
4163 | 4163 |
4164 __ bind(&miss_force_generic); | 4164 __ bind(&miss_force_generic); |
4165 Code* stub = masm->isolate()->builtins()->builtin( | 4165 Code* stub = masm->isolate()->builtins()->builtin( |
4166 Builtins::kKeyedLoadIC_MissForceGeneric); | 4166 Builtins::kKeyedLoadIC_MissForceGeneric); |
4167 __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET); | 4167 __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET); |
4168 } | 4168 } |
4169 | 4169 |
4170 | 4170 |
| 4171 void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement( |
| 4172 MacroAssembler* masm) { |
| 4173 // ----------- S t a t e ------------- |
| 4174 // -- lr : return address |
| 4175 // -- r0 : key |
| 4176 // -- r1 : receiver |
| 4177 // ----------------------------------- |
| 4178 Label miss_force_generic, slow_allocate_heapnumber; |
| 4179 |
| 4180 Register key_reg = r0; |
| 4181 Register receiver_reg = r1; |
| 4182 Register elements_reg = r2; |
| 4183 Register heap_number_reg = r2; |
| 4184 Register indexed_double_offset = r3; |
| 4185 Register scratch = r4; |
| 4186 Register scratch2 = r5; |
| 4187 Register scratch3 = r6; |
| 4188 Register heap_number_map = r7; |
| 4189 |
| 4190 // This stub is meant to be tail-jumped to; the receiver must already |
| 4191 // have been verified by the caller not to be a smi. |
| 4192 |
| 4193 // Check that the key is a smi. |
| 4194 __ JumpIfNotSmi(key_reg, &miss_force_generic); |
| 4195 |
| 4196 // Get the elements array. |
| 4197 __ ldr(elements_reg, |
| 4198 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
| 4199 |
| 4200 // Check that the key is within bounds. |
| 4201 __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); |
| 4202 __ cmp(key_reg, Operand(scratch)); |
| 4203 __ b(hs, &miss_force_generic); |
| 4204 |
| 4205 // Load the upper word of the double in the fixed array and test for the hole. |
| 4206 __ add(indexed_double_offset, elements_reg, |
| 4207 Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize)); |
| 4208 uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32); |
| 4209 __ ldr(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset)); |
| 4210 __ cmp(scratch, Operand(kHoleNanUpper32)); |
| 4211 __ b(eq, &miss_force_generic); |
| 4212 |
| 4213 // Not the hole. Allocate a new heap number and copy the double value into it. |
| 4214 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
| 4215 __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3, |
| 4216 heap_number_map, &slow_allocate_heapnumber); |
| 4217 |
| 4218 // No need to reload the upper 32 bits of the double; they are already in |
| 4219 // scratch. |
| 4220 __ str(scratch, FieldMemOperand(heap_number_reg, |
| 4221 HeapNumber::kExponentOffset)); |
| 4222 __ ldr(scratch, FieldMemOperand(indexed_double_offset, |
| 4223 FixedArray::kHeaderSize)); |
| 4224 __ str(scratch, FieldMemOperand(heap_number_reg, |
| 4225 HeapNumber::kMantissaOffset)); |
| 4226 |
| 4227 __ mov(r0, heap_number_reg); |
| 4228 __ Ret(); |
| 4229 |
| 4230 __ bind(&slow_allocate_heapnumber); |
| 4231 Handle<Code> slow_ic = |
| 4232 masm->isolate()->builtins()->KeyedLoadIC_Slow(); |
| 4233 __ Jump(slow_ic, RelocInfo::CODE_TARGET); |
| 4234 |
| 4235 __ bind(&miss_force_generic); |
| 4236 Handle<Code> miss_ic = |
| 4237 masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric(); |
| 4238 __ Jump(miss_ic, RelocInfo::CODE_TARGET); |
| 4239 } |
| 4240 |
| 4241 |
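Note on the hole check in GenerateLoadFastDoubleElement above: a FixedDoubleArray marks a missing element with a dedicated NaN bit pattern, so the load stub only needs to compare the upper 32 bits of the element against kHoleNanUpper32; the store stub further down rewrites every other NaN to a canonical non-hole NaN, so no ordinary value can collide with the marker. Below is a minimal C++ sketch of that encoding; the constant value is an illustrative assumption, not V8's actual kHoleNanUpper32 definition.

// Sketch only: illustrates the hole-NaN encoding used by the stub above, not
// V8's actual implementation. The constant value is a hypothetical stand-in.
#include <cstdint>
#include <cstring>

namespace sketch {

// Assumed bit pattern for the hole. Stores never write this pattern, because
// they canonicalize every other NaN (see GenerateStoreFastDoubleElement).
const uint32_t kHoleUpper32 = 0x7ff7ffff;  // assumed, not V8's value

struct RawDouble {
  uint32_t lower;  // mantissa word (stored first, as in the stub's layout)
  uint32_t upper;  // sign/exponent/high-mantissa word (stored at offset +4)
};

// Mirrors the load stub: only the upper word is inspected to decide whether
// an element is the hole, because stores canonicalize all other NaNs.
inline bool IsHole(const RawDouble& element) {
  return element.upper == kHoleUpper32;
}

// Mirrors the fast path: a non-hole element is boxed by copying its two
// 32-bit words into a freshly allocated heap number (here, just a double).
inline double LoadElement(const RawDouble& element) {
  uint64_t bits = (static_cast<uint64_t>(element.upper) << 32) | element.lower;
  double result;
  std::memcpy(&result, &bits, sizeof(result));
  return result;
}

}  // namespace sketch

Keeping the hole test to a single upper-word comparison is what lets the generated fast path stay a short add/ldr/cmp/b sequence.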
4171 void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm, | 4242 void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm, |
4172 bool is_js_array) { | 4243 bool is_js_array) { |
4173 // ----------- S t a t e ------------- | 4244 // ----------- S t a t e ------------- |
4174 // -- r0 : value | 4245 // -- r0 : value |
4175 // -- r1 : key | 4246 // -- r1 : key |
4176 // -- r2 : receiver | 4247 // -- r2 : receiver |
4177 // -- lr : return address | 4248 // -- lr : return address |
4178 // -- r3 : scratch | 4249 // -- r3 : scratch |
4179 // -- r4 : scratch (elements) | 4250 // -- r4 : scratch (elements) |
4180 // ----------------------------------- | 4251 // ----------------------------------- |
(...skipping 43 matching lines...)
4224 // Done. | 4295 // Done. |
4225 __ Ret(); | 4296 __ Ret(); |
4226 | 4297 |
4227 __ bind(&miss_force_generic); | 4298 __ bind(&miss_force_generic); |
4228 Handle<Code> ic = | 4299 Handle<Code> ic = |
4229 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); | 4300 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); |
4230 __ Jump(ic, RelocInfo::CODE_TARGET); | 4301 __ Jump(ic, RelocInfo::CODE_TARGET); |
4231 } | 4302 } |
4232 | 4303 |
4233 | 4304 |
| 4305 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement( |
| 4306 MacroAssembler* masm, |
| 4307 bool is_js_array) { |
| 4308 // ----------- S t a t e ------------- |
| 4309 // -- r0 : value |
| 4310 // -- r1 : key |
| 4311 // -- r2 : receiver |
| 4312 // -- lr : return address |
| 4313 // -- r3 : scratch |
| 4314 // -- r4 : scratch |
| 4315 // -- r5 : scratch |
| 4316 // ----------------------------------- |
| 4317 Label miss_force_generic, smi_value, is_nan, maybe_nan, have_double_value; |
| 4318 |
| 4319 Register value_reg = r0; |
| 4320 Register key_reg = r1; |
| 4321 Register receiver_reg = r2; |
| 4322 Register scratch = r3; |
| 4323 Register elements_reg = r4; |
| 4324 Register mantissa_reg = r5; |
| 4325 Register exponent_reg = r6; |
| 4326 Register scratch4 = r7; |
| 4327 |
| 4328 // This stub is meant to be tail-jumped to; the receiver must already |
| 4329 // have been verified by the caller not to be a smi. |
| 4330 __ JumpIfNotSmi(key_reg, &miss_force_generic); |
| 4331 |
| 4332 __ ldr(elements_reg, |
| 4333 FieldMemOperand(receiver_reg, JSObject::kElementsOffset)); |
| 4334 |
| 4335 // Check that the key is within bounds. |
| 4336 if (is_js_array) { |
| 4337 __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset)); |
| 4338 } else { |
| 4339 __ ldr(scratch, |
| 4340 FieldMemOperand(elements_reg, FixedArray::kLengthOffset)); |
| 4341 } |
| 4342 // Compare smis; the unsigned compare catches both negative and |
| 4343 // out-of-bounds indexes. |
| 4344 __ cmp(key_reg, scratch); |
| 4345 __ b(hs, &miss_force_generic); |
| 4346 |
| 4347 // Handle smi values specially. |
| 4348 __ JumpIfSmi(value_reg, &smi_value); |
| 4349 |
| 4350 // Ensure that the object is a heap number |
| 4351 __ CheckMap(value_reg, |
| 4352 scratch, |
| 4353 masm->isolate()->factory()->heap_number_map(), |
| 4354 &miss_force_generic, |
| 4355 DONT_DO_SMI_CHECK); |
| 4356 |
| 4357 // Check for NaN: NaN and Infinity values have an exponent word greater than |
| 4358 // or equal to (signed) 0x7ff00000; Infinity is separated out in maybe_nan. |
| 4359 __ mov(scratch, Operand(kNaNOrInfinityLowerBoundUpper32)); |
| 4360 __ ldr(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset)); |
| 4361 __ cmp(exponent_reg, scratch); |
| 4362 __ b(ge, &maybe_nan); |
| 4363 |
| 4364 __ ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset)); |
| 4365 |
| 4366 __ bind(&have_double_value); |
| 4367 __ add(scratch, elements_reg, |
| 4368 Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize)); |
| 4369 __ str(mantissa_reg, FieldMemOperand(scratch, FixedDoubleArray::kHeaderSize)); |
| 4370 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32); |
| 4371 __ str(exponent_reg, FieldMemOperand(scratch, offset)); |
| 4372 __ Ret(); |
| 4373 |
| 4374 __ bind(&maybe_nan); |
| 4375 // Could be NaN or Infinity. If the fraction is non-zero, it's a NaN; |
| 4376 // otherwise it's Infinity, and the non-NaN code path applies. |
| 4377 __ b(gt, &is_nan); |
| 4378 __ ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset)); |
| 4379 __ cmp(mantissa_reg, Operand(0)); |
| 4380 __ b(eq, &have_double_value); |
| 4381 __ bind(&is_nan); |
| 4382 // Load canonical NaN for storing into the double array. |
| 4383 __ mov(mantissa_reg, Operand(kCanonicalNonHoleNanLower32)); |
| 4384 __ mov(exponent_reg, Operand(kCanonicalNonHoleNanUpper32)); |
| 4385 __ jmp(&have_double_value); |
| 4386 |
| 4387 __ bind(&smi_value); |
| 4388 __ add(scratch, elements_reg, |
| 4389 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag)); |
| 4390 __ add(scratch, scratch, |
| 4391 Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize)); |
| 4392 // scratch now holds the effective address of the double element. |
| 4393 |
| 4394 FloatingPointHelper::Destination destination; |
| 4395 if (CpuFeatures::IsSupported(VFP3)) { |
| 4396 destination = FloatingPointHelper::kVFPRegisters; |
| 4397 } else { |
| 4398 destination = FloatingPointHelper::kCoreRegisters; |
| 4399 } |
| 4400 __ SmiUntag(value_reg, value_reg); |
| 4401 FloatingPointHelper::ConvertIntToDouble( |
| 4402 masm, value_reg, destination, |
| 4403 d0, mantissa_reg, exponent_reg, // These are: double_dst, dst1, dst2. |
| 4404 scratch4, s2); // These are: scratch2, single_scratch. |
| 4405 if (destination == FloatingPointHelper::kVFPRegisters) { |
| 4406 CpuFeatures::Scope scope(VFP3); |
| 4407 __ vstr(d0, scratch, 0); |
| 4408 } else { |
| 4409 __ str(mantissa_reg, MemOperand(scratch, 0)); |
| 4410 __ str(exponent_reg, MemOperand(scratch, Register::kSizeInBytes)); |
| 4411 } |
| 4412 __ Ret(); |
| 4413 |
| 4414 // Handle store cache miss, replacing the ic with the generic stub. |
| 4415 __ bind(&miss_force_generic); |
| 4416 Handle<Code> ic = |
| 4417 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); |
| 4418 __ Jump(ic, RelocInfo::CODE_TARGET); |
| 4419 } |
| 4420 |
| 4421 |
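The store stub above takes three paths: smis are untagged and converted to a double (through VFP3 when available, core registers otherwise); heap numbers are copied into the array word by word; and NaNs caught by the exponent-word test are first replaced with the canonical non-hole NaN, so a stored value can never alias the hole marker the load stub checks for. Below is a hedged C++ sketch of that classification, using std::isnan in place of the signed exponent-word comparison in the generated code; the constants are assumed placeholders, not V8's kCanonicalNonHoleNanUpper32/Lower32 values.

// Sketch of the value-classification logic in GenerateStoreFastDoubleElement,
// written as portable C++ rather than generated ARM code. Constant values are
// assumed placeholders, not V8's actual kCanonicalNonHoleNan* definitions.
#include <cmath>
#include <cstdint>
#include <cstring>

namespace sketch {

const uint32_t kCanonicalNanUpper32 = 0x7ff80000;  // assumed, not V8's value
const uint32_t kCanonicalNanLower32 = 0x00000000;  // assumed, not V8's value

struct RawDouble {
  uint32_t lower;  // stored at the array's header offset in the stub
  uint32_t upper;  // stored 4 bytes later
};

// Mirrors the heap-number path (have_double_value / maybe_nan / is_nan):
// copy the two words verbatim unless the value is a NaN, in which case the
// canonical non-hole NaN is stored so the hole pattern stays unique.
inline RawDouble EncodeHeapNumber(double value) {
  if (std::isnan(value)) {
    return RawDouble{kCanonicalNanLower32, kCanonicalNanUpper32};
  }
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  return RawDouble{static_cast<uint32_t>(bits),
                   static_cast<uint32_t>(bits >> 32)};
}

// Mirrors the smi_value path: an untagged smi is simply widened to a double,
// which is what ConvertIntToDouble does with either VFP or core registers.
inline RawDouble EncodeSmi(int32_t value) {
  return EncodeHeapNumber(static_cast<double>(value));
}

}  // namespace sketch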
4234 #undef __ | 4422 #undef __ |
4235 | 4423 |
4236 } } // namespace v8::internal | 4424 } } // namespace v8::internal |
4237 | 4425 |
4238 #endif // V8_TARGET_ARCH_ARM | 4426 #endif // V8_TARGET_ARCH_ARM |