| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 | 5 |
| 6 #include "src/v8.h" | 6 #include "src/v8.h" |
| 7 | 7 |
| 8 #if V8_TARGET_ARCH_MIPS | 8 #if V8_TARGET_ARCH_MIPS |
| 9 | 9 |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 312 matching lines...) | |
| 323 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 323 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { |
| 324 // The return address is in ra. | 324 // The return address is in ra. |
| 325 | 325 |
| 326 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); | 326 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); |
| 327 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); | 327 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); |
| 328 | 328 |
| 329 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 329 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); |
| 330 } | 330 } |
| 331 | 331 |
| 332 | 332 |
| 333 static MemOperand GenerateMappedArgumentsLookup( | |
| 334 MacroAssembler* masm, Register object, Register key, Register scratch1, | |
| 335 Register scratch2, Register scratch3, Label* unmapped_case, | |
| 336 Label* slow_case) { | |
| 337 Heap* heap = masm->isolate()->heap(); | |
| 338 | |
| 339 // Check that the receiver is a JSObject. Because of the map check | |
| 340 // later, we do not need to check for interceptors or whether it | |
| 341 // requires access checks. | |
| 342 __ JumpIfSmi(object, slow_case); | |
| 343 // Check that the object is some kind of JSObject. | |
| 344 __ GetObjectType(object, scratch1, scratch2); | |
| 345 __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE)); | |
| 346 | |
| 347 // Check that the key is a positive smi. | |
| 348 __ And(scratch1, key, Operand(0x80000001)); | |
| 349 __ Branch(slow_case, ne, scratch1, Operand(zero_reg)); | |
| 350 | |
| 351 // Load the elements into scratch1 and check its map. | |
| 352 Handle<Map> arguments_map(heap->sloppy_arguments_elements_map()); | |
| 353 __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset)); | |
| 354 __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK); | |
| 355 // Check if element is in the range of mapped arguments. If not, jump | |
| 356 // to the unmapped lookup with the parameter map in scratch1. | |
| 357 __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); | |
| 358 __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2))); | |
| 359 __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2)); | |
| 360 | |
| 361 // Load element index and check whether it is the hole. | |
| 362 const int kOffset = | |
| 363 FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag; | |
| 364 | |
| 365 __ li(scratch3, Operand(kPointerSize >> 1)); | |
| 366 __ Mul(scratch3, key, scratch3); | |
| 367 __ Addu(scratch3, scratch3, Operand(kOffset)); | |
| 368 | |
| 369 __ Addu(scratch2, scratch1, scratch3); | |
| 370 __ lw(scratch2, MemOperand(scratch2)); | |
| 371 __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex); | |
| 372 __ Branch(unmapped_case, eq, scratch2, Operand(scratch3)); | |
| 373 | |
| 374 // Load value from context and return it. We can reuse scratch1 because | |
| 375 // we do not jump to the unmapped lookup (which requires the parameter | |
| 376 // map in scratch1). | |
| 377 __ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); | |
| 378 __ li(scratch3, Operand(kPointerSize >> 1)); | |
| 379 __ Mul(scratch3, scratch2, scratch3); | |
| 380 __ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag)); | |
| 381 __ Addu(scratch2, scratch1, scratch3); | |
| 382 return MemOperand(scratch2); | |
| 383 } | |
| 384 | |
| 385 | |
| 386 static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm, | |
| 387 Register key, | |
| 388 Register parameter_map, | |
| 389 Register scratch, | |
| 390 Label* slow_case) { | |
| 391 // Element is in arguments backing store, which is referenced by the | |
| 392 // second element of the parameter_map. The parameter_map register | |
| 393 // must be loaded with the parameter map of the arguments object and is | |
| 394 // overwritten. | |
| 395 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; | |
| 396 Register backing_store = parameter_map; | |
| 397 __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset)); | |
| 398 __ CheckMap(backing_store, scratch, Heap::kFixedArrayMapRootIndex, slow_case, | |
| 399 DONT_DO_SMI_CHECK); | |
| 400 __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset)); | |
| 401 __ Branch(slow_case, Ugreater_equal, key, Operand(scratch)); | |
| 402 __ li(scratch, Operand(kPointerSize >> 1)); | |
| 403 __ Mul(scratch, key, scratch); | |
| 404 __ Addu(scratch, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 405 __ Addu(scratch, backing_store, scratch); | |
| 406 return MemOperand(scratch); | |
| 407 } | |
| 408 | |
| 409 | |
| 410 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) { | |
| 411 Register receiver = StoreDescriptor::ReceiverRegister(); | |
| 412 Register key = StoreDescriptor::NameRegister(); | |
| 413 Register value = StoreDescriptor::ValueRegister(); | |
| 414 DCHECK(value.is(a0)); | |
| 415 | |
| 416 Label slow, notin; | |
| 417 // Store address is returned in register (of MemOperand) mapped_location. | |
| 418 MemOperand mapped_location = GenerateMappedArgumentsLookup( | |
| 419 masm, receiver, key, a3, t0, t1, &notin, &slow); | |
| 420 __ sw(value, mapped_location); | |
| 421 __ mov(t5, value); | |
| 422 DCHECK_EQ(mapped_location.offset(), 0); | |
| 423 __ RecordWrite(a3, mapped_location.rm(), t5, kRAHasNotBeenSaved, | |
| 424 kDontSaveFPRegs); | |
| 425 __ Ret(USE_DELAY_SLOT); | |
| 426 __ mov(v0, value); // (In delay slot) return the value stored in v0. | |
| 427 __ bind(&notin); | |
| 428 // The unmapped lookup expects that the parameter map is in a3. | |
| 429 // Store address is returned in register (of MemOperand) unmapped_location. | |
| 430 MemOperand unmapped_location = | |
| 431 GenerateUnmappedArgumentsLookup(masm, key, a3, t0, &slow); | |
| 432 __ sw(value, unmapped_location); | |
| 433 __ mov(t5, value); | |
| 434 DCHECK_EQ(unmapped_location.offset(), 0); | |
| 435 __ RecordWrite(a3, unmapped_location.rm(), t5, kRAHasNotBeenSaved, | |
| 436 kDontSaveFPRegs); | |
| 437 __ Ret(USE_DELAY_SLOT); | |
| 438 __ mov(v0, a0); // (In delay slot) return the value stored in v0. | |
| 439 __ bind(&slow); | |
| 440 GenerateMiss(masm); | |
| 441 } | |
| 442 | |
| 443 | |
| 444 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 333 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
| 445 // The return address is in ra. | 334 // The return address is in ra. |
| 446 Isolate* isolate = masm->isolate(); | 335 Isolate* isolate = masm->isolate(); |
| 447 | 336 |
| 448 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::SlotRegister(), | 337 DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::SlotRegister(), |
| 449 LoadWithVectorDescriptor::VectorRegister())); | 338 LoadWithVectorDescriptor::VectorRegister())); |
| 450 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, t0, t1); | 339 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, t0, t1); |
| 451 | 340 |
| 452 LoadIC_PushArgs(masm); | 341 LoadIC_PushArgs(masm); |
| 453 | 342 |
| (...skipping 524 matching lines...) | |
| 978 patcher.ChangeBranchCondition(ne); | 867 patcher.ChangeBranchCondition(ne); |
| 979 } else { | 868 } else { |
| 980 DCHECK(Assembler::IsBne(branch_instr)); | 869 DCHECK(Assembler::IsBne(branch_instr)); |
| 981 patcher.ChangeBranchCondition(eq); | 870 patcher.ChangeBranchCondition(eq); |
| 982 } | 871 } |
| 983 } | 872 } |
| 984 } | 873 } |
| 985 } // namespace v8::internal | 874 } // namespace v8::internal |
| 986 | 875 |
| 987 #endif // V8_TARGET_ARCH_MIPS | 876 #endif // V8_TARGET_ARCH_MIPS |
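The helpers deleted in this diff walk V8's sloppy-arguments "parameter map": element 0 of that FixedArray holds the context, element 1 the arguments backing store, and elements 2..n hold either a context slot index or the hole for unmapped parameters (this layout is what the `kOffset`, `kBackingStoreOffset`, and `length - 2` arithmetic above encodes). Below is a minimal, self-contained C++ sketch of that lookup logic, assuming hypothetical names (`SloppyArgumentsModel`, `Lookup`, `kHole`) that stand in for the real V8 types; it models what `GenerateMappedArgumentsLookup` / `GenerateUnmappedArgumentsLookup` emit as MIPS assembly, and is not the actual MacroAssembler or Heap API.

```cpp
// Hypothetical model (not V8 code) of the sloppy-arguments lookup that the
// deleted helpers implement in generated assembly.
#include <cstddef>
#include <cstdint>
#include <optional>
#include <vector>

struct SloppyArgumentsModel {
  static constexpr intptr_t kHole = -1;   // stands in for the_hole sentinel
  std::vector<intptr_t> context_slots;    // models the Context object
  std::vector<intptr_t> backing_store;    // models the arguments FixedArray
  std::vector<intptr_t> mapped;           // elements 2..n of the parameter map

  // Mirrors the mapped/unmapped split: a key inside the mapped range whose
  // entry is not the hole resolves to a context slot; otherwise it falls
  // through to the backing store, and an out-of-range key is a miss
  // (the slow_case path in the deleted code).
  std::optional<intptr_t*> Lookup(size_t key) {
    if (key < mapped.size() && mapped[key] != kHole) {
      return &context_slots[static_cast<size_t>(mapped[key])];
    }
    if (key < backing_store.size()) {
      return &backing_store[key];
    }
    return std::nullopt;
  }
};
```

The returned pointer plays the role of the `MemOperand` the original helpers hand back, which `KeyedStoreIC::GenerateSloppyArguments` then stores through before recording the write barrier.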