| OLD | NEW | 
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include "src/v8.h" | 5 #include "src/v8.h" | 
| 6 | 6 | 
| 7 #if V8_TARGET_ARCH_PPC | 7 #if V8_TARGET_ARCH_PPC | 
| 8 | 8 | 
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" | 
| 10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" | 
| (...skipping 315 matching lines...) |  | 
| 326 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 326 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 
| 327   // The return address is in lr. | 327   // The return address is in lr. | 
| 328 | 328 | 
| 329   __ mr(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); | 329   __ mr(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); | 
| 330   __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); | 330   __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); | 
| 331 | 331 | 
| 332   __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 332   __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 
| 333 } | 333 } | 
| 334 | 334 | 
| 335 | 335 | 
| 336 static MemOperand GenerateMappedArgumentsLookup( |  | 
| 337     MacroAssembler* masm, Register object, Register key, Register scratch1, |  | 
| 338     Register scratch2, Register scratch3, Label* unmapped_case, |  | 
| 339     Label* slow_case) { |  | 
| 340   Heap* heap = masm->isolate()->heap(); |  | 
| 341 |  | 
| 342   // Check that the receiver is a JSObject. Because of the map check |  | 
| 343   // later, we do not need to check for interceptors or whether it |  | 
| 344   // requires access checks. |  | 
| 345   __ JumpIfSmi(object, slow_case); |  | 
| 346   // Check that the object is some kind of JSObject. |  | 
| 347   __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE); |  | 
| 348   __ blt(slow_case); |  | 
| 349 |  | 
| 350   // Check that the key is a positive smi. |  | 
| 351   __ mov(scratch1, Operand(0x80000001)); |  | 
| 352   __ and_(r0, key, scratch1, SetRC); |  | 
| 353   __ bne(slow_case, cr0); |  | 
| 354 |  | 
| 355   // Load the elements into scratch1 and check its map. |  | 
| 356   Handle<Map> arguments_map(heap->sloppy_arguments_elements_map()); |  | 
| 357   __ LoadP(scratch1, FieldMemOperand(object, JSObject::kElementsOffset)); |  | 
| 358   __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK); |  | 
| 359 |  | 
| 360   // Check if element is in the range of mapped arguments. If not, jump |  | 
| 361   // to the unmapped lookup with the parameter map in scratch1. |  | 
| 362   __ LoadP(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset)); |  | 
| 363   __ SubSmiLiteral(scratch2, scratch2, Smi::FromInt(2), r0); |  | 
| 364   __ cmpl(key, scratch2); |  | 
| 365   __ bge(unmapped_case); |  | 
| 366 |  | 
| 367   // Load element index and check whether it is the hole. |  | 
| 368   const int kOffset = |  | 
| 369       FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag; |  | 
| 370 |  | 
| 371   __ SmiToPtrArrayOffset(scratch3, key); |  | 
| 372   __ addi(scratch3, scratch3, Operand(kOffset)); |  | 
| 373 |  | 
| 374   __ LoadPX(scratch2, MemOperand(scratch1, scratch3)); |  | 
| 375   __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex); |  | 
| 376   __ cmp(scratch2, scratch3); |  | 
| 377   __ beq(unmapped_case); |  | 
| 378 |  | 
| 379   // Load value from context and return it. We can reuse scratch1 because |  | 
| 380   // we do not jump to the unmapped lookup (which requires the parameter |  | 
| 381   // map in scratch1). |  | 
| 382   __ LoadP(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); |  | 
| 383   __ SmiToPtrArrayOffset(scratch3, scratch2); |  | 
| 384   __ addi(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag)); |  | 
| 385   return MemOperand(scratch1, scratch3); |  | 
| 386 } |  | 
| 387 |  | 
| 388 |  | 
| 389 static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm, |  | 
| 390                                                   Register key, |  | 
| 391                                                   Register parameter_map, |  | 
| 392                                                   Register scratch, |  | 
| 393                                                   Label* slow_case) { |  | 
| 394   // Element is in arguments backing store, which is referenced by the |  | 
| 395   // second element of the parameter_map. The parameter_map register |  | 
| 396   // must be loaded with the parameter map of the arguments object and is |  | 
| 397   // overwritten. |  | 
| 398   const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize; |  | 
| 399   Register backing_store = parameter_map; |  | 
| 400   __ LoadP(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset)); |  | 
| 401   Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); |  | 
| 402   __ CheckMap(backing_store, scratch, fixed_array_map, slow_case, |  | 
| 403               DONT_DO_SMI_CHECK); |  | 
| 404   __ LoadP(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset)); |  | 
| 405   __ cmpl(key, scratch); |  | 
| 406   __ bge(slow_case); |  | 
| 407   __ SmiToPtrArrayOffset(scratch, key); |  | 
| 408   __ addi(scratch, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |  | 
| 409   return MemOperand(backing_store, scratch); |  | 
| 410 } |  | 
| 411 |  | 
| 412 |  | 
| 413 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) { |  | 
| 414   Register receiver = StoreDescriptor::ReceiverRegister(); |  | 
| 415   Register key = StoreDescriptor::NameRegister(); |  | 
| 416   Register value = StoreDescriptor::ValueRegister(); |  | 
| 417   DCHECK(receiver.is(r4)); |  | 
| 418   DCHECK(key.is(r5)); |  | 
| 419   DCHECK(value.is(r3)); |  | 
| 420 |  | 
| 421   Label slow, notin; |  | 
| 422   MemOperand mapped_location = GenerateMappedArgumentsLookup( |  | 
| 423       masm, receiver, key, r6, r7, r8, &notin, &slow); |  | 
| 424   Register mapped_base = mapped_location.ra(); |  | 
| 425   Register mapped_offset = mapped_location.rb(); |  | 
| 426   __ StorePX(value, mapped_location); |  | 
| 427   __ add(r9, mapped_base, mapped_offset); |  | 
| 428   __ mr(r11, value); |  | 
| 429   __ RecordWrite(mapped_base, r9, r11, kLRHasNotBeenSaved, kDontSaveFPRegs); |  | 
| 430   __ Ret(); |  | 
| 431   __ bind(&notin); |  | 
| 432   // The unmapped lookup expects that the parameter map is in r6. |  | 
| 433   MemOperand unmapped_location = |  | 
| 434       GenerateUnmappedArgumentsLookup(masm, key, r6, r7, &slow); |  | 
| 435   Register unmapped_base = unmapped_location.ra(); |  | 
| 436   Register unmapped_offset = unmapped_location.rb(); |  | 
| 437   __ StorePX(value, unmapped_location); |  | 
| 438   __ add(r9, unmapped_base, unmapped_offset); |  | 
| 439   __ mr(r11, value); |  | 
| 440   __ RecordWrite(unmapped_base, r9, r11, kLRHasNotBeenSaved, kDontSaveFPRegs); |  | 
| 441   __ Ret(); |  | 
| 442   __ bind(&slow); |  | 
| 443   GenerateMiss(masm); |  | 
| 444 } |  | 
| 445 |  | 
| 446 |  | 
| 447 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 336 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 
| 448   // The return address is in lr. | 337   // The return address is in lr. | 
| 449   Isolate* isolate = masm->isolate(); | 338   Isolate* isolate = masm->isolate(); | 
| 450 | 339 | 
| 451   DCHECK(!AreAliased(r7, r8, LoadWithVectorDescriptor::SlotRegister(), | 340   DCHECK(!AreAliased(r7, r8, LoadWithVectorDescriptor::SlotRegister(), | 
| 452                      LoadWithVectorDescriptor::VectorRegister())); | 341                      LoadWithVectorDescriptor::VectorRegister())); | 
| 453   __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r7, r8); | 342   __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r7, r8); | 
| 454 | 343 | 
| 455   LoadIC_PushArgs(masm); | 344   LoadIC_PushArgs(masm); | 
| 456 | 345 | 
| (...skipping 542 matching lines...) |  | 
| 999     patcher.EmitCondition(ne); | 888     patcher.EmitCondition(ne); | 
| 1000   } else { | 889   } else { | 
| 1001     DCHECK(Assembler::GetCondition(branch_instr) == ne); | 890     DCHECK(Assembler::GetCondition(branch_instr) == ne); | 
| 1002     patcher.EmitCondition(eq); | 891     patcher.EmitCondition(eq); | 
| 1003   } | 892   } | 
| 1004 } | 893 } | 
| 1005 } | 894 } | 
| 1006 }  // namespace v8::internal | 895 }  // namespace v8::internal | 
| 1007 | 896 | 
| 1008 #endif  // V8_TARGET_ARCH_PPC | 897 #endif  // V8_TARGET_ARCH_PPC | 
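
The handlers removed above (`GenerateMappedArgumentsLookup`, `GenerateUnmappedArgumentsLookup`, and `KeyedStoreIC::GenerateSloppyArguments`) emit PPC assembly for keyed stores into sloppy `arguments` objects: the elements array is a parameter map whose first two slots hold the context and the arguments backing store, followed by per-index entries that are either the hole (element lives in the backing store) or a context slot index (element is aliased to a context slot). As a rough illustration only — the struct, field names, and helper below are a hand-written model, not V8's actual data structures or API — the lookup the stub performs in assembly corresponds to something like:

```cpp
// Illustrative model only; not V8's SloppyArgumentsElements layout or API.
#include <cstddef>
#include <optional>
#include <vector>

struct SloppyArgumentsElements {
  // In the real parameter map, slot 0 holds the context and slot 1 the
  // backing store; the remaining slots hold either the hole or a context
  // slot index. std::nullopt stands in for the hole here.
  std::vector<int>* context;                        // mapped values live here
  std::vector<int>* backing_store;                  // unmapped values live here
  std::vector<std::optional<size_t>> mapped_slots;  // per-index entries

  // Mapped path: key in range and entry not the hole -> alias a context slot.
  // Unmapped path: fall back to the backing store (slow case elided).
  int* Lookup(size_t key) {
    if (key < mapped_slots.size() && mapped_slots[key].has_value()) {
      return &(*context)[*mapped_slots[key]];
    }
    if (key < backing_store->size()) return &(*backing_store)[key];
    return nullptr;  // the stub would jump to the slow case / miss here
  }
};

int main() {
  std::vector<int> ctx = {0, 0, 42};   // pretend context slot 2 aliases arg #0
  std::vector<int> store = {7, 8, 9};  // unmapped arguments backing store
  SloppyArgumentsElements elems{&ctx, &store,
                                {std::optional<size_t>{2}, std::nullopt}};
  *elems.Lookup(0) = 11;  // mapped: writes through to the context slot
  *elems.Lookup(1) = 12;  // unmapped: writes into the backing store
  return 0;
}
```

On the store path the removed stub additionally issues a write barrier (`RecordWrite`) after each `StorePX`, since the stored value may be a heap pointer; the sketch above elides that and the slow-case jump to `GenerateMiss`.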