OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/ic-inl.h" | 9 #include "src/ic-inl.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 555 matching lines...)
566 Register scratch1, | 566 Register scratch1, |
567 Register scratch2, | 567 Register scratch2, |
568 Label* miss_label) { | 568 Label* miss_label) { |
569 // r0 : value | 569 // r0 : value |
570 Label exit; | 570 Label exit; |
571 | 571 |
572 // Stub never generated for non-global objects that require access | 572 // Stub never generated for non-global objects that require access |
573 // checks. | 573 // checks. |
574 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); | 574 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); |
575 | 575 |
576 FieldIndex index = lookup->GetFieldIndex(); | 576 int index = lookup->GetFieldIndex().field_index(); |
| 577 |
| 578 // Adjust for the number of properties stored in the object. Even in the |
| 579 // face of a transition we can use the old map here because the size of the |
| 580 // object and the number of in-object properties is not going to change. |
| 581 index -= object->map()->inobject_properties(); |
577 | 582 |
578 Representation representation = lookup->representation(); | 583 Representation representation = lookup->representation(); |
579 ASSERT(!representation.IsNone()); | 584 ASSERT(!representation.IsNone()); |
580 if (representation.IsSmi()) { | 585 if (representation.IsSmi()) { |
581 __ JumpIfNotSmi(value_reg, miss_label); | 586 __ JumpIfNotSmi(value_reg, miss_label); |
582 } else if (representation.IsHeapObject()) { | 587 } else if (representation.IsHeapObject()) { |
583 __ JumpIfSmi(value_reg, miss_label); | 588 __ JumpIfSmi(value_reg, miss_label); |
584 HeapType* field_type = lookup->GetFieldType(); | 589 HeapType* field_type = lookup->GetFieldType(); |
585 HeapType::Iterator<Map> it = field_type->Classes(); | 590 HeapType::Iterator<Map> it = field_type->Classes(); |
586 if (!it.Done()) { | 591 if (!it.Done()) { |
587 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); | 592 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
588 Label do_store; | 593 Label do_store; |
589 while (true) { | 594 while (true) { |
590 __ CompareMap(scratch1, it.Current(), &do_store); | 595 __ CompareMap(scratch1, it.Current(), &do_store); |
591 it.Advance(); | 596 it.Advance(); |
592 if (it.Done()) { | 597 if (it.Done()) { |
593 __ b(ne, miss_label); | 598 __ b(ne, miss_label); |
594 break; | 599 break; |
595 } | 600 } |
596 __ b(eq, &do_store); | 601 __ b(eq, &do_store); |
597 } | 602 } |
598 __ bind(&do_store); | 603 __ bind(&do_store); |
599 } | 604 } |
600 } else if (representation.IsDouble()) { | 605 } else if (representation.IsDouble()) { |
601 // Load the double storage. | 606 // Load the double storage. |
602 if (index.is_inobject()) { | 607 if (index < 0) { |
603 __ ldr(scratch1, FieldMemOperand(receiver_reg, index.offset())); | 608 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 609 __ ldr(scratch1, FieldMemOperand(receiver_reg, offset)); |
604 } else { | 610 } else { |
605 __ ldr(scratch1, | 611 __ ldr(scratch1, |
606 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | 612 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
607 __ ldr(scratch1, FieldMemOperand(scratch1, index.offset())); | 613 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 614 __ ldr(scratch1, FieldMemOperand(scratch1, offset)); |
608 } | 615 } |
609 | 616 |
610 // Store the value into the storage. | 617 // Store the value into the storage. |
611 Label do_store, heap_number; | 618 Label do_store, heap_number; |
612 __ JumpIfNotSmi(value_reg, &heap_number); | 619 __ JumpIfNotSmi(value_reg, &heap_number); |
613 __ SmiUntag(scratch2, value_reg); | 620 __ SmiUntag(scratch2, value_reg); |
614 __ vmov(s0, scratch2); | 621 __ vmov(s0, scratch2); |
615 __ vcvt_f64_s32(d0, s0); | 622 __ vcvt_f64_s32(d0, s0); |
616 __ jmp(&do_store); | 623 __ jmp(&do_store); |
617 | 624 |
618 __ bind(&heap_number); | 625 __ bind(&heap_number); |
619 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, | 626 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, |
620 miss_label, DONT_DO_SMI_CHECK); | 627 miss_label, DONT_DO_SMI_CHECK); |
621 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | 628 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); |
622 | 629 |
623 __ bind(&do_store); | 630 __ bind(&do_store); |
624 __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); | 631 __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); |
625 // Return the value (register r0). | 632 // Return the value (register r0). |
626 ASSERT(value_reg.is(r0)); | 633 ASSERT(value_reg.is(r0)); |
627 __ Ret(); | 634 __ Ret(); |
628 return; | 635 return; |
629 } | 636 } |
630 | 637 |
631 // TODO(verwaest): Share this code as a code stub. | 638 // TODO(verwaest): Share this code as a code stub. |
632 SmiCheck smi_check = representation.IsTagged() | 639 SmiCheck smi_check = representation.IsTagged() |
633 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 640 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
634 if (index.is_inobject()) { | 641 if (index < 0) { |
635 // Set the property straight into the object. | 642 // Set the property straight into the object. |
636 __ str(value_reg, FieldMemOperand(receiver_reg, index.offset())); | 643 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 644 __ str(value_reg, FieldMemOperand(receiver_reg, offset)); |
637 | 645 |
638 if (!representation.IsSmi()) { | 646 if (!representation.IsSmi()) { |
639 // Skip updating write barrier if storing a smi. | 647 // Skip updating write barrier if storing a smi. |
640 __ JumpIfSmi(value_reg, &exit); | 648 __ JumpIfSmi(value_reg, &exit); |
641 | 649 |
642 // Update the write barrier for the array address. | 650 // Update the write barrier for the array address. |
643 // Pass the now unused name_reg as a scratch register. | 651 // Pass the now unused name_reg as a scratch register. |
644 __ mov(name_reg, value_reg); | 652 __ mov(name_reg, value_reg); |
645 __ RecordWriteField(receiver_reg, | 653 __ RecordWriteField(receiver_reg, |
646 index.offset(), | 654 offset, |
647 name_reg, | 655 name_reg, |
648 scratch1, | 656 scratch1, |
649 kLRHasNotBeenSaved, | 657 kLRHasNotBeenSaved, |
650 kDontSaveFPRegs, | 658 kDontSaveFPRegs, |
651 EMIT_REMEMBERED_SET, | 659 EMIT_REMEMBERED_SET, |
652 smi_check); | 660 smi_check); |
653 } | 661 } |
654 } else { | 662 } else { |
655 // Write to the properties array. | 663 // Write to the properties array. |
| 664 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
656 // Get the properties array | 665 // Get the properties array |
657 __ ldr(scratch1, | 666 __ ldr(scratch1, |
658 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | 667 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
659 __ str(value_reg, FieldMemOperand(scratch1, index.offset())); | 668 __ str(value_reg, FieldMemOperand(scratch1, offset)); |
660 | 669 |
661 if (!representation.IsSmi()) { | 670 if (!representation.IsSmi()) { |
662 // Skip updating write barrier if storing a smi. | 671 // Skip updating write barrier if storing a smi. |
663 __ JumpIfSmi(value_reg, &exit); | 672 __ JumpIfSmi(value_reg, &exit); |
664 | 673 |
665 // Update the write barrier for the array address. | 674 // Update the write barrier for the array address. |
666 // Ok to clobber receiver_reg and name_reg, since we return. | 675 // Ok to clobber receiver_reg and name_reg, since we return. |
667 __ mov(name_reg, value_reg); | 676 __ mov(name_reg, value_reg); |
668 __ RecordWriteField(scratch1, | 677 __ RecordWriteField(scratch1, |
669 index.offset(), | 678 offset, |
670 name_reg, | 679 name_reg, |
671 receiver_reg, | 680 receiver_reg, |
672 kLRHasNotBeenSaved, | 681 kLRHasNotBeenSaved, |
673 kDontSaveFPRegs, | 682 kDontSaveFPRegs, |
674 EMIT_REMEMBERED_SET, | 683 EMIT_REMEMBERED_SET, |
675 smi_check); | 684 smi_check); |
676 } | 685 } |
677 } | 686 } |
678 | 687 |
679 // Return the value (register r0). | 688 // Return the value (register r0). |
(...skipping 310 matching lines...)
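A note on the store hunk above: the new (right-hand) side reverts from a FieldIndex object back to a raw integer index that is biased by the number of in-object properties, so in-object fields end up with a negative index and out-of-object fields with a non-negative one. A minimal C++ sketch of that offset arithmetic, with illustrative constants for 32-bit ARM (this is not the V8 API, just the arithmetic the hunk re-introduces):

    // Sketch only: mirrors the offset logic in the hunk above.
    // adjusted_index = field_index - inobject_properties, so in-object
    // fields come out negative.
    int FieldByteOffset(int adjusted_index, int instance_size) {
      const int kPointerSize = 4;           // 32-bit ARM pointer width
      const int kFixedArrayHeaderSize = 8;  // map + length words (illustrative)
      if (adjusted_index < 0) {
        // In-object slot, counted back from the end of the object.
        return instance_size + adjusted_index * kPointerSize;
      }
      // Slot in the out-of-object properties FixedArray, past its header.
      return adjusted_index * kPointerSize + kFixedArrayHeaderSize;
    }

The same offset feeds both the ldr/str of the field and RecordWriteField, which is why the reverted code recomputes it separately in the in-object and properties-array branches.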
990 __ b(ne, &miss); | 999 __ b(ne, &miss); |
991 } | 1000 } |
992 | 1001 |
993 HandlerFrontendFooter(name, &miss); | 1002 HandlerFrontendFooter(name, &miss); |
994 return reg; | 1003 return reg; |
995 } | 1004 } |
996 | 1005 |
997 | 1006 |
998 void LoadStubCompiler::GenerateLoadField(Register reg, | 1007 void LoadStubCompiler::GenerateLoadField(Register reg, |
999 Handle<JSObject> holder, | 1008 Handle<JSObject> holder, |
1000 FieldIndex field, | 1009 PropertyIndex field, |
1001 Representation representation) { | 1010 Representation representation) { |
1002 if (!reg.is(receiver())) __ mov(receiver(), reg); | 1011 if (!reg.is(receiver())) __ mov(receiver(), reg); |
1003 if (kind() == Code::LOAD_IC) { | 1012 if (kind() == Code::LOAD_IC) { |
1004 LoadFieldStub stub(isolate(), field); | 1013 LoadFieldStub stub(isolate(), |
| 1014 field.is_inobject(holder), |
| 1015 field.translate(holder), |
| 1016 representation); |
1005 GenerateTailCall(masm(), stub.GetCode()); | 1017 GenerateTailCall(masm(), stub.GetCode()); |
1006 } else { | 1018 } else { |
1007 KeyedLoadFieldStub stub(isolate(), field); | 1019 KeyedLoadFieldStub stub(isolate(), |
| 1020 field.is_inobject(holder), |
| 1021 field.translate(holder), |
| 1022 representation); |
1008 GenerateTailCall(masm(), stub.GetCode()); | 1023 GenerateTailCall(masm(), stub.GetCode()); |
1009 } | 1024 } |
1010 } | 1025 } |
1011 | 1026 |
1012 | 1027 |
1013 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) { | 1028 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) { |
1014 // Return the constant value. | 1029 // Return the constant value. |
1015 __ Move(r0, value); | 1030 __ Move(r0, value); |
1016 __ Ret(); | 1031 __ Ret(); |
1017 } | 1032 } |
(...skipping 489 matching lines...)
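On the GenerateLoadField hunk above: the reverted right-hand side hands the field to LoadFieldStub/KeyedLoadFieldStub as a separate is_inobject flag plus a translated slot index (field.is_inobject(holder), field.translate(holder)) rather than a single FieldIndex. Assuming those PropertyIndex accessors behave as their names suggest, the relationship to the biased index used in the store path could look roughly like the following hypothetical helper (names and layout are assumptions, not V8 code):

    // Illustrative only: how a biased index (negative = in-object, as in the
    // store hunk earlier) maps onto an (is_inobject, slot) pair.
    struct FieldSlot {
      bool is_inobject;  // true if the field lives inside the JSObject itself
      int slot;          // word index within the chosen storage area
    };

    FieldSlot SplitBiasedIndex(int biased_index, int inobject_properties) {
      if (biased_index < 0) {
        // In-object: convert back to an index from the start of the
        // in-object area.
        return { true, biased_index + inobject_properties };
      }
      // Out-of-object: index into the properties FixedArray.
      return { false, biased_index };
    }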
1507 // ----------------------------------- | 1522 // ----------------------------------- |
1508 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 1523 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
1509 } | 1524 } |
1510 | 1525 |
1511 | 1526 |
1512 #undef __ | 1527 #undef __ |
1513 | 1528 |
1514 } } // namespace v8::internal | 1529 } } // namespace v8::internal |
1515 | 1530 |
1516 #endif // V8_TARGET_ARCH_ARM | 1531 #endif // V8_TARGET_ARCH_ARM |