| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 22 matching lines...) Expand all Loading... |
| 33 // | 33 // |
| 34 | 34 |
| 35 #ifndef V8_OBJECTS_INL_H_ | 35 #ifndef V8_OBJECTS_INL_H_ |
| 36 #define V8_OBJECTS_INL_H_ | 36 #define V8_OBJECTS_INL_H_ |
| 37 | 37 |
| 38 #include "objects.h" | 38 #include "objects.h" |
| 39 #include "contexts.h" | 39 #include "contexts.h" |
| 40 #include "conversions-inl.h" | 40 #include "conversions-inl.h" |
| 41 #include "heap.h" | 41 #include "heap.h" |
| 42 #include "memory.h" | 42 #include "memory.h" |
| 43 #include "isolate.h" |
| 43 #include "property.h" | 44 #include "property.h" |
| 44 #include "spaces.h" | 45 #include "spaces.h" |
| 45 #include "store-buffer.h" | 46 #include "store-buffer.h" |
| 46 | 47 |
| 47 #include "incremental-marking.h" | 48 #include "incremental-marking.h" |
| 48 | 49 |
| 49 namespace v8 { | 50 namespace v8 { |
| 50 namespace internal { | 51 namespace internal { |
| 51 | 52 |
| 52 PropertyDetails::PropertyDetails(Smi* smi) { | 53 PropertyDetails::PropertyDetails(Smi* smi) { |
| (...skipping 21 matching lines...) Expand all Loading... |
| 74 | 75 |
| 75 #define INT_ACCESSORS(holder, name, offset) \ | 76 #define INT_ACCESSORS(holder, name, offset) \ |
| 76 int holder::name() { return READ_INT_FIELD(this, offset); } \ | 77 int holder::name() { return READ_INT_FIELD(this, offset); } \ |
| 77 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); } | 78 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); } |
| 78 | 79 |
| 79 | 80 |
| 80 #define ACCESSORS(holder, name, type, offset) \ | 81 #define ACCESSORS(holder, name, type, offset) \ |
| 81 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \ | 82 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \ |
| 82 void holder::set_##name(type* value, WriteBarrierMode mode) { \ | 83 void holder::set_##name(type* value, WriteBarrierMode mode) { \ |
| 83 WRITE_FIELD(this, offset, value); \ | 84 WRITE_FIELD(this, offset, value); \ |
| 84 WRITE_BARRIER(this, offset, value); \ | 85 WRITE_BARRIER(GetHeap(), this, offset, value); \ |
| 86 } |
| 87 |
| 88 |
| 89 // GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead. |
| 90 #define ACCESSORS_GCSAFE(holder, name, type, offset) \ |
| 91 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \ |
| 92 void holder::set_##name(type* value, WriteBarrierMode mode) { \ |
| 93 WRITE_FIELD(this, offset, value); \ |
| 94 WRITE_BARRIER(HEAP, this, offset, value); \ |
| 85 } | 95 } |
| 86 | 96 |
| 87 | 97 |
| 88 #define SMI_ACCESSORS(holder, name, offset) \ | 98 #define SMI_ACCESSORS(holder, name, offset) \ |
| 89 int holder::name() { \ | 99 int holder::name() { \ |
| 90 Object* value = READ_FIELD(this, offset); \ | 100 Object* value = READ_FIELD(this, offset); \ |
| 91 return Smi::cast(value)->value(); \ | 101 return Smi::cast(value)->value(); \ |
| 92 } \ | 102 } \ |
| 93 void holder::set_##name(int value) { \ | 103 void holder::set_##name(int value) { \ |
| 94 WRITE_FIELD(this, offset, Smi::FromInt(value)); \ | 104 WRITE_FIELD(this, offset, Smi::FromInt(value)); \ |
| (...skipping 339 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 434 && Failure::cast(this)->IsOutOfMemoryException(); | 444 && Failure::cast(this)->IsOutOfMemoryException(); |
| 435 } | 445 } |
| 436 | 446 |
| 437 | 447 |
| 438 bool MaybeObject::IsException() { | 448 bool MaybeObject::IsException() { |
| 439 return this == Failure::Exception(); | 449 return this == Failure::Exception(); |
| 440 } | 450 } |
| 441 | 451 |
| 442 | 452 |
| 443 bool MaybeObject::IsTheHole() { | 453 bool MaybeObject::IsTheHole() { |
| 444 return this == Heap::the_hole_value(); | 454 return !IsFailure() && ToObjectUnchecked()->IsTheHole(); |
| 445 } | 455 } |
| 446 | 456 |
| 447 | 457 |
| 448 Failure* Failure::cast(MaybeObject* obj) { | 458 Failure* Failure::cast(MaybeObject* obj) { |
| 449 ASSERT(HAS_FAILURE_TAG(obj)); | 459 ASSERT(HAS_FAILURE_TAG(obj)); |
| 450 return reinterpret_cast<Failure*>(obj); | 460 return reinterpret_cast<Failure*>(obj); |
| 451 } | 461 } |
| 452 | 462 |
| 453 | 463 |
| 454 bool Object::IsJSObject() { | 464 bool Object::IsJSObject() { |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 502 if (!IsFixedArray()) return false; | 512 if (!IsFixedArray()) return false; |
| 503 // There's actually no way to see the difference between a fixed array and | 513 // There's actually no way to see the difference between a fixed array and |
| 504 // a deoptimization data array. Since this is used for asserts we can check | 514 // a deoptimization data array. Since this is used for asserts we can check |
| 505 // that the length is plausible though. | 515 // that the length is plausible though. |
| 506 if (FixedArray::cast(this)->length() % 2 != 0) return false; | 516 if (FixedArray::cast(this)->length() % 2 != 0) return false; |
| 507 return true; | 517 return true; |
| 508 } | 518 } |
| 509 | 519 |
| 510 | 520 |
| 511 bool Object::IsContext() { | 521 bool Object::IsContext() { |
| 512 return Object::IsHeapObject() | 522 if (Object::IsHeapObject()) { |
| 513 && (HeapObject::cast(this)->map() == Heap::context_map() || | 523 Heap* heap = HeapObject::cast(this)->GetHeap(); |
| 514 HeapObject::cast(this)->map() == Heap::catch_context_map() || | 524 return (HeapObject::cast(this)->map() == heap->context_map() || |
| 515 HeapObject::cast(this)->map() == Heap::global_context_map()); | 525 HeapObject::cast(this)->map() == heap->catch_context_map() || |
| 526 HeapObject::cast(this)->map() == heap->global_context_map()); |
| 527 } |
| 528 return false; |
| 516 } | 529 } |
| 517 | 530 |
| 518 | 531 |
| 519 bool Object::IsCatchContext() { | 532 bool Object::IsCatchContext() { |
| 520 return Object::IsHeapObject() | 533 return Object::IsHeapObject() && |
| 521 && HeapObject::cast(this)->map() == Heap::catch_context_map(); | 534 HeapObject::cast(this)->map() == |
| 535 HeapObject::cast(this)->GetHeap()->catch_context_map(); |
| 522 } | 536 } |
| 523 | 537 |
| 524 | 538 |
| 525 bool Object::IsGlobalContext() { | 539 bool Object::IsGlobalContext() { |
| 526 return Object::IsHeapObject() | 540 return Object::IsHeapObject() && |
| 527 && HeapObject::cast(this)->map() == Heap::global_context_map(); | 541 HeapObject::cast(this)->map() == |
| 542 HeapObject::cast(this)->GetHeap()->global_context_map(); |
| 528 } | 543 } |
| 529 | 544 |
| 530 | 545 |
| 531 bool Object::IsJSFunction() { | 546 bool Object::IsJSFunction() { |
| 532 return Object::IsHeapObject() | 547 return Object::IsHeapObject() |
| 533 && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE; | 548 && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE; |
| 534 } | 549 } |
| 535 | 550 |
| 536 | 551 |
| 537 template <> inline bool Is<JSFunction>(Object* obj) { | 552 template <> inline bool Is<JSFunction>(Object* obj) { |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 583 } | 598 } |
| 584 | 599 |
| 585 | 600 |
| 586 bool Object::IsProxy() { | 601 bool Object::IsProxy() { |
| 587 return Object::IsHeapObject() | 602 return Object::IsHeapObject() |
| 588 && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE; | 603 && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE; |
| 589 } | 604 } |
| 590 | 605 |
| 591 | 606 |
| 592 bool Object::IsBoolean() { | 607 bool Object::IsBoolean() { |
| 593 return IsTrue() || IsFalse(); | 608 return IsOddball() && |
| 609 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0); |
| 594 } | 610 } |
| 595 | 611 |
| 596 | 612 |
| 597 bool Object::IsJSArray() { | 613 bool Object::IsJSArray() { |
| 598 return Object::IsHeapObject() | 614 return Object::IsHeapObject() |
| 599 && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE; | 615 && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE; |
| 600 } | 616 } |
| 601 | 617 |
| 602 | 618 |
| 603 bool Object::IsJSRegExp() { | 619 bool Object::IsJSRegExp() { |
| 604 return Object::IsHeapObject() | 620 return Object::IsHeapObject() |
| 605 && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE; | 621 && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE; |
| 606 } | 622 } |
| 607 | 623 |
| 608 | 624 |
| 609 template <> inline bool Is<JSArray>(Object* obj) { | 625 template <> inline bool Is<JSArray>(Object* obj) { |
| 610 return obj->IsJSArray(); | 626 return obj->IsJSArray(); |
| 611 } | 627 } |
| 612 | 628 |
| 613 | 629 |
| 614 bool Object::IsHashTable() { | 630 bool Object::IsHashTable() { |
| 615 return Object::IsHeapObject() | 631 return Object::IsHeapObject() && |
| 616 && HeapObject::cast(this)->map() == Heap::hash_table_map(); | 632 HeapObject::cast(this)->map() == |
| 633 HeapObject::cast(this)->GetHeap()->hash_table_map(); |
| 617 } | 634 } |
| 618 | 635 |
| 619 | 636 |
| 620 bool Object::IsDictionary() { | 637 bool Object::IsDictionary() { |
| 621 return IsHashTable() && this != Heap::symbol_table(); | 638 return IsHashTable() && this != |
| 639 HeapObject::cast(this)->GetHeap()->symbol_table(); |
| 622 } | 640 } |
| 623 | 641 |
| 624 | 642 |
| 625 bool Object::IsSymbolTable() { | 643 bool Object::IsSymbolTable() { |
| 626 return IsHashTable() && this == Heap::raw_unchecked_symbol_table(); | 644 return IsHashTable() && this == |
| 645 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table(); |
| 627 } | 646 } |
| 628 | 647 |
| 629 | 648 |
| 630 bool Object::IsJSFunctionResultCache() { | 649 bool Object::IsJSFunctionResultCache() { |
| 631 if (!IsFixedArray()) return false; | 650 if (!IsFixedArray()) return false; |
| 632 FixedArray* self = FixedArray::cast(this); | 651 FixedArray* self = FixedArray::cast(this); |
| 633 int length = self->length(); | 652 int length = self->length(); |
| 634 if (length < JSFunctionResultCache::kEntriesIndex) return false; | 653 if (length < JSFunctionResultCache::kEntriesIndex) return false; |
| 635 if ((length - JSFunctionResultCache::kEntriesIndex) | 654 if ((length - JSFunctionResultCache::kEntriesIndex) |
| 636 % JSFunctionResultCache::kEntrySize != 0) { | 655 % JSFunctionResultCache::kEntrySize != 0) { |
| (...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 733 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \ | 752 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \ |
| 734 bool Object::Is##Name() { \ | 753 bool Object::Is##Name() { \ |
| 735 return Object::IsHeapObject() \ | 754 return Object::IsHeapObject() \ |
| 736 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \ | 755 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \ |
| 737 } | 756 } |
| 738 STRUCT_LIST(MAKE_STRUCT_PREDICATE) | 757 STRUCT_LIST(MAKE_STRUCT_PREDICATE) |
| 739 #undef MAKE_STRUCT_PREDICATE | 758 #undef MAKE_STRUCT_PREDICATE |
| 740 | 759 |
| 741 | 760 |
| 742 bool Object::IsUndefined() { | 761 bool Object::IsUndefined() { |
| 743 return this == Heap::undefined_value(); | 762 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined; |
| 744 } | 763 } |
| 745 | 764 |
| 746 | 765 |
| 747 bool Object::IsNull() { | 766 bool Object::IsNull() { |
| 748 return this == Heap::null_value(); | 767 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull; |
| 768 } |
| 769 |
| 770 |
| 771 bool Object::IsTheHole() { |
| 772 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole; |
| 749 } | 773 } |
| 750 | 774 |
| 751 | 775 |
| 752 bool Object::IsTrue() { | 776 bool Object::IsTrue() { |
| 753 return this == Heap::true_value(); | 777 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue; |
| 754 } | 778 } |
| 755 | 779 |
| 756 | 780 |
| 757 bool Object::IsFalse() { | 781 bool Object::IsFalse() { |
| 758 return this == Heap::false_value(); | 782 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse; |
| 759 } | 783 } |
| 760 | 784 |
| 761 | 785 |
| 762 bool Object::IsArgumentsMarker() { | 786 bool Object::IsArgumentsMarker() { |
| 763 return this == Heap::arguments_marker(); | 787 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker; |
| 764 } | 788 } |
| 765 | 789 |
| 766 | 790 |
| 767 double Object::Number() { | 791 double Object::Number() { |
| 768 ASSERT(IsNumber()); | 792 ASSERT(IsNumber()); |
| 769 return IsSmi() | 793 return IsSmi() |
| 770 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value()) | 794 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value()) |
| 771 : reinterpret_cast<HeapNumber*>(this)->value(); | 795 : reinterpret_cast<HeapNumber*>(this)->value(); |
| 772 } | 796 } |
| 773 | 797 |
| 774 | 798 |
| 775 | |
| 776 MaybeObject* Object::ToSmi() { | 799 MaybeObject* Object::ToSmi() { |
| 777 if (IsSmi()) return this; | 800 if (IsSmi()) return this; |
| 778 if (IsHeapNumber()) { | 801 if (IsHeapNumber()) { |
| 779 double value = HeapNumber::cast(this)->value(); | 802 double value = HeapNumber::cast(this)->value(); |
| 780 int int_value = FastD2I(value); | 803 int int_value = FastD2I(value); |
| 781 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) { | 804 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) { |
| 782 return Smi::FromInt(int_value); | 805 return Smi::FromInt(int_value); |
| 783 } | 806 } |
| 784 } | 807 } |
| 785 return Failure::Exception(); | 808 return Failure::Exception(); |
| 786 } | 809 } |
| 787 | 810 |
| 788 | 811 |
| 789 bool Object::HasSpecificClassOf(String* name) { | 812 bool Object::HasSpecificClassOf(String* name) { |
| 790 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name); | 813 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name); |
| 791 } | 814 } |
| 792 | 815 |
| 793 | 816 |
| 794 MaybeObject* Object::GetElement(uint32_t index) { | 817 MaybeObject* Object::GetElement(uint32_t index) { |
| 795 // GetElement can trigger a getter which can cause allocation. | 818 // GetElement can trigger a getter which can cause allocation. |
| 796 // This was not always the case. This ASSERT is here to catch | 819 // This was not always the case. This ASSERT is here to catch |
| 797 // leftover incorrect uses. | 820 // leftover incorrect uses. |
| 798 ASSERT(Heap::IsAllocationAllowed()); | 821 ASSERT(HEAP->IsAllocationAllowed()); |
| 799 return GetElementWithReceiver(this, index); | 822 return GetElementWithReceiver(this, index); |
| 800 } | 823 } |
| 801 | 824 |
| 802 | 825 |
| 803 Object* Object::GetElementNoExceptionThrown(uint32_t index) { | 826 Object* Object::GetElementNoExceptionThrown(uint32_t index) { |
| 804 MaybeObject* maybe = GetElementWithReceiver(this, index); | 827 MaybeObject* maybe = GetElementWithReceiver(this, index); |
| 805 ASSERT(!maybe->IsFailure()); | 828 ASSERT(!maybe->IsFailure()); |
| 806 Object* result = NULL; // Initialization to please compiler. | 829 Object* result = NULL; // Initialization to please compiler. |
| 807 maybe->ToObject(&result); | 830 maybe->ToObject(&result); |
| 808 return result; | 831 return result; |
| (...skipping 13 matching lines...) Expand all Loading... |
| 822 | 845 |
| 823 #define FIELD_ADDR(p, offset) \ | 846 #define FIELD_ADDR(p, offset) \ |
| 824 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag) | 847 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag) |
| 825 | 848 |
| 826 #define READ_FIELD(p, offset) \ | 849 #define READ_FIELD(p, offset) \ |
| 827 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset))) | 850 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset))) |
| 828 | 851 |
| 829 #define WRITE_FIELD(p, offset, value) \ | 852 #define WRITE_FIELD(p, offset, value) \ |
| 830 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value) | 853 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value) |
| 831 | 854 |
| 832 #define WRITE_BARRIER(object, offset, value) \ | 855 #define WRITE_BARRIER(heap, object, offset, value) \ |
| 833 IncrementalMarking::RecordWrite(object, value); \ | 856 heap->incremental_marking()->RecordWrite(object, value); \ |
| 834 if (Heap::InNewSpace(value)) { \ | 857 if (HEAP->InNewSpace(value)) { \ |
| 835 Heap::RecordWrite(object->address(), offset); \ | 858 heap->RecordWrite(object->address(), offset); \ |
| 836 } | 859 } |
| 837 | 860 |
| 861 // TODO(gc) !!! |
| 862 |
| 838 #define READ_DOUBLE_FIELD(p, offset) \ | 863 #define READ_DOUBLE_FIELD(p, offset) \ |
| 839 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset))) | 864 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset))) |
| 840 | 865 |
| 841 #define WRITE_DOUBLE_FIELD(p, offset, value) \ | 866 #define WRITE_DOUBLE_FIELD(p, offset, value) \ |
| 842 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) | 867 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) |
| 843 | 868 |
| 844 #define READ_INT_FIELD(p, offset) \ | 869 #define READ_INT_FIELD(p, offset) \ |
| 845 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset))) | 870 (*reinterpret_cast<int*>(FIELD_ADDR(p, offset))) |
| 846 | 871 |
| 847 #define WRITE_INT_FIELD(p, offset, value) \ | 872 #define WRITE_INT_FIELD(p, offset, value) \ |
| (...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1018 void HeapObject::VerifyObjectField(int offset) { | 1043 void HeapObject::VerifyObjectField(int offset) { |
| 1019 VerifyPointer(READ_FIELD(this, offset)); | 1044 VerifyPointer(READ_FIELD(this, offset)); |
| 1020 } | 1045 } |
| 1021 | 1046 |
| 1022 void HeapObject::VerifySmiField(int offset) { | 1047 void HeapObject::VerifySmiField(int offset) { |
| 1023 ASSERT(READ_FIELD(this, offset)->IsSmi()); | 1048 ASSERT(READ_FIELD(this, offset)->IsSmi()); |
| 1024 } | 1049 } |
| 1025 #endif | 1050 #endif |
| 1026 | 1051 |
| 1027 | 1052 |
| 1053 Heap* HeapObject::GetHeap() { |
| 1054 // During GC, the map pointer in HeapObject is used in various ways that |
| 1055 // prevent us from retrieving Heap from the map. |
| 1056 // Assert that we are not in GC, implement GC code in a way that it doesn't |
| 1057 // pull heap from the map. |
| 1058 return map()->heap(); |
| 1059 } |
| 1060 |
| 1061 |
| 1062 Isolate* HeapObject::GetIsolate() { |
| 1063 Isolate* i = GetHeap()->isolate(); |
| 1064 ASSERT(i == Isolate::Current()); |
| 1065 return i; |
| 1066 } |
| 1067 |
| 1068 |
| 1028 Map* HeapObject::map() { | 1069 Map* HeapObject::map() { |
| 1029 return map_word().ToMap(); | 1070 return map_word().ToMap(); |
| 1030 } | 1071 } |
| 1031 | 1072 |
| 1032 | 1073 |
| 1033 void HeapObject::set_map(Map* value) { | 1074 void HeapObject::set_map(Map* value) { |
| 1034 set_map_word(MapWord::FromMap(value)); | 1075 set_map_word(MapWord::FromMap(value)); |
| 1035 IncrementalMarking::RecordWrite(this, value); | 1076 if (value != NULL) { |
| 1077 value->heap()->incremental_marking()->RecordWrite(this, value); |
| 1078 } |
| 1036 } | 1079 } |
| 1037 | 1080 |
| 1038 | 1081 |
| 1039 MapWord HeapObject::map_word() { | 1082 MapWord HeapObject::map_word() { |
| 1040 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset))); | 1083 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset))); |
| 1041 } | 1084 } |
| 1042 | 1085 |
| 1043 | 1086 |
| 1044 void HeapObject::set_map_word(MapWord map_word) { | 1087 void HeapObject::set_map_word(MapWord map_word) { |
| 1045 // WRITE_FIELD does not invoke write barrier, but there is no need | 1088 // WRITE_FIELD does not invoke write barrier, but there is no need |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1102 HeapObject* JSObject::elements() { | 1145 HeapObject* JSObject::elements() { |
| 1103 Object* array = READ_FIELD(this, kElementsOffset); | 1146 Object* array = READ_FIELD(this, kElementsOffset); |
| 1104 // In the assert below Dictionary is covered under FixedArray. | 1147 // In the assert below Dictionary is covered under FixedArray. |
| 1105 ASSERT(array->IsFixedArray() || array->IsExternalArray()); | 1148 ASSERT(array->IsFixedArray() || array->IsExternalArray()); |
| 1106 return reinterpret_cast<HeapObject*>(array); | 1149 return reinterpret_cast<HeapObject*>(array); |
| 1107 } | 1150 } |
| 1108 | 1151 |
| 1109 | 1152 |
| 1110 void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) { | 1153 void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) { |
| 1111 ASSERT(map()->has_fast_elements() == | 1154 ASSERT(map()->has_fast_elements() == |
| 1112 (value->map() == Heap::fixed_array_map() || | 1155 (value->map() == GetHeap()->fixed_array_map() || |
| 1113 value->map() == Heap::fixed_cow_array_map())); | 1156 value->map() == GetHeap()->fixed_cow_array_map())); |
| 1114 // In the assert below Dictionary is covered under FixedArray. | 1157 // In the assert below Dictionary is covered under FixedArray. |
| 1115 ASSERT(value->IsFixedArray() || value->IsExternalArray()); | 1158 ASSERT(value->IsFixedArray() || value->IsExternalArray()); |
| 1116 WRITE_FIELD(this, kElementsOffset, value); | 1159 WRITE_FIELD(this, kElementsOffset, value); |
| 1117 WRITE_BARRIER(this, kElementsOffset, value); | 1160 WRITE_BARRIER(GetHeap(), this, kElementsOffset, value); |
| 1118 } | 1161 } |
| 1119 | 1162 |
| 1120 | 1163 |
| 1121 void JSObject::initialize_properties() { | 1164 void JSObject::initialize_properties() { |
| 1122 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); | 1165 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); |
| 1123 WRITE_FIELD(this, kPropertiesOffset, Heap::empty_fixed_array()); | 1166 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array()); |
| 1124 } | 1167 } |
| 1125 | 1168 |
| 1126 | 1169 |
| 1127 void JSObject::initialize_elements() { | 1170 void JSObject::initialize_elements() { |
| 1128 ASSERT(map()->has_fast_elements()); | 1171 ASSERT(map()->has_fast_elements()); |
| 1129 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); | 1172 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); |
| 1130 WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array()); | 1173 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array()); |
| 1131 } | 1174 } |
| 1132 | 1175 |
| 1133 | 1176 |
| 1134 MaybeObject* JSObject::ResetElements() { | 1177 MaybeObject* JSObject::ResetElements() { |
| 1135 Object* obj; | 1178 Object* obj; |
| 1136 { MaybeObject* maybe_obj = map()->GetFastElementsMap(); | 1179 { MaybeObject* maybe_obj = map()->GetFastElementsMap(); |
| 1137 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 1180 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 1138 } | 1181 } |
| 1139 set_map(Map::cast(obj)); | 1182 set_map(Map::cast(obj)); |
| 1140 initialize_elements(); | 1183 initialize_elements(); |
| 1141 return this; | 1184 return this; |
| 1142 } | 1185 } |
| 1143 | 1186 |
| 1144 | 1187 |
| 1145 ACCESSORS(Oddball, to_string, String, kToStringOffset) | 1188 ACCESSORS(Oddball, to_string, String, kToStringOffset) |
| 1146 ACCESSORS(Oddball, to_number, Object, kToNumberOffset) | 1189 ACCESSORS(Oddball, to_number, Object, kToNumberOffset) |
| 1147 | 1190 |
| 1148 | 1191 |
| 1192 byte Oddball::kind() { |
| 1193 return READ_BYTE_FIELD(this, kKindOffset); |
| 1194 } |
| 1195 |
| 1196 |
| 1197 void Oddball::set_kind(byte value) { |
| 1198 WRITE_BYTE_FIELD(this, kKindOffset, value); |
| 1199 } |
| 1200 |
| 1201 |
| 1149 Object* JSGlobalPropertyCell::value() { | 1202 Object* JSGlobalPropertyCell::value() { |
| 1150 return READ_FIELD(this, kValueOffset); | 1203 return READ_FIELD(this, kValueOffset); |
| 1151 } | 1204 } |
| 1152 | 1205 |
| 1153 | 1206 |
| 1154 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) { | 1207 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) { |
| 1155 // The write barrier is not used for global property cells. | 1208 // The write barrier is not used for global property cells. |
| 1156 ASSERT(!val->IsJSGlobalPropertyCell()); | 1209 ASSERT(!val->IsJSGlobalPropertyCell()); |
| 1157 WRITE_FIELD(this, kValueOffset, val); | 1210 WRITE_FIELD(this, kValueOffset, val); |
| 1158 IncrementalMarking::RecordWrite(this, val); | 1211 // TODO(gc) ISOLATES MERGE cell should heap accessor. |
| 1212 GetHeap()->incremental_marking()->RecordWrite(this, val); |
| 1159 } | 1213 } |
| 1160 | 1214 |
| 1161 | 1215 |
| 1162 int JSObject::GetHeaderSize() { | 1216 int JSObject::GetHeaderSize() { |
| 1163 InstanceType type = map()->instance_type(); | 1217 InstanceType type = map()->instance_type(); |
| 1164 // Check for the most common kind of JavaScript object before | 1218 // Check for the most common kind of JavaScript object before |
| 1165 // falling into the generic switch. This speeds up the internal | 1219 // falling into the generic switch. This speeds up the internal |
| 1166 // field operations considerably on average. | 1220 // field operations considerably on average. |
| 1167 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize; | 1221 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize; |
| 1168 switch (type) { | 1222 switch (type) { |
| (...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1215 } | 1269 } |
| 1216 | 1270 |
| 1217 | 1271 |
| 1218 void JSObject::SetInternalField(int index, Object* value) { | 1272 void JSObject::SetInternalField(int index, Object* value) { |
| 1219 ASSERT(index < GetInternalFieldCount() && index >= 0); | 1273 ASSERT(index < GetInternalFieldCount() && index >= 0); |
| 1220 // Internal objects do follow immediately after the header, whereas in-object | 1274 // Internal objects do follow immediately after the header, whereas in-object |
| 1221 // properties are at the end of the object. Therefore there is no need | 1275 // properties are at the end of the object. Therefore there is no need |
| 1222 // to adjust the index here. | 1276 // to adjust the index here. |
| 1223 int offset = GetHeaderSize() + (kPointerSize * index); | 1277 int offset = GetHeaderSize() + (kPointerSize * index); |
| 1224 WRITE_FIELD(this, offset, value); | 1278 WRITE_FIELD(this, offset, value); |
| 1225 WRITE_BARRIER(this, offset, value); | 1279 WRITE_BARRIER(GetHeap(), this, offset, value); |
| 1226 } | 1280 } |
| 1227 | 1281 |
| 1228 | 1282 |
| 1229 // Access fast-case object properties at index. The use of these routines | 1283 // Access fast-case object properties at index. The use of these routines |
| 1230 // is needed to correctly distinguish between properties stored in-object and | 1284 // is needed to correctly distinguish between properties stored in-object and |
| 1231 // properties stored in the properties array. | 1285 // properties stored in the properties array. |
| 1232 Object* JSObject::FastPropertyAt(int index) { | 1286 Object* JSObject::FastPropertyAt(int index) { |
| 1233 // Adjust for the number of properties stored in the object. | 1287 // Adjust for the number of properties stored in the object. |
| 1234 index -= map()->inobject_properties(); | 1288 index -= map()->inobject_properties(); |
| 1235 if (index < 0) { | 1289 if (index < 0) { |
| 1236 int offset = map()->instance_size() + (index * kPointerSize); | 1290 int offset = map()->instance_size() + (index * kPointerSize); |
| 1237 return READ_FIELD(this, offset); | 1291 return READ_FIELD(this, offset); |
| 1238 } else { | 1292 } else { |
| 1239 ASSERT(index < properties()->length()); | 1293 ASSERT(index < properties()->length()); |
| 1240 return properties()->get(index); | 1294 return properties()->get(index); |
| 1241 } | 1295 } |
| 1242 } | 1296 } |
| 1243 | 1297 |
| 1244 | 1298 |
| 1245 Object* JSObject::FastPropertyAtPut(int index, Object* value) { | 1299 Object* JSObject::FastPropertyAtPut(int index, Object* value) { |
| 1246 // Adjust for the number of properties stored in the object. | 1300 // Adjust for the number of properties stored in the object. |
| 1247 index -= map()->inobject_properties(); | 1301 index -= map()->inobject_properties(); |
| 1248 if (index < 0) { | 1302 if (index < 0) { |
| 1249 int offset = map()->instance_size() + (index * kPointerSize); | 1303 int offset = map()->instance_size() + (index * kPointerSize); |
| 1250 WRITE_FIELD(this, offset, value); | 1304 WRITE_FIELD(this, offset, value); |
| 1251 WRITE_BARRIER(this, offset, value); | 1305 WRITE_BARRIER(GetHeap(), this, offset, value); |
| 1252 } else { | 1306 } else { |
| 1253 ASSERT(index < properties()->length()); | 1307 ASSERT(index < properties()->length()); |
| 1254 properties()->set(index, value); | 1308 properties()->set(index, value); |
| 1255 } | 1309 } |
| 1256 return value; | 1310 return value; |
| 1257 } | 1311 } |
| 1258 | 1312 |
| 1259 | 1313 |
| 1260 int JSObject::GetInObjectPropertyOffset(int index) { | 1314 int JSObject::GetInObjectPropertyOffset(int index) { |
| 1261 // Adjust for the number of properties stored in the object. | 1315 // Adjust for the number of properties stored in the object. |
| (...skipping 13 matching lines...) Expand all Loading... |
| 1275 | 1329 |
| 1276 | 1330 |
| 1277 Object* JSObject::InObjectPropertyAtPut(int index, | 1331 Object* JSObject::InObjectPropertyAtPut(int index, |
| 1278 Object* value, | 1332 Object* value, |
| 1279 WriteBarrierMode mode) { | 1333 WriteBarrierMode mode) { |
| 1280 // Adjust for the number of properties stored in the object. | 1334 // Adjust for the number of properties stored in the object. |
| 1281 index -= map()->inobject_properties(); | 1335 index -= map()->inobject_properties(); |
| 1282 ASSERT(index < 0); | 1336 ASSERT(index < 0); |
| 1283 int offset = map()->instance_size() + (index * kPointerSize); | 1337 int offset = map()->instance_size() + (index * kPointerSize); |
| 1284 WRITE_FIELD(this, offset, value); | 1338 WRITE_FIELD(this, offset, value); |
| 1285 WRITE_BARRIER(this, offset, value); | 1339 WRITE_BARRIER(GetHeap(), this, offset, value); |
| 1286 return value; | 1340 return value; |
| 1287 } | 1341 } |
| 1288 | 1342 |
| 1289 | 1343 |
| 1290 | 1344 |
| 1291 void JSObject::InitializeBody(int object_size, Object* value) { | 1345 void JSObject::InitializeBody(int object_size, Object* value) { |
| 1292 ASSERT(!value->IsHeapObject() || !Heap::InNewSpace(value)); | 1346 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value)); |
| 1293 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { | 1347 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { |
| 1294 WRITE_FIELD(this, offset, value); | 1348 WRITE_FIELD(this, offset, value); |
| 1295 } | 1349 } |
| 1296 } | 1350 } |
| 1297 | 1351 |
| 1298 | 1352 |
| 1299 bool JSObject::HasFastProperties() { | 1353 bool JSObject::HasFastProperties() { |
| 1300 return !properties()->IsDictionary(); | 1354 return !properties()->IsDictionary(); |
| 1301 } | 1355 } |
| 1302 | 1356 |
| 1303 | 1357 |
| 1304 int JSObject::MaxFastProperties() { | 1358 int JSObject::MaxFastProperties() { |
| 1305 // Allow extra fast properties if the object has more than | 1359 // Allow extra fast properties if the object has more than |
| 1306 // kMaxFastProperties in-object properties. When this is the case, | 1360 // kMaxFastProperties in-object properties. When this is the case, |
| 1307 // it is very unlikely that the object is being used as a dictionary | 1361 // it is very unlikely that the object is being used as a dictionary |
| 1308 // and there is a good chance that allowing more map transitions | 1362 // and there is a good chance that allowing more map transitions |
| 1309 // will be worth it. | 1363 // will be worth it. |
| 1310 return Max(map()->inobject_properties(), kMaxFastProperties); | 1364 return Max(map()->inobject_properties(), kMaxFastProperties); |
| 1311 } | 1365 } |
| 1312 | 1366 |
| 1313 | 1367 |
| 1314 void Struct::InitializeBody(int object_size) { | 1368 void Struct::InitializeBody(int object_size) { |
| 1315 Object* value = Heap::undefined_value(); | 1369 Object* value = GetHeap()->undefined_value(); |
| 1316 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { | 1370 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { |
| 1317 WRITE_FIELD(this, offset, value); | 1371 WRITE_FIELD(this, offset, value); |
| 1318 } | 1372 } |
| 1319 } | 1373 } |
| 1320 | 1374 |
| 1321 | 1375 |
| 1322 bool Object::ToArrayIndex(uint32_t* index) { | 1376 bool Object::ToArrayIndex(uint32_t* index) { |
| 1323 if (IsSmi()) { | 1377 if (IsSmi()) { |
| 1324 int value = Smi::cast(this)->value(); | 1378 int value = Smi::cast(this)->value(); |
| 1325 if (value < 0) return false; | 1379 if (value < 0) return false; |
| (...skipping 25 matching lines...) Expand all Loading... |
| 1351 } | 1405 } |
| 1352 | 1406 |
| 1353 | 1407 |
| 1354 Object* FixedArray::get(int index) { | 1408 Object* FixedArray::get(int index) { |
| 1355 ASSERT(index >= 0 && index < this->length()); | 1409 ASSERT(index >= 0 && index < this->length()); |
| 1356 return READ_FIELD(this, kHeaderSize + index * kPointerSize); | 1410 return READ_FIELD(this, kHeaderSize + index * kPointerSize); |
| 1357 } | 1411 } |
| 1358 | 1412 |
| 1359 | 1413 |
| 1360 void FixedArray::set(int index, Smi* value) { | 1414 void FixedArray::set(int index, Smi* value) { |
| 1361 ASSERT(map() != Heap::fixed_cow_array_map()); | 1415 ASSERT(map() != HEAP->fixed_cow_array_map()); |
| 1362 ASSERT(reinterpret_cast<Object*>(value)->IsSmi()); | 1416 ASSERT(reinterpret_cast<Object*>(value)->IsSmi()); |
| 1363 int offset = kHeaderSize + index * kPointerSize; | 1417 int offset = kHeaderSize + index * kPointerSize; |
| 1364 WRITE_FIELD(this, offset, value); | 1418 WRITE_FIELD(this, offset, value); |
| 1365 } | 1419 } |
| 1366 | 1420 |
| 1367 | 1421 |
| 1368 void FixedArray::set(int index, Object* value) { | 1422 void FixedArray::set(int index, Object* value) { |
| 1369 ASSERT(map() != Heap::fixed_cow_array_map()); | 1423 ASSERT(map() != HEAP->fixed_cow_array_map()); |
| 1370 ASSERT(index >= 0 && index < this->length()); | 1424 ASSERT(index >= 0 && index < this->length()); |
| 1371 int offset = kHeaderSize + index * kPointerSize; | 1425 int offset = kHeaderSize + index * kPointerSize; |
| 1372 WRITE_FIELD(this, offset, value); | 1426 WRITE_FIELD(this, offset, value); |
| 1373 WRITE_BARRIER(this, offset, value); | 1427 WRITE_BARRIER(GetHeap(), this, offset, value); |
| 1374 } | 1428 } |
| 1375 | 1429 |
| 1376 | 1430 |
| 1377 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) { | 1431 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) { |
| 1378 if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER; | 1432 if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER; |
| 1379 return UPDATE_WRITE_BARRIER; | 1433 return UPDATE_WRITE_BARRIER; |
| 1380 } | 1434 } |
| 1381 | 1435 |
| 1382 | 1436 |
| 1383 void FixedArray::set(int index, | 1437 void FixedArray::set(int index, |
| 1384 Object* value, | 1438 Object* value, |
| 1385 WriteBarrierMode mode) { | 1439 WriteBarrierMode mode) { |
| 1386 ASSERT(map() != Heap::fixed_cow_array_map()); | 1440 ASSERT(map() != HEAP->fixed_cow_array_map()); |
| 1387 ASSERT(index >= 0 && index < this->length()); | 1441 ASSERT(index >= 0 && index < this->length()); |
| 1388 int offset = kHeaderSize + index * kPointerSize; | 1442 int offset = kHeaderSize + index * kPointerSize; |
| 1389 WRITE_FIELD(this, offset, value); | 1443 WRITE_FIELD(this, offset, value); |
| 1390 WRITE_BARRIER(this, offset, value); | 1444 WRITE_BARRIER(GetHeap(), this, offset, value); |
| 1391 } | 1445 } |
| 1392 | 1446 |
| 1393 | 1447 |
| 1394 void FixedArray::fast_set(FixedArray* array, int index, Object* value) { | 1448 void FixedArray::fast_set(FixedArray* array, int index, Object* value) { |
| 1395 ASSERT(array->map() != Heap::raw_unchecked_fixed_cow_array_map()); | 1449 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map()); |
| 1396 ASSERT(index >= 0 && index < array->length()); | 1450 ASSERT(index >= 0 && index < array->length()); |
| 1397 ASSERT(!Heap::InNewSpace(value)); | 1451 ASSERT(!HEAP->InNewSpace(value)); |
| 1398 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value); | 1452 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value); |
| 1399 IncrementalMarking::RecordWrite(array, value); | 1453 array->GetHeap()->incremental_marking()->RecordWrite(array, value); |
| 1400 } | 1454 } |
| 1401 | 1455 |
| 1402 | 1456 |
| 1403 void FixedArray::set_undefined(int index) { | 1457 void FixedArray::set_undefined(int index) { |
| 1404 ASSERT(map() != Heap::fixed_cow_array_map()); | 1458 ASSERT(map() != HEAP->fixed_cow_array_map()); |
| 1459 set_undefined(GetHeap(), index); |
| 1460 } |
| 1461 |
| 1462 |
| 1463 void FixedArray::set_undefined(Heap* heap, int index) { |
| 1405 ASSERT(index >= 0 && index < this->length()); | 1464 ASSERT(index >= 0 && index < this->length()); |
| 1406 ASSERT(!Heap::InNewSpace(Heap::undefined_value())); | 1465 ASSERT(!heap->InNewSpace(heap->undefined_value())); |
| 1407 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, | 1466 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, |
| 1408 Heap::undefined_value()); | 1467 heap->undefined_value()); |
| 1409 } | 1468 } |
| 1410 | 1469 |
| 1411 | 1470 |
| 1412 void FixedArray::set_null(int index) { | 1471 void FixedArray::set_null(int index) { |
| 1413 ASSERT(map() != Heap::fixed_cow_array_map()); | 1472 set_null(GetHeap(), index); |
| 1473 } |
| 1474 |
| 1475 |
| 1476 void FixedArray::set_null(Heap* heap, int index) { |
| 1414 ASSERT(index >= 0 && index < this->length()); | 1477 ASSERT(index >= 0 && index < this->length()); |
| 1415 ASSERT(!Heap::InNewSpace(Heap::null_value())); | 1478 ASSERT(!heap->InNewSpace(heap->null_value())); |
| 1416 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value()); | 1479 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value()); |
| 1417 } | 1480 } |
| 1418 | 1481 |
| 1419 | 1482 |
| 1420 void FixedArray::set_the_hole(int index) { | 1483 void FixedArray::set_the_hole(int index) { |
| 1421 ASSERT(map() != Heap::fixed_cow_array_map()); | 1484 ASSERT(map() != HEAP->fixed_cow_array_map()); |
| 1422 ASSERT(index >= 0 && index < this->length()); | 1485 ASSERT(index >= 0 && index < this->length()); |
| 1423 ASSERT(!Heap::InNewSpace(Heap::the_hole_value())); | 1486 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value())); |
| 1424 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::the_hole_value()); | 1487 WRITE_FIELD(this, |
| 1488 kHeaderSize + index * kPointerSize, |
| 1489 GetHeap()->the_hole_value()); |
| 1425 } | 1490 } |
| 1426 | 1491 |
| 1427 | 1492 |
| 1428 void FixedArray::set_unchecked(int index, Smi* value) { | 1493 void FixedArray::set_unchecked(int index, Smi* value) { |
| 1429 ASSERT(reinterpret_cast<Object*>(value)->IsSmi()); | 1494 ASSERT(reinterpret_cast<Object*>(value)->IsSmi()); |
| 1430 int offset = kHeaderSize + index * kPointerSize; | 1495 int offset = kHeaderSize + index * kPointerSize; |
| 1431 WRITE_FIELD(this, offset, value); | 1496 WRITE_FIELD(this, offset, value); |
| 1432 } | 1497 } |
| 1433 | 1498 |
| 1434 | 1499 |
| 1435 void FixedArray::set_unchecked(int index, | 1500 void FixedArray::set_unchecked(Heap* heap, |
| 1501 int index, |
| 1436 Object* value, | 1502 Object* value, |
| 1437 WriteBarrierMode mode) { | 1503 WriteBarrierMode mode) { |
| 1438 int offset = kHeaderSize + index * kPointerSize; | 1504 int offset = kHeaderSize + index * kPointerSize; |
| 1439 WRITE_FIELD(this, offset, value); | 1505 WRITE_FIELD(this, offset, value); |
| 1440 WRITE_BARRIER(this, offset, value); | 1506 WRITE_BARRIER(heap, this, offset, value); |
| 1441 } | 1507 } |
| 1442 | 1508 |
| 1443 | 1509 |
| 1444 void FixedArray::set_null_unchecked(int index) { | 1510 void FixedArray::set_null_unchecked(Heap* heap, int index) { |
| 1445 ASSERT(index >= 0 && index < this->length()); | 1511 ASSERT(index >= 0 && index < this->length()); |
| 1446 ASSERT(!Heap::InNewSpace(Heap::null_value())); | 1512 ASSERT(!heap->InNewSpace(heap->null_value())); |
| 1447 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value()); | 1513 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value()); |
| 1448 } | 1514 } |
| 1449 | 1515 |
| 1450 | 1516 |
| 1451 Object** FixedArray::data_start() { | 1517 Object** FixedArray::data_start() { |
| 1452 return HeapObject::RawField(this, kHeaderSize); | 1518 return HeapObject::RawField(this, kHeaderSize); |
| 1453 } | 1519 } |
| 1454 | 1520 |
| 1455 | 1521 |
| 1456 bool DescriptorArray::IsEmpty() { | 1522 bool DescriptorArray::IsEmpty() { |
| 1457 ASSERT(this == Heap::empty_descriptor_array() || | 1523 ASSERT(this->length() > kFirstIndex || |
| 1458 this->length() > 2); | 1524 this == HEAP->empty_descriptor_array()); |
| 1459 return this == Heap::empty_descriptor_array(); | 1525 return length() <= kFirstIndex; |
| 1460 } | 1526 } |
| 1461 | 1527 |
| 1462 | 1528 |
| 1463 void DescriptorArray::fast_swap(FixedArray* array, int first, int second) { | 1529 void DescriptorArray::fast_swap(FixedArray* array, int first, int second) { |
| 1464 Object* tmp = array->get(first); | 1530 Object* tmp = array->get(first); |
| 1465 fast_set(array, first, array->get(second)); | 1531 fast_set(array, first, array->get(second)); |
| 1466 fast_set(array, second, tmp); | 1532 fast_set(array, second, tmp); |
| 1467 } | 1533 } |
| 1468 | 1534 |
| 1469 | 1535 |
| 1470 int DescriptorArray::Search(String* name) { | 1536 int DescriptorArray::Search(String* name) { |
| 1471 SLOW_ASSERT(IsSortedNoDuplicates()); | 1537 SLOW_ASSERT(IsSortedNoDuplicates()); |
| 1472 | 1538 |
| 1473 // Check for empty descriptor array. | 1539 // Check for empty descriptor array. |
| 1474 int nof = number_of_descriptors(); | 1540 int nof = number_of_descriptors(); |
| 1475 if (nof == 0) return kNotFound; | 1541 if (nof == 0) return kNotFound; |
| 1476 | 1542 |
| 1477 // Fast case: do linear search for small arrays. | 1543 // Fast case: do linear search for small arrays. |
| 1478 const int kMaxElementsForLinearSearch = 8; | 1544 const int kMaxElementsForLinearSearch = 8; |
| 1479 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) { | 1545 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) { |
| 1480 return LinearSearch(name, nof); | 1546 return LinearSearch(name, nof); |
| 1481 } | 1547 } |
| 1482 | 1548 |
| 1483 // Slow case: perform binary search. | 1549 // Slow case: perform binary search. |
| 1484 return BinarySearch(name, 0, nof - 1); | 1550 return BinarySearch(name, 0, nof - 1); |
| 1485 } | 1551 } |
| 1486 | 1552 |
| 1487 | 1553 |
| 1488 int DescriptorArray::SearchWithCache(String* name) { | 1554 int DescriptorArray::SearchWithCache(String* name) { |
| 1489 int number = DescriptorLookupCache::Lookup(this, name); | 1555 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name); |
| 1490 if (number == DescriptorLookupCache::kAbsent) { | 1556 if (number == DescriptorLookupCache::kAbsent) { |
| 1491 number = Search(name); | 1557 number = Search(name); |
| 1492 DescriptorLookupCache::Update(this, name, number); | 1558 GetIsolate()->descriptor_lookup_cache()->Update(this, name, number); |
| 1493 } | 1559 } |
| 1494 return number; | 1560 return number; |
| 1495 } | 1561 } |
| 1496 | 1562 |
| 1497 | 1563 |
| 1498 String* DescriptorArray::GetKey(int descriptor_number) { | 1564 String* DescriptorArray::GetKey(int descriptor_number) { |
| 1499 ASSERT(descriptor_number < number_of_descriptors()); | 1565 ASSERT(descriptor_number < number_of_descriptors()); |
| 1500 return String::cast(get(ToKeyIndex(descriptor_number))); | 1566 return String::cast(get(ToKeyIndex(descriptor_number))); |
| 1501 } | 1567 } |
| 1502 | 1568 |
| (...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1568 GetValue(descriptor_number), | 1634 GetValue(descriptor_number), |
| 1569 GetDetails(descriptor_number)); | 1635 GetDetails(descriptor_number)); |
| 1570 } | 1636 } |
| 1571 | 1637 |
| 1572 | 1638 |
| 1573 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { | 1639 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { |
| 1574 // Range check. | 1640 // Range check. |
| 1575 ASSERT(descriptor_number < number_of_descriptors()); | 1641 ASSERT(descriptor_number < number_of_descriptors()); |
| 1576 | 1642 |
| 1577 // Make sure none of the elements in desc are in new space. | 1643 // Make sure none of the elements in desc are in new space. |
| 1578 ASSERT(!Heap::InNewSpace(desc->GetKey())); | 1644 ASSERT(!HEAP->InNewSpace(desc->GetKey())); |
| 1579 ASSERT(!Heap::InNewSpace(desc->GetValue())); | 1645 ASSERT(!HEAP->InNewSpace(desc->GetValue())); |
| 1580 | 1646 |
| 1581 fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey()); | 1647 fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey()); |
| 1582 FixedArray* content_array = GetContentArray(); | 1648 FixedArray* content_array = GetContentArray(); |
| 1583 fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue()); | 1649 fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue()); |
| 1584 fast_set(content_array, ToDetailsIndex(descriptor_number), | 1650 fast_set(content_array, ToDetailsIndex(descriptor_number), |
| 1585 desc->GetDetails().AsSmi()); | 1651 desc->GetDetails().AsSmi()); |
| 1586 } | 1652 } |
| 1587 | 1653 |
| 1588 | 1654 |
| 1589 void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) { | 1655 void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) { |
| 1590 Descriptor desc; | 1656 Descriptor desc; |
| 1591 src->Get(src_index, &desc); | 1657 src->Get(src_index, &desc); |
| 1592 Set(index, &desc); | 1658 Set(index, &desc); |
| 1593 } | 1659 } |
| 1594 | 1660 |
| 1595 | 1661 |
| 1596 void DescriptorArray::Swap(int first, int second) { | 1662 void DescriptorArray::Swap(int first, int second) { |
| 1597 fast_swap(this, ToKeyIndex(first), ToKeyIndex(second)); | 1663 fast_swap(this, ToKeyIndex(first), ToKeyIndex(second)); |
| 1598 FixedArray* content_array = GetContentArray(); | 1664 FixedArray* content_array = GetContentArray(); |
| 1599 fast_swap(content_array, ToValueIndex(first), ToValueIndex(second)); | 1665 fast_swap(content_array, ToValueIndex(first), ToValueIndex(second)); |
| 1600 fast_swap(content_array, ToDetailsIndex(first), ToDetailsIndex(second)); | 1666 fast_swap(content_array, ToDetailsIndex(first), ToDetailsIndex(second)); |
| 1601 } | 1667 } |
| 1602 | 1668 |
| 1603 | 1669 |
| 1670 template<typename Shape, typename Key> |
| 1671 int HashTable<Shape, Key>::FindEntry(Key key) { |
| 1672 return FindEntry(GetIsolate(), key); |
| 1673 } |
| 1674 |
| 1675 |
| 1676 // Find entry for key otherwise return kNotFound. |
| 1677 template<typename Shape, typename Key> |
| 1678 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) { |
| 1679 uint32_t capacity = Capacity(); |
| 1680 uint32_t entry = FirstProbe(Shape::Hash(key), capacity); |
| 1681 uint32_t count = 1; |
| 1682 // EnsureCapacity will guarantee the hash table is never full. |
| 1683 while (true) { |
| 1684 Object* element = KeyAt(entry); |
| 1685 if (element == isolate->heap()->undefined_value()) break; // Empty entry. |
| 1686 if (element != isolate->heap()->null_value() && |
| 1687 Shape::IsMatch(key, element)) return entry; |
| 1688 entry = NextProbe(entry, count++, capacity); |
| 1689 } |
| 1690 return kNotFound; |
| 1691 } |
| 1692 |
| 1693 |
| 1604 bool NumberDictionary::requires_slow_elements() { | 1694 bool NumberDictionary::requires_slow_elements() { |
| 1605 Object* max_index_object = get(kMaxNumberKeyIndex); | 1695 Object* max_index_object = get(kMaxNumberKeyIndex); |
| 1606 if (!max_index_object->IsSmi()) return false; | 1696 if (!max_index_object->IsSmi()) return false; |
| 1607 return 0 != | 1697 return 0 != |
| 1608 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask); | 1698 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask); |
| 1609 } | 1699 } |
| 1610 | 1700 |
| 1611 uint32_t NumberDictionary::max_number_key() { | 1701 uint32_t NumberDictionary::max_number_key() { |
| 1612 ASSERT(!requires_slow_elements()); | 1702 ASSERT(!requires_slow_elements()); |
| 1613 Object* max_index_object = get(kMaxNumberKeyIndex); | 1703 Object* max_index_object = get(kMaxNumberKeyIndex); |
| (...skipping 228 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1842 } | 1932 } |
| 1843 | 1933 |
| 1844 | 1934 |
| 1845 Object* ConsString::unchecked_first() { | 1935 Object* ConsString::unchecked_first() { |
| 1846 return READ_FIELD(this, kFirstOffset); | 1936 return READ_FIELD(this, kFirstOffset); |
| 1847 } | 1937 } |
| 1848 | 1938 |
| 1849 | 1939 |
| 1850 void ConsString::set_first(String* value, WriteBarrierMode mode) { | 1940 void ConsString::set_first(String* value, WriteBarrierMode mode) { |
| 1851 WRITE_FIELD(this, kFirstOffset, value); | 1941 WRITE_FIELD(this, kFirstOffset, value); |
| 1852 WRITE_BARRIER(this, kFirstOffset, value); | 1942 WRITE_BARRIER(GetHeap(), this, kFirstOffset, value); |
| 1853 } | 1943 } |
| 1854 | 1944 |
| 1855 | 1945 |
| 1856 String* ConsString::second() { | 1946 String* ConsString::second() { |
| 1857 return String::cast(READ_FIELD(this, kSecondOffset)); | 1947 return String::cast(READ_FIELD(this, kSecondOffset)); |
| 1858 } | 1948 } |
| 1859 | 1949 |
| 1860 | 1950 |
| 1861 Object* ConsString::unchecked_second() { | 1951 Object* ConsString::unchecked_second() { |
| 1862 return READ_FIELD(this, kSecondOffset); | 1952 return READ_FIELD(this, kSecondOffset); |
| 1863 } | 1953 } |
| 1864 | 1954 |
| 1865 | 1955 |
| 1866 void ConsString::set_second(String* value, WriteBarrierMode mode) { | 1956 void ConsString::set_second(String* value, WriteBarrierMode mode) { |
| 1867 WRITE_FIELD(this, kSecondOffset, value); | 1957 WRITE_FIELD(this, kSecondOffset, value); |
| 1868 WRITE_BARRIER(this, kSecondOffset, value); | 1958 WRITE_BARRIER(GetHeap(), this, kSecondOffset, value); |
| 1869 } | 1959 } |
| 1870 | 1960 |
| 1871 | 1961 |
| 1872 ExternalAsciiString::Resource* ExternalAsciiString::resource() { | 1962 ExternalAsciiString::Resource* ExternalAsciiString::resource() { |
| 1873 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)); | 1963 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)); |
| 1874 } | 1964 } |
| 1875 | 1965 |
| 1876 | 1966 |
| 1877 void ExternalAsciiString::set_resource( | 1967 void ExternalAsciiString::set_resource( |
| 1878 ExternalAsciiString::Resource* resource) { | 1968 ExternalAsciiString::Resource* resource) { |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1894 void JSFunctionResultCache::MakeZeroSize() { | 1984 void JSFunctionResultCache::MakeZeroSize() { |
| 1895 set_finger_index(kEntriesIndex); | 1985 set_finger_index(kEntriesIndex); |
| 1896 set_size(kEntriesIndex); | 1986 set_size(kEntriesIndex); |
| 1897 } | 1987 } |
| 1898 | 1988 |
| 1899 | 1989 |
| 1900 void JSFunctionResultCache::Clear() { | 1990 void JSFunctionResultCache::Clear() { |
| 1901 int cache_size = size(); | 1991 int cache_size = size(); |
| 1902 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex)); | 1992 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex)); |
| 1903 MemsetPointer(entries_start, | 1993 MemsetPointer(entries_start, |
| 1904 Heap::the_hole_value(), | 1994 GetHeap()->the_hole_value(), |
| 1905 cache_size - kEntriesIndex); | 1995 cache_size - kEntriesIndex); |
| 1906 MakeZeroSize(); | 1996 MakeZeroSize(); |
| 1907 } | 1997 } |
| 1908 | 1998 |
| 1909 | 1999 |
| 1910 int JSFunctionResultCache::size() { | 2000 int JSFunctionResultCache::size() { |
| 1911 return Smi::cast(get(kCacheSizeIndex))->value(); | 2001 return Smi::cast(get(kCacheSizeIndex))->value(); |
| 1912 } | 2002 } |
| 1913 | 2003 |
| 1914 | 2004 |
| (...skipping 699 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2614 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); | 2704 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); |
| 2615 // GetCodeFromTargetAddress might be called when marking objects during mark | 2705 // GetCodeFromTargetAddress might be called when marking objects during mark |
| 2616 // sweep. reinterpret_cast is therefore used instead of the more appropriate | 2706 // sweep. reinterpret_cast is therefore used instead of the more appropriate |
| 2617 // Code::cast. Code::cast does not work when the object's map is | 2707 // Code::cast. Code::cast does not work when the object's map is |
| 2618 // marked. | 2708 // marked. |
| 2619 Code* result = reinterpret_cast<Code*>(code); | 2709 Code* result = reinterpret_cast<Code*>(code); |
| 2620 return result; | 2710 return result; |
| 2621 } | 2711 } |
| 2622 | 2712 |
| 2623 | 2713 |
| 2714 Heap* Map::heap() { |
| 2715 // NOTE: address() helper is not used to save one instruction. |
| 2716 Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap(); |
| 2717 ASSERT(heap != NULL); |
| 2718 ASSERT(heap->isolate() == Isolate::Current()); |
| 2719 return heap; |
| 2720 } |
| 2721 |
| 2722 |
| 2624 Object* Code::GetObjectFromEntryAddress(Address location_of_address) { | 2723 Object* Code::GetObjectFromEntryAddress(Address location_of_address) { |
| 2625 return HeapObject:: | 2724 return HeapObject:: |
| 2626 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); | 2725 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); |
| 2627 } | 2726 } |
| 2628 | 2727 |
| 2629 | 2728 |
| 2630 Object* Map::prototype() { | 2729 Object* Map::prototype() { |
| 2631 return READ_FIELD(this, kPrototypeOffset); | 2730 return READ_FIELD(this, kPrototypeOffset); |
| 2632 } | 2731 } |
| 2633 | 2732 |
| 2634 | 2733 |
| 2635 void Map::set_prototype(Object* value, WriteBarrierMode mode) { | 2734 void Map::set_prototype(Object* value, WriteBarrierMode mode) { |
| 2636 ASSERT(value->IsNull() || value->IsJSObject()); | 2735 ASSERT(value->IsNull() || value->IsJSObject()); |
| 2637 WRITE_FIELD(this, kPrototypeOffset, value); | 2736 WRITE_FIELD(this, kPrototypeOffset, value); |
| 2638 WRITE_BARRIER(this, kPrototypeOffset, value); | 2737 WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value); |
| 2639 } | 2738 } |
| 2640 | 2739 |
| 2641 | 2740 |
| 2642 MaybeObject* Map::GetFastElementsMap() { | 2741 MaybeObject* Map::GetFastElementsMap() { |
| 2643 if (has_fast_elements()) return this; | 2742 if (has_fast_elements()) return this; |
| 2644 Object* obj; | 2743 Object* obj; |
| 2645 { MaybeObject* maybe_obj = CopyDropTransitions(); | 2744 { MaybeObject* maybe_obj = CopyDropTransitions(); |
| 2646 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 2745 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 2647 } | 2746 } |
| 2648 Map* new_map = Map::cast(obj); | 2747 Map* new_map = Map::cast(obj); |
| 2649 new_map->set_has_fast_elements(true); | 2748 new_map->set_has_fast_elements(true); |
| 2650 Counters::map_slow_to_fast_elements.Increment(); | 2749 COUNTERS->map_slow_to_fast_elements()->Increment(); |
| 2651 return new_map; | 2750 return new_map; |
| 2652 } | 2751 } |
| 2653 | 2752 |
| 2654 | 2753 |
| 2655 MaybeObject* Map::GetSlowElementsMap() { | 2754 MaybeObject* Map::GetSlowElementsMap() { |
| 2656 if (!has_fast_elements()) return this; | 2755 if (!has_fast_elements()) return this; |
| 2657 Object* obj; | 2756 Object* obj; |
| 2658 { MaybeObject* maybe_obj = CopyDropTransitions(); | 2757 { MaybeObject* maybe_obj = CopyDropTransitions(); |
| 2659 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 2758 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 2660 } | 2759 } |
| 2661 Map* new_map = Map::cast(obj); | 2760 Map* new_map = Map::cast(obj); |
| 2662 new_map->set_has_fast_elements(false); | 2761 new_map->set_has_fast_elements(false); |
| 2663 Counters::map_fast_to_slow_elements.Increment(); | 2762 COUNTERS->map_fast_to_slow_elements()->Increment(); |
| 2664 return new_map; | 2763 return new_map; |
| 2665 } | 2764 } |
| 2666 | 2765 |
| 2667 | 2766 |
| 2668 MaybeObject* Map::NewExternalArrayElementsMap() { | 2767 MaybeObject* Map::NewExternalArrayElementsMap() { |
| 2669 // TODO(danno): Special case empty object map (or most common case) | 2768 // TODO(danno): Special case empty object map (or most common case) |
| 2670 // to return a pre-canned pixel array map. | 2769 // to return a pre-canned pixel array map. |
| 2671 Object* obj; | 2770 Object* obj; |
| 2672 { MaybeObject* maybe_obj = CopyDropTransitions(); | 2771 { MaybeObject* maybe_obj = CopyDropTransitions(); |
| 2673 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 2772 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 2674 } | 2773 } |
| 2675 Map* new_map = Map::cast(obj); | 2774 Map* new_map = Map::cast(obj); |
| 2676 new_map->set_has_fast_elements(false); | 2775 new_map->set_has_fast_elements(false); |
| 2677 new_map->set_has_external_array_elements(true); | 2776 new_map->set_has_external_array_elements(true); |
| 2678 Counters::map_to_external_array_elements.Increment(); | 2777 COUNTERS->map_to_external_array_elements()->Increment(); |
| 2679 return new_map; | 2778 return new_map; |
| 2680 } | 2779 } |
| 2681 | 2780 |
| 2682 | 2781 |
| 2683 ACCESSORS(Map, instance_descriptors, DescriptorArray, | 2782 ACCESSORS(Map, instance_descriptors, DescriptorArray, |
| 2684 kInstanceDescriptorsOffset) | 2783 kInstanceDescriptorsOffset) |
| 2685 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset) | 2784 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset) |
| 2686 ACCESSORS(Map, constructor, Object, kConstructorOffset) | 2785 ACCESSORS(Map, constructor, Object, kConstructorOffset) |
| 2687 | 2786 |
| 2688 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset) | 2787 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset) |
| 2689 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset) | 2788 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset) |
| 2690 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset) | 2789 ACCESSORS_GCSAFE(JSFunction, |
| 2790 next_function_link, |
| 2791 Object, |
| 2792 kNextFunctionLinkOffset) |
| 2691 | 2793 |
| 2692 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset) | 2794 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset) |
| 2693 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset) | 2795 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset) |
| 2694 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset) | 2796 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset) |
| 2695 | 2797 |
| 2696 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset) | 2798 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset) |
| 2697 | 2799 |
| 2698 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset) | 2800 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset) |
| 2699 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset) | 2801 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset) |
| 2700 ACCESSORS(AccessorInfo, data, Object, kDataOffset) | 2802 ACCESSORS(AccessorInfo, data, Object, kDataOffset) |
| (...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2769 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex) | 2871 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex) |
| 2770 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex) | 2872 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex) |
| 2771 | 2873 |
| 2772 ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex) | 2874 ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex) |
| 2773 ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex) | 2875 ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex) |
| 2774 ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex) | 2876 ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex) |
| 2775 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex) | 2877 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex) |
| 2776 #endif | 2878 #endif |
| 2777 | 2879 |
| 2778 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset) | 2880 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset) |
| 2779 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset) | 2881 ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset) |
| 2780 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset) | 2882 ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset) |
| 2781 ACCESSORS(SharedFunctionInfo, instance_class_name, Object, | 2883 ACCESSORS(SharedFunctionInfo, instance_class_name, Object, |
| 2782 kInstanceClassNameOffset) | 2884 kInstanceClassNameOffset) |
| 2783 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset) | 2885 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset) |
| 2784 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset) | 2886 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset) |
| 2785 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset) | 2887 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset) |
| 2786 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset) | 2888 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset) |
| 2787 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object, | 2889 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object, |
| 2788 kThisPropertyAssignmentsOffset) | 2890 kThisPropertyAssignmentsOffset) |
| 2789 | 2891 |
| 2790 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, | 2892 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, |
| (...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2898 void SharedFunctionInfo::set_live_objects_may_exist(bool value) { | 3000 void SharedFunctionInfo::set_live_objects_may_exist(bool value) { |
| 2899 if (value) { | 3001 if (value) { |
| 2900 set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist)); | 3002 set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist)); |
| 2901 } else { | 3003 } else { |
| 2902 set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist)); | 3004 set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist)); |
| 2903 } | 3005 } |
| 2904 } | 3006 } |
| 2905 | 3007 |
| 2906 | 3008 |
| 2907 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() { | 3009 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() { |
| 2908 return initial_map() != Heap::undefined_value(); | 3010 return initial_map() != HEAP->undefined_value(); |
| 2909 } | 3011 } |
| 2910 | 3012 |
| 2911 | 3013 |
| 2912 bool SharedFunctionInfo::optimization_disabled() { | 3014 bool SharedFunctionInfo::optimization_disabled() { |
| 2913 return BooleanBit::get(compiler_hints(), kOptimizationDisabled); | 3015 return BooleanBit::get(compiler_hints(), kOptimizationDisabled); |
| 2914 } | 3016 } |
| 2915 | 3017 |
| 2916 | 3018 |
| 2917 void SharedFunctionInfo::set_optimization_disabled(bool disable) { | 3019 void SharedFunctionInfo::set_optimization_disabled(bool disable) { |
| 2918 set_compiler_hints(BooleanBit::set(compiler_hints(), | 3020 set_compiler_hints(BooleanBit::set(compiler_hints(), |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2977 } | 3079 } |
| 2978 | 3080 |
| 2979 | 3081 |
| 2980 Code* SharedFunctionInfo::unchecked_code() { | 3082 Code* SharedFunctionInfo::unchecked_code() { |
| 2981 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset)); | 3083 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset)); |
| 2982 } | 3084 } |
| 2983 | 3085 |
| 2984 | 3086 |
| 2985 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) { | 3087 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) { |
| 2986 WRITE_FIELD(this, kCodeOffset, value); | 3088 WRITE_FIELD(this, kCodeOffset, value); |
| 2987 WRITE_BARRIER(this, kCodeOffset, value); | 3089 // TODO(gc) ISOLATESMERGE HEAP |
| 3090 WRITE_BARRIER(HEAP, this, kCodeOffset, value); |
| 2988 } | 3091 } |
| 2989 | 3092 |
| 2990 | 3093 |
| 2991 SerializedScopeInfo* SharedFunctionInfo::scope_info() { | 3094 SerializedScopeInfo* SharedFunctionInfo::scope_info() { |
| 2992 return reinterpret_cast<SerializedScopeInfo*>( | 3095 return reinterpret_cast<SerializedScopeInfo*>( |
| 2993 READ_FIELD(this, kScopeInfoOffset)); | 3096 READ_FIELD(this, kScopeInfoOffset)); |
| 2994 } | 3097 } |
| 2995 | 3098 |
| 2996 | 3099 |
| 2997 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value, | 3100 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value, |
| 2998 WriteBarrierMode mode) { | 3101 WriteBarrierMode mode) { |
| 2999 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value)); | 3102 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value)); |
| 3000 WRITE_BARRIER(this, kScopeInfoOffset, reinterpret_cast<Object*>(value)); | 3103 WRITE_BARRIER(GetHeap(), |
| 3104 this, |
| 3105 kScopeInfoOffset, |
| 3106 reinterpret_cast<Object*>(value)); |
| 3001 } | 3107 } |
| 3002 | 3108 |
| 3003 | 3109 |
| 3004 Smi* SharedFunctionInfo::deopt_counter() { | 3110 Smi* SharedFunctionInfo::deopt_counter() { |
| 3005 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset)); | 3111 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset)); |
| 3006 } | 3112 } |
| 3007 | 3113 |
| 3008 | 3114 |
| 3009 void SharedFunctionInfo::set_deopt_counter(Smi* value) { | 3115 void SharedFunctionInfo::set_deopt_counter(Smi* value) { |
| 3010 WRITE_FIELD(this, kDeoptCounterOffset, value); | 3116 WRITE_FIELD(this, kDeoptCounterOffset, value); |
| 3011 } | 3117 } |
| 3012 | 3118 |
| 3013 | 3119 |
| 3014 bool SharedFunctionInfo::is_compiled() { | 3120 bool SharedFunctionInfo::is_compiled() { |
| 3015 return code() != Builtins::builtin(Builtins::LazyCompile); | 3121 return code() != |
| 3122 Isolate::Current()->builtins()->builtin(Builtins::LazyCompile); |
| 3016 } | 3123 } |
| 3017 | 3124 |
| 3018 | 3125 |
| 3019 bool SharedFunctionInfo::IsApiFunction() { | 3126 bool SharedFunctionInfo::IsApiFunction() { |
| 3020 return function_data()->IsFunctionTemplateInfo(); | 3127 return function_data()->IsFunctionTemplateInfo(); |
| 3021 } | 3128 } |
| 3022 | 3129 |
| 3023 | 3130 |
| 3024 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() { | 3131 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() { |
| 3025 ASSERT(IsApiFunction()); | 3132 ASSERT(IsApiFunction()); |
| (...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3065 SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 3172 SharedFunctionInfo::kDontAdaptArgumentsSentinel; |
| 3066 } | 3173 } |
| 3067 | 3174 |
| 3068 | 3175 |
| 3069 bool JSFunction::IsOptimized() { | 3176 bool JSFunction::IsOptimized() { |
| 3070 return code()->kind() == Code::OPTIMIZED_FUNCTION; | 3177 return code()->kind() == Code::OPTIMIZED_FUNCTION; |
| 3071 } | 3178 } |
| 3072 | 3179 |
| 3073 | 3180 |
| 3074 bool JSFunction::IsMarkedForLazyRecompilation() { | 3181 bool JSFunction::IsMarkedForLazyRecompilation() { |
| 3075 return code() == Builtins::builtin(Builtins::LazyRecompile); | 3182 return code() == GetIsolate()->builtins()->builtin(Builtins::LazyRecompile); |
| 3076 } | 3183 } |
| 3077 | 3184 |
| 3078 | 3185 |
| 3079 Code* JSFunction::code() { | 3186 Code* JSFunction::code() { |
| 3080 return Code::cast(unchecked_code()); | 3187 return Code::cast(unchecked_code()); |
| 3081 } | 3188 } |
| 3082 | 3189 |
| 3083 | 3190 |
| 3084 Code* JSFunction::unchecked_code() { | 3191 Code* JSFunction::unchecked_code() { |
| 3085 return reinterpret_cast<Code*>( | 3192 return reinterpret_cast<Code*>( |
| 3086 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset))); | 3193 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset))); |
| 3087 } | 3194 } |
| 3088 | 3195 |
| 3089 | 3196 |
| 3090 void JSFunction::set_code(Code* value) { | 3197 void JSFunction::set_code(Code* value) { |
| 3091 // Skip the write barrier because code is never in new space. | 3198 // Skip the write barrier because code is never in new space. |
| 3092 ASSERT(!Heap::InNewSpace(value)); | 3199 ASSERT(!HEAP->InNewSpace(value)); |
| 3093 Address entry = value->entry(); | 3200 Address entry = value->entry(); |
| 3094 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); | 3201 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); |
| 3095 IncrementalMarking::RecordWrite(this, value); | 3202 GetHeap()->incremental_marking()->RecordWrite(this, value); |
| 3096 } | 3203 } |
| 3097 | 3204 |
| 3098 | 3205 |
| 3099 void JSFunction::ReplaceCode(Code* code) { | 3206 void JSFunction::ReplaceCode(Code* code) { |
| 3100 bool was_optimized = IsOptimized(); | 3207 bool was_optimized = IsOptimized(); |
| 3101 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION; | 3208 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION; |
| 3102 | 3209 |
| 3103 set_code(code); | 3210 set_code(code); |
| 3104 | 3211 |
| 3105 // Add/remove the function from the list of optimized functions for this | 3212 // Add/remove the function from the list of optimized functions for this |
| (...skipping 17 matching lines...) Expand all Loading... |
| 3123 } | 3230 } |
| 3124 | 3231 |
| 3125 | 3232 |
| 3126 SharedFunctionInfo* JSFunction::unchecked_shared() { | 3233 SharedFunctionInfo* JSFunction::unchecked_shared() { |
| 3127 return reinterpret_cast<SharedFunctionInfo*>( | 3234 return reinterpret_cast<SharedFunctionInfo*>( |
| 3128 READ_FIELD(this, kSharedFunctionInfoOffset)); | 3235 READ_FIELD(this, kSharedFunctionInfoOffset)); |
| 3129 } | 3236 } |
| 3130 | 3237 |
| 3131 | 3238 |
| 3132 void JSFunction::set_context(Object* value) { | 3239 void JSFunction::set_context(Object* value) { |
| 3133 ASSERT(value == Heap::undefined_value() || value->IsContext()); | 3240 ASSERT(value->IsUndefined() || value->IsContext()); |
| 3134 WRITE_FIELD(this, kContextOffset, value); | 3241 WRITE_FIELD(this, kContextOffset, value); |
| 3135 WRITE_BARRIER(this, kContextOffset, value); | 3242 WRITE_BARRIER(GetHeap(), this, kContextOffset, value); |
| 3136 } | 3243 } |
| 3137 | 3244 |
| 3138 ACCESSORS(JSFunction, prototype_or_initial_map, Object, | 3245 ACCESSORS(JSFunction, prototype_or_initial_map, Object, |
| 3139 kPrototypeOrInitialMapOffset) | 3246 kPrototypeOrInitialMapOffset) |
| 3140 | 3247 |
| 3141 | 3248 |
| 3142 Map* JSFunction::initial_map() { | 3249 Map* JSFunction::initial_map() { |
| 3143 return Map::cast(prototype_or_initial_map()); | 3250 return Map::cast(prototype_or_initial_map()); |
| 3144 } | 3251 } |
| 3145 | 3252 |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3180 if (map()->has_non_instance_prototype()) return map()->constructor(); | 3287 if (map()->has_non_instance_prototype()) return map()->constructor(); |
| 3181 return instance_prototype(); | 3288 return instance_prototype(); |
| 3182 } | 3289 } |
| 3183 | 3290 |
| 3184 bool JSFunction::should_have_prototype() { | 3291 bool JSFunction::should_have_prototype() { |
| 3185 return map()->function_with_prototype(); | 3292 return map()->function_with_prototype(); |
| 3186 } | 3293 } |
| 3187 | 3294 |
| 3188 | 3295 |
| 3189 bool JSFunction::is_compiled() { | 3296 bool JSFunction::is_compiled() { |
| 3190 return code() != Builtins::builtin(Builtins::LazyCompile); | 3297 return code() != GetIsolate()->builtins()->builtin(Builtins::LazyCompile); |
| 3191 } | 3298 } |
| 3192 | 3299 |
| 3193 | 3300 |
| 3194 int JSFunction::NumberOfLiterals() { | 3301 int JSFunction::NumberOfLiterals() { |
| 3195 return literals()->length(); | 3302 return literals()->length(); |
| 3196 } | 3303 } |
| 3197 | 3304 |
| 3198 | 3305 |
| 3199 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) { | 3306 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) { |
| 3200 ASSERT(id < kJSBuiltinsCount); // id is unsigned. | 3307 ASSERT(id < kJSBuiltinsCount); // id is unsigned. |
| 3201 return READ_FIELD(this, OffsetOfFunctionWithId(id)); | 3308 return READ_FIELD(this, OffsetOfFunctionWithId(id)); |
| 3202 } | 3309 } |
| 3203 | 3310 |
| 3204 | 3311 |
| 3205 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id, | 3312 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id, |
| 3206 Object* value) { | 3313 Object* value) { |
| 3207 ASSERT(id < kJSBuiltinsCount); // id is unsigned. | 3314 ASSERT(id < kJSBuiltinsCount); // id is unsigned. |
| 3208 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value); | 3315 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value); |
| 3209 WRITE_BARRIER(this, OffsetOfFunctionWithId(id), value); | 3316 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value); |
| 3210 } | 3317 } |
| 3211 | 3318 |
| 3212 | 3319 |
| 3213 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) { | 3320 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) { |
| 3214 ASSERT(id < kJSBuiltinsCount); // id is unsigned. | 3321 ASSERT(id < kJSBuiltinsCount); // id is unsigned. |
| 3215 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id))); | 3322 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id))); |
| 3216 } | 3323 } |
| 3217 | 3324 |
| 3218 | 3325 |
| 3219 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id, | 3326 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id, |
| 3220 Code* value) { | 3327 Code* value) { |
| 3221 ASSERT(id < kJSBuiltinsCount); // id is unsigned. | 3328 ASSERT(id < kJSBuiltinsCount); // id is unsigned. |
| 3222 WRITE_FIELD(this, OffsetOfCodeWithId(id), value); | 3329 WRITE_FIELD(this, OffsetOfCodeWithId(id), value); |
| 3223 ASSERT(!Heap::InNewSpace(value)); | 3330 ASSERT(!HEAP->InNewSpace(value)); |
| 3224 } | 3331 } |
| 3225 | 3332 |
| 3226 | 3333 |
| 3227 Address Proxy::proxy() { | 3334 Address Proxy::proxy() { |
| 3228 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kProxyOffset)); | 3335 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kProxyOffset)); |
| 3229 } | 3336 } |
| 3230 | 3337 |
| 3231 | 3338 |
| 3232 void Proxy::set_proxy(Address value) { | 3339 void Proxy::set_proxy(Address value) { |
| 3233 WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value)); | 3340 WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value)); |
| (...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3363 | 3470 |
| 3364 void JSRegExp::SetDataAt(int index, Object* value) { | 3471 void JSRegExp::SetDataAt(int index, Object* value) { |
| 3365 ASSERT(TypeTag() != NOT_COMPILED); | 3472 ASSERT(TypeTag() != NOT_COMPILED); |
| 3366 ASSERT(index >= kDataIndex); // Only implementation data can be set this way. | 3473 ASSERT(index >= kDataIndex); // Only implementation data can be set this way. |
| 3367 FixedArray::cast(data())->set(index, value); | 3474 FixedArray::cast(data())->set(index, value); |
| 3368 } | 3475 } |
| 3369 | 3476 |
| 3370 | 3477 |
| 3371 JSObject::ElementsKind JSObject::GetElementsKind() { | 3478 JSObject::ElementsKind JSObject::GetElementsKind() { |
| 3372 if (map()->has_fast_elements()) { | 3479 if (map()->has_fast_elements()) { |
| 3373 ASSERT(elements()->map() == Heap::fixed_array_map() || | 3480 ASSERT(elements()->map() == GetHeap()->fixed_array_map() || |
| 3374 elements()->map() == Heap::fixed_cow_array_map()); | 3481 elements()->map() == GetHeap()->fixed_cow_array_map()); |
| 3375 return FAST_ELEMENTS; | 3482 return FAST_ELEMENTS; |
| 3376 } | 3483 } |
| 3377 HeapObject* array = elements(); | 3484 HeapObject* array = elements(); |
| 3378 if (array->IsFixedArray()) { | 3485 if (array->IsFixedArray()) { |
| 3379 // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a | 3486 // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a |
| 3380 // FixedArray, but FAST_ELEMENTS is already handled above. | 3487 // FixedArray, but FAST_ELEMENTS is already handled above. |
| 3381 ASSERT(array->IsDictionary()); | 3488 ASSERT(array->IsDictionary()); |
| 3382 return DICTIONARY_ELEMENTS; | 3489 return DICTIONARY_ELEMENTS; |
| 3383 } | 3490 } |
| 3491 ASSERT(!map()->has_fast_elements()); |
| 3384 if (array->IsExternalArray()) { | 3492 if (array->IsExternalArray()) { |
| 3385 switch (array->map()->instance_type()) { | 3493 switch (array->map()->instance_type()) { |
| 3386 case EXTERNAL_BYTE_ARRAY_TYPE: | 3494 case EXTERNAL_BYTE_ARRAY_TYPE: |
| 3387 return EXTERNAL_BYTE_ELEMENTS; | 3495 return EXTERNAL_BYTE_ELEMENTS; |
| 3388 case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE: | 3496 case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE: |
| 3389 return EXTERNAL_UNSIGNED_BYTE_ELEMENTS; | 3497 return EXTERNAL_UNSIGNED_BYTE_ELEMENTS; |
| 3390 case EXTERNAL_SHORT_ARRAY_TYPE: | 3498 case EXTERNAL_SHORT_ARRAY_TYPE: |
| 3391 return EXTERNAL_SHORT_ELEMENTS; | 3499 return EXTERNAL_SHORT_ELEMENTS; |
| 3392 case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE: | 3500 case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE: |
| 3393 return EXTERNAL_UNSIGNED_SHORT_ELEMENTS; | 3501 return EXTERNAL_UNSIGNED_SHORT_ELEMENTS; |
| (...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3459 bool JSObject::AllowsSetElementsLength() { | 3567 bool JSObject::AllowsSetElementsLength() { |
| 3460 bool result = elements()->IsFixedArray(); | 3568 bool result = elements()->IsFixedArray(); |
| 3461 ASSERT(result == !HasExternalArrayElements()); | 3569 ASSERT(result == !HasExternalArrayElements()); |
| 3462 return result; | 3570 return result; |
| 3463 } | 3571 } |
| 3464 | 3572 |
| 3465 | 3573 |
| 3466 MaybeObject* JSObject::EnsureWritableFastElements() { | 3574 MaybeObject* JSObject::EnsureWritableFastElements() { |
| 3467 ASSERT(HasFastElements()); | 3575 ASSERT(HasFastElements()); |
| 3468 FixedArray* elems = FixedArray::cast(elements()); | 3576 FixedArray* elems = FixedArray::cast(elements()); |
| 3469 if (elems->map() != Heap::fixed_cow_array_map()) return elems; | 3577 Isolate* isolate = GetIsolate(); |
| 3578 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems; |
| 3470 Object* writable_elems; | 3579 Object* writable_elems; |
| 3471 { MaybeObject* maybe_writable_elems = | 3580 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap( |
| 3472 Heap::CopyFixedArrayWithMap(elems, Heap::fixed_array_map()); | 3581 elems, isolate->heap()->fixed_array_map()); |
| 3473 if (!maybe_writable_elems->ToObject(&writable_elems)) { | 3582 if (!maybe_writable_elems->ToObject(&writable_elems)) { |
| 3474 return maybe_writable_elems; | 3583 return maybe_writable_elems; |
| 3475 } | 3584 } |
| 3476 } | 3585 } |
| 3477 set_elements(FixedArray::cast(writable_elems)); | 3586 set_elements(FixedArray::cast(writable_elems)); |
| 3478 Counters::cow_arrays_converted.Increment(); | 3587 isolate->counters()->cow_arrays_converted()->Increment(); |
| 3479 return writable_elems; | 3588 return writable_elems; |
| 3480 } | 3589 } |
| 3481 | 3590 |
| 3482 | 3591 |
| 3483 StringDictionary* JSObject::property_dictionary() { | 3592 StringDictionary* JSObject::property_dictionary() { |
| 3484 ASSERT(!HasFastProperties()); | 3593 ASSERT(!HasFastProperties()); |
| 3485 return StringDictionary::cast(properties()); | 3594 return StringDictionary::cast(properties()); |
| 3486 } | 3595 } |
| 3487 | 3596 |
| 3488 | 3597 |
| (...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3608 | 3717 |
| 3609 PropertyAttributes JSObject::GetPropertyAttribute(String* key) { | 3718 PropertyAttributes JSObject::GetPropertyAttribute(String* key) { |
| 3610 return GetPropertyAttributeWithReceiver(this, key); | 3719 return GetPropertyAttributeWithReceiver(this, key); |
| 3611 } | 3720 } |
| 3612 | 3721 |
| 3613 // TODO(504): this may be useful in other places too where JSGlobalProxy | 3722 // TODO(504): this may be useful in other places too where JSGlobalProxy |
| 3614 // is used. | 3723 // is used. |
| 3615 Object* JSObject::BypassGlobalProxy() { | 3724 Object* JSObject::BypassGlobalProxy() { |
| 3616 if (IsJSGlobalProxy()) { | 3725 if (IsJSGlobalProxy()) { |
| 3617 Object* proto = GetPrototype(); | 3726 Object* proto = GetPrototype(); |
| 3618 if (proto->IsNull()) return Heap::undefined_value(); | 3727 if (proto->IsNull()) return GetHeap()->undefined_value(); |
| 3619 ASSERT(proto->IsJSGlobalObject()); | 3728 ASSERT(proto->IsJSGlobalObject()); |
| 3620 return proto; | 3729 return proto; |
| 3621 } | 3730 } |
| 3622 return this; | 3731 return this; |
| 3623 } | 3732 } |
| 3624 | 3733 |
| 3625 | 3734 |
| 3626 bool JSObject::HasHiddenPropertiesObject() { | 3735 bool JSObject::HasHiddenPropertiesObject() { |
| 3627 ASSERT(!IsJSGlobalProxy()); | 3736 ASSERT(!IsJSGlobalProxy()); |
| 3628 return GetPropertyAttributePostInterceptor(this, | 3737 return GetPropertyAttributePostInterceptor(this, |
| 3629 Heap::hidden_symbol(), | 3738 GetHeap()->hidden_symbol(), |
| 3630 false) != ABSENT; | 3739 false) != ABSENT; |
| 3631 } | 3740 } |
| 3632 | 3741 |
| 3633 | 3742 |
| 3634 Object* JSObject::GetHiddenPropertiesObject() { | 3743 Object* JSObject::GetHiddenPropertiesObject() { |
| 3635 ASSERT(!IsJSGlobalProxy()); | 3744 ASSERT(!IsJSGlobalProxy()); |
| 3636 PropertyAttributes attributes; | 3745 PropertyAttributes attributes; |
| 3637 // You can't install a getter on a property indexed by the hidden symbol, | 3746 // You can't install a getter on a property indexed by the hidden symbol, |
| 3638 // so we can be sure that GetLocalPropertyPostInterceptor returns a real | 3747 // so we can be sure that GetLocalPropertyPostInterceptor returns a real |
| 3639 // object. | 3748 // object. |
| 3640 Object* result = | 3749 Object* result = |
| 3641 GetLocalPropertyPostInterceptor(this, | 3750 GetLocalPropertyPostInterceptor(this, |
| 3642 Heap::hidden_symbol(), | 3751 GetHeap()->hidden_symbol(), |
| 3643 &attributes)->ToObjectUnchecked(); | 3752 &attributes)->ToObjectUnchecked(); |
| 3644 return result; | 3753 return result; |
| 3645 } | 3754 } |
| 3646 | 3755 |
| 3647 | 3756 |
| 3648 MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) { | 3757 MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) { |
| 3649 ASSERT(!IsJSGlobalProxy()); | 3758 ASSERT(!IsJSGlobalProxy()); |
| 3650 return SetPropertyPostInterceptor(Heap::hidden_symbol(), | 3759 return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(), |
| 3651 hidden_obj, | 3760 hidden_obj, |
| 3652 DONT_ENUM, | 3761 DONT_ENUM, |
| 3653 kNonStrictMode); | 3762 kNonStrictMode); |
| 3654 } | 3763 } |
| 3655 | 3764 |
| 3656 | 3765 |
| 3657 bool JSObject::HasElement(uint32_t index) { | 3766 bool JSObject::HasElement(uint32_t index) { |
| 3658 return HasElementWithReceiver(this, index); | 3767 return HasElementWithReceiver(this, index); |
| 3659 } | 3768 } |
| 3660 | 3769 |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3708 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0); | 3817 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0); |
| 3709 int index = HashTable<Shape, Key>::EntryToIndex(entry); | 3818 int index = HashTable<Shape, Key>::EntryToIndex(entry); |
| 3710 AssertNoAllocation no_gc; | 3819 AssertNoAllocation no_gc; |
| 3711 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc); | 3820 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc); |
| 3712 FixedArray::set(index, key, mode); | 3821 FixedArray::set(index, key, mode); |
| 3713 FixedArray::set(index+1, value, mode); | 3822 FixedArray::set(index+1, value, mode); |
| 3714 FixedArray::fast_set(this, index+2, details.AsSmi()); | 3823 FixedArray::fast_set(this, index+2, details.AsSmi()); |
| 3715 } | 3824 } |
| 3716 | 3825 |
| 3717 | 3826 |
| 3718 void Map::ClearCodeCache() { | 3827 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) { |
| 3828 ASSERT(other->IsNumber()); |
| 3829 return key == static_cast<uint32_t>(other->Number()); |
| 3830 } |
| 3831 |
| 3832 |
| 3833 uint32_t NumberDictionaryShape::Hash(uint32_t key) { |
| 3834 return ComputeIntegerHash(key); |
| 3835 } |
| 3836 |
| 3837 |
| 3838 uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) { |
| 3839 ASSERT(other->IsNumber()); |
| 3840 return ComputeIntegerHash(static_cast<uint32_t>(other->Number())); |
| 3841 } |
| 3842 |
| 3843 |
| 3844 MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) { |
| 3845 return Isolate::Current()->heap()->NumberFromUint32(key); |
| 3846 } |
| 3847 |
| 3848 |
| 3849 bool StringDictionaryShape::IsMatch(String* key, Object* other) { |
| 3850 // We know that all entries in a hash table had their hash keys created. |
| 3851 // Use that knowledge to have fast failure. |
| 3852 if (key->Hash() != String::cast(other)->Hash()) return false; |
| 3853 return key->Equals(String::cast(other)); |
| 3854 } |
| 3855 |
| 3856 |
| 3857 uint32_t StringDictionaryShape::Hash(String* key) { |
| 3858 return key->Hash(); |
| 3859 } |
| 3860 |
| 3861 |
| 3862 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) { |
| 3863 return String::cast(other)->Hash(); |
| 3864 } |
| 3865 |
| 3866 |
| 3867 MaybeObject* StringDictionaryShape::AsObject(String* key) { |
| 3868 return key; |
| 3869 } |
| 3870 |
| 3871 |
| 3872 void Map::ClearCodeCache(Heap* heap) { |
| 3719 // No write barrier is needed since empty_fixed_array is not in new space. | 3873 // No write barrier is needed since empty_fixed_array is not in new space. |
| 3720 // Please note this function is used during marking: | 3874 // Please note this function is used during marking: |
| 3721 // - MarkCompactCollector::MarkUnmarkedObject | 3875 // - MarkCompactCollector::MarkUnmarkedObject |
| 3722 ASSERT(!Heap::InNewSpace(Heap::raw_unchecked_empty_fixed_array())); | 3876 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array())); |
| 3723 WRITE_FIELD(this, kCodeCacheOffset, Heap::raw_unchecked_empty_fixed_array()); | 3877 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array()); |
| 3724 } | 3878 } |
| 3725 | 3879 |
| 3726 | 3880 |
| 3727 void JSArray::EnsureSize(int required_size) { | 3881 void JSArray::EnsureSize(int required_size) { |
| 3728 ASSERT(HasFastElements()); | 3882 ASSERT(HasFastElements()); |
| 3729 FixedArray* elts = FixedArray::cast(elements()); | 3883 FixedArray* elts = FixedArray::cast(elements()); |
| 3730 const int kArraySizeThatFitsComfortablyInNewSpace = 128; | 3884 const int kArraySizeThatFitsComfortablyInNewSpace = 128; |
| 3731 if (elts->length() < required_size) { | 3885 if (elts->length() < required_size) { |
| 3732 // Doubling in size would be overkill, but leave some slack to avoid | 3886 // Doubling in size would be overkill, but leave some slack to avoid |
| 3733 // constantly growing. | 3887 // constantly growing. |
| 3734 Expand(required_size + (required_size >> 3)); | 3888 Expand(required_size + (required_size >> 3)); |
| 3735 // It's a performance benefit to keep a frequently used array in new-space. | 3889 // It's a performance benefit to keep a frequently used array in new-space. |
| 3736 } else if (!Heap::new_space()->Contains(elts) && | 3890 } else if (!GetHeap()->new_space()->Contains(elts) && |
| 3737 required_size < kArraySizeThatFitsComfortablyInNewSpace) { | 3891 required_size < kArraySizeThatFitsComfortablyInNewSpace) { |
| 3738 // Expand will allocate a new backing store in new space even if the size | 3892 // Expand will allocate a new backing store in new space even if the size |
| 3739 // we asked for isn't larger than what we had before. | 3893 // we asked for isn't larger than what we had before. |
| 3740 Expand(required_size); | 3894 Expand(required_size); |
| 3741 } | 3895 } |
| 3742 } | 3896 } |
| 3743 | 3897 |
| 3744 | 3898 |
| 3745 void JSArray::set_length(Smi* length) { | 3899 void JSArray::set_length(Smi* length) { |
| 3746 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER); | 3900 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER); |
| 3747 } | 3901 } |
| 3748 | 3902 |
| 3749 | 3903 |
| 3750 void JSArray::SetContent(FixedArray* storage) { | 3904 void JSArray::SetContent(FixedArray* storage) { |
| 3751 set_length(Smi::FromInt(storage->length())); | 3905 set_length(Smi::FromInt(storage->length())); |
| 3752 set_elements(storage); | 3906 set_elements(storage); |
| 3753 } | 3907 } |
| 3754 | 3908 |
| 3755 | 3909 |
| 3756 MaybeObject* FixedArray::Copy() { | 3910 MaybeObject* FixedArray::Copy() { |
| 3757 if (length() == 0) return this; | 3911 if (length() == 0) return this; |
| 3758 return Heap::CopyFixedArray(this); | 3912 return GetHeap()->CopyFixedArray(this); |
| 3913 } |
| 3914 |
| 3915 |
| 3916 Relocatable::Relocatable(Isolate* isolate) { |
| 3917 ASSERT(isolate == Isolate::Current()); |
| 3918 isolate_ = isolate; |
| 3919 prev_ = isolate->relocatable_top(); |
| 3920 isolate->set_relocatable_top(this); |
| 3921 } |
| 3922 |
| 3923 |
| 3924 Relocatable::~Relocatable() { |
| 3925 ASSERT(isolate_ == Isolate::Current()); |
| 3926 ASSERT_EQ(isolate_->relocatable_top(), this); |
| 3927 isolate_->set_relocatable_top(prev_); |
| 3759 } | 3928 } |
| 3760 | 3929 |
| 3761 | 3930 |
| 3762 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) { | 3931 int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) { |
| 3763 return map->instance_size(); | 3932 return map->instance_size(); |
| 3764 } | 3933 } |
| 3765 | 3934 |
| 3766 | 3935 |
| 3767 void Proxy::ProxyIterateBody(ObjectVisitor* v) { | 3936 void Proxy::ProxyIterateBody(ObjectVisitor* v) { |
| 3768 v->VisitExternalReference( | 3937 v->VisitExternalReference( |
| (...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3843 #undef WRITE_INT_FIELD | 4012 #undef WRITE_INT_FIELD |
| 3844 #undef READ_SHORT_FIELD | 4013 #undef READ_SHORT_FIELD |
| 3845 #undef WRITE_SHORT_FIELD | 4014 #undef WRITE_SHORT_FIELD |
| 3846 #undef READ_BYTE_FIELD | 4015 #undef READ_BYTE_FIELD |
| 3847 #undef WRITE_BYTE_FIELD | 4016 #undef WRITE_BYTE_FIELD |
| 3848 | 4017 |
| 3849 | 4018 |
| 3850 } } // namespace v8::internal | 4019 } } // namespace v8::internal |
| 3851 | 4020 |
| 3852 #endif // V8_OBJECTS_INL_H_ | 4021 #endif // V8_OBJECTS_INL_H_ |
| OLD | NEW |