Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(2)

Side by Side Diff: src/objects-inl.h

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/objects-debug.cc ('k') | src/parser.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 22 matching lines...) Expand all
33 // 33 //
34 34
35 #ifndef V8_OBJECTS_INL_H_ 35 #ifndef V8_OBJECTS_INL_H_
36 #define V8_OBJECTS_INL_H_ 36 #define V8_OBJECTS_INL_H_
37 37
38 #include "objects.h" 38 #include "objects.h"
39 #include "contexts.h" 39 #include "contexts.h"
40 #include "conversions-inl.h" 40 #include "conversions-inl.h"
41 #include "heap.h" 41 #include "heap.h"
42 #include "memory.h" 42 #include "memory.h"
43 #include "isolate.h"
43 #include "property.h" 44 #include "property.h"
44 #include "spaces.h" 45 #include "spaces.h"
45 46
46 namespace v8 { 47 namespace v8 {
47 namespace internal { 48 namespace internal {
48 49
49 PropertyDetails::PropertyDetails(Smi* smi) { 50 PropertyDetails::PropertyDetails(Smi* smi) {
50 value_ = smi->value(); 51 value_ = smi->value();
51 } 52 }
52 53
(...skipping 18 matching lines...) Expand all
71 72
72 #define INT_ACCESSORS(holder, name, offset) \ 73 #define INT_ACCESSORS(holder, name, offset) \
73 int holder::name() { return READ_INT_FIELD(this, offset); } \ 74 int holder::name() { return READ_INT_FIELD(this, offset); } \
74 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); } 75 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
75 76
76 77
77 #define ACCESSORS(holder, name, type, offset) \ 78 #define ACCESSORS(holder, name, type, offset) \
78 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \ 79 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
79 void holder::set_##name(type* value, WriteBarrierMode mode) { \ 80 void holder::set_##name(type* value, WriteBarrierMode mode) { \
80 WRITE_FIELD(this, offset, value); \ 81 WRITE_FIELD(this, offset, value); \
81 CONDITIONAL_WRITE_BARRIER(this, offset, mode); \ 82 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode); \
83 }
84
85
86 // GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
87 #define ACCESSORS_GCSAFE(holder, name, type, offset) \
88 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
89 void holder::set_##name(type* value, WriteBarrierMode mode) { \
90 WRITE_FIELD(this, offset, value); \
91 CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode); \
82 } 92 }
83 93
84 94
85 #define SMI_ACCESSORS(holder, name, offset) \ 95 #define SMI_ACCESSORS(holder, name, offset) \
86 int holder::name() { \ 96 int holder::name() { \
87 Object* value = READ_FIELD(this, offset); \ 97 Object* value = READ_FIELD(this, offset); \
88 return Smi::cast(value)->value(); \ 98 return Smi::cast(value)->value(); \
89 } \ 99 } \
90 void holder::set_##name(int value) { \ 100 void holder::set_##name(int value) { \
91 WRITE_FIELD(this, offset, Smi::FromInt(value)); \ 101 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
(...skipping 320 matching lines...) Expand 10 before | Expand all | Expand 10 after
412 && Failure::cast(this)->IsOutOfMemoryException(); 422 && Failure::cast(this)->IsOutOfMemoryException();
413 } 423 }
414 424
415 425
416 bool MaybeObject::IsException() { 426 bool MaybeObject::IsException() {
417 return this == Failure::Exception(); 427 return this == Failure::Exception();
418 } 428 }
419 429
420 430
421 bool MaybeObject::IsTheHole() { 431 bool MaybeObject::IsTheHole() {
422 return this == Heap::the_hole_value(); 432 return !IsFailure() && ToObjectUnchecked()->IsTheHole();
423 } 433 }
424 434
425 435
426 Failure* Failure::cast(MaybeObject* obj) { 436 Failure* Failure::cast(MaybeObject* obj) {
427 ASSERT(HAS_FAILURE_TAG(obj)); 437 ASSERT(HAS_FAILURE_TAG(obj));
428 return reinterpret_cast<Failure*>(obj); 438 return reinterpret_cast<Failure*>(obj);
429 } 439 }
430 440
431 441
432 bool Object::IsJSObject() { 442 bool Object::IsJSObject() {
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
480 if (!IsFixedArray()) return false; 490 if (!IsFixedArray()) return false;
481 // There's actually no way to see the difference between a fixed array and 491 // There's actually no way to see the difference between a fixed array and
482 // a deoptimization data array. Since this is used for asserts we can check 492 // a deoptimization data array. Since this is used for asserts we can check
483 // that the length is plausible though. 493 // that the length is plausible though.
484 if (FixedArray::cast(this)->length() % 2 != 0) return false; 494 if (FixedArray::cast(this)->length() % 2 != 0) return false;
485 return true; 495 return true;
486 } 496 }
487 497
488 498
489 bool Object::IsContext() { 499 bool Object::IsContext() {
490 return Object::IsHeapObject() 500 if (Object::IsHeapObject()) {
491 && (HeapObject::cast(this)->map() == Heap::context_map() || 501 Heap* heap = HeapObject::cast(this)->GetHeap();
492 HeapObject::cast(this)->map() == Heap::catch_context_map() || 502 return (HeapObject::cast(this)->map() == heap->context_map() ||
493 HeapObject::cast(this)->map() == Heap::global_context_map()); 503 HeapObject::cast(this)->map() == heap->catch_context_map() ||
504 HeapObject::cast(this)->map() == heap->global_context_map());
505 }
506 return false;
494 } 507 }
495 508
496 509
497 bool Object::IsCatchContext() { 510 bool Object::IsCatchContext() {
498 return Object::IsHeapObject() 511 return Object::IsHeapObject() &&
499 && HeapObject::cast(this)->map() == Heap::catch_context_map(); 512 HeapObject::cast(this)->map() ==
513 HeapObject::cast(this)->GetHeap()->catch_context_map();
500 } 514 }
501 515
502 516
503 bool Object::IsGlobalContext() { 517 bool Object::IsGlobalContext() {
504 return Object::IsHeapObject() 518 return Object::IsHeapObject() &&
505 && HeapObject::cast(this)->map() == Heap::global_context_map(); 519 HeapObject::cast(this)->map() ==
520 HeapObject::cast(this)->GetHeap()->global_context_map();
506 } 521 }
507 522
508 523
509 bool Object::IsJSFunction() { 524 bool Object::IsJSFunction() {
510 return Object::IsHeapObject() 525 return Object::IsHeapObject()
511 && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE; 526 && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
512 } 527 }
513 528
514 529
515 template <> inline bool Is<JSFunction>(Object* obj) { 530 template <> inline bool Is<JSFunction>(Object* obj) {
516 return obj->IsJSFunction(); 531 return obj->IsJSFunction();
517 } 532 }
518 533
519 534
520 bool Object::IsCode() { 535 bool Object::IsCode() {
521 return Object::IsHeapObject() 536 return Object::IsHeapObject()
522 && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE; 537 && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
523 } 538 }
524 539
525 540
526 bool Object::IsOddball() { 541 bool Object::IsOddball() {
542 ASSERT(HEAP->is_safe_to_read_maps());
527 return Object::IsHeapObject() 543 return Object::IsHeapObject()
528 && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE; 544 && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
529 } 545 }
530 546
531 547
532 bool Object::IsJSGlobalPropertyCell() { 548 bool Object::IsJSGlobalPropertyCell() {
533 return Object::IsHeapObject() 549 return Object::IsHeapObject()
534 && HeapObject::cast(this)->map()->instance_type() 550 && HeapObject::cast(this)->map()->instance_type()
535 == JS_GLOBAL_PROPERTY_CELL_TYPE; 551 == JS_GLOBAL_PROPERTY_CELL_TYPE;
536 } 552 }
(...skipping 24 matching lines...) Expand all
561 } 577 }
562 578
563 579
564 bool Object::IsProxy() { 580 bool Object::IsProxy() {
565 return Object::IsHeapObject() 581 return Object::IsHeapObject()
566 && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE; 582 && HeapObject::cast(this)->map()->instance_type() == PROXY_TYPE;
567 } 583 }
568 584
569 585
570 bool Object::IsBoolean() { 586 bool Object::IsBoolean() {
571 return IsTrue() || IsFalse(); 587 return IsOddball() &&
588 ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
572 } 589 }
573 590
574 591
575 bool Object::IsJSArray() { 592 bool Object::IsJSArray() {
576 return Object::IsHeapObject() 593 return Object::IsHeapObject()
577 && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE; 594 && HeapObject::cast(this)->map()->instance_type() == JS_ARRAY_TYPE;
578 } 595 }
579 596
580 597
581 bool Object::IsJSRegExp() { 598 bool Object::IsJSRegExp() {
582 return Object::IsHeapObject() 599 return Object::IsHeapObject()
583 && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE; 600 && HeapObject::cast(this)->map()->instance_type() == JS_REGEXP_TYPE;
584 } 601 }
585 602
586 603
587 template <> inline bool Is<JSArray>(Object* obj) { 604 template <> inline bool Is<JSArray>(Object* obj) {
588 return obj->IsJSArray(); 605 return obj->IsJSArray();
589 } 606 }
590 607
591 608
592 bool Object::IsHashTable() { 609 bool Object::IsHashTable() {
593 return Object::IsHeapObject() 610 return Object::IsHeapObject() &&
594 && HeapObject::cast(this)->map() == Heap::hash_table_map(); 611 HeapObject::cast(this)->map() ==
612 HeapObject::cast(this)->GetHeap()->hash_table_map();
595 } 613 }
596 614
597 615
598 bool Object::IsDictionary() { 616 bool Object::IsDictionary() {
599 return IsHashTable() && this != Heap::symbol_table(); 617 return IsHashTable() && this !=
618 HeapObject::cast(this)->GetHeap()->symbol_table();
600 } 619 }
601 620
602 621
603 bool Object::IsSymbolTable() { 622 bool Object::IsSymbolTable() {
604 return IsHashTable() && this == Heap::raw_unchecked_symbol_table(); 623 return IsHashTable() && this ==
624 HeapObject::cast(this)->GetHeap()->raw_unchecked_symbol_table();
605 } 625 }
606 626
607 627
608 bool Object::IsJSFunctionResultCache() { 628 bool Object::IsJSFunctionResultCache() {
609 if (!IsFixedArray()) return false; 629 if (!IsFixedArray()) return false;
610 FixedArray* self = FixedArray::cast(this); 630 FixedArray* self = FixedArray::cast(this);
611 int length = self->length(); 631 int length = self->length();
612 if (length < JSFunctionResultCache::kEntriesIndex) return false; 632 if (length < JSFunctionResultCache::kEntriesIndex) return false;
613 if ((length - JSFunctionResultCache::kEntriesIndex) 633 if ((length - JSFunctionResultCache::kEntriesIndex)
614 % JSFunctionResultCache::kEntrySize != 0) { 634 % JSFunctionResultCache::kEntrySize != 0) {
(...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after
711 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \ 731 #define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
712 bool Object::Is##Name() { \ 732 bool Object::Is##Name() { \
713 return Object::IsHeapObject() \ 733 return Object::IsHeapObject() \
714 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \ 734 && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
715 } 735 }
716 STRUCT_LIST(MAKE_STRUCT_PREDICATE) 736 STRUCT_LIST(MAKE_STRUCT_PREDICATE)
717 #undef MAKE_STRUCT_PREDICATE 737 #undef MAKE_STRUCT_PREDICATE
718 738
719 739
720 bool Object::IsUndefined() { 740 bool Object::IsUndefined() {
721 return this == Heap::undefined_value(); 741 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
722 } 742 }
723 743
724 744
725 bool Object::IsNull() { 745 bool Object::IsNull() {
726 return this == Heap::null_value(); 746 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
747 }
748
749
750 bool Object::IsTheHole() {
751 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
727 } 752 }
728 753
729 754
730 bool Object::IsTrue() { 755 bool Object::IsTrue() {
731 return this == Heap::true_value(); 756 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
732 } 757 }
733 758
734 759
735 bool Object::IsFalse() { 760 bool Object::IsFalse() {
736 return this == Heap::false_value(); 761 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
737 } 762 }
738 763
739 764
740 bool Object::IsArgumentsMarker() { 765 bool Object::IsArgumentsMarker() {
741 return this == Heap::arguments_marker(); 766 return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
742 } 767 }
743 768
744 769
745 double Object::Number() { 770 double Object::Number() {
746 ASSERT(IsNumber()); 771 ASSERT(IsNumber());
747 return IsSmi() 772 return IsSmi()
748 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value()) 773 ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
749 : reinterpret_cast<HeapNumber*>(this)->value(); 774 : reinterpret_cast<HeapNumber*>(this)->value();
750 } 775 }
751 776
752 777
753
754 MaybeObject* Object::ToSmi() { 778 MaybeObject* Object::ToSmi() {
755 if (IsSmi()) return this; 779 if (IsSmi()) return this;
756 if (IsHeapNumber()) { 780 if (IsHeapNumber()) {
757 double value = HeapNumber::cast(this)->value(); 781 double value = HeapNumber::cast(this)->value();
758 int int_value = FastD2I(value); 782 int int_value = FastD2I(value);
759 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) { 783 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
760 return Smi::FromInt(int_value); 784 return Smi::FromInt(int_value);
761 } 785 }
762 } 786 }
763 return Failure::Exception(); 787 return Failure::Exception();
764 } 788 }
765 789
766 790
767 bool Object::HasSpecificClassOf(String* name) { 791 bool Object::HasSpecificClassOf(String* name) {
768 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name); 792 return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
769 } 793 }
770 794
771 795
772 MaybeObject* Object::GetElement(uint32_t index) { 796 MaybeObject* Object::GetElement(uint32_t index) {
773 // GetElement can trigger a getter which can cause allocation. 797 // GetElement can trigger a getter which can cause allocation.
774 // This was not always the case. This ASSERT is here to catch 798 // This was not always the case. This ASSERT is here to catch
775 // leftover incorrect uses. 799 // leftover incorrect uses.
776 ASSERT(Heap::IsAllocationAllowed()); 800 ASSERT(HEAP->IsAllocationAllowed());
777 return GetElementWithReceiver(this, index); 801 return GetElementWithReceiver(this, index);
778 } 802 }
779 803
780 804
781 Object* Object::GetElementNoExceptionThrown(uint32_t index) { 805 Object* Object::GetElementNoExceptionThrown(uint32_t index) {
782 MaybeObject* maybe = GetElementWithReceiver(this, index); 806 MaybeObject* maybe = GetElementWithReceiver(this, index);
783 ASSERT(!maybe->IsFailure()); 807 ASSERT(!maybe->IsFailure());
784 Object* result = NULL; // Initialization to please compiler. 808 Object* result = NULL; // Initialization to please compiler.
785 maybe->ToObject(&result); 809 maybe->ToObject(&result);
786 return result; 810 return result;
(...skipping 13 matching lines...) Expand all
800 824
801 #define FIELD_ADDR(p, offset) \ 825 #define FIELD_ADDR(p, offset) \
802 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag) 826 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
803 827
804 #define READ_FIELD(p, offset) \ 828 #define READ_FIELD(p, offset) \
805 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset))) 829 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
806 830
807 #define WRITE_FIELD(p, offset, value) \ 831 #define WRITE_FIELD(p, offset, value) \
808 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value) 832 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
809 833
810 834 // TODO(isolates): Pass heap in to these macros.
811 #define WRITE_BARRIER(object, offset) \ 835 #define WRITE_BARRIER(object, offset) \
812 Heap::RecordWrite(object->address(), offset); 836 object->GetHeap()->RecordWrite(object->address(), offset);
813 837
814 // CONDITIONAL_WRITE_BARRIER must be issued after the actual 838 // CONDITIONAL_WRITE_BARRIER must be issued after the actual
815 // write due to the assert validating the written value. 839 // write due to the assert validating the written value.
816 #define CONDITIONAL_WRITE_BARRIER(object, offset, mode) \ 840 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \
817 if (mode == UPDATE_WRITE_BARRIER) { \ 841 if (mode == UPDATE_WRITE_BARRIER) { \
818 Heap::RecordWrite(object->address(), offset); \ 842 heap->RecordWrite(object->address(), offset); \
819 } else { \ 843 } else { \
820 ASSERT(mode == SKIP_WRITE_BARRIER); \ 844 ASSERT(mode == SKIP_WRITE_BARRIER); \
821 ASSERT(Heap::InNewSpace(object) || \ 845 ASSERT(heap->InNewSpace(object) || \
822 !Heap::InNewSpace(READ_FIELD(object, offset)) || \ 846 !heap->InNewSpace(READ_FIELD(object, offset)) || \
823 Page::FromAddress(object->address())-> \ 847 Page::FromAddress(object->address())-> \
824 IsRegionDirty(object->address() + offset)); \ 848 IsRegionDirty(object->address() + offset)); \
825 } 849 }
826 850
827 #define READ_DOUBLE_FIELD(p, offset) \ 851 #define READ_DOUBLE_FIELD(p, offset) \
828 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset))) 852 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
829 853
830 #define WRITE_DOUBLE_FIELD(p, offset, value) \ 854 #define WRITE_DOUBLE_FIELD(p, offset, value) \
831 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) 855 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
832 856
(...skipping 259 matching lines...) Expand 10 before | Expand all | Expand 10 after
1092 void HeapObject::VerifyObjectField(int offset) { 1116 void HeapObject::VerifyObjectField(int offset) {
1093 VerifyPointer(READ_FIELD(this, offset)); 1117 VerifyPointer(READ_FIELD(this, offset));
1094 } 1118 }
1095 1119
1096 void HeapObject::VerifySmiField(int offset) { 1120 void HeapObject::VerifySmiField(int offset) {
1097 ASSERT(READ_FIELD(this, offset)->IsSmi()); 1121 ASSERT(READ_FIELD(this, offset)->IsSmi());
1098 } 1122 }
1099 #endif 1123 #endif
1100 1124
1101 1125
1126 Heap* HeapObject::GetHeap() {
1127 // During GC, the map pointer in HeapObject is used in various ways that
1128 // prevent us from retrieving Heap from the map.
1129 // Assert that we are not in GC, implement GC code in a way that it doesn't
1130 // pull heap from the map.
1131 ASSERT(HEAP->is_safe_to_read_maps());
1132 return map()->heap();
1133 }
1134
1135
1136 Isolate* HeapObject::GetIsolate() {
1137 Isolate* i = GetHeap()->isolate();
1138 ASSERT(i == Isolate::Current());
1139 return i;
1140 }
1141
1142
1102 Map* HeapObject::map() { 1143 Map* HeapObject::map() {
1103 return map_word().ToMap(); 1144 return map_word().ToMap();
1104 } 1145 }
1105 1146
1106 1147
1107 void HeapObject::set_map(Map* value) { 1148 void HeapObject::set_map(Map* value) {
1108 set_map_word(MapWord::FromMap(value)); 1149 set_map_word(MapWord::FromMap(value));
1109 } 1150 }
1110 1151
1111 1152
(...skipping 104 matching lines...) Expand 10 before | Expand all | Expand 10 after
1216 HeapObject* JSObject::elements() { 1257 HeapObject* JSObject::elements() {
1217 Object* array = READ_FIELD(this, kElementsOffset); 1258 Object* array = READ_FIELD(this, kElementsOffset);
1218 // In the assert below Dictionary is covered under FixedArray. 1259 // In the assert below Dictionary is covered under FixedArray.
1219 ASSERT(array->IsFixedArray() || array->IsExternalArray()); 1260 ASSERT(array->IsFixedArray() || array->IsExternalArray());
1220 return reinterpret_cast<HeapObject*>(array); 1261 return reinterpret_cast<HeapObject*>(array);
1221 } 1262 }
1222 1263
1223 1264
1224 void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) { 1265 void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
1225 ASSERT(map()->has_fast_elements() == 1266 ASSERT(map()->has_fast_elements() ==
1226 (value->map() == Heap::fixed_array_map() || 1267 (value->map() == GetHeap()->fixed_array_map() ||
1227 value->map() == Heap::fixed_cow_array_map())); 1268 value->map() == GetHeap()->fixed_cow_array_map()));
1228 // In the assert below Dictionary is covered under FixedArray. 1269 // In the assert below Dictionary is covered under FixedArray.
1229 ASSERT(value->IsFixedArray() || value->IsExternalArray()); 1270 ASSERT(value->IsFixedArray() || value->IsExternalArray());
1230 WRITE_FIELD(this, kElementsOffset, value); 1271 WRITE_FIELD(this, kElementsOffset, value);
1231 CONDITIONAL_WRITE_BARRIER(this, kElementsOffset, mode); 1272 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
1232 } 1273 }
1233 1274
1234 1275
1235 void JSObject::initialize_properties() { 1276 void JSObject::initialize_properties() {
1236 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); 1277 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1237 WRITE_FIELD(this, kPropertiesOffset, Heap::empty_fixed_array()); 1278 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1238 } 1279 }
1239 1280
1240 1281
1241 void JSObject::initialize_elements() { 1282 void JSObject::initialize_elements() {
1242 ASSERT(map()->has_fast_elements()); 1283 ASSERT(map()->has_fast_elements());
1243 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); 1284 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1244 WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array()); 1285 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1245 } 1286 }
1246 1287
1247 1288
1248 MaybeObject* JSObject::ResetElements() { 1289 MaybeObject* JSObject::ResetElements() {
1249 Object* obj; 1290 Object* obj;
1250 { MaybeObject* maybe_obj = map()->GetFastElementsMap(); 1291 { MaybeObject* maybe_obj = map()->GetFastElementsMap();
1251 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1292 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1252 } 1293 }
1253 set_map(Map::cast(obj)); 1294 set_map(Map::cast(obj));
1254 initialize_elements(); 1295 initialize_elements();
1255 return this; 1296 return this;
1256 } 1297 }
1257 1298
1258 1299
1259 ACCESSORS(Oddball, to_string, String, kToStringOffset) 1300 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1260 ACCESSORS(Oddball, to_number, Object, kToNumberOffset) 1301 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1261 1302
1262 1303
1304 byte Oddball::kind() {
1305 return READ_BYTE_FIELD(this, kKindOffset);
1306 }
1307
1308
1309 void Oddball::set_kind(byte value) {
1310 WRITE_BYTE_FIELD(this, kKindOffset, value);
1311 }
1312
1313
1263 Object* JSGlobalPropertyCell::value() { 1314 Object* JSGlobalPropertyCell::value() {
1264 return READ_FIELD(this, kValueOffset); 1315 return READ_FIELD(this, kValueOffset);
1265 } 1316 }
1266 1317
1267 1318
1268 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) { 1319 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1269 // The write barrier is not used for global property cells. 1320 // The write barrier is not used for global property cells.
1270 ASSERT(!val->IsJSGlobalPropertyCell()); 1321 ASSERT(!val->IsJSGlobalPropertyCell());
1271 WRITE_FIELD(this, kValueOffset, val); 1322 WRITE_FIELD(this, kValueOffset, val);
1272 } 1323 }
(...skipping 115 matching lines...) Expand 10 before | Expand all | Expand 10 after
1388 1439
1389 1440
1390 Object* JSObject::InObjectPropertyAtPut(int index, 1441 Object* JSObject::InObjectPropertyAtPut(int index,
1391 Object* value, 1442 Object* value,
1392 WriteBarrierMode mode) { 1443 WriteBarrierMode mode) {
1393 // Adjust for the number of properties stored in the object. 1444 // Adjust for the number of properties stored in the object.
1394 index -= map()->inobject_properties(); 1445 index -= map()->inobject_properties();
1395 ASSERT(index < 0); 1446 ASSERT(index < 0);
1396 int offset = map()->instance_size() + (index * kPointerSize); 1447 int offset = map()->instance_size() + (index * kPointerSize);
1397 WRITE_FIELD(this, offset, value); 1448 WRITE_FIELD(this, offset, value);
1398 CONDITIONAL_WRITE_BARRIER(this, offset, mode); 1449 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
1399 return value; 1450 return value;
1400 } 1451 }
1401 1452
1402 1453
1403 1454
1404 void JSObject::InitializeBody(int object_size, Object* value) { 1455 void JSObject::InitializeBody(int object_size, Object* value) {
1405 ASSERT(!value->IsHeapObject() || !Heap::InNewSpace(value)); 1456 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
1406 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { 1457 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1407 WRITE_FIELD(this, offset, value); 1458 WRITE_FIELD(this, offset, value);
1408 } 1459 }
1409 } 1460 }
1410 1461
1411 1462
1412 bool JSObject::HasFastProperties() { 1463 bool JSObject::HasFastProperties() {
1413 return !properties()->IsDictionary(); 1464 return !properties()->IsDictionary();
1414 } 1465 }
1415 1466
1416 1467
1417 int JSObject::MaxFastProperties() { 1468 int JSObject::MaxFastProperties() {
1418 // Allow extra fast properties if the object has more than 1469 // Allow extra fast properties if the object has more than
1419 // kMaxFastProperties in-object properties. When this is the case, 1470 // kMaxFastProperties in-object properties. When this is the case,
1420 // it is very unlikely that the object is being used as a dictionary 1471 // it is very unlikely that the object is being used as a dictionary
1421 // and there is a good chance that allowing more map transitions 1472 // and there is a good chance that allowing more map transitions
1422 // will be worth it. 1473 // will be worth it.
1423 return Max(map()->inobject_properties(), kMaxFastProperties); 1474 return Max(map()->inobject_properties(), kMaxFastProperties);
1424 } 1475 }
1425 1476
1426 1477
1427 void Struct::InitializeBody(int object_size) { 1478 void Struct::InitializeBody(int object_size) {
1428 Object* value = Heap::undefined_value(); 1479 Object* value = GetHeap()->undefined_value();
1429 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { 1480 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1430 WRITE_FIELD(this, offset, value); 1481 WRITE_FIELD(this, offset, value);
1431 } 1482 }
1432 } 1483 }
1433 1484
1434 1485
1435 bool Object::ToArrayIndex(uint32_t* index) { 1486 bool Object::ToArrayIndex(uint32_t* index) {
1436 if (IsSmi()) { 1487 if (IsSmi()) {
1437 int value = Smi::cast(this)->value(); 1488 int value = Smi::cast(this)->value();
1438 if (value < 0) return false; 1489 if (value < 0) return false;
(...skipping 25 matching lines...) Expand all
1464 } 1515 }
1465 1516
1466 1517
1467 Object* FixedArray::get(int index) { 1518 Object* FixedArray::get(int index) {
1468 ASSERT(index >= 0 && index < this->length()); 1519 ASSERT(index >= 0 && index < this->length());
1469 return READ_FIELD(this, kHeaderSize + index * kPointerSize); 1520 return READ_FIELD(this, kHeaderSize + index * kPointerSize);
1470 } 1521 }
1471 1522
1472 1523
1473 void FixedArray::set(int index, Smi* value) { 1524 void FixedArray::set(int index, Smi* value) {
1474 ASSERT(map() != Heap::fixed_cow_array_map()); 1525 ASSERT(map() != HEAP->fixed_cow_array_map());
1475 ASSERT(reinterpret_cast<Object*>(value)->IsSmi()); 1526 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1476 int offset = kHeaderSize + index * kPointerSize; 1527 int offset = kHeaderSize + index * kPointerSize;
1477 WRITE_FIELD(this, offset, value); 1528 WRITE_FIELD(this, offset, value);
1478 } 1529 }
1479 1530
1480 1531
1481 void FixedArray::set(int index, Object* value) { 1532 void FixedArray::set(int index, Object* value) {
1482 ASSERT(map() != Heap::fixed_cow_array_map()); 1533 ASSERT(map() != HEAP->fixed_cow_array_map());
1483 ASSERT(index >= 0 && index < this->length()); 1534 ASSERT(index >= 0 && index < this->length());
1484 int offset = kHeaderSize + index * kPointerSize; 1535 int offset = kHeaderSize + index * kPointerSize;
1485 WRITE_FIELD(this, offset, value); 1536 WRITE_FIELD(this, offset, value);
1486 WRITE_BARRIER(this, offset); 1537 WRITE_BARRIER(this, offset);
1487 } 1538 }
1488 1539
1489 1540
1490 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) { 1541 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1491 if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER; 1542 if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1492 return UPDATE_WRITE_BARRIER; 1543 return UPDATE_WRITE_BARRIER;
1493 } 1544 }
1494 1545
1495 1546
1496 void FixedArray::set(int index, 1547 void FixedArray::set(int index,
1497 Object* value, 1548 Object* value,
1498 WriteBarrierMode mode) { 1549 WriteBarrierMode mode) {
1499 ASSERT(map() != Heap::fixed_cow_array_map()); 1550 ASSERT(map() != HEAP->fixed_cow_array_map());
1500 ASSERT(index >= 0 && index < this->length()); 1551 ASSERT(index >= 0 && index < this->length());
1501 int offset = kHeaderSize + index * kPointerSize; 1552 int offset = kHeaderSize + index * kPointerSize;
1502 WRITE_FIELD(this, offset, value); 1553 WRITE_FIELD(this, offset, value);
1503 CONDITIONAL_WRITE_BARRIER(this, offset, mode); 1554 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
1504 } 1555 }
1505 1556
1506 1557
1507 void FixedArray::fast_set(FixedArray* array, int index, Object* value) { 1558 void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
1508 ASSERT(array->map() != Heap::raw_unchecked_fixed_cow_array_map()); 1559 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1509 ASSERT(index >= 0 && index < array->length()); 1560 ASSERT(index >= 0 && index < array->length());
1510 ASSERT(!Heap::InNewSpace(value)); 1561 ASSERT(!HEAP->InNewSpace(value));
1511 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value); 1562 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1512 } 1563 }
1513 1564
1514 1565
1515 void FixedArray::set_undefined(int index) { 1566 void FixedArray::set_undefined(int index) {
1516 ASSERT(map() != Heap::fixed_cow_array_map()); 1567 ASSERT(map() != HEAP->fixed_cow_array_map());
1568 set_undefined(GetHeap(), index);
1569 }
1570
1571
1572 void FixedArray::set_undefined(Heap* heap, int index) {
1517 ASSERT(index >= 0 && index < this->length()); 1573 ASSERT(index >= 0 && index < this->length());
1518 ASSERT(!Heap::InNewSpace(Heap::undefined_value())); 1574 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1519 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, 1575 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1520 Heap::undefined_value()); 1576 heap->undefined_value());
1521 } 1577 }
1522 1578
1523 1579
1524 void FixedArray::set_null(int index) { 1580 void FixedArray::set_null(int index) {
1525 ASSERT(map() != Heap::fixed_cow_array_map()); 1581 set_null(GetHeap(), index);
1582 }
1583
1584
1585 void FixedArray::set_null(Heap* heap, int index) {
1526 ASSERT(index >= 0 && index < this->length()); 1586 ASSERT(index >= 0 && index < this->length());
1527 ASSERT(!Heap::InNewSpace(Heap::null_value())); 1587 ASSERT(!heap->InNewSpace(heap->null_value()));
1528 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value()); 1588 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1529 } 1589 }
1530 1590
1531 1591
1532 void FixedArray::set_the_hole(int index) { 1592 void FixedArray::set_the_hole(int index) {
1533 ASSERT(map() != Heap::fixed_cow_array_map()); 1593 ASSERT(map() != HEAP->fixed_cow_array_map());
1534 ASSERT(index >= 0 && index < this->length()); 1594 ASSERT(index >= 0 && index < this->length());
1535 ASSERT(!Heap::InNewSpace(Heap::the_hole_value())); 1595 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1536 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::the_hole_value()); 1596 WRITE_FIELD(this,
1597 kHeaderSize + index * kPointerSize,
1598 GetHeap()->the_hole_value());
1537 } 1599 }
1538 1600
1539 1601
1540 void FixedArray::set_unchecked(int index, Smi* value) { 1602 void FixedArray::set_unchecked(int index, Smi* value) {
1541 ASSERT(reinterpret_cast<Object*>(value)->IsSmi()); 1603 ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
1542 int offset = kHeaderSize + index * kPointerSize; 1604 int offset = kHeaderSize + index * kPointerSize;
1543 WRITE_FIELD(this, offset, value); 1605 WRITE_FIELD(this, offset, value);
1544 } 1606 }
1545 1607
1546 1608
1547 void FixedArray::set_unchecked(int index, 1609 void FixedArray::set_unchecked(Heap* heap,
1610 int index,
1548 Object* value, 1611 Object* value,
1549 WriteBarrierMode mode) { 1612 WriteBarrierMode mode) {
1550 int offset = kHeaderSize + index * kPointerSize; 1613 int offset = kHeaderSize + index * kPointerSize;
1551 WRITE_FIELD(this, offset, value); 1614 WRITE_FIELD(this, offset, value);
1552 CONDITIONAL_WRITE_BARRIER(this, offset, mode); 1615 CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
1553 } 1616 }
1554 1617
1555 1618
1556 void FixedArray::set_null_unchecked(int index) { 1619 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1557 ASSERT(index >= 0 && index < this->length()); 1620 ASSERT(index >= 0 && index < this->length());
1558 ASSERT(!Heap::InNewSpace(Heap::null_value())); 1621 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1559 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value()); 1622 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1560 } 1623 }
1561 1624
1562 1625
1563 Object** FixedArray::data_start() { 1626 Object** FixedArray::data_start() {
1564 return HeapObject::RawField(this, kHeaderSize); 1627 return HeapObject::RawField(this, kHeaderSize);
1565 } 1628 }
1566 1629
1567 1630
1568 bool DescriptorArray::IsEmpty() { 1631 bool DescriptorArray::IsEmpty() {
1569 ASSERT(this == Heap::empty_descriptor_array() || 1632 ASSERT(this->length() > kFirstIndex ||
1570 this->length() > 2); 1633 this == HEAP->empty_descriptor_array());
1571 return this == Heap::empty_descriptor_array(); 1634 return length() <= kFirstIndex;
1572 } 1635 }
1573 1636
1574 1637
1575 void DescriptorArray::fast_swap(FixedArray* array, int first, int second) { 1638 void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
1576 Object* tmp = array->get(first); 1639 Object* tmp = array->get(first);
1577 fast_set(array, first, array->get(second)); 1640 fast_set(array, first, array->get(second));
1578 fast_set(array, second, tmp); 1641 fast_set(array, second, tmp);
1579 } 1642 }
1580 1643
1581 1644
1582 int DescriptorArray::Search(String* name) { 1645 int DescriptorArray::Search(String* name) {
1583 SLOW_ASSERT(IsSortedNoDuplicates()); 1646 SLOW_ASSERT(IsSortedNoDuplicates());
1584 1647
1585 // Check for empty descriptor array. 1648 // Check for empty descriptor array.
1586 int nof = number_of_descriptors(); 1649 int nof = number_of_descriptors();
1587 if (nof == 0) return kNotFound; 1650 if (nof == 0) return kNotFound;
1588 1651
1589 // Fast case: do linear search for small arrays. 1652 // Fast case: do linear search for small arrays.
1590 const int kMaxElementsForLinearSearch = 8; 1653 const int kMaxElementsForLinearSearch = 8;
1591 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) { 1654 if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
1592 return LinearSearch(name, nof); 1655 return LinearSearch(name, nof);
1593 } 1656 }
1594 1657
1595 // Slow case: perform binary search. 1658 // Slow case: perform binary search.
1596 return BinarySearch(name, 0, nof - 1); 1659 return BinarySearch(name, 0, nof - 1);
1597 } 1660 }
1598 1661
1599 1662
1600 int DescriptorArray::SearchWithCache(String* name) { 1663 int DescriptorArray::SearchWithCache(String* name) {
1601 int number = DescriptorLookupCache::Lookup(this, name); 1664 int number = GetIsolate()->descriptor_lookup_cache()->Lookup(this, name);
1602 if (number == DescriptorLookupCache::kAbsent) { 1665 if (number == DescriptorLookupCache::kAbsent) {
1603 number = Search(name); 1666 number = Search(name);
1604 DescriptorLookupCache::Update(this, name, number); 1667 GetIsolate()->descriptor_lookup_cache()->Update(this, name, number);
1605 } 1668 }
1606 return number; 1669 return number;
1607 } 1670 }
1608 1671
1609 1672
1610 String* DescriptorArray::GetKey(int descriptor_number) { 1673 String* DescriptorArray::GetKey(int descriptor_number) {
1611 ASSERT(descriptor_number < number_of_descriptors()); 1674 ASSERT(descriptor_number < number_of_descriptors());
1612 return String::cast(get(ToKeyIndex(descriptor_number))); 1675 return String::cast(get(ToKeyIndex(descriptor_number)));
1613 } 1676 }
1614 1677
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
1680 GetValue(descriptor_number), 1743 GetValue(descriptor_number),
1681 GetDetails(descriptor_number)); 1744 GetDetails(descriptor_number));
1682 } 1745 }
1683 1746
1684 1747
1685 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { 1748 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
1686 // Range check. 1749 // Range check.
1687 ASSERT(descriptor_number < number_of_descriptors()); 1750 ASSERT(descriptor_number < number_of_descriptors());
1688 1751
1689 // Make sure none of the elements in desc are in new space. 1752 // Make sure none of the elements in desc are in new space.
1690 ASSERT(!Heap::InNewSpace(desc->GetKey())); 1753 ASSERT(!HEAP->InNewSpace(desc->GetKey()));
1691 ASSERT(!Heap::InNewSpace(desc->GetValue())); 1754 ASSERT(!HEAP->InNewSpace(desc->GetValue()));
1692 1755
1693 fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey()); 1756 fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
1694 FixedArray* content_array = GetContentArray(); 1757 FixedArray* content_array = GetContentArray();
1695 fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue()); 1758 fast_set(content_array, ToValueIndex(descriptor_number), desc->GetValue());
1696 fast_set(content_array, ToDetailsIndex(descriptor_number), 1759 fast_set(content_array, ToDetailsIndex(descriptor_number),
1697 desc->GetDetails().AsSmi()); 1760 desc->GetDetails().AsSmi());
1698 } 1761 }
1699 1762
1700 1763
1701 void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) { 1764 void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int src_index) {
1702 Descriptor desc; 1765 Descriptor desc;
1703 src->Get(src_index, &desc); 1766 src->Get(src_index, &desc);
1704 Set(index, &desc); 1767 Set(index, &desc);
1705 } 1768 }
1706 1769
1707 1770
1708 void DescriptorArray::Swap(int first, int second) { 1771 void DescriptorArray::Swap(int first, int second) {
1709 fast_swap(this, ToKeyIndex(first), ToKeyIndex(second)); 1772 fast_swap(this, ToKeyIndex(first), ToKeyIndex(second));
1710 FixedArray* content_array = GetContentArray(); 1773 FixedArray* content_array = GetContentArray();
1711 fast_swap(content_array, ToValueIndex(first), ToValueIndex(second)); 1774 fast_swap(content_array, ToValueIndex(first), ToValueIndex(second));
1712 fast_swap(content_array, ToDetailsIndex(first), ToDetailsIndex(second)); 1775 fast_swap(content_array, ToDetailsIndex(first), ToDetailsIndex(second));
1713 } 1776 }
1714 1777
1715 1778
1779 template<typename Shape, typename Key>
1780 int HashTable<Shape, Key>::FindEntry(Key key) {
1781 return FindEntry(GetIsolate(), key);
1782 }
1783
1784
1785 // Find entry for key otherwise return kNotFound.
1786 template<typename Shape, typename Key>
1787 int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
1788 uint32_t capacity = Capacity();
1789 uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
1790 uint32_t count = 1;
1791 // EnsureCapacity will guarantee the hash table is never full.
1792 while (true) {
1793 Object* element = KeyAt(entry);
1794 if (element == isolate->heap()->undefined_value()) break; // Empty entry.
1795 if (element != isolate->heap()->null_value() &&
1796 Shape::IsMatch(key, element)) return entry;
1797 entry = NextProbe(entry, count++, capacity);
1798 }
1799 return kNotFound;
1800 }
1801
1802
1716 bool NumberDictionary::requires_slow_elements() { 1803 bool NumberDictionary::requires_slow_elements() {
1717 Object* max_index_object = get(kMaxNumberKeyIndex); 1804 Object* max_index_object = get(kMaxNumberKeyIndex);
1718 if (!max_index_object->IsSmi()) return false; 1805 if (!max_index_object->IsSmi()) return false;
1719 return 0 != 1806 return 0 !=
1720 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask); 1807 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
1721 } 1808 }
1722 1809
1723 uint32_t NumberDictionary::max_number_key() { 1810 uint32_t NumberDictionary::max_number_key() {
1724 ASSERT(!requires_slow_elements()); 1811 ASSERT(!requires_slow_elements());
1725 Object* max_index_object = get(kMaxNumberKeyIndex); 1812 Object* max_index_object = get(kMaxNumberKeyIndex);
(...skipping 226 matching lines...) Expand 10 before | Expand all | Expand 10 after
1952 } 2039 }
1953 2040
1954 2041
1955 Object* ConsString::unchecked_first() { 2042 Object* ConsString::unchecked_first() {
1956 return READ_FIELD(this, kFirstOffset); 2043 return READ_FIELD(this, kFirstOffset);
1957 } 2044 }
1958 2045
1959 2046
1960 void ConsString::set_first(String* value, WriteBarrierMode mode) { 2047 void ConsString::set_first(String* value, WriteBarrierMode mode) {
1961 WRITE_FIELD(this, kFirstOffset, value); 2048 WRITE_FIELD(this, kFirstOffset, value);
1962 CONDITIONAL_WRITE_BARRIER(this, kFirstOffset, mode); 2049 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
1963 } 2050 }
1964 2051
1965 2052
1966 String* ConsString::second() { 2053 String* ConsString::second() {
1967 return String::cast(READ_FIELD(this, kSecondOffset)); 2054 return String::cast(READ_FIELD(this, kSecondOffset));
1968 } 2055 }
1969 2056
1970 2057
1971 Object* ConsString::unchecked_second() { 2058 Object* ConsString::unchecked_second() {
1972 return READ_FIELD(this, kSecondOffset); 2059 return READ_FIELD(this, kSecondOffset);
1973 } 2060 }
1974 2061
1975 2062
1976 void ConsString::set_second(String* value, WriteBarrierMode mode) { 2063 void ConsString::set_second(String* value, WriteBarrierMode mode) {
1977 WRITE_FIELD(this, kSecondOffset, value); 2064 WRITE_FIELD(this, kSecondOffset, value);
1978 CONDITIONAL_WRITE_BARRIER(this, kSecondOffset, mode); 2065 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
1979 } 2066 }
1980 2067
1981 2068
1982 ExternalAsciiString::Resource* ExternalAsciiString::resource() { 2069 ExternalAsciiString::Resource* ExternalAsciiString::resource() {
1983 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)); 2070 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
1984 } 2071 }
1985 2072
1986 2073
1987 void ExternalAsciiString::set_resource( 2074 void ExternalAsciiString::set_resource(
1988 ExternalAsciiString::Resource* resource) { 2075 ExternalAsciiString::Resource* resource) {
(...skipping 15 matching lines...) Expand all
2004 void JSFunctionResultCache::MakeZeroSize() { 2091 void JSFunctionResultCache::MakeZeroSize() {
2005 set_finger_index(kEntriesIndex); 2092 set_finger_index(kEntriesIndex);
2006 set_size(kEntriesIndex); 2093 set_size(kEntriesIndex);
2007 } 2094 }
2008 2095
2009 2096
2010 void JSFunctionResultCache::Clear() { 2097 void JSFunctionResultCache::Clear() {
2011 int cache_size = size(); 2098 int cache_size = size();
2012 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex)); 2099 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
2013 MemsetPointer(entries_start, 2100 MemsetPointer(entries_start,
2014 Heap::the_hole_value(), 2101 GetHeap()->the_hole_value(),
2015 cache_size - kEntriesIndex); 2102 cache_size - kEntriesIndex);
2016 MakeZeroSize(); 2103 MakeZeroSize();
2017 } 2104 }
2018 2105
2019 2106
2020 int JSFunctionResultCache::size() { 2107 int JSFunctionResultCache::size() {
2021 return Smi::cast(get(kCacheSizeIndex))->value(); 2108 return Smi::cast(get(kCacheSizeIndex))->value();
2022 } 2109 }
2023 2110
2024 2111
(...skipping 696 matching lines...) Expand 10 before | Expand all | Expand 10 after
2721 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); 2808 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
2722 // GetCodeFromTargetAddress might be called when marking objects during mark 2809 // GetCodeFromTargetAddress might be called when marking objects during mark
2723 // sweep. reinterpret_cast is therefore used instead of the more appropriate 2810 // sweep. reinterpret_cast is therefore used instead of the more appropriate
2724 // Code::cast. Code::cast does not work when the object's map is 2811 // Code::cast. Code::cast does not work when the object's map is
2725 // marked. 2812 // marked.
2726 Code* result = reinterpret_cast<Code*>(code); 2813 Code* result = reinterpret_cast<Code*>(code);
2727 return result; 2814 return result;
2728 } 2815 }
2729 2816
2730 2817
2818 Heap* Map::heap() {
2819 // NOTE: address() helper is not used to save one instruction.
2820 Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
2821 ASSERT(heap != NULL);
2822 ASSERT(heap->isolate() == Isolate::Current());
2823 return heap;
2824 }
2825
2826
2731 Object* Code::GetObjectFromEntryAddress(Address location_of_address) { 2827 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
2732 return HeapObject:: 2828 return HeapObject::
2733 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); 2829 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
2734 } 2830 }
2735 2831
2736 2832
2737 Object* Map::prototype() { 2833 Object* Map::prototype() {
2738 return READ_FIELD(this, kPrototypeOffset); 2834 return READ_FIELD(this, kPrototypeOffset);
2739 } 2835 }
2740 2836
2741 2837
2742 void Map::set_prototype(Object* value, WriteBarrierMode mode) { 2838 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
2743 ASSERT(value->IsNull() || value->IsJSObject()); 2839 ASSERT(value->IsNull() || value->IsJSObject());
2744 WRITE_FIELD(this, kPrototypeOffset, value); 2840 WRITE_FIELD(this, kPrototypeOffset, value);
2745 CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, mode); 2841 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode);
2746 } 2842 }
2747 2843
2748 2844
2749 MaybeObject* Map::GetFastElementsMap() { 2845 MaybeObject* Map::GetFastElementsMap() {
2750 if (has_fast_elements()) return this; 2846 if (has_fast_elements()) return this;
2751 Object* obj; 2847 Object* obj;
2752 { MaybeObject* maybe_obj = CopyDropTransitions(); 2848 { MaybeObject* maybe_obj = CopyDropTransitions();
2753 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 2849 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2754 } 2850 }
2755 Map* new_map = Map::cast(obj); 2851 Map* new_map = Map::cast(obj);
2756 new_map->set_has_fast_elements(true); 2852 new_map->set_has_fast_elements(true);
2757 Counters::map_slow_to_fast_elements.Increment(); 2853 COUNTERS->map_slow_to_fast_elements()->Increment();
2758 return new_map; 2854 return new_map;
2759 } 2855 }
2760 2856
2761 2857
2762 MaybeObject* Map::GetSlowElementsMap() { 2858 MaybeObject* Map::GetSlowElementsMap() {
2763 if (!has_fast_elements()) return this; 2859 if (!has_fast_elements()) return this;
2764 Object* obj; 2860 Object* obj;
2765 { MaybeObject* maybe_obj = CopyDropTransitions(); 2861 { MaybeObject* maybe_obj = CopyDropTransitions();
2766 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 2862 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2767 } 2863 }
2768 Map* new_map = Map::cast(obj); 2864 Map* new_map = Map::cast(obj);
2769 new_map->set_has_fast_elements(false); 2865 new_map->set_has_fast_elements(false);
2770 Counters::map_fast_to_slow_elements.Increment(); 2866 COUNTERS->map_fast_to_slow_elements()->Increment();
2771 return new_map; 2867 return new_map;
2772 } 2868 }
2773 2869
2774 2870
2775 MaybeObject* Map::NewExternalArrayElementsMap() { 2871 MaybeObject* Map::NewExternalArrayElementsMap() {
2776 // TODO(danno): Special case empty object map (or most common case) 2872 // TODO(danno): Special case empty object map (or most common case)
2777 // to return a pre-canned pixel array map. 2873 // to return a pre-canned pixel array map.
2778 Object* obj; 2874 Object* obj;
2779 { MaybeObject* maybe_obj = CopyDropTransitions(); 2875 { MaybeObject* maybe_obj = CopyDropTransitions();
2780 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 2876 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2781 } 2877 }
2782 Map* new_map = Map::cast(obj); 2878 Map* new_map = Map::cast(obj);
2783 new_map->set_has_fast_elements(false); 2879 new_map->set_has_fast_elements(false);
2784 new_map->set_has_external_array_elements(true); 2880 new_map->set_has_external_array_elements(true);
2785 Counters::map_to_external_array_elements.Increment(); 2881 COUNTERS->map_to_external_array_elements()->Increment();
2786 return new_map; 2882 return new_map;
2787 } 2883 }
2788 2884
2789 2885
2790 ACCESSORS(Map, instance_descriptors, DescriptorArray, 2886 ACCESSORS(Map, instance_descriptors, DescriptorArray,
2791 kInstanceDescriptorsOffset) 2887 kInstanceDescriptorsOffset)
2792 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset) 2888 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
2793 ACCESSORS(Map, constructor, Object, kConstructorOffset) 2889 ACCESSORS(Map, constructor, Object, kConstructorOffset)
2794 2890
2795 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset) 2891 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
2796 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset) 2892 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
2797 ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset) 2893 ACCESSORS_GCSAFE(JSFunction, next_function_link, Object,
2894 kNextFunctionLinkOffset)
2798 2895
2799 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset) 2896 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
2800 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset) 2897 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
2801 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset) 2898 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
2802 2899
2803 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset) 2900 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
2804 2901
2805 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset) 2902 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
2806 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset) 2903 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
2807 ACCESSORS(AccessorInfo, data, Object, kDataOffset) 2904 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
2876 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex) 2973 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
2877 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex) 2974 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
2878 2975
2879 ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex) 2976 ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
2880 ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex) 2977 ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
2881 ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex) 2978 ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
2882 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex) 2979 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
2883 #endif 2980 #endif
2884 2981
2885 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset) 2982 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
2886 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset) 2983 ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
2887 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset) 2984 ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
2888 ACCESSORS(SharedFunctionInfo, instance_class_name, Object, 2985 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
2889 kInstanceClassNameOffset) 2986 kInstanceClassNameOffset)
2890 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset) 2987 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
2891 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset) 2988 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
2892 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset) 2989 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
2893 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset) 2990 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
2894 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object, 2991 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
2895 kThisPropertyAssignmentsOffset) 2992 kThisPropertyAssignmentsOffset)
2896 2993
2897 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, 2994 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
(...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after
3005 void SharedFunctionInfo::set_live_objects_may_exist(bool value) { 3102 void SharedFunctionInfo::set_live_objects_may_exist(bool value) {
3006 if (value) { 3103 if (value) {
3007 set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist)); 3104 set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist));
3008 } else { 3105 } else {
3009 set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist)); 3106 set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist));
3010 } 3107 }
3011 } 3108 }
3012 3109
3013 3110
3014 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() { 3111 bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
3015 return initial_map() != Heap::undefined_value(); 3112 return initial_map() != HEAP->undefined_value();
3016 } 3113 }
3017 3114
3018 3115
3019 bool SharedFunctionInfo::optimization_disabled() { 3116 bool SharedFunctionInfo::optimization_disabled() {
3020 return BooleanBit::get(compiler_hints(), kOptimizationDisabled); 3117 return BooleanBit::get(compiler_hints(), kOptimizationDisabled);
3021 } 3118 }
3022 3119
3023 3120
3024 void SharedFunctionInfo::set_optimization_disabled(bool disable) { 3121 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
3025 set_compiler_hints(BooleanBit::set(compiler_hints(), 3122 set_compiler_hints(BooleanBit::set(compiler_hints(),
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
3084 } 3181 }
3085 3182
3086 3183
3087 Code* SharedFunctionInfo::unchecked_code() { 3184 Code* SharedFunctionInfo::unchecked_code() {
3088 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset)); 3185 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3089 } 3186 }
3090 3187
3091 3188
3092 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) { 3189 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3093 WRITE_FIELD(this, kCodeOffset, value); 3190 WRITE_FIELD(this, kCodeOffset, value);
3094 CONDITIONAL_WRITE_BARRIER(this, kCodeOffset, mode); 3191 ASSERT(!Isolate::Current()->heap()->InNewSpace(value));
3095 } 3192 }
3096 3193
3097 3194
3098 SerializedScopeInfo* SharedFunctionInfo::scope_info() { 3195 SerializedScopeInfo* SharedFunctionInfo::scope_info() {
3099 return reinterpret_cast<SerializedScopeInfo*>( 3196 return reinterpret_cast<SerializedScopeInfo*>(
3100 READ_FIELD(this, kScopeInfoOffset)); 3197 READ_FIELD(this, kScopeInfoOffset));
3101 } 3198 }
3102 3199
3103 3200
3104 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value, 3201 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
3105 WriteBarrierMode mode) { 3202 WriteBarrierMode mode) {
3106 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value)); 3203 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3107 CONDITIONAL_WRITE_BARRIER(this, kScopeInfoOffset, mode); 3204 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode);
3108 } 3205 }
3109 3206
3110 3207
3111 Smi* SharedFunctionInfo::deopt_counter() { 3208 Smi* SharedFunctionInfo::deopt_counter() {
3112 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset)); 3209 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
3113 } 3210 }
3114 3211
3115 3212
3116 void SharedFunctionInfo::set_deopt_counter(Smi* value) { 3213 void SharedFunctionInfo::set_deopt_counter(Smi* value) {
3117 WRITE_FIELD(this, kDeoptCounterOffset, value); 3214 WRITE_FIELD(this, kDeoptCounterOffset, value);
3118 } 3215 }
3119 3216
3120 3217
3121 bool SharedFunctionInfo::is_compiled() { 3218 bool SharedFunctionInfo::is_compiled() {
3122 return code() != Builtins::builtin(Builtins::LazyCompile); 3219 return code() !=
3220 Isolate::Current()->builtins()->builtin(Builtins::LazyCompile);
3123 } 3221 }
3124 3222
3125 3223
3126 bool SharedFunctionInfo::IsApiFunction() { 3224 bool SharedFunctionInfo::IsApiFunction() {
3127 return function_data()->IsFunctionTemplateInfo(); 3225 return function_data()->IsFunctionTemplateInfo();
3128 } 3226 }
3129 3227
3130 3228
3131 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() { 3229 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
3132 ASSERT(IsApiFunction()); 3230 ASSERT(IsApiFunction());
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
3172 SharedFunctionInfo::kDontAdaptArgumentsSentinel; 3270 SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3173 } 3271 }
3174 3272
3175 3273
3176 bool JSFunction::IsOptimized() { 3274 bool JSFunction::IsOptimized() {
3177 return code()->kind() == Code::OPTIMIZED_FUNCTION; 3275 return code()->kind() == Code::OPTIMIZED_FUNCTION;
3178 } 3276 }
3179 3277
3180 3278
3181 bool JSFunction::IsMarkedForLazyRecompilation() { 3279 bool JSFunction::IsMarkedForLazyRecompilation() {
3182 return code() == Builtins::builtin(Builtins::LazyRecompile); 3280 return code() == GetIsolate()->builtins()->builtin(Builtins::LazyRecompile);
3183 } 3281 }
3184 3282
3185 3283
3186 Code* JSFunction::code() { 3284 Code* JSFunction::code() {
3187 return Code::cast(unchecked_code()); 3285 return Code::cast(unchecked_code());
3188 } 3286 }
3189 3287
3190 3288
3191 Code* JSFunction::unchecked_code() { 3289 Code* JSFunction::unchecked_code() {
3192 return reinterpret_cast<Code*>( 3290 return reinterpret_cast<Code*>(
3193 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset))); 3291 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3194 } 3292 }
3195 3293
3196 3294
3197 void JSFunction::set_code(Code* value) { 3295 void JSFunction::set_code(Code* value) {
3198 // Skip the write barrier because code is never in new space. 3296 // Skip the write barrier because code is never in new space.
3199 ASSERT(!Heap::InNewSpace(value)); 3297 ASSERT(!HEAP->InNewSpace(value));
3200 Address entry = value->entry(); 3298 Address entry = value->entry();
3201 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); 3299 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3202 } 3300 }
3203 3301
3204 3302
3205 void JSFunction::ReplaceCode(Code* code) { 3303 void JSFunction::ReplaceCode(Code* code) {
3206 bool was_optimized = IsOptimized(); 3304 bool was_optimized = IsOptimized();
3207 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION; 3305 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3208 3306
3209 set_code(code); 3307 set_code(code);
(...skipping 19 matching lines...) Expand all
3229 } 3327 }
3230 3328
3231 3329
3232 SharedFunctionInfo* JSFunction::unchecked_shared() { 3330 SharedFunctionInfo* JSFunction::unchecked_shared() {
3233 return reinterpret_cast<SharedFunctionInfo*>( 3331 return reinterpret_cast<SharedFunctionInfo*>(
3234 READ_FIELD(this, kSharedFunctionInfoOffset)); 3332 READ_FIELD(this, kSharedFunctionInfoOffset));
3235 } 3333 }
3236 3334
3237 3335
3238 void JSFunction::set_context(Object* value) { 3336 void JSFunction::set_context(Object* value) {
3239 ASSERT(value == Heap::undefined_value() || value->IsContext()); 3337 ASSERT(value->IsUndefined() || value->IsContext());
3240 WRITE_FIELD(this, kContextOffset, value); 3338 WRITE_FIELD(this, kContextOffset, value);
3241 WRITE_BARRIER(this, kContextOffset); 3339 WRITE_BARRIER(this, kContextOffset);
3242 } 3340 }
3243 3341
3244 ACCESSORS(JSFunction, prototype_or_initial_map, Object, 3342 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3245 kPrototypeOrInitialMapOffset) 3343 kPrototypeOrInitialMapOffset)
3246 3344
3247 3345
3248 Map* JSFunction::initial_map() { 3346 Map* JSFunction::initial_map() {
3249 return Map::cast(prototype_or_initial_map()); 3347 return Map::cast(prototype_or_initial_map());
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
3286 if (map()->has_non_instance_prototype()) return map()->constructor(); 3384 if (map()->has_non_instance_prototype()) return map()->constructor();
3287 return instance_prototype(); 3385 return instance_prototype();
3288 } 3386 }
3289 3387
3290 bool JSFunction::should_have_prototype() { 3388 bool JSFunction::should_have_prototype() {
3291 return map()->function_with_prototype(); 3389 return map()->function_with_prototype();
3292 } 3390 }
3293 3391
3294 3392
3295 bool JSFunction::is_compiled() { 3393 bool JSFunction::is_compiled() {
3296 return code() != Builtins::builtin(Builtins::LazyCompile); 3394 return code() != GetIsolate()->builtins()->builtin(Builtins::LazyCompile);
3297 } 3395 }
3298 3396
3299 3397
3300 int JSFunction::NumberOfLiterals() { 3398 int JSFunction::NumberOfLiterals() {
3301 return literals()->length(); 3399 return literals()->length();
3302 } 3400 }
3303 3401
3304 3402
3305 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) { 3403 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
3306 ASSERT(id < kJSBuiltinsCount); // id is unsigned. 3404 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
(...skipping 12 matching lines...) Expand all
// Returns the Code object backing the JS builtin with the given id.
Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}
3323 3421
3324 3422
// Stores the Code object for a JS builtin.  No write barrier is emitted;
// the trailing assert guards the assumption that makes that safe: code
// objects are never allocated in new space.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  ASSERT(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  ASSERT(!HEAP->InNewSpace(value));
}
3331 3429
3332 3430
// Returns the raw external address wrapped by this proxy object.
Address Proxy::proxy() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kProxyOffset));
}
3336 3434
3337 3435
3338 void Proxy::set_proxy(Address value) { 3436 void Proxy::set_proxy(Address value) {
3339 WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value)); 3437 WRITE_INTPTR_FIELD(this, kProxyOffset, OffsetFrom(value));
(...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after
3469 3567
// Writes a slot in the regexp's data fixed array.  Only valid once the
// regexp has been compiled.
void JSRegExp::SetDataAt(int index, Object* value) {
  ASSERT(TypeTag() != NOT_COMPILED);
  ASSERT(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
3475 3573
3476 3574
3477 JSObject::ElementsKind JSObject::GetElementsKind() { 3575 JSObject::ElementsKind JSObject::GetElementsKind() {
3478 if (map()->has_fast_elements()) { 3576 if (map()->has_fast_elements()) {
3479 ASSERT(elements()->map() == Heap::fixed_array_map() || 3577 ASSERT(elements()->map() == GetHeap()->fixed_array_map() ||
3480 elements()->map() == Heap::fixed_cow_array_map()); 3578 elements()->map() == GetHeap()->fixed_cow_array_map());
3481 return FAST_ELEMENTS; 3579 return FAST_ELEMENTS;
3482 } 3580 }
3483 HeapObject* array = elements(); 3581 HeapObject* array = elements();
3484 if (array->IsFixedArray()) { 3582 if (array->IsFixedArray()) {
3485 // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a 3583 // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a
3486 // FixedArray, but FAST_ELEMENTS is already handled above. 3584 // FixedArray, but FAST_ELEMENTS is already handled above.
3487 ASSERT(array->IsDictionary()); 3585 ASSERT(array->IsDictionary());
3488 return DICTIONARY_ELEMENTS; 3586 return DICTIONARY_ELEMENTS;
3489 } 3587 }
3588 ASSERT(!map()->has_fast_elements());
3490 if (array->IsExternalArray()) { 3589 if (array->IsExternalArray()) {
3491 switch (array->map()->instance_type()) { 3590 switch (array->map()->instance_type()) {
3492 case EXTERNAL_BYTE_ARRAY_TYPE: 3591 case EXTERNAL_BYTE_ARRAY_TYPE:
3493 return EXTERNAL_BYTE_ELEMENTS; 3592 return EXTERNAL_BYTE_ELEMENTS;
3494 case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE: 3593 case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
3495 return EXTERNAL_UNSIGNED_BYTE_ELEMENTS; 3594 return EXTERNAL_UNSIGNED_BYTE_ELEMENTS;
3496 case EXTERNAL_SHORT_ARRAY_TYPE: 3595 case EXTERNAL_SHORT_ARRAY_TYPE:
3497 return EXTERNAL_SHORT_ELEMENTS; 3596 return EXTERNAL_SHORT_ELEMENTS;
3498 case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE: 3597 case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
3499 return EXTERNAL_UNSIGNED_SHORT_ELEMENTS; 3598 return EXTERNAL_UNSIGNED_SHORT_ELEMENTS;
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
3565 bool JSObject::AllowsSetElementsLength() { 3664 bool JSObject::AllowsSetElementsLength() {
3566 bool result = elements()->IsFixedArray(); 3665 bool result = elements()->IsFixedArray();
3567 ASSERT(result == !HasExternalArrayElements()); 3666 ASSERT(result == !HasExternalArrayElements());
3568 return result; 3667 return result;
3569 } 3668 }
3570 3669
3571 3670
// Ensures the fast-elements backing store is writable: if the elements are
// a copy-on-write array, replace them with a writable copy.  Returns the
// (possibly new) elements array, or an allocation failure.
MaybeObject* JSObject::EnsureWritableFastElements() {
  ASSERT(HasFastElements());
  FixedArray* elems = FixedArray::cast(elements());
  Isolate* isolate = GetIsolate();
  // Non-COW backing stores carry the plain fixed array map and are already
  // writable.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Object* writable_elems;
  { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
        elems, isolate->heap()->fixed_array_map());
    if (!maybe_writable_elems->ToObject(&writable_elems)) {
      // Allocation failed; propagate the failure without touching elements.
      return maybe_writable_elems;
    }
  }
  set_elements(FixedArray::cast(writable_elems));
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}
3587 3687
3588 3688
// Returns the properties backing store as a StringDictionary.  Only valid
// for objects in slow (dictionary) properties mode.
StringDictionary* JSObject::property_dictionary() {
  ASSERT(!HasFastProperties());
  return StringDictionary::cast(properties());
}
3593 3693
3594 3694
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after
3714 3814
// Property attribute lookup with this object itself as the receiver.
PropertyAttributes JSObject::GetPropertyAttribute(String* key) {
  return GetPropertyAttributeWithReceiver(this, key);
}
3718 3818
3719 // TODO(504): this may be useful in other places too where JSGlobalProxy 3819 // TODO(504): this may be useful in other places too where JSGlobalProxy
3720 // is used. 3820 // is used.
3721 Object* JSObject::BypassGlobalProxy() { 3821 Object* JSObject::BypassGlobalProxy() {
3722 if (IsJSGlobalProxy()) { 3822 if (IsJSGlobalProxy()) {
3723 Object* proto = GetPrototype(); 3823 Object* proto = GetPrototype();
3724 if (proto->IsNull()) return Heap::undefined_value(); 3824 if (proto->IsNull()) return GetHeap()->undefined_value();
3725 ASSERT(proto->IsJSGlobalObject()); 3825 ASSERT(proto->IsJSGlobalObject());
3726 return proto; 3826 return proto;
3727 } 3827 }
3728 return this; 3828 return this;
3729 } 3829 }
3730 3830
3731 3831
// Checks whether a hidden-properties object has been installed under the
// heap's hidden symbol.
bool JSObject::HasHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  return GetPropertyAttributePostInterceptor(this,
                                             GetHeap()->hidden_symbol(),
                                             false) != ABSENT;
}
3738 3838
3739 3839
// Retrieves the hidden-properties object stored under the heap's hidden
// symbol.  Callers must have established that it exists.
Object* JSObject::GetHiddenPropertiesObject() {
  ASSERT(!IsJSGlobalProxy());
  PropertyAttributes attributes;
  // You can't install a getter on a property indexed by the hidden symbol,
  // so we can be sure that GetLocalPropertyPostInterceptor returns a real
  // object.
  Object* result =
      GetLocalPropertyPostInterceptor(this,
                                      GetHeap()->hidden_symbol(),
                                      &attributes)->ToObjectUnchecked();
  return result;
}
3752 3852
3753 3853
// Installs hidden_obj as the hidden-properties object under the heap's
// hidden symbol.  The property is added DONT_ENUM, in non-strict mode.
MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
  ASSERT(!IsJSGlobalProxy());
  return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
                                    hidden_obj,
                                    DONT_ENUM,
                                    kNonStrictMode);
}
3761 3861
3762 3862
// Element existence check with this object itself as the receiver.
bool JSObject::HasElement(uint32_t index) {
  return HasElementWithReceiver(this, index);
}
3766 3866
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
3814 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0); 3914 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
3815 int index = HashTable<Shape, Key>::EntryToIndex(entry); 3915 int index = HashTable<Shape, Key>::EntryToIndex(entry);
3816 AssertNoAllocation no_gc; 3916 AssertNoAllocation no_gc;
3817 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc); 3917 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
3818 FixedArray::set(index, key, mode); 3918 FixedArray::set(index, key, mode);
3819 FixedArray::set(index+1, value, mode); 3919 FixedArray::set(index+1, value, mode);
3820 FixedArray::fast_set(this, index+2, details.AsSmi()); 3920 FixedArray::fast_set(this, index+2, details.AsSmi());
3821 } 3921 }
3822 3922
3823 3923
3824 void Map::ClearCodeCache() { 3924 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
3925 ASSERT(other->IsNumber());
3926 return key == static_cast<uint32_t>(other->Number());
3927 }
3928
3929
// Hashes a numeric dictionary key directly from its uint32 value.
uint32_t NumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key);
}
3933
3934
// Hashes a stored key object; must agree with Hash() for equal keys so
// lookups find previously inserted entries.
uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
  ASSERT(other->IsNumber());
  return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
}
3939
3940
// Converts a uint32 key into its heap-object representation for storage.
// NOTE(review): reaches for Isolate::Current() instead of a threaded
// isolate — presumably a leftover of the isolates merge; confirm callers
// always run on the owning thread.
MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
  return Isolate::Current()->heap()->NumberFromUint32(key);
}
3944
3945
3946 bool StringDictionaryShape::IsMatch(String* key, Object* other) {
3947 // We know that all entries in a hash table had their hash keys created.
3948 // Use that knowledge to have fast failure.
3949 if (key->Hash() != String::cast(other)->Hash()) return false;
3950 return key->Equals(String::cast(other));
3951 }
3952
3953
// String dictionary keys hash with the string's own hash code.
uint32_t StringDictionaryShape::Hash(String* key) {
  return key->Hash();
}
3957
3958
uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
  // The stored key is itself a string; hash it directly.
  return String::cast(other)->Hash();
}
3962
3963
// Strings serve as their own key representation in the table.
MaybeObject* StringDictionaryShape::AsObject(String* key) {
  return key;
}
3967
3968
// Resets the map's code cache to the canonical empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
3831 3976
3832 3977
// Grows the array's fast-elements backing store so it can hold at least
// required_size elements; may also relocate a small, heavily-used array
// back into new space.
void JSArray::EnsureSize(int required_size) {
  ASSERT(HasFastElements());
  FixedArray* elts = FixedArray::cast(elements());
  const int kArraySizeThatFitsComfortablyInNewSpace = 128;
  if (elts->length() < required_size) {
    // Doubling in size would be overkill, but leave some slack to avoid
    // constantly growing.
    Expand(required_size + (required_size >> 3));
    // It's a performance benefit to keep a frequently used array in new-space.
  } else if (!GetHeap()->new_space()->Contains(elts) &&
             required_size < kArraySizeThatFitsComfortablyInNewSpace) {
    // Expand will allocate a new backing store in new space even if the size
    // we asked for isn't larger than what we had before.
    Expand(required_size);
  }
}
3849 3994
3850 3995
// Smi lengths are not heap pointers, so the write barrier can be skipped.
void JSArray::set_length(Smi* length) {
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
3854 3999
3855 4000
// Replaces the array's backing store and syncs length to the store's size.
void JSArray::SetContent(FixedArray* storage) {
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
}
3860 4005
3861 4006
// Copies this fixed array; zero-length arrays are returned without copying.
MaybeObject* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}
4011
4012
// Pushes this Relocatable onto the isolate's intrusive stack of live
// Relocatables.
Relocatable::Relocatable(Isolate* isolate) {
  ASSERT(isolate == Isolate::Current());
  isolate_ = isolate;
  // Remember the previous top so the destructor can restore it.
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
4019
4020
// Pops this Relocatable; destruction must be LIFO with respect to
// construction (enforced by the ASSERT_EQ on the stack top).
Relocatable::~Relocatable() {
  ASSERT(isolate_ == Isolate::Current());
  ASSERT_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
3866 4026
3867 4027
// A JSObject's instance size is fully determined by its map.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
3871 4031
3872 4032
3873 void Proxy::ProxyIterateBody(ObjectVisitor* v) { 4033 void Proxy::ProxyIterateBody(ObjectVisitor* v) {
3874 v->VisitExternalReference( 4034 v->VisitExternalReference(
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after
3950 #undef WRITE_INT_FIELD 4110 #undef WRITE_INT_FIELD
3951 #undef READ_SHORT_FIELD 4111 #undef READ_SHORT_FIELD
3952 #undef WRITE_SHORT_FIELD 4112 #undef WRITE_SHORT_FIELD
3953 #undef READ_BYTE_FIELD 4113 #undef READ_BYTE_FIELD
3954 #undef WRITE_BYTE_FIELD 4114 #undef WRITE_BYTE_FIELD
3955 4115
3956 4116
3957 } } // namespace v8::internal 4117 } } // namespace v8::internal
3958 4118
3959 #endif // V8_OBJECTS_INL_H_ 4119 #endif // V8_OBJECTS_INL_H_
OLDNEW
« no previous file with comments | « src/objects-debug.cc ('k') | src/parser.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698