| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2170 matching lines...) |
| 2181 Object** zap = reinterpret_cast<Object**>(new_end); | 2181 Object** zap = reinterpret_cast<Object**>(new_end); |
| 2182 zap++; // Header of filler must be at least one word so skip that. | 2182 zap++; // Header of filler must be at least one word so skip that. |
| 2183 for (int i = 1; i < to_trim; i++) { | 2183 for (int i = 1; i < to_trim; i++) { |
| 2184 *zap++ = Smi::FromInt(0); | 2184 *zap++ = Smi::FromInt(0); |
| 2185 } | 2185 } |
| 2186 } | 2186 } |
| 2187 | 2187 |
| 2188 | 2188 |
| 2189 template<RightTrimMode trim_mode> | 2189 template<RightTrimMode trim_mode> |
| 2190 static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) { | 2190 static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) { |
| 2191 ASSERT(elms->map() != HEAP->fixed_cow_array_map()); | 2191 ASSERT(elms->map() != heap->fixed_cow_array_map()); |
| 2192 // For now this trick is only applied to fixed arrays in new and paged space. | 2192 // For now this trick is only applied to fixed arrays in new and paged space. |
| 2193 ASSERT(!HEAP->lo_space()->Contains(elms)); | 2193 ASSERT(!heap->lo_space()->Contains(elms)); |
| 2194 | 2194 |
| 2195 const int len = elms->length(); | 2195 const int len = elms->length(); |
| 2196 | 2196 |
| 2197 ASSERT(to_trim < len); | 2197 ASSERT(to_trim < len); |
| 2198 | 2198 |
| 2199 Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim); | 2199 Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim); |
| 2200 | 2200 |
| 2201 if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) { | 2201 if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) { |
| 2202 ZapEndOfFixedArray(new_end, to_trim); | 2202 ZapEndOfFixedArray(new_end, to_trim); |
| 2203 } | 2203 } |
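Reviewer note: the only change in this hunk is threading the `heap` argument instead of the global `HEAP` macro. For readers unfamiliar with the trimming trick itself, here is a minimal standalone sketch of the pattern, using plain C++ containers rather than V8's real `FixedArray`/`Heap` types (all names with a `Sketch` suffix are invented): the freed tail keeps a one-word filler header and the remaining words are zapped so the collector never sees stale values.

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Hypothetical stand-in for right-trimming a fixed array of tagged words.
void RightTrimSketch(std::vector<intptr_t>* elements, int to_trim) {
  const int len = static_cast<int>(elements->size());
  assert(to_trim > 0 && to_trim < len);
  const int new_len = len - to_trim;
  // Everything past new_len becomes a filler object in V8; here we keep one
  // word as a pretend "filler header" and zero the rest, mirroring
  // ZapEndOfFixedArray's loop that starts at i = 1.
  (*elements)[new_len] = -1;  // pretend filler header
  for (int i = 1; i < to_trim; i++) {
    (*elements)[new_len + i] = 0;  // Smi::FromInt(0) in the real code
  }
  // A real heap would also shrink the object's stored length here.
}

int main() {
  std::vector<intptr_t> elms = {10, 20, 30, 40, 50, 60};
  RightTrimSketch(&elms, 3);  // words 4 and 5 are zapped, word 3 is the filler
  return 0;
}
```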
| (...skipping 1818 matching lines...) |
| 4022 EnqueueChangeRecord(self, "updated", name, old_value); | 4022 EnqueueChangeRecord(self, "updated", name, old_value); |
| 4023 } | 4023 } |
| 4024 } | 4024 } |
| 4025 } | 4025 } |
| 4026 } | 4026 } |
| 4027 | 4027 |
| 4028 return *hresult; | 4028 return *hresult; |
| 4029 } | 4029 } |
| 4030 | 4030 |
| 4031 | 4031 |
| 4032 MaybeObject* JSObject::SetLocalPropertyIgnoreAttributesTrampoline( |
| 4033 Name* key, |
| 4034 Object* value, |
| 4035 PropertyAttributes attributes, |
| 4036 ValueType value_type, |
| 4037 StoreMode mode, |
| 4038 ExtensibilityCheck extensibility_check) { |
| 4039 // TODO(mstarzinger): The trampoline is a giant hack, don't use it anywhere |
| 4040 // else or handlification people will start hating you for all eternity. |
| 4041 HandleScope scope(GetIsolate()); |
| 4042 IdempotentPointerToHandleCodeTrampoline trampoline(GetIsolate()); |
| 4043 return trampoline.CallWithReturnValue( |
| 4044 &JSObject::SetLocalPropertyIgnoreAttributes, |
| 4045 Handle<JSObject>(this), |
| 4046 Handle<Name>(key), |
| 4047 Handle<Object>(value, GetIsolate()), |
| 4048 attributes, |
| 4049 value_type, |
| 4050 mode, |
| 4051 extensibility_check); |
| 4052 } |
| 4053 |
| 4054 |
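Reviewer note: the trampoline above exists so raw-pointer callers can reach the handlified `SetLocalPropertyIgnoreAttributes` without holding direct pointers across a possible allocation. A minimal sketch of the general pattern follows; `HandleSketch` and `ObjectSketch` are made-up stand-ins, not V8's `Handle<>` or trampoline API.

```cpp
#include <iostream>

// A "handle" is modeled as a simple indirection cell; in a real moving GC
// it is the cell the collector updates, so holders survive object moves.
template <typename T>
struct HandleSketch {
  explicit HandleSketch(T* location) : location_(location) {}
  T* operator->() const { return location_; }
  T* location_;
};

struct ObjectSketch {
  int last_value = 0;

  // Handlified version: takes handles only, so it may allocate (and move
  // objects) safely. This mirrors the static Handle<> overload above.
  static int SetPropertyHandlified(HandleSketch<ObjectSketch> self, int value) {
    self->last_value = value;
    return value;
  }

  // Raw-pointer entry point: wrap `this` into a handle and forward,
  // which is the role the trampoline plays in the hunk above.
  int SetPropertyTrampoline(int value) {
    HandleSketch<ObjectSketch> self(this);
    return SetPropertyHandlified(self, value);
  }
};

int main() {
  ObjectSketch o;
  std::cout << o.SetPropertyTrampoline(42) << "\n";  // prints 42
  return 0;
}
```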
| 4032 // Set a real local property, even if it is READ_ONLY. If the property is not | 4055 // Set a real local property, even if it is READ_ONLY. If the property is not |
| 4033 // present, add it with attributes NONE. This code is an exact clone of | 4056 // present, add it with attributes NONE. This code is an exact clone of |
| 4034 // SetProperty, with the check for IsReadOnly and the check for a | 4057 // SetProperty, with the check for IsReadOnly and the check for a |
| 4035 // callback setter removed. The two lines looking up the LookupResult | 4058 // callback setter removed. The two lines looking up the LookupResult |
| 4036 // result are also added. If one of the functions is changed, the other | 4059 // result are also added. If one of the functions is changed, the other |
| 4037 // should be. | 4060 // should be. |
| 4038 // Note that this method cannot be used to set the prototype of a function | 4061 // Note that this method cannot be used to set the prototype of a function |
| 4039 // because ConvertDescriptorToField() which is called in "case CALLBACKS:" | 4062 // because ConvertDescriptorToField() which is called in "case CALLBACKS:" |
| 4040 // doesn't handle function prototypes correctly. | 4063 // doesn't handle function prototypes correctly. |
| 4041 Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes( | 4064 Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes( |
| 4042 Handle<JSObject> object, | 4065 Handle<JSObject> object, |
| 4043 Handle<Name> key, | 4066 Handle<Name> key, |
| 4044 Handle<Object> value, | 4067 Handle<Object> value, |
| 4045 PropertyAttributes attributes, | 4068 PropertyAttributes attributes, |
| 4046 ValueType value_type, | 4069 ValueType value_type, |
| 4047 StoreMode mode) { | 4070 StoreMode mode, |
| 4071 ExtensibilityCheck extensibility_check) { |
| 4048 CALL_HEAP_FUNCTION( | 4072 CALL_HEAP_FUNCTION( |
| 4049 object->GetIsolate(), | 4073 object->GetIsolate(), |
| 4050 object->SetLocalPropertyIgnoreAttributes( | 4074 object->SetLocalPropertyIgnoreAttributes( |
| 4051 *key, *value, attributes, value_type, mode), | 4075 *key, *value, attributes, value_type, mode, extensibility_check), |
| 4052 Object); | 4076 Object); |
| 4053 } | 4077 } |
| 4054 | 4078 |
| 4055 | 4079 |
| 4056 MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes( | 4080 MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes( |
| 4057 Name* name_raw, | 4081 Name* name_raw, |
| 4058 Object* value_raw, | 4082 Object* value_raw, |
| 4059 PropertyAttributes attributes, | 4083 PropertyAttributes attributes, |
| 4060 ValueType value_type, | 4084 ValueType value_type, |
| 4061 StoreMode mode, | 4085 StoreMode mode, |
| (...skipping 650 matching lines...) |
| 4712 | 4736 |
| 4713 | 4737 |
| 4714 Smi* JSReceiver::GenerateIdentityHash() { | 4738 Smi* JSReceiver::GenerateIdentityHash() { |
| 4715 Isolate* isolate = GetIsolate(); | 4739 Isolate* isolate = GetIsolate(); |
| 4716 | 4740 |
| 4717 int hash_value; | 4741 int hash_value; |
| 4718 int attempts = 0; | 4742 int attempts = 0; |
| 4719 do { | 4743 do { |
| 4720 // Generate a random 32-bit hash value but limit range to fit | 4744 // Generate a random 32-bit hash value but limit range to fit |
| 4721 // within a smi. | 4745 // within a smi. |
| 4722 hash_value = V8::RandomPrivate(isolate) & Smi::kMaxValue; | 4746 hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue; |
| 4723 attempts++; | 4747 attempts++; |
| 4724 } while (hash_value == 0 && attempts < 30); | 4748 } while (hash_value == 0 && attempts < 30); |
| 4725 hash_value = hash_value != 0 ? hash_value : 1; // never return 0 | 4749 hash_value = hash_value != 0 ? hash_value : 1; // never return 0 |
| 4726 | 4750 |
| 4727 return Smi::FromInt(hash_value); | 4751 return Smi::FromInt(hash_value); |
| 4728 } | 4752 } |
| 4729 | 4753 |
| 4730 | 4754 |
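Reviewer note: the change in `GenerateIdentityHash` only swaps `V8::RandomPrivate` for the isolate's `random_number_generator()`; the retry-until-nonzero logic is unchanged. A standalone sketch of that logic using the standard library (illustrative only, not V8's RNG; the exact Smi bound is an assumption here):

```cpp
#include <cstdint>
#include <iostream>
#include <random>

// Stand-in for Smi::kMaxValue on 32-bit targets (2^30 - 1).
constexpr int32_t kMaxValueSketch = (1 << 30) - 1;

int32_t GenerateIdentityHashSketch(std::mt19937& rng) {
  int32_t hash_value;
  int attempts = 0;
  do {
    // Mask a random 32-bit value into the positive Smi range.
    hash_value = static_cast<int32_t>(rng()) & kMaxValueSketch;
    attempts++;
  } while (hash_value == 0 && attempts < 30);
  return hash_value != 0 ? hash_value : 1;  // never return 0
}

int main() {
  std::mt19937 rng(12345);
  std::cout << GenerateIdentityHashSketch(rng) << "\n";
  return 0;
}
```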
| 4731 void JSObject::SetIdentityHash(Handle<JSObject> object, Smi* hash) { | 4755 void JSObject::SetIdentityHash(Handle<JSObject> object, Smi* hash) { |
| 4732 CALL_HEAP_FUNCTION_VOID(object->GetIsolate(), | 4756 CALL_HEAP_FUNCTION_VOID(object->GetIsolate(), |
| (...skipping 211 matching lines...) |
| 4944 // We were storing the identity hash inline and now allocated an actual | 4968 // We were storing the identity hash inline and now allocated an actual |
| 4945 // dictionary. Put the identity hash into the new dictionary. | 4969 // dictionary. Put the identity hash into the new dictionary. |
| 4946 MaybeObject* insert_result = | 4970 MaybeObject* insert_result = |
| 4947 hashtable->Put(GetHeap()->identity_hash_string(), inline_value); | 4971 hashtable->Put(GetHeap()->identity_hash_string(), inline_value); |
| 4948 ObjectHashTable* new_table; | 4972 ObjectHashTable* new_table; |
| 4949 if (!insert_result->To(&new_table)) return insert_result; | 4973 if (!insert_result->To(&new_table)) return insert_result; |
| 4950 // We expect no resizing for the first insert. | 4974 // We expect no resizing for the first insert. |
| 4951 ASSERT_EQ(hashtable, new_table); | 4975 ASSERT_EQ(hashtable, new_table); |
| 4952 } | 4976 } |
| 4953 | 4977 |
| 4954 MaybeObject* store_result = | 4978 MaybeObject* store_result = SetLocalPropertyIgnoreAttributesTrampoline( |
| 4955 SetLocalPropertyIgnoreAttributes(GetHeap()->hidden_string(), | 4979 GetHeap()->hidden_string(), |
| 4956 hashtable, | 4980 hashtable, |
| 4957 DONT_ENUM, | 4981 DONT_ENUM, |
| 4958 OPTIMAL_REPRESENTATION, | 4982 OPTIMAL_REPRESENTATION, |
| 4959 ALLOW_AS_CONSTANT, | 4983 ALLOW_AS_CONSTANT, |
| 4960 OMIT_EXTENSIBILITY_CHECK); | 4984 OMIT_EXTENSIBILITY_CHECK); |
| 4961 if (store_result->IsFailure()) return store_result; | 4985 if (store_result->IsFailure()) return store_result; |
| 4962 return hashtable; | 4986 return hashtable; |
| 4963 } | 4987 } |
| 4964 | 4988 |
| 4965 | 4989 |
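Reviewer note: the function ending above only redirects its store through the new trampoline, but the surrounding logic (keep the identity hash inline until a real hidden-properties table exists, then move it into the table as its first entry) is worth spelling out. A compact sketch of that migration pattern with standard containers; the key name, types, and `Sketch` names are placeholders, not V8 structures.

```cpp
#include <cassert>
#include <cstdint>
#include <optional>
#include <string>
#include <unordered_map>

// Placeholder for the hidden-properties slot: either an inline hash value
// or a real table, never both at the same time.
struct HiddenSlotSketch {
  std::optional<int32_t> inline_hash;
  std::unordered_map<std::string, int32_t> table;

  std::unordered_map<std::string, int32_t>& GetOrCreateTable() {
    if (inline_hash.has_value()) {
      // Migrate the inline hash into the freshly created table, just as the
      // real code puts it under the identity-hash key before storing the
      // table as the hidden backing store.
      table.emplace("identity_hash", *inline_hash);
      inline_hash.reset();
    }
    return table;
  }
};

int main() {
  HiddenSlotSketch slot;
  slot.inline_hash = 77;
  auto& t = slot.GetOrCreateTable();
  assert(t.at("identity_hash") == 77);
  return 0;
}
```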
| 4966 MaybeObject* JSObject::SetHiddenPropertiesHashTable(Object* value) { | 4990 MaybeObject* JSObject::SetHiddenPropertiesHashTable(Object* value) { |
| 4967 ASSERT(!IsJSGlobalProxy()); | 4991 ASSERT(!IsJSGlobalProxy()); |
| 4968 // We can store the identity hash inline iff there is no backing store | 4992 // We can store the identity hash inline iff there is no backing store |
| 4969 // for hidden properties yet. | 4993 // for hidden properties yet. |
| 4970 ASSERT(HasHiddenProperties() != value->IsSmi()); | 4994 ASSERT(HasHiddenProperties() != value->IsSmi()); |
| 4971 if (HasFastProperties()) { | 4995 if (HasFastProperties()) { |
| 4972 // If the object has fast properties, check whether the first slot | 4996 // If the object has fast properties, check whether the first slot |
| 4973 // in the descriptor array matches the hidden string. Since the | 4997 // in the descriptor array matches the hidden string. Since the |
| 4974 // hidden strings hash code is zero (and no other name has hash | 4998 // hidden strings hash code is zero (and no other name has hash |
| 4975 // code zero) it will always occupy the first entry if present. | 4999 // code zero) it will always occupy the first entry if present. |
| 4976 DescriptorArray* descriptors = this->map()->instance_descriptors(); | 5000 DescriptorArray* descriptors = this->map()->instance_descriptors(); |
| 4977 if (descriptors->number_of_descriptors() > 0) { | 5001 if (descriptors->number_of_descriptors() > 0) { |
| 4978 int sorted_index = descriptors->GetSortedKeyIndex(0); | 5002 int sorted_index = descriptors->GetSortedKeyIndex(0); |
| 4979 if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() && | 5003 if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() && |
| 4980 sorted_index < map()->NumberOfOwnDescriptors()) { | 5004 sorted_index < map()->NumberOfOwnDescriptors()) { |
| 4981 ASSERT(descriptors->GetType(sorted_index) == FIELD); | 5005 ASSERT(descriptors->GetType(sorted_index) == FIELD); |
| 4982 FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index), value); | 5006 FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index), value); |
| 4983 return this; | 5007 return this; |
| 4984 } | 5008 } |
| 4985 } | 5009 } |
| 4986 } | 5010 } |
| 4987 MaybeObject* store_result = | 5011 MaybeObject* store_result = SetLocalPropertyIgnoreAttributesTrampoline( |
| 4988 SetLocalPropertyIgnoreAttributes(GetHeap()->hidden_string(), | 5012 GetHeap()->hidden_string(), |
| 4989 value, | 5013 value, |
| 4990 DONT_ENUM, | 5014 DONT_ENUM, |
| 4991 OPTIMAL_REPRESENTATION, | 5015 OPTIMAL_REPRESENTATION, |
| 4992 ALLOW_AS_CONSTANT, | 5016 ALLOW_AS_CONSTANT, |
| 4993 OMIT_EXTENSIBILITY_CHECK); | 5017 OMIT_EXTENSIBILITY_CHECK); |
| 4994 if (store_result->IsFailure()) return store_result; | 5018 if (store_result->IsFailure()) return store_result; |
| 4995 return this; | 5019 return this; |
| 4996 } | 5020 } |
| 4997 | 5021 |
| 4998 | 5022 |
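Reviewer note: the fast-properties shortcut above relies on the hidden string being the only name whose hash code is zero, so in a descriptor array sorted by hash it always lands at sorted index 0. A tiny sketch of that invariant in plain C++ (hash values and names are invented):

```cpp
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <string>
#include <utility>
#include <vector>

int main() {
  // (hash, name) pairs standing in for descriptor keys; the hidden string
  // is modeled as the unique key with hash 0.
  std::vector<std::pair<uint32_t, std::string>> keys = {
      {0x2f1cu, "length"}, {0u, "<hidden>"}, {0x91a7u, "name"}};
  std::sort(keys.begin(), keys.end(),
            [](const auto& a, const auto& b) { return a.first < b.first; });
  // Because no other key can have hash 0, the hidden key is always the
  // first sorted entry whenever it is present at all.
  assert(keys.front().second == "<hidden>");
  return 0;
}
```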
| 4999 Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object, | 5023 Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object, |
| 5000 Handle<Name> name, | 5024 Handle<Name> name, |
| 5001 DeleteMode mode) { | 5025 DeleteMode mode) { |
| 5002 // Check local property, ignore interceptor. | 5026 // Check local property, ignore interceptor. |
| 5003 Isolate* isolate = object->GetIsolate(); | 5027 Isolate* isolate = object->GetIsolate(); |
| (...skipping 2945 matching lines...) |
| 7949 return copy; | 7973 return copy; |
| 7950 } | 7974 } |
| 7951 | 7975 |
| 7952 | 7976 |
| 7953 Object* AccessorPair::GetComponent(AccessorComponent component) { | 7977 Object* AccessorPair::GetComponent(AccessorComponent component) { |
| 7954 Object* accessor = get(component); | 7978 Object* accessor = get(component); |
| 7955 return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor; | 7979 return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor; |
| 7956 } | 7980 } |
| 7957 | 7981 |
| 7958 | 7982 |
| 7959 MaybeObject* DeoptimizationInputData::Allocate(int deopt_entry_count, | 7983 MaybeObject* DeoptimizationInputData::Allocate(Isolate* isolate, |
| 7984 int deopt_entry_count, |
| 7960 PretenureFlag pretenure) { | 7985 PretenureFlag pretenure) { |
| 7961 ASSERT(deopt_entry_count > 0); | 7986 ASSERT(deopt_entry_count > 0); |
| 7962 return HEAP->AllocateFixedArray(LengthFor(deopt_entry_count), | 7987 return isolate->heap()->AllocateFixedArray(LengthFor(deopt_entry_count), |
| 7963 pretenure); | 7988 pretenure); |
| 7964 } | 7989 } |
| 7965 | 7990 |
| 7966 | 7991 |
| 7967 MaybeObject* DeoptimizationOutputData::Allocate(int number_of_deopt_points, | 7992 MaybeObject* DeoptimizationOutputData::Allocate(Isolate* isolate, |
| 7993 int number_of_deopt_points, |
| 7968 PretenureFlag pretenure) { | 7994 PretenureFlag pretenure) { |
| 7969 if (number_of_deopt_points == 0) return HEAP->empty_fixed_array(); | 7995 if (number_of_deopt_points == 0) return isolate->heap()->empty_fixed_array(); |
| 7970 return HEAP->AllocateFixedArray(LengthOfFixedArray(number_of_deopt_points), | 7996 return isolate->heap()->AllocateFixedArray( |
| 7971 pretenure); | 7997 LengthOfFixedArray(number_of_deopt_points), pretenure); |
| 7972 } | 7998 } |
| 7973 | 7999 |
| 7974 | 8000 |
| 7975 #ifdef DEBUG | 8001 #ifdef DEBUG |
| 7976 bool DescriptorArray::IsEqualTo(DescriptorArray* other) { | 8002 bool DescriptorArray::IsEqualTo(DescriptorArray* other) { |
| 7977 if (IsEmpty()) return other->IsEmpty(); | 8003 if (IsEmpty()) return other->IsEmpty(); |
| 7978 if (other->IsEmpty()) return false; | 8004 if (other->IsEmpty()) return false; |
| 7979 if (length() != other->length()) return false; | 8005 if (length() != other->length()) return false; |
| 7980 for (int i = 0; i < length(); ++i) { | 8006 for (int i = 0; i < length(); ++i) { |
| 7981 if (get(i) != other->get(i)) return false; | 8007 if (get(i) != other->get(i)) return false; |
| (...skipping 996 matching lines...) |
| 8978 return string; | 9004 return string; |
| 8979 } | 9005 } |
| 8980 | 9006 |
| 8981 | 9007 |
| 8982 AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object) { | 9008 AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object) { |
| 8983 // Currently, AllocationMemento objects are only allocated immediately | 9009 // Currently, AllocationMemento objects are only allocated immediately |
| 8984 // after JSArrays in NewSpace, and detecting whether a JSArray has one | 9010 // after JSArrays in NewSpace, and detecting whether a JSArray has one |
| 8985 // involves carefully checking the object immediately after the JSArray | 9011 // involves carefully checking the object immediately after the JSArray |
| 8986 // (if there is one) to see if it's an AllocationMemento. | 9012 // (if there is one) to see if it's an AllocationMemento. |
| 8987 if (FLAG_track_allocation_sites && object->GetHeap()->InNewSpace(object)) { | 9013 if (FLAG_track_allocation_sites && object->GetHeap()->InNewSpace(object)) { |
| 9014 // TODO(mvstanton): CHECK to diagnose chromium bug 284577, remove after. |
| 9015 CHECK(object->GetHeap()->InToSpace(object)); |
| 8988 Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) + | 9016 Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) + |
| 8989 object->Size(); | 9017 object->Size(); |
| 8990 if ((ptr_end + AllocationMemento::kSize) <= | 9018 if ((ptr_end + AllocationMemento::kSize) <= |
| 8991 object->GetHeap()->NewSpaceTop()) { | 9019 object->GetHeap()->NewSpaceTop()) { |
| 8992 // There is room in newspace for allocation info. Do we have some? | 9020 // There is room in newspace for allocation info. Do we have some? |
| 8993 Map** possible_allocation_memento_map = | 9021 Map** possible_allocation_memento_map = |
| 8994 reinterpret_cast<Map**>(ptr_end); | 9022 reinterpret_cast<Map**>(ptr_end); |
| 8995 if (*possible_allocation_memento_map == | 9023 if (*possible_allocation_memento_map == |
| 8996 object->GetHeap()->allocation_memento_map()) { | 9024 object->GetHeap()->allocation_memento_map()) { |
| 9025 Address ptr_object = reinterpret_cast<Address>(object); |
| 9026 // TODO(mvstanton): CHECK to diagnose chromium bug 284577, remove after. |
| 9027 // If this check fails it points to the very unlikely case that we've |
| 9028 // misinterpreted a page header as an allocation memento. Follow up |
| 9029 // with a real fix. |
| 9030 CHECK(Page::FromAddress(ptr_object) == Page::FromAddress(ptr_end)); |
| 8997 AllocationMemento* memento = AllocationMemento::cast( | 9031 AllocationMemento* memento = AllocationMemento::cast( |
| 8998 reinterpret_cast<Object*>(ptr_end + 1)); | 9032 reinterpret_cast<Object*>(ptr_end + kHeapObjectTag)); |
| 8999 return memento; | 9033 return memento; |
| 9000 } | 9034 } |
| 9001 } | 9035 } |
| 9002 } | 9036 } |
| 9003 return NULL; | 9037 return NULL; |
| 9004 } | 9038 } |
| 9005 | 9039 |
| 9006 | 9040 |
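Reviewer note: besides the two diagnostic CHECKs for Chromium bug 284577, this hunk replaces `ptr_end + 1` with `ptr_end + kHeapObjectTag`, which is the same value but states the intent: the memento pointer must carry the heap-object tag. A standalone sketch of the "peek at the word after the object" pattern, with a simulated word array and invented constants (nothing here is V8's real layout):

```cpp
#include <cstdint>
#include <iostream>

// Assumptions for the sketch: heap-object pointers carry a 1-bit tag, a
// "map" is just a sentinel word, and the heap is a flat array of words.
constexpr intptr_t kHeapObjectTagSketch = 1;
constexpr intptr_t kMementoMapSketch = 0xABCD;

// Returns the tagged address of a memento placed directly after the object,
// or 0 if the following word is not the memento map.
intptr_t FindMementoSketch(const intptr_t* heap, int object_start,
                           int object_size_in_words, int heap_size) {
  int memento_start = object_start + object_size_in_words;
  // Make sure a whole memento (here: one map word) still fits inside the
  // heap before reading past the object, like the NewSpaceTop() check.
  if (memento_start + 1 > heap_size) return 0;
  if (heap[memento_start] != kMementoMapSketch) return 0;
  // Form a tagged pointer, mirroring ptr_end + kHeapObjectTag.
  return reinterpret_cast<intptr_t>(&heap[memento_start]) +
         kHeapObjectTagSketch;
}

int main() {
  intptr_t heap[8] = {0, 0, 0, 0, kMementoMapSketch, 7, 0, 0};
  // The object occupies words [0, 4); the memento map sits right after it.
  intptr_t tagged = FindMementoSketch(heap, 0, 4, 8);
  std::cout << (tagged != 0 ? "memento found\n" : "no memento\n");
  return 0;
}
```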
| 9007 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) { | 9041 uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) { |
| 9008 // For array indexes mix the length into the hash as an array index could | 9042 // For array indexes mix the length into the hash as an array index could |
| (...skipping 850 matching lines...) |
| 9859 // non-optimizable if optimization is disabled for the shared | 9893 // non-optimizable if optimization is disabled for the shared |
| 9860 // function info. | 9894 // function info. |
| 9861 set_optimization_disabled(true); | 9895 set_optimization_disabled(true); |
| 9862 set_bailout_reason(reason); | 9896 set_bailout_reason(reason); |
| 9863 // Code should be the lazy compilation stub or else unoptimized. If the | 9897 // Code should be the lazy compilation stub or else unoptimized. If the |
| 9864 // latter, disable optimization for the code too. | 9898 // latter, disable optimization for the code too. |
| 9865 ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN); | 9899 ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN); |
| 9866 if (code()->kind() == Code::FUNCTION) { | 9900 if (code()->kind() == Code::FUNCTION) { |
| 9867 code()->set_optimizable(false); | 9901 code()->set_optimizable(false); |
| 9868 } | 9902 } |
| 9869 PROFILE(Isolate::Current(), | 9903 PROFILE(GetIsolate(), |
| 9870 LogExistingFunction(Handle<SharedFunctionInfo>(this), | 9904 LogExistingFunction(Handle<SharedFunctionInfo>(this), |
| 9871 Handle<Code>(code()))); | 9905 Handle<Code>(code()))); |
| 9872 if (FLAG_trace_opt) { | 9906 if (FLAG_trace_opt) { |
| 9873 PrintF("[disabled optimization for "); | 9907 PrintF("[disabled optimization for "); |
| 9874 ShortPrint(); | 9908 ShortPrint(); |
| 9875 PrintF(", reason: %s]\n", GetBailoutReason(reason)); | 9909 PrintF(", reason: %s]\n", GetBailoutReason(reason)); |
| 9876 } | 9910 } |
| 9877 } | 9911 } |
| 9878 | 9912 |
| 9879 | 9913 |
| (...skipping 1521 matching lines...) |
| 11401 #ifdef DEBUG | 11435 #ifdef DEBUG |
| 11402 for (int i = start; i < end - 1; i++) { | 11436 for (int i = start; i < end - 1; i++) { |
| 11403 ASSERT(is_code_at(i) || compilation_info_at(i) != info); | 11437 ASSERT(is_code_at(i) || compilation_info_at(i) != info); |
| 11404 } | 11438 } |
| 11405 #endif | 11439 #endif |
| 11406 } | 11440 } |
| 11407 | 11441 |
| 11408 | 11442 |
| 11409 bool DependentCode::Contains(DependencyGroup group, Code* code) { | 11443 bool DependentCode::Contains(DependencyGroup group, Code* code) { |
| 11410 GroupStartIndexes starts(this); | 11444 GroupStartIndexes starts(this); |
| 11411 int number_of_entries = starts.number_of_entries(); | 11445 int start = starts.at(group); |
| 11412 for (int i = 0; i < number_of_entries; i++) { | 11446 int end = starts.at(group + 1); |
| 11447 for (int i = start; i < end; i++) { |
| 11413 if (object_at(i) == code) return true; | 11448 if (object_at(i) == code) return true; |
| 11414 } | 11449 } |
| 11415 return false; | 11450 return false; |
| 11416 } | 11451 } |
| 11417 | 11452 |
| 11418 | 11453 |
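Reviewer note: the fix to `Contains` above narrows the scan from every entry in the array to just the `[starts.at(group), starts.at(group + 1))` range of the requested group. A minimal sketch of that flat, grouped layout; the group count and values are invented for illustration.

```cpp
#include <array>
#include <cassert>
#include <vector>

// Entries of all groups live in one flat vector; starts[g] is the first
// index of group g and starts[g + 1] is one past its last index.
constexpr int kGroupCountSketch = 3;

bool ContainsInGroup(const std::vector<int>& entries,
                     const std::array<int, kGroupCountSketch + 1>& starts,
                     int group, int value) {
  for (int i = starts[group]; i < starts[group + 1]; i++) {
    if (entries[i] == value) return true;
  }
  return false;
}

int main() {
  std::vector<int> entries = {11, 12, /* group 1 */ 21, /* group 2 */ 31, 32};
  std::array<int, kGroupCountSketch + 1> starts = {0, 2, 3, 5};
  assert(ContainsInGroup(entries, starts, 1, 21));
  assert(!ContainsInGroup(entries, starts, 0, 21));  // other groups ignored
  return 0;
}
```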
| 11419 void DependentCode::DeoptimizeDependentCodeGroup( | 11454 void DependentCode::DeoptimizeDependentCodeGroup( |
| 11420 Isolate* isolate, | 11455 Isolate* isolate, |
| 11421 DependentCode::DependencyGroup group) { | 11456 DependentCode::DependencyGroup group) { |
| 11422 ASSERT(AllowCodeDependencyChange::IsAllowed()); | 11457 ASSERT(AllowCodeDependencyChange::IsAllowed()); |
| (...skipping 2341 matching lines...) |
| 13764 } | 13799 } |
| 13765 } | 13800 } |
| 13766 } | 13801 } |
| 13767 new_table->SetNumberOfElements(NumberOfElements()); | 13802 new_table->SetNumberOfElements(NumberOfElements()); |
| 13768 new_table->SetNumberOfDeletedElements(0); | 13803 new_table->SetNumberOfDeletedElements(0); |
| 13769 return new_table; | 13804 return new_table; |
| 13770 } | 13805 } |
| 13771 | 13806 |
| 13772 | 13807 |
| 13773 template<typename Shape, typename Key> | 13808 template<typename Shape, typename Key> |
| 13809 uint32_t HashTable<Shape, Key>::EntryForProbe(Key key, |
| 13810 Object* k, |
| 13811 int probe, |
| 13812 uint32_t expected) { |
| 13813 uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k); |
| 13814 uint32_t capacity = Capacity(); |
| 13815 uint32_t entry = FirstProbe(hash, capacity); |
| 13816 for (int i = 1; i < probe; i++) { |
| 13817 if (entry == expected) return expected; |
| 13818 entry = NextProbe(entry, i, capacity); |
| 13819 } |
| 13820 return entry; |
| 13821 } |
| 13822 |
| 13823 |
| 13824 template<typename Shape, typename Key> |
| 13825 void HashTable<Shape, Key>::Swap(uint32_t entry1, |
| 13826 uint32_t entry2, |
| 13827 WriteBarrierMode mode) { |
| 13828 int index1 = EntryToIndex(entry1); |
| 13829 int index2 = EntryToIndex(entry2); |
| 13830 Object* temp[Shape::kEntrySize]; |
| 13831 for (int j = 0; j < Shape::kEntrySize; j++) { |
| 13832 temp[j] = get(index1 + j); |
| 13833 } |
| 13834 for (int j = 0; j < Shape::kEntrySize; j++) { |
| 13835 set(index1 + j, get(index2 + j), mode); |
| 13836 } |
| 13837 for (int j = 0; j < Shape::kEntrySize; j++) { |
| 13838 set(index2 + j, temp[j], mode); |
| 13839 } |
| 13840 } |
| 13841 |
| 13842 |
| 13843 template<typename Shape, typename Key> |
| 13844 void HashTable<Shape, Key>::Rehash(Key key) { |
| 13845 DisallowHeapAllocation no_gc; |
| 13846 WriteBarrierMode mode = GetWriteBarrierMode(no_gc); |
| 13847 uint32_t capacity = Capacity(); |
| 13848 bool done = false; |
| 13849 for (int probe = 1; !done; probe++) { |
| 13850 // All elements at entries given by one of the first _probe_ probes |
| 13851 // are placed correctly. Other elements might need to be moved. |
| 13852 done = true; |
| 13853 for (uint32_t current = 0; current < capacity; current++) { |
| 13854 Object* current_key = get(EntryToIndex(current)); |
| 13855 if (IsKey(current_key)) { |
| 13856 uint32_t target = EntryForProbe(key, current_key, probe, current); |
| 13857 if (current == target) continue; |
| 13858 Object* target_key = get(EntryToIndex(target)); |
| 13859 if (!IsKey(target_key) || |
| 13860 EntryForProbe(key, target_key, probe, target) != target) { |
| 13861 // Put the current element into the correct position. |
| 13862 Swap(current, target, mode); |
| 13863 // The other element will be processed on the next iteration. |
| 13864 current--; |
| 13865 } else { |
| 13866 // The place for the current element is occupied. Leave the element |
| 13867 // for the next probe. |
| 13868 done = false; |
| 13869 } |
| 13870 } |
| 13871 } |
| 13872 } |
| 13873 } |
| 13874 |
| 13875 |
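Reviewer note: the new `Rehash` reorders elements in place, without allocating, by repeatedly asking `EntryForProbe` where an element would land if only the first `probe` probes were allowed and swapping it there when the target slot is free or holds a mis-placed element; the outer loop keeps increasing `probe` until every occupied slot maps to itself. The sketch below mirrors just the `EntryForProbe` early-exit idea for a power-of-two table with linear probing (V8 uses a quadratic `NextProbe`; the `Sketch` names and values are invented).

```cpp
#include <cassert>
#include <cstdint>

// Slot `hash` would occupy if only the first `probe` probes were allowed,
// except that reaching `expected` earlier keeps the element where it is.
uint32_t EntryForProbeSketch(uint32_t hash, uint32_t capacity, int probe,
                             uint32_t expected) {
  uint32_t entry = hash & (capacity - 1);  // FirstProbe
  for (int i = 1; i < probe; i++) {
    if (entry == expected) return expected;
    entry = (entry + 1) & (capacity - 1);  // linear stand-in for NextProbe
  }
  return entry;
}

int main() {
  // hash 5 in a table of 8: the probe sequence visits 5, 6, 7, ...
  assert(EntryForProbeSketch(5, 8, 1, 7) == 5);  // only the first probe
  assert(EntryForProbeSketch(5, 8, 3, 2) == 7);  // third probe position
  assert(EntryForProbeSketch(5, 8, 3, 6) == 6);  // early exit at `expected`
  return 0;
}
```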
| 13876 template<typename Shape, typename Key> |
| 13774 MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n, Key key) { | 13877 MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n, Key key) { |
| 13775 int capacity = Capacity(); | 13878 int capacity = Capacity(); |
| 13776 int nof = NumberOfElements() + n; | 13879 int nof = NumberOfElements() + n; |
| 13777 int nod = NumberOfDeletedElements(); | 13880 int nod = NumberOfDeletedElements(); |
| 13778 // Return if: | 13881 // Return if: |
| 13779 // 50% is still free after adding n elements and | 13882 // 50% is still free after adding n elements and |
| 13780 // at most 50% of the free elements are deleted elements. | 13883 // at most 50% of the free elements are deleted elements. |
| 13781 if (nod <= (capacity - nof) >> 1) { | 13884 if (nod <= (capacity - nof) >> 1) { |
| 13782 int needed_free = nof >> 1; | 13885 int needed_free = nof >> 1; |
| 13783 if (nof + needed_free <= capacity) return this; | 13886 if (nof + needed_free <= capacity) return this; |
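Reviewer note: the capacity check above reads as two conditions: after adding `n` elements at least half the table is still free, and deleted entries (tombstones) make up at most half of that free space. A worked sketch of the predicate with the same 50% thresholds; the function name and numbers are illustrative.

```cpp
#include <cassert>

// True when the table can absorb `n` more elements without growing.
bool HasEnoughCapacitySketch(int capacity, int elements, int deleted, int n) {
  int nof = elements + n;
  bool few_deleted = deleted <= ((capacity - nof) >> 1);
  bool half_free = nof + (nof >> 1) <= capacity;
  return few_deleted && half_free;
}

int main() {
  assert(HasEnoughCapacitySketch(16, 6, 1, 2));    // 8 used, 8 free, 1 deleted
  assert(!HasEnoughCapacitySketch(16, 10, 0, 2));  // 12 used: under half free
  assert(!HasEnoughCapacitySketch(16, 6, 5, 2));   // too many tombstones
  return 0;
}
```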
| (...skipping 2269 matching lines...) |
| 16053 #define ERROR_MESSAGES_TEXTS(C, T) T, | 16156 #define ERROR_MESSAGES_TEXTS(C, T) T, |
| 16054 static const char* error_messages_[] = { | 16157 static const char* error_messages_[] = { |
| 16055 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS) | 16158 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS) |
| 16056 }; | 16159 }; |
| 16057 #undef ERROR_MESSAGES_TEXTS | 16160 #undef ERROR_MESSAGES_TEXTS |
| 16058 return error_messages_[reason]; | 16161 return error_messages_[reason]; |
| 16059 } | 16162 } |
| 16060 | 16163 |
| 16061 | 16164 |
| 16062 } } // namespace v8::internal | 16165 } } // namespace v8::internal |