Index: src/objects.cc |
diff --git a/src/objects.cc b/src/objects.cc |
index 4217666afa02b859f0055d4811bb17d41cdb04f2..f334d5679666c588565c7d6478f1502e1f42468a 100644 |
--- a/src/objects.cc |
+++ b/src/objects.cc |
@@ -124,7 +124,7 @@ void Object::Lookup(Handle<Name> name, LookupResult* result) { |
0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001); |
} |
} |
- ASSERT(holder != NULL); // Cannot handle null or undefined. |
+ DCHECK(holder != NULL); // Cannot handle null or undefined. |
JSReceiver::cast(holder)->Lookup(name, result); |
} |
@@ -228,7 +228,7 @@ bool FunctionTemplateInfo::IsTemplateFor(Map* map) { |
template<typename To> |
static inline To* CheckedCast(void *from) { |
uintptr_t temp = reinterpret_cast<uintptr_t>(from); |
- ASSERT(temp % sizeof(To) == 0); |
+ DCHECK(temp % sizeof(To) == 0); |
return reinterpret_cast<To*>(temp); |
} |
@@ -330,39 +330,39 @@ static Handle<Object> GetDeclaredAccessorProperty( |
const DeclaredAccessorDescriptorData* data = iterator.Next(); |
switch (data->type) { |
case kDescriptorReturnObject: { |
- ASSERT(iterator.Complete()); |
+ DCHECK(iterator.Complete()); |
current = *CheckedCast<char*>(current); |
return handle(*CheckedCast<Object*>(current), isolate); |
} |
case kDescriptorPointerDereference: |
- ASSERT(!iterator.Complete()); |
+ DCHECK(!iterator.Complete()); |
current = *reinterpret_cast<char**>(current); |
break; |
case kDescriptorPointerShift: |
- ASSERT(!iterator.Complete()); |
+ DCHECK(!iterator.Complete()); |
current += data->pointer_shift_descriptor.byte_offset; |
break; |
case kDescriptorObjectDereference: { |
- ASSERT(!iterator.Complete()); |
+ DCHECK(!iterator.Complete()); |
Object* object = CheckedCast<Object>(current); |
int field = data->object_dereference_descriptor.internal_field; |
Object* smi = JSObject::cast(object)->GetInternalField(field); |
- ASSERT(smi->IsSmi()); |
+ DCHECK(smi->IsSmi()); |
current = reinterpret_cast<char*>(smi); |
break; |
} |
case kDescriptorBitmaskCompare: |
- ASSERT(iterator.Complete()); |
+ DCHECK(iterator.Complete()); |
return PerformCompare(data->bitmask_compare_descriptor, |
current, |
isolate); |
case kDescriptorPointerCompare: |
- ASSERT(iterator.Complete()); |
+ DCHECK(iterator.Complete()); |
return PerformCompare(data->pointer_compare_descriptor, |
current, |
isolate); |
case kDescriptorPrimitiveValue: |
- ASSERT(iterator.Complete()); |
+ DCHECK(iterator.Complete()); |
return GetPrimitiveValue(data->primitive_value_descriptor, |
current, |
isolate); |
@@ -375,7 +375,7 @@ static Handle<Object> GetDeclaredAccessorProperty( |
Handle<FixedArray> JSObject::EnsureWritableFastElements( |
Handle<JSObject> object) { |
- ASSERT(object->HasFastSmiOrObjectElements()); |
+ DCHECK(object->HasFastSmiOrObjectElements()); |
Isolate* isolate = object->GetIsolate(); |
Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate); |
if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems; |
@@ -406,7 +406,7 @@ MaybeHandle<Object> Object::GetPropertyWithAccessor(Handle<Object> receiver, |
Handle<JSObject> holder, |
Handle<Object> structure) { |
Isolate* isolate = name->GetIsolate(); |
- ASSERT(!structure->IsForeign()); |
+ DCHECK(!structure->IsForeign()); |
// api style callbacks. |
if (structure->IsAccessorInfo()) { |
Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(structure); |
@@ -480,7 +480,7 @@ MaybeHandle<Object> Object::SetPropertyWithAccessor( |
// We should never get here to initialize a const with the hole |
// value since a const declaration would conflict with the setter. |
- ASSERT(!structure->IsForeign()); |
+ DCHECK(!structure->IsForeign()); |
if (structure->IsExecutableAccessorInfo()) { |
// Don't call executable accessor setters with non-JSObject receivers. |
if (!receiver->IsJSObject()) return value; |
@@ -649,27 +649,27 @@ MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck( |
Object* JSObject::GetNormalizedProperty(const LookupResult* result) { |
- ASSERT(!HasFastProperties()); |
+ DCHECK(!HasFastProperties()); |
Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry()); |
if (IsGlobalObject()) { |
value = PropertyCell::cast(value)->value(); |
} |
- ASSERT(!value->IsPropertyCell() && !value->IsCell()); |
+ DCHECK(!value->IsPropertyCell() && !value->IsCell()); |
return value; |
} |
Handle<Object> JSObject::GetNormalizedProperty(Handle<JSObject> object, |
const LookupResult* result) { |
- ASSERT(!object->HasFastProperties()); |
+ DCHECK(!object->HasFastProperties()); |
Isolate* isolate = object->GetIsolate(); |
Handle<Object> value(object->property_dictionary()->ValueAt( |
result->GetDictionaryEntry()), isolate); |
if (object->IsGlobalObject()) { |
value = handle(Handle<PropertyCell>::cast(value)->value(), isolate); |
- ASSERT(!value->IsTheHole()); |
+ DCHECK(!value->IsTheHole()); |
} |
- ASSERT(!value->IsPropertyCell() && !value->IsCell()); |
+ DCHECK(!value->IsPropertyCell() && !value->IsCell()); |
return value; |
} |
@@ -677,7 +677,7 @@ Handle<Object> JSObject::GetNormalizedProperty(Handle<JSObject> object, |
void JSObject::SetNormalizedProperty(Handle<JSObject> object, |
const LookupResult* result, |
Handle<Object> value) { |
- ASSERT(!object->HasFastProperties()); |
+ DCHECK(!object->HasFastProperties()); |
NameDictionary* property_dictionary = object->property_dictionary(); |
if (object->IsGlobalObject()) { |
Handle<PropertyCell> cell(PropertyCell::cast( |
@@ -693,7 +693,7 @@ void JSObject::SetNormalizedProperty(Handle<JSObject> object, |
Handle<Name> name, |
Handle<Object> value, |
PropertyDetails details) { |
- ASSERT(!object->HasFastProperties()); |
+ DCHECK(!object->HasFastProperties()); |
Handle<NameDictionary> property_dictionary(object->property_dictionary()); |
if (!name->IsUniqueName()) { |
@@ -722,7 +722,7 @@ void JSObject::SetNormalizedProperty(Handle<JSObject> object, |
property_dictionary->SetNextEnumerationIndex(enumeration_index + 1); |
} else { |
enumeration_index = original_details.dictionary_index(); |
- ASSERT(enumeration_index > 0); |
+ DCHECK(enumeration_index > 0); |
} |
details = PropertyDetails( |
@@ -743,7 +743,7 @@ void JSObject::SetNormalizedProperty(Handle<JSObject> object, |
Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object, |
Handle<Name> name, |
DeleteMode mode) { |
- ASSERT(!object->HasFastProperties()); |
+ DCHECK(!object->HasFastProperties()); |
Isolate* isolate = object->GetIsolate(); |
Handle<NameDictionary> dictionary(object->property_dictionary()); |
int entry = dictionary->FindEntry(name); |
@@ -758,7 +758,7 @@ Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object, |
// from the DontDelete cell without checking if it contains |
// the hole value. |
Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map())); |
- ASSERT(new_map->is_dictionary_map()); |
+ DCHECK(new_map->is_dictionary_map()); |
JSObject::MigrateToMap(object, new_map); |
} |
Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry))); |
@@ -899,7 +899,7 @@ Object* Object::GetHash() { |
return Smi::FromInt(hash); |
} |
- ASSERT(IsJSReceiver()); |
+ DCHECK(IsJSReceiver()); |
return JSReceiver::cast(this)->GetIdentityHash(); |
} |
@@ -908,7 +908,7 @@ Handle<Smi> Object::GetOrCreateHash(Isolate* isolate, Handle<Object> object) { |
Handle<Object> hash(object->GetHash(), isolate); |
if (hash->IsSmi()) return Handle<Smi>::cast(hash); |
- ASSERT(object->IsJSReceiver()); |
+ DCHECK(object->IsJSReceiver()); |
return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object)); |
} |
@@ -1009,8 +1009,8 @@ static bool AnWord(String* str) { |
Handle<String> String::SlowFlatten(Handle<ConsString> cons, |
PretenureFlag pretenure) { |
- ASSERT(AllowHeapAllocation::IsAllowed()); |
- ASSERT(cons->second()->length() != 0); |
+ DCHECK(AllowHeapAllocation::IsAllowed()); |
+ DCHECK(cons->second()->length() != 0); |
Isolate* isolate = cons->GetIsolate(); |
int length = cons->length(); |
PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? pretenure |
@@ -1031,7 +1031,7 @@ Handle<String> String::SlowFlatten(Handle<ConsString> cons, |
} |
cons->set_first(*result); |
cons->set_second(isolate->heap()->empty_string()); |
- ASSERT(result->IsFlat()); |
+ DCHECK(result->IsFlat()); |
return result; |
} |
@@ -1040,14 +1040,14 @@ Handle<String> String::SlowFlatten(Handle<ConsString> cons, |
bool String::MakeExternal(v8::String::ExternalStringResource* resource) { |
// Externalizing twice leaks the external resource, so it's |
// prohibited by the API. |
- ASSERT(!this->IsExternalString()); |
-#ifdef ENABLE_SLOW_ASSERTS |
+ DCHECK(!this->IsExternalString()); |
+#ifdef ENABLE_SLOW_DCHECKS |
if (FLAG_enable_slow_asserts) { |
// Assert that the resource and the string are equivalent. |
- ASSERT(static_cast<size_t>(this->length()) == resource->length()); |
+ DCHECK(static_cast<size_t>(this->length()) == resource->length()); |
ScopedVector<uc16> smart_chars(this->length()); |
String::WriteToFlat(this, smart_chars.start(), 0, this->length()); |
- ASSERT(memcmp(smart_chars.start(), |
+ DCHECK(memcmp(smart_chars.start(), |
resource->data(), |
resource->length() * sizeof(smart_chars[0])) == 0); |
} |
@@ -1109,18 +1109,18 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) { |
bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) { |
-#ifdef ENABLE_SLOW_ASSERTS |
+#ifdef ENABLE_SLOW_DCHECKS |
if (FLAG_enable_slow_asserts) { |
// Assert that the resource and the string are equivalent. |
- ASSERT(static_cast<size_t>(this->length()) == resource->length()); |
+ DCHECK(static_cast<size_t>(this->length()) == resource->length()); |
if (this->IsTwoByteRepresentation()) { |
ScopedVector<uint16_t> smart_chars(this->length()); |
String::WriteToFlat(this, smart_chars.start(), 0, this->length()); |
- ASSERT(String::IsOneByte(smart_chars.start(), this->length())); |
+ DCHECK(String::IsOneByte(smart_chars.start(), this->length())); |
} |
ScopedVector<char> smart_chars(this->length()); |
String::WriteToFlat(this, smart_chars.start(), 0, this->length()); |
- ASSERT(memcmp(smart_chars.start(), |
+ DCHECK(memcmp(smart_chars.start(), |
resource->data(), |
resource->length() * sizeof(smart_chars[0])) == 0); |
} |
@@ -1776,7 +1776,7 @@ MaybeHandle<Map> Map::CopyWithField(Handle<Map> map, |
PropertyAttributes attributes, |
Representation representation, |
TransitionFlag flag) { |
- ASSERT(DescriptorArray::kNotFound == |
+ DCHECK(DescriptorArray::kNotFound == |
map->instance_descriptors()->Search( |
*name, map->NumberOfOwnDescriptors())); |
@@ -1828,7 +1828,7 @@ void JSObject::AddFastProperty(Handle<JSObject> object, |
PropertyAttributes attributes, |
StoreFromKeyed store_mode, |
TransitionFlag flag) { |
- ASSERT(!object->IsJSGlobalProxy()); |
+ DCHECK(!object->IsJSGlobalProxy()); |
MaybeHandle<Map> maybe_map; |
if (value->IsJSFunction()) { |
@@ -1857,7 +1857,7 @@ void JSObject::AddSlowProperty(Handle<JSObject> object, |
Handle<Name> name, |
Handle<Object> value, |
PropertyAttributes attributes) { |
- ASSERT(!object->HasFastProperties()); |
+ DCHECK(!object->HasFastProperties()); |
Isolate* isolate = object->GetIsolate(); |
Handle<NameDictionary> dict(object->property_dictionary()); |
if (object->IsGlobalObject()) { |
@@ -1889,7 +1889,7 @@ MaybeHandle<Object> JSObject::AddPropertyInternal( |
Handle<JSObject> object, Handle<Name> name, Handle<Object> value, |
PropertyAttributes attributes, JSReceiver::StoreFromKeyed store_mode, |
ExtensibilityCheck extensibility_check, TransitionFlag transition_flag) { |
- ASSERT(!object->IsJSGlobalProxy()); |
+ DCHECK(!object->IsJSGlobalProxy()); |
Isolate* isolate = object->GetIsolate(); |
if (!name->IsUniqueName()) { |
@@ -1943,8 +1943,8 @@ void JSObject::EnqueueChangeRecord(Handle<JSObject> object, |
const char* type_str, |
Handle<Name> name, |
Handle<Object> old_value) { |
- ASSERT(!object->IsJSGlobalProxy()); |
- ASSERT(!object->IsJSGlobalObject()); |
+ DCHECK(!object->IsJSGlobalProxy()); |
+ DCHECK(!object->IsJSGlobalObject()); |
Isolate* isolate = object->GetIsolate(); |
HandleScope scope(isolate); |
Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str); |
@@ -2003,13 +2003,13 @@ static void ZapEndOfFixedArray(Address new_end, int to_trim) { |
template<Heap::InvocationMode mode> |
static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) { |
- ASSERT(elms->map() != heap->fixed_cow_array_map()); |
+ DCHECK(elms->map() != heap->fixed_cow_array_map()); |
// For now this trick is only applied to fixed arrays in new and paged space. |
- ASSERT(!heap->lo_space()->Contains(elms)); |
+ DCHECK(!heap->lo_space()->Contains(elms)); |
const int len = elms->length(); |
- ASSERT(to_trim < len); |
+ DCHECK(to_trim < len); |
Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim); |
@@ -2044,7 +2044,7 @@ bool Map::InstancesNeedRewriting(Map* target, int target_number_of_fields, |
int* old_number_of_fields) { |
// If fields were added (or removed), rewrite the instance. |
*old_number_of_fields = NumberOfFields(); |
- ASSERT(target_number_of_fields >= *old_number_of_fields); |
+ DCHECK(target_number_of_fields >= *old_number_of_fields); |
if (target_number_of_fields != *old_number_of_fields) return true; |
// If smi descriptors were replaced by double descriptors, rewrite. |
@@ -2064,9 +2064,9 @@ bool Map::InstancesNeedRewriting(Map* target, int target_number_of_fields, |
// In-object slack tracking may have reduced the object size of the new map. |
// In that case, succeed if all existing fields were inobject, and they still |
// fit within the new inobject size. |
- ASSERT(target_inobject < inobject_properties()); |
+ DCHECK(target_inobject < inobject_properties()); |
if (target_number_of_fields <= target_inobject) { |
- ASSERT(target_number_of_fields + target_unused == target_inobject); |
+ DCHECK(target_number_of_fields + target_unused == target_inobject); |
return false; |
} |
// Otherwise, properties will need to be moved to the backing store. |
@@ -2142,7 +2142,7 @@ void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) { |
if ((old_map->unused_property_fields() == 0) && |
(number_of_fields != old_number_of_fields) && |
(new_map->GetBackPointer() == *old_map)) { |
- ASSERT(number_of_fields == old_number_of_fields + 1); |
+ DCHECK(number_of_fields == old_number_of_fields + 1); |
    // This migration is a transition from a map that has run out of property |
// space. Therefore it could be done by extending the backing store. |
Handle<FixedArray> old_storage = handle(object->properties(), isolate); |
@@ -2157,9 +2157,9 @@ void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) { |
} else { |
value = isolate->factory()->uninitialized_value(); |
} |
- ASSERT(details.type() == FIELD); |
+ DCHECK(details.type() == FIELD); |
int target_index = details.field_index() - inobject; |
- ASSERT(target_index >= 0); // Must be a backing store index. |
+ DCHECK(target_index >= 0); // Must be a backing store index. |
new_storage->set(target_index, *value); |
// From here on we cannot fail and we shouldn't GC anymore. |
@@ -2179,17 +2179,17 @@ void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) { |
// This method only supports generalizing instances to at least the same |
// number of properties. |
- ASSERT(old_nof <= new_nof); |
+ DCHECK(old_nof <= new_nof); |
for (int i = 0; i < old_nof; i++) { |
PropertyDetails details = new_descriptors->GetDetails(i); |
if (details.type() != FIELD) continue; |
PropertyDetails old_details = old_descriptors->GetDetails(i); |
if (old_details.type() == CALLBACKS) { |
- ASSERT(details.representation().IsTagged()); |
+ DCHECK(details.representation().IsTagged()); |
continue; |
} |
- ASSERT(old_details.type() == CONSTANT || |
+ DCHECK(old_details.type() == CONSTANT || |
old_details.type() == FIELD); |
Object* raw_value = old_details.type() == CONSTANT |
? old_descriptors->GetValue(i) |
@@ -2205,7 +2205,7 @@ void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) { |
!details.representation().IsDouble()) { |
value = Object::WrapForRead(isolate, value, old_details.representation()); |
} |
- ASSERT(!(details.representation().IsDouble() && value->IsSmi())); |
+ DCHECK(!(details.representation().IsDouble() && value->IsSmi())); |
int target_index = new_descriptors->GetFieldIndex(i) - inobject; |
if (target_index < 0) target_index += total_size; |
array->set(target_index, *value); |
@@ -2248,7 +2248,7 @@ void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) { |
// Create filler object past the new instance size. |
int new_instance_size = new_map->instance_size(); |
int instance_size_delta = old_map->instance_size() - new_instance_size; |
- ASSERT(instance_size_delta >= 0); |
+ DCHECK(instance_size_delta >= 0); |
if (instance_size_delta > 0) { |
Address address = object->address(); |
@@ -2318,7 +2318,7 @@ Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map, |
new_map->set_unused_property_fields(unused_property_fields); |
} |
} else { |
- ASSERT(details.attributes() == attributes); |
+ DCHECK(details.attributes() == attributes); |
} |
if (FLAG_trace_generalization) { |
@@ -2409,7 +2409,7 @@ Map* Map::FindLastMatchMap(int verbatim, |
DisallowHeapAllocation no_allocation; |
// This can only be called on roots of transition trees. |
- ASSERT(GetBackPointer()->IsUndefined()); |
+ DCHECK(GetBackPointer()->IsUndefined()); |
Map* current = this; |
@@ -2443,7 +2443,7 @@ Map* Map::FindLastMatchMap(int verbatim, |
Map* Map::FindFieldOwner(int descriptor) { |
DisallowHeapAllocation no_allocation; |
- ASSERT_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type()); |
+ DCHECK_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type()); |
Map* result = this; |
while (true) { |
Object* back = result->GetBackPointer(); |
@@ -2485,9 +2485,9 @@ Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1, |
if (type1->NowStable() && type2->NowStable()) { |
Handle<HeapType> type = HeapType::Union(type1, type2, isolate); |
if (type->NumClasses() <= kMaxClassesPerFieldType) { |
- ASSERT(type->NowStable()); |
- ASSERT(type1->NowIs(type)); |
- ASSERT(type2->NowIs(type)); |
+ DCHECK(type->NowStable()); |
+ DCHECK(type1->NowIs(type)); |
+ DCHECK(type2->NowIs(type)); |
return type; |
} |
} |
@@ -2505,7 +2505,7 @@ void Map::GeneralizeFieldType(Handle<Map> map, |
Handle<HeapType> old_field_type( |
map->instance_descriptors()->GetFieldType(modify_index), isolate); |
if (new_field_type->NowIs(old_field_type)) { |
- ASSERT(Map::GeneralizeFieldType(old_field_type, |
+ DCHECK(Map::GeneralizeFieldType(old_field_type, |
new_field_type, |
isolate)->NowIs(old_field_type)); |
return; |
@@ -2515,7 +2515,7 @@ void Map::GeneralizeFieldType(Handle<Map> map, |
Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate); |
Handle<DescriptorArray> descriptors( |
field_owner->instance_descriptors(), isolate); |
- ASSERT_EQ(*old_field_type, descriptors->GetFieldType(modify_index)); |
+ DCHECK_EQ(*old_field_type, descriptors->GetFieldType(modify_index)); |
// Determine the generalized new field type. |
new_field_type = Map::GeneralizeFieldType( |
@@ -2577,8 +2577,8 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
if (old_representation.IsNone() && |
!new_representation.IsNone() && |
!new_representation.IsDouble()) { |
- ASSERT(old_details.type() == FIELD); |
- ASSERT(old_descriptors->GetFieldType(modify_index)->NowIs( |
+ DCHECK(old_details.type() == FIELD); |
+ DCHECK(old_descriptors->GetFieldType(modify_index)->NowIs( |
HeapType::None())); |
if (FLAG_trace_generalization) { |
old_map->PrintGeneralization( |
@@ -2655,8 +2655,8 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
break; |
} |
} else { |
- ASSERT_EQ(tmp_type, old_type); |
- ASSERT_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i)); |
+ DCHECK_EQ(tmp_type, old_type); |
+ DCHECK_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i)); |
} |
target_map = tmp_map; |
} |
@@ -2668,10 +2668,10 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
if (target_nof == old_nof && |
(store_mode != FORCE_FIELD || |
target_descriptors->GetDetails(modify_index).type() == FIELD)) { |
- ASSERT(modify_index < target_nof); |
- ASSERT(new_representation.fits_into( |
+ DCHECK(modify_index < target_nof); |
+ DCHECK(new_representation.fits_into( |
target_descriptors->GetDetails(modify_index).representation())); |
- ASSERT(target_descriptors->GetDetails(modify_index).type() != FIELD || |
+ DCHECK(target_descriptors->GetDetails(modify_index).type() != FIELD || |
new_field_type->NowIs( |
target_descriptors->GetFieldType(modify_index))); |
return target_map; |
@@ -2707,11 +2707,11 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
old_nof, old_descriptors->number_of_descriptors()) - old_nof; |
Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate( |
isolate, old_nof, new_slack); |
- ASSERT(new_descriptors->length() > target_descriptors->length() || |
+ DCHECK(new_descriptors->length() > target_descriptors->length() || |
new_descriptors->NumberOfSlackDescriptors() > 0 || |
new_descriptors->number_of_descriptors() == |
old_descriptors->number_of_descriptors()); |
- ASSERT(new_descriptors->number_of_descriptors() == old_nof); |
+ DCHECK(new_descriptors->number_of_descriptors() == old_nof); |
// 0 -> |root_nof| |
int current_offset = 0; |
@@ -2736,7 +2736,7 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
target_details = target_details.CopyWithRepresentation( |
new_representation.generalize(target_details.representation())); |
} |
- ASSERT_EQ(old_details.attributes(), target_details.attributes()); |
+ DCHECK_EQ(old_details.attributes(), target_details.attributes()); |
if (old_details.type() == FIELD || |
target_details.type() == FIELD || |
(modify_index == i && store_mode == FORCE_FIELD) || |
@@ -2762,7 +2762,7 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
target_details.representation()); |
new_descriptors->Set(i, &d); |
} else { |
- ASSERT_NE(FIELD, target_details.type()); |
+ DCHECK_NE(FIELD, target_details.type()); |
Descriptor d(target_key, |
handle(target_descriptors->GetValue(i), isolate), |
target_details); |
@@ -2792,7 +2792,7 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
old_details.representation()); |
new_descriptors->Set(i, &d); |
} else { |
- ASSERT(old_details.type() == CONSTANT || old_details.type() == CALLBACKS); |
+ DCHECK(old_details.type() == CONSTANT || old_details.type() == CALLBACKS); |
if (modify_index == i && store_mode == FORCE_FIELD) { |
FieldDescriptor d(old_key, |
current_offset++, |
@@ -2804,7 +2804,7 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
old_details.representation()); |
new_descriptors->Set(i, &d); |
} else { |
- ASSERT_NE(FIELD, old_details.type()); |
+ DCHECK_NE(FIELD, old_details.type()); |
Descriptor d(old_key, |
handle(old_descriptors->GetValue(i), isolate), |
old_details); |
@@ -2815,13 +2815,13 @@ Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, |
new_descriptors->Sort(); |
- ASSERT(store_mode != FORCE_FIELD || |
+ DCHECK(store_mode != FORCE_FIELD || |
new_descriptors->GetDetails(modify_index).type() == FIELD); |
Handle<Map> split_map(root_map->FindLastMatchMap( |
root_nof, old_nof, *new_descriptors), isolate); |
int split_nof = split_map->NumberOfOwnDescriptors(); |
- ASSERT_NE(old_nof, split_nof); |
+ DCHECK_NE(old_nof, split_nof); |
split_map->DeprecateTarget( |
old_descriptors->GetKey(split_nof), *new_descriptors); |
@@ -3094,7 +3094,7 @@ MaybeHandle<Object> Object::SetDataProperty(LookupIterator* it, |
Handle<JSObject> receiver = Handle<JSObject>::cast(it->GetReceiver()); |
// Store on the holder which may be hidden behind the receiver. |
- ASSERT(it->HolderIsReceiverOrHiddenPrototype()); |
+ DCHECK(it->HolderIsReceiverOrHiddenPrototype()); |
// Old value for the observation change record. |
// Fetch before transforming the object since the encoding may become |
@@ -3127,7 +3127,7 @@ MaybeHandle<Object> Object::AddDataProperty(LookupIterator* it, |
PropertyAttributes attributes, |
StrictMode strict_mode, |
StoreFromKeyed store_mode) { |
- ASSERT(!it->GetReceiver()->IsJSProxy()); |
+ DCHECK(!it->GetReceiver()->IsJSProxy()); |
if (!it->GetReceiver()->IsJSObject()) { |
// TODO(verwaest): Throw a TypeError with a more specific message. |
return WriteToReadOnlyProperty(it, value, strict_mode); |
@@ -3217,7 +3217,7 @@ MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes( |
void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) { |
// Only supports adding slack to owned descriptors. |
- ASSERT(map->owns_descriptors()); |
+ DCHECK(map->owns_descriptors()); |
Handle<DescriptorArray> descriptors(map->instance_descriptors()); |
int old_size = map->NumberOfOwnDescriptors(); |
@@ -3337,7 +3337,7 @@ void Map::AppendCallbackDescriptors(Handle<Map> map, |
int nof = map->NumberOfOwnDescriptors(); |
Handle<DescriptorArray> array(map->instance_descriptors()); |
NeanderArray callbacks(descriptors); |
- ASSERT(array->NumberOfSlackDescriptors() >= callbacks.length()); |
+ DCHECK(array->NumberOfSlackDescriptors() >= callbacks.length()); |
nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof); |
map->SetNumberOfOwnDescriptors(nof); |
} |
@@ -3347,7 +3347,7 @@ int AccessorInfo::AppendUnique(Handle<Object> descriptors, |
Handle<FixedArray> array, |
int valid_descriptors) { |
NeanderArray callbacks(descriptors); |
- ASSERT(array->length() >= callbacks.length() + valid_descriptors); |
+ DCHECK(array->length() >= callbacks.length() + valid_descriptors); |
return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks, |
array, |
valid_descriptors); |
@@ -3355,7 +3355,7 @@ int AccessorInfo::AppendUnique(Handle<Object> descriptors, |
static bool ContainsMap(MapHandleList* maps, Handle<Map> map) { |
- ASSERT(!map.is_null()); |
+ DCHECK(!map.is_null()); |
for (int i = 0; i < maps->length(); ++i) { |
if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true; |
} |
@@ -3421,12 +3421,12 @@ static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) { |
} |
if (to_kind != kind && current_map->HasElementsTransition()) { |
- ASSERT(to_kind == DICTIONARY_ELEMENTS); |
+ DCHECK(to_kind == DICTIONARY_ELEMENTS); |
Map* next_map = current_map->elements_transition_map(); |
if (next_map->elements_kind() == to_kind) return next_map; |
} |
- ASSERT(current_map->elements_kind() == target_kind); |
+ DCHECK(current_map->elements_kind() == target_kind); |
return current_map; |
} |
@@ -3454,7 +3454,7 @@ bool Map::IsMapInArrayPrototypeChain() { |
static Handle<Map> AddMissingElementsTransitions(Handle<Map> map, |
ElementsKind to_kind) { |
- ASSERT(IsTransitionElementsKind(map->elements_kind())); |
+ DCHECK(IsTransitionElementsKind(map->elements_kind())); |
Handle<Map> current_map = map; |
@@ -3472,7 +3472,7 @@ static Handle<Map> AddMissingElementsTransitions(Handle<Map> map, |
current_map, to_kind, INSERT_TRANSITION); |
} |
- ASSERT(current_map->elements_kind() == to_kind); |
+ DCHECK(current_map->elements_kind() == to_kind); |
return current_map; |
} |
@@ -3554,7 +3554,7 @@ void JSObject::LookupOwnRealNamedProperty(Handle<Name> name, |
if (IsJSGlobalProxy()) { |
PrototypeIterator iter(GetIsolate(), this); |
if (iter.IsAtEnd()) return result->NotFound(); |
- ASSERT(iter.GetCurrent()->IsJSGlobalObject()); |
+ DCHECK(iter.GetCurrent()->IsJSGlobalObject()); |
return JSObject::cast(iter.GetCurrent()) |
->LookupOwnRealNamedProperty(name, result); |
} |
@@ -3564,7 +3564,7 @@ void JSObject::LookupOwnRealNamedProperty(Handle<Name> name, |
// A property or a map transition was found. We return all of these result |
// types because LookupOwnRealNamedProperty is used when setting |
// properties where map transitions are handled. |
- ASSERT(!result->IsFound() || |
+ DCHECK(!result->IsFound() || |
(result->holder() == this && result->IsFastPropertyType())); |
return; |
} |
@@ -3607,7 +3607,7 @@ void JSObject::LookupRealNamedPropertyInPrototypes(Handle<Name> name, |
return result->HandlerResult(JSProxy::cast(iter.GetCurrent())); |
} |
JSObject::cast(iter.GetCurrent())->LookupOwnRealNamedProperty(name, result); |
- ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR)); |
+ DCHECK(!(result->IsFound() && result->type() == INTERCEPTOR)); |
if (result->IsFound()) return; |
} |
result->NotFound(); |
@@ -3703,7 +3703,7 @@ MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( |
STATIC_ASCII_VECTOR("configurable_")); |
Handle<Object> configurable = |
Object::GetProperty(desc, configurable_name).ToHandleChecked(); |
- ASSERT(configurable->IsBoolean()); |
+ DCHECK(configurable->IsBoolean()); |
if (configurable->IsFalse()) { |
Handle<String> trap = |
isolate->factory()->InternalizeOneByteString( |
@@ -3713,7 +3713,7 @@ MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( |
"proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args))); |
return isolate->Throw<Object>(error); |
} |
- ASSERT(configurable->IsTrue()); |
+ DCHECK(configurable->IsTrue()); |
// Check for DataDescriptor. |
Handle<String> hasWritable_name = |
@@ -3721,14 +3721,14 @@ MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler( |
STATIC_ASCII_VECTOR("hasWritable_")); |
Handle<Object> hasWritable = |
Object::GetProperty(desc, hasWritable_name).ToHandleChecked(); |
- ASSERT(hasWritable->IsBoolean()); |
+ DCHECK(hasWritable->IsBoolean()); |
if (hasWritable->IsTrue()) { |
Handle<String> writable_name = |
isolate->factory()->InternalizeOneByteString( |
STATIC_ASCII_VECTOR("writable_")); |
Handle<Object> writable = |
Object::GetProperty(desc, writable_name).ToHandleChecked(); |
- ASSERT(writable->IsBoolean()); |
+ DCHECK(writable->IsBoolean()); |
*done = writable->IsFalse(); |
if (!*done) return isolate->factory()->the_hole_value(); |
if (strict_mode == SLOPPY) return value; |
@@ -3890,7 +3890,7 @@ void JSProxy::Fix(Handle<JSProxy> proxy) { |
} else { |
isolate->factory()->BecomeJSObject(proxy); |
} |
- ASSERT(proxy->IsJSObject()); |
+ DCHECK(proxy->IsJSObject()); |
// Inherit identity, if it was present. |
if (hash->IsSmi()) { |
@@ -3930,7 +3930,7 @@ MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy, |
void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) { |
- ASSERT(object->map()->inobject_properties() == map->inobject_properties()); |
+ DCHECK(object->map()->inobject_properties() == map->inobject_properties()); |
ElementsKind obj_kind = object->map()->elements_kind(); |
ElementsKind map_kind = map->elements_kind(); |
if (map_kind != obj_kind) { |
@@ -4032,14 +4032,14 @@ void JSObject::WriteToField(int descriptor, Object* value) { |
DescriptorArray* desc = map()->instance_descriptors(); |
PropertyDetails details = desc->GetDetails(descriptor); |
- ASSERT(details.type() == FIELD); |
+ DCHECK(details.type() == FIELD); |
FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor); |
if (details.representation().IsDouble()) { |
// Nothing more to be done. |
if (value->IsUninitialized()) return; |
HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index)); |
- ASSERT(box->IsMutableHeapNumber()); |
+ DCHECK(box->IsMutableHeapNumber()); |
box->set_value(value->Number()); |
} else { |
FastPropertyAtPut(index, value); |
@@ -4108,13 +4108,13 @@ void JSObject::AddProperty(Handle<JSObject> object, Handle<Name> name, |
PropertyAttributes attributes) { |
#ifdef DEBUG |
uint32_t index; |
- ASSERT(!object->IsJSProxy()); |
- ASSERT(!name->AsArrayIndex(&index)); |
+ DCHECK(!object->IsJSProxy()); |
+ DCHECK(!name->AsArrayIndex(&index)); |
LookupIterator it(object, name, LookupIterator::CHECK_OWN_REAL); |
Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it); |
- ASSERT(maybe.has_value); |
- ASSERT(!it.IsFound()); |
- ASSERT(object->map()->is_extensible()); |
+ DCHECK(maybe.has_value); |
+ DCHECK(!it.IsFound()); |
+ DCHECK(object->map()->is_extensible()); |
#endif |
SetOwnPropertyIgnoreAttributes(object, name, value, attributes, |
OMIT_EXTENSIBILITY_CHECK).Check(); |
@@ -4131,7 +4131,7 @@ MaybeHandle<Object> JSObject::SetOwnPropertyIgnoreAttributes( |
ExtensibilityCheck extensibility_check, |
StoreFromKeyed store_from_keyed, |
ExecutableAccessorInfoHandling handling) { |
- ASSERT(!value->IsTheHole()); |
+ DCHECK(!value->IsTheHole()); |
Isolate* isolate = object->GetIsolate(); |
// Make sure that the top context does not change when doing callbacks or |
@@ -4155,7 +4155,7 @@ MaybeHandle<Object> JSObject::SetOwnPropertyIgnoreAttributes( |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return value; |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return SetOwnPropertyIgnoreAttributes( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), name, |
value, attributes, extensibility_check); |
@@ -4310,7 +4310,7 @@ Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithInterceptor( |
v8::Handle<v8::Integer> result = |
args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name))); |
if (!result.IsEmpty()) { |
- ASSERT(result->IsInt32()); |
+ DCHECK(result->IsInt32()); |
return maybe(static_cast<PropertyAttributes>(result->Int32Value())); |
} |
} else if (!interceptor->getter()->IsUndefined()) { |
@@ -4388,7 +4388,7 @@ Maybe<PropertyAttributes> JSObject::GetElementAttributeWithReceiver( |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return maybe(ABSENT); |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return JSObject::GetElementAttributeWithReceiver( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver, |
index, check_prototype); |
@@ -4491,7 +4491,7 @@ MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map, |
void NormalizedMapCache::Set(Handle<Map> fast_map, |
Handle<Map> normalized_map) { |
DisallowHeapAllocation no_gc; |
- ASSERT(normalized_map->is_dictionary_map()); |
+ DCHECK(normalized_map->is_dictionary_map()); |
FixedArray::set(GetIndex(fast_map), *normalized_map); |
} |
@@ -4528,9 +4528,9 @@ void JSObject::MigrateFastToSlow(Handle<JSObject> object, |
Handle<Map> new_map, |
int expected_additional_properties) { |
// The global object is always normalized. |
- ASSERT(!object->IsGlobalObject()); |
+ DCHECK(!object->IsGlobalObject()); |
// JSGlobalProxy must never be normalized |
- ASSERT(!object->IsJSGlobalProxy()); |
+ DCHECK(!object->IsJSGlobalProxy()); |
Isolate* isolate = object->GetIsolate(); |
HandleScope scope(isolate); |
@@ -4565,7 +4565,7 @@ void JSObject::MigrateFastToSlow(Handle<JSObject> object, |
Handle<Object> value( |
object->RawFastPropertyAt(index), isolate); |
if (details.representation().IsDouble()) { |
- ASSERT(value->IsMutableHeapNumber()); |
+ DCHECK(value->IsMutableHeapNumber()); |
Handle<HeapNumber> old = Handle<HeapNumber>::cast(value); |
value = isolate->factory()->NewHeapNumber(old->value()); |
} |
@@ -4601,7 +4601,7 @@ void JSObject::MigrateFastToSlow(Handle<JSObject> object, |
// Resize the object in the heap if necessary. |
int new_instance_size = new_map->instance_size(); |
int instance_size_delta = map->instance_size() - new_instance_size; |
- ASSERT(instance_size_delta >= 0); |
+ DCHECK(instance_size_delta >= 0); |
if (instance_size_delta > 0) { |
Heap* heap = isolate->heap(); |
@@ -4632,7 +4632,7 @@ void JSObject::MigrateFastToSlow(Handle<JSObject> object, |
void JSObject::MigrateSlowToFast(Handle<JSObject> object, |
int unused_property_fields) { |
if (object->HasFastProperties()) return; |
- ASSERT(!object->IsGlobalObject()); |
+ DCHECK(!object->IsGlobalObject()); |
Isolate* isolate = object->GetIsolate(); |
Factory* factory = isolate->factory(); |
Handle<NameDictionary> dictionary(object->property_dictionary()); |
@@ -4656,7 +4656,7 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object, |
if (dictionary->IsKey(k)) { |
Object* value = dictionary->ValueAt(i); |
PropertyType type = dictionary->DetailsAt(i).type(); |
- ASSERT(type != FIELD); |
+ DCHECK(type != FIELD); |
instance_descriptor_length++; |
if (type == NORMAL && !value->IsJSFunction()) { |
number_of_fields += 1; |
@@ -4672,13 +4672,13 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object, |
if (instance_descriptor_length == 0) { |
DisallowHeapAllocation no_gc; |
- ASSERT_LE(unused_property_fields, inobject_props); |
+ DCHECK_LE(unused_property_fields, inobject_props); |
// Transform the object. |
new_map->set_unused_property_fields(inobject_props); |
object->synchronized_set_map(*new_map); |
object->set_properties(isolate->heap()->empty_fixed_array()); |
// Check that it really works. |
- ASSERT(object->HasFastProperties()); |
+ DCHECK(object->HasFastProperties()); |
return; |
} |
@@ -4747,7 +4747,7 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object, |
} |
} |
} |
- ASSERT(current_offset == number_of_fields); |
+ DCHECK(current_offset == number_of_fields); |
descriptors->Sort(); |
@@ -4759,10 +4759,10 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object, |
object->synchronized_set_map(*new_map); |
object->set_properties(*fields); |
- ASSERT(object->IsJSObject()); |
+ DCHECK(object->IsJSObject()); |
// Check that it really works. |
- ASSERT(object->HasFastProperties()); |
+ DCHECK(object->HasFastProperties()); |
} |
@@ -4805,7 +4805,7 @@ static Handle<SeededNumberDictionary> CopyFastElementsToDictionary( |
Handle<SeededNumberDictionary> JSObject::NormalizeElements( |
Handle<JSObject> object) { |
- ASSERT(!object->HasExternalArrayElements() && |
+ DCHECK(!object->HasExternalArrayElements() && |
!object->HasFixedTypedArrayElements()); |
Isolate* isolate = object->GetIsolate(); |
@@ -4819,7 +4819,7 @@ Handle<SeededNumberDictionary> JSObject::NormalizeElements( |
} |
if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array); |
- ASSERT(object->HasFastSmiOrObjectElements() || |
+ DCHECK(object->HasFastSmiOrObjectElements() || |
object->HasFastDoubleElements() || |
object->HasFastArgumentsElements()); |
// Compute the effective length and allocate a new backing store. |
@@ -4857,7 +4857,7 @@ Handle<SeededNumberDictionary> JSObject::NormalizeElements( |
} |
#endif |
- ASSERT(object->HasDictionaryElements() || |
+ DCHECK(object->HasDictionaryElements() || |
object->HasDictionaryArgumentsElements()); |
return dictionary; |
} |
@@ -4879,7 +4879,7 @@ static Smi* GenerateIdentityHash(Isolate* isolate) { |
void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) { |
- ASSERT(!object->IsJSGlobalProxy()); |
+ DCHECK(!object->IsJSGlobalProxy()); |
Isolate* isolate = object->GetIsolate(); |
SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash); |
} |
@@ -4940,18 +4940,18 @@ Handle<Smi> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) { |
Object* JSObject::GetHiddenProperty(Handle<Name> key) { |
DisallowHeapAllocation no_gc; |
- ASSERT(key->IsUniqueName()); |
+ DCHECK(key->IsUniqueName()); |
if (IsJSGlobalProxy()) { |
// JSGlobalProxies store their hash internally. |
- ASSERT(*key != GetHeap()->identity_hash_string()); |
+ DCHECK(*key != GetHeap()->identity_hash_string()); |
// For a proxy, use the prototype as target object. |
PrototypeIterator iter(GetIsolate(), this); |
// If the proxy is detached, return undefined. |
if (iter.IsAtEnd()) return GetHeap()->the_hole_value(); |
- ASSERT(iter.GetCurrent()->IsJSGlobalObject()); |
+ DCHECK(iter.GetCurrent()->IsJSGlobalObject()); |
return JSObject::cast(iter.GetCurrent())->GetHiddenProperty(key); |
} |
- ASSERT(!IsJSGlobalProxy()); |
+ DCHECK(!IsJSGlobalProxy()); |
Object* inline_value = GetHiddenPropertiesHashTable(); |
if (inline_value->IsSmi()) { |
@@ -4976,20 +4976,20 @@ Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object, |
Handle<Object> value) { |
Isolate* isolate = object->GetIsolate(); |
- ASSERT(key->IsUniqueName()); |
+ DCHECK(key->IsUniqueName()); |
if (object->IsJSGlobalProxy()) { |
// JSGlobalProxies store their hash internally. |
- ASSERT(*key != *isolate->factory()->identity_hash_string()); |
+ DCHECK(*key != *isolate->factory()->identity_hash_string()); |
// For a proxy, use the prototype as target object. |
PrototypeIterator iter(isolate, object); |
// If the proxy is detached, return undefined. |
if (iter.IsAtEnd()) return isolate->factory()->undefined_value(); |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return SetHiddenProperty( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key, |
value); |
} |
- ASSERT(!object->IsJSGlobalProxy()); |
+ DCHECK(!object->IsJSGlobalProxy()); |
Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate); |
@@ -5019,12 +5019,12 @@ Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object, |
void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) { |
Isolate* isolate = object->GetIsolate(); |
- ASSERT(key->IsUniqueName()); |
+ DCHECK(key->IsUniqueName()); |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return; |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return DeleteHiddenProperty( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key); |
} |
@@ -5032,7 +5032,7 @@ void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) { |
Object* inline_value = object->GetHiddenPropertiesHashTable(); |
// We never delete (inline-stored) identity hashes. |
- ASSERT(*key != *isolate->factory()->identity_hash_string()); |
+ DCHECK(*key != *isolate->factory()->identity_hash_string()); |
if (inline_value->IsUndefined() || inline_value->IsSmi()) return; |
Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value)); |
@@ -5046,13 +5046,13 @@ bool JSObject::HasHiddenProperties(Handle<JSObject> object) { |
LookupIterator it(object, hidden, LookupIterator::CHECK_OWN_REAL); |
Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it); |
// Cannot get an exception since the hidden_string isn't accessible to JS. |
- ASSERT(maybe.has_value); |
+ DCHECK(maybe.has_value); |
return maybe.value != ABSENT; |
} |
Object* JSObject::GetHiddenPropertiesHashTable() { |
- ASSERT(!IsJSGlobalProxy()); |
+ DCHECK(!IsJSGlobalProxy()); |
if (HasFastProperties()) { |
// If the object has fast properties, check whether the first slot |
// in the descriptor array matches the hidden string. Since the |
@@ -5063,8 +5063,8 @@ Object* JSObject::GetHiddenPropertiesHashTable() { |
int sorted_index = descriptors->GetSortedKeyIndex(0); |
if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() && |
sorted_index < map()->NumberOfOwnDescriptors()) { |
- ASSERT(descriptors->GetType(sorted_index) == FIELD); |
- ASSERT(descriptors->GetDetails(sorted_index).representation(). |
+ DCHECK(descriptors->GetType(sorted_index) == FIELD); |
+ DCHECK(descriptors->GetDetails(sorted_index).representation(). |
IsCompatibleForLoad(Representation::Tagged())); |
FieldIndex index = FieldIndex::ForDescriptor(this->map(), |
sorted_index); |
@@ -5080,8 +5080,8 @@ Object* JSObject::GetHiddenPropertiesHashTable() { |
LookupResult result(isolate); |
LookupOwnRealNamedProperty(isolate->factory()->hidden_string(), &result); |
if (result.IsFound()) { |
- ASSERT(result.IsNormal()); |
- ASSERT(result.holder() == this); |
+ DCHECK(result.IsNormal()); |
+ DCHECK(result.holder() == this); |
return GetNormalizedProperty(&result); |
} |
return GetHeap()->undefined_value(); |
@@ -5119,13 +5119,13 @@ Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable( |
Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object, |
Handle<Object> value) { |
- ASSERT(!object->IsJSGlobalProxy()); |
+ DCHECK(!object->IsJSGlobalProxy()); |
Isolate* isolate = object->GetIsolate(); |
// We can store the identity hash inline iff there is no backing store |
// for hidden properties yet. |
- ASSERT(JSObject::HasHiddenProperties(object) != value->IsSmi()); |
+ DCHECK(JSObject::HasHiddenProperties(object) != value->IsSmi()); |
if (object->HasFastProperties()) { |
// If the object has fast properties, check whether the first slot |
// in the descriptor array matches the hidden string. Since the |
@@ -5184,7 +5184,7 @@ MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor( |
args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name))); |
RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); |
if (!result.IsEmpty()) { |
- ASSERT(result->IsBoolean()); |
+ DCHECK(result->IsBoolean()); |
Handle<Object> result_internal = v8::Utils::OpenHandle(*result); |
result_internal->VerifyApiCallResultType(); |
// Rebox CustomArguments::kReturnValueOffset before returning. |
@@ -5218,7 +5218,7 @@ MaybeHandle<Object> JSObject::DeleteElementWithInterceptor( |
v8::Handle<v8::Boolean> result = args.Call(deleter, index); |
RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); |
if (!result.IsEmpty()) { |
- ASSERT(result->IsBoolean()); |
+ DCHECK(result->IsBoolean()); |
Handle<Object> result_internal = v8::Utils::OpenHandle(*result); |
result_internal->VerifyApiCallResultType(); |
// Rebox CustomArguments::kReturnValueOffset before returning. |
@@ -5261,7 +5261,7 @@ MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object, |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return factory->false_value(); |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return DeleteElement( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index, |
mode); |
@@ -5311,7 +5311,7 @@ MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object, |
DeleteMode mode) { |
Isolate* isolate = object->GetIsolate(); |
// ECMA-262, 3rd, 8.6.2.5 |
- ASSERT(name->IsName()); |
+ DCHECK(name->IsName()); |
// Check access rights if needed. |
if (object->IsAccessCheckNeeded() && |
@@ -5324,7 +5324,7 @@ MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object, |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return isolate->factory()->false_value(); |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return JSGlobalObject::DeleteProperty( |
Handle<JSGlobalObject>::cast(PrototypeIterator::GetCurrent(iter)), name, |
mode); |
@@ -5414,7 +5414,7 @@ MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object, |
bool JSObject::ReferencesObjectFromElements(FixedArray* elements, |
ElementsKind kind, |
Object* object) { |
- ASSERT(IsFastObjectElementsKind(kind) || |
+ DCHECK(IsFastObjectElementsKind(kind) || |
kind == DICTIONARY_ELEMENTS); |
if (IsFastObjectElementsKind(kind)) { |
int length = IsJSArray() |
@@ -5562,7 +5562,7 @@ MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) { |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return object; |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return PreventExtensions( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter))); |
} |
@@ -5579,7 +5579,7 @@ MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) { |
// If there are fast elements we normalize. |
Handle<SeededNumberDictionary> dictionary = NormalizeElements(object); |
- ASSERT(object->HasDictionaryElements() || |
+ DCHECK(object->HasDictionaryElements() || |
object->HasDictionaryArgumentsElements()); |
// Make sure that we never go back to fast case. |
@@ -5592,7 +5592,7 @@ MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) { |
new_map->set_is_extensible(false); |
JSObject::MigrateToMap(object, new_map); |
- ASSERT(!object->map()->is_extensible()); |
+ DCHECK(!object->map()->is_extensible()); |
if (object->map()->is_observed()) { |
EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(), |
@@ -5629,8 +5629,8 @@ static void FreezeDictionary(Dictionary* dictionary) { |
MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) { |
// Freezing sloppy arguments should be handled elsewhere. |
- ASSERT(!object->HasSloppyArgumentsElements()); |
- ASSERT(!object->map()->is_observed()); |
+ DCHECK(!object->HasSloppyArgumentsElements()); |
+ DCHECK(!object->map()->is_observed()); |
if (object->map()->is_frozen()) return object; |
@@ -5646,7 +5646,7 @@ MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) { |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return object; |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return Freeze(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter))); |
} |
@@ -5687,9 +5687,9 @@ MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) { |
isolate->heap()->frozen_symbol()); |
if (transition_index != TransitionArray::kNotFound) { |
Handle<Map> transition_map(old_map->GetTransition(transition_index)); |
- ASSERT(transition_map->has_dictionary_elements()); |
- ASSERT(transition_map->is_frozen()); |
- ASSERT(!transition_map->is_extensible()); |
+ DCHECK(transition_map->has_dictionary_elements()); |
+ DCHECK(transition_map->is_frozen()); |
+ DCHECK(!transition_map->is_extensible()); |
JSObject::MigrateToMap(object, transition_map); |
} else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) { |
// Create a new descriptor array with fully-frozen properties |
@@ -5711,7 +5711,7 @@ MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) { |
FreezeDictionary(object->property_dictionary()); |
} |
- ASSERT(object->map()->has_dictionary_elements()); |
+ DCHECK(object->map()->has_dictionary_elements()); |
if (!new_element_dictionary.is_null()) { |
object->set_elements(*new_element_dictionary); |
} |
@@ -5729,17 +5729,17 @@ MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) { |
void JSObject::SetObserved(Handle<JSObject> object) { |
- ASSERT(!object->IsJSGlobalProxy()); |
- ASSERT(!object->IsJSGlobalObject()); |
+ DCHECK(!object->IsJSGlobalProxy()); |
+ DCHECK(!object->IsJSGlobalObject()); |
Isolate* isolate = object->GetIsolate(); |
Handle<Map> new_map; |
Handle<Map> old_map(object->map(), isolate); |
- ASSERT(!old_map->is_observed()); |
+ DCHECK(!old_map->is_observed()); |
int transition_index = old_map->SearchTransition( |
isolate->heap()->observed_symbol()); |
if (transition_index != TransitionArray::kNotFound) { |
new_map = handle(old_map->GetTransition(transition_index), isolate); |
- ASSERT(new_map->is_observed()); |
+ DCHECK(new_map->is_observed()); |
} else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) { |
new_map = Map::CopyForObserved(old_map); |
} else { |
@@ -5824,7 +5824,7 @@ MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( |
copy = object; |
} |
- ASSERT(copying || copy.is_identical_to(object)); |
+ DCHECK(copying || copy.is_identical_to(object)); |
ElementsKind kind = copy->GetElementsKind(); |
if (copying && IsFastSmiOrObjectElementsKind(kind) && |
@@ -5863,11 +5863,11 @@ MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( |
isolate->factory()->NewFixedArray(copy->NumberOfOwnProperties()); |
copy->GetOwnPropertyNames(*names, 0); |
for (int i = 0; i < names->length(); i++) { |
- ASSERT(names->get(i)->IsString()); |
+ DCHECK(names->get(i)->IsString()); |
Handle<String> key_string(String::cast(names->get(i))); |
Maybe<PropertyAttributes> maybe = |
JSReceiver::GetOwnPropertyAttributes(copy, key_string); |
- ASSERT(maybe.has_value); |
+ DCHECK(maybe.has_value); |
PropertyAttributes attributes = maybe.value; |
// Only deep copy fields from the object literal expression. |
// In particular, don't try to copy the length attribute of |
@@ -5891,7 +5891,7 @@ MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( |
// Deep copy own elements. |
// Pixel elements cannot be created using an object literal. |
- ASSERT(!copy->HasExternalArrayElements()); |
+ DCHECK(!copy->HasExternalArrayElements()); |
switch (kind) { |
case FAST_SMI_ELEMENTS: |
case FAST_ELEMENTS: |
@@ -5901,13 +5901,13 @@ MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk( |
if (elements->map() == isolate->heap()->fixed_cow_array_map()) { |
#ifdef DEBUG |
for (int i = 0; i < elements->length(); i++) { |
- ASSERT(!elements->get(i)->IsJSObject()); |
+ DCHECK(!elements->get(i)->IsJSObject()); |
} |
#endif |
} else { |
for (int i = 0; i < elements->length(); i++) { |
Handle<Object> value(elements->get(i), isolate); |
- ASSERT(value->IsSmi() || |
+ DCHECK(value->IsSmi() || |
value->IsTheHole() || |
(IsFastObjectElementsKind(copy->GetElementsKind()))); |
if (value->IsJSObject()) { |
@@ -5976,7 +5976,7 @@ MaybeHandle<JSObject> JSObject::DeepWalk( |
kNoHints); |
MaybeHandle<JSObject> result = v.StructureWalk(object); |
Handle<JSObject> for_assert; |
- ASSERT(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object)); |
+ DCHECK(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object)); |
return result; |
} |
@@ -5988,7 +5988,7 @@ MaybeHandle<JSObject> JSObject::DeepCopy( |
JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints); |
MaybeHandle<JSObject> copy = v.StructureWalk(object); |
Handle<JSObject> for_assert; |
- ASSERT(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object)); |
+ DCHECK(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object)); |
return copy; |
} |
@@ -6042,8 +6042,8 @@ bool JSReceiver::IsSimpleEnum() { |
int enum_length = curr->map()->EnumLength(); |
if (enum_length == kInvalidEnumCacheSentinel) return false; |
if (curr->IsAccessCheckNeeded()) return false; |
- ASSERT(!curr->HasNamedInterceptor()); |
- ASSERT(!curr->HasIndexedInterceptor()); |
+ DCHECK(!curr->HasNamedInterceptor()); |
+ DCHECK(!curr->HasIndexedInterceptor()); |
if (curr->NumberOfEnumElements() > 0) return false; |
if (curr != this && enum_length != 0) return false; |
} |
@@ -6103,12 +6103,12 @@ int Map::NextFreePropertyIndex() { |
void JSReceiver::LookupOwn( |
Handle<Name> name, LookupResult* result, bool search_hidden_prototypes) { |
DisallowHeapAllocation no_gc; |
- ASSERT(name->IsName()); |
+ DCHECK(name->IsName()); |
if (IsJSGlobalProxy()) { |
PrototypeIterator iter(GetIsolate(), this); |
if (iter.IsAtEnd()) return result->NotFound(); |
- ASSERT(iter.GetCurrent()->IsJSGlobalObject()); |
+ DCHECK(iter.GetCurrent()->IsJSGlobalObject()); |
return JSReceiver::cast(iter.GetCurrent()) |
->LookupOwn(name, result, search_hidden_prototypes); |
} |
@@ -6170,7 +6170,7 @@ static bool ContainsOnlyValidKeys(Handle<FixedArray> array) { |
static Handle<FixedArray> ReduceFixedArrayTo( |
Handle<FixedArray> array, int length) { |
- ASSERT(array->length() >= length); |
+ DCHECK(array->length() >= length); |
if (array->length() == length) return array; |
Handle<FixedArray> new_array = |
@@ -6194,7 +6194,7 @@ static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object, |
own_property_count = object->map()->NumberOfDescribedProperties( |
OWN_DESCRIPTORS, DONT_SHOW); |
} else { |
- ASSERT(own_property_count == object->map()->NumberOfDescribedProperties( |
+ DCHECK(own_property_count == object->map()->NumberOfDescribedProperties( |
OWN_DESCRIPTORS, DONT_SHOW)); |
} |
@@ -6251,7 +6251,7 @@ static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object, |
index++; |
} |
} |
- ASSERT(index == storage->length()); |
+ DCHECK(index == storage->length()); |
Handle<FixedArray> bridge_storage = |
isolate->factory()->NewFixedArray( |
@@ -6331,7 +6331,7 @@ MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object, |
isolate, content, |
FixedArray::UnionOfKeys(content, element_keys), |
FixedArray); |
- ASSERT(ContainsOnlyValidKeys(content)); |
+ DCHECK(ContainsOnlyValidKeys(content)); |
// Add the element keys from the interceptor. |
if (current->HasIndexedInterceptor()) { |
@@ -6343,7 +6343,7 @@ MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object, |
FixedArray::AddKeysFromArrayLike(content, result), |
FixedArray); |
} |
- ASSERT(ContainsOnlyValidKeys(content)); |
+ DCHECK(ContainsOnlyValidKeys(content)); |
} |
// We can cache the computed property keys if access checks are |
@@ -6368,7 +6368,7 @@ MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object, |
FixedArray::UnionOfKeys( |
content, GetEnumPropertyKeys(current, cache_enum_keys)), |
FixedArray); |
- ASSERT(ContainsOnlyValidKeys(content)); |
+ DCHECK(ContainsOnlyValidKeys(content)); |
// Add the property keys from the interceptor. |
if (current->HasNamedInterceptor()) { |
@@ -6380,7 +6380,7 @@ MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object, |
FixedArray::AddKeysFromArrayLike(content, result), |
FixedArray); |
} |
- ASSERT(ContainsOnlyValidKeys(content)); |
+ DCHECK(ContainsOnlyValidKeys(content)); |
} |
// If we only want own properties we bail out after the first |
@@ -6404,7 +6404,7 @@ static bool UpdateGetterSetterInDictionary( |
Object* result = dictionary->ValueAt(entry); |
PropertyDetails details = dictionary->DetailsAt(entry); |
if (details.type() == CALLBACKS && result->IsAccessorPair()) { |
- ASSERT(!details.IsDontDelete()); |
+ DCHECK(!details.IsDontDelete()); |
if (details.attributes() != attributes) { |
dictionary->DetailsAtPut( |
entry, |
@@ -6560,7 +6560,7 @@ void JSObject::SetElementCallback(Handle<JSObject> object, |
// Normalize elements to make this operation simple. |
bool had_dictionary_elements = object->HasDictionaryElements(); |
Handle<SeededNumberDictionary> dictionary = NormalizeElements(object); |
- ASSERT(object->HasDictionaryElements() || |
+ DCHECK(object->HasDictionaryElements() || |
object->HasDictionaryArgumentsElements()); |
// Update the dictionary with the new CALLBACKS property. |
dictionary = SeededNumberDictionary::Set(dictionary, index, structure, |
@@ -6601,7 +6601,7 @@ void JSObject::SetPropertyCallback(Handle<JSObject> object, |
// caches which have a global property cell reference directly in the code. |
if (object->IsGlobalObject()) { |
Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map())); |
- ASSERT(new_map->is_dictionary_map()); |
+ DCHECK(new_map->is_dictionary_map()); |
JSObject::MigrateToMap(object, new_map); |
// When running crankshaft, changing the map is not enough. We |
@@ -6633,7 +6633,7 @@ MaybeHandle<Object> JSObject::DefineAccessor(Handle<JSObject> object, |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return isolate->factory()->undefined_value(); |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
DefineAccessor(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), |
name, getter, setter, attributes); |
return isolate->factory()->undefined_value(); |
@@ -6659,7 +6659,7 @@ MaybeHandle<Object> JSObject::DefineAccessor(Handle<JSObject> object, |
// Workaround for a GCC 4.4.3 bug which leads to "‘preexists’ may be used |
// uninitialized in this function". |
if (!maybe.has_value) { |
- ASSERT(false); |
+ DCHECK(false); |
return isolate->factory()->undefined_value(); |
} |
preexists = maybe.value; |
@@ -6727,7 +6727,7 @@ bool JSObject::DefineFastAccessor(Handle<JSObject> object, |
AccessorComponent component, |
Handle<Object> accessor, |
PropertyAttributes attributes) { |
- ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined()); |
+ DCHECK(accessor->IsSpecFunction() || accessor->IsUndefined()); |
Isolate* isolate = object->GetIsolate(); |
LookupResult result(isolate); |
object->LookupOwn(name, &result); |
@@ -6756,10 +6756,10 @@ bool JSObject::DefineFastAccessor(Handle<JSObject> object, |
if (result.IsFound()) { |
Handle<Map> target(result.GetTransitionTarget()); |
- ASSERT(target->NumberOfOwnDescriptors() == |
+ DCHECK(target->NumberOfOwnDescriptors() == |
object->map()->NumberOfOwnDescriptors()); |
// This works since descriptors are sorted in order of addition. |
- ASSERT(Name::Equals( |
+ DCHECK(Name::Equals( |
handle(object->map()->instance_descriptors()->GetKey( |
descriptor_number)), |
name)); |
@@ -6774,7 +6774,7 @@ bool JSObject::DefineFastAccessor(Handle<JSObject> object, |
if (result.IsFound()) { |
Handle<Map> target(result.GetTransitionTarget()); |
int descriptor_number = target->LastAdded(); |
- ASSERT(Name::Equals(name, |
+ DCHECK(Name::Equals(name, |
handle(target->instance_descriptors()->GetKey(descriptor_number)))); |
return TryAccessorTransition(object, target, descriptor_number, |
component, accessor, attributes); |
@@ -6815,7 +6815,7 @@ MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object, |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return object; |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return SetAccessor( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), info); |
} |
@@ -6950,7 +6950,7 @@ Object* JSObject::SlowReverseLookup(Object* value) { |
Object* property = |
RawFastPropertyAt(FieldIndex::ForDescriptor(map(), i)); |
if (descs->GetDetails(i).representation().IsDouble()) { |
- ASSERT(property->IsMutableHeapNumber()); |
+ DCHECK(property->IsMutableHeapNumber()); |
if (value->IsNumber() && property->Number() == value->Number()) { |
return descs->GetKey(i); |
} |
@@ -6995,7 +6995,7 @@ Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) { |
Handle<Map> Map::Normalize(Handle<Map> fast_map, |
PropertyNormalizationMode mode) { |
- ASSERT(!fast_map->is_dictionary_map()); |
+ DCHECK(!fast_map->is_dictionary_map()); |
Isolate* isolate = fast_map->GetIsolate(); |
Handle<NormalizedMapCache> cache( |
@@ -7008,7 +7008,7 @@ Handle<Map> Map::Normalize(Handle<Map> fast_map, |
new_map->SharedMapVerify(); |
} |
#endif |
-#ifdef ENABLE_SLOW_ASSERTS |
+#ifdef ENABLE_SLOW_DCHECKS |
if (FLAG_enable_slow_asserts) { |
// The cached map should match newly created normalized map bit-by-bit, |
// except for the code cache, which can contain some ics which can be |
@@ -7016,13 +7016,13 @@ Handle<Map> Map::Normalize(Handle<Map> fast_map, |
Handle<Map> fresh = Map::CopyNormalized( |
fast_map, mode, SHARED_NORMALIZED_MAP); |
- ASSERT(memcmp(fresh->address(), |
+ DCHECK(memcmp(fresh->address(), |
new_map->address(), |
Map::kCodeCacheOffset) == 0); |
STATIC_ASSERT(Map::kDependentCodeOffset == |
Map::kCodeCacheOffset + kPointerSize); |
int offset = Map::kDependentCodeOffset + kPointerSize; |
- ASSERT(memcmp(fresh->address() + offset, |
+ DCHECK(memcmp(fresh->address() + offset, |
new_map->address() + offset, |
Map::kSize - offset) == 0); |
} |
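The ENABLE_SLOW_ASSERTS to ENABLE_SLOW_DCHECKS rename above keeps the two-level guard this hunk shows: the expensive verification (here a bit-by-bit memcmp against a freshly built normalized map) is compiled in only when the slow-checks define is set, and is additionally gated at runtime by FLAG_enable_slow_asserts. A sketch of that idiom as a macro; the guard shape is taken from the hunks in this patch, while the macro itself is illustrative rather than the real header. |
#ifdef ENABLE_SLOW_DCHECKS |
// Slow-check builds: still a no-op unless the runtime flag enables it. |
#define SLOW_DCHECK(condition) \ |
  DCHECK(!v8::internal::FLAG_enable_slow_asserts || (condition)) |
#else |
#define SLOW_DCHECK(condition) ((void) 0) |
#endif |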
@@ -7087,7 +7087,7 @@ Handle<Map> Map::ShareDescriptor(Handle<Map> map, |
// Sanity check. This path is only to be taken if the map owns its descriptor |
// array, implying that its NumberOfOwnDescriptors equals the number of |
// descriptors in the descriptor array. |
- ASSERT(map->NumberOfOwnDescriptors() == |
+ DCHECK(map->NumberOfOwnDescriptors() == |
map->instance_descriptors()->number_of_descriptors()); |
Handle<Map> result = CopyDropDescriptors(map); |
@@ -7113,7 +7113,7 @@ Handle<Map> Map::ShareDescriptor(Handle<Map> map, |
result->SetBackPointer(*map); |
result->InitializeDescriptors(*descriptors); |
- ASSERT(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1); |
+ DCHECK(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1); |
map->set_transitions(*transitions); |
map->set_owns_descriptors(false); |
@@ -7127,7 +7127,7 @@ Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map, |
TransitionFlag flag, |
MaybeHandle<Name> maybe_name, |
SimpleTransitionFlag simple_flag) { |
- ASSERT(descriptors->IsSortedNoDuplicates()); |
+ DCHECK(descriptors->IsSortedNoDuplicates()); |
Handle<Map> result = CopyDropDescriptors(map); |
result->InitializeDescriptors(*descriptors); |
@@ -7158,7 +7158,7 @@ Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map, |
Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map, |
int new_descriptor, |
Handle<DescriptorArray> descriptors) { |
- ASSERT(descriptors->IsSortedNoDuplicates()); |
+ DCHECK(descriptors->IsSortedNoDuplicates()); |
Handle<Map> result = CopyDropDescriptors(map); |
@@ -7190,16 +7190,16 @@ Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map, |
Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind, |
TransitionFlag flag) { |
if (flag == INSERT_TRANSITION) { |
- ASSERT(!map->HasElementsTransition() || |
+ DCHECK(!map->HasElementsTransition() || |
((map->elements_transition_map()->elements_kind() == |
DICTIONARY_ELEMENTS || |
IsExternalArrayElementsKind( |
map->elements_transition_map()->elements_kind())) && |
(kind == DICTIONARY_ELEMENTS || |
IsExternalArrayElementsKind(kind)))); |
- ASSERT(!IsFastElementsKind(kind) || |
+ DCHECK(!IsFastElementsKind(kind) || |
IsMoreGeneralElementsKindTransition(map->elements_kind(), kind)); |
- ASSERT(kind != map->elements_kind()); |
+ DCHECK(kind != map->elements_kind()); |
} |
bool insert_transition = |
@@ -7236,7 +7236,7 @@ Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind, |
Handle<Map> Map::CopyForObserved(Handle<Map> map) { |
- ASSERT(!map->is_observed()); |
+ DCHECK(!map->is_observed()); |
Isolate* isolate = map->GetIsolate(); |
@@ -7327,7 +7327,7 @@ bool DescriptorArray::CanHoldValue(int descriptor, Object* value) { |
GetFieldType(descriptor)->NowContains(value); |
case CONSTANT: |
- ASSERT(GetConstant(descriptor) != value || |
+ DCHECK(GetConstant(descriptor) != value || |
value->FitsRepresentation(details.representation())); |
return GetConstant(descriptor) == value; |
@@ -7518,7 +7518,7 @@ Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map, |
descriptor->KeyToUniqueName(); |
Handle<Name> key = descriptor->GetKey(); |
- ASSERT(*key == descriptors->GetKey(insertion_index)); |
+ DCHECK(*key == descriptors->GetKey(insertion_index)); |
Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo( |
descriptors, map->NumberOfOwnDescriptors()); |
@@ -7572,7 +7572,7 @@ int Map::IndexInCodeCache(Object* name, Code* code) { |
void Map::RemoveFromCodeCache(Name* name, Code* code, int index) { |
// No GC is supposed to happen between a call to IndexInCodeCache and |
// RemoveFromCodeCache so the code cache must be there. |
- ASSERT(!code_cache()->IsFixedArray()); |
+ DCHECK(!code_cache()->IsFixedArray()); |
CodeCache::cast(code_cache())->RemoveByIndex(name, code, index); |
} |
@@ -7590,9 +7590,9 @@ class IntrusiveMapTransitionIterator { |
constructor_(constructor) { } |
void StartIfNotStarted() { |
- ASSERT(!(*IteratorField())->IsSmi() || IsIterating()); |
+ DCHECK(!(*IteratorField())->IsSmi() || IsIterating()); |
if (!(*IteratorField())->IsSmi()) { |
- ASSERT(*IteratorField() == constructor_); |
+ DCHECK(*IteratorField() == constructor_); |
*IteratorField() = Smi::FromInt(-1); |
} |
} |
@@ -7603,7 +7603,7 @@ class IntrusiveMapTransitionIterator { |
} |
Map* Next() { |
- ASSERT(IsIterating()); |
+ DCHECK(IsIterating()); |
int value = Smi::cast(*IteratorField())->value(); |
int index = -value - 1; |
int number_of_transitions = transition_array_->number_of_transitions(); |
@@ -7639,7 +7639,7 @@ class IntrusivePrototypeTransitionIterator { |
void StartIfNotStarted() { |
if (!(*IteratorField())->IsSmi()) { |
- ASSERT(*IteratorField() == constructor_); |
+ DCHECK(*IteratorField() == constructor_); |
*IteratorField() = Smi::FromInt(0); |
} |
} |
@@ -7650,7 +7650,7 @@ class IntrusivePrototypeTransitionIterator { |
} |
Map* Next() { |
- ASSERT(IsIterating()); |
+ DCHECK(IsIterating()); |
int transitionNumber = Smi::cast(*IteratorField())->value(); |
if (transitionNumber < NumberOfTransitions()) { |
*IteratorField() = Smi::FromInt(transitionNumber + 1); |
@@ -7798,7 +7798,7 @@ void CodeCache::Update( |
} |
UpdateNormalTypeCache(code_cache, name, code); |
} else { |
- ASSERT(code_cache->default_cache()->IsFixedArray()); |
+ DCHECK(code_cache->default_cache()->IsFixedArray()); |
UpdateDefaultCache(code_cache, name, code); |
} |
} |
@@ -7853,7 +7853,7 @@ void CodeCache::UpdateDefaultCache( |
// multiple of the entry size. |
int new_length = length + ((length >> 1)) + kCodeCacheEntrySize; |
new_length = new_length - new_length % kCodeCacheEntrySize; |
- ASSERT((new_length % kCodeCacheEntrySize) == 0); |
+ DCHECK((new_length % kCodeCacheEntrySize) == 0); |
cache = FixedArray::CopySize(cache, new_length); |
// Add the (name, code) pair to the new cache. |
@@ -7930,17 +7930,17 @@ int CodeCache::GetIndex(Object* name, Code* code) { |
void CodeCache::RemoveByIndex(Object* name, Code* code, int index) { |
if (code->type() == Code::NORMAL) { |
- ASSERT(!normal_type_cache()->IsUndefined()); |
+ DCHECK(!normal_type_cache()->IsUndefined()); |
CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache()); |
- ASSERT(cache->GetIndex(Name::cast(name), code->flags()) == index); |
+ DCHECK(cache->GetIndex(Name::cast(name), code->flags()) == index); |
cache->RemoveByIndex(index); |
} else { |
FixedArray* array = default_cache(); |
- ASSERT(array->length() >= index && array->get(index)->IsCode()); |
+ DCHECK(array->length() >= index && array->get(index)->IsCode()); |
// Use null instead of undefined for deleted elements to distinguish |
// deleted elements from unused elements. This distinction is used |
// when looking up in the cache and when updating the cache. |
- ASSERT_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset); |
+ DCHECK_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset); |
array->set_null(index - 1); // Name. |
array->set_null(index); // Code. |
} |
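The DCHECKs in this hunk lean on the default cache's layout: a flat FixedArray of (name, code) pairs with the code slot exactly one index after the name slot (hence the DCHECK_EQ above), and deleted pairs nulled out so they can be told apart from unused, still-undefined tail slots. A sketch of a lookup over that layout, reusing the constant names from the surrounding hunks; it is illustrative and not the actual lookup body. |
// Scan entry-sized strides; null keys are deleted pairs, undefined keys are |
// unused slots, and both are skipped when searching for a (name, flags) hit. |
for (int i = 0; i < cache->length(); i += kCodeCacheEntrySize) { |
  Object* key = cache->get(i + kCodeCacheEntryNameOffset); |
  if (key->IsNull() || key->IsUndefined()) continue; |
  Code* entry = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset)); |
  if (key == name && entry->flags() == flags) return entry; |
} |
return NULL;  // Not present in the default cache. |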
@@ -8033,7 +8033,7 @@ int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) { |
void CodeCacheHashTable::RemoveByIndex(int index) { |
- ASSERT(index >= 0); |
+ DCHECK(index >= 0); |
Heap* heap = GetHeap(); |
set(EntryToIndex(index), heap->the_hole_value()); |
set(EntryToIndex(index) + 1, heap->the_hole_value()); |
@@ -8054,7 +8054,7 @@ void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache, |
code_cache->set_cache(*result); |
} else { |
// This entry shouldn't be contained in the cache yet. |
- ASSERT(PolymorphicCodeCacheHashTable::cast(code_cache->cache()) |
+ DCHECK(PolymorphicCodeCacheHashTable::cast(code_cache->cache()) |
->Lookup(maps, flags)->IsUndefined()); |
} |
Handle<PolymorphicCodeCacheHashTable> hash_table = |
@@ -8194,7 +8194,7 @@ Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put( |
void FixedArray::Shrink(int new_length) { |
- ASSERT(0 <= new_length && new_length <= length()); |
+ DCHECK(0 <= new_length && new_length <= length()); |
if (new_length < length()) { |
RightTrimFixedArray<Heap::FROM_MUTATOR>( |
GetHeap(), this, length() - new_length); |
@@ -8205,7 +8205,7 @@ void FixedArray::Shrink(int new_length) { |
MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike( |
Handle<FixedArray> content, |
Handle<JSObject> array) { |
- ASSERT(array->IsJSArray() || array->HasSloppyArgumentsElements()); |
+ DCHECK(array->IsJSArray() || array->HasSloppyArgumentsElements()); |
ElementsAccessor* accessor = array->GetElementsAccessor(); |
Handle<FixedArray> result; |
ASSIGN_RETURN_ON_EXCEPTION( |
@@ -8213,12 +8213,12 @@ MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike( |
accessor->AddElementsToFixedArray(array, array, content), |
FixedArray); |
-#ifdef ENABLE_SLOW_ASSERTS |
+#ifdef ENABLE_SLOW_DCHECKS |
if (FLAG_enable_slow_asserts) { |
DisallowHeapAllocation no_allocation; |
for (int i = 0; i < result->length(); i++) { |
Object* current = result->get(i); |
- ASSERT(current->IsNumber() || current->IsName()); |
+ DCHECK(current->IsNumber() || current->IsName()); |
} |
} |
#endif |
@@ -8239,12 +8239,12 @@ MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first, |
Handle<FixedArrayBase>::cast(second)), |
FixedArray); |
-#ifdef ENABLE_SLOW_ASSERTS |
+#ifdef ENABLE_SLOW_DCHECKS |
if (FLAG_enable_slow_asserts) { |
DisallowHeapAllocation no_allocation; |
for (int i = 0; i < result->length(); i++) { |
Object* current = result->get(i); |
- ASSERT(current->IsNumber() || current->IsName()); |
+ DCHECK(current->IsNumber() || current->IsName()); |
} |
} |
#endif |
@@ -8296,7 +8296,7 @@ bool FixedArray::IsEqualTo(FixedArray* other) { |
Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate, |
int number_of_descriptors, |
int slack) { |
- ASSERT(0 <= number_of_descriptors); |
+ DCHECK(0 <= number_of_descriptors); |
Factory* factory = isolate->factory(); |
// Do not use DescriptorArray::cast on incomplete object. |
int size = number_of_descriptors + slack; |
@@ -8324,10 +8324,10 @@ void DescriptorArray::Replace(int index, Descriptor* descriptor) { |
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage, |
FixedArray* new_cache, |
Object* new_index_cache) { |
- ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength); |
- ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray()); |
- ASSERT(!IsEmpty()); |
- ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length()); |
+ DCHECK(bridge_storage->length() >= kEnumCacheBridgeLength); |
+ DCHECK(new_index_cache->IsSmi() || new_index_cache->IsFixedArray()); |
+ DCHECK(!IsEmpty()); |
+ DCHECK(!HasEnumCache() || new_cache->length() > GetEnumCache()->length()); |
FixedArray::cast(bridge_storage)-> |
set(kEnumCacheBridgeCacheIndex, new_cache); |
FixedArray::cast(bridge_storage)-> |
@@ -8403,7 +8403,7 @@ void DescriptorArray::Sort() { |
parent_index = child_index; |
} |
} |
- ASSERT(IsSortedNoDuplicates()); |
+ DCHECK(IsSortedNoDuplicates()); |
} |
@@ -8424,7 +8424,7 @@ Object* AccessorPair::GetComponent(AccessorComponent component) { |
Handle<DeoptimizationInputData> DeoptimizationInputData::New( |
Isolate* isolate, int deopt_entry_count, int return_patch_address_count, |
PretenureFlag pretenure) { |
- ASSERT(deopt_entry_count + return_patch_address_count > 0); |
+ DCHECK(deopt_entry_count + return_patch_address_count > 0); |
Handle<FixedArray> deoptimization_data = |
Handle<FixedArray>::cast(isolate->factory()->NewFixedArray( |
LengthFor(deopt_entry_count, return_patch_address_count), pretenure)); |
@@ -8471,7 +8471,7 @@ bool String::LooksValid() { |
String::FlatContent String::GetFlatContent() { |
- ASSERT(!AllowHeapAllocation::IsAllowed()); |
+ DCHECK(!AllowHeapAllocation::IsAllowed()); |
int length = this->length(); |
StringShape shape(this); |
String* string = this; |
@@ -8489,7 +8489,7 @@ String::FlatContent String::GetFlatContent() { |
offset = slice->offset(); |
string = slice->parent(); |
shape = StringShape(string); |
- ASSERT(shape.representation_tag() != kConsStringTag && |
+ DCHECK(shape.representation_tag() != kConsStringTag && |
shape.representation_tag() != kSlicedStringTag); |
} |
if (shape.encoding_tag() == kOneByteStringTag) { |
@@ -8501,7 +8501,7 @@ String::FlatContent String::GetFlatContent() { |
} |
return FlatContent(start + offset, length); |
} else { |
- ASSERT(shape.encoding_tag() == kTwoByteStringTag); |
+ DCHECK(shape.encoding_tag() == kTwoByteStringTag); |
const uc16* start; |
if (shape.representation_tag() == kSeqStringTag) { |
start = SeqTwoByteString::cast(string)->GetChars(); |
@@ -8572,7 +8572,7 @@ SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls, |
const uc16* String::GetTwoByteData(unsigned start) { |
- ASSERT(!IsOneByteRepresentationUnderneath()); |
+ DCHECK(!IsOneByteRepresentationUnderneath()); |
switch (StringShape(this).representation_tag()) { |
case kSeqStringTag: |
return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start); |
@@ -8690,11 +8690,11 @@ FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input) |
void FlatStringReader::PostGarbageCollection() { |
if (str_ == NULL) return; |
Handle<String> str(str_); |
- ASSERT(str->IsFlat()); |
+ DCHECK(str->IsFlat()); |
DisallowHeapAllocation no_gc; |
// This does not actually prevent the vector from being relocated later. |
String::FlatContent content = str->GetFlatContent(); |
- ASSERT(content.IsFlat()); |
+ DCHECK(content.IsFlat()); |
is_ascii_ = content.IsAscii(); |
if (is_ascii_) { |
start_ = content.ToOneByteVector().start(); |
@@ -8705,26 +8705,26 @@ void FlatStringReader::PostGarbageCollection() { |
void ConsStringIteratorOp::Initialize(ConsString* cons_string, int offset) { |
- ASSERT(cons_string != NULL); |
+ DCHECK(cons_string != NULL); |
root_ = cons_string; |
consumed_ = offset; |
// Force stack blown condition to trigger restart. |
depth_ = 1; |
maximum_depth_ = kStackSize + depth_; |
- ASSERT(StackBlown()); |
+ DCHECK(StackBlown()); |
} |
String* ConsStringIteratorOp::Continue(int* offset_out) { |
- ASSERT(depth_ != 0); |
- ASSERT_EQ(0, *offset_out); |
+ DCHECK(depth_ != 0); |
+ DCHECK_EQ(0, *offset_out); |
bool blew_stack = StackBlown(); |
String* string = NULL; |
// Get the next leaf if there is one. |
if (!blew_stack) string = NextLeaf(&blew_stack); |
// Restart search from root. |
if (blew_stack) { |
- ASSERT(string == NULL); |
+ DCHECK(string == NULL); |
string = Search(offset_out); |
} |
// Ensure future calls return null immediately. |
@@ -8783,7 +8783,7 @@ String* ConsStringIteratorOp::Search(int* offset_out) { |
// Pop stack so next iteration is in correct place. |
Pop(); |
} |
- ASSERT(length != 0); |
+ DCHECK(length != 0); |
// Adjust return values and exit. |
consumed_ = offset + length; |
*offset_out = consumed - offset; |
@@ -8829,7 +8829,7 @@ String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) { |
if ((type & kStringRepresentationMask) != kConsStringTag) { |
AdjustMaximumDepth(); |
int length = string->length(); |
- ASSERT(length != 0); |
+ DCHECK(length != 0); |
consumed_ += length; |
return string; |
} |
@@ -8843,7 +8843,7 @@ String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) { |
uint16_t ConsString::ConsStringGet(int index) { |
- ASSERT(index >= 0 && index < this->length()); |
+ DCHECK(index >= 0 && index < this->length()); |
// Check for a flattened cons string |
if (second()->length() == 0) { |
@@ -8887,7 +8887,7 @@ void String::WriteToFlat(String* src, |
int from = f; |
int to = t; |
while (true) { |
- ASSERT(0 <= from && from <= to && to <= source->length()); |
+ DCHECK(0 <= from && from <= to && to <= source->length()); |
switch (StringShape(source).full_representation_tag()) { |
case kOneByteStringTag | kExternalStringTag: { |
CopyChars(sink, |
@@ -9004,7 +9004,7 @@ Handle<FixedArray> String::CalculateLineEnds(Handle<String> src, |
{ DisallowHeapAllocation no_allocation; // ensure vectors stay valid. |
// Dispatch on type of strings. |
String::FlatContent content = src->GetFlatContent(); |
- ASSERT(content.IsFlat()); |
+ DCHECK(content.IsFlat()); |
if (content.IsAscii()) { |
CalculateLineEndsImpl(isolate, |
&line_ends, |
@@ -9069,7 +9069,7 @@ template<typename Chars1, typename Chars2> |
class RawStringComparator : public AllStatic { |
public: |
static inline bool compare(const Chars1* a, const Chars2* b, int len) { |
- ASSERT(sizeof(Chars1) != sizeof(Chars2)); |
+ DCHECK(sizeof(Chars1) != sizeof(Chars2)); |
for (int i = 0; i < len; i++) { |
if (a[i] != b[i]) { |
return false; |
@@ -9127,7 +9127,7 @@ class StringComparator { |
} |
void Advance(int consumed) { |
- ASSERT(consumed <= length_); |
+ DCHECK(consumed <= length_); |
// Still in buffer. |
if (length_ != consumed) { |
if (is_one_byte_) { |
@@ -9141,8 +9141,8 @@ class StringComparator { |
// Advance state. |
int offset; |
String* next = op_->Next(&offset); |
- ASSERT_EQ(0, offset); |
- ASSERT(next != NULL); |
+ DCHECK_EQ(0, offset); |
+ DCHECK(next != NULL); |
String::VisitFlat(this, next); |
} |
@@ -9178,7 +9178,7 @@ class StringComparator { |
state_2_.Init(string_2); |
while (true) { |
int to_check = Min(state_1_.length_, state_2_.length_); |
- ASSERT(to_check > 0 && to_check <= length); |
+ DCHECK(to_check > 0 && to_check <= length); |
bool is_equal; |
if (state_1_.is_one_byte_) { |
if (state_2_.is_one_byte_) { |
@@ -9220,7 +9220,7 @@ bool String::SlowEquals(String* other) { |
// Fast check: if hash code is computed for both strings |
// a fast negative check can be performed. |
if (HasHashCode() && other->HasHashCode()) { |
-#ifdef ENABLE_SLOW_ASSERTS |
+#ifdef ENABLE_SLOW_DCHECKS |
if (FLAG_enable_slow_asserts) { |
if (Hash() != other->Hash()) { |
bool found_difference = false; |
@@ -9230,7 +9230,7 @@ bool String::SlowEquals(String* other) { |
break; |
} |
} |
- ASSERT(found_difference); |
+ DCHECK(found_difference); |
} |
} |
#endif |
@@ -9264,7 +9264,7 @@ bool String::SlowEquals(Handle<String> one, Handle<String> two) { |
// Fast check: if hash code is computed for both strings |
// a fast negative check can be performed. |
if (one->HasHashCode() && two->HasHashCode()) { |
-#ifdef ENABLE_SLOW_ASSERTS |
+#ifdef ENABLE_SLOW_DCHECKS |
if (FLAG_enable_slow_asserts) { |
if (one->Hash() != two->Hash()) { |
bool found_difference = false; |
@@ -9274,7 +9274,7 @@ bool String::SlowEquals(Handle<String> one, Handle<String> two) { |
break; |
} |
} |
- ASSERT(found_difference); |
+ DCHECK(found_difference); |
} |
} |
#endif |
@@ -9337,7 +9337,7 @@ bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) { |
for (i = 0; i < slen && remaining_in_str > 0; i++) { |
unsigned cursor = 0; |
uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor); |
- ASSERT(cursor > 0 && cursor <= remaining_in_str); |
+ DCHECK(cursor > 0 && cursor <= remaining_in_str); |
if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) { |
if (i > slen - 1) return false; |
if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false; |
@@ -9385,16 +9385,16 @@ bool String::IsTwoByteEqualTo(Vector<const uc16> str) { |
uint32_t String::ComputeAndSetHash() { |
// Should only be called if hash code has not yet been computed. |
- ASSERT(!HasHashCode()); |
+ DCHECK(!HasHashCode()); |
// Store the hash code in the object. |
uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed()); |
set_hash_field(field); |
// Check the hash code is there. |
- ASSERT(HasHashCode()); |
+ DCHECK(HasHashCode()); |
uint32_t result = field >> kHashShift; |
- ASSERT(result != 0); // Ensure that the hash value of 0 is never computed. |
+ DCHECK(result != 0); // Ensure that the hash value of 0 is never computed. |
return result; |
} |
@@ -9431,7 +9431,7 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) { |
old_size = SeqOneByteString::SizeFor(old_length); |
new_size = SeqOneByteString::SizeFor(new_length); |
} else { |
- ASSERT(string->IsSeqTwoByteString()); |
+ DCHECK(string->IsSeqTwoByteString()); |
old_size = SeqTwoByteString::SizeFor(old_length); |
new_size = SeqTwoByteString::SizeFor(new_length); |
} |
@@ -9439,8 +9439,8 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) { |
int delta = old_size - new_size; |
Address start_of_string = string->address(); |
- ASSERT_OBJECT_ALIGNED(start_of_string); |
- ASSERT_OBJECT_ALIGNED(start_of_string + new_size); |
+ DCHECK_OBJECT_ALIGNED(start_of_string); |
+ DCHECK_OBJECT_ALIGNED(start_of_string + new_size); |
Heap* heap = string->GetHeap(); |
NewSpace* newspace = heap->new_space(); |
@@ -9467,16 +9467,16 @@ Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) { |
uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) { |
// For array indexes mix the length into the hash as an array index could |
// be zero. |
- ASSERT(length > 0); |
- ASSERT(length <= String::kMaxArrayIndexSize); |
- ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < |
+ DCHECK(length > 0); |
+ DCHECK(length <= String::kMaxArrayIndexSize); |
+ DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) < |
(1 << String::kArrayIndexValueBits)); |
value <<= String::ArrayIndexValueBits::kShift; |
value |= length << String::ArrayIndexLengthBits::kShift; |
- ASSERT((value & String::kIsNotArrayIndexMask) == 0); |
- ASSERT((length > String::kMaxCachedArrayIndexLength) || |
+ DCHECK((value & String::kIsNotArrayIndexMask) == 0); |
+ DCHECK((length > String::kMaxCachedArrayIndexLength) || |
(value & String::kContainsCachedArrayIndexMask) == 0); |
return value; |
} |
@@ -9501,7 +9501,7 @@ uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars, |
int vector_length = chars.length(); |
// Handle some edge cases |
if (vector_length <= 1) { |
- ASSERT(vector_length == 0 || |
+ DCHECK(vector_length == 0 || |
static_cast<uint8_t>(chars.start()[0]) <= |
unibrow::Utf8::kMaxOneByteChar); |
*utf16_length_out = vector_length; |
@@ -9514,11 +9514,11 @@ uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars, |
const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start()); |
int utf16_length = 0; |
bool is_index = true; |
- ASSERT(hasher.is_array_index_); |
+ DCHECK(hasher.is_array_index_); |
while (remaining > 0) { |
unsigned consumed = 0; |
uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed); |
- ASSERT(consumed > 0 && consumed <= remaining); |
+ DCHECK(consumed > 0 && consumed <= remaining); |
stream += consumed; |
remaining -= consumed; |
bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode; |
@@ -9645,10 +9645,10 @@ void Map::ClearNonLiveTransitions(Heap* heap) { |
if (descriptors_owner_died) { |
if (number_of_own_descriptors > 0) { |
TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors); |
- ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors); |
+ DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors); |
set_owns_descriptors(true); |
} else { |
- ASSERT(descriptors == GetHeap()->empty_descriptor_array()); |
+ DCHECK(descriptors == GetHeap()->empty_descriptor_array()); |
} |
} |
@@ -9661,7 +9661,7 @@ void Map::ClearNonLiveTransitions(Heap* heap) { |
RightTrimFixedArray<Heap::FROM_GC>(heap, t, t->IsSimpleTransition() |
? trim : trim * TransitionArray::kTransitionSize); |
} |
- ASSERT(HasTransitionArray()); |
+ DCHECK(HasTransitionArray()); |
} |
@@ -9761,11 +9761,11 @@ void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) { |
void JSFunction::MarkForOptimization() { |
- ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); |
- ASSERT(!IsOptimized()); |
- ASSERT(shared()->allows_lazy_compilation() || |
+ DCHECK(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); |
+ DCHECK(!IsOptimized()); |
+ DCHECK(shared()->allows_lazy_compilation() || |
code()->optimizable()); |
- ASSERT(!shared()->is_generator()); |
+ DCHECK(!shared()->is_generator()); |
set_code_no_write_barrier( |
GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized)); |
// No write barrier required, since the builtin is part of the root set. |
@@ -9773,11 +9773,11 @@ void JSFunction::MarkForOptimization() { |
void JSFunction::MarkForConcurrentOptimization() { |
- ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); |
- ASSERT(!IsOptimized()); |
- ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); |
- ASSERT(!shared()->is_generator()); |
- ASSERT(GetIsolate()->concurrent_recompilation_enabled()); |
+ DCHECK(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); |
+ DCHECK(!IsOptimized()); |
+ DCHECK(shared()->allows_lazy_compilation() || code()->optimizable()); |
+ DCHECK(!shared()->is_generator()); |
+ DCHECK(GetIsolate()->concurrent_recompilation_enabled()); |
if (FLAG_trace_concurrent_recompilation) { |
PrintF(" ** Marking "); |
PrintName(); |
@@ -9792,10 +9792,10 @@ void JSFunction::MarkForConcurrentOptimization() { |
void JSFunction::MarkInOptimizationQueue() { |
// We can only arrive here via the concurrent-recompilation builtin. If |
// break points were set, the code would point to the lazy-compile builtin. |
- ASSERT(!GetIsolate()->DebuggerHasBreakPoints()); |
- ASSERT(IsMarkedForConcurrentOptimization() && !IsOptimized()); |
- ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); |
- ASSERT(GetIsolate()->concurrent_recompilation_enabled()); |
+ DCHECK(!GetIsolate()->DebuggerHasBreakPoints()); |
+ DCHECK(IsMarkedForConcurrentOptimization() && !IsOptimized()); |
+ DCHECK(shared()->allows_lazy_compilation() || code()->optimizable()); |
+ DCHECK(GetIsolate()->concurrent_recompilation_enabled()); |
if (FLAG_trace_concurrent_recompilation) { |
PrintF(" ** Queueing "); |
PrintName(); |
@@ -9814,22 +9814,22 @@ void SharedFunctionInfo::AddToOptimizedCodeMap( |
Handle<FixedArray> literals, |
BailoutId osr_ast_id) { |
Isolate* isolate = shared->GetIsolate(); |
- ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
- ASSERT(native_context->IsNativeContext()); |
+ DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION); |
+ DCHECK(native_context->IsNativeContext()); |
STATIC_ASSERT(kEntryLength == 4); |
Handle<FixedArray> new_code_map; |
Handle<Object> value(shared->optimized_code_map(), isolate); |
int old_length; |
if (value->IsSmi()) { |
// No optimized code map. |
- ASSERT_EQ(0, Smi::cast(*value)->value()); |
+ DCHECK_EQ(0, Smi::cast(*value)->value()); |
// Create 3 entries per context {context, code, literals}. |
new_code_map = isolate->factory()->NewFixedArray(kInitialLength); |
old_length = kEntriesStart; |
} else { |
// Copy old map and append one new entry. |
Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value); |
- ASSERT_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id)); |
+ DCHECK_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id)); |
old_length = old_code_map->length(); |
new_code_map = FixedArray::CopySize( |
old_code_map, old_length + kEntryLength); |
@@ -9847,12 +9847,12 @@ void SharedFunctionInfo::AddToOptimizedCodeMap( |
#ifdef DEBUG |
for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
- ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext()); |
- ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode()); |
- ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() == |
+ DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext()); |
+ DCHECK(new_code_map->get(i + kCachedCodeOffset)->IsCode()); |
+ DCHECK(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() == |
Code::OPTIMIZED_FUNCTION); |
- ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); |
- ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
+ DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray()); |
+ DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi()); |
} |
#endif |
shared->set_optimized_code_map(*new_code_map); |
@@ -9860,11 +9860,11 @@ void SharedFunctionInfo::AddToOptimizedCodeMap( |
FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) { |
- ASSERT(index > kEntriesStart); |
+ DCHECK(index > kEntriesStart); |
FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
if (!bound()) { |
FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); |
- ASSERT_NE(NULL, cached_literals); |
+ DCHECK_NE(NULL, cached_literals); |
return cached_literals; |
} |
return NULL; |
@@ -9872,10 +9872,10 @@ FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) { |
Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) { |
- ASSERT(index > kEntriesStart); |
+ DCHECK(index > kEntriesStart); |
FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
Code* code = Code::cast(code_map->get(index)); |
- ASSERT_NE(NULL, code); |
+ DCHECK_NE(NULL, code); |
return code; |
} |
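The index DCHECKs above rely on the optimized code map layout spelled out in the earlier hunks: a FixedArray of fixed-width entries {context, code, literals, osr ast id}, kEntryLength == 4, beginning at kEntriesStart. A sketch of scanning that layout for a context/OSR match; the constant names and BailoutId::ToInt() are taken from the surrounding code, and the loop is illustrative rather than the exact SearchOptimizedCodeMap body. |
// Walk kEntryLength-wide entries; a hit returns the index of the entry's |
// cached-code slot, which is what GetCodeFromOptimizedCodeMap reads. |
for (int i = kEntriesStart; i < code_map->length(); i += kEntryLength) { |
  if (code_map->get(i + kContextOffset) == native_context && |
      code_map->get(i + kOsrAstIdOffset) == Smi::FromInt(osr_ast_id.ToInt())) { |
    return i + kCachedCodeOffset; |
  } |
} |
return -1;  // No cached code for this context / OSR entry point. |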
@@ -9890,7 +9890,7 @@ void SharedFunctionInfo::ClearOptimizedCodeMap() { |
flusher->EvictOptimizedCodeMap(this); |
} |
- ASSERT(code_map->get(kNextMapIndex)->IsUndefined()); |
+ DCHECK(code_map->get(kNextMapIndex)->IsUndefined()); |
set_optimized_code_map(Smi::FromInt(0)); |
} |
@@ -9904,7 +9904,7 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
int dst = kEntriesStart; |
int length = code_map->length(); |
for (int src = kEntriesStart; src < length; src += kEntryLength) { |
- ASSERT(code_map->get(src)->IsNativeContext()); |
+ DCHECK(code_map->get(src)->IsNativeContext()); |
if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) { |
// Evict the src entry by not copying it to the dst entry. |
if (FLAG_trace_opt) { |
@@ -9942,8 +9942,8 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { |
FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
- ASSERT(shrink_by % kEntryLength == 0); |
- ASSERT(shrink_by <= code_map->length() - kEntriesStart); |
+ DCHECK(shrink_by % kEntryLength == 0); |
+ DCHECK(shrink_by <= code_map->length() - kEntriesStart); |
// Always trim even when array is cleared because of heap verifier. |
RightTrimFixedArray<Heap::FROM_GC>(GetHeap(), code_map, shrink_by); |
if (code_map->length() == kEntriesStart) { |
@@ -9973,7 +9973,7 @@ Handle<Object> CacheInitialJSArrayMaps( |
Handle<Map> current_map = initial_map; |
ElementsKind kind = current_map->elements_kind(); |
- ASSERT(kind == GetInitialFastElementsKind()); |
+ DCHECK(kind == GetInitialFastElementsKind()); |
maps->set(kind, *current_map); |
for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1; |
i < kFastElementsKindCount; ++i) { |
@@ -9981,7 +9981,7 @@ Handle<Object> CacheInitialJSArrayMaps( |
ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i); |
if (current_map->HasElementsTransition()) { |
new_map = handle(current_map->elements_transition_map()); |
- ASSERT(new_map->elements_kind() == next_kind); |
+ DCHECK(new_map->elements_kind() == next_kind); |
} else { |
new_map = Map::CopyAsElementsKind( |
current_map, next_kind, INSERT_TRANSITION); |
@@ -9998,7 +9998,7 @@ void JSFunction::SetInstancePrototype(Handle<JSFunction> function, |
Handle<Object> value) { |
Isolate* isolate = function->GetIsolate(); |
- ASSERT(value->IsJSReceiver()); |
+ DCHECK(value->IsJSReceiver()); |
// First some logic for the map of the prototype to make sure it is in fast |
// mode. |
@@ -10046,7 +10046,7 @@ void JSFunction::SetInstancePrototype(Handle<JSFunction> function, |
void JSFunction::SetPrototype(Handle<JSFunction> function, |
Handle<Object> value) { |
- ASSERT(function->should_have_prototype()); |
+ DCHECK(function->should_have_prototype()); |
Handle<Object> construct_prototype = value; |
// If the value is not a JSReceiver, store the value in the map's |
@@ -10135,7 +10135,7 @@ void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) { |
map->set_inobject_properties(in_object_properties); |
map->set_unused_property_fields(in_object_properties); |
map->set_prototype(*prototype); |
- ASSERT(map->has_fast_object_elements()); |
+ DCHECK(map->has_fast_object_elements()); |
// Finally link initial map and constructor function. |
function->set_initial_map(*map); |
@@ -10219,10 +10219,10 @@ void Script::InitLineEnds(Handle<Script> script) { |
Isolate* isolate = script->GetIsolate(); |
if (!script->source()->IsString()) { |
- ASSERT(script->source()->IsUndefined()); |
+ DCHECK(script->source()->IsUndefined()); |
Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0); |
script->set_line_ends(*empty); |
- ASSERT(script->line_ends()->IsFixedArray()); |
+ DCHECK(script->line_ends()->IsFixedArray()); |
return; |
} |
@@ -10235,7 +10235,7 @@ void Script::InitLineEnds(Handle<Script> script) { |
} |
script->set_line_ends(*array); |
- ASSERT(script->line_ends()->IsFixedArray()); |
+ DCHECK(script->line_ends()->IsFixedArray()); |
} |
@@ -10255,7 +10255,7 @@ int Script::GetColumnNumber(Handle<Script> script, int code_pos) { |
int Script::GetLineNumberWithArray(int code_pos) { |
DisallowHeapAllocation no_allocation; |
- ASSERT(line_ends()->IsFixedArray()); |
+ DCHECK(line_ends()->IsFixedArray()); |
FixedArray* line_ends_array = FixedArray::cast(line_ends()); |
int line_ends_len = line_ends_array->length(); |
if (line_ends_len == 0) return -1; |
@@ -10309,7 +10309,7 @@ Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) { |
Handle<JSObject> script_wrapper = Script::GetWrapper(script); |
Handle<Object> property = Object::GetProperty( |
script_wrapper, name_or_source_url_key).ToHandleChecked(); |
- ASSERT(property->IsJSFunction()); |
+ DCHECK(property->IsJSFunction()); |
Handle<JSFunction> method = Handle<JSFunction>::cast(property); |
Handle<Object> result; |
// Do not check against pending exception, since this function may be called |
@@ -10338,7 +10338,7 @@ static void ClearWrapperCacheWeakCallback( |
void Script::ClearWrapperCache() { |
Foreign* foreign = wrapper(); |
Object** location = reinterpret_cast<Object**>(foreign->foreign_address()); |
- ASSERT_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location)); |
+ DCHECK_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location)); |
foreign->set_foreign_address(0); |
GlobalHandles::Destroy(location); |
GetIsolate()->counters()->script_wrappers()->Decrement(); |
@@ -10473,7 +10473,7 @@ static bool IsCodeEquivalent(Code* code, Code* recompiled) { |
void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) { |
- ASSERT(!has_deoptimization_support()); |
+ DCHECK(!has_deoptimization_support()); |
DisallowHeapAllocation no_allocation; |
Code* code = this->code(); |
if (IsCodeEquivalent(code, recompiled)) { |
@@ -10487,7 +10487,7 @@ void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) { |
// effectively resetting all IC state. |
ReplaceCode(recompiled); |
} |
- ASSERT(has_deoptimization_support()); |
+ DCHECK(has_deoptimization_support()); |
} |
@@ -10503,7 +10503,7 @@ void SharedFunctionInfo::DisableOptimization(BailoutReason reason) { |
set_bailout_reason(reason); |
// Code should be the lazy compilation stub or else unoptimized. If the |
// latter, disable optimization for the code too. |
- ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN); |
+ DCHECK(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN); |
if (code()->kind() == Code::FUNCTION) { |
code()->set_optimizable(false); |
} |
@@ -10517,18 +10517,18 @@ void SharedFunctionInfo::DisableOptimization(BailoutReason reason) { |
bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) { |
- ASSERT(!id.IsNone()); |
+ DCHECK(!id.IsNone()); |
Code* unoptimized = code(); |
DeoptimizationOutputData* data = |
DeoptimizationOutputData::cast(unoptimized->deoptimization_data()); |
unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this); |
USE(ignore); |
- return true; // Return true if there was no ASSERT. |
+ return true; // Return true if there was no DCHECK. |
} |
void JSFunction::StartInobjectSlackTracking() { |
- ASSERT(has_initial_map() && !IsInobjectSlackTrackingInProgress()); |
+ DCHECK(has_initial_map() && !IsInobjectSlackTrackingInProgress()); |
if (!FLAG_clever_optimizations) return; |
Map* map = initial_map(); |
@@ -10587,10 +10587,10 @@ static void ShrinkInstanceSize(Map* map, void* data) { |
void JSFunction::CompleteInobjectSlackTracking() { |
- ASSERT(has_initial_map()); |
+ DCHECK(has_initial_map()); |
Map* map = initial_map(); |
- ASSERT(map->done_inobject_slack_tracking()); |
+ DCHECK(map->done_inobject_slack_tracking()); |
map->set_construction_count(kNoSlackTracking); |
int slack = map->unused_property_fields(); |
@@ -10605,7 +10605,7 @@ void JSFunction::CompleteInobjectSlackTracking() { |
int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context, |
BailoutId osr_ast_id) { |
DisallowHeapAllocation no_gc; |
- ASSERT(native_context->IsNativeContext()); |
+ DCHECK(native_context->IsNativeContext()); |
if (!FLAG_cache_optimized_code) return -1; |
Object* value = optimized_code_map(); |
if (!value->IsSmi()) { |
@@ -10645,7 +10645,7 @@ const char* const VisitorSynchronization::kTagNames[ |
void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) { |
- ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
+ DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode())); |
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
Object* old_target = target; |
VisitPointer(&target); |
@@ -10654,7 +10654,7 @@ void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) { |
void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) { |
- ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode())); |
+ DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode())); |
Object* stub = rinfo->code_age_stub(); |
if (stub) { |
VisitPointer(&stub); |
@@ -10673,7 +10673,7 @@ void ObjectVisitor::VisitCodeEntry(Address entry_address) { |
void ObjectVisitor::VisitCell(RelocInfo* rinfo) { |
- ASSERT(rinfo->rmode() == RelocInfo::CELL); |
+ DCHECK(rinfo->rmode() == RelocInfo::CELL); |
Object* cell = rinfo->target_cell(); |
Object* old_cell = cell; |
VisitPointer(&cell); |
@@ -10684,7 +10684,7 @@ void ObjectVisitor::VisitCell(RelocInfo* rinfo) { |
void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) { |
- ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && |
+ DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) && |
rinfo->IsPatchedReturnSequence()) || |
(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
rinfo->IsPatchedDebugBreakSlotSequence())); |
@@ -10696,7 +10696,7 @@ void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) { |
void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) { |
- ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
+ DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); |
Object* p = rinfo->target_object(); |
VisitPointer(&p); |
} |
@@ -10739,7 +10739,7 @@ void Code::Relocate(intptr_t delta) { |
void Code::CopyFrom(const CodeDesc& desc) { |
- ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT); |
+ DCHECK(Marking::Color(this) == Marking::WHITE_OBJECT); |
// copy code |
CopyBytes(instruction_start(), desc.buffer, |
@@ -10855,7 +10855,7 @@ SafepointEntry Code::GetSafepointEntry(Address pc) { |
Object* Code::FindNthObject(int n, Map* match_map) { |
- ASSERT(is_inline_cache_stub()); |
+ DCHECK(is_inline_cache_stub()); |
DisallowHeapAllocation no_allocation; |
int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
for (RelocIterator it(this, mask); !it.done(); it.next()) { |
@@ -10884,7 +10884,7 @@ Map* Code::FindFirstMap() { |
void Code::FindAndReplace(const FindAndReplacePattern& pattern) { |
- ASSERT(is_inline_cache_stub() || is_handler()); |
+ DCHECK(is_inline_cache_stub() || is_handler()); |
DisallowHeapAllocation no_allocation; |
int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32); |
@@ -10905,7 +10905,7 @@ void Code::FindAndReplace(const FindAndReplacePattern& pattern) { |
void Code::FindAllMaps(MapHandleList* maps) { |
- ASSERT(is_inline_cache_stub()); |
+ DCHECK(is_inline_cache_stub()); |
DisallowHeapAllocation no_allocation; |
int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
for (RelocIterator it(this, mask); !it.done(); it.next()) { |
@@ -10917,7 +10917,7 @@ void Code::FindAllMaps(MapHandleList* maps) { |
Code* Code::FindFirstHandler() { |
- ASSERT(is_inline_cache_stub()); |
+ DCHECK(is_inline_cache_stub()); |
DisallowHeapAllocation no_allocation; |
int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); |
for (RelocIterator it(this, mask); !it.done(); it.next()) { |
@@ -10930,7 +10930,7 @@ Code* Code::FindFirstHandler() { |
bool Code::FindHandlers(CodeHandleList* code_list, int length) { |
- ASSERT(is_inline_cache_stub()); |
+ DCHECK(is_inline_cache_stub()); |
DisallowHeapAllocation no_allocation; |
int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); |
int i = 0; |
@@ -10949,7 +10949,7 @@ bool Code::FindHandlers(CodeHandleList* code_list, int length) { |
MaybeHandle<Code> Code::FindHandlerForMap(Map* map) { |
- ASSERT(is_inline_cache_stub()); |
+ DCHECK(is_inline_cache_stub()); |
int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
bool return_next = false; |
@@ -10960,7 +10960,7 @@ MaybeHandle<Code> Code::FindHandlerForMap(Map* map) { |
if (object == map) return_next = true; |
} else if (return_next) { |
Code* code = Code::GetCodeFromTargetAddress(info->target_address()); |
- ASSERT(code->kind() == Code::HANDLER); |
+ DCHECK(code->kind() == Code::HANDLER); |
return handle(code); |
} |
} |
@@ -10969,7 +10969,7 @@ MaybeHandle<Code> Code::FindHandlerForMap(Map* map) { |
Name* Code::FindFirstName() { |
- ASSERT(is_inline_cache_stub()); |
+ DCHECK(is_inline_cache_stub()); |
DisallowHeapAllocation no_allocation; |
int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
for (RelocIterator it(this, mask); !it.done(); it.next()) { |
@@ -11035,7 +11035,7 @@ void SharedFunctionInfo::ClearTypeFeedbackInfo() { |
BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) { |
DisallowHeapAllocation no_gc; |
- ASSERT(kind() == FUNCTION); |
+ DCHECK(kind() == FUNCTION); |
BackEdgeTable back_edges(this, &no_gc); |
for (uint32_t i = 0; i < back_edges.length(); i++) { |
if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i); |
@@ -11046,7 +11046,7 @@ BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) { |
uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) { |
DisallowHeapAllocation no_gc; |
- ASSERT(kind() == FUNCTION); |
+ DCHECK(kind() == FUNCTION); |
BackEdgeTable back_edges(this, &no_gc); |
for (uint32_t i = 0; i < back_edges.length(); i++) { |
if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i); |
@@ -11178,11 +11178,11 @@ Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) { |
CODE_AGE_LIST(HANDLE_CODE_AGE) |
#undef HANDLE_CODE_AGE |
case kNotExecutedCodeAge: { |
- ASSERT(parity == NO_MARKING_PARITY); |
+ DCHECK(parity == NO_MARKING_PARITY); |
return *builtins->MarkCodeAsExecutedOnce(); |
} |
case kExecutedOnceCodeAge: { |
- ASSERT(parity == NO_MARKING_PARITY); |
+ DCHECK(parity == NO_MARKING_PARITY); |
return *builtins->MarkCodeAsExecutedTwice(); |
} |
default: |
@@ -11271,7 +11271,7 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint( |
TranslationIterator iterator(TranslationByteArray(), translation_index); |
Translation::Opcode opcode = |
static_cast<Translation::Opcode>(iterator.Next()); |
- ASSERT(Translation::BEGIN == opcode); |
+ DCHECK(Translation::BEGIN == opcode); |
int frame_count = iterator.Next(); |
int jsframe_count = iterator.Next(); |
os << " " << Translation::StringFor(opcode) |
@@ -11490,7 +11490,7 @@ void Code::Disassemble(const char* name, OStream& os) { // NOLINT |
os << "type = " << StubType2String(type()) << "\n"; |
} |
if (is_compare_ic_stub()) { |
- ASSERT(CodeStub::GetMajorKey(this) == CodeStub::CompareIC); |
+ DCHECK(CodeStub::GetMajorKey(this) == CodeStub::CompareIC); |
CompareIC::State left_state, right_state, handler_state; |
Token::Value op; |
ICCompareStub::DecodeKey(stub_key(), &left_state, &right_state, |
@@ -11594,7 +11594,7 @@ Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength( |
int length, |
SetFastElementsCapacitySmiMode smi_mode) { |
// We should never end in here with a pixel or external array. |
- ASSERT(!object->HasExternalArrayElements()); |
+ DCHECK(!object->HasExternalArrayElements()); |
// Allocate a new fast elements backing store. |
Handle<FixedArray> new_elements = |
@@ -11654,7 +11654,7 @@ void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object, |
int capacity, |
int length) { |
// We should never end in here with a pixel or external array. |
- ASSERT(!object->HasExternalArrayElements()); |
+ DCHECK(!object->HasExternalArrayElements()); |
Handle<FixedArrayBase> elems = |
object->GetIsolate()->factory()->NewFixedDoubleArray(capacity); |
@@ -11690,7 +11690,7 @@ void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object, |
// static |
void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) { |
- ASSERT(capacity >= 0); |
+ DCHECK(capacity >= 0); |
array->GetIsolate()->factory()->NewJSArrayStorage( |
array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); |
} |
@@ -11712,8 +11712,8 @@ static bool GetOldValue(Isolate* isolate, |
List<uint32_t>* indices) { |
Maybe<PropertyAttributes> maybe = |
JSReceiver::GetOwnElementAttribute(object, index); |
- ASSERT(maybe.has_value); |
- ASSERT(maybe.value != ABSENT); |
+ DCHECK(maybe.has_value); |
+ DCHECK(maybe.value != ABSENT); |
if (maybe.value == DONT_DELETE) return false; |
Handle<Object> value; |
if (!JSObject::GetOwnElementAccessorPair(object, index).is_null()) { |
@@ -11788,7 +11788,7 @@ MaybeHandle<Object> JSArray::SetElementsLength( |
} |
// We should never end in here with a pixel or external array. |
- ASSERT(array->AllowsSetElementsLength()); |
+ DCHECK(array->AllowsSetElementsLength()); |
if (!array->map()->is_observed()) { |
return array->GetElementsAccessor()->SetLength(array, new_length_handle); |
} |
@@ -11894,8 +11894,8 @@ Handle<Map> Map::GetPrototypeTransition(Handle<Map> map, |
Handle<Map> Map::PutPrototypeTransition(Handle<Map> map, |
Handle<Object> prototype, |
Handle<Map> target_map) { |
- ASSERT(target_map->IsMap()); |
- ASSERT(HeapObject::cast(*prototype)->map()->IsMap()); |
+ DCHECK(target_map->IsMap()); |
+ DCHECK(HeapObject::cast(*prototype)->map()->IsMap()); |
// Don't cache prototype transition if this map is shared. |
if (map->is_shared() || !FLAG_cache_prototype_transitions) return map; |
@@ -11973,7 +11973,7 @@ void Map::AddDependentCode(Handle<Map> map, |
// static |
void Map::AddDependentIC(Handle<Map> map, |
Handle<Code> stub) { |
- ASSERT(stub->next_code_link()->IsUndefined()); |
+ DCHECK(stub->next_code_link()->IsUndefined()); |
int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup); |
if (n == 0) { |
// Slow path: insert the head of the list with possible heap allocation. |
@@ -11981,7 +11981,7 @@ void Map::AddDependentIC(Handle<Map> map, |
} else { |
// Fast path: link the stub to the existing head of the list without any |
// heap allocation. |
- ASSERT(n == 1); |
+ DCHECK(n == 1); |
map->dependent_code()->AddToDependentICList(stub); |
} |
} |
@@ -12072,7 +12072,7 @@ void DependentCode::UpdateToFinishedCode(DependencyGroup group, |
#ifdef DEBUG |
for (int i = start; i < end; i++) { |
- ASSERT(is_code_at(i) || compilation_info_at(i) != info); |
+ DCHECK(is_code_at(i) || compilation_info_at(i) != info); |
} |
#endif |
} |
@@ -12099,18 +12099,18 @@ void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group, |
// Use the last of each group to fill the gap in the previous group. |
for (int i = group; i < kGroupCount; i++) { |
int last_of_group = starts.at(i + 1) - 1; |
- ASSERT(last_of_group >= gap); |
+ DCHECK(last_of_group >= gap); |
if (last_of_group == gap) continue; |
copy(last_of_group, gap); |
gap = last_of_group; |
} |
- ASSERT(gap == starts.number_of_entries() - 1); |
+ DCHECK(gap == starts.number_of_entries() - 1); |
clear_at(gap); // Clear last gap. |
set_number_of_entries(group, end - start - 1); |
#ifdef DEBUG |
for (int i = start; i < end - 1; i++) { |
- ASSERT(is_code_at(i) || compilation_info_at(i) != info); |
+ DCHECK(is_code_at(i) || compilation_info_at(i) != info); |
} |
#endif |
} |
@@ -12180,7 +12180,7 @@ bool DependentCode::MarkCodeForDeoptimization( |
void DependentCode::DeoptimizeDependentCodeGroup( |
Isolate* isolate, |
DependentCode::DependencyGroup group) { |
- ASSERT(AllowCodeDependencyChange::IsAllowed()); |
+ DCHECK(AllowCodeDependencyChange::IsAllowed()); |
DisallowHeapAllocation no_allocation_scope; |
bool marked = MarkCodeForDeoptimization(isolate, group); |
@@ -12287,7 +12287,7 @@ MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object, |
} |
Handle<Map> new_map = Map::TransitionToPrototype(map, value); |
- ASSERT(new_map->prototype() == *value); |
+ DCHECK(new_map->prototype() == *value); |
JSObject::MigrateToMap(real_receiver, new_map); |
if (!dictionary_elements_in_chain && |
@@ -12299,7 +12299,7 @@ MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object, |
} |
heap->ClearInstanceofCache(); |
- ASSERT(size == object->Size()); |
+ DCHECK(size == object->Size()); |
return value; |
} |
@@ -12323,7 +12323,7 @@ MaybeHandle<AccessorPair> JSObject::GetOwnElementAccessorPair( |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(object->GetIsolate(), object); |
if (iter.IsAtEnd()) return MaybeHandle<AccessorPair>(); |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return GetOwnElementAccessorPair( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index); |
} |
@@ -12377,7 +12377,7 @@ MaybeHandle<Object> JSObject::GetElementWithCallback( |
uint32_t index, |
Handle<Object> holder) { |
Isolate* isolate = object->GetIsolate(); |
- ASSERT(!structure->IsForeign()); |
+ DCHECK(!structure->IsForeign()); |
// api style callbacks. |
if (structure->IsExecutableAccessorInfo()) { |
Handle<ExecutableAccessorInfo> data = |
@@ -12434,8 +12434,8 @@ MaybeHandle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object, |
// We should never get here to initialize a const with the hole |
// value since a const declaration would conflict with the setter. |
- ASSERT(!value->IsTheHole()); |
- ASSERT(!structure->IsForeign()); |
+ DCHECK(!value->IsTheHole()); |
+ DCHECK(!structure->IsForeign()); |
if (structure->IsExecutableAccessorInfo()) { |
// api style callbacks |
Handle<ExecutableAccessorInfo> data = |
@@ -12512,7 +12512,7 @@ MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object, |
Handle<Object> value, |
StrictMode strict_mode, |
bool check_prototype) { |
- ASSERT(object->HasFastSmiOrObjectElements() || |
+ DCHECK(object->HasFastSmiOrObjectElements() || |
object->HasFastArgumentsElements()); |
Isolate* isolate = object->GetIsolate(); |
@@ -12575,7 +12575,7 @@ MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object, |
bool convert_to_slow = true; |
if ((index - capacity) < kMaxGap) { |
new_capacity = NewElementsCapacity(index + 1); |
- ASSERT(new_capacity > index); |
+ DCHECK(new_capacity > index); |
if (!object->ShouldConvertToSlowElements(new_capacity)) { |
convert_to_slow = false; |
} |
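
The SetFastElement hunk above decides whether an out-of-bounds store may stay on the fast, contiguous backing store: only if the hole it creates is smaller than kMaxGap and the grown store would not be too sparse. A standalone sketch of that policy with assumed constants and a simplified growth formula (V8's NewElementsCapacity and ShouldConvertToSlowElements are more involved):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Assumed policy constants and growth formula for this sketch only.
    const uint32_t kMaxGap = 1024;  // Largest hole we are willing to create.

    uint32_t NewCapacity(uint32_t required) {
      return required + (required >> 1);  // Grow by roughly 50%.
    }

    bool ShouldGoSlow(uint32_t new_capacity, uint32_t used_elements) {
      // If the grown store would be mostly holes, prefer dictionary elements.
      return new_capacity > 2 * (used_elements + 1);
    }

    // Returns true if the store stayed "fast" (a contiguous vector); false if
    // the caller should switch this object to dictionary-backed elements.
    bool StoreFastElement(std::vector<int>* backing, uint32_t index, int value,
                          uint32_t used_elements) {
      uint32_t capacity = static_cast<uint32_t>(backing->size());
      if (index >= capacity) {
        if (index - capacity >= kMaxGap) return false;  // Hole too large.
        uint32_t new_capacity = NewCapacity(index + 1);
        if (ShouldGoSlow(new_capacity, used_elements)) return false;
        backing->resize(new_capacity, 0);  // 0 stands in for "the hole".
      }
      (*backing)[index] = value;
      return true;
    }

    int main() {
      std::vector<int> elements(20, 1);  // 20 densely used slots.
      bool fast = StoreFastElement(&elements, 25, 7, /*used_elements=*/20);
      std::printf("index 25 stayed fast: %d (capacity %zu)\n",
                  fast, elements.size());
      fast = StoreFastElement(&elements, 5000, 7, /*used_elements=*/21);
      std::printf("index 5000 stayed fast: %d\n", fast);
      return 0;
    }
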
@@ -12609,7 +12609,7 @@ MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object, |
UpdateAllocationSite(object, kind); |
Handle<Map> new_map = GetElementsTransitionMap(object, kind); |
JSObject::MigrateToMap(object, new_map); |
- ASSERT(IsFastObjectElementsKind(object->GetElementsKind())); |
+ DCHECK(IsFastObjectElementsKind(object->GetElementsKind())); |
} |
// Increase backing store capacity if that's been decided previously. |
if (new_capacity != capacity) { |
@@ -12626,7 +12626,7 @@ MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object, |
} |
// Finally, set the new element and length. |
- ASSERT(object->elements()->IsFixedArray()); |
+ DCHECK(object->elements()->IsFixedArray()); |
backing_store->set(index, *value); |
if (must_update_array_length) { |
Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length)); |
@@ -12643,7 +12643,7 @@ MaybeHandle<Object> JSObject::SetDictionaryElement( |
StrictMode strict_mode, |
bool check_prototype, |
SetPropertyMode set_mode) { |
- ASSERT(object->HasDictionaryElements() || |
+ DCHECK(object->HasDictionaryElements() || |
object->HasDictionaryArgumentsElements()); |
Isolate* isolate = object->GetIsolate(); |
@@ -12689,7 +12689,7 @@ MaybeHandle<Object> JSObject::SetDictionaryElement( |
Handle<AliasedArgumentsEntry>::cast(element); |
Handle<Context> context(Context::cast(elements->get(0))); |
int context_index = entry->aliased_context_slot(); |
- ASSERT(!context->get(context_index)->IsTheHole()); |
+ DCHECK(!context->get(context_index)->IsTheHole()); |
context->set(context_index, *value); |
// For elements that are still writable we keep slow aliasing. |
if (!details.IsReadOnly()) value = element; |
@@ -12780,7 +12780,7 @@ MaybeHandle<Object> JSObject::SetFastDoubleElement( |
Handle<Object> value, |
StrictMode strict_mode, |
bool check_prototype) { |
- ASSERT(object->HasFastDoubleElements()); |
+ DCHECK(object->HasFastDoubleElements()); |
Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements())); |
uint32_t elms_length = static_cast<uint32_t>(base_elms->length()); |
@@ -12853,7 +12853,7 @@ MaybeHandle<Object> JSObject::SetFastDoubleElement( |
// Try allocating extra space. |
int new_capacity = NewElementsCapacity(index+1); |
if (!object->ShouldConvertToSlowElements(new_capacity)) { |
- ASSERT(static_cast<uint32_t>(new_capacity) > index); |
+ DCHECK(static_cast<uint32_t>(new_capacity) > index); |
SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1); |
FixedDoubleArray::cast(object->elements())->set(index, double_value); |
JSObject::ValidateElements(object); |
@@ -12862,13 +12862,13 @@ MaybeHandle<Object> JSObject::SetFastDoubleElement( |
} |
// Otherwise default to slow case. |
- ASSERT(object->HasFastDoubleElements()); |
- ASSERT(object->map()->has_fast_double_elements()); |
- ASSERT(object->elements()->IsFixedDoubleArray() || |
+ DCHECK(object->HasFastDoubleElements()); |
+ DCHECK(object->map()->has_fast_double_elements()); |
+ DCHECK(object->elements()->IsFixedDoubleArray() || |
object->elements()->length() == 0); |
NormalizeElements(object); |
- ASSERT(object->HasDictionaryElements()); |
+ DCHECK(object->HasDictionaryElements()); |
return SetElement(object, index, value, NONE, strict_mode, check_prototype); |
} |
@@ -12891,7 +12891,7 @@ MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object, |
uint32_t index, |
Handle<Object> value, |
StrictMode strict_mode) { |
- ASSERT(!object->HasExternalArrayElements()); |
+ DCHECK(!object->HasExternalArrayElements()); |
return JSObject::SetElement(object, index, value, NONE, strict_mode, false); |
} |
@@ -12926,7 +12926,7 @@ MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object, |
if (object->IsJSGlobalProxy()) { |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return value; |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return SetElement( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index, |
value, attributes, strict_mode, check_prototype, set_mode); |
@@ -13043,7 +13043,7 @@ MaybeHandle<Object> JSObject::SetElementWithoutInterceptor( |
StrictMode strict_mode, |
bool check_prototype, |
SetPropertyMode set_mode) { |
- ASSERT(object->HasDictionaryElements() || |
+ DCHECK(object->HasDictionaryElements() || |
object->HasDictionaryArgumentsElements() || |
(attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0); |
Isolate* isolate = object->GetIsolate(); |
@@ -13105,7 +13105,7 @@ MaybeHandle<Object> JSObject::SetElementWithoutInterceptor( |
if (!probe.is_null() && !probe->IsTheHole()) { |
Handle<Context> context(Context::cast(parameter_map->get(0))); |
int context_index = Handle<Smi>::cast(probe)->value(); |
- ASSERT(!context->get(context_index)->IsTheHole()); |
+ DCHECK(!context->get(context_index)->IsTheHole()); |
context->set(context_index, *value); |
// Redefining attributes of an aliased element destroys fast aliasing. |
if (set_mode == SET_PROPERTY || attributes == NONE) return value; |
@@ -13153,7 +13153,7 @@ PretenureFlag AllocationSite::GetPretenureMode() { |
bool AllocationSite::IsNestedSite() { |
- ASSERT(FLAG_trace_track_allocation_sites); |
+ DCHECK(FLAG_trace_track_allocation_sites); |
Object* current = GetHeap()->allocation_sites_list(); |
while (current->IsAllocationSite()) { |
AllocationSite* current_site = AllocationSite::cast(current); |
@@ -13286,7 +13286,7 @@ void JSObject::TransitionElementsKind(Handle<JSObject> object, |
IsFastSmiOrObjectElementsKind(to_kind)) || |
(from_kind == FAST_DOUBLE_ELEMENTS && |
to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) { |
- ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND); |
+ DCHECK(from_kind != TERMINAL_FAST_ELEMENTS_KIND); |
// No change is needed to the elements() buffer, the transition |
// only requires a map change. |
Handle<Map> new_map = GetElementsTransitionMap(object, to_kind); |
@@ -13365,7 +13365,7 @@ void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array, |
bool JSArray::IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map) { |
Isolate* isolate = jsarray_map->GetIsolate(); |
- ASSERT(!jsarray_map->is_dictionary_map()); |
+ DCHECK(!jsarray_map->is_dictionary_map()); |
LookupResult lookup(isolate); |
Handle<Name> length_string = isolate->factory()->length_string(); |
jsarray_map->LookupDescriptor(NULL, *length_string, &lookup); |
@@ -13559,7 +13559,7 @@ bool JSObject::ShouldConvertToSlowElements(int new_capacity) { |
bool JSObject::ShouldConvertToFastElements() { |
- ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements()); |
+ DCHECK(HasDictionaryElements() || HasDictionaryArgumentsElements()); |
// If the elements are sparse, we should not go back to fast case. |
if (!HasDenseElements()) return false; |
// An object requiring access checks is never allowed to have fast |
@@ -13599,7 +13599,7 @@ bool JSObject::ShouldConvertToFastDoubleElements( |
*has_smi_only_elements = false; |
if (HasSloppyArgumentsElements()) return false; |
if (FLAG_unbox_double_arrays) { |
- ASSERT(HasDictionaryElements()); |
+ DCHECK(HasDictionaryElements()); |
SeededNumberDictionary* dictionary = element_dictionary(); |
bool found_double = false; |
for (int i = 0; i < dictionary->Capacity(); i++) { |
@@ -13657,14 +13657,14 @@ void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) { |
elements->set(pos++, ValueAt(i), mode); |
} |
} |
- ASSERT(pos == elements->length()); |
+ DCHECK(pos == elements->length()); |
} |
InterceptorInfo* JSObject::GetNamedInterceptor() { |
- ASSERT(map()->has_named_interceptor()); |
+ DCHECK(map()->has_named_interceptor()); |
JSFunction* constructor = JSFunction::cast(map()->constructor()); |
- ASSERT(constructor->shared()->IsApiFunction()); |
+ DCHECK(constructor->shared()->IsApiFunction()); |
Object* result = |
constructor->shared()->get_api_func_data()->named_property_handler(); |
return InterceptorInfo::cast(result); |
@@ -13672,9 +13672,9 @@ InterceptorInfo* JSObject::GetNamedInterceptor() { |
InterceptorInfo* JSObject::GetIndexedInterceptor() { |
- ASSERT(map()->has_indexed_interceptor()); |
+ DCHECK(map()->has_indexed_interceptor()); |
JSFunction* constructor = JSFunction::cast(map()->constructor()); |
- ASSERT(constructor->shared()->IsApiFunction()); |
+ DCHECK(constructor->shared()->IsApiFunction()); |
Object* result = |
constructor->shared()->get_api_func_data()->indexed_property_handler(); |
return InterceptorInfo::cast(result); |
@@ -13800,7 +13800,7 @@ Maybe<bool> JSObject::HasRealElementProperty(Handle<JSObject> object, |
HandleScope scope(isolate); |
PrototypeIterator iter(isolate, object); |
if (iter.IsAtEnd()) return maybe(false); |
- ASSERT(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
+ DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); |
return HasRealElementProperty( |
Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index); |
} |
@@ -13874,7 +13874,7 @@ static void InsertionSortPairs(FixedArray* content, |
void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) { |
// In-place heap sort. |
- ASSERT(content->length() == numbers->length()); |
+ DCHECK(content->length() == numbers->length()); |
// Bottom-up max-heap construction. |
for (int i = 1; i < len; ++i) { |
@@ -13920,7 +13920,7 @@ void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) { |
// Sort this array and the numbers as pairs wrt. the (distinct) numbers. |
void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) { |
- ASSERT(this->length() == numbers->length()); |
+ DCHECK(this->length() == numbers->length()); |
// For small arrays, simply use insertion sort. |
if (len <= 10) { |
InsertionSortPairs(this, numbers, len); |
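
SortPairs above sorts a FixedArray and a parallel array of numbers as pairs, switching between insertion sort for small inputs and an in-place heap sort otherwise. A standalone sketch of the same idea on std::vector (the threshold of 10 mirrors the cutoff in the hunk; everything else is simplified):

    #include <cstddef>
    #include <cstdio>
    #include <utility>
    #include <vector>

    // Swap the i-th and j-th (key, value) pair across both parallel arrays.
    void SwapPair(std::vector<int>& keys, std::vector<int>& values,
                  size_t i, size_t j) {
      std::swap(keys[i], keys[j]);
      std::swap(values[i], values[j]);
    }

    // Restore the max-heap property for the subtree rooted at `root`,
    // considering only indices below `end`.
    void SiftDown(std::vector<int>& keys, std::vector<int>& values,
                  size_t root, size_t end) {
      while (2 * root + 1 < end) {
        size_t child = 2 * root + 1;
        if (child + 1 < end && keys[child] < keys[child + 1]) ++child;
        if (keys[root] >= keys[child]) return;
        SwapPair(keys, values, root, child);
        root = child;
      }
    }

    void HeapSortPairs(std::vector<int>& keys, std::vector<int>& values) {
      size_t n = keys.size();
      if (n < 2) return;
      for (size_t i = n / 2; i-- > 0;) SiftDown(keys, values, i, n);
      for (size_t end = n - 1; end > 0; --end) {
        SwapPair(keys, values, 0, end);
        SiftDown(keys, values, 0, end);
      }
    }

    void InsertionSortPairs(std::vector<int>& keys, std::vector<int>& values) {
      for (size_t i = 1; i < keys.size(); ++i) {
        for (size_t j = i; j > 0 && keys[j - 1] > keys[j]; --j) {
          SwapPair(keys, values, j - 1, j);
        }
      }
    }

    // Sort both arrays in tandem by key: insertion sort for small inputs
    // (mirroring the `len <= 10` cutoff above), heap sort otherwise.
    void SortPairs(std::vector<int>& keys, std::vector<int>& values) {
      if (keys.size() <= 10) {
        InsertionSortPairs(keys, values);
      } else {
        HeapSortPairs(keys, values);
      }
    }

    int main() {
      std::vector<int> keys = {5, 1, 4, 2, 3};
      std::vector<int> values = {50, 10, 40, 20, 30};
      SortPairs(keys, values);
      for (size_t i = 0; i < keys.size(); ++i) {
        std::printf("%d -> %d\n", keys[i], values[i]);  // 1 -> 10 ... 5 -> 50
      }
      return 0;
    }
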
@@ -13963,7 +13963,7 @@ void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) { |
// mirrors. |
void JSObject::GetOwnPropertyNames( |
FixedArray* storage, int index, PropertyAttributes filter) { |
- ASSERT(storage->length() >= (NumberOfOwnProperties(filter) - index)); |
+ DCHECK(storage->length() >= (NumberOfOwnProperties(filter) - index)); |
if (HasFastProperties()) { |
int real_size = map()->NumberOfOwnDescriptors(); |
DescriptorArray* descs = map()->instance_descriptors(); |
@@ -14020,7 +14020,7 @@ int JSObject::GetOwnElementKeys(FixedArray* storage, |
counter++; |
} |
} |
- ASSERT(!storage || storage->length() >= counter); |
+ DCHECK(!storage || storage->length() >= counter); |
break; |
} |
case FAST_DOUBLE_ELEMENTS: |
@@ -14036,7 +14036,7 @@ int JSObject::GetOwnElementKeys(FixedArray* storage, |
counter++; |
} |
} |
- ASSERT(!storage || storage->length() >= counter); |
+ DCHECK(!storage || storage->length() >= counter); |
break; |
} |
@@ -14054,7 +14054,7 @@ int JSObject::GetOwnElementKeys(FixedArray* storage, |
} |
counter++; |
} |
- ASSERT(!storage || storage->length() >= counter); |
+ DCHECK(!storage || storage->length() >= counter); |
break; |
} |
@@ -14122,7 +14122,7 @@ int JSObject::GetOwnElementKeys(FixedArray* storage, |
counter += str->length(); |
} |
} |
- ASSERT(!storage || storage->length() == counter); |
+ DCHECK(!storage || storage->length() == counter); |
return counter; |
} |
@@ -14151,7 +14151,7 @@ class StringSharedKey : public HashTableKey { |
SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0)); |
if (shared != *shared_) return false; |
int strict_unchecked = Smi::cast(other_array->get(2))->value(); |
- ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT); |
+ DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT); |
StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked); |
if (strict_mode != strict_mode_) return false; |
int scope_position = Smi::cast(other_array->get(3))->value(); |
@@ -14190,7 +14190,7 @@ class StringSharedKey : public HashTableKey { |
SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0)); |
String* source = String::cast(other_array->get(1)); |
int strict_unchecked = Smi::cast(other_array->get(2))->value(); |
- ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT); |
+ DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT); |
StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked); |
int scope_position = Smi::cast(other_array->get(3))->value(); |
return StringSharedHashHelper( |
@@ -14341,7 +14341,7 @@ class InternalizedStringKey : public HashTableKey { |
Handle<Map> map; |
if (maybe_map.ToHandle(&map)) { |
string_->set_map_no_write_barrier(*map); |
- ASSERT(string_->IsInternalizedString()); |
+ DCHECK(string_->IsInternalizedString()); |
return string_; |
} |
// Otherwise allocate a new internalized string. |
@@ -14377,8 +14377,8 @@ Handle<Derived> HashTable<Derived, Shape, Key>::New( |
int at_least_space_for, |
MinimumCapacity capacity_option, |
PretenureFlag pretenure) { |
- ASSERT(0 <= at_least_space_for); |
- ASSERT(!capacity_option || IsPowerOf2(at_least_space_for)); |
+ DCHECK(0 <= at_least_space_for); |
+ DCHECK(!capacity_option || IsPowerOf2(at_least_space_for)); |
int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY) |
? at_least_space_for |
: ComputeCapacity(at_least_space_for); |
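
HashTable::New either accepts a caller-supplied capacity, which must already be a power of two, or derives one from the requested element count. A standalone sketch of that kind of capacity computation, rounding up to a power of two with headroom for the load factor (the 2x headroom and the minimum of 4 are assumptions of this sketch, not necessarily V8's exact numbers):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Round up to the next power of two (result >= 1).
    uint32_t RoundUpToPowerOf2(uint32_t value) {
      uint32_t result = 1;
      while (result < value) result <<= 1;
      return result;
    }

    bool IsPowerOf2(uint32_t value) {
      return value != 0 && (value & (value - 1)) == 0;
    }

    // Pick a table capacity for `at_least_space_for` elements, keeping the
    // table at most half full so probe sequences stay short.
    uint32_t ComputeCapacity(uint32_t at_least_space_for) {
      uint32_t capacity = RoundUpToPowerOf2(at_least_space_for * 2);
      return capacity < 4 ? 4 : capacity;
    }

    int main() {
      assert(IsPowerOf2(ComputeCapacity(3)));
      std::printf("capacity for 3 entries: %u\n", ComputeCapacity(3));    // 8
      std::printf("capacity for 20 entries: %u\n", ComputeCapacity(20));  // 64
      return 0;
    }
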
@@ -14432,7 +14432,7 @@ int NameDictionary::FindEntry(Handle<Name> key) { |
set(index, *key); |
return entry; |
} |
- ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(*key)); |
+ DCHECK(element->IsTheHole() || !Name::cast(element)->Equals(*key)); |
entry = NextProbe(entry, count++, capacity); |
} |
return kNotFound; |
@@ -14443,7 +14443,7 @@ template<typename Derived, typename Shape, typename Key> |
void HashTable<Derived, Shape, Key>::Rehash( |
Handle<Derived> new_table, |
Key key) { |
- ASSERT(NumberOfElements() < new_table->Capacity()); |
+ DCHECK(NumberOfElements() < new_table->Capacity()); |
DisallowHeapAllocation no_gc; |
WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc); |
@@ -14772,7 +14772,7 @@ int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>:: |
Handle<Object> JSObject::PrepareSlowElementsForSort( |
Handle<JSObject> object, uint32_t limit) { |
- ASSERT(object->HasDictionaryElements()); |
+ DCHECK(object->HasDictionaryElements()); |
Isolate* isolate = object->GetIsolate(); |
// Must stay in dictionary mode, either because of requires_slow_elements, |
// or because we are not going to sort (and therefore compact) all of the |
@@ -14792,10 +14792,10 @@ Handle<Object> JSObject::PrepareSlowElementsForSort( |
Object* k = dict->KeyAt(i); |
if (!dict->IsKey(k)) continue; |
- ASSERT(k->IsNumber()); |
- ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0); |
- ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0); |
- ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32); |
+ DCHECK(k->IsNumber()); |
+ DCHECK(!k->IsSmi() || Smi::cast(k)->value() >= 0); |
+ DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0); |
+ DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32); |
HandleScope scope(isolate); |
Handle<Object> value(dict->ValueAt(i), isolate); |
@@ -14817,7 +14817,7 @@ Handle<Object> JSObject::PrepareSlowElementsForSort( |
} else { |
Handle<Object> result = SeededNumberDictionary::AddNumberEntry( |
new_dict, pos, value, details); |
- ASSERT(result.is_identical_to(new_dict)); |
+ DCHECK(result.is_identical_to(new_dict)); |
USE(result); |
pos++; |
} |
@@ -14828,7 +14828,7 @@ Handle<Object> JSObject::PrepareSlowElementsForSort( |
} else { |
Handle<Object> result = SeededNumberDictionary::AddNumberEntry( |
new_dict, key, value, details); |
- ASSERT(result.is_identical_to(new_dict)); |
+ DCHECK(result.is_identical_to(new_dict)); |
USE(result); |
} |
} |
@@ -14844,7 +14844,7 @@ Handle<Object> JSObject::PrepareSlowElementsForSort( |
HandleScope scope(isolate); |
Handle<Object> result = SeededNumberDictionary::AddNumberEntry( |
new_dict, pos, isolate->factory()->undefined_value(), no_details); |
- ASSERT(result.is_identical_to(new_dict)); |
+ DCHECK(result.is_identical_to(new_dict)); |
USE(result); |
pos++; |
undefs--; |
@@ -14898,7 +14898,7 @@ Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object, |
} else if (!object->HasFastDoubleElements()) { |
EnsureWritableFastElements(object); |
} |
- ASSERT(object->HasFastSmiOrObjectElements() || |
+ DCHECK(object->HasFastSmiOrObjectElements() || |
object->HasFastDoubleElements()); |
// Collect holes at the end, undefined before that and the rest at the |
@@ -15054,7 +15054,7 @@ Handle<Object> ExternalUint8ClampedArray::SetValue( |
} else { |
// Clamp undefined to zero (default). All other types have been |
// converted to a number type further up in the call chain. |
- ASSERT(value->IsUndefined()); |
+ DCHECK(value->IsUndefined()); |
} |
array->set(index, clamped_value); |
} |
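
ExternalUint8ClampedArray::SetValue stores numbers clamped to [0, 255] and treats undefined as zero, since any other value has already been converted to a number further up the call chain. A standalone sketch of that clamping (std::lrint rounds to nearest here; the spec's exact tie-breaking rule is not reproduced):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // Clamp a JS-style number into the uint8 range; NaN (standing in for
    // undefined here) maps to the default of 0.
    uint8_t ClampToUint8(double value) {
      if (std::isnan(value)) return 0;
      if (value <= 0.0) return 0;
      if (value >= 255.0) return 255;
      return static_cast<uint8_t>(std::lrint(value));  // Round to nearest.
    }

    int main() {
      std::printf("%d %d %d %d\n",
                  ClampToUint8(-3.5), ClampToUint8(12.4),
                  ClampToUint8(300.0), ClampToUint8(std::nan("")));
      // Prints: 0 12 255 0
      return 0;
    }
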
@@ -15079,7 +15079,7 @@ static Handle<Object> ExternalArrayIntSetter( |
} else { |
// Clamp undefined to zero (default). All other types have been |
// converted to a number type further up in the call chain. |
- ASSERT(value->IsUndefined()); |
+ DCHECK(value->IsUndefined()); |
} |
receiver->set(index, cast_value); |
} |
@@ -15142,7 +15142,7 @@ Handle<Object> ExternalUint32Array::SetValue( |
} else { |
// Clamp undefined to zero (default). All other types have been |
// converted to a number type further up in the call chain. |
- ASSERT(value->IsUndefined()); |
+ DCHECK(value->IsUndefined()); |
} |
array->set(index, cast_value); |
} |
@@ -15165,7 +15165,7 @@ Handle<Object> ExternalFloat32Array::SetValue( |
} else { |
// Clamp undefined to NaN (default). All other types have been |
// converted to a number type further up in the call chain. |
- ASSERT(value->IsUndefined()); |
+ DCHECK(value->IsUndefined()); |
} |
array->set(index, cast_value); |
} |
@@ -15184,7 +15184,7 @@ Handle<Object> ExternalFloat64Array::SetValue( |
} else { |
// Clamp undefined to NaN (default). All other types have been |
// converted to a number type further up in the call chain. |
- ASSERT(value->IsUndefined()); |
+ DCHECK(value->IsUndefined()); |
} |
array->set(index, double_value); |
} |
@@ -15193,7 +15193,7 @@ Handle<Object> ExternalFloat64Array::SetValue( |
PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) { |
- ASSERT(!HasFastProperties()); |
+ DCHECK(!HasFastProperties()); |
Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry()); |
return PropertyCell::cast(value); |
} |
@@ -15202,7 +15202,7 @@ PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) { |
Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell( |
Handle<JSGlobalObject> global, |
Handle<Name> name) { |
- ASSERT(!global->HasFastProperties()); |
+ DCHECK(!global->HasFastProperties()); |
int entry = global->property_dictionary()->FindEntry(name); |
if (entry == NameDictionary::kNotFound) { |
Isolate* isolate = global->GetIsolate(); |
@@ -15216,7 +15216,7 @@ Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell( |
return cell; |
} else { |
Object* value = global->property_dictionary()->ValueAt(entry); |
- ASSERT(value->IsPropertyCell()); |
+ DCHECK(value->IsPropertyCell()); |
return handle(PropertyCell::cast(value)); |
} |
} |
@@ -15254,7 +15254,7 @@ class TwoCharHashTableKey : public HashTableKey { |
uint16_t chars[2] = {c1, c2}; |
uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed); |
hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask; |
- ASSERT_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash)); |
+ DCHECK_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash)); |
#endif |
} |
@@ -15306,7 +15306,7 @@ MaybeHandle<String> StringTable::LookupStringIfExists( |
return MaybeHandle<String>(); |
} else { |
Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate); |
- ASSERT(StringShape(*result).IsInternalized()); |
+ DCHECK(StringShape(*result).IsInternalized()); |
return result; |
} |
} |
@@ -15323,7 +15323,7 @@ MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists( |
return MaybeHandle<String>(); |
} else { |
Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate); |
- ASSERT(StringShape(*result).IsInternalized()); |
+ DCHECK(StringShape(*result).IsInternalized()); |
return result; |
} |
} |
@@ -15526,7 +15526,7 @@ Handle<Derived> Dictionary<Derived, Shape, Key>::New( |
Isolate* isolate, |
int at_least_space_for, |
PretenureFlag pretenure) { |
- ASSERT(0 <= at_least_space_for); |
+ DCHECK(0 <= at_least_space_for); |
Handle<Derived> dict = DerivedHashTable::New(isolate, |
at_least_space_for, |
USE_DEFAULT_MINIMUM_CAPACITY, |
@@ -15653,7 +15653,7 @@ Handle<Derived> Dictionary<Derived, Shape, Key>::Add( |
Handle<Object> value, |
PropertyDetails details) { |
   // Validate key is absent.
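PLACEHOLDER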
- SLOW_ASSERT((dictionary->FindEntry(key) == Dictionary::kNotFound)); |
+ SLOW_DCHECK((dictionary->FindEntry(key) == Dictionary::kNotFound)); |
// Check whether the dictionary should be extended. |
dictionary = EnsureCapacity(dictionary, 1, key); |
@@ -15685,7 +15685,7 @@ void Dictionary<Derived, Shape, Key>::AddEntry( |
dictionary->SetNextEnumerationIndex(index + 1); |
} |
dictionary->SetEntry(entry, k, value, details); |
- ASSERT((dictionary->KeyAt(entry)->IsNumber() || |
+ DCHECK((dictionary->KeyAt(entry)->IsNumber() || |
dictionary->KeyAt(entry)->IsName())); |
dictionary->ElementAdded(); |
} |
@@ -15717,7 +15717,7 @@ Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry( |
Handle<Object> value, |
PropertyDetails details) { |
dictionary->UpdateMaxNumberKey(key); |
- SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound); |
+ SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound); |
return Add(dictionary, key, value, details); |
} |
@@ -15726,7 +15726,7 @@ Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry( |
Handle<UnseededNumberDictionary> dictionary, |
uint32_t key, |
Handle<Object> value) { |
- SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound); |
+ SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound); |
return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0)); |
} |
@@ -15812,7 +15812,7 @@ void Dictionary<Derived, Shape, Key>::CopyKeysTo( |
FixedArray* storage, |
PropertyAttributes filter, |
typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) { |
- ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter)); |
+ DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter)); |
int capacity = DerivedHashTable::Capacity(); |
int index = 0; |
for (int i = 0; i < capacity; i++) { |
@@ -15827,7 +15827,7 @@ void Dictionary<Derived, Shape, Key>::CopyKeysTo( |
if (sort_mode == Dictionary::SORTED) { |
storage->SortPairs(storage, index); |
} |
- ASSERT(storage->length() >= index); |
+ DCHECK(storage->length() >= index); |
} |
@@ -15873,7 +15873,7 @@ void Dictionary<Derived, Shape, Key>::CopyKeysTo( |
int index, |
PropertyAttributes filter, |
typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) { |
- ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter)); |
+ DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter)); |
int capacity = DerivedHashTable::Capacity(); |
for (int i = 0; i < capacity; i++) { |
Object* k = DerivedHashTable::KeyAt(i); |
@@ -15887,7 +15887,7 @@ void Dictionary<Derived, Shape, Key>::CopyKeysTo( |
if (sort_mode == Dictionary::SORTED) { |
storage->SortPairs(storage, index); |
} |
- ASSERT(storage->length() >= index); |
+ DCHECK(storage->length() >= index); |
} |
@@ -15912,7 +15912,7 @@ Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) { |
Object* ObjectHashTable::Lookup(Handle<Object> key) { |
DisallowHeapAllocation no_gc; |
- ASSERT(IsKey(*key)); |
+ DCHECK(IsKey(*key)); |
// If the object does not have an identity hash, it was never used as a key. |
Object* hash = key->GetHash(); |
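
ObjectHashTable keys arbitrary objects by a lazily assigned identity hash, so a lookup can return immediately when the key was never given a hash: such an object cannot have been used as a key. A standalone sketch of that short-circuit with an assumed side table for the identity hashes (IdentityHashes and ObjectTable are inventions for this sketch):

    #include <cstdio>
    #include <unordered_map>

    // Lazily assigned identity hashes: an object gets one the first time it
    // is used as a key, never before.
    class IdentityHashes {
     public:
      bool TryGet(const void* object, int* hash) const {
        auto it = hashes_.find(object);
        if (it == hashes_.end()) return false;
        *hash = it->second;
        return true;
      }
      int GetOrCreate(const void* object) {
        auto it = hashes_.find(object);
        if (it != hashes_.end()) return it->second;
        int hash = next_hash_++;
        hashes_[object] = hash;
        return hash;
      }

     private:
      std::unordered_map<const void*, int> hashes_;
      int next_hash_ = 1;
    };

    class ObjectTable {
     public:
      // Returns the stored value, or -1 (the stand-in for "the hole").
      int Lookup(const IdentityHashes& ids, const void* key) const {
        int hash;
        // No identity hash means the object was never used as a key,
        // so there is nothing to probe for.
        if (!ids.TryGet(key, &hash)) return -1;
        auto it = entries_.find(hash);
        return it == entries_.end() ? -1 : it->second;
      }
      void Put(IdentityHashes* ids, const void* key, int value) {
        entries_[ids->GetOrCreate(key)] = value;
      }

     private:
      std::unordered_map<int, int> entries_;
    };

    int main() {
      IdentityHashes ids;
      ObjectTable table;
      int a = 0, b = 0;
      table.Put(&ids, &a, 42);
      std::printf("%d %d\n", table.Lookup(ids, &a), table.Lookup(ids, &b));
      // Prints: 42 -1
      return 0;
    }
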
@@ -15928,8 +15928,8 @@ Object* ObjectHashTable::Lookup(Handle<Object> key) { |
Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table, |
Handle<Object> key, |
Handle<Object> value) { |
- ASSERT(table->IsKey(*key)); |
- ASSERT(!value->IsTheHole()); |
+ DCHECK(table->IsKey(*key)); |
+ DCHECK(!value->IsTheHole()); |
Isolate* isolate = table->GetIsolate(); |
@@ -15956,7 +15956,7 @@ Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table, |
Handle<ObjectHashTable> ObjectHashTable::Remove(Handle<ObjectHashTable> table, |
Handle<Object> key, |
bool* was_present) { |
- ASSERT(table->IsKey(*key)); |
+ DCHECK(table->IsKey(*key)); |
Object* hash = key->GetHash(); |
if (hash->IsUndefined()) { |
@@ -15992,7 +15992,7 @@ void ObjectHashTable::RemoveEntry(int entry) { |
Object* WeakHashTable::Lookup(Handle<Object> key) { |
DisallowHeapAllocation no_gc; |
- ASSERT(IsKey(*key)); |
+ DCHECK(IsKey(*key)); |
int entry = FindEntry(key); |
if (entry == kNotFound) return GetHeap()->the_hole_value(); |
return get(EntryToValueIndex(entry)); |
@@ -16002,7 +16002,7 @@ Object* WeakHashTable::Lookup(Handle<Object> key) { |
Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table, |
Handle<Object> key, |
Handle<Object> value) { |
- ASSERT(table->IsKey(*key)); |
+ DCHECK(table->IsKey(*key)); |
int entry = table->FindEntry(key); |
// Key is already in table, just overwrite value. |
if (entry != kNotFound) { |
@@ -16065,7 +16065,7 @@ Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate( |
template<class Derived, class Iterator, int entrysize> |
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable( |
Handle<Derived> table) { |
- ASSERT(!table->IsObsolete()); |
+ DCHECK(!table->IsObsolete()); |
int nof = table->NumberOfElements(); |
int nod = table->NumberOfDeletedElements(); |
@@ -16081,7 +16081,7 @@ Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable( |
template<class Derived, class Iterator, int entrysize> |
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink( |
Handle<Derived> table) { |
- ASSERT(!table->IsObsolete()); |
+ DCHECK(!table->IsObsolete()); |
int nof = table->NumberOfElements(); |
int capacity = table->Capacity(); |
@@ -16093,7 +16093,7 @@ Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink( |
template<class Derived, class Iterator, int entrysize> |
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear( |
Handle<Derived> table) { |
- ASSERT(!table->IsObsolete()); |
+ DCHECK(!table->IsObsolete()); |
Handle<Derived> new_table = |
Allocate(table->GetIsolate(), |
@@ -16124,7 +16124,7 @@ Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Remove( |
template<class Derived, class Iterator, int entrysize> |
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash( |
Handle<Derived> table, int new_capacity) { |
- ASSERT(!table->IsObsolete()); |
+ DCHECK(!table->IsObsolete()); |
Handle<Derived> new_table = |
Allocate(table->GetIsolate(), |
@@ -16157,7 +16157,7 @@ Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash( |
++new_entry; |
} |
- ASSERT_EQ(nod, removed_holes_index); |
+ DCHECK_EQ(nod, removed_holes_index); |
new_table->SetNumberOfElements(nof); |
table->SetNextTable(*new_table); |
@@ -16169,10 +16169,10 @@ Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash( |
template <class Derived, class Iterator, int entrysize> |
int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry( |
Handle<Object> key, int hash) { |
- ASSERT(!IsObsolete()); |
+ DCHECK(!IsObsolete()); |
DisallowHeapAllocation no_gc; |
- ASSERT(!key->IsTheHole()); |
+ DCHECK(!key->IsTheHole()); |
for (int entry = HashToEntry(hash); entry != kNotFound; |
entry = ChainAt(entry)) { |
Object* candidate = KeyAt(entry); |
@@ -16195,7 +16195,7 @@ int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry( |
template <class Derived, class Iterator, int entrysize> |
int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) { |
- ASSERT(!IsObsolete()); |
+ DCHECK(!IsObsolete()); |
int entry = UsedCapacity(); |
int bucket = HashToBucket(hash); |
@@ -16210,7 +16210,7 @@ int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) { |
template<class Derived, class Iterator, int entrysize> |
void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) { |
- ASSERT(!IsObsolete()); |
+ DCHECK(!IsObsolete()); |
int index = EntryToIndex(entry); |
for (int i = 0; i < entrysize; ++i) { |
@@ -16314,7 +16314,7 @@ Object* OrderedHashMap::Lookup(Handle<Object> key) { |
Handle<OrderedHashMap> OrderedHashMap::Put(Handle<OrderedHashMap> table, |
Handle<Object> key, |
Handle<Object> value) { |
- ASSERT(!key->IsTheHole()); |
+ DCHECK(!key->IsTheHole()); |
int hash = GetOrCreateHash(table->GetIsolate(), key)->value(); |
int entry = table->FindEntry(key, hash); |
@@ -16448,13 +16448,13 @@ DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator( |
const DeclaredAccessorDescriptorData* |
DeclaredAccessorDescriptorIterator::Next() { |
- ASSERT(offset_ < length_); |
+ DCHECK(offset_ < length_); |
uint8_t* ptr = &array_[offset_]; |
- ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0); |
+ DCHECK(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0); |
const DeclaredAccessorDescriptorData* data = |
reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr); |
offset_ += sizeof(*data); |
- ASSERT(offset_ <= length_); |
+ DCHECK(offset_ <= length_); |
return data; |
} |
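
DeclaredAccessorDescriptorIterator::Next walks fixed-size descriptor records packed back to back in a byte array, asserting bounds and pointer alignment before each reinterpret_cast. A standalone sketch of that iteration pattern over an assumed Record struct (the layout is invented for the sketch):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    struct Record {      // Stand-in for the descriptor data; assumed layout.
      uintptr_t type;
      uintptr_t payload;
    };

    // Walk fixed-size records packed back to back in a byte buffer, checking
    // bounds and alignment before handing out a typed pointer.
    class RecordIterator {
     public:
      RecordIterator(const uint8_t* array, size_t length)
          : array_(array), length_(length), offset_(0) {}

      bool Complete() const { return offset_ == length_; }

      const Record* Next() {
        assert(offset_ < length_);
        const uint8_t* ptr = array_ + offset_;
        assert(reinterpret_cast<uintptr_t>(ptr) % alignof(Record) == 0);
        offset_ += sizeof(Record);
        assert(offset_ <= length_);
        return reinterpret_cast<const Record*>(ptr);
      }

     private:
      const uint8_t* array_;
      size_t length_;
      size_t offset_;
    };

    int main() {
      Record records[2] = {{1, 10}, {2, 20}};
      std::vector<uint8_t> buffer(sizeof(records));
      std::memcpy(buffer.data(), records, sizeof(records));
      RecordIterator it(buffer.data(), buffer.size());
      while (!it.Complete()) {
        const Record* r = it.Next();
        std::printf("type=%lu payload=%lu\n",
                    static_cast<unsigned long>(r->type),
                    static_cast<unsigned long>(r->payload));
      }
      return 0;
    }
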
@@ -16481,7 +16481,7 @@ Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create( |
MemCopy(array, previous_array, previous_length); |
array += previous_length; |
} |
- ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0); |
+ DCHECK(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0); |
DeclaredAccessorDescriptorData* data = |
reinterpret_cast<DeclaredAccessorDescriptorData*>(array); |
*data = descriptor; |
@@ -16565,7 +16565,7 @@ void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info, |
} |
index = old_break_points->length(); |
} |
- ASSERT(index != kNoBreakPointInfo); |
+ DCHECK(index != kNoBreakPointInfo); |
// Allocate new BreakPointInfo object and set the break point. |
Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast( |
@@ -16657,7 +16657,7 @@ void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info, |
return; |
} |
// If there are multiple break points shrink the array |
- ASSERT(break_point_info->break_point_objects()->IsFixedArray()); |
+ DCHECK(break_point_info->break_point_objects()->IsFixedArray()); |
Handle<FixedArray> old_array = |
Handle<FixedArray>( |
FixedArray::cast(break_point_info->break_point_objects())); |
@@ -16666,7 +16666,7 @@ void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info, |
int found_count = 0; |
for (int i = 0; i < old_array->length(); i++) { |
if (old_array->get(i) == *break_point_object) { |
- ASSERT(found_count == 0); |
+ DCHECK(found_count == 0); |
found_count++; |
} else { |
new_array->set(i - found_count, old_array->get(i)); |
@@ -16752,7 +16752,7 @@ Object* JSDate::GetField(Object* object, Smi* index) { |
Object* JSDate::DoGetField(FieldIndex index) { |
- ASSERT(index != kDateValue); |
+ DCHECK(index != kDateValue); |
DateCache* date_cache = GetIsolate()->date_cache(); |
@@ -16790,7 +16790,7 @@ Object* JSDate::DoGetField(FieldIndex index) { |
int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days); |
if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000); |
- ASSERT(index == kTimeInDay); |
+ DCHECK(index == kTimeInDay); |
return Smi::FromInt(time_in_day_ms); |
} |
@@ -16798,7 +16798,7 @@ Object* JSDate::DoGetField(FieldIndex index) { |
Object* JSDate::GetUTCField(FieldIndex index, |
double value, |
DateCache* date_cache) { |
- ASSERT(index >= kFirstUTCField); |
+ DCHECK(index >= kFirstUTCField); |
if (std::isnan(value)) return GetIsolate()->heap()->nan_value(); |
@@ -16817,7 +16817,7 @@ Object* JSDate::GetUTCField(FieldIndex index, |
date_cache->YearMonthDayFromDays(days, &year, &month, &day); |
if (index == kYearUTC) return Smi::FromInt(year); |
if (index == kMonthUTC) return Smi::FromInt(month); |
- ASSERT(index == kDayUTC); |
+ DCHECK(index == kDayUTC); |
return Smi::FromInt(day); |
} |
@@ -16876,7 +16876,7 @@ void JSDate::SetCachedFields(int64_t local_time_ms, DateCache* date_cache) { |
void JSArrayBuffer::Neuter() { |
- ASSERT(is_external()); |
+ DCHECK(is_external()); |
set_backing_store(NULL); |
set_byte_length(Smi::FromInt(0)); |
} |
@@ -16921,7 +16921,7 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer( |
Handle<Map> map(typed_array->map()); |
Isolate* isolate = typed_array->GetIsolate(); |
- ASSERT(IsFixedTypedArrayElementsKind(map->elements_kind())); |
+ DCHECK(IsFixedTypedArrayElementsKind(map->elements_kind())); |
Handle<Map> new_map = Map::TransitionElementsTo( |
map, |
@@ -16941,7 +16941,7 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer( |
static_cast<uint8_t*>(buffer->backing_store())); |
buffer->set_weak_first_view(*typed_array); |
- ASSERT(typed_array->weak_next() == isolate->heap()->undefined_value()); |
+ DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value()); |
typed_array->set_buffer(*buffer); |
JSObject::SetMapAndElements(typed_array, new_map, new_elements); |
@@ -16952,7 +16952,7 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer( |
Handle<JSArrayBuffer> JSTypedArray::GetBuffer() { |
Handle<Object> result(buffer(), GetIsolate()); |
if (*result != Smi::FromInt(0)) { |
- ASSERT(IsExternalArrayElementsKind(map()->elements_kind())); |
+ DCHECK(IsExternalArrayElementsKind(map()->elements_kind())); |
return Handle<JSArrayBuffer>::cast(result); |
} |
Handle<JSTypedArray> self(this); |
@@ -16966,7 +16966,7 @@ HeapType* PropertyCell::type() { |
void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) { |
- ASSERT(IsPropertyCell()); |
+ DCHECK(IsPropertyCell()); |
set_type_raw(type, ignored); |
} |
@@ -17014,7 +17014,7 @@ void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell, |
const char* GetBailoutReason(BailoutReason reason) { |
- ASSERT(reason < kLastErrorMessage); |
+ DCHECK(reason < kLastErrorMessage); |
#define ERROR_MESSAGES_TEXTS(C, T) T, |
static const char* error_messages_[] = { |
ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS) |