Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(761)

Side by Side Diff: src/objects.cc

Issue 157503002: A64: Synchronize with r18444. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/objects.h ('k') | src/objects-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1665 matching lines...) Expand 10 before | Expand all | Expand 10 after
1676 break; 1676 break;
1677 case EXTERNAL_DOUBLE_ARRAY_TYPE: 1677 case EXTERNAL_DOUBLE_ARRAY_TYPE:
1678 accumulator->Add("<ExternalDoubleArray[%u]>", 1678 accumulator->Add("<ExternalDoubleArray[%u]>",
1679 ExternalDoubleArray::cast(this)->length()); 1679 ExternalDoubleArray::cast(this)->length());
1680 break; 1680 break;
1681 case SHARED_FUNCTION_INFO_TYPE: { 1681 case SHARED_FUNCTION_INFO_TYPE: {
1682 SharedFunctionInfo* shared = SharedFunctionInfo::cast(this); 1682 SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
1683 SmartArrayPointer<char> debug_name = 1683 SmartArrayPointer<char> debug_name =
1684 shared->DebugName()->ToCString(); 1684 shared->DebugName()->ToCString();
1685 if (debug_name[0] != 0) { 1685 if (debug_name[0] != 0) {
1686 accumulator->Add("<SharedFunctionInfo %s>", *debug_name); 1686 accumulator->Add("<SharedFunctionInfo %s>", debug_name.get());
1687 } else { 1687 } else {
1688 accumulator->Add("<SharedFunctionInfo>"); 1688 accumulator->Add("<SharedFunctionInfo>");
1689 } 1689 }
1690 break; 1690 break;
1691 } 1691 }
1692 case JS_MESSAGE_OBJECT_TYPE: 1692 case JS_MESSAGE_OBJECT_TYPE:
1693 accumulator->Add("<JSMessageObject>"); 1693 accumulator->Add("<JSMessageObject>");
1694 break; 1694 break;
1695 #define MAKE_STRUCT_CASE(NAME, Name, name) \ 1695 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1696 case NAME##_TYPE: \ 1696 case NAME##_TYPE: \
(...skipping 1099 matching lines...) Expand 10 before | Expand all | Expand 10 after
2796 for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) { 2796 for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
2797 PropertyDetails details = descriptors->GetDetails(i); 2797 PropertyDetails details = descriptors->GetDetails(i);
2798 if (details.type() == FIELD) { 2798 if (details.type() == FIELD) {
2799 map = GeneralizeRepresentation(map, i, new_representation, FORCE_FIELD); 2799 map = GeneralizeRepresentation(map, i, new_representation, FORCE_FIELD);
2800 } 2800 }
2801 } 2801 }
2802 return map; 2802 return map;
2803 } 2803 }
2804 2804
2805 2805
2806 Map* Map::CurrentMapForDeprecated() { 2806 Handle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
2807 Handle<Map> proto_map(map);
2808 while (proto_map->prototype()->IsJSObject()) {
2809 Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
2810 if (holder->map()->is_deprecated()) {
2811 JSObject::TryMigrateInstance(holder);
2812 }
2813 proto_map = Handle<Map>(holder->map());
2814 }
2815 return CurrentMapForDeprecatedInternal(map);
2816 }
2817
2818
2819 Handle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> map) {
2820 if (!map->is_deprecated()) return map;
2821
2807 DisallowHeapAllocation no_allocation; 2822 DisallowHeapAllocation no_allocation;
2808 if (!is_deprecated()) return this; 2823 DescriptorArray* old_descriptors = map->instance_descriptors();
2809 2824
2810 DescriptorArray* old_descriptors = instance_descriptors(); 2825 int descriptors = map->NumberOfOwnDescriptors();
2811 2826 Map* root_map = map->FindRootMap();
2812 int descriptors = NumberOfOwnDescriptors();
2813 Map* root_map = FindRootMap();
2814 2827
2815 // Check the state of the root map. 2828 // Check the state of the root map.
2816 if (!EquivalentToForTransition(root_map)) return NULL; 2829 if (!map->EquivalentToForTransition(root_map)) return Handle<Map>();
2817 int verbatim = root_map->NumberOfOwnDescriptors(); 2830 int verbatim = root_map->NumberOfOwnDescriptors();
2818 2831
2819 Map* updated = root_map->FindUpdatedMap( 2832 Map* updated = root_map->FindUpdatedMap(
2820 verbatim, descriptors, old_descriptors); 2833 verbatim, descriptors, old_descriptors);
2821 if (updated == NULL) return NULL; 2834 if (updated == NULL) return Handle<Map>();
2822 2835
2823 DescriptorArray* updated_descriptors = updated->instance_descriptors(); 2836 DescriptorArray* updated_descriptors = updated->instance_descriptors();
2824 int valid = updated->NumberOfOwnDescriptors(); 2837 int valid = updated->NumberOfOwnDescriptors();
2825 if (!updated_descriptors->IsMoreGeneralThan( 2838 if (!updated_descriptors->IsMoreGeneralThan(
2826 verbatim, valid, descriptors, old_descriptors)) { 2839 verbatim, valid, descriptors, old_descriptors)) {
2827 return NULL; 2840 return Handle<Map>();
2828 } 2841 }
2829 2842
2830 return updated; 2843 return handle(updated);
2831 } 2844 }
2832 2845
2833 2846
2834 Handle<Object> JSObject::SetPropertyWithInterceptor( 2847 Handle<Object> JSObject::SetPropertyWithInterceptor(
2835 Handle<JSObject> object, 2848 Handle<JSObject> object,
2836 Handle<Name> name, 2849 Handle<Name> name,
2837 Handle<Object> value, 2850 Handle<Object> value,
2838 PropertyAttributes attributes, 2851 PropertyAttributes attributes,
2839 StrictModeFlag strict_mode) { 2852 StrictModeFlag strict_mode) {
2840 // TODO(rossberg): Support symbols in the API. 2853 // TODO(rossberg): Support symbols in the API.
(...skipping 1031 matching lines...) Expand 10 before | Expand all | Expand 10 after
3872 GeneralizeFieldRepresentation( 3885 GeneralizeFieldRepresentation(
3873 object, 0, Representation::None(), ALLOW_AS_CONSTANT); 3886 object, 0, Representation::None(), ALLOW_AS_CONSTANT);
3874 object->map()->set_migration_target(true); 3887 object->map()->set_migration_target(true);
3875 if (FLAG_trace_migration) { 3888 if (FLAG_trace_migration) {
3876 object->PrintInstanceMigration(stdout, *original_map, object->map()); 3889 object->PrintInstanceMigration(stdout, *original_map, object->map());
3877 } 3890 }
3878 } 3891 }
3879 3892
3880 3893
3881 Handle<Object> JSObject::TryMigrateInstance(Handle<JSObject> object) { 3894 Handle<Object> JSObject::TryMigrateInstance(Handle<JSObject> object) {
3882 Map* new_map = object->map()->CurrentMapForDeprecated();
3883 if (new_map == NULL) return Handle<Object>();
3884 Handle<Map> original_map(object->map()); 3895 Handle<Map> original_map(object->map());
3885 JSObject::MigrateToMap(object, handle(new_map)); 3896 Handle<Map> new_map = Map::CurrentMapForDeprecatedInternal(original_map);
3897 if (new_map.is_null()) return Handle<Object>();
3898 JSObject::MigrateToMap(object, new_map);
3886 if (FLAG_trace_migration) { 3899 if (FLAG_trace_migration) {
3887 object->PrintInstanceMigration(stdout, *original_map, object->map()); 3900 object->PrintInstanceMigration(stdout, *original_map, object->map());
3888 } 3901 }
3889 return object; 3902 return object;
3890 } 3903 }
3891 3904
3892 3905
3893 Handle<Object> JSObject::SetPropertyUsingTransition( 3906 Handle<Object> JSObject::SetPropertyUsingTransition(
3894 Handle<JSObject> object, 3907 Handle<JSObject> object,
3895 LookupResult* lookup, 3908 LookupResult* lookup,
(...skipping 5268 matching lines...) Expand 10 before | Expand all | Expand 10 after
9164 if (Marking::IsBlack(Marking::MarkBitFrom(start_of_string))) { 9177 if (Marking::IsBlack(Marking::MarkBitFrom(start_of_string))) {
9165 MemoryChunk::IncrementLiveBytesFromMutator(start_of_string, -delta); 9178 MemoryChunk::IncrementLiveBytesFromMutator(start_of_string, -delta);
9166 } 9179 }
9167 9180
9168 9181
9169 if (new_length == 0) return heap->isolate()->factory()->empty_string(); 9182 if (new_length == 0) return heap->isolate()->factory()->empty_string();
9170 return string; 9183 return string;
9171 } 9184 }
9172 9185
9173 9186
9174 AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object, 9187 AllocationMemento* AllocationMemento::FindForHeapObject(HeapObject* object,
9175 bool in_GC) { 9188 bool in_GC) {
9176 // Currently, AllocationMemento objects are only allocated immediately 9189 // AllocationMemento objects are only allocated immediately after objects in
9177 // after JSArrays and some JSObjects in NewSpace. Detecting whether a 9190 // NewSpace. Detecting whether a memento is present involves carefully
9178 // memento is present involves carefully checking the object immediately 9191 // checking the object immediately after the current object (if there is one)
9179 // after the current object (if there is one) to see if it's an 9192 // to see if it's an AllocationMemento.
9180 // AllocationMemento. 9193 ASSERT(object->GetHeap()->InNewSpace(object));
9181 if (FLAG_track_allocation_sites && object->GetHeap()->InNewSpace(object)) { 9194 if (FLAG_track_allocation_sites) {
9182 Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) + 9195 Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) +
9183 object->Size(); 9196 object->Size();
9184 Address top; 9197 Address top;
9185 if (in_GC) { 9198 if (in_GC) {
9186 top = object->GetHeap()->new_space()->FromSpacePageHigh(); 9199 top = object->GetHeap()->new_space()->FromSpacePageHigh();
9187 } else { 9200 } else {
9188 top = object->GetHeap()->NewSpaceTop(); 9201 top = object->GetHeap()->NewSpaceTop();
9189 } 9202 }
9190 if ((ptr_end + AllocationMemento::kSize) <= top) { 9203 if ((ptr_end + AllocationMemento::kSize) <= top) {
9191 // There is room in newspace for allocation info. Do we have some? 9204 // There is room in newspace for allocation info. Do we have some?
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after
9278 if (is_index) is_index = hasher.UpdateIndex(c); 9291 if (is_index) is_index = hasher.UpdateIndex(c);
9279 } 9292 }
9280 } 9293 }
9281 *utf16_length_out = static_cast<int>(utf16_length); 9294 *utf16_length_out = static_cast<int>(utf16_length);
9282 // Must set length here so that hash computation is correct. 9295 // Must set length here so that hash computation is correct.
9283 hasher.length_ = utf16_length; 9296 hasher.length_ = utf16_length;
9284 return hasher.GetHashField(); 9297 return hasher.GetHashField();
9285 } 9298 }
9286 9299
9287 9300
9288 MaybeObject* String::SubString(int start, int end, PretenureFlag pretenure) {
9289 Heap* heap = GetHeap();
9290 if (start == 0 && end == length()) return this;
9291 MaybeObject* result = heap->AllocateSubString(this, start, end, pretenure);
9292 return result;
9293 }
9294
9295
9296 void String::PrintOn(FILE* file) { 9301 void String::PrintOn(FILE* file) {
9297 int length = this->length(); 9302 int length = this->length();
9298 for (int i = 0; i < length; i++) { 9303 for (int i = 0; i < length; i++) {
9299 PrintF(file, "%c", Get(i)); 9304 PrintF(file, "%c", Get(i));
9300 } 9305 }
9301 } 9306 }
9302 9307
9303 9308
9304 static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) { 9309 static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) {
9305 int live_enum = map->EnumLength(); 9310 int live_enum = map->EnumLength();
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after
9445 9450
9446 bool Map::EquivalentToForNormalization(Map* other, 9451 bool Map::EquivalentToForNormalization(Map* other,
9447 PropertyNormalizationMode mode) { 9452 PropertyNormalizationMode mode) {
9448 int properties = mode == CLEAR_INOBJECT_PROPERTIES 9453 int properties = mode == CLEAR_INOBJECT_PROPERTIES
9449 ? 0 : other->inobject_properties(); 9454 ? 0 : other->inobject_properties();
9450 return CheckEquivalent(this, other) && inobject_properties() == properties; 9455 return CheckEquivalent(this, other) && inobject_properties() == properties;
9451 } 9456 }
9452 9457
9453 9458
9454 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) { 9459 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
9455 int first_ptr_offset = OffsetOfElementAt(first_ptr_index()); 9460 if (count_of_ptr_entries() > 0) {
9456 int last_ptr_offset = 9461 int first_ptr_offset = OffsetOfElementAt(first_ptr_index());
9457 OffsetOfElementAt(first_ptr_index() + count_of_ptr_entries()); 9462 int last_ptr_offset =
9458 v->VisitPointers( 9463 OffsetOfElementAt(first_ptr_index() + count_of_ptr_entries());
9459 HeapObject::RawField(this, first_ptr_offset), 9464 v->VisitPointers(
9460 HeapObject::RawField(this, last_ptr_offset)); 9465 HeapObject::RawField(this, first_ptr_offset),
9466 HeapObject::RawField(this, last_ptr_offset));
9467 }
9461 } 9468 }
9462 9469
9463 9470
9464 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) { 9471 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
9465 // Iterate over all fields in the body but take care in dealing with 9472 // Iterate over all fields in the body but take care in dealing with
9466 // the code entry. 9473 // the code entry.
9467 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset); 9474 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
9468 v->VisitCodeEntry(this->address() + kCodeEntryOffset); 9475 v->VisitCodeEntry(this->address() + kCodeEntryOffset);
9469 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size); 9476 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
9470 } 9477 }
9471 9478
9472 9479
9473 void JSFunction::MarkForLazyRecompilation() { 9480 void JSFunction::MarkForOptimization() {
9474 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); 9481 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9475 ASSERT(!IsOptimized()); 9482 ASSERT(!IsOptimized());
9476 ASSERT(shared()->allows_lazy_compilation() || 9483 ASSERT(shared()->allows_lazy_compilation() ||
9477 code()->optimizable()); 9484 code()->optimizable());
9478 ASSERT(!shared()->is_generator()); 9485 ASSERT(!shared()->is_generator());
9479 set_code_no_write_barrier( 9486 set_code_no_write_barrier(
9480 GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile)); 9487 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
9481 // No write barrier required, since the builtin is part of the root set. 9488 // No write barrier required, since the builtin is part of the root set.
9482 } 9489 }
9483 9490
9484 9491
9485 void JSFunction::MarkForConcurrentRecompilation() { 9492 void JSFunction::MarkForConcurrentOptimization() {
9486 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); 9493 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
9487 ASSERT(!IsOptimized()); 9494 ASSERT(!IsOptimized());
9488 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); 9495 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
9489 ASSERT(!shared()->is_generator()); 9496 ASSERT(!shared()->is_generator());
9490 ASSERT(GetIsolate()->concurrent_recompilation_enabled()); 9497 ASSERT(GetIsolate()->concurrent_recompilation_enabled());
9491 if (FLAG_trace_concurrent_recompilation) { 9498 if (FLAG_trace_concurrent_recompilation) {
9492 PrintF(" ** Marking "); 9499 PrintF(" ** Marking ");
9493 PrintName(); 9500 PrintName();
9494 PrintF(" for concurrent recompilation.\n"); 9501 PrintF(" for concurrent recompilation.\n");
9495 } 9502 }
9496 set_code_no_write_barrier( 9503 set_code_no_write_barrier(
9497 GetIsolate()->builtins()->builtin(Builtins::kConcurrentRecompile)); 9504 GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
9498 // No write barrier required, since the builtin is part of the root set. 9505 // No write barrier required, since the builtin is part of the root set.
9499 } 9506 }
9500 9507
9501 9508
9502 void JSFunction::MarkInRecompileQueue() { 9509 void JSFunction::MarkInOptimizationQueue() {
9503 // We can only arrive here via the concurrent-recompilation builtin. If 9510 // We can only arrive here via the concurrent-recompilation builtin. If
9504 // break points were set, the code would point to the lazy-compile builtin. 9511 // break points were set, the code would point to the lazy-compile builtin.
9505 ASSERT(!GetIsolate()->DebuggerHasBreakPoints()); 9512 ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
9506 ASSERT(IsMarkedForConcurrentRecompilation() && !IsOptimized()); 9513 ASSERT(IsMarkedForConcurrentOptimization() && !IsOptimized());
9507 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); 9514 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
9508 ASSERT(GetIsolate()->concurrent_recompilation_enabled()); 9515 ASSERT(GetIsolate()->concurrent_recompilation_enabled());
9509 if (FLAG_trace_concurrent_recompilation) { 9516 if (FLAG_trace_concurrent_recompilation) {
9510 PrintF(" ** Queueing "); 9517 PrintF(" ** Queueing ");
9511 PrintName(); 9518 PrintName();
9512 PrintF(" for concurrent recompilation.\n"); 9519 PrintF(" for concurrent recompilation.\n");
9513 } 9520 }
9514 set_code_no_write_barrier( 9521 set_code_no_write_barrier(
9515 GetIsolate()->builtins()->builtin(Builtins::kInRecompileQueue)); 9522 GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
9516 // No write barrier required, since the builtin is part of the root set. 9523 // No write barrier required, since the builtin is part of the root set.
9517 } 9524 }
9518 9525
9519 9526
9520 static bool CompileLazyHelper(CompilationInfo* info,
9521 ClearExceptionFlag flag) {
9522 // Compile the source information to a code object.
9523 ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled());
9524 ASSERT(!info->isolate()->has_pending_exception());
9525 bool result = Compiler::CompileLazy(info);
9526 ASSERT(result != info->isolate()->has_pending_exception());
9527 if (!result && flag == CLEAR_EXCEPTION) {
9528 info->isolate()->clear_pending_exception();
9529 }
9530 return result;
9531 }
9532
9533
9534 bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared,
9535 ClearExceptionFlag flag) {
9536 ASSERT(shared->allows_lazy_compilation_without_context());
9537 CompilationInfoWithZone info(shared);
9538 return CompileLazyHelper(&info, flag);
9539 }
9540
9541
9542 void SharedFunctionInfo::AddToOptimizedCodeMap( 9527 void SharedFunctionInfo::AddToOptimizedCodeMap(
9543 Handle<SharedFunctionInfo> shared, 9528 Handle<SharedFunctionInfo> shared,
9544 Handle<Context> native_context, 9529 Handle<Context> native_context,
9545 Handle<Code> code, 9530 Handle<Code> code,
9546 Handle<FixedArray> literals) { 9531 Handle<FixedArray> literals,
9532 BailoutId osr_ast_id) {
9547 CALL_HEAP_FUNCTION_VOID( 9533 CALL_HEAP_FUNCTION_VOID(
9548 shared->GetIsolate(), 9534 shared->GetIsolate(),
9549 shared->AddToOptimizedCodeMap(*native_context, *code, *literals)); 9535 shared->AddToOptimizedCodeMap(
9536 *native_context, *code, *literals, osr_ast_id));
9550 } 9537 }
9551 9538
9552 9539
9553 MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context, 9540 MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context,
9554 Code* code, 9541 Code* code,
9555 FixedArray* literals) { 9542 FixedArray* literals,
9543 BailoutId osr_ast_id) {
9556 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); 9544 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
9557 ASSERT(native_context->IsNativeContext()); 9545 ASSERT(native_context->IsNativeContext());
9558 STATIC_ASSERT(kEntryLength == 3); 9546 STATIC_ASSERT(kEntryLength == 4);
9559 Heap* heap = GetHeap(); 9547 Heap* heap = GetHeap();
9560 FixedArray* new_code_map; 9548 FixedArray* new_code_map;
9561 Object* value = optimized_code_map(); 9549 Object* value = optimized_code_map();
9550 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
9562 if (value->IsSmi()) { 9551 if (value->IsSmi()) {
9563 // No optimized code map. 9552 // No optimized code map.
9564 ASSERT_EQ(0, Smi::cast(value)->value()); 9553 ASSERT_EQ(0, Smi::cast(value)->value());
 9565 // Create 3 entries per context {context, code, literals}. 9554 // Create 4 entries per context {context, code, literals, osr-ast-id}.
9566 MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength); 9555 MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength);
9567 if (!maybe->To(&new_code_map)) return maybe; 9556 if (!maybe->To(&new_code_map)) return maybe;
9568 new_code_map->set(kEntriesStart + 0, native_context); 9557 new_code_map->set(kEntriesStart + kContextOffset, native_context);
9569 new_code_map->set(kEntriesStart + 1, code); 9558 new_code_map->set(kEntriesStart + kCachedCodeOffset, code);
9570 new_code_map->set(kEntriesStart + 2, literals); 9559 new_code_map->set(kEntriesStart + kLiteralsOffset, literals);
9560 new_code_map->set(kEntriesStart + kOsrAstIdOffset, osr_ast_id_smi);
9571 } else { 9561 } else {
9572 // Copy old map and append one new entry. 9562 // Copy old map and append one new entry.
9573 FixedArray* old_code_map = FixedArray::cast(value); 9563 FixedArray* old_code_map = FixedArray::cast(value);
9574 ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context)); 9564 ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context, osr_ast_id));
9575 int old_length = old_code_map->length(); 9565 int old_length = old_code_map->length();
9576 int new_length = old_length + kEntryLength; 9566 int new_length = old_length + kEntryLength;
9577 MaybeObject* maybe = old_code_map->CopySize(new_length); 9567 MaybeObject* maybe = old_code_map->CopySize(new_length);
9578 if (!maybe->To(&new_code_map)) return maybe; 9568 if (!maybe->To(&new_code_map)) return maybe;
9579 new_code_map->set(old_length + 0, native_context); 9569 new_code_map->set(old_length + kContextOffset, native_context);
9580 new_code_map->set(old_length + 1, code); 9570 new_code_map->set(old_length + kCachedCodeOffset, code);
9581 new_code_map->set(old_length + 2, literals); 9571 new_code_map->set(old_length + kLiteralsOffset, literals);
9572 new_code_map->set(old_length + kOsrAstIdOffset, osr_ast_id_smi);
9582 // Zap the old map for the sake of the heap verifier. 9573 // Zap the old map for the sake of the heap verifier.
9583 if (Heap::ShouldZapGarbage()) { 9574 if (Heap::ShouldZapGarbage()) {
9584 Object** data = old_code_map->data_start(); 9575 Object** data = old_code_map->data_start();
9585 MemsetPointer(data, heap->the_hole_value(), old_length); 9576 MemsetPointer(data, heap->the_hole_value(), old_length);
9586 } 9577 }
9587 } 9578 }
9588 #ifdef DEBUG 9579 #ifdef DEBUG
9589 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { 9580 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
9590 ASSERT(new_code_map->get(i)->IsNativeContext()); 9581 ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext());
9591 ASSERT(new_code_map->get(i + 1)->IsCode()); 9582 ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode());
9592 ASSERT(Code::cast(new_code_map->get(i + 1))->kind() == 9583 ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
9593 Code::OPTIMIZED_FUNCTION); 9584 Code::OPTIMIZED_FUNCTION);
9594 ASSERT(new_code_map->get(i + 2)->IsFixedArray()); 9585 ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
9586 ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
9595 } 9587 }
9596 #endif 9588 #endif
9597 set_optimized_code_map(new_code_map); 9589 set_optimized_code_map(new_code_map);
9598 return new_code_map; 9590 return new_code_map;
9599 } 9591 }
9600 9592
9601 9593
9602 void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function, 9594 FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
9603 int index) {
9604 ASSERT(index > kEntriesStart); 9595 ASSERT(index > kEntriesStart);
9605 FixedArray* code_map = FixedArray::cast(optimized_code_map()); 9596 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9606 if (!bound()) { 9597 if (!bound()) {
9607 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); 9598 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
9608 ASSERT(cached_literals != NULL); 9599 ASSERT_NE(NULL, cached_literals);
9609 function->set_literals(cached_literals); 9600 return cached_literals;
9610 } 9601 }
9611 Code* code = Code::cast(code_map->get(index)); 9602 return NULL;
9612 ASSERT(code != NULL);
9613 ASSERT(function->context()->native_context() == code_map->get(index - 1));
9614 function->ReplaceCode(code);
9615 } 9603 }
9616 9604
9617 9605
9606 Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
9607 ASSERT(index > kEntriesStart);
9608 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9609 Code* code = Code::cast(code_map->get(index));
9610 ASSERT_NE(NULL, code);
9611 return code;
9612 }
9613
9614
9618 void SharedFunctionInfo::ClearOptimizedCodeMap() { 9615 void SharedFunctionInfo::ClearOptimizedCodeMap() {
9619 FixedArray* code_map = FixedArray::cast(optimized_code_map()); 9616 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9620 9617
9621 // If the next map link slot is already used then the function was 9618 // If the next map link slot is already used then the function was
9622 // enqueued with code flushing and we remove it now. 9619 // enqueued with code flushing and we remove it now.
9623 if (!code_map->get(kNextMapIndex)->IsUndefined()) { 9620 if (!code_map->get(kNextMapIndex)->IsUndefined()) {
9624 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher(); 9621 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
9625 flusher->EvictOptimizedCodeMap(this); 9622 flusher->EvictOptimizedCodeMap(this);
9626 } 9623 }
9627 9624
(...skipping 15 matching lines...) Expand all
9643 if (FLAG_trace_opt) { 9640 if (FLAG_trace_opt) {
9644 PrintF("[evicting entry from optimizing code map (%s) for ", reason); 9641 PrintF("[evicting entry from optimizing code map (%s) for ", reason);
9645 ShortPrint(); 9642 ShortPrint();
9646 PrintF("]\n"); 9643 PrintF("]\n");
9647 } 9644 }
9648 removed_entry = true; 9645 removed_entry = true;
9649 break; 9646 break;
9650 } 9647 }
9651 } 9648 }
9652 while (i < (code_map->length() - kEntryLength)) { 9649 while (i < (code_map->length() - kEntryLength)) {
9653 code_map->set(i, code_map->get(i + kEntryLength)); 9650 code_map->set(i + kContextOffset,
9654 code_map->set(i + 1, code_map->get(i + 1 + kEntryLength)); 9651 code_map->get(i + kContextOffset + kEntryLength));
9655 code_map->set(i + 2, code_map->get(i + 2 + kEntryLength)); 9652 code_map->set(i + kCachedCodeOffset,
9653 code_map->get(i + kCachedCodeOffset + kEntryLength));
9654 code_map->set(i + kLiteralsOffset,
9655 code_map->get(i + kLiteralsOffset + kEntryLength));
9656 code_map->set(i + kOsrAstIdOffset,
9657 code_map->get(i + kOsrAstIdOffset + kEntryLength));
9656 i += kEntryLength; 9658 i += kEntryLength;
9657 } 9659 }
9658 if (removed_entry) { 9660 if (removed_entry) {
9659 // Always trim even when array is cleared because of heap verifier. 9661 // Always trim even when array is cleared because of heap verifier.
9660 RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength); 9662 RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength);
9661 if (code_map->length() == kEntriesStart) { 9663 if (code_map->length() == kEntriesStart) {
9662 ClearOptimizedCodeMap(); 9664 ClearOptimizedCodeMap();
9663 } 9665 }
9664 } 9666 }
9665 } 9667 }
9666 9668
9667 9669
9668 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { 9670 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
9669 FixedArray* code_map = FixedArray::cast(optimized_code_map()); 9671 FixedArray* code_map = FixedArray::cast(optimized_code_map());
9670 ASSERT(shrink_by % kEntryLength == 0); 9672 ASSERT(shrink_by % kEntryLength == 0);
9671 ASSERT(shrink_by <= code_map->length() - kEntriesStart); 9673 ASSERT(shrink_by <= code_map->length() - kEntriesStart);
9672 // Always trim even when array is cleared because of heap verifier. 9674 // Always trim even when array is cleared because of heap verifier.
9673 RightTrimFixedArray<FROM_GC>(GetHeap(), code_map, shrink_by); 9675 RightTrimFixedArray<FROM_GC>(GetHeap(), code_map, shrink_by);
9674 if (code_map->length() == kEntriesStart) { 9676 if (code_map->length() == kEntriesStart) {
9675 ClearOptimizedCodeMap(); 9677 ClearOptimizedCodeMap();
9676 } 9678 }
9677 } 9679 }
9678 9680
9679 9681
9680 bool JSFunction::CompileLazy(Handle<JSFunction> function,
9681 ClearExceptionFlag flag) {
9682 bool result = true;
9683 if (function->shared()->is_compiled()) {
9684 function->ReplaceCode(function->shared()->code());
9685 } else {
9686 ASSERT(function->shared()->allows_lazy_compilation());
9687 CompilationInfoWithZone info(function);
9688 result = CompileLazyHelper(&info, flag);
9689 ASSERT(!result || function->is_compiled());
9690 }
9691 return result;
9692 }
9693
9694
9695 Handle<Code> JSFunction::CompileOsr(Handle<JSFunction> function,
9696 BailoutId osr_ast_id,
9697 ClearExceptionFlag flag) {
9698 CompilationInfoWithZone info(function);
9699 info.SetOptimizing(osr_ast_id);
9700 if (CompileLazyHelper(&info, flag)) {
9701 // TODO(titzer): don't install the OSR code.
9702 // ASSERT(function->code() != *info.code());
9703 return info.code();
9704 } else {
9705 return Handle<Code>::null();
9706 }
9707 }
9708
9709
9710 bool JSFunction::CompileOptimized(Handle<JSFunction> function,
9711 ClearExceptionFlag flag) {
9712 CompilationInfoWithZone info(function);
9713 info.SetOptimizing(BailoutId::None());
9714 return CompileLazyHelper(&info, flag);
9715 }
9716
9717
9718 bool JSFunction::EnsureCompiled(Handle<JSFunction> function,
9719 ClearExceptionFlag flag) {
9720 return function->is_compiled() || CompileLazy(function, flag);
9721 }
9722
9723
9724 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) { 9682 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
9725 if (object->IsGlobalObject()) return; 9683 if (object->IsGlobalObject()) return;
9726 9684
9727 // Make sure prototypes are fast objects and their maps have the bit set 9685 // Make sure prototypes are fast objects and their maps have the bit set
9728 // so they remain fast. 9686 // so they remain fast.
9729 if (!object->HasFastProperties()) { 9687 if (!object->HasFastProperties()) {
9730 TransformToFastProperties(object, 0); 9688 TransformToFastProperties(object, 0);
9731 } 9689 }
9732 } 9690 }
9733 9691
(...skipping 172 matching lines...) Expand 10 before | Expand all | Expand 10 after
9906 } 9864 }
9907 9865
9908 9866
9909 void JSFunction::SetInstanceClassName(String* name) { 9867 void JSFunction::SetInstanceClassName(String* name) {
9910 shared()->set_instance_class_name(name); 9868 shared()->set_instance_class_name(name);
9911 } 9869 }
9912 9870
9913 9871
9914 void JSFunction::PrintName(FILE* out) { 9872 void JSFunction::PrintName(FILE* out) {
9915 SmartArrayPointer<char> name = shared()->DebugName()->ToCString(); 9873 SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
9916 PrintF(out, "%s", *name); 9874 PrintF(out, "%s", name.get());
9917 } 9875 }
9918 9876
9919 9877
9920 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) { 9878 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
9921 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex)); 9879 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
9922 } 9880 }
9923 9881
9924 9882
9925 // The filter is a pattern that matches function names in this way: 9883 // The filter is a pattern that matches function names in this way:
9926 // "*" all; the default 9884 // "*" all; the default
(...skipping 324 matching lines...) Expand 10 before | Expand all | Expand 10 after
10251 // Resize the initial map and all maps in its transition tree. 10209 // Resize the initial map and all maps in its transition tree.
10252 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack); 10210 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
10253 10211
10254 // Give the correct expected_nof_properties to initial maps created later. 10212 // Give the correct expected_nof_properties to initial maps created later.
10255 ASSERT(expected_nof_properties() >= slack); 10213 ASSERT(expected_nof_properties() >= slack);
10256 set_expected_nof_properties(expected_nof_properties() - slack); 10214 set_expected_nof_properties(expected_nof_properties() - slack);
10257 } 10215 }
10258 } 10216 }
10259 10217
10260 10218
10261 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context) { 10219 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
10220 BailoutId osr_ast_id) {
10262 ASSERT(native_context->IsNativeContext()); 10221 ASSERT(native_context->IsNativeContext());
10263 if (!FLAG_cache_optimized_code) return -1; 10222 if (!FLAG_cache_optimized_code) return -1;
10264 Object* value = optimized_code_map(); 10223 Object* value = optimized_code_map();
10265 if (!value->IsSmi()) { 10224 if (!value->IsSmi()) {
10266 FixedArray* optimized_code_map = FixedArray::cast(value); 10225 FixedArray* optimized_code_map = FixedArray::cast(value);
10267 int length = optimized_code_map->length(); 10226 int length = optimized_code_map->length();
10227 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
10268 for (int i = kEntriesStart; i < length; i += kEntryLength) { 10228 for (int i = kEntriesStart; i < length; i += kEntryLength) {
10269 if (optimized_code_map->get(i) == native_context) { 10229 if (optimized_code_map->get(i + kContextOffset) == native_context &&
10270 return i + 1; 10230 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
10231 return i + kCachedCodeOffset;
10271 } 10232 }
10272 } 10233 }
10273 if (FLAG_trace_opt) { 10234 if (FLAG_trace_opt) {
10274 PrintF("[didn't find optimized code in optimized code map for "); 10235 PrintF("[didn't find optimized code in optimized code map for ");
10275 ShortPrint(); 10236 ShortPrint();
10276 PrintF("]\n"); 10237 PrintF("]\n");
10277 } 10238 }
10278 } 10239 }
10279 return -1; 10240 return -1;
10280 } 10241 }
(...skipping 228 matching lines...) Expand 10 before | Expand all | Expand 10 after
10509 if (object->IsHeapObject()) { 10470 if (object->IsHeapObject()) {
10510 if (HeapObject::cast(object)->map() == match_map) { 10471 if (HeapObject::cast(object)->map() == match_map) {
10511 if (--n == 0) return object; 10472 if (--n == 0) return object;
10512 } 10473 }
10513 } 10474 }
10514 } 10475 }
10515 return NULL; 10476 return NULL;
10516 } 10477 }
10517 10478
10518 10479
10480 AllocationSite* Code::FindFirstAllocationSite() {
10481 Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
10482 return (result != NULL) ? AllocationSite::cast(result) : NULL;
10483 }
10484
10485
10519 Map* Code::FindFirstMap() { 10486 Map* Code::FindFirstMap() {
10520 Object* result = FindNthObject(1, GetHeap()->meta_map()); 10487 Object* result = FindNthObject(1, GetHeap()->meta_map());
10521 return (result != NULL) ? Map::cast(result) : NULL; 10488 return (result != NULL) ? Map::cast(result) : NULL;
10522 } 10489 }
10523 10490
10524 10491
10525 void Code::ReplaceNthObject(int n, 10492 void Code::ReplaceNthObject(int n,
10526 Map* match_map, 10493 Map* match_map,
10527 Object* replace_with) { 10494 Object* replace_with) {
10528 ASSERT(is_inline_cache_stub() || is_handler()); 10495 ASSERT(is_inline_cache_stub() || is_handler());
(...skipping 157 matching lines...) Expand 10 before | Expand all | Expand 10 after
10686 DisallowHeapAllocation no_gc; 10653 DisallowHeapAllocation no_gc;
10687 ASSERT(kind() == FUNCTION); 10654 ASSERT(kind() == FUNCTION);
10688 BackEdgeTable back_edges(this, &no_gc); 10655 BackEdgeTable back_edges(this, &no_gc);
10689 for (uint32_t i = 0; i < back_edges.length(); i++) { 10656 for (uint32_t i = 0; i < back_edges.length(); i++) {
10690 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i); 10657 if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
10691 } 10658 }
10692 return BailoutId::None(); 10659 return BailoutId::None();
10693 } 10660 }
10694 10661
10695 10662
10663 uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
10664 DisallowHeapAllocation no_gc;
10665 ASSERT(kind() == FUNCTION);
10666 BackEdgeTable back_edges(this, &no_gc);
10667 for (uint32_t i = 0; i < back_edges.length(); i++) {
10668 if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
10669 }
10670 UNREACHABLE(); // We expect to find the back edge.
10671 return 0;
10672 }
10673
10674
10696 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) { 10675 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
10697 PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY); 10676 PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
10698 } 10677 }
10699 10678
10700 10679
10701 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) { 10680 void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
10702 PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge, 10681 PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
10703 NO_MARKING_PARITY); 10682 NO_MARKING_PARITY);
10704 } 10683 }
10705 10684
(...skipping 2173 matching lines...) Expand 10 before | Expand all | Expand 10 after
12879 CALL_HEAP_FUNCTION_VOID(object->GetIsolate(), 12858 CALL_HEAP_FUNCTION_VOID(object->GetIsolate(),
12880 object->UpdateAllocationSite(to_kind)); 12859 object->UpdateAllocationSite(to_kind));
12881 } 12860 }
12882 12861
12883 12862
12884 MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) { 12863 MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
12885 if (!FLAG_track_allocation_sites || !IsJSArray()) { 12864 if (!FLAG_track_allocation_sites || !IsJSArray()) {
12886 return this; 12865 return this;
12887 } 12866 }
12888 12867
12889 AllocationMemento* memento = AllocationMemento::FindForJSObject(this); 12868 if (!GetHeap()->InNewSpace(this)) return this;
12869
12870 AllocationMemento* memento = AllocationMemento::FindForHeapObject(this);
12890 if (memento == NULL || !memento->IsValid()) { 12871 if (memento == NULL || !memento->IsValid()) {
12891 return this; 12872 return this;
12892 } 12873 }
12893 12874
12894 // Walk through to the Allocation Site 12875 // Walk through to the Allocation Site
12895 AllocationSite* site = memento->GetAllocationSite(); 12876 AllocationSite* site = memento->GetAllocationSite();
12896 return site->DigestTransitionFeedback(to_kind); 12877 return site->DigestTransitionFeedback(to_kind);
12897 } 12878 }
12898 12879
12899 12880
(...skipping 3751 matching lines...) Expand 10 before | Expand all | Expand 10 after
16651 #define ERROR_MESSAGES_TEXTS(C, T) T, 16632 #define ERROR_MESSAGES_TEXTS(C, T) T,
16652 static const char* error_messages_[] = { 16633 static const char* error_messages_[] = {
16653 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS) 16634 ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
16654 }; 16635 };
16655 #undef ERROR_MESSAGES_TEXTS 16636 #undef ERROR_MESSAGES_TEXTS
16656 return error_messages_[reason]; 16637 return error_messages_[reason];
16657 } 16638 }
16658 16639
16659 16640
16660 } } // namespace v8::internal 16641 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/objects.h ('k') | src/objects-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698