OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2168 matching lines...)
2179 // For now this trick is only applied to fixed arrays in new and paged space. | 2179 // For now this trick is only applied to fixed arrays in new and paged space. |
2180 ASSERT(!HEAP->lo_space()->Contains(elms)); | 2180 ASSERT(!HEAP->lo_space()->Contains(elms)); |
2181 | 2181 |
2182 const int len = elms->length(); | 2182 const int len = elms->length(); |
2183 | 2183 |
2184 ASSERT(to_trim < len); | 2184 ASSERT(to_trim < len); |
2185 | 2185 |
2186 Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim); | 2186 Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim); |
2187 | 2187 |
2188 if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) { | 2188 if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) { |
2189 ZapEndOfFixedArray(new_end, to_trim); | 2189 ZapEndOfFixedArray(new_end, to_trim); |
2190 } | 2190 } |
2191 | 2191 |
2192 int size_delta = to_trim * kPointerSize; | 2192 int size_delta = to_trim * kPointerSize; |
2193 | 2193 |
2194 // Technically in new space this write might be omitted (except for | 2194 // Technically in new space this write might be omitted (except for |
2195 // debug mode which iterates through the heap), but to be safe | 2195 // debug mode which iterates through the heap), but to be safe |
2196 // we still do it. | 2196 // we still do it. |
2197 heap->CreateFillerObjectAt(new_end, size_delta); | 2197 heap->CreateFillerObjectAt(new_end, size_delta); |
2198 | 2198 |
2199 elms->set_length(len - to_trim); | 2199 elms->set_length(len - to_trim); |
(...skipping 6769 matching lines...)
8969 CompilationInfoWithZone info(shared); | 8969 CompilationInfoWithZone info(shared); |
8970 return CompileLazyHelper(&info, flag); | 8970 return CompileLazyHelper(&info, flag); |
8971 } | 8971 } |
8972 | 8972 |
8973 | 8973 |
8974 void SharedFunctionInfo::AddToOptimizedCodeMap( | 8974 void SharedFunctionInfo::AddToOptimizedCodeMap( |
8975 Handle<SharedFunctionInfo> shared, | 8975 Handle<SharedFunctionInfo> shared, |
8976 Handle<Context> native_context, | 8976 Handle<Context> native_context, |
8977 Handle<Code> code, | 8977 Handle<Code> code, |
8978 Handle<FixedArray> literals) { | 8978 Handle<FixedArray> literals) { |
| 8979 CALL_HEAP_FUNCTION_VOID( |
| 8980 shared->GetIsolate(), |
| 8981 shared->AddToOptimizedCodeMap(*native_context, *code, *literals)); |
| 8982 } |
| 8983 |
| 8984 |
| 8985 MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context, |
| 8986 Code* code, |
| 8987 FixedArray* literals) { |
8979 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | 8988 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
8980 ASSERT(native_context->IsNativeContext()); | 8989 ASSERT(native_context->IsNativeContext()); |
8981 STATIC_ASSERT(kEntryLength == 3); | 8990 STATIC_ASSERT(kEntryLength == 3); |
8982 Object* value = shared->optimized_code_map(); | 8991 Heap* heap = GetHeap(); |
8983 Handle<FixedArray> new_code_map; | 8992 FixedArray* new_code_map; |
| 8993 Object* value = optimized_code_map(); |
8984 if (value->IsSmi()) { | 8994 if (value->IsSmi()) { |
8985 // No optimized code map. | 8995 // No optimized code map. |
8986 ASSERT_EQ(0, Smi::cast(value)->value()); | 8996 ASSERT_EQ(0, Smi::cast(value)->value()); |
8987 // Create 3 entries per context {context, code, literals}. | 8997 // Create 3 entries per context {context, code, literals}. |
8988 new_code_map = FACTORY->NewFixedArray(kEntryLength); | 8998 MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength); |
8989 new_code_map->set(0, *native_context); | 8999 if (!maybe->To(&new_code_map)) return maybe; |
8990 new_code_map->set(1, *code); | 9000 new_code_map->set(kEntriesStart + 0, native_context); |
8991 new_code_map->set(2, *literals); | 9001 new_code_map->set(kEntriesStart + 1, code); |
| 9002 new_code_map->set(kEntriesStart + 2, literals); |
8992 } else { | 9003 } else { |
8993 // Copy old map and append one new entry. | 9004 // Copy old map and append one new entry. |
8994 Handle<FixedArray> old_code_map(FixedArray::cast(value)); | 9005 FixedArray* old_code_map = FixedArray::cast(value); |
8995 ASSERT_EQ(-1, shared->SearchOptimizedCodeMap(*native_context)); | 9006 ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context)); |
8996 int old_length = old_code_map->length(); | 9007 int old_length = old_code_map->length(); |
8997 int new_length = old_length + kEntryLength; | 9008 int new_length = old_length + kEntryLength; |
8998 new_code_map = FACTORY->NewFixedArray(new_length); | 9009 MaybeObject* maybe = old_code_map->CopySize(new_length); |
8999 old_code_map->CopyTo(0, *new_code_map, 0, old_length); | 9010 if (!maybe->To(&new_code_map)) return maybe; |
9000 new_code_map->set(old_length, *native_context); | 9011 new_code_map->set(old_length + 0, native_context); |
9001 new_code_map->set(old_length + 1, *code); | 9012 new_code_map->set(old_length + 1, code); |
9002 new_code_map->set(old_length + 2, *literals); | 9013 new_code_map->set(old_length + 2, literals); |
| 9014 // Zap the old map for the sake of the heap verifier. |
| 9015 if (Heap::ShouldZapGarbage()) ZapOptimizedCodeMap(); |
9003 } | 9016 } |
9004 #ifdef DEBUG | 9017 #ifdef DEBUG |
9005 for (int i = 0; i < new_code_map->length(); i += kEntryLength) { | 9018 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { |
9006 ASSERT(new_code_map->get(i)->IsNativeContext()); | 9019 ASSERT(new_code_map->get(i)->IsNativeContext()); |
9007 ASSERT(new_code_map->get(i + 1)->IsCode()); | 9020 ASSERT(new_code_map->get(i + 1)->IsCode()); |
9008 ASSERT(Code::cast(new_code_map->get(i + 1))->kind() == | 9021 ASSERT(Code::cast(new_code_map->get(i + 1))->kind() == |
9009 Code::OPTIMIZED_FUNCTION); | 9022 Code::OPTIMIZED_FUNCTION); |
9010 ASSERT(new_code_map->get(i + 2)->IsFixedArray()); | 9023 ASSERT(new_code_map->get(i + 2)->IsFixedArray()); |
9011 } | 9024 } |
9012 #endif | 9025 #endif |
9013 shared->set_optimized_code_map(*new_code_map); | 9026 set_optimized_code_map(new_code_map); |
| 9027 return new_code_map; |
9014 } | 9028 } |
9015 | 9029 |
9016 | 9030 |
9017 void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function, | 9031 void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function, |
9018 int index) { | 9032 int index) { |
9019 ASSERT(index > 0); | 9033 ASSERT(index > kEntriesStart); |
9020 ASSERT(optimized_code_map()->IsFixedArray()); | |
9021 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 9034 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
9022 if (!bound()) { | 9035 if (!bound()) { |
9023 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); | 9036 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); |
9024 ASSERT(cached_literals != NULL); | 9037 ASSERT(cached_literals != NULL); |
9025 function->set_literals(cached_literals); | 9038 function->set_literals(cached_literals); |
9026 } | 9039 } |
9027 Code* code = Code::cast(code_map->get(index)); | 9040 Code* code = Code::cast(code_map->get(index)); |
9028 ASSERT(code != NULL); | 9041 ASSERT(code != NULL); |
9029 ASSERT(function->context()->native_context() == code_map->get(index - 1)); | 9042 ASSERT(function->context()->native_context() == code_map->get(index - 1)); |
9030 function->ReplaceCode(code); | 9043 function->ReplaceCode(code); |
9031 } | 9044 } |
9032 | 9045 |
9033 | 9046 |
9034 void SharedFunctionInfo::ClearOptimizedCodeMap(const char* reason) { | 9047 void SharedFunctionInfo::ClearOptimizedCodeMap() { |
9035 if (!optimized_code_map()->IsSmi()) { | 9048 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
9036 if (FLAG_trace_opt) { | 9049 |
9037 PrintF("[clearing entire optimizing code map (%s) for ", reason); | 9050 // If the next map link slot is already used then the function was |
9038 ShortPrint(); | 9051 // enqueued with code flushing and we remove it now. |
9039 PrintF("]\n"); | 9052 if (!code_map->get(kNextMapIndex)->IsUndefined()) { |
9040 } | 9053 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher(); |
9041 set_optimized_code_map(Smi::FromInt(0)); | 9054 flusher->EvictOptimizedCodeMap(this); |
9042 } | 9055 } |
| 9056 |
| 9057 ASSERT(code_map->get(kNextMapIndex)->IsUndefined()); |
| 9058 set_optimized_code_map(Smi::FromInt(0)); |
9043 } | 9059 } |
9044 | 9060 |
9045 | 9061 |
9046 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, | 9062 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, |
9047 const char* reason) { | 9063 const char* reason) { |
9048 if (optimized_code_map()->IsSmi()) return; | 9064 if (optimized_code_map()->IsSmi()) return; |
9049 | 9065 |
9050 int i; | 9066 int i; |
9051 bool removed_entry = false; | 9067 bool removed_entry = false; |
9052 FixedArray* code_map = FixedArray::cast(optimized_code_map()); | 9068 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
9053 for (i = 0; i < code_map->length(); i += kEntryLength) { | 9069 for (i = kEntriesStart; i < code_map->length(); i += kEntryLength) { |
9054 ASSERT(code_map->get(i)->IsNativeContext()); | 9070 ASSERT(code_map->get(i)->IsNativeContext()); |
9055 if (Code::cast(code_map->get(i + 1)) == optimized_code) { | 9071 if (Code::cast(code_map->get(i + 1)) == optimized_code) { |
9056 if (FLAG_trace_opt) { | 9072 if (FLAG_trace_opt) { |
9057 PrintF("[clearing optimizing code map (%s) for ", reason); | 9073 PrintF("[evicting entry from optimizing code map (%s) for ", reason); |
9058 ShortPrint(); | 9074 ShortPrint(); |
9059 PrintF("]\n"); | 9075 PrintF("]\n"); |
9060 } | 9076 } |
9061 removed_entry = true; | 9077 removed_entry = true; |
9062 break; | 9078 break; |
9063 } | 9079 } |
9064 } | 9080 } |
9065 while (i < (code_map->length() - kEntryLength)) { | 9081 while (i < (code_map->length() - kEntryLength)) { |
9066 code_map->set(i, code_map->get(i + kEntryLength)); | 9082 code_map->set(i, code_map->get(i + kEntryLength)); |
9067 code_map->set(i + 1, code_map->get(i + 1 + kEntryLength)); | 9083 code_map->set(i + 1, code_map->get(i + 1 + kEntryLength)); |
9068 code_map->set(i + 2, code_map->get(i + 2 + kEntryLength)); | 9084 code_map->set(i + 2, code_map->get(i + 2 + kEntryLength)); |
9069 i += kEntryLength; | 9085 i += kEntryLength; |
9070 } | 9086 } |
9071 if (removed_entry) { | 9087 if (removed_entry) { |
9072 if (code_map->length() > kEntryLength) { | 9088 // Always trim even when array is cleared because of heap verifier. |
9073 RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength); | 9089 RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength); |
9074 } else { | 9090 if (code_map->length() == kEntriesStart) { |
9075 ClearOptimizedCodeMap(reason); | 9091 ClearOptimizedCodeMap(); |
9076 } | 9092 } |
9077 } | 9093 } |
9078 } | 9094 } |
9079 | 9095 |
9080 | 9096 |
| 9097 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { |
| 9098 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
| 9099 ASSERT(shrink_by % kEntryLength == 0); |
| 9100 ASSERT(shrink_by <= code_map->length() - kEntriesStart); |
| 9101 // Always trim even when array is cleared because of heap verifier. |
| 9102 RightTrimFixedArray<FROM_GC>(GetHeap(), code_map, shrink_by); |
| 9103 if (code_map->length() == kEntriesStart) { |
| 9104 ClearOptimizedCodeMap(); |
| 9105 } |
| 9106 } |
| 9107 |
| 9108 |
| 9109 void SharedFunctionInfo::ZapOptimizedCodeMap() { |
| 9110 FixedArray* code_map = FixedArray::cast(optimized_code_map()); |
| 9111 MemsetPointer(code_map->data_start(), |
| 9112 GetHeap()->the_hole_value(), |
| 9113 code_map->length()); |
| 9114 } |
| 9115 |
| 9116 |
9081 bool JSFunction::CompileLazy(Handle<JSFunction> function, | 9117 bool JSFunction::CompileLazy(Handle<JSFunction> function, |
9082 ClearExceptionFlag flag) { | 9118 ClearExceptionFlag flag) { |
9083 bool result = true; | 9119 bool result = true; |
9084 if (function->shared()->is_compiled()) { | 9120 if (function->shared()->is_compiled()) { |
9085 function->ReplaceCode(function->shared()->code()); | 9121 function->ReplaceCode(function->shared()->code()); |
9086 function->shared()->set_code_age(0); | 9122 function->shared()->set_code_age(0); |
9087 } else { | 9123 } else { |
9088 ASSERT(function->shared()->allows_lazy_compilation()); | 9124 ASSERT(function->shared()->allows_lazy_compilation()); |
9089 CompilationInfoWithZone info(function); | 9125 CompilationInfoWithZone info(function); |
9090 result = CompileLazyHelper(&info, flag); | 9126 result = CompileLazyHelper(&info, flag); |
(...skipping 619 matching lines...)
9710 } | 9746 } |
9711 | 9747 |
9712 | 9748 |
9713 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context) { | 9749 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context) { |
9714 ASSERT(native_context->IsNativeContext()); | 9750 ASSERT(native_context->IsNativeContext()); |
9715 if (!FLAG_cache_optimized_code) return -1; | 9751 if (!FLAG_cache_optimized_code) return -1; |
9716 Object* value = optimized_code_map(); | 9752 Object* value = optimized_code_map(); |
9717 if (!value->IsSmi()) { | 9753 if (!value->IsSmi()) { |
9718 FixedArray* optimized_code_map = FixedArray::cast(value); | 9754 FixedArray* optimized_code_map = FixedArray::cast(value); |
9719 int length = optimized_code_map->length(); | 9755 int length = optimized_code_map->length(); |
9720 for (int i = 0; i < length; i += 3) { | 9756 for (int i = kEntriesStart; i < length; i += kEntryLength) { |
9721 if (optimized_code_map->get(i) == native_context) { | 9757 if (optimized_code_map->get(i) == native_context) { |
9722 return i + 1; | 9758 return i + 1; |
9723 } | 9759 } |
9724 } | 9760 } |
9725 if (FLAG_trace_opt) { | 9761 if (FLAG_trace_opt) { |
9726 PrintF("[didn't find optimized code in optimized code map for "); | 9762 PrintF("[didn't find optimized code in optimized code map for "); |
9727 ShortPrint(); | 9763 ShortPrint(); |
9728 PrintF("]\n"); | 9764 PrintF("]\n"); |
9729 } | 9765 } |
9730 } | 9766 } |
(...skipping 5658 matching lines...)
15389 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER); | 15425 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER); |
15390 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER); | 15426 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER); |
15391 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER); | 15427 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER); |
15392 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER); | 15428 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER); |
15393 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER); | 15429 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER); |
15394 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER); | 15430 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER); |
15395 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER); | 15431 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER); |
15396 } | 15432 } |
15397 | 15433 |
15398 } } // namespace v8::internal | 15434 } } // namespace v8::internal |