| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 348 matching lines...) |
| 359 void SafeStackTraceFrameIterator::Advance() { | 359 void SafeStackTraceFrameIterator::Advance() { |
| 360 while (true) { | 360 while (true) { |
| 361 SafeJavaScriptFrameIterator::Advance(); | 361 SafeJavaScriptFrameIterator::Advance(); |
| 362 if (done()) return; | 362 if (done()) return; |
| 363 if (frame()->is_java_script()) return; | 363 if (frame()->is_java_script()) return; |
| 364 } | 364 } |
| 365 } | 365 } |
| 366 | 366 |
| 367 | 367 |
| 368 Code* StackFrame::GetSafepointData(Isolate* isolate, | 368 Code* StackFrame::GetSafepointData(Isolate* isolate, |
| 369 Address pc, | 369 Address inner_pointer, |
| 370 SafepointEntry* safepoint_entry, | 370 SafepointEntry* safepoint_entry, |
| 371 unsigned* stack_slots) { | 371 unsigned* stack_slots) { |
| 372 PcToCodeCache::PcToCodeCacheEntry* entry = | 372 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry = |
| 373 isolate->pc_to_code_cache()->GetCacheEntry(pc); | 373 isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer); |
| 374 if (!entry->safepoint_entry.is_valid()) { | 374 if (!entry->safepoint_entry.is_valid()) { |
| 375 entry->safepoint_entry = entry->code->GetSafepointEntry(pc); | 375 entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer); |
| 376 ASSERT(entry->safepoint_entry.is_valid()); | 376 ASSERT(entry->safepoint_entry.is_valid()); |
| 377 } else { | 377 } else { |
| 378 ASSERT(entry->safepoint_entry.Equals(entry->code->GetSafepointEntry(pc))); | 378 ASSERT(entry->safepoint_entry.Equals( |
| | 379 entry->code->GetSafepointEntry(inner_pointer))); |
| 379 } | 380 } |
| 380 | 381 |
| 381 // Fill in the results and return the code. | 382 // Fill in the results and return the code. |
| 382 Code* code = entry->code; | 383 Code* code = entry->code; |
| 383 *safepoint_entry = entry->safepoint_entry; | 384 *safepoint_entry = entry->safepoint_entry; |
| 384 *stack_slots = code->stack_slots(); | 385 *stack_slots = code->stack_slots(); |
| 385 return code; | 386 return code; |
| 386 } | 387 } |
| 387 | 388 |
| 388 | 389 |
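Note on the GetSafepointData hunk above: the rename from `pc` to `inner_pointer` leaves the behaviour unchanged. The pattern is a lazy-fill cache with a debug-mode consistency check: on a miss the safepoint entry is computed once and stored in the cache entry, and on a hit the stored value is asserted to equal a fresh computation. A minimal sketch of that pattern, using hypothetical stand-in types (`Entry`, `CacheSlot`, `Compute`) rather than V8's API:

```cpp
// Lazy-fill cache with a debug-mode consistency check.
// All names here are illustrative stand-ins, not V8 types.
#include <cassert>
#include <cstdint>

struct Entry {
  bool valid = false;
  uint32_t value = 0;
  bool Equals(const Entry& other) const { return value == other.value; }
};

struct CacheSlot {
  Entry safepoint_entry;  // filled lazily on first lookup
};

Entry Compute(uintptr_t inner_pointer) {
  // Stand-in for Code::GetSafepointEntry(inner_pointer).
  Entry e;
  e.valid = true;
  e.value = static_cast<uint32_t>(inner_pointer & 0xffu);
  return e;
}

Entry GetSafepointData(CacheSlot* slot, uintptr_t inner_pointer) {
  if (!slot->safepoint_entry.valid) {
    // Miss: compute once and remember the result.
    slot->safepoint_entry = Compute(inner_pointer);
    assert(slot->safepoint_entry.valid);
  } else {
    // Hit: the cached entry must agree with a recomputation.
    assert(slot->safepoint_entry.Equals(Compute(inner_pointer)));
  }
  return slot->safepoint_entry;
}
```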
| (...skipping 423 matching lines...) |
| 812 int* deopt_index) { | 813 int* deopt_index) { |
| 813 ASSERT(is_optimized()); | 814 ASSERT(is_optimized()); |
| 814 | 815 |
| 815 JSFunction* opt_function = JSFunction::cast(function()); | 816 JSFunction* opt_function = JSFunction::cast(function()); |
| 816 Code* code = opt_function->code(); | 817 Code* code = opt_function->code(); |
| 817 | 818 |
| 818 // The code object may have been replaced by lazy deoptimization. Fall | 819 // The code object may have been replaced by lazy deoptimization. Fall |
| 819 // back to a slow search in this case to find the original optimized | 820 // back to a slow search in this case to find the original optimized |
| 820 // code object. | 821 // code object. |
| 821 if (!code->contains(pc())) { | 822 if (!code->contains(pc())) { |
| 822 code = isolate()->pc_to_code_cache()->GcSafeFindCodeForPc(pc()); | 823 code = isolate()->inner_pointer_to_code_cache()-> |
| | 824 GcSafeFindCodeForInnerPointer(pc()); |
| 823 } | 825 } |
| 824 ASSERT(code != NULL); | 826 ASSERT(code != NULL); |
| 825 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); | 827 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); |
| 826 | 828 |
| 827 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc()); | 829 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc()); |
| 828 *deopt_index = safepoint_entry.deoptimization_index(); | 830 *deopt_index = safepoint_entry.deoptimization_index(); |
| 829 ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex); | 831 ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex); |
| 830 | 832 |
| 831 return DeoptimizationInputData::cast(code->deoptimization_data()); | 833 return DeoptimizationInputData::cast(code->deoptimization_data()); |
| 832 } | 834 } |
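Note on this hunk: as the in-code comment says, lazy deoptimization can replace the function's code object, so the frame's `pc()` is only trusted if it still lies inside `opt_function->code()`; otherwise the renamed `GcSafeFindCodeForInnerPointer` recovers the original optimized code by scanning code space. A rough sketch of that fast-path/slow-path split, with illustrative names (`CodeObject`, `FindCodeByInnerPointer`) that are not V8 API:

```cpp
// Fast path: the function's current code still contains the pc.
// Slow path: the code was replaced, so find the object that actually
// contains the return address. Names are illustrative only.
#include <cstddef>
#include <cstdint>

struct CodeObject {
  uintptr_t start;
  size_t size;
  bool contains(uintptr_t pc) const { return pc >= start && pc < start + size; }
};

const CodeObject* FindCodeByInnerPointer(uintptr_t /*pc*/) {
  // Stub standing in for the GC-safe code-space search (see the later hunk).
  return nullptr;
}

const CodeObject* CodeForOptimizedFrame(const CodeObject* function_code,
                                        uintptr_t pc) {
  if (function_code->contains(pc)) return function_code;  // fast path
  return FindCodeByInnerPointer(pc);  // code replaced by lazy deopt
}
```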
| (...skipping 315 matching lines...) |
| 1148 iterator_.Advance(); | 1150 iterator_.Advance(); |
| 1149 } | 1151 } |
| 1150 UNREACHABLE(); | 1152 UNREACHABLE(); |
| 1151 return NULL; | 1153 return NULL; |
| 1152 } | 1154 } |
| 1153 | 1155 |
| 1154 | 1156 |
| 1155 // ------------------------------------------------------------------------- | 1157 // ------------------------------------------------------------------------- |
| 1156 | 1158 |
| 1157 | 1159 |
| 1158 Code* PcToCodeCache::GcSafeCastToCode(HeapObject* object, Address pc) { | 1160 Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object, |
| | 1161 Address inner_pointer) { |
| 1159 Code* code = reinterpret_cast<Code*>(object); | 1162 Code* code = reinterpret_cast<Code*>(object); |
| 1160 ASSERT(code != NULL && code->contains(pc)); | 1163 ASSERT(code != NULL && code->contains(inner_pointer)); |
| 1161 return code; | 1164 return code; |
| 1162 } | 1165 } |
| 1163 | 1166 |
| 1164 | 1167 |
| 1165 static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) { | 1168 static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) { |
| 1166 MapWord map_word = object->map_word(); | 1169 MapWord map_word = object->map_word(); |
| 1167 Map* map = map_word.IsForwardingAddress() ? | 1170 Map* map = map_word.IsForwardingAddress() ? |
| 1168 map_word.ToForwardingAddress()->map() : map_word.ToMap(); | 1171 map_word.ToForwardingAddress()->map() : map_word.ToMap(); |
| 1169 return object->SizeFromMap(map); | 1172 return object->SizeFromMap(map); |
| 1170 } | 1173 } |
| 1171 | 1174 |
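Context for `GcSafeSizeOfCodeSpaceObject` above: it can run while a moving GC is in progress, when an object's map slot may hold a forwarding pointer to the object's new location instead of its map, so the size has to be read through whichever map is actually reachable. A simplified model under that assumption; the `MapWord` layout here is a hypothetical stand-in, not V8's real tagged encoding:

```cpp
// Simplified model: read the size through the forwarded copy's map when the
// map slot currently holds a forwarding pointer. Types are stand-ins.
#include <cstddef>

struct HeapObj;

struct Map { size_t instance_size; };

struct MapWord {
  bool is_forwarding = false;
  Map* map = nullptr;           // normal case: the object's map
  HeapObj* forwarded = nullptr; // set during a moving GC: new location
};

struct HeapObj {
  MapWord map_word;
};

size_t GcSafeSizeOf(const HeapObj* object) {
  const MapWord& mw = object->map_word;
  const Map* map = mw.is_forwarding ? mw.forwarded->map_word.map : mw.map;
  return map->instance_size;  // stand-in for SizeFromMap(map)
}
```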
| 1172 | 1175 |
| 1173 Code* PcToCodeCache::GcSafeFindCodeForPc(Address pc) { | 1176 Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer( |
| | 1177 Address inner_pointer) { |
| 1174 Heap* heap = isolate_->heap(); | 1178 Heap* heap = isolate_->heap(); |
| 1175 // Check if the pc points into a large object chunk. | 1179 // Check if the inner pointer points into a large object chunk. |
| 1176 LargePage* large_page = heap->lo_space()->FindPageContainingPc(pc); | 1180 LargePage* large_page = heap->lo_space()->FindPageContainingPc(inner_pointer); |
| 1177 if (large_page != NULL) return GcSafeCastToCode(large_page->GetObject(), pc); | 1181 if (large_page != NULL) { |
| | 1182 return GcSafeCastToCode(large_page->GetObject(), inner_pointer); |
| | 1183 } |
| 1178 | 1184 |
| 1179 // Iterate through the page until we reach the end or find an object starting | 1185 // Iterate through the page until we reach the end or find an object starting |
| 1180 // after the pc. | 1186 // after the inner pointer. |
| 1181 Page* page = Page::FromAddress(pc); | 1187 Page* page = Page::FromAddress(inner_pointer); |
| 1182 | 1188 |
| 1183 Address addr = page->skip_list()->StartFor(pc); | 1189 Address addr = page->skip_list()->StartFor(inner_pointer); |
| 1184 | 1190 |
| 1185 Address top = heap->code_space()->top(); | 1191 Address top = heap->code_space()->top(); |
| 1186 Address limit = heap->code_space()->limit(); | 1192 Address limit = heap->code_space()->limit(); |
| 1187 | 1193 |
| 1188 while (true) { | 1194 while (true) { |
| 1189 if (addr == top && addr != limit) { | 1195 if (addr == top && addr != limit) { |
| 1190 addr = limit; | 1196 addr = limit; |
| 1191 continue; | 1197 continue; |
| 1192 } | 1198 } |
| 1193 | 1199 |
| 1194 HeapObject* obj = HeapObject::FromAddress(addr); | 1200 HeapObject* obj = HeapObject::FromAddress(addr); |
| 1195 int obj_size = GcSafeSizeOfCodeSpaceObject(obj); | 1201 int obj_size = GcSafeSizeOfCodeSpaceObject(obj); |
| 1196 Address next_addr = addr + obj_size; | 1202 Address next_addr = addr + obj_size; |
| 1197 if (next_addr >= pc) return GcSafeCastToCode(obj, pc); | 1203 if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer); |
| 1198 addr = next_addr; | 1204 addr = next_addr; |
| 1199 } | 1205 } |
| 1200 } | 1206 } |
| 1201 | 1207 |
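Beyond the renames, note the boundary change in the loop above: the old code returned on `next_addr >= pc`, while the new code uses `next_addr > inner_pointer`, so an address equal to an object's end is now attributed to the object that starts there rather than the one that just ended. A minimal sketch of that scan over a contiguous run of objects, using illustrative types (`ObjInfo`, `FindContaining`) that are not V8 API:

```cpp
// Walk objects laid out back-to-back (as in a code-space page, starting from
// the skip-list start address) and return the first one whose end lies
// strictly past the inner pointer. Illustrative only.
#include <cstddef>
#include <cstdint>
#include <vector>

struct ObjInfo {
  uintptr_t start;
  size_t size;
};

const ObjInfo* FindContaining(const std::vector<ObjInfo>& objects,
                              uintptr_t inner_pointer) {
  for (const ObjInfo& obj : objects) {
    uintptr_t next_addr = obj.start + obj.size;
    // Strict '>' : if inner_pointer == next_addr, it belongs to the next object.
    if (next_addr > inner_pointer) return &obj;
  }
  return nullptr;  // not inside any object in this run
}
```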
| 1202 | 1208 |
| 1203 PcToCodeCache::PcToCodeCacheEntry* PcToCodeCache::GetCacheEntry(Address pc) { | 1209 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* |
| | 1210 InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) { |
| 1204 isolate_->counters()->pc_to_code()->Increment(); | 1211 isolate_->counters()->pc_to_code()->Increment(); |
| 1205 ASSERT(IsPowerOf2(kPcToCodeCacheSize)); | 1212 ASSERT(IsPowerOf2(kPcToCodeCacheSize)); |
| 1206 uint32_t hash = ComputeIntegerHash( | 1213 uint32_t hash = ComputeIntegerHash( |
| 1207 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(pc))); | 1214 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer))); |
| 1208 uint32_t index = hash & (kPcToCodeCacheSize - 1); | 1215 uint32_t index = hash & (kPcToCodeCacheSize - 1); |
| 1209 PcToCodeCacheEntry* entry = cache(index); | 1216 InnerPointerToCodeCacheEntry* entry = cache(index); |
| 1210 if (entry->pc == pc) { | 1217 if (entry->inner_pointer == inner_pointer) { |
| 1211 isolate_->counters()->pc_to_code_cached()->Increment(); | 1218 isolate_->counters()->pc_to_code_cached()->Increment(); |
| 1212 ASSERT(entry->code == GcSafeFindCodeForPc(pc)); | 1219 ASSERT(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer)); |
| 1213 } else { | 1220 } else { |
| 1214 // Because this code may be interrupted by a profiling signal that | 1221 // Because this code may be interrupted by a profiling signal that |
| 1215 // also queries the cache, we cannot update pc before the code has | 1222 // also queries the cache, we cannot update inner_pointer before the code |
| 1216 // been set. Otherwise, we risk trying to use a cache entry before | 1223 // has been set. Otherwise, we risk trying to use a cache entry before |
| 1217 // the code has been computed. | 1224 // the code has been computed. |
| 1218 entry->code = GcSafeFindCodeForPc(pc); | 1225 entry->code = GcSafeFindCodeForInnerPointer(inner_pointer); |
| 1219 entry->safepoint_entry.Reset(); | 1226 entry->safepoint_entry.Reset(); |
| 1220 entry->pc = pc; | 1227 entry->inner_pointer = inner_pointer; |
| 1221 } | 1228 } |
| 1222 return entry; | 1229 return entry; |
| 1223 } | 1230 } |
| 1224 | 1231 |
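The update order in `GetCacheEntry` is the subtle part of this hunk: because a profiling signal can interrupt the update and query the same cache, the payload (`code`) is written before the key (`inner_pointer`), so a reader that matches on the key never sees a missing or stale code pointer. A stripped-down sketch of that direct-mapped cache, with placeholder names, hash, and size (not V8's `kPcToCodeCacheSize` or `ComputeIntegerHash`):

```cpp
// Direct-mapped inner-pointer -> code cache; the key is published last so a
// reader that interrupts the update and hits on the key always sees a
// filled-in payload. All names and constants are placeholders.
#include <cstddef>
#include <cstdint>

struct Code { uintptr_t start = 0; };

static Code dummy_code;
const Code* SlowFindCode(uintptr_t /*inner_pointer*/) {
  return &dummy_code;  // stub for the GC-safe code-space search
}

struct CacheEntry {
  uintptr_t inner_pointer = 0;
  const Code* code = nullptr;
};

const size_t kCacheSize = 1024;  // power of two, so masking works as modulo
static CacheEntry cache[kCacheSize];

CacheEntry* GetCacheEntry(uintptr_t inner_pointer) {
  uint32_t hash = static_cast<uint32_t>(inner_pointer) * 2654435761u;  // cheap hash
  CacheEntry* entry = &cache[hash & (kCacheSize - 1)];
  if (entry->inner_pointer != inner_pointer) {
    entry->code = SlowFindCode(inner_pointer);  // 1) fill the payload first
    entry->inner_pointer = inner_pointer;       // 2) publish the key last
  }
  return entry;
}
```

This ordering guards against interruption by a signal handler on the same thread, as the comment describes; it is not presented here as a general lock-free protocol for concurrent writers.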
| 1225 | 1232 |
| 1226 // ------------------------------------------------------------------------- | 1233 // ------------------------------------------------------------------------- |
| 1227 | 1234 |
| 1228 int NumRegs(RegList reglist) { | 1235 int NumRegs(RegList reglist) { |
| 1229 int n = 0; | 1236 int n = 0; |
| 1230 while (reglist != 0) { | 1237 while (reglist != 0) { |
| (...skipping 56 matching lines...) |
| 1287 ZoneList<StackFrame*> list(10); | 1294 ZoneList<StackFrame*> list(10); |
| 1288 for (StackFrameIterator it; !it.done(); it.Advance()) { | 1295 for (StackFrameIterator it; !it.done(); it.Advance()) { |
| 1289 StackFrame* frame = AllocateFrameCopy(it.frame()); | 1296 StackFrame* frame = AllocateFrameCopy(it.frame()); |
| 1290 list.Add(frame); | 1297 list.Add(frame); |
| 1291 } | 1298 } |
| 1292 return list.ToVector(); | 1299 return list.ToVector(); |
| 1293 } | 1300 } |
| 1294 | 1301 |
| 1295 | 1302 |
| 1296 } } // namespace v8::internal | 1303 } } // namespace v8::internal |