| OLD | NEW |
| (Empty) |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "src/lithium.h" | |
| 6 | |
| 7 #include "src/scopes.h" | |
| 8 | |
| 9 #if V8_TARGET_ARCH_IA32 | |
| 10 #include "src/ia32/lithium-ia32.h" // NOLINT | |
| 11 #include "src/ia32/lithium-codegen-ia32.h" // NOLINT | |
| 12 #elif V8_TARGET_ARCH_X64 | |
| 13 #include "src/x64/lithium-x64.h" // NOLINT | |
| 14 #include "src/x64/lithium-codegen-x64.h" // NOLINT | |
| 15 #elif V8_TARGET_ARCH_ARM | |
| 16 #include "src/arm/lithium-arm.h" // NOLINT | |
| 17 #include "src/arm/lithium-codegen-arm.h" // NOLINT | |
| 18 #elif V8_TARGET_ARCH_PPC | |
| 19 #include "src/ppc/lithium-ppc.h" // NOLINT | |
| 20 #include "src/ppc/lithium-codegen-ppc.h" // NOLINT | |
| 21 #elif V8_TARGET_ARCH_MIPS | |
| 22 #include "src/mips/lithium-mips.h" // NOLINT | |
| 23 #include "src/mips/lithium-codegen-mips.h" // NOLINT | |
| 24 #elif V8_TARGET_ARCH_ARM64 | |
| 25 #include "src/arm64/lithium-arm64.h" // NOLINT | |
| 26 #include "src/arm64/lithium-codegen-arm64.h" // NOLINT | |
| 27 #elif V8_TARGET_ARCH_MIPS64 | |
| 28 #include "src/mips64/lithium-mips64.h" // NOLINT | |
| 29 #include "src/mips64/lithium-codegen-mips64.h" // NOLINT | |
| 30 #elif V8_TARGET_ARCH_X87 | |
| 31 #include "src/x87/lithium-x87.h" // NOLINT | |
| 32 #include "src/x87/lithium-codegen-x87.h" // NOLINT | |
| 33 #else | |
| 34 #error "Unknown architecture." | |
| 35 #endif | |
| 36 | |
| 37 namespace v8 { | |
| 38 namespace internal { | |
| 39 | |
| 40 | |
// Writes a human-readable description of this operand to |stream|.
// The format depends on the operand kind, e.g. "v3(=%s)" for an
// unallocated operand fixed to a register, "[stack:5]" for a stack
// slot, or "(0)" for an invalid operand.
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      // A fixed stack slot has no extended policy; print it and stop here.
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          // Range-check the register code so printing never indexes an
          // invalid entry in the register-name table.
          if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                Register::from_code(reg_index).ToString();
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                DoubleRegister::from_code(reg_index).ToString();
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", Register::from_code(reg_index).ToString());
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", DoubleRegister::from_code(reg_index).ToString());
      }
      break;
    }
  }
}
| 125 | |
| 126 | |
// Per-(kind, size) singleton array of pre-built operands, lazily
// created by SetUpCache() and released by TearDownCache().
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;
| 130 | |
| 131 | |
| 132 template<LOperand::Kind kOperandKind, int kNumCachedOperands> | |
| 133 void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() { | |
| 134 if (cache) return; | |
| 135 cache = new LSubKindOperand[kNumCachedOperands]; | |
| 136 for (int i = 0; i < kNumCachedOperands; i++) { | |
| 137 cache[i].ConvertTo(kOperandKind, i); | |
| 138 } | |
| 139 } | |
| 140 | |
| 141 | |
// Releases the operand cache for this sub-kind; SetUpCache() may
// rebuild it later.
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}
| 147 | |
| 148 | |
// Builds the operand caches for every operand kind declared in
// LITHIUM_OPERAND_LIST by expanding one L<name>::SetUpCache() call
// per list entry.
void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}
| 154 | |
| 155 | |
// Tears down every operand cache set up by SetUpCaches(), one
// L<name>::TearDownCache() call per LITHIUM_OPERAND_LIST entry.
void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}
| 161 | |
| 162 | |
| 163 bool LParallelMove::IsRedundant() const { | |
| 164 for (int i = 0; i < move_operands_.length(); ++i) { | |
| 165 if (!move_operands_[i].IsRedundant()) return false; | |
| 166 } | |
| 167 return true; | |
| 168 } | |
| 169 | |
| 170 | |
| 171 void LParallelMove::PrintDataTo(StringStream* stream) const { | |
| 172 bool first = true; | |
| 173 for (int i = 0; i < move_operands_.length(); ++i) { | |
| 174 if (!move_operands_[i].IsEliminated()) { | |
| 175 LOperand* source = move_operands_[i].source(); | |
| 176 LOperand* destination = move_operands_[i].destination(); | |
| 177 if (!first) stream->Add(" "); | |
| 178 first = false; | |
| 179 if (source->Equals(destination)) { | |
| 180 destination->PrintTo(stream); | |
| 181 } else { | |
| 182 destination->PrintTo(stream); | |
| 183 stream->Add(" = "); | |
| 184 source->PrintTo(stream); | |
| 185 } | |
| 186 stream->Add(";"); | |
| 187 } | |
| 188 } | |
| 189 } | |
| 190 | |
| 191 | |
| 192 void LEnvironment::PrintTo(StringStream* stream) { | |
| 193 stream->Add("[id=%d|", ast_id().ToInt()); | |
| 194 if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) { | |
| 195 stream->Add("deopt_id=%d|", deoptimization_index()); | |
| 196 } | |
| 197 stream->Add("parameters=%d|", parameter_count()); | |
| 198 stream->Add("arguments_stack_height=%d|", arguments_stack_height()); | |
| 199 for (int i = 0; i < values_.length(); ++i) { | |
| 200 if (i != 0) stream->Add(";"); | |
| 201 if (values_[i] == NULL) { | |
| 202 stream->Add("[hole]"); | |
| 203 } else { | |
| 204 values_[i]->PrintTo(stream); | |
| 205 } | |
| 206 } | |
| 207 stream->Add("]"); | |
| 208 } | |
| 209 | |
| 210 | |
// Records |op| as holding a tagged pointer at this safepoint.
// Incoming arguments live at negative stack-slot indices and are
// deliberately not recorded.
void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  // Doubles can never hold tagged pointers.
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}
| 217 | |
| 218 | |
| 219 void LPointerMap::RemovePointer(LOperand* op) { | |
| 220 // Do not record arguments as pointers. | |
| 221 if (op->IsStackSlot() && op->index() < 0) return; | |
| 222 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot()); | |
| 223 for (int i = 0; i < pointer_operands_.length(); ++i) { | |
| 224 if (pointer_operands_[i]->Equals(op)) { | |
| 225 pointer_operands_.Remove(i); | |
| 226 --i; | |
| 227 } | |
| 228 } | |
| 229 } | |
| 230 | |
| 231 | |
// Records |op| as holding an untagged value at this safepoint.
// Mirrors RecordPointer(): argument slots (negative indices) and
// double operands are excluded.
void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}
| 238 | |
| 239 | |
| 240 void LPointerMap::PrintTo(StringStream* stream) { | |
| 241 stream->Add("{"); | |
| 242 for (int i = 0; i < pointer_operands_.length(); ++i) { | |
| 243 if (i != 0) stream->Add(";"); | |
| 244 pointer_operands_[i]->PrintTo(stream); | |
| 245 } | |
| 246 stream->Add("}"); | |
| 247 } | |
| 248 | |
| 249 | |
| 250 int StackSlotOffset(int index) { | |
| 251 if (index >= 0) { | |
| 252 // Local or spill slot. Skip the frame pointer, function, and | |
| 253 // context in the fixed part of the frame. | |
| 254 return -(index + 1) * kPointerSize - | |
| 255 StandardFrameConstants::kFixedFrameSizeFromFp; | |
| 256 } else { | |
| 257 // Incoming parameter. Skip the return address. | |
| 258 return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize; | |
| 259 } | |
| 260 } | |
| 261 | |
| 262 | |
// Creates an empty chunk for |graph|. The integer arguments to the
// list members are initial capacities, allocated in |info|'s zone.
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      inlined_functions_(1, info->zone()),
      deprecation_dependencies_(32, info->zone()),
      stability_dependencies_(8, info->zone()) {}
| 272 | |
| 273 | |
| 274 LLabel* LChunk::GetLabel(int block_id) const { | |
| 275 HBasicBlock* block = graph_->blocks()->at(block_id); | |
| 276 int first_instruction = block->first_instruction_index(); | |
| 277 return LLabel::cast(instructions_[first_instruction]); | |
| 278 } | |
| 279 | |
| 280 | |
| 281 int LChunk::LookupDestination(int block_id) const { | |
| 282 LLabel* cur = GetLabel(block_id); | |
| 283 while (cur->replacement() != NULL) { | |
| 284 cur = cur->replacement(); | |
| 285 } | |
| 286 return cur->block_id(); | |
| 287 } | |
| 288 | |
// Returns the assembler label for |block_id|. The block must not have
// a replacement; callers are expected to resolve replacements (e.g.
// via LookupDestination) first.
Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}
| 294 | |
| 295 | |
| 296 void LChunk::MarkEmptyBlocks() { | |
| 297 LPhase phase("L_Mark empty blocks", this); | |
| 298 for (int i = 0; i < graph()->blocks()->length(); ++i) { | |
| 299 HBasicBlock* block = graph()->blocks()->at(i); | |
| 300 int first = block->first_instruction_index(); | |
| 301 int last = block->last_instruction_index(); | |
| 302 LInstruction* first_instr = instructions()->at(first); | |
| 303 LInstruction* last_instr = instructions()->at(last); | |
| 304 | |
| 305 LLabel* label = LLabel::cast(first_instr); | |
| 306 if (last_instr->IsGoto()) { | |
| 307 LGoto* goto_instr = LGoto::cast(last_instr); | |
| 308 if (label->IsRedundant() && | |
| 309 !label->is_loop_header()) { | |
| 310 bool can_eliminate = true; | |
| 311 for (int i = first + 1; i < last && can_eliminate; ++i) { | |
| 312 LInstruction* cur = instructions()->at(i); | |
| 313 if (cur->IsGap()) { | |
| 314 LGap* gap = LGap::cast(cur); | |
| 315 if (!gap->IsRedundant()) { | |
| 316 can_eliminate = false; | |
| 317 } | |
| 318 } else { | |
| 319 can_eliminate = false; | |
| 320 } | |
| 321 } | |
| 322 if (can_eliminate) { | |
| 323 label->set_replacement(GetLabel(goto_instr->block_id())); | |
| 324 } | |
| 325 } | |
| 326 } | |
| 327 } | |
| 328 } | |
| 329 | |
| 330 | |
// Appends |instr| to the instruction stream, paired with a fresh gap.
// The gap is placed before a control instruction but after an ordinary
// one; |index| records the instruction's own stream position for the
// pointer map in both arrangements.
// NOTE(review): presumably the gap ordering ensures parallel moves run
// on the correct side of a control transfer — confirm against the
// register allocator's gap-resolution code.
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}
| 349 | |
| 350 | |
// Returns the constant operand whose index is |constant|'s hydrogen
// value id (Create may return a cached instance for small ids).
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}
| 354 | |
| 355 | |
// Maps a parameter index (receiver = 0, first parameter = 1, ...) to
// its stack-slot index; the result is always negative.
int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->num_parameters() - 1;

  DCHECK(result < 0);
  return result;
}
| 366 | |
| 367 | |
| 368 // A parameter relative to ebp in the arguments stub. | |
| 369 int LChunk::ParameterAt(int index) { | |
| 370 DCHECK(-1 <= index); // -1 is the receiver. | |
| 371 return (1 + info()->scope()->num_parameters() - index) * | |
| 372 kPointerSize; | |
| 373 } | |
| 374 | |
| 375 | |
// Returns the gap at stream position |index|; the instruction there
// must actually be a gap (see IsGapAt).
LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}
| 379 | |
| 380 | |
// True when the instruction at stream position |index| is a gap.
bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}
| 384 | |
| 385 | |
| 386 int LChunk::NearestGapPos(int index) const { | |
| 387 while (!IsGapAt(index)) index--; | |
| 388 return index; | |
| 389 } | |
| 390 | |
| 391 | |
| 392 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) { | |
| 393 GetGapAt(index)->GetOrCreateParallelMove( | |
| 394 LGap::START, zone())->AddMove(from, to, zone()); | |
| 395 } | |
| 396 | |
| 397 | |
// Maps a constant operand back to its HConstant; the operand's index
// is the hydrogen value id (see DefineConstantOperand).
HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}
| 401 | |
| 402 | |
// Returns the representation of the hydrogen value behind a constant
// operand (tagged, smi, double, ...).
Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}
| 407 | |
| 408 | |
// Registers |code| as weakly depending on |object|: a weak cell for
// the code is inserted into the heap's weak-object-to-code table, in
// the kWeakCodeGroup dependency group for |object|.
static void AddWeakObjectToCodeDependency(Isolate* isolate,
                                          Handle<HeapObject> object,
                                          Handle<Code> code) {
  Handle<WeakCell> cell = Code::WeakCellFor(code);
  Heap* heap = isolate->heap();
  Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
  dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
  heap->AddWeakObjectToCodeDependency(object, dep);
}
| 418 | |
| 419 | |
// Scans |code|'s relocation info for embedded objects and cells that
// are held weakly, and registers the corresponding dependencies:
// maps go through the map's dependent-code list, everything else
// through the heap's weak-object-to-code table.
void LChunk::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) const {
  DCHECK(code->is_optimized_code());
  ZoneList<Handle<Map> > maps(1, zone());
  ZoneList<Handle<HeapObject> > objects(1, zone());
  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL);
  // Collect every weakly-held cell and embedded object, keeping maps
  // separate from other heap objects.
  for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::CELL &&
        code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
      objects.Add(Handle<HeapObject>(it.rinfo()->target_cell()), zone());
    } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
               code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
      if (it.rinfo()->target_object()->IsMap()) {
        Handle<Map> map(Map::cast(it.rinfo()->target_object()));
        maps.Add(map, zone());
      } else {
        Handle<HeapObject> object(
            HeapObject::cast(it.rinfo()->target_object()));
        objects.Add(object, zone());
      }
    }
  }
  for (int i = 0; i < maps.length(); i++) {
    // A map that gains its first weak-code dependency is also added to
    // the heap's retained-map list.
    if (maps.at(i)->dependent_code()->number_of_entries(
            DependentCode::kWeakCodeGroup) == 0) {
      isolate()->heap()->AddRetainedMap(maps.at(i));
    }
    Map::AddDependentCode(maps.at(i), DependentCode::kWeakCodeGroup, code);
  }
  for (int i = 0; i < objects.length(); i++) {
    AddWeakObjectToCodeDependency(isolate(), objects.at(i), code);
  }
  code->set_can_have_weak_objects(true);
}
| 455 | |
| 456 | |
// Installs all dependencies collected during compilation on the
// finished |code| object: map deprecation and stability dependencies
// recorded by this chunk, the compilation-info dependencies, and the
// weak objects embedded in the code. No-op for non-optimized code.
void LChunk::CommitDependencies(Handle<Code> code) const {
  if (!code->is_optimized_code()) return;
  HandleScope scope(isolate());

  for (Handle<Map> map : deprecation_dependencies_) {
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    // Deoptimize |code| if this map is later deprecated.
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (Handle<Map> map : stability_dependencies_) {
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    // Deoptimize |code| if this map later loses stability.
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->dependencies()->Commit(code);
  RegisterWeakObjectsInOptimizedCode(code);
}
| 476 | |
| 477 | |
// Builds a lithium chunk for |graph| and runs register allocation.
// Returns NULL (after flagging an abort on the compilation info) when
// the graph has too many virtual registers, when chunk building fails,
// or when allocation runs out of registers.
LChunk* LChunk::NewChunk(HGraph* graph) {
  // Lowering and allocation must not create handles or trigger GC.
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  // Remember which double registers were used so the prologue can
  // reserve spill slots for them (see set_allocated_double_registers).
  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}
| 503 | |
| 504 | |
// Generates machine code for this chunk. Returns the finished Code
// object, or a null handle when code generation is aborted.
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  // Code serializer only takes unoptimized code.
  DCHECK(!info()->will_serialize());
  LCodeGen generator(this, &assembler, info());

  // Install label replacements for skippable empty blocks before
  // emitting any code.
  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&assembler, info());
    generator.FinishCode(code);
    CommitDependencies(code);
    code->set_is_crankshafted(true);
    void* jit_handler_data =
        assembler.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(info()->isolate(),
                   CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

    CodeGenerator::PrintCode(code, info());
    DCHECK(!(info()->isolate()->serializer_enabled() &&
             info()->GetMustNotHaveEagerFrame() &&
             generator.NeedsEagerFrame()));
    return code;
  }
  // Generation failed: tell the assembler so partially-emitted code is
  // discarded, and signal failure to the caller.
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}
| 537 | |
| 538 | |
| 539 void LChunk::set_allocated_double_registers(BitVector* allocated_registers) { | |
| 540 allocated_double_registers_ = allocated_registers; | |
| 541 BitVector* doubles = allocated_double_registers(); | |
| 542 BitVector::Iterator iterator(doubles); | |
| 543 while (!iterator.Done()) { | |
| 544 if (info()->saves_caller_doubles()) { | |
| 545 if (kDoubleSize == kPointerSize * 2) { | |
| 546 spill_slot_count_ += 2; | |
| 547 } else { | |
| 548 spill_slot_count_++; | |
| 549 } | |
| 550 } | |
| 551 iterator.Advance(); | |
| 552 } | |
| 553 } | |
| 554 | |
| 555 | |
// Permanently aborts optimization for |reason| and marks this builder
// as aborted.
void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}
| 560 | |
| 561 | |
// Aborts this compilation attempt for |reason| but leaves optimization
// eligible for a later retry (unlike Abort, which is permanent).
void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}
| 566 | |
| 567 | |
// Translates a (possibly nested) hydrogen environment into a chain of
// LEnvironments for deoptimization. Outer frames are translated first
// (recursively); each environment's value list is filled with operands
// for the live hydrogen values, using materialization-marker holes for
// captured/arguments objects, which are then expanded via
// AddObjectToMaterialize. Returns NULL for a NULL |hydrogen_env|.
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);
  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         hydrogen_env->frame_type() != JS_FUNCTION);

  // Non-JS_FUNCTION frames omit the special slots (context etc.).
  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects)
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      // Placeholder; the object's fields are appended below.
      op = LEnvironment::materialization_marker();
    } else {
      // Keep the value alive anywhere for the deopt point.
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  // Only JS function frames advance the outgoing-argument index.
  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}
| 633 | |
| 634 | |
| 635 // Add an object to the supplied environment and object materialization list. | |
| 636 // | |
| 637 // Notes: | |
| 638 // | |
| 639 // We are building three lists here: | |
| 640 // | |
| 641 // 1. In the result->object_mapping_ list (added to by the | |
| 642 // LEnvironment::Add*Object methods), we store the lengths (number | |
| 643 // of fields) of the captured objects in depth-first traversal order, or | |
| 644 // in case of duplicated objects, we store the index to the duplicate object | |
| 645 // (with a tag to differentiate between captured and duplicated objects). | |
| 646 // | |
| 647 // 2. The object fields are stored in the result->values_ list | |
| 648 // (added to by the LEnvironment.AddValue method) sequentially as lists | |
| 649 // of fields with holes for nested objects (the holes will be expanded | |
| 650 // later by LCodegen::AddToTranslation according to the | |
| 651 // LEnvironment.object_mapping_ list). | |
| 652 // | |
| 653 // 3. The auxiliary objects_to_materialize array stores the hydrogen values | |
| 654 // in the same order as result->object_mapping_ list. This is used | |
| 655 // to detect duplicate values and calculate the corresponding object index. | |
// Appends one captured/arguments object (and, recursively, its nested
// objects) to |result| and to the de-duplication list
// |objects_to_materialize|. See the block comment above for the three
// lists being built and their invariants.
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    // An arguments object's operand 0 is skipped below, hence the
    // length - 1 field count.
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}
| 704 | |
| 705 | |
// On scope exit, emits the lithium trace for this phase when trace
// output is enabled.
LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}
| 711 | |
| 712 | |
| 713 } // namespace internal | |
| 714 } // namespace v8 | |
| OLD | NEW |