| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 246 matching lines...) |
| 257 } | 257 } |
| 258 | 258 |
| 259 | 259 |
| 260 bool LCodeGen::GenerateBody() { | 260 bool LCodeGen::GenerateBody() { |
| 261 ASSERT(is_generating()); | 261 ASSERT(is_generating()); |
| 262 bool emit_instructions = true; | 262 bool emit_instructions = true; |
| 263 for (current_instruction_ = 0; | 263 for (current_instruction_ = 0; |
| 264 !is_aborted() && current_instruction_ < instructions_->length(); | 264 !is_aborted() && current_instruction_ < instructions_->length(); |
| 265 current_instruction_++) { | 265 current_instruction_++) { |
| 266 LInstruction* instr = instructions_->at(current_instruction_); | 266 LInstruction* instr = instructions_->at(current_instruction_); |
| 267 |
| 268 // Don't emit code for basic blocks with a replacement. |
| 267 if (instr->IsLabel()) { | 269 if (instr->IsLabel()) { |
| 268 LLabel* label = LLabel::cast(instr); | 270 emit_instructions = !LLabel::cast(instr)->HasReplacement(); |
| 269 emit_instructions = !label->HasReplacement(); | 271 } |
| 272 if (!emit_instructions) continue; |
| 273 |
| 274 if (FLAG_code_comments && instr->HasInterestingComment(this)) { |
| 275 Comment(";;; <@%d,#%d> %s", |
| 276 current_instruction_, |
| 277 instr->hydrogen_value()->id(), |
| 278 instr->Mnemonic()); |
| 270 } | 279 } |
| 271 | 280 |
| 272 if (emit_instructions) { | 281 instr->CompileToNative(this); |
| 273 if (FLAG_code_comments) { | |
| 274 HValue* hydrogen = instr->hydrogen_value(); | |
| 275 if (hydrogen != NULL) { | |
| 276 if (hydrogen->IsChange()) { | |
| 277 HValue* changed_value = HChange::cast(hydrogen)->value(); | |
| 278 int use_id = 0; | |
| 279 const char* use_mnemo = "dead"; | |
| 280 if (hydrogen->UseCount() >= 1) { | |
| 281 HValue* use_value = hydrogen->uses().value(); | |
| 282 use_id = use_value->id(); | |
| 283 use_mnemo = use_value->Mnemonic(); | |
| 284 } | |
| 285 Comment(";;; @%d: %s. <of #%d %s for #%d %s>", | |
| 286 current_instruction_, instr->Mnemonic(), | |
| 287 changed_value->id(), changed_value->Mnemonic(), | |
| 288 use_id, use_mnemo); | |
| 289 } else { | |
| 290 Comment(";;; @%d: %s. <#%d>", current_instruction_, | |
| 291 instr->Mnemonic(), hydrogen->id()); | |
| 292 } | |
| 293 } else { | |
| 294 Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic()); | |
| 295 } | |
| 296 } | |
| 297 instr->CompileToNative(this); | |
| 298 } | |
| 299 } | 282 } |
| 300 EnsureSpaceForLazyDeopt(); | 283 EnsureSpaceForLazyDeopt(); |
| 301 return !is_aborted(); | 284 return !is_aborted(); |
| 302 } | 285 } |
| 303 | 286 |
| 304 | 287 |
| 305 bool LCodeGen::GenerateDeferredCode() { | 288 bool LCodeGen::GenerateDeferredCode() { |
| 306 ASSERT(is_generating()); | 289 ASSERT(is_generating()); |
| 307 if (deferred_.length() > 0) { | 290 if (deferred_.length() > 0) { |
| 308 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 291 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
| 309 LDeferredCode* code = deferred_[i]; | 292 LDeferredCode* code = deferred_[i]; |
| 293 Comment(";;; <@%d,#%d> " |
| 294 "-------------------- Deferred %s --------------------", |
| 295 code->instruction_index(), |
| 296 code->instr()->hydrogen_value()->id(), |
| 297 code->instr()->Mnemonic()); |
| 310 __ bind(code->entry()); | 298 __ bind(code->entry()); |
| 311 if (NeedsDeferredFrame()) { | 299 if (NeedsDeferredFrame()) { |
| 312 Comment(";;; Deferred build frame @%d: %s.", | 300 Comment(";;; Build frame"); |
| 313 code->instruction_index(), | |
| 314 code->instr()->Mnemonic()); | |
| 315 ASSERT(!frame_is_built_); | 301 ASSERT(!frame_is_built_); |
| 316 ASSERT(info()->IsStub()); | 302 ASSERT(info()->IsStub()); |
| 317 frame_is_built_ = true; | 303 frame_is_built_ = true; |
| 318 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); | 304 __ stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); |
| 319 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 305 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); |
| 320 __ push(scratch0()); | 306 __ push(scratch0()); |
| 321 __ add(fp, sp, Operand(2 * kPointerSize)); | 307 __ add(fp, sp, Operand(2 * kPointerSize)); |
| 308 Comment(";;; Deferred code"); |
| 322 } | 309 } |
| 323 Comment(";;; Deferred code @%d: %s.", | |
| 324 code->instruction_index(), | |
| 325 code->instr()->Mnemonic()); | |
| 326 code->Generate(); | 310 code->Generate(); |
| 327 if (NeedsDeferredFrame()) { | 311 if (NeedsDeferredFrame()) { |
| 328 Comment(";;; Deferred destroy frame @%d: %s.", | 312 Comment(";;; Destroy frame"); |
| 329 code->instruction_index(), | |
| 330 code->instr()->Mnemonic()); | |
| 331 ASSERT(frame_is_built_); | 313 ASSERT(frame_is_built_); |
| 332 __ pop(ip); | 314 __ pop(ip); |
| 333 __ ldm(ia_w, sp, cp.bit() | fp.bit() | lr.bit()); | 315 __ ldm(ia_w, sp, cp.bit() | fp.bit() | lr.bit()); |
| 334 frame_is_built_ = false; | 316 frame_is_built_ = false; |
| 335 } | 317 } |
| 336 __ jmp(code->exit()); | 318 __ jmp(code->exit()); |
| 337 } | 319 } |
| 338 } | 320 } |
| 339 | 321 |
| 340 // Force constant pool emission at the end of the deferred code to make | 322 // Force constant pool emission at the end of the deferred code to make |
| (...skipping 10 matching lines...) |
| 351 // immediate of a branch instruction. | 333 // immediate of a branch instruction. |
| 352 // To simplify we consider the code size from the first instruction to the | 334 // To simplify we consider the code size from the first instruction to the |
| 353 // end of the jump table. We also don't consider the pc load delta. | 335 // end of the jump table. We also don't consider the pc load delta. |
| 354 // Each entry in the jump table generates one instruction and inlines one | 336 // Each entry in the jump table generates one instruction and inlines one |
| 355 // 32bit data after it. | 337 // 32bit data after it. |
| 356 if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) + | 338 if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) + |
| 357 deopt_jump_table_.length() * 7)) { | 339 deopt_jump_table_.length() * 7)) { |
| 358 Abort("Generated code is too large"); | 340 Abort("Generated code is too large"); |
| 359 } | 341 } |
| 360 | 342 |
| 361 __ RecordComment("[ Deoptimisation jump table"); | 343 if (deopt_jump_table_.length() > 0) { |
| 344 Comment(";;; -------------------- Jump table --------------------"); |
| 345 } |
| 362 Label table_start; | 346 Label table_start; |
| 363 __ bind(&table_start); | 347 __ bind(&table_start); |
| 364 Label needs_frame_not_call; | 348 Label needs_frame_not_call; |
| 365 Label needs_frame_is_call; | 349 Label needs_frame_is_call; |
| 366 for (int i = 0; i < deopt_jump_table_.length(); i++) { | 350 for (int i = 0; i < deopt_jump_table_.length(); i++) { |
| 367 __ bind(&deopt_jump_table_[i].label); | 351 __ bind(&deopt_jump_table_[i].label); |
| 368 Address entry = deopt_jump_table_[i].address; | 352 Address entry = deopt_jump_table_[i].address; |
| 369 bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt; | 353 bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt; |
| 370 Deoptimizer::BailoutType type = | 354 Deoptimizer::BailoutType type = |
| 371 is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; | 355 is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; |
| (...skipping 40 matching lines...) |
| 412 } else { | 396 } else { |
| 413 if (is_lazy_deopt) { | 397 if (is_lazy_deopt) { |
| 414 __ mov(lr, Operand(pc), LeaveCC, al); | 398 __ mov(lr, Operand(pc), LeaveCC, al); |
| 415 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); | 399 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); |
| 416 } else { | 400 } else { |
| 417 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); | 401 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); |
| 418 } | 402 } |
| 419 } | 403 } |
| 420 masm()->CheckConstPool(false, false); | 404 masm()->CheckConstPool(false, false); |
| 421 } | 405 } |
| 422 __ RecordComment("]"); | |
| 423 | 406 |
| 424 // Force constant pool emission at the end of the deopt jump table to make | 407 // Force constant pool emission at the end of the deopt jump table to make |
| 425 // sure that no constant pools are emitted after. | 408 // sure that no constant pools are emitted after. |
| 426 masm()->CheckConstPool(true, false); | 409 masm()->CheckConstPool(true, false); |
| 427 | 410 |
| 428 // The deoptimization jump table is the last part of the instruction | 411 // The deoptimization jump table is the last part of the instruction |
| 429 // sequence. Mark the generated code as done unless we bailed out. | 412 // sequence. Mark the generated code as done unless we bailed out. |
| 430 if (!is_aborted()) status_ = DONE; | 413 if (!is_aborted()) status_ = DONE; |
| 431 return !is_aborted(); | 414 return !is_aborted(); |
| 432 } | 415 } |
| (...skipping 607 matching lines...) |
| 1040 pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode); | 1023 pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode); |
| 1041 } | 1024 } |
| 1042 | 1025 |
| 1043 | 1026 |
| 1044 void LCodeGen::RecordPosition(int position) { | 1027 void LCodeGen::RecordPosition(int position) { |
| 1045 if (position == RelocInfo::kNoPosition) return; | 1028 if (position == RelocInfo::kNoPosition) return; |
| 1046 masm()->positions_recorder()->RecordPosition(position); | 1029 masm()->positions_recorder()->RecordPosition(position); |
| 1047 } | 1030 } |
| 1048 | 1031 |
| 1049 | 1032 |
| 1033 static const char* LabelType(LLabel* label) { |
| 1034 if (label->is_loop_header()) return " (loop header)"; |
| 1035 if (label->is_osr_entry()) return " (OSR entry)"; |
| 1036 return ""; |
| 1037 } |
| 1038 |
| 1039 |
| 1050 void LCodeGen::DoLabel(LLabel* label) { | 1040 void LCodeGen::DoLabel(LLabel* label) { |
| 1051 Comment(";;; -------------------- B%d%s --------------------", | 1041 Comment(";;; <@%d,#%d> -------------------- B%d%s --------------------", |
| 1042 current_instruction_, |
| 1043 label->hydrogen_value()->id(), |
| 1052 label->block_id(), | 1044 label->block_id(), |
| 1053 label->is_loop_header() ? " (loop header)" : ""); | 1045 LabelType(label)); |
| 1054 __ bind(label->label()); | 1046 __ bind(label->label()); |
| 1055 current_block_ = label->block_id(); | 1047 current_block_ = label->block_id(); |
| 1056 DoGap(label); | 1048 DoGap(label); |
| 1057 } | 1049 } |
| 1058 | 1050 |
| 1059 | 1051 |
| 1060 void LCodeGen::DoParallelMove(LParallelMove* move) { | 1052 void LCodeGen::DoParallelMove(LParallelMove* move) { |
| 1061 resolver_.Resolve(move); | 1053 resolver_.Resolve(move); |
| 1062 } | 1054 } |
| 1063 | 1055 |
| (...skipping 1104 matching lines...) |
| 2168 | 2160 |
| 2169 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 2161 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 2170 // Block literal pool emission to ensure nop indicating no inlined smi code | 2162 // Block literal pool emission to ensure nop indicating no inlined smi code |
| 2171 // is in the correct position. | 2163 // is in the correct position. |
| 2172 Assembler::BlockConstPoolScope block_const_pool(masm()); | 2164 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 2173 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 2165 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 2174 __ nop(); // Signals no inlined code. | 2166 __ nop(); // Signals no inlined code. |
| 2175 } | 2167 } |
| 2176 | 2168 |
| 2177 | 2169 |
| 2178 int LCodeGen::GetNextEmittedBlock() { | 2170 int LCodeGen::GetNextEmittedBlock() const { |
| 2179 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) { | 2171 for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) { |
| 2180 if (!chunk_->GetLabel(i)->HasReplacement()) return i; | 2172 if (!chunk_->GetLabel(i)->HasReplacement()) return i; |
| 2181 } | 2173 } |
| 2182 return -1; | 2174 return -1; |
| 2183 } | 2175 } |
| 2184 | 2176 |
| 2185 | 2177 |
| 2186 void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) { | 2178 void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) { |
| 2187 int next_block = GetNextEmittedBlock(); | 2179 int next_block = GetNextEmittedBlock(); |
| 2188 right_block = chunk_->LookupDestination(right_block); | 2180 right_block = chunk_->LookupDestination(right_block); |
| (...skipping 125 matching lines...) |
| 2314 } | 2306 } |
| 2315 | 2307 |
| 2316 // We've seen something for the first time -> deopt. | 2308 // We've seen something for the first time -> deopt. |
| 2317 DeoptimizeIf(al, instr->environment()); | 2309 DeoptimizeIf(al, instr->environment()); |
| 2318 } | 2310 } |
| 2319 } | 2311 } |
| 2320 } | 2312 } |
| 2321 | 2313 |
| 2322 | 2314 |
| 2323 void LCodeGen::EmitGoto(int block) { | 2315 void LCodeGen::EmitGoto(int block) { |
| 2324 int destination = chunk_->LookupDestination(block); | 2316 if (!IsNextEmittedBlock(block)) { |
| 2325 if (destination != GetNextEmittedBlock()) { | 2317 __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block))); |
| 2326 __ jmp(chunk_->GetAssemblyLabel(destination)); | |
| 2327 } | 2318 } |
| 2328 } | 2319 } |
| 2329 | 2320 |
| 2330 | 2321 |
| 2331 void LCodeGen::DoGoto(LGoto* instr) { | 2322 void LCodeGen::DoGoto(LGoto* instr) { |
| 2332 EmitGoto(instr->block_id()); | 2323 EmitGoto(instr->block_id()); |
| 2333 } | 2324 } |
| 2334 | 2325 |
| 2335 | 2326 |
| 2336 Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) { | 2327 Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) { |
| (...skipping 3669 matching lines...) |
| 6006 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); | 5997 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 6007 __ ldr(result, FieldMemOperand(scratch, | 5998 __ ldr(result, FieldMemOperand(scratch, |
| 6008 FixedArray::kHeaderSize - kPointerSize)); | 5999 FixedArray::kHeaderSize - kPointerSize)); |
| 6009 __ bind(&done); | 6000 __ bind(&done); |
| 6010 } | 6001 } |
| 6011 | 6002 |
| 6012 | 6003 |
| 6013 #undef __ | 6004 #undef __ |
| 6014 | 6005 |
| 6015 } } // namespace v8::internal | 6006 } } // namespace v8::internal |
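
Note for readers skimming the diff: the main change in GenerateBody() is that the old HChange-specific commenting logic is replaced by one uniform printf-style pattern, ";;; <@<lir index>,#<hydrogen id>> <mnemonic>", emitted before every instruction when --code-comments is on. Below is a minimal standalone sketch of what that pattern produces; it is not V8 code, and FakeInstr and the sample values are made up (the real data comes from instr->hydrogen_value()->id() and instr->Mnemonic()).

    #include <cstdio>

    // Hypothetical stand-in for LInstruction, just to drive the format string.
    struct FakeInstr {
      int hydrogen_id;       // would come from instr->hydrogen_value()->id()
      const char* mnemonic;  // would come from instr->Mnemonic()
    };

    int main() {
      FakeInstr instrs[] = {{41, "label"}, {42, "add-i"}, {43, "branch"}};
      for (int i = 0; i < 3; ++i) {
        // Same printf-style pattern as Comment(";;; <@%d,#%d> %s", ...).
        std::printf(";;; <@%d,#%d> %s\n", i, instrs[i].hydrogen_id,
                    instrs[i].mnemonic);
      }
      return 0;
    }

Running this prints lines such as ";;; <@1,#42> add-i", which is the shape of the comments now attached to each LIR instruction and reused for the deferred-code and jump-table section headers.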
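The jump-table size check kept by the diff reasons about ARM branch range: a branch immediate encodes a signed 24-bit count of instructions, and the check budgets 7 instruction slots per deopt-jump-table entry (that multiplier is taken from the existing code and is presumably a worst-case allowance; the exact breakdown is not spelled out in the diff). A rough standalone sketch of that arithmetic with made-up sample numbers:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int64_t kInstrSize = 4;               // ARM instruction size in bytes
      int64_t pc_offset_bytes = 6 * 1024 * 1024;  // code emitted so far (made up)
      int64_t table_entries = 2000;               // deopt_jump_table_.length() (made up)
      // Per-entry budget of 7 instruction slots, matching the multiplier in the check.
      int64_t distance = pc_offset_bytes / kInstrSize + table_entries * 7;
      // ARM branch immediates hold a signed 24-bit instruction count.
      bool fits = distance >= -(INT64_C(1) << 23) && distance < (INT64_C(1) << 23);
      std::printf("distance = %lld instructions, fits in int24: %s\n",
                  (long long)distance, fits ? "yes" : "no");
      return 0;
    }

If the total falls outside the signed 24-bit range, code generation aborts with "Generated code is too large", exactly as in the Abort() path above.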