| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/bytecode-array-builder.h" | 5 #include "src/interpreter/bytecode-array-builder.h" |
| 6 | 6 |
| 7 namespace v8 { | 7 namespace v8 { |
| 8 namespace internal { | 8 namespace internal { |
| 9 namespace interpreter { | 9 namespace interpreter { |
| 10 | 10 |
| 11 BytecodeArrayBuilder::BytecodeArrayBuilder(Isolate* isolate, Zone* zone) | 11 BytecodeArrayBuilder::BytecodeArrayBuilder(Isolate* isolate, Zone* zone) |
| 12 : isolate_(isolate), | 12 : isolate_(isolate), |
| 13 zone_(zone), |
| 13 bytecodes_(zone), | 14 bytecodes_(zone), |
| 14 bytecode_generated_(false), | 15 bytecode_generated_(false), |
| 15 last_block_end_(0), | 16 last_block_end_(0), |
| 16 last_bytecode_start_(~0), | 17 last_bytecode_start_(~0), |
| 17 return_seen_in_block_(false), | 18 return_seen_in_block_(false), |
| 18 constants_map_(isolate->heap(), zone), | 19 constants_map_(isolate->heap(), zone), |
| 19 constants_(zone), | 20 constants_(zone), |
| 20 parameter_count_(-1), | 21 parameter_count_(-1), |
| 21 local_register_count_(-1), | 22 local_register_count_(-1), |
| 22 temporary_register_count_(0), | 23 temporary_register_count_(0), |
| (...skipping 284 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 307 if (label->is_forward_target()) { | 308 if (label->is_forward_target()) { |
| 308 // An earlier jump instruction refers to this label. Update its location. | 309 // An earlier jump instruction refers to this label. Update its location. |
| 309 PatchJump(bytecodes()->end(), bytecodes()->begin() + label->offset()); | 310 PatchJump(bytecodes()->end(), bytecodes()->begin() + label->offset()); |
| 310 // Now treat as if the label will only be back referred to. | 311 // Now treat as if the label will only be back referred to. |
| 311 } | 312 } |
| 312 label->bind_to(bytecodes()->size()); | 313 label->bind_to(bytecodes()->size()); |
| 313 return *this; | 314 return *this; |
| 314 } | 315 } |
| 315 | 316 |
| 316 | 317 |
| 317 // static | 318 BytecodeArrayBuilder& BytecodeArrayBuilder::Bind(const BytecodeLabel& target, |
| 318 bool BytecodeArrayBuilder::IsJumpWithImm8Operand(Bytecode jump_bytecode) { | 319 BytecodeLabel* label) { |
| 319 return jump_bytecode == Bytecode::kJump || | 320 DCHECK_EQ(label->is_bound(), false); |
| 320 jump_bytecode == Bytecode::kJumpIfTrue || | 321 DCHECK_EQ(target.is_bound(), true); |
| 321 jump_bytecode == Bytecode::kJumpIfFalse; | 322 PatchJump(bytecodes()->begin() + target.offset(), |
| 323 bytecodes()->begin() + label->offset()); |
| 324 label->bind_to(target.offset()); |
| 325 return *this; |
| 322 } | 326 } |
| 323 | 327 |
| 324 | 328 |
| 325 // static | 329 // static |
| 326 Bytecode BytecodeArrayBuilder::GetJumpWithConstantOperand( | 330 Bytecode BytecodeArrayBuilder::GetJumpWithConstantOperand( |
| 327 Bytecode jump_bytecode) { | 331 Bytecode jump_bytecode) { |
| 328 switch (jump_bytecode) { | 332 switch (jump_bytecode) { |
| 329 case Bytecode::kJump: | 333 case Bytecode::kJump: |
| 330 return Bytecode::kJumpConstant; | 334 return Bytecode::kJumpConstant; |
| 331 case Bytecode::kJumpIfTrue: | 335 case Bytecode::kJumpIfTrue: |
| 332 return Bytecode::kJumpIfTrueConstant; | 336 return Bytecode::kJumpIfTrueConstant; |
| 333 case Bytecode::kJumpIfFalse: | 337 case Bytecode::kJumpIfFalse: |
| 334 return Bytecode::kJumpIfFalseConstant; | 338 return Bytecode::kJumpIfFalseConstant; |
| 335 default: | 339 default: |
| 336 UNREACHABLE(); | 340 UNREACHABLE(); |
| 337 return Bytecode::kJumpConstant; | 341 return Bytecode::kJumpConstant; |
| 338 } | 342 } |
| 339 } | 343 } |
| 340 | 344 |
| 341 | 345 |
| 342 void BytecodeArrayBuilder::PatchJump( | 346 void BytecodeArrayBuilder::PatchJump( |
| 343 const ZoneVector<uint8_t>::iterator& jump_target, | 347 const ZoneVector<uint8_t>::iterator& jump_target, |
| 344 ZoneVector<uint8_t>::iterator jump_location) { | 348 ZoneVector<uint8_t>::iterator jump_location) { |
| 345 Bytecode jump_bytecode = Bytecodes::FromByte(*jump_location); | 349 Bytecode jump_bytecode = Bytecodes::FromByte(*jump_location); |
| 346 int delta = static_cast<int>(jump_target - jump_location); | 350 int delta = static_cast<int>(jump_target - jump_location); |
| 347 | 351 |
| 348 DCHECK(IsJumpWithImm8Operand(jump_bytecode)); | 352 DCHECK(Bytecodes::IsJump(jump_bytecode)); |
| 349 DCHECK_EQ(Bytecodes::Size(jump_bytecode), 2); | 353 DCHECK_EQ(Bytecodes::Size(jump_bytecode), 2); |
| 350 DCHECK_GE(delta, 0); | 354 DCHECK_NE(delta, 0); |
| 351 | 355 |
| 352 if (FitsInImm8Operand(delta)) { | 356 if (FitsInImm8Operand(delta)) { |
| 353 // Just update the operand | 357 // Just update the operand |
| 354 jump_location++; | 358 jump_location++; |
| 355 *jump_location = static_cast<uint8_t>(delta); | 359 *jump_location = static_cast<uint8_t>(delta); |
| 356 } else { | 360 } else { |
| 357 // Update the jump type and operand | 361 // Update the jump type and operand |
| 358 size_t entry = GetConstantPoolEntry(handle(Smi::FromInt(delta), isolate())); | 362 size_t entry = GetConstantPoolEntry(handle(Smi::FromInt(delta), isolate())); |
| 359 if (FitsInIdxOperand(entry)) { | 363 if (FitsInIdxOperand(entry)) { |
| 360 *jump_location++ = | 364 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); |
| 361 Bytecodes::ToByte(GetJumpWithConstantOperand(jump_bytecode)); | 365 *jump_location++ = Bytecodes::ToByte(jump_bytecode); |
| 362 *jump_location = static_cast<uint8_t>(entry); | 366 *jump_location = static_cast<uint8_t>(entry); |
| 363 } else { | 367 } else { |
| 364 // TODO(oth): OutputJump should reserve a constant pool entry | 368 // TODO(oth): OutputJump should reserve a constant pool entry |
| 365 // when jump is written. The reservation should be used here if | 369 // when jump is written. The reservation should be used here if |
| 366 // needed, or cancelled if not. This is due to the patch needing | 370 // needed, or cancelled if not. This is due to the patch needing |
| 367 // to match the size of the code it's replacing. In future, | 371 // to match the size of the code it's replacing. In future, |
| 368 // there will probably be a jump with 32-bit operand for cases | 372 // there will probably be a jump with 32-bit operand for cases |
| 369 // when constant pool is full, but that needs to be emitted in | 373 // when constant pool is full, but that needs to be emitted in |
| 370 // OutputJump too. | 374 // OutputJump too. |
| 371 UNIMPLEMENTED(); | 375 UNIMPLEMENTED(); |
| (...skipping 233 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 605 | 609 |
| 606 Register TemporaryRegisterScope::NewRegister() { | 610 Register TemporaryRegisterScope::NewRegister() { |
| 607 count_++; | 611 count_++; |
| 608 last_register_index_ = builder_->BorrowTemporaryRegister(); | 612 last_register_index_ = builder_->BorrowTemporaryRegister(); |
| 609 return Register(last_register_index_); | 613 return Register(last_register_index_); |
| 610 } | 614 } |
| 611 | 615 |
| 612 } // namespace interpreter | 616 } // namespace interpreter |
| 613 } // namespace internal | 617 } // namespace internal |
| 614 } // namespace v8 | 618 } // namespace v8 |
| OLD | NEW |