OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/interpreter/bytecode-array-writer.h" | 5 #include "src/interpreter/bytecode-array-writer.h" |
6 | 6 |
7 #include <iomanip> | 7 #include "src/api.h" |
8 #include "src/interpreter/source-position-table.h" | 8 #include "src/interpreter/bytecode-label.h" |
| 9 #include "src/interpreter/constant-array-builder.h" |
| 10 #include "src/log.h" |
9 | 11 |
10 namespace v8 { | 12 namespace v8 { |
11 namespace internal { | 13 namespace internal { |
12 namespace interpreter { | 14 namespace interpreter { |
13 | 15 |
14 BytecodeArrayWriter::BytecodeArrayWriter( | 16 BytecodeArrayWriter::BytecodeArrayWriter( |
15 Zone* zone, SourcePositionTableBuilder* source_position_table_builder) | 17 Isolate* isolate, Zone* zone, ConstantArrayBuilder* constant_array_builder) |
16 : bytecodes_(zone), | 18 : isolate_(isolate), |
| 19 bytecodes_(zone), |
17 max_register_count_(0), | 20 max_register_count_(0), |
18 source_position_table_builder_(source_position_table_builder) {} | 21 unbound_jumps_(0), |
| 22 source_position_table_builder_(isolate, zone), |
| 23 constant_array_builder_(constant_array_builder) { |
| 24 LOG_CODE_EVENT(isolate_, CodeStartLinePosInfoRecordEvent( |
| 25 source_position_table_builder())); |
| 26 } |
19 | 27 |
20 // override | 28 // override |
21 BytecodeArrayWriter::~BytecodeArrayWriter() {} | 29 BytecodeArrayWriter::~BytecodeArrayWriter() {} |
22 | 30 |
23 // override | 31 // override |
24 size_t BytecodeArrayWriter::FlushForOffset() { return bytecodes()->size(); } | 32 Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray( |
| 33 int fixed_register_count, int parameter_count, |
| 34 Handle<FixedArray> handler_table) { |
| 35 DCHECK_EQ(0, unbound_jumps_); |
| 36 |
| 37 int bytecode_size = static_cast<int>(bytecodes()->size()); |
| 38 |
| 39 // All locals need a frame slot for the debugger, but may not be |
| 40 // present in generated code. |
| 41 int frame_size_for_locals = fixed_register_count * kPointerSize; |
| 42 int frame_size_used = max_register_count() * kPointerSize; |
| 43 int frame_size = std::max(frame_size_for_locals, frame_size_used); |
| 44 Handle<FixedArray> constant_pool = constant_array_builder()->ToFixedArray(); |
| 45 Handle<ByteArray> source_position_table = |
| 46 source_position_table_builder()->ToSourcePositionTable(); |
| 47 Handle<BytecodeArray> bytecode_array = isolate_->factory()->NewBytecodeArray( |
| 48 bytecode_size, &bytecodes()->front(), frame_size, parameter_count, |
| 49 constant_pool); |
| 50 bytecode_array->set_handler_table(*handler_table); |
| 51 bytecode_array->set_source_position_table(*source_position_table); |
| 52 |
| 53 void* line_info = source_position_table_builder()->DetachJITHandlerData(); |
| 54 LOG_CODE_EVENT(isolate_, CodeEndLinePosInfoRecordEvent( |
| 55 AbstractCode::cast(*bytecode_array), line_info)); |
| 56 return bytecode_array; |
| 57 } |
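
The frame-size computation above takes the larger of the fixed-register footprint (every declared local keeps a frame slot so the debugger can inspect it, even if it never appears in the generated bytecode) and the high-water mark of registers actually used, then scales by the pointer size. A minimal standalone sketch of that rule, with hypothetical names and the pointer size passed in explicitly rather than taken from kPointerSize:

  #include <algorithm>

  // Sketch of the frame-size rule in ToBytecodeArray(): declared locals always
  // get a slot, but temporaries can push the register high-water mark higher.
  static int FrameSizeInBytes(int fixed_register_count, int max_register_count,
                              int pointer_size) {
    int frame_size_for_locals = fixed_register_count * pointer_size;
    int frame_size_used = max_register_count * pointer_size;
    return std::max(frame_size_for_locals, frame_size_used);
  }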
25 | 58 |
26 // override | 59 // override |
27 void BytecodeArrayWriter::Write(BytecodeNode* node) { | 60 void BytecodeArrayWriter::Write(BytecodeNode* node) { |
| 61 DCHECK(!Bytecodes::IsJump(node->bytecode())); |
28 UpdateSourcePositionTable(node); | 62 UpdateSourcePositionTable(node); |
29 EmitBytecode(node); | 63 EmitBytecode(node); |
30 } | 64 } |
31 | 65 |
| 66 // override |
| 67 void BytecodeArrayWriter::WriteJump(BytecodeNode* node, BytecodeLabel* label) { |
| 68 DCHECK(Bytecodes::IsJump(node->bytecode())); |
| 69 UpdateSourcePositionTable(node); |
| 70 EmitJump(node, label); |
| 71 } |
| 72 |
| 73 // override |
| 74 void BytecodeArrayWriter::BindLabel(BytecodeLabel* label) { |
| 75 size_t current_offset = bytecodes()->size(); |
| 76 if (label->is_forward_target()) { |
| 77 // An earlier jump instruction refers to this label. Update its location. |
| 78 PatchJump(current_offset, label->offset()); |
| 79 // Now treat the label as if it will only be referred to by backward jumps. |
| 80 } |
| 81 label->bind_to(current_offset); |
| 82 } |
| 83 |
| 84 // override |
| 85 void BytecodeArrayWriter::BindLabel(const BytecodeLabel& target, |
| 86 BytecodeLabel* label) { |
| 87 DCHECK(!label->is_bound()); |
| 88 DCHECK(target.is_bound()); |
| 89 if (label->is_forward_target()) { |
| 90 // An earlier jump instruction refers to this label. Update its location. |
| 91 PatchJump(target.offset(), label->offset()); |
| 92 // Now treat the label as if it will only be referred to by backward jumps. |
| 93 } |
| 94 label->bind_to(target.offset()); |
| 95 } |
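
BindLabel() records the current (or forwarded) offset as the label's target and, if an earlier forward jump already refers to the label, patches that jump's placeholder operand via PatchJump(). A hedged, self-contained sketch of the same idea for the simplest case (a single-byte operand and a single referrer), using a hypothetical MiniLabel type; the real writer also handles wider operands, scaling prefixes, and the constant-pool fallback:

  #include <cstdint>
  #include <cstdlib>
  #include <vector>

  // Hypothetical model of forward-jump patching: emit the jump with a zero
  // placeholder, remember where it sits, and patch it when the target binds.
  struct MiniLabel {
    bool bound = false;
    size_t jump_location = 0;  // offset of the jump opcode referring to us
  };

  static void EmitForwardJump(std::vector<uint8_t>* code, uint8_t jump_opcode,
                              MiniLabel* label) {
    label->jump_location = code->size();
    code->push_back(jump_opcode);
    code->push_back(0);  // placeholder operand, patched in Bind()
  }

  static void Bind(std::vector<uint8_t>* code, MiniLabel* label) {
    size_t target = code->size();
    int delta = static_cast<int>(target - label->jump_location);
    if (delta > 127) std::abort();  // real code falls back to the constant pool
    (*code)[label->jump_location + 1] = static_cast<uint8_t>(delta);
    label->bound = true;
  }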
| 96 |
32 void BytecodeArrayWriter::UpdateSourcePositionTable( | 97 void BytecodeArrayWriter::UpdateSourcePositionTable( |
33 const BytecodeNode* const node) { | 98 const BytecodeNode* const node) { |
34 int bytecode_offset = static_cast<int>(bytecodes()->size()); | 99 int bytecode_offset = static_cast<int>(bytecodes()->size()); |
35 const BytecodeSourceInfo& source_info = node->source_info(); | 100 const BytecodeSourceInfo& source_info = node->source_info(); |
36 if (source_info.is_valid()) { | 101 if (source_info.is_valid()) { |
37 source_position_table_builder_->AddPosition(bytecode_offset, | 102 source_position_table_builder()->AddPosition(bytecode_offset, |
38 source_info.source_position(), | 103 source_info.source_position(), |
39 source_info.is_statement()); | 104 source_info.is_statement()); |
40 } | 105 } |
41 } | 106 } |
42 | 107 |
43 void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) { | 108 void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) { |
44 DCHECK_NE(node->bytecode(), Bytecode::kIllegal); | 109 DCHECK_NE(node->bytecode(), Bytecode::kIllegal); |
45 | 110 |
46 OperandScale operand_scale = node->operand_scale(); | 111 OperandScale operand_scale = node->operand_scale(); |
47 if (operand_scale != OperandScale::kSingle) { | 112 if (operand_scale != OperandScale::kSingle) { |
48 Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale); | 113 Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale); |
49 bytecodes()->push_back(Bytecodes::ToByte(prefix)); | 114 bytecodes()->push_back(Bytecodes::ToByte(prefix)); |
(...skipping 38 matching lines...)
88 count = static_cast<int>(operands[i + 1]); | 153 count = static_cast<int>(operands[i + 1]); |
89 } else { | 154 } else { |
90 count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type); | 155 count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type); |
91 } | 156 } |
92 Register reg = Register::FromOperand(static_cast<int32_t>(operands[i])); | 157 Register reg = Register::FromOperand(static_cast<int32_t>(operands[i])); |
93 max_register_count_ = std::max(max_register_count_, reg.index() + count); | 158 max_register_count_ = std::max(max_register_count_, reg.index() + count); |
94 } | 159 } |
95 } | 160 } |
96 } | 161 } |
97 | 162 |
98 // override | 163 // TODO(rmcilroy): This is the same as SignedOperand in BytecodeArrayBuilder. |
99 void BytecodeArrayWriter::FlushBasicBlock() {} | 164 // Once we move the scalable operand processing here remove the SignedOperand |
| 165 // in BytecodeArrayBuilder. |
| 166 static uint32_t SignedOperand(int value, OperandSize size) { |
| 167 switch (size) { |
| 168 case OperandSize::kByte: |
| 169 return static_cast<uint8_t>(value & 0xff); |
| 170 case OperandSize::kShort: |
| 171 return static_cast<uint16_t>(value & 0xffff); |
| 172 case OperandSize::kQuad: |
| 173 return static_cast<uint32_t>(value); |
| 174 case OperandSize::kNone: |
| 175 UNREACHABLE(); |
| 176 } |
| 177 return 0; |
| 178 } |
100 | 179 |
101 int BytecodeArrayWriter::GetMaximumFrameSizeUsed() { | 180 // static |
102 return max_register_count_ * kPointerSize; | 181 Bytecode GetJumpWithConstantOperand(Bytecode jump_bytecode) { |
| 182 switch (jump_bytecode) { |
| 183 case Bytecode::kJump: |
| 184 return Bytecode::kJumpConstant; |
| 185 case Bytecode::kJumpIfTrue: |
| 186 return Bytecode::kJumpIfTrueConstant; |
| 187 case Bytecode::kJumpIfFalse: |
| 188 return Bytecode::kJumpIfFalseConstant; |
| 189 case Bytecode::kJumpIfToBooleanTrue: |
| 190 return Bytecode::kJumpIfToBooleanTrueConstant; |
| 191 case Bytecode::kJumpIfToBooleanFalse: |
| 192 return Bytecode::kJumpIfToBooleanFalseConstant; |
| 193 case Bytecode::kJumpIfNotHole: |
| 194 return Bytecode::kJumpIfNotHoleConstant; |
| 195 case Bytecode::kJumpIfNull: |
| 196 return Bytecode::kJumpIfNullConstant; |
| 197 case Bytecode::kJumpIfUndefined: |
| 198 return Bytecode::kJumpIfUndefinedConstant; |
| 199 default: |
| 200 UNREACHABLE(); |
| 201 return Bytecode::kIllegal; |
| 202 } |
| 203 } |
| 204 |
| 205 void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location, |
| 206 int delta) { |
| 207 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); |
| 208 DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode)); |
| 209 size_t operand_location = jump_location + 1; |
| 210 DCHECK_EQ(bytecodes()->at(operand_location), 0); |
| 211 if (Bytecodes::SizeForSignedOperand(delta) == OperandSize::kByte) { |
| 212 // The jump fits within the range of an Imm operand, so cancel |
| 213 // the reservation and jump directly. |
| 214 constant_array_builder()->DiscardReservedEntry(OperandSize::kByte); |
| 215 bytecodes()->at(operand_location) = static_cast<uint8_t>(delta); |
| 216 } else { |
| 217 // The jump does not fit within the range of an Imm operand, so |
| 218 // commit reservation putting the offset into the constant pool, |
| 219 // and update the jump instruction and operand. |
| 220 size_t entry = constant_array_builder()->CommitReservedEntry( |
| 221 OperandSize::kByte, handle(Smi::FromInt(delta), isolate())); |
| 222 DCHECK(Bytecodes::SizeForUnsignedOperand(entry) == OperandSize::kByte); |
| 223 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); |
| 224 bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode); |
| 225 bytecodes()->at(operand_location) = static_cast<uint8_t>(entry); |
| 226 } |
| 227 } |
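
PatchJumpWith8BitOperand() either writes the delta inline and discards the reserved constant-pool entry, or, when the delta does not fit in a signed byte, commits the reservation and rewrites the instruction to the corresponding *Constant jump whose operand is a constant-pool index. A rough standalone sketch of that decision, with hypothetical names and a plain vector standing in for the constant pool:

  #include <cstdint>
  #include <vector>

  // Hypothetical model of the inline-vs-constant-pool fallback. Returns true
  // when the caller must also switch the opcode to the *Constant jump variant.
  static bool PatchByteJumpOperand(std::vector<uint8_t>* code,
                                   size_t operand_location, int delta,
                                   std::vector<int>* constant_pool) {
    if (delta >= -128 && delta <= 127) {
      (*code)[operand_location] = static_cast<uint8_t>(delta);  // fits inline
      return false;
    }
    constant_pool->push_back(delta);  // store the jump offset out of line
    // Index assumed to fit in a byte, mirroring the DCHECK in the real code.
    (*code)[operand_location] = static_cast<uint8_t>(constant_pool->size() - 1);
    return true;
  }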
| 228 |
| 229 void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location, |
| 230 int delta) { |
| 231 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); |
| 232 DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode)); |
| 233 size_t operand_location = jump_location + 1; |
| 234 uint8_t operand_bytes[2]; |
| 235 if (Bytecodes::SizeForSignedOperand(delta) <= OperandSize::kShort) { |
| 236 constant_array_builder()->DiscardReservedEntry(OperandSize::kShort); |
| 237 WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta)); |
| 238 } else { |
| 239 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); |
| 240 bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode); |
| 241 size_t entry = constant_array_builder()->CommitReservedEntry( |
| 242 OperandSize::kShort, handle(Smi::FromInt(delta), isolate())); |
| 243 WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry)); |
| 244 } |
| 245 DCHECK(bytecodes()->at(operand_location) == 0 && |
| 246 bytecodes()->at(operand_location + 1) == 0); |
| 247 bytecodes()->at(operand_location++) = operand_bytes[0]; |
| 248 bytecodes()->at(operand_location) = operand_bytes[1]; |
| 249 } |
| 250 |
| 251 void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location, |
| 252 int delta) { |
| 253 DCHECK(Bytecodes::IsJumpImmediate( |
| 254 Bytecodes::FromByte(bytecodes()->at(jump_location)))); |
| 255 constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad); |
| 256 uint8_t operand_bytes[4]; |
| 257 WriteUnalignedUInt32(operand_bytes, static_cast<uint32_t>(delta)); |
| 258 size_t operand_location = jump_location + 1; |
| 259 DCHECK(bytecodes()->at(operand_location) == 0 && |
| 260 bytecodes()->at(operand_location + 1) == 0 && |
| 261 bytecodes()->at(operand_location + 2) == 0 && |
| 262 bytecodes()->at(operand_location + 3) == 0); |
| 263 bytecodes()->at(operand_location++) = operand_bytes[0]; |
| 264 bytecodes()->at(operand_location++) = operand_bytes[1]; |
| 265 bytecodes()->at(operand_location++) = operand_bytes[2]; |
| 266 bytecodes()->at(operand_location) = operand_bytes[3]; |
| 267 } |
| 268 |
| 269 void BytecodeArrayWriter::PatchJump(size_t jump_target, size_t jump_location) { |
| 270 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); |
| 271 int delta = static_cast<int>(jump_target - jump_location); |
| 272 int prefix_offset = 0; |
| 273 OperandScale operand_scale = OperandScale::kSingle; |
| 274 if (Bytecodes::IsPrefixScalingBytecode(jump_bytecode)) { |
| 275 // If a prefix scaling bytecode is emitted, the target offset is one |
| 276 // less than it would be without the prefix bytecode. |
| 277 delta -= 1; |
| 278 prefix_offset = 1; |
| 279 operand_scale = Bytecodes::PrefixBytecodeToOperandScale(jump_bytecode); |
| 280 jump_bytecode = |
| 281 Bytecodes::FromByte(bytecodes()->at(jump_location + prefix_offset)); |
| 282 } |
| 283 |
| 284 DCHECK(Bytecodes::IsJump(jump_bytecode)); |
| 285 switch (operand_scale) { |
| 286 case OperandScale::kSingle: |
| 287 PatchJumpWith8BitOperand(jump_location, delta); |
| 288 break; |
| 289 case OperandScale::kDouble: |
| 290 PatchJumpWith16BitOperand(jump_location + prefix_offset, delta); |
| 291 break; |
| 292 case OperandScale::kQuadruple: |
| 293 PatchJumpWith32BitOperand(jump_location + prefix_offset, delta); |
| 294 break; |
| 295 default: |
| 296 UNREACHABLE(); |
| 297 } |
| 298 unbound_jumps_--; |
| 299 } |
| 300 |
| 301 void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) { |
| 302 DCHECK(Bytecodes::IsJump(node->bytecode())); |
| 303 DCHECK_EQ(0, node->operand(0)); |
| 304 |
| 305 size_t current_offset = bytecodes()->size(); |
| 306 |
| 307 if (label->is_bound()) { |
| 308 CHECK_GE(current_offset, label->offset()); |
| 309 CHECK_LE(current_offset, static_cast<size_t>(kMaxInt)); |
| 310 // Label has been bound already so this is a backwards jump. |
| 311 size_t abs_delta = current_offset - label->offset(); |
| 312 int delta = -static_cast<int>(abs_delta); |
| 313 OperandSize operand_size = Bytecodes::SizeForSignedOperand(delta); |
| 314 if (operand_size > OperandSize::kByte) { |
| 315 // Adjust for scaling byte prefix for wide jump offset. |
| 316 DCHECK_LE(delta, 0); |
| 317 delta -= 1; |
| 318 } |
| 319 node->set_bytecode(node->bytecode(), SignedOperand(delta, operand_size), |
| 320 Bytecodes::OperandSizesToScale(operand_size)); |
| 321 } else { |
| 322 // The label has not yet been bound, so this is a forward reference |
| 323 // that will be patched once the label is bound. We create a |
| 324 // reservation in the constant pool now so that the jump offset can |
| 325 // be stored there if it ends up too large for an inline operand. |
| 326 // The reservation fixes the maximum operand size, so the jump can |
| 327 // be emitted into the bytecode stream with space for the operand. |
| 328 unbound_jumps_++; |
| 329 label->set_referrer(current_offset); |
| 330 OperandSize reserved_operand_size = |
| 331 constant_array_builder()->CreateReservedEntry(); |
| 332 OperandScale operand_scale = |
| 333 Bytecodes::OperandSizesToScale(reserved_operand_size); |
| 334 node->set_bytecode(node->bytecode(), 0, operand_scale); |
| 335 } |
| 336 EmitBytecode(node); |
103 } | 337 } |
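
For a backward jump the delta is already known in EmitJump(), so the writer picks the narrowest operand size that holds it, adjusting the delta by one to account for the scaling prefix byte where one is needed; forward jumps instead reserve a constant-pool entry so the operand width is fixed before the target is known. A small sketch of the size selection, mirroring the idea behind Bytecodes::SizeForSignedOperand() with hypothetical names:

  #include <cstdint>
  #include <limits>

  enum class MiniOperandSize { kByte, kShort, kQuad };

  // Pick the narrowest signed operand width that can represent a jump delta.
  static MiniOperandSize SizeForSignedDelta(int32_t delta) {
    if (delta >= std::numeric_limits<int8_t>::min() &&
        delta <= std::numeric_limits<int8_t>::max()) {
      return MiniOperandSize::kByte;
    }
    if (delta >= std::numeric_limits<int16_t>::min() &&
        delta <= std::numeric_limits<int16_t>::max()) {
      return MiniOperandSize::kShort;
    }
    return MiniOperandSize::kQuad;
  }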
104 | 338 |
105 } // namespace interpreter | 339 } // namespace interpreter |
106 } // namespace internal | 340 } // namespace internal |
107 } // namespace v8 | 341 } // namespace v8 |