OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/arm/lithium-codegen-arm.h" | 7 #include "src/arm/lithium-codegen-arm.h" |
8 #include "src/arm/lithium-gap-resolver-arm.h" | 8 #include "src/arm/lithium-gap-resolver-arm.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/hydrogen-osr.h" | 10 #include "src/hydrogen-osr.h" |
(...skipping 306 matching lines...)
317 // To simplify we consider the code size from the first instruction to the | 317 // To simplify we consider the code size from the first instruction to the |
318 // end of the jump table. We also don't consider the pc load delta. | 318 // end of the jump table. We also don't consider the pc load delta. |
319 // Each entry in the jump table generates one instruction and inlines one | 319 // Each entry in the jump table generates one instruction and inlines one |
320 // 32bit data after it. | 320 // 32bit data after it. |
321 if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) + | 321 if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) + |
322 deopt_jump_table_.length() * 7)) { | 322 deopt_jump_table_.length() * 7)) { |
323 Abort(kGeneratedCodeIsTooLarge); | 323 Abort(kGeneratedCodeIsTooLarge); |
324 } | 324 } |
325 | 325 |
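For context on the range check above, here is a standalone sketch (not V8 code) of the same arithmetic, assuming the 4-byte ARM instruction size and the conservative 7-word-per-entry estimate used in the diff; IsInt24 is re-implemented here purely for illustration.

#include <cstdint>

namespace {

constexpr int kInstrSize = 4;               // ARM instructions are 4 bytes
constexpr int kWordsPerJumpTableEntry = 7;  // conservative per-entry estimate from the check above

// Branch instructions encode a signed 24-bit instruction-count offset.
bool IsInt24(int64_t value) {
  return value >= -(1 << 23) && value < (1 << 23);
}

// Mirrors the check: the distance from the first instruction to the end of
// the jump table, measured in instructions, must fit in that immediate.
bool JumpTableInBranchRange(int pc_offset_bytes, int num_entries) {
  int64_t distance = pc_offset_bytes / kInstrSize +
                     static_cast<int64_t>(num_entries) * kWordsPerJumpTableEntry;
  return IsInt24(distance);
}

}  // namespace

int main() {
  // Example: ~1 MiB of generated code plus 1000 jump table entries still fits.
  return JumpTableInBranchRange(1 << 20, 1000) ? 0 : 1;
}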
326 if (deopt_jump_table_.length() > 0) { | 326 if (deopt_jump_table_.length() > 0) { |
| 327 Label needs_frame, call_deopt_entry; |
| 328 |
327 Comment(";;; -------------------- Jump table --------------------"); | 329 Comment(";;; -------------------- Jump table --------------------"); |
328 } | 330 Address base = deopt_jump_table_[0].address; |
329 Label table_start; | 331 |
330 __ bind(&table_start); | 332 Register entry_offset = scratch0(); |
331 Label needs_frame; | 333 |
332 for (int i = 0; i < deopt_jump_table_.length(); i++) { | 334 int length = deopt_jump_table_.length(); |
333 __ bind(&deopt_jump_table_[i].label); | 335 for (int i = 0; i < length; i++) { |
334 Address entry = deopt_jump_table_[i].address; | 336 __ bind(&deopt_jump_table_[i].label); |
335 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; | 337 |
336 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 338 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; |
337 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 339 ASSERT(type == deopt_jump_table_[0].bailout_type); |
338 Comment(";;; jump table entry %d.", i); | 340 Address entry = deopt_jump_table_[i].address; |
339 } else { | 341 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
| 342 ASSERT(id != Deoptimizer::kNotDeoptimizationEntry); |
340 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 343 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 344 |
| 345 // Second-level deopt table entries are contiguous and small, so instead |
| 346 // of loading the full, absolute address of each one, load an immediate |
| 347 // offset which will be added to the base address later. |
| 348 __ mov(entry_offset, Operand(entry - base)); |
| 349 |
| 350 if (deopt_jump_table_[i].needs_frame) { |
| 351 ASSERT(!info()->saves_caller_doubles()); |
| 352 if (needs_frame.is_bound()) { |
| 353 __ b(&needs_frame); |
| 354 } else { |
| 355 __ bind(&needs_frame); |
| 356 Comment(";;; call deopt with frame"); |
| 357 __ PushFixedFrame(); |
| 358 // This variant of deopt can only be used with stubs. Since we don't |
| 359 // have a function pointer to install in the stack frame that we're |
| 360 // building, install a special marker there instead. |
| 361 ASSERT(info()->IsStub()); |
| 362 __ mov(ip, Operand(Smi::FromInt(StackFrame::STUB))); |
| 363 __ push(ip); |
| 364 __ add(fp, sp, |
| 365 Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 366 __ bind(&call_deopt_entry); |
| 367 // Add the base address to the offset previously loaded in |
| 368 // entry_offset. |
| 369 __ add(entry_offset, entry_offset, |
| 370 Operand(ExternalReference::ForDeoptEntry(base))); |
| 371 __ blx(entry_offset); |
| 372 } |
| 373 |
| 374 masm()->CheckConstPool(false, false); |
| 375 } else { |
| 376 // The last entry can fall through into `call_deopt_entry`, avoiding a |
| 377 // branch. |
| 378 bool need_branch = ((i + 1) != length) || call_deopt_entry.is_bound(); |
| 379 |
| 380 if (need_branch) __ b(&call_deopt_entry); |
| 381 |
| 382 masm()->CheckConstPool(false, !need_branch); |
| 383 } |
341 } | 384 } |
342 if (deopt_jump_table_[i].needs_frame) { | 385 |
343 ASSERT(!info()->saves_caller_doubles()); | 386 if (!call_deopt_entry.is_bound()) { |
344 __ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry))); | 387 Comment(";;; call deopt"); |
345 if (needs_frame.is_bound()) { | 388 __ bind(&call_deopt_entry); |
346 __ b(&needs_frame); | 389 |
347 } else { | |
348 __ bind(&needs_frame); | |
349 __ PushFixedFrame(); | |
350 // This variant of deopt can only be used with stubs. Since we don't | |
351 // have a function pointer to install in the stack frame that we're | |
352 // building, install a special marker there instead. | |
353 ASSERT(info()->IsStub()); | |
354 __ mov(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | |
355 __ push(scratch0()); | |
356 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | |
357 __ mov(lr, Operand(pc), LeaveCC, al); | |
358 __ mov(pc, ip); | |
359 } | |
360 } else { | |
361 if (info()->saves_caller_doubles()) { | 390 if (info()->saves_caller_doubles()) { |
362 ASSERT(info()->IsStub()); | 391 ASSERT(info()->IsStub()); |
363 RestoreCallerDoubles(); | 392 RestoreCallerDoubles(); |
364 } | 393 } |
365 __ mov(lr, Operand(pc), LeaveCC, al); | 394 |
366 __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); | 395 // Add the base address to the offset previously loaded in entry_offset. |
| 396 __ add(entry_offset, entry_offset, |
| 397 Operand(ExternalReference::ForDeoptEntry(base))); |
| 398 __ blx(entry_offset); |
367 } | 399 } |
368 masm()->CheckConstPool(false, false); | |
369 } | 400 } |
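To make the new-side logic concrete, here is a minimal self-contained sketch (assumed names and constants, not V8 API) of the base-plus-offset dispatch the jump table now uses: each entry materializes only a small `entry - base` immediate, and the common base is added exactly once at the shared call site, which the last entry reaches by falling through instead of branching.

#include <cstdint>
#include <cstdio>
#include <vector>

namespace {

constexpr uintptr_t kDeoptTableBase = 0x40000000;  // assumed base of the deopt entries
constexpr uintptr_t kDeoptEntrySize = 12;          // assumed size of one second-level entry

// Per jump-table slot: the analogue of `mov entry_offset, #(entry - base)`.
uint32_t SlotImmediate(uintptr_t entry) {
  return static_cast<uint32_t>(entry - kDeoptTableBase);  // small immediate, not a full 32-bit address
}

// Shared tail: the analogue of `add entry_offset, entry_offset, #base; blx entry_offset`.
uintptr_t SharedCallTarget(uint32_t entry_offset) {
  return kDeoptTableBase + entry_offset;
}

}  // namespace

int main() {
  std::vector<uintptr_t> entries;
  for (int i = 0; i < 4; ++i) entries.push_back(kDeoptTableBase + i * kDeoptEntrySize);

  for (uintptr_t entry : entries) {
    uint32_t offset = SlotImmediate(entry);        // cheap per-entry work
    uintptr_t target = SharedCallTarget(offset);   // base added once, in shared code
    std::printf("offset %u -> entry %#zx\n", offset, static_cast<size_t>(target));
  }
  return 0;
}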
370 | 401 |
371 // Force constant pool emission at the end of the deopt jump table to make | 402 // Force constant pool emission at the end of the deopt jump table to make |
372 // sure that no constant pools are emitted after. | 403 // sure that no constant pools are emitted after. |
373 masm()->CheckConstPool(true, false); | 404 masm()->CheckConstPool(true, false); |
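A rough standalone model (assumed constants and names, not the real Assembler) of why emission is forced at this point: the assembler buffers pc-relative constants and must dump the pool before their users drift out of reach, and forcing a dump right after the jump table ensures no pool is interleaved with or emitted after it.

#include <cstdint>
#include <vector>

struct PendingConstant {
  int use_offset;  // byte offset of the load that will read this constant
  uint32_t value;
};

class TinyConstPoolModel {
 public:
  static constexpr int kLdrReachBytes = 4096;  // assumed pc-relative load reach

  void RecordConstant(int pc_offset, uint32_t value) {
    pending_.push_back({pc_offset, value});
  }

  // Analogue of CheckConstPool(force_emit, ...): emit the pool now if forced,
  // or if the oldest pending user is getting close to the reach limit.
  bool CheckConstPool(int pc_offset, bool force_emit) {
    if (pending_.empty()) return false;
    bool nearly_out_of_reach =
        pc_offset - pending_.front().use_offset > kLdrReachBytes / 2;
    if (force_emit || nearly_out_of_reach) {
      pending_.clear();  // stands in for emitting the pool at pc_offset
      return true;
    }
    return false;
  }

 private:
  std::vector<PendingConstant> pending_;
};

int main() {
  TinyConstPoolModel pool;
  pool.RecordConstant(/*pc_offset=*/0, 0xdeadbeef);
  // Forcing emission (as the code above does after the jump table) flushes the
  // pool even though nothing is out of reach yet.
  return pool.CheckConstPool(/*pc_offset=*/8, /*force_emit=*/true) ? 0 : 1;
}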
374 | 405 |
375 // The deoptimization jump table is the last part of the instruction | 406 // The deoptimization jump table is the last part of the instruction |
376 // sequence. Mark the generated code as done unless we bailed out. | 407 // sequence. Mark the generated code as done unless we bailed out. |
377 if (!is_aborted()) status_ = DONE; | 408 if (!is_aborted()) status_ = DONE; |
378 return !is_aborted(); | 409 return !is_aborted(); |
(...skipping 5446 matching lines...)
5825 __ Push(scope_info); | 5856 __ Push(scope_info); |
5826 __ push(ToRegister(instr->function())); | 5857 __ push(ToRegister(instr->function())); |
5827 CallRuntime(Runtime::kHiddenPushBlockContext, 2, instr); | 5858 CallRuntime(Runtime::kHiddenPushBlockContext, 2, instr); |
5828 RecordSafepoint(Safepoint::kNoLazyDeopt); | 5859 RecordSafepoint(Safepoint::kNoLazyDeopt); |
5829 } | 5860 } |
5830 | 5861 |
5831 | 5862 |
5832 #undef __ | 5863 #undef __ |
5833 | 5864 |
5834 } } // namespace v8::internal | 5865 } } // namespace v8::internal |