OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 21 matching lines...) |
32 #include "x64/lithium-codegen-x64.h" | 32 #include "x64/lithium-codegen-x64.h" |
33 #include "code-stubs.h" | 33 #include "code-stubs.h" |
34 #include "stub-cache.h" | 34 #include "stub-cache.h" |
35 | 35 |
36 namespace v8 { | 36 namespace v8 { |
37 namespace internal { | 37 namespace internal { |
38 | 38 |
39 | 39 |
40 // When invoking builtins, we need to record the safepoint in the middle of | 40 // When invoking builtins, we need to record the safepoint in the middle of |
41 // the invoke instruction sequence generated by the macro assembler. | 41 // the invoke instruction sequence generated by the macro assembler. |
42 class SafepointGenerator : public PostCallGenerator { | 42 class SafepointGenerator : public CallWrapper { |
43 public: | 43 public: |
44 SafepointGenerator(LCodeGen* codegen, | 44 SafepointGenerator(LCodeGen* codegen, |
45 LPointerMap* pointers, | 45 LPointerMap* pointers, |
46 int deoptimization_index, | 46 int deoptimization_index, |
47 bool ensure_reloc_space = false) | 47 bool ensure_reloc_space = false) |
48 : codegen_(codegen), | 48 : codegen_(codegen), |
49 pointers_(pointers), | 49 pointers_(pointers), |
50 deoptimization_index_(deoptimization_index), | 50 deoptimization_index_(deoptimization_index), |
51 ensure_reloc_space_(ensure_reloc_space), | 51 ensure_reloc_space_(ensure_reloc_space) { } |
52 previous_safepoint_position_(-kMinSafepointSize) { } | |
53 virtual ~SafepointGenerator() { } | 52 virtual ~SafepointGenerator() { } |
54 | 53 |
55 virtual void Generate() { | 54 virtual void BeforeCall(int call_size) { |
| 55 ASSERT(call_size >= 0); |
56 // Ensure that we have enough space after the previous safepoint position | 56 // Ensure that we have enough space after the previous safepoint position |
57 // for the generated code there. | 57 // for the jump generated there. |
58 int position = codegen_->masm()->pc_offset(); | 58 int call_end = codegen_->masm()->pc_offset() + call_size; |
59 ASSERT(position > previous_safepoint_position_); | 59 int prev_jump_end = codegen_->LastSafepointEnd() + kMinSafepointSize; |
60 if (position < previous_safepoint_position_ + kMinSafepointSize) { | 60 if (call_end < prev_jump_end) { |
61 int padding_size = | 61 int padding_size = prev_jump_end - call_end; |
62 previous_safepoint_position_ + kMinSafepointSize - position; | |
63 STATIC_ASSERT(kMinSafepointSize <= 9); // One multibyte nop is enough. | 62 STATIC_ASSERT(kMinSafepointSize <= 9); // One multibyte nop is enough. |
64 codegen_->masm()->nop(padding_size); | 63 codegen_->masm()->nop(padding_size); |
65 position += padding_size; | |
66 } | 64 } |
| 65 } |
| 66 |
| 67 virtual void AfterCall() { |
67 // Ensure that we have enough space in the reloc info to patch | 68 // Ensure that we have enough space in the reloc info to patch |
68 // this with calls when doing deoptimization. | 69 // this with calls when doing deoptimization. |
69 if (ensure_reloc_space_) { | 70 if (ensure_reloc_space_) { |
70 codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true); | 71 codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true); |
71 } | 72 } |
72 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 73 codegen_->RecordSafepoint(pointers_, deoptimization_index_); |
73 previous_safepoint_position_ = position; | |
74 } | 74 } |
75 | 75 |
76 private: | 76 private: |
77 static const int kMinSafepointSize = | 77 static const int kMinSafepointSize = |
78 MacroAssembler::kShortCallInstructionLength; | 78 MacroAssembler::kShortCallInstructionLength; |
79 LCodeGen* codegen_; | 79 LCodeGen* codegen_; |
80 LPointerMap* pointers_; | 80 LPointerMap* pointers_; |
81 int deoptimization_index_; | 81 int deoptimization_index_; |
82 bool ensure_reloc_space_; | 82 bool ensure_reloc_space_; |
83 int previous_safepoint_position_; | |
84 }; | 83 }; |
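
// The diff above replaces the one-shot PostCallGenerator::Generate() hook
// with CallWrapper's BeforeCall/AfterCall pair: the nop padding is now
// emitted before the call (sized so the call's end clears the patchable
// region that begins at the previous safepoint), and the safepoint itself
// is recorded after the call. Below is a minimal standalone sketch of that
// padding rule, not actual V8 code: PaddingBeforeCall and its parameters
// are hypothetical names, and kMinSafepointSize is assumed to be 5 bytes
// (the length of an x64 call with a rel32 operand).

#include <cassert>
#include <cstdio>

// Mirrors MacroAssembler::kShortCallInstructionLength (assumed 5 here).
const int kMinSafepointSize = 5;

// Bytes of nop padding needed so that a call ending at
// pc_offset + call_size does not overlap the region after the previous
// safepoint that deoptimization may later patch with a jump.
int PaddingBeforeCall(int pc_offset, int call_size, int last_safepoint_end) {
  assert(call_size >= 0);
  int call_end = pc_offset + call_size;
  int prev_jump_end = last_safepoint_end + kMinSafepointSize;
  return (call_end < prev_jump_end) ? prev_jump_end - call_end : 0;
}

int main() {
  // A 2-byte call starting right at the previous safepoint's end must be
  // pushed forward 3 bytes so a patched-in 5-byte jump has room.
  printf("%d\n", PaddingBeforeCall(10, 2, 10));  // prints 3
  return 0;
}

// Passing call_size into BeforeCall is what makes this possible: the
// wrapper can place exactly-sized padding ahead of the call instruction,
// whereas the old Generate() hook ran only after the call and had to pad
// at its own position retroactively.
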
85 | 84 |
86 | 85 |
87 #define __ masm()-> | 86 #define __ masm()-> |
88 | 87 |
89 bool LCodeGen::GenerateCode() { | 88 bool LCodeGen::GenerateCode() { |
90 HPhase phase("Code generation", chunk()); | 89 HPhase phase("Code generation", chunk()); |
91 ASSERT(is_unused()); | 90 ASSERT(is_unused()); |
92 status_ = GENERATING; | 91 status_ = GENERATING; |
93 return GeneratePrologue() && | 92 return GeneratePrologue() && |
(...skipping 3570 matching lines...) |
3664 RegisterEnvironmentForDeoptimization(environment); | 3663 RegisterEnvironmentForDeoptimization(environment); |
3665 ASSERT(osr_pc_offset_ == -1); | 3664 ASSERT(osr_pc_offset_ == -1); |
3666 osr_pc_offset_ = masm()->pc_offset(); | 3665 osr_pc_offset_ = masm()->pc_offset(); |
3667 } | 3666 } |
3668 | 3667 |
3669 #undef __ | 3668 #undef __ |
3670 | 3669 |
3671 } } // namespace v8::internal | 3670 } } // namespace v8::internal |
3672 | 3671 |
3673 #endif // V8_TARGET_ARCH_X64 | 3672 #endif // V8_TARGET_ARCH_X64 |