| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 51 matching lines...) |
| 62 } | 62 } |
| 63 | 63 |
| 64 ~JumpPatchSite() { | 64 ~JumpPatchSite() { |
| 65 ASSERT(patch_site_.is_bound() == info_emitted_); | 65 ASSERT(patch_site_.is_bound() == info_emitted_); |
| 66 } | 66 } |
| 67 | 67 |
| 68 // When initially emitting this ensure that a jump is always generated to skip | 68 // When initially emitting this ensure that a jump is always generated to skip |
| 69 // the inlined smi code. | 69 // the inlined smi code. |
| 70 void EmitJumpIfNotSmi(Register reg, Label* target) { | 70 void EmitJumpIfNotSmi(Register reg, Label* target) { |
| 71 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 71 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| | 72 Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 72 __ bind(&patch_site_); | 73 __ bind(&patch_site_); |
| 73 __ cmp(reg, Operand(reg)); | 74 __ cmp(reg, Operand(reg)); |
| 74 // Don't use b(al, ...) as that might emit the constant pool right after the | 75 // Don't use b(al, ...) as that might emit the constant pool right after the |
| 75 // branch. After patching when the branch is no longer unconditional | 76 // branch. After patching when the branch is no longer unconditional |
| 76 // execution can continue into the constant pool. | 77 // execution can continue into the constant pool. |
| 77 __ b(eq, target); // Always taken before patched. | 78 __ b(eq, target); // Always taken before patched. |
| 78 } | 79 } |
| 79 | 80 |
| 80 // When initially emitting this ensure that a jump is never generated to skip | 81 // When initially emitting this ensure that a jump is never generated to skip |
| 81 // the inlined smi code. | 82 // the inlined smi code. |
| 82 void EmitJumpIfSmi(Register reg, Label* target) { | 83 void EmitJumpIfSmi(Register reg, Label* target) { |
| 83 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 84 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| | 85 Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 84 __ bind(&patch_site_); | 86 __ bind(&patch_site_); |
| 85 __ cmp(reg, Operand(reg)); | 87 __ cmp(reg, Operand(reg)); |
| 86 __ b(ne, target); // Never taken before patched. | 88 __ b(ne, target); // Never taken before patched. |
| 87 } | 89 } |
| 88 | 90 |
| 89 void EmitPatchInfo() { | 91 void EmitPatchInfo() { |
| 90 if (patch_site_.is_bound()) { | 92 if (patch_site_.is_bound()) { |
| 91 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); | 93 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); |
| 92 Register reg; | 94 Register reg; |
| 93 reg.set_code(delta_to_patch_site / kOff12Mask); | 95 reg.set_code(delta_to_patch_site / kOff12Mask); |
| (...skipping 4296 matching lines...) |
| 4390 *context_length = 0; | 4392 *context_length = 0; |
| 4391 return previous_; | 4393 return previous_; |
| 4392 } | 4394 } |
| 4393 | 4395 |
| 4394 | 4396 |
| 4395 #undef __ | 4397 #undef __ |
| 4396 | 4398 |
| 4397 } } // namespace v8::internal | 4399 } } // namespace v8::internal |
| 4398 | 4400 |
| 4399 #endif // V8_TARGET_ARCH_ARM | 4401 #endif // V8_TARGET_ARCH_ARM |
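
The substantive change in this diff is the `Assembler::BlockConstPoolScope` added at the top of both `EmitJumpIfNotSmi` and `EmitJumpIfSmi`. The patch site depends on the `cmp`/`b` pair sitting immediately after the bound label: `EmitPatchInfo` records the instruction distance back to the patch site, and the comments ("Always taken before patched", "Never taken before patched") indicate those instructions are later rewritten in place. If the assembler flushed its constant pool between the bound label and the branch, the recorded delta could land on pool data rather than on the instructions to patch, so pool emission is blocked for the duration of each emit. Below is a minimal, self-contained sketch of that RAII-guard pattern; the class layout and member names are illustrative only, since the real scope is defined in V8's ARM assembler, which this diff does not show.

```cpp
#include <cassert>

// Illustrative stand-in for the assembler; not V8's actual class layout.
class Assembler {
 public:
  // RAII guard: while any scope is alive, the constant pool may not be
  // flushed, so instructions emitted inside it stay contiguous in memory.
  class BlockConstPoolScope {
   public:
    explicit BlockConstPoolScope(Assembler* assm) : assm_(assm) {
      ++assm_->const_pool_blocked_;
    }
    ~BlockConstPoolScope() { --assm_->const_pool_blocked_; }

   private:
    Assembler* assm_;
  };

  bool CanEmitConstPool() const { return const_pool_blocked_ == 0; }

 private:
  int const_pool_blocked_ = 0;
};

int main() {
  Assembler masm;
  assert(masm.CanEmitConstPool());
  {
    // Mirrors the new lines in EmitJumpIfNotSmi / EmitJumpIfSmi: the cmp and
    // branch would be emitted here, with pool flushes deferred until the
    // scope closes.
    Assembler::BlockConstPoolScope block_const_pool(&masm);
    assert(!masm.CanEmitConstPool());
  }
  assert(masm.CanEmitConstPool());
  return 0;
}
```

In the diff itself no extra braces are needed: the scope object lives until the end of each emit function, which is exactly the region that must stay free of constant-pool data.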