OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 44 matching lines...)
55 | 55 |
56 | 56 |
57 static unsigned GetPropertyId(Property* property) { | 57 static unsigned GetPropertyId(Property* property) { |
58 return property->id(); | 58 return property->id(); |
59 } | 59 } |
60 | 60 |
61 | 61 |
62 // A patch site is a location in the code that can be patched. This | 62 // A patch site is a location in the code that can be patched. This |
63 // class has a number of methods to emit the code which is patchable and the | 63 // class has a number of methods to emit the code which is patchable and the |
64 // method EmitPatchInfo to record a marker back to the patchable code. This | 64 // method EmitPatchInfo to record a marker back to the patchable code. This |
65 // marker is a andi at, rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 | 65 // marker is an andi zero_reg, rx, #yyyy instruction, and x * 0x0000ffff + yyyy |
66 // bit immediate value is used) is the delta from the pc to the first | 66 // (x is rx's code; raw 16-bit immediate) is the delta from the pc to the first |
67 // instruction of the patchable code. | 67 // instruction of the patchable code. |
| 68 // The marker instruction is effectively a NOP (dest is zero_reg) and will |
| 69 // never be emitted by normal code. |
68 class JumpPatchSite BASE_EMBEDDED { | 70 class JumpPatchSite BASE_EMBEDDED { |
69 public: | 71 public: |
70 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { | 72 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { |
71 #ifdef DEBUG | 73 #ifdef DEBUG |
72 info_emitted_ = false; | 74 info_emitted_ = false; |
73 #endif | 75 #endif |
74 } | 76 } |
75 | 77 |
76 ~JumpPatchSite() { | 78 ~JumpPatchSite() { |
77 ASSERT(patch_site_.is_bound() == info_emitted_); | 79 ASSERT(patch_site_.is_bound() == info_emitted_); |
(...skipping 18 matching lines...)
96 __ bind(&patch_site_); | 98 __ bind(&patch_site_); |
97 __ andi(at, reg, 0); | 99 __ andi(at, reg, 0); |
98 // Never taken before patched. | 100 // Never taken before patched. |
99 __ Branch(target, ne, at, Operand(zero_reg)); | 101 __ Branch(target, ne, at, Operand(zero_reg)); |
100 } | 102 } |
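Before it is patched, the check above computes andi at, reg, 0, so at is always zero and the ne-branch against zero_reg can never be taken; patching later turns the same pair into a live smi test (presumably by rewriting the immediate to the smi tag mask). A minimal, runnable C++ sketch of that test under V8's 32-bit tagging scheme, where smis carry a zero low bit and heap pointers a one; the values are made up:

#include <cassert>
#include <cstdint>

const intptr_t kSmiTagMask = 1;  // low tag bit of a 32-bit V8 value

// After patching, andi at, reg, kSmiTagMask leaves at == 0 exactly for smis,
// so Branch(target, ne, at, Operand(zero_reg)) jumps for non-smis.
bool IsSmi(intptr_t value) { return (value & kSmiTagMask) == 0; }

int main() {
  intptr_t smi = 42 << 1;             // integer shifted above the tag bit
  intptr_t heap_object = 0x1000 | 1;  // pointer with the tag bit set
  assert(IsSmi(smi));
  assert(!IsSmi(heap_object));
  return 0;
}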
101 | 103 |
102 void EmitPatchInfo() { | 104 void EmitPatchInfo() { |
103 if (patch_site_.is_bound()) { | 105 if (patch_site_.is_bound()) { |
104 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); | 106 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); |
105 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask); | 107 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask); |
106 __ andi(at, reg, delta_to_patch_site % kImm16Mask); | 108 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask); |
107 #ifdef DEBUG | 109 #ifdef DEBUG |
108 info_emitted_ = true; | 110 info_emitted_ = true; |
109 #endif | 111 #endif |
110 } else { | 112 } else { |
111 __ nop(); // Signals no inlined code. | 113 __ nop(); // Signals no inlined code. |
112 } | 114 } |
113 } | 115 } |
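EmitPatchInfo splits the distance back to the patch site across the marker's two operand fields: the register field carries delta / kImm16Mask and the 16-bit immediate carries delta % kImm16Mask, so a reader of the marker recombines them as code * 0x0000ffff + immediate. A runnable sketch of just that arithmetic, with a made-up delta chosen large enough to spill into the register field (kImm16Mask is the MIPS 16-bit immediate mask, 0xffff):

#include <cassert>

const int kImm16Mask = 0xffff;

int main() {
  int delta_to_patch_site = 70000;                  // hypothetical pc delta
  int reg_code = delta_to_patch_site / kImm16Mask;  // 1, encoded as rx
  int imm16 = delta_to_patch_site % kImm16Mask;     // 4465, the #yyyy field
  // Decoding inverts the split exactly:
  assert(reg_code * kImm16Mask + imm16 == delta_to_patch_site);
  return 0;
}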
114 | 116 |
115 private: | 117 private: |
116 MacroAssembler* masm_; | 118 MacroAssembler* masm_; |
(...skipping 186 matching lines...)
303 } | 305 } |
304 | 306 |
305 | 307 |
306 void FullCodeGenerator::ClearAccumulator() { | 308 void FullCodeGenerator::ClearAccumulator() { |
307 ASSERT(Smi::FromInt(0) == 0); | 309 ASSERT(Smi::FromInt(0) == 0); |
308 __ mov(v0, zero_reg); | 310 __ mov(v0, zero_reg); |
309 } | 311 } |
310 | 312 |
311 | 313 |
312 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { | 314 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { |
| 315 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt, so its |
| 316 // length must stay constant. Branch may emit a skip-or-jump sequence |
| 317 // instead of a single branch instruction; the "skip" part of that |
| 318 // sequence is about as long as the single branch would be, so it is |
| 319 // safe to ignore it. |
| 320 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
313 Comment cmnt(masm_, "[ Stack check"); | 321 Comment cmnt(masm_, "[ Stack check"); |
314 Label ok; | 322 Label ok; |
315 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | 323 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
316 __ Branch(&ok, hs, sp, Operand(t0)); | 324 __ sltu(at, sp, t0); |
| 325 __ beq(at, zero_reg, &ok); |
| 326 // CallStub will emit a li t9, ... first, so it is safe to use the delay slot. |
317 StackCheckStub stub; | 327 StackCheckStub stub; |
| 328 __ CallStub(&stub); |
318 // Record a mapping of this PC offset to the OSR id. This is used to find | 329 // Record a mapping of this PC offset to the OSR id. This is used to find |
319 // the AST id from the unoptimized code in order to use it as a key into | 330 // the AST id from the unoptimized code in order to use it as a key into |
320 // the deoptimization input data found in the optimized code. | 331 // the deoptimization input data found in the optimized code. |
321 RecordStackCheck(stmt->OsrEntryId()); | 332 RecordStackCheck(stmt->OsrEntryId()); |
322 | 333 |
323 __ CallStub(&stub); | |
324 __ bind(&ok); | 334 __ bind(&ok); |
325 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 335 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
326 // Record a mapping of the OSR id to this PC. This is used if the OSR | 336 // Record a mapping of the OSR id to this PC. This is used if the OSR |
327 // entry becomes the target of a bailout. We don't expect it to be, but | 337 // entry becomes the target of a bailout. We don't expect it to be, but |
328 // we want it to work if it is. | 338 // we want it to work if it is. |
329 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | 339 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
330 } | 340 } |
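The rewrite above replaces the Branch macro, which may expand to a variable-length skip-or-jump sequence, with a fixed sltu/beq pair whose delay slot is filled by the li t9 that CallStub emits first, keeping the patch site at a constant offset for Deoptimizer::PatchStackCheckCodeAt. A rough, runnable C++ analogue of the emitted control flow; stack_limit and CallStackCheckStub are hypothetical stand-ins:

#include <cstdint>
#include <cstdio>

static uintptr_t stack_limit = 0x10000;  // stand-in for the root value in t0
static void CallStackCheckStub() { std::puts("StackCheckStub called"); }

// Emitted sequence:           analogue below:
//   sltu at, sp, t0             at = (sp < stack_limit)
//   beq  at, zero_reg, &ok      if (at == 0) goto ok
//   li   t9, <stub>             (delay slot; first instruction of CallStub)
void StackCheck(uintptr_t sp) {
  bool below_limit = sp < stack_limit;     // sltu
  if (below_limit) CallStackCheckStub();   // beq falls through into the call
  // ok:
}

int main() {
  StackCheck(0x8000);   // below the limit: calls the stub
  StackCheck(0x20000);  // above the limit: skips it
  return 0;
}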
331 | 341 |
332 | 342 |
333 void FullCodeGenerator::EmitReturnSequence() { | 343 void FullCodeGenerator::EmitReturnSequence() { |
(...skipping 3909 matching lines...)
4243 *context_length = 0; | 4253 *context_length = 0; |
4244 return previous_; | 4254 return previous_; |
4245 } | 4255 } |
4246 | 4256 |
4247 | 4257 |
4248 #undef __ | 4258 #undef __ |
4249 | 4259 |
4250 } } // namespace v8::internal | 4260 } } // namespace v8::internal |
4251 | 4261 |
4252 #endif // V8_TARGET_ARCH_MIPS | 4262 #endif // V8_TARGET_ARCH_MIPS |