| OLD | NEW |
| (Empty) |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | |
| 2 // Redistribution and use in source and binary forms, with or without | |
| 3 // modification, are permitted provided that the following conditions are | |
| 4 // met: | |
| 5 // | |
| 6 // * Redistributions of source code must retain the above copyright | |
| 7 // notice, this list of conditions and the following disclaimer. | |
| 8 // * Redistributions in binary form must reproduce the above | |
| 9 // copyright notice, this list of conditions and the following | |
| 10 // disclaimer in the documentation and/or other materials provided | |
| 11 // with the distribution. | |
| 12 // * Neither the name of Google Inc. nor the names of its | |
| 13 // contributors may be used to endorse or promote products derived | |
| 14 // from this software without specific prior written permission. | |
| 15 // | |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
| 27 | |
| 28 #include "v8.h" | |
| 29 | |
| 30 #include "cfg.h" | |
| 31 #include "codegen-inl.h" | |
| 32 #include "codegen-x64.h" | |
| 33 #include "debug.h" | |
| 34 #include "macro-assembler-x64.h" | |
| 35 | |
| 36 namespace v8 { | |
| 37 namespace internal { | |
| 38 | |
| 39 #define __ ACCESS_MASM(masm) | |
| 40 | |
| 41 void InstructionBlock::Compile(MacroAssembler* masm) { | |
| 42 ASSERT(!is_marked()); | |
| 43 is_marked_ = true; | |
| 44 { | |
| 45 Comment cmt(masm, "[ InstructionBlock"); | |
| 46 for (int i = 0, len = instructions_.length(); i < len; i++) { | |
| 47 // If the location of the current instruction is a temp, then the | |
| 48 // instruction cannot be in tail position in the block. Allocate the | |
| 49 // temp based on peeking ahead to the next instruction. | |
| 50 Instruction* instr = instructions_[i]; | |
| 51 Location* loc = instr->location(); | |
| 52 if (loc->is_temporary()) { | |
| 53 instructions_[i+1]->FastAllocate(TempLocation::cast(loc)); | |
| 54 } | |
| 55 instructions_[i]->Compile(masm); | |
| 56 } | |
| 57 } | |
| 58 successor_->Compile(masm); | |
| 59 } | |
| 60 | |
| 61 | |
// Emit the JS function prologue: build the frame, initialize stack slots
// for locals, and (behind flags) emit entry tracing and a stack-limit
// check whose slow path is deferred until after the function body.
void EntryNode::Compile(MacroAssembler* masm) {
  ASSERT(!is_marked());
  is_marked_ = true;
  Label deferred_enter, deferred_exit;
  {
    Comment cmnt(masm, "[ EntryNode");
    // Frame setup: save the caller's frame pointer and establish ours.
    __ push(rbp);
    __ movq(rbp, rsp);
    // Save context (rsi) and function (rdi) in the frame.
    __ push(rsi);
    __ push(rdi);
    // Initialize every stack-allocated local to undefined.
    int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
    if (count > 0) {
      __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < count; i++) {
        __ push(kScratchRegister);
      }
    }
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceEnter, 0);
    }
    if (FLAG_check_stack) {
      // Compare rsp against the stack guard limit and jump to the
      // deferred stack check (emitted below, after the body) when the
      // stack pointer is below the limit.
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_guard_limit();
      __ movq(kScratchRegister, stack_limit);
      __ cmpq(rsp, Operand(kScratchRegister, 0));
      __ j(below, &deferred_enter);
      __ bind(&deferred_exit);
    }
  }
  successor_->Compile(masm);
  if (FLAG_check_stack) {
    // Out-of-line slow path for the stack check: call the stub and jump
    // back to the fast path.
    Comment cmnt(masm, "[ Deferred Stack Check");
    __ bind(&deferred_enter);
    StackCheckStub stub;
    __ CallStub(&stub);
    __ jmp(&deferred_exit);
  }
}
| 100 | |
| 101 | |
// Emit the function epilogue: optional exit tracing, frame teardown, and
// the return, padded with int3 so the debugger can patch the return
// sequence in place.
void ExitNode::Compile(MacroAssembler* masm) {
  ASSERT(!is_marked());
  is_marked_ = true;
  Comment cmnt(masm, "[ ExitNode");
  if (FLAG_trace) {
    // kTraceExit takes the return value (in rax) as its one argument.
    __ push(rax);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  __ RecordJSReturn();
  // Tear down the frame, then pop receiver plus parameters from the
  // caller's stack (hence count + 1).
  __ movq(rsp, rbp);
  __ pop(rbp);
  int count = CfgGlobals::current()->fun()->scope()->num_parameters();
  __ ret((count + 1) * kPointerSize);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Add padding that will be overwritten by a debugger breakpoint.
  // "movq rsp, rbp; pop rbp" has length 4.  "ret k" has length 3.
  const int kPadding = Debug::kX64JSReturnSequenceLength - 4 - 3;
  for (int i = 0; i < kPadding; ++i) {
    __ int3();
  }
#endif
}
| 124 | |
| 125 | |
// Emit a property load, selecting between the named load IC (rcx holds the
// name) and the keyed load IC (key passed on the stack).  The IC's result
// arrives in rax and is stored into this instruction's location.
void PropLoadInstr::Compile(MacroAssembler* masm) {
  // The key should not be on the stack---if it is a compiler-generated
  // temporary it is in the accumulator.
  ASSERT(!key()->is_on_stack());

  Comment cmnt(masm, "[ Load from Property");
  // If the key is known at compile-time we may be able to use a load IC.
  bool is_keyed_load = true;
  if (key()->is_constant()) {
    // Still use the keyed load IC if the key can be parsed as an integer so
    // we will get into the case that handles [] on string objects.
    Handle<Object> key_val = Constant::cast(key())->handle();
    uint32_t ignored;
    if (key_val->IsSymbol() &&
        !String::cast(*key_val)->AsArrayIndex(&ignored)) {
      is_keyed_load = false;
    }
  }

  // Ensure the receiver is on the stack for the IC call.
  if (!object()->is_on_stack()) object()->Push(masm);
  // A test rax instruction after the call indicates to the IC code that it
  // was inlined.  Ensure there is not one after the call below.
  if (is_keyed_load) {
    // Keyed load: receiver and key are both passed on the stack.
    key()->Push(masm);
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    __ pop(rbx);  // Discard key.
  } else {
    // Named load: receiver on the stack, property name in rcx.
    key()->Get(masm, rcx);
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
  }
  __ pop(rbx);  // Discard receiver.
  location()->Set(masm, rax);
}
| 161 | |
| 162 | |
// Emit a binary operation by pushing both operands and calling the generic
// binary-op stub; the stub's result (in rax) is stored into this
// instruction's location.
void BinaryOpInstr::Compile(MacroAssembler* masm) {
  // The right-hand value should not be on the stack---if it is a
  // compiler-generated temporary it is in the accumulator.
  ASSERT(!right()->is_on_stack());

  Comment cmnt(masm, "[ BinaryOpInstr");
  // We can overwrite one of the operands if it is a temporary.
  OverwriteMode mode = NO_OVERWRITE;
  if (left()->is_temporary()) {
    mode = OVERWRITE_LEFT;
  } else if (right()->is_temporary()) {
    mode = OVERWRITE_RIGHT;
  }

  // Push both operands and call the specialized stub.
  if (!left()->is_on_stack()) left()->Push(masm);
  right()->Push(masm);
  GenericBinaryOpStub stub(op(), mode, SMI_CODE_IN_STUB);
  __ CallStub(&stub);
  location()->Set(masm, rax);
}
| 184 | |
| 185 | |
// Emit code for a return: only moves the value into rax here; the frame
// teardown and ret instruction are emitted by ExitNode::Compile.
void ReturnInstr::Compile(MacroAssembler* masm) {
  // The location should be 'Effect'.  As a side effect, move the value to
  // the accumulator.
  Comment cmnt(masm, "[ ReturnInstr");
  value()->Get(masm, rax);
}
| 192 | |
| 193 | |
// Load this constant's handle into the given register.
void Constant::Get(MacroAssembler* masm, Register reg) {
  __ Move(reg, handle_);
}
| 197 | |
| 198 | |
// Push this constant's handle onto the stack.
void Constant::Push(MacroAssembler* masm) {
  __ Push(handle_);
}
| 202 | |
| 203 | |
| 204 static Operand ToOperand(SlotLocation* loc) { | |
| 205 switch (loc->type()) { | |
| 206 case Slot::PARAMETER: { | |
| 207 int count = CfgGlobals::current()->fun()->scope()->num_parameters(); | |
| 208 return Operand(rbp, (1 + count - loc->index()) * kPointerSize); | |
| 209 } | |
| 210 case Slot::LOCAL: { | |
| 211 const int kOffset = JavaScriptFrameConstants::kLocal0Offset; | |
| 212 return Operand(rbp, kOffset - loc->index() * kPointerSize); | |
| 213 } | |
| 214 default: | |
| 215 UNREACHABLE(); | |
| 216 return Operand(rax, 0); | |
| 217 } | |
| 218 } | |
| 219 | |
| 220 | |
// Store this constant directly into the stack slot at 'loc'.
void Constant::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
  __ Move(ToOperand(loc), handle_);
}
| 224 | |
| 225 | |
// Load the value in this stack slot into the given register.
void SlotLocation::Get(MacroAssembler* masm, Register reg) {
  __ movq(reg, ToOperand(this));
}
| 229 | |
| 230 | |
// Store the given register into this stack slot.
void SlotLocation::Set(MacroAssembler* masm, Register reg) {
  __ movq(ToOperand(this), reg);
}
| 234 | |
| 235 | |
// Push the value in this stack slot onto the stack.
void SlotLocation::Push(MacroAssembler* masm) {
  __ push(ToOperand(this));
}
| 239 | |
| 240 | |
// Move 'value' into this slot, double-dispatching on the value's kind.
void SlotLocation::Move(MacroAssembler* masm, Value* value) {
  // We dispatch to the value because in some cases (temp or constant) we
  // can use special instruction sequences.
  value->MoveToSlot(masm, this);
}
| 246 | |
| 247 | |
// Copy between two stack slots via the scratch register (there is no
// memory-to-memory movq on x64).
void SlotLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
  __ movq(kScratchRegister, ToOperand(this));
  __ movq(ToOperand(loc), kScratchRegister);
}
| 252 | |
| 253 | |
// Move this temporary's value into the given register.  A stack-allocated
// temp is consumed (popped) by the read.
void TempLocation::Get(MacroAssembler* masm, Register reg) {
  switch (where_) {
    case ACCUMULATOR:
      // Skip the move when the destination is already rax.
      if (!reg.is(rax)) __ movq(reg, rax);
      break;
    case STACK:
      __ pop(reg);
      break;
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}
| 266 | |
| 267 | |
// Store the given register into this temporary (rax for an accumulator
// temp, pushed for a stack temp).
void TempLocation::Set(MacroAssembler* masm, Register reg) {
  switch (where_) {
    case ACCUMULATOR:
      // Skip the move when the source is already rax.
      if (!reg.is(rax)) __ movq(rax, reg);
      break;
    case STACK:
      __ push(reg);
      break;
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}
| 280 | |
| 281 | |
// Push this temporary's value onto the stack.  Only valid for an
// accumulator temp; a stack temp would already be in place and an
// unallocated temp is a bug.
void TempLocation::Push(MacroAssembler* masm) {
  switch (where_) {
    case ACCUMULATOR:
      __ push(rax);
      break;
    case STACK:
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}
| 292 | |
| 293 | |
// Move 'value' into this temporary: into rax for an accumulator temp, or
// pushed onto the stack for a stack temp.
void TempLocation::Move(MacroAssembler* masm, Value* value) {
  switch (where_) {
    case ACCUMULATOR:
      value->Get(masm, rax);
      break;
    case STACK:
      value->Push(masm);
      break;
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}
| 306 | |
| 307 | |
// Store this temporary into the stack slot at 'loc'.  A stack temp is
// consumed (popped) by the move.
void TempLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
  switch (where_) {
    case ACCUMULATOR:
      __ movq(ToOperand(loc), rax);
      break;
    case STACK:
      __ pop(ToOperand(loc));
      break;
    case NOT_ALLOCATED:
      UNREACHABLE();
  }
}
| 320 | |
| 321 | |
| 322 #undef __ | |
| 323 | |
| 324 } } // namespace v8::internal | |
| OLD | NEW |