// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "codegen-inl.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

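// Editorial note (not in the original file): this virtual frame keeps up to
// two of the topmost expression-stack elements cached in r0 and r1 instead
// of in memory.  top_of_stack_state_ records which registers hold which
// elements, and the merge functions below emit the minimal pops, pushes and
// moves needed to bring the frame into an expected state.
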
void VirtualFrame::PopToR1R0() {
  // Shuffle things around so the top of stack is in r0 and r1.
  MergeTOSTo(R0_R1_TOS);
  // Pop the two registers off the stack so they are detached from the frame.
  LowerHeight(2);
  top_of_stack_state_ = NO_TOS_REGISTERS;
}


void VirtualFrame::PopToR1() {
  // Shuffle things around so the top of stack is only in r1.
  MergeTOSTo(R1_TOS);
  // Pop the register off the stack so it is detached from the frame.
  LowerHeight(1);
  top_of_stack_state_ = NO_TOS_REGISTERS;
}


void VirtualFrame::PopToR0() {
  // Shuffle things around so the top of stack is only in r0.
  MergeTOSTo(R0_TOS);
  // Pop the register off the stack so it is detached from the frame.
  LowerHeight(1);
  top_of_stack_state_ = NO_TOS_REGISTERS;
}


void VirtualFrame::MergeTo(const VirtualFrame* expected, Condition cond) {
  if (Equals(expected)) return;
  ASSERT((expected->tos_known_smi_map_ & tos_known_smi_map_) ==
         expected->tos_known_smi_map_);
  ASSERT(expected->IsCompatibleWith(this));
  MergeTOSTo(expected->top_of_stack_state_, cond);
  ASSERT(register_allocation_map_ == expected->register_allocation_map_);
}


void VirtualFrame::MergeTo(VirtualFrame* expected, Condition cond) {
  if (Equals(expected)) return;
  tos_known_smi_map_ &= expected->tos_known_smi_map_;
  MergeTOSTo(expected->top_of_stack_state_, cond);
  ASSERT(register_allocation_map_ == expected->register_allocation_map_);
}


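// Editorial note (illustrative; not in the original file): MergeTOSTo
// dispatches on the pair (current state, expected state).  CASE_NUMBER
// linearizes the pair as a * TOS_STATES + b; because 0 <= b < TOS_STATES the
// code is unique per pair (a == code / TOS_STATES and b == code % TOS_STATES),
// so a single switch with constant cases covers all 5 * 5 = 25 combinations.
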
void VirtualFrame::MergeTOSTo(
    VirtualFrame::TopOfStack expected_top_of_stack_state, Condition cond) {
#define CASE_NUMBER(a, b) ((a) * TOS_STATES + (b))
  switch (CASE_NUMBER(top_of_stack_state_, expected_top_of_stack_state)) {
    case CASE_NUMBER(NO_TOS_REGISTERS, NO_TOS_REGISTERS):
      break;
    case CASE_NUMBER(NO_TOS_REGISTERS, R0_TOS):
      __ pop(r0, cond);
      break;
    case CASE_NUMBER(NO_TOS_REGISTERS, R1_TOS):
      __ pop(r1, cond);
      break;
    case CASE_NUMBER(NO_TOS_REGISTERS, R0_R1_TOS):
      __ pop(r0, cond);
      __ pop(r1, cond);
      break;
    case CASE_NUMBER(NO_TOS_REGISTERS, R1_R0_TOS):
      __ pop(r1, cond);
      __ pop(r0, cond);
      break;
    case CASE_NUMBER(R0_TOS, NO_TOS_REGISTERS):
      __ push(r0, cond);
      break;
    case CASE_NUMBER(R0_TOS, R0_TOS):
      break;
    case CASE_NUMBER(R0_TOS, R1_TOS):
      __ mov(r1, r0, LeaveCC, cond);
      break;
    case CASE_NUMBER(R0_TOS, R0_R1_TOS):
      __ pop(r1, cond);
      break;
    case CASE_NUMBER(R0_TOS, R1_R0_TOS):
      __ mov(r1, r0, LeaveCC, cond);
      __ pop(r0, cond);
      break;
    case CASE_NUMBER(R1_TOS, NO_TOS_REGISTERS):
      __ push(r1, cond);
      break;
    case CASE_NUMBER(R1_TOS, R0_TOS):
      __ mov(r0, r1, LeaveCC, cond);
      break;
    case CASE_NUMBER(R1_TOS, R1_TOS):
      break;
    case CASE_NUMBER(R1_TOS, R0_R1_TOS):
      __ mov(r0, r1, LeaveCC, cond);
      __ pop(r1, cond);
      break;
    case CASE_NUMBER(R1_TOS, R1_R0_TOS):
      __ pop(r0, cond);
      break;
    case CASE_NUMBER(R0_R1_TOS, NO_TOS_REGISTERS):
      __ Push(r1, r0, cond);
      break;
    case CASE_NUMBER(R0_R1_TOS, R0_TOS):
      __ push(r1, cond);
      break;
    case CASE_NUMBER(R0_R1_TOS, R1_TOS):
      __ push(r1, cond);
      __ mov(r1, r0, LeaveCC, cond);
      break;
    case CASE_NUMBER(R0_R1_TOS, R0_R1_TOS):
      break;
    case CASE_NUMBER(R0_R1_TOS, R1_R0_TOS):
      __ Swap(r0, r1, ip, cond);
      break;
    case CASE_NUMBER(R1_R0_TOS, NO_TOS_REGISTERS):
      __ Push(r0, r1, cond);
      break;
    case CASE_NUMBER(R1_R0_TOS, R0_TOS):
      __ push(r0, cond);
      __ mov(r0, r1, LeaveCC, cond);
      break;
    case CASE_NUMBER(R1_R0_TOS, R1_TOS):
      __ push(r0, cond);
      break;
    case CASE_NUMBER(R1_R0_TOS, R0_R1_TOS):
      __ Swap(r0, r1, ip, cond);
      break;
    case CASE_NUMBER(R1_R0_TOS, R1_R0_TOS):
      break;
    default:
      UNREACHABLE();
#undef CASE_NUMBER
  }
  // A conditional merge will be followed by a conditional branch and the
  // fall-through code will have an unchanged virtual frame state.  If the
  // merge is unconditional ('al'ways) then it might be followed by a fall
  // through.  We need to update the virtual frame state to match the code we
  // are falling into.  The final case is an unconditional merge followed by an
  // unconditional branch, in which case it doesn't matter what we do to the
  // virtual frame state, because the virtual frame will be invalidated.
  if (cond == al) {
    top_of_stack_state_ = expected_top_of_stack_state;
  }
}


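// Editorial note (derived from the code below; not in the original file):
// Enter() builds the standard JS frame.  The stm stores the registers in
// ascending register-number order, so assuming the usual V8 ARM aliases
// (where r1 < cp < fp < lr by register number) the frame ends up as:
//
//     lr  (return address)   at fp + kPointerSize
//     fp  (caller's fp)      at fp                       <- new fp
//     cp  (context)          at fp - kPointerSize
//     r1  (function)         at fp - 2 * kPointerSize    <- new sp
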
void VirtualFrame::Enter() {
  Comment cmnt(masm(), "[ Enter JS frame");

#ifdef DEBUG
  // Verify that r1 contains a JS function.  The following code relies
  // on r2 being available for use.
  if (FLAG_debug_code) {
    Label map_check, done;
    __ tst(r1, Operand(kSmiTagMask));
    __ b(ne, &map_check);
    __ stop("VirtualFrame::Enter - r1 is not a function (smi check).");
    __ bind(&map_check);
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(eq, &done);
    __ stop("VirtualFrame::Enter - r1 is not a function (map check).");
    __ bind(&done);
  }
#endif  // DEBUG

  // We are about to push four values to the frame.
  Adjust(4);
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  // Adjust FP to point to saved FP.
  __ add(fp, sp, Operand(2 * kPointerSize));
}


void VirtualFrame::Exit() {
  Comment cmnt(masm(), "[ Exit JS frame");
  // Record the location of the JS exit code so that it can be patched when
  // setting a break point.
  __ RecordJSReturn();

  // Drop the execution stack down to the frame pointer and restore the caller
  // frame pointer and return address.
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
}


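// Editorial note (illustrative sketch; not in the original file):
// AllocateStackSlots below ends with a branch-free conditional stub call.
// Both the mov of the stub address into ip and the Call are predicated on
// 'lo', so when sp is not below the stack limit the two instructions are
// skipped by the condition field alone, roughly:
//
//     cmp   sp, r2        @ stack pointer below the limit?
//     movlo ip, #stub     @ executes only when 'lo'
//     blxlo ip            @ likewise; no skip branch required
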
void VirtualFrame::AllocateStackSlots() {
  int count = local_count();
  if (count > 0) {
    Comment cmnt(masm(), "[ Allocate space for locals");
    Adjust(count);
    // Initialize stack slots with 'undefined' value.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
    if (count < kLocalVarBound) {
      // For fewer locals the unrolled loop is more compact.
      for (int i = 0; i < count; i++) {
        __ push(ip);
      }
    } else {
      // For more locals a loop in generated code is more compact.
      Label alloc_locals_loop;
      __ mov(r1, Operand(count));
      __ bind(&alloc_locals_loop);
      __ push(ip);
      __ sub(r1, r1, Operand(1), SetCC);
      __ b(ne, &alloc_locals_loop);
    }
  } else {
    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
  }
  // Check the stack for overflow or a break request.
  masm()->cmp(sp, Operand(r2));
  StackCheckStub stub;
  // Call the stub if lower.
  masm()->mov(ip,
              Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
                      RelocInfo::CODE_TARGET),
              LeaveCC,
              lo);
  masm()->Call(ip, lo);
}


void VirtualFrame::PushReceiverSlotAddress() {
  UNIMPLEMENTED();
}


void VirtualFrame::PushTryHandler(HandlerType type) {
  // Grow the expression stack by handler size less one (the return
  // address in lr is already counted by a call instruction).
  Adjust(kHandlerSize - 1);
  __ PushTryHandler(IN_JAVASCRIPT, type);
}


void VirtualFrame::CallJSFunction(int arg_count) {
  // InvokeFunction requires function in r1.
  PopToR1();
  SpillAll();

  // +1 for receiver.
  Forget(arg_count + 1);
  ASSERT(cgen()->HasValidEntryRegisters());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION);
  // Restore the context.
  __ ldr(cp, Context());
}


void VirtualFrame::CallRuntime(const Runtime::Function* f, int arg_count) {
  SpillAll();
  Forget(arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(f, arg_count);
}


void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
  SpillAll();
  Forget(arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(id, arg_count);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void VirtualFrame::DebugBreak() {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ DebugBreak();
}
#endif


void VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
                                 InvokeJSFlags flags,
                                 int arg_count) {
  Forget(arg_count);
  __ InvokeBuiltin(id, flags);
}


void VirtualFrame::CallLoadIC(Handle<String> name, RelocInfo::Mode mode) {
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kLoadIC_Initialize));
  PopToR0();
  SpillAll();
  __ mov(r2, Operand(name));
  CallCodeObject(ic, mode, 0);
}


void VirtualFrame::CallStoreIC(Handle<String> name,
                               bool is_contextual,
                               StrictModeFlag strict_mode) {
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      (strict_mode == kStrictMode) ? Builtins::kStoreIC_Initialize_Strict
                                   : Builtins::kStoreIC_Initialize));
  PopToR0();
  RelocInfo::Mode mode;
  if (is_contextual) {
    SpillAll();
    __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
    mode = RelocInfo::CODE_TARGET_CONTEXT;
  } else {
    EmitPop(r1);
    SpillAll();
    mode = RelocInfo::CODE_TARGET;
  }
  __ mov(r2, Operand(name));
  CallCodeObject(ic, mode, 0);
}


void VirtualFrame::CallKeyedLoadIC() {
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kKeyedLoadIC_Initialize));
  PopToR1R0();
  SpillAll();
  CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
}


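// Editorial note (derived from the pops below; not in the original file):
// CallKeyedStoreIC expects receiver, key and value on the virtual frame with
// the value on top.  PopToR1R0 moves value -> r0 and key -> r1, and the
// EmitPop moves receiver -> r2, which is the register layout the keyed store
// stub expects.
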
void VirtualFrame::CallKeyedStoreIC(StrictModeFlag strict_mode) {
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      (strict_mode == kStrictMode) ? Builtins::kKeyedStoreIC_Initialize_Strict
                                   : Builtins::kKeyedStoreIC_Initialize));
  PopToR1R0();
  SpillAll();
  EmitPop(r2);
  CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
}


void VirtualFrame::CallCodeObject(Handle<Code> code,
                                  RelocInfo::Mode rmode,
                                  int dropped_args) {
  switch (code->kind()) {
    case Code::CALL_IC:
    case Code::KEYED_CALL_IC:
    case Code::FUNCTION:
      break;
    case Code::KEYED_LOAD_IC:
    case Code::LOAD_IC:
    case Code::KEYED_STORE_IC:
    case Code::STORE_IC:
      ASSERT(dropped_args == 0);
      break;
    case Code::BUILTIN:
      ASSERT(*code == Isolate::Current()->builtins()->builtin(
          Builtins::kJSConstructCall));
      break;
    default:
      UNREACHABLE();
      break;
  }
  Forget(dropped_args);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ Call(code, rmode);
}


// NO_TOS_REGISTERS, R0_TOS, R1_TOS, R1_R0_TOS, R0_R1_TOS.
const bool VirtualFrame::kR0InUse[TOS_STATES] =
    { false,            true,   false,  true,      true };
const bool VirtualFrame::kR1InUse[TOS_STATES] =
    { false,            false,  true,   true,      true };
const int VirtualFrame::kVirtualElements[TOS_STATES] =
    { 0,                1,      1,      2,         2 };
const Register VirtualFrame::kTopRegister[TOS_STATES] =
    { r0,               r0,     r1,     r1,        r0 };
const Register VirtualFrame::kBottomRegister[TOS_STATES] =
    { r0,               r0,     r1,     r0,        r1 };
const Register VirtualFrame::kAllocatedRegisters[
    VirtualFrame::kNumberOfAllocatedRegisters] = { r2, r3, r4, r5, r6 };
// Popping is done by the transition implied by kStateAfterPop.  Of course if
// there were no stack slots allocated to registers then the physical SP must
// be adjusted.
const VirtualFrame::TopOfStack VirtualFrame::kStateAfterPop[TOS_STATES] =
    { NO_TOS_REGISTERS, NO_TOS_REGISTERS, NO_TOS_REGISTERS, R0_TOS, R1_TOS };
// Pushing is done by the transition implied by kStateAfterPush.  Of course if
// the maximum number of registers was already allocated to the top of stack
// slots then one register must be physically pushed onto the stack.
const VirtualFrame::TopOfStack VirtualFrame::kStateAfterPush[TOS_STATES] =
    { R0_TOS, R1_R0_TOS, R0_R1_TOS, R0_R1_TOS, R1_R0_TOS };


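// Editorial note (illustrative sketch; not in the original file): the tables
// above can be cross-checked against each other.  A hypothetical sanity
// check, using only names defined by this class:
//
//   for (int s = 0; s < TOS_STATES; s++) {
//     if (kVirtualElements[s] < kMaxTOSRegisters) {
//       // Below the register limit a pop exactly undoes a push...
//       ASSERT(kStateAfterPop[kStateAfterPush[s]] == s);
//       ASSERT(kVirtualElements[kStateAfterPush[s]] ==
//              kVirtualElements[s] + 1);
//     } else {
//       // ...while at the limit the bottom register is spilled first, so
//       // the cached element count stays at kMaxTOSRegisters.
//       ASSERT(kVirtualElements[kStateAfterPush[s]] == kMaxTOSRegisters);
//     }
//   }
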
void VirtualFrame::Drop(int count) {
  ASSERT(count >= 0);
  ASSERT(height() >= count);
  // Nothing to do for an empty drop; returning early also avoids popping
  // cached TOS registers by mistake.
  if (count == 0) return;
  // Discard elements from the virtual frame and free any registers.
  int num_virtual_elements = kVirtualElements[top_of_stack_state_];
  while (num_virtual_elements > 0) {
    Pop();
    num_virtual_elements--;
    count--;
    if (count == 0) return;
  }
  __ add(sp, sp, Operand(count * kPointerSize));
  LowerHeight(count);
}


void VirtualFrame::Pop() {
  if (top_of_stack_state_ == NO_TOS_REGISTERS) {
    __ add(sp, sp, Operand(kPointerSize));
  } else {
    top_of_stack_state_ = kStateAfterPop[top_of_stack_state_];
  }
  LowerHeight(1);
}


void VirtualFrame::EmitPop(Register reg) {
  ASSERT(!is_used(RegisterAllocator::ToNumber(reg)));
  if (top_of_stack_state_ == NO_TOS_REGISTERS) {
    __ pop(reg);
  } else {
    __ mov(reg, kTopRegister[top_of_stack_state_]);
    top_of_stack_state_ = kStateAfterPop[top_of_stack_state_];
  }
  LowerHeight(1);
}


void VirtualFrame::SpillAllButCopyTOSToR0() {
  switch (top_of_stack_state_) {
    case NO_TOS_REGISTERS:
      __ ldr(r0, MemOperand(sp, 0));
      break;
    case R0_TOS:
      __ push(r0);
      break;
    case R1_TOS:
      __ push(r1);
      __ mov(r0, r1);
      break;
    case R0_R1_TOS:
      __ Push(r1, r0);
      break;
    case R1_R0_TOS:
      __ Push(r0, r1);
      __ mov(r0, r1);
      break;
    default:
      UNREACHABLE();
  }
  top_of_stack_state_ = NO_TOS_REGISTERS;
}


void VirtualFrame::SpillAllButCopyTOSToR1() {
  switch (top_of_stack_state_) {
    case NO_TOS_REGISTERS:
      __ ldr(r1, MemOperand(sp, 0));
      break;
    case R0_TOS:
      __ push(r0);
      __ mov(r1, r0);
      break;
    case R1_TOS:
      __ push(r1);
      break;
    case R0_R1_TOS:
      __ Push(r1, r0);
      __ mov(r1, r0);
      break;
    case R1_R0_TOS:
      __ Push(r0, r1);
      break;
    default:
      UNREACHABLE();
  }
  top_of_stack_state_ = NO_TOS_REGISTERS;
}


void VirtualFrame::SpillAllButCopyTOSToR1R0() {
  switch (top_of_stack_state_) {
    case NO_TOS_REGISTERS:
      __ ldr(r1, MemOperand(sp, 0));
      __ ldr(r0, MemOperand(sp, kPointerSize));
      break;
    case R0_TOS:
      __ push(r0);
      __ mov(r1, r0);
      __ ldr(r0, MemOperand(sp, kPointerSize));
      break;
    case R1_TOS:
      __ push(r1);
      __ ldr(r0, MemOperand(sp, kPointerSize));
      break;
    case R0_R1_TOS:
      __ Push(r1, r0);
      __ Swap(r0, r1, ip);
      break;
    case R1_R0_TOS:
      __ Push(r0, r1);
      break;
    default:
      UNREACHABLE();
  }
  top_of_stack_state_ = NO_TOS_REGISTERS;
}


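// Editorial note (not in the original file): Peek() returns the register
// holding the TOS element without changing the frame height.  If the TOS is
// currently only in memory it is un-spilled: the state advances as for a
// push and the element is popped into the resulting top register, where it
// stays cached for subsequent uses.
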
Register VirtualFrame::Peek() {
  AssertIsNotSpilled();
  if (top_of_stack_state_ == NO_TOS_REGISTERS) {
    top_of_stack_state_ = kStateAfterPush[top_of_stack_state_];
    Register answer = kTopRegister[top_of_stack_state_];
    __ pop(answer);
    return answer;
  } else {
    return kTopRegister[top_of_stack_state_];
  }
}


Register VirtualFrame::Peek2() {
  AssertIsNotSpilled();
  switch (top_of_stack_state_) {
    case NO_TOS_REGISTERS:
    case R0_TOS:
    case R0_R1_TOS:
      MergeTOSTo(R0_R1_TOS);
      return r1;
    case R1_TOS:
    case R1_R0_TOS:
      MergeTOSTo(R1_R0_TOS);
      return r0;
    default:
      UNREACHABLE();
      return no_reg;
  }
}


void VirtualFrame::Dup() {
  if (SpilledScope::is_spilled()) {
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
  } else {
    switch (top_of_stack_state_) {
      case NO_TOS_REGISTERS:
        __ ldr(r0, MemOperand(sp, 0));
        top_of_stack_state_ = R0_TOS;
        break;
      case R0_TOS:
        __ mov(r1, r0);
        // r0 and r1 contain the same value.  Prefer the state with r0
        // holding the TOS.
        top_of_stack_state_ = R0_R1_TOS;
        break;
      case R1_TOS:
        __ mov(r0, r1);
        // r0 and r1 contain the same value.  Prefer the state with r0
        // holding the TOS.
        top_of_stack_state_ = R0_R1_TOS;
        break;
      case R0_R1_TOS:
        __ push(r1);
        __ mov(r1, r0);
        // r0 and r1 contain the same value.  Prefer the state with r0
        // holding the TOS.
        top_of_stack_state_ = R0_R1_TOS;
        break;
      case R1_R0_TOS:
        __ push(r0);
        __ mov(r0, r1);
        // r0 and r1 contain the same value.  Prefer the state with r0
        // holding the TOS.
        top_of_stack_state_ = R0_R1_TOS;
        break;
      default:
        UNREACHABLE();
    }
  }
  RaiseHeight(1, tos_known_smi_map_ & 1);
}


void VirtualFrame::Dup2() {
  if (SpilledScope::is_spilled()) {
    __ ldr(ip, MemOperand(sp, kPointerSize));
    __ push(ip);
    __ ldr(ip, MemOperand(sp, kPointerSize));
    __ push(ip);
  } else {
    switch (top_of_stack_state_) {
      case NO_TOS_REGISTERS:
        __ ldr(r0, MemOperand(sp, 0));
        __ ldr(r1, MemOperand(sp, kPointerSize));
        top_of_stack_state_ = R0_R1_TOS;
        break;
      case R0_TOS:
        __ push(r0);
        __ ldr(r1, MemOperand(sp, kPointerSize));
        top_of_stack_state_ = R0_R1_TOS;
        break;
      case R1_TOS:
        __ push(r1);
        __ ldr(r0, MemOperand(sp, kPointerSize));
        top_of_stack_state_ = R1_R0_TOS;
        break;
      case R0_R1_TOS:
        __ Push(r1, r0);
        top_of_stack_state_ = R0_R1_TOS;
        break;
      case R1_R0_TOS:
        __ Push(r0, r1);
        top_of_stack_state_ = R1_R0_TOS;
        break;
      default:
        UNREACHABLE();
    }
  }
  RaiseHeight(2, tos_known_smi_map_ & 3);
}


Register VirtualFrame::PopToRegister(Register but_not_to_this_one) {
  ASSERT(but_not_to_this_one.is(r0) ||
         but_not_to_this_one.is(r1) ||
         but_not_to_this_one.is(no_reg));
  LowerHeight(1);
  if (top_of_stack_state_ == NO_TOS_REGISTERS) {
    if (but_not_to_this_one.is(r0)) {
      __ pop(r1);
      return r1;
    } else {
      __ pop(r0);
      return r0;
    }
  } else {
    Register answer = kTopRegister[top_of_stack_state_];
    ASSERT(!answer.is(but_not_to_this_one));
    top_of_stack_state_ = kStateAfterPop[top_of_stack_state_];
    return answer;
  }
}


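// Editorial note (not in the original file): when both r0 and r1 are
// occupied, EnsureOneFreeTOSRegister spills the bottom register to memory.
// The bookkeeping composes the two transition tables: kStateAfterPush
// followed by kStateAfterPop maps a full two-register state to the
// one-register state with the same top register, e.g.
//
//   R1_R0_TOS --push--> R0_R1_TOS --pop--> R1_TOS   (r1 remains the TOS)
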
void VirtualFrame::EnsureOneFreeTOSRegister() {
  if (kVirtualElements[top_of_stack_state_] == kMaxTOSRegisters) {
    __ push(kBottomRegister[top_of_stack_state_]);
    top_of_stack_state_ = kStateAfterPush[top_of_stack_state_];
    top_of_stack_state_ = kStateAfterPop[top_of_stack_state_];
  }
  ASSERT(kVirtualElements[top_of_stack_state_] != kMaxTOSRegisters);
}


void VirtualFrame::EmitPush(Register reg, TypeInfo info) {
  RaiseHeight(1, info.IsSmi() ? 1 : 0);
  if (reg.is(cp)) {
    // If we are pushing cp then we are about to make a call and things have
    // to be pushed to the physical stack.  There's nothing to be gained by
    // moving to a TOS register and then pushing that; we might as well push
    // to the physical stack immediately.
    MergeTOSTo(NO_TOS_REGISTERS);
    __ push(reg);
    return;
  }
  if (SpilledScope::is_spilled()) {
    ASSERT(top_of_stack_state_ == NO_TOS_REGISTERS);
    __ push(reg);
    return;
  }
  if (top_of_stack_state_ == NO_TOS_REGISTERS) {
    if (reg.is(r0)) {
      top_of_stack_state_ = R0_TOS;
      return;
    }
    if (reg.is(r1)) {
      top_of_stack_state_ = R1_TOS;
      return;
    }
  }
  EnsureOneFreeTOSRegister();
  top_of_stack_state_ = kStateAfterPush[top_of_stack_state_];
  Register dest = kTopRegister[top_of_stack_state_];
  __ Move(dest, reg);
}


void VirtualFrame::SetElementAt(Register reg, int this_far_down) {
  if (this_far_down < kTOSKnownSmiMapSize) {
    tos_known_smi_map_ &= ~(1 << this_far_down);
  }
  if (this_far_down == 0) {
    Pop();
    Register dest = GetTOSRegister();
    if (dest.is(reg)) {
      // We already popped one item off the top of the stack.  If the only
      // free register is the one we were asked to push then we have been
      // asked to push a register that was already in use, which cannot
      // happen.  It therefore follows that there are two free TOS registers:
      ASSERT(top_of_stack_state_ == NO_TOS_REGISTERS);
      dest = dest.is(r0) ? r1 : r0;
    }
    __ mov(dest, reg);
    EmitPush(dest);
  } else if (this_far_down == 1) {
    int virtual_elements = kVirtualElements[top_of_stack_state_];
    if (virtual_elements < 2) {
      __ str(reg, ElementAt(this_far_down));
    } else {
      ASSERT(virtual_elements == 2);
      ASSERT(!reg.is(r0));
      ASSERT(!reg.is(r1));
      Register dest = kBottomRegister[top_of_stack_state_];
      __ mov(dest, reg);
    }
  } else {
    ASSERT(this_far_down >= 2);
    ASSERT(kVirtualElements[top_of_stack_state_] <= 2);
    __ str(reg, ElementAt(this_far_down));
  }
}


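// Editorial note (hypothetical usage sketch; not in the original file):
// GetTOSRegister returns the register the next EmitPush will occupy, without
// changing the frame state, so callers can materialize a value directly into
// its final home:
//
//   Register tos = frame_->GetTOSRegister();  // peek at the push target
//   __ ldr(tos, source);                      // 'source' is hypothetical
//   frame_->EmitPush(tos);                    // value is now the TOS
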
Register VirtualFrame::GetTOSRegister() {
  if (SpilledScope::is_spilled()) return r0;

  EnsureOneFreeTOSRegister();
  return kTopRegister[kStateAfterPush[top_of_stack_state_]];
}


void VirtualFrame::EmitPush(Operand operand, TypeInfo info) {
  RaiseHeight(1, info.IsSmi() ? 1 : 0);
  if (SpilledScope::is_spilled()) {
    __ mov(r0, operand);
    __ push(r0);
    return;
  }
  EnsureOneFreeTOSRegister();
  top_of_stack_state_ = kStateAfterPush[top_of_stack_state_];
  __ mov(kTopRegister[top_of_stack_state_], operand);
}


void VirtualFrame::EmitPush(MemOperand operand, TypeInfo info) {
  RaiseHeight(1, info.IsSmi() ? 1 : 0);
  if (SpilledScope::is_spilled()) {
    __ ldr(r0, operand);
    __ push(r0);
    return;
  }
  EnsureOneFreeTOSRegister();
  top_of_stack_state_ = kStateAfterPush[top_of_stack_state_];
  __ ldr(kTopRegister[top_of_stack_state_], operand);
}


void VirtualFrame::EmitPushRoot(Heap::RootListIndex index) {
  RaiseHeight(1, 0);
  if (SpilledScope::is_spilled()) {
    __ LoadRoot(r0, index);
    __ push(r0);
    return;
  }
  EnsureOneFreeTOSRegister();
  top_of_stack_state_ = kStateAfterPush[top_of_stack_state_];
  __ LoadRoot(kTopRegister[top_of_stack_state_], index);
}


void VirtualFrame::EmitPushMultiple(int count, int src_regs) {
  ASSERT(SpilledScope::is_spilled());
  Adjust(count);
  __ stm(db_w, sp, src_regs);
}


void VirtualFrame::SpillAll() {
  switch (top_of_stack_state_) {
    case R1_R0_TOS:
      masm()->push(r0);
      // Fall through.
    case R1_TOS:
      masm()->push(r1);
      top_of_stack_state_ = NO_TOS_REGISTERS;
      break;
    case R0_R1_TOS:
      masm()->push(r1);
      // Fall through.
    case R0_TOS:
      masm()->push(r0);
      top_of_stack_state_ = NO_TOS_REGISTERS;
      // Fall through.
    case NO_TOS_REGISTERS:
      break;
    default:
      UNREACHABLE();
      break;
  }
  ASSERT(register_allocation_map_ == 0);  // Not yet implemented.
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM