| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | |
| 2 // Redistribution and use in source and binary forms, with or without | |
| 3 // modification, are permitted provided that the following conditions are | |
| 4 // met: | |
| 5 // | |
| 6 // * Redistributions of source code must retain the above copyright | |
| 7 // notice, this list of conditions and the following disclaimer. | |
| 8 // * Redistributions in binary form must reproduce the above | |
| 9 // copyright notice, this list of conditions and the following | |
| 10 // disclaimer in the documentation and/or other materials provided | |
| 11 // with the distribution. | |
| 12 // * Neither the name of Google Inc. nor the names of its | |
| 13 // contributors may be used to endorse or promote products derived | |
| 14 // from this software without specific prior written permission. | |
| 15 // | |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
| 27 | |
| 28 #include "v8.h" | |
| 29 | |
| 30 #include "bootstrapper.h" | |
| 31 #include "codegen-inl.h" | |
| 32 #include "debug.h" | |
| 33 #include "runtime.h" | |
| 34 | |
| 35 namespace v8 { namespace internal { | |
| 36 | |
| 37 // Give alias names to registers | |
| 38 Register cp = { 8 }; // JavaScript context pointer | |
| 39 Register pp = { 10 }; // parameter pointer | |
| 40 | |
| 41 | |
| 42 MacroAssembler::MacroAssembler(void* buffer, int size) | |
| 43 : Assembler(buffer, size), | |
| 44 unresolved_(0), | |
| 45 generating_stub_(false), | |
| 46 allow_stub_calls_(true), | |
| 47 code_object_(Heap::undefined_value()) { | |
| 48 } | |
| 49 | |
| 50 | |
| 51 // We always generate arm code, never thumb code, even if V8 is compiled to | |
| 52 // thumb, so we require inter-working support | |
| 53 #if defined(__thumb__) && !defined(__THUMB_INTERWORK__) | |
| 54 #error "flag -mthumb-interwork missing" | |
| 55 #endif | |
| 56 | |
| 57 | |
| 58 // We do not support thumb inter-working on an ARM architecture that lacks | |
| 59 // the blx instruction (below v5t). | |
| 60 #if defined(__THUMB_INTERWORK__) | |
| 61 #if !defined(__ARM_ARCH_5T__) && !defined(__ARM_ARCH_5TE__) | |
| 62 // add tests for other versions above v5t as required | |
| 63 #error "for thumb inter-working we require architecture v5t or above" | |
| 64 #endif | |
| 65 #endif | |
| 66 | |
| 67 | |
| 68 // Using blx may yield better code, so use it when required or when available | |
| 69 #if defined(__THUMB_INTERWORK__) || defined(__ARM_ARCH_5__) | |
| 70 #define USE_BLX 1 | |
| 71 #endif | |
| 72 | |
| 73 // Using bx does not yield better code, so use it only when required | |
| 74 #if defined(__THUMB_INTERWORK__) | |
| 75 #define USE_BX 1 | |
| 76 #endif | |
| 77 | |
| 78 | |
| 79 void MacroAssembler::Jump(Register target, Condition cond) { | |
| 80 #if USE_BX | |
| 81 bx(target, cond); | |
| 82 #else | |
| 83 mov(pc, Operand(target), LeaveCC, cond); | |
| 84 #endif | |
| 85 } | |
| 86 | |
| 87 | |
| 88 void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode, | |
| 89 Condition cond) { | |
| 90 #if USE_BX | |
| 91 mov(ip, Operand(target, rmode), LeaveCC, cond); | |
| 92 bx(ip, cond); | |
| 93 #else | |
| 94 mov(pc, Operand(target, rmode), LeaveCC, cond); | |
| 95 #endif | |
| 96 } | |
| 97 | |
| 98 | |
| 99 void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode, | |
| 100 Condition cond) { | |
| 101 ASSERT(!RelocInfo::IsCodeTarget(rmode)); | |
| 102 Jump(reinterpret_cast<intptr_t>(target), rmode, cond); | |
| 103 } | |
| 104 | |
| 105 | |
| 106 void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode, | |
| 107 Condition cond) { | |
| 108 ASSERT(RelocInfo::IsCodeTarget(rmode)); | |
| 109 // 'code' is always generated ARM code, never THUMB code | |
| 110 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | |
| 111 } | |
| 112 | |
| 113 | |
| 114 void MacroAssembler::Call(Register target, Condition cond) { | |
| 115 #if USE_BLX | |
| 116 blx(target, cond); | |
| 117 #else | |
| 118 // set lr for return at current pc + 8 | |
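| // (Reading pc yields the address of the current instruction plus 8, which is | |
| // the instruction after the mov to pc below, i.e. the return address.) | |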
| 119 mov(lr, Operand(pc), LeaveCC, cond); | |
| 120 mov(pc, Operand(target), LeaveCC, cond); | |
| 121 #endif | |
| 122 } | |
| 123 | |
| 124 | |
| 125 void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode, | |
| 126 Condition cond) { | |
| 127 #if !defined(__arm__) | |
| 128 if (rmode == RelocInfo::RUNTIME_ENTRY) { | |
| 129 mov(r2, Operand(target, rmode), LeaveCC, cond); | |
| 130 // Set lr for return at current pc + 8. | |
| 131 mov(lr, Operand(pc), LeaveCC, cond); | |
| 132 // In the simulator build the swi below takes the place of the ldr<cond> pc | |
| 133 // sequence; notify the simulator of the transition to C code. | |
| 134 swi(assembler::arm::call_rt_r2); | |
| 135 } else { | |
| 136 // set lr for return at current pc + 8 | |
| 137 mov(lr, Operand(pc), LeaveCC, cond); | |
| 138 // emit a ldr<cond> pc, [pc + offset of target in constant pool] | |
| 139 mov(pc, Operand(target, rmode), LeaveCC, cond); | |
| 140 } | |
| 141 #else | |
| 142 // Set lr for return at current pc + 8. | |
| 143 mov(lr, Operand(pc), LeaveCC, cond); | |
| 144 // Emit a ldr<cond> pc, [pc + offset of target in constant pool]. | |
| 145 mov(pc, Operand(target, rmode), LeaveCC, cond); | |
| 146 #endif // !defined(__arm__) | |
| 147 // If USE_BLX is defined, we could emit a 'mov ip, target', followed by a | |
| 148 // 'blx ip'; however, the code would not be shorter than the above sequence | |
| 149 // and the target address of the call would be referenced by the first | |
| 150 // instruction rather than the second one, which would make it harder to patch | |
| 151 // (two instructions before the return address, instead of one). | |
| 152 ASSERT(kTargetAddrToReturnAddrDist == sizeof(Instr)); | |
| 153 } | |
| 154 | |
| 155 | |
| 156 void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode, | |
| 157 Condition cond) { | |
| 158 ASSERT(!RelocInfo::IsCodeTarget(rmode)); | |
| 159 Call(reinterpret_cast<intptr_t>(target), rmode, cond); | |
| 160 } | |
| 161 | |
| 162 | |
| 163 void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode, | |
| 164 Condition cond) { | |
| 165 ASSERT(RelocInfo::IsCodeTarget(rmode)); | |
| 166 // 'code' is always generated ARM code, never THUMB code | |
| 167 Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond); | |
| 168 } | |
| 169 | |
| 170 | |
| 171 void MacroAssembler::Ret(Condition cond) { | |
| 172 #if USE_BX | |
| 173 bx(lr, cond); | |
| 174 #else | |
| 175 mov(pc, Operand(lr), LeaveCC, cond); | |
| 176 #endif | |
| 177 } | |
| 178 | |
| 179 | |
| 180 void MacroAssembler::SmiJumpTable(Register index, Vector<Label*> targets) { | |
| 181 // Empty the const pool. | |
| 182 CheckConstPool(true, true); | |
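| // Reading pc in the add below yields the add's address plus 8, which is the | |
| // first branch after the nop slot, so the scaled smi index selects an entry. | |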
| 183 add(pc, pc, Operand(index, | |
| 184 LSL, | |
| 185 assembler::arm::Instr::kInstrSizeLog2 - kSmiTagSize)); | |
| 186 BlockConstPoolBefore(pc_offset() + (targets.length() + 1) * sizeof(Instr)); | |
| 187 nop(); // Jump table alignment. | |
| 188 for (int i = 0; i < targets.length(); i++) { | |
| 189 b(targets[i]); | |
| 190 } | |
| 191 } | |
| 192 | |
| 193 | |
| 194 // Will clobber 4 registers: object, offset, scratch, ip. The | |
| 195 // register 'object' contains a heap object pointer. The heap object | |
| 196 // tag is shifted away. | |
| 197 void MacroAssembler::RecordWrite(Register object, Register offset, | |
| 198 Register scratch) { | |
| 199 // This is how much we shift the remembered set bit offset to get the | |
| 200 // offset of the word in the remembered set. We divide by kBitsPerInt (32, | |
| 201 // shift right 5) and then multiply by kIntSize (4, shift left 2). | |
| 202 const int kRSetWordShift = 3; | |
| 203 | |
| 204 Label fast, done; | |
| 205 | |
| 206 // First, test that the object is not in the new space. We cannot set | |
| 207 // remembered set bits in the new space. | |
| 208 // object: heap object pointer (with tag) | |
| 209 // offset: offset to store location from the object | |
| 210 and_(scratch, object, Operand(Heap::NewSpaceMask())); | |
| 211 cmp(scratch, Operand(ExternalReference::new_space_start())); | |
| 212 b(eq, &done); | |
| 213 | |
| 214 // Compute the bit offset in the remembered set. | |
| 215 // object: heap object pointer (with tag) | |
| 216 // offset: offset to store location from the object | |
| 217 mov(ip, Operand(Page::kPageAlignmentMask)); // load mask only once | |
| 218 and_(scratch, object, Operand(ip)); // offset into page of the object | |
| 219 add(offset, scratch, Operand(offset)); // add offset into the object | |
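| // The remembered set has one bit per pointer-sized word, so convert the | |
| // byte offset within the page into a bit index. | |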
| 220 mov(offset, Operand(offset, LSR, kObjectAlignmentBits)); | |
| 221 | |
| 222 // Compute the page address from the heap object pointer. | |
| 223 // object: heap object pointer (with tag) | |
| 224 // offset: bit offset of store position in the remembered set | |
| 225 bic(object, object, Operand(ip)); | |
| 226 | |
| 227 // If the bit offset lies beyond the normal remembered set range, it is in | |
| 228 // the extra remembered set area of a large object. | |
| 229 // object: page start | |
| 230 // offset: bit offset of store position in the remembered set | |
| 231 cmp(offset, Operand(Page::kPageSize / kPointerSize)); | |
| 232 b(lt, &fast); | |
| 233 | |
| 234 // Adjust the bit offset to be relative to the start of the extra | |
| 235 // remembered set and the start address to be the address of the extra | |
| 236 // remembered set. | |
| 237 sub(offset, offset, Operand(Page::kPageSize / kPointerSize)); | |
| 238 // Load the array length into 'scratch' and multiply by four to get the | |
| 239 // size in bytes of the elements. | |
| 240 ldr(scratch, MemOperand(object, Page::kObjectStartOffset | |
| 241 + FixedArray::kLengthOffset)); | |
| 242 mov(scratch, Operand(scratch, LSL, kObjectAlignmentBits)); | |
| 243 // Add the page header (including remembered set), array header, and array | |
| 244 // body size to the page address. | |
| 245 add(object, object, Operand(Page::kObjectStartOffset | |
| 246 + Array::kHeaderSize)); | |
| 247 add(object, object, Operand(scratch)); | |
| 248 | |
| 249 bind(&fast); | |
| 250 // Get address of the rset word. | |
| 251 // object: start of the remembered set (page start for the fast case) | |
| 252 // offset: bit offset of store position in the remembered set | |
| 253 bic(scratch, offset, Operand(kBitsPerInt - 1)); // clear the bit offset | |
| 254 add(object, object, Operand(scratch, LSR, kRSetWordShift)); | |
| 255 // Get bit offset in the rset word. | |
| 256 // object: address of remembered set word | |
| 257 // offset: bit offset of store position | |
| 258 and_(offset, offset, Operand(kBitsPerInt - 1)); | |
| 259 | |
| 260 ldr(scratch, MemOperand(object)); | |
| 261 mov(ip, Operand(1)); | |
| 262 orr(scratch, scratch, Operand(ip, LSL, offset)); | |
| 263 str(scratch, MemOperand(object)); | |
| 264 | |
| 265 bind(&done); | |
| 266 } | |
| 267 | |
| 268 | |
| 269 void MacroAssembler::EnterFrame(StackFrame::Type type) { | |
| 270 // r0-r3: preserved | |
| 271 stm(db_w, sp, cp.bit() | fp.bit() | lr.bit()); | |
| 272 mov(ip, Operand(Smi::FromInt(type))); | |
| 273 push(ip); | |
| 274 mov(ip, Operand(CodeObject())); | |
| 275 push(ip); | |
| 276 add(fp, sp, Operand(3 * kPointerSize)); // Adjust FP to point to saved FP. | |
| 277 } | |
| 278 | |
| 279 | |
| 280 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | |
| 281 // r0: preserved | |
| 282 // r1: preserved | |
| 283 // r2: preserved | |
| 284 | |
| 285 // Drop the execution stack down to the frame pointer and restore | |
| 286 // the caller frame pointer and return address. | |
| 287 mov(sp, fp); | |
| 288 ldm(ia_w, sp, fp.bit() | lr.bit()); | |
| 289 } | |
| 290 | |
| 291 | |
| 292 void MacroAssembler::EnterExitFrame(StackFrame::Type type) { | |
| 293 ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG); | |
| 294 // Compute parameter pointer before making changes and save it as ip | |
| 295 // register so that it is restored as sp register on exit, thereby | |
| 296 // popping the args. | |
| 297 | |
| 298 // ip = sp + kPointerSize * #args; | |
| 299 add(ip, sp, Operand(r0, LSL, kPointerSizeLog2)); | |
| 300 | |
| 301 // Push in reverse order: caller_fp, sp_on_exit, and caller_pc. | |
| 302 stm(db_w, sp, fp.bit() | ip.bit() | lr.bit()); | |
| 303 mov(fp, Operand(sp)); // set up new frame pointer | |
| 304 | |
| 305 // Push debug marker. | |
| 306 mov(ip, Operand(type == StackFrame::EXIT_DEBUG ? 1 : 0)); | |
| 307 push(ip); | |
| 308 | |
| 309 // Save the frame pointer and the context in top. | |
| 310 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); | |
| 311 str(fp, MemOperand(ip)); | |
| 312 mov(ip, Operand(ExternalReference(Top::k_context_address))); | |
| 313 str(cp, MemOperand(ip)); | |
| 314 | |
| 315 // Set up argc and the builtin function in callee-saved registers. | |
| 316 mov(r4, Operand(r0)); | |
| 317 mov(r5, Operand(r1)); | |
| 318 | |
| 319 // Compute the argv pointer and keep it in a callee-saved register. | |
| 320 add(r6, fp, Operand(r4, LSL, kPointerSizeLog2)); | |
| 321 add(r6, r6, Operand(ExitFrameConstants::kPPDisplacement - kPointerSize)); | |
| 322 | |
| 323 #ifdef ENABLE_DEBUGGER_SUPPORT | |
| 324 // Save the state of all registers to the stack from the memory | |
| 325 // location. This is needed to allow nested break points. | |
| 326 if (type == StackFrame::EXIT_DEBUG) { | |
| 327 // Use sp as base to push. | |
| 328 CopyRegistersFromMemoryToStack(sp, kJSCallerSaved); | |
| 329 } | |
| 330 #endif | |
| 331 } | |
| 332 | |
| 333 | |
| 334 void MacroAssembler::LeaveExitFrame(StackFrame::Type type) { | |
| 335 #ifdef ENABLE_DEBUGGER_SUPPORT | |
| 336 // Restore the memory copy of the registers by digging them out from | |
| 337 // the stack. This is needed to allow nested break points. | |
| 338 if (type == StackFrame::EXIT_DEBUG) { | |
| 339 // This code intentionally clobbers r2 and r3. | |
| 340 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize; | |
| 341 const int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize; | |
| 342 add(r3, fp, Operand(kOffset)); | |
| 343 CopyRegistersFromStackToMemory(r3, r2, kJSCallerSaved); | |
| 344 } | |
| 345 #endif | |
| 346 | |
| 347 // Clear top frame. | |
| 348 mov(r3, Operand(0)); | |
| 349 mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); | |
| 350 str(r3, MemOperand(ip)); | |
| 351 | |
| 352 // Restore current context from top and clear it in debug mode. | |
| 353 mov(ip, Operand(ExternalReference(Top::k_context_address))); | |
| 354 ldr(cp, MemOperand(ip)); | |
| 355 #ifdef DEBUG | |
| 356 str(r3, MemOperand(ip)); | |
| 357 #endif | |
| 358 | |
| 359 // Pop the arguments, restore registers, and return. | |
| 360 mov(sp, Operand(fp)); // respect ABI stack constraint | |
| 361 ldm(ia, sp, fp.bit() | sp.bit() | pc.bit()); | |
| 362 } | |
| 363 | |
| 364 | |
| 365 void MacroAssembler::InvokePrologue(const ParameterCount& expected, | |
| 366 const ParameterCount& actual, | |
| 367 Handle<Code> code_constant, | |
| 368 Register code_reg, | |
| 369 Label* done, | |
| 370 InvokeFlag flag) { | |
| 371 bool definitely_matches = false; | |
| 372 Label regular_invoke; | |
| 373 | |
| 374 // Check whether the expected and actual arguments count match. If not, | |
| 375 // set up registers according to the contract with ArgumentsAdaptorTrampoline: | |
| 376 // r0: actual arguments count | |
| 377 // r1: function (passed through to callee) | |
| 378 // r2: expected arguments count | |
| 379 // r3: callee code entry | |
| 380 | |
| 381 // The code below is made a lot easier because the calling code already sets | |
| 382 // up actual and expected registers according to the contract if values are | |
| 383 // passed in registers. | |
| 384 ASSERT(actual.is_immediate() || actual.reg().is(r0)); | |
| 385 ASSERT(expected.is_immediate() || expected.reg().is(r2)); | |
| 386 ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3)); | |
| 387 | |
| 388 if (expected.is_immediate()) { | |
| 389 ASSERT(actual.is_immediate()); | |
| 390 if (expected.immediate() == actual.immediate()) { | |
| 391 definitely_matches = true; | |
| 392 } else { | |
| 393 mov(r0, Operand(actual.immediate())); | |
| 394 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel; | |
| 395 if (expected.immediate() == sentinel) { | |
| 396 // Don't worry about adapting arguments for builtins that | |
| 397 // don't want that done. Skip adaptation code by making it look | |
| 398 // like we have a match between expected and actual number of | |
| 399 // arguments. | |
| 400 definitely_matches = true; | |
| 401 } else { | |
| 402 mov(r2, Operand(expected.immediate())); | |
| 403 } | |
| 404 } | |
| 405 } else { | |
| 406 if (actual.is_immediate()) { | |
| 407 cmp(expected.reg(), Operand(actual.immediate())); | |
| 408 b(eq, ®ular_invoke); | |
| 409 mov(r0, Operand(actual.immediate())); | |
| 410 } else { | |
| 411 cmp(expected.reg(), Operand(actual.reg())); | |
| 412 b(eq, ®ular_invoke); | |
| 413 } | |
| 414 } | |
| 415 | |
| 416 if (!definitely_matches) { | |
| 417 if (!code_constant.is_null()) { | |
| 418 mov(r3, Operand(code_constant)); | |
| 419 add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 420 } | |
| 421 | |
| 422 Handle<Code> adaptor = | |
| 423 Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); | |
| 424 if (flag == CALL_FUNCTION) { | |
| 425 Call(adaptor, RelocInfo::CODE_TARGET); | |
| 426 b(done); | |
| 427 } else { | |
| 428 Jump(adaptor, RelocInfo::CODE_TARGET); | |
| 429 } | |
| 430 bind(®ular_invoke); | |
| 431 } | |
| 432 } | |
| 433 | |
| 434 | |
| 435 void MacroAssembler::InvokeCode(Register code, | |
| 436 const ParameterCount& expected, | |
| 437 const ParameterCount& actual, | |
| 438 InvokeFlag flag) { | |
| 439 Label done; | |
| 440 | |
| 441 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag); | |
| 442 if (flag == CALL_FUNCTION) { | |
| 443 Call(code); | |
| 444 } else { | |
| 445 ASSERT(flag == JUMP_FUNCTION); | |
| 446 Jump(code); | |
| 447 } | |
| 448 | |
| 449 // Continue here if InvokePrologue handled the invocation itself because | |
| 450 // the parameter counts did not match. | |
| 451 bind(&done); | |
| 452 } | |
| 453 | |
| 454 | |
| 455 void MacroAssembler::InvokeCode(Handle<Code> code, | |
| 456 const ParameterCount& expected, | |
| 457 const ParameterCount& actual, | |
| 458 RelocInfo::Mode rmode, | |
| 459 InvokeFlag flag) { | |
| 460 Label done; | |
| 461 | |
| 462 InvokePrologue(expected, actual, code, no_reg, &done, flag); | |
| 463 if (flag == CALL_FUNCTION) { | |
| 464 Call(code, rmode); | |
| 465 } else { | |
| 466 Jump(code, rmode); | |
| 467 } | |
| 468 | |
| 469 // Continue here if InvokePrologue handled the invocation itself because | |
| 470 // the parameter counts did not match. | |
| 471 bind(&done); | |
| 472 } | |
| 473 | |
| 474 | |
| 475 void MacroAssembler::InvokeFunction(Register fun, | |
| 476 const ParameterCount& actual, | |
| 477 InvokeFlag flag) { | |
| 478 // Contract with called JS functions requires that function is passed in r1. | |
| 479 ASSERT(fun.is(r1)); | |
| 480 | |
| 481 Register expected_reg = r2; | |
| 482 Register code_reg = r3; | |
| 483 | |
| 484 ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); | |
| 485 ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); | |
| 486 ldr(expected_reg, | |
| 487 FieldMemOperand(code_reg, | |
| 488 SharedFunctionInfo::kFormalParameterCountOffset)); | |
| 489 ldr(code_reg, | |
| 490 MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag)); | |
| 491 add(code_reg, code_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 492 | |
| 493 ParameterCount expected(expected_reg); | |
| 494 InvokeCode(code_reg, expected, actual, flag); | |
| 495 } | |
| 496 | |
| 497 | |
| 498 #ifdef ENABLE_DEBUGGER_SUPPORT | |
| 499 void MacroAssembler::SaveRegistersToMemory(RegList regs) { | |
| 500 ASSERT((regs & ~kJSCallerSaved) == 0); | |
| 501 // Copy the content of registers to memory location. | |
| 502 for (int i = 0; i < kNumJSCallerSaved; i++) { | |
| 503 int r = JSCallerSavedCode(i); | |
| 504 if ((regs & (1 << r)) != 0) { | |
| 505 Register reg = { r }; | |
| 506 mov(ip, Operand(ExternalReference(Debug_Address::Register(i)))); | |
| 507 str(reg, MemOperand(ip)); | |
| 508 } | |
| 509 } | |
| 510 } | |
| 511 | |
| 512 | |
| 513 void MacroAssembler::RestoreRegistersFromMemory(RegList regs) { | |
| 514 ASSERT((regs & ~kJSCallerSaved) == 0); | |
| 515 // Copy the content of memory location to registers. | |
| 516 for (int i = kNumJSCallerSaved; --i >= 0;) { | |
| 517 int r = JSCallerSavedCode(i); | |
| 518 if ((regs & (1 << r)) != 0) { | |
| 519 Register reg = { r }; | |
| 520 mov(ip, Operand(ExternalReference(Debug_Address::Register(i)))); | |
| 521 ldr(reg, MemOperand(ip)); | |
| 522 } | |
| 523 } | |
| 524 } | |
| 525 | |
| 526 | |
| 527 void MacroAssembler::CopyRegistersFromMemoryToStack(Register base, | |
| 528 RegList regs) { | |
| 529 ASSERT((regs & ~kJSCallerSaved) == 0); | |
| 530 // Copy the content of the memory location to the stack and adjust base. | |
| 531 for (int i = kNumJSCallerSaved; --i >= 0;) { | |
| 532 int r = JSCallerSavedCode(i); | |
| 533 if ((regs & (1 << r)) != 0) { | |
| 534 mov(ip, Operand(ExternalReference(Debug_Address::Register(i)))); | |
| 535 ldr(ip, MemOperand(ip)); | |
| 536 str(ip, MemOperand(base, 4, NegPreIndex)); | |
| 537 } | |
| 538 } | |
| 539 } | |
| 540 | |
| 541 | |
| 542 void MacroAssembler::CopyRegistersFromStackToMemory(Register base, | |
| 543 Register scratch, | |
| 544 RegList regs) { | |
| 545 ASSERT((regs & ~kJSCallerSaved) == 0); | |
| 546 // Copy the content of the stack to the memory location and adjust base. | |
| 547 for (int i = 0; i < kNumJSCallerSaved; i++) { | |
| 548 int r = JSCallerSavedCode(i); | |
| 549 if ((regs & (1 << r)) != 0) { | |
| 550 mov(ip, Operand(ExternalReference(Debug_Address::Register(i)))); | |
| 551 ldr(scratch, MemOperand(base, 4, PostIndex)); | |
| 552 str(scratch, MemOperand(ip)); | |
| 553 } | |
| 554 } | |
| 555 } | |
| 556 #endif | |
| 557 | |
| 558 void MacroAssembler::PushTryHandler(CodeLocation try_location, | |
| 559 HandlerType type) { | |
| 560 ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize); // adjust this code | |
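| // A stack handler occupies six words: the saved pp, fp and lr, the handler | |
| // state, the address of the next handler, and a code slot pushed last. | |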
| 561 // The pc (return address) is passed in register lr. | |
| 562 if (try_location == IN_JAVASCRIPT) { | |
| 563 stm(db_w, sp, pp.bit() | fp.bit() | lr.bit()); | |
| 564 if (type == TRY_CATCH_HANDLER) { | |
| 565 mov(r3, Operand(StackHandler::TRY_CATCH)); | |
| 566 } else { | |
| 567 mov(r3, Operand(StackHandler::TRY_FINALLY)); | |
| 568 } | |
| 569 push(r3); // state | |
| 570 mov(r3, Operand(ExternalReference(Top::k_handler_address))); | |
| 571 ldr(r1, MemOperand(r3)); | |
| 572 push(r1); // next sp | |
| 573 str(sp, MemOperand(r3)); // chain handler | |
| 574 mov(r0, Operand(Smi::FromInt(StackHandler::kCodeNotPresent))); // new TOS | |
| 575 push(r0); | |
| 576 } else { | |
| 577 // Must preserve r0-r4; r5-r7 are available. | |
| 578 ASSERT(try_location == IN_JS_ENTRY); | |
| 579 // The parameter pointer is meaningless here and fp does not point to a JS | |
| 580 // frame. So we save NULL for both pp and fp. We expect the code throwing an | |
| 581 // exception to check fp before dereferencing it to restore the context. | |
| 582 mov(pp, Operand(0)); // set pp to NULL | |
| 583 mov(ip, Operand(0)); // to save a NULL fp | |
| 584 stm(db_w, sp, pp.bit() | ip.bit() | lr.bit()); | |
| 585 mov(r6, Operand(StackHandler::ENTRY)); | |
| 586 push(r6); // state | |
| 587 mov(r7, Operand(ExternalReference(Top::k_handler_address))); | |
| 588 ldr(r6, MemOperand(r7)); | |
| 589 push(r6); // next sp | |
| 590 str(sp, MemOperand(r7)); // chain handler | |
| 591 mov(r5, Operand(Smi::FromInt(StackHandler::kCodeNotPresent))); // new TOS | |
| 592 push(r5); // flush TOS | |
| 593 } | |
| 594 } | |
| 595 | |
| 596 | |
| 597 Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg, | |
| 598 JSObject* holder, Register holder_reg, | |
| 599 Register scratch, | |
| 600 Label* miss) { | |
| 601 // Make sure there's no overlap between scratch and the other | |
| 602 // registers. | |
| 603 ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg)); | |
| 604 | |
| 605 // Keep track of the current object in register reg. | |
| 606 Register reg = object_reg; | |
| 607 int depth = 1; | |
| 608 | |
| 609 // Check the maps in the prototype chain. | |
| 610 // Traverse the prototype chain from the object and do map checks. | |
| 611 while (object != holder) { | |
| 612 depth++; | |
| 613 | |
| 614 // Only global objects and objects that do not require access | |
| 615 // checks are allowed in stubs. | |
| 616 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); | |
| 617 | |
| 618 // Get the map of the current object. | |
| 619 ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); | |
| 620 cmp(scratch, Operand(Handle<Map>(object->map()))); | |
| 621 | |
| 622 // Branch on the result of the map check. | |
| 623 b(ne, miss); | |
| 624 | |
| 625 // Check access rights to the global object. This has to happen | |
| 626 // after the map check so that we know that the object is | |
| 627 // actually a global object. | |
| 628 if (object->IsJSGlobalProxy()) { | |
| 629 CheckAccessGlobalProxy(reg, scratch, miss); | |
| 630 // Restore scratch register to be the map of the object. In the | |
| 631 // new space case below, we load the prototype from the map in | |
| 632 // the scratch register. | |
| 633 ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); | |
| 634 } | |
| 635 | |
| 636 reg = holder_reg; // from now on the object is in holder_reg | |
| 637 JSObject* prototype = JSObject::cast(object->GetPrototype()); | |
| 638 if (Heap::InNewSpace(prototype)) { | |
| 639 // The prototype is in new space; we cannot store a reference | |
| 640 // to it in the code. Load it from the map. | |
| 641 ldr(reg, FieldMemOperand(scratch, Map::kPrototypeOffset)); | |
| 642 } else { | |
| 643 // The prototype is in old space; load it directly. | |
| 644 mov(reg, Operand(Handle<JSObject>(prototype))); | |
| 645 } | |
| 646 | |
| 647 // Go to the next object in the prototype chain. | |
| 648 object = prototype; | |
| 649 } | |
| 650 | |
| 651 // Check the holder map. | |
| 652 ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); | |
| 653 cmp(scratch, Operand(Handle<Map>(object->map()))); | |
| 654 b(ne, miss); | |
| 655 | |
| 656 // Log the check depth. | |
| 657 LOG(IntEvent("check-maps-depth", depth)); | |
| 658 | |
| 659 // Perform security check for access to the global object and return | |
| 660 // the holder register. | |
| 661 ASSERT(object == holder); | |
| 662 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); | |
| 663 if (object->IsJSGlobalProxy()) { | |
| 664 CheckAccessGlobalProxy(reg, scratch, miss); | |
| 665 } | |
| 666 return reg; | |
| 667 } | |
| 668 | |
| 669 | |
| 670 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, | |
| 671 Register scratch, | |
| 672 Label* miss) { | |
| 673 Label same_contexts; | |
| 674 | |
| 675 ASSERT(!holder_reg.is(scratch)); | |
| 676 ASSERT(!holder_reg.is(ip)); | |
| 677 ASSERT(!scratch.is(ip)); | |
| 678 | |
| 679 // Load current lexical context from the stack frame. | |
| 680 ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 681 // In debug mode, make sure the lexical context is set. | |
| 682 #ifdef DEBUG | |
| 683 cmp(scratch, Operand(0)); | |
| 684 Check(ne, "we should not have an empty lexical context"); | |
| 685 #endif | |
| 686 | |
| 687 // Load the global context of the current context. | |
| 688 int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; | |
| 689 ldr(scratch, FieldMemOperand(scratch, offset)); | |
| 690 ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset)); | |
| 691 | |
| 692 // Check the context is a global context. | |
| 693 if (FLAG_debug_code) { | |
| 694 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | |
| 695 // Cannot use ip as a temporary in this verification code because it is | |
| 696 // clobbered as part of cmp with an object Operand. | |
| 697 push(holder_reg); // Temporarily save holder on the stack. | |
| 698 // Read the first word and compare to the global_context_map. | |
| 699 ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset)); | |
| 700 cmp(holder_reg, Operand(Factory::global_context_map())); | |
| 701 Check(eq, "JSGlobalObject::global_context should be a global context."); | |
| 702 pop(holder_reg); // Restore holder. | |
| 703 } | |
| 704 | |
| 705 // Check if both contexts are the same. | |
| 706 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset)); | |
| 707 cmp(scratch, Operand(ip)); | |
| 708 b(eq, &same_contexts); | |
| 709 | |
| 710 // Check the context is a global context. | |
| 711 if (FLAG_debug_code) { | |
| 712 // TODO(119): avoid push(holder_reg)/pop(holder_reg) | |
| 713 // Cannot use ip as a temporary in this verification code because it is | |
| 714 // clobbered as part of cmp with an object Operand. | |
| 715 push(holder_reg); // Temporarily save holder on the stack. | |
| 716 mov(holder_reg, ip); // Move ip to its holding place. | |
| 717 cmp(holder_reg, Operand(Factory::null_value())); | |
| 718 Check(ne, "JSGlobalProxy::context() should not be null."); | |
| 719 | |
| 720 ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset)); | |
| 721 cmp(holder_reg, Operand(Factory::global_context_map())); | |
| 722 Check(eq, "JSGlobalObject::global_context should be a global context."); | |
| 723 // Restoring ip is not needed; ip is reloaded below. | |
| 724 pop(holder_reg); // Restore holder. | |
| 725 // Restore ip to holder's context. | |
| 726 ldr(ip, FieldMemOperand(holder_reg, JSGlobalProxy::kContextOffset)); | |
| 727 } | |
| 728 | |
| 729 // Check that the security token in the calling global object is | |
| 730 // compatible with the security token in the receiving global | |
| 731 // object. | |
| 732 int token_offset = Context::kHeaderSize + | |
| 733 Context::SECURITY_TOKEN_INDEX * kPointerSize; | |
| 734 | |
| 735 ldr(scratch, FieldMemOperand(scratch, token_offset)); | |
| 736 ldr(ip, FieldMemOperand(ip, token_offset)); | |
| 737 cmp(scratch, Operand(ip)); | |
| 738 b(ne, miss); | |
| 739 | |
| 740 bind(&same_contexts); | |
| 741 } | |
| 742 | |
| 743 | |
| 744 void MacroAssembler::CallStub(CodeStub* stub) { | |
| 745 ASSERT(allow_stub_calls()); // stub calls are not allowed in some stubs | |
| 746 Call(stub->GetCode(), RelocInfo::CODE_TARGET); | |
| 747 } | |
| 748 | |
| 749 | |
| 750 void MacroAssembler::StubReturn(int argc) { | |
| 751 ASSERT(argc >= 1 && generating_stub()); | |
| 752 if (argc > 1) | |
| 753 add(sp, sp, Operand((argc - 1) * kPointerSize)); | |
| 754 Ret(); | |
| 755 } | |
| 756 | |
| 757 | |
| 758 void MacroAssembler::IllegalOperation(int num_arguments) { | |
| 759 if (num_arguments > 0) { | |
| 760 add(sp, sp, Operand(num_arguments * kPointerSize)); | |
| 761 } | |
| 762 mov(r0, Operand(Factory::undefined_value())); | |
| 763 } | |
| 764 | |
| 765 | |
| 766 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { | |
| 767 // All parameters are on the stack. r0 has the return value after call. | |
| 768 | |
| 769 // If the expected number of arguments of the runtime function is | |
| 770 // constant, we check that the actual number of arguments match the | |
| 771 // expectation. | |
| 772 if (f->nargs >= 0 && f->nargs != num_arguments) { | |
| 773 IllegalOperation(num_arguments); | |
| 774 return; | |
| 775 } | |
| 776 | |
| 777 Runtime::FunctionId function_id = | |
| 778 static_cast<Runtime::FunctionId>(f->stub_id); | |
| 779 RuntimeStub stub(function_id, num_arguments); | |
| 780 CallStub(&stub); | |
| 781 } | |
| 782 | |
| 783 | |
| 784 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { | |
| 785 CallRuntime(Runtime::FunctionForId(fid), num_arguments); | |
| 786 } | |
| 787 | |
| 788 | |
| 789 void MacroAssembler::TailCallRuntime(const ExternalReference& ext, | |
| 790 int num_arguments) { | |
| 791 // TODO(1236192): Most runtime routines don't need the number of | |
| 792 // arguments passed in because it is constant. At some point we | |
| 793 // should remove this need and make the runtime routine entry code | |
| 794 // smarter. | |
| 795 mov(r0, Operand(num_arguments)); | |
| 796 JumpToBuiltin(ext); | |
| 797 } | |
| 798 | |
| 799 | |
| 800 void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) { | |
| 801 #if defined(__thumb__) | |
| 802 // Thumb mode builtin. | |
| 803 ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1); | |
| 804 #endif | |
| 805 mov(r1, Operand(builtin)); | |
| 806 CEntryStub stub; | |
| 807 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 808 } | |
| 809 | |
| 810 | |
| 811 Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id, | |
| 812 bool* resolved) { | |
| 813 // Contract with compiled functions is that the function is passed in r1. | |
| 814 int builtins_offset = | |
| 815 JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize); | |
| 816 ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | |
| 817 ldr(r1, FieldMemOperand(r1, GlobalObject::kBuiltinsOffset)); | |
| 818 ldr(r1, FieldMemOperand(r1, builtins_offset)); | |
| 819 | |
| 820 return Builtins::GetCode(id, resolved); | |
| 821 } | |
| 822 | |
| 823 | |
| 824 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | |
| 825 InvokeJSFlags flags) { | |
| 826 bool resolved; | |
| 827 Handle<Code> code = ResolveBuiltin(id, &resolved); | |
| 828 | |
| 829 if (flags == CALL_JS) { | |
| 830 Call(code, RelocInfo::CODE_TARGET); | |
| 831 } else { | |
| 832 ASSERT(flags == JUMP_JS); | |
| 833 Jump(code, RelocInfo::CODE_TARGET); | |
| 834 } | |
| 835 | |
| 836 if (!resolved) { | |
| 837 const char* name = Builtins::GetName(id); | |
| 838 int argc = Builtins::GetArgumentsCount(id); | |
| 839 uint32_t flags = | |
| 840 Bootstrapper::FixupFlagsArgumentsCount::encode(argc) | | |
| 841 Bootstrapper::FixupFlagsIsPCRelative::encode(true) | | |
| 842 Bootstrapper::FixupFlagsUseCodeObject::encode(false); | |
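| // Record the offset of the instruction holding the code target so the | |
| // bootstrapper can patch it once the builtin has been compiled. | |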
| 843 Unresolved entry = { pc_offset() - sizeof(Instr), flags, name }; | |
| 844 unresolved_.Add(entry); | |
| 845 } | |
| 846 } | |
| 847 | |
| 848 | |
| 849 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { | |
| 850 bool resolved; | |
| 851 Handle<Code> code = ResolveBuiltin(id, &resolved); | |
| 852 | |
| 853 mov(target, Operand(code)); | |
| 854 if (!resolved) { | |
| 855 const char* name = Builtins::GetName(id); | |
| 856 int argc = Builtins::GetArgumentsCount(id); | |
| 857 uint32_t flags = | |
| 858 Bootstrapper::FixupFlagsArgumentsCount::encode(argc) | | |
| 859 Bootstrapper::FixupFlagsIsPCRelative::encode(true) | | |
| 860 Bootstrapper::FixupFlagsUseCodeObject::encode(true); | |
| 861 Unresolved entry = { pc_offset() - sizeof(Instr), flags, name }; | |
| 862 unresolved_.Add(entry); | |
| 863 } | |
| 864 | |
| 865 add(target, target, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
| 866 } | |
| 867 | |
| 868 | |
| 869 void MacroAssembler::SetCounter(StatsCounter* counter, int value, | |
| 870 Register scratch1, Register scratch2) { | |
| 871 if (FLAG_native_code_counters && counter->Enabled()) { | |
| 872 mov(scratch1, Operand(value)); | |
| 873 mov(scratch2, Operand(ExternalReference(counter))); | |
| 874 str(scratch1, MemOperand(scratch2)); | |
| 875 } | |
| 876 } | |
| 877 | |
| 878 | |
| 879 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value, | |
| 880 Register scratch1, Register scratch2) { | |
| 881 ASSERT(value > 0); | |
| 882 if (FLAG_native_code_counters && counter->Enabled()) { | |
| 883 mov(scratch2, Operand(ExternalReference(counter))); | |
| 884 ldr(scratch1, MemOperand(scratch2)); | |
| 885 add(scratch1, scratch1, Operand(value)); | |
| 886 str(scratch1, MemOperand(scratch2)); | |
| 887 } | |
| 888 } | |
| 889 | |
| 890 | |
| 891 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value, | |
| 892 Register scratch1, Register scratch2) { | |
| 893 ASSERT(value > 0); | |
| 894 if (FLAG_native_code_counters && counter->Enabled()) { | |
| 895 mov(scratch2, Operand(ExternalReference(counter))); | |
| 896 ldr(scratch1, MemOperand(scratch2)); | |
| 897 sub(scratch1, scratch1, Operand(value)); | |
| 898 str(scratch1, MemOperand(scratch2)); | |
| 899 } | |
| 900 } | |
| 901 | |
| 902 | |
| 903 void MacroAssembler::Assert(Condition cc, const char* msg) { | |
| 904 if (FLAG_debug_code) | |
| 905 Check(cc, msg); | |
| 906 } | |
| 907 | |
| 908 | |
| 909 void MacroAssembler::Check(Condition cc, const char* msg) { | |
| 910 Label L; | |
| 911 b(cc, &L); | |
| 912 Abort(msg); | |
| 913 // will not return here | |
| 914 bind(&L); | |
| 915 } | |
| 916 | |
| 917 | |
| 918 void MacroAssembler::Abort(const char* msg) { | |
| 919 // We want to pass the msg string like a smi to avoid GC | |
| 920 // problems; however, msg is not guaranteed to be aligned | |
| 921 // properly. Instead, we pass an aligned pointer that is | |
| 922 // a proper v8 smi, but also pass the alignment difference | |
| 923 // from the real pointer as a smi. | |
| 924 intptr_t p1 = reinterpret_cast<intptr_t>(msg); | |
| 925 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; | |
| 926 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); | |
| 927 #ifdef DEBUG | |
| 928 if (msg != NULL) { | |
| 929 RecordComment("Abort message: "); | |
| 930 RecordComment(msg); | |
| 931 } | |
| 932 #endif | |
| 933 mov(r0, Operand(p0)); | |
| 934 push(r0); | |
| 935 mov(r0, Operand(Smi::FromInt(p1 - p0))); | |
| 936 push(r0); | |
| 937 CallRuntime(Runtime::kAbort, 2); | |
| 938 // will not return here | |
| 939 } | |
| 940 | |
| 941 } } // namespace v8::internal | |