OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 318 matching lines...) |
329 int3(); | 329 int3(); |
330 } | 330 } |
331 | 331 |
332 | 332 |
333 void MacroAssembler::CallStub(CodeStub* stub) { | 333 void MacroAssembler::CallStub(CodeStub* stub) { |
334 ASSERT(allow_stub_calls()); // calls are not allowed in some stubs | 334 ASSERT(allow_stub_calls()); // calls are not allowed in some stubs |
335 Call(stub->GetCode(), RelocInfo::CODE_TARGET); | 335 Call(stub->GetCode(), RelocInfo::CODE_TARGET); |
336 } | 336 } |
337 | 337 |
338 | 338 |
| 339 Object* MacroAssembler::TryCallStub(CodeStub* stub) { |
| 340 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. |
| 341 Object* result = stub->TryGetCode(); |
| 342 if (!result->IsFailure()) { |
| 343 call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); |
| 344 } |
| 345 return result; |
| 346 } |
| 347 |
| 348 |
339 void MacroAssembler::TailCallStub(CodeStub* stub) { | 349 void MacroAssembler::TailCallStub(CodeStub* stub) { |
340 ASSERT(allow_stub_calls()); // calls are not allowed in some stubs | 350 ASSERT(allow_stub_calls()); // calls are not allowed in some stubs |
341 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); | 351 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); |
342 } | 352 } |
343 | 353 |
344 | 354 |
| 355 Object* MacroAssembler::TryTailCallStub(CodeStub* stub) { |
| 356 ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs. |
| 357 Object* result = stub->TryGetCode(); |
| 358 if (!result->IsFailure()) { |
| 359 jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET); |
| 360 } |
| 361 return result; |
| 362 } |
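Note: the Try* stub helpers return the Object* produced by CodeStub::TryGetCode() instead of assuming code allocation succeeded, so a caller that is itself not allowed to allocate can propagate the failure upward. A minimal sketch of the intended calling pattern follows; the GenerateCall helper and its arguments are illustrative, not part of this CL.

// Hedged sketch (not part of this CL): how a code generator is expected to
// consume TryCallStub / TryTailCallStub.
Object* GenerateCall(MacroAssembler* masm, CodeStub* stub) {
  Object* result = masm->TryCallStub(stub);  // emits the call only on success
  if (result->IsFailure()) return result;    // propagate allocation failure
  // ... continue emitting code ...
  return result;
}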
| 363 |
| 364 |
345 void MacroAssembler::StubReturn(int argc) { | 365 void MacroAssembler::StubReturn(int argc) { |
346 ASSERT(argc >= 1 && generating_stub()); | 366 ASSERT(argc >= 1 && generating_stub()); |
347 ret((argc - 1) * kPointerSize); | 367 ret((argc - 1) * kPointerSize); |
348 } | 368 } |
349 | 369 |
350 | 370 |
351 void MacroAssembler::IllegalOperation(int num_arguments) { | 371 void MacroAssembler::IllegalOperation(int num_arguments) { |
352 if (num_arguments > 0) { | 372 if (num_arguments > 0) { |
353 addq(rsp, Immediate(num_arguments * kPointerSize)); | 373 addq(rsp, Immediate(num_arguments * kPointerSize)); |
354 } | 374 } |
355 LoadRoot(rax, Heap::kUndefinedValueRootIndex); | 375 LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
356 } | 376 } |
357 | 377 |
358 | 378 |
359 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { | 379 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { |
360 CallRuntime(Runtime::FunctionForId(id), num_arguments); | 380 CallRuntime(Runtime::FunctionForId(id), num_arguments); |
361 } | 381 } |
362 | 382 |
363 | 383 |
| 384 Object* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, |
| 385 int num_arguments) { |
| 386 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); |
| 387 } |
| 388 |
| 389 |
364 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { | 390 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { |
365 // If the expected number of arguments of the runtime function is | 391 // If the expected number of arguments of the runtime function is |
366 // constant, we check that the actual number of arguments match the | 392 // constant, we check that the actual number of arguments match the |
367 // expectation. | 393 // expectation. |
368 if (f->nargs >= 0 && f->nargs != num_arguments) { | 394 if (f->nargs >= 0 && f->nargs != num_arguments) { |
369 IllegalOperation(num_arguments); | 395 IllegalOperation(num_arguments); |
370 return; | 396 return; |
371 } | 397 } |
372 | 398 |
373 // TODO(1236192): Most runtime routines don't need the number of | 399 // TODO(1236192): Most runtime routines don't need the number of |
374 // arguments passed in because it is constant. At some point we | 400 // arguments passed in because it is constant. At some point we |
375 // should remove this need and make the runtime routine entry code | 401 // should remove this need and make the runtime routine entry code |
376 // smarter. | 402 // smarter. |
377 Set(rax, num_arguments); | 403 Set(rax, num_arguments); |
378 movq(rbx, ExternalReference(f)); | 404 movq(rbx, ExternalReference(f)); |
379 CEntryStub ces(f->result_size); | 405 CEntryStub ces(f->result_size); |
380 CallStub(&ces); | 406 CallStub(&ces); |
381 } | 407 } |
382 | 408 |
383 | 409 |
| 410 Object* MacroAssembler::TryCallRuntime(Runtime::Function* f, |
| 411 int num_arguments) { |
| 412 if (f->nargs >= 0 && f->nargs != num_arguments) { |
| 413 IllegalOperation(num_arguments); |
| 414 // Since we did not call the stub, there was no allocation failure. |
| 415 // Return some non-failure object. |
| 416 return Heap::undefined_value(); |
| 417 } |
| 418 |
| 419 // TODO(1236192): Most runtime routines don't need the number of |
| 420 // arguments passed in because it is constant. At some point we |
| 421 // should remove this need and make the runtime routine entry code |
| 422 // smarter. |
| 423 Set(rax, num_arguments); |
| 424 movq(rbx, ExternalReference(f)); |
| 425 CEntryStub ces(f->result_size); |
| 426 return TryCallStub(&ces); |
| 427 } |
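As with the stub helpers above, TryCallRuntime differs from CallRuntime only in how a code-object allocation failure inside CEntryStub's TryGetCode() is reported. A hedged sketch of the check a caller performs (the runtime id is simply the one used elsewhere in this file):

// Hedged sketch, assuming the usual code-generation context.
Object* result =
    masm->TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0);
if (result->IsFailure()) return result;  // bail out of code generation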
| 428 |
| 429 |
384 void MacroAssembler::CallExternalReference(const ExternalReference& ext, | 430 void MacroAssembler::CallExternalReference(const ExternalReference& ext, |
385 int num_arguments) { | 431 int num_arguments) { |
386 Set(rax, num_arguments); | 432 Set(rax, num_arguments); |
387 movq(rbx, ext); | 433 movq(rbx, ext); |
388 | 434 |
389 CEntryStub stub(1); | 435 CEntryStub stub(1); |
390 CallStub(&stub); | 436 CallStub(&stub); |
391 } | 437 } |
392 | 438 |
393 | 439 |
(...skipping 16 matching lines...) |
410 } | 456 } |
411 | 457 |
412 | 458 |
413 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, | 459 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, |
414 int num_arguments, | 460 int num_arguments, |
415 int result_size) { | 461 int result_size) { |
416 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); | 462 TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); |
417 } | 463 } |
418 | 464 |
419 | 465 |
| 466 static int Offset(ExternalReference ref0, ExternalReference ref1) { |
| 467 int64_t offset = (ref0.address() - ref1.address()); |
| 468 // Check that the offset fits into an int. |
| 469 ASSERT(static_cast<int>(offset) == offset); |
| 470 return static_cast<int>(offset); |
| 471 } |
| 472 |
| 473 |
| 474 void MacroAssembler::PushHandleScope(Register scratch) { |
| 475 ExternalReference extensions_address = |
| 476 ExternalReference::handle_scope_extensions_address(); |
| 477 const int kExtensionsOffset = 0; |
| 478 const int kNextOffset = Offset( |
| 479 ExternalReference::handle_scope_next_address(), |
| 480 extensions_address); |
| 481 const int kLimitOffset = Offset( |
| 482 ExternalReference::handle_scope_limit_address(), |
| 483 extensions_address); |
| 484 |
| 485 // Push the number of extensions, smi-tagged so the gc will ignore it. |
| 486 movq(kScratchRegister, extensions_address); |
| 487 movq(scratch, Operand(kScratchRegister, kExtensionsOffset)); |
| 488 movq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0)); |
| 489 Integer32ToSmi(scratch, scratch); |
| 490 push(scratch); |
| 491 // Push next and limit pointers which will be wordsize aligned and |
| 492 // hence automatically smi tagged. |
| 493 push(Operand(kScratchRegister, kNextOffset)); |
| 494 push(Operand(kScratchRegister, kLimitOffset)); |
| 495 } |
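The reason pushing the raw next and limit pointers is GC-safe is the smi encoding: kSmiTag is 0 and the tag occupies the low bit, so any word-aligned pointer already looks like a smi and is skipped by the stack scanner. A small sketch of the invariant the comment above relies on (the helper name is illustrative):

// Hedged sketch: kSmiTag == 0 and kSmiTagMask == 1, so 8-byte-aligned
// pointers have a clear low bit and are scanned as smis (ignored) by the GC.
static bool LooksLikeSmi(intptr_t word) {
  return (word & kSmiTagMask) == kSmiTag;
}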
| 496 |
| 497 |
| 498 Object* MacroAssembler::PopHandleScopeHelper(Register saved, |
| 499 Register scratch, |
| 500 bool gc_allowed) { |
| 501 ExternalReference extensions_address = |
| 502 ExternalReference::handle_scope_extensions_address(); |
| 503 const int kExtensionsOffset = 0; |
| 504 const int kNextOffset = Offset( |
| 505 ExternalReference::handle_scope_next_address(), |
| 506 extensions_address); |
| 507 const int kLimitOffset = Offset( |
| 508 ExternalReference::handle_scope_limit_address(), |
| 509 extensions_address); |
| 510 |
| 511 Object* result = NULL; |
| 512 Label write_back; |
| 513 movq(kScratchRegister, extensions_address); |
| 514 cmpq(Operand(kScratchRegister, kExtensionsOffset), Immediate(0)); |
| 515 j(equal, &write_back); |
| 516 push(saved); |
| 517 if (gc_allowed) { |
| 518 CallRuntime(Runtime::kDeleteHandleScopeExtensions, 0); |
| 519 } else { |
| 520 result = TryCallRuntime(Runtime::kDeleteHandleScopeExtensions, 0); |
| 521 if (result->IsFailure()) return result; |
| 522 } |
| 523 pop(saved); |
| 524 movq(kScratchRegister, extensions_address); |
| 525 |
| 526 bind(&write_back); |
| 527 pop(Operand(kScratchRegister, kLimitOffset)); |
| 528 pop(Operand(kScratchRegister, kNextOffset)); |
| 529 pop(scratch); |
| 530 SmiToInteger32(scratch, scratch); |
| 531 movq(Operand(kScratchRegister, kExtensionsOffset), scratch); |
| 532 |
| 533 return result; |
| 534 } |
| 535 |
| 536 |
| 537 void MacroAssembler::PopHandleScope(Register saved, Register scratch) { |
| 538 PopHandleScopeHelper(saved, scratch, true); |
| 539 } |
| 540 |
| 541 |
| 542 Object* MacroAssembler::TryPopHandleScope(Register saved, Register scratch) { |
| 543 return PopHandleScopeHelper(saved, scratch, false); |
| 544 } |
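Push/PopHandleScope are meant to bracket a call into external API code: the current handle scope bookkeeping is saved on the stack and the extensions count is zeroed, then the pop restores next/limit and deletes any extensions the callee allocated. A hedged sketch of the pairing with the non-allocating Try variant (register choices here are illustrative):

// Hedged sketch of the intended bracketing around an external API call.
masm->PushHandleScope(rcx);               // save next/limit/extensions
// ... emit the call into the API function; result ends up in rax ...
Object* result = masm->TryPopHandleScope(rax, rcx);  // rax is preserved
if (result->IsFailure()) return result;              // across the runtime call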
| 545 |
| 546 |
420 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, | 547 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext, |
421 int result_size) { | 548 int result_size) { |
422 // Set the entry point and jump to the C entry runtime stub. | 549 // Set the entry point and jump to the C entry runtime stub. |
423 movq(rbx, ext); | 550 movq(rbx, ext); |
424 CEntryStub ces(result_size); | 551 CEntryStub ces(result_size); |
425 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); | 552 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); |
426 } | 553 } |
427 | 554 |
428 | 555 |
429 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) { | 556 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) { |
(...skipping 1771 matching lines...) |
2201 if (FLAG_debug_code) { | 2328 if (FLAG_debug_code) { |
2202 Move(kScratchRegister, Smi::FromInt(type)); | 2329 Move(kScratchRegister, Smi::FromInt(type)); |
2203 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister); | 2330 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister); |
2204 Check(equal, "stack frame types must match"); | 2331 Check(equal, "stack frame types must match"); |
2205 } | 2332 } |
2206 movq(rsp, rbp); | 2333 movq(rsp, rbp); |
2207 pop(rbp); | 2334 pop(rbp); |
2208 } | 2335 } |
2209 | 2336 |
2210 | 2337 |
2211 void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) { | 2338 void MacroAssembler::EnterExitFramePrologue(ExitFrame::Mode mode, |
| 2339 bool save_rax) { |
2212 // Setup the frame structure on the stack. | 2340 // Setup the frame structure on the stack. |
2213 // All constants are relative to the frame pointer of the exit frame. | 2341 // All constants are relative to the frame pointer of the exit frame. |
2214 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); | 2342 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); |
2215 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); | 2343 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize); |
2216 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); | 2344 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize); |
2217 push(rbp); | 2345 push(rbp); |
2218 movq(rbp, rsp); | 2346 movq(rbp, rsp); |
2219 | 2347 |
2220 // Reserve room for entry stack pointer and push the debug marker. | 2348 // Reserve room for entry stack pointer and push the debug marker. |
2221 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); | 2349 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); |
2222 push(Immediate(0)); // Saved entry sp, patched before call. | 2350 push(Immediate(0)); // Saved entry sp, patched before call. |
2223 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); | 2351 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); |
2224 push(kScratchRegister); // Accessed from ExitFrame::code_slot. | 2352 push(kScratchRegister); // Accessed from ExitFrame::code_slot. |
2225 | 2353 |
2226 // Save the frame pointer and the context in top. | 2354 // Save the frame pointer and the context in top. |
2227 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); | 2355 ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); |
2228 ExternalReference context_address(Top::k_context_address); | 2356 ExternalReference context_address(Top::k_context_address); |
2229 movq(r14, rax); // Backup rax before we use it. | 2357 if (save_rax) { |
| 2358 movq(r14, rax); // Backup rax before we use it. |
| 2359 } |
2230 | 2360 |
2231 movq(rax, rbp); | 2361 movq(rax, rbp); |
2232 store_rax(c_entry_fp_address); | 2362 store_rax(c_entry_fp_address); |
2233 movq(rax, rsi); | 2363 movq(rax, rsi); |
2234 store_rax(context_address); | 2364 store_rax(context_address); |
| 2365 } |
2235 | 2366 |
2236 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame, | 2367 void MacroAssembler::EnterExitFrameEpilogue(ExitFrame::Mode mode, |
2237 // so it must be retained across the C-call. | 2368 int result_size, |
2238 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; | 2369 int argc) { |
2239 lea(r12, Operand(rbp, r14, times_pointer_size, offset)); | |
2240 | |
2241 #ifdef ENABLE_DEBUGGER_SUPPORT | 2370 #ifdef ENABLE_DEBUGGER_SUPPORT |
2242 // Save the state of all registers to the stack from the memory | 2371 // Save the state of all registers to the stack from the memory |
2243 // location. This is needed to allow nested break points. | 2372 // location. This is needed to allow nested break points. |
2244 if (mode == ExitFrame::MODE_DEBUG) { | 2373 if (mode == ExitFrame::MODE_DEBUG) { |
2245 // TODO(1243899): This should be symmetric to | 2374 // TODO(1243899): This should be symmetric to |
2246 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed | 2375 // CopyRegistersFromStackToMemory() but it isn't! esp is assumed |
2247 // correct here, but computed for the other call. Very error | 2376 // correct here, but computed for the other call. Very error |
2248 // prone! FIX THIS. Actually there are deeper problems with | 2377 // prone! FIX THIS. Actually there are deeper problems with |
2249 // register saving than this asymmetry (see the bug report | 2378 // register saving than this asymmetry (see the bug report |
2250 // associated with this issue). | 2379 // associated with this issue). |
2251 PushRegistersFromMemory(kJSCallerSaved); | 2380 PushRegistersFromMemory(kJSCallerSaved); |
2252 } | 2381 } |
2253 #endif | 2382 #endif |
2254 | 2383 |
2255 #ifdef _WIN64 | 2384 #ifdef _WIN64 |
2256 // Reserve space on stack for result and argument structures, if necessary. | 2385 // Reserve space on stack for result and argument structures, if necessary. |
2257 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize; | 2386 int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize; |
2258 // Reserve space for the Arguments object. The Windows 64-bit ABI | 2387 // Reserve space for the Arguments object. The Windows 64-bit ABI |
2259 // requires us to pass this structure as a pointer to its location on | 2388 // requires us to pass this structure as a pointer to its location on |
2260 // the stack. The structure contains 2 values. | 2389 // the stack. The structure contains 2 values. |
2261 int argument_stack_space = 2 * kPointerSize; | 2390 int argument_stack_space = argc * kPointerSize; |
2262 // We also need backing space for 4 parameters, even though | 2391 // We also need backing space for 4 parameters, even though |
2263 // we only pass one or two parameters, and they are in registers. | 2392 // we only pass one or two parameters, and they are in registers. |
2264 int argument_mirror_space = 4 * kPointerSize; | 2393 int argument_mirror_space = 4 * kPointerSize; |
2265 int total_stack_space = | 2394 int total_stack_space = |
2266 argument_mirror_space + argument_stack_space + result_stack_space; | 2395 argument_mirror_space + argument_stack_space + result_stack_space; |
2267 subq(rsp, Immediate(total_stack_space)); | 2396 subq(rsp, Immediate(total_stack_space)); |
2268 #endif | 2397 #endif |
2269 | 2398 |
2270 // Get the required frame alignment for the OS. | 2399 // Get the required frame alignment for the OS. |
2271 static const int kFrameAlignment = OS::ActivationFrameAlignment(); | 2400 static const int kFrameAlignment = OS::ActivationFrameAlignment(); |
2272 if (kFrameAlignment > 0) { | 2401 if (kFrameAlignment > 0) { |
2273 ASSERT(IsPowerOf2(kFrameAlignment)); | 2402 ASSERT(IsPowerOf2(kFrameAlignment)); |
2274 movq(kScratchRegister, Immediate(-kFrameAlignment)); | 2403 movq(kScratchRegister, Immediate(-kFrameAlignment)); |
2275 and_(rsp, kScratchRegister); | 2404 and_(rsp, kScratchRegister); |
2276 } | 2405 } |
2277 | 2406 |
2278 // Patch the saved entry sp. | 2407 // Patch the saved entry sp. |
2279 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); | 2408 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); |
2280 } | 2409 } |
2281 | 2410 |
2282 | 2411 |
| 2412 void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) { |
| 2413 EnterExitFramePrologue(mode, true); |
| 2414 |
| 2415 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame, |
| 2416 // so it must be retained across the C-call. |
| 2417 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; |
| 2418 lea(r12, Operand(rbp, r14, times_pointer_size, offset)); |
| 2419 |
| 2420 EnterExitFrameEpilogue(mode, result_size, 2); |
| 2421 } |
| 2422 |
| 2423 |
| 2424 void MacroAssembler::EnterApiExitFrame(ExitFrame::Mode mode, |
| 2425 int stack_space, |
| 2426 int argc, |
| 2427 int result_size) { |
| 2428 EnterExitFramePrologue(mode, false); |
| 2429 |
| 2430 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame, |
| 2431 // so it must be retained across the C-call. |
| 2432 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; |
| 2433 lea(r12, Operand(rbp, (stack_space * kPointerSize) + offset)); |
| 2434 |
| 2435 EnterExitFrameEpilogue(mode, result_size, argc); |
| 2436 } |
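The only difference from EnterExitFrame is how argv (r12) is computed: EnterExitFrame derives it from the dynamic argument count saved from rax into r14, while EnterApiExitFrame uses a stack_space known at compile time and therefore does not need to preserve rax. A hedged usage sketch, where the concrete counts are placeholders for whatever the API call site needs:

// Hedged sketch; the counts below are placeholders.
masm->EnterApiExitFrame(ExitFrame::MODE_NORMAL, /* stack_space */ 4,
                        /* argc */ 2, /* result_size */ 1);
// ... materialize arguments and call the API function ...
masm->LeaveExitFrame(ExitFrame::MODE_NORMAL, /* result_size */ 1);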
| 2437 |
| 2438 |
2283 void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) { | 2439 void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) { |
2284 // Registers: | 2440 // Registers: |
2285 // r12 : argv | 2441 // r12 : argv |
2286 #ifdef ENABLE_DEBUGGER_SUPPORT | 2442 #ifdef ENABLE_DEBUGGER_SUPPORT |
2287 // Restore the memory copy of the registers by digging them out from | 2443 // Restore the memory copy of the registers by digging them out from |
2288 // the stack. This is needed to allow nested break points. | 2444 // the stack. This is needed to allow nested break points. |
2289 if (mode == ExitFrame::MODE_DEBUG) { | 2445 if (mode == ExitFrame::MODE_DEBUG) { |
2290 // It's okay to clobber register rbx below because we don't need | 2446 // It's okay to clobber register rbx below because we don't need |
2291 // the function pointer after this. | 2447 // the function pointer after this. |
2292 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize; | 2448 const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize; |
(...skipping 491 matching lines...) |
2784 CPU::FlushICache(address_, size_); | 2940 CPU::FlushICache(address_, size_); |
2785 | 2941 |
2786 // Check that the code was patched as expected. | 2942 // Check that the code was patched as expected. |
2787 ASSERT(masm_.pc_ == address_ + size_); | 2943 ASSERT(masm_.pc_ == address_ + size_); |
2788 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2944 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2789 } | 2945 } |
2790 | 2946 |
2791 } } // namespace v8::internal | 2947 } } // namespace v8::internal |
2792 | 2948 |
2793 #endif // V8_TARGET_ARCH_X64 | 2949 #endif // V8_TARGET_ARCH_X64 |