| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2474 matching lines...) |
| 2485 movq(dst, src); | 2485 movq(dst, src); |
| 2486 } | 2486 } |
| 2487 } | 2487 } |
| 2488 | 2488 |
| 2489 | 2489 |
| 2490 void MacroAssembler::Move(Register dst, Handle<Object> source) { | 2490 void MacroAssembler::Move(Register dst, Handle<Object> source) { |
| 2491 AllowDeferredHandleDereference smi_check; | 2491 AllowDeferredHandleDereference smi_check; |
| 2492 if (source->IsSmi()) { | 2492 if (source->IsSmi()) { |
| 2493 Move(dst, Smi::cast(*source)); | 2493 Move(dst, Smi::cast(*source)); |
| 2494 } else { | 2494 } else { |
| 2495 ASSERT(source->IsHeapObject()); | 2495 MoveHeapObject(dst, source); |
| 2496 movq(dst, source, RelocInfo::EMBEDDED_OBJECT); | |
| 2497 } | 2496 } |
| 2498 } | 2497 } |
| 2499 | 2498 |
| 2500 | 2499 |
| 2501 void MacroAssembler::Move(const Operand& dst, Handle<Object> source) { | 2500 void MacroAssembler::Move(const Operand& dst, Handle<Object> source) { |
| 2502 AllowDeferredHandleDereference smi_check; | 2501 AllowDeferredHandleDereference smi_check; |
| 2503 if (source->IsSmi()) { | 2502 if (source->IsSmi()) { |
| 2504 Move(dst, Smi::cast(*source)); | 2503 Move(dst, Smi::cast(*source)); |
| 2505 } else { | 2504 } else { |
| 2506 ASSERT(source->IsHeapObject()); | 2505 MoveHeapObject(kScratchRegister, source); |
| 2507 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); | |
| 2508 movq(dst, kScratchRegister); | 2506 movq(dst, kScratchRegister); |
| 2509 } | 2507 } |
| 2510 } | 2508 } |
| 2511 | 2509 |
| 2512 | 2510 |
| 2513 void MacroAssembler::Cmp(Register dst, Handle<Object> source) { | 2511 void MacroAssembler::Cmp(Register dst, Handle<Object> source) { |
| 2514 AllowDeferredHandleDereference smi_check; | 2512 AllowDeferredHandleDereference smi_check; |
| 2515 if (source->IsSmi()) { | 2513 if (source->IsSmi()) { |
| 2516 Cmp(dst, Smi::cast(*source)); | 2514 Cmp(dst, Smi::cast(*source)); |
| 2517 } else { | 2515 } else { |
| 2518 ASSERT(source->IsHeapObject()); | 2516 MoveHeapObject(kScratchRegister, source); |
| 2519 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); | |
| 2520 cmpq(dst, kScratchRegister); | 2517 cmpq(dst, kScratchRegister); |
| 2521 } | 2518 } |
| 2522 } | 2519 } |
| 2523 | 2520 |
| 2524 | 2521 |
| 2525 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) { | 2522 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) { |
| 2526 AllowDeferredHandleDereference smi_check; | 2523 AllowDeferredHandleDereference smi_check; |
| 2527 if (source->IsSmi()) { | 2524 if (source->IsSmi()) { |
| 2528 Cmp(dst, Smi::cast(*source)); | 2525 Cmp(dst, Smi::cast(*source)); |
| 2529 } else { | 2526 } else { |
| 2530 ASSERT(source->IsHeapObject()); | 2527 MoveHeapObject(kScratchRegister, source); |
| 2531 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); | |
| 2532 cmpq(dst, kScratchRegister); | 2528 cmpq(dst, kScratchRegister); |
| 2533 } | 2529 } |
| 2534 } | 2530 } |
| 2535 | 2531 |
| 2536 | 2532 |
| 2537 void MacroAssembler::Push(Handle<Object> source) { | 2533 void MacroAssembler::Push(Handle<Object> source) { |
| 2538 AllowDeferredHandleDereference smi_check; | 2534 AllowDeferredHandleDereference smi_check; |
| 2539 if (source->IsSmi()) { | 2535 if (source->IsSmi()) { |
| 2540 Push(Smi::cast(*source)); | 2536 Push(Smi::cast(*source)); |
| 2541 } else { | 2537 } else { |
| 2542 ASSERT(source->IsHeapObject()); | 2538 MoveHeapObject(kScratchRegister, source); |
| 2543 movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); | |
| 2544 push(kScratchRegister); | 2539 push(kScratchRegister); |
| 2545 } | 2540 } |
| 2546 } | 2541 } |
| 2547 | 2542 |
| 2548 | 2543 |
| 2549 void MacroAssembler::LoadHeapObject(Register result, | 2544 void MacroAssembler::MoveHeapObject(Register result, |
| 2550 Handle<HeapObject> object) { | 2545 Handle<Object> object) { |
| 2551 AllowDeferredHandleDereference using_raw_address; | 2546 AllowDeferredHandleDereference using_raw_address; |
| 2547 ASSERT(object->IsHeapObject()); |
| 2552 if (isolate()->heap()->InNewSpace(*object)) { | 2548 if (isolate()->heap()->InNewSpace(*object)) { |
| 2553 Handle<Cell> cell = isolate()->factory()->NewCell(object); | 2549 Handle<Cell> cell = isolate()->factory()->NewCell(object); |
| 2554 movq(result, cell, RelocInfo::CELL); | 2550 movq(result, cell, RelocInfo::CELL); |
| 2555 movq(result, Operand(result, 0)); | 2551 movq(result, Operand(result, 0)); |
| 2556 } else { | 2552 } else { |
| 2557 Move(result, object); | 2553 movq(result, object, RelocInfo::EMBEDDED_OBJECT); |
| 2558 } | 2554 } |
| 2559 } | 2555 } |
| 2560 | 2556 |
| 2561 | |
| 2562 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) { | |
| 2563 AllowDeferredHandleDereference using_raw_address; | |
| 2564 if (isolate()->heap()->InNewSpace(*object)) { | |
| 2565 Handle<Cell> cell = isolate()->factory()->NewCell(object); | |
| 2566 movq(kScratchRegister, cell, RelocInfo::CELL); | |
| 2567 cmpq(reg, Operand(kScratchRegister, 0)); | |
| 2568 } else { | |
| 2569 Cmp(reg, object); | |
| 2570 } | |
| 2571 } | |
| 2572 | |
| 2573 | |
| 2574 void MacroAssembler::PushHeapObject(Handle<HeapObject> object) { | |
| 2575 AllowDeferredHandleDereference using_raw_address; | |
| 2576 if (isolate()->heap()->InNewSpace(*object)) { | |
| 2577 Handle<Cell> cell = isolate()->factory()->NewCell(object); | |
| 2578 movq(kScratchRegister, cell, RelocInfo::CELL); | |
| 2579 movq(kScratchRegister, Operand(kScratchRegister, 0)); | |
| 2580 push(kScratchRegister); | |
| 2581 } else { | |
| 2582 Push(object); | |
| 2583 } | |
| 2584 } | |
| 2585 | |
| 2586 | 2557 |
| 2587 void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) { | 2558 void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) { |
| 2588 if (dst.is(rax)) { | 2559 if (dst.is(rax)) { |
| 2589 AllowDeferredHandleDereference embedding_raw_address; | 2560 AllowDeferredHandleDereference embedding_raw_address; |
| 2590 load_rax(cell.location(), RelocInfo::CELL); | 2561 load_rax(cell.location(), RelocInfo::CELL); |
| 2591 } else { | 2562 } else { |
| 2592 movq(dst, cell, RelocInfo::CELL); | 2563 movq(dst, cell, RelocInfo::CELL); |
| 2593 movq(dst, Operand(dst, 0)); | 2564 movq(dst, Operand(dst, 0)); |
| 2594 } | 2565 } |
| 2595 } | 2566 } |
| (...skipping 988 matching lines...) |
| 3584 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, | 3555 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, |
| 3585 const ParameterCount& expected, | 3556 const ParameterCount& expected, |
| 3586 const ParameterCount& actual, | 3557 const ParameterCount& actual, |
| 3587 InvokeFlag flag, | 3558 InvokeFlag flag, |
| 3588 const CallWrapper& call_wrapper, | 3559 const CallWrapper& call_wrapper, |
| 3589 CallKind call_kind) { | 3560 CallKind call_kind) { |
| 3590 // You can't call a function without a valid frame. | 3561 // You can't call a function without a valid frame. |
| 3591 ASSERT(flag == JUMP_FUNCTION || has_frame()); | 3562 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
| 3592 | 3563 |
| 3593 // Get the function and setup the context. | 3564 // Get the function and setup the context. |
| 3594 LoadHeapObject(rdi, function); | 3565 Move(rdi, function); |
| 3595 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 3566 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 3596 | 3567 |
| 3597 // We call indirectly through the code field in the function to | 3568 // We call indirectly through the code field in the function to |
| 3598 // allow recompilation to take effect without changing any of the | 3569 // allow recompilation to take effect without changing any of the |
| 3599 // call sites. | 3570 // call sites. |
| 3600 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | 3571 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); |
| 3601 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind); | 3572 InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind); |
| 3602 } | 3573 } |
| 3603 | 3574 |
| 3604 | 3575 |
| (...skipping 1363 matching lines...) |
| 4968 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); | 4939 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); |
| 4969 CallCFunction( | 4940 CallCFunction( |
| 4970 ExternalReference::record_object_allocation_function(isolate), 3); | 4941 ExternalReference::record_object_allocation_function(isolate), 3); |
| 4971 PopSafepointRegisters(); | 4942 PopSafepointRegisters(); |
| 4972 } | 4943 } |
| 4973 | 4944 |
| 4974 | 4945 |
| 4975 } } // namespace v8::internal | 4946 } } // namespace v8::internal |
| 4976 | 4947 |
| 4977 #endif // V8_TARGET_ARCH_X64 | 4948 #endif // V8_TARGET_ARCH_X64 |