| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 487 matching lines...) |
| 498 Label ok; | 498 Label ok; |
| 499 testl(result, result); | 499 testl(result, result); |
| 500 j(not_zero, &ok, Label::kNear); | 500 j(not_zero, &ok, Label::kNear); |
| 501 testl(op, op); | 501 testl(op, op); |
| 502 j(sign, then_label); | 502 j(sign, then_label); |
| 503 bind(&ok); | 503 bind(&ok); |
| 504 } | 504 } |
| 505 | 505 |
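The hunk above is the tail of a negative-zero test: a zero result from an integer multiply may only stay a smi if the surviving operand is non-negative, because a negative operand means the exact product was -0, which must be boxed as a heap number. A plain C++ restatement of the emitted condition (standalone sketch, not V8 API):

    #include <cstdint>

    // testl(result, result) / j(not_zero, &ok): a nonzero result is safe.
    // testl(op, op) / j(sign, then_label): a zero result with a negative
    // operand means the product was really -0, so bail out.
    bool NeedsNegativeZeroBailout(int32_t result, int32_t op) {
      if (result != 0) return false;
      return op < 0;
    }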
| 506 | 506 |
| 507 void MacroAssembler::Abort(BailoutReason reason) { | 507 void MacroAssembler::Abort(BailoutReason reason) { |
| | 508 // We want to pass the msg string like a smi to avoid GC |
| | 509 // problems, however msg is not guaranteed to be aligned |
| | 510 // properly. Instead, we pass an aligned pointer that is |
| | 511 // a proper v8 smi, but also pass the alignment difference |
| | 512 // from the real pointer as a smi. |
| | 513 const char* msg = GetBailoutReason(reason); |
| | 514 intptr_t p1 = reinterpret_cast<intptr_t>(msg); |
| | 515 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; |
| | 516 // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag. |
| | 517 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); |
| 508 #ifdef DEBUG | 518 #ifdef DEBUG |
| 509 const char* msg = GetBailoutReason(reason); | |
| 510 if (msg != NULL) { | 519 if (msg != NULL) { |
| 511 RecordComment("Abort message: "); | 520 RecordComment("Abort message: "); |
| 512 RecordComment(msg); | 521 RecordComment(msg); |
| 513 } | 522 } |
| 514 | 523 |
| 515 if (FLAG_trap_on_abort) { | 524 if (FLAG_trap_on_abort) { |
| 516 int3(); | 525 int3(); |
| 517 return; | 526 return; |
| 518 } | 527 } |
| 519 #endif | 528 #endif |
| 520 | 529 |
| 521 push(rax); | 530 push(rax); |
| 522 Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)), | 531 Move(kScratchRegister, reinterpret_cast<Smi*>(p0), |
| | 532 Assembler::RelocInfoNone()); |
| | 533 push(kScratchRegister); |
| | 534 Move(kScratchRegister, Smi::FromInt(static_cast<int>(p1 - p0)), |
| 523 Assembler::RelocInfoNone()); | 535 Assembler::RelocInfoNone()); |
| 524 push(kScratchRegister); | 536 push(kScratchRegister); |
| 525 | 537 |
| 526 if (!has_frame_) { | 538 if (!has_frame_) { |
| 527 // We don't actually want to generate a pile of code for this, so just | 539 // We don't actually want to generate a pile of code for this, so just |
| 528 // claim there is a stack frame, without generating one. | 540 // claim there is a stack frame, without generating one. |
| 529 FrameScope scope(this, StackFrame::NONE); | 541 FrameScope scope(this, StackFrame::NONE); |
| 530 CallRuntime(Runtime::kAbort, 1); | 542 CallRuntime(Runtime::kAbort, 2); |
| 531 } else { | 543 } else { |
| 532 CallRuntime(Runtime::kAbort, 1); | 544 CallRuntime(Runtime::kAbort, 2); |
| 533 } | 545 } |
| 534 // Control will not return here. | 546 // Control will not return here. |
| 535 int3(); | 547 int3(); |
| 536 } | 548 } |
| 537 | 549 |
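The p0/p1 bookkeeping in the new Abort is worth restating: the low tag bit of the message pointer is cleared so the pushed value carries a valid smi tag (and the GC leaves it alone), and the bit that was cleared travels separately as a genuine smi. A standalone C++ sketch of the round trip, assuming the x64 values kSmiTagMask == 1 and kSmiTag == 0:

    #include <cstdint>

    const intptr_t kSmiTagMask = 1;
    const intptr_t kSmiTag = 0;

    // Split a possibly unaligned char* into a smi-tagged base and a delta.
    void EncodeAbortMessage(const char* msg, intptr_t* p0, intptr_t* delta) {
      intptr_t p1 = reinterpret_cast<intptr_t>(msg);
      *p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // low bit cleared: valid smi tag
      *delta = p1 - *p0;                    // 0 or 1, itself a tiny smi
    }

    // The runtime side reverses the split to recover the original pointer.
    const char* DecodeAbortMessage(intptr_t p0, intptr_t delta) {
      return reinterpret_cast<const char*>(p0 + delta);
    }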
| 538 | 550 |
| 539 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) { | 551 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) { |
| 540 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs | 552 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs |
| 541 Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id); | 553 Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id); |
| 542 } | 554 } |
| (...skipping 422 matching lines...) |
| 965 } else if (is_uint32(x)) { | 977 } else if (is_uint32(x)) { |
| 966 movl(dst, Immediate(static_cast<uint32_t>(x))); | 978 movl(dst, Immediate(static_cast<uint32_t>(x))); |
| 967 } else if (is_int32(x)) { | 979 } else if (is_int32(x)) { |
| 968 movq(dst, Immediate(static_cast<int32_t>(x))); | 980 movq(dst, Immediate(static_cast<int32_t>(x))); |
| 969 } else { | 981 } else { |
| 970 movq(dst, x); | 982 movq(dst, x); |
| 971 } | 983 } |
| 972 } | 984 } |
| 973 | 985 |
| 974 | 986 |
| 975 void MacroAssembler::Set(const Operand& dst, intptr_t x) { | 987 void MacroAssembler::Set(const Operand& dst, int64_t x) { |
| 976 if (kPointerSize == kInt64Size) { | 988 if (is_int32(x)) { |
| 977 if (is_int32(x)) { | 989 movq(dst, Immediate(static_cast<int32_t>(x))); |
| 978 movp(dst, Immediate(static_cast<int32_t>(x))); | |
| 979 } else { | |
| 980 Set(kScratchRegister, x); | |
| 981 movp(dst, kScratchRegister); | |
| 982 } | |
| 983 } else { | 990 } else { |
| 984 ASSERT(kPointerSize == kInt32Size); | 991 Set(kScratchRegister, x); |
| 985 movp(dst, Immediate(static_cast<int32_t>(x))); | 992 movq(dst, kScratchRegister); |
| 986 } | 993 } |
| 987 } | 994 } |
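The branch structure in both Set overloads follows x64 immediate encodings: movl with a 32-bit immediate zero-extends into the full 64-bit register, movq accepts only a sign-extended 32-bit immediate (for memory destinations too), and a genuinely 64-bit constant must be materialized in a register first, hence the kScratchRegister fallback. The range predicates the code leans on, spelled out for clarity (V8 defines them in its utils):

    #include <cstdint>

    static inline bool is_int32(int64_t x) {
      return x == static_cast<int32_t>(x);   // survives 32-bit sign-extension
    }

    static inline bool is_uint32(int64_t x) {
      return x == static_cast<uint32_t>(x);  // survives 32-bit zero-extension
    }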
| 988 | 995 |
| 989 | 996 |
| 990 // ---------------------------------------------------------------------------- | 997 // ---------------------------------------------------------------------------- |
| 991 // Smi tagging, untagging and tag detection. | 998 // Smi tagging, untagging and tag detection. |
| 992 | 999 |
| 993 bool MacroAssembler::IsUnsafeInt(const int32_t x) { | 1000 bool MacroAssembler::IsUnsafeInt(const int32_t x) { |
| 994 static const int kMaxBits = 17; | 1001 static const int kMaxBits = 17; |
| 995 return !is_intn(x, kMaxBits); | 1002 return !is_intn(x, kMaxBits); |
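IsUnsafeInt flags any constant outside a 17-bit signed range; in this file it appears to feed the SafeMove/SafePush paths that mask wide immediates with a jit cookie. is_intn is the generic fits-in-n-bits predicate; a standalone spelling of it (V8 keeps this in its utils; shown here only for illustration):

    #include <cstdint>

    // True iff x is representable as an n-bit two's-complement integer.
    static inline bool is_intn(int64_t x, int n) {
      return -(INT64_C(1) << (n - 1)) <= x && x < (INT64_C(1) << (n - 1));
    }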
| (...skipping 1582 matching lines...) |
| 2578 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); | 2585 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); |
| 2579 } | 2586 } |
| 2580 | 2587 |
| 2581 | 2588 |
| 2582 void MacroAssembler::Jump(ExternalReference ext) { | 2589 void MacroAssembler::Jump(ExternalReference ext) { |
| 2583 LoadAddress(kScratchRegister, ext); | 2590 LoadAddress(kScratchRegister, ext); |
| 2584 jmp(kScratchRegister); | 2591 jmp(kScratchRegister); |
| 2585 } | 2592 } |
| 2586 | 2593 |
| 2587 | 2594 |
| 2588 void MacroAssembler::Jump(const Operand& op) { | |
| 2589 if (kPointerSize == kInt64Size) { | |
| 2590 jmp(op); | |
| 2591 } else { | |
| 2592 ASSERT(kPointerSize == kInt32Size); | |
| 2593 movp(kScratchRegister, op); | |
| 2594 jmp(kScratchRegister); | |
| 2595 } | |
| 2596 } | |
| 2597 | |
| 2598 | |
| 2599 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { | 2595 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) { |
| 2600 Move(kScratchRegister, destination, rmode); | 2596 Move(kScratchRegister, destination, rmode); |
| 2601 jmp(kScratchRegister); | 2597 jmp(kScratchRegister); |
| 2602 } | 2598 } |
| 2603 | 2599 |
| 2604 | 2600 |
| 2605 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) { | 2601 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) { |
| 2606 // TODO(X64): Inline this | 2602 // TODO(X64): Inline this |
| 2607 jmp(code_object, rmode); | 2603 jmp(code_object, rmode); |
| 2608 } | 2604 } |
| (...skipping 11 matching lines...) |
| 2620 int end_position = pc_offset() + CallSize(ext); | 2616 int end_position = pc_offset() + CallSize(ext); |
| 2621 #endif | 2617 #endif |
| 2622 LoadAddress(kScratchRegister, ext); | 2618 LoadAddress(kScratchRegister, ext); |
| 2623 call(kScratchRegister); | 2619 call(kScratchRegister); |
| 2624 #ifdef DEBUG | 2620 #ifdef DEBUG |
| 2625 CHECK_EQ(end_position, pc_offset()); | 2621 CHECK_EQ(end_position, pc_offset()); |
| 2626 #endif | 2622 #endif |
| 2627 } | 2623 } |
| 2628 | 2624 |
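The CallSize/pc_offset pairing here is a predict-then-verify check: CallSize(ext) must return exactly the byte length of the sequence Call(ext) emits, since other code computes patch offsets from the prediction. A schematic of the pattern as a hypothetical helper (not V8 code, though pc_offset, CallSize, and CHECK_EQ are the real names):

    // Predict the emitted size, emit, then assert the prediction held.
    void EmitCallChecked(MacroAssembler* masm, ExternalReference ext) {
    #ifdef DEBUG
      int end_position = masm->pc_offset() + masm->CallSize(ext);
    #endif
      masm->Call(ext);
    #ifdef DEBUG
      CHECK_EQ(end_position, masm->pc_offset());
    #endif
    }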
| 2629 | 2625 |
| 2630 void MacroAssembler::Call(const Operand& op) { | |
| 2631 if (kPointerSize == kInt64Size) { | |
| 2632 call(op); | |
| 2633 } else { | |
| 2634 ASSERT(kPointerSize == kInt32Size); | |
| 2635 movp(kScratchRegister, op); | |
| 2636 call(kScratchRegister); | |
| 2637 } | |
| 2638 } | |
| 2639 | |
| 2640 | |
| 2641 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { | 2626 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { |
| 2642 #ifdef DEBUG | 2627 #ifdef DEBUG |
| 2643 int end_position = pc_offset() + CallSize(destination); | 2628 int end_position = pc_offset() + CallSize(destination); |
| 2644 #endif | 2629 #endif |
| 2645 Move(kScratchRegister, destination, rmode); | 2630 Move(kScratchRegister, destination, rmode); |
| 2646 call(kScratchRegister); | 2631 call(kScratchRegister); |
| 2647 #ifdef DEBUG | 2632 #ifdef DEBUG |
| 2648 CHECK_EQ(pc_offset(), end_position); | 2633 CHECK_EQ(pc_offset(), end_position); |
| 2649 #endif | 2634 #endif |
| 2650 } | 2635 } |
| (...skipping 1872 matching lines...) |
| 4523 cmpq(map_in_out, FieldOperand(scratch, offset)); | 4508 cmpq(map_in_out, FieldOperand(scratch, offset)); |
| 4524 j(not_equal, no_map_match); | 4509 j(not_equal, no_map_match); |
| 4525 | 4510 |
| 4526 // Use the transitioned cached map. | 4511 // Use the transitioned cached map. |
| 4527 offset = transitioned_kind * kPointerSize + | 4512 offset = transitioned_kind * kPointerSize + |
| 4528 FixedArrayBase::kHeaderSize; | 4513 FixedArrayBase::kHeaderSize; |
| 4529 movp(map_in_out, FieldOperand(scratch, offset)); | 4514 movp(map_in_out, FieldOperand(scratch, offset)); |
| 4530 } | 4515 } |
| 4531 | 4516 |
| 4532 | 4517 |
| | 4518 void MacroAssembler::LoadInitialArrayMap( |
| | 4519 Register function_in, Register scratch, |
| | 4520 Register map_out, bool can_have_holes) { |
| | 4521 ASSERT(!function_in.is(map_out)); |
| | 4522 Label done; |
| | 4523 movp(map_out, FieldOperand(function_in, |
| | 4524 JSFunction::kPrototypeOrInitialMapOffset)); |
| | 4525 if (!FLAG_smi_only_arrays) { |
| | 4526 ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; |
| | 4527 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
| | 4528 kind, |
| | 4529 map_out, |
| | 4530 scratch, |
| | 4531 &done); |
| | 4532 } else if (can_have_holes) { |
| | 4533 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
| | 4534 FAST_HOLEY_SMI_ELEMENTS, |
| | 4535 map_out, |
| | 4536 scratch, |
| | 4537 &done); |
| | 4538 } |
| | 4539 bind(&done); |
| | 4540 } |
| | 4541 |
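LoadInitialArrayMap, restored above, starts from the function's initial map (which the transition calls assume is FAST_SMI_ELEMENTS) and upgrades it from two inputs. The branches reduce to this table:

    // FLAG_smi_only_arrays   can_have_holes   ->  map loaded into map_out
    // -------------------------------------------------------------------
    //   off                    false          ->  FAST_ELEMENTS
    //   off                    true           ->  FAST_HOLEY_ELEMENTS
    //   on                     true           ->  FAST_HOLEY_SMI_ELEMENTS
    //   on                     false          ->  initial map, unchanged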
| 4533 #ifdef _WIN64 | 4542 #ifdef _WIN64 |
| 4534 static const int kRegisterPassedArguments = 4; | 4543 static const int kRegisterPassedArguments = 4; |
| 4535 #else | 4544 #else |
| 4536 static const int kRegisterPassedArguments = 6; | 4545 static const int kRegisterPassedArguments = 6; |
| 4537 #endif | 4546 #endif |
| 4538 | 4547 |
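The two values of kRegisterPassedArguments mirror the native C calling conventions; the C-call helpers use the count to decide how many arguments spill to the stack:

    // Win64 ABI:    integer/pointer args in rcx, rdx, r8, r9 (4 registers),
    //               and the caller reserves 32 bytes of shadow space.
    // System V x64: integer/pointer args in rdi, rsi, rdx, rcx, r8, r9
    //               (6 registers), with no shadow space.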
| 4539 void MacroAssembler::LoadGlobalFunction(int index, Register function) { | 4548 void MacroAssembler::LoadGlobalFunction(int index, Register function) { |
| 4540 // Load the global or builtins object from the current context. | 4549 // Load the global or builtins object from the current context. |
| 4541 movp(function, | 4550 movp(function, |
| 4542 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 4551 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| (...skipping 441 matching lines...) |
| 4984 j(equal, found); | 4993 j(equal, found); |
| 4985 movp(current, FieldOperand(current, Map::kPrototypeOffset)); | 4994 movp(current, FieldOperand(current, Map::kPrototypeOffset)); |
| 4986 CompareRoot(current, Heap::kNullValueRootIndex); | 4995 CompareRoot(current, Heap::kNullValueRootIndex); |
| 4987 j(not_equal, &loop_again); | 4996 j(not_equal, &loop_again); |
| 4988 } | 4997 } |
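The visible tail of this loop walks a prototype chain by map: a match jumps to found, otherwise the map's prototype is loaded and the walk continues until the chain terminates at null. Annotated pseudocode for the fragment (the loop head sits outside this hunk):

    // loop_again:
    //   if (current map matches)  goto found;       // j(equal, found)
    //   current = current->prototype();             // Map::kPrototypeOffset
    //   if (current != null)      goto loop_again;  // CompareRoot + j(not_equal)
    //   // fall through: not found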
| 4989 | 4998 |
| 4990 | 4999 |
| 4991 } } // namespace v8::internal | 5000 } } // namespace v8::internal |
| 4992 | 5001 |
| 4993 #endif // V8_TARGET_ARCH_X64 | 5002 #endif // V8_TARGET_ARCH_X64 |