OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4613 matching lines...)
4624 __ Cmp(allocation_top, x10); | 4624 __ Cmp(allocation_top, x10); |
4625 __ B(hi, &call_builtin); | 4625 __ B(hi, &call_builtin); |
4626 | 4626 |
4627 // We fit and could grow elements. | 4627 // We fit and could grow elements. |
4628 // Update new_space_allocation_top. | 4628 // Update new_space_allocation_top. |
4629 __ Str(allocation_top, MemOperand(allocation_top_addr)); | 4629 __ Str(allocation_top, MemOperand(allocation_top_addr)); |
4630 // Push the argument. | 4630 // Push the argument. |
4631 __ Str(argument, MemOperand(end_elements)); | 4631 __ Str(argument, MemOperand(end_elements)); |
4632 // Fill the rest with holes. | 4632 // Fill the rest with holes. |
4633 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex); | 4633 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex); |
4634 for (int i = 1; i < kAllocationDelta; i++) { | 4634 ASSERT(kAllocationDelta == 4); |
4635 // TODO(all): Try to use stp here. | 4635 __ Stp(x10, x10, MemOperand(end_elements, 1 * kPointerSize)); |
4636 __ Str(x10, MemOperand(end_elements, i * kPointerSize)); | 4636 __ Stp(x10, x10, MemOperand(end_elements, 3 * kPointerSize)); |
4637 } | |
4638 | 4637 |
4639 // Update elements' and array's sizes. | 4638 // Update elements' and array's sizes. |
4640 __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 4639 __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
4641 __ Add(elements_length, | 4640 __ Add(elements_length, |
4642 elements_length, | 4641 elements_length, |
4643 Operand(Smi::FromInt(kAllocationDelta))); | 4642 Operand(Smi::FromInt(kAllocationDelta))); |
4644 __ Str(elements_length, | 4643 __ Str(elements_length, |
4645 FieldMemOperand(elements, FixedArray::kLengthOffset)); | 4644 FieldMemOperand(elements, FixedArray::kLengthOffset)); |
4646 | 4645 |
4647 // Elements are in new space, so write barrier is not required. | 4646 // Elements are in new space, so write barrier is not required. |
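For reference, a minimal C++ sketch (not V8 source) of the memory effect of the two paired stores above, assuming 64-bit X registers and kPointerSize == 8; the function and parameter names are illustrative only:

  #include <cstdint>

  constexpr int kAllocationDelta = 4;
  static_assert(kAllocationDelta == 4, "paired stores below assume a delta of 4");

  // Each Stp(x10, x10, MemOperand(base, n * kPointerSize)) stores the hole
  // value into word slots n and n + 1 of the elements backing store.
  void FillTrailingSlotsWithHoles(uintptr_t* end_elements, uintptr_t the_hole) {
    end_elements[1] = the_hole;  // first Stp, offset 1 * kPointerSize
    end_elements[2] = the_hole;  //   ... second register of that pair
    end_elements[3] = the_hole;  // second Stp, offset 3 * kPointerSize
    end_elements[4] = the_hole;  //   ... second register of that pair
  }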
(...skipping 308 matching lines...)
4956 __ CallStub(&stub); | 4955 __ CallStub(&stub); |
4957 __ Pop(lr); | 4956 __ Pop(lr); |
4958 } | 4957 } |
4959 } | 4958 } |
4960 | 4959 |
4961 | 4960 |
4962 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 4961 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
4963 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm); | 4962 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm); |
4964 // The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by | 4963 // The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by |
4965 // a "Push lr" instruction, followed by a call. | 4964 // a "Push lr" instruction, followed by a call. |
4966 // TODO(jbramley): Verify that this call is always made with relocation. | |
4967 static const int kReturnAddressDistanceFromFunctionStart = | 4965 static const int kReturnAddressDistanceFromFunctionStart = |
4968 Assembler::kCallSizeWithRelocation + (2 * kInstructionSize); | 4966 Assembler::kCallSizeWithRelocation + (2 * kInstructionSize); |
4969 | 4967 |
4970 // Save all kCallerSaved registers (including lr), since this can be called | 4968 // Save all kCallerSaved registers (including lr), since this can be called |
4971 // from anywhere. | 4969 // from anywhere. |
4972 // TODO(jbramley): What about FP registers? | 4970 // TODO(jbramley): What about FP registers? |
4973 __ PushCPURegList(kCallerSaved); | 4971 __ PushCPURegList(kCallerSaved); |
4974 ASSERT(kCallerSaved.IncludesAliasOf(lr)); | 4972 ASSERT(kCallerSaved.IncludesAliasOf(lr)); |
4975 const int kNumSavedRegs = kCallerSaved.Count(); | 4973 const int kNumSavedRegs = kCallerSaved.Count(); |
4976 | 4974 |
(...skipping 403 matching lines...)
5380 __ Add(x11, x11, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); | 5378 __ Add(x11, x11, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); |
5381 __ Str(x11, FieldMemOperand(allocation_site, | 5379 __ Str(x11, FieldMemOperand(allocation_site, |
5382 AllocationSite::kTransitionInfoOffset)); | 5380 AllocationSite::kTransitionInfoOffset)); |
5383 | 5381 |
5384 __ Bind(&normal_sequence); | 5382 __ Bind(&normal_sequence); |
5385 int last_index = | 5383 int last_index = |
5386 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); | 5384 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); |
5387 for (int i = 0; i <= last_index; ++i) { | 5385 for (int i = 0; i <= last_index; ++i) { |
5388 Label next; | 5386 Label next; |
5389 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); | 5387 ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i); |
5390 // TODO(jbramley): Is this the best way to handle this? Can we make the | |
5391 // tail calls conditional, rather than hopping over each one? | |
5392 __ CompareAndBranch(kind, candidate_kind, ne, &next); | 5388 __ CompareAndBranch(kind, candidate_kind, ne, &next); |
5393 ArraySingleArgumentConstructorStub stub(candidate_kind); | 5389 ArraySingleArgumentConstructorStub stub(candidate_kind); |
5394 __ TailCallStub(&stub); | 5390 __ TailCallStub(&stub); |
5395 __ Bind(&next); | 5391 __ Bind(&next); |
5396 } | 5392 } |
5397 | 5393 |
5398 // If we reached this point there is a problem. | 5394 // If we reached this point there is a problem. |
5399 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 5395 __ Abort(kUnexpectedElementsKindInArrayConstructor); |
5400 } else { | 5396 } else { |
5401 UNREACHABLE(); | 5397 UNREACHABLE(); |
(...skipping 303 matching lines...)
5705 const int kCallApiFunctionSpillSpace = 4; | 5701 const int kCallApiFunctionSpillSpace = 4; |
5706 | 5702 |
5707 FrameScope frame_scope(masm, StackFrame::MANUAL); | 5703 FrameScope frame_scope(masm, StackFrame::MANUAL); |
5708 __ EnterExitFrame(false, x10, kApiStackSpace + kCallApiFunctionSpillSpace); | 5704 __ EnterExitFrame(false, x10, kApiStackSpace + kCallApiFunctionSpillSpace); |
5709 | 5705 |
5710 // TODO(all): Optimize this with stp and suchlike. | 5706 // TODO(all): Optimize this with stp and suchlike. |
5711 ASSERT(!AreAliased(x0, api_function_address)); | 5707 ASSERT(!AreAliased(x0, api_function_address)); |
5712 // x0 = FunctionCallbackInfo& | 5708 // x0 = FunctionCallbackInfo& |
5713 // Arguments is after the return address. | 5709 // Arguments is after the return address. |
5714 __ Add(x0, masm->StackPointer(), 1 * kPointerSize); | 5710 __ Add(x0, masm->StackPointer(), 1 * kPointerSize); |
5715 // FunctionCallbackInfo::implicit_args_ | 5711 // FunctionCallbackInfo::implicit_args_ and FunctionCallbackInfo::values_ |
5716 __ Str(args, MemOperand(x0, 0 * kPointerSize)); | |
5717 // FunctionCallbackInfo::values_ | |
5718 __ Add(x10, args, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize)); | 5712 __ Add(x10, args, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize)); |
5719 __ Str(x10, MemOperand(x0, 1 * kPointerSize)); | 5713 __ Stp(args, x10, MemOperand(x0, 0 * kPointerSize)); |
5720 // FunctionCallbackInfo::length_ = argc | 5714 // FunctionCallbackInfo::length_ = argc and |
| 5715 // FunctionCallbackInfo::is_construct_call = 0 |
5721 __ Mov(x10, argc); | 5716 __ Mov(x10, argc); |
5722 __ Str(x10, MemOperand(x0, 2 * kPointerSize)); | 5717 __ Stp(x10, xzr, MemOperand(x0, 2 * kPointerSize)); |
5723 // FunctionCallbackInfo::is_construct_call = 0 | |
5724 __ Str(xzr, MemOperand(x0, 3 * kPointerSize)); | |
5725 | 5718 |
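For context, a hypothetical C++ sketch of the four consecutive pointer-sized slots the stub fills at x0, with the layout inferred from the offsets above; the struct and field names are illustrative, not V8's declarations:

  #include <cstdint>

  // Written as two pairs: Stp(args, x10, +0) fills the first two slots,
  // Stp(x10, xzr, +2 * kPointerSize) fills the last two (xzr stores zero).
  struct FunctionCallbackInfoSlots {
    uintptr_t implicit_args;      // +0: the 'args' register
    uintptr_t values;             // +1: args advanced by (FCA::kArgsLength - 1 + argc) slots
    uintptr_t length;             // +2: argc
    uintptr_t is_construct_call;  // +3: always written as zero
  };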
5726 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1; | 5719 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1; |
5727 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); | 5720 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback); |
5728 ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL; | 5721 ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL; |
5729 ApiFunction thunk_fun(thunk_address); | 5722 ApiFunction thunk_fun(thunk_address); |
5730 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, | 5723 ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type, |
5731 masm->isolate()); | 5724 masm->isolate()); |
5732 | 5725 |
5733 AllowExternalCallThatCantCauseGC scope(masm); | 5726 AllowExternalCallThatCantCauseGC scope(masm); |
5734 MemOperand context_restore_operand( | 5727 MemOperand context_restore_operand( |
(...skipping 61 matching lines...)
5796 MemOperand(fp, 6 * kPointerSize), | 5789 MemOperand(fp, 6 * kPointerSize), |
5797 NULL); | 5790 NULL); |
5798 } | 5791 } |
5799 | 5792 |
5800 | 5793 |
5801 #undef __ | 5794 #undef __ |
5802 | 5795 |
5803 } } // namespace v8::internal | 5796 } } // namespace v8::internal |
5804 | 5797 |
5805 #endif // V8_TARGET_ARCH_A64 | 5798 #endif // V8_TARGET_ARCH_A64 |