OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/base/division-by-constant.h" | 8 #include "src/base/division-by-constant.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 2006 matching lines...)
2017 } | 2017 } |
2018 | 2018 |
2019 | 2019 |
2020 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) { | 2020 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) { |
2021 // Set the entry point and jump to the C entry runtime stub. | 2021 // Set the entry point and jump to the C entry runtime stub. |
2022 mov(ebx, Immediate(ext)); | 2022 mov(ebx, Immediate(ext)); |
2023 CEntryStub ces(isolate(), 1); | 2023 CEntryStub ces(isolate(), 1); |
2024 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); | 2024 jmp(ces.GetCode(), RelocInfo::CODE_TARGET); |
2025 } | 2025 } |
2026 | 2026 |
| 2027 void MacroAssembler::PrepareForTailCall(const ParameterCount& callee_args_count, |
| 2028 Register caller_args_count_reg, |
| 2029 Register scratch0, Register scratch1, |
| 2030 ReturnAddressState ra_state) { |
| 2031 #if DEBUG |
| 2032 if (callee_args_count.is_reg()) { |
| 2033 DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0, |
| 2034 scratch1)); |
| 2035 } else { |
| 2036 DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1)); |
| 2037 } |
| 2038 #endif |
| 2039 |
| 2040 // Calculate the destination address where we will put the return address |
| 2041 // after we drop the current frame. |
| 2042 Register new_sp_reg = scratch0; |
| 2043 if (callee_args_count.is_reg()) { |
| 2044 sub(caller_args_count_reg, callee_args_count.reg()); |
| 2045 lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size, |
| 2046 StandardFrameConstants::kCallerPCOffset)); |
| 2047 } else { |
| 2048 lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size, |
| 2049 StandardFrameConstants::kCallerPCOffset - |
| 2050 callee_args_count.immediate() * kPointerSize)); |
| 2051 } |
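[Reviewer note] Both branches of the if/else above compute the same destination. A minimal sketch of the arithmetic, assuming a 32-bit target where kPointerSize == 4 and StandardFrameConstants::kCallerPCOffset == kPointerSize (both values are assumptions for illustration, not taken from this diff):

  #include <cstdint>

  // Hypothetical helper, for illustration only (not part of the V8 API):
  // where new_sp_reg ends up relative to ebp.
  static uintptr_t NewSpFor(uintptr_t ebp, int caller_args, int callee_args) {
    const int kPointerSize = 4;     // assumption: 32-bit x87 target
    const int kCallerPCOffset = 4;  // assumption: caller PC sits at [ebp + 4]
    return ebp + (caller_args - callee_args) * kPointerSize + kCallerPCOffset;
  }

  // Example: the caller passed 5 arguments and the callee takes 3, so the
  // return address moves up by two words: NewSpFor(ebp, 5, 3) == ebp + 12.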
| 2052 |
| 2053 if (FLAG_debug_code) { |
| 2054 cmp(esp, new_sp_reg); |
| 2055 Check(below, kStackAccessBelowStackPointer); |
| 2056 } |
| 2057 |
| 2058 // Copy the return address from the caller's frame into the current frame's |
| 2059 // return address slot so it is not trashed, and let the following loop copy |
| 2060 // it to the right place. |
| 2061 Register tmp_reg = scratch1; |
| 2062 if (ra_state == ReturnAddressState::kOnStack) { |
| 2063 mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset)); |
| 2064 mov(Operand(esp, 0), tmp_reg); |
| 2065 } else { |
| 2066 DCHECK(ReturnAddressState::kNotOnStack == ra_state); |
| 2067 Push(Operand(ebp, StandardFrameConstants::kCallerPCOffset)); |
| 2068 } |
| 2069 |
| 2070 // Restore caller's frame pointer now as it could be overwritten by |
| 2071 // the copying loop. |
| 2072 mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); |
| 2073 |
| 2074 // +2 here is to copy both the receiver and the return address. |
| 2075 Register count_reg = caller_args_count_reg; |
| 2076 if (callee_args_count.is_reg()) { |
| 2077 lea(count_reg, Operand(callee_args_count.reg(), 2)); |
| 2078 } else { |
| 2079 mov(count_reg, Immediate(callee_args_count.immediate() + 2)); |
| 2080 // TODO(ishell): Unroll copying loop for small immediate values. |
| 2081 } |
| 2082 |
| 2083 // Now copy the callee arguments to the caller frame, going backwards to avoid |
| 2084 // corrupting them (the source and destination areas can overlap). |
| 2085 Label loop, entry; |
| 2086 jmp(&entry, Label::kNear); |
| 2087 bind(&loop); |
| 2088 dec(count_reg); |
| 2089 mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0)); |
| 2090 mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg); |
| 2091 bind(&entry); |
| 2092 cmp(count_reg, Immediate(0)); |
| 2093 j(not_equal, &loop, Label::kNear); |
| 2094 |
| 2095 // Leave current frame. |
| 2096 mov(esp, new_sp_reg); |
| 2097 } |
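[Reviewer note] The copy loop runs from the highest word down to the lowest because the destination area (at new_sp_reg) lies above the source area (at esp) and the two can overlap; the FLAG_debug_code check above asserts that esp is below new_sp_reg. A minimal host-side sketch of that backward, overlap-safe copy, assuming 4-byte stack words (an illustration only, not V8 code):

  #include <cstdio>

  // Move `count` stack words from `src` (esp) to `dst` (new_sp), where
  // dst > src and the areas overlap. Copying from the highest index down
  // keeps every source word intact until it has been read, matching the
  // dec(count_reg) / mov loop in PrepareForTailCall.
  static void CopyFrameWords(unsigned* dst, const unsigned* src, int count) {
    for (int i = count - 1; i >= 0; --i) {
      dst[i] = src[i];
    }
  }

  int main() {
    // Lowest address first: return address, receiver and callee arguments
    // (order simplified), then two surplus caller words the tail call drops.
    unsigned stack[7] = {0xAA, 0xBB, 0x1, 0x2, 0x3, 0xDD, 0xEE};
    CopyFrameWords(stack + 2, stack, 5);         // count = callee args + 2
    for (unsigned w : stack) printf("%#x ", w);  // 0xaa 0xbb 0xaa 0xbb 0x1 0x2 0x3
    printf("\n");
    return 0;
  }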
2027 | 2098 |
2028 void MacroAssembler::InvokePrologue(const ParameterCount& expected, | 2099 void MacroAssembler::InvokePrologue(const ParameterCount& expected, |
2029 const ParameterCount& actual, | 2100 const ParameterCount& actual, |
2030 Label* done, | 2101 Label* done, |
2031 bool* definitely_mismatches, | 2102 bool* definitely_mismatches, |
2032 InvokeFlag flag, | 2103 InvokeFlag flag, |
2033 Label::Distance done_near, | 2104 Label::Distance done_near, |
2034 const CallWrapper& call_wrapper) { | 2105 const CallWrapper& call_wrapper) { |
2035 bool definitely_matches = false; | 2106 bool definitely_matches = false; |
2036 *definitely_mismatches = false; | 2107 *definitely_mismatches = false; |
(...skipping 1038 matching lines...)
3075 mov(eax, dividend); | 3146 mov(eax, dividend); |
3076 shr(eax, 31); | 3147 shr(eax, 31); |
3077 add(edx, eax); | 3148 add(edx, eax); |
3078 } | 3149 } |
3079 | 3150 |
3080 | 3151 |
3081 } // namespace internal | 3152 } // namespace internal |
3082 } // namespace v8 | 3153 } // namespace v8 |
3083 | 3154 |
3084 #endif // V8_TARGET_ARCH_X87 | 3155 #endif // V8_TARGET_ARCH_X87 |