OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // | 2 // |
3 // Use of this source code is governed by a BSD-style license that can be | 3 // Use of this source code is governed by a BSD-style license that can be |
4 // found in the LICENSE file. | 4 // found in the LICENSE file. |
5 | 5 |
6 #include "src/crankshaft/s390/lithium-codegen-s390.h" | 6 #include "src/crankshaft/s390/lithium-codegen-s390.h" |
7 | 7 |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 95 matching lines...)
106 | 106 |
107 if (prologue_offset) { | 107 if (prologue_offset) { |
108 // Prologue logic requires its starting address in ip and the | 108 // Prologue logic requires its starting address in ip and the |
109 // corresponding offset from the function entry. Need to add | 109 // corresponding offset from the function entry. Need to add |
110 // 4 bytes for the size of AHI/AGHI that AddP expands into. | 110 // 4 bytes for the size of AHI/AGHI that AddP expands into. |
111 __ AddP(ip, ip, Operand(prologue_offset + sizeof(FourByteInstr))); | 111 __ AddP(ip, ip, Operand(prologue_offset + sizeof(FourByteInstr))); |
112 } | 112 } |
113 info()->set_prologue_offset(prologue_offset); | 113 info()->set_prologue_offset(prologue_offset); |
114 if (NeedsEagerFrame()) { | 114 if (NeedsEagerFrame()) { |
115 if (info()->IsStub()) { | 115 if (info()->IsStub()) { |
116 __ StubPrologue(ip, prologue_offset); | 116 __ StubPrologue(StackFrame::STUB, ip, prologue_offset); |
117 } else { | 117 } else { |
118 __ Prologue(info()->GeneratePreagedPrologue(), ip, prologue_offset); | 118 __ Prologue(info()->GeneratePreagedPrologue(), ip, prologue_offset); |
119 } | 119 } |
120 frame_is_built_ = true; | 120 frame_is_built_ = true; |
121 } | 121 } |
122 | 122 |
123 // Reserve space for the stack slots needed by the code. | 123 // Reserve space for the stack slots needed by the code. |
124 int slots = GetStackSlotCount(); | 124 int slots = GetStackSlotCount(); |
125 if (slots > 0) { | 125 if (slots > 0) { |
126 __ lay(sp, MemOperand(sp, -(slots * kPointerSize))); | 126 __ lay(sp, MemOperand(sp, -(slots * kPointerSize))); |
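[Review note] The "4 bytes for AHI/AGHI" comment above is subtle: ip holds the address of the AddP instruction itself, and the prologue wants the address just past it, so the operand folds the instruction's own 4-byte encoding on top of prologue_offset. A minimal standalone sketch of that arithmetic, assuming only what the hunk states (the 4-byte encoding); the names are illustrative, not V8's:

    #include <cstdint>
    #include <cstdio>

    // Sketch only: mirrors the arithmetic behind
    //   __ AddP(ip, ip, Operand(prologue_offset + sizeof(FourByteInstr)));
    // ip points at the AddP itself, so the immediate must also cover the
    // instruction's own 4-byte encoding for ip to land just past it.
    constexpr uint64_t kFourByteInstrSketch = 4;

    uint64_t PrologueScratchValue(uint64_t ip_at_addp,
                                  uint64_t prologue_offset) {
      return ip_at_addp + prologue_offset + kFourByteInstrSketch;
    }

    int main() {
      // 0x1000 + 16 + 4 = 4116
      printf("%llu\n",
             (unsigned long long)PrologueScratchValue(0x1000, 16));
    }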
(...skipping 117 matching lines...)
244 "-------------------- Deferred %s --------------------", | 244 "-------------------- Deferred %s --------------------", |
245 code->instruction_index(), code->instr()->hydrogen_value()->id(), | 245 code->instruction_index(), code->instr()->hydrogen_value()->id(), |
246 code->instr()->Mnemonic()); | 246 code->instr()->Mnemonic()); |
247 __ bind(code->entry()); | 247 __ bind(code->entry()); |
248 if (NeedsDeferredFrame()) { | 248 if (NeedsDeferredFrame()) { |
249 Comment(";;; Build frame"); | 249 Comment(";;; Build frame"); |
250 DCHECK(!frame_is_built_); | 250 DCHECK(!frame_is_built_); |
251 DCHECK(info()->IsStub()); | 251 DCHECK(info()->IsStub()); |
252 frame_is_built_ = true; | 252 frame_is_built_ = true; |
253 __ LoadSmiLiteral(scratch0(), Smi::FromInt(StackFrame::STUB)); | 253 __ LoadSmiLiteral(scratch0(), Smi::FromInt(StackFrame::STUB)); |
254 __ PushFixedFrame(scratch0()); | 254 __ PushCommonFrame(scratch0()); |
255 __ la(fp, | |
256 MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp)); | |
257 Comment(";;; Deferred code"); | 255 Comment(";;; Deferred code"); |
258 } | 256 } |
259 code->Generate(); | 257 code->Generate(); |
260 if (NeedsDeferredFrame()) { | 258 if (NeedsDeferredFrame()) { |
261 Comment(";;; Destroy frame"); | 259 Comment(";;; Destroy frame"); |
262 DCHECK(frame_is_built_); | 260 DCHECK(frame_is_built_); |
263 __ PopFixedFrame(ip); | 261 __ PopCommonFrame(scratch0()); |
264 frame_is_built_ = false; | 262 frame_is_built_ = false; |
265 } | 263 } |
266 __ b(code->exit()); | 264 __ b(code->exit()); |
267 } | 265 } |
268 } | 266 } |
269 | 267 |
270 return !is_aborted(); | 268 return !is_aborted(); |
271 } | 269 } |
272 | 270 |
273 bool LCodeGen::GenerateJumpTable() { | 271 bool LCodeGen::GenerateJumpTable() { |
(...skipping 27 matching lines...)
301 DeoptComment(table_entry->deopt_info); | 299 DeoptComment(table_entry->deopt_info); |
302 | 300 |
303 // Second-level deopt table entries are contiguous and small, so instead | 301 // Second-level deopt table entries are contiguous and small, so instead |
304 // of loading the full, absolute address of each one, load an immediate | 302 // of loading the full, absolute address of each one, load an immediate |
305 // offset which will be added to the base address later. | 303 // offset which will be added to the base address later. |
306 __ mov(entry_offset, Operand(entry - base)); | 304 __ mov(entry_offset, Operand(entry - base)); |
307 | 305 |
308 if (table_entry->needs_frame) { | 306 if (table_entry->needs_frame) { |
309 DCHECK(!info()->saves_caller_doubles()); | 307 DCHECK(!info()->saves_caller_doubles()); |
310 Comment(";;; call deopt with frame"); | 308 Comment(";;; call deopt with frame"); |
311 __ PushFixedFrame(); | 309 __ PushCommonFrame(); |
312 __ b(r14, &needs_frame); | 310 __ b(r14, &needs_frame); |
313 } else { | 311 } else { |
314 __ b(r14, &call_deopt_entry); | 312 __ b(r14, &call_deopt_entry); |
315 } | 313 } |
316 info()->LogDeoptCallPosition(masm()->pc_offset(), | 314 info()->LogDeoptCallPosition(masm()->pc_offset(), |
317 table_entry->deopt_info.inlining_id); | 315 table_entry->deopt_info.inlining_id); |
318 } | 316 } |
319 | 317 |
320 if (needs_frame.is_linked()) { | 318 if (needs_frame.is_linked()) { |
321 __ bind(&needs_frame); | 319 __ bind(&needs_frame); |
322 // This variant of deopt can only be used with stubs. Since we don't | 320 // This variant of deopt can only be used with stubs. Since we don't |
323 // have a function pointer to install in the stack frame that we're | 321 // have a function pointer to install in the stack frame that we're |
324 // building, install a special marker there instead. | 322 // building, install a special marker there instead. |
325 DCHECK(info()->IsStub()); | 323 DCHECK(info()->IsStub()); |
326 __ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::STUB)); | 324 __ LoadSmiLiteral(ip, Smi::FromInt(StackFrame::STUB)); |
327 __ push(ip); | 325 __ push(ip); |
328 __ lay(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp)); | 326 DCHECK(info()->IsStub()); |
329 } | 327 } |
330 | 328 |
331 Comment(";;; call deopt"); | 329 Comment(";;; call deopt"); |
332 __ bind(&call_deopt_entry); | 330 __ bind(&call_deopt_entry); |
333 | 331 |
334 if (info()->saves_caller_doubles()) { | 332 if (info()->saves_caller_doubles()) { |
335 DCHECK(info()->IsStub()); | 333 DCHECK(info()->IsStub()); |
336 RestoreCallerDoubles(); | 334 RestoreCallerDoubles(); |
337 } | 335 } |
338 | 336 |
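[Review note] The comment at old line 303 carries the whole trick here: second-level deopt entries are packed, so each case materializes only a small `entry - base` immediate and the absolute base is added once at the shared tail. A standalone sketch of that base-plus-offset dispatch, with illustrative names rather than V8's:

    #include <cstdint>
    #include <cstdio>

    // Each table entry stores only its distance from a common base, so the
    // per-entry code loads a small immediate instead of a full 64-bit
    // address; the base is added once at the shared tail.
    struct DeoptEntrySketch { const char* name; };

    DeoptEntrySketch table[] = {{"eager"}, {"lazy"}, {"soft"}};

    // entry_offset plays the role of the immediate built by
    //   __ mov(entry_offset, Operand(entry - base));
    DeoptEntrySketch* Resolve(DeoptEntrySketch* base, intptr_t entry_offset) {
      return reinterpret_cast<DeoptEntrySketch*>(
          reinterpret_cast<intptr_t>(base) + entry_offset);
    }

    int main() {
      intptr_t off = reinterpret_cast<intptr_t>(&table[2]) -
                     reinterpret_cast<intptr_t>(&table[0]);
      printf("%s\n", Resolve(table, off)->name);  // prints "soft"
    }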
(...skipping 1762 matching lines...)
2101 __ lzdr(kDoubleRegZero); | 2099 __ lzdr(kDoubleRegZero); |
2102 __ cdbr(dbl_scratch, kDoubleRegZero); | 2100 __ cdbr(dbl_scratch, kDoubleRegZero); |
2103 Condition lt_gt = static_cast<Condition>(lt | gt); | 2101 Condition lt_gt = static_cast<Condition>(lt | gt); |
2104 EmitBranch(instr, lt_gt); | 2102 EmitBranch(instr, lt_gt); |
2105 } else if (type.IsString()) { | 2103 } else if (type.IsString()) { |
2106 DCHECK(!info()->IsStub()); | 2104 DCHECK(!info()->IsStub()); |
2107 __ LoadP(ip, FieldMemOperand(reg, String::kLengthOffset)); | 2105 __ LoadP(ip, FieldMemOperand(reg, String::kLengthOffset)); |
2108 __ CmpP(ip, Operand::Zero()); | 2106 __ CmpP(ip, Operand::Zero()); |
2109 EmitBranch(instr, ne); | 2107 EmitBranch(instr, ne); |
2110 } else { | 2108 } else { |
2111 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); | 2109 ToBooleanICStub::Types expected = |
| 2110 instr->hydrogen()->expected_input_types(); |
2112 // Avoid deopts in the case where we've never executed this path before. | 2111 // Avoid deopts in the case where we've never executed this path before. |
2113 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); | 2112 if (expected.IsEmpty()) expected = ToBooleanICStub::Types::Generic(); |
2114 | 2113 |
2115 if (expected.Contains(ToBooleanStub::UNDEFINED)) { | 2114 if (expected.Contains(ToBooleanICStub::UNDEFINED)) { |
2116 // undefined -> false. | 2115 // undefined -> false. |
2117 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); | 2116 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); |
2118 __ beq(instr->FalseLabel(chunk_)); | 2117 __ beq(instr->FalseLabel(chunk_)); |
2119 } | 2118 } |
2120 if (expected.Contains(ToBooleanStub::BOOLEAN)) { | 2119 if (expected.Contains(ToBooleanICStub::BOOLEAN)) { |
2121 // Boolean -> its value. | 2120 // Boolean -> its value. |
2122 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 2121 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
2123 __ beq(instr->TrueLabel(chunk_)); | 2122 __ beq(instr->TrueLabel(chunk_)); |
2124 __ CompareRoot(reg, Heap::kFalseValueRootIndex); | 2123 __ CompareRoot(reg, Heap::kFalseValueRootIndex); |
2125 __ beq(instr->FalseLabel(chunk_)); | 2124 __ beq(instr->FalseLabel(chunk_)); |
2126 } | 2125 } |
2127 if (expected.Contains(ToBooleanStub::NULL_TYPE)) { | 2126 if (expected.Contains(ToBooleanICStub::NULL_TYPE)) { |
2128 // 'null' -> false. | 2127 // 'null' -> false. |
2129 __ CompareRoot(reg, Heap::kNullValueRootIndex); | 2128 __ CompareRoot(reg, Heap::kNullValueRootIndex); |
2130 __ beq(instr->FalseLabel(chunk_)); | 2129 __ beq(instr->FalseLabel(chunk_)); |
2131 } | 2130 } |
2132 | 2131 |
2133 if (expected.Contains(ToBooleanStub::SMI)) { | 2132 if (expected.Contains(ToBooleanICStub::SMI)) { |
2134 // Smis: 0 -> false, all other -> true. | 2133 // Smis: 0 -> false, all other -> true. |
2135 __ CmpP(reg, Operand::Zero()); | 2134 __ CmpP(reg, Operand::Zero()); |
2136 __ beq(instr->FalseLabel(chunk_)); | 2135 __ beq(instr->FalseLabel(chunk_)); |
2137 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2136 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
2138 } else if (expected.NeedsMap()) { | 2137 } else if (expected.NeedsMap()) { |
2139 // If we need a map later and have a Smi -> deopt. | 2138 // If we need a map later and have a Smi -> deopt. |
2140 __ TestIfSmi(reg); | 2139 __ TestIfSmi(reg); |
2141 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, cr0); | 2140 DeoptimizeIf(eq, instr, Deoptimizer::kSmi, cr0); |
2142 } | 2141 } |
2143 | 2142 |
2144 const Register map = scratch0(); | 2143 const Register map = scratch0(); |
2145 if (expected.NeedsMap()) { | 2144 if (expected.NeedsMap()) { |
2146 __ LoadP(map, FieldMemOperand(reg, HeapObject::kMapOffset)); | 2145 __ LoadP(map, FieldMemOperand(reg, HeapObject::kMapOffset)); |
2147 | 2146 |
2148 if (expected.CanBeUndetectable()) { | 2147 if (expected.CanBeUndetectable()) { |
2149 // Undetectable -> false. | 2148 // Undetectable -> false. |
2150 __ tm(FieldMemOperand(map, Map::kBitFieldOffset), | 2149 __ tm(FieldMemOperand(map, Map::kBitFieldOffset), |
2151 Operand(1 << Map::kIsUndetectable)); | 2150 Operand(1 << Map::kIsUndetectable)); |
2152 __ bne(instr->FalseLabel(chunk_)); | 2151 __ bne(instr->FalseLabel(chunk_)); |
2153 } | 2152 } |
2154 } | 2153 } |
2155 | 2154 |
2156 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { | 2155 if (expected.Contains(ToBooleanICStub::SPEC_OBJECT)) { |
2157 // spec object -> true. | 2156 // spec object -> true. |
2158 __ CompareInstanceType(map, ip, FIRST_JS_RECEIVER_TYPE); | 2157 __ CompareInstanceType(map, ip, FIRST_JS_RECEIVER_TYPE); |
2159 __ bge(instr->TrueLabel(chunk_)); | 2158 __ bge(instr->TrueLabel(chunk_)); |
2160 } | 2159 } |
2161 | 2160 |
2162 if (expected.Contains(ToBooleanStub::STRING)) { | 2161 if (expected.Contains(ToBooleanICStub::STRING)) { |
2163 // String value -> false iff empty. | 2162 // String value -> false iff empty. |
2164 Label not_string; | 2163 Label not_string; |
2165 __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); | 2164 __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE); |
2166 __ bge(¬_string, Label::kNear); | 2165 __ bge(¬_string, Label::kNear); |
2167 __ LoadP(ip, FieldMemOperand(reg, String::kLengthOffset)); | 2166 __ LoadP(ip, FieldMemOperand(reg, String::kLengthOffset)); |
2168 __ CmpP(ip, Operand::Zero()); | 2167 __ CmpP(ip, Operand::Zero()); |
2169 __ bne(instr->TrueLabel(chunk_)); | 2168 __ bne(instr->TrueLabel(chunk_)); |
2170 __ b(instr->FalseLabel(chunk_)); | 2169 __ b(instr->FalseLabel(chunk_)); |
2171 __ bind(¬_string); | 2170 __ bind(¬_string); |
2172 } | 2171 } |
2173 | 2172 |
2174 if (expected.Contains(ToBooleanStub::SYMBOL)) { | 2173 if (expected.Contains(ToBooleanICStub::SYMBOL)) { |
2175 // Symbol value -> true. | 2174 // Symbol value -> true. |
2176 __ CompareInstanceType(map, ip, SYMBOL_TYPE); | 2175 __ CompareInstanceType(map, ip, SYMBOL_TYPE); |
2177 __ beq(instr->TrueLabel(chunk_)); | 2176 __ beq(instr->TrueLabel(chunk_)); |
2178 } | 2177 } |
2179 | 2178 |
2180 if (expected.Contains(ToBooleanStub::SIMD_VALUE)) { | 2179 if (expected.Contains(ToBooleanICStub::SIMD_VALUE)) { |
2181 // SIMD value -> true. | 2180 // SIMD value -> true. |
2182 Label not_simd; | 2181 Label not_simd; |
2183 __ CompareInstanceType(map, ip, SIMD128_VALUE_TYPE); | 2182 __ CompareInstanceType(map, ip, SIMD128_VALUE_TYPE); |
2184 __ beq(instr->TrueLabel(chunk_)); | 2183 __ beq(instr->TrueLabel(chunk_)); |
2185 } | 2184 } |
2186 | 2185 |
2187 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { | 2186 if (expected.Contains(ToBooleanICStub::HEAP_NUMBER)) { |
2188 // heap number -> false iff +0, -0, or NaN. | 2187 // heap number -> false iff +0, -0, or NaN. |
2189 Label not_heap_number; | 2188 Label not_heap_number; |
2190 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | 2189 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
2191 __ bne(¬_heap_number, Label::kNear); | 2190 __ bne(¬_heap_number, Label::kNear); |
2192 __ LoadDouble(dbl_scratch, | 2191 __ LoadDouble(dbl_scratch, |
2193 FieldMemOperand(reg, HeapNumber::kValueOffset)); | 2192 FieldMemOperand(reg, HeapNumber::kValueOffset)); |
2194 __ lzdr(kDoubleRegZero); | 2193 __ lzdr(kDoubleRegZero); |
2195 __ cdbr(dbl_scratch, kDoubleRegZero); | 2194 __ cdbr(dbl_scratch, kDoubleRegZero); |
2196 __ bunordered(instr->FalseLabel(chunk_)); // NaN -> false. | 2195 __ bunordered(instr->FalseLabel(chunk_)); // NaN -> false. |
2197 __ beq(instr->FalseLabel(chunk_)); // +0, -0 -> false. | 2196 __ beq(instr->FalseLabel(chunk_)); // +0, -0 -> false. |
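[Review note] Mechanical rename aside (ToBooleanStub -> ToBooleanICStub), the ladder above is JS ToBoolean specialized to the observed type set. A compact sketch of the same decision table over a toy tagged value (undetectable maps, symbols and SIMD values are omitted; this is not V8's object model):

    #include <cmath>
    #include <string>

    // Toy tagged value standing in for a JS value; the real code dispatches
    // on map/instance type, this only mirrors the outcome of each branch.
    struct JSValueSketch {
      enum Kind { kUndefined, kNull, kBoolean, kSmi, kString, kHeapNumber };
      Kind kind;
      bool boolean = false;
      int smi = 0;
      std::string str;
      double number = 0;
    };

    bool ToBooleanSketch(const JSValueSketch& v) {
      switch (v.kind) {
        case JSValueSketch::kUndefined:  // undefined -> false
        case JSValueSketch::kNull:       // null -> false
          return false;
        case JSValueSketch::kBoolean:    // Boolean -> its value
          return v.boolean;
        case JSValueSketch::kSmi:        // Smi: 0 -> false, else true
          return v.smi != 0;
        case JSValueSketch::kString:     // String -> false iff empty
          return !v.str.empty();
        case JSValueSketch::kHeapNumber: // +0, -0, NaN -> false
          return v.number != 0 && !std::isnan(v.number);
      }
      return true;  // unreachable; keeps -Wreturn-type quiet
    }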
(...skipping 212 matching lines...)
2410 UNREACHABLE(); | 2409 UNREACHABLE(); |
2411 return kNoCondition; | 2410 return kNoCondition; |
2412 } | 2411 } |
2413 } | 2412 } |
2414 | 2413 |
2415 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { | 2414 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { |
2416 DCHECK(ToRegister(instr->context()).is(cp)); | 2415 DCHECK(ToRegister(instr->context()).is(cp)); |
2417 DCHECK(ToRegister(instr->left()).is(r3)); | 2416 DCHECK(ToRegister(instr->left()).is(r3)); |
2418 DCHECK(ToRegister(instr->right()).is(r2)); | 2417 DCHECK(ToRegister(instr->right()).is(r2)); |
2419 | 2418 |
2420 Handle<Code> code = CodeFactory::StringCompare(isolate()).code(); | 2419 Handle<Code> code = CodeFactory::StringCompare(isolate(), instr->op()).code(); |
2421 CallCode(code, RelocInfo::CODE_TARGET, instr); | 2420 CallCode(code, RelocInfo::CODE_TARGET, instr); |
2422 __ CmpP(r2, Operand::Zero()); | 2421 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
2423 | 2422 EmitBranch(instr, eq); |
2424 EmitBranch(instr, ComputeCompareCondition(instr->op())); | |
2425 } | 2423 } |
2426 | 2424 |
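[Review note] The calling convention changed here, not just the factory signature: the old StringCompare stub returned a sign-style token that ComputeCompareCondition turned into a machine condition, while the per-op code object now hands back true/false, so the caller only tests against the true root. A sketch of the two shapes, with hypothetical helper names:

    #include <cstring>

    // Old shape (sketch): a single stub returns <0 / 0 / >0, and the caller
    // maps the comparison op onto a machine condition afterwards.
    int OldStringCompareSketch(const char* a, const char* b) {
      return strcmp(a, b);
    }

    // New shape (sketch): the op is baked into the code object picked by
    // CodeFactory::StringCompare(isolate, op), which returns a boolean, so
    // the branch reduces to "is the result the true value?".
    bool NewStringCompareLessThanSketch(const char* a, const char* b) {
      return strcmp(a, b) < 0;
    }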
2427 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) { | 2425 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) { |
2428 InstanceType from = instr->from(); | 2426 InstanceType from = instr->from(); |
2429 InstanceType to = instr->to(); | 2427 InstanceType to = instr->to(); |
2430 if (from == FIRST_TYPE) return to; | 2428 if (from == FIRST_TYPE) return to; |
2431 DCHECK(from == to || to == LAST_TYPE); | 2429 DCHECK(from == to || to == LAST_TYPE); |
2432 return from; | 2430 return from; |
2433 } | 2431 } |
2434 | 2432 |
(...skipping 720 matching lines...)
3155 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 3153 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
3156 Register scratch = scratch0(); | 3154 Register scratch = scratch0(); |
3157 Register result = ToRegister(instr->result()); | 3155 Register result = ToRegister(instr->result()); |
3158 | 3156 |
3159 if (instr->hydrogen()->from_inlined()) { | 3157 if (instr->hydrogen()->from_inlined()) { |
3160 __ lay(result, MemOperand(sp, -2 * kPointerSize)); | 3158 __ lay(result, MemOperand(sp, -2 * kPointerSize)); |
3161 } else { | 3159 } else { |
3162 // Check if the calling frame is an arguments adaptor frame. | 3160 // Check if the calling frame is an arguments adaptor frame. |
3163 Label done, adapted; | 3161 Label done, adapted; |
3164 __ LoadP(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 3162 __ LoadP(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
3165 __ LoadP(result, | 3163 __ LoadP( |
3166 MemOperand(scratch, StandardFrameConstants::kContextOffset)); | 3164 result, |
| 3165 MemOperand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset)); |
3167 __ CmpSmiLiteral(result, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); | 3166 __ CmpSmiLiteral(result, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); |
3168 | 3167 |
3169 // Result is the frame pointer for the frame if not adapted and for the real | 3168 // Result is the frame pointer for the frame if not adapted and for the real |
3170 // frame below the adaptor frame if adapted. | 3169 // frame below the adaptor frame if adapted. |
3171 __ beq(&adapted, Label::kNear); | 3170 __ beq(&adapted, Label::kNear); |
3172 __ LoadRR(result, fp); | 3171 __ LoadRR(result, fp); |
3173 __ b(&done, Label::kNear); | 3172 __ b(&done, Label::kNear); |
3174 | 3173 |
3175 __ bind(&adapted); | 3174 __ bind(&adapted); |
3176 __ LoadRR(result, scratch); | 3175 __ LoadRR(result, scratch); |
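[Review note] The slot read here moved from kContextOffset to kContextOrFrameTypeOffset; after the frame-marking unification, that word holds either a context or a Smi frame-type marker. A toy sketch of the walk (the layout and marker value are stand-ins, not V8's constants):

    #include <cstdint>

    // Toy frame: the caller fp plus one word that is either a context
    // pointer or a Smi-encoded frame-type marker (the new
    // "ContextOrFrameType" slot). Layout and marker value are stand-ins.
    struct FrameSketch {
      FrameSketch* caller_fp;
      intptr_t context_or_frame_type;
    };

    constexpr intptr_t kAdaptorMarkerSketch = 0x4 << 1;  // toy Smi encoding

    // Mirrors DoArgumentsElements: yield fp for an unadapted caller, or the
    // adaptor frame's fp, where the real arguments live, when adapted.
    FrameSketch* ArgumentsFrame(FrameSketch* fp) {
      FrameSketch* caller = fp->caller_fp;
      if (caller->context_or_frame_type == kAdaptorMarkerSketch) return caller;
      return fp;
    }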
(...skipping 103 matching lines...)
3280 // length is a small non-negative integer, due to the test above. | 3279 // length is a small non-negative integer, due to the test above. |
3281 __ CmpP(length, Operand::Zero()); | 3280 __ CmpP(length, Operand::Zero()); |
3282 __ beq(&invoke, Label::kNear); | 3281 __ beq(&invoke, Label::kNear); |
3283 __ bind(&loop); | 3282 __ bind(&loop); |
3284 __ ShiftLeftP(r1, length, Operand(kPointerSizeLog2)); | 3283 __ ShiftLeftP(r1, length, Operand(kPointerSizeLog2)); |
3285 __ LoadP(scratch, MemOperand(elements, r1)); | 3284 __ LoadP(scratch, MemOperand(elements, r1)); |
3286 __ push(scratch); | 3285 __ push(scratch); |
3287 __ BranchOnCount(length, &loop); | 3286 __ BranchOnCount(length, &loop); |
3288 | 3287 |
3289 __ bind(&invoke); | 3288 __ bind(&invoke); |
| 3289 |
| 3290 InvokeFlag flag = CALL_FUNCTION; |
| 3291 if (instr->hydrogen()->tail_call_mode() == TailCallMode::kAllow) { |
| 3292 DCHECK(!info()->saves_caller_doubles()); |
| 3293 // TODO(ishell): drop current frame before pushing arguments to the stack. |
| 3294 flag = JUMP_FUNCTION; |
| 3295 ParameterCount actual(r2); |
| 3296 // It is safe to use r5, r6 and r7 as scratch registers here given that |
| 3297 // 1) we are not going to return to caller function anyway, |
| 3298 // 2) r5 (new.target) will be initialized below. |
| 3299 PrepareForTailCall(actual, r5, r6, r7); |
| 3300 } |
| 3301 |
3290 DCHECK(instr->HasPointerMap()); | 3302 DCHECK(instr->HasPointerMap()); |
3291 LPointerMap* pointers = instr->pointer_map(); | 3303 LPointerMap* pointers = instr->pointer_map(); |
3292 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt); | 3304 SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt); |
3293 // The number of arguments is stored in receiver which is r2, as expected | 3305 // The number of arguments is stored in receiver which is r2, as expected |
3294 // by InvokeFunction. | 3306 // by InvokeFunction. |
3295 ParameterCount actual(receiver); | 3307 ParameterCount actual(receiver); |
3296 __ InvokeFunction(function, no_reg, actual, CALL_FUNCTION, | 3308 __ InvokeFunction(function, no_reg, actual, flag, safepoint_generator); |
3297 safepoint_generator); | |
3298 } | 3309 } |
3299 | 3310 |
3300 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 3311 void LCodeGen::DoPushArgument(LPushArgument* instr) { |
3301 LOperand* argument = instr->value(); | 3312 LOperand* argument = instr->value(); |
3302 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { | 3313 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { |
3303 Abort(kDoPushArgumentNotImplementedForDoubleType); | 3314 Abort(kDoPushArgumentNotImplementedForDoubleType); |
3304 } else { | 3315 } else { |
3305 Register argument_reg = EmitLoadRegister(argument, ip); | 3316 Register argument_reg = EmitLoadRegister(argument, ip); |
3306 __ push(argument_reg); | 3317 __ push(argument_reg); |
3307 } | 3318 } |
(...skipping 21 matching lines...)
3329 DCHECK(ToRegister(instr->context()).is(cp)); | 3340 DCHECK(ToRegister(instr->context()).is(cp)); |
3330 __ Move(scratch0(), instr->hydrogen()->pairs()); | 3341 __ Move(scratch0(), instr->hydrogen()->pairs()); |
3331 __ push(scratch0()); | 3342 __ push(scratch0()); |
3332 __ LoadSmiLiteral(scratch0(), Smi::FromInt(instr->hydrogen()->flags())); | 3343 __ LoadSmiLiteral(scratch0(), Smi::FromInt(instr->hydrogen()->flags())); |
3333 __ push(scratch0()); | 3344 __ push(scratch0()); |
3334 CallRuntime(Runtime::kDeclareGlobals, instr); | 3345 CallRuntime(Runtime::kDeclareGlobals, instr); |
3335 } | 3346 } |
3336 | 3347 |
3337 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 3348 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
3338 int formal_parameter_count, int arity, | 3349 int formal_parameter_count, int arity, |
3339 LInstruction* instr) { | 3350 bool is_tail_call, LInstruction* instr) { |
3340 bool dont_adapt_arguments = | 3351 bool dont_adapt_arguments = |
3341 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 3352 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; |
3342 bool can_invoke_directly = | 3353 bool can_invoke_directly = |
3343 dont_adapt_arguments || formal_parameter_count == arity; | 3354 dont_adapt_arguments || formal_parameter_count == arity; |
3344 | 3355 |
3345 Register function_reg = r3; | 3356 Register function_reg = r3; |
3346 | 3357 |
3347 LPointerMap* pointers = instr->pointer_map(); | 3358 LPointerMap* pointers = instr->pointer_map(); |
3348 | 3359 |
3349 if (can_invoke_directly) { | 3360 if (can_invoke_directly) { |
3350 // Change context. | 3361 // Change context. |
3351 __ LoadP(cp, FieldMemOperand(function_reg, JSFunction::kContextOffset)); | 3362 __ LoadP(cp, FieldMemOperand(function_reg, JSFunction::kContextOffset)); |
3352 | 3363 |
3353 // Always initialize new target and number of actual arguments. | 3364 // Always initialize new target and number of actual arguments. |
3354 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 3365 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); |
3355 __ mov(r2, Operand(arity)); | 3366 __ mov(r2, Operand(arity)); |
3356 | 3367 |
3357 bool is_self_call = function.is_identical_to(info()->closure()); | 3368 bool is_self_call = function.is_identical_to(info()->closure()); |
3358 | 3369 |
3359 // Invoke function. | 3370 // Invoke function. |
3360 if (is_self_call) { | 3371 if (is_self_call) { |
3361 __ CallSelf(); | 3372 Handle<Code> self(reinterpret_cast<Code**>(__ CodeObject().location())); |
| 3373 if (is_tail_call) { |
| 3374 __ Jump(self, RelocInfo::CODE_TARGET); |
| 3375 } else { |
| 3376 __ Call(self, RelocInfo::CODE_TARGET); |
| 3377 } |
3362 } else { | 3378 } else { |
3363 __ LoadP(ip, FieldMemOperand(function_reg, JSFunction::kCodeEntryOffset)); | 3379 __ LoadP(ip, FieldMemOperand(function_reg, JSFunction::kCodeEntryOffset)); |
3364 __ CallJSEntry(ip); | 3380 if (is_tail_call) { |
| 3381 __ JumpToJSEntry(ip); |
| 3382 } else { |
| 3383 __ CallJSEntry(ip); |
| 3384 } |
3365 } | 3385 } |
3366 | 3386 |
3367 // Set up deoptimization. | 3387 if (!is_tail_call) { |
3368 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 3388 // Set up deoptimization. |
| 3389 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
| 3390 } |
3369 } else { | 3391 } else { |
3370 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3392 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
3371 ParameterCount count(arity); | 3393 ParameterCount actual(arity); |
3372 ParameterCount expected(formal_parameter_count); | 3394 ParameterCount expected(formal_parameter_count); |
3373 __ InvokeFunction(function_reg, expected, count, CALL_FUNCTION, generator); | 3395 InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION; |
| 3396 __ InvokeFunction(function_reg, expected, actual, flag, generator); |
3374 } | 3397 } |
3375 } | 3398 } |
3376 | 3399 |
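[Review note] Unchanged by this patch but load-bearing for the new tail-call paths: direct invocation is legal only when the callee never adapts its arguments or the declared and passed arity already match. As a sketch (the sentinel value is a stand-in):

    // Sketch of the can_invoke_directly predicate above; -1 is only a
    // stand-in for SharedFunctionInfo::kDontAdaptArgumentsSentinel.
    constexpr int kDontAdaptArgumentsSentinelSketch = -1;

    bool CanInvokeDirectly(int formal_parameter_count, int arity) {
      bool dont_adapt_arguments =
          formal_parameter_count == kDontAdaptArgumentsSentinelSketch;
      return dont_adapt_arguments || formal_parameter_count == arity;
    }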
3377 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { | 3400 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { |
3378 DCHECK(instr->context() != NULL); | 3401 DCHECK(instr->context() != NULL); |
3379 DCHECK(ToRegister(instr->context()).is(cp)); | 3402 DCHECK(ToRegister(instr->context()).is(cp)); |
3380 Register input = ToRegister(instr->value()); | 3403 Register input = ToRegister(instr->value()); |
3381 Register result = ToRegister(instr->result()); | 3404 Register result = ToRegister(instr->result()); |
3382 Register scratch = scratch0(); | 3405 Register scratch = scratch0(); |
3383 | 3406 |
(...skipping 298 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3682 Label done; | 3705 Label done; |
3683 __ llgfr(result, input); | 3706 __ llgfr(result, input); |
3684 __ flogr(r0, result); | 3707 __ flogr(r0, result); |
3685 __ LoadRR(result, r0); | 3708 __ LoadRR(result, r0); |
3686 __ CmpP(r0, Operand::Zero()); | 3709 __ CmpP(r0, Operand::Zero()); |
3687 __ beq(&done, Label::kNear); | 3710 __ beq(&done, Label::kNear); |
3688 __ SubP(result, Operand(32)); | 3711 __ SubP(result, Operand(32)); |
3689 __ bind(&done); | 3712 __ bind(&done); |
3690 } | 3713 } |
3691 | 3714 |
| 3715 void LCodeGen::PrepareForTailCall(const ParameterCount& actual, |
| 3716 Register scratch1, Register scratch2, |
| 3717 Register scratch3) { |
| 3718 #if DEBUG |
| 3719 if (actual.is_reg()) { |
| 3720 DCHECK(!AreAliased(actual.reg(), scratch1, scratch2, scratch3)); |
| 3721 } else { |
| 3722 DCHECK(!AreAliased(scratch1, scratch2, scratch3)); |
| 3723 } |
| 3724 #endif |
| 3725 if (FLAG_code_comments) { |
| 3726 if (actual.is_reg()) { |
| 3727 Comment(";;; PrepareForTailCall, actual: %s {", actual.reg().ToString()); |
| 3728 } else { |
| 3729 Comment(";;; PrepareForTailCall, actual: %d {", actual.immediate()); |
| 3730 } |
| 3731 } |
| 3732 |
| 3733 // Check if next frame is an arguments adaptor frame. |
| 3734 Register caller_args_count_reg = scratch1; |
| 3735 Label no_arguments_adaptor, formal_parameter_count_loaded; |
| 3736 __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 3737 __ LoadP(scratch3, |
| 3738 MemOperand(scratch2, StandardFrameConstants::kContextOffset)); |
| 3739 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0); |
| 3740 __ bne(&no_arguments_adaptor); |
| 3741 |
| 3742 // Drop current frame and load arguments count from arguments adaptor frame. |
| 3743 __ LoadRR(fp, scratch2); |
| 3744 __ LoadP(caller_args_count_reg, |
| 3745 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 3746 __ SmiUntag(caller_args_count_reg); |
| 3747 __ b(&formal_parameter_count_loaded); |
| 3748 |
| 3749 __ bind(&no_arguments_adaptor); |
| 3750 // Load caller's formal parameter count |
| 3751 __ mov(caller_args_count_reg, Operand(info()->literal()->parameter_count())); |
| 3752 |
| 3753 __ bind(&formal_parameter_count_loaded); |
| 3754 __ PrepareForTailCall(actual, caller_args_count_reg, scratch2, scratch3); |
| 3755 |
| 3756 Comment(";;; }"); |
| 3757 } |
| 3758 |
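[Review note] Summarizing the new helper: it picks the caller's actual argument count, from the adaptor frame's length slot when an adaptor sits between the frames, otherwise from the literal's declared parameter count, and then defers to the macro-assembler-level PrepareForTailCall to slide the frame. A rough model of that count selection (plain fields standing in for Smi-tagged frame slots):

    // Plain ints standing in for the Smi-tagged slots the real code reads
    // off fp; only the selection logic is modeled here.
    struct CallerFrameSketch {
      bool is_arguments_adaptor;  // the CmpSmiLiteral marker check
      int adaptor_length;         // the adaptor frame's length slot
    };

    int CallerArgsCount(const CallerFrameSketch& caller,
                        int literal_parameter_count) {
      // With an adaptor, the actual count was recorded in its length slot;
      // otherwise the caller received exactly its declared formals.
      return caller.is_arguments_adaptor ? caller.adaptor_length
                                         : literal_parameter_count;
    }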
3692 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { | 3759 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { |
| 3760 HInvokeFunction* hinstr = instr->hydrogen(); |
3693 DCHECK(ToRegister(instr->context()).is(cp)); | 3761 DCHECK(ToRegister(instr->context()).is(cp)); |
3694 DCHECK(ToRegister(instr->function()).is(r3)); | 3762 DCHECK(ToRegister(instr->function()).is(r3)); |
3695 DCHECK(instr->HasPointerMap()); | 3763 DCHECK(instr->HasPointerMap()); |
3696 | 3764 |
3697 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); | 3765 bool is_tail_call = hinstr->tail_call_mode() == TailCallMode::kAllow; |
| 3766 |
| 3767 if (is_tail_call) { |
| 3768 DCHECK(!info()->saves_caller_doubles()); |
| 3769 ParameterCount actual(instr->arity()); |
| 3770 // It is safe to use r5, r6 and r7 as scratch registers here given that |
| 3771 // 1) we are not going to return to caller function anyway, |
| 3772 // 2) r5 (new.target) will be initialized below. |
| 3773 PrepareForTailCall(actual, r5, r6, r7); |
| 3774 } |
| 3775 |
| 3776 Handle<JSFunction> known_function = hinstr->known_function(); |
3698 if (known_function.is_null()) { | 3777 if (known_function.is_null()) { |
3699 LPointerMap* pointers = instr->pointer_map(); | 3778 LPointerMap* pointers = instr->pointer_map(); |
3700 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3779 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
3701 ParameterCount count(instr->arity()); | 3780 ParameterCount actual(instr->arity()); |
3702 __ InvokeFunction(r3, no_reg, count, CALL_FUNCTION, generator); | 3781 InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION; |
| 3782 __ InvokeFunction(r3, no_reg, actual, flag, generator); |
3703 } else { | 3783 } else { |
3704 CallKnownFunction(known_function, | 3784 CallKnownFunction(known_function, hinstr->formal_parameter_count(), |
3705 instr->hydrogen()->formal_parameter_count(), | 3785 instr->arity(), is_tail_call, instr); |
3706 instr->arity(), instr); | |
3707 } | 3786 } |
3708 } | 3787 } |
3709 | 3788 |
3710 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { | 3789 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { |
3711 DCHECK(ToRegister(instr->result()).is(r2)); | 3790 DCHECK(ToRegister(instr->result()).is(r2)); |
3712 | 3791 |
3713 if (instr->hydrogen()->IsTailCall()) { | 3792 if (instr->hydrogen()->IsTailCall()) { |
3714 if (NeedsEagerFrame()) __ LeaveFrame(StackFrame::INTERNAL); | 3793 if (NeedsEagerFrame()) __ LeaveFrame(StackFrame::INTERNAL); |
3715 | 3794 |
3716 if (instr->target()->IsConstantOperand()) { | 3795 if (instr->target()->IsConstantOperand()) { |
(...skipping 1850 matching lines...)
5567 } | 5646 } |
5568 | 5647 |
5569 void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) { | 5648 void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) { |
5570 Register context = ToRegister(instr->context()); | 5649 Register context = ToRegister(instr->context()); |
5571 __ StoreP(context, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 5650 __ StoreP(context, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
5572 } | 5651 } |
5573 | 5652 |
5574 #undef __ | 5653 #undef __ |
5575 } // namespace internal | 5654 } // namespace internal |
5576 } // namespace v8 | 5655 } // namespace v8 |