OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 92 matching lines...) |
103 private: | 103 private: |
104 MacroAssembler* masm_; | 104 MacroAssembler* masm_; |
105 Label patch_site_; | 105 Label patch_site_; |
106 Register reg_; | 106 Register reg_; |
107 #ifdef DEBUG | 107 #ifdef DEBUG |
108 bool info_emitted_; | 108 bool info_emitted_; |
109 #endif | 109 #endif |
110 }; | 110 }; |
111 | 111 |
112 | 112 |
| 113 static void EmitStackCheck(MacroAssembler* masm_, |
| 114 int pointers = 0, |
| 115 Register scratch = jssp) { |
| 116 Isolate* isolate = masm_->isolate(); |
| 117 Label ok; |
| 118 ASSERT(jssp.Is(__ StackPointer())); |
| 119 ASSERT(scratch.Is(jssp) == (pointers == 0)); |
| 120 if (pointers != 0) { |
| 121 __ Sub(scratch, jssp, pointers * kPointerSize); |
| 122 } |
| 123 __ CompareRoot(scratch, Heap::kStackLimitRootIndex); |
| 124 __ B(hs, &ok); |
| 125 PredictableCodeSizeScope predictable(masm_, |
| 126 Assembler::kCallSizeWithRelocation); |
| 127 __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET); |
| 128 __ Bind(&ok); |
| 129 } |
| 130 |
| 131 |
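Note: the new EmitStackCheck helper factors the interrupt/overflow probe out so that the large-locals path and the post-declarations check further down can share one sequence; the parameter is presumably named masm_ so the __ macro, which expands through masm_, still resolves inside a file-static function. A minimal sketch of the runtime behaviour the emitted code implements, with placeholder names (CallStackCheckBuiltin, StackCheckSketch are illustrative, not V8 APIs):

  #include <cstdint>

  static void CallStackCheckBuiltin();  // stands in for the StackCheck builtin

  static void StackCheckSketch(uintptr_t current_sp, uintptr_t stack_limit,
                               int pointers) {
    const int kPointerSize = 8;  // pointer size on arm64
    // Sub(scratch, jssp, pointers * kPointerSize): probe `pointers` slots below jssp.
    uintptr_t probe = current_sp - static_cast<uintptr_t>(pointers) * kPointerSize;
    // B(hs, &ok) skips the call when probe >= stack_limit (unsigned compare),
    // so the builtin is only reached when the probe dips below the limit.
    if (probe < stack_limit) {
      CallStackCheckBuiltin();
    }
    // &ok: fall through and continue building the frame.
  }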
113 // Generate code for a JS function. On entry to the function the receiver | 132 // Generate code for a JS function. On entry to the function the receiver |
114 // and arguments have been pushed on the stack left to right. The actual | 133 // and arguments have been pushed on the stack left to right. The actual |
115 // argument count matches the formal parameter count expected by the | 134 // argument count matches the formal parameter count expected by the |
116 // function. | 135 // function. |
117 // | 136 // |
118 // The live registers are: | 137 // The live registers are: |
119 // - x1: the JS function object being called (i.e. ourselves). | 138 // - x1: the JS function object being called (i.e. ourselves). |
120 // - cp: our context. | 139 // - cp: our context. |
121 // - fp: our caller's frame pointer. | 140 // - fp: our caller's frame pointer. |
122 // - jssp: stack pointer. | 141 // - jssp: stack pointer. |
(...skipping 52 matching lines...) |
175 __ Prologue(BUILD_FUNCTION_FRAME); | 194 __ Prologue(BUILD_FUNCTION_FRAME); |
176 info->AddNoFrameRange(0, masm_->pc_offset()); | 195 info->AddNoFrameRange(0, masm_->pc_offset()); |
177 | 196 |
178 // Reserve space on the stack for locals. | 197 // Reserve space on the stack for locals. |
179 { Comment cmnt(masm_, "[ Allocate locals"); | 198 { Comment cmnt(masm_, "[ Allocate locals"); |
180 int locals_count = info->scope()->num_stack_slots(); | 199 int locals_count = info->scope()->num_stack_slots(); |
181 // Generators allocate locals, if any, in context slots. | 200 // Generators allocate locals, if any, in context slots. |
182 ASSERT(!info->function()->is_generator() || locals_count == 0); | 201 ASSERT(!info->function()->is_generator() || locals_count == 0); |
183 | 202 |
184 if (locals_count > 0) { | 203 if (locals_count > 0) { |
| 204 if (locals_count >= 128) { |
| 205 EmitStackCheck(masm_, locals_count, x10); |
| 206 } |
185 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | 207 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); |
186 __ PushMultipleTimes(x10, locals_count); | 208 if (FLAG_optimize_for_size) { |
| 209 __ PushMultipleTimes(x10, locals_count); |
| 210 } else { |
| 211 const int kMaxPushes = 32; |
| 212 if (locals_count >= kMaxPushes) { |
| 213 int loop_iterations = locals_count / kMaxPushes; |
| 214 __ Mov(x3, loop_iterations); |
| 215 Label loop_header; |
| 216 __ Bind(&loop_header); |
| 217 // Do pushes. |
| 218 __ PushMultipleTimes(x10, kMaxPushes); |
| 219 __ Subs(x3, x3, 1); |
| 220 __ B(ne, &loop_header); |
| 221 } |
| 222 int remaining = locals_count % kMaxPushes; |
| 223 // Emit the remaining pushes. |
| 224 __ PushMultipleTimes(x10, remaining); |
| 225 } |
187 } | 226 } |
188 } | 227 } |
189 | 228 |
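Note: when not optimizing for size, the undefined-value pushes for the locals are emitted in batches: full groups of kMaxPushes (32) go into a counted loop driven by x3, and the remainder is emitted straight-line afterwards; with --optimize-for-size a single PushMultipleTimes covers the whole count, and for 128 or more locals EmitStackCheck first probes locals_count slots below jssp using x10 as scratch. A small sketch of the batching arithmetic only (plain C++, not the emitted code):

  // Example: locals_count = 70 splits into 2 loop iterations of 32 pushes
  // plus 6 straight-line pushes.
  static void PushBatchesSketch(int locals_count) {
    const int kMaxPushes = 32;
    int loop_iterations = locals_count / kMaxPushes;  // full batches, emitted in the loop
    int remaining = locals_count % kMaxPushes;        // tail, emitted after the loop
    for (int i = 0; i < loop_iterations; ++i) {
      // ... emit kMaxPushes pushes of the undefined value held in x10 ...
    }
    // ... emit `remaining` pushes of the undefined value ...
  }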
190 bool function_in_register_x1 = true; | 229 bool function_in_register_x1 = true; |
191 | 230 |
192 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 231 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
193 if (heap_slots > 0) { | 232 if (heap_slots > 0) { |
194 // Argument to NewContext is the function, which is still in x1. | 233 // Argument to NewContext is the function, which is still in x1. |
195 Comment cmnt(masm_, "[ Allocate context"); | 234 Comment cmnt(masm_, "[ Allocate context"); |
196 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { | 235 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { |
(...skipping 87 matching lines...) |
284 function->proxy()->var()->mode() == CONST_LEGACY); | 323 function->proxy()->var()->mode() == CONST_LEGACY); |
285 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); | 324 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); |
286 VisitVariableDeclaration(function); | 325 VisitVariableDeclaration(function); |
287 } | 326 } |
288 VisitDeclarations(scope()->declarations()); | 327 VisitDeclarations(scope()->declarations()); |
289 } | 328 } |
290 } | 329 } |
291 | 330 |
292 { Comment cmnt(masm_, "[ Stack check"); | 331 { Comment cmnt(masm_, "[ Stack check"); |
293 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); | 332 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); |
294 Label ok; | 333 EmitStackCheck(masm_); |
295 ASSERT(jssp.Is(__ StackPointer())); | |
296 __ CompareRoot(jssp, Heap::kStackLimitRootIndex); | |
297 __ B(hs, &ok); | |
298 PredictableCodeSizeScope predictable(masm_, | |
299 Assembler::kCallSizeWithRelocation); | |
300 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); | |
301 __ Bind(&ok); | |
302 } | 334 } |
303 | 335 |
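Note: the previously inline sequence here (old lines 294-301) becomes the default form of the helper: with pointers left at 0 and scratch defaulting to jssp, no offset is computed and jssp itself is compared against the stack limit. The two call forms used in this patch:

  EmitStackCheck(masm_, locals_count, x10);  // probe locals_count slots below jssp
  EmitStackCheck(masm_);                     // compare jssp directly against the limit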
304 { Comment cmnt(masm_, "[ Body"); | 336 { Comment cmnt(masm_, "[ Body"); |
305 ASSERT(loop_depth() == 0); | 337 ASSERT(loop_depth() == 0); |
306 VisitStatements(function()->body()); | 338 VisitStatements(function()->body()); |
307 ASSERT(loop_depth() == 0); | 339 ASSERT(loop_depth() == 0); |
308 } | 340 } |
309 | 341 |
310 // Always emit a 'return undefined' in case control fell off the end of | 342 // Always emit a 'return undefined' in case control fell off the end of |
311 // the body. | 343 // the body. |
(...skipping 4661 matching lines...) |
4973 return previous_; | 5005 return previous_; |
4974 } | 5006 } |
4975 | 5007 |
4976 | 5008 |
4977 #undef __ | 5009 #undef __ |
4978 | 5010 |
4979 | 5011 |
4980 } } // namespace v8::internal | 5012 } } // namespace v8::internal |
4981 | 5013 |
4982 #endif // V8_TARGET_ARCH_ARM64 | 5014 #endif // V8_TARGET_ARCH_ARM64 |