OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
113 | 113 |
114 private: | 114 private: |
115 MacroAssembler* masm_; | 115 MacroAssembler* masm_; |
116 Label patch_site_; | 116 Label patch_site_; |
117 #ifdef DEBUG | 117 #ifdef DEBUG |
118 bool info_emitted_; | 118 bool info_emitted_; |
119 #endif | 119 #endif |
120 }; | 120 }; |
121 | 121 |
122 | 122 |
// Emits an inline stack-overflow check.  Compares the (possibly reserved)
// stack pointer against the isolate's stack-limit root and, if the limit
// has been crossed, calls the StackCheck builtin.
//
// Parameters:
//   masm_               - assembler to emit into; named masm_ presumably so
//                         the __ macro (which expands to masm_->) resolves to
//                         this parameter — confirm against the macro definition.
//   stack_limit_scratch - scratch register that receives the stack-limit root.
//   pointers            - number of pointer-sized slots the caller is about to
//                         push; 0 means check sp itself.
//   scratch             - register for the adjusted sp when pointers != 0;
//                         must be sp exactly when pointers == 0 (see ASSERT).
static void EmitStackCheck(MacroAssembler* masm_,
                           Register stack_limit_scratch,
                           int pointers = 0,
                           Register scratch = sp) {
  Isolate* isolate = masm_->isolate();
  Label ok;
  // A non-sp scratch is required iff we are reserving slots below sp.
  ASSERT(scratch.is(sp) == (pointers == 0));
  if (pointers != 0) {
    // Check against sp as it will be after the upcoming pushes.
    __ Subu(scratch, sp, Operand(pointers * kPointerSize));
  }
  __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
  // Unsigned compare: skip the builtin call while still above the limit.
  __ Branch(&ok, hs, scratch, Operand(stack_limit_scratch));
  // Pin the call sequence to exactly 4 instructions — presumably so the
  // check site has a fixed, patchable size (cf. back-edge/OSR patching in
  // this file) — TODO confirm against PredictableCodeSizeScope users.
  PredictableCodeSizeScope predictable(masm_, 4 * Assembler::kInstrSize);
  __ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
  __ bind(&ok);
}
| 139 |
| 140 |
123 // Generate code for a JS function. On entry to the function the receiver | 141 // Generate code for a JS function. On entry to the function the receiver |
124 // and arguments have been pushed on the stack left to right. The actual | 142 // and arguments have been pushed on the stack left to right. The actual |
125 // argument count matches the formal parameter count expected by the | 143 // argument count matches the formal parameter count expected by the |
126 // function. | 144 // function. |
127 // | 145 // |
128 // The live registers are: | 146 // The live registers are: |
129 // o a1: the JS function object being called (i.e. ourselves) | 147 // o a1: the JS function object being called (i.e. ourselves) |
130 // o cp: our context | 148 // o cp: our context |
131 // o fp: our caller's frame pointer | 149 // o fp: our caller's frame pointer |
132 // o sp: stack pointer | 150 // o sp: stack pointer |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
180 | 198 |
181 info->set_prologue_offset(masm_->pc_offset()); | 199 info->set_prologue_offset(masm_->pc_offset()); |
182 __ Prologue(BUILD_FUNCTION_FRAME); | 200 __ Prologue(BUILD_FUNCTION_FRAME); |
183 info->AddNoFrameRange(0, masm_->pc_offset()); | 201 info->AddNoFrameRange(0, masm_->pc_offset()); |
184 | 202 |
185 { Comment cmnt(masm_, "[ Allocate locals"); | 203 { Comment cmnt(masm_, "[ Allocate locals"); |
186 int locals_count = info->scope()->num_stack_slots(); | 204 int locals_count = info->scope()->num_stack_slots(); |
187 // Generators allocate locals, if any, in context slots. | 205 // Generators allocate locals, if any, in context slots. |
188 ASSERT(!info->function()->is_generator() || locals_count == 0); | 206 ASSERT(!info->function()->is_generator() || locals_count == 0); |
189 if (locals_count > 0) { | 207 if (locals_count > 0) { |
190 // Emit a loop to initialize stack cells for locals when optimizing for | 208 if (locals_count >= 128) { |
191 // size. Otherwise, unroll the loop for maximum performance. | 209 EmitStackCheck(masm_, a2, locals_count, t5); |
| 210 } |
192 __ LoadRoot(t5, Heap::kUndefinedValueRootIndex); | 211 __ LoadRoot(t5, Heap::kUndefinedValueRootIndex); |
193 if ((FLAG_optimize_for_size && locals_count > 4) || | 212 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; |
194 !is_int16(locals_count)) { | 213 if (locals_count >= kMaxPushes) { |
195 Label loop; | 214 int loop_iterations = locals_count / kMaxPushes; |
196 __ Subu(a2, sp, Operand(locals_count * kPointerSize)); | 215 __ li(a2, Operand(loop_iterations)); |
197 __ bind(&loop); | 216 Label loop_header; |
198 __ Subu(sp, sp, Operand(kPointerSize)); | 217 __ bind(&loop_header); |
199 __ Branch(&loop, gt, sp, Operand(a2), USE_DELAY_SLOT); | 218 // Do pushes. |
200 __ sw(t5, MemOperand(sp, 0)); // Push in the delay slot. | 219 __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize)); |
201 } else { | 220 for (int i = 0; i < kMaxPushes; i++) { |
202 __ Subu(sp, sp, Operand(locals_count * kPointerSize)); | |
203 for (int i = 0; i < locals_count; i++) { | |
204 __ sw(t5, MemOperand(sp, i * kPointerSize)); | 221 __ sw(t5, MemOperand(sp, i * kPointerSize)); |
205 } | 222 } |
| 223 // Continue loop if not done. |
| 224 __ Subu(a2, a2, Operand(1)); |
| 225 __ Branch(&loop_header, ne, a2, Operand(zero_reg)); |
| 226 } |
| 227 int remaining = locals_count % kMaxPushes; |
| 228 // Emit the remaining pushes. |
| 229 __ Subu(sp, sp, Operand(remaining * kPointerSize)); |
| 230 for (int i = 0; i < remaining; i++) { |
| 231 __ sw(t5, MemOperand(sp, i * kPointerSize)); |
206 } | 232 } |
207 } | 233 } |
208 } | 234 } |
209 | 235 |
210 bool function_in_register = true; | 236 bool function_in_register = true; |
211 | 237 |
212 // Possibly allocate a local context. | 238 // Possibly allocate a local context. |
213 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 239 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
214 if (heap_slots > 0) { | 240 if (heap_slots > 0) { |
215 Comment cmnt(masm_, "[ Allocate context"); | 241 Comment cmnt(masm_, "[ Allocate context"); |
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
306 ASSERT(function->proxy()->var()->mode() == CONST || | 332 ASSERT(function->proxy()->var()->mode() == CONST || |
307 function->proxy()->var()->mode() == CONST_LEGACY); | 333 function->proxy()->var()->mode() == CONST_LEGACY); |
308 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); | 334 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); |
309 VisitVariableDeclaration(function); | 335 VisitVariableDeclaration(function); |
310 } | 336 } |
311 VisitDeclarations(scope()->declarations()); | 337 VisitDeclarations(scope()->declarations()); |
312 } | 338 } |
313 | 339 |
314 { Comment cmnt(masm_, "[ Stack check"); | 340 { Comment cmnt(masm_, "[ Stack check"); |
315 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); | 341 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); |
316 Label ok; | 342 EmitStackCheck(masm_, at); |
317 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | |
318 __ Branch(&ok, hs, sp, Operand(t0)); | |
319 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); | |
320 __ bind(&ok); | |
321 } | 343 } |
322 | 344 |
323 { Comment cmnt(masm_, "[ Body"); | 345 { Comment cmnt(masm_, "[ Body"); |
324 ASSERT(loop_depth() == 0); | 346 ASSERT(loop_depth() == 0); |
325 VisitStatements(function()->body()); | 347 VisitStatements(function()->body()); |
326 ASSERT(loop_depth() == 0); | 348 ASSERT(loop_depth() == 0); |
327 } | 349 } |
328 } | 350 } |
329 | 351 |
330 // Always emit a 'return undefined' in case control fell off the end of | 352 // Always emit a 'return undefined' in case control fell off the end of |
(...skipping 4624 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4955 Assembler::target_address_at(pc_immediate_load_address)) == | 4977 Assembler::target_address_at(pc_immediate_load_address)) == |
4956 reinterpret_cast<uint32_t>( | 4978 reinterpret_cast<uint32_t>( |
4957 isolate->builtins()->OsrAfterStackCheck()->entry())); | 4979 isolate->builtins()->OsrAfterStackCheck()->entry())); |
4958 return OSR_AFTER_STACK_CHECK; | 4980 return OSR_AFTER_STACK_CHECK; |
4959 } | 4981 } |
4960 | 4982 |
4961 | 4983 |
4962 } } // namespace v8::internal | 4984 } } // namespace v8::internal |
4963 | 4985 |
4964 #endif // V8_TARGET_ARCH_MIPS | 4986 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |