OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 103 matching lines...)
114 | 114 |
115 CodeGenState::~CodeGenState() { | 115 CodeGenState::~CodeGenState() { |
116 ASSERT(owner_->state() == this); | 116 ASSERT(owner_->state() == this); |
117 owner_->set_state(previous_); | 117 owner_->set_state(previous_); |
118 } | 118 } |
119 | 119 |
120 | 120 |
121 // ------------------------------------------------------------------------- | 121 // ------------------------------------------------------------------------- |
122 // CodeGenerator implementation | 122 // CodeGenerator implementation |
123 | 123 |
124 CodeGenerator::CodeGenerator(MacroAssembler* masm, | 124 CodeGenerator::CodeGenerator(MacroAssembler* masm) |
125 Handle<Script> script, | 125 : deferred_(8), |
126 bool is_eval) | |
127 : is_eval_(is_eval), | |
128 script_(script), | |
129 deferred_(8), | |
130 masm_(masm), | 126 masm_(masm), |
131 scope_(NULL), | 127 info_(NULL), |
132 frame_(NULL), | 128 frame_(NULL), |
133 allocator_(NULL), | 129 allocator_(NULL), |
134 cc_reg_(al), | 130 cc_reg_(al), |
135 state_(NULL), | 131 state_(NULL), |
136 function_return_is_shadowed_(false) { | 132 function_return_is_shadowed_(false) { |
137 } | 133 } |
138 | 134 |
139 | 135 |
| 136 Scope* CodeGenerator::scope() { return info_->function()->scope(); } |
| 137 |
| 138 |
140 // Calling conventions: | 139 // Calling conventions: |
141 // fp: caller's frame pointer | 140 // fp: caller's frame pointer |
142 // sp: stack pointer | 141 // sp: stack pointer |
143 // r1: called JS function | 142 // r1: called JS function |
144 // cp: callee's context | 143 // cp: callee's context |
145 | 144 |
146 void CodeGenerator::Generate(FunctionLiteral* fun, | 145 void CodeGenerator::Generate(CompilationInfo* info, Mode mode) { |
147 Mode mode, | |
148 CompilationInfo* info) { | |
149 // Record the position for debugging purposes. | 146 // Record the position for debugging purposes. |
150 CodeForFunctionPosition(fun); | 147 CodeForFunctionPosition(info->function()); |
151 | |
152 ZoneList<Statement*>* body = fun->body(); | |
153 | 148 |
154 // Initialize state. | 149 // Initialize state. |
155 ASSERT(scope_ == NULL); | 150 info_ = info; |
156 scope_ = fun->scope(); | |
157 ASSERT(allocator_ == NULL); | 151 ASSERT(allocator_ == NULL); |
158 RegisterAllocator register_allocator(this); | 152 RegisterAllocator register_allocator(this); |
159 allocator_ = ®ister_allocator; | 153 allocator_ = ®ister_allocator; |
160 ASSERT(frame_ == NULL); | 154 ASSERT(frame_ == NULL); |
161 frame_ = new VirtualFrame(); | 155 frame_ = new VirtualFrame(); |
162 cc_reg_ = al; | 156 cc_reg_ = al; |
163 { | 157 { |
164 CodeGenState state(this); | 158 CodeGenState state(this); |
165 | 159 |
166 // Entry: | 160 // Entry: |
167 // Stack: receiver, arguments | 161 // Stack: receiver, arguments |
168 // lr: return address | 162 // lr: return address |
169 // fp: caller's frame pointer | 163 // fp: caller's frame pointer |
170 // sp: stack pointer | 164 // sp: stack pointer |
171 // r1: called JS function | 165 // r1: called JS function |
172 // cp: callee's context | 166 // cp: callee's context |
173 allocator_->Initialize(); | 167 allocator_->Initialize(); |
174 | 168 |
175 #ifdef DEBUG | 169 #ifdef DEBUG |
176 if (strlen(FLAG_stop_at) > 0 && | 170 if (strlen(FLAG_stop_at) > 0 && |
177 fun->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { | 171 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
178 frame_->SpillAll(); | 172 frame_->SpillAll(); |
179 __ stop("stop-at"); | 173 __ stop("stop-at"); |
180 } | 174 } |
181 #endif | 175 #endif |
182 | 176 |
183 if (mode == PRIMARY) { | 177 if (mode == PRIMARY) { |
184 frame_->Enter(); | 178 frame_->Enter(); |
185 // tos: code slot | 179 // tos: code slot |
186 | 180 |
187 // Allocate space for locals and initialize them. This also checks | 181 // Allocate space for locals and initialize them. This also checks |
188 // for stack overflow. | 182 // for stack overflow. |
189 frame_->AllocateStackSlots(); | 183 frame_->AllocateStackSlots(); |
190 | 184 |
191 VirtualFrame::SpilledScope spilled_scope; | 185 VirtualFrame::SpilledScope spilled_scope; |
192 int heap_slots = scope_->num_heap_slots(); | 186 int heap_slots = scope()->num_heap_slots(); |
193 if (heap_slots > 0) { | 187 if (heap_slots > 0) { |
194 // Allocate local context. | 188 // Allocate local context. |
195 // Get outer context and create a new context based on it. | 189 // Get outer context and create a new context based on it. |
196 __ ldr(r0, frame_->Function()); | 190 __ ldr(r0, frame_->Function()); |
197 frame_->EmitPush(r0); | 191 frame_->EmitPush(r0); |
198 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 192 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
199 FastNewContextStub stub(heap_slots); | 193 FastNewContextStub stub(heap_slots); |
200 frame_->CallStub(&stub, 1); | 194 frame_->CallStub(&stub, 1); |
201 } else { | 195 } else { |
202 frame_->CallRuntime(Runtime::kNewContext, 1); | 196 frame_->CallRuntime(Runtime::kNewContext, 1); |
203 } | 197 } |
204 | 198 |
205 #ifdef DEBUG | 199 #ifdef DEBUG |
206 JumpTarget verified_true; | 200 JumpTarget verified_true; |
207 __ cmp(r0, Operand(cp)); | 201 __ cmp(r0, Operand(cp)); |
208 verified_true.Branch(eq); | 202 verified_true.Branch(eq); |
209 __ stop("NewContext: r0 is expected to be the same as cp"); | 203 __ stop("NewContext: r0 is expected to be the same as cp"); |
210 verified_true.Bind(); | 204 verified_true.Bind(); |
211 #endif | 205 #endif |
212 // Update context local. | 206 // Update context local. |
213 __ str(cp, frame_->Context()); | 207 __ str(cp, frame_->Context()); |
214 } | 208 } |
215 | 209 |
216 // TODO(1241774): Improve this code: | 210 // TODO(1241774): Improve this code: |
217 // 1) only needed if we have a context | 211 // 1) only needed if we have a context |
218 // 2) no need to recompute context ptr every single time | 212 // 2) no need to recompute context ptr every single time |
219 // 3) don't copy parameter operand code from SlotOperand! | 213 // 3) don't copy parameter operand code from SlotOperand! |
220 { | 214 { |
221 Comment cmnt2(masm_, "[ copy context parameters into .context"); | 215 Comment cmnt2(masm_, "[ copy context parameters into .context"); |
222 | |
223 // Note that iteration order is relevant here! If we have the same | 216 // Note that iteration order is relevant here! If we have the same |
224 // parameter twice (e.g., function (x, y, x)), and that parameter | 217 // parameter twice (e.g., function (x, y, x)), and that parameter |
225 // needs to be copied into the context, it must be the last argument | 218 // needs to be copied into the context, it must be the last argument |
226 // passed to the parameter that needs to be copied. This is a rare | 219 // passed to the parameter that needs to be copied. This is a rare |
227 // case so we don't check for it, instead we rely on the copying | 220 // case so we don't check for it, instead we rely on the copying |
228 // order: such a parameter is copied repeatedly into the same | 221 // order: such a parameter is copied repeatedly into the same |
229 // context location and thus the last value is what is seen inside | 222 // context location and thus the last value is what is seen inside |
230 // the function. | 223 // the function. |
231 for (int i = 0; i < scope_->num_parameters(); i++) { | 224 for (int i = 0; i < scope()->num_parameters(); i++) { |
232 Variable* par = scope_->parameter(i); | 225 Variable* par = scope()->parameter(i); |
233 Slot* slot = par->slot(); | 226 Slot* slot = par->slot(); |
234 if (slot != NULL && slot->type() == Slot::CONTEXT) { | 227 if (slot != NULL && slot->type() == Slot::CONTEXT) { |
235 // No parameters in global scope. | 228 ASSERT(!scope()->is_global_scope());  // no parameters in global scope |
236 ASSERT(!scope_->is_global_scope()); | |
237 __ ldr(r1, frame_->ParameterAt(i)); | 229 __ ldr(r1, frame_->ParameterAt(i)); |
238 // Loads r2 with context; used below in RecordWrite. | 230 // Loads r2 with context; used below in RecordWrite. |
239 __ str(r1, SlotOperand(slot, r2)); | 231 __ str(r1, SlotOperand(slot, r2)); |
240 // Load the offset into r3. | 232 // Load the offset into r3. |
241 int slot_offset = | 233 int slot_offset = |
242 FixedArray::kHeaderSize + slot->index() * kPointerSize; | 234 FixedArray::kHeaderSize + slot->index() * kPointerSize; |
243 __ mov(r3, Operand(slot_offset)); | 235 __ mov(r3, Operand(slot_offset)); |
244 __ RecordWrite(r2, r3, r1); | 236 __ RecordWrite(r2, r3, r1); |
245 } | 237 } |
246 } | 238 } |
247 } | 239 } |
248 | 240 |
249 // Store the arguments object. This must happen after context | 241 // Store the arguments object. This must happen after context |
250 // initialization because the arguments object may be stored in the | 242 // initialization because the arguments object may be stored in the |
251 // context. | 243 // context. |
252 if (scope_->arguments() != NULL) { | 244 if (scope()->arguments() != NULL) { |
253 Comment cmnt(masm_, "[ allocate arguments object"); | 245 Comment cmnt(masm_, "[ allocate arguments object"); |
254 ASSERT(scope_->arguments_shadow() != NULL); | 246 ASSERT(scope()->arguments_shadow() != NULL); |
255 Variable* arguments = scope_->arguments()->var(); | 247 Variable* arguments = scope()->arguments()->var(); |
256 Variable* shadow = scope_->arguments_shadow()->var(); | 248 Variable* shadow = scope()->arguments_shadow()->var(); |
257 ASSERT(arguments != NULL && arguments->slot() != NULL); | 249 ASSERT(arguments != NULL && arguments->slot() != NULL); |
258 ASSERT(shadow != NULL && shadow->slot() != NULL); | 250 ASSERT(shadow != NULL && shadow->slot() != NULL); |
259 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); | 251 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); |
260 __ ldr(r2, frame_->Function()); | 252 __ ldr(r2, frame_->Function()); |
261 // The receiver is below the arguments, the return address, and the | 253 // The receiver is below the arguments, the return address, and the |
262 // frame pointer on the stack. | 254 // frame pointer on the stack. |
263 const int kReceiverDisplacement = 2 + scope_->num_parameters(); | 255 const int kReceiverDisplacement = 2 + scope()->num_parameters(); |
264 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize)); | 256 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize)); |
265 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); | 257 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters()))); |
266 frame_->Adjust(3); | 258 frame_->Adjust(3); |
267 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit()); | 259 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit()); |
268 frame_->CallStub(&stub, 3); | 260 frame_->CallStub(&stub, 3); |
269 frame_->EmitPush(r0); | 261 frame_->EmitPush(r0); |
270 StoreToSlot(arguments->slot(), NOT_CONST_INIT); | 262 StoreToSlot(arguments->slot(), NOT_CONST_INIT); |
271 StoreToSlot(shadow->slot(), NOT_CONST_INIT); | 263 StoreToSlot(shadow->slot(), NOT_CONST_INIT); |
272 frame_->Drop(); // Value is no longer needed. | 264 frame_->Drop(); // Value is no longer needed. |
273 } | 265 } |
274 | 266 |
275 // Initialize ThisFunction reference if present. | 267 // Initialize ThisFunction reference if present. |
276 if (scope_->is_function_scope() && scope_->function() != NULL) { | 268 if (scope()->is_function_scope() && scope()->function() != NULL) { |
277 __ mov(ip, Operand(Factory::the_hole_value())); | 269 __ mov(ip, Operand(Factory::the_hole_value())); |
278 frame_->EmitPush(ip); | 270 frame_->EmitPush(ip); |
279 StoreToSlot(scope_->function()->slot(), NOT_CONST_INIT); | 271 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT); |
280 } | 272 } |
281 } else { | 273 } else { |
282 // When used as the secondary compiler for splitting, r1, cp, | 274 // When used as the secondary compiler for splitting, r1, cp, |
283 // fp, and lr have been pushed on the stack. Adjust the virtual | 275 // fp, and lr have been pushed on the stack. Adjust the virtual |
284 // frame to match this state. | 276 // frame to match this state. |
285 frame_->Adjust(4); | 277 frame_->Adjust(4); |
286 allocator_->Unuse(r1); | 278 allocator_->Unuse(r1); |
287 allocator_->Unuse(lr); | 279 allocator_->Unuse(lr); |
288 } | 280 } |
289 | 281 |
290 // Initialize the function return target after the locals are set | 282 // Initialize the function return target after the locals are set |
291 // up, because it needs the expected frame height from the frame. | 283 // up, because it needs the expected frame height from the frame. |
292 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); | 284 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); |
293 function_return_is_shadowed_ = false; | 285 function_return_is_shadowed_ = false; |
294 | 286 |
295 // Generate code to 'execute' declarations and initialize functions | 287 // Generate code to 'execute' declarations and initialize functions |
296 // (source elements). In case of an illegal redeclaration we need to | 288 // (source elements). In case of an illegal redeclaration we need to |
297 // handle that instead of processing the declarations. | 289 // handle that instead of processing the declarations. |
298 if (scope_->HasIllegalRedeclaration()) { | 290 if (scope()->HasIllegalRedeclaration()) { |
299 Comment cmnt(masm_, "[ illegal redeclarations"); | 291 Comment cmnt(masm_, "[ illegal redeclarations"); |
300 scope_->VisitIllegalRedeclaration(this); | 292 scope()->VisitIllegalRedeclaration(this); |
301 } else { | 293 } else { |
302 Comment cmnt(masm_, "[ declarations"); | 294 Comment cmnt(masm_, "[ declarations"); |
303 ProcessDeclarations(scope_->declarations()); | 295 ProcessDeclarations(scope()->declarations()); |
304 // Bail out if a stack-overflow exception occurred when processing | 296 // Bail out if a stack-overflow exception occurred when processing |
305 // declarations. | 297 // declarations. |
306 if (HasStackOverflow()) return; | 298 if (HasStackOverflow()) return; |
307 } | 299 } |
308 | 300 |
309 if (FLAG_trace) { | 301 if (FLAG_trace) { |
310 frame_->CallRuntime(Runtime::kTraceEnter, 0); | 302 frame_->CallRuntime(Runtime::kTraceEnter, 0); |
311 // Ignore the return value. | 303 // Ignore the return value. |
312 } | 304 } |
313 | 305 |
314 // Compile the body of the function in a vanilla state. Don't | 306 // Compile the body of the function in a vanilla state. Don't |
315 // bother compiling all the code if the scope has an illegal | 307 // bother compiling all the code if the scope has an illegal |
316 // redeclaration. | 308 // redeclaration. |
317 if (!scope_->HasIllegalRedeclaration()) { | 309 if (!scope()->HasIllegalRedeclaration()) { |
318 Comment cmnt(masm_, "[ function body"); | 310 Comment cmnt(masm_, "[ function body"); |
319 #ifdef DEBUG | 311 #ifdef DEBUG |
320 bool is_builtin = Bootstrapper::IsActive(); | 312 bool is_builtin = Bootstrapper::IsActive(); |
321 bool should_trace = | 313 bool should_trace = |
322 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; | 314 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; |
323 if (should_trace) { | 315 if (should_trace) { |
324 frame_->CallRuntime(Runtime::kDebugTrace, 0); | 316 frame_->CallRuntime(Runtime::kDebugTrace, 0); |
325 // Ignore the return value. | 317 // Ignore the return value. |
326 } | 318 } |
327 #endif | 319 #endif |
328 VisitStatementsAndSpill(body); | 320 VisitStatementsAndSpill(info->function()->body()); |
329 } | 321 } |
330 } | 322 } |
331 | 323 |
332 // Generate the return sequence if necessary. | 324 // Generate the return sequence if necessary. |
333 if (has_valid_frame() || function_return_.is_linked()) { | 325 if (has_valid_frame() || function_return_.is_linked()) { |
334 if (!function_return_.is_linked()) { | 326 if (!function_return_.is_linked()) { |
335 CodeForReturnPosition(fun); | 327 CodeForReturnPosition(info->function()); |
336 } | 328 } |
337 // exit | 329 // exit |
338 // r0: result | 330 // r0: result |
339 // sp: stack pointer | 331 // sp: stack pointer |
340 // fp: frame pointer | 332 // fp: frame pointer |
341 // cp: callee's context | 333 // cp: callee's context |
342 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 334 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
343 | 335 |
344 function_return_.Bind(); | 336 function_return_.Bind(); |
345 if (FLAG_trace) { | 337 if (FLAG_trace) { |
346 // Push the return value on the stack as the parameter. | 338 // Push the return value on the stack as the parameter. |
347 // Runtime::TraceExit returns the parameter as it is. | 339 // Runtime::TraceExit returns the parameter as it is. |
348 frame_->EmitPush(r0); | 340 frame_->EmitPush(r0); |
349 frame_->CallRuntime(Runtime::kTraceExit, 1); | 341 frame_->CallRuntime(Runtime::kTraceExit, 1); |
350 } | 342 } |
351 | 343 |
352 // Add a label for checking the size of the code used for returning. | 344 // Add a label for checking the size of the code used for returning. |
353 Label check_exit_codesize; | 345 Label check_exit_codesize; |
354 masm_->bind(&check_exit_codesize); | 346 masm_->bind(&check_exit_codesize); |
355 | 347 |
356 // Calculate the exact length of the return sequence and make sure that | 348 // Calculate the exact length of the return sequence and make sure that |
357 // the constant pool is not emitted inside of the return sequence. | 349 // the constant pool is not emitted inside of the return sequence. |
358 int32_t sp_delta = (scope_->num_parameters() + 1) * kPointerSize; | 350 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize; |
359 int return_sequence_length = Assembler::kJSReturnSequenceLength; | 351 int return_sequence_length = Assembler::kJSReturnSequenceLength; |
360 if (!masm_->ImmediateFitsAddrMode1Instruction(sp_delta)) { | 352 if (!masm_->ImmediateFitsAddrMode1Instruction(sp_delta)) { |
361 // Additional mov instruction generated. | 353 // Additional mov instruction generated. |
362 return_sequence_length++; | 354 return_sequence_length++; |
363 } | 355 } |
364 masm_->BlockConstPoolFor(return_sequence_length); | 356 masm_->BlockConstPoolFor(return_sequence_length); |
365 | 357 |
366 // Tear down the frame which will restore the caller's frame pointer and | 358 // Tear down the frame which will restore the caller's frame pointer and |
367 // the link register. | 359 // the link register. |
368 frame_->Exit(); | 360 frame_->Exit(); |
(...skipping 19 matching lines...)
388 ASSERT(!function_return_is_shadowed_); | 380 ASSERT(!function_return_is_shadowed_); |
389 function_return_.Unuse(); | 381 function_return_.Unuse(); |
390 DeleteFrame(); | 382 DeleteFrame(); |
391 | 383 |
392 // Process any deferred code using the register allocator. | 384 // Process any deferred code using the register allocator. |
393 if (!HasStackOverflow()) { | 385 if (!HasStackOverflow()) { |
394 ProcessDeferred(); | 386 ProcessDeferred(); |
395 } | 387 } |
396 | 388 |
397 allocator_ = NULL; | 389 allocator_ = NULL; |
398 scope_ = NULL; | |
399 } | 390 } |
400 | 391 |
401 | 392 |
402 MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { | 393 MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { |
403 // Currently, this assertion will fail if we try to assign to | 394 // Currently, this assertion will fail if we try to assign to |
404 // a constant variable that is constant because it is read-only | 395 // a constant variable that is constant because it is read-only |
405 // (such as the variable referring to a named function expression). | 396 // (such as the variable referring to a named function expression). |
406 // We need to implement assignments to read-only variables. | 397 // We need to implement assignments to read-only variables. |
407 // Ideally, we should do this during AST generation (by converting | 398 // Ideally, we should do this during AST generation (by converting |
408 // such assignments into expression statements); however, in general | 399 // such assignments into expression statements); however, in general |
(...skipping 1925 matching lines...)
2334 | 2325 |
2335 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { | 2326 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { |
2336 #ifdef DEBUG | 2327 #ifdef DEBUG |
2337 int original_height = frame_->height(); | 2328 int original_height = frame_->height(); |
2338 #endif | 2329 #endif |
2339 VirtualFrame::SpilledScope spilled_scope; | 2330 VirtualFrame::SpilledScope spilled_scope; |
2340 Comment cmnt(masm_, "[ FunctionLiteral"); | 2331 Comment cmnt(masm_, "[ FunctionLiteral"); |
2341 | 2332 |
2342 // Build the function boilerplate and instantiate it. | 2333 // Build the function boilerplate and instantiate it. |
2343 Handle<JSFunction> boilerplate = | 2334 Handle<JSFunction> boilerplate = |
2344 Compiler::BuildBoilerplate(node, script_, this); | 2335 Compiler::BuildBoilerplate(node, script(), this); |
2345 // Check for stack-overflow exception. | 2336 // Check for stack-overflow exception. |
2346 if (HasStackOverflow()) { | 2337 if (HasStackOverflow()) { |
2347 ASSERT(frame_->height() == original_height); | 2338 ASSERT(frame_->height() == original_height); |
2348 return; | 2339 return; |
2349 } | 2340 } |
2350 InstantiateBoilerplate(boilerplate); | 2341 InstantiateBoilerplate(boilerplate); |
2351 ASSERT(frame_->height() == original_height + 1); | 2342 ASSERT(frame_->height() == original_height + 1); |
2352 } | 2343 } |
2353 | 2344 |
2354 | 2345 |
(...skipping 1157 matching lines...)
3512 cc_reg_ = eq; | 3503 cc_reg_ = eq; |
3513 } | 3504 } |
3514 | 3505 |
3515 | 3506 |
3516 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { | 3507 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { |
3517 VirtualFrame::SpilledScope spilled_scope; | 3508 VirtualFrame::SpilledScope spilled_scope; |
3518 ASSERT(args->length() == 0); | 3509 ASSERT(args->length() == 0); |
3519 | 3510 |
3520 // Seed the result with the formal parameters count, which will be used | 3511 // Seed the result with the formal parameters count, which will be used |
3521 // in case no arguments adaptor frame is found below the current frame. | 3512 // in case no arguments adaptor frame is found below the current frame. |
3522 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); | 3513 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters()))); |
3523 | 3514 |
3524 // Call the shared stub to get to the arguments.length. | 3515 // Call the shared stub to get to the arguments.length. |
3525 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH); | 3516 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH); |
3526 frame_->CallStub(&stub, 0); | 3517 frame_->CallStub(&stub, 0); |
3527 frame_->EmitPush(r0); | 3518 frame_->EmitPush(r0); |
3528 } | 3519 } |
3529 | 3520 |
3530 | 3521 |
3531 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { | 3522 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { |
3532 VirtualFrame::SpilledScope spilled_scope; | 3523 VirtualFrame::SpilledScope spilled_scope; |
3533 ASSERT(args->length() == 1); | 3524 ASSERT(args->length() == 1); |
3534 | 3525 |
3535 // Satisfy contract with ArgumentsAccessStub: | 3526 // Satisfy contract with ArgumentsAccessStub: |
3536 // Load the key into r1 and the formal parameters count into r0. | 3527 // Load the key into r1 and the formal parameters count into r0. |
3537 LoadAndSpill(args->at(0)); | 3528 LoadAndSpill(args->at(0)); |
3538 frame_->EmitPop(r1); | 3529 frame_->EmitPop(r1); |
3539 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); | 3530 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters()))); |
3540 | 3531 |
3541 // Call the shared stub to get to arguments[key]. | 3532 // Call the shared stub to get to arguments[key]. |
3542 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); | 3533 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); |
3543 frame_->CallStub(&stub, 0); | 3534 frame_->CallStub(&stub, 0); |
3544 frame_->EmitPush(r0); | 3535 frame_->EmitPush(r0); |
3545 } | 3536 } |
3546 | 3537 |
3547 | 3538 |
3548 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { | 3539 void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { |
3549 VirtualFrame::SpilledScope spilled_scope; | 3540 VirtualFrame::SpilledScope spilled_scope; |
(...skipping 3377 matching lines...)
6927 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 6918 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
6928 // tagged as a small integer. | 6919 // tagged as a small integer. |
6929 __ bind(&runtime); | 6920 __ bind(&runtime); |
6930 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1); | 6921 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1); |
6931 } | 6922 } |
6932 | 6923 |
6933 | 6924 |
6934 #undef __ | 6925 #undef __ |
6935 | 6926 |
6936 } } // namespace v8::internal | 6927 } } // namespace v8::internal |