OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 228 matching lines...) |
239 // and rcx. | 239 // and rcx. |
240 static void LoadAsIntegers(MacroAssembler* masm, | 240 static void LoadAsIntegers(MacroAssembler* masm, |
241 bool use_sse3, | 241 bool use_sse3, |
242 Label* operand_conversion_failure); | 242 Label* operand_conversion_failure); |
243 }; | 243 }; |
244 | 244 |
245 | 245 |
246 // ----------------------------------------------------------------------------- | 246 // ----------------------------------------------------------------------------- |
247 // CodeGenerator implementation. | 247 // CodeGenerator implementation. |
248 | 248 |
249 CodeGenerator::CodeGenerator(MacroAssembler* masm, | 249 CodeGenerator::CodeGenerator(MacroAssembler* masm) |
250 Handle<Script> script, | 250 : deferred_(8), |
251 bool is_eval) | |
252 : is_eval_(is_eval), | |
253 script_(script), | |
254 deferred_(8), | |
255 masm_(masm), | 251 masm_(masm), |
256 scope_(NULL), | 252 info_(NULL), |
257 frame_(NULL), | 253 frame_(NULL), |
258 allocator_(NULL), | 254 allocator_(NULL), |
259 state_(NULL), | 255 state_(NULL), |
260 loop_nesting_(0), | 256 loop_nesting_(0), |
261 function_return_is_shadowed_(false), | 257 function_return_is_shadowed_(false), |
262 in_spilled_code_(false) { | 258 in_spilled_code_(false) { |
263 } | 259 } |
264 | 260 |
265 | 261 |
| 262 Scope* CodeGenerator::scope() { return info_->function()->scope(); } |
| 263 |
| 264 |
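Note on the shape of this change: CodeGenerator stops caching script_, is_eval_, and scope_ and instead keeps a single CompilationInfo pointer, installed at the top of Generate(); scope(), script(), and is_eval() become accessors that forward to it. Below is a minimal standalone sketch of that ownership pattern. The class shapes are simplified stand-ins for illustration, not the real V8 declarations.

#include <cassert>
#include <cstdio>

// Simplified stand-ins for the types involved in the refactor.
struct Scope { int num_parameters; };
struct FunctionLiteral { Scope* scope; };

// Per-compilation inputs now travel together in one object.
struct CompilationInfo {
  FunctionLiteral* function;
  bool is_eval;
};

class CodeGenerator {
 public:
  CodeGenerator() : info_(nullptr) {}

  void Generate(CompilationInfo* info) {
    info_ = info;  // was: scope_ = function->scope(); is_eval_ = is_eval; ...
    // ... emit code, consulting scope() and is_eval() as needed ...
    std::printf("params=%d eval=%d\n", scope()->num_parameters, is_eval());
  }

  // Accessors forward to the CompilationInfo instead of cached fields.
  Scope* scope() { assert(info_ != nullptr); return info_->function->scope; }
  bool is_eval() { return info_->is_eval; }

 private:
  CompilationInfo* info_;
};

int main() {
  Scope scope{2};
  FunctionLiteral fn{&scope};
  CompilationInfo info{&fn, false};
  CodeGenerator cgen;
  cgen.Generate(&info);
  return 0;
}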
266 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 265 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
267 // Call the runtime to declare the globals. The inevitable call | 266 // Call the runtime to declare the globals. The inevitable call |
268 // will sync frame elements to memory anyway, so we do it eagerly to | 267 // will sync frame elements to memory anyway, so we do it eagerly to |
269 // allow us to push the arguments directly into place. | 268 // allow us to push the arguments directly into place. |
270 frame_->SyncRange(0, frame_->element_count() - 1); | 269 frame_->SyncRange(0, frame_->element_count() - 1); |
271 | 270 |
272 __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT); | 271 __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT); |
273 frame_->EmitPush(rsi); // The context is the first argument. | 272 frame_->EmitPush(rsi); // The context is the first argument. |
274 frame_->EmitPush(kScratchRegister); | 273 frame_->EmitPush(kScratchRegister); |
275 frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0)); | 274 frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0)); |
276 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3); | 275 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3); |
277 // Return value is ignored. | 276 // Return value is ignored. |
278 } | 277 } |
279 | 278 |
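DeclareGlobals syncs the whole virtual frame before pushing, because the runtime call would force every frame element to memory anyway; syncing first lets the three arguments (context, pairs, is_eval smi) be pushed straight onto the real stack. The toy model below shows a frame whose elements may be unsynced until a SyncRange call backs them with memory; it is illustrative only and not the real VirtualFrame interface.

#include <cstdio>
#include <vector>

// Toy virtual frame: elements may be "unsynced" (live only in a register or
// as a constant) until SyncRange writes them to their stack slots.
class ToyVirtualFrame {
 public:
  void Push(int value) { elements_.push_back({value, false}); }
  int element_count() const { return static_cast<int>(elements_.size()); }

  void SyncRange(int begin, int end) {
    for (int i = begin; i <= end; i++) {
      // In the real code an unsynced element causes a store to its stack slot.
      elements_[i].synced = true;
    }
  }

  bool AllSynced() const {
    for (const Element& e : elements_) {
      if (!e.synced) return false;
    }
    return true;
  }

 private:
  struct Element { int value; bool synced; };
  std::vector<Element> elements_;
};

int main() {
  ToyVirtualFrame frame;
  frame.Push(1);
  frame.Push(2);
  frame.SyncRange(0, frame.element_count() - 1);  // as in DeclareGlobals
  std::printf("all synced: %d\n", AllSyncedCheck(frame));
  return 0;
}

// Small helper kept separate so main stays close to the DeclareGlobals shape.
static int AllSyncedCheck(const ToyVirtualFrame& frame) {
  return frame.AllSynced() ? 1 : 0;
}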
280 | 279 |
281 void CodeGenerator::Generate(FunctionLiteral* function, | 280 void CodeGenerator::Generate(CompilationInfo* info, Mode mode) { |
282 Mode mode, | |
283 CompilationInfo* info) { | |
284 // Record the position for debugging purposes. | 281 // Record the position for debugging purposes. |
285 CodeForFunctionPosition(function); | 282 CodeForFunctionPosition(info->function()); |
286 ZoneList<Statement*>* body = function->body(); | |
287 | 283 |
288 // Initialize state. | 284 // Initialize state. |
289 ASSERT(scope_ == NULL); | 285 info_ = info; |
290 scope_ = function->scope(); | |
291 ASSERT(allocator_ == NULL); | 286 ASSERT(allocator_ == NULL); |
292 RegisterAllocator register_allocator(this); | 287 RegisterAllocator register_allocator(this); |
293 allocator_ = ®ister_allocator; | 288 allocator_ = ®ister_allocator; |
294 ASSERT(frame_ == NULL); | 289 ASSERT(frame_ == NULL); |
295 frame_ = new VirtualFrame(); | 290 frame_ = new VirtualFrame(); |
296 set_in_spilled_code(false); | 291 set_in_spilled_code(false); |
297 | 292 |
298 // Adjust for function-level loop nesting. | 293 // Adjust for function-level loop nesting. |
299 loop_nesting_ += info->loop_nesting(); | 294 loop_nesting_ += info->loop_nesting(); |
300 | 295 |
301 JumpTarget::set_compiling_deferred_code(false); | 296 JumpTarget::set_compiling_deferred_code(false); |
302 | 297 |
303 #ifdef DEBUG | 298 #ifdef DEBUG |
304 if (strlen(FLAG_stop_at) > 0 && | 299 if (strlen(FLAG_stop_at) > 0 && |
305 function->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { | 300 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
306 frame_->SpillAll(); | 301 frame_->SpillAll(); |
307 __ int3(); | 302 __ int3(); |
308 } | 303 } |
309 #endif | 304 #endif |
310 | 305 |
311 // New scope to get automatic timing calculation. | 306 // New scope to get automatic timing calculation. |
312 { // NOLINT | 307 { // NOLINT |
313 HistogramTimerScope codegen_timer(&Counters::code_generation); | 308 HistogramTimerScope codegen_timer(&Counters::code_generation); |
314 CodeGenState state(this); | 309 CodeGenState state(this); |
315 | 310 |
316 // Entry: | 311 // Entry: |
317 // Stack: receiver, arguments, return address. | 312 // Stack: receiver, arguments, return address. |
318 // rbp: caller's frame pointer | 313 // rbp: caller's frame pointer |
319 // rsp: stack pointer | 314 // rsp: stack pointer |
320 // rdi: called JS function | 315 // rdi: called JS function |
321 // rsi: callee's context | 316 // rsi: callee's context |
322 allocator_->Initialize(); | 317 allocator_->Initialize(); |
323 | 318 |
324 if (mode == PRIMARY) { | 319 if (mode == PRIMARY) { |
325 frame_->Enter(); | 320 frame_->Enter(); |
326 | 321 |
327 // Allocate space for locals and initialize them. | 322 // Allocate space for locals and initialize them. |
328 frame_->AllocateStackSlots(); | 323 frame_->AllocateStackSlots(); |
329 | 324 |
330 // Allocate the local context if needed. | 325 // Allocate the local context if needed. |
331 int heap_slots = scope_->num_heap_slots(); | 326 int heap_slots = scope()->num_heap_slots(); |
332 if (heap_slots > 0) { | 327 if (heap_slots > 0) { |
333 Comment cmnt(masm_, "[ allocate local context"); | 328 Comment cmnt(masm_, "[ allocate local context"); |
334 // Allocate local context. | 329 // Allocate local context. |
335 // Get outer context and create a new context based on it. | 330 // Get outer context and create a new context based on it. |
336 frame_->PushFunction(); | 331 frame_->PushFunction(); |
337 Result context; | 332 Result context; |
338 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 333 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
339 FastNewContextStub stub(heap_slots); | 334 FastNewContextStub stub(heap_slots); |
340 context = frame_->CallStub(&stub, 1); | 335 context = frame_->CallStub(&stub, 1); |
341 } else { | 336 } else { |
342 context = frame_->CallRuntime(Runtime::kNewContext, 1); | 337 context = frame_->CallRuntime(Runtime::kNewContext, 1); |
343 } | 338 } |
344 | 339 |
345 // Update context local. | 340 // Update context local. |
346 frame_->SaveContextRegister(); | 341 frame_->SaveContextRegister(); |
347 | 342 |
348 // Verify that the runtime call result and rsi agree. | 343 // Verify that the runtime call result and rsi agree. |
349 if (FLAG_debug_code) { | 344 if (FLAG_debug_code) { |
350 __ cmpq(context.reg(), rsi); | 345 __ cmpq(context.reg(), rsi); |
351 __ Assert(equal, "Runtime::NewContext should end up in rsi"); | 346 __ Assert(equal, "Runtime::NewContext should end up in rsi"); |
352 } | 347 } |
353 } | 348 } |
354 | 349 |
355 // TODO(1241774): Improve this code: | 350 // TODO(1241774): Improve this code: |
356 // 1) only needed if we have a context | 351 // 1) only needed if we have a context |
357 // 2) no need to recompute context ptr every single time | 352 // 2) no need to recompute context ptr every single time |
358 // 3) don't copy parameter operand code from SlotOperand! | 353 // 3) don't copy parameter operand code from SlotOperand! |
359 { | 354 { |
360 Comment cmnt2(masm_, "[ copy context parameters into .context"); | 355 Comment cmnt2(masm_, "[ copy context parameters into .context"); |
361 | |
362 // Note that iteration order is relevant here! If we have the same | 356 // Note that iteration order is relevant here! If we have the same |
363 // parameter twice (e.g., function (x, y, x)), and that parameter | 357 // parameter twice (e.g., function (x, y, x)), and that parameter |
364 // needs to be copied into the context, it must be the last argument | 358 // needs to be copied into the context, it must be the last argument |
365 // passed to the parameter that needs to be copied. This is a rare | 359 // passed to the parameter that needs to be copied. This is a rare |
366 // case so we don't check for it, instead we rely on the copying | 360 // case so we don't check for it, instead we rely on the copying |
367 // order: such a parameter is copied repeatedly into the same | 361 // order: such a parameter is copied repeatedly into the same |
368 // context location and thus the last value is what is seen inside | 362 // context location and thus the last value is what is seen inside |
369 // the function. | 363 // the function. |
370 for (int i = 0; i < scope_->num_parameters(); i++) { | 364 for (int i = 0; i < scope()->num_parameters(); i++) { |
371 Variable* par = scope_->parameter(i); | 365 Variable* par = scope()->parameter(i); |
372 Slot* slot = par->slot(); | 366 Slot* slot = par->slot(); |
373 if (slot != NULL && slot->type() == Slot::CONTEXT) { | 367 if (slot != NULL && slot->type() == Slot::CONTEXT) { |
374 // The use of SlotOperand below is safe in unspilled code | 368 // The use of SlotOperand below is safe in unspilled code |
375 // because the slot is guaranteed to be a context slot. | 369 // because the slot is guaranteed to be a context slot. |
376 // | 370 // |
377 // There are no parameters in the global scope. | 371 // There are no parameters in the global scope. |
378 ASSERT(!scope_->is_global_scope()); | 372 ASSERT(!scope()->is_global_scope()); |
379 frame_->PushParameterAt(i); | 373 frame_->PushParameterAt(i); |
380 Result value = frame_->Pop(); | 374 Result value = frame_->Pop(); |
381 value.ToRegister(); | 375 value.ToRegister(); |
382 | 376 |
383 // SlotOperand loads context.reg() with the context object | 377 // SlotOperand loads context.reg() with the context object |
384 // stored to, used below in RecordWrite. | 378 // stored to, used below in RecordWrite. |
385 Result context = allocator_->Allocate(); | 379 Result context = allocator_->Allocate(); |
386 ASSERT(context.is_valid()); | 380 ASSERT(context.is_valid()); |
387 __ movq(SlotOperand(slot, context.reg()), value.reg()); | 381 __ movq(SlotOperand(slot, context.reg()), value.reg()); |
388 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; | 382 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; |
389 Result scratch = allocator_->Allocate(); | 383 Result scratch = allocator_->Allocate(); |
390 ASSERT(scratch.is_valid()); | 384 ASSERT(scratch.is_valid()); |
391 frame_->Spill(context.reg()); | 385 frame_->Spill(context.reg()); |
392 frame_->Spill(value.reg()); | 386 frame_->Spill(value.reg()); |
393 __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg()); | 387 __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg()); |
394 } | 388 } |
395 } | 389 } |
396 } | 390 } |
397 | 391 |
398 // Store the arguments object. This must happen after context | 392 // Store the arguments object. This must happen after context |
399 // initialization because the arguments object may be stored in | 393 // initialization because the arguments object may be stored in |
400 // the context. | 394 // the context. |
401 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) { | 395 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) { |
402 StoreArgumentsObject(true); | 396 StoreArgumentsObject(true); |
403 } | 397 } |
404 | 398 |
405 // Initialize ThisFunction reference if present. | 399 // Initialize ThisFunction reference if present. |
406 if (scope_->is_function_scope() && scope_->function() != NULL) { | 400 if (scope()->is_function_scope() && scope()->function() != NULL) { |
407 frame_->Push(Factory::the_hole_value()); | 401 frame_->Push(Factory::the_hole_value()); |
408 StoreToSlot(scope_->function()->slot(), NOT_CONST_INIT); | 402 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT); |
409 } | 403 } |
410 } else { | 404 } else { |
411 // When used as the secondary compiler for splitting, rbp, rsi, | 405 // When used as the secondary compiler for splitting, rbp, rsi, |
412 // and rdi have been pushed on the stack. Adjust the virtual | 406 // and rdi have been pushed on the stack. Adjust the virtual |
413 // frame to match this state. | 407 // frame to match this state. |
414 frame_->Adjust(3); | 408 frame_->Adjust(3); |
415 allocator_->Unuse(rdi); | 409 allocator_->Unuse(rdi); |
416 } | 410 } |
417 | 411 |
418 // Initialize the function return target after the locals are set | 412 // Initialize the function return target after the locals are set |
419 // up, because it needs the expected frame height from the frame. | 413 // up, because it needs the expected frame height from the frame. |
420 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); | 414 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); |
421 function_return_is_shadowed_ = false; | 415 function_return_is_shadowed_ = false; |
422 | 416 |
423 // Generate code to 'execute' declarations and initialize functions | 417 // Generate code to 'execute' declarations and initialize functions |
424 // (source elements). In case of an illegal redeclaration we need to | 418 // (source elements). In case of an illegal redeclaration we need to |
425 // handle that instead of processing the declarations. | 419 // handle that instead of processing the declarations. |
426 if (scope_->HasIllegalRedeclaration()) { | 420 if (scope()->HasIllegalRedeclaration()) { |
427 Comment cmnt(masm_, "[ illegal redeclarations"); | 421 Comment cmnt(masm_, "[ illegal redeclarations"); |
428 scope_->VisitIllegalRedeclaration(this); | 422 scope()->VisitIllegalRedeclaration(this); |
429 } else { | 423 } else { |
430 Comment cmnt(masm_, "[ declarations"); | 424 Comment cmnt(masm_, "[ declarations"); |
431 ProcessDeclarations(scope_->declarations()); | 425 ProcessDeclarations(scope()->declarations()); |
432 // Bail out if a stack-overflow exception occurred when processing | 426 // Bail out if a stack-overflow exception occurred when processing |
433 // declarations. | 427 // declarations. |
434 if (HasStackOverflow()) return; | 428 if (HasStackOverflow()) return; |
435 } | 429 } |
436 | 430 |
437 if (FLAG_trace) { | 431 if (FLAG_trace) { |
438 frame_->CallRuntime(Runtime::kTraceEnter, 0); | 432 frame_->CallRuntime(Runtime::kTraceEnter, 0); |
439 // Ignore the return value. | 433 // Ignore the return value. |
440 } | 434 } |
441 CheckStack(); | 435 CheckStack(); |
442 | 436 |
443 // Compile the body of the function in a vanilla state. Don't | 437 // Compile the body of the function in a vanilla state. Don't |
444 // bother compiling all the code if the scope has an illegal | 438 // bother compiling all the code if the scope has an illegal |
445 // redeclaration. | 439 // redeclaration. |
446 if (!scope_->HasIllegalRedeclaration()) { | 440 if (!scope()->HasIllegalRedeclaration()) { |
447 Comment cmnt(masm_, "[ function body"); | 441 Comment cmnt(masm_, "[ function body"); |
448 #ifdef DEBUG | 442 #ifdef DEBUG |
449 bool is_builtin = Bootstrapper::IsActive(); | 443 bool is_builtin = Bootstrapper::IsActive(); |
450 bool should_trace = | 444 bool should_trace = |
451 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; | 445 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; |
452 if (should_trace) { | 446 if (should_trace) { |
453 frame_->CallRuntime(Runtime::kDebugTrace, 0); | 447 frame_->CallRuntime(Runtime::kDebugTrace, 0); |
454 // Ignore the return value. | 448 // Ignore the return value. |
455 } | 449 } |
456 #endif | 450 #endif |
457 VisitStatements(body); | 451 VisitStatements(info->function()->body()); |
458 | 452 |
459 // Handle the return from the function. | 453 // Handle the return from the function. |
460 if (has_valid_frame()) { | 454 if (has_valid_frame()) { |
461 // If there is a valid frame, control flow can fall off the end of | 455 // If there is a valid frame, control flow can fall off the end of |
462 // the body. In that case there is an implicit return statement. | 456 // the body. In that case there is an implicit return statement. |
463 ASSERT(!function_return_is_shadowed_); | 457 ASSERT(!function_return_is_shadowed_); |
464 CodeForReturnPosition(function); | 458 CodeForReturnPosition(info->function()); |
465 frame_->PrepareForReturn(); | 459 frame_->PrepareForReturn(); |
466 Result undefined(Factory::undefined_value()); | 460 Result undefined(Factory::undefined_value()); |
467 if (function_return_.is_bound()) { | 461 if (function_return_.is_bound()) { |
468 function_return_.Jump(&undefined); | 462 function_return_.Jump(&undefined); |
469 } else { | 463 } else { |
470 function_return_.Bind(&undefined); | 464 function_return_.Bind(&undefined); |
471 GenerateReturnSequence(&undefined); | 465 GenerateReturnSequence(&undefined); |
472 } | 466 } |
473 } else if (function_return_.is_linked()) { | 467 } else if (function_return_.is_linked()) { |
474 // If the return target has dangling jumps to it, then we have not | 468 // If the return target has dangling jumps to it, then we have not |
(...skipping 22 matching lines...) |
497 if (!HasStackOverflow()) { | 491 if (!HasStackOverflow()) { |
498 HistogramTimerScope deferred_timer(&Counters::deferred_code_generation); | 492 HistogramTimerScope deferred_timer(&Counters::deferred_code_generation); |
499 JumpTarget::set_compiling_deferred_code(true); | 493 JumpTarget::set_compiling_deferred_code(true); |
500 ProcessDeferred(); | 494 ProcessDeferred(); |
501 JumpTarget::set_compiling_deferred_code(false); | 495 JumpTarget::set_compiling_deferred_code(false); |
502 } | 496 } |
503 | 497 |
504 // There is no need to delete the register allocator, it is a | 498 // There is no need to delete the register allocator, it is a |
505 // stack-allocated local. | 499 // stack-allocated local. |
506 allocator_ = NULL; | 500 allocator_ = NULL; |
507 scope_ = NULL; | |
508 } | 501 } |
509 | 502 |
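The parameter-copying loop in Generate() stores a pointer into a heap-allocated context object and then calls RecordWrite so the garbage collector's write barrier sees the store; both registers are spilled first because the barrier may clobber them. The following standalone model shows the store-then-record-write pattern; the card-marking scheme and names are illustrative assumptions, not V8's actual barrier.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Toy heap object: a fixed array of pointer slots plus a dirty-card bitmap.
struct ToyContext {
  static const int kSlots = 8;
  void* slots[kSlots];
  uint8_t dirty[kSlots];  // one "card" per slot, for simplicity
};

// Analogue of: __ movq(SlotOperand(slot, context.reg()), value.reg());
void StoreSlot(ToyContext* context, int index, void* value) {
  context->slots[index] = value;
}

// Analogue of: __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
// it tells the collector that a pointer field of `context` was updated.
void RecordWrite(ToyContext* context, int index) {
  context->dirty[index] = 1;
}

int main() {
  ToyContext context;
  std::memset(&context, 0, sizeof(context));
  int parameter_value = 42;

  StoreSlot(&context, 3, &parameter_value);  // the raw store
  RecordWrite(&context, 3);                  // then the barrier bookkeeping

  std::printf("slot 3 dirty: %d\n", context.dirty[3]);
  return 0;
}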
510 void CodeGenerator::GenerateReturnSequence(Result* return_value) { | 503 void CodeGenerator::GenerateReturnSequence(Result* return_value) { |
511 // The return value is a live (but not currently reference counted) | 504 // The return value is a live (but not currently reference counted) |
512 // reference to rax. This is safe because the current frame does not | 505 // reference to rax. This is safe because the current frame does not |
513 // contain a reference to rax (it is prepared for the return by spilling | 506 // contain a reference to rax (it is prepared for the return by spilling |
514 // all registers). | 507 // all registers). |
515 if (FLAG_trace) { | 508 if (FLAG_trace) { |
516 frame_->Push(return_value); | 509 frame_->Push(return_value); |
517 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1); | 510 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1); |
518 } | 511 } |
519 return_value->ToRegister(rax); | 512 return_value->ToRegister(rax); |
520 | 513 |
521 // Add a label for checking the size of the code used for returning. | 514 // Add a label for checking the size of the code used for returning. |
522 #ifdef DEBUG | 515 #ifdef DEBUG |
523 Label check_exit_codesize; | 516 Label check_exit_codesize; |
524 masm_->bind(&check_exit_codesize); | 517 masm_->bind(&check_exit_codesize); |
525 #endif | 518 #endif |
526 | 519 |
527 // Leave the frame and return popping the arguments and the | 520 // Leave the frame and return popping the arguments and the |
528 // receiver. | 521 // receiver. |
529 frame_->Exit(); | 522 frame_->Exit(); |
530 masm_->ret((scope_->num_parameters() + 1) * kPointerSize); | 523 masm_->ret((scope()->num_parameters() + 1) * kPointerSize); |
531 #ifdef ENABLE_DEBUGGER_SUPPORT | 524 #ifdef ENABLE_DEBUGGER_SUPPORT |
532 // Add padding that will be overwritten by a debugger breakpoint. | 525 // Add padding that will be overwritten by a debugger breakpoint. |
533 // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k" | 526 // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k" |
534 // with length 7 (3 + 1 + 3). | 527 // with length 7 (3 + 1 + 3). |
535 const int kPadding = Assembler::kJSReturnSequenceLength - 7; | 528 const int kPadding = Assembler::kJSReturnSequenceLength - 7; |
536 for (int i = 0; i < kPadding; ++i) { | 529 for (int i = 0; i < kPadding; ++i) { |
537 masm_->int3(); | 530 masm_->int3(); |
538 } | 531 } |
539 // Check that the size of the code used for returning matches what is | 532 // Check that the size of the code used for returning matches what is |
540 // expected by the debugger. | 533 // expected by the debugger. |
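The padding arithmetic above: frame_->Exit() plus the ret emits 3 + 1 + 3 = 7 bytes, and kPadding fills the gap up to Assembler::kJSReturnSequenceLength so the debugger can later patch the whole sequence with a breakpoint. A small worked example follows; the value 13 used for the sequence length is an assumption for illustration only, not the constant's actual value.

#include <cstdio>

int main() {
  // Bytes emitted by the x64 return sequence:
  //   movq rsp, rbp  -> 3 bytes
  //   pop  rbp       -> 1 byte
  //   ret  k         -> 3 bytes
  const int kEmitted = 3 + 1 + 3;          // 7
  const int kAssumedSequenceLength = 13;   // stand-in for
                                           // Assembler::kJSReturnSequenceLength
  const int kPadding = kAssumedSequenceLength - kEmitted;
  std::printf("int3 padding bytes: %d\n", kPadding);  // 6 under this assumption
  return 0;
}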
(...skipping 147 matching lines...) |
688 Load(applicand); | 681 Load(applicand); |
689 Handle<String> name = Factory::LookupAsciiSymbol("apply"); | 682 Handle<String> name = Factory::LookupAsciiSymbol("apply"); |
690 frame()->Push(name); | 683 frame()->Push(name); |
691 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET); | 684 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET); |
692 __ nop(); | 685 __ nop(); |
693 frame()->Push(&answer); | 686 frame()->Push(&answer); |
694 | 687 |
695 // Load the receiver and the existing arguments object onto the | 688 // Load the receiver and the existing arguments object onto the |
696 // expression stack. Avoid allocating the arguments object here. | 689 // expression stack. Avoid allocating the arguments object here. |
697 Load(receiver); | 690 Load(receiver); |
698 LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); | 691 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); |
699 | 692 |
700 // Emit the source position information after having loaded the | 693 // Emit the source position information after having loaded the |
701 // receiver and the arguments. | 694 // receiver and the arguments. |
702 CodeForSourcePosition(position); | 695 CodeForSourcePosition(position); |
703 // Contents of frame at this point: | 696 // Contents of frame at this point: |
704 // Frame[0]: arguments object of the current function or the hole. | 697 // Frame[0]: arguments object of the current function or the hole. |
705 // Frame[1]: receiver | 698 // Frame[1]: receiver |
706 // Frame[2]: applicand.apply | 699 // Frame[2]: applicand.apply |
707 // Frame[3]: applicand. | 700 // Frame[3]: applicand. |
708 | 701 |
(...skipping 57 matching lines...) |
766 | 759 |
767 // Copy the arguments to this function possibly from the | 760 // Copy the arguments to this function possibly from the |
768 // adaptor frame below it. | 761 // adaptor frame below it. |
769 Label invoke, adapted; | 762 Label invoke, adapted; |
770 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 763 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
771 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset), | 764 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset), |
772 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 765 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
773 __ j(equal, &adapted); | 766 __ j(equal, &adapted); |
774 | 767 |
775 // No arguments adaptor frame. Copy fixed number of arguments. | 768 // No arguments adaptor frame. Copy fixed number of arguments. |
776 __ movq(rax, Immediate(scope_->num_parameters())); | 769 __ movq(rax, Immediate(scope()->num_parameters())); |
777 for (int i = 0; i < scope_->num_parameters(); i++) { | 770 for (int i = 0; i < scope()->num_parameters(); i++) { |
778 __ push(frame_->ParameterAt(i)); | 771 __ push(frame_->ParameterAt(i)); |
779 } | 772 } |
780 __ jmp(&invoke); | 773 __ jmp(&invoke); |
781 | 774 |
782 // Arguments adaptor frame present. Copy arguments from there, but | 775 // Arguments adaptor frame present. Copy arguments from there, but |
783 // avoid copying too many arguments to avoid stack overflows. | 776 // avoid copying too many arguments to avoid stack overflows. |
784 __ bind(&adapted); | 777 __ bind(&adapted); |
785 static const uint32_t kArgumentsLimit = 1 * KB; | 778 static const uint32_t kArgumentsLimit = 1 * KB; |
786 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 779 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
787 __ SmiToInteger32(rax, rax); | 780 __ SmiToInteger32(rax, rax); |
(...skipping 1468 matching lines...) |
2256 frame_->Push(&result); | 2249 frame_->Push(&result); |
2257 } | 2250 } |
2258 } | 2251 } |
2259 | 2252 |
2260 | 2253 |
2261 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { | 2254 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { |
2262 Comment cmnt(masm_, "[ FunctionLiteral"); | 2255 Comment cmnt(masm_, "[ FunctionLiteral"); |
2263 | 2256 |
2264 // Build the function boilerplate and instantiate it. | 2257 // Build the function boilerplate and instantiate it. |
2265 Handle<JSFunction> boilerplate = | 2258 Handle<JSFunction> boilerplate = |
2266 Compiler::BuildBoilerplate(node, script_, this); | 2259 Compiler::BuildBoilerplate(node, script(), this); |
2267 // Check for stack-overflow exception. | 2260 // Check for stack-overflow exception. |
2268 if (HasStackOverflow()) return; | 2261 if (HasStackOverflow()) return; |
2269 InstantiateBoilerplate(boilerplate); | 2262 InstantiateBoilerplate(boilerplate); |
2270 } | 2263 } |
2271 | 2264 |
2272 | 2265 |
2273 void CodeGenerator::VisitFunctionBoilerplateLiteral( | 2266 void CodeGenerator::VisitFunctionBoilerplateLiteral( |
2274 FunctionBoilerplateLiteral* node) { | 2267 FunctionBoilerplateLiteral* node) { |
2275 Comment cmnt(masm_, "[ FunctionBoilerplateLiteral"); | 2268 Comment cmnt(masm_, "[ FunctionBoilerplateLiteral"); |
2276 InstantiateBoilerplate(node->boilerplate()); | 2269 InstantiateBoilerplate(node->boilerplate()); |
(...skipping 1326 matching lines...) |
3603 | 3596 |
3604 | 3597 |
3605 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { | 3598 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { |
3606 ASSERT(args->length() == 1); | 3599 ASSERT(args->length() == 1); |
3607 | 3600 |
3608 // ArgumentsAccessStub expects the key in rdx and the formal | 3601 // ArgumentsAccessStub expects the key in rdx and the formal |
3609 // parameter count in rax. | 3602 // parameter count in rax. |
3610 Load(args->at(0)); | 3603 Load(args->at(0)); |
3611 Result key = frame_->Pop(); | 3604 Result key = frame_->Pop(); |
3612 // Explicitly create a constant result. | 3605 // Explicitly create a constant result. |
3613 Result count(Handle<Smi>(Smi::FromInt(scope_->num_parameters()))); | 3606 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters()))); |
3614 // Call the shared stub to get to arguments[key]. | 3607 // Call the shared stub to get to arguments[key]. |
3615 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); | 3608 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); |
3616 Result result = frame_->CallStub(&stub, &key, &count); | 3609 Result result = frame_->CallStub(&stub, &key, &count); |
3617 frame_->Push(&result); | 3610 frame_->Push(&result); |
3618 } | 3611 } |
3619 | 3612 |
3620 | 3613 |
3621 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { | 3614 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { |
3622 ASSERT(args->length() == 1); | 3615 ASSERT(args->length() == 1); |
3623 Load(args->at(0)); | 3616 Load(args->at(0)); |
(...skipping 88 matching lines...) |
3712 Smi::FromInt(StackFrame::CONSTRUCT)); | 3705 Smi::FromInt(StackFrame::CONSTRUCT)); |
3713 fp.Unuse(); | 3706 fp.Unuse(); |
3714 destination()->Split(equal); | 3707 destination()->Split(equal); |
3715 } | 3708 } |
3716 | 3709 |
3717 | 3710 |
3718 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { | 3711 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { |
3719 ASSERT(args->length() == 0); | 3712 ASSERT(args->length() == 0); |
3720 // ArgumentsAccessStub takes the parameter count as an input argument | 3713 // ArgumentsAccessStub takes the parameter count as an input argument |
3721 // in register rax. Create a constant result for it. | 3714 // in register rax. Create a constant result for it. |
3722 Result count(Handle<Smi>(Smi::FromInt(scope_->num_parameters()))); | 3715 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters()))); |
3723 // Call the shared stub to get to the arguments.length. | 3716 // Call the shared stub to get to the arguments.length. |
3724 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH); | 3717 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH); |
3725 Result result = frame_->CallStub(&stub, &count); | 3718 Result result = frame_->CallStub(&stub, &count); |
3726 frame_->Push(&result); | 3719 frame_->Push(&result); |
3727 } | 3720 } |
3728 | 3721 |
3729 | 3722 |
3730 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { | 3723 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { |
3731 Comment(masm_, "[ GenerateFastCharCodeAt"); | 3724 Comment(masm_, "[ GenerateFastCharCodeAt"); |
3732 ASSERT(args->length() == 2); | 3725 ASSERT(args->length() == 2); |
(...skipping 1049 matching lines...) |
4782 | 4775 |
4783 void CodeGenerator::LoadGlobalReceiver() { | 4776 void CodeGenerator::LoadGlobalReceiver() { |
4784 Result temp = allocator_->Allocate(); | 4777 Result temp = allocator_->Allocate(); |
4785 Register reg = temp.reg(); | 4778 Register reg = temp.reg(); |
4786 __ movq(reg, GlobalObject()); | 4779 __ movq(reg, GlobalObject()); |
4787 __ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset)); | 4780 __ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset)); |
4788 frame_->Push(&temp); | 4781 frame_->Push(&temp); |
4789 } | 4782 } |
4790 | 4783 |
4791 | 4784 |
4792 ArgumentsAllocationMode CodeGenerator::ArgumentsMode() const { | 4785 ArgumentsAllocationMode CodeGenerator::ArgumentsMode() { |
4793 if (scope_->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION; | 4786 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION; |
4794 ASSERT(scope_->arguments_shadow() != NULL); | 4787 ASSERT(scope()->arguments_shadow() != NULL); |
4795 // We don't want to do lazy arguments allocation for functions that | 4788 // We don't want to do lazy arguments allocation for functions that |
4796 // have heap-allocated contexts, because it interferes with the | 4789 // have heap-allocated contexts, because it interferes with the |
4797 // uninitialized const tracking in the context objects. | 4790 // uninitialized const tracking in the context objects. |
4798 return (scope_->num_heap_slots() > 0) | 4791 return (scope()->num_heap_slots() > 0) |
4799 ? EAGER_ARGUMENTS_ALLOCATION | 4792 ? EAGER_ARGUMENTS_ALLOCATION |
4800 : LAZY_ARGUMENTS_ALLOCATION; | 4793 : LAZY_ARGUMENTS_ALLOCATION; |
4801 } | 4794 } |
4802 | 4795 |
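ArgumentsMode() reduces to a three-way decision: no allocation when the function never mentions 'arguments', eager allocation when the function has a heap-allocated context (lazy allocation would interfere with uninitialized-const tracking there), and lazy allocation otherwise. Here is a standalone restatement of that decision table; the ToyScope type is a simplified stand-in for the scope queries used in the real code.

#include <cstdio>

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};

// Simplified stand-in for the scope queries used above.
struct ToyScope {
  bool mentions_arguments;  // scope()->arguments() != NULL
  int num_heap_slots;       // scope()->num_heap_slots()
};

ArgumentsAllocationMode ArgumentsMode(const ToyScope& scope) {
  if (!scope.mentions_arguments) return NO_ARGUMENTS_ALLOCATION;
  return (scope.num_heap_slots > 0) ? EAGER_ARGUMENTS_ALLOCATION
                                    : LAZY_ARGUMENTS_ALLOCATION;
}

int main() {
  ToyScope plain{false, 0};    // never touches arguments
  ToyScope closure{true, 4};   // uses arguments, has a heap-allocated context
  ToyScope simple{true, 0};    // uses arguments, stack-only locals
  std::printf("%d %d %d\n", ArgumentsMode(plain), ArgumentsMode(closure),
              ArgumentsMode(simple));  // prints: 0 1 2
  return 0;
}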
4803 | 4796 |
4804 Result CodeGenerator::StoreArgumentsObject(bool initial) { | 4797 Result CodeGenerator::StoreArgumentsObject(bool initial) { |
4805 ArgumentsAllocationMode mode = ArgumentsMode(); | 4798 ArgumentsAllocationMode mode = ArgumentsMode(); |
4806 ASSERT(mode != NO_ARGUMENTS_ALLOCATION); | 4799 ASSERT(mode != NO_ARGUMENTS_ALLOCATION); |
4807 | 4800 |
4808 Comment cmnt(masm_, "[ store arguments object"); | 4801 Comment cmnt(masm_, "[ store arguments object"); |
4809 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) { | 4802 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) { |
4810 // When using lazy arguments allocation, we store the hole value | 4803 // When using lazy arguments allocation, we store the hole value |
4811 // as a sentinel indicating that the arguments object hasn't been | 4804 // as a sentinel indicating that the arguments object hasn't been |
4812 // allocated yet. | 4805 // allocated yet. |
4813 frame_->Push(Factory::the_hole_value()); | 4806 frame_->Push(Factory::the_hole_value()); |
4814 } else { | 4807 } else { |
4815 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); | 4808 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); |
4816 frame_->PushFunction(); | 4809 frame_->PushFunction(); |
4817 frame_->PushReceiverSlotAddress(); | 4810 frame_->PushReceiverSlotAddress(); |
4818 frame_->Push(Smi::FromInt(scope_->num_parameters())); | 4811 frame_->Push(Smi::FromInt(scope()->num_parameters())); |
4819 Result result = frame_->CallStub(&stub, 3); | 4812 Result result = frame_->CallStub(&stub, 3); |
4820 frame_->Push(&result); | 4813 frame_->Push(&result); |
4821 } | 4814 } |
4822 | 4815 |
4823 | 4816 |
4824 Variable* arguments = scope_->arguments()->var(); | 4817 Variable* arguments = scope()->arguments()->var(); |
4825 Variable* shadow = scope_->arguments_shadow()->var(); | 4818 Variable* shadow = scope()->arguments_shadow()->var(); |
4826 ASSERT(arguments != NULL && arguments->slot() != NULL); | 4819 ASSERT(arguments != NULL && arguments->slot() != NULL); |
4827 ASSERT(shadow != NULL && shadow->slot() != NULL); | 4820 ASSERT(shadow != NULL && shadow->slot() != NULL); |
4828 JumpTarget done; | 4821 JumpTarget done; |
4829 bool skip_arguments = false; | 4822 bool skip_arguments = false; |
4830 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) { | 4823 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) { |
4831 // We have to skip storing into the arguments slot if it has | 4824 // We have to skip storing into the arguments slot if it has |
4832 // already been written to. This can happen if a function | 4825 // already been written to. This can happen if a function |
4833 // has a local variable named 'arguments'. | 4826 // has a local variable named 'arguments'. |
4834 LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); | 4827 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); |
4835 Result probe = frame_->Pop(); | 4828 Result probe = frame_->Pop(); |
4836 if (probe.is_constant()) { | 4829 if (probe.is_constant()) { |
4837 // We have to skip updating the arguments object if it has been | 4830 // We have to skip updating the arguments object if it has been |
4838 // assigned a proper value. | 4831 // assigned a proper value. |
4839 skip_arguments = !probe.handle()->IsTheHole(); | 4832 skip_arguments = !probe.handle()->IsTheHole(); |
4840 } else { | 4833 } else { |
4841 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex); | 4834 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex); |
4842 probe.Unuse(); | 4835 probe.Unuse(); |
4843 done.Branch(not_equal); | 4836 done.Branch(not_equal); |
4844 } | 4837 } |
(...skipping 4353 matching lines...) |
9198 // Call the function from C++. | 9191 // Call the function from C++. |
9199 return FUNCTION_CAST<ModuloFunction>(buffer); | 9192 return FUNCTION_CAST<ModuloFunction>(buffer); |
9200 } | 9193 } |
9201 | 9194 |
9202 #endif | 9195 #endif |
9203 | 9196 |
9204 | 9197 |
9205 #undef __ | 9198 #undef __ |
9206 | 9199 |
9207 } } // namespace v8::internal | 9200 } } // namespace v8::internal |