Chromium Code Reviews

Side by Side Diff: src/x64/builtins-x64.cc

Issue 7084032: Add asserts and state tracking to ensure that we do not call (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 6 months ago
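Throughout this file the patch replaces paired __ EnterInternalFrame() / __ LeaveInternalFrame() (and __ EnterConstructFrame() / __ LeaveConstructFrame()) calls with a block-scoped FrameScope object, so the macro assembler can track whether generated code is inside a frame and assert on misuse. The sketch below is only a rough illustration of that RAII idea, using hypothetical names (Assembler, ScopedFrame); it is not the actual V8 FrameScope implementation, which is introduced elsewhere in this change.

```cpp
#include <cassert>

// Hypothetical stand-ins for illustration only; the real classes touched by
// this patch are MacroAssembler and FrameScope in the V8 source tree.
struct Assembler {
  bool in_frame = false;
  void EnterFrame() { assert(!in_frame); in_frame = true; }
  void LeaveFrame() { assert(in_frame); in_frame = false; }
  // Stands in for a runtime call, which is only legal inside a frame.
  void CallRuntimeStub() { assert(in_frame); }
};

// RAII wrapper: the constructor enters the frame and the destructor leaves
// it, so a Leave call can never be forgotten, and the tracked in_frame bit
// is what the added asserts can check.
class ScopedFrame {
 public:
  explicit ScopedFrame(Assembler* masm) : masm_(masm) { masm_->EnterFrame(); }
  ~ScopedFrame() { masm_->LeaveFrame(); }
 private:
  Assembler* masm_;
};

int main() {
  Assembler masm;
  {
    ScopedFrame scope(&masm);  // mirrors FrameScope(masm, StackFrame::INTERNAL)
    masm.CallRuntimeStub();    // allowed: we are inside a frame
  }                            // frame torn down automatically here
  assert(!masm.in_frame);
  return 0;
}
```

In the diff below, every explicit enter/leave pair on the old (left) side becomes a braced block containing a FrameScope on the new (right) side.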
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 92 matching lines...)
103 RelocInfo::CODE_TARGET); 103 RelocInfo::CODE_TARGET);
104 } 104 }
105 105
106 106
107 static void Generate_JSConstructStubHelper(MacroAssembler* masm, 107 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
108 bool is_api_function, 108 bool is_api_function,
109 bool count_constructions) { 109 bool count_constructions) {
110 // Should never count constructions for api objects. 110 // Should never count constructions for api objects.
111 ASSERT(!is_api_function || !count_constructions); 111 ASSERT(!is_api_function || !count_constructions);
112 112
113 // Enter a construct frame. 113 // Enter a construct frame.
114 __ EnterConstructFrame(); 114 {
115 FrameScope scope(masm, StackFrame::CONSTRUCT);
115 116
116 // Store a smi-tagged arguments count on the stack. 117 // Store a smi-tagged arguments count on the stack.
117 __ Integer32ToSmi(rax, rax); 118 __ Integer32ToSmi(rax, rax);
118 __ push(rax); 119 __ push(rax);
119 120
120 // Push the function to invoke on the stack. 121 // Push the function to invoke on the stack.
121 __ push(rdi); 122 __ push(rdi);
122 123
123 // Try to allocate the object without transitioning into C code. If any of the 124 // Try to allocate the object without transitioning into C code. If any of
124 // preconditions is not met, the code bails out to the runtime call. 125 // the preconditions is not met, the code bails out to the runtime call.
125 Label rt_call, allocated; 126 Label rt_call, allocated;
126 if (FLAG_inline_new) { 127 if (FLAG_inline_new) {
127 Label undo_allocation; 128 Label undo_allocation;
128 129
129 #ifdef ENABLE_DEBUGGER_SUPPORT 130 #ifdef ENABLE_DEBUGGER_SUPPORT
130 ExternalReference debug_step_in_fp = 131 ExternalReference debug_step_in_fp =
131 ExternalReference::debug_step_in_fp_address(masm->isolate()); 132 ExternalReference::debug_step_in_fp_address(masm->isolate());
132 __ movq(kScratchRegister, debug_step_in_fp); 133 __ movq(kScratchRegister, debug_step_in_fp);
133 __ cmpq(Operand(kScratchRegister, 0), Immediate(0)); 134 __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
134 __ j(not_equal, &rt_call); 135 __ j(not_equal, &rt_call);
135 #endif 136 #endif
136 137
137 // Verified that the constructor is a JSFunction. 138 // Verified that the constructor is a JSFunction.
138 // Load the initial map and verify that it is in fact a map. 139 // Load the initial map and verify that it is in fact a map.
139 // rdi: constructor 140 // rdi: constructor
140 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 141 __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
141 // Will both indicate a NULL and a Smi 142 // Will both indicate a NULL and a Smi
142 ASSERT(kSmiTag == 0); 143 ASSERT(kSmiTag == 0);
143 __ JumpIfSmi(rax, &rt_call); 144 __ JumpIfSmi(rax, &rt_call);
144 // rdi: constructor 145 // rdi: constructor
145 // rax: initial map (if proven valid below) 146 // rax: initial map (if proven valid below)
146 __ CmpObjectType(rax, MAP_TYPE, rbx); 147 __ CmpObjectType(rax, MAP_TYPE, rbx);
147 __ j(not_equal, &rt_call); 148 __ j(not_equal, &rt_call);
148 149
149 // Check that the constructor is not constructing a JSFunction (see comments 150 // Check that the constructor is not constructing a JSFunction (see
150 // in Runtime_NewObject in runtime.cc). In which case the initial map's 151 // comments in Runtime_NewObject in runtime.cc). In which case the initial
151 // instance type would be JS_FUNCTION_TYPE. 152 // map's instance type would be JS_FUNCTION_TYPE.
152 // rdi: constructor 153 // rdi: constructor
153 // rax: initial map 154 // rax: initial map
154 __ CmpInstanceType(rax, JS_FUNCTION_TYPE); 155 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
155 __ j(equal, &rt_call); 156 __ j(equal, &rt_call);
156 157
157 if (count_constructions) { 158 if (count_constructions) {
158 Label allocate; 159 Label allocate;
159 // Decrease generous allocation count. 160 // Decrease generous allocation count.
160 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 161 __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
161 __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset)); 162 __ decb(FieldOperand(rcx,
163 SharedFunctionInfo::kConstructionCountOffset));
162 __ j(not_zero, &allocate); 164 __ j(not_zero, &allocate);
163 165
164 __ push(rax); 166 __ push(rax);
165 __ push(rdi); 167 __ push(rdi);
166 168
167 __ push(rdi); // constructor 169 __ push(rdi); // constructor
168 // The call will replace the stub, so the countdown is only done once. 170 // The call will replace the stub, so the countdown is only done once.
169 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); 171 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
170 172
171 __ pop(rdi); 173 __ pop(rdi);
(...skipping 34 matching lines...)
206 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); 208 __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
207 __ jmp(&entry); 209 __ jmp(&entry);
208 __ bind(&loop); 210 __ bind(&loop);
209 __ movq(Operand(rcx, 0), rdx); 211 __ movq(Operand(rcx, 0), rdx);
210 __ addq(rcx, Immediate(kPointerSize)); 212 __ addq(rcx, Immediate(kPointerSize));
211 __ bind(&entry); 213 __ bind(&entry);
212 __ cmpq(rcx, rdi); 214 __ cmpq(rcx, rdi);
213 __ j(less, &loop); 215 __ j(less, &loop);
214 } 216 }
215 217
216 // Add the object tag to make the JSObject real, so that we can continue and 218 // Add the object tag to make the JSObject real, so that we can continue
217 // jump into the continuation code at any time from now on. Any failures 219 // and jump into the continuation code at any time from now on. Any
218 // need to undo the allocation, so that the heap is in a consistent state 220 // failures need to undo the allocation, so that the heap is in a
219 // and verifiable. 221 // consistent state and verifiable.
220 // rax: initial map 222 // rax: initial map
221 // rbx: JSObject 223 // rbx: JSObject
222 // rdi: start of next object 224 // rdi: start of next object
223 __ or_(rbx, Immediate(kHeapObjectTag)); 225 __ or_(rbx, Immediate(kHeapObjectTag));
224 226
225 // Check if a non-empty properties array is needed. 227 // Check if a non-empty properties array is needed.
226 // Allocate and initialize a FixedArray if it is. 228 // Allocate and initialize a FixedArray if it is.
227 // rax: initial map 229 // rax: initial map
228 // rbx: JSObject 230 // rbx: JSObject
229 // rdi: start of next object 231 // rdi: start of next object
230 // Calculate total properties described map. 232 // Calculate total properties described map.
231 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset)); 233 __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
232 __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset)); 234 __ movzxbq(rcx,
235 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
233 __ addq(rdx, rcx); 236 __ addq(rdx, rcx);
234 // Calculate unused properties past the end of the in-object properties. 237 // Calculate unused properties past the end of the in-object properties.
235 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset)); 238 __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
236 __ subq(rdx, rcx); 239 __ subq(rdx, rcx);
237 // Done if no extra properties are to be allocated. 240 // Done if no extra properties are to be allocated.
238 __ j(zero, &allocated); 241 __ j(zero, &allocated);
239 __ Assert(positive, "Property allocation count failed."); 242 __ Assert(positive, "Property allocation count failed.");
240 243
241 // Scale the number of elements by pointer size and add the header for 244 // Scale the number of elements by pointer size and add the header for
242 // FixedArrays to the start of the next object calculation from above. 245 // FixedArrays to the start of the next object calculation from above.
(...skipping 124 matching lines...)
367 __ j(above_equal, &exit); 370 __ j(above_equal, &exit);
368 371
369 // Throw away the result of the constructor invocation and use the 372 // Throw away the result of the constructor invocation and use the
370 // on-stack receiver as the result. 373 // on-stack receiver as the result.
371 __ bind(&use_receiver); 374 __ bind(&use_receiver);
372 __ movq(rax, Operand(rsp, 0)); 375 __ movq(rax, Operand(rsp, 0));
373 376
374 // Restore the arguments count and leave the construct frame. 377 // Restore the arguments count and leave the construct frame.
375 __ bind(&exit); 378 __ bind(&exit);
376 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count 379 __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
377 __ LeaveConstructFrame(); 380
381 // Leave the construct frame.
382 }
378 383
379 // Remove caller arguments from the stack and return. 384 // Remove caller arguments from the stack and return.
380 __ pop(rcx); 385 __ pop(rcx);
381 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 386 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
382 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 387 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
383 __ push(rcx); 388 __ push(rcx);
384 Counters* counters = masm->isolate()->counters(); 389 Counters* counters = masm->isolate()->counters();
385 __ IncrementCounter(counters->constructed_objects(), 1); 390 __ IncrementCounter(counters->constructed_objects(), 1);
386 __ ret(0); 391 __ ret(0);
387 } 392 }
(...skipping 17 matching lines...)
405 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 410 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
406 bool is_construct) { 411 bool is_construct) {
407 // Expects five C++ function parameters. 412 // Expects five C++ function parameters.
408 // - Address entry (ignored) 413 // - Address entry (ignored)
409 // - JSFunction* function ( 414 // - JSFunction* function (
410 // - Object* receiver 415 // - Object* receiver
411 // - int argc 416 // - int argc
412 // - Object*** argv 417 // - Object*** argv
413 // (see Handle::Invoke in execution.cc). 418 // (see Handle::Invoke in execution.cc).
414 419
420 // Open a C++ scope for the FrameScope.
421 {
415 // Platform specific argument handling. After this, the stack contains 422 // Platform specific argument handling. After this, the stack contains
416 // an internal frame and the pushed function and receiver, and 423 // an internal frame and the pushed function and receiver, and
417 // register rax and rbx holds the argument count and argument array, 424 // register rax and rbx holds the argument count and argument array,
418 // while rdi holds the function pointer and rsi the context. 425 // while rdi holds the function pointer and rsi the context.
426
419 #ifdef _WIN64 427 #ifdef _WIN64
420 // MSVC parameters in: 428 // MSVC parameters in:
421 // rcx : entry (ignored) 429 // rcx : entry (ignored)
422 // rdx : function 430 // rdx : function
423 // r8 : receiver 431 // r8 : receiver
424 // r9 : argc 432 // r9 : argc
425 // [rsp+0x20] : argv 433 // [rsp+0x20] : argv
426 434
427 // Clear the context before we push it when entering the JS frame. 435 // Clear the context before we push it when entering the internal frame.
428 __ Set(rsi, 0); 436 __ Set(rsi, 0);
429 __ EnterInternalFrame(); 437 // Enter an internal frame.
438 FrameScope scope(masm, StackFrame::INTERNAL);
430 439
431 // Load the function context into rsi. 440 // Load the function context into rsi.
432 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset)); 441 __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
433 442
434 // Push the function and the receiver onto the stack. 443 // Push the function and the receiver onto the stack.
435 __ push(rdx); 444 __ push(rdx);
436 __ push(r8); 445 __ push(r8);
437 446
438 // Load the number of arguments and setup pointer to the arguments. 447 // Load the number of arguments and setup pointer to the arguments.
439 __ movq(rax, r9); 448 __ movq(rax, r9);
440 // Load the previous frame pointer to access C argument on stack 449 // Load the previous frame pointer to access C argument on stack
441 __ movq(kScratchRegister, Operand(rbp, 0)); 450 __ movq(kScratchRegister, Operand(rbp, 0));
442 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset)); 451 __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
443 // Load the function pointer into rdi. 452 // Load the function pointer into rdi.
444 __ movq(rdi, rdx); 453 __ movq(rdi, rdx);
445 #else // _WIN64 454 #else // _WIN64
446 // GCC parameters in: 455 // GCC parameters in:
447 // rdi : entry (ignored) 456 // rdi : entry (ignored)
448 // rsi : function 457 // rsi : function
449 // rdx : receiver 458 // rdx : receiver
450 // rcx : argc 459 // rcx : argc
451 // r8 : argv 460 // r8 : argv
452 461
453 __ movq(rdi, rsi); 462 __ movq(rdi, rsi);
454 // rdi : function 463 // rdi : function
455 464
456 // Clear the context before we push it when entering the JS frame. 465 // Clear the context before we push it when entering the internal frame.
457 __ Set(rsi, 0); 466 __ Set(rsi, 0);
458 // Enter an internal frame. 467 // Enter an internal frame.
459 __ EnterInternalFrame(); 468 FrameScope scope(masm, StackFrame::INTERNAL);
460 469
461 // Push the function and receiver and setup the context. 470 // Push the function and receiver and setup the context.
462 __ push(rdi); 471 __ push(rdi);
463 __ push(rdx); 472 __ push(rdx);
464 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 473 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
465 474
466 // Load the number of arguments and setup pointer to the arguments. 475 // Load the number of arguments and setup pointer to the arguments.
467 __ movq(rax, rcx); 476 __ movq(rax, rcx);
468 __ movq(rbx, r8); 477 __ movq(rbx, r8);
469 #endif // _WIN64 478 #endif // _WIN64
(...skipping 26 matching lines...)
496 if (is_construct) { 505 if (is_construct) {
497 // Expects rdi to hold function pointer. 506 // Expects rdi to hold function pointer.
498 __ Call(masm->isolate()->builtins()->JSConstructCall(), 507 __ Call(masm->isolate()->builtins()->JSConstructCall(),
499 RelocInfo::CODE_TARGET); 508 RelocInfo::CODE_TARGET);
500 } else { 509 } else {
501 ParameterCount actual(rax); 510 ParameterCount actual(rax);
502 // Function must be in rdi. 511 // Function must be in rdi.
503 __ InvokeFunction(rdi, actual, CALL_FUNCTION, 512 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
504 NullCallWrapper(), CALL_AS_METHOD); 513 NullCallWrapper(), CALL_AS_METHOD);
505 } 514 }
506 515 // Exit the internal frame. Notice that this also removes the empty
507 // Exit the JS frame. Notice that this also removes the empty
508 // context and the function left on the stack by the code 516 // context and the function left on the stack by the code
509 // invocation. 517 // invocation.
510 __ LeaveInternalFrame(); 518 }
519
511 // TODO(X64): Is argument correct? Is there a receiver to remove? 520 // TODO(X64): Is argument correct? Is there a receiver to remove?
512 __ ret(1 * kPointerSize); // remove receiver 521 __ ret(1 * kPointerSize); // Remove receiver.
513 } 522 }
514 523
515 524
516 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 525 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
517 Generate_JSEntryTrampolineHelper(masm, false); 526 Generate_JSEntryTrampolineHelper(masm, false);
518 } 527 }
519 528
520 529
521 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 530 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
522 Generate_JSEntryTrampolineHelper(masm, true); 531 Generate_JSEntryTrampolineHelper(masm, true);
523 } 532 }
524 533
525 534
526 void Builtins::Generate_LazyCompile(MacroAssembler* masm) { 535 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
527 // Enter an internal frame. 536 // Enter an internal frame.
528 __ EnterInternalFrame(); 537 {
538 FrameScope scope(masm, StackFrame::INTERNAL);
529 539
530 // Push a copy of the function onto the stack. 540 // Push a copy of the function onto the stack.
531 __ push(rdi); 541 __ push(rdi);
532 // Push call kind information. 542 // Push call kind information.
533 __ push(rcx); 543 __ push(rcx);
534 544
535 __ push(rdi); // Function is also the parameter to the runtime call. 545 __ push(rdi); // Function is also the parameter to the runtime call.
536 __ CallRuntime(Runtime::kLazyCompile, 1); 546 __ CallRuntime(Runtime::kLazyCompile, 1);
537 547
538 // Restore call kind information. 548 // Restore call kind information.
539 __ pop(rcx); 549 __ pop(rcx);
540 // Restore receiver. 550 // Restore receiver.
541 __ pop(rdi); 551 __ pop(rdi);
542 552
543 // Tear down temporary frame. 553 // Tear down internal frame.
544 __ LeaveInternalFrame(); 554 }
545 555
546 // Do a tail-call of the compiled function. 556 // Do a tail-call of the compiled function.
547 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); 557 __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
548 __ jmp(rax); 558 __ jmp(rax);
549 } 559 }
550 560
551 561
552 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) { 562 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
553 // Enter an internal frame. 563 // Enter an internal frame.
554 __ EnterInternalFrame(); 564 {
565 FrameScope scope(masm, StackFrame::INTERNAL);
555 566
556 // Push a copy of the function onto the stack. 567 // Push a copy of the function onto the stack.
557 __ push(rdi); 568 __ push(rdi);
558 // Push call kind information. 569 // Push call kind information.
559 __ push(rcx); 570 __ push(rcx);
560 571
561 __ push(rdi); // Function is also the parameter to the runtime call. 572 __ push(rdi); // Function is also the parameter to the runtime call.
562 __ CallRuntime(Runtime::kLazyRecompile, 1); 573 __ CallRuntime(Runtime::kLazyRecompile, 1);
563 574
564 // Restore call kind information. 575 // Restore call kind information.
565 __ pop(rcx); 576 __ pop(rcx);
566 // Restore function. 577 // Restore function.
567 __ pop(rdi); 578 __ pop(rdi);
568 579
569 // Tear down temporary frame. 580 // Tear down internal frame.
570 __ LeaveInternalFrame(); 581 }
571 582
572 // Do a tail-call of the compiled function. 583 // Do a tail-call of the compiled function.
573 __ lea(rax, FieldOperand(rax, Code::kHeaderSize)); 584 __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
574 __ jmp(rax); 585 __ jmp(rax);
575 } 586 }
576 587
577 588
578 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 589 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
579 Deoptimizer::BailoutType type) { 590 Deoptimizer::BailoutType type) {
580 // Enter an internal frame. 591 // Enter an internal frame.
581 __ EnterInternalFrame(); 592 {
593 FrameScope scope(masm, StackFrame::INTERNAL);
582 594
583 // Pass the deoptimization type to the runtime system. 595 // Pass the deoptimization type to the runtime system.
584 __ Push(Smi::FromInt(static_cast<int>(type))); 596 __ Push(Smi::FromInt(static_cast<int>(type)));
585 597
586 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); 598 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
587 // Tear down temporary frame. 599 // Tear down internal frame.
588 __ LeaveInternalFrame(); 600 }
589 601
590 // Get the full codegen state from the stack and untag it. 602 // Get the full codegen state from the stack and untag it.
591 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize)); 603 __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
592 604
593 // Switch on the state. 605 // Switch on the state.
594 Label not_no_registers, not_tos_rax; 606 Label not_no_registers, not_tos_rax;
595 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS)); 607 __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
596 __ j(not_equal, &not_no_registers, Label::kNear); 608 __ j(not_equal, &not_no_registers, Label::kNear);
597 __ ret(1 * kPointerSize); // Remove state. 609 __ ret(1 * kPointerSize); // Remove state.
598 610
(...skipping 16 matching lines...)
615 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 627 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
616 } 628 }
617 629
618 630
619 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { 631 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
620 // For now, we are relying on the fact that Runtime::NotifyOSR 632 // For now, we are relying on the fact that Runtime::NotifyOSR
621 // doesn't do any garbage collection which allows us to save/restore 633 // doesn't do any garbage collection which allows us to save/restore
622 // the registers without worrying about which of them contain 634 // the registers without worrying about which of them contain
623 // pointers. This seems a bit fragile. 635 // pointers. This seems a bit fragile.
624 __ Pushad(); 636 __ Pushad();
625 __ EnterInternalFrame(); 637 {
626 __ CallRuntime(Runtime::kNotifyOSR, 0); 638 FrameScope scope(masm, StackFrame::INTERNAL);
627 __ LeaveInternalFrame(); 639 __ CallRuntime(Runtime::kNotifyOSR, 0);
640 }
628 __ Popad(); 641 __ Popad();
629 __ ret(0); 642 __ ret(0);
630 } 643 }
631 644
632 645
633 void Builtins::Generate_FunctionCall(MacroAssembler* masm) { 646 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
634 // Stack Layout: 647 // Stack Layout:
635 // rsp[0]: Return address 648 // rsp[0]: Return address
636 // rsp[1]: Argument n 649 // rsp[1]: Argument n
637 // rsp[2]: Argument n-1 650 // rsp[2]: Argument n-1
(...skipping 49 matching lines...)
687 __ j(equal, &use_global_receiver); 700 __ j(equal, &use_global_receiver);
688 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 701 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
689 __ j(equal, &use_global_receiver); 702 __ j(equal, &use_global_receiver);
690 703
691 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE); 704 STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
692 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 705 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
693 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); 706 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
694 __ j(above_equal, &shift_arguments); 707 __ j(above_equal, &shift_arguments);
695 708
696 __ bind(&convert_to_object); 709 __ bind(&convert_to_object);
697 __ EnterInternalFrame(); // In order to preserve argument count. 710 {
711 // Enter an internal frame in order to preserve argument count.
712 FrameScope scope(masm, StackFrame::INTERNAL);
698 __ Integer32ToSmi(rax, rax); 713 __ Integer32ToSmi(rax, rax);
699 __ push(rax); 714 __ push(rax);
700 715
701 __ push(rbx); 716 __ push(rbx);
702 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 717 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
703 __ movq(rbx, rax); 718 __ movq(rbx, rax);
704 719
705 __ pop(rax); 720 __ pop(rax);
706 __ SmiToInteger32(rax, rax); 721 __ SmiToInteger32(rax, rax);
707 __ LeaveInternalFrame(); 722 }
723
708 // Restore the function to rdi. 724 // Restore the function to rdi.
709 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); 725 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
710 __ jmp(&patch_receiver, Label::kNear); 726 __ jmp(&patch_receiver, Label::kNear);
711 727
712 // Use the global receiver object from the called function as the 728 // Use the global receiver object from the called function as the
713 // receiver. 729 // receiver.
714 __ bind(&use_global_receiver); 730 __ bind(&use_global_receiver);
715 const int kGlobalIndex = 731 const int kGlobalIndex =
716 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize; 732 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
717 __ movq(rbx, FieldOperand(rsi, kGlobalIndex)); 733 __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
(...skipping 62 matching lines...)
780 NullCallWrapper(), CALL_AS_METHOD); 796 NullCallWrapper(), CALL_AS_METHOD);
781 } 797 }
782 798
783 799
784 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { 800 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
785 // Stack at entry: 801 // Stack at entry:
786 // rsp: return address 802 // rsp: return address
787 // rsp+8: arguments 803 // rsp+8: arguments
788 // rsp+16: receiver ("this") 804 // rsp+16: receiver ("this")
789 // rsp+24: function 805 // rsp+24: function
790 __ EnterInternalFrame(); 806 {
807 FrameScope scope(masm, StackFrame::INTERNAL);
791 // Stack frame: 808 // Stack frame:
792 // rbp: Old base pointer 809 // rbp: Old base pointer
793 // rbp[1]: return address 810 // rbp[1]: return address
794 // rbp[2]: function arguments 811 // rbp[2]: function arguments
795 // rbp[3]: receiver 812 // rbp[3]: receiver
796 // rbp[4]: function 813 // rbp[4]: function
797 static const int kArgumentsOffset = 2 * kPointerSize; 814 static const int kArgumentsOffset = 2 * kPointerSize;
798 static const int kReceiverOffset = 3 * kPointerSize; 815 static const int kReceiverOffset = 3 * kPointerSize;
799 static const int kFunctionOffset = 4 * kPointerSize; 816 static const int kFunctionOffset = 4 * kPointerSize;
800 __ push(Operand(rbp, kFunctionOffset)); 817 __ push(Operand(rbp, kFunctionOffset));
(...skipping 112 matching lines...)
913 __ cmpq(rax, Operand(rbp, kLimitOffset)); 930 __ cmpq(rax, Operand(rbp, kLimitOffset));
914 __ j(not_equal, &loop); 931 __ j(not_equal, &loop);
915 932
916 // Invoke the function. 933 // Invoke the function.
917 ParameterCount actual(rax); 934 ParameterCount actual(rax);
918 __ SmiToInteger32(rax, rax); 935 __ SmiToInteger32(rax, rax);
919 __ movq(rdi, Operand(rbp, kFunctionOffset)); 936 __ movq(rdi, Operand(rbp, kFunctionOffset));
920 __ InvokeFunction(rdi, actual, CALL_FUNCTION, 937 __ InvokeFunction(rdi, actual, CALL_FUNCTION,
921 NullCallWrapper(), CALL_AS_METHOD); 938 NullCallWrapper(), CALL_AS_METHOD);
922 939
923 __ LeaveInternalFrame(); 940 // Leave internal frame.
941 }
924 __ ret(3 * kPointerSize); // remove function, receiver, and arguments 942 __ ret(3 * kPointerSize); // remove function, receiver, and arguments
925 } 943 }
926 944
927 945
928 // Number of empty elements to allocate for an empty array. 946 // Number of empty elements to allocate for an empty array.
929 static const int kPreallocatedArrayElements = 4; 947 static const int kPreallocatedArrayElements = 4;
930 948
931 949
932 // Allocate an empty JSArray. The allocated array is put into the result 950 // Allocate an empty JSArray. The allocated array is put into the result
933 // register. If the parameter initial_capacity is larger than zero an elements 951 // register. If the parameter initial_capacity is larger than zero an elements
(...skipping 539 matching lines...)
1473 // should perform a stack guard check so we can get interrupts while 1491 // should perform a stack guard check so we can get interrupts while
1474 // waiting for on-stack replacement. 1492 // waiting for on-stack replacement.
1475 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 1493 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1476 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); 1494 __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
1477 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); 1495 __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
1478 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset)); 1496 __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
1479 __ j(greater, &stack_check); 1497 __ j(greater, &stack_check);
1480 1498
1481 // Pass the function to optimize as the argument to the on-stack 1499 // Pass the function to optimize as the argument to the on-stack
1482 // replacement runtime function. 1500 // replacement runtime function.
1483 __ EnterInternalFrame(); 1501 {
1502 FrameScope scope(masm, StackFrame::INTERNAL);
1484 __ push(rax); 1503 __ push(rax);
1485 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); 1504 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1486 __ LeaveInternalFrame(); 1505 }
1487 1506
1488 // If the result was -1 it means that we couldn't optimize the 1507 // If the result was -1 it means that we couldn't optimize the
1489 // function. Just return and continue in the unoptimized version. 1508 // function. Just return and continue in the unoptimized version.
1490 Label skip; 1509 Label skip;
1491 __ SmiCompare(rax, Smi::FromInt(-1)); 1510 __ SmiCompare(rax, Smi::FromInt(-1));
1492 __ j(not_equal, &skip, Label::kNear); 1511 __ j(not_equal, &skip, Label::kNear);
1493 __ ret(0); 1512 __ ret(0);
1494 1513
1495 // If we decide not to perform on-stack replacement we perform a 1514 // If we decide not to perform on-stack replacement we perform a
1496 // stack guard check to enable interrupts. 1515 // stack guard check to enable interrupts.
1497 __ bind(&stack_check); 1516 __ bind(&stack_check);
1498 Label ok; 1517 Label ok;
1499 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 1518 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1500 __ j(above_equal, &ok, Label::kNear); 1519 __ j(above_equal, &ok, Label::kNear);
1501 1520
1502 StackCheckStub stub; 1521 StackCheckStub stub;
1503 __ TailCallStub(&stub); 1522 __ TailCallStub(&stub);
1504 __ Abort("Unreachable code: returned from tail call."); 1523 if (FLAG_debug_code) {
1524 __ Abort("Unreachable code: returned from tail call.");
1525 }
1505 __ bind(&ok); 1526 __ bind(&ok);
1506 __ ret(0); 1527 __ ret(0);
1507 1528
1508 __ bind(&skip); 1529 __ bind(&skip);
1509 // Untag the AST id and push it on the stack. 1530 // Untag the AST id and push it on the stack.
1510 __ SmiToInteger32(rax, rax); 1531 __ SmiToInteger32(rax, rax);
1511 __ push(rax); 1532 __ push(rax);
1512 1533
1513 // Generate the code for doing the frame-to-frame translation using 1534 // Generate the code for doing the frame-to-frame translation using
1514 // the deoptimizer infrastructure. 1535 // the deoptimizer infrastructure.
1515 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); 1536 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1516 generator.Generate(); 1537 generator.Generate();
1517 } 1538 }
1518 1539
1519 1540
1520 #undef __ 1541 #undef __
1521 1542
1522 } } // namespace v8::internal 1543 } } // namespace v8::internal
1523 1544
1524 #endif // V8_TARGET_ARCH_X64 1545 #endif // V8_TARGET_ARCH_X64