Chromium Code Reviews

Side by Side Diff: src/x64/full-codegen-x64.cc

Issue 6460038: Version 3.1.3.... (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 10 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 25 matching lines...)
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "parser.h" 37 #include "parser.h"
38 #include "scopes.h" 38 #include "scopes.h"
39 #include "stub-cache.h" 39 #include "stub-cache.h"
40 40
41 namespace v8 { 41 namespace v8 {
42 namespace internal { 42 namespace internal {
43 43
44 #define __ ACCESS_MASM(masm_) 44 #define __ ACCESS_MASM(masm_)
45 45
46
47 class JumpPatchSite BASE_EMBEDDED {
48 public:
49 explicit JumpPatchSite(MacroAssembler* masm)
50 : masm_(masm) {
51 #ifdef DEBUG
52 info_emitted_ = false;
53 #endif
54 }
55
56 ~JumpPatchSite() {
57 ASSERT(patch_site_.is_bound() == info_emitted_);
58 }
59
60 void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
61 __ testb(reg, Immediate(kSmiTagMask));
62 EmitJump(not_carry, target); // Always taken before patched.
63 }
64
65 void EmitJumpIfSmi(Register reg, NearLabel* target) {
66 __ testb(reg, Immediate(kSmiTagMask));
67 EmitJump(carry, target); // Never taken before patched.
68 }
69
70 void EmitPatchInfo() {
71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
72 ASSERT(is_int8(delta_to_patch_site));
73 __ testl(rax, Immediate(delta_to_patch_site));
74 #ifdef DEBUG
75 info_emitted_ = true;
76 #endif
77 }
78
79 bool is_bound() const { return patch_site_.is_bound(); }
80
81 private:
82 // jc will be patched with jz, jnc will become jnz.
83 void EmitJump(Condition cc, NearLabel* target) {
84 ASSERT(!patch_site_.is_bound() && !info_emitted_);
85 ASSERT(cc == carry || cc == not_carry);
86 __ bind(&patch_site_);
87 __ j(cc, target);
88 }
89
90 MacroAssembler* masm_;
91 Label patch_site_;
92 #ifdef DEBUG
93 bool info_emitted_;
94 #endif
95 };
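
A note on the new helper: testb sets ZF from reg & kSmiTagMask and always clears CF, so the jc emitted by EmitJumpIfSmi is never taken and the jnc from EmitJumpIfNotSmi always is, until the inline cache patches the condition. A minimal sketch of the byte-level patching idea, assuming the standard two-byte short-jump encodings (the real patcher lives in src/x64/ic-x64.cc):

  #include <cassert>
  #include <cstdint>

  // Rewrite a short "jc rel8" (0x72) as "jz rel8" (0x74), or "jnc rel8"
  // (0x73) as "jnz rel8" (0x75): only the opcode byte changes, and the
  // rel8 displacement is reused as-is.
  void PatchSmiCheckJump(uint8_t* jump) {
    assert(jump[0] == 0x72 || jump[0] == 0x73);  // short jc / jnc
    jump[0] += 0x02;                             // jc -> jz, jnc -> jnz
  }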
96
97
46 // Generate code for a JS function. On entry to the function the receiver 98 // Generate code for a JS function. On entry to the function the receiver
47 // and arguments have been pushed on the stack left to right, with the 99 // and arguments have been pushed on the stack left to right, with the
48 // return address on top of them. The actual argument count matches the 100 // return address on top of them. The actual argument count matches the
49 // formal parameter count expected by the function. 101 // formal parameter count expected by the function.
50 // 102 //
51 // The live registers are: 103 // The live registers are:
52 // o rdi: the JS function object being called (ie, ourselves) 104 // o rdi: the JS function object being called (ie, ourselves)
53 // o rsi: our context 105 // o rsi: our context
54 // o rbp: our caller's frame pointer 106 // o rbp: our caller's frame pointer
55 // o rsp: stack pointer (pointing to return address) 107 // o rsp: stack pointer (pointing to return address)
(...skipping 182 matching lines...)
238 // Add a label for checking the size of the code used for returning. 290 // Add a label for checking the size of the code used for returning.
239 Label check_exit_codesize; 291 Label check_exit_codesize;
240 masm_->bind(&check_exit_codesize); 292 masm_->bind(&check_exit_codesize);
241 #endif 293 #endif
242 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 294 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
243 __ RecordJSReturn(); 295 __ RecordJSReturn();
244 // Do not use the leave instruction here because it is too short to 296 // Do not use the leave instruction here because it is too short to
245 // patch with the code required by the debugger. 297 // patch with the code required by the debugger.
246 __ movq(rsp, rbp); 298 __ movq(rsp, rbp);
247 __ pop(rbp); 299 __ pop(rbp);
248 __ ret((scope()->num_parameters() + 1) * kPointerSize); 300
301 int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
302 __ Ret(arguments_bytes, rcx);
303
249 #ifdef ENABLE_DEBUGGER_SUPPORT 304 #ifdef ENABLE_DEBUGGER_SUPPORT
250 // Add padding that will be overwritten by a debugger breakpoint. We 305 // Add padding that will be overwritten by a debugger breakpoint. We
251 // have just generated "movq rsp, rbp; pop rbp; ret k" with length 7 306 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
252 // (3 + 1 + 3). 307 // (3 + 1 + 3).
253 const int kPadding = Assembler::kJSReturnSequenceLength - 7; 308 const int kPadding = Assembler::kJSReturnSequenceLength - 7;
254 for (int i = 0; i < kPadding; ++i) { 309 for (int i = 0; i < kPadding; ++i) {
255 masm_->int3(); 310 masm_->int3();
256 } 311 }
257 // Check that the size of the code used for returning matches what is 312 // Check that the size of the code used for returning is large enough
258 // expected by the debugger. 313 // for the debugger's requirements.
259 ASSERT_EQ(Assembler::kJSReturnSequenceLength, 314 ASSERT(Assembler::kJSReturnSequenceLength <=
260 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); 315 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
261 #endif 316 #endif
262 } 317 }
263 } 318 }
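
The comment and assertion above are relaxed because the raw ret was replaced by the Ret macro: Ret(arguments_bytes, rcx) can emit a longer sequence than the fixed 7-byte movq/pop/ret epilogue when the byte count does not fit its short form, so the sequence length is now only a lower bound and int3 bytes fill the rest of the debugger's patch window. A rough sketch of the padding arithmetic, with the window size assumed to be 13 bytes purely for illustration (the real constant is Assembler::kJSReturnSequenceLength):

  #include <cassert>
  #include <cstdint>
  #include <vector>

  std::vector<uint8_t> PadReturnSequence(int sequence_length) {
    const int kEpilogueBytes = 3 + 1 + 3;  // movq rsp,rbp; pop rbp; ret imm16
    assert(sequence_length >= kEpilogueBytes);
    // e.g. sequence_length == 13 -> six one-byte int3 (0xCC) traps for the
    // debugger to overwrite.
    return std::vector<uint8_t>(sequence_length - kEpilogueBytes, 0xCC);
  }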
264 319
265 320
266 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( 321 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
267 Token::Value op, Expression* left, Expression* right) { 322 Token::Value op, Expression* left, Expression* right) {
268 ASSERT(ShouldInlineSmiCase(op)); 323 ASSERT(ShouldInlineSmiCase(op));
269 return kNoConstants; 324 return kNoConstants;
270 } 325 }
(...skipping 381 matching lines...)
652 __ Push(Smi::FromInt(0)); // no initial value! 707 __ Push(Smi::FromInt(0)); // no initial value!
653 } 708 }
654 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 709 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
655 break; 710 break;
656 } 711 }
657 } 712 }
658 713
659 } else if (prop != NULL) { 714 } else if (prop != NULL) {
660 if (function != NULL || mode == Variable::CONST) { 715 if (function != NULL || mode == Variable::CONST) {
661 // We are declaring a function or constant that rewrites to a 716 // We are declaring a function or constant that rewrites to a
662 // property. Use (keyed) IC to set the initial value. 717 // property. Use (keyed) IC to set the initial value. We
663 VisitForStackValue(prop->obj()); 718 // cannot visit the rewrite because it's shared and we risk
719 // recording duplicate AST IDs for bailouts from optimized code.
720 ASSERT(prop->obj()->AsVariableProxy() != NULL);
721 { AccumulatorValueContext for_object(this);
722 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
723 }
664 if (function != NULL) { 724 if (function != NULL) {
665 VisitForStackValue(prop->key()); 725 __ push(rax);
666 VisitForAccumulatorValue(function); 726 VisitForAccumulatorValue(function);
667 __ pop(rcx); 727 __ pop(rdx);
668 } else { 728 } else {
669 VisitForAccumulatorValue(prop->key()); 729 __ movq(rdx, rax);
670 __ movq(rcx, result_register()); 730 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
671 __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
672 } 731 }
673 __ pop(rdx); 732 ASSERT(prop->key()->AsLiteral() != NULL &&
733 prop->key()->AsLiteral()->handle()->IsSmi());
734 __ Move(rcx, prop->key()->AsLiteral()->handle());
674 735
675 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 736 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
676 EmitCallIC(ic, RelocInfo::CODE_TARGET); 737 EmitCallIC(ic, RelocInfo::CODE_TARGET);
677 } 738 }
678 } 739 }
679 } 740 }
680 741
681 742
682 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { 743 void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
683 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun()); 744 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
(...skipping 37 matching lines...)
721 Comment cmnt(masm_, "[ Case comparison"); 782 Comment cmnt(masm_, "[ Case comparison");
722 __ bind(&next_test); 783 __ bind(&next_test);
723 next_test.Unuse(); 784 next_test.Unuse();
724 785
725 // Compile the label expression. 786 // Compile the label expression.
726 VisitForAccumulatorValue(clause->label()); 787 VisitForAccumulatorValue(clause->label());
727 788
728 // Perform the comparison as if via '==='. 789 // Perform the comparison as if via '==='.
729 __ movq(rdx, Operand(rsp, 0)); // Switch value. 790 __ movq(rdx, Operand(rsp, 0)); // Switch value.
730 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 791 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
792 JumpPatchSite patch_site(masm_);
731 if (inline_smi_code) { 793 if (inline_smi_code) {
732 Label slow_case; 794 NearLabel slow_case;
733 __ JumpIfNotBothSmi(rdx, rax, &slow_case); 795 __ movq(rcx, rdx);
734 __ SmiCompare(rdx, rax); 796 __ or_(rcx, rax);
797 patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
798
799 __ cmpq(rdx, rax);
735 __ j(not_equal, &next_test); 800 __ j(not_equal, &next_test);
736 __ Drop(1); // Switch value is no longer needed. 801 __ Drop(1); // Switch value is no longer needed.
737 __ jmp(clause->body_target()->entry_label()); 802 __ jmp(clause->body_target()->entry_label());
738 __ bind(&slow_case); 803 __ bind(&slow_case);
739 } 804 }
740 805
741 CompareFlags flags = inline_smi_code 806 // Record position before stub call for type feedback.
742 ? NO_SMI_COMPARE_IN_STUB 807 SetSourcePosition(clause->position());
743 : NO_COMPARE_FLAGS; 808 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
744 CompareStub stub(equal, true, flags); 809 EmitCallIC(ic, &patch_site);
745 __ CallStub(&stub); 810
746 __ testq(rax, rax); 811 __ testq(rax, rax);
747 __ j(not_equal, &next_test); 812 __ j(not_equal, &next_test);
748 __ Drop(1); // Switch value is no longer needed. 813 __ Drop(1); // Switch value is no longer needed.
749 __ jmp(clause->body_target()->entry_label()); 814 __ jmp(clause->body_target()->entry_label());
750 } 815 }
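
The or_-based guard that replaces JumpIfNotBothSmi lets a single smi check, with its patchable jump, cover both operands: V8's smi tag is the low bit and is defined as 0, so (rdx | rax) has a clear tag bit exactly when both values are smis. A minimal sketch of that invariant, assuming kSmiTag == 0 and kSmiTagMask == 1:

  #include <cstdint>

  constexpr intptr_t kSmiTagMask = 1;  // low tag bit; 0 marks a smi

  bool BothSmi(intptr_t a, intptr_t b) {
    // The or of the two words has a zero tag bit iff both tag bits are
    // zero, so one testb checks both operands at once.
    return ((a | b) & kSmiTagMask) == 0;
  }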
751 816
752 // Discard the test value and jump to the default if present, otherwise to 817 // Discard the test value and jump to the default if present, otherwise to
753 // the end of the statement. 818 // the end of the statement.
754 __ bind(&next_test); 819 __ bind(&next_test);
755 __ Drop(1); // Switch value is no longer needed. 820 __ Drop(1); // Switch value is no longer needed.
(...skipping 759 matching lines...)
1515 Token::Value op, 1580 Token::Value op,
1516 OverwriteMode mode, 1581 OverwriteMode mode,
1517 Expression* left, 1582 Expression* left,
1518 Expression* right, 1583 Expression* right,
1519 ConstantOperand constant) { 1584 ConstantOperand constant) {
1520 ASSERT(constant == kNoConstants); // Only handled case. 1585 ASSERT(constant == kNoConstants); // Only handled case.
1521 1586
1522 // Do combined smi check of the operands. Left operand is on the 1587 // Do combined smi check of the operands. Left operand is on the
1523 // stack (popped into rdx). Right operand is in rax but moved into 1588 // stack (popped into rdx). Right operand is in rax but moved into
1524 // rcx to make the shifts easier. 1589 // rcx to make the shifts easier.
1525 Label done, stub_call, smi_case; 1590 NearLabel done, stub_call, smi_case;
1526 __ pop(rdx); 1591 __ pop(rdx);
1527 __ movq(rcx, rax); 1592 __ movq(rcx, rax);
1528 Condition smi = masm()->CheckBothSmi(rdx, rax); 1593 __ or_(rax, rdx);
1529 __ j(smi, &smi_case); 1594 JumpPatchSite patch_site(masm_);
1595 patch_site.EmitJumpIfSmi(rax, &smi_case);
1530 1596
1531 __ bind(&stub_call); 1597 __ bind(&stub_call);
1598 __ movq(rax, rcx);
1532 TypeRecordingBinaryOpStub stub(op, mode); 1599 TypeRecordingBinaryOpStub stub(op, mode);
1533 __ movq(rax, rcx); 1600 EmitCallIC(stub.GetCode(), &patch_site);
1534 __ CallStub(&stub);
1535 __ jmp(&done); 1601 __ jmp(&done);
1536 1602
1537 __ bind(&smi_case); 1603 __ bind(&smi_case);
1538 switch (op) { 1604 switch (op) {
1539 case Token::SAR: 1605 case Token::SAR:
1540 __ SmiShiftArithmeticRight(rax, rdx, rcx); 1606 __ SmiShiftArithmeticRight(rax, rdx, rcx);
1541 break; 1607 break;
1542 case Token::SHL: 1608 case Token::SHL:
1543 __ SmiShiftLeft(rax, rdx, rcx); 1609 __ SmiShiftLeft(rax, rdx, rcx);
1544 break; 1610 break;
(...skipping 1645 matching lines...)
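
In InlineSmiBinaryOp above, the right operand is staged in rcx up front because x86's variable-count shifts (sar/shl/shr r/m, cl) only take the count in cl. The smi fast path then follows JavaScript shift semantics, which mask the count to five bits; a small sketch on untagged 32-bit values, assuming the operands have already been converted to int32:

  #include <cstdint>

  int32_t JsSar(int32_t lhs, int32_t count) {
    return lhs >> (count & 31);  // sign-propagating, count taken mod 32
  }

  int32_t JsShl(int32_t lhs, int32_t count) {
    // Shift an unsigned copy so the wrap-around is well defined in C++.
    return static_cast<int32_t>(static_cast<uint32_t>(lhs) << (count & 31));
  }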
3190 __ movq(Operand(rsp, kPointerSize), rax); 3256 __ movq(Operand(rsp, kPointerSize), rax);
3191 break; 3257 break;
3192 case KEYED_PROPERTY: 3258 case KEYED_PROPERTY:
3193 __ movq(Operand(rsp, 2 * kPointerSize), rax); 3259 __ movq(Operand(rsp, 2 * kPointerSize), rax);
3194 break; 3260 break;
3195 } 3261 }
3196 } 3262 }
3197 } 3263 }
3198 3264
3199 // Inline smi case if we are in a loop. 3265 // Inline smi case if we are in a loop.
3200 Label stub_call, done; 3266 NearLabel stub_call, done;
3267 JumpPatchSite patch_site(masm_);
3268
3201 if (ShouldInlineSmiCase(expr->op())) { 3269 if (ShouldInlineSmiCase(expr->op())) {
3202 if (expr->op() == Token::INC) { 3270 if (expr->op() == Token::INC) {
3203 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 3271 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3204 } else { 3272 } else {
3205 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); 3273 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3206 } 3274 }
3207 __ j(overflow, &stub_call); 3275 __ j(overflow, &stub_call);
3208 // We could eliminate this smi check if we split the code at 3276 // We could eliminate this smi check if we split the code at
3209 // the first smi check before calling ToNumber. 3277 // the first smi check before calling ToNumber.
3210 is_smi = masm_->CheckSmi(rax); 3278 patch_site.EmitJumpIfSmi(rax, &done);
3211 __ j(is_smi, &done);
3212 3279
3213 __ bind(&stub_call); 3280 __ bind(&stub_call);
3214 // Call stub. Undo operation first. 3281 // Call stub. Undo operation first.
3215 if (expr->op() == Token::INC) { 3282 if (expr->op() == Token::INC) {
3216 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); 3283 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3217 } else { 3284 } else {
3218 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 3285 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3219 } 3286 }
3220 } 3287 }
3221 3288
3222 // Record position before stub call. 3289 // Record position before stub call.
3223 SetSourcePosition(expr->position()); 3290 SetSourcePosition(expr->position());
3224 3291
3225 // Call stub for +1/-1. 3292 // Call stub for +1/-1.
3226 TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE); 3293 TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
3227 if (expr->op() == Token::INC) { 3294 if (expr->op() == Token::INC) {
3228 __ Move(rdx, Smi::FromInt(1)); 3295 __ Move(rdx, Smi::FromInt(1));
3229 } else { 3296 } else {
3230 __ movq(rdx, rax); 3297 __ movq(rdx, rax);
3231 __ Move(rax, Smi::FromInt(1)); 3298 __ Move(rax, Smi::FromInt(1));
3232 } 3299 }
3233 __ CallStub(&stub); 3300 EmitCallIC(stub.GetCode(), &patch_site);
3301 __ bind(&done);
3234 3302
3235 __ bind(&done);
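
The count operation is compiled optimistically: SmiAddConstant/SmiSubConstant mutate rax in place, so when the overflow branch fires the delta has to be subtracted back out before the generic stub sees the operand. A standalone sketch of that undo-before-slow-path shape, where SlowPathInc is a hypothetical stand-in for the TypeRecordingBinaryOpStub call (which would really promote the result to a heap number):

  #include <cstdint>

  int64_t SlowPathInc(int32_t v) {       // placeholder for the stub call
    return static_cast<int64_t>(v) + 1;  // widened "heap number" result
  }

  int64_t CountInc(int32_t v) {
    int32_t r;
    // GCC/Clang builtin; reports wrap-around instead of invoking UB.
    if (!__builtin_add_overflow(v, 1, &r)) return r;  // fast path stays small
    // The assembly above must SmiSubConstant first because it incremented
    // rax in place; here v was never modified, so no undo is needed.
    return SlowPathInc(v);
  }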
3236 // Store the value returned in rax. 3303 // Store the value returned in rax.
3237 switch (assign_type) { 3304 switch (assign_type) {
3238 case VARIABLE: 3305 case VARIABLE:
3239 if (expr->is_postfix()) { 3306 if (expr->is_postfix()) {
3240 // Perform the assignment as if via '='. 3307 // Perform the assignment as if via '='.
3241 { EffectContext context(this); 3308 { EffectContext context(this);
3242 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3309 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3243 Token::ASSIGN); 3310 Token::ASSIGN);
3244 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 3311 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3245 context.Plug(rax); 3312 context.Plug(rax);
(...skipping 247 matching lines...)
3493 cc = greater_equal; 3560 cc = greater_equal;
3494 __ pop(rdx); 3561 __ pop(rdx);
3495 break; 3562 break;
3496 case Token::IN: 3563 case Token::IN:
3497 case Token::INSTANCEOF: 3564 case Token::INSTANCEOF:
3498 default: 3565 default:
3499 UNREACHABLE(); 3566 UNREACHABLE();
3500 } 3567 }
3501 3568
3502 bool inline_smi_code = ShouldInlineSmiCase(op); 3569 bool inline_smi_code = ShouldInlineSmiCase(op);
3570 JumpPatchSite patch_site(masm_);
3503 if (inline_smi_code) { 3571 if (inline_smi_code) {
3504 Label slow_case; 3572 NearLabel slow_case;
3505 __ JumpIfNotBothSmi(rax, rdx, &slow_case); 3573 __ movq(rcx, rdx);
3506 __ SmiCompare(rdx, rax); 3574 __ or_(rcx, rax);
3575 patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
3576 __ cmpq(rdx, rax);
3507 Split(cc, if_true, if_false, NULL); 3577 Split(cc, if_true, if_false, NULL);
3508 __ bind(&slow_case); 3578 __ bind(&slow_case);
3509 } 3579 }
3510 3580
3511 CompareFlags flags = inline_smi_code 3581 // Record position and call the compare IC.
3512 ? NO_SMI_COMPARE_IN_STUB 3582 SetSourcePosition(expr->position());
3513 : NO_COMPARE_FLAGS; 3583 Handle<Code> ic = CompareIC::GetUninitialized(op);
3514 CompareStub stub(cc, strict, flags); 3584 EmitCallIC(ic, &patch_site);
3515 __ CallStub(&stub);
3516 3585
3517 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 3586 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3518 __ testq(rax, rax); 3587 __ testq(rax, rax);
3519 Split(cc, if_true, if_false, fall_through); 3588 Split(cc, if_true, if_false, fall_through);
3520 } 3589 }
3521 } 3590 }
3522 3591
3523 // Convert the result of the comparison into one expected for this 3592 // Convert the result of the comparison into one expected for this
3524 // expression's context. 3593 // expression's context.
3525 context()->Plug(if_true, if_false); 3594 context()->Plug(if_true, if_false);
(...skipping 42 matching lines...)
3568 3637
3569 3638
3570 Register FullCodeGenerator::context_register() { 3639 Register FullCodeGenerator::context_register() {
3571 return rsi; 3640 return rsi;
3572 } 3641 }
3573 3642
3574 3643
3575 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) { 3644 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
3576 ASSERT(mode == RelocInfo::CODE_TARGET || 3645 ASSERT(mode == RelocInfo::CODE_TARGET ||
3577 mode == RelocInfo::CODE_TARGET_CONTEXT); 3646 mode == RelocInfo::CODE_TARGET_CONTEXT);
3647 switch (ic->kind()) {
3648 case Code::LOAD_IC:
3649 __ IncrementCounter(&Counters::named_load_full, 1);
3650 break;
3651 case Code::KEYED_LOAD_IC:
3652 __ IncrementCounter(&Counters::keyed_load_full, 1);
3653 break;
3654 case Code::STORE_IC:
3655 __ IncrementCounter(&Counters::named_store_full, 1);
3656 break;
3657 case Code::KEYED_STORE_IC:
3658 __ IncrementCounter(&Counters::keyed_store_full, 1);
3659 default:
3660 break;
3661 }
3662
3578 __ call(ic, mode); 3663 __ call(ic, mode);
3579 3664
3580 // Crankshaft doesn't need patching of inlined loads and stores. 3665 // Crankshaft doesn't need patching of inlined loads and stores.
3581 if (V8::UseCrankshaft()) return; 3666 // When compiling the snapshot we need to produce code that works
3667 // with and without Crankshaft.
3668 if (V8::UseCrankshaft() && !Serializer::enabled()) {
3669 return;
3670 }
3582 3671
3583 // If we're calling a (keyed) load or store stub, we have to mark 3672 // If we're calling a (keyed) load or store stub, we have to mark
3584 // the call as containing no inlined code so we will not attempt to 3673 // the call as containing no inlined code so we will not attempt to
3585 // patch it. 3674 // patch it.
3586 switch (ic->kind()) { 3675 switch (ic->kind()) {
3587 case Code::LOAD_IC: 3676 case Code::LOAD_IC:
3588 case Code::KEYED_LOAD_IC: 3677 case Code::KEYED_LOAD_IC:
3589 case Code::STORE_IC: 3678 case Code::STORE_IC:
3590 case Code::KEYED_STORE_IC: 3679 case Code::KEYED_STORE_IC:
3591 __ nop(); // Signals no inlined code. 3680 __ nop(); // Signals no inlined code.
3592 break; 3681 break;
3593 default: 3682 default:
3594 // Do nothing. 3683 // Do nothing.
3595 break; 3684 break;
3596 } 3685 }
3597 } 3686 }
3598 3687
3599 3688
3689 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
3690 __ call(ic, RelocInfo::CODE_TARGET);
3691 if (patch_site != NULL && patch_site->is_bound()) {
3692 patch_site->EmitPatchInfo();
3693 } else {
3694 __ nop(); // Signals no inlined code.
3695 }
3696 }
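
This overload pins down the byte following every IC call: either a one-byte nop, meaning there is no inlined smi code to patch, or the test instruction emitted by EmitPatchInfo, whose int8 immediate records how far back the patchable jump sits. A hypothetical decoder sketch of that protocol (the real consumer is PatchInlinedSmiCode in src/x64/ic-x64.cc; the exact marker encoding here is an assumption):

  #include <cstdint>

  const uint8_t kNopByte  = 0x90;  // "no inlined code" marker
  const uint8_t kTestByte = 0xA8;  // assumed encoding: test al, imm8

  // Returns the address of the jc/jnc emitted by JumpPatchSite, or null
  // when the call was followed by the nop marker instead.
  const uint8_t* FindPatchableJump(const uint8_t* after_call) {
    if (after_call[0] == kNopByte) return nullptr;   // nothing inlined
    if (after_call[0] != kTestByte) return nullptr;  // unknown tail; be safe
    int8_t delta = static_cast<int8_t>(after_call[1]);
    return after_call - delta;  // EmitPatchInfo measured the delta up to here
  }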
3697
3698
3600 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 3699 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3601 ASSERT(IsAligned(frame_offset, kPointerSize)); 3700 ASSERT(IsAligned(frame_offset, kPointerSize));
3602 __ movq(Operand(rbp, frame_offset), value); 3701 __ movq(Operand(rbp, frame_offset), value);
3603 } 3702 }
3604 3703
3605 3704
3606 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 3705 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3607 __ movq(dst, ContextOperand(rsi, context_index)); 3706 __ movq(dst, ContextOperand(rsi, context_index));
3608 } 3707 }
3609 3708
(...skipping 31 matching lines...)
3641 __ ret(0); 3740 __ ret(0);
3642 } 3741 }
3643 3742
3644 3743
3645 #undef __ 3744 #undef __
3646 3745
3647 3746
3648 } } // namespace v8::internal 3747 } } // namespace v8::internal
3649 3748
3650 #endif // V8_TARGET_ARCH_X64 3749 #endif // V8_TARGET_ARCH_X64