Chromium Code Reviews

Side by Side Diff: src/x64/full-codegen-x64.cc

Issue 181453002: Reset trunk to 3.24.35.4 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 6 years, 9 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 100 matching lines...)
111 // o rsi: our context 111 // o rsi: our context
112 // o rbp: our caller's frame pointer 112 // o rbp: our caller's frame pointer
113 // o rsp: stack pointer (pointing to return address) 113 // o rsp: stack pointer (pointing to return address)
114 // 114 //
115 // The function builds a JS frame. Please see JavaScriptFrameConstants in 115 // The function builds a JS frame. Please see JavaScriptFrameConstants in
116 // frames-x64.h for its layout. 116 // frames-x64.h for its layout.
117 void FullCodeGenerator::Generate() { 117 void FullCodeGenerator::Generate() {
118 CompilationInfo* info = info_; 118 CompilationInfo* info = info_;
119 handler_table_ = 119 handler_table_ =
120 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 120 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
121
122 InitializeFeedbackVector();
123
124 profiling_counter_ = isolate()->factory()->NewCell( 121 profiling_counter_ = isolate()->factory()->NewCell(
125 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 122 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
126 SetFunctionPosition(function()); 123 SetFunctionPosition(function());
127 Comment cmnt(masm_, "[ function compiled by full code generator"); 124 Comment cmnt(masm_, "[ function compiled by full code generator");
128 125
129 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 126 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
130 127
131 #ifdef DEBUG 128 #ifdef DEBUG
132 if (strlen(FLAG_stop_at) > 0 && 129 if (strlen(FLAG_stop_at) > 0 &&
133 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 130 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
(...skipping 500 matching lines...)
634 if (false_label_ != fall_through_) __ jmp(false_label_); 631 if (false_label_ != fall_through_) __ jmp(false_label_);
635 } 632 }
636 } 633 }
637 634
638 635
639 void FullCodeGenerator::DoTest(Expression* condition, 636 void FullCodeGenerator::DoTest(Expression* condition,
640 Label* if_true, 637 Label* if_true,
641 Label* if_false, 638 Label* if_false,
642 Label* fall_through) { 639 Label* fall_through) {
643 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); 640 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
644 CallIC(ic, condition->test_id()); 641 CallIC(ic, NOT_CONTEXTUAL, condition->test_id());
645 __ testq(result_register(), result_register()); 642 __ testq(result_register(), result_register());
646 // The stub returns nonzero for true. 643 // The stub returns nonzero for true.
647 Split(not_zero, if_true, if_false, fall_through); 644 Split(not_zero, if_true, if_false, fall_through);
648 } 645 }
649 646
650 647
651 void FullCodeGenerator::Split(Condition cc, 648 void FullCodeGenerator::Split(Condition cc,
652 Label* if_true, 649 Label* if_true,
653 Label* if_false, 650 Label* if_false,
654 Label* fall_through) { 651 Label* fall_through) {
(...skipping 332 matching lines...)
987 __ cmpq(rdx, rax); 984 __ cmpq(rdx, rax);
988 __ j(not_equal, &next_test); 985 __ j(not_equal, &next_test);
989 __ Drop(1); // Switch value is no longer needed. 986 __ Drop(1); // Switch value is no longer needed.
990 __ jmp(clause->body_target()); 987 __ jmp(clause->body_target());
991 __ bind(&slow_case); 988 __ bind(&slow_case);
992 } 989 }
993 990
994 // Record position before stub call for type feedback. 991 // Record position before stub call for type feedback.
995 SetSourcePosition(clause->position()); 992 SetSourcePosition(clause->position());
996 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT); 993 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
997 CallIC(ic, clause->CompareId()); 994 CallIC(ic, NOT_CONTEXTUAL, clause->CompareId());
998 patch_site.EmitPatchInfo(); 995 patch_site.EmitPatchInfo();
999 996
1000 Label skip; 997 Label skip;
1001 __ jmp(&skip, Label::kNear); 998 __ jmp(&skip, Label::kNear);
1002 PrepareForBailout(clause, TOS_REG); 999 PrepareForBailout(clause, TOS_REG);
1003 __ CompareRoot(rax, Heap::kTrueValueRootIndex); 1000 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
1004 __ j(not_equal, &next_test); 1001 __ j(not_equal, &next_test);
1005 __ Drop(1); 1002 __ Drop(1);
1006 __ jmp(clause->body_target()); 1003 __ jmp(clause->body_target());
1007 __ bind(&skip); 1004 __ bind(&skip);
(...skipping 23 matching lines...)
1031 VisitStatements(clause->statements()); 1028 VisitStatements(clause->statements());
1032 } 1029 }
1033 1030
1034 __ bind(nested_statement.break_label()); 1031 __ bind(nested_statement.break_label());
1035 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1032 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1036 } 1033 }
1037 1034
1038 1035
1039 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 1036 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1040 Comment cmnt(masm_, "[ ForInStatement"); 1037 Comment cmnt(masm_, "[ ForInStatement");
1041 int slot = stmt->ForInFeedbackSlot();
1042 SetStatementPosition(stmt); 1038 SetStatementPosition(stmt);
1043 1039
1044 Label loop, exit; 1040 Label loop, exit;
1045 ForIn loop_statement(this, stmt); 1041 ForIn loop_statement(this, stmt);
1046 increment_loop_depth(); 1042 increment_loop_depth();
1047 1043
1048 // Get the object to enumerate over. If the object is null or undefined, skip 1044 // Get the object to enumerate over. If the object is null or undefined, skip
1049 // over the loop. See ECMA-262 version 5, section 12.6.4. 1045 // over the loop. See ECMA-262 version 5, section 12.6.4.
1050 VisitForAccumulatorValue(stmt->enumerable()); 1046 VisitForAccumulatorValue(stmt->enumerable());
1051 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); 1047 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
(...skipping 68 matching lines...)
1120 __ jmp(&loop); 1116 __ jmp(&loop);
1121 1117
1122 __ bind(&no_descriptors); 1118 __ bind(&no_descriptors);
1123 __ addq(rsp, Immediate(kPointerSize)); 1119 __ addq(rsp, Immediate(kPointerSize));
1124 __ jmp(&exit); 1120 __ jmp(&exit);
1125 1121
1126 // We got a fixed array in register rax. Iterate through that. 1122 // We got a fixed array in register rax. Iterate through that.
1127 Label non_proxy; 1123 Label non_proxy;
1128 __ bind(&fixed_array); 1124 __ bind(&fixed_array);
1129 1125
1130 Handle<Object> feedback = Handle<Object>( 1126 Handle<Cell> cell = isolate()->factory()->NewCell(
1131 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), 1127 Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
1132 isolate()); 1128 isolate()));
1133 StoreFeedbackVectorSlot(slot, feedback); 1129 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
1130 __ Move(rbx, cell);
1131 __ Move(FieldOperand(rbx, Cell::kValueOffset),
1132 Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));
1134 1133
1135 // No need for a write barrier, we are storing a Smi in the feedback vector.
1136 __ Move(rbx, FeedbackVector());
1137 __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(slot)),
1138 Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker));
1139 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check 1134 __ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
1140 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object 1135 __ movp(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
1141 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1136 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1142 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx); 1137 __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
1143 __ j(above, &non_proxy); 1138 __ j(above, &non_proxy);
1144 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy 1139 __ Move(rbx, Smi::FromInt(0)); // Zero indicates proxy
1145 __ bind(&non_proxy); 1140 __ bind(&non_proxy);
1146 __ push(rbx); // Smi 1141 __ push(rbx); // Smi
1147 __ push(rax); // Array 1142 __ push(rax); // Array
1148 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset)); 1143 __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
(...skipping 288 matching lines...)
1437 1432
1438 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1433 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1439 // Record position before possible IC call. 1434 // Record position before possible IC call.
1440 SetSourcePosition(proxy->position()); 1435 SetSourcePosition(proxy->position());
1441 Variable* var = proxy->var(); 1436 Variable* var = proxy->var();
1442 1437
1443 // Three cases: global variables, lookup variables, and all other types of 1438 // Three cases: global variables, lookup variables, and all other types of
1444 // variables. 1439 // variables.
1445 switch (var->location()) { 1440 switch (var->location()) {
1446 case Variable::UNALLOCATED: { 1441 case Variable::UNALLOCATED: {
1447 Comment cmnt(masm_, "[ Global variable"); 1442 Comment cmnt(masm_, "Global variable");
1448 // Use inline caching. Variable name is passed in rcx and the global 1443 // Use inline caching. Variable name is passed in rcx and the global
1449 // object on the stack. 1444 // object on the stack.
1450 __ Move(rcx, var->name()); 1445 __ Move(rcx, var->name());
1451 __ movp(rax, GlobalObjectOperand()); 1446 __ movp(rax, GlobalObjectOperand());
1452 CallLoadIC(CONTEXTUAL); 1447 CallLoadIC(CONTEXTUAL);
1453 context()->Plug(rax); 1448 context()->Plug(rax);
1454 break; 1449 break;
1455 } 1450 }
1456 1451
1457 case Variable::PARAMETER: 1452 case Variable::PARAMETER:
1458 case Variable::LOCAL: 1453 case Variable::LOCAL:
1459 case Variable::CONTEXT: { 1454 case Variable::CONTEXT: {
1460 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot" 1455 Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
1461 : "[ Stack slot");
1462 if (var->binding_needs_init()) { 1456 if (var->binding_needs_init()) {
1463 // var->scope() may be NULL when the proxy is located in eval code and 1457 // var->scope() may be NULL when the proxy is located in eval code and
1464 // refers to a potential outside binding. Currently those bindings are 1458 // refers to a potential outside binding. Currently those bindings are
1465 // always looked up dynamically, i.e. in that case 1459 // always looked up dynamically, i.e. in that case
1466 // var->location() == LOOKUP. 1460 // var->location() == LOOKUP.
1467 // always holds. 1461 // always holds.
1468 ASSERT(var->scope() != NULL); 1462 ASSERT(var->scope() != NULL);
1469 1463
1470 // Check if the binding really needs an initialization check. The check 1464 // Check if the binding really needs an initialization check. The check
1471 // can be skipped in the following situation: we have a LET or CONST 1465 // can be skipped in the following situation: we have a LET or CONST
(...skipping 41 matching lines...)
1513 __ bind(&done); 1507 __ bind(&done);
1514 context()->Plug(rax); 1508 context()->Plug(rax);
1515 break; 1509 break;
1516 } 1510 }
1517 } 1511 }
1518 context()->Plug(var); 1512 context()->Plug(var);
1519 break; 1513 break;
1520 } 1514 }
1521 1515
1522 case Variable::LOOKUP: { 1516 case Variable::LOOKUP: {
1523 Comment cmnt(masm_, "[ Lookup slot");
1524 Label done, slow; 1517 Label done, slow;
1525 // Generate code for loading from variables potentially shadowed 1518 // Generate code for loading from variables potentially shadowed
1526 // by eval-introduced variables. 1519 // by eval-introduced variables.
1527 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); 1520 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1528 __ bind(&slow); 1521 __ bind(&slow);
1522 Comment cmnt(masm_, "Lookup slot");
1529 __ push(rsi); // Context. 1523 __ push(rsi); // Context.
1530 __ Push(var->name()); 1524 __ Push(var->name());
1531 __ CallRuntime(Runtime::kLoadContextSlot, 2); 1525 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1532 __ bind(&done); 1526 __ bind(&done);
1533 context()->Plug(rax); 1527 context()->Plug(rax);
1534 break; 1528 break;
1535 } 1529 }
1536 } 1530 }
1537 } 1531 }
1538 1532
(...skipping 118 matching lines...)
1657 UNREACHABLE(); 1651 UNREACHABLE();
1658 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1652 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1659 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); 1653 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1660 // Fall through. 1654 // Fall through.
1661 case ObjectLiteral::Property::COMPUTED: 1655 case ObjectLiteral::Property::COMPUTED:
1662 if (key->value()->IsInternalizedString()) { 1656 if (key->value()->IsInternalizedString()) {
1663 if (property->emit_store()) { 1657 if (property->emit_store()) {
1664 VisitForAccumulatorValue(value); 1658 VisitForAccumulatorValue(value);
1665 __ Move(rcx, key->value()); 1659 __ Move(rcx, key->value());
1666 __ movp(rdx, Operand(rsp, 0)); 1660 __ movp(rdx, Operand(rsp, 0));
1667 CallStoreIC(key->LiteralFeedbackId()); 1661 CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId());
1668 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1662 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1669 } else { 1663 } else {
1670 VisitForEffect(value); 1664 VisitForEffect(value);
1671 } 1665 }
1672 break; 1666 break;
1673 } 1667 }
1674 __ push(Operand(rsp, 0)); // Duplicate receiver. 1668 __ push(Operand(rsp, 0)); // Duplicate receiver.
1675 VisitForStackValue(key); 1669 VisitForStackValue(key);
1676 VisitForStackValue(value); 1670 VisitForStackValue(value);
1677 if (property->emit_store()) { 1671 if (property->emit_store()) {
(...skipping 394 matching lines...)
2072 __ LoadRoot(rcx, Heap::knext_stringRootIndex); // "next" 2066 __ LoadRoot(rcx, Heap::knext_stringRootIndex); // "next"
2073 __ push(rcx); 2067 __ push(rcx);
2074 __ push(Operand(rsp, 2 * kPointerSize)); // iter 2068 __ push(Operand(rsp, 2 * kPointerSize)); // iter
2075 __ push(rax); // received 2069 __ push(rax); // received
2076 2070
2077 // result = receiver[f](arg); 2071 // result = receiver[f](arg);
2078 __ bind(&l_call); 2072 __ bind(&l_call);
2079 __ movp(rdx, Operand(rsp, kPointerSize)); 2073 __ movp(rdx, Operand(rsp, kPointerSize));
2080 __ movp(rax, Operand(rsp, 2 * kPointerSize)); 2074 __ movp(rax, Operand(rsp, 2 * kPointerSize));
2081 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 2075 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2082 CallIC(ic, TypeFeedbackId::None()); 2076 CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None());
2083 __ movp(rdi, rax); 2077 __ movp(rdi, rax);
2084 __ movp(Operand(rsp, 2 * kPointerSize), rdi); 2078 __ movp(Operand(rsp, 2 * kPointerSize), rdi);
2085 CallFunctionStub stub(1, CALL_AS_METHOD); 2079 CallFunctionStub stub(1, CALL_AS_METHOD);
2086 __ CallStub(&stub); 2080 __ CallStub(&stub);
2087 2081
2088 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2082 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2089 __ Drop(1); // The function is still on the stack; drop it. 2083 __ Drop(1); // The function is still on the stack; drop it.
2090 2084
2091 // if (!result.done) goto l_try; 2085 // if (!result.done) goto l_try;
2092 __ bind(&l_loop); 2086 __ bind(&l_loop);
(...skipping 170 matching lines...)
2263 SetSourcePosition(prop->position()); 2257 SetSourcePosition(prop->position());
2264 Literal* key = prop->key()->AsLiteral(); 2258 Literal* key = prop->key()->AsLiteral();
2265 __ Move(rcx, key->value()); 2259 __ Move(rcx, key->value());
2266 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId()); 2260 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2267 } 2261 }
2268 2262
2269 2263
2270 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 2264 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2271 SetSourcePosition(prop->position()); 2265 SetSourcePosition(prop->position());
2272 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 2266 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2273 CallIC(ic, prop->PropertyFeedbackId()); 2267 CallIC(ic, NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2274 } 2268 }
2275 2269
2276 2270
2277 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 2271 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2278 Token::Value op, 2272 Token::Value op,
2279 OverwriteMode mode, 2273 OverwriteMode mode,
2280 Expression* left, 2274 Expression* left,
2281 Expression* right) { 2275 Expression* right) {
2282 // Do combined smi check of the operands. Left operand is on the 2276 // Do combined smi check of the operands. Left operand is on the
2283 // stack (popped into rdx). Right operand is in rax but moved into 2277 // stack (popped into rdx). Right operand is in rax but moved into
2284 // rcx to make the shifts easier. 2278 // rcx to make the shifts easier.
2285 Label done, stub_call, smi_case; 2279 Label done, stub_call, smi_case;
2286 __ pop(rdx); 2280 __ pop(rdx);
2287 __ movp(rcx, rax); 2281 __ movp(rcx, rax);
2288 __ or_(rax, rdx); 2282 __ or_(rax, rdx);
2289 JumpPatchSite patch_site(masm_); 2283 JumpPatchSite patch_site(masm_);
2290 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear); 2284 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2291 2285
2292 __ bind(&stub_call); 2286 __ bind(&stub_call);
2293 __ movp(rax, rcx); 2287 __ movp(rax, rcx);
2294 BinaryOpICStub stub(op, mode); 2288 BinaryOpICStub stub(op, mode);
2295 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2289 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
2290 expr->BinaryOperationFeedbackId());
2296 patch_site.EmitPatchInfo(); 2291 patch_site.EmitPatchInfo();
2297 __ jmp(&done, Label::kNear); 2292 __ jmp(&done, Label::kNear);
2298 2293
2299 __ bind(&smi_case); 2294 __ bind(&smi_case);
2300 switch (op) { 2295 switch (op) {
2301 case Token::SAR: 2296 case Token::SAR:
2302 __ SmiShiftArithmeticRight(rax, rdx, rcx); 2297 __ SmiShiftArithmeticRight(rax, rdx, rcx);
2303 break; 2298 break;
2304 case Token::SHL: 2299 case Token::SHL:
2305 __ SmiShiftLeft(rax, rdx, rcx); 2300 __ SmiShiftLeft(rax, rdx, rcx);
(...skipping 28 matching lines...)
2334 context()->Plug(rax); 2329 context()->Plug(rax);
2335 } 2330 }
2336 2331
2337 2332
2338 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2333 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2339 Token::Value op, 2334 Token::Value op,
2340 OverwriteMode mode) { 2335 OverwriteMode mode) {
2341 __ pop(rdx); 2336 __ pop(rdx);
2342 BinaryOpICStub stub(op, mode); 2337 BinaryOpICStub stub(op, mode);
2343 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2338 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2344 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2339 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
2340 expr->BinaryOperationFeedbackId());
2345 patch_site.EmitPatchInfo(); 2341 patch_site.EmitPatchInfo();
2346 context()->Plug(rax); 2342 context()->Plug(rax);
2347 } 2343 }
2348 2344
2349 2345
2350 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2346 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2351 // Invalid left-hand sides are rewritten by the parser to have a 'throw 2347 // Invalid left-hand sides are rewritten by the parser to have a 'throw
2352 // ReferenceError' on the left-hand side. 2348 // ReferenceError' on the left-hand side.
2353 if (!expr->IsValidLeftHandSide()) { 2349 if (!expr->IsValidLeftHandSide()) {
2354 VisitForEffect(expr); 2350 VisitForEffect(expr);
(...skipping 17 matching lines...)
2372 EffectContext context(this); 2368 EffectContext context(this);
2373 EmitVariableAssignment(var, Token::ASSIGN); 2369 EmitVariableAssignment(var, Token::ASSIGN);
2374 break; 2370 break;
2375 } 2371 }
2376 case NAMED_PROPERTY: { 2372 case NAMED_PROPERTY: {
2377 __ push(rax); // Preserve value. 2373 __ push(rax); // Preserve value.
2378 VisitForAccumulatorValue(prop->obj()); 2374 VisitForAccumulatorValue(prop->obj());
2379 __ movp(rdx, rax); 2375 __ movp(rdx, rax);
2380 __ pop(rax); // Restore value. 2376 __ pop(rax); // Restore value.
2381 __ Move(rcx, prop->key()->AsLiteral()->value()); 2377 __ Move(rcx, prop->key()->AsLiteral()->value());
2382 CallStoreIC(); 2378 CallStoreIC(NOT_CONTEXTUAL);
2383 break; 2379 break;
2384 } 2380 }
2385 case KEYED_PROPERTY: { 2381 case KEYED_PROPERTY: {
2386 __ push(rax); // Preserve value. 2382 __ push(rax); // Preserve value.
2387 VisitForStackValue(prop->obj()); 2383 VisitForStackValue(prop->obj());
2388 VisitForAccumulatorValue(prop->key()); 2384 VisitForAccumulatorValue(prop->key());
2389 __ movp(rcx, rax); 2385 __ movp(rcx, rax);
2390 __ pop(rdx); 2386 __ pop(rdx);
2391 __ pop(rax); // Restore value. 2387 __ pop(rax); // Restore value.
2392 Handle<Code> ic = is_classic_mode() 2388 Handle<Code> ic = is_classic_mode()
2393 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2389 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2394 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2390 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2395 CallIC(ic); 2391 CallIC(ic);
2396 break; 2392 break;
2397 } 2393 }
2398 } 2394 }
2399 context()->Plug(rax); 2395 context()->Plug(rax);
2400 } 2396 }
2401 2397
2402 2398
2403 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2404 Variable* var, MemOperand location) {
2405 __ movp(location, rax);
2406 if (var->IsContextSlot()) {
2407 __ movp(rdx, rax);
2408 __ RecordWriteContextSlot(
2409 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2410 }
2411 }
2412
2413
2414 void FullCodeGenerator::EmitCallStoreContextSlot(
2415 Handle<String> name, LanguageMode mode) {
2416 __ push(rax); // Value.
2417 __ push(rsi); // Context.
2418 __ Push(name);
2419 __ Push(Smi::FromInt(mode));
2420 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2421 }
2422
2423
2424 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2399 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2425 Token::Value op) { 2400 Token::Value op) {
2426 if (var->IsUnallocated()) { 2401 if (var->IsUnallocated()) {
2427 // Global var, const, or let. 2402 // Global var, const, or let.
2428 __ Move(rcx, var->name()); 2403 __ Move(rcx, var->name());
2429 __ movp(rdx, GlobalObjectOperand()); 2404 __ movp(rdx, GlobalObjectOperand());
2430 CallStoreIC(); 2405 CallStoreIC(CONTEXTUAL);
2431
2432 } else if (op == Token::INIT_CONST) { 2406 } else if (op == Token::INIT_CONST) {
2433 // Const initializers need a write barrier. 2407 // Const initializers need a write barrier.
2434 ASSERT(!var->IsParameter()); // No const parameters. 2408 ASSERT(!var->IsParameter()); // No const parameters.
2435 if (var->IsLookupSlot()) { 2409 if (var->IsStackLocal()) {
2410 Label skip;
2411 __ movp(rdx, StackOperand(var));
2412 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2413 __ j(not_equal, &skip);
2414 __ movp(StackOperand(var), rax);
2415 __ bind(&skip);
2416 } else {
2417 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2418 // Like var declarations, const declarations are hoisted to function
2419 // scope. However, unlike var initializers, const initializers are
2420 // able to drill a hole to that function context, even from inside a
2421 // 'with' context. We thus bypass the normal static scope lookup for
2422 // var->IsContextSlot().
2436 __ push(rax); 2423 __ push(rax);
2437 __ push(rsi); 2424 __ push(rsi);
2438 __ Push(var->name()); 2425 __ Push(var->name());
2439 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 2426 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2440 } else {
2441 ASSERT(var->IsStackLocal() || var->IsContextSlot());
2442 Label skip;
2443 MemOperand location = VarOperand(var, rcx);
2444 __ movp(rdx, location);
2445 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2446 __ j(not_equal, &skip);
2447 EmitStoreToStackLocalOrContextSlot(var, location);
2448 __ bind(&skip);
2449 } 2427 }
2450 2428
2451 } else if (var->mode() == LET && op != Token::INIT_LET) { 2429 } else if (var->mode() == LET && op != Token::INIT_LET) {
2452 // Non-initializing assignment to let variable needs a write barrier. 2430 // Non-initializing assignment to let variable needs a write barrier.
2453 if (var->IsLookupSlot()) { 2431 if (var->IsLookupSlot()) {
2454 EmitCallStoreContextSlot(var->name(), language_mode()); 2432 __ push(rax); // Value.
2433 __ push(rsi); // Context.
2434 __ Push(var->name());
2435 __ Push(Smi::FromInt(language_mode()));
2436 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2455 } else { 2437 } else {
2456 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2438 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2457 Label assign; 2439 Label assign;
2458 MemOperand location = VarOperand(var, rcx); 2440 MemOperand location = VarOperand(var, rcx);
2459 __ movp(rdx, location); 2441 __ movp(rdx, location);
2460 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 2442 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2461 __ j(not_equal, &assign, Label::kNear); 2443 __ j(not_equal, &assign, Label::kNear);
2462 __ Push(var->name()); 2444 __ Push(var->name());
2463 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2445 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2464 __ bind(&assign); 2446 __ bind(&assign);
2465 EmitStoreToStackLocalOrContextSlot(var, location); 2447 __ movp(location, rax);
2448 if (var->IsContextSlot()) {
2449 __ movp(rdx, rax);
2450 __ RecordWriteContextSlot(
2451 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2452 }
2466 } 2453 }
2467 2454
2468 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { 2455 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2469 // Assignment to var or initializing assignment to let/const 2456 // Assignment to var or initializing assignment to let/const
2470 // in harmony mode. 2457 // in harmony mode.
2471 if (var->IsLookupSlot()) { 2458 if (var->IsStackAllocated() || var->IsContextSlot()) {
2472 EmitCallStoreContextSlot(var->name(), language_mode());
2473 } else {
2474 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2475 MemOperand location = VarOperand(var, rcx); 2459 MemOperand location = VarOperand(var, rcx);
2476 if (generate_debug_code_ && op == Token::INIT_LET) { 2460 if (generate_debug_code_ && op == Token::INIT_LET) {
2477 // Check for an uninitialized let binding. 2461 // Check for an uninitialized let binding.
2478 __ movp(rdx, location); 2462 __ movp(rdx, location);
2479 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 2463 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2480 __ Check(equal, kLetBindingReInitialization); 2464 __ Check(equal, kLetBindingReInitialization);
2481 } 2465 }
2482 EmitStoreToStackLocalOrContextSlot(var, location); 2466 // Perform the assignment.
2467 __ movp(location, rax);
2468 if (var->IsContextSlot()) {
2469 __ movp(rdx, rax);
2470 __ RecordWriteContextSlot(
2471 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2472 }
2473 } else {
2474 ASSERT(var->IsLookupSlot());
2475 __ push(rax); // Value.
2476 __ push(rsi); // Context.
2477 __ Push(var->name());
2478 __ Push(Smi::FromInt(language_mode()));
2479 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2483 } 2480 }
2484 } 2481 }
2485 // Non-initializing assignments to consts are ignored. 2482 // Non-initializing assignments to consts are ignored.
2486 } 2483 }
2487 2484
2488 2485
2489 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2486 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2490 // Assignment to a property, using a named store IC. 2487 // Assignment to a property, using a named store IC.
2491 Property* prop = expr->target()->AsProperty(); 2488 Property* prop = expr->target()->AsProperty();
2492 ASSERT(prop != NULL); 2489 ASSERT(prop != NULL);
2493 ASSERT(prop->key()->AsLiteral() != NULL); 2490 ASSERT(prop->key()->AsLiteral() != NULL);
2494 2491
2495 // Record source code position before IC call. 2492 // Record source code position before IC call.
2496 SetSourcePosition(expr->position()); 2493 SetSourcePosition(expr->position());
2497 __ Move(rcx, prop->key()->AsLiteral()->value()); 2494 __ Move(rcx, prop->key()->AsLiteral()->value());
2498 __ pop(rdx); 2495 __ pop(rdx);
2499 CallStoreIC(expr->AssignmentFeedbackId()); 2496 CallStoreIC(NOT_CONTEXTUAL, expr->AssignmentFeedbackId());
2500 2497
2501 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2498 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2502 context()->Plug(rax); 2499 context()->Plug(rax);
2503 } 2500 }
2504 2501
2505 2502
2506 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2503 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2507 // Assignment to a property, using a keyed store IC. 2504 // Assignment to a property, using a keyed store IC.
2508 2505
2509 __ pop(rcx); 2506 __ pop(rcx);
2510 __ pop(rdx); 2507 __ pop(rdx);
2511 // Record source code position before IC call. 2508 // Record source code position before IC call.
2512 SetSourcePosition(expr->position()); 2509 SetSourcePosition(expr->position());
2513 Handle<Code> ic = is_classic_mode() 2510 Handle<Code> ic = is_classic_mode()
2514 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2511 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2515 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2512 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2516 CallIC(ic, expr->AssignmentFeedbackId()); 2513 CallIC(ic, NOT_CONTEXTUAL, expr->AssignmentFeedbackId());
2517 2514
2518 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2515 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2519 context()->Plug(rax); 2516 context()->Plug(rax);
2520 } 2517 }
2521 2518
2522 2519
2523 void FullCodeGenerator::VisitProperty(Property* expr) { 2520 void FullCodeGenerator::VisitProperty(Property* expr) {
2524 Comment cmnt(masm_, "[ Property"); 2521 Comment cmnt(masm_, "[ Property");
2525 Expression* key = expr->key(); 2522 Expression* key = expr->key();
2526 2523
2527 if (key->IsPropertyName()) { 2524 if (key->IsPropertyName()) {
2528 VisitForAccumulatorValue(expr->obj()); 2525 VisitForAccumulatorValue(expr->obj());
2529 EmitNamedPropertyLoad(expr); 2526 EmitNamedPropertyLoad(expr);
2530 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2527 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2531 context()->Plug(rax); 2528 context()->Plug(rax);
2532 } else { 2529 } else {
2533 VisitForStackValue(expr->obj()); 2530 VisitForStackValue(expr->obj());
2534 VisitForAccumulatorValue(expr->key()); 2531 VisitForAccumulatorValue(expr->key());
2535 __ pop(rdx); 2532 __ pop(rdx);
2536 EmitKeyedPropertyLoad(expr); 2533 EmitKeyedPropertyLoad(expr);
2537 context()->Plug(rax); 2534 context()->Plug(rax);
2538 } 2535 }
2539 } 2536 }
2540 2537
2541 2538
2542 void FullCodeGenerator::CallIC(Handle<Code> code, 2539 void FullCodeGenerator::CallIC(Handle<Code> code,
2540 ContextualMode mode,
2543 TypeFeedbackId ast_id) { 2541 TypeFeedbackId ast_id) {
2544 ic_total_count_++; 2542 ic_total_count_++;
2543 ASSERT(mode != CONTEXTUAL || ast_id.IsNone());
2545 __ call(code, RelocInfo::CODE_TARGET, ast_id); 2544 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2546 } 2545 }
2547 2546
2548 2547
2549 // Code common for calls using the IC. 2548 // Code common for calls using the IC.
2550 void FullCodeGenerator::EmitCallWithIC(Call* expr) { 2549 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2551 Expression* callee = expr->expression(); 2550 Expression* callee = expr->expression();
2552 ZoneList<Expression*>* args = expr->arguments(); 2551 ZoneList<Expression*>* args = expr->arguments();
2553 int arg_count = args->length(); 2552 int arg_count = args->length();
2554 2553
(...skipping 89 matching lines...)
2644 int arg_count = args->length(); 2643 int arg_count = args->length();
2645 { PreservePositionScope scope(masm()->positions_recorder()); 2644 { PreservePositionScope scope(masm()->positions_recorder());
2646 for (int i = 0; i < arg_count; i++) { 2645 for (int i = 0; i < arg_count; i++) {
2647 VisitForStackValue(args->at(i)); 2646 VisitForStackValue(args->at(i));
2648 } 2647 }
2649 } 2648 }
2650 // Record source position for debugger. 2649 // Record source position for debugger.
2651 SetSourcePosition(expr->position()); 2650 SetSourcePosition(expr->position());
2652 2651
2653 Handle<Object> uninitialized = 2652 Handle<Object> uninitialized =
2654 TypeFeedbackInfo::UninitializedSentinel(isolate()); 2653 TypeFeedbackCells::UninitializedSentinel(isolate());
2655 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); 2654 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2656 __ Move(rbx, FeedbackVector()); 2655 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2657 __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot())); 2656 __ Move(rbx, cell);
2658 2657
2659 // Record call targets in unoptimized code. 2658 // Record call targets in unoptimized code.
2660 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); 2659 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2661 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); 2660 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2662 __ CallStub(&stub); 2661 __ CallStub(&stub, expr->CallFeedbackId());
2663 RecordJSReturnSite(expr); 2662 RecordJSReturnSite(expr);
2664 // Restore context register. 2663 // Restore context register.
2665 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2664 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2666 // Discard the function left on TOS. 2665 // Discard the function left on TOS.
2667 context()->DropAndPlug(1, rax); 2666 context()->DropAndPlug(1, rax);
2668 } 2667 }
2669 2668
2670 2669
2671 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2670 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2672 // Push copy of the first argument or undefined if it doesn't exist. 2671 // Push copy of the first argument or undefined if it doesn't exist.
(...skipping 151 matching lines...)
2824 // Call the construct call builtin that handles allocation and 2823 // Call the construct call builtin that handles allocation and
2825 // constructor invocation. 2824 // constructor invocation.
2826 SetSourcePosition(expr->position()); 2825 SetSourcePosition(expr->position());
2827 2826
2828 // Load function and argument count into rdi and rax. 2827 // Load function and argument count into rdi and rax.
2829 __ Set(rax, arg_count); 2828 __ Set(rax, arg_count);
2830 __ movp(rdi, Operand(rsp, arg_count * kPointerSize)); 2829 __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2831 2830
2832 // Record call targets in unoptimized code, but not in the snapshot. 2831 // Record call targets in unoptimized code, but not in the snapshot.
2833 Handle<Object> uninitialized = 2832 Handle<Object> uninitialized =
2834 TypeFeedbackInfo::UninitializedSentinel(isolate()); 2833 TypeFeedbackCells::UninitializedSentinel(isolate());
2835 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); 2834 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2836 __ Move(rbx, FeedbackVector()); 2835 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2837 __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot())); 2836 __ Move(rbx, cell);
2838 2837
2839 CallConstructStub stub(RECORD_CALL_TARGET); 2838 CallConstructStub stub(RECORD_CALL_TARGET);
2840 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); 2839 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2841 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2840 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2842 context()->Plug(rax); 2841 context()->Plug(rax);
2843 } 2842 }
2844 2843
2845 2844
2846 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2845 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2847 ZoneList<Expression*>* args = expr->arguments(); 2846 ZoneList<Expression*>* args = expr->arguments();
(...skipping 1555 matching lines...)
4403 } 4402 }
4404 4403
4405 // Record position before stub call. 4404 // Record position before stub call.
4406 SetSourcePosition(expr->position()); 4405 SetSourcePosition(expr->position());
4407 4406
4408 // Call stub for +1/-1. 4407 // Call stub for +1/-1.
4409 __ bind(&stub_call); 4408 __ bind(&stub_call);
4410 __ movp(rdx, rax); 4409 __ movp(rdx, rax);
4411 __ Move(rax, Smi::FromInt(1)); 4410 __ Move(rax, Smi::FromInt(1));
4412 BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE); 4411 BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
4413 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); 4412 CallIC(stub.GetCode(isolate()),
4413 NOT_CONTEXTUAL,
4414 expr->CountBinOpFeedbackId());
4414 patch_site.EmitPatchInfo(); 4415 patch_site.EmitPatchInfo();
4415 __ bind(&done); 4416 __ bind(&done);
4416 4417
4417 // Store the value returned in rax. 4418 // Store the value returned in rax.
4418 switch (assign_type) { 4419 switch (assign_type) {
4419 case VARIABLE: 4420 case VARIABLE:
4420 if (expr->is_postfix()) { 4421 if (expr->is_postfix()) {
4421 // Perform the assignment as if via '='. 4422 // Perform the assignment as if via '='.
4422 { EffectContext context(this); 4423 { EffectContext context(this);
4423 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4424 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
(...skipping 10 matching lines...)
4434 // Perform the assignment as if via '='. 4435 // Perform the assignment as if via '='.
4435 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4436 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4436 Token::ASSIGN); 4437 Token::ASSIGN);
4437 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4438 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4438 context()->Plug(rax); 4439 context()->Plug(rax);
4439 } 4440 }
4440 break; 4441 break;
4441 case NAMED_PROPERTY: { 4442 case NAMED_PROPERTY: {
4442 __ Move(rcx, prop->key()->AsLiteral()->value()); 4443 __ Move(rcx, prop->key()->AsLiteral()->value());
4443 __ pop(rdx); 4444 __ pop(rdx);
4444 CallStoreIC(expr->CountStoreFeedbackId()); 4445 CallStoreIC(NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
4445 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4446 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4446 if (expr->is_postfix()) { 4447 if (expr->is_postfix()) {
4447 if (!context()->IsEffect()) { 4448 if (!context()->IsEffect()) {
4448 context()->PlugTOS(); 4449 context()->PlugTOS();
4449 } 4450 }
4450 } else { 4451 } else {
4451 context()->Plug(rax); 4452 context()->Plug(rax);
4452 } 4453 }
4453 break; 4454 break;
4454 } 4455 }
4455 case KEYED_PROPERTY: { 4456 case KEYED_PROPERTY: {
4456 __ pop(rcx); 4457 __ pop(rcx);
4457 __ pop(rdx); 4458 __ pop(rdx);
4458 Handle<Code> ic = is_classic_mode() 4459 Handle<Code> ic = is_classic_mode()
4459 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4460 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4460 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4461 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4461 CallIC(ic, expr->CountStoreFeedbackId()); 4462 CallIC(ic, NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
4462 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4463 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4463 if (expr->is_postfix()) { 4464 if (expr->is_postfix()) {
4464 if (!context()->IsEffect()) { 4465 if (!context()->IsEffect()) {
4465 context()->PlugTOS(); 4466 context()->PlugTOS();
4466 } 4467 }
4467 } else { 4468 } else {
4468 context()->Plug(rax); 4469 context()->Plug(rax);
4469 } 4470 }
4470 break; 4471 break;
4471 } 4472 }
4472 } 4473 }
4473 } 4474 }
4474 4475
4475 4476
4476 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4477 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4477 VariableProxy* proxy = expr->AsVariableProxy(); 4478 VariableProxy* proxy = expr->AsVariableProxy();
4478 ASSERT(!context()->IsEffect()); 4479 ASSERT(!context()->IsEffect());
4479 ASSERT(!context()->IsTest()); 4480 ASSERT(!context()->IsTest());
4480 4481
4481 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4482 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4482 Comment cmnt(masm_, "[ Global variable"); 4483 Comment cmnt(masm_, "Global variable");
4483 __ Move(rcx, proxy->name()); 4484 __ Move(rcx, proxy->name());
4484 __ movp(rax, GlobalObjectOperand()); 4485 __ movp(rax, GlobalObjectOperand());
4485 // Use a regular load, not a contextual load, to avoid a reference 4486 // Use a regular load, not a contextual load, to avoid a reference
4486 // error. 4487 // error.
4487 CallLoadIC(NOT_CONTEXTUAL); 4488 CallLoadIC(NOT_CONTEXTUAL);
4488 PrepareForBailout(expr, TOS_REG); 4489 PrepareForBailout(expr, TOS_REG);
4489 context()->Plug(rax); 4490 context()->Plug(rax);
4490 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4491 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4491 Comment cmnt(masm_, "[ Lookup slot");
4492 Label done, slow; 4492 Label done, slow;
4493 4493
4494 // Generate code for loading from variables potentially shadowed 4494 // Generate code for loading from variables potentially shadowed
4495 // by eval-introduced variables. 4495 // by eval-introduced variables.
4496 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4496 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4497 4497
4498 __ bind(&slow); 4498 __ bind(&slow);
4499 __ push(rsi); 4499 __ push(rsi);
4500 __ Push(proxy->name()); 4500 __ Push(proxy->name());
4501 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 4501 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
(...skipping 138 matching lines...)
4640 __ or_(rcx, rax); 4640 __ or_(rcx, rax);
4641 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); 4641 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4642 __ cmpq(rdx, rax); 4642 __ cmpq(rdx, rax);
4643 Split(cc, if_true, if_false, NULL); 4643 Split(cc, if_true, if_false, NULL);
4644 __ bind(&slow_case); 4644 __ bind(&slow_case);
4645 } 4645 }
4646 4646
4647 // Record position and call the compare IC. 4647 // Record position and call the compare IC.
4648 SetSourcePosition(expr->position()); 4648 SetSourcePosition(expr->position());
4649 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 4649 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4650 CallIC(ic, expr->CompareOperationFeedbackId()); 4650 CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
4651 patch_site.EmitPatchInfo(); 4651 patch_site.EmitPatchInfo();
4652 4652
4653 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4653 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4654 __ testq(rax, rax); 4654 __ testq(rax, rax);
4655 Split(cc, if_true, if_false, fall_through); 4655 Split(cc, if_true, if_false, fall_through);
4656 } 4656 }
4657 } 4657 }
4658 4658
4659 // Convert the result of the comparison into one expected for this 4659 // Convert the result of the comparison into one expected for this
4660 // expression's context. 4660 // expression's context.
(...skipping 14 matching lines...)
4675 VisitForAccumulatorValue(sub_expr); 4675 VisitForAccumulatorValue(sub_expr);
4676 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4676 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4677 if (expr->op() == Token::EQ_STRICT) { 4677 if (expr->op() == Token::EQ_STRICT) {
4678 Heap::RootListIndex nil_value = nil == kNullValue ? 4678 Heap::RootListIndex nil_value = nil == kNullValue ?
4679 Heap::kNullValueRootIndex : 4679 Heap::kNullValueRootIndex :
4680 Heap::kUndefinedValueRootIndex; 4680 Heap::kUndefinedValueRootIndex;
4681 __ CompareRoot(rax, nil_value); 4681 __ CompareRoot(rax, nil_value);
4682 Split(equal, if_true, if_false, fall_through); 4682 Split(equal, if_true, if_false, fall_through);
4683 } else { 4683 } else {
4684 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); 4684 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4685 CallIC(ic, expr->CompareOperationFeedbackId()); 4685 CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
4686 __ testq(rax, rax); 4686 __ testq(rax, rax);
4687 Split(not_zero, if_true, if_false, fall_through); 4687 Split(not_zero, if_true, if_false, fall_through);
4688 } 4688 }
4689 context()->Plug(if_true, if_false); 4689 context()->Plug(if_true, if_false);
4690 } 4690 }
4691 4691
4692 4692
4693 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 4693 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4694 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 4694 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4695 context()->Plug(rax); 4695 context()->Plug(rax);
(...skipping 211 matching lines...)
4907 4907
4908 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), 4908 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4909 Assembler::target_address_at(call_target_address)); 4909 Assembler::target_address_at(call_target_address));
4910 return OSR_AFTER_STACK_CHECK; 4910 return OSR_AFTER_STACK_CHECK;
4911 } 4911 }
4912 4912
4913 4913
4914 } } // namespace v8::internal 4914 } } // namespace v8::internal
4915 4915
4916 #endif // V8_TARGET_ARCH_X64 4916 #endif // V8_TARGET_ARCH_X64