Chromium Code Reviews

Side by Side Diff: src/arm/full-codegen-arm.cc

Issue 181453002: Reset trunk to 3.24.35.4 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 6 years, 9 months ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 112 matching lines...)
123 // o fp: our caller's frame pointer 123 // o fp: our caller's frame pointer
124 // o sp: stack pointer 124 // o sp: stack pointer
125 // o lr: return address 125 // o lr: return address
126 // 126 //
127 // The function builds a JS frame. Please see JavaScriptFrameConstants in 127 // The function builds a JS frame. Please see JavaScriptFrameConstants in
128 // frames-arm.h for its layout. 128 // frames-arm.h for its layout.
129 void FullCodeGenerator::Generate() { 129 void FullCodeGenerator::Generate() {
130 CompilationInfo* info = info_; 130 CompilationInfo* info = info_;
131 handler_table_ = 131 handler_table_ =
132 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 132 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
133
134 InitializeFeedbackVector();
135
136 profiling_counter_ = isolate()->factory()->NewCell( 133 profiling_counter_ = isolate()->factory()->NewCell(
137 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 134 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
138 SetFunctionPosition(function()); 135 SetFunctionPosition(function());
139 Comment cmnt(masm_, "[ function compiled by full code generator"); 136 Comment cmnt(masm_, "[ function compiled by full code generator");
140 137
141 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 138 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
142 139
143 #ifdef DEBUG 140 #ifdef DEBUG
144 if (strlen(FLAG_stop_at) > 0 && 141 if (strlen(FLAG_stop_at) > 0 &&
145 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 142 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
(...skipping 518 matching lines...)
664 if (false_label_ != fall_through_) __ b(false_label_); 661 if (false_label_ != fall_through_) __ b(false_label_);
665 } 662 }
666 } 663 }
667 664
668 665
669 void FullCodeGenerator::DoTest(Expression* condition, 666 void FullCodeGenerator::DoTest(Expression* condition,
670 Label* if_true, 667 Label* if_true,
671 Label* if_false, 668 Label* if_false,
672 Label* fall_through) { 669 Label* fall_through) {
673 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); 670 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
674 CallIC(ic, condition->test_id()); 671 CallIC(ic, NOT_CONTEXTUAL, condition->test_id());
675 __ tst(result_register(), result_register()); 672 __ tst(result_register(), result_register());
676 Split(ne, if_true, if_false, fall_through); 673 Split(ne, if_true, if_false, fall_through);
677 } 674 }
678 675
679 676
680 void FullCodeGenerator::Split(Condition cond, 677 void FullCodeGenerator::Split(Condition cond,
681 Label* if_true, 678 Label* if_true,
682 Label* if_false, 679 Label* if_false,
683 Label* fall_through) { 680 Label* fall_through) {
684 if (if_false == fall_through) { 681 if (if_false == fall_through) {
(...skipping 340 matching lines...)
1025 __ cmp(r1, r0); 1022 __ cmp(r1, r0);
1026 __ b(ne, &next_test); 1023 __ b(ne, &next_test);
1027 __ Drop(1); // Switch value is no longer needed. 1024 __ Drop(1); // Switch value is no longer needed.
1028 __ b(clause->body_target()); 1025 __ b(clause->body_target());
1029 __ bind(&slow_case); 1026 __ bind(&slow_case);
1030 } 1027 }
1031 1028
1032 // Record position before stub call for type feedback. 1029 // Record position before stub call for type feedback.
1033 SetSourcePosition(clause->position()); 1030 SetSourcePosition(clause->position());
1034 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT); 1031 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1035 CallIC(ic, clause->CompareId()); 1032 CallIC(ic, NOT_CONTEXTUAL, clause->CompareId());
1036 patch_site.EmitPatchInfo(); 1033 patch_site.EmitPatchInfo();
1037 1034
1038 Label skip; 1035 Label skip;
1039 __ b(&skip); 1036 __ b(&skip);
1040 PrepareForBailout(clause, TOS_REG); 1037 PrepareForBailout(clause, TOS_REG);
1041 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1038 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1042 __ cmp(r0, ip); 1039 __ cmp(r0, ip);
1043 __ b(ne, &next_test); 1040 __ b(ne, &next_test);
1044 __ Drop(1); 1041 __ Drop(1);
1045 __ jmp(clause->body_target()); 1042 __ jmp(clause->body_target());
(...skipping 24 matching lines...)
1070 VisitStatements(clause->statements()); 1067 VisitStatements(clause->statements());
1071 } 1068 }
1072 1069
1073 __ bind(nested_statement.break_label()); 1070 __ bind(nested_statement.break_label());
1074 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1071 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1075 } 1072 }
1076 1073
1077 1074
1078 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 1075 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1079 Comment cmnt(masm_, "[ ForInStatement"); 1076 Comment cmnt(masm_, "[ ForInStatement");
1080 int slot = stmt->ForInFeedbackSlot();
1081 SetStatementPosition(stmt); 1077 SetStatementPosition(stmt);
1082 1078
1083 Label loop, exit; 1079 Label loop, exit;
1084 ForIn loop_statement(this, stmt); 1080 ForIn loop_statement(this, stmt);
1085 increment_loop_depth(); 1081 increment_loop_depth();
1086 1082
1087 // Get the object to enumerate over. If the object is null or undefined, skip 1083 // Get the object to enumerate over. If the object is null or undefined, skip
1088 // over the loop. See ECMA-262 version 5, section 12.6.4. 1084 // over the loop. See ECMA-262 version 5, section 12.6.4.
1089 VisitForAccumulatorValue(stmt->enumerable()); 1085 VisitForAccumulatorValue(stmt->enumerable());
1090 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1086 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
(...skipping 69 matching lines...)
1160 __ jmp(&loop); 1156 __ jmp(&loop);
1161 1157
1162 __ bind(&no_descriptors); 1158 __ bind(&no_descriptors);
1163 __ Drop(1); 1159 __ Drop(1);
1164 __ jmp(&exit); 1160 __ jmp(&exit);
1165 1161
1166 // We got a fixed array in register r0. Iterate through that. 1162 // We got a fixed array in register r0. Iterate through that.
1167 Label non_proxy; 1163 Label non_proxy;
1168 __ bind(&fixed_array); 1164 __ bind(&fixed_array);
1169 1165
1170 Handle<Object> feedback = Handle<Object>( 1166 Handle<Cell> cell = isolate()->factory()->NewCell(
1171 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), 1167 Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
1172 isolate()); 1168 isolate()));
1173 StoreFeedbackVectorSlot(slot, feedback); 1169 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
1174 __ Move(r1, FeedbackVector()); 1170 __ Move(r1, cell);
1175 __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); 1171 __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
1176 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot))); 1172 __ str(r2, FieldMemOperand(r1, Cell::kValueOffset));
1177 1173
1178 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check 1174 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1179 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object 1175 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1180 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1176 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1181 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); 1177 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1182 __ b(gt, &non_proxy); 1178 __ b(gt, &non_proxy);
1183 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy 1179 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1184 __ bind(&non_proxy); 1180 __ bind(&non_proxy);
1185 __ Push(r1, r0); // Smi and array 1181 __ Push(r1, r0); // Smi and array
1186 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset)); 1182 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
(...skipping 288 matching lines...)
1475 1471
1476 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1472 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1477 // Record position before possible IC call. 1473 // Record position before possible IC call.
1478 SetSourcePosition(proxy->position()); 1474 SetSourcePosition(proxy->position());
1479 Variable* var = proxy->var(); 1475 Variable* var = proxy->var();
1480 1476
1481 // Three cases: global variables, lookup variables, and all other types of 1477 // Three cases: global variables, lookup variables, and all other types of
1482 // variables. 1478 // variables.
1483 switch (var->location()) { 1479 switch (var->location()) {
1484 case Variable::UNALLOCATED: { 1480 case Variable::UNALLOCATED: {
1485 Comment cmnt(masm_, "[ Global variable"); 1481 Comment cmnt(masm_, "Global variable");
1486 // Use inline caching. Variable name is passed in r2 and the global 1482 // Use inline caching. Variable name is passed in r2 and the global
1487 // object (receiver) in r0. 1483 // object (receiver) in r0.
1488 __ ldr(r0, GlobalObjectOperand()); 1484 __ ldr(r0, GlobalObjectOperand());
1489 __ mov(r2, Operand(var->name())); 1485 __ mov(r2, Operand(var->name()));
1490 CallLoadIC(CONTEXTUAL); 1486 CallLoadIC(CONTEXTUAL);
1491 context()->Plug(r0); 1487 context()->Plug(r0);
1492 break; 1488 break;
1493 } 1489 }
1494 1490
1495 case Variable::PARAMETER: 1491 case Variable::PARAMETER:
1496 case Variable::LOCAL: 1492 case Variable::LOCAL:
1497 case Variable::CONTEXT: { 1493 case Variable::CONTEXT: {
1498 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" 1494 Comment cmnt(masm_, var->IsContextSlot()
1499 : "[ Stack variable"); 1495 ? "Context variable"
1496 : "Stack variable");
1500 if (var->binding_needs_init()) { 1497 if (var->binding_needs_init()) {
1501 // var->scope() may be NULL when the proxy is located in eval code and 1498 // var->scope() may be NULL when the proxy is located in eval code and
1502 // refers to a potential outside binding. Currently those bindings are 1499 // refers to a potential outside binding. Currently those bindings are
1503 // always looked up dynamically, i.e. in that case 1500 // always looked up dynamically, i.e. in that case
1504 // var->location() == LOOKUP. 1501 // var->location() == LOOKUP.
1505 // always holds. 1502 // always holds.
1506 ASSERT(var->scope() != NULL); 1503 ASSERT(var->scope() != NULL);
1507 1504
1508 // Check if the binding really needs an initialization check. The check 1505 // Check if the binding really needs an initialization check. The check
1509 // can be skipped in the following situation: we have a LET or CONST 1506 // can be skipped in the following situation: we have a LET or CONST
(...skipping 42 matching lines...)
1552 } 1549 }
1553 context()->Plug(r0); 1550 context()->Plug(r0);
1554 break; 1551 break;
1555 } 1552 }
1556 } 1553 }
1557 context()->Plug(var); 1554 context()->Plug(var);
1558 break; 1555 break;
1559 } 1556 }
1560 1557
1561 case Variable::LOOKUP: { 1558 case Variable::LOOKUP: {
1562 Comment cmnt(masm_, "[ Lookup variable");
1563 Label done, slow; 1559 Label done, slow;
1564 // Generate code for loading from variables potentially shadowed 1560 // Generate code for loading from variables potentially shadowed
1565 // by eval-introduced variables. 1561 // by eval-introduced variables.
1566 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); 1562 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1567 __ bind(&slow); 1563 __ bind(&slow);
1564 Comment cmnt(masm_, "Lookup variable");
1568 __ mov(r1, Operand(var->name())); 1565 __ mov(r1, Operand(var->name()));
1569 __ Push(cp, r1); // Context and name. 1566 __ Push(cp, r1); // Context and name.
1570 __ CallRuntime(Runtime::kLoadContextSlot, 2); 1567 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1571 __ bind(&done); 1568 __ bind(&done);
1572 context()->Plug(r0); 1569 context()->Plug(r0);
1573 } 1570 }
1574 } 1571 }
1575 } 1572 }
1576 1573
1577 1574
(...skipping 110 matching lines...)
1688 UNREACHABLE(); 1685 UNREACHABLE();
1689 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1686 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1690 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1687 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1691 // Fall through. 1688 // Fall through.
1692 case ObjectLiteral::Property::COMPUTED: 1689 case ObjectLiteral::Property::COMPUTED:
1693 if (key->value()->IsInternalizedString()) { 1690 if (key->value()->IsInternalizedString()) {
1694 if (property->emit_store()) { 1691 if (property->emit_store()) {
1695 VisitForAccumulatorValue(value); 1692 VisitForAccumulatorValue(value);
1696 __ mov(r2, Operand(key->value())); 1693 __ mov(r2, Operand(key->value()));
1697 __ ldr(r1, MemOperand(sp)); 1694 __ ldr(r1, MemOperand(sp));
1698 CallStoreIC(key->LiteralFeedbackId()); 1695 CallStoreIC(NOT_CONTEXTUAL, key->LiteralFeedbackId());
1699 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1696 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1700 } else { 1697 } else {
1701 VisitForEffect(value); 1698 VisitForEffect(value);
1702 } 1699 }
1703 break; 1700 break;
1704 } 1701 }
1705 // Duplicate receiver on stack. 1702 // Duplicate receiver on stack.
1706 __ ldr(r0, MemOperand(sp)); 1703 __ ldr(r0, MemOperand(sp));
1707 __ push(r0); 1704 __ push(r0);
1708 VisitForStackValue(key); 1705 VisitForStackValue(key);
(...skipping 381 matching lines...)
2090 __ bind(&l_next); 2087 __ bind(&l_next);
2091 __ LoadRoot(r2, Heap::knext_stringRootIndex); // "next" 2088 __ LoadRoot(r2, Heap::knext_stringRootIndex); // "next"
2092 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2089 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2093 __ Push(r2, r3, r0); // "next", iter, received 2090 __ Push(r2, r3, r0); // "next", iter, received
2094 2091
2095 // result = receiver[f](arg); 2092 // result = receiver[f](arg);
2096 __ bind(&l_call); 2093 __ bind(&l_call);
2097 __ ldr(r1, MemOperand(sp, kPointerSize)); 2094 __ ldr(r1, MemOperand(sp, kPointerSize));
2098 __ ldr(r0, MemOperand(sp, 2 * kPointerSize)); 2095 __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
2099 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 2096 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2100 CallIC(ic, TypeFeedbackId::None()); 2097 CallIC(ic, NOT_CONTEXTUAL, TypeFeedbackId::None());
2101 __ mov(r1, r0); 2098 __ mov(r1, r0);
2102 __ str(r1, MemOperand(sp, 2 * kPointerSize)); 2099 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2103 CallFunctionStub stub(1, CALL_AS_METHOD); 2100 CallFunctionStub stub(1, CALL_AS_METHOD);
2104 __ CallStub(&stub); 2101 __ CallStub(&stub);
2105 2102
2106 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2103 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2107 __ Drop(1); // The function is still on the stack; drop it. 2104 __ Drop(1); // The function is still on the stack; drop it.
2108 2105
2109 // if (!result.done) goto l_try; 2106 // if (!result.done) goto l_try;
2110 __ bind(&l_loop); 2107 __ bind(&l_loop);
(...skipping 176 matching lines...)
2287 __ mov(r2, Operand(key->value())); 2284 __ mov(r2, Operand(key->value()));
2288 // Call load IC. It has arguments receiver and property name r0 and r2. 2285 // Call load IC. It has arguments receiver and property name r0 and r2.
2289 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId()); 2286 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2290 } 2287 }
2291 2288
2292 2289
2293 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 2290 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2294 SetSourcePosition(prop->position()); 2291 SetSourcePosition(prop->position());
2295 // Call keyed load IC. It has arguments key and receiver in r0 and r1. 2292 // Call keyed load IC. It has arguments key and receiver in r0 and r1.
2296 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 2293 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2297 CallIC(ic, prop->PropertyFeedbackId()); 2294 CallIC(ic, NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2298 } 2295 }
2299 2296
2300 2297
2301 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 2298 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2302 Token::Value op, 2299 Token::Value op,
2303 OverwriteMode mode, 2300 OverwriteMode mode,
2304 Expression* left_expr, 2301 Expression* left_expr,
2305 Expression* right_expr) { 2302 Expression* right_expr) {
2306 Label done, smi_case, stub_call; 2303 Label done, smi_case, stub_call;
2307 2304
2308 Register scratch1 = r2; 2305 Register scratch1 = r2;
2309 Register scratch2 = r3; 2306 Register scratch2 = r3;
2310 2307
2311 // Get the arguments. 2308 // Get the arguments.
2312 Register left = r1; 2309 Register left = r1;
2313 Register right = r0; 2310 Register right = r0;
2314 __ pop(left); 2311 __ pop(left);
2315 2312
2316 // Perform combined smi check on both operands. 2313 // Perform combined smi check on both operands.
2317 __ orr(scratch1, left, Operand(right)); 2314 __ orr(scratch1, left, Operand(right));
2318 STATIC_ASSERT(kSmiTag == 0); 2315 STATIC_ASSERT(kSmiTag == 0);
2319 JumpPatchSite patch_site(masm_); 2316 JumpPatchSite patch_site(masm_);
2320 patch_site.EmitJumpIfSmi(scratch1, &smi_case); 2317 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2321 2318
2322 __ bind(&stub_call); 2319 __ bind(&stub_call);
2323 BinaryOpICStub stub(op, mode); 2320 BinaryOpICStub stub(op, mode);
2324 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2321 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
2322 expr->BinaryOperationFeedbackId());
2325 patch_site.EmitPatchInfo(); 2323 patch_site.EmitPatchInfo();
2326 __ jmp(&done); 2324 __ jmp(&done);
2327 2325
2328 __ bind(&smi_case); 2326 __ bind(&smi_case);
2329 // Smi case. This code works the same way as the smi-smi case in the type 2327 // Smi case. This code works the same way as the smi-smi case in the type
2330 // recording binary operation stub, see 2328 // recording binary operation stub, see
2331 switch (op) { 2329 switch (op) {
2332 case Token::SAR: 2330 case Token::SAR:
2333 __ GetLeastBitsFromSmi(scratch1, right, 5); 2331 __ GetLeastBitsFromSmi(scratch1, right, 5);
2334 __ mov(right, Operand(left, ASR, scratch1)); 2332 __ mov(right, Operand(left, ASR, scratch1));
(...skipping 56 matching lines...)
2391 context()->Plug(r0); 2389 context()->Plug(r0);
2392 } 2390 }
2393 2391
2394 2392
2395 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2393 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2396 Token::Value op, 2394 Token::Value op,
2397 OverwriteMode mode) { 2395 OverwriteMode mode) {
2398 __ pop(r1); 2396 __ pop(r1);
2399 BinaryOpICStub stub(op, mode); 2397 BinaryOpICStub stub(op, mode);
2400 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2398 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2401 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2399 CallIC(stub.GetCode(isolate()), NOT_CONTEXTUAL,
2400 expr->BinaryOperationFeedbackId());
2402 patch_site.EmitPatchInfo(); 2401 patch_site.EmitPatchInfo();
2403 context()->Plug(r0); 2402 context()->Plug(r0);
2404 } 2403 }
2405 2404
2406 2405
2407 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2406 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2408 // Invalid left-hand sides are rewritten by the parser to have a 'throw 2407 // Invalid left-hand sides are rewritten by the parser to have a 'throw
2409 // ReferenceError' on the left-hand side. 2408 // ReferenceError' on the left-hand side.
2410 if (!expr->IsValidLeftHandSide()) { 2409 if (!expr->IsValidLeftHandSide()) {
2411 VisitForEffect(expr); 2410 VisitForEffect(expr);
(...skipping 17 matching lines...)
2429 EffectContext context(this); 2428 EffectContext context(this);
2430 EmitVariableAssignment(var, Token::ASSIGN); 2429 EmitVariableAssignment(var, Token::ASSIGN);
2431 break; 2430 break;
2432 } 2431 }
2433 case NAMED_PROPERTY: { 2432 case NAMED_PROPERTY: {
2434 __ push(r0); // Preserve value. 2433 __ push(r0); // Preserve value.
2435 VisitForAccumulatorValue(prop->obj()); 2434 VisitForAccumulatorValue(prop->obj());
2436 __ mov(r1, r0); 2435 __ mov(r1, r0);
2437 __ pop(r0); // Restore value. 2436 __ pop(r0); // Restore value.
2438 __ mov(r2, Operand(prop->key()->AsLiteral()->value())); 2437 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2439 CallStoreIC(); 2438 CallStoreIC(NOT_CONTEXTUAL);
2440 break; 2439 break;
2441 } 2440 }
2442 case KEYED_PROPERTY: { 2441 case KEYED_PROPERTY: {
2443 __ push(r0); // Preserve value. 2442 __ push(r0); // Preserve value.
2444 VisitForStackValue(prop->obj()); 2443 VisitForStackValue(prop->obj());
2445 VisitForAccumulatorValue(prop->key()); 2444 VisitForAccumulatorValue(prop->key());
2446 __ mov(r1, r0); 2445 __ mov(r1, r0);
2447 __ Pop(r0, r2); // r0 = restored value. 2446 __ Pop(r0, r2); // r0 = restored value.
2448 Handle<Code> ic = is_classic_mode() 2447 Handle<Code> ic = is_classic_mode()
2449 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2448 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2450 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2449 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2451 CallIC(ic); 2450 CallIC(ic);
2452 break; 2451 break;
2453 } 2452 }
2454 } 2453 }
2455 context()->Plug(r0); 2454 context()->Plug(r0);
2456 } 2455 }
2457 2456
2458 2457
2459 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2460 Variable* var, MemOperand location) {
2461 __ str(result_register(), location);
2462 if (var->IsContextSlot()) {
2463 // RecordWrite may destroy all its register arguments.
2464 __ mov(r3, result_register());
2465 int offset = Context::SlotOffset(var->index());
2466 __ RecordWriteContextSlot(
2467 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2468 }
2469 }
2470
2471
2472 void FullCodeGenerator::EmitCallStoreContextSlot(
2473 Handle<String> name, LanguageMode mode) {
2474 __ push(r0); // Value.
2475 __ mov(r1, Operand(name));
2476 __ mov(r0, Operand(Smi::FromInt(mode)));
2477 __ Push(cp, r1, r0); // Context, name, strict mode.
2478 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2479 }
2480
2481
2482 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2458 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2483 Token::Value op) { 2459 Token::Value op) {
2484 if (var->IsUnallocated()) { 2460 if (var->IsUnallocated()) {
2485 // Global var, const, or let. 2461 // Global var, const, or let.
2486 __ mov(r2, Operand(var->name())); 2462 __ mov(r2, Operand(var->name()));
2487 __ ldr(r1, GlobalObjectOperand()); 2463 __ ldr(r1, GlobalObjectOperand());
2488 CallStoreIC(); 2464 CallStoreIC(CONTEXTUAL);
2489
2490 } else if (op == Token::INIT_CONST) { 2465 } else if (op == Token::INIT_CONST) {
2491 // Const initializers need a write barrier. 2466 // Const initializers need a write barrier.
2492 ASSERT(!var->IsParameter()); // No const parameters. 2467 ASSERT(!var->IsParameter()); // No const parameters.
2493 if (var->IsLookupSlot()) { 2468 if (var->IsStackLocal()) {
2469 __ ldr(r1, StackOperand(var));
2470 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
2471 __ str(result_register(), StackOperand(var), eq);
2472 } else {
2473 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2474 // Like var declarations, const declarations are hoisted to function
2475 // scope. However, unlike var initializers, const initializers are
2476 // able to drill a hole to that function context, even from inside a
2477 // 'with' context. We thus bypass the normal static scope lookup for
2478 // var->IsContextSlot().
2494 __ push(r0); 2479 __ push(r0);
2495 __ mov(r0, Operand(var->name())); 2480 __ mov(r0, Operand(var->name()));
2496 __ Push(cp, r0); // Context and name. 2481 __ Push(cp, r0); // Context and name.
2497 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 2482 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2498 } else {
2499 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2500 Label skip;
2501 MemOperand location = VarOperand(var, r1);
2502 __ ldr(r2, location);
2503 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2504 __ b(ne, &skip);
2505 EmitStoreToStackLocalOrContextSlot(var, location);
2506 __ bind(&skip);
2507 } 2483 }
2508 2484
2509 } else if (var->mode() == LET && op != Token::INIT_LET) { 2485 } else if (var->mode() == LET && op != Token::INIT_LET) {
2510 // Non-initializing assignment to let variable needs a write barrier. 2486 // Non-initializing assignment to let variable needs a write barrier.
2511 if (var->IsLookupSlot()) { 2487 if (var->IsLookupSlot()) {
2512 EmitCallStoreContextSlot(var->name(), language_mode()); 2488 __ push(r0); // Value.
2489 __ mov(r1, Operand(var->name()));
2490 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2491 __ Push(cp, r1, r0); // Context, name, strict mode.
2492 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2513 } else { 2493 } else {
2514 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2494 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2515 Label assign; 2495 Label assign;
2516 MemOperand location = VarOperand(var, r1); 2496 MemOperand location = VarOperand(var, r1);
2517 __ ldr(r3, location); 2497 __ ldr(r3, location);
2518 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 2498 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2519 __ b(ne, &assign); 2499 __ b(ne, &assign);
2520 __ mov(r3, Operand(var->name())); 2500 __ mov(r3, Operand(var->name()));
2521 __ push(r3); 2501 __ push(r3);
2522 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2502 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2523 // Perform the assignment. 2503 // Perform the assignment.
2524 __ bind(&assign); 2504 __ bind(&assign);
2525 EmitStoreToStackLocalOrContextSlot(var, location); 2505 __ str(result_register(), location);
2506 if (var->IsContextSlot()) {
2507 // RecordWrite may destroy all its register arguments.
2508 __ mov(r3, result_register());
2509 int offset = Context::SlotOffset(var->index());
2510 __ RecordWriteContextSlot(
2511 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2512 }
2526 } 2513 }
2527 2514
2528 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { 2515 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2529 // Assignment to var or initializing assignment to let/const 2516 // Assignment to var or initializing assignment to let/const
2530 // in harmony mode. 2517 // in harmony mode.
2531 if (var->IsLookupSlot()) { 2518 if (var->IsStackAllocated() || var->IsContextSlot()) {
2532 EmitCallStoreContextSlot(var->name(), language_mode());
2533 } else {
2534 ASSERT((var->IsStackAllocated() || var->IsContextSlot()));
2535 MemOperand location = VarOperand(var, r1); 2519 MemOperand location = VarOperand(var, r1);
2536 if (generate_debug_code_ && op == Token::INIT_LET) { 2520 if (generate_debug_code_ && op == Token::INIT_LET) {
2537 // Check for an uninitialized let binding. 2521 // Check for an uninitialized let binding.
2538 __ ldr(r2, location); 2522 __ ldr(r2, location);
2539 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2523 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2540 __ Check(eq, kLetBindingReInitialization); 2524 __ Check(eq, kLetBindingReInitialization);
2541 } 2525 }
2542 EmitStoreToStackLocalOrContextSlot(var, location); 2526 // Perform the assignment.
2527 __ str(r0, location);
2528 if (var->IsContextSlot()) {
2529 __ mov(r3, r0);
2530 int offset = Context::SlotOffset(var->index());
2531 __ RecordWriteContextSlot(
2532 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2533 }
2534 } else {
2535 ASSERT(var->IsLookupSlot());
2536 __ push(r0); // Value.
2537 __ mov(r1, Operand(var->name()));
2538 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2539 __ Push(cp, r1, r0); // Context, name, strict mode.
2540 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2543 } 2541 }
2544 } 2542 }
2545 // Non-initializing assignments to consts are ignored. 2543 // Non-initializing assignments to consts are ignored.
2546 } 2544 }
2547 2545
2548 2546
2549 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2547 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2550 // Assignment to a property, using a named store IC. 2548 // Assignment to a property, using a named store IC.
2551 Property* prop = expr->target()->AsProperty(); 2549 Property* prop = expr->target()->AsProperty();
2552 ASSERT(prop != NULL); 2550 ASSERT(prop != NULL);
2553 ASSERT(prop->key()->AsLiteral() != NULL); 2551 ASSERT(prop->key()->AsLiteral() != NULL);
2554 2552
2555 // Record source code position before IC call. 2553 // Record source code position before IC call.
2556 SetSourcePosition(expr->position()); 2554 SetSourcePosition(expr->position());
2557 __ mov(r2, Operand(prop->key()->AsLiteral()->value())); 2555 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
2558 __ pop(r1); 2556 __ pop(r1);
2559 2557
2560 CallStoreIC(expr->AssignmentFeedbackId()); 2558 CallStoreIC(NOT_CONTEXTUAL, expr->AssignmentFeedbackId());
2561 2559
2562 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2560 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2563 context()->Plug(r0); 2561 context()->Plug(r0);
2564 } 2562 }
2565 2563
2566 2564
2567 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2565 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2568 // Assignment to a property, using a keyed store IC. 2566 // Assignment to a property, using a keyed store IC.
2569 2567
2570 // Record source code position before IC call. 2568 // Record source code position before IC call.
2571 SetSourcePosition(expr->position()); 2569 SetSourcePosition(expr->position());
2572 __ Pop(r2, r1); // r1 = key. 2570 __ Pop(r2, r1); // r1 = key.
2573 2571
2574 Handle<Code> ic = is_classic_mode() 2572 Handle<Code> ic = is_classic_mode()
2575 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2573 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2576 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2574 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2577 CallIC(ic, expr->AssignmentFeedbackId()); 2575 CallIC(ic, NOT_CONTEXTUAL, expr->AssignmentFeedbackId());
2578 2576
2579 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2577 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2580 context()->Plug(r0); 2578 context()->Plug(r0);
2581 } 2579 }
2582 2580
2583 2581
2584 void FullCodeGenerator::VisitProperty(Property* expr) { 2582 void FullCodeGenerator::VisitProperty(Property* expr) {
2585 Comment cmnt(masm_, "[ Property"); 2583 Comment cmnt(masm_, "[ Property");
2586 Expression* key = expr->key(); 2584 Expression* key = expr->key();
2587 2585
2588 if (key->IsPropertyName()) { 2586 if (key->IsPropertyName()) {
2589 VisitForAccumulatorValue(expr->obj()); 2587 VisitForAccumulatorValue(expr->obj());
2590 EmitNamedPropertyLoad(expr); 2588 EmitNamedPropertyLoad(expr);
2591 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2589 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2592 context()->Plug(r0); 2590 context()->Plug(r0);
2593 } else { 2591 } else {
2594 VisitForStackValue(expr->obj()); 2592 VisitForStackValue(expr->obj());
2595 VisitForAccumulatorValue(expr->key()); 2593 VisitForAccumulatorValue(expr->key());
2596 __ pop(r1); 2594 __ pop(r1);
2597 EmitKeyedPropertyLoad(expr); 2595 EmitKeyedPropertyLoad(expr);
2598 context()->Plug(r0); 2596 context()->Plug(r0);
2599 } 2597 }
2600 } 2598 }
2601 2599
2602 2600
2603 void FullCodeGenerator::CallIC(Handle<Code> code, 2601 void FullCodeGenerator::CallIC(Handle<Code> code,
2602 ContextualMode mode,
2604 TypeFeedbackId ast_id) { 2603 TypeFeedbackId ast_id) {
2605 ic_total_count_++; 2604 ic_total_count_++;
2606 // All calls must have a predictable size in full-codegen code to ensure that 2605 // All calls must have a predictable size in full-codegen code to ensure that
2607 // the debugger can patch them correctly. 2606 // the debugger can patch them correctly.
2607 ASSERT(mode != CONTEXTUAL || ast_id.IsNone());
2608 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al, 2608 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2609 NEVER_INLINE_TARGET_ADDRESS); 2609 NEVER_INLINE_TARGET_ADDRESS);
2610 } 2610 }
2611 2611
2612 2612
2613 // Code common for calls using the IC. 2613 // Code common for calls using the IC.
2614 void FullCodeGenerator::EmitCallWithIC(Call* expr) { 2614 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2615 Expression* callee = expr->expression(); 2615 Expression* callee = expr->expression();
2616 ZoneList<Expression*>* args = expr->arguments(); 2616 ZoneList<Expression*>* args = expr->arguments();
2617 int arg_count = args->length(); 2617 int arg_count = args->length();
(...skipping 91 matching lines...)
2709 int arg_count = args->length(); 2709 int arg_count = args->length();
2710 { PreservePositionScope scope(masm()->positions_recorder()); 2710 { PreservePositionScope scope(masm()->positions_recorder());
2711 for (int i = 0; i < arg_count; i++) { 2711 for (int i = 0; i < arg_count; i++) {
2712 VisitForStackValue(args->at(i)); 2712 VisitForStackValue(args->at(i));
2713 } 2713 }
2714 } 2714 }
2715 // Record source position for debugger. 2715 // Record source position for debugger.
2716 SetSourcePosition(expr->position()); 2716 SetSourcePosition(expr->position());
2717 2717
2718 Handle<Object> uninitialized = 2718 Handle<Object> uninitialized =
2719 TypeFeedbackInfo::UninitializedSentinel(isolate()); 2719 TypeFeedbackCells::UninitializedSentinel(isolate());
2720 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); 2720 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2721 __ Move(r2, FeedbackVector()); 2721 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2722 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); 2722 __ mov(r2, Operand(cell));
2723 2723
2724 // Record call targets in unoptimized code. 2724 // Record call targets in unoptimized code.
2725 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); 2725 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2726 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2726 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2727 __ CallStub(&stub); 2727 __ CallStub(&stub, expr->CallFeedbackId());
2728 RecordJSReturnSite(expr); 2728 RecordJSReturnSite(expr);
2729 // Restore context register. 2729 // Restore context register.
2730 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2730 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2731 context()->DropAndPlug(1, r0); 2731 context()->DropAndPlug(1, r0);
2732 } 2732 }
2733 2733
2734 2734
2735 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2735 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2736 // r4: copy of the first argument or undefined if it doesn't exist. 2736 // r4: copy of the first argument or undefined if it doesn't exist.
2737 if (arg_count > 0) { 2737 if (arg_count > 0) {
(...skipping 160 matching lines...)
2898 // Call the construct call builtin that handles allocation and 2898 // Call the construct call builtin that handles allocation and
2899 // constructor invocation. 2899 // constructor invocation.
2900 SetSourcePosition(expr->position()); 2900 SetSourcePosition(expr->position());
2901 2901
2902 // Load function and argument count into r1 and r0. 2902 // Load function and argument count into r1 and r0.
2903 __ mov(r0, Operand(arg_count)); 2903 __ mov(r0, Operand(arg_count));
2904 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 2904 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2905 2905
2906 // Record call targets in unoptimized code. 2906 // Record call targets in unoptimized code.
2907 Handle<Object> uninitialized = 2907 Handle<Object> uninitialized =
2908 TypeFeedbackInfo::UninitializedSentinel(isolate()); 2908 TypeFeedbackCells::UninitializedSentinel(isolate());
2909 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); 2909 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2910 __ Move(r2, FeedbackVector()); 2910 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2911 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); 2911 __ mov(r2, Operand(cell));
2912 2912
2913 CallConstructStub stub(RECORD_CALL_TARGET); 2913 CallConstructStub stub(RECORD_CALL_TARGET);
2914 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); 2914 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2915 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2915 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2916 context()->Plug(r0); 2916 context()->Plug(r0);
2917 } 2917 }
2918 2918
2919 2919
2920 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2920 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2921 ZoneList<Expression*>* args = expr->arguments(); 2921 ZoneList<Expression*>* args = expr->arguments();
(...skipping 1482 matching lines...)
4404 4404
4405 4405
4406 __ bind(&stub_call); 4406 __ bind(&stub_call);
4407 __ mov(r1, r0); 4407 __ mov(r1, r0);
4408 __ mov(r0, Operand(Smi::FromInt(count_value))); 4408 __ mov(r0, Operand(Smi::FromInt(count_value)));
4409 4409
4410 // Record position before stub call. 4410 // Record position before stub call.
4411 SetSourcePosition(expr->position()); 4411 SetSourcePosition(expr->position());
4412 4412
4413 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); 4413 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
4414 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); 4414 CallIC(stub.GetCode(isolate()),
4415 NOT_CONTEXTUAL,
4416 expr->CountBinOpFeedbackId());
4415 patch_site.EmitPatchInfo(); 4417 patch_site.EmitPatchInfo();
4416 __ bind(&done); 4418 __ bind(&done);
4417 4419
4418 // Store the value returned in r0. 4420 // Store the value returned in r0.
4419 switch (assign_type) { 4421 switch (assign_type) {
4420 case VARIABLE: 4422 case VARIABLE:
4421 if (expr->is_postfix()) { 4423 if (expr->is_postfix()) {
4422 { EffectContext context(this); 4424 { EffectContext context(this);
4423 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4425 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4424 Token::ASSIGN); 4426 Token::ASSIGN);
4425 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4427 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4426 context.Plug(r0); 4428 context.Plug(r0);
4427 } 4429 }
4428 // For all contexts except EffectConstant We have the result on 4430 // For all contexts except EffectConstant We have the result on
4429 // top of the stack. 4431 // top of the stack.
4430 if (!context()->IsEffect()) { 4432 if (!context()->IsEffect()) {
4431 context()->PlugTOS(); 4433 context()->PlugTOS();
4432 } 4434 }
4433 } else { 4435 } else {
4434 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4436 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4435 Token::ASSIGN); 4437 Token::ASSIGN);
4436 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4438 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4437 context()->Plug(r0); 4439 context()->Plug(r0);
4438 } 4440 }
4439 break; 4441 break;
4440 case NAMED_PROPERTY: { 4442 case NAMED_PROPERTY: {
4441 __ mov(r2, Operand(prop->key()->AsLiteral()->value())); 4443 __ mov(r2, Operand(prop->key()->AsLiteral()->value()));
4442 __ pop(r1); 4444 __ pop(r1);
4443 CallStoreIC(expr->CountStoreFeedbackId()); 4445 CallStoreIC(NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
4444 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4446 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4445 if (expr->is_postfix()) { 4447 if (expr->is_postfix()) {
4446 if (!context()->IsEffect()) { 4448 if (!context()->IsEffect()) {
4447 context()->PlugTOS(); 4449 context()->PlugTOS();
4448 } 4450 }
4449 } else { 4451 } else {
4450 context()->Plug(r0); 4452 context()->Plug(r0);
4451 } 4453 }
4452 break; 4454 break;
4453 } 4455 }
4454 case KEYED_PROPERTY: { 4456 case KEYED_PROPERTY: {
4455 __ Pop(r2, r1); // r1 = key. r2 = receiver. 4457 __ Pop(r2, r1); // r1 = key. r2 = receiver.
4456 Handle<Code> ic = is_classic_mode() 4458 Handle<Code> ic = is_classic_mode()
4457 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4459 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4458 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4460 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4459 CallIC(ic, expr->CountStoreFeedbackId()); 4461 CallIC(ic, NOT_CONTEXTUAL, expr->CountStoreFeedbackId());
4460 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4462 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4461 if (expr->is_postfix()) { 4463 if (expr->is_postfix()) {
4462 if (!context()->IsEffect()) { 4464 if (!context()->IsEffect()) {
4463 context()->PlugTOS(); 4465 context()->PlugTOS();
4464 } 4466 }
4465 } else { 4467 } else {
4466 context()->Plug(r0); 4468 context()->Plug(r0);
4467 } 4469 }
4468 break; 4470 break;
4469 } 4471 }
4470 } 4472 }
4471 } 4473 }
4472 4474
4473 4475
4474 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4476 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4475 ASSERT(!context()->IsEffect()); 4477 ASSERT(!context()->IsEffect());
4476 ASSERT(!context()->IsTest()); 4478 ASSERT(!context()->IsTest());
4477 VariableProxy* proxy = expr->AsVariableProxy(); 4479 VariableProxy* proxy = expr->AsVariableProxy();
4478 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4480 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4479 Comment cmnt(masm_, "[ Global variable"); 4481 Comment cmnt(masm_, "Global variable");
4480 __ ldr(r0, GlobalObjectOperand()); 4482 __ ldr(r0, GlobalObjectOperand());
4481 __ mov(r2, Operand(proxy->name())); 4483 __ mov(r2, Operand(proxy->name()));
4482 // Use a regular load, not a contextual load, to avoid a reference 4484 // Use a regular load, not a contextual load, to avoid a reference
4483 // error. 4485 // error.
4484 CallLoadIC(NOT_CONTEXTUAL); 4486 CallLoadIC(NOT_CONTEXTUAL);
4485 PrepareForBailout(expr, TOS_REG); 4487 PrepareForBailout(expr, TOS_REG);
4486 context()->Plug(r0); 4488 context()->Plug(r0);
4487 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4489 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4488 Comment cmnt(masm_, "[ Lookup slot");
4489 Label done, slow; 4490 Label done, slow;
4490 4491
4491 // Generate code for loading from variables potentially shadowed 4492 // Generate code for loading from variables potentially shadowed
4492 // by eval-introduced variables. 4493 // by eval-introduced variables.
4493 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4494 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4494 4495
4495 __ bind(&slow); 4496 __ bind(&slow);
4496 __ mov(r0, Operand(proxy->name())); 4497 __ mov(r0, Operand(proxy->name()));
4497 __ Push(cp, r0); 4498 __ Push(cp, r0);
4498 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 4499 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
(...skipping 141 matching lines...)
4640 __ orr(r2, r0, Operand(r1)); 4641 __ orr(r2, r0, Operand(r1));
4641 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 4642 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4642 __ cmp(r1, r0); 4643 __ cmp(r1, r0);
4643 Split(cond, if_true, if_false, NULL); 4644 Split(cond, if_true, if_false, NULL);
4644 __ bind(&slow_case); 4645 __ bind(&slow_case);
4645 } 4646 }
4646 4647
4647 // Record position and call the compare IC. 4648 // Record position and call the compare IC.
4648 SetSourcePosition(expr->position()); 4649 SetSourcePosition(expr->position());
4649 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 4650 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4650 CallIC(ic, expr->CompareOperationFeedbackId()); 4651 CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
4651 patch_site.EmitPatchInfo(); 4652 patch_site.EmitPatchInfo();
4652 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4653 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4653 __ cmp(r0, Operand::Zero()); 4654 __ cmp(r0, Operand::Zero());
4654 Split(cond, if_true, if_false, fall_through); 4655 Split(cond, if_true, if_false, fall_through);
4655 } 4656 }
4656 } 4657 }
4657 4658
4658 // Convert the result of the comparison into one expected for this 4659 // Convert the result of the comparison into one expected for this
4659 // expression's context. 4660 // expression's context.
4660 context()->Plug(if_true, if_false); 4661 context()->Plug(if_true, if_false);
(...skipping 14 matching lines...)
4675 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4676 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4676 if (expr->op() == Token::EQ_STRICT) { 4677 if (expr->op() == Token::EQ_STRICT) {
4677 Heap::RootListIndex nil_value = nil == kNullValue ? 4678 Heap::RootListIndex nil_value = nil == kNullValue ?
4678 Heap::kNullValueRootIndex : 4679 Heap::kNullValueRootIndex :
4679 Heap::kUndefinedValueRootIndex; 4680 Heap::kUndefinedValueRootIndex;
4680 __ LoadRoot(r1, nil_value); 4681 __ LoadRoot(r1, nil_value);
4681 __ cmp(r0, r1); 4682 __ cmp(r0, r1);
4682 Split(eq, if_true, if_false, fall_through); 4683 Split(eq, if_true, if_false, fall_through);
4683 } else { 4684 } else {
4684 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); 4685 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4685 CallIC(ic, expr->CompareOperationFeedbackId()); 4686 CallIC(ic, NOT_CONTEXTUAL, expr->CompareOperationFeedbackId());
4686 __ cmp(r0, Operand(0)); 4687 __ cmp(r0, Operand(0));
4687 Split(ne, if_true, if_false, fall_through); 4688 Split(ne, if_true, if_false, fall_through);
4688 } 4689 }
4689 context()->Plug(if_true, if_false); 4690 context()->Plug(if_true, if_false);
4690 } 4691 }
4691 4692
4692 4693
4693 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 4694 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4694 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4695 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4695 context()->Plug(r0); 4696 context()->Plug(r0);
(...skipping 222 matching lines...)
4918 ASSERT(Memory::uint32_at(interrupt_address_pointer) == 4919 ASSERT(Memory::uint32_at(interrupt_address_pointer) ==
4919 reinterpret_cast<uint32_t>( 4920 reinterpret_cast<uint32_t>(
4920 isolate->builtins()->OsrAfterStackCheck()->entry())); 4921 isolate->builtins()->OsrAfterStackCheck()->entry()));
4921 return OSR_AFTER_STACK_CHECK; 4922 return OSR_AFTER_STACK_CHECK;
4922 } 4923 }
4923 4924
4924 4925
4925 } } // namespace v8::internal 4926 } } // namespace v8::internal
4926 4927
4927 #endif // V8_TARGET_ARCH_ARM 4928 #endif // V8_TARGET_ARCH_ARM
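
Note for readers skimming the hunks above: the recurring difference between the two columns is that the right-hand (3.24.35.4) side passes an explicit ContextualMode to the CallIC/CallStoreIC helpers and records per-site feedback cells (RecordTypeFeedbackCell), while the left-hand side omits the mode and uses feedback-vector slots (StoreFeedbackVectorSlot, FeedbackVector()). The following is a minimal, self-contained sketch of that calling convention, not V8 code: MockCodeGenerator, the string stub argument, and the printf body are hypothetical stand-ins; only the ContextualMode/TypeFeedbackId shape and the `mode != CONTEXTUAL || ast_id.IsNone()` assertion are taken from the diff.

// Minimal sketch (assumed stand-in types, not V8): shows the explicit
// ContextualMode argument threaded through an IC-call helper, as on the
// right-hand side of this diff.
#include <cassert>
#include <cstdio>

enum ContextualMode { NOT_CONTEXTUAL, CONTEXTUAL };

struct TypeFeedbackId {
  int raw;
  static TypeFeedbackId None() { return TypeFeedbackId{-1}; }
  bool IsNone() const { return raw < 0; }
};

struct MockCodeGenerator {
  int ic_total_count_ = 0;

  // Right-hand-column shape: the call site states the contextual mode
  // explicitly, and a CONTEXTUAL IC call must not carry an AST feedback id
  // (mirrors the ASSERT added to FullCodeGenerator::CallIC in this patch).
  void CallIC(const char* stub, ContextualMode mode, TypeFeedbackId ast_id) {
    assert(mode != CONTEXTUAL || ast_id.IsNone());
    ++ic_total_count_;
    std::printf("call %s mode=%d ast_id=%d\n", stub, mode, ast_id.raw);
  }
};

int main() {
  MockCodeGenerator gen;
  // e.g. CallIC(ic, NOT_CONTEXTUAL, clause->CompareId()) in the new column.
  gen.CallIC("CompareIC", NOT_CONTEXTUAL, TypeFeedbackId{42});
  // e.g. a CONTEXTUAL load for a global variable carries no feedback id.
  gen.CallIC("LoadIC", CONTEXTUAL, TypeFeedbackId::None());
  return 0;
}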