Chromium Code Reviews

Side by Side Diff: src/mips/full-codegen-mips.cc

Issue 346413004: Remove distinction between hidden and normal runtime functions (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix natives fuzzing (created 6 years, 6 months ago)
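
In the hunks shown here the change is mechanical: each CallRuntime call site that referred to a Runtime::kHiddenXxx identifier now refers to the corresponding Runtime::kXxx identifier, with argument counts unchanged. A minimal before/after sketch of the call-site pattern, taken from the hunks below and assuming the usual V8 macro-assembler context of this file (the __ shorthand for the masm_ assembler and the MIPS register names):

    // Before this patch: "hidden" runtime functions carried a separate kHidden prefix.
    __ push(a1);
    __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);

    // After this patch: the same runtime function is referenced by its plain identifier.
    __ push(a1);
    __ CallRuntime(Runtime::kNewFunctionContext, 1);
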
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_MIPS 7 #if V8_TARGET_ARCH_MIPS
8 8
9 // Note on Mips implementation: 9 // Note on Mips implementation:
10 // 10 //
(...skipping 188 matching lines...)
199 199
200 // Possibly allocate a local context. 200 // Possibly allocate a local context.
201 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 201 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
202 if (heap_slots > 0) { 202 if (heap_slots > 0) {
203 Comment cmnt(masm_, "[ Allocate context"); 203 Comment cmnt(masm_, "[ Allocate context");
204 // Argument to NewContext is the function, which is still in a1. 204 // Argument to NewContext is the function, which is still in a1.
205 bool need_write_barrier = true; 205 bool need_write_barrier = true;
206 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { 206 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
207 __ push(a1); 207 __ push(a1);
208 __ Push(info->scope()->GetScopeInfo()); 208 __ Push(info->scope()->GetScopeInfo());
209 __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2); 209 __ CallRuntime(Runtime::kNewGlobalContext, 2);
210 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { 210 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
211 FastNewContextStub stub(isolate(), heap_slots); 211 FastNewContextStub stub(isolate(), heap_slots);
212 __ CallStub(&stub); 212 __ CallStub(&stub);
213 // Result of FastNewContextStub is always in new space. 213 // Result of FastNewContextStub is always in new space.
214 need_write_barrier = false; 214 need_write_barrier = false;
215 } else { 215 } else {
216 __ push(a1); 216 __ push(a1);
217 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); 217 __ CallRuntime(Runtime::kNewFunctionContext, 1);
218 } 218 }
219 function_in_register = false; 219 function_in_register = false;
220 // Context is returned in v0. It replaces the context passed to us. 220 // Context is returned in v0. It replaces the context passed to us.
221 // It's saved on the stack and kept live in cp. 221 // It's saved on the stack and kept live in cp.
222 __ mov(cp, v0); 222 __ mov(cp, v0);
223 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); 223 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
224 // Copy any necessary parameters into the context. 224 // Copy any necessary parameters into the context.
225 int num_parameters = info->scope()->num_parameters(); 225 int num_parameters = info->scope()->num_parameters();
226 for (int i = 0; i < num_parameters; i++) { 226 for (int i = 0; i < num_parameters; i++) {
227 Variable* var = scope()->parameter(i); 227 Variable* var = scope()->parameter(i);
(...skipping 621 matching lines...)
849 // 'undefined') because we may have a (legal) redeclaration and we 849 // 'undefined') because we may have a (legal) redeclaration and we
850 // must not destroy the current value. 850 // must not destroy the current value.
851 if (hole_init) { 851 if (hole_init) {
852 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); 852 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
853 __ Push(cp, a2, a1, a0); 853 __ Push(cp, a2, a1, a0);
854 } else { 854 } else {
855 ASSERT(Smi::FromInt(0) == 0); 855 ASSERT(Smi::FromInt(0) == 0);
856 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value. 856 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
857 __ Push(cp, a2, a1, a0); 857 __ Push(cp, a2, a1, a0);
858 } 858 }
859 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4); 859 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
860 break; 860 break;
861 } 861 }
862 } 862 }
863 } 863 }
864 864
865 865
866 void FullCodeGenerator::VisitFunctionDeclaration( 866 void FullCodeGenerator::VisitFunctionDeclaration(
867 FunctionDeclaration* declaration) { 867 FunctionDeclaration* declaration) {
868 VariableProxy* proxy = declaration->proxy(); 868 VariableProxy* proxy = declaration->proxy();
869 Variable* variable = proxy->var(); 869 Variable* variable = proxy->var();
(...skipping 35 matching lines...)
905 break; 905 break;
906 } 906 }
907 907
908 case Variable::LOOKUP: { 908 case Variable::LOOKUP: {
909 Comment cmnt(masm_, "[ FunctionDeclaration"); 909 Comment cmnt(masm_, "[ FunctionDeclaration");
910 __ li(a2, Operand(variable->name())); 910 __ li(a2, Operand(variable->name()));
911 __ li(a1, Operand(Smi::FromInt(NONE))); 911 __ li(a1, Operand(Smi::FromInt(NONE)));
912 __ Push(cp, a2, a1); 912 __ Push(cp, a2, a1);
913 // Push initial value for function declaration. 913 // Push initial value for function declaration.
914 VisitForStackValue(declaration->fun()); 914 VisitForStackValue(declaration->fun());
915 __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4); 915 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
916 break; 916 break;
917 } 917 }
918 } 918 }
919 } 919 }
920 920
921 921
922 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 922 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
923 Variable* variable = declaration->proxy()->var(); 923 Variable* variable = declaration->proxy()->var();
924 ASSERT(variable->location() == Variable::CONTEXT); 924 ASSERT(variable->location() == Variable::CONTEXT);
925 ASSERT(variable->interface()->IsFrozen()); 925 ASSERT(variable->interface()->IsFrozen());
(...skipping 51 matching lines...)
977 // TODO(rossberg) 977 // TODO(rossberg)
978 } 978 }
979 979
980 980
981 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 981 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
982 // Call the runtime to declare the globals. 982 // Call the runtime to declare the globals.
983 // The context is the first argument. 983 // The context is the first argument.
984 __ li(a1, Operand(pairs)); 984 __ li(a1, Operand(pairs));
985 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); 985 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
986 __ Push(cp, a1, a0); 986 __ Push(cp, a1, a0);
987 __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3); 987 __ CallRuntime(Runtime::kDeclareGlobals, 3);
988 // Return value is ignored. 988 // Return value is ignored.
989 } 989 }
990 990
991 991
992 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { 992 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
993 // Call the runtime to declare the modules. 993 // Call the runtime to declare the modules.
994 __ Push(descriptions); 994 __ Push(descriptions);
995 __ CallRuntime(Runtime::kHiddenDeclareModules, 1); 995 __ CallRuntime(Runtime::kDeclareModules, 1);
996 // Return value is ignored. 996 // Return value is ignored.
997 } 997 }
998 998
999 999
1000 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 1000 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1001 Comment cmnt(masm_, "[ SwitchStatement"); 1001 Comment cmnt(masm_, "[ SwitchStatement");
1002 Breakable nested_statement(this, stmt); 1002 Breakable nested_statement(this, stmt);
1003 SetStatementPosition(stmt); 1003 SetStatementPosition(stmt);
1004 1004
1005 // Keep the switch value on the stack until a case matches. 1005 // Keep the switch value on the stack until a case matches.
(...skipping 327 matching lines...)
1333 FastNewClosureStub stub(isolate(), 1333 FastNewClosureStub stub(isolate(),
1334 info->strict_mode(), 1334 info->strict_mode(),
1335 info->is_generator()); 1335 info->is_generator());
1336 __ li(a2, Operand(info)); 1336 __ li(a2, Operand(info));
1337 __ CallStub(&stub); 1337 __ CallStub(&stub);
1338 } else { 1338 } else {
1339 __ li(a0, Operand(info)); 1339 __ li(a0, Operand(info));
1340 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex 1340 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1341 : Heap::kFalseValueRootIndex); 1341 : Heap::kFalseValueRootIndex);
1342 __ Push(cp, a0, a1); 1342 __ Push(cp, a0, a1);
1343 __ CallRuntime(Runtime::kHiddenNewClosure, 3); 1343 __ CallRuntime(Runtime::kNewClosure, 3);
1344 } 1344 }
1345 context()->Plug(v0); 1345 context()->Plug(v0);
1346 } 1346 }
1347 1347
1348 1348
1349 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1349 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1350 Comment cmnt(masm_, "[ VariableProxy"); 1350 Comment cmnt(masm_, "[ VariableProxy");
1351 EmitVariableLoad(expr); 1351 EmitVariableLoad(expr);
1352 } 1352 }
1353 1353
(...skipping 101 matching lines...)
1455 local->mode() == CONST_LEGACY) { 1455 local->mode() == CONST_LEGACY) {
1456 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 1456 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1457 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. 1457 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1458 if (local->mode() == CONST_LEGACY) { 1458 if (local->mode() == CONST_LEGACY) {
1459 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); 1459 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1460 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole. 1460 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1461 } else { // LET || CONST 1461 } else { // LET || CONST
1462 __ Branch(done, ne, at, Operand(zero_reg)); 1462 __ Branch(done, ne, at, Operand(zero_reg));
1463 __ li(a0, Operand(var->name())); 1463 __ li(a0, Operand(var->name()));
1464 __ push(a0); 1464 __ push(a0);
1465 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1); 1465 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1466 } 1466 }
1467 } 1467 }
1468 __ Branch(done); 1468 __ Branch(done);
1469 } 1469 }
1470 } 1470 }
1471 1471
1472 1472
1473 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1473 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1474 // Record position before possible IC call. 1474 // Record position before possible IC call.
1475 SetSourcePosition(proxy->position()); 1475 SetSourcePosition(proxy->position());
(...skipping 58 matching lines...)
1534 GetVar(v0, var); 1534 GetVar(v0, var);
1535 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 1535 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1536 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. 1536 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1537 if (var->mode() == LET || var->mode() == CONST) { 1537 if (var->mode() == LET || var->mode() == CONST) {
1538 // Throw a reference error when using an uninitialized let/const 1538 // Throw a reference error when using an uninitialized let/const
1539 // binding in harmony mode. 1539 // binding in harmony mode.
1540 Label done; 1540 Label done;
1541 __ Branch(&done, ne, at, Operand(zero_reg)); 1541 __ Branch(&done, ne, at, Operand(zero_reg));
1542 __ li(a0, Operand(var->name())); 1542 __ li(a0, Operand(var->name()));
1543 __ push(a0); 1543 __ push(a0);
1544 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1); 1544 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1545 __ bind(&done); 1545 __ bind(&done);
1546 } else { 1546 } else {
1547 // Uninitialized const bindings outside of harmony mode are unholed. 1547 // Uninitialized const bindings outside of harmony mode are unholed.
1548 ASSERT(var->mode() == CONST_LEGACY); 1548 ASSERT(var->mode() == CONST_LEGACY);
1549 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); 1549 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1550 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole. 1550 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1551 } 1551 }
1552 context()->Plug(v0); 1552 context()->Plug(v0);
1553 break; 1553 break;
1554 } 1554 }
1555 } 1555 }
1556 context()->Plug(var); 1556 context()->Plug(var);
1557 break; 1557 break;
1558 } 1558 }
1559 1559
1560 case Variable::LOOKUP: { 1560 case Variable::LOOKUP: {
1561 Comment cmnt(masm_, "[ Lookup variable"); 1561 Comment cmnt(masm_, "[ Lookup variable");
1562 Label done, slow; 1562 Label done, slow;
1563 // Generate code for loading from variables potentially shadowed 1563 // Generate code for loading from variables potentially shadowed
1564 // by eval-introduced variables. 1564 // by eval-introduced variables.
1565 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); 1565 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1566 __ bind(&slow); 1566 __ bind(&slow);
1567 __ li(a1, Operand(var->name())); 1567 __ li(a1, Operand(var->name()));
1568 __ Push(cp, a1); // Context and name. 1568 __ Push(cp, a1); // Context and name.
1569 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2); 1569 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1570 __ bind(&done); 1570 __ bind(&done);
1571 context()->Plug(v0); 1571 context()->Plug(v0);
1572 } 1572 }
1573 } 1573 }
1574 } 1574 }
1575 1575
1576 1576
1577 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1577 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1578 Comment cmnt(masm_, "[ RegExpLiteral"); 1578 Comment cmnt(masm_, "[ RegExpLiteral");
1579 Label materialized; 1579 Label materialized;
(...skipping 11 matching lines...)
1591 __ lw(t1, FieldMemOperand(t0, literal_offset)); 1591 __ lw(t1, FieldMemOperand(t0, literal_offset));
1592 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 1592 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1593 __ Branch(&materialized, ne, t1, Operand(at)); 1593 __ Branch(&materialized, ne, t1, Operand(at));
1594 1594
1595 // Create regexp literal using runtime function. 1595 // Create regexp literal using runtime function.
1596 // Result will be in v0. 1596 // Result will be in v0.
1597 __ li(a3, Operand(Smi::FromInt(expr->literal_index()))); 1597 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1598 __ li(a2, Operand(expr->pattern())); 1598 __ li(a2, Operand(expr->pattern()));
1599 __ li(a1, Operand(expr->flags())); 1599 __ li(a1, Operand(expr->flags()));
1600 __ Push(t0, a3, a2, a1); 1600 __ Push(t0, a3, a2, a1);
1601 __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4); 1601 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1602 __ mov(t1, v0); 1602 __ mov(t1, v0);
1603 1603
1604 __ bind(&materialized); 1604 __ bind(&materialized);
1605 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 1605 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1606 Label allocated, runtime_allocate; 1606 Label allocated, runtime_allocate;
1607 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); 1607 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1608 __ jmp(&allocated); 1608 __ jmp(&allocated);
1609 1609
1610 __ bind(&runtime_allocate); 1610 __ bind(&runtime_allocate);
1611 __ li(a0, Operand(Smi::FromInt(size))); 1611 __ li(a0, Operand(Smi::FromInt(size)));
1612 __ Push(t1, a0); 1612 __ Push(t1, a0);
1613 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1); 1613 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1614 __ pop(t1); 1614 __ pop(t1);
1615 1615
1616 __ bind(&allocated); 1616 __ bind(&allocated);
1617 1617
1618 // After this, registers are used as follows: 1618 // After this, registers are used as follows:
1619 // v0: Newly allocated regexp. 1619 // v0: Newly allocated regexp.
1620 // t1: Materialized regexp. 1620 // t1: Materialized regexp.
1621 // a2: temp. 1621 // a2: temp.
1622 __ CopyFields(v0, t1, a2.bit(), size / kPointerSize); 1622 __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
1623 context()->Plug(v0); 1623 context()->Plug(v0);
(...skipping 24 matching lines...)
1648 : ObjectLiteral::kNoFlags; 1648 : ObjectLiteral::kNoFlags;
1649 flags |= expr->has_function() 1649 flags |= expr->has_function()
1650 ? ObjectLiteral::kHasFunction 1650 ? ObjectLiteral::kHasFunction
1651 : ObjectLiteral::kNoFlags; 1651 : ObjectLiteral::kNoFlags;
1652 __ li(a0, Operand(Smi::FromInt(flags))); 1652 __ li(a0, Operand(Smi::FromInt(flags)));
1653 int properties_count = constant_properties->length() / 2; 1653 int properties_count = constant_properties->length() / 2;
1654 if (expr->may_store_doubles() || expr->depth() > 1 || 1654 if (expr->may_store_doubles() || expr->depth() > 1 ||
1655 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements || 1655 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1656 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1656 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1657 __ Push(a3, a2, a1, a0); 1657 __ Push(a3, a2, a1, a0);
1658 __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4); 1658 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1659 } else { 1659 } else {
1660 FastCloneShallowObjectStub stub(isolate(), properties_count); 1660 FastCloneShallowObjectStub stub(isolate(), properties_count);
1661 __ CallStub(&stub); 1661 __ CallStub(&stub);
1662 } 1662 }
1663 1663
1664 // If result_saved is true the result is on top of the stack. If 1664 // If result_saved is true the result is on top of the stack. If
1665 // result_saved is false the result is in v0. 1665 // result_saved is false the result is in v0.
1666 bool result_saved = false; 1666 bool result_saved = false;
1667 1667
1668 // Mark all computed expressions that are bound to a key that 1668 // Mark all computed expressions that are bound to a key that
(...skipping 123 matching lines...)
1792 } 1792 }
1793 1793
1794 __ mov(a0, result_register()); 1794 __ mov(a0, result_register());
1795 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1795 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1796 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); 1796 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1797 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); 1797 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1798 __ li(a1, Operand(constant_elements)); 1798 __ li(a1, Operand(constant_elements));
1799 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) { 1799 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1800 __ li(a0, Operand(Smi::FromInt(flags))); 1800 __ li(a0, Operand(Smi::FromInt(flags)));
1801 __ Push(a3, a2, a1, a0); 1801 __ Push(a3, a2, a1, a0);
1802 __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4); 1802 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1803 } else { 1803 } else {
1804 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); 1804 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1805 __ CallStub(&stub); 1805 __ CallStub(&stub);
1806 } 1806 }
1807 1807
1808 bool result_saved = false; // Is the result saved to the stack? 1808 bool result_saved = false; // Is the result saved to the stack?
1809 1809
1810 // Emit code to evaluate all the non-constant subexpressions and to store 1810 // Emit code to evaluate all the non-constant subexpressions and to store
1811 // them into the newly cloned array. 1811 // them into the newly cloned array.
1812 for (int i = 0; i < length; i++) { 1812 for (int i = 0; i < length; i++) {
(...skipping 172 matching lines...)
1985 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 1985 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1986 __ li(a1, Operand(Smi::FromInt(continuation.pos()))); 1986 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1987 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset)); 1987 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1988 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset)); 1988 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1989 __ mov(a1, cp); 1989 __ mov(a1, cp);
1990 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2, 1990 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1991 kRAHasBeenSaved, kDontSaveFPRegs); 1991 kRAHasBeenSaved, kDontSaveFPRegs);
1992 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); 1992 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1993 __ Branch(&post_runtime, eq, sp, Operand(a1)); 1993 __ Branch(&post_runtime, eq, sp, Operand(a1));
1994 __ push(v0); // generator object 1994 __ push(v0); // generator object
1995 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1); 1995 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1996 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1996 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1997 __ bind(&post_runtime); 1997 __ bind(&post_runtime);
1998 __ pop(result_register()); 1998 __ pop(result_register());
1999 EmitReturnSequence(); 1999 EmitReturnSequence();
2000 2000
2001 __ bind(&resume); 2001 __ bind(&resume);
2002 context()->Plug(result_register()); 2002 context()->Plug(result_register());
2003 break; 2003 break;
2004 } 2004 }
2005 2005
(...skipping 47 matching lines...)
2053 const int generator_object_depth = kPointerSize + handler_size; 2053 const int generator_object_depth = kPointerSize + handler_size;
2054 __ lw(a0, MemOperand(sp, generator_object_depth)); 2054 __ lw(a0, MemOperand(sp, generator_object_depth));
2055 __ push(a0); // g 2055 __ push(a0); // g
2056 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2056 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2057 __ li(a1, Operand(Smi::FromInt(l_continuation.pos()))); 2057 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2058 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset)); 2058 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2059 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset)); 2059 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2060 __ mov(a1, cp); 2060 __ mov(a1, cp);
2061 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2, 2061 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2062 kRAHasBeenSaved, kDontSaveFPRegs); 2062 kRAHasBeenSaved, kDontSaveFPRegs);
2063 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1); 2063 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2064 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2064 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2065 __ pop(v0); // result 2065 __ pop(v0); // result
2066 EmitReturnSequence(); 2066 EmitReturnSequence();
2067 __ mov(a0, v0); 2067 __ mov(a0, v0);
2068 __ bind(&l_resume); // received in a0 2068 __ bind(&l_resume); // received in a0
2069 __ PopTryHandler(); 2069 __ PopTryHandler();
2070 2070
2071 // receiver = iter; f = 'next'; arg = received; 2071 // receiver = iter; f = 'next'; arg = received;
2072 __ bind(&l_next); 2072 __ bind(&l_next);
2073 __ LoadRoot(a2, Heap::knext_stringRootIndex); // "next" 2073 __ LoadRoot(a2, Heap::knext_stringRootIndex); // "next"
(...skipping 34 matching lines...)
2108 break; 2108 break;
2109 } 2109 }
2110 } 2110 }
2111 } 2111 }
2112 2112
2113 2113
2114 void FullCodeGenerator::EmitGeneratorResume(Expression *generator, 2114 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2115 Expression *value, 2115 Expression *value,
2116 JSGeneratorObject::ResumeMode resume_mode) { 2116 JSGeneratorObject::ResumeMode resume_mode) {
2117 // The value stays in a0, and is ultimately read by the resumed generator, as 2117 // The value stays in a0, and is ultimately read by the resumed generator, as
2118 // if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it. Or it 2118 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2119 // is read to throw the value when the resumed generator is already closed. 2119 // is read to throw the value when the resumed generator is already closed.
2120 // a1 will hold the generator object until the activation has been resumed. 2120 // a1 will hold the generator object until the activation has been resumed.
2121 VisitForStackValue(generator); 2121 VisitForStackValue(generator);
2122 VisitForAccumulatorValue(value); 2122 VisitForAccumulatorValue(value);
2123 __ pop(a1); 2123 __ pop(a1);
2124 2124
2125 // Check generator state. 2125 // Check generator state.
2126 Label wrong_state, closed_state, done; 2126 Label wrong_state, closed_state, done;
2127 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); 2127 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2128 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); 2128 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
(...skipping 61 matching lines...)
2190 Label push_operand_holes, call_resume; 2190 Label push_operand_holes, call_resume;
2191 __ bind(&push_operand_holes); 2191 __ bind(&push_operand_holes);
2192 __ Subu(a3, a3, Operand(1)); 2192 __ Subu(a3, a3, Operand(1));
2193 __ Branch(&call_resume, lt, a3, Operand(zero_reg)); 2193 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2194 __ push(a2); 2194 __ push(a2);
2195 __ Branch(&push_operand_holes); 2195 __ Branch(&push_operand_holes);
2196 __ bind(&call_resume); 2196 __ bind(&call_resume);
2197 ASSERT(!result_register().is(a1)); 2197 ASSERT(!result_register().is(a1));
2198 __ Push(a1, result_register()); 2198 __ Push(a1, result_register());
2199 __ Push(Smi::FromInt(resume_mode)); 2199 __ Push(Smi::FromInt(resume_mode));
2200 __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3); 2200 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2201 // Not reached: the runtime call returns elsewhere. 2201 // Not reached: the runtime call returns elsewhere.
2202 __ stop("not-reached"); 2202 __ stop("not-reached");
2203 2203
2204 // Reach here when generator is closed. 2204 // Reach here when generator is closed.
2205 __ bind(&closed_state); 2205 __ bind(&closed_state);
2206 if (resume_mode == JSGeneratorObject::NEXT) { 2206 if (resume_mode == JSGeneratorObject::NEXT) {
2207 // Return completed iterator result when generator is closed. 2207 // Return completed iterator result when generator is closed.
2208 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 2208 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2209 __ push(a2); 2209 __ push(a2);
2210 // Pop value from top-of-stack slot; box result into result register. 2210 // Pop value from top-of-stack slot; box result into result register.
2211 EmitCreateIteratorResult(true); 2211 EmitCreateIteratorResult(true);
2212 } else { 2212 } else {
2213 // Throw the provided value. 2213 // Throw the provided value.
2214 __ push(a0); 2214 __ push(a0);
2215 __ CallRuntime(Runtime::kHiddenThrow, 1); 2215 __ CallRuntime(Runtime::kThrow, 1);
2216 } 2216 }
2217 __ jmp(&done); 2217 __ jmp(&done);
2218 2218
2219 // Throw error if we attempt to operate on a running generator. 2219 // Throw error if we attempt to operate on a running generator.
2220 __ bind(&wrong_state); 2220 __ bind(&wrong_state);
2221 __ push(a1); 2221 __ push(a1);
2222 __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1); 2222 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2223 2223
2224 __ bind(&done); 2224 __ bind(&done);
2225 context()->Plug(result_register()); 2225 context()->Plug(result_register());
2226 } 2226 }
2227 2227
2228 2228
2229 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { 2229 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2230 Label gc_required; 2230 Label gc_required;
2231 Label allocated; 2231 Label allocated;
2232 2232
2233 Handle<Map> map(isolate()->native_context()->iterator_result_map()); 2233 Handle<Map> map(isolate()->native_context()->iterator_result_map());
2234 2234
2235 __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT); 2235 __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
2236 __ jmp(&allocated); 2236 __ jmp(&allocated);
2237 2237
2238 __ bind(&gc_required); 2238 __ bind(&gc_required);
2239 __ Push(Smi::FromInt(map->instance_size())); 2239 __ Push(Smi::FromInt(map->instance_size()));
2240 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1); 2240 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2241 __ lw(context_register(), 2241 __ lw(context_register(),
2242 MemOperand(fp, StandardFrameConstants::kContextOffset)); 2242 MemOperand(fp, StandardFrameConstants::kContextOffset));
2243 2243
2244 __ bind(&allocated); 2244 __ bind(&allocated);
2245 __ li(a1, Operand(map)); 2245 __ li(a1, Operand(map));
2246 __ pop(a2); 2246 __ pop(a2);
2247 __ li(a3, Operand(isolate()->factory()->ToBoolean(done))); 2247 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2248 __ li(t0, Operand(isolate()->factory()->empty_fixed_array())); 2248 __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
2249 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); 2249 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2250 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); 2250 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
(...skipping 197 matching lines...)
2448 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); 2448 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2449 } 2449 }
2450 } 2450 }
2451 2451
2452 2452
2453 void FullCodeGenerator::EmitCallStoreContextSlot( 2453 void FullCodeGenerator::EmitCallStoreContextSlot(
2454 Handle<String> name, StrictMode strict_mode) { 2454 Handle<String> name, StrictMode strict_mode) {
2455 __ li(a1, Operand(name)); 2455 __ li(a1, Operand(name));
2456 __ li(a0, Operand(Smi::FromInt(strict_mode))); 2456 __ li(a0, Operand(Smi::FromInt(strict_mode)));
2457 __ Push(v0, cp, a1, a0); // Value, context, name, strict mode. 2457 __ Push(v0, cp, a1, a0); // Value, context, name, strict mode.
2458 __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4); 2458 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2459 } 2459 }
2460 2460
2461 2461
2462 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { 2462 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2463 if (var->IsUnallocated()) { 2463 if (var->IsUnallocated()) {
2464 // Global var, const, or let. 2464 // Global var, const, or let.
2465 __ mov(a0, result_register()); 2465 __ mov(a0, result_register());
2466 __ li(a2, Operand(var->name())); 2466 __ li(a2, Operand(var->name()));
2467 __ lw(a1, GlobalObjectOperand()); 2467 __ lw(a1, GlobalObjectOperand());
2468 CallStoreIC(); 2468 CallStoreIC();
2469 2469
2470 } else if (op == Token::INIT_CONST_LEGACY) { 2470 } else if (op == Token::INIT_CONST_LEGACY) {
2471 // Const initializers need a write barrier. 2471 // Const initializers need a write barrier.
2472 ASSERT(!var->IsParameter()); // No const parameters. 2472 ASSERT(!var->IsParameter()); // No const parameters.
2473 if (var->IsLookupSlot()) { 2473 if (var->IsLookupSlot()) {
2474 __ li(a0, Operand(var->name())); 2474 __ li(a0, Operand(var->name()));
2475 __ Push(v0, cp, a0); // Context and name. 2475 __ Push(v0, cp, a0); // Context and name.
2476 __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3); 2476 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2477 } else { 2477 } else {
2478 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2478 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2479 Label skip; 2479 Label skip;
2480 MemOperand location = VarOperand(var, a1); 2480 MemOperand location = VarOperand(var, a1);
2481 __ lw(a2, location); 2481 __ lw(a2, location);
2482 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 2482 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2483 __ Branch(&skip, ne, a2, Operand(at)); 2483 __ Branch(&skip, ne, a2, Operand(at));
2484 EmitStoreToStackLocalOrContextSlot(var, location); 2484 EmitStoreToStackLocalOrContextSlot(var, location);
2485 __ bind(&skip); 2485 __ bind(&skip);
2486 } 2486 }
2487 2487
2488 } else if (var->mode() == LET && op != Token::INIT_LET) { 2488 } else if (var->mode() == LET && op != Token::INIT_LET) {
2489 // Non-initializing assignment to let variable needs a write barrier. 2489 // Non-initializing assignment to let variable needs a write barrier.
2490 if (var->IsLookupSlot()) { 2490 if (var->IsLookupSlot()) {
2491 EmitCallStoreContextSlot(var->name(), strict_mode()); 2491 EmitCallStoreContextSlot(var->name(), strict_mode());
2492 } else { 2492 } else {
2493 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2493 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2494 Label assign; 2494 Label assign;
2495 MemOperand location = VarOperand(var, a1); 2495 MemOperand location = VarOperand(var, a1);
2496 __ lw(a3, location); 2496 __ lw(a3, location);
2497 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 2497 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2498 __ Branch(&assign, ne, a3, Operand(t0)); 2498 __ Branch(&assign, ne, a3, Operand(t0));
2499 __ li(a3, Operand(var->name())); 2499 __ li(a3, Operand(var->name()));
2500 __ push(a3); 2500 __ push(a3);
2501 __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1); 2501 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2502 // Perform the assignment. 2502 // Perform the assignment.
2503 __ bind(&assign); 2503 __ bind(&assign);
2504 EmitStoreToStackLocalOrContextSlot(var, location); 2504 EmitStoreToStackLocalOrContextSlot(var, location);
2505 } 2505 }
2506 2506
2507 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { 2507 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2508 // Assignment to var or initializing assignment to let/const 2508 // Assignment to var or initializing assignment to let/const
2509 // in harmony mode. 2509 // in harmony mode.
2510 if (var->IsLookupSlot()) { 2510 if (var->IsLookupSlot()) {
2511 EmitCallStoreContextSlot(var->name(), strict_mode()); 2511 EmitCallStoreContextSlot(var->name(), strict_mode());
(...skipping 177 matching lines...)
2689 __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize)); 2689 __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize));
2690 2690
2691 // t0: the strict mode. 2691 // t0: the strict mode.
2692 __ li(t0, Operand(Smi::FromInt(strict_mode()))); 2692 __ li(t0, Operand(Smi::FromInt(strict_mode())));
2693 2693
2694 // a1: the start position of the scope the call resides in. 2694 // a1: the start position of the scope the call resides in.
2695 __ li(a1, Operand(Smi::FromInt(scope()->start_position()))); 2695 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2696 2696
2697 // Do the runtime call. 2697 // Do the runtime call.
2698 __ Push(t2, t1, t0, a1); 2698 __ Push(t2, t1, t0, a1);
2699 __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5); 2699 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2700 } 2700 }
2701 2701
2702 2702
2703 void FullCodeGenerator::VisitCall(Call* expr) { 2703 void FullCodeGenerator::VisitCall(Call* expr) {
2704 #ifdef DEBUG 2704 #ifdef DEBUG
2705 // We want to verify that RecordJSReturnSite gets called on all paths 2705 // We want to verify that RecordJSReturnSite gets called on all paths
2706 // through this function. Avoid early returns. 2706 // through this function. Avoid early returns.
2707 expr->return_is_recorded_ = false; 2707 expr->return_is_recorded_ = false;
2708 #endif 2708 #endif
2709 2709
(...skipping 51 matching lines...)
2761 // by eval-introduced variables. 2761 // by eval-introduced variables.
2762 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); 2762 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2763 } 2763 }
2764 2764
2765 __ bind(&slow); 2765 __ bind(&slow);
2766 // Call the runtime to find the function to call (returned in v0) 2766 // Call the runtime to find the function to call (returned in v0)
2767 // and the object holding it (returned in v1). 2767 // and the object holding it (returned in v1).
2768 ASSERT(!context_register().is(a2)); 2768 ASSERT(!context_register().is(a2));
2769 __ li(a2, Operand(proxy->name())); 2769 __ li(a2, Operand(proxy->name()));
2770 __ Push(context_register(), a2); 2770 __ Push(context_register(), a2);
2771 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2); 2771 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2772 __ Push(v0, v1); // Function, receiver. 2772 __ Push(v0, v1); // Function, receiver.
2773 2773
2774 // If fast case code has been generated, emit code to push the 2774 // If fast case code has been generated, emit code to push the
2775 // function and receiver and have the slow path jump around this 2775 // function and receiver and have the slow path jump around this
2776 // code. 2776 // code.
2777 if (done.is_linked()) { 2777 if (done.is_linked()) {
2778 Label call; 2778 Label call;
2779 __ Branch(&call); 2779 __ Branch(&call);
2780 __ bind(&done); 2780 __ bind(&done);
2781 // Push function. 2781 // Push function.
(...skipping 617 matching lines...)
3399 } 3399 }
3400 __ bind(&runtime); 3400 __ bind(&runtime);
3401 __ PrepareCallCFunction(2, scratch1); 3401 __ PrepareCallCFunction(2, scratch1);
3402 __ li(a1, Operand(index)); 3402 __ li(a1, Operand(index));
3403 __ Move(a0, object); 3403 __ Move(a0, object);
3404 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 3404 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3405 __ jmp(&done); 3405 __ jmp(&done);
3406 } 3406 }
3407 3407
3408 __ bind(&not_date_object); 3408 __ bind(&not_date_object);
3409 __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0); 3409 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3410 __ bind(&done); 3410 __ bind(&done);
3411 context()->Plug(v0); 3411 context()->Plug(v0);
3412 } 3412 }
3413 3413
3414 3414
3415 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3415 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3416 ZoneList<Expression*>* args = expr->arguments(); 3416 ZoneList<Expression*>* args = expr->arguments();
3417 ASSERT_EQ(3, args->length()); 3417 ASSERT_EQ(3, args->length());
3418 3418
3419 Register string = v0; 3419 Register string = v0;
(...skipping 350 matching lines...)
3770 // a3 now points to key of indexed element of cache. 3770 // a3 now points to key of indexed element of cache.
3771 __ lw(a2, MemOperand(a3)); 3771 __ lw(a2, MemOperand(a3));
3772 __ Branch(&not_found, ne, key, Operand(a2)); 3772 __ Branch(&not_found, ne, key, Operand(a2));
3773 3773
3774 __ lw(v0, MemOperand(a3, kPointerSize)); 3774 __ lw(v0, MemOperand(a3, kPointerSize));
3775 __ Branch(&done); 3775 __ Branch(&done);
3776 3776
3777 __ bind(&not_found); 3777 __ bind(&not_found);
3778 // Call runtime to perform the lookup. 3778 // Call runtime to perform the lookup.
3779 __ Push(cache, key); 3779 __ Push(cache, key);
3780 __ CallRuntime(Runtime::kHiddenGetFromCache, 2); 3780 __ CallRuntime(Runtime::kGetFromCache, 2);
3781 3781
3782 __ bind(&done); 3782 __ bind(&done);
3783 context()->Plug(v0); 3783 context()->Plug(v0);
3784 } 3784 }
3785 3785
3786 3786
3787 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 3787 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3788 ZoneList<Expression*>* args = expr->arguments(); 3788 ZoneList<Expression*>* args = expr->arguments();
3789 VisitForAccumulatorValue(args->at(0)); 3789 VisitForAccumulatorValue(args->at(0));
3790 3790
(...skipping 361 matching lines...)
4152 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4152 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4153 // Result of deleting non-global, non-dynamic variables is false. 4153 // Result of deleting non-global, non-dynamic variables is false.
4154 // The subexpression does not have side effects. 4154 // The subexpression does not have side effects.
4155 context()->Plug(var->is_this()); 4155 context()->Plug(var->is_this());
4156 } else { 4156 } else {
4157 // Non-global variable. Call the runtime to try to delete from the 4157 // Non-global variable. Call the runtime to try to delete from the
4158 // context where the variable was introduced. 4158 // context where the variable was introduced.
4159 ASSERT(!context_register().is(a2)); 4159 ASSERT(!context_register().is(a2));
4160 __ li(a2, Operand(var->name())); 4160 __ li(a2, Operand(var->name()));
4161 __ Push(context_register(), a2); 4161 __ Push(context_register(), a2);
4162 __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2); 4162 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
4163 context()->Plug(v0); 4163 context()->Plug(v0);
4164 } 4164 }
4165 } else { 4165 } else {
4166 // Result of deleting non-property, non-variable reference is true. 4166 // Result of deleting non-property, non-variable reference is true.
4167 // The subexpression may have side effects. 4167 // The subexpression may have side effects.
4168 VisitForEffect(expr->expression()); 4168 VisitForEffect(expr->expression());
4169 context()->Plug(true); 4169 context()->Plug(true);
4170 } 4170 }
4171 break; 4171 break;
4172 } 4172 }
(...skipping 261 matching lines...)
4434 Comment cmnt(masm_, "[ Lookup slot"); 4434 Comment cmnt(masm_, "[ Lookup slot");
4435 Label done, slow; 4435 Label done, slow;
4436 4436
4437 // Generate code for loading from variables potentially shadowed 4437 // Generate code for loading from variables potentially shadowed
4438 // by eval-introduced variables. 4438 // by eval-introduced variables.
4439 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4439 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4440 4440
4441 __ bind(&slow); 4441 __ bind(&slow);
4442 __ li(a0, Operand(proxy->name())); 4442 __ li(a0, Operand(proxy->name()));
4443 __ Push(cp, a0); 4443 __ Push(cp, a0);
4444 __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2); 4444 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4445 PrepareForBailout(expr, TOS_REG); 4445 PrepareForBailout(expr, TOS_REG);
4446 __ bind(&done); 4446 __ bind(&done);
4447 4447
4448 context()->Plug(v0); 4448 context()->Plug(v0);
4449 } else { 4449 } else {
4450 // This expression cannot throw a reference error at the top level. 4450 // This expression cannot throw a reference error at the top level.
4451 VisitInDuplicateContext(expr); 4451 VisitInDuplicateContext(expr);
4452 } 4452 }
4453 } 4453 }
4454 4454
(...skipping 399 matching lines...)
4854 Assembler::target_address_at(pc_immediate_load_address)) == 4854 Assembler::target_address_at(pc_immediate_load_address)) ==
4855 reinterpret_cast<uint32_t>( 4855 reinterpret_cast<uint32_t>(
4856 isolate->builtins()->OsrAfterStackCheck()->entry())); 4856 isolate->builtins()->OsrAfterStackCheck()->entry()));
4857 return OSR_AFTER_STACK_CHECK; 4857 return OSR_AFTER_STACK_CHECK;
4858 } 4858 }
4859 4859
4860 4860
4861 } } // namespace v8::internal 4861 } } // namespace v8::internal
4862 4862
4863 #endif // V8_TARGET_ARCH_MIPS 4863 #endif // V8_TARGET_ARCH_MIPS