Chromium Code Reviews

Side by Side Diff: src/mips/full-codegen-mips.cc

Issue 8417035: Introduce extended mode. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed more comments. Created 9 years, 1 month ago
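Note on the change in this file: the patch replaces the binary strict-mode queries (is_strict_mode(), strict_mode_flag()) with a language-mode abstraction, so the full code generator now asks is_classic_mode() / language_mode() and, where the runtime or a builtin still expects a StrictModeFlag, maps the mode back explicitly (see the slow_elements and DELETE paths below). The StoreIC/KeyedStoreIC initializer selection is rewritten in the same terms, with the ternary arms swapped so behavior is unchanged. A minimal sketch of the mapping follows, assuming LanguageMode is a three-valued enum as the issue title suggests; only CLASSIC_MODE appears verbatim in this diff, and STRICT_MODE / EXTENDED_MODE are assumed names.

// Hypothetical illustration, not part of the patch.
enum StrictModeFlag { kNonStrictMode, kStrictMode };
enum LanguageMode { CLASSIC_MODE, STRICT_MODE, EXTENDED_MODE };  // last two assumed

// Both strict and extended code fold back to kStrictMode, matching the
// (language_mode() == CLASSIC_MODE) ? kNonStrictMode : kStrictMode
// pattern used at the call sites in this diff.
static inline StrictModeFlag StrictModeFromLanguageMode(LanguageMode mode) {
  return mode == CLASSIC_MODE ? kNonStrictMode : kStrictMode;
}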
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 133 matching lines...)
144 if (strlen(FLAG_stop_at) > 0 && 144 if (strlen(FLAG_stop_at) > 0 &&
145 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { 145 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
146 __ stop("stop-at"); 146 __ stop("stop-at");
147 } 147 }
148 #endif 148 #endif
149 149
150 // Strict mode functions and builtins need to replace the receiver 150 // Strict mode functions and builtins need to replace the receiver
151 // with undefined when called as functions (without an explicit 151 // with undefined when called as functions (without an explicit
152 // receiver object). t1 is zero for method calls and non-zero for 152 // receiver object). t1 is zero for method calls and non-zero for
153 // function calls. 153 // function calls.
154 if (info->is_strict_mode() || info->is_native()) { 154 if (!info->is_classic_mode() || info->is_native()) {
155 Label ok; 155 Label ok;
156 __ Branch(&ok, eq, t1, Operand(zero_reg)); 156 __ Branch(&ok, eq, t1, Operand(zero_reg));
157 int receiver_offset = info->scope()->num_parameters() * kPointerSize; 157 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
158 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 158 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
159 __ sw(a2, MemOperand(sp, receiver_offset)); 159 __ sw(a2, MemOperand(sp, receiver_offset));
160 __ bind(&ok); 160 __ bind(&ok);
161 } 161 }
162 162
163 // Open a frame scope to indicate that there is a frame on the stack. The 163 // Open a frame scope to indicate that there is a frame on the stack. The
164 // MANUAL indicates that the scope shouldn't actually generate code to set up 164 // MANUAL indicates that the scope shouldn't actually generate code to set up
(...skipping 71 matching lines...)
236 __ Addu(a2, fp, 236 __ Addu(a2, fp,
237 Operand(StandardFrameConstants::kCallerSPOffset + offset)); 237 Operand(StandardFrameConstants::kCallerSPOffset + offset));
238 __ li(a1, Operand(Smi::FromInt(num_parameters))); 238 __ li(a1, Operand(Smi::FromInt(num_parameters)));
239 __ Push(a3, a2, a1); 239 __ Push(a3, a2, a1);
240 240
241 // Arguments to ArgumentsAccessStub: 241 // Arguments to ArgumentsAccessStub:
242 // function, receiver address, parameter count. 242 // function, receiver address, parameter count.
243 // The stub will rewrite receiver and parameter count if the previous 243 // The stub will rewrite receiver and parameter count if the previous
244 // stack frame was an arguments adapter frame. 244 // stack frame was an arguments adapter frame.
245 ArgumentsAccessStub::Type type; 245 ArgumentsAccessStub::Type type;
246 if (is_strict_mode()) { 246 if (!is_classic_mode()) {
247 type = ArgumentsAccessStub::NEW_STRICT; 247 type = ArgumentsAccessStub::NEW_STRICT;
248 } else if (function()->has_duplicate_parameters()) { 248 } else if (function()->has_duplicate_parameters()) {
249 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; 249 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
250 } else { 250 } else {
251 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; 251 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
252 } 252 }
253 ArgumentsAccessStub stub(type); 253 ArgumentsAccessStub stub(type);
254 __ CallStub(&stub); 254 __ CallStub(&stub);
255 255
256 SetVar(arguments, v0, a1, a2); 256 SetVar(arguments, v0, a1, a2);
(...skipping 858 matching lines...)
1115 // space for nested functions that don't need literals cloning. If 1115 // space for nested functions that don't need literals cloning. If
1116 // we're running with the --always-opt or the --prepare-always-opt 1116 // we're running with the --always-opt or the --prepare-always-opt
1117 // flag, we need to use the runtime function so that the new function 1117 // flag, we need to use the runtime function so that the new function
1118 // we are creating here gets a chance to have its code optimized and 1118 // we are creating here gets a chance to have its code optimized and
1119 // doesn't just get a copy of the existing unoptimized code. 1119 // doesn't just get a copy of the existing unoptimized code.
1120 if (!FLAG_always_opt && 1120 if (!FLAG_always_opt &&
1121 !FLAG_prepare_always_opt && 1121 !FLAG_prepare_always_opt &&
1122 !pretenure && 1122 !pretenure &&
1123 scope()->is_function_scope() && 1123 scope()->is_function_scope() &&
1124 info->num_literals() == 0) { 1124 info->num_literals() == 0) {
1125 FastNewClosureStub stub(info->strict_mode_flag()); 1125 FastNewClosureStub stub(info->language_mode());
1126 __ li(a0, Operand(info)); 1126 __ li(a0, Operand(info));
1127 __ push(a0); 1127 __ push(a0);
1128 __ CallStub(&stub); 1128 __ CallStub(&stub);
1129 } else { 1129 } else {
1130 __ li(a0, Operand(info)); 1130 __ li(a0, Operand(info));
1131 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex 1131 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1132 : Heap::kFalseValueRootIndex); 1132 : Heap::kFalseValueRootIndex);
1133 __ Push(cp, a0, a1); 1133 __ Push(cp, a0, a1);
1134 __ CallRuntime(Runtime::kNewClosure, 3); 1134 __ CallRuntime(Runtime::kNewClosure, 3);
1135 } 1135 }
(...skipping 329 matching lines...)
1465 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1465 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1466 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1466 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1467 // Fall through. 1467 // Fall through.
1468 case ObjectLiteral::Property::COMPUTED: 1468 case ObjectLiteral::Property::COMPUTED:
1469 if (key->handle()->IsSymbol()) { 1469 if (key->handle()->IsSymbol()) {
1470 if (property->emit_store()) { 1470 if (property->emit_store()) {
1471 VisitForAccumulatorValue(value); 1471 VisitForAccumulatorValue(value);
1472 __ mov(a0, result_register()); 1472 __ mov(a0, result_register());
1473 __ li(a2, Operand(key->handle())); 1473 __ li(a2, Operand(key->handle()));
1474 __ lw(a1, MemOperand(sp)); 1474 __ lw(a1, MemOperand(sp));
1475 Handle<Code> ic = is_strict_mode() 1475 Handle<Code> ic = is_classic_mode()
1476 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1476 ? isolate()->builtins()->StoreIC_Initialize()
1477 : isolate()->builtins()->StoreIC_Initialize(); 1477 : isolate()->builtins()->StoreIC_Initialize_Strict();
1478 __ Call(ic, RelocInfo::CODE_TARGET, key->id()); 1478 __ Call(ic, RelocInfo::CODE_TARGET, key->id());
1479 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1479 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1480 } else { 1480 } else {
1481 VisitForEffect(value); 1481 VisitForEffect(value);
1482 } 1482 }
1483 break; 1483 break;
1484 } 1484 }
1485 // Fall through. 1485 // Fall through.
1486 case ObjectLiteral::Property::PROTOTYPE: 1486 case ObjectLiteral::Property::PROTOTYPE:
1487 // Duplicate receiver on stack. 1487 // Duplicate receiver on stack.
(...skipping 116 matching lines...)
1604 // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS 1604 // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
1605 __ JumpIfSmi(result_register(), &smi_element); 1605 __ JumpIfSmi(result_register(), &smi_element);
1606 __ CheckFastSmiOnlyElements(a2, a3, &fast_elements); 1606 __ CheckFastSmiOnlyElements(a2, a3, &fast_elements);
1607 1607
1608 // Store into the array literal requires an elements transition. Call into 1608 // Store into the array literal requires an elements transition. Call into
1609 // the runtime. 1609 // the runtime.
1610 __ bind(&slow_elements); 1610 __ bind(&slow_elements);
1611 __ push(t6); // Copy of array literal. 1611 __ push(t6); // Copy of array literal.
1612 __ li(a1, Operand(Smi::FromInt(i))); 1612 __ li(a1, Operand(Smi::FromInt(i)));
1613 __ li(a2, Operand(Smi::FromInt(NONE))); // PropertyAttributes 1613 __ li(a2, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1614 __ li(a3, Operand(Smi::FromInt(strict_mode_flag()))); // Strict mode. 1614 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
1615 ? kNonStrictMode : kStrictMode;
1616 __ li(a3, Operand(Smi::FromInt(strict_mode_flag))); // Strict mode.
1615 __ Push(a1, result_register(), a2, a3); 1617 __ Push(a1, result_register(), a2, a3);
1616 __ CallRuntime(Runtime::kSetProperty, 5); 1618 __ CallRuntime(Runtime::kSetProperty, 5);
1617 __ Branch(&element_done); 1619 __ Branch(&element_done);
1618 1620
1619 // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS. 1621 // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
1620 __ bind(&double_elements); 1622 __ bind(&double_elements);
1621 __ li(a3, Operand(Smi::FromInt(i))); 1623 __ li(a3, Operand(Smi::FromInt(i)));
1622 __ StoreNumberToDoubleElements(result_register(), a3, t6, a1, t0, t1, t5, 1624 __ StoreNumberToDoubleElements(result_register(), a3, t6, a1, t0, t1, t5,
1623 t3, &slow_elements); 1625 t3, &slow_elements);
1624 __ Branch(&element_done); 1626 __ Branch(&element_done);
(...skipping 299 matching lines...)
1924 EffectContext context(this); 1926 EffectContext context(this);
1925 EmitVariableAssignment(var, Token::ASSIGN); 1927 EmitVariableAssignment(var, Token::ASSIGN);
1926 break; 1928 break;
1927 } 1929 }
1928 case NAMED_PROPERTY: { 1930 case NAMED_PROPERTY: {
1929 __ push(result_register()); // Preserve value. 1931 __ push(result_register()); // Preserve value.
1930 VisitForAccumulatorValue(prop->obj()); 1932 VisitForAccumulatorValue(prop->obj());
1931 __ mov(a1, result_register()); 1933 __ mov(a1, result_register());
1932 __ pop(a0); // Restore value. 1934 __ pop(a0); // Restore value.
1933 __ li(a2, Operand(prop->key()->AsLiteral()->handle())); 1935 __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
1934 Handle<Code> ic = is_strict_mode() 1936 Handle<Code> ic = is_classic_mode()
1935 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1937 ? isolate()->builtins()->StoreIC_Initialize()
1936 : isolate()->builtins()->StoreIC_Initialize(); 1938 : isolate()->builtins()->StoreIC_Initialize_Strict();
1937 __ Call(ic); 1939 __ Call(ic);
1938 break; 1940 break;
1939 } 1941 }
1940 case KEYED_PROPERTY: { 1942 case KEYED_PROPERTY: {
1941 __ push(result_register()); // Preserve value. 1943 __ push(result_register()); // Preserve value.
1942 VisitForStackValue(prop->obj()); 1944 VisitForStackValue(prop->obj());
1943 VisitForAccumulatorValue(prop->key()); 1945 VisitForAccumulatorValue(prop->key());
1944 __ mov(a1, result_register()); 1946 __ mov(a1, result_register());
1945 __ pop(a2); 1947 __ pop(a2);
1946 __ pop(a0); // Restore value. 1948 __ pop(a0); // Restore value.
1947 Handle<Code> ic = is_strict_mode() 1949 Handle<Code> ic = is_classic_mode()
1948 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 1950 ? isolate()->builtins()->KeyedStoreIC_Initialize()
1949 : isolate()->builtins()->KeyedStoreIC_Initialize(); 1951 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
1950 __ Call(ic); 1952 __ Call(ic);
1951 break; 1953 break;
1952 } 1954 }
1953 } 1955 }
1954 PrepareForBailoutForId(bailout_ast_id, TOS_REG); 1956 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1955 context()->Plug(v0); 1957 context()->Plug(v0);
1956 } 1958 }
1957 1959
1958 1960
1959 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1961 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1960 Token::Value op) { 1962 Token::Value op) {
1961 if (var->IsUnallocated()) { 1963 if (var->IsUnallocated()) {
1962 // Global var, const, or let. 1964 // Global var, const, or let.
1963 __ mov(a0, result_register()); 1965 __ mov(a0, result_register());
1964 __ li(a2, Operand(var->name())); 1966 __ li(a2, Operand(var->name()));
1965 __ lw(a1, GlobalObjectOperand()); 1967 __ lw(a1, GlobalObjectOperand());
1966 Handle<Code> ic = is_strict_mode() 1968 Handle<Code> ic = is_classic_mode()
1967 ? isolate()->builtins()->StoreIC_Initialize_Strict() 1969 ? isolate()->builtins()->StoreIC_Initialize()
1968 : isolate()->builtins()->StoreIC_Initialize(); 1970 : isolate()->builtins()->StoreIC_Initialize_Strict();
1969 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1971 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1970 1972
1971 } else if (op == Token::INIT_CONST) { 1973 } else if (op == Token::INIT_CONST) {
1972 // Const initializers need a write barrier. 1974 // Const initializers need a write barrier.
1973 ASSERT(!var->IsParameter()); // No const parameters. 1975 ASSERT(!var->IsParameter()); // No const parameters.
1974 if (var->IsStackLocal()) { 1976 if (var->IsStackLocal()) {
1975 Label skip; 1977 Label skip;
1976 __ lw(a1, StackOperand(var)); 1978 __ lw(a1, StackOperand(var));
1977 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 1979 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
1978 __ Branch(&skip, ne, a1, Operand(t0)); 1980 __ Branch(&skip, ne, a1, Operand(t0));
(...skipping 10 matching lines...)
1989 __ li(a0, Operand(var->name())); 1991 __ li(a0, Operand(var->name()));
1990 __ Push(cp, a0); // Context and name. 1992 __ Push(cp, a0); // Context and name.
1991 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 1993 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1992 } 1994 }
1993 1995
1994 } else if (var->mode() == LET && op != Token::INIT_LET) { 1996 } else if (var->mode() == LET && op != Token::INIT_LET) {
1995 // Non-initializing assignment to let variable needs a write barrier. 1997 // Non-initializing assignment to let variable needs a write barrier.
1996 if (var->IsLookupSlot()) { 1998 if (var->IsLookupSlot()) {
1997 __ push(v0); // Value. 1999 __ push(v0); // Value.
1998 __ li(a1, Operand(var->name())); 2000 __ li(a1, Operand(var->name()));
1999 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); 2001 __ li(a0, Operand(Smi::FromInt(language_mode())));
2000 __ Push(cp, a1, a0); // Context, name, strict mode. 2002 __ Push(cp, a1, a0); // Context, name, strict mode.
2001 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2003 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2002 } else { 2004 } else {
2003 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2005 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2004 Label assign; 2006 Label assign;
2005 MemOperand location = VarOperand(var, a1); 2007 MemOperand location = VarOperand(var, a1);
2006 __ lw(a3, location); 2008 __ lw(a3, location);
2007 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 2009 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2008 __ Branch(&assign, ne, a3, Operand(t0)); 2010 __ Branch(&assign, ne, a3, Operand(t0));
2009 __ li(a3, Operand(var->name())); 2011 __ li(a3, Operand(var->name()));
(...skipping 27 matching lines...)
2037 if (var->IsContextSlot()) { 2039 if (var->IsContextSlot()) {
2038 __ mov(a3, v0); 2040 __ mov(a3, v0);
2039 int offset = Context::SlotOffset(var->index()); 2041 int offset = Context::SlotOffset(var->index());
2040 __ RecordWriteContextSlot( 2042 __ RecordWriteContextSlot(
2041 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); 2043 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2042 } 2044 }
2043 } else { 2045 } else {
2044 ASSERT(var->IsLookupSlot()); 2046 ASSERT(var->IsLookupSlot());
2045 __ push(v0); // Value. 2047 __ push(v0); // Value.
2046 __ li(a1, Operand(var->name())); 2048 __ li(a1, Operand(var->name()));
2047 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); 2049 __ li(a0, Operand(Smi::FromInt(language_mode())));
2048 __ Push(cp, a1, a0); // Context, name, strict mode. 2050 __ Push(cp, a1, a0); // Context, name, strict mode.
2049 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2051 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2050 } 2052 }
2051 } 2053 }
2052 // Non-initializing assignments to consts are ignored. 2054 // Non-initializing assignments to consts are ignored.
2053 } 2055 }
2054 2056
2055 2057
2056 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2058 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2057 // Assignment to a property, using a named store IC. 2059 // Assignment to a property, using a named store IC.
(...skipping 17 matching lines...)
2075 __ mov(a0, result_register()); // Load the value. 2077 __ mov(a0, result_register()); // Load the value.
2076 __ li(a2, Operand(prop->key()->AsLiteral()->handle())); 2078 __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
2077 // Load receiver to a1. Leave a copy in the stack if needed for turning the 2079 // Load receiver to a1. Leave a copy in the stack if needed for turning the
2078 // receiver into fast case. 2080 // receiver into fast case.
2079 if (expr->ends_initialization_block()) { 2081 if (expr->ends_initialization_block()) {
2080 __ lw(a1, MemOperand(sp)); 2082 __ lw(a1, MemOperand(sp));
2081 } else { 2083 } else {
2082 __ pop(a1); 2084 __ pop(a1);
2083 } 2085 }
2084 2086
2085 Handle<Code> ic = is_strict_mode() 2087 Handle<Code> ic = is_classic_mode()
2086 ? isolate()->builtins()->StoreIC_Initialize_Strict() 2088 ? isolate()->builtins()->StoreIC_Initialize()
2087 : isolate()->builtins()->StoreIC_Initialize(); 2089 : isolate()->builtins()->StoreIC_Initialize_Strict();
2088 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 2090 __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
2089 2091
2090 // If the assignment ends an initialization block, revert to fast case. 2092 // If the assignment ends an initialization block, revert to fast case.
2091 if (expr->ends_initialization_block()) { 2093 if (expr->ends_initialization_block()) {
2092 __ push(v0); // Result of assignment, saved even if not needed. 2094 __ push(v0); // Result of assignment, saved even if not needed.
2093 // Receiver is under the result value. 2095 // Receiver is under the result value.
2094 __ lw(t0, MemOperand(sp, kPointerSize)); 2096 __ lw(t0, MemOperand(sp, kPointerSize));
2095 __ push(t0); 2097 __ push(t0);
2096 __ CallRuntime(Runtime::kToFastProperties, 1); 2098 __ CallRuntime(Runtime::kToFastProperties, 1);
2097 __ pop(v0); 2099 __ pop(v0);
(...skipping 29 matching lines...)
2127 __ mov(a0, result_register()); 2129 __ mov(a0, result_register());
2128 __ pop(a1); // Key. 2130 __ pop(a1); // Key.
2129 // Load receiver to a2. Leave a copy in the stack if needed for turning the 2131 // Load receiver to a2. Leave a copy in the stack if needed for turning the
2130 // receiver into fast case. 2132 // receiver into fast case.
2131 if (expr->ends_initialization_block()) { 2133 if (expr->ends_initialization_block()) {
2132 __ lw(a2, MemOperand(sp)); 2134 __ lw(a2, MemOperand(sp));
2133 } else { 2135 } else {
2134 __ pop(a2); 2136 __ pop(a2);
2135 } 2137 }
2136 2138
2137 Handle<Code> ic = is_strict_mode() 2139 Handle<Code> ic = is_classic_mode()
2138 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 2140 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2139 : isolate()->builtins()->KeyedStoreIC_Initialize(); 2141 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2140 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 2142 __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
2141 2143
2142 // If the assignment ends an initialization block, revert to fast case. 2144 // If the assignment ends an initialization block, revert to fast case.
2143 if (expr->ends_initialization_block()) { 2145 if (expr->ends_initialization_block()) {
2144 __ push(v0); // Result of assignment, saved even if not needed. 2146 __ push(v0); // Result of assignment, saved even if not needed.
2145 // Receiver is under the result value. 2147 // Receiver is under the result value.
2146 __ lw(t0, MemOperand(sp, kPointerSize)); 2148 __ lw(t0, MemOperand(sp, kPointerSize));
2147 __ push(t0); 2149 __ push(t0);
2148 __ CallRuntime(Runtime::kToFastProperties, 1); 2150 __ CallRuntime(Runtime::kToFastProperties, 1);
2149 __ pop(v0); 2151 __ pop(v0);
(...skipping 103 matching lines...)
2253 2255
2254 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2256 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2255 // Push copy of the first argument or undefined if it doesn't exist. 2257 // Push copy of the first argument or undefined if it doesn't exist.
2256 if (arg_count > 0) { 2258 if (arg_count > 0) {
2257 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); 2259 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2258 } else { 2260 } else {
2259 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); 2261 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2260 } 2262 }
2261 __ push(a1); 2263 __ push(a1);
2262 2264
2263 // Push the receiver of the enclosing function and do runtime call. 2265 // Push the receiver of the enclosing function.
2264 int receiver_offset = 2 + info_->scope()->num_parameters(); 2266 int receiver_offset = 2 + info_->scope()->num_parameters();
2265 __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize)); 2267 __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
2266 __ push(a1); 2268 __ push(a1);
2267 // Push the strict mode flag. In harmony mode every eval call 2269 // Push the language mode.
2268 // is a strict mode eval call. 2270 __ li(a1, Operand(Smi::FromInt(language_mode())));
2269 StrictModeFlag strict_mode =
2270 FLAG_harmony_scoping ? kStrictMode : strict_mode_flag();
2271 __ li(a1, Operand(Smi::FromInt(strict_mode)));
2272 __ push(a1); 2271 __ push(a1);
2273 2272
2274 // Push the start position of the scope the call resides in. 2273 // Push the start position of the scope the call resides in.
2275 __ li(a1, Operand(Smi::FromInt(scope()->start_position()))); 2274 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2276 __ push(a1); 2275 __ push(a1);
2277 2276
2277 // Do the runtime call.
2278 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); 2278 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2279 } 2279 }
2280 2280
2281 2281
2282 void FullCodeGenerator::VisitCall(Call* expr) { 2282 void FullCodeGenerator::VisitCall(Call* expr) {
2283 #ifdef DEBUG 2283 #ifdef DEBUG
2284 // We want to verify that RecordJSReturnSite gets called on all paths 2284 // We want to verify that RecordJSReturnSite gets called on all paths
2285 // through this function. Avoid early returns. 2285 // through this function. Avoid early returns.
2286 expr->return_is_recorded_ = false; 2286 expr->return_is_recorded_ = false;
2287 #endif 2287 #endif
(...skipping 1487 matching lines...)
3775 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3775 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3776 switch (expr->op()) { 3776 switch (expr->op()) {
3777 case Token::DELETE: { 3777 case Token::DELETE: {
3778 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3778 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3779 Property* property = expr->expression()->AsProperty(); 3779 Property* property = expr->expression()->AsProperty();
3780 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 3780 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3781 3781
3782 if (property != NULL) { 3782 if (property != NULL) {
3783 VisitForStackValue(property->obj()); 3783 VisitForStackValue(property->obj());
3784 VisitForStackValue(property->key()); 3784 VisitForStackValue(property->key());
3785 __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); 3785 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3786 ? kNonStrictMode : kStrictMode;
3787 __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
3786 __ push(a1); 3788 __ push(a1);
3787 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3789 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3788 context()->Plug(v0); 3790 context()->Plug(v0);
3789 } else if (proxy != NULL) { 3791 } else if (proxy != NULL) {
3790 Variable* var = proxy->var(); 3792 Variable* var = proxy->var();
3791 // Delete of an unqualified identifier is disallowed in strict mode 3793 // Delete of an unqualified identifier is disallowed in strict mode
3792 // but "delete this" is allowed. 3794 // but "delete this" is allowed.
3793 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); 3795 ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3794 if (var->IsUnallocated()) { 3796 if (var->IsUnallocated()) {
3795 __ lw(a2, GlobalObjectOperand()); 3797 __ lw(a2, GlobalObjectOperand());
3796 __ li(a1, Operand(var->name())); 3798 __ li(a1, Operand(var->name()));
3797 __ li(a0, Operand(Smi::FromInt(kNonStrictMode))); 3799 __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
3798 __ Push(a2, a1, a0); 3800 __ Push(a2, a1, a0);
3799 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3801 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3800 context()->Plug(v0); 3802 context()->Plug(v0);
3801 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 3803 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3802 // Result of deleting non-global, non-dynamic variables is false. 3804 // Result of deleting non-global, non-dynamic variables is false.
3803 // The subexpression does not have side effects. 3805 // The subexpression does not have side effects.
(...skipping 245 matching lines...)
4049 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4051 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4050 Token::ASSIGN); 4052 Token::ASSIGN);
4051 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4053 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4052 context()->Plug(v0); 4054 context()->Plug(v0);
4053 } 4055 }
4054 break; 4056 break;
4055 case NAMED_PROPERTY: { 4057 case NAMED_PROPERTY: {
4056 __ mov(a0, result_register()); // Value. 4058 __ mov(a0, result_register()); // Value.
4057 __ li(a2, Operand(prop->key()->AsLiteral()->handle())); // Name. 4059 __ li(a2, Operand(prop->key()->AsLiteral()->handle())); // Name.
4058 __ pop(a1); // Receiver. 4060 __ pop(a1); // Receiver.
4059 Handle<Code> ic = is_strict_mode() 4061 Handle<Code> ic = is_classic_mode()
4060 ? isolate()->builtins()->StoreIC_Initialize_Strict() 4062 ? isolate()->builtins()->StoreIC_Initialize()
4061 : isolate()->builtins()->StoreIC_Initialize(); 4063 : isolate()->builtins()->StoreIC_Initialize_Strict();
4062 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 4064 __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
4063 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4065 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4064 if (expr->is_postfix()) { 4066 if (expr->is_postfix()) {
4065 if (!context()->IsEffect()) { 4067 if (!context()->IsEffect()) {
4066 context()->PlugTOS(); 4068 context()->PlugTOS();
4067 } 4069 }
4068 } else { 4070 } else {
4069 context()->Plug(v0); 4071 context()->Plug(v0);
4070 } 4072 }
4071 break; 4073 break;
4072 } 4074 }
4073 case KEYED_PROPERTY: { 4075 case KEYED_PROPERTY: {
4074 __ mov(a0, result_register()); // Value. 4076 __ mov(a0, result_register()); // Value.
4075 __ pop(a1); // Key. 4077 __ pop(a1); // Key.
4076 __ pop(a2); // Receiver. 4078 __ pop(a2); // Receiver.
4077 Handle<Code> ic = is_strict_mode() 4079 Handle<Code> ic = is_classic_mode()
4078 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 4080 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4079 : isolate()->builtins()->KeyedStoreIC_Initialize(); 4081 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4080 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 4082 __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
4081 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4083 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4082 if (expr->is_postfix()) { 4084 if (expr->is_postfix()) {
4083 if (!context()->IsEffect()) { 4085 if (!context()->IsEffect()) {
4084 context()->PlugTOS(); 4086 context()->PlugTOS();
4085 } 4087 }
4086 } else { 4088 } else {
4087 context()->Plug(v0); 4089 context()->Plug(v0);
4088 } 4090 }
4089 break; 4091 break;
(...skipping 344 matching lines...)
4434 *context_length = 0; 4436 *context_length = 0;
4435 return previous_; 4437 return previous_;
4436 } 4438 }
4437 4439
4438 4440
4439 #undef __ 4441 #undef __
4440 4442
4441 } } // namespace v8::internal 4443 } } // namespace v8::internal
4442 4444
4443 #endif // V8_TARGET_ARCH_MIPS 4445 #endif // V8_TARGET_ARCH_MIPS