Chromium Code Reviews

Side by Side Diff: src/ia32/full-codegen-ia32.cc

Issue 196133017: Experimental parser: merge r19949 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
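Note: the bulk of the hunks below track the "classic mode" to "sloppy mode" rename: is_classic_mode() checks become strict_mode() comparisons against SLOPPY/STRICT, CONST becomes CONST_LEGACY, CONST_HARMONY becomes CONST, and the NEW_NON_STRICT_* stub types become NEW_SLOPPY_*. A minimal C++ sketch of the new check style follows, for orientation only; the enum layout and the helper function are assumptions, only the SLOPPY/STRICT names and the strict_mode()/is_native() accessors appear in the diff itself.

    // Illustrative sketch, not part of the patch: the strict-mode check style
    // this merge adopts (the names SLOPPY/STRICT are taken from the hunks below).
    enum StrictMode { SLOPPY, STRICT };

    // Sloppy-mode (formerly "classic mode"), non-native functions replace an
    // undefined receiver with the global proxy, as in Generate() below.
    bool NeedsReceiverPatching(StrictMode strict_mode, bool is_native) {
      return strict_mode == SLOPPY && !is_native;
    }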
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 101 matching lines...)
112 // o ebp: our caller's frame pointer 112 // o ebp: our caller's frame pointer
113 // o esp: stack pointer (pointing to return address) 113 // o esp: stack pointer (pointing to return address)
114 // 114 //
115 // The function builds a JS frame. Please see JavaScriptFrameConstants in 115 // The function builds a JS frame. Please see JavaScriptFrameConstants in
116 // frames-ia32.h for its layout. 116 // frames-ia32.h for its layout.
117 void FullCodeGenerator::Generate() { 117 void FullCodeGenerator::Generate() {
118 CompilationInfo* info = info_; 118 CompilationInfo* info = info_;
119 handler_table_ = 119 handler_table_ =
120 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 120 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
121 121
122 InitializeFeedbackVector();
123
124 profiling_counter_ = isolate()->factory()->NewCell( 122 profiling_counter_ = isolate()->factory()->NewCell(
125 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 123 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
126 SetFunctionPosition(function()); 124 SetFunctionPosition(function());
127 Comment cmnt(masm_, "[ function compiled by full code generator"); 125 Comment cmnt(masm_, "[ function compiled by full code generator");
128 126
129 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 127 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
130 128
131 #ifdef DEBUG 129 #ifdef DEBUG
132 if (strlen(FLAG_stop_at) > 0 && 130 if (strlen(FLAG_stop_at) > 0 &&
133 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 131 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
134 __ int3(); 132 __ int3();
135 } 133 }
136 #endif 134 #endif
137 135
138 // Classic mode functions and builtins need to replace the receiver with the 136 // Sloppy mode functions and builtins need to replace the receiver with the
139 // global proxy when called as functions (without an explicit receiver 137 // global proxy when called as functions (without an explicit receiver
140 // object). 138 // object).
141 if (info->is_classic_mode() && !info->is_native()) { 139 if (info->strict_mode() == SLOPPY && !info->is_native()) {
142 Label ok; 140 Label ok;
143 // +1 for return address. 141 // +1 for return address.
144 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize; 142 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
145 __ mov(ecx, Operand(esp, receiver_offset)); 143 __ mov(ecx, Operand(esp, receiver_offset));
146 144
147 __ cmp(ecx, isolate()->factory()->undefined_value()); 145 __ cmp(ecx, isolate()->factory()->undefined_value());
148 __ j(not_equal, &ok, Label::kNear); 146 __ j(not_equal, &ok, Label::kNear);
149 147
150 __ mov(ecx, GlobalObjectOperand()); 148 __ mov(ecx, GlobalObjectOperand());
151 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset)); 149 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
(...skipping 86 matching lines...)
238 int offset = num_parameters * kPointerSize; 236 int offset = num_parameters * kPointerSize;
239 __ lea(edx, 237 __ lea(edx,
240 Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset)); 238 Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
241 __ push(edx); 239 __ push(edx);
242 __ push(Immediate(Smi::FromInt(num_parameters))); 240 __ push(Immediate(Smi::FromInt(num_parameters)));
243 // Arguments to ArgumentsAccessStub: 241 // Arguments to ArgumentsAccessStub:
244 // function, receiver address, parameter count. 242 // function, receiver address, parameter count.
245 // The stub will rewrite receiver and parameter count if the previous 243 // The stub will rewrite receiver and parameter count if the previous
246 // stack frame was an arguments adapter frame. 244 // stack frame was an arguments adapter frame.
247 ArgumentsAccessStub::Type type; 245 ArgumentsAccessStub::Type type;
248 if (!is_classic_mode()) { 246 if (strict_mode() == STRICT) {
249 type = ArgumentsAccessStub::NEW_STRICT; 247 type = ArgumentsAccessStub::NEW_STRICT;
250 } else if (function()->has_duplicate_parameters()) { 248 } else if (function()->has_duplicate_parameters()) {
251 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; 249 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
252 } else { 250 } else {
253 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; 251 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
254 } 252 }
255 ArgumentsAccessStub stub(type); 253 ArgumentsAccessStub stub(type);
256 __ CallStub(&stub); 254 __ CallStub(&stub);
257 255
258 SetVar(arguments, eax, ebx, edx); 256 SetVar(arguments, eax, ebx, edx);
259 } 257 }
260 258
261 if (FLAG_trace) { 259 if (FLAG_trace) {
262 __ CallRuntime(Runtime::kTraceEnter, 0); 260 __ CallRuntime(Runtime::kTraceEnter, 0);
263 } 261 }
264 262
265 // Visit the declarations and body unless there is an illegal 263 // Visit the declarations and body unless there is an illegal
266 // redeclaration. 264 // redeclaration.
267 if (scope()->HasIllegalRedeclaration()) { 265 if (scope()->HasIllegalRedeclaration()) {
268 Comment cmnt(masm_, "[ Declarations"); 266 Comment cmnt(masm_, "[ Declarations");
269 scope()->VisitIllegalRedeclaration(this); 267 scope()->VisitIllegalRedeclaration(this);
270 268
271 } else { 269 } else {
272 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 270 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
273 { Comment cmnt(masm_, "[ Declarations"); 271 { Comment cmnt(masm_, "[ Declarations");
274 // For named function expressions, declare the function name as a 272 // For named function expressions, declare the function name as a
275 // constant. 273 // constant.
276 if (scope()->is_function_scope() && scope()->function() != NULL) { 274 if (scope()->is_function_scope() && scope()->function() != NULL) {
277 VariableDeclaration* function = scope()->function(); 275 VariableDeclaration* function = scope()->function();
278 ASSERT(function->proxy()->var()->mode() == CONST || 276 ASSERT(function->proxy()->var()->mode() == CONST ||
279 function->proxy()->var()->mode() == CONST_HARMONY); 277 function->proxy()->var()->mode() == CONST_LEGACY);
280 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); 278 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
281 VisitVariableDeclaration(function); 279 VisitVariableDeclaration(function);
282 } 280 }
283 VisitDeclarations(scope()->declarations()); 281 VisitDeclarations(scope()->declarations());
284 } 282 }
285 283
286 { Comment cmnt(masm_, "[ Stack check"); 284 { Comment cmnt(masm_, "[ Stack check");
287 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 285 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
288 Label ok; 286 Label ok;
289 ExternalReference stack_limit = 287 ExternalReference stack_limit =
(...skipping 449 matching lines...)
739 737
740 738
741 void FullCodeGenerator::VisitVariableDeclaration( 739 void FullCodeGenerator::VisitVariableDeclaration(
742 VariableDeclaration* declaration) { 740 VariableDeclaration* declaration) {
743 // If it was not possible to allocate the variable at compile time, we 741 // If it was not possible to allocate the variable at compile time, we
744 // need to "declare" it at runtime to make sure it actually exists in the 742 // need to "declare" it at runtime to make sure it actually exists in the
745 // local context. 743 // local context.
746 VariableProxy* proxy = declaration->proxy(); 744 VariableProxy* proxy = declaration->proxy();
747 VariableMode mode = declaration->mode(); 745 VariableMode mode = declaration->mode();
748 Variable* variable = proxy->var(); 746 Variable* variable = proxy->var();
749 bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET; 747 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
750 switch (variable->location()) { 748 switch (variable->location()) {
751 case Variable::UNALLOCATED: 749 case Variable::UNALLOCATED:
752 globals_->Add(variable->name(), zone()); 750 globals_->Add(variable->name(), zone());
753 globals_->Add(variable->binding_needs_init() 751 globals_->Add(variable->binding_needs_init()
754 ? isolate()->factory()->the_hole_value() 752 ? isolate()->factory()->the_hole_value()
755 : isolate()->factory()->undefined_value(), zone()); 753 : isolate()->factory()->undefined_value(), zone());
756 break; 754 break;
757 755
758 case Variable::PARAMETER: 756 case Variable::PARAMETER:
759 case Variable::LOCAL: 757 case Variable::LOCAL:
(...skipping 337 matching lines...)
1097 __ jmp(&loop); 1095 __ jmp(&loop);
1098 1096
1099 __ bind(&no_descriptors); 1097 __ bind(&no_descriptors);
1100 __ add(esp, Immediate(kPointerSize)); 1098 __ add(esp, Immediate(kPointerSize));
1101 __ jmp(&exit); 1099 __ jmp(&exit);
1102 1100
1103 // We got a fixed array in register eax. Iterate through that. 1101 // We got a fixed array in register eax. Iterate through that.
1104 Label non_proxy; 1102 Label non_proxy;
1105 __ bind(&fixed_array); 1103 __ bind(&fixed_array);
1106 1104
1107 Handle<Object> feedback = Handle<Object>(
1108 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1109 isolate());
1110 StoreFeedbackVectorSlot(slot, feedback);
1111
1112 // No need for a write barrier, we are storing a Smi in the feedback vector. 1105 // No need for a write barrier, we are storing a Smi in the feedback vector.
1113 __ LoadHeapObject(ebx, FeedbackVector()); 1106 __ LoadHeapObject(ebx, FeedbackVector());
1114 __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)), 1107 __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
1115 Immediate(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); 1108 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
1116 1109
1117 __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check 1110 __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
1118 __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object 1111 __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
1119 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1112 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1120 __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx); 1113 __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1121 __ j(above, &non_proxy); 1114 __ j(above, &non_proxy);
1122 __ Set(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy 1115 __ Set(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
1123 __ bind(&non_proxy); 1116 __ bind(&non_proxy);
1124 __ push(ebx); // Smi 1117 __ push(ebx); // Smi
1125 __ push(eax); // Array 1118 __ push(eax); // Array
(...skipping 134 matching lines...)
1260 // space for nested functions that don't need literals cloning. If 1253 // space for nested functions that don't need literals cloning. If
1261 // we're running with the --always-opt or the --prepare-always-opt 1254 // we're running with the --always-opt or the --prepare-always-opt
1262 // flag, we need to use the runtime function so that the new function 1255 // flag, we need to use the runtime function so that the new function
1263 // we are creating here gets a chance to have its code optimized and 1256 // we are creating here gets a chance to have its code optimized and
1264 // doesn't just get a copy of the existing unoptimized code. 1257 // doesn't just get a copy of the existing unoptimized code.
1265 if (!FLAG_always_opt && 1258 if (!FLAG_always_opt &&
1266 !FLAG_prepare_always_opt && 1259 !FLAG_prepare_always_opt &&
1267 !pretenure && 1260 !pretenure &&
1268 scope()->is_function_scope() && 1261 scope()->is_function_scope() &&
1269 info->num_literals() == 0) { 1262 info->num_literals() == 0) {
1270 FastNewClosureStub stub(info->language_mode(), info->is_generator()); 1263 FastNewClosureStub stub(info->strict_mode(), info->is_generator());
1271 __ mov(ebx, Immediate(info)); 1264 __ mov(ebx, Immediate(info));
1272 __ CallStub(&stub); 1265 __ CallStub(&stub);
1273 } else { 1266 } else {
1274 __ push(esi); 1267 __ push(esi);
1275 __ push(Immediate(info)); 1268 __ push(Immediate(info));
1276 __ push(Immediate(pretenure 1269 __ push(Immediate(pretenure
1277 ? isolate()->factory()->true_value() 1270 ? isolate()->factory()->true_value()
1278 : isolate()->factory()->false_value())); 1271 : isolate()->factory()->false_value()));
1279 __ CallRuntime(Runtime::kNewClosure, 3); 1272 __ CallRuntime(Runtime::kNewClosure, 3);
1280 } 1273 }
1281 context()->Plug(eax); 1274 context()->Plug(eax);
1282 } 1275 }
1283 1276
1284 1277
1285 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1278 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1286 Comment cmnt(masm_, "[ VariableProxy"); 1279 Comment cmnt(masm_, "[ VariableProxy");
1287 EmitVariableLoad(expr); 1280 EmitVariableLoad(expr);
1288 } 1281 }
1289 1282
1290 1283
1291 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, 1284 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1292 TypeofState typeof_state, 1285 TypeofState typeof_state,
1293 Label* slow) { 1286 Label* slow) {
1294 Register context = esi; 1287 Register context = esi;
1295 Register temp = edx; 1288 Register temp = edx;
1296 1289
1297 Scope* s = scope(); 1290 Scope* s = scope();
1298 while (s != NULL) { 1291 while (s != NULL) {
1299 if (s->num_heap_slots() > 0) { 1292 if (s->num_heap_slots() > 0) {
1300 if (s->calls_non_strict_eval()) { 1293 if (s->calls_sloppy_eval()) {
1301 // Check that extension is NULL. 1294 // Check that extension is NULL.
1302 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), 1295 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1303 Immediate(0)); 1296 Immediate(0));
1304 __ j(not_equal, slow); 1297 __ j(not_equal, slow);
1305 } 1298 }
1306 // Load next context in chain. 1299 // Load next context in chain.
1307 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX)); 1300 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1308 // Walk the rest of the chain without clobbering esi. 1301 // Walk the rest of the chain without clobbering esi.
1309 context = temp; 1302 context = temp;
1310 } 1303 }
1311 // If no outer scope calls eval, we do not need to check more 1304 // If no outer scope calls eval, we do not need to check more
1312 // context extensions. If we have reached an eval scope, we check 1305 // context extensions. If we have reached an eval scope, we check
1313 // all extensions from this point. 1306 // all extensions from this point.
1314 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; 1307 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1315 s = s->outer_scope(); 1308 s = s->outer_scope();
1316 } 1309 }
1317 1310
1318 if (s != NULL && s->is_eval_scope()) { 1311 if (s != NULL && s->is_eval_scope()) {
1319 // Loop up the context chain. There is no frame effect so it is 1312 // Loop up the context chain. There is no frame effect so it is
1320 // safe to use raw labels here. 1313 // safe to use raw labels here.
1321 Label next, fast; 1314 Label next, fast;
1322 if (!context.is(temp)) { 1315 if (!context.is(temp)) {
1323 __ mov(temp, context); 1316 __ mov(temp, context);
1324 } 1317 }
(...skipping 24 matching lines...)
1349 1342
1350 1343
1351 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1344 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1352 Label* slow) { 1345 Label* slow) {
1353 ASSERT(var->IsContextSlot()); 1346 ASSERT(var->IsContextSlot());
1354 Register context = esi; 1347 Register context = esi;
1355 Register temp = ebx; 1348 Register temp = ebx;
1356 1349
1357 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1350 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1358 if (s->num_heap_slots() > 0) { 1351 if (s->num_heap_slots() > 0) {
1359 if (s->calls_non_strict_eval()) { 1352 if (s->calls_sloppy_eval()) {
1360 // Check that extension is NULL. 1353 // Check that extension is NULL.
1361 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), 1354 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1362 Immediate(0)); 1355 Immediate(0));
1363 __ j(not_equal, slow); 1356 __ j(not_equal, slow);
1364 } 1357 }
1365 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX)); 1358 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1366 // Walk the rest of the chain without clobbering esi. 1359 // Walk the rest of the chain without clobbering esi.
1367 context = temp; 1360 context = temp;
1368 } 1361 }
1369 } 1362 }
(...skipping 16 matching lines...)
1386 // eval-introduced variables. Eval is used a lot without 1379 // eval-introduced variables. Eval is used a lot without
1387 // introducing variables. In those cases, we do not want to 1380 // introducing variables. In those cases, we do not want to
1388 // perform a runtime call for all variables in the scope 1381 // perform a runtime call for all variables in the scope
1389 // containing the eval. 1382 // containing the eval.
1390 if (var->mode() == DYNAMIC_GLOBAL) { 1383 if (var->mode() == DYNAMIC_GLOBAL) {
1391 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); 1384 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1392 __ jmp(done); 1385 __ jmp(done);
1393 } else if (var->mode() == DYNAMIC_LOCAL) { 1386 } else if (var->mode() == DYNAMIC_LOCAL) {
1394 Variable* local = var->local_if_not_shadowed(); 1387 Variable* local = var->local_if_not_shadowed();
1395 __ mov(eax, ContextSlotOperandCheckExtensions(local, slow)); 1388 __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1396 if (local->mode() == LET || 1389 if (local->mode() == LET || local->mode() == CONST ||
1397 local->mode() == CONST || 1390 local->mode() == CONST_LEGACY) {
1398 local->mode() == CONST_HARMONY) {
1399 __ cmp(eax, isolate()->factory()->the_hole_value()); 1391 __ cmp(eax, isolate()->factory()->the_hole_value());
1400 __ j(not_equal, done); 1392 __ j(not_equal, done);
1401 if (local->mode() == CONST) { 1393 if (local->mode() == CONST_LEGACY) {
1402 __ mov(eax, isolate()->factory()->undefined_value()); 1394 __ mov(eax, isolate()->factory()->undefined_value());
1403 } else { // LET || CONST_HARMONY 1395 } else { // LET || CONST
1404 __ push(Immediate(var->name())); 1396 __ push(Immediate(var->name()));
1405 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1397 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1406 } 1398 }
1407 } 1399 }
1408 __ jmp(done); 1400 __ jmp(done);
1409 } 1401 }
1410 } 1402 }
1411 1403
1412 1404
1413 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1405 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
(...skipping 44 matching lines...)
1458 // binding is initialized: 1450 // binding is initialized:
1459 // function() { f(); let x = 1; function f() { x = 2; } } 1451 // function() { f(); let x = 1; function f() { x = 2; } }
1460 // 1452 //
1461 bool skip_init_check; 1453 bool skip_init_check;
1462 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { 1454 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1463 skip_init_check = false; 1455 skip_init_check = false;
1464 } else { 1456 } else {
1465 // Check that we always have valid source position. 1457 // Check that we always have valid source position.
1466 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); 1458 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1467 ASSERT(proxy->position() != RelocInfo::kNoPosition); 1459 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1468 skip_init_check = var->mode() != CONST && 1460 skip_init_check = var->mode() != CONST_LEGACY &&
1469 var->initializer_position() < proxy->position(); 1461 var->initializer_position() < proxy->position();
1470 } 1462 }
1471 1463
1472 if (!skip_init_check) { 1464 if (!skip_init_check) {
1473 // Let and const need a read barrier. 1465 // Let and const need a read barrier.
1474 Label done; 1466 Label done;
1475 GetVar(eax, var); 1467 GetVar(eax, var);
1476 __ cmp(eax, isolate()->factory()->the_hole_value()); 1468 __ cmp(eax, isolate()->factory()->the_hole_value());
1477 __ j(not_equal, &done, Label::kNear); 1469 __ j(not_equal, &done, Label::kNear);
1478 if (var->mode() == LET || var->mode() == CONST_HARMONY) { 1470 if (var->mode() == LET || var->mode() == CONST) {
1479 // Throw a reference error when using an uninitialized let/const 1471 // Throw a reference error when using an uninitialized let/const
1480 // binding in harmony mode. 1472 // binding in harmony mode.
1481 __ push(Immediate(var->name())); 1473 __ push(Immediate(var->name()));
1482 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1474 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1483 } else { 1475 } else {
1484 // Uninitalized const bindings outside of harmony mode are unholed. 1476 // Uninitalized const bindings outside of harmony mode are unholed.
1485 ASSERT(var->mode() == CONST); 1477 ASSERT(var->mode() == CONST_LEGACY);
1486 __ mov(eax, isolate()->factory()->undefined_value()); 1478 __ mov(eax, isolate()->factory()->undefined_value());
1487 } 1479 }
1488 __ bind(&done); 1480 __ bind(&done);
1489 context()->Plug(eax); 1481 context()->Plug(eax);
1490 break; 1482 break;
1491 } 1483 }
1492 } 1484 }
1493 context()->Plug(var); 1485 context()->Plug(var);
1494 break; 1486 break;
1495 } 1487 }
(...skipping 84 matching lines...)
1580 1572
1581 expr->BuildConstantProperties(isolate()); 1573 expr->BuildConstantProperties(isolate());
1582 Handle<FixedArray> constant_properties = expr->constant_properties(); 1574 Handle<FixedArray> constant_properties = expr->constant_properties();
1583 int flags = expr->fast_elements() 1575 int flags = expr->fast_elements()
1584 ? ObjectLiteral::kFastElements 1576 ? ObjectLiteral::kFastElements
1585 : ObjectLiteral::kNoFlags; 1577 : ObjectLiteral::kNoFlags;
1586 flags |= expr->has_function() 1578 flags |= expr->has_function()
1587 ? ObjectLiteral::kHasFunction 1579 ? ObjectLiteral::kHasFunction
1588 : ObjectLiteral::kNoFlags; 1580 : ObjectLiteral::kNoFlags;
1589 int properties_count = constant_properties->length() / 2; 1581 int properties_count = constant_properties->length() / 2;
1590 if ((FLAG_track_double_fields && expr->may_store_doubles()) || 1582 if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
1591 expr->depth() > 1 || Serializer::enabled() ||
1592 flags != ObjectLiteral::kFastElements || 1583 flags != ObjectLiteral::kFastElements ||
1593 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1584 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1594 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 1585 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1595 __ push(FieldOperand(edi, JSFunction::kLiteralsOffset)); 1586 __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
1596 __ push(Immediate(Smi::FromInt(expr->literal_index()))); 1587 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1597 __ push(Immediate(constant_properties)); 1588 __ push(Immediate(constant_properties));
1598 __ push(Immediate(Smi::FromInt(flags))); 1589 __ push(Immediate(Smi::FromInt(flags)));
1599 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); 1590 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1600 } else { 1591 } else {
1601 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 1592 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
(...skipping 665 matching lines...)
2267 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2258 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2268 patch_site.EmitPatchInfo(); 2259 patch_site.EmitPatchInfo();
2269 __ jmp(&done, Label::kNear); 2260 __ jmp(&done, Label::kNear);
2270 2261
2271 // Smi case. 2262 // Smi case.
2272 __ bind(&smi_case); 2263 __ bind(&smi_case);
2273 __ mov(eax, edx); // Copy left operand in case of a stub call. 2264 __ mov(eax, edx); // Copy left operand in case of a stub call.
2274 2265
2275 switch (op) { 2266 switch (op) {
2276 case Token::SAR: 2267 case Token::SAR:
2277 __ SmiUntag(eax);
2278 __ SmiUntag(ecx); 2268 __ SmiUntag(ecx);
2279 __ sar_cl(eax); // No checks of result necessary 2269 __ sar_cl(eax); // No checks of result necessary
2280 __ SmiTag(eax); 2270 __ and_(eax, Immediate(~kSmiTagMask));
2281 break; 2271 break;
2282 case Token::SHL: { 2272 case Token::SHL: {
2283 Label result_ok; 2273 Label result_ok;
2284 __ SmiUntag(eax); 2274 __ SmiUntag(eax);
2285 __ SmiUntag(ecx); 2275 __ SmiUntag(ecx);
2286 __ shl_cl(eax); 2276 __ shl_cl(eax);
2287 // Check that the *signed* result fits in a smi. 2277 // Check that the *signed* result fits in a smi.
2288 __ cmp(eax, 0xc0000000); 2278 __ cmp(eax, 0xc0000000);
2289 __ j(positive, &result_ok); 2279 __ j(positive, &result_ok);
2290 __ SmiTag(ecx); 2280 __ SmiTag(ecx);
(...skipping 99 matching lines...)
2390 CallStoreIC(); 2380 CallStoreIC();
2391 break; 2381 break;
2392 } 2382 }
2393 case KEYED_PROPERTY: { 2383 case KEYED_PROPERTY: {
2394 __ push(eax); // Preserve value. 2384 __ push(eax); // Preserve value.
2395 VisitForStackValue(prop->obj()); 2385 VisitForStackValue(prop->obj());
2396 VisitForAccumulatorValue(prop->key()); 2386 VisitForAccumulatorValue(prop->key());
2397 __ mov(ecx, eax); 2387 __ mov(ecx, eax);
2398 __ pop(edx); // Receiver. 2388 __ pop(edx); // Receiver.
2399 __ pop(eax); // Restore value. 2389 __ pop(eax); // Restore value.
2400 Handle<Code> ic = is_classic_mode() 2390 Handle<Code> ic = strict_mode() == SLOPPY
2401 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2391 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2402 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2392 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2403 CallIC(ic); 2393 CallIC(ic);
2404 break; 2394 break;
2405 } 2395 }
2406 } 2396 }
2407 context()->Plug(eax); 2397 context()->Plug(eax);
2408 } 2398 }
2409 2399
2410 2400
2411 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 2401 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2412 Variable* var, MemOperand location) { 2402 Variable* var, MemOperand location) {
2413 __ mov(location, eax); 2403 __ mov(location, eax);
2414 if (var->IsContextSlot()) { 2404 if (var->IsContextSlot()) {
2415 __ mov(edx, eax); 2405 __ mov(edx, eax);
2416 int offset = Context::SlotOffset(var->index()); 2406 int offset = Context::SlotOffset(var->index());
2417 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs); 2407 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2418 } 2408 }
2419 } 2409 }
2420 2410
2421 2411
2422 void FullCodeGenerator::EmitCallStoreContextSlot( 2412 void FullCodeGenerator::EmitCallStoreContextSlot(
2423 Handle<String> name, LanguageMode mode) { 2413 Handle<String> name, StrictMode strict_mode) {
2424 __ push(eax); // Value. 2414 __ push(eax); // Value.
2425 __ push(esi); // Context. 2415 __ push(esi); // Context.
2426 __ push(Immediate(name)); 2416 __ push(Immediate(name));
2427 __ push(Immediate(Smi::FromInt(mode))); 2417 __ push(Immediate(Smi::FromInt(strict_mode)));
2428 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2418 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2429 } 2419 }
2430 2420
2431 2421
2432 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2422 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2433 Token::Value op) { 2423 Token::Value op) {
2434 if (var->IsUnallocated()) { 2424 if (var->IsUnallocated()) {
2435 // Global var, const, or let. 2425 // Global var, const, or let.
2436 __ mov(ecx, var->name()); 2426 __ mov(ecx, var->name());
2437 __ mov(edx, GlobalObjectOperand()); 2427 __ mov(edx, GlobalObjectOperand());
2438 CallStoreIC(); 2428 CallStoreIC();
2439 2429
2440 } else if (op == Token::INIT_CONST) { 2430 } else if (op == Token::INIT_CONST_LEGACY) {
2441 // Const initializers need a write barrier. 2431 // Const initializers need a write barrier.
2442 ASSERT(!var->IsParameter()); // No const parameters. 2432 ASSERT(!var->IsParameter()); // No const parameters.
2443 if (var->IsLookupSlot()) { 2433 if (var->IsLookupSlot()) {
2444 __ push(eax); 2434 __ push(eax);
2445 __ push(esi); 2435 __ push(esi);
2446 __ push(Immediate(var->name())); 2436 __ push(Immediate(var->name()));
2447 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 2437 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2448 } else { 2438 } else {
2449 ASSERT(var->IsStackLocal() || var->IsContextSlot()); 2439 ASSERT(var->IsStackLocal() || var->IsContextSlot());
2450 Label skip; 2440 Label skip;
2451 MemOperand location = VarOperand(var, ecx); 2441 MemOperand location = VarOperand(var, ecx);
2452 __ mov(edx, location); 2442 __ mov(edx, location);
2453 __ cmp(edx, isolate()->factory()->the_hole_value()); 2443 __ cmp(edx, isolate()->factory()->the_hole_value());
2454 __ j(not_equal, &skip, Label::kNear); 2444 __ j(not_equal, &skip, Label::kNear);
2455 EmitStoreToStackLocalOrContextSlot(var, location); 2445 EmitStoreToStackLocalOrContextSlot(var, location);
2456 __ bind(&skip); 2446 __ bind(&skip);
2457 } 2447 }
2458 2448
2459 } else if (var->mode() == LET && op != Token::INIT_LET) { 2449 } else if (var->mode() == LET && op != Token::INIT_LET) {
2460 // Non-initializing assignment to let variable needs a write barrier. 2450 // Non-initializing assignment to let variable needs a write barrier.
2461 if (var->IsLookupSlot()) { 2451 if (var->IsLookupSlot()) {
2462 EmitCallStoreContextSlot(var->name(), language_mode()); 2452 EmitCallStoreContextSlot(var->name(), strict_mode());
2463 } else { 2453 } else {
2464 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2454 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2465 Label assign; 2455 Label assign;
2466 MemOperand location = VarOperand(var, ecx); 2456 MemOperand location = VarOperand(var, ecx);
2467 __ mov(edx, location); 2457 __ mov(edx, location);
2468 __ cmp(edx, isolate()->factory()->the_hole_value()); 2458 __ cmp(edx, isolate()->factory()->the_hole_value());
2469 __ j(not_equal, &assign, Label::kNear); 2459 __ j(not_equal, &assign, Label::kNear);
2470 __ push(Immediate(var->name())); 2460 __ push(Immediate(var->name()));
2471 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2461 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2472 __ bind(&assign); 2462 __ bind(&assign);
2473 EmitStoreToStackLocalOrContextSlot(var, location); 2463 EmitStoreToStackLocalOrContextSlot(var, location);
2474 } 2464 }
2475 2465
2476 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { 2466 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2477 // Assignment to var or initializing assignment to let/const 2467 // Assignment to var or initializing assignment to let/const
2478 // in harmony mode. 2468 // in harmony mode.
2479 if (var->IsLookupSlot()) { 2469 if (var->IsLookupSlot()) {
2480 EmitCallStoreContextSlot(var->name(), language_mode()); 2470 EmitCallStoreContextSlot(var->name(), strict_mode());
2481 } else { 2471 } else {
2482 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2472 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2483 MemOperand location = VarOperand(var, ecx); 2473 MemOperand location = VarOperand(var, ecx);
2484 if (generate_debug_code_ && op == Token::INIT_LET) { 2474 if (generate_debug_code_ && op == Token::INIT_LET) {
2485 // Check for an uninitialized let binding. 2475 // Check for an uninitialized let binding.
2486 __ mov(edx, location); 2476 __ mov(edx, location);
2487 __ cmp(edx, isolate()->factory()->the_hole_value()); 2477 __ cmp(edx, isolate()->factory()->the_hole_value());
2488 __ Check(equal, kLetBindingReInitialization); 2478 __ Check(equal, kLetBindingReInitialization);
2489 } 2479 }
2490 EmitStoreToStackLocalOrContextSlot(var, location); 2480 EmitStoreToStackLocalOrContextSlot(var, location);
(...skipping 25 matching lines...)
2516 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2506 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2517 // Assignment to a property, using a keyed store IC. 2507 // Assignment to a property, using a keyed store IC.
2518 // eax : value 2508 // eax : value
2519 // esp[0] : key 2509 // esp[0] : key
2520 // esp[kPointerSize] : receiver 2510 // esp[kPointerSize] : receiver
2521 2511
2522 __ pop(ecx); // Key. 2512 __ pop(ecx); // Key.
2523 __ pop(edx); 2513 __ pop(edx);
2524 // Record source code position before IC call. 2514 // Record source code position before IC call.
2525 SetSourcePosition(expr->position()); 2515 SetSourcePosition(expr->position());
2526 Handle<Code> ic = is_classic_mode() 2516 Handle<Code> ic = strict_mode() == SLOPPY
2527 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2517 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2528 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2518 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2529 CallIC(ic, expr->AssignmentFeedbackId()); 2519 CallIC(ic, expr->AssignmentFeedbackId());
2530 2520
2531 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2521 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2532 context()->Plug(eax); 2522 context()->Plug(eax);
2533 } 2523 }
2534 2524
2535 2525
2536 void FullCodeGenerator::VisitProperty(Property* expr) { 2526 void FullCodeGenerator::VisitProperty(Property* expr) {
(...skipping 33 matching lines...)
2570 int arg_count = args->length(); 2560 int arg_count = args->length();
2571 2561
2572 CallFunctionFlags flags; 2562 CallFunctionFlags flags;
2573 // Get the target function. 2563 // Get the target function.
2574 if (callee->IsVariableProxy()) { 2564 if (callee->IsVariableProxy()) {
2575 { StackValueContext context(this); 2565 { StackValueContext context(this);
2576 EmitVariableLoad(callee->AsVariableProxy()); 2566 EmitVariableLoad(callee->AsVariableProxy());
2577 PrepareForBailout(callee, NO_REGISTERS); 2567 PrepareForBailout(callee, NO_REGISTERS);
2578 } 2568 }
2579 // Push undefined as receiver. This is patched in the method prologue if it 2569 // Push undefined as receiver. This is patched in the method prologue if it
2580 // is a classic mode method. 2570 // is a sloppy mode method.
2581 __ push(Immediate(isolate()->factory()->undefined_value())); 2571 __ push(Immediate(isolate()->factory()->undefined_value()));
2582 flags = NO_CALL_FUNCTION_FLAGS; 2572 flags = NO_CALL_FUNCTION_FLAGS;
2583 } else { 2573 } else {
2584 // Load the function from the receiver. 2574 // Load the function from the receiver.
2585 ASSERT(callee->IsProperty()); 2575 ASSERT(callee->IsProperty());
2586 __ mov(edx, Operand(esp, 0)); 2576 __ mov(edx, Operand(esp, 0));
2587 EmitNamedPropertyLoad(callee->AsProperty()); 2577 EmitNamedPropertyLoad(callee->AsProperty());
2588 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2578 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2589 // Push the target function under the receiver. 2579 // Push the target function under the receiver.
2590 __ push(Operand(esp, 0)); 2580 __ push(Operand(esp, 0));
(...skipping 70 matching lines...)
2661 ZoneList<Expression*>* args = expr->arguments(); 2651 ZoneList<Expression*>* args = expr->arguments();
2662 int arg_count = args->length(); 2652 int arg_count = args->length();
2663 { PreservePositionScope scope(masm()->positions_recorder()); 2653 { PreservePositionScope scope(masm()->positions_recorder());
2664 for (int i = 0; i < arg_count; i++) { 2654 for (int i = 0; i < arg_count; i++) {
2665 VisitForStackValue(args->at(i)); 2655 VisitForStackValue(args->at(i));
2666 } 2656 }
2667 } 2657 }
2668 // Record source position for debugger. 2658 // Record source position for debugger.
2669 SetSourcePosition(expr->position()); 2659 SetSourcePosition(expr->position());
2670 2660
2671 Handle<Object> uninitialized =
2672 TypeFeedbackInfo::UninitializedSentinel(isolate());
2673 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2674 __ LoadHeapObject(ebx, FeedbackVector()); 2661 __ LoadHeapObject(ebx, FeedbackVector());
2675 __ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot()))); 2662 __ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
2676 2663
2677 // Record call targets in unoptimized code. 2664 // Record call targets in unoptimized code.
2678 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); 2665 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2679 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); 2666 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2680 __ CallStub(&stub); 2667 __ CallStub(&stub);
2681 2668
2682 RecordJSReturnSite(expr); 2669 RecordJSReturnSite(expr);
2683 // Restore context register. 2670 // Restore context register.
2684 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2671 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2685 context()->DropAndPlug(1, eax); 2672 context()->DropAndPlug(1, eax);
2686 } 2673 }
2687 2674
2688 2675
2689 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2676 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2690 // Push copy of the first argument or undefined if it doesn't exist. 2677 // Push copy of the first argument or undefined if it doesn't exist.
2691 if (arg_count > 0) { 2678 if (arg_count > 0) {
2692 __ push(Operand(esp, arg_count * kPointerSize)); 2679 __ push(Operand(esp, arg_count * kPointerSize));
2693 } else { 2680 } else {
2694 __ push(Immediate(isolate()->factory()->undefined_value())); 2681 __ push(Immediate(isolate()->factory()->undefined_value()));
2695 } 2682 }
2696 2683
2697 // Push the receiver of the enclosing function. 2684 // Push the receiver of the enclosing function.
2698 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize)); 2685 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2699 // Push the language mode. 2686 // Push the language mode.
2700 __ push(Immediate(Smi::FromInt(language_mode()))); 2687 __ push(Immediate(Smi::FromInt(strict_mode())));
2701 2688
2702 // Push the start position of the scope the calls resides in. 2689 // Push the start position of the scope the calls resides in.
2703 __ push(Immediate(Smi::FromInt(scope()->start_position()))); 2690 __ push(Immediate(Smi::FromInt(scope()->start_position())));
2704 2691
2705 // Do the runtime call. 2692 // Do the runtime call.
2706 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); 2693 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2707 } 2694 }
2708 2695
2709 2696
2710 void FullCodeGenerator::VisitCall(Call* expr) { 2697 void FullCodeGenerator::VisitCall(Call* expr) {
(...skipping 130 matching lines...)
2841 2828
2842 // Call the construct call builtin that handles allocation and 2829 // Call the construct call builtin that handles allocation and
2843 // constructor invocation. 2830 // constructor invocation.
2844 SetSourcePosition(expr->position()); 2831 SetSourcePosition(expr->position());
2845 2832
2846 // Load function and argument count into edi and eax. 2833 // Load function and argument count into edi and eax.
2847 __ Set(eax, Immediate(arg_count)); 2834 __ Set(eax, Immediate(arg_count));
2848 __ mov(edi, Operand(esp, arg_count * kPointerSize)); 2835 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2849 2836
2850 // Record call targets in unoptimized code. 2837 // Record call targets in unoptimized code.
2851 Handle<Object> uninitialized =
2852 TypeFeedbackInfo::UninitializedSentinel(isolate());
2853 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2854 __ LoadHeapObject(ebx, FeedbackVector()); 2838 __ LoadHeapObject(ebx, FeedbackVector());
2855 __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot()))); 2839 __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
2856 2840
2857 CallConstructStub stub(RECORD_CALL_TARGET); 2841 CallConstructStub stub(RECORD_CALL_TARGET);
2858 __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); 2842 __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2859 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2843 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2860 context()->Plug(eax); 2844 context()->Plug(eax);
2861 } 2845 }
2862 2846
2863 2847
(...skipping 1311 matching lines...)
4175 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 4159 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4176 switch (expr->op()) { 4160 switch (expr->op()) {
4177 case Token::DELETE: { 4161 case Token::DELETE: {
4178 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 4162 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4179 Property* property = expr->expression()->AsProperty(); 4163 Property* property = expr->expression()->AsProperty();
4180 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 4164 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4181 4165
4182 if (property != NULL) { 4166 if (property != NULL) {
4183 VisitForStackValue(property->obj()); 4167 VisitForStackValue(property->obj());
4184 VisitForStackValue(property->key()); 4168 VisitForStackValue(property->key());
4185 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE) 4169 __ push(Immediate(Smi::FromInt(strict_mode())));
4186 ? kNonStrictMode : kStrictMode;
4187 __ push(Immediate(Smi::FromInt(strict_mode_flag)));
4188 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4170 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4189 context()->Plug(eax); 4171 context()->Plug(eax);
4190 } else if (proxy != NULL) { 4172 } else if (proxy != NULL) {
4191 Variable* var = proxy->var(); 4173 Variable* var = proxy->var();
4192 // Delete of an unqualified identifier is disallowed in strict mode 4174 // Delete of an unqualified identifier is disallowed in strict mode
4193 // but "delete this" is allowed. 4175 // but "delete this" is allowed.
4194 ASSERT(language_mode() == CLASSIC_MODE || var->is_this()); 4176 ASSERT(strict_mode() == SLOPPY || var->is_this());
4195 if (var->IsUnallocated()) { 4177 if (var->IsUnallocated()) {
4196 __ push(GlobalObjectOperand()); 4178 __ push(GlobalObjectOperand());
4197 __ push(Immediate(var->name())); 4179 __ push(Immediate(var->name()));
4198 __ push(Immediate(Smi::FromInt(kNonStrictMode))); 4180 __ push(Immediate(Smi::FromInt(SLOPPY)));
4199 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4181 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4200 context()->Plug(eax); 4182 context()->Plug(eax);
4201 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4183 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4202 // Result of deleting non-global variables is false. 'this' is 4184 // Result of deleting non-global variables is false. 'this' is
4203 // not really a variable, though we implement it as one. The 4185 // not really a variable, though we implement it as one. The
4204 // subexpression does not have side effects. 4186 // subexpression does not have side effects.
4205 context()->Plug(var->is_this()); 4187 context()->Plug(var->is_this());
4206 } else { 4188 } else {
4207 // Non-global variable. Call the runtime to try to delete from the 4189 // Non-global variable. Call the runtime to try to delete from the
4208 // context where the variable was introduced. 4190 // context where the variable was introduced.
(...skipping 246 matching lines...)
4455 context()->PlugTOS(); 4437 context()->PlugTOS();
4456 } 4438 }
4457 } else { 4439 } else {
4458 context()->Plug(eax); 4440 context()->Plug(eax);
4459 } 4441 }
4460 break; 4442 break;
4461 } 4443 }
4462 case KEYED_PROPERTY: { 4444 case KEYED_PROPERTY: {
4463 __ pop(ecx); 4445 __ pop(ecx);
4464 __ pop(edx); 4446 __ pop(edx);
4465 Handle<Code> ic = is_classic_mode() 4447 Handle<Code> ic = strict_mode() == SLOPPY
4466 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4448 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4467 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4449 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4468 CallIC(ic, expr->CountStoreFeedbackId()); 4450 CallIC(ic, expr->CountStoreFeedbackId());
4469 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4451 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4470 if (expr->is_postfix()) { 4452 if (expr->is_postfix()) {
4471 // Result is on the stack 4453 // Result is on the stack
4472 if (!context()->IsEffect()) { 4454 if (!context()->IsEffect()) {
4473 context()->PlugTOS(); 4455 context()->PlugTOS();
4474 } 4456 }
4475 } else { 4457 } else {
(...skipping 398 matching lines...)
4874 // nop 4856 // nop
4875 // nop 4857 // nop
4876 // call <on-stack replacment> 4858 // call <on-stack replacment>
4877 // ok: 4859 // ok:
4878 *jns_instr_address = kNopByteOne; 4860 *jns_instr_address = kNopByteOne;
4879 *jns_offset_address = kNopByteTwo; 4861 *jns_offset_address = kNopByteTwo;
4880 break; 4862 break;
4881 } 4863 }
4882 4864
4883 Assembler::set_target_address_at(call_target_address, 4865 Assembler::set_target_address_at(call_target_address,
4866 unoptimized_code,
4884 replacement_code->entry()); 4867 replacement_code->entry());
4885 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( 4868 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4886 unoptimized_code, call_target_address, replacement_code); 4869 unoptimized_code, call_target_address, replacement_code);
4887 } 4870 }
4888 4871
4889 4872
4890 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( 4873 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4891 Isolate* isolate, 4874 Isolate* isolate,
4892 Code* unoptimized_code, 4875 Code* unoptimized_code,
4893 Address pc) { 4876 Address pc) {
4894 Address call_target_address = pc - kIntSize; 4877 Address call_target_address = pc - kIntSize;
4895 Address jns_instr_address = call_target_address - 3; 4878 Address jns_instr_address = call_target_address - 3;
4896 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); 4879 ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
4897 4880
4898 if (*jns_instr_address == kJnsInstruction) { 4881 if (*jns_instr_address == kJnsInstruction) {
4899 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); 4882 ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
4900 ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(), 4883 ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
4901 Assembler::target_address_at(call_target_address)); 4884 Assembler::target_address_at(call_target_address,
4885 unoptimized_code));
4902 return INTERRUPT; 4886 return INTERRUPT;
4903 } 4887 }
4904 4888
4905 ASSERT_EQ(kNopByteOne, *jns_instr_address); 4889 ASSERT_EQ(kNopByteOne, *jns_instr_address);
4906 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); 4890 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
4907 4891
4908 if (Assembler::target_address_at(call_target_address) == 4892 if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
4909 isolate->builtins()->OnStackReplacement()->entry()) { 4893 isolate->builtins()->OnStackReplacement()->entry()) {
4910 return ON_STACK_REPLACEMENT; 4894 return ON_STACK_REPLACEMENT;
4911 } 4895 }
4912 4896
4913 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), 4897 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4914 Assembler::target_address_at(call_target_address)); 4898 Assembler::target_address_at(call_target_address,
4899 unoptimized_code));
4915 return OSR_AFTER_STACK_CHECK; 4900 return OSR_AFTER_STACK_CHECK;
4916 } 4901 }
4917 4902
4918 4903
4919 } } // namespace v8::internal 4904 } } // namespace v8::internal
4920 4905
4921 #endif // V8_TARGET_ARCH_IA32 4906 #endif // V8_TARGET_ARCH_IA32