Chromium Code Reviews

Side by Side Diff: src/ia32/full-codegen-ia32.cc

Issue 6529055: [Isolates] Merge crankshaft (r5922 from bleeding_edge). (Closed)
Patch Set: Win32 port (created 9 years, 10 months ago)
OLD | NEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 150 matching lines...)
161 } 161 }
162 // Visit all the explicit declarations unless there is an illegal 162 // Visit all the explicit declarations unless there is an illegal
163 // redeclaration. 163 // redeclaration.
164 if (scope()->HasIllegalRedeclaration()) { 164 if (scope()->HasIllegalRedeclaration()) {
165 scope()->VisitIllegalRedeclaration(this); 165 scope()->VisitIllegalRedeclaration(this);
166 } else { 166 } else {
167 VisitDeclarations(scope()->declarations()); 167 VisitDeclarations(scope()->declarations());
168 } 168 }
169 } 169 }
170 170
171 if (FLAG_trace) {
172 __ CallRuntime(Runtime::kTraceEnter, 0);
173 }
174
171 { Comment cmnt(masm_, "[ Stack check"); 175 { Comment cmnt(masm_, "[ Stack check");
176 PrepareForBailout(info->function(), NO_REGISTERS);
172 NearLabel ok; 177 NearLabel ok;
173 ExternalReference stack_limit = 178 ExternalReference stack_limit =
174 ExternalReference::address_of_stack_limit(); 179 ExternalReference::address_of_stack_limit();
175 __ cmp(esp, Operand::StaticVariable(stack_limit)); 180 __ cmp(esp, Operand::StaticVariable(stack_limit));
176 __ j(above_equal, &ok, taken); 181 __ j(above_equal, &ok, taken);
177 StackCheckStub stub; 182 StackCheckStub stub;
178 __ CallStub(&stub); 183 __ CallStub(&stub);
179 __ bind(&ok); 184 __ bind(&ok);
180 } 185 }
181 186
182 if (FLAG_trace) {
183 __ CallRuntime(Runtime::kTraceEnter, 0);
184 }
185
186 { Comment cmnt(masm_, "[ Body"); 187 { Comment cmnt(masm_, "[ Body");
187 ASSERT(loop_depth() == 0); 188 ASSERT(loop_depth() == 0);
188 VisitStatements(function()->body()); 189 VisitStatements(function()->body());
189 ASSERT(loop_depth() == 0); 190 ASSERT(loop_depth() == 0);
190 } 191 }
191 192
192 { Comment cmnt(masm_, "[ return <undefined>;"); 193 { Comment cmnt(masm_, "[ return <undefined>;");
193 // Emit a 'return undefined' in case control fell off the end of the body. 194 // Emit a 'return undefined' in case control fell off the end of the body.
194 __ mov(eax, FACTORY->undefined_value()); 195 __ mov(eax, FACTORY->undefined_value());
195 EmitReturnSequence(); 196 EmitReturnSequence();
196 } 197 }
197 } 198 }
198 199
199 200
201 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
202 Comment cmnt(masm_, "[ Stack check");
203 NearLabel ok;
204 ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
205 __ cmp(esp, Operand::StaticVariable(stack_limit));
206 __ j(above_equal, &ok, taken);
207 StackCheckStub stub;
208 __ CallStub(&stub);
209 __ bind(&ok);
210 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
211 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
212 RecordStackCheck(stmt->OsrEntryId());
213 // Loop stack checks can be patched to perform on-stack
214 // replacement. In order to decide whether or not to perform OSR we
215 // embed the loop depth in a test instruction after the call so we
216 // can extract it from the OSR builtin.
217 ASSERT(loop_depth() > 0);
218 __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
219 }
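
Aside, not part of the patch: the comment above notes that the loop depth is embedded in the immediate of a test instruction placed right after the stub call so the on-stack replacement code can read it back. The standalone sketch below uses hypothetical helper names and assumes only the ia32 encoding of "test eax, imm32" (opcode 0xA9 followed by a 32-bit little-endian immediate); it is an illustration, not V8 code.

// Sketch: decode a loop nesting marker of the form "test eax, imm32"
// from raw instruction bytes.
#include <cassert>
#include <cstdint>
#include <cstring>

static int ReadLoopDepthMarker(const uint8_t* insn) {
  assert(insn[0] == 0xA9);                         // "test eax, imm32" opcode
  int32_t depth;
  std::memcpy(&depth, insn + 1, sizeof(depth));    // little-endian immediate
  return depth;
}

int main() {
  const uint8_t marker[] = { 0xA9, 0x03, 0x00, 0x00, 0x00 };  // test eax, 3
  assert(ReadLoopDepthMarker(marker) == 3);
  return 0;
}
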
220
221
200 void FullCodeGenerator::EmitReturnSequence() { 222 void FullCodeGenerator::EmitReturnSequence() {
201 Comment cmnt(masm_, "[ Return sequence"); 223 Comment cmnt(masm_, "[ Return sequence");
202 if (return_label_.is_bound()) { 224 if (return_label_.is_bound()) {
203 __ jmp(&return_label_); 225 __ jmp(&return_label_);
204 } else { 226 } else {
205 // Common return label 227 // Common return label
206 __ bind(&return_label_); 228 __ bind(&return_label_);
207 if (FLAG_trace) { 229 if (FLAG_trace) {
208 __ push(eax); 230 __ push(eax);
209 __ CallRuntime(Runtime::kTraceExit, 1); 231 __ CallRuntime(Runtime::kTraceExit, 1);
210 } 232 }
211 #ifdef DEBUG 233 #ifdef DEBUG
212 // Add a label for checking the size of the code used for returning. 234 // Add a label for checking the size of the code used for returning.
213 Label check_exit_codesize; 235 Label check_exit_codesize;
214 masm_->bind(&check_exit_codesize); 236 masm_->bind(&check_exit_codesize);
215 #endif 237 #endif
216 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 238 SetSourcePosition(function()->end_position() - 1);
217 __ RecordJSReturn(); 239 __ RecordJSReturn();
218 // Do not use the leave instruction here because it is too short to 240 // Do not use the leave instruction here because it is too short to
219 // patch with the code required by the debugger. 241 // patch with the code required by the debugger.
220 __ mov(esp, ebp); 242 __ mov(esp, ebp);
221 __ pop(ebp); 243 __ pop(ebp);
222 __ ret((scope()->num_parameters() + 1) * kPointerSize); 244 __ ret((scope()->num_parameters() + 1) * kPointerSize);
223 #ifdef ENABLE_DEBUGGER_SUPPORT 245 #ifdef ENABLE_DEBUGGER_SUPPORT
224 // Check that the size of the code used for returning matches what is 246 // Check that the size of the code used for returning matches what is
225 // expected by the debugger. 247 // expected by the debugger.
226 ASSERT_EQ(Assembler::kJSReturnSequenceLength, 248 ASSERT_EQ(Assembler::kJSReturnSequenceLength,
(...skipping 32 matching lines...)
259 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { 281 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
260 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register()); 282 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
261 // Memory operands can be pushed directly. 283 // Memory operands can be pushed directly.
262 __ push(slot_operand); 284 __ push(slot_operand);
263 } 285 }
264 286
265 287
266 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { 288 void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
267 // For simplicity we always test the accumulator register. 289 // For simplicity we always test the accumulator register.
268 codegen()->Move(result_register(), slot); 290 codegen()->Move(result_register(), slot);
291 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
269 codegen()->DoTest(true_label_, false_label_, fall_through_); 292 codegen()->DoTest(true_label_, false_label_, fall_through_);
270 } 293 }
271 294
272 295
273 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 296 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
274 UNREACHABLE(); // Not used on IA32. 297 UNREACHABLE(); // Not used on IA32.
275 } 298 }
276 299
277 300
278 void FullCodeGenerator::AccumulatorValueContext::Plug( 301 void FullCodeGenerator::AccumulatorValueContext::Plug(
(...skipping 23 matching lines...)
302 } 325 }
303 326
304 327
305 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { 328 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
306 // Immediates can be pushed directly. 329 // Immediates can be pushed directly.
307 __ push(Immediate(lit)); 330 __ push(Immediate(lit));
308 } 331 }
309 332
310 333
311 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 334 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
335 codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
336 true,
337 true_label_,
338 false_label_);
312 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals. 339 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
313 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 340 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
314 __ jmp(false_label_); 341 if (false_label_ != fall_through_) __ jmp(false_label_);
315 } else if (lit->IsTrue() || lit->IsJSObject()) { 342 } else if (lit->IsTrue() || lit->IsJSObject()) {
316 __ jmp(true_label_); 343 if (true_label_ != fall_through_) __ jmp(true_label_);
317 } else if (lit->IsString()) { 344 } else if (lit->IsString()) {
318 if (String::cast(*lit)->length() == 0) { 345 if (String::cast(*lit)->length() == 0) {
319 __ jmp(false_label_); 346 if (false_label_ != fall_through_) __ jmp(false_label_);
320 } else { 347 } else {
321 __ jmp(true_label_); 348 if (true_label_ != fall_through_) __ jmp(true_label_);
322 } 349 }
323 } else if (lit->IsSmi()) { 350 } else if (lit->IsSmi()) {
324 if (Smi::cast(*lit)->value() == 0) { 351 if (Smi::cast(*lit)->value() == 0) {
325 __ jmp(false_label_); 352 if (false_label_ != fall_through_) __ jmp(false_label_);
326 } else { 353 } else {
327 __ jmp(true_label_); 354 if (true_label_ != fall_through_) __ jmp(true_label_);
328 } 355 }
329 } else { 356 } else {
330 // For simplicity we always test the accumulator register. 357 // For simplicity we always test the accumulator register.
331 __ mov(result_register(), lit); 358 __ mov(result_register(), lit);
332 codegen()->DoTest(true_label_, false_label_, fall_through_); 359 codegen()->DoTest(true_label_, false_label_, fall_through_);
333 } 360 }
334 } 361 }
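
Aside, not part of the patch: the rewritten Plug above emits a jmp only when the chosen label is not the fall-through, so the common case where control simply continues into the next block costs no instruction. A minimal standalone sketch of that pattern, with hypothetical names:

// Sketch: branch only when the target is not the label that control
// would reach by falling through anyway.
#include <cstdio>

struct Label { const char* name; };

static void JumpIfNeeded(const Label* target, const Label* fall_through) {
  if (target != fall_through) {
    std::printf("jmp %s\n", target->name);  // explicit branch required
  }
  // Otherwise the target immediately follows; fall through for free.
}

int main() {
  Label if_true = { "if_true" };
  Label if_false = { "if_false" };
  JumpIfNeeded(&if_true, &if_true);    // prints nothing
  JumpIfNeeded(&if_false, &if_true);   // prints "jmp if_false"
  return 0;
}
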
335 362
336 363
337 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 364 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
(...skipping 19 matching lines...)
357 __ mov(Operand(esp, 0), reg); 384 __ mov(Operand(esp, 0), reg);
358 } 385 }
359 386
360 387
361 void FullCodeGenerator::TestContext::DropAndPlug(int count, 388 void FullCodeGenerator::TestContext::DropAndPlug(int count,
362 Register reg) const { 389 Register reg) const {
363 ASSERT(count > 0); 390 ASSERT(count > 0);
364 // For simplicity we always test the accumulator register. 391 // For simplicity we always test the accumulator register.
365 __ Drop(count); 392 __ Drop(count);
366 __ Move(result_register(), reg); 393 __ Move(result_register(), reg);
394 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
367 codegen()->DoTest(true_label_, false_label_, fall_through_); 395 codegen()->DoTest(true_label_, false_label_, fall_through_);
368 } 396 }
369 397
370 398
371 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 399 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
372 Label* materialize_false) const { 400 Label* materialize_false) const {
373 ASSERT_EQ(materialize_true, materialize_false); 401 ASSERT(materialize_true == materialize_false);
374 __ bind(materialize_true); 402 __ bind(materialize_true);
375 } 403 }
376 404
377 405
378 void FullCodeGenerator::AccumulatorValueContext::Plug( 406 void FullCodeGenerator::AccumulatorValueContext::Plug(
379 Label* materialize_true, 407 Label* materialize_true,
380 Label* materialize_false) const { 408 Label* materialize_false) const {
381 NearLabel done; 409 NearLabel done;
382 __ bind(materialize_true); 410 __ bind(materialize_true);
383 __ mov(result_register(), FACTORY->true_value()); 411 __ mov(result_register(), FACTORY->true_value());
(...skipping 12 matching lines...)
396 __ push(Immediate(FACTORY->true_value())); 424 __ push(Immediate(FACTORY->true_value()));
397 __ jmp(&done); 425 __ jmp(&done);
398 __ bind(materialize_false); 426 __ bind(materialize_false);
399 __ push(Immediate(FACTORY->false_value())); 427 __ push(Immediate(FACTORY->false_value()));
400 __ bind(&done); 428 __ bind(&done);
401 } 429 }
402 430
403 431
404 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 432 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
405 Label* materialize_false) const { 433 Label* materialize_false) const {
434 ASSERT(materialize_true == true_label_);
406 ASSERT(materialize_false == false_label_); 435 ASSERT(materialize_false == false_label_);
407 ASSERT(materialize_true == true_label_);
408 } 436 }
409 437
410 438
411 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 439 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
412 } 440 }
413 441
414 442
415 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 443 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
416 Handle<Object> value = 444 Handle<Object> value =
417 flag ? FACTORY->true_value() : FACTORY->false_value(); 445 flag ? FACTORY->true_value() : FACTORY->false_value();
418 __ mov(result_register(), value); 446 __ mov(result_register(), value);
419 } 447 }
420 448
421 449
422 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { 450 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
423 Handle<Object> value = 451 Handle<Object> value =
424 flag ? FACTORY->true_value() : FACTORY->false_value(); 452 flag ? FACTORY->true_value() : FACTORY->false_value();
425 __ push(Immediate(value)); 453 __ push(Immediate(value));
426 } 454 }
427 455
428 456
429 void FullCodeGenerator::TestContext::Plug(bool flag) const { 457 void FullCodeGenerator::TestContext::Plug(bool flag) const {
458 codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
459 true,
460 true_label_,
461 false_label_);
430 if (flag) { 462 if (flag) {
431 if (true_label_ != fall_through_) __ jmp(true_label_); 463 if (true_label_ != fall_through_) __ jmp(true_label_);
432 } else { 464 } else {
433 if (false_label_ != fall_through_) __ jmp(false_label_); 465 if (false_label_ != fall_through_) __ jmp(false_label_);
434 } 466 }
435 } 467 }
436 468
437 469
438 void FullCodeGenerator::DoTest(Label* if_true, 470 void FullCodeGenerator::DoTest(Label* if_true,
439 Label* if_false, 471 Label* if_false,
(...skipping 71 matching lines...)
511 MemOperand location = EmitSlotSearch(dst, scratch1); 543 MemOperand location = EmitSlotSearch(dst, scratch1);
512 __ mov(location, src); 544 __ mov(location, src);
513 // Emit the write barrier code if the location is in the heap. 545 // Emit the write barrier code if the location is in the heap.
514 if (dst->type() == Slot::CONTEXT) { 546 if (dst->type() == Slot::CONTEXT) {
515 int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize; 547 int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
516 __ RecordWrite(scratch1, offset, src, scratch2); 548 __ RecordWrite(scratch1, offset, src, scratch2);
517 } 549 }
518 } 550 }
519 551
520 552
553 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
554 bool should_normalize,
555 Label* if_true,
556 Label* if_false) {
557 // Only prepare for bailouts before splits if we're in a test
558 // context. Otherwise, we let the Visit function deal with the
559 // preparation to avoid preparing with the same AST id twice.
560 if (!context()->IsTest() || !info_->IsOptimizable()) return;
561
562 NearLabel skip;
563 if (should_normalize) __ jmp(&skip);
564
565 ForwardBailoutStack* current = forward_bailout_stack_;
566 while (current != NULL) {
567 PrepareForBailout(current->expr(), state);
568 current = current->parent();
569 }
570
571 if (should_normalize) {
572 __ cmp(eax, FACTORY->true_value());
573 Split(equal, if_true, if_false, NULL);
574 __ bind(&skip);
575 }
576 }
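
Aside, not part of the patch: the loop above walks the forward bailout stack, an intrusive list in which each pending expression points at its parent, and prepares a bailout entry for every frame; when should_normalize is set, the materialized value is then folded to a canonical true/false by comparing against the true value and splitting. A standalone sketch of the list walk, with hypothetical types:

// Sketch: walk a parent-linked stack of pending expressions, preparing
// each one, as the loop above does with ForwardBailoutStack.
#include <cstddef>

struct ForwardBailoutFrame {
  int expression_id;            // stands in for the AST expression
  ForwardBailoutFrame* parent;  // next outer pending expression, or NULL
};

static int PrepareAll(ForwardBailoutFrame* top) {
  int prepared = 0;
  for (ForwardBailoutFrame* current = top; current != NULL;
       current = current->parent) {
    ++prepared;  // PrepareForBailout(current->expr(), state) in the real code
  }
  return prepared;
}

int main() {
  ForwardBailoutFrame outer = { 1, NULL };
  ForwardBailoutFrame inner = { 2, &outer };
  return PrepareAll(&inner) == 2 ? 0 : 1;
}
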
577
578
521 void FullCodeGenerator::EmitDeclaration(Variable* variable, 579 void FullCodeGenerator::EmitDeclaration(Variable* variable,
522 Variable::Mode mode, 580 Variable::Mode mode,
523 FunctionLiteral* function) { 581 FunctionLiteral* function) {
524 Comment cmnt(masm_, "[ Declaration"); 582 Comment cmnt(masm_, "[ Declaration");
525 ASSERT(variable != NULL); // Must have been resolved. 583 ASSERT(variable != NULL); // Must have been resolved.
526 Slot* slot = variable->AsSlot(); 584 Slot* slot = variable->AsSlot();
527 Property* prop = variable->AsProperty(); 585 Property* prop = variable->AsProperty();
528 if (slot != NULL) { 586 if (slot != NULL) {
529 switch (slot->type()) { 587 switch (slot->type()) {
530 case Slot::PARAMETER: 588 case Slot::PARAMETER:
(...skipping 91 matching lines...)
622 __ push(Immediate(Smi::FromInt(is_eval() ? 1 : 0))); 680 __ push(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
623 __ CallRuntime(Runtime::kDeclareGlobals, 3); 681 __ CallRuntime(Runtime::kDeclareGlobals, 3);
624 // Return value is ignored. 682 // Return value is ignored.
625 } 683 }
626 684
627 685
628 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 686 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
629 Comment cmnt(masm_, "[ SwitchStatement"); 687 Comment cmnt(masm_, "[ SwitchStatement");
630 Breakable nested_statement(this, stmt); 688 Breakable nested_statement(this, stmt);
631 SetStatementPosition(stmt); 689 SetStatementPosition(stmt);
690
691 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
692
632 // Keep the switch value on the stack until a case matches. 693 // Keep the switch value on the stack until a case matches.
633 VisitForStackValue(stmt->tag()); 694 VisitForStackValue(stmt->tag());
634 695
635 ZoneList<CaseClause*>* clauses = stmt->cases(); 696 ZoneList<CaseClause*>* clauses = stmt->cases();
636 CaseClause* default_clause = NULL; // Can occur anywhere in the list. 697 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
637 698
638 Label next_test; // Recycled for each test. 699 Label next_test; // Recycled for each test.
639 // Compile all the tests with branches to their bodies. 700 // Compile all the tests with branches to their bodies.
640 for (int i = 0; i < clauses->length(); i++) { 701 for (int i = 0; i < clauses->length(); i++) {
641 CaseClause* clause = clauses->at(i); 702 CaseClause* clause = clauses->at(i);
(...skipping 19 matching lines...)
661 __ or_(ecx, Operand(eax)); 722 __ or_(ecx, Operand(eax));
662 __ test(ecx, Immediate(kSmiTagMask)); 723 __ test(ecx, Immediate(kSmiTagMask));
663 __ j(not_zero, &slow_case, not_taken); 724 __ j(not_zero, &slow_case, not_taken);
664 __ cmp(edx, Operand(eax)); 725 __ cmp(edx, Operand(eax));
665 __ j(not_equal, &next_test); 726 __ j(not_equal, &next_test);
666 __ Drop(1); // Switch value is no longer needed. 727 __ Drop(1); // Switch value is no longer needed.
667 __ jmp(clause->body_target()->entry_label()); 728 __ jmp(clause->body_target()->entry_label());
668 __ bind(&slow_case); 729 __ bind(&slow_case);
669 } 730 }
670 731
671 CompareFlags flags = inline_smi_code 732 // Record position before stub call for type feedback.
672 ? NO_SMI_COMPARE_IN_STUB 733 SetSourcePosition(clause->position());
673 : NO_COMPARE_FLAGS; 734
674 CompareStub stub(equal, true, flags); 735 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
675 __ CallStub(&stub); 736 __ call(ic, RelocInfo::CODE_TARGET);
737
676 __ test(eax, Operand(eax)); 738 __ test(eax, Operand(eax));
677 __ j(not_equal, &next_test); 739 __ j(not_equal, &next_test);
678 __ Drop(1); // Switch value is no longer needed. 740 __ Drop(1); // Switch value is no longer needed.
679 __ jmp(clause->body_target()->entry_label()); 741 __ jmp(clause->body_target()->entry_label());
680 } 742 }
681 743
682 // Discard the test value and jump to the default if present, otherwise to 744 // Discard the test value and jump to the default if present, otherwise to
683 // the end of the statement. 745 // the end of the statement.
684 __ bind(&next_test); 746 __ bind(&next_test);
685 __ Drop(1); // Switch value is no longer needed. 747 __ Drop(1); // Switch value is no longer needed.
686 if (default_clause == NULL) { 748 if (default_clause == NULL) {
687 __ jmp(nested_statement.break_target()); 749 __ jmp(nested_statement.break_target());
688 } else { 750 } else {
689 __ jmp(default_clause->body_target()->entry_label()); 751 __ jmp(default_clause->body_target()->entry_label());
690 } 752 }
691 753
692 // Compile all the case bodies. 754 // Compile all the case bodies.
693 for (int i = 0; i < clauses->length(); i++) { 755 for (int i = 0; i < clauses->length(); i++) {
694 Comment cmnt(masm_, "[ Case body"); 756 Comment cmnt(masm_, "[ Case body");
695 CaseClause* clause = clauses->at(i); 757 CaseClause* clause = clauses->at(i);
696 __ bind(clause->body_target()->entry_label()); 758 __ bind(clause->body_target()->entry_label());
697 VisitStatements(clause->statements()); 759 VisitStatements(clause->statements());
698 } 760 }
699 761
700 __ bind(nested_statement.break_target()); 762 __ bind(nested_statement.break_target());
763 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
701 } 764 }
702 765
703 766
704 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 767 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
705 Comment cmnt(masm_, "[ ForInStatement"); 768 Comment cmnt(masm_, "[ ForInStatement");
706 SetStatementPosition(stmt); 769 SetStatementPosition(stmt);
707 770
708 Label loop, exit; 771 Label loop, exit;
709 ForIn loop_statement(this, stmt); 772 ForIn loop_statement(this, stmt);
710 increment_loop_depth(); 773 increment_loop_depth();
(...skipping 134 matching lines...)
845 __ mov(ebx, Operand(eax)); 908 __ mov(ebx, Operand(eax));
846 909
847 // Update the 'each' property or variable from the possibly filtered 910 // Update the 'each' property or variable from the possibly filtered
848 // entry in register ebx. 911 // entry in register ebx.
849 __ bind(&update_each); 912 __ bind(&update_each);
850 __ mov(result_register(), ebx); 913 __ mov(result_register(), ebx);
851 // Perform the assignment as if via '='. 914 // Perform the assignment as if via '='.
852 EmitAssignment(stmt->each()); 915 EmitAssignment(stmt->each());
853 916
854 // Generate code for the body of the loop. 917 // Generate code for the body of the loop.
855 Label stack_limit_hit;
856 NearLabel stack_check_done;
857 Visit(stmt->body()); 918 Visit(stmt->body());
858 919
859 __ StackLimitCheck(&stack_limit_hit);
860 __ bind(&stack_check_done);
861
862 // Generate code for going to the next element by incrementing the 920 // Generate code for going to the next element by incrementing the
863 // index (smi) stored on top of the stack. 921 // index (smi) stored on top of the stack.
864 __ bind(loop_statement.continue_target()); 922 __ bind(loop_statement.continue_target());
865 __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1))); 923 __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
924
925 EmitStackCheck(stmt);
866 __ jmp(&loop); 926 __ jmp(&loop);
867 927
868 // Slow case for the stack limit check.
869 StackCheckStub stack_check_stub;
870 __ bind(&stack_limit_hit);
871 __ CallStub(&stack_check_stub);
872 __ jmp(&stack_check_done);
873
874 // Remove the pointers stored on the stack. 928 // Remove the pointers stored on the stack.
875 __ bind(loop_statement.break_target()); 929 __ bind(loop_statement.break_target());
876 __ add(Operand(esp), Immediate(5 * kPointerSize)); 930 __ add(Operand(esp), Immediate(5 * kPointerSize));
877 931
878 // Exit and decrement the loop depth. 932 // Exit and decrement the loop depth.
879 __ bind(&exit); 933 __ bind(&exit);
880 decrement_loop_depth(); 934 decrement_loop_depth();
881 } 935 }
882 936
883 937
884 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, 938 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
885 bool pretenure) { 939 bool pretenure) {
886 // Use the fast case closure allocation code that allocates in new 940 // Use the fast case closure allocation code that allocates in new
887 // space for nested functions that don't need literals cloning. 941 // space for nested functions that don't need literals cloning. If
888 if (scope()->is_function_scope() && 942 // we're running with the --always-opt or the --prepare-always-opt
943 // flag, we need to use the runtime function so that the new function
944 // we are creating here gets a chance to have its code optimized and
945 // doesn't just get a copy of the existing unoptimized code.
946 if (!FLAG_always_opt &&
947 !FLAG_prepare_always_opt &&
948 scope()->is_function_scope() &&
889 info->num_literals() == 0 && 949 info->num_literals() == 0 &&
890 !pretenure) { 950 !pretenure) {
891 FastNewClosureStub stub; 951 FastNewClosureStub stub;
892 __ push(Immediate(info)); 952 __ push(Immediate(info));
893 __ CallStub(&stub); 953 __ CallStub(&stub);
894 } else { 954 } else {
895 __ push(esi); 955 __ push(esi);
896 __ push(Immediate(info)); 956 __ push(Immediate(info));
897 __ push(Immediate(pretenure 957 __ push(Immediate(pretenure
898 ? FACTORY->true_value() 958 ? FACTORY->true_value()
(...skipping 329 matching lines...)
1228 if (!result_saved) { 1288 if (!result_saved) {
1229 __ push(eax); // Save result on the stack 1289 __ push(eax); // Save result on the stack
1230 result_saved = true; 1290 result_saved = true;
1231 } 1291 }
1232 switch (property->kind()) { 1292 switch (property->kind()) {
1233 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1293 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1234 ASSERT(!CompileTimeValue::IsCompileTimeValue(value)); 1294 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1235 // Fall through. 1295 // Fall through.
1236 case ObjectLiteral::Property::COMPUTED: 1296 case ObjectLiteral::Property::COMPUTED:
1237 if (key->handle()->IsSymbol()) { 1297 if (key->handle()->IsSymbol()) {
1238 VisitForAccumulatorValue(value);
1239 __ mov(ecx, Immediate(key->handle()));
1240 __ mov(edx, Operand(esp, 0));
1241 if (property->emit_store()) { 1298 if (property->emit_store()) {
1299 VisitForAccumulatorValue(value);
1300 __ mov(ecx, Immediate(key->handle()));
1301 __ mov(edx, Operand(esp, 0));
1242 Handle<Code> ic(Isolate::Current()->builtins()->builtin( 1302 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
1243 Builtins::StoreIC_Initialize)); 1303 Builtins::StoreIC_Initialize));
1244 EmitCallIC(ic, RelocInfo::CODE_TARGET); 1304 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1305 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1306 } else {
1307 VisitForEffect(value);
1245 } 1308 }
1246 break; 1309 break;
1247 } 1310 }
1248 // Fall through. 1311 // Fall through.
1249 case ObjectLiteral::Property::PROTOTYPE: 1312 case ObjectLiteral::Property::PROTOTYPE:
1250 __ push(Operand(esp, 0)); // Duplicate receiver. 1313 __ push(Operand(esp, 0)); // Duplicate receiver.
1251 VisitForStackValue(key); 1314 VisitForStackValue(key);
1252 VisitForStackValue(value); 1315 VisitForStackValue(value);
1253 if (property->emit_store()) { 1316 if (property->emit_store()) {
1254 __ CallRuntime(Runtime::kSetProperty, 3); 1317 __ CallRuntime(Runtime::kSetProperty, 3);
(...skipping 27 matching lines...)
1282 Comment cmnt(masm_, "[ ArrayLiteral"); 1345 Comment cmnt(masm_, "[ ArrayLiteral");
1283 1346
1284 ZoneList<Expression*>* subexprs = expr->values(); 1347 ZoneList<Expression*>* subexprs = expr->values();
1285 int length = subexprs->length(); 1348 int length = subexprs->length();
1286 1349
1287 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 1350 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1288 __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset)); 1351 __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1289 __ push(Immediate(Smi::FromInt(expr->literal_index()))); 1352 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1290 __ push(Immediate(expr->constant_elements())); 1353 __ push(Immediate(expr->constant_elements()));
1291 if (expr->constant_elements()->map() == HEAP->fixed_cow_array_map()) { 1354 if (expr->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
1355 ASSERT(expr->depth() == 1);
1292 FastCloneShallowArrayStub stub( 1356 FastCloneShallowArrayStub stub(
1293 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); 1357 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1294 __ CallStub(&stub); 1358 __ CallStub(&stub);
1295 __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1); 1359 __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1);
1296 } else if (expr->depth() > 1) { 1360 } else if (expr->depth() > 1) {
1297 __ CallRuntime(Runtime::kCreateArrayLiteral, 3); 1361 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1298 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { 1362 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1299 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); 1363 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1300 } else { 1364 } else {
1301 FastCloneShallowArrayStub stub( 1365 FastCloneShallowArrayStub stub(
(...skipping 21 matching lines...)
1323 VisitForAccumulatorValue(subexpr); 1387 VisitForAccumulatorValue(subexpr);
1324 1388
1325 // Store the subexpression value in the array's elements. 1389 // Store the subexpression value in the array's elements.
1326 __ mov(ebx, Operand(esp, 0)); // Copy of array literal. 1390 __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
1327 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset)); 1391 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1328 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1392 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1329 __ mov(FieldOperand(ebx, offset), result_register()); 1393 __ mov(FieldOperand(ebx, offset), result_register());
1330 1394
1331 // Update the write barrier for the array store. 1395 // Update the write barrier for the array store.
1332 __ RecordWrite(ebx, offset, result_register(), ecx); 1396 __ RecordWrite(ebx, offset, result_register(), ecx);
1397
1398 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1333 } 1399 }
1334 1400
1335 if (result_saved) { 1401 if (result_saved) {
1336 context()->PlugTOS(); 1402 context()->PlugTOS();
1337 } else { 1403 } else {
1338 context()->Plug(eax); 1404 context()->Plug(eax);
1339 } 1405 }
1340 } 1406 }
1341 1407
1342 1408
(...skipping 24 matching lines...)
1367 break; 1433 break;
1368 case NAMED_PROPERTY: 1434 case NAMED_PROPERTY:
1369 if (expr->is_compound()) { 1435 if (expr->is_compound()) {
1370 // We need the receiver both on the stack and in the accumulator. 1436 // We need the receiver both on the stack and in the accumulator.
1371 VisitForAccumulatorValue(property->obj()); 1437 VisitForAccumulatorValue(property->obj());
1372 __ push(result_register()); 1438 __ push(result_register());
1373 } else { 1439 } else {
1374 VisitForStackValue(property->obj()); 1440 VisitForStackValue(property->obj());
1375 } 1441 }
1376 break; 1442 break;
1377 case KEYED_PROPERTY: 1443 case KEYED_PROPERTY: {
1378 if (expr->is_compound()) { 1444 if (expr->is_compound()) {
1379 VisitForStackValue(property->obj()); 1445 if (property->is_arguments_access()) {
1380 VisitForAccumulatorValue(property->key()); 1446 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1447 __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
1448 __ mov(eax, Immediate(property->key()->AsLiteral()->handle()));
1449 } else {
1450 VisitForStackValue(property->obj());
1451 VisitForAccumulatorValue(property->key());
1452 }
1381 __ mov(edx, Operand(esp, 0)); 1453 __ mov(edx, Operand(esp, 0));
1382 __ push(eax); 1454 __ push(eax);
1383 } else { 1455 } else {
1384 VisitForStackValue(property->obj()); 1456 if (property->is_arguments_access()) {
1385 VisitForStackValue(property->key()); 1457 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1458 __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
1459 __ push(Immediate(property->key()->AsLiteral()->handle()));
1460 } else {
1461 VisitForStackValue(property->obj());
1462 VisitForStackValue(property->key());
1463 }
1386 } 1464 }
1387 break; 1465 break;
1466 }
1388 } 1467 }
1389 1468
1390 if (expr->is_compound()) { 1469 if (expr->is_compound()) {
1391 { AccumulatorValueContext context(this); 1470 { AccumulatorValueContext context(this);
1392 switch (assign_type) { 1471 switch (assign_type) {
1393 case VARIABLE: 1472 case VARIABLE:
1394 EmitVariableLoad(expr->target()->AsVariableProxy()->var()); 1473 EmitVariableLoad(expr->target()->AsVariableProxy()->var());
1395 break; 1474 break;
1396 case NAMED_PROPERTY: 1475 case NAMED_PROPERTY:
1397 EmitNamedPropertyLoad(property); 1476 EmitNamedPropertyLoad(property);
1398 break; 1477 break;
1399 case KEYED_PROPERTY: 1478 case KEYED_PROPERTY:
1400 EmitKeyedPropertyLoad(property); 1479 EmitKeyedPropertyLoad(property);
1401 break; 1480 break;
1402 } 1481 }
1403 } 1482 }
1404 1483
1484 // For property compound assignments we need another deoptimization
1485 // point after the property load.
1486 if (property != NULL) {
1487 PrepareForBailoutForId(expr->compound_bailout_id(), TOS_REG);
1488 }
1489
1405 Token::Value op = expr->binary_op(); 1490 Token::Value op = expr->binary_op();
1406 ConstantOperand constant = ShouldInlineSmiCase(op) 1491 ConstantOperand constant = ShouldInlineSmiCase(op)
1407 ? GetConstantOperand(op, expr->target(), expr->value()) 1492 ? GetConstantOperand(op, expr->target(), expr->value())
1408 : kNoConstants; 1493 : kNoConstants;
1409 ASSERT(constant == kRightConstant || constant == kNoConstants); 1494 ASSERT(constant == kRightConstant || constant == kNoConstants);
1410 if (constant == kNoConstants) { 1495 if (constant == kNoConstants) {
1411 __ push(eax); // Left operand goes on the stack. 1496 __ push(eax); // Left operand goes on the stack.
1412 VisitForAccumulatorValue(expr->value()); 1497 VisitForAccumulatorValue(expr->value());
1413 } 1498 }
1414 1499
1415 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1500 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1416 ? OVERWRITE_RIGHT 1501 ? OVERWRITE_RIGHT
1417 : NO_OVERWRITE; 1502 : NO_OVERWRITE;
1418 SetSourcePosition(expr->position() + 1); 1503 SetSourcePosition(expr->position() + 1);
1419 AccumulatorValueContext context(this); 1504 AccumulatorValueContext context(this);
1420 if (ShouldInlineSmiCase(op)) { 1505 if (ShouldInlineSmiCase(op)) {
1421 EmitInlineSmiBinaryOp(expr, 1506 EmitInlineSmiBinaryOp(expr,
1422 op, 1507 op,
1423 mode, 1508 mode,
1424 expr->target(), 1509 expr->target(),
1425 expr->value(), 1510 expr->value(),
1426 constant); 1511 constant);
1427 } else { 1512 } else {
1428 EmitBinaryOp(op, mode); 1513 EmitBinaryOp(op, mode);
1429 } 1514 }
1515
1516 // Deoptimization point in case the binary operation may have side effects.
1517 PrepareForBailout(expr->binary_operation(), TOS_REG);
1430 } else { 1518 } else {
1431 VisitForAccumulatorValue(expr->value()); 1519 VisitForAccumulatorValue(expr->value());
1432 } 1520 }
1433 1521
1434 // Record source position before possible IC call. 1522 // Record source position before possible IC call.
1435 SetSourcePosition(expr->position()); 1523 SetSourcePosition(expr->position());
1436 1524
1437 // Store the value. 1525 // Store the value.
1438 switch (assign_type) { 1526 switch (assign_type) {
1439 case VARIABLE: 1527 case VARIABLE:
(...skipping 36 matching lines...)
1476 Label done; 1564 Label done;
1477 __ add(Operand(eax), Immediate(value)); 1565 __ add(Operand(eax), Immediate(value));
1478 __ j(overflow, &call_stub); 1566 __ j(overflow, &call_stub);
1479 __ test(eax, Immediate(kSmiTagMask)); 1567 __ test(eax, Immediate(kSmiTagMask));
1480 __ j(zero, &done); 1568 __ j(zero, &done);
1481 1569
1482 // Undo the optimistic add operation and call the shared stub. 1570 // Undo the optimistic add operation and call the shared stub.
1483 __ bind(&call_stub); 1571 __ bind(&call_stub);
1484 __ sub(Operand(eax), Immediate(value)); 1572 __ sub(Operand(eax), Immediate(value));
1485 Token::Value op = Token::ADD; 1573 Token::Value op = Token::ADD;
1486 GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown()); 1574 TypeRecordingBinaryOpStub stub(op, mode);
1487 if (left_is_constant_smi) { 1575 if (left_is_constant_smi) {
1488 __ push(Immediate(value)); 1576 __ mov(edx, Immediate(value));
1489 __ push(eax);
1490 } else { 1577 } else {
1491 __ push(eax); 1578 __ mov(edx, eax);
1492 __ push(Immediate(value)); 1579 __ mov(eax, Immediate(value));
1493 } 1580 }
1494 __ CallStub(&stub); 1581 __ CallStub(&stub);
1495 __ bind(&done); 1582 __ bind(&done);
1496 context()->Plug(eax); 1583 context()->Plug(eax);
1497 } 1584 }
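
Aside, not part of the patch: EmitConstantSmiAdd performs the addition optimistically and only falls back to the (now type-recording) stub when the add overflows or the result fails the smi tag check. The standalone sketch below mirrors those two checks under the assumption that smis carry a clear low tag bit, so adding a constant smi to a non-smi leaves the bit set; the names are hypothetical.

// Sketch: constant smi add with overflow and tag checks; on failure the
// accumulator is left untouched so a generic stub could take over.
#include <cassert>
#include <cstdint>

static const int32_t kSmiTagMask = 1;

static bool OptimisticConstantAdd(int32_t* acc, int32_t constant) {
  int64_t wide = static_cast<int64_t>(*acc) + constant;
  bool overflow = wide > INT32_MAX || wide < INT32_MIN;
  int32_t sum = static_cast<int32_t>(wide);
  if (overflow || (sum & kSmiTagMask) != 0) {
    return false;               // stub path: operand preserved for the call
  }
  *acc = sum;                   // fast path: still a valid tagged smi
  return true;
}

int main() {
  int32_t a = 4;                               // tagged smi 2
  bool fast = OptimisticConstantAdd(&a, 6);    // add tagged smi 3
  assert(fast && a == 10);                     // tagged smi 5
  int32_t b = 5;                               // odd: tagged like a heap object
  bool slow = !OptimisticConstantAdd(&b, 6);
  assert(slow && b == 5);                      // untouched for the stub path
  return 0;
}
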
1498 1585
1499 1586
1500 void FullCodeGenerator::EmitConstantSmiSub(Expression* expr, 1587 void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
1501 OverwriteMode mode, 1588 OverwriteMode mode,
1502 bool left_is_constant_smi, 1589 bool left_is_constant_smi,
1503 Smi* value) { 1590 Smi* value) {
1504 Label call_stub, done; 1591 Label call_stub, done;
1505 if (left_is_constant_smi) { 1592 if (left_is_constant_smi) {
1506 __ mov(ecx, eax); 1593 __ mov(ecx, eax);
1507 __ mov(eax, Immediate(value)); 1594 __ mov(eax, Immediate(value));
1508 __ sub(Operand(eax), ecx); 1595 __ sub(Operand(eax), ecx);
1509 } else { 1596 } else {
1510 __ sub(Operand(eax), Immediate(value)); 1597 __ sub(Operand(eax), Immediate(value));
1511 } 1598 }
1512 __ j(overflow, &call_stub); 1599 __ j(overflow, &call_stub);
1513 __ test(eax, Immediate(kSmiTagMask)); 1600 __ test(eax, Immediate(kSmiTagMask));
1514 __ j(zero, &done); 1601 __ j(zero, &done);
1515 1602
1516 __ bind(&call_stub); 1603 __ bind(&call_stub);
1517 if (left_is_constant_smi) { 1604 if (left_is_constant_smi) {
1518 __ push(Immediate(value)); 1605 __ mov(edx, Immediate(value));
1519 __ push(ecx); 1606 __ mov(eax, ecx);
1520 } else { 1607 } else {
1521 // Undo the optimistic sub operation. 1608 __ add(Operand(eax), Immediate(value)); // Undo the subtraction.
1522 __ add(Operand(eax), Immediate(value)); 1609 __ mov(edx, eax);
1523 1610 __ mov(eax, Immediate(value));
1524 __ push(eax);
1525 __ push(Immediate(value));
1526 } 1611 }
1527
1528 Token::Value op = Token::SUB; 1612 Token::Value op = Token::SUB;
1529 GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown()); 1613 TypeRecordingBinaryOpStub stub(op, mode);
1530 __ CallStub(&stub); 1614 __ CallStub(&stub);
1531 __ bind(&done); 1615 __ bind(&done);
1532 context()->Plug(eax); 1616 context()->Plug(eax);
1533 } 1617 }
1534 1618
1535 1619
1536 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr, 1620 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
1537 Token::Value op, 1621 Token::Value op,
1538 OverwriteMode mode, 1622 OverwriteMode mode,
1539 Smi* value) { 1623 Smi* value) {
1540 Label call_stub, smi_case, done; 1624 Label call_stub, smi_case, done;
1541 int shift_value = value->value() & 0x1f; 1625 int shift_value = value->value() & 0x1f;
1542 1626
1543 __ test(eax, Immediate(kSmiTagMask)); 1627 __ test(eax, Immediate(kSmiTagMask));
1544 __ j(zero, &smi_case); 1628 __ j(zero, &smi_case);
1545 1629
1546 __ bind(&call_stub); 1630 __ bind(&call_stub);
1547 GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown()); 1631 __ mov(edx, eax);
1548 __ push(eax); 1632 __ mov(eax, Immediate(value));
1549 __ push(Immediate(value)); 1633 TypeRecordingBinaryOpStub stub(op, mode);
1550 __ CallStub(&stub); 1634 __ CallStub(&stub);
1551 __ jmp(&done); 1635 __ jmp(&done);
1552 1636
1553 __ bind(&smi_case); 1637 __ bind(&smi_case);
1554 switch (op) { 1638 switch (op) {
1555 case Token::SHL: 1639 case Token::SHL:
1556 if (shift_value != 0) { 1640 if (shift_value != 0) {
1557 __ mov(edx, eax); 1641 __ mov(edx, eax);
1558 if (shift_value > 1) { 1642 if (shift_value > 1) {
1559 __ shl(edx, shift_value - 1); 1643 __ shl(edx, shift_value - 1);
(...skipping 36 matching lines...)
1596 1680
1597 1681
1598 void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr, 1682 void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
1599 Token::Value op, 1683 Token::Value op,
1600 OverwriteMode mode, 1684 OverwriteMode mode,
1601 Smi* value) { 1685 Smi* value) {
1602 Label smi_case, done; 1686 Label smi_case, done;
1603 __ test(eax, Immediate(kSmiTagMask)); 1687 __ test(eax, Immediate(kSmiTagMask));
1604 __ j(zero, &smi_case); 1688 __ j(zero, &smi_case);
1605 1689
1606 GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown());
1607 // The order of the arguments does not matter for bit-ops with a 1690 // The order of the arguments does not matter for bit-ops with a
1608 // constant operand. 1691 // constant operand.
1609 __ push(Immediate(value)); 1692 __ mov(edx, Immediate(value));
1610 __ push(eax); 1693 TypeRecordingBinaryOpStub stub(op, mode);
1611 __ CallStub(&stub); 1694 __ CallStub(&stub);
1612 __ jmp(&done); 1695 __ jmp(&done);
1613 1696
1614 __ bind(&smi_case); 1697 __ bind(&smi_case);
1615 switch (op) { 1698 switch (op) {
1616 case Token::BIT_OR: 1699 case Token::BIT_OR:
1617 __ or_(Operand(eax), Immediate(value)); 1700 __ or_(Operand(eax), Immediate(value));
1618 break; 1701 break;
1619 case Token::BIT_XOR: 1702 case Token::BIT_XOR:
1620 __ xor_(Operand(eax), Immediate(value)); 1703 __ xor_(Operand(eax), Immediate(value));
(...skipping 58 matching lines...)
1679 // Do combined smi check of the operands. Left operand is on the 1762 // Do combined smi check of the operands. Left operand is on the
1680 // stack. Right operand is in eax. 1763 // stack. Right operand is in eax.
1681 Label done, stub_call, smi_case; 1764 Label done, stub_call, smi_case;
1682 __ pop(edx); 1765 __ pop(edx);
1683 __ mov(ecx, eax); 1766 __ mov(ecx, eax);
1684 __ or_(eax, Operand(edx)); 1767 __ or_(eax, Operand(edx));
1685 __ test(eax, Immediate(kSmiTagMask)); 1768 __ test(eax, Immediate(kSmiTagMask));
1686 __ j(zero, &smi_case); 1769 __ j(zero, &smi_case);
1687 1770
1688 __ bind(&stub_call); 1771 __ bind(&stub_call);
1689 GenericBinaryOpStub stub(op, mode, NO_SMI_CODE_IN_STUB, TypeInfo::Unknown()); 1772 __ mov(eax, ecx);
1690 if (stub.ArgsInRegistersSupported()) { 1773 TypeRecordingBinaryOpStub stub(op, mode);
1691 stub.GenerateCall(masm_, edx, ecx); 1774 __ CallStub(&stub);
1692 } else {
1693 __ push(edx);
1694 __ push(ecx);
1695 __ CallStub(&stub);
1696 }
1697 __ jmp(&done); 1775 __ jmp(&done);
1698 1776
1699 __ bind(&smi_case); 1777 __ bind(&smi_case);
1700 __ mov(eax, edx); // Copy left operand in case of a stub call. 1778 __ mov(eax, edx); // Copy left operand in case of a stub call.
1701 1779
1702 switch (op) { 1780 switch (op) {
1703 case Token::SAR: 1781 case Token::SAR:
1704 __ SmiUntag(eax); 1782 __ SmiUntag(eax);
1705 __ SmiUntag(ecx); 1783 __ SmiUntag(ecx);
1706 __ sar_cl(eax); // No checks of result necessary 1784 __ sar_cl(eax); // No checks of result necessary
(...skipping 58 matching lines...)
1765 UNREACHABLE(); 1843 UNREACHABLE();
1766 } 1844 }
1767 1845
1768 __ bind(&done); 1846 __ bind(&done);
1769 context()->Plug(eax); 1847 context()->Plug(eax);
1770 } 1848 }
1771 1849
1772 1850
1773 void FullCodeGenerator::EmitBinaryOp(Token::Value op, 1851 void FullCodeGenerator::EmitBinaryOp(Token::Value op,
1774 OverwriteMode mode) { 1852 OverwriteMode mode) {
1775 TypeInfo type = TypeInfo::Unknown(); 1853 __ pop(edx);
1776 GenericBinaryOpStub stub(op, mode, NO_GENERIC_BINARY_FLAGS, type); 1854 TypeRecordingBinaryOpStub stub(op, mode);
1777 if (stub.ArgsInRegistersSupported()) { 1855 __ CallStub(&stub);
1778 __ pop(edx);
1779 stub.GenerateCall(masm_, edx, eax);
1780 } else {
1781 __ push(result_register());
1782 __ CallStub(&stub);
1783 }
1784 context()->Plug(eax); 1856 context()->Plug(eax);
1785 } 1857 }
1786 1858
1787 1859
1788 void FullCodeGenerator::EmitAssignment(Expression* expr) { 1860 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1789 // Invalid left-hand sides are rewritten to have a 'throw 1861 // Invalid left-hand sides are rewritten to have a 'throw
1790 // ReferenceError' on the left-hand side. 1862 // ReferenceError' on the left-hand side.
1791 if (!expr->IsValidLeftHandSide()) { 1863 if (!expr->IsValidLeftHandSide()) {
1792 VisitForEffect(expr); 1864 VisitForEffect(expr);
1793 return; 1865 return;
(...skipping 199 matching lines...)
1993 } 2065 }
1994 2066
1995 2067
1996 void FullCodeGenerator::VisitProperty(Property* expr) { 2068 void FullCodeGenerator::VisitProperty(Property* expr) {
1997 Comment cmnt(masm_, "[ Property"); 2069 Comment cmnt(masm_, "[ Property");
1998 Expression* key = expr->key(); 2070 Expression* key = expr->key();
1999 2071
2000 if (key->IsPropertyName()) { 2072 if (key->IsPropertyName()) {
2001 VisitForAccumulatorValue(expr->obj()); 2073 VisitForAccumulatorValue(expr->obj());
2002 EmitNamedPropertyLoad(expr); 2074 EmitNamedPropertyLoad(expr);
2075 context()->Plug(eax);
2003 } else { 2076 } else {
2004 VisitForStackValue(expr->obj()); 2077 VisitForStackValue(expr->obj());
2005 VisitForAccumulatorValue(expr->key()); 2078 VisitForAccumulatorValue(expr->key());
2006 __ pop(edx); 2079 __ pop(edx);
2007 EmitKeyedPropertyLoad(expr); 2080 EmitKeyedPropertyLoad(expr);
2081 context()->Plug(eax);
2008 } 2082 }
2009 context()->Plug(eax);
2010 } 2083 }
2011 2084
2012 2085
2013 void FullCodeGenerator::EmitCallWithIC(Call* expr, 2086 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2014 Handle<Object> name, 2087 Handle<Object> name,
2015 RelocInfo::Mode mode) { 2088 RelocInfo::Mode mode) {
2016 // Code common for calls using the IC. 2089 // Code common for calls using the IC.
2017 ZoneList<Expression*>* args = expr->arguments(); 2090 ZoneList<Expression*>* args = expr->arguments();
2018 int arg_count = args->length(); 2091 int arg_count = args->length();
2019 { PreservePositionScope scope(masm()->positions_recorder()); 2092 { PreservePositionScope scope(masm()->positions_recorder());
2020 for (int i = 0; i < arg_count; i++) { 2093 for (int i = 0; i < arg_count; i++) {
2021 VisitForStackValue(args->at(i)); 2094 VisitForStackValue(args->at(i));
2022 } 2095 }
2023 __ Set(ecx, Immediate(name)); 2096 __ Set(ecx, Immediate(name));
2024 } 2097 }
2025 // Record source position of the IC call. 2098 // Record source position of the IC call.
2026 SetSourcePosition(expr->position()); 2099 SetSourcePosition(expr->position());
2027 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2100 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2028 Handle<Code> ic = ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, 2101 Handle<Code> ic = ISOLATE->stub_cache()->ComputeCallInitialize(arg_count,
2029 in_loop); 2102 in_loop);
2030 EmitCallIC(ic, mode); 2103 EmitCallIC(ic, mode);
2104 RecordJSReturnSite(expr);
2031 // Restore context register. 2105 // Restore context register.
2032 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2106 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2033 context()->Plug(eax); 2107 context()->Plug(eax);
2034 } 2108 }
2035 2109
2036 2110
2037 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, 2111 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2038 Expression* key, 2112 Expression* key,
2039 RelocInfo::Mode mode) { 2113 RelocInfo::Mode mode) {
2040 // Load the key. 2114 // Load the key.
(...skipping 13 matching lines...)
2054 VisitForStackValue(args->at(i)); 2128 VisitForStackValue(args->at(i));
2055 } 2129 }
2056 } 2130 }
2057 // Record source position of the IC call. 2131 // Record source position of the IC call.
2058 SetSourcePosition(expr->position()); 2132 SetSourcePosition(expr->position());
2059 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2133 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2060 Handle<Code> ic = ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, 2134 Handle<Code> ic = ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count,
2061 in_loop); 2135 in_loop);
2062 __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key. 2136 __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
2063 EmitCallIC(ic, mode); 2137 EmitCallIC(ic, mode);
2138 RecordJSReturnSite(expr);
2064 // Restore context register. 2139 // Restore context register.
2065 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2140 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2066 context()->DropAndPlug(1, eax); // Drop the key still on the stack. 2141 context()->DropAndPlug(1, eax); // Drop the key still on the stack.
2067 } 2142 }
2068 2143
2069 2144
2070 void FullCodeGenerator::EmitCallWithStub(Call* expr) { 2145 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2071 // Code common for calls using the call stub. 2146 // Code common for calls using the call stub.
2072 ZoneList<Expression*>* args = expr->arguments(); 2147 ZoneList<Expression*>* args = expr->arguments();
2073 int arg_count = args->length(); 2148 int arg_count = args->length();
2074 { PreservePositionScope scope(masm()->positions_recorder()); 2149 { PreservePositionScope scope(masm()->positions_recorder());
2075 for (int i = 0; i < arg_count; i++) { 2150 for (int i = 0; i < arg_count; i++) {
2076 VisitForStackValue(args->at(i)); 2151 VisitForStackValue(args->at(i));
2077 } 2152 }
2078 } 2153 }
2079 // Record source position for debugger. 2154 // Record source position for debugger.
2080 SetSourcePosition(expr->position()); 2155 SetSourcePosition(expr->position());
2081 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2156 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2082 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); 2157 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2083 __ CallStub(&stub); 2158 __ CallStub(&stub);
2159 RecordJSReturnSite(expr);
2084 // Restore context register. 2160 // Restore context register.
2085 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2161 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2086 context()->DropAndPlug(1, eax); 2162 context()->DropAndPlug(1, eax);
2087 } 2163 }
2088 2164
2089 2165
2090 void FullCodeGenerator::VisitCall(Call* expr) { 2166 void FullCodeGenerator::VisitCall(Call* expr) {
2167 #ifdef DEBUG
2168 // We want to verify that RecordJSReturnSite gets called on all paths
2169 // through this function. Avoid early returns.
2170 expr->return_is_recorded_ = false;
2171 #endif
2172
2091 Comment cmnt(masm_, "[ Call"); 2173 Comment cmnt(masm_, "[ Call");
2092 Expression* fun = expr->expression(); 2174 Expression* fun = expr->expression();
2093 Variable* var = fun->AsVariableProxy()->AsVariable(); 2175 Variable* var = fun->AsVariableProxy()->AsVariable();
2094 2176
2095 if (var != NULL && var->is_possibly_eval()) { 2177 if (var != NULL && var->is_possibly_eval()) {
2096 // In a call to eval, we first call %ResolvePossiblyDirectEval to 2178 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2097 // resolve the function we need to call and the receiver of the 2179 // resolve the function we need to call and the receiver of the
2098 // call. Then we call the resolved function using the given 2180 // call. Then we call the resolved function using the given
2099 // arguments. 2181 // arguments.
2100 ZoneList<Expression*>* args = expr->arguments(); 2182 ZoneList<Expression*>* args = expr->arguments();
(...skipping 25 matching lines...)
2126 // The runtime call returns a pair of values in eax (function) and 2208 // The runtime call returns a pair of values in eax (function) and
2127 // edx (receiver). Touch up the stack with the right values. 2209 // edx (receiver). Touch up the stack with the right values.
2128 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx); 2210 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2129 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax); 2211 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2130 } 2212 }
2131 // Record source position for debugger. 2213 // Record source position for debugger.
2132 SetSourcePosition(expr->position()); 2214 SetSourcePosition(expr->position());
2133 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2215 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2134 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); 2216 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2135 __ CallStub(&stub); 2217 __ CallStub(&stub);
2218 RecordJSReturnSite(expr);
2136 // Restore context register. 2219 // Restore context register.
2137 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2220 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2138 context()->DropAndPlug(1, eax); 2221 context()->DropAndPlug(1, eax);
2139 } else if (var != NULL && !var->is_this() && var->is_global()) { 2222 } else if (var != NULL && !var->is_this() && var->is_global()) {
2140 // Push global object as receiver for the call IC. 2223 // Push global object as receiver for the call IC.
2141 __ push(GlobalObjectOperand()); 2224 __ push(GlobalObjectOperand());
2142 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); 2225 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
2143 } else if (var != NULL && var->AsSlot() != NULL && 2226 } else if (var != NULL && var->AsSlot() != NULL &&
2144 var->AsSlot()->type() == Slot::LOOKUP) { 2227 var->AsSlot()->type() == Slot::LOOKUP) {
2145 // Call to a lookup slot (dynamically introduced variable). 2228 // Call to a lookup slot (dynamically introduced variable).
(...skipping 81 matching lines...)
2227 } 2310 }
2228 { PreservePositionScope scope(masm()->positions_recorder()); 2311 { PreservePositionScope scope(masm()->positions_recorder());
2229 VisitForStackValue(fun); 2312 VisitForStackValue(fun);
2230 } 2313 }
2231 // Load global receiver object. 2314 // Load global receiver object.
2232 __ mov(ebx, GlobalObjectOperand()); 2315 __ mov(ebx, GlobalObjectOperand());
2233 __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset)); 2316 __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2234 // Emit function call. 2317 // Emit function call.
2235 EmitCallWithStub(expr); 2318 EmitCallWithStub(expr);
2236 } 2319 }
2320
2321 #ifdef DEBUG
2322 // RecordJSReturnSite should have been called.
2323 ASSERT(expr->return_is_recorded_);
2324 #endif
2237 } 2325 }
2238 2326
2239 2327
2240 void FullCodeGenerator::VisitCallNew(CallNew* expr) { 2328 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2241 Comment cmnt(masm_, "[ CallNew"); 2329 Comment cmnt(masm_, "[ CallNew");
2242 // According to ECMA-262, section 11.2.2, page 44, the function 2330 // According to ECMA-262, section 11.2.2, page 44, the function
2243 // expression in new calls must be evaluated before the 2331 // expression in new calls must be evaluated before the
2244 // arguments. 2332 // arguments.
2245 2333
2246 // Push constructor on the stack. If it's not a function it's used as 2334 // Push constructor on the stack. If it's not a function it's used as
(...skipping 28 matching lines...)
2275 2363
2276 VisitForAccumulatorValue(args->at(0)); 2364 VisitForAccumulatorValue(args->at(0));
2277 2365
2278 Label materialize_true, materialize_false; 2366 Label materialize_true, materialize_false;
2279 Label* if_true = NULL; 2367 Label* if_true = NULL;
2280 Label* if_false = NULL; 2368 Label* if_false = NULL;
2281 Label* fall_through = NULL; 2369 Label* fall_through = NULL;
2282 context()->PrepareTest(&materialize_true, &materialize_false, 2370 context()->PrepareTest(&materialize_true, &materialize_false,
2283 &if_true, &if_false, &fall_through); 2371 &if_true, &if_false, &fall_through);
2284 2372
2373 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2285 __ test(eax, Immediate(kSmiTagMask)); 2374 __ test(eax, Immediate(kSmiTagMask));
2286 Split(zero, if_true, if_false, fall_through); 2375 Split(zero, if_true, if_false, fall_through);
2287 2376
2288 context()->Plug(if_true, if_false); 2377 context()->Plug(if_true, if_false);
2289 } 2378 }
2290 2379
2291 2380
2292 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) { 2381 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
2293 ASSERT(args->length() == 1); 2382 ASSERT(args->length() == 1);
2294 2383
2295 VisitForAccumulatorValue(args->at(0)); 2384 VisitForAccumulatorValue(args->at(0));
2296 2385
2297 Label materialize_true, materialize_false; 2386 Label materialize_true, materialize_false;
2298 Label* if_true = NULL; 2387 Label* if_true = NULL;
2299 Label* if_false = NULL; 2388 Label* if_false = NULL;
2300 Label* fall_through = NULL; 2389 Label* fall_through = NULL;
2301 context()->PrepareTest(&materialize_true, &materialize_false, 2390 context()->PrepareTest(&materialize_true, &materialize_false,
2302 &if_true, &if_false, &fall_through); 2391 &if_true, &if_false, &fall_through);
2303 2392
2393 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2304 __ test(eax, Immediate(kSmiTagMask | 0x80000000)); 2394 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2305 Split(zero, if_true, if_false, fall_through); 2395 Split(zero, if_true, if_false, fall_through);
2306 2396
2307 context()->Plug(if_true, if_false); 2397 context()->Plug(if_true, if_false);
2308 } 2398 }
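The two smi predicates above (the second is EmitIsNonNegativeSmi) reduce to single bit tests on the tagged word in eax. A minimal standalone C++ sketch of that logic follows; the constants are assumptions mirroring V8's one-bit ia32 smi tag (kSmiTag == 0), not values taken from this patch.

#include <cassert>
#include <cstdint>

// Illustrative constants mirroring the ia32 smi encoding (assumption).
const uint32_t kSmiTag = 0;
const uint32_t kSmiTagMask = 1;
const int kSmiTagSize = 1;

// First predicate: a value is a smi iff its low tag bit is clear.
bool IsSmi(uint32_t tagged) {
  return (tagged & kSmiTagMask) == kSmiTag;
}

// EmitIsNonNegativeSmi: also require the sign bit to be clear, which is
// exactly the test against (kSmiTagMask | 0x80000000) emitted above.
bool IsNonNegativeSmi(uint32_t tagged) {
  return (tagged & (kSmiTagMask | 0x80000000u)) == 0;
}

int main() {
  uint32_t five = 5u << kSmiTagSize;        // tagged smi 5
  uint32_t minus_one = ~0u << kSmiTagSize;  // tagged smi -1
  assert(IsSmi(five) && IsNonNegativeSmi(five));
  assert(IsSmi(minus_one) && !IsNonNegativeSmi(minus_one));
  return 0;
}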
2309 2399
2310 2400
2311 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) { 2401 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2312 ASSERT(args->length() == 1); 2402 ASSERT(args->length() == 1);
2313 2403
(...skipping 12 matching lines...)
2326 __ j(equal, if_true); 2416 __ j(equal, if_true);
2327 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 2417 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2328 // Undetectable objects behave like undefined when tested with typeof. 2418 // Undetectable objects behave like undefined when tested with typeof.
2329 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset)); 2419 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2330 __ test(ecx, Immediate(1 << Map::kIsUndetectable)); 2420 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2331 __ j(not_zero, if_false); 2421 __ j(not_zero, if_false);
2332 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset)); 2422 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2333 __ cmp(ecx, FIRST_JS_OBJECT_TYPE); 2423 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
2334 __ j(below, if_false); 2424 __ j(below, if_false);
2335 __ cmp(ecx, LAST_JS_OBJECT_TYPE); 2425 __ cmp(ecx, LAST_JS_OBJECT_TYPE);
2426 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2336 Split(below_equal, if_true, if_false, fall_through); 2427 Split(below_equal, if_true, if_false, fall_through);
2337 2428
2338 context()->Plug(if_true, if_false); 2429 context()->Plug(if_true, if_false);
2339 } 2430 }
2340 2431
2341 2432
2342 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) { 2433 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2343 ASSERT(args->length() == 1); 2434 ASSERT(args->length() == 1);
2344 2435
2345 VisitForAccumulatorValue(args->at(0)); 2436 VisitForAccumulatorValue(args->at(0));
2346 2437
2347 Label materialize_true, materialize_false; 2438 Label materialize_true, materialize_false;
2348 Label* if_true = NULL; 2439 Label* if_true = NULL;
2349 Label* if_false = NULL; 2440 Label* if_false = NULL;
2350 Label* fall_through = NULL; 2441 Label* fall_through = NULL;
2351 context()->PrepareTest(&materialize_true, &materialize_false, 2442 context()->PrepareTest(&materialize_true, &materialize_false,
2352 &if_true, &if_false, &fall_through); 2443 &if_true, &if_false, &fall_through);
2353 2444
2354 __ test(eax, Immediate(kSmiTagMask)); 2445 __ test(eax, Immediate(kSmiTagMask));
2355 __ j(equal, if_false); 2446 __ j(equal, if_false);
2356 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ebx); 2447 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ebx);
2448 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2357 Split(above_equal, if_true, if_false, fall_through); 2449 Split(above_equal, if_true, if_false, fall_through);
2358 2450
2359 context()->Plug(if_true, if_false); 2451 context()->Plug(if_true, if_false);
2360 } 2452 }
2361 2453
2362 2454
2363 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) { 2455 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2364 ASSERT(args->length() == 1); 2456 ASSERT(args->length() == 1);
2365 2457
2366 VisitForAccumulatorValue(args->at(0)); 2458 VisitForAccumulatorValue(args->at(0));
2367 2459
2368 Label materialize_true, materialize_false; 2460 Label materialize_true, materialize_false;
2369 Label* if_true = NULL; 2461 Label* if_true = NULL;
2370 Label* if_false = NULL; 2462 Label* if_false = NULL;
2371 Label* fall_through = NULL; 2463 Label* fall_through = NULL;
2372 context()->PrepareTest(&materialize_true, &materialize_false, 2464 context()->PrepareTest(&materialize_true, &materialize_false,
2373 &if_true, &if_false, &fall_through); 2465 &if_true, &if_false, &fall_through);
2374 2466
2375 __ test(eax, Immediate(kSmiTagMask)); 2467 __ test(eax, Immediate(kSmiTagMask));
2376 __ j(zero, if_false); 2468 __ j(zero, if_false);
2377 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 2469 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2378 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset)); 2470 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
2379 __ test(ebx, Immediate(1 << Map::kIsUndetectable)); 2471 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2472 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2380 Split(not_zero, if_true, if_false, fall_through); 2473 Split(not_zero, if_true, if_false, fall_through);
2381 2474
2382 context()->Plug(if_true, if_false); 2475 context()->Plug(if_true, if_false);
2383 } 2476 }
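EmitIsUndetectableObject above rejects smis, then reads the bit field byte out of the value's map and tests the Map::kIsUndetectable bit. A minimal sketch of that check, using a hypothetical bit position and stand-in Map/HeapObject structs rather than the real V8 types:

#include <cassert>
#include <cstdint>

// Hypothetical bit position standing in for Map::kIsUndetectable; the real
// value is defined in src/objects.h.
const int kIsUndetectableBit = 5;

// Minimal stand-ins for a heap object and its map, for illustration only.
struct Map { uint8_t bit_field; };
struct HeapObject { const Map* map; };

// A smi is never undetectable; otherwise the answer is the undetectable
// bit in the object's map bit field (the test emitted above).
bool IsUndetectableObject(bool is_smi, const HeapObject* obj) {
  if (is_smi) return false;
  return (obj->map->bit_field & (1 << kIsUndetectableBit)) != 0;
}

int main() {
  Map undetectable = { static_cast<uint8_t>(1 << kIsUndetectableBit) };
  Map ordinary = { 0 };
  HeapObject a = { &undetectable };
  HeapObject b = { &ordinary };
  assert(IsUndetectableObject(false, &a));
  assert(!IsUndetectableObject(false, &b));
  return 0;
}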
2384 2477
2385 2478
2386 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 2479 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2387 ZoneList<Expression*>* args) { 2480 ZoneList<Expression*>* args) {
2388 ASSERT(args->length() == 1); 2481 ASSERT(args->length() == 1);
2389 2482
2390 VisitForAccumulatorValue(args->at(0)); 2483 VisitForAccumulatorValue(args->at(0));
2391 2484
2392 Label materialize_true, materialize_false; 2485 Label materialize_true, materialize_false;
2393 Label* if_true = NULL; 2486 Label* if_true = NULL;
2394 Label* if_false = NULL; 2487 Label* if_false = NULL;
2395 Label* fall_through = NULL; 2488 Label* fall_through = NULL;
2396 context()->PrepareTest(&materialize_true, &materialize_false, 2489 context()->PrepareTest(&materialize_true, &materialize_false,
2397 &if_true, &if_false, &fall_through); 2490 &if_true, &if_false, &fall_through);
2398 2491
2399 // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only 2492 // TODO(3110205): Implement this.
2400 // used in a few functions in runtime.js which should not normally be hit by 2493 // Currently unimplemented. Emit false, a safe choice.
2401 // this compiler. 2494 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2402 __ jmp(if_false); 2495 __ jmp(if_false);
2403 context()->Plug(if_true, if_false); 2496 context()->Plug(if_true, if_false);
2404 } 2497 }
2405 2498
2406 2499
2407 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) { 2500 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
2408 ASSERT(args->length() == 1); 2501 ASSERT(args->length() == 1);
2409 2502
2410 VisitForAccumulatorValue(args->at(0)); 2503 VisitForAccumulatorValue(args->at(0));
2411 2504
2412 Label materialize_true, materialize_false; 2505 Label materialize_true, materialize_false;
2413 Label* if_true = NULL; 2506 Label* if_true = NULL;
2414 Label* if_false = NULL; 2507 Label* if_false = NULL;
2415 Label* fall_through = NULL; 2508 Label* fall_through = NULL;
2416 context()->PrepareTest(&materialize_true, &materialize_false, 2509 context()->PrepareTest(&materialize_true, &materialize_false,
2417 &if_true, &if_false, &fall_through); 2510 &if_true, &if_false, &fall_through);
2418 2511
2419 __ test(eax, Immediate(kSmiTagMask)); 2512 __ test(eax, Immediate(kSmiTagMask));
2420 __ j(zero, if_false); 2513 __ j(zero, if_false);
2421 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx); 2514 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2515 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2422 Split(equal, if_true, if_false, fall_through); 2516 Split(equal, if_true, if_false, fall_through);
2423 2517
2424 context()->Plug(if_true, if_false); 2518 context()->Plug(if_true, if_false);
2425 } 2519 }
2426 2520
2427 2521
2428 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) { 2522 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2429 ASSERT(args->length() == 1); 2523 ASSERT(args->length() == 1);
2430 2524
2431 VisitForAccumulatorValue(args->at(0)); 2525 VisitForAccumulatorValue(args->at(0));
2432 2526
2433 Label materialize_true, materialize_false; 2527 Label materialize_true, materialize_false;
2434 Label* if_true = NULL; 2528 Label* if_true = NULL;
2435 Label* if_false = NULL; 2529 Label* if_false = NULL;
2436 Label* fall_through = NULL; 2530 Label* fall_through = NULL;
2437 context()->PrepareTest(&materialize_true, &materialize_false, 2531 context()->PrepareTest(&materialize_true, &materialize_false,
2438 &if_true, &if_false, &fall_through); 2532 &if_true, &if_false, &fall_through);
2439 2533
2440 __ test(eax, Immediate(kSmiTagMask)); 2534 __ test(eax, Immediate(kSmiTagMask));
2441 __ j(equal, if_false); 2535 __ j(equal, if_false);
2442 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); 2536 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2537 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2443 Split(equal, if_true, if_false, fall_through); 2538 Split(equal, if_true, if_false, fall_through);
2444 2539
2445 context()->Plug(if_true, if_false); 2540 context()->Plug(if_true, if_false);
2446 } 2541 }
2447 2542
2448 2543
2449 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) { 2544 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2450 ASSERT(args->length() == 1); 2545 ASSERT(args->length() == 1);
2451 2546
2452 VisitForAccumulatorValue(args->at(0)); 2547 VisitForAccumulatorValue(args->at(0));
2453 2548
2454 Label materialize_true, materialize_false; 2549 Label materialize_true, materialize_false;
2455 Label* if_true = NULL; 2550 Label* if_true = NULL;
2456 Label* if_false = NULL; 2551 Label* if_false = NULL;
2457 Label* fall_through = NULL; 2552 Label* fall_through = NULL;
2458 context()->PrepareTest(&materialize_true, &materialize_false, 2553 context()->PrepareTest(&materialize_true, &materialize_false,
2459 &if_true, &if_false, &fall_through); 2554 &if_true, &if_false, &fall_through);
2460 2555
2461 __ test(eax, Immediate(kSmiTagMask)); 2556 __ test(eax, Immediate(kSmiTagMask));
2462 __ j(equal, if_false); 2557 __ j(equal, if_false);
2463 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx); 2558 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2559 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2464 Split(equal, if_true, if_false, fall_through); 2560 Split(equal, if_true, if_false, fall_through);
2465 2561
2466 context()->Plug(if_true, if_false); 2562 context()->Plug(if_true, if_false);
2467 } 2563 }
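EmitIsFunction, EmitIsArray and EmitIsRegExp above all share one shape: reject smis, then compare the instance type stored in the value's map against a fixed type code (CmpObjectType followed by Split(equal)). A minimal sketch of that pattern, with illustrative type codes instead of the real InstanceType values:

#include <cassert>

// Illustrative type codes; the real ones come from the InstanceType enum
// in src/objects.h.
enum InstanceType { JS_FUNCTION_TYPE = 1, JS_ARRAY_TYPE = 2, JS_REGEXP_TYPE = 3 };

struct Map { InstanceType instance_type; };
struct HeapObject { const Map* map; };

// Reject smis (they carry no map), then compare the map's instance type.
bool HasInstanceType(bool is_smi, const HeapObject* obj, InstanceType type) {
  if (is_smi) return false;
  return obj->map->instance_type == type;
}

int main() {
  Map function_map = { JS_FUNCTION_TYPE };
  HeapObject fn = { &function_map };
  assert(HasInstanceType(false, &fn, JS_FUNCTION_TYPE));
  assert(!HasInstanceType(false, &fn, JS_ARRAY_TYPE));
  assert(!HasInstanceType(true, nullptr, JS_REGEXP_TYPE));
  return 0;
}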
2468 2564
2469 2565
2470 2566
2471 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) { 2567 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2472 ASSERT(args->length() == 0); 2568 ASSERT(args->length() == 0);
2473 2569
(...skipping 11 matching lines...)
2485 Label check_frame_marker; 2581 Label check_frame_marker;
2486 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset), 2582 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
2487 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2583 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2488 __ j(not_equal, &check_frame_marker); 2584 __ j(not_equal, &check_frame_marker);
2489 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset)); 2585 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
2490 2586
2491 // Check the marker in the calling frame. 2587 // Check the marker in the calling frame.
2492 __ bind(&check_frame_marker); 2588 __ bind(&check_frame_marker);
2493 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset), 2589 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
2494 Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); 2590 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
2591 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2495 Split(equal, if_true, if_false, fall_through); 2592 Split(equal, if_true, if_false, fall_through);
2496 2593
2497 context()->Plug(if_true, if_false); 2594 context()->Plug(if_true, if_false);
2498 } 2595 }
2499 2596
2500 2597
2501 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) { 2598 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
2502 ASSERT(args->length() == 2); 2599 ASSERT(args->length() == 2);
2503 2600
2504 // Load the two objects into registers and perform the comparison. 2601 // Load the two objects into registers and perform the comparison.
2505 VisitForStackValue(args->at(0)); 2602 VisitForStackValue(args->at(0));
2506 VisitForAccumulatorValue(args->at(1)); 2603 VisitForAccumulatorValue(args->at(1));
2507 2604
2508 Label materialize_true, materialize_false; 2605 Label materialize_true, materialize_false;
2509 Label* if_true = NULL; 2606 Label* if_true = NULL;
2510 Label* if_false = NULL; 2607 Label* if_false = NULL;
2511 Label* fall_through = NULL; 2608 Label* fall_through = NULL;
2512 context()->PrepareTest(&materialize_true, &materialize_false, 2609 context()->PrepareTest(&materialize_true, &materialize_false,
2513 &if_true, &if_false, &fall_through); 2610 &if_true, &if_false, &fall_through);
2514 2611
2515 __ pop(ebx); 2612 __ pop(ebx);
2516 __ cmp(eax, Operand(ebx)); 2613 __ cmp(eax, Operand(ebx));
2614 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2517 Split(equal, if_true, if_false, fall_through); 2615 Split(equal, if_true, if_false, fall_through);
2518 2616
2519 context()->Plug(if_true, if_false); 2617 context()->Plug(if_true, if_false);
2520 } 2618 }
2521 2619
2522 2620
2523 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) { 2621 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2524 ASSERT(args->length() == 1); 2622 ASSERT(args->length() == 1);
2525 2623
2526 // ArgumentsAccessStub expects the key in edx and the formal 2624 // ArgumentsAccessStub expects the key in edx and the formal
(...skipping 197 matching lines...)
2724 __ bind(&done); 2822 __ bind(&done);
2725 context()->Plug(eax); 2823 context()->Plug(eax);
2726 } 2824 }
2727 2825
2728 2826
2729 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) { 2827 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2730 // Load the arguments on the stack and call the runtime function. 2828 // Load the arguments on the stack and call the runtime function.
2731 ASSERT(args->length() == 2); 2829 ASSERT(args->length() == 2);
2732 VisitForStackValue(args->at(0)); 2830 VisitForStackValue(args->at(0));
2733 VisitForStackValue(args->at(1)); 2831 VisitForStackValue(args->at(1));
2734 __ CallRuntime(Runtime::kMath_pow, 2); 2832
2833 MathPowStub stub;
2834 __ CallStub(&stub);
2735 context()->Plug(eax); 2835 context()->Plug(eax);
2736 } 2836 }
2737 2837
2738 2838
2739 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) { 2839 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2740 ASSERT(args->length() == 2); 2840 ASSERT(args->length() == 2);
2741 2841
2742 VisitForStackValue(args->at(0)); // Load the object. 2842 VisitForStackValue(args->at(0)); // Load the object.
2743 VisitForAccumulatorValue(args->at(1)); // Load the value. 2843 VisitForAccumulatorValue(args->at(1)); // Load the value.
2744 __ pop(ebx); // eax = value. ebx = object. 2844 __ pop(ebx); // eax = value. ebx = object.
(...skipping 221 matching lines...)
2966 // InvokeFunction requires function in edi. Move it in there. 3066 // InvokeFunction requires function in edi. Move it in there.
2967 if (!result_register().is(edi)) __ mov(edi, result_register()); 3067 if (!result_register().is(edi)) __ mov(edi, result_register());
2968 ParameterCount count(arg_count); 3068 ParameterCount count(arg_count);
2969 __ InvokeFunction(edi, count, CALL_FUNCTION); 3069 __ InvokeFunction(edi, count, CALL_FUNCTION);
2970 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 3070 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2971 context()->Plug(eax); 3071 context()->Plug(eax);
2972 } 3072 }
2973 3073
2974 3074
2975 void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) { 3075 void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
3076 // Load the arguments on the stack and call the stub.
3077 RegExpConstructResultStub stub;
2976 ASSERT(args->length() == 3); 3078 ASSERT(args->length() == 3);
2977 VisitForStackValue(args->at(0)); 3079 VisitForStackValue(args->at(0));
2978 VisitForStackValue(args->at(1)); 3080 VisitForStackValue(args->at(1));
2979 VisitForStackValue(args->at(2)); 3081 VisitForStackValue(args->at(2));
2980 __ CallRuntime(Runtime::kRegExpConstructResult, 3); 3082 __ CallStub(&stub);
2981 context()->Plug(eax); 3083 context()->Plug(eax);
2982 } 3084 }
2983 3085
2984 3086
2985 void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) { 3087 void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
2986 ASSERT(args->length() == 3); 3088 ASSERT(args->length() == 3);
2987 VisitForStackValue(args->at(0)); 3089 VisitForStackValue(args->at(0));
2988 VisitForStackValue(args->at(1)); 3090 VisitForStackValue(args->at(1));
2989 VisitForStackValue(args->at(2)); 3091 VisitForStackValue(args->at(2));
3092 Label done;
3093 Label slow_case;
3094 Register object = eax;
3095 Register index_1 = ebx;
3096 Register index_2 = ecx;
3097 Register elements = edi;
3098 Register temp = edx;
3099 __ mov(object, Operand(esp, 2 * kPointerSize));
3100 // Fetch the map and check if array is in fast case.
3101 // Check that object doesn't require security checks and
3102 // has no indexed interceptor.
3103 __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
3104 __ j(below, &slow_case);
3105 __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
3106 KeyedLoadIC::kSlowCaseBitFieldMask);
3107 __ j(not_zero, &slow_case);
3108
3109 // Check the object's elements are in fast case and writable.
3110 __ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
3111 __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
3112 Immediate(FACTORY->fixed_array_map()));
3113 __ j(not_equal, &slow_case);
3114
3115 // Check that both indices are smis.
3116 __ mov(index_1, Operand(esp, 1 * kPointerSize));
3117 __ mov(index_2, Operand(esp, 0));
3118 __ mov(temp, index_1);
3119 __ or_(temp, Operand(index_2));
3120 __ test(temp, Immediate(kSmiTagMask));
3121 __ j(not_zero, &slow_case);
3122
3123 // Bring the element addresses into index_1 and index_2.
3124 __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
3125 __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
3126
3127 // Swap elements. Use object and temp as scratch registers.
3128 __ mov(object, Operand(index_1, 0));
3129 __ mov(temp, Operand(index_2, 0));
3130 __ mov(Operand(index_2, 0), object);
3131 __ mov(Operand(index_1, 0), temp);
3132
3133 Label new_space;
3134 __ InNewSpace(elements, temp, equal, &new_space);
3135
3136 __ mov(object, elements);
3137 __ RecordWriteHelper(object, index_1, temp);
3138 __ RecordWriteHelper(elements, index_2, temp);
3139
3140 __ bind(&new_space);
3141 // We are done. Drop elements from the stack, and return undefined.
3142 __ add(Operand(esp), Immediate(3 * kPointerSize));
3143 __ mov(eax, FACTORY->undefined_value());
3144 __ jmp(&done);
3145
3146 __ bind(&slow_case);
2990 __ CallRuntime(Runtime::kSwapElements, 3); 3147 __ CallRuntime(Runtime::kSwapElements, 3);
3148
3149 __ bind(&done);
2991 context()->Plug(eax); 3150 context()->Plug(eax);
2992 } 3151 }
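The fast path added above swaps the two elements in place once the emitted checks have proved that the receiver is a fast-case JSObject with a fixed-array backing store and that both indices are smis; anything else falls back to Runtime::kSwapElements, and stores into old-space backing stores get write-barrier bookkeeping via RecordWriteHelper. A minimal sketch of the swap itself (checks and write barrier omitted; the function name is illustrative):

#include <cassert>

// Once the fast-path checks pass, the swap is two loads and two stores
// through the elements backing store, mirroring the mov sequence above.
void SwapElements(int* elements, int index_1, int index_2) {
  int tmp_1 = elements[index_1];
  int tmp_2 = elements[index_2];
  elements[index_2] = tmp_1;
  elements[index_1] = tmp_2;
}

int main() {
  int backing_store[] = { 10, 20, 30 };
  SwapElements(backing_store, 0, 2);
  assert(backing_store[0] == 30 && backing_store[2] == 10);
  return 0;
}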
2993 3152
2994 3153
2995 void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) { 3154 void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
2996 ASSERT_EQ(2, args->length()); 3155 ASSERT_EQ(2, args->length());
2997 3156
2998 ASSERT_NE(NULL, args->at(0)->AsLiteral()); 3157 ASSERT_NE(NULL, args->at(0)->AsLiteral());
2999 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); 3158 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3000 3159
(...skipping 88 matching lines...)
3089 3248
3090 Label materialize_true, materialize_false; 3249 Label materialize_true, materialize_false;
3091 Label* if_true = NULL; 3250 Label* if_true = NULL;
3092 Label* if_false = NULL; 3251 Label* if_false = NULL;
3093 Label* fall_through = NULL; 3252 Label* fall_through = NULL;
3094 context()->PrepareTest(&materialize_true, &materialize_false, 3253 context()->PrepareTest(&materialize_true, &materialize_false,
3095 &if_true, &if_false, &fall_through); 3254 &if_true, &if_false, &fall_through);
3096 3255
3097 __ test(FieldOperand(eax, String::kHashFieldOffset), 3256 __ test(FieldOperand(eax, String::kHashFieldOffset),
3098 Immediate(String::kContainsCachedArrayIndexMask)); 3257 Immediate(String::kContainsCachedArrayIndexMask));
3258 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3099 Split(zero, if_true, if_false, fall_through); 3259 Split(zero, if_true, if_false, fall_through);
3100 3260
3101 context()->Plug(if_true, if_false); 3261 context()->Plug(if_true, if_false);
3102 } 3262 }
3103 3263
3104 3264
3105 void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) { 3265 void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
3106 ASSERT(args->length() == 1); 3266 ASSERT(args->length() == 1);
3107 3267
3108 VisitForAccumulatorValue(args->at(0)); 3268 VisitForAccumulatorValue(args->at(0));
(...skipping 285 matching lines...)
3394 case Token::NOT: { 3554 case Token::NOT: {
3395 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 3555 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3396 3556
3397 Label materialize_true, materialize_false; 3557 Label materialize_true, materialize_false;
3398 Label* if_true = NULL; 3558 Label* if_true = NULL;
3399 Label* if_false = NULL; 3559 Label* if_false = NULL;
3400 Label* fall_through = NULL; 3560 Label* fall_through = NULL;
3401 // Notice that the labels are swapped. 3561 // Notice that the labels are swapped.
3402 context()->PrepareTest(&materialize_true, &materialize_false, 3562 context()->PrepareTest(&materialize_true, &materialize_false,
3403 &if_false, &if_true, &fall_through); 3563 &if_false, &if_true, &fall_through);
3564 if (context()->IsTest()) ForwardBailoutToChild(expr);
3404 VisitForControl(expr->expression(), if_true, if_false, fall_through); 3565 VisitForControl(expr->expression(), if_true, if_false, fall_through);
3405 context()->Plug(if_false, if_true); // Labels swapped. 3566 context()->Plug(if_false, if_true); // Labels swapped.
3406 break; 3567 break;
3407 } 3568 }
3408 3569
3409 case Token::TYPEOF: { 3570 case Token::TYPEOF: {
3410 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 3571 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3411 { StackValueContext context(this); 3572 { StackValueContext context(this);
3412 VisitForTypeofValue(expr->expression()); 3573 VisitForTypeofValue(expr->expression());
3413 } 3574 }
(...skipping 96 matching lines...)
3510 // Reserve space for result of postfix operation. 3671 // Reserve space for result of postfix operation.
3511 if (expr->is_postfix() && !context()->IsEffect()) { 3672 if (expr->is_postfix() && !context()->IsEffect()) {
3512 __ push(Immediate(Smi::FromInt(0))); 3673 __ push(Immediate(Smi::FromInt(0)));
3513 } 3674 }
3514 if (assign_type == NAMED_PROPERTY) { 3675 if (assign_type == NAMED_PROPERTY) {
3515 // Put the object both on the stack and in the accumulator. 3676 // Put the object both on the stack and in the accumulator.
3516 VisitForAccumulatorValue(prop->obj()); 3677 VisitForAccumulatorValue(prop->obj());
3517 __ push(eax); 3678 __ push(eax);
3518 EmitNamedPropertyLoad(prop); 3679 EmitNamedPropertyLoad(prop);
3519 } else { 3680 } else {
3520 VisitForStackValue(prop->obj()); 3681 if (prop->is_arguments_access()) {
3521 VisitForAccumulatorValue(prop->key()); 3682 VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3683 __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx));
3684 __ mov(eax, Immediate(prop->key()->AsLiteral()->handle()));
3685 } else {
3686 VisitForStackValue(prop->obj());
3687 VisitForAccumulatorValue(prop->key());
3688 }
3522 __ mov(edx, Operand(esp, 0)); 3689 __ mov(edx, Operand(esp, 0));
3523 __ push(eax); 3690 __ push(eax);
3524 EmitKeyedPropertyLoad(prop); 3691 EmitKeyedPropertyLoad(prop);
3525 } 3692 }
3526 } 3693 }
3527 3694
3695 // We need a second deoptimization point after loading the value
3696 // in case evaluating the property load may have a side effect.
3697 PrepareForBailout(expr->increment(), TOS_REG);
3698
3528 // Call ToNumber only if operand is not a smi. 3699 // Call ToNumber only if operand is not a smi.
3529 NearLabel no_conversion; 3700 NearLabel no_conversion;
3530 if (ShouldInlineSmiCase(expr->op())) { 3701 if (ShouldInlineSmiCase(expr->op())) {
3531 __ test(eax, Immediate(kSmiTagMask)); 3702 __ test(eax, Immediate(kSmiTagMask));
3532 __ j(zero, &no_conversion); 3703 __ j(zero, &no_conversion);
3533 } 3704 }
3534 __ push(eax); 3705 __ push(eax);
3535 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); 3706 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
3536 __ bind(&no_conversion); 3707 __ bind(&no_conversion);
3537 3708
(...skipping 32 matching lines...)
3570 __ test(eax, Immediate(kSmiTagMask)); 3741 __ test(eax, Immediate(kSmiTagMask));
3571 __ j(zero, &done); 3742 __ j(zero, &done);
3572 __ bind(&stub_call); 3743 __ bind(&stub_call);
3573 // Call stub. Undo operation first. 3744 // Call stub. Undo operation first.
3574 if (expr->op() == Token::INC) { 3745 if (expr->op() == Token::INC) {
3575 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); 3746 __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3576 } else { 3747 } else {
3577 __ add(Operand(eax), Immediate(Smi::FromInt(1))); 3748 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
3578 } 3749 }
3579 } 3750 }
3751
3752 // Record position before stub call.
3753 SetSourcePosition(expr->position());
3754
3580 // Call stub for +1/-1. 3755 // Call stub for +1/-1.
3581 GenericBinaryOpStub stub(expr->binary_op(), 3756 __ mov(edx, eax);
3582 NO_OVERWRITE, 3757 __ mov(eax, Immediate(Smi::FromInt(1)));
3583 NO_GENERIC_BINARY_FLAGS, 3758 TypeRecordingBinaryOpStub stub(expr->binary_op(),
3584 TypeInfo::Unknown()); 3759 NO_OVERWRITE);
3585 stub.GenerateCall(masm(), eax, Smi::FromInt(1)); 3760 __ CallStub(&stub);
3586 __ bind(&done); 3761 __ bind(&done);
3587 3762
3588 // Store the value returned in eax. 3763 // Store the value returned in eax.
3589 switch (assign_type) { 3764 switch (assign_type) {
3590 case VARIABLE: 3765 case VARIABLE:
3591 if (expr->is_postfix()) { 3766 if (expr->is_postfix()) {
3592 // Perform the assignment as if via '='. 3767 // Perform the assignment as if via '='.
3593 { EffectContext context(this); 3768 { EffectContext context(this);
3594 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3769 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3595 Token::ASSIGN); 3770 Token::ASSIGN);
(...skipping 51 matching lines...)
3647 3822
3648 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { 3823 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
3649 Comment cmnt(masm_, "Global variable"); 3824 Comment cmnt(masm_, "Global variable");
3650 __ mov(eax, GlobalObjectOperand()); 3825 __ mov(eax, GlobalObjectOperand());
3651 __ mov(ecx, Immediate(proxy->name())); 3826 __ mov(ecx, Immediate(proxy->name()));
3652 Handle<Code> ic(Isolate::Current()->builtins()->builtin( 3827 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
3653 Builtins::LoadIC_Initialize)); 3828 Builtins::LoadIC_Initialize));
3654 // Use a regular load, not a contextual load, to avoid a reference 3829 // Use a regular load, not a contextual load, to avoid a reference
3655 // error. 3830 // error.
3656 EmitCallIC(ic, RelocInfo::CODE_TARGET); 3831 EmitCallIC(ic, RelocInfo::CODE_TARGET);
3832 PrepareForBailout(expr, TOS_REG);
3657 context()->Plug(eax); 3833 context()->Plug(eax);
3658 } else if (proxy != NULL && 3834 } else if (proxy != NULL &&
3659 proxy->var()->AsSlot() != NULL && 3835 proxy->var()->AsSlot() != NULL &&
3660 proxy->var()->AsSlot()->type() == Slot::LOOKUP) { 3836 proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
3661 Label done, slow; 3837 Label done, slow;
3662 3838
3663 // Generate code for loading from variables potentially shadowed 3839 // Generate code for loading from variables potentially shadowed
3664 // by eval-introduced variables. 3840 // by eval-introduced variables.
3665 Slot* slot = proxy->var()->AsSlot(); 3841 Slot* slot = proxy->var()->AsSlot();
3666 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done); 3842 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
3667 3843
3668 __ bind(&slow); 3844 __ bind(&slow);
3669 __ push(esi); 3845 __ push(esi);
3670 __ push(Immediate(proxy->name())); 3846 __ push(Immediate(proxy->name()));
3671 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 3847 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
3848 PrepareForBailout(expr, TOS_REG);
3672 __ bind(&done); 3849 __ bind(&done);
3673 3850
3674 context()->Plug(eax); 3851 context()->Plug(eax);
3675 } else { 3852 } else {
3676 // This expression cannot throw a reference error at the top level. 3853 // This expression cannot throw a reference error at the top level.
3677 Visit(expr); 3854 context()->HandleExpression(expr);
3678 } 3855 }
3679 } 3856 }
3680 3857
3681 3858
3682 bool FullCodeGenerator::TryLiteralCompare(Token::Value op, 3859 bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
3683 Expression* left, 3860 Expression* left,
3684 Expression* right, 3861 Expression* right,
3685 Label* if_true, 3862 Label* if_true,
3686 Label* if_false, 3863 Label* if_false,
3687 Label* fall_through) { 3864 Label* fall_through) {
3688 if (op != Token::EQ && op != Token::EQ_STRICT) return false; 3865 if (op != Token::EQ && op != Token::EQ_STRICT) return false;
3689 3866
3690 // Check for the pattern: typeof <expression> == <string literal>. 3867 // Check for the pattern: typeof <expression> == <string literal>.
3691 Literal* right_literal = right->AsLiteral(); 3868 Literal* right_literal = right->AsLiteral();
3692 if (right_literal == NULL) return false; 3869 if (right_literal == NULL) return false;
3693 Handle<Object> right_literal_value = right_literal->handle(); 3870 Handle<Object> right_literal_value = right_literal->handle();
3694 if (!right_literal_value->IsString()) return false; 3871 if (!right_literal_value->IsString()) return false;
3695 UnaryOperation* left_unary = left->AsUnaryOperation(); 3872 UnaryOperation* left_unary = left->AsUnaryOperation();
3696 if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false; 3873 if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
3697 Handle<String> check = Handle<String>::cast(right_literal_value); 3874 Handle<String> check = Handle<String>::cast(right_literal_value);
3698 3875
3699 { AccumulatorValueContext context(this); 3876 { AccumulatorValueContext context(this);
3700 VisitForTypeofValue(left_unary->expression()); 3877 VisitForTypeofValue(left_unary->expression());
3701 } 3878 }
3879 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3702 3880
3703 if (check->Equals(HEAP->number_symbol())) { 3881 if (check->Equals(HEAP->number_symbol())) {
3704 __ test(eax, Immediate(kSmiTagMask)); 3882 __ test(eax, Immediate(kSmiTagMask));
3705 __ j(zero, if_true); 3883 __ j(zero, if_true);
3706 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), 3884 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3707 FACTORY->heap_number_map()); 3885 FACTORY->heap_number_map());
3708 Split(equal, if_true, if_false, fall_through); 3886 Split(equal, if_true, if_false, fall_through);
3709 } else if (check->Equals(HEAP->string_symbol())) { 3887 } else if (check->Equals(HEAP->string_symbol())) {
3710 __ test(eax, Immediate(kSmiTagMask)); 3888 __ test(eax, Immediate(kSmiTagMask));
3711 __ j(zero, if_false); 3889 __ j(zero, if_false);
(...skipping 75 matching lines...)
3787 if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) { 3965 if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
3788 context()->Plug(if_true, if_false); 3966 context()->Plug(if_true, if_false);
3789 return; 3967 return;
3790 } 3968 }
3791 3969
3792 VisitForStackValue(expr->left()); 3970 VisitForStackValue(expr->left());
3793 switch (expr->op()) { 3971 switch (expr->op()) {
3794 case Token::IN: 3972 case Token::IN:
3795 VisitForStackValue(expr->right()); 3973 VisitForStackValue(expr->right());
3796 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 3974 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
3975 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
3797 __ cmp(eax, FACTORY->true_value()); 3976 __ cmp(eax, FACTORY->true_value());
3798 Split(equal, if_true, if_false, fall_through); 3977 Split(equal, if_true, if_false, fall_through);
3799 break; 3978 break;
3800 3979
3801 case Token::INSTANCEOF: { 3980 case Token::INSTANCEOF: {
3802 VisitForStackValue(expr->right()); 3981 VisitForStackValue(expr->right());
3803 InstanceofStub stub; 3982 InstanceofStub stub;
3804 __ CallStub(&stub); 3983 __ CallStub(&stub);
3984 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3805 __ test(eax, Operand(eax)); 3985 __ test(eax, Operand(eax));
3806 // The stub returns 0 for true. 3986 // The stub returns 0 for true.
3807 Split(zero, if_true, if_false, fall_through); 3987 Split(zero, if_true, if_false, fall_through);
3808 break; 3988 break;
3809 } 3989 }
3810 3990
3811 default: { 3991 default: {
3812 VisitForAccumulatorValue(expr->right()); 3992 VisitForAccumulatorValue(expr->right());
3813 Condition cc = no_condition; 3993 Condition cc = no_condition;
3814 bool strict = false; 3994 bool strict = false;
(...skipping 36 matching lines...)
3851 NearLabel slow_case; 4031 NearLabel slow_case;
3852 __ mov(ecx, Operand(edx)); 4032 __ mov(ecx, Operand(edx));
3853 __ or_(ecx, Operand(eax)); 4033 __ or_(ecx, Operand(eax));
3854 __ test(ecx, Immediate(kSmiTagMask)); 4034 __ test(ecx, Immediate(kSmiTagMask));
3855 __ j(not_zero, &slow_case, not_taken); 4035 __ j(not_zero, &slow_case, not_taken);
3856 __ cmp(edx, Operand(eax)); 4036 __ cmp(edx, Operand(eax));
3857 Split(cc, if_true, if_false, NULL); 4037 Split(cc, if_true, if_false, NULL);
3858 __ bind(&slow_case); 4038 __ bind(&slow_case);
3859 } 4039 }
3860 4040
3861 CompareFlags flags = inline_smi_code 4041 // Record position and call the compare IC.
3862 ? NO_SMI_COMPARE_IN_STUB 4042 Handle<Code> ic = CompareIC::GetUninitialized(op);
3863 : NO_COMPARE_FLAGS; 4043 SetSourcePosition(expr->position());
3864 CompareStub stub(cc, strict, flags); 4044 __ call(ic, RelocInfo::CODE_TARGET);
3865 __ CallStub(&stub); 4045 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3866 __ test(eax, Operand(eax)); 4046 __ test(eax, Operand(eax));
3867 Split(cc, if_true, if_false, fall_through); 4047 Split(cc, if_true, if_false, fall_through);
3868 } 4048 }
3869 } 4049 }
3870 4050
3871 // Convert the result of the comparison into one expected for this 4051 // Convert the result of the comparison into one expected for this
3872 // expression's context. 4052 // expression's context.
3873 context()->Plug(if_true, if_false); 4053 context()->Plug(if_true, if_false);
3874 } 4054 }
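The inlined fast case in VisitCompareOperation ORs the two tagged operands together and tests the smi tag bit once: if it is clear, both operands are smis and the comparison runs directly on the tagged words; otherwise control falls through to the new compare IC. A minimal sketch of that fast path, again assuming the one-bit ia32 smi tag:

#include <cassert>
#include <cstdint>

const uint32_t kSmiTagMask = 1;  // illustrative; mirrors the ia32 encoding

// One combined tag test covers both operands (the or_/test pair above).
bool BothSmis(uint32_t left, uint32_t right) {
  return ((left | right) & kSmiTagMask) == 0;
}

// Tagging multiplies the value by two, which preserves order, so the
// comparison can run on the tagged words (the cmp edx, eax above).
int CompareSmis(uint32_t left, uint32_t right) {
  int32_t l = static_cast<int32_t>(left);
  int32_t r = static_cast<int32_t>(right);
  return (l < r) ? -1 : (l > r) ? 1 : 0;
}

int main() {
  uint32_t three = 3u << 1, seven = 7u << 1;  // tagged smis
  assert(BothSmis(three, seven));
  assert(CompareSmis(three, seven) < 0);
  assert(!BothSmis(three, seven | 1));  // a heap pointer forces the IC path
  return 0;
}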
3875 4055
3876 4056
3877 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { 4057 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
3878 Label materialize_true, materialize_false; 4058 Label materialize_true, materialize_false;
3879 Label* if_true = NULL; 4059 Label* if_true = NULL;
3880 Label* if_false = NULL; 4060 Label* if_false = NULL;
3881 Label* fall_through = NULL; 4061 Label* fall_through = NULL;
3882 context()->PrepareTest(&materialize_true, &materialize_false, 4062 context()->PrepareTest(&materialize_true, &materialize_false,
3883 &if_true, &if_false, &fall_through); 4063 &if_true, &if_false, &fall_through);
3884 4064
3885 VisitForAccumulatorValue(expr->expression()); 4065 VisitForAccumulatorValue(expr->expression());
4066 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4067
3886 __ cmp(eax, FACTORY->null_value()); 4068 __ cmp(eax, FACTORY->null_value());
3887 if (expr->is_strict()) { 4069 if (expr->is_strict()) {
3888 Split(equal, if_true, if_false, fall_through); 4070 Split(equal, if_true, if_false, fall_through);
3889 } else { 4071 } else {
3890 __ j(equal, if_true); 4072 __ j(equal, if_true);
3891 __ cmp(eax, FACTORY->undefined_value()); 4073 __ cmp(eax, FACTORY->undefined_value());
3892 __ j(equal, if_true); 4074 __ j(equal, if_true);
3893 __ test(eax, Immediate(kSmiTagMask)); 4075 __ test(eax, Immediate(kSmiTagMask));
3894 __ j(zero, if_false); 4076 __ j(zero, if_false);
3895 // It can be an undetectable object. 4077 // It can be an undetectable object.
(...skipping 18 matching lines...)
3914 4096
3915 4097
3916 Register FullCodeGenerator::context_register() { 4098 Register FullCodeGenerator::context_register() {
3917 return esi; 4099 return esi;
3918 } 4100 }
3919 4101
3920 4102
3921 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) { 4103 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
3922 ASSERT(mode == RelocInfo::CODE_TARGET || 4104 ASSERT(mode == RelocInfo::CODE_TARGET ||
3923 mode == RelocInfo::CODE_TARGET_CONTEXT); 4105 mode == RelocInfo::CODE_TARGET_CONTEXT);
4106 switch (ic->kind()) {
4107 case Code::LOAD_IC:
4108 __ IncrementCounter(COUNTERS->named_load_full(), 1);
4109 break;
4110 case Code::KEYED_LOAD_IC:
4111 __ IncrementCounter(COUNTERS->keyed_load_full(), 1);
4112 break;
4113 case Code::STORE_IC:
4114 __ IncrementCounter(COUNTERS->named_store_full(), 1);
4115 break;
4116 case Code::KEYED_STORE_IC:
4117 __ IncrementCounter(COUNTERS->keyed_store_full(), 1);
4118 default:
4119 break;
4120 }
4121
3924 __ call(ic, mode); 4122 __ call(ic, mode);
3925 4123
4124 // Crankshaft doesn't need patching of inlined loads and stores.
4125 // When compiling the snapshot we need to produce code that works
4126 // with and without Crankshaft.
4127 if (V8::UseCrankshaft() && !Serializer::enabled()) {
4128 return;
4129 }
4130
3926 // If we're calling a (keyed) load or store stub, we have to mark 4131 // If we're calling a (keyed) load or store stub, we have to mark
3927 // the call as containing no inlined code so we will not attempt to 4132 // the call as containing no inlined code so we will not attempt to
3928 // patch it. 4133 // patch it.
3929 switch (ic->kind()) { 4134 switch (ic->kind()) {
3930 case Code::LOAD_IC: 4135 case Code::LOAD_IC:
3931 case Code::KEYED_LOAD_IC: 4136 case Code::KEYED_LOAD_IC:
3932 case Code::STORE_IC: 4137 case Code::STORE_IC:
3933 case Code::KEYED_STORE_IC: 4138 case Code::KEYED_STORE_IC:
3934 __ nop(); // Signals no inlined code. 4139 __ nop(); // Signals no inlined code.
3935 break; 4140 break;
(...skipping 44 matching lines...)
3980 // And return. 4185 // And return.
3981 __ ret(0); 4186 __ ret(0);
3982 } 4187 }
3983 4188
3984 4189
3985 #undef __ 4190 #undef __
3986 4191
3987 } } // namespace v8::internal 4192 } } // namespace v8::internal
3988 4193
3989 #endif // V8_TARGET_ARCH_IA32 4194 #endif // V8_TARGET_ARCH_IA32