Chromium Code Reviews

Side by Side Diff: src/ia32/full-codegen-ia32.cc

Issue 6529032: Merge 6168:6800 from bleeding_edge to experimental/gc branch. (Closed)
Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 10 months ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 28 matching lines...)
40 40
41 namespace v8 { 41 namespace v8 {
42 namespace internal { 42 namespace internal {
43 43
44 44
45 #define __ ACCESS_MASM(masm_) 45 #define __ ACCESS_MASM(masm_)
46 46
47 47
48 class JumpPatchSite BASE_EMBEDDED { 48 class JumpPatchSite BASE_EMBEDDED {
49 public: 49 public:
50 explicit JumpPatchSite(MacroAssembler* masm) 50 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
51 : masm_(masm) {
52 #ifdef DEBUG 51 #ifdef DEBUG
53 info_emitted_ = false; 52 info_emitted_ = false;
54 #endif 53 #endif
55 } 54 }
56 55
57 ~JumpPatchSite() { 56 ~JumpPatchSite() {
58 ASSERT(patch_site_.is_bound() == info_emitted_); 57 ASSERT(patch_site_.is_bound() == info_emitted_);
59 } 58 }
60 59
61 void EmitJumpIfNotSmi(Register reg, NearLabel* target) { 60 void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
62 __ test(reg, Immediate(kSmiTagMask)); 61 __ test(reg, Immediate(kSmiTagMask));
63 EmitJump(not_carry, target); // Always taken before patched. 62 EmitJump(not_carry, target); // Always taken before patched.
64 } 63 }
65 64
66 void EmitJumpIfSmi(Register reg, NearLabel* target) { 65 void EmitJumpIfSmi(Register reg, NearLabel* target) {
67 __ test(reg, Immediate(kSmiTagMask)); 66 __ test(reg, Immediate(kSmiTagMask));
68 EmitJump(carry, target); // Never taken before patched. 67 EmitJump(carry, target); // Never taken before patched.
69 } 68 }
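
Note: the jump conditions above read oddly because 'test' always clears the carry flag, so before patching j(not_carry) is statically always taken and j(carry) never taken; the IC later rewrites the condition byte at the recorded patch site. A minimal standalone C++ sketch of the smi tagging scheme that 'test(reg, Immediate(kSmiTagMask))' inspects (illustrative names, not V8 code):

    #include <cstdint>

    constexpr intptr_t kSmiTagMaskSketch = 1;  // low bit is the tag

    // On ia32 a smi stores its payload shifted left by one with a 0 tag
    // bit, so AND-ing with the mask is zero exactly for smis.
    inline bool IsSmiSketch(intptr_t tagged) {
      return (tagged & kSmiTagMaskSketch) == 0;
    }

    inline intptr_t SmiValueSketch(intptr_t tagged) { return tagged >> 1; }
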
70 69
71 void EmitPatchInfo() { 70 void EmitPatchInfo() {
72 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); 71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
73 ASSERT(is_int8(delta_to_patch_site)); 72 ASSERT(is_int8(delta_to_patch_site));
(...skipping 125 matching lines...)
199 // The stub will rewrite receiver and parameter count if the previous 198 // The stub will rewrite receiver and parameter count if the previous
200 // stack frame was an arguments adapter frame. 199 // stack frame was an arguments adapter frame.
201 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); 200 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
202 __ CallStub(&stub); 201 __ CallStub(&stub);
203 __ mov(ecx, eax); // Duplicate result. 202 __ mov(ecx, eax); // Duplicate result.
204 Move(arguments->AsSlot(), eax, ebx, edx); 203 Move(arguments->AsSlot(), eax, ebx, edx);
205 Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot(); 204 Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot();
206 Move(dot_arguments_slot, ecx, ebx, edx); 205 Move(dot_arguments_slot, ecx, ebx, edx);
207 } 206 }
208 207
209 { Comment cmnt(masm_, "[ Declarations");
210 // For named function expressions, declare the function name as a
211 // constant.
212 if (scope()->is_function_scope() && scope()->function() != NULL) {
213 EmitDeclaration(scope()->function(), Variable::CONST, NULL);
214 }
215 // Visit all the explicit declarations unless there is an illegal
216 // redeclaration.
217 if (scope()->HasIllegalRedeclaration()) {
218 scope()->VisitIllegalRedeclaration(this);
219 } else {
220 VisitDeclarations(scope()->declarations());
221 }
222 }
223
224 if (FLAG_trace) { 208 if (FLAG_trace) {
225 __ CallRuntime(Runtime::kTraceEnter, 0); 209 __ CallRuntime(Runtime::kTraceEnter, 0);
226 } 210 }
227 211
228 { Comment cmnt(masm_, "[ Stack check"); 212 // Visit the declarations and body unless there is an illegal
229 PrepareForBailout(info->function(), NO_REGISTERS); 213 // redeclaration.
230 NearLabel ok; 214 if (scope()->HasIllegalRedeclaration()) {
231 ExternalReference stack_limit = 215 Comment cmnt(masm_, "[ Declarations");
232 ExternalReference::address_of_stack_limit(); 216 scope()->VisitIllegalRedeclaration(this);
233 __ cmp(esp, Operand::StaticVariable(stack_limit)); 217
234 __ j(above_equal, &ok, taken); 218 } else {
235 StackCheckStub stub; 219 { Comment cmnt(masm_, "[ Declarations");
236 __ CallStub(&stub); 220 // For named function expressions, declare the function name as a
237 __ bind(&ok); 221 // constant.
222 if (scope()->is_function_scope() && scope()->function() != NULL) {
223 EmitDeclaration(scope()->function(), Variable::CONST, NULL);
224 }
225 VisitDeclarations(scope()->declarations());
226 }
227
228 { Comment cmnt(masm_, "[ Stack check");
229 PrepareForBailout(info->function(), NO_REGISTERS);
230 NearLabel ok;
231 ExternalReference stack_limit =
232 ExternalReference::address_of_stack_limit();
233 __ cmp(esp, Operand::StaticVariable(stack_limit));
234 __ j(above_equal, &ok, taken);
235 StackCheckStub stub;
236 __ CallStub(&stub);
237 __ bind(&ok);
238 }
239
240 { Comment cmnt(masm_, "[ Body");
241 ASSERT(loop_depth() == 0);
242 VisitStatements(function()->body());
243 ASSERT(loop_depth() == 0);
244 }
238 } 245 }
239 246
240 { Comment cmnt(masm_, "[ Body"); 247 // Always emit a 'return undefined' in case control fell off the end of
241 ASSERT(loop_depth() == 0); 248 // the body.
242 VisitStatements(function()->body());
243 ASSERT(loop_depth() == 0);
244 }
245
246 { Comment cmnt(masm_, "[ return <undefined>;"); 249 { Comment cmnt(masm_, "[ return <undefined>;");
247 // Emit a 'return undefined' in case control fell off the end of the body.
248 __ mov(eax, Factory::undefined_value()); 250 __ mov(eax, Factory::undefined_value());
249 EmitReturnSequence(); 251 EmitReturnSequence();
250 } 252 }
251 } 253 }
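
Note: after this restructuring the prologue order is declarations, optional trace call, stack check, body, then a final 'return undefined', with everything but the trace call nested inside the no-illegal-redeclaration arm. A hedged C++ sketch of the stack-limit guard the emitted check performs (the limit variable and stub callback are stand-ins, not V8 API):

    #include <cstdint>

    // Stand-in for ExternalReference::address_of_stack_limit(); the real
    // limit is maintained by the runtime.
    inline uintptr_t stack_limit_sketch = 0x1000;

    inline void StackCheckSketch(uintptr_t esp, void (*stack_check_stub)()) {
      if (esp >= stack_limit_sketch) return;  // j(above_equal, &ok): fast path
      stack_check_stub();                     // slow path: let the runtime run
    }
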
252 254
253 255
254 void FullCodeGenerator::ClearAccumulator() { 256 void FullCodeGenerator::ClearAccumulator() {
255 __ Set(eax, Immediate(Smi::FromInt(0))); 257 __ Set(eax, Immediate(Smi::FromInt(0)));
256 } 258 }
257 259
258 260
259 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { 261 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
260 Comment cmnt(masm_, "[ Stack check"); 262 Comment cmnt(masm_, "[ Stack check");
261 NearLabel ok; 263 NearLabel ok;
262 ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); 264 ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
263 __ cmp(esp, Operand::StaticVariable(stack_limit)); 265 __ cmp(esp, Operand::StaticVariable(stack_limit));
264 __ j(above_equal, &ok, taken); 266 __ j(above_equal, &ok, taken);
265 StackCheckStub stub; 267 StackCheckStub stub;
266 __ CallStub(&stub); 268 __ CallStub(&stub);
269 // Record a mapping of this PC offset to the OSR id. This is used to find
270 // the AST id from the unoptimized code in order to use it as a key into
271 // the deoptimization input data found in the optimized code.
272 RecordStackCheck(stmt->OsrEntryId());
273
274 // Loop stack checks can be patched to perform on-stack replacement. In
275 // order to decide whether or not to perform OSR we embed the loop depth
276 // in a test instruction after the call so we can extract it from the OSR
277 // builtin.
278 ASSERT(loop_depth() > 0);
279 __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
280
267 __ bind(&ok); 281 __ bind(&ok);
268 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 282 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
283 // Record a mapping of the OSR id to this PC. This is used if the OSR
284 // entry becomes the target of a bailout. We don't expect it to be, but
285 // we want it to work if it is.
269 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 286 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
270 RecordStackCheck(stmt->OsrEntryId());
271 // Loop stack checks can be patched to perform on-stack
272 // replacement. In order to decide whether or not to perform OSR we
273 // embed the loop depth in a test instruction after the call so we
274 // can extract it from the OSR builtin.
275 ASSERT(loop_depth() > 0);
276 __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
277 } 287 }
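
Note: the reordering records the PC-to-OSR-id mapping and emits the loop-depth marker before binding &ok, and adds a bailout mapping for the OSR entry itself. The marker is literally the immediate of the 'test eax, imm32' that follows the stub call; a sketch of decoding it from the instruction stream (offsets assumed, not V8 code):

    #include <cstdint>
    #include <cstring>

    // 'test eax, imm32' encodes as opcode 0xA9 followed by a 32-bit
    // immediate, so the OSR machinery can read the loop depth directly
    // out of the unoptimized code right after the call site.
    inline uint32_t LoopDepthMarkerSketch(const uint8_t* after_call_pc) {
      if (after_call_pc[0] != 0xA9) return 0;  // not a marker (assumption)
      uint32_t depth;
      std::memcpy(&depth, after_call_pc + 1, sizeof(depth));
      return depth;
    }
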
278 288
279 289
280 void FullCodeGenerator::EmitReturnSequence() { 290 void FullCodeGenerator::EmitReturnSequence() {
281 Comment cmnt(masm_, "[ Return sequence"); 291 Comment cmnt(masm_, "[ Return sequence");
282 if (return_label_.is_bound()) { 292 if (return_label_.is_bound()) {
283 __ jmp(&return_label_); 293 __ jmp(&return_label_);
284 } else { 294 } else {
285 // Common return label. 295 // Common return label.
286 __ bind(&return_label_); 296 __ bind(&return_label_);
287 if (FLAG_trace) { 297 if (FLAG_trace) {
288 __ push(eax); 298 __ push(eax);
289 __ CallRuntime(Runtime::kTraceExit, 1); 299 __ CallRuntime(Runtime::kTraceExit, 1);
290 } 300 }
291 #ifdef DEBUG 301 #ifdef DEBUG
292 // Add a label for checking the size of the code used for returning. 302 // Add a label for checking the size of the code used for returning.
293 Label check_exit_codesize; 303 Label check_exit_codesize;
294 masm_->bind(&check_exit_codesize); 304 masm_->bind(&check_exit_codesize);
295 #endif 305 #endif
296 SetSourcePosition(function()->end_position() - 1); 306 SetSourcePosition(function()->end_position() - 1);
297 __ RecordJSReturn(); 307 __ RecordJSReturn();
298 // Do not use the leave instruction here because it is too short to 308 // Do not use the leave instruction here because it is too short to
299 // patch with the code required by the debugger. 309 // patch with the code required by the debugger.
300 __ mov(esp, ebp); 310 __ mov(esp, ebp);
301 __ pop(ebp); 311 __ pop(ebp);
302 __ ret((scope()->num_parameters() + 1) * kPointerSize); 312
313 int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
314 __ Ret(arguments_bytes, ecx);
303 #ifdef ENABLE_DEBUGGER_SUPPORT 315 #ifdef ENABLE_DEBUGGER_SUPPORT
304 // Check that the size of the code used for returning matches what is 316 // Check that the size of the code used for returning is large enough
305 // expected by the debugger. 317 // for the debugger's requirements.
306 ASSERT_EQ(Assembler::kJSReturnSequenceLength, 318 ASSERT(Assembler::kJSReturnSequenceLength <=
307 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); 319 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
308 #endif 320 #endif
309 } 321 }
310 } 322 }
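
Note: besides switching to Ret(arguments_bytes, ecx), the size assertion is relaxed from exact equality to a lower bound, since the sequence only needs to be at least long enough for the debugger to patch. The explicit 'mov esp, ebp; pop ebp' pair is 'leave' spelled out; a sketch of that equivalence on a simulated frame (illustrative, not V8 code):

    #include <cstdint>

    struct FrameSketch {
      uintptr_t* ebp;  // frame pointer
      uintptr_t* esp;  // stack pointer, grows down
    };

    // 'leave' == 'mov esp, ebp' + 'pop ebp'; the two-instruction form is
    // used because it leaves enough bytes for the debugger to patch.
    inline void TeardownSketch(FrameSketch& f) {
      f.esp = f.ebp;                                 // mov esp, ebp
      f.ebp = reinterpret_cast<uintptr_t*>(*f.esp);  // pop ebp: load saved ebp
      f.esp += 1;                                    // pop ebp: esp += kPointerSize
    }
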
311 323
312 324
313 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand( 325 FullCodeGenerator::ConstantOperand FullCodeGenerator::GetConstantOperand(
314 Token::Value op, Expression* left, Expression* right) { 326 Token::Value op, Expression* left, Expression* right) {
315 ASSERT(ShouldInlineSmiCase(op)); 327 ASSERT(ShouldInlineSmiCase(op));
316 if (op == Token::DIV || op == Token::MOD || op == Token::MUL) { 328 if (op == Token::DIV || op == Token::MOD || op == Token::MUL) {
317 // We never generate inlined constant smi operations for these. 329 // We never generate inlined constant smi operations for these.
(...skipping 278 matching lines...)
596 Register src, 608 Register src,
597 Register scratch1, 609 Register scratch1,
598 Register scratch2) { 610 Register scratch2) {
599 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. 611 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
600 ASSERT(!scratch1.is(src) && !scratch2.is(src)); 612 ASSERT(!scratch1.is(src) && !scratch2.is(src));
601 MemOperand location = EmitSlotSearch(dst, scratch1); 613 MemOperand location = EmitSlotSearch(dst, scratch1);
602 __ mov(location, src); 614 __ mov(location, src);
603 615
604 // Emit the write barrier code if the location is in the heap. 616 // Emit the write barrier code if the location is in the heap.
605 if (dst->type() == Slot::CONTEXT) { 617 if (dst->type() == Slot::CONTEXT) {
606 int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize; 618 int offset = Context::SlotOffset(dst->index());
619 ASSERT(!scratch1.is(esi) && !src.is(esi) && !scratch2.is(esi));
607 __ RecordWrite(scratch1, offset, src, scratch2, kDontSaveFPRegs); 620 __ RecordWrite(scratch1, offset, src, scratch2, kDontSaveFPRegs);
608 } 621 }
609 } 622 }
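
Note: the offset now comes from Context::SlotOffset, and the added assert documents that RecordWrite may clobber its register arguments, so esi must not be among them. A hedged sketch of the old-to-new pointer check a generational write barrier performs (predicate names are assumptions, with stub bodies to keep the sketch self-contained):

    // Real versions would consult heap page flags and a remembered set;
    // the stubs below are placeholders.
    inline bool InNewSpaceSketch(const void*) { return false; }  // assumption
    inline void RememberSlotSketch(void**) {}                    // assumption

    // Only an old-space slot receiving a new-space value needs recording;
    // everything else is already reachable by the next scavenge.
    inline void RecordWriteSketch(void** slot, void* value) {
      if (!InNewSpaceSketch(slot) && InNewSpaceSketch(value)) {
        RememberSlotSketch(slot);
      }
    }
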
610 623
611 624
612 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, 625 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
613 bool should_normalize, 626 bool should_normalize,
614 Label* if_true, 627 Label* if_true,
615 Label* if_false) { 628 Label* if_false) {
616 // Only prepare for bailouts before splits if we're in a test 629 // Only prepare for bailouts before splits if we're in a test
(...skipping 35 matching lines...)
652 } else if (function != NULL) { 665 } else if (function != NULL) {
653 VisitForAccumulatorValue(function); 666 VisitForAccumulatorValue(function);
654 __ mov(Operand(ebp, SlotOffset(slot)), result_register()); 667 __ mov(Operand(ebp, SlotOffset(slot)), result_register());
655 } 668 }
656 break; 669 break;
657 670
658 case Slot::CONTEXT: 671 case Slot::CONTEXT:
659 // We bypass the general EmitSlotSearch because we know more about 672 // We bypass the general EmitSlotSearch because we know more about
660 // this specific context. 673 // this specific context.
661 674
662 // The variable in the decl always resides in the current context. 675 // The variable in the decl always resides in the current function
676 // context.
663 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 677 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
664 if (FLAG_debug_code) { 678 if (FLAG_debug_code) {
665 // Check if we have the correct context pointer. 679 // Check that we're not inside a 'with'.
666 __ mov(ebx, ContextOperand(esi, Context::FCONTEXT_INDEX)); 680 __ mov(ebx, ContextOperand(esi, Context::FCONTEXT_INDEX));
667 __ cmp(ebx, Operand(esi)); 681 __ cmp(ebx, Operand(esi));
668 __ Check(equal, "Unexpected declaration in current context."); 682 __ Check(equal, "Unexpected declaration in current context.");
669 } 683 }
670 if (mode == Variable::CONST) { 684 if (mode == Variable::CONST) {
671 __ mov(ContextOperand(esi, slot->index()), 685 __ mov(ContextOperand(esi, slot->index()),
672 Immediate(Factory::the_hole_value())); 686 Immediate(Factory::the_hole_value()));
673 // No write barrier since the hole value is in old space. 687 // No write barrier since the hole value is in old space.
674 } else if (function != NULL) { 688 } else if (function != NULL) {
675 VisitForAccumulatorValue(function); 689 VisitForAccumulatorValue(function);
(...skipping 23 matching lines...)
699 __ push(Immediate(Smi::FromInt(0))); // No initial value! 713 __ push(Immediate(Smi::FromInt(0))); // No initial value!
700 } 714 }
701 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 715 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
702 break; 716 break;
703 } 717 }
704 } 718 }
705 719
706 } else if (prop != NULL) { 720 } else if (prop != NULL) {
707 if (function != NULL || mode == Variable::CONST) { 721 if (function != NULL || mode == Variable::CONST) {
708 // We are declaring a function or constant that rewrites to a 722 // We are declaring a function or constant that rewrites to a
709 // property. Use (keyed) IC to set the initial value. 723 // property. Use (keyed) IC to set the initial value. We cannot
710 VisitForStackValue(prop->obj()); 724 // visit the rewrite because it's shared and we risk recording
725 // duplicate AST IDs for bailouts from optimized code.
726 ASSERT(prop->obj()->AsVariableProxy() != NULL);
727 { AccumulatorValueContext for_object(this);
728 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
729 }
730
711 if (function != NULL) { 731 if (function != NULL) {
712 VisitForStackValue(prop->key()); 732 __ push(eax);
713 VisitForAccumulatorValue(function); 733 VisitForAccumulatorValue(function);
714 __ pop(ecx); 734 __ pop(edx);
715 } else { 735 } else {
716 VisitForAccumulatorValue(prop->key()); 736 __ mov(edx, eax);
717 __ mov(ecx, result_register()); 737 __ mov(eax, Factory::the_hole_value());
718 __ mov(result_register(), Factory::the_hole_value());
719 } 738 }
720 __ pop(edx); 739 ASSERT(prop->key()->AsLiteral() != NULL &&
740 prop->key()->AsLiteral()->handle()->IsSmi());
741 __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
721 742
722 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 743 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
723 EmitCallIC(ic, RelocInfo::CODE_TARGET); 744 EmitCallIC(ic, RelocInfo::CODE_TARGET);
724 } 745 }
725 } 746 }
726 } 747 }
727 748
728 749
729 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { 750 void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
730 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun()); 751 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
(...skipping 19 matching lines...)
750 VisitForStackValue(stmt->tag()); 771 VisitForStackValue(stmt->tag());
751 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 772 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
752 773
753 ZoneList<CaseClause*>* clauses = stmt->cases(); 774 ZoneList<CaseClause*>* clauses = stmt->cases();
754 CaseClause* default_clause = NULL; // Can occur anywhere in the list. 775 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
755 776
756 Label next_test; // Recycled for each test. 777 Label next_test; // Recycled for each test.
757 // Compile all the tests with branches to their bodies. 778 // Compile all the tests with branches to their bodies.
758 for (int i = 0; i < clauses->length(); i++) { 779 for (int i = 0; i < clauses->length(); i++) {
759 CaseClause* clause = clauses->at(i); 780 CaseClause* clause = clauses->at(i);
781 clause->body_target()->entry_label()->Unuse();
782
760 // The default is not a test, but remember it as final fall through. 783 // The default is not a test, but remember it as final fall through.
761 if (clause->is_default()) { 784 if (clause->is_default()) {
762 default_clause = clause; 785 default_clause = clause;
763 continue; 786 continue;
764 } 787 }
765 788
766 Comment cmnt(masm_, "[ Case comparison"); 789 Comment cmnt(masm_, "[ Case comparison");
767 __ bind(&next_test); 790 __ bind(&next_test);
768 next_test.Unuse(); 791 next_test.Unuse();
769 792
(...skipping 338 matching lines...)
1108 } 1131 }
1109 __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX)); 1132 __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX));
1110 __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset)); 1133 __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
1111 // Walk the rest of the chain without clobbering esi. 1134 // Walk the rest of the chain without clobbering esi.
1112 context = temp; 1135 context = temp;
1113 } 1136 }
1114 } 1137 }
1115 // Check that last extension is NULL. 1138 // Check that last extension is NULL.
1116 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); 1139 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1117 __ j(not_equal, slow); 1140 __ j(not_equal, slow);
1118 __ mov(temp, ContextOperand(context, Context::FCONTEXT_INDEX)); 1141
1119 return ContextOperand(temp, slot->index()); 1142 // This function is used only for loads, not stores, so it's safe to
1143 // return an esi-based operand (the write barrier cannot be allowed to
1144 // destroy the esi register).
1145 return ContextOperand(context, slot->index());
1120 } 1146 }
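
Note: the new comment is the key point here: the returned operand is esi-relative, which is only sound for loads, because a store would need RecordWrite and the write barrier cannot be allowed to destroy esi. A C++ sketch of the chain walk the generated code mirrors (context layout and field names are assumptions):

    // Illustrative context shape; field names are assumptions.
    struct ContextSketch {
      ContextSketch* previous;  // enclosing context
      void* extension;          // EXTENSION_INDEX slot
      void* slots[1];           // variable-sized slot array
    };

    // Walk 'length' contexts, bailing out (nullptr == slow path) whenever
    // a context carries an extension object, mirroring j(not_equal, slow).
    inline void** ContextSlotSketch(ContextSketch* ctx, int length, int index) {
      for (int i = 0; i < length; i++) {
        if (ctx->extension != nullptr) return nullptr;
        ctx = ctx->previous;
      }
      if (ctx->extension != nullptr) return nullptr;  // last extension check
      return &ctx->slots[index];
    }
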
1121 1147
1122 1148
1123 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( 1149 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1124 Slot* slot, 1150 Slot* slot,
1125 TypeofState typeof_state, 1151 TypeofState typeof_state,
1126 Label* slow, 1152 Label* slow,
1127 Label* done) { 1153 Label* done) {
1128 // Generate fast-case code for variables that might be shadowed by 1154 // Generate fast-case code for variables that might be shadowed by
1129 // eval-introduced variables. Eval is used a lot without 1155 // eval-introduced variables. Eval is used a lot without
(...skipping 361 matching lines...)
1491 VisitForAccumulatorValue(property->obj()); 1517 VisitForAccumulatorValue(property->obj());
1492 __ push(result_register()); 1518 __ push(result_register());
1493 } else { 1519 } else {
1494 VisitForStackValue(property->obj()); 1520 VisitForStackValue(property->obj());
1495 } 1521 }
1496 break; 1522 break;
1497 case KEYED_PROPERTY: { 1523 case KEYED_PROPERTY: {
1498 if (expr->is_compound()) { 1524 if (expr->is_compound()) {
1499 if (property->is_arguments_access()) { 1525 if (property->is_arguments_access()) {
1500 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); 1526 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1501 __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx)); 1527 MemOperand slot_operand =
1528 EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
1529 __ push(slot_operand);
1502 __ mov(eax, Immediate(property->key()->AsLiteral()->handle())); 1530 __ mov(eax, Immediate(property->key()->AsLiteral()->handle()));
1503 } else { 1531 } else {
1504 VisitForStackValue(property->obj()); 1532 VisitForStackValue(property->obj());
1505 VisitForAccumulatorValue(property->key()); 1533 VisitForAccumulatorValue(property->key());
1506 } 1534 }
1507 __ mov(edx, Operand(esp, 0)); 1535 __ mov(edx, Operand(esp, 0));
1508 __ push(eax); 1536 __ push(eax);
1509 } else { 1537 } else {
1510 if (property->is_arguments_access()) { 1538 if (property->is_arguments_access()) {
1511 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); 1539 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1512 __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx)); 1540 MemOperand slot_operand =
1541 EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
1542 __ push(slot_operand);
1513 __ push(Immediate(property->key()->AsLiteral()->handle())); 1543 __ push(Immediate(property->key()->AsLiteral()->handle()));
1514 } else { 1544 } else {
1515 VisitForStackValue(property->obj()); 1545 VisitForStackValue(property->obj());
1516 VisitForStackValue(property->key()); 1546 VisitForStackValue(property->key());
1517 } 1547 }
1518 } 1548 }
1519 break; 1549 break;
1520 } 1550 }
1521 } 1551 }
1522 1552
(...skipping 425 matching lines...)
1948 VisitForAccumulatorValue(prop->obj()); 1978 VisitForAccumulatorValue(prop->obj());
1949 __ mov(edx, eax); 1979 __ mov(edx, eax);
1950 __ pop(eax); // Restore value. 1980 __ pop(eax); // Restore value.
1951 __ mov(ecx, prop->key()->AsLiteral()->handle()); 1981 __ mov(ecx, prop->key()->AsLiteral()->handle());
1952 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); 1982 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1953 EmitCallIC(ic, RelocInfo::CODE_TARGET); 1983 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1954 break; 1984 break;
1955 } 1985 }
1956 case KEYED_PROPERTY: { 1986 case KEYED_PROPERTY: {
1957 __ push(eax); // Preserve value. 1987 __ push(eax); // Preserve value.
1958 VisitForStackValue(prop->obj()); 1988 if (prop->is_synthetic()) {
1959 VisitForAccumulatorValue(prop->key()); 1989 ASSERT(prop->obj()->AsVariableProxy() != NULL);
1960 __ mov(ecx, eax); 1990 ASSERT(prop->key()->AsLiteral() != NULL);
1961 __ pop(edx); 1991 { AccumulatorValueContext for_object(this);
1992 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1993 }
1994 __ mov(edx, eax);
1995 __ Set(ecx, Immediate(prop->key()->AsLiteral()->handle()));
1996 } else {
1997 VisitForStackValue(prop->obj());
1998 VisitForAccumulatorValue(prop->key());
1999 __ mov(ecx, eax);
2000 __ pop(edx);
2001 }
1962 __ pop(eax); // Restore value. 2002 __ pop(eax); // Restore value.
1963 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 2003 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1964 EmitCallIC(ic, RelocInfo::CODE_TARGET); 2004 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1965 break; 2005 break;
1966 } 2006 }
1967 } 2007 }
1968 PrepareForBailoutForId(bailout_ast_id, TOS_REG); 2008 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1969 context()->Plug(eax); 2009 context()->Plug(eax);
1970 } 2010 }
1971 2011
1972 2012
1973 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2013 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1974 Token::Value op) { 2014 Token::Value op) {
1975 // Left-hand sides that rewrite to explicit property accesses do not reach 2015 // Left-hand sides that rewrite to explicit property accesses do not reach
1976 // here. 2016 // here.
1977 ASSERT(var != NULL); 2017 ASSERT(var != NULL);
1978 ASSERT(var->is_global() || var->AsSlot() != NULL); 2018 ASSERT(var->is_global() || var->AsSlot() != NULL);
1979 2019
1980 if (var->is_global()) { 2020 if (var->is_global()) {
1981 ASSERT(!var->is_this()); 2021 ASSERT(!var->is_this());
1982 // Assignment to a global variable. Use inline caching for the 2022 // Assignment to a global variable. Use inline caching for the
1983 // assignment. Right-hand-side value is passed in eax, variable name in 2023 // assignment. Right-hand-side value is passed in eax, variable name in
1984 // ecx, and the global object on the stack. 2024 // ecx, and the global object on the stack.
1985 __ mov(ecx, var->name()); 2025 __ mov(ecx, var->name());
1986 __ mov(edx, GlobalObjectOperand()); 2026 __ mov(edx, GlobalObjectOperand());
1987 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); 2027 Handle<Code> ic(Builtins::builtin(
1988 EmitCallIC(ic, RelocInfo::CODE_TARGET); 2028 is_strict() ? Builtins::StoreIC_Initialize_Strict
2029 : Builtins::StoreIC_Initialize));
2030 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1989 2031
1990 } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) { 2032 } else if (op == Token::INIT_CONST) {
1991 // Perform the assignment for non-const variables and for initialization 2033 // Like var declarations, const declarations are hoisted to function
1992 // of const variables. Const assignments are simply skipped. 2034 // scope. However, unlike var initializers, const initializers are able
1993 Label done; 2035 // to drill a hole to that function context, even from inside a 'with'
2036 // context. We thus bypass the normal static scope lookup.
2037 Slot* slot = var->AsSlot();
2038 Label skip;
2039 switch (slot->type()) {
2040 case Slot::PARAMETER:
2041 // No const parameters.
2042 UNREACHABLE();
2043 break;
2044 case Slot::LOCAL:
2045 __ mov(edx, Operand(ebp, SlotOffset(slot)));
2046 __ cmp(edx, Factory::the_hole_value());
2047 __ j(not_equal, &skip);
2048 __ mov(Operand(ebp, SlotOffset(slot)), eax);
2049 break;
2050 case Slot::CONTEXT: {
2051 __ mov(ecx, ContextOperand(esi, Context::FCONTEXT_INDEX));
2052 __ mov(edx, ContextOperand(ecx, slot->index()));
2053 __ cmp(edx, Factory::the_hole_value());
2054 __ j(not_equal, &skip);
2055 __ mov(ContextOperand(ecx, slot->index()), eax);
2056 int offset = Context::SlotOffset(slot->index());
2057 __ mov(edx, eax); // Preserve the stored value in eax.
2058 __ RecordWrite(ecx, offset, edx, ebx, kDontSaveFPRegs);
2059 break;
2060 }
2061 case Slot::LOOKUP:
2062 __ push(eax);
2063 __ push(esi);
2064 __ push(Immediate(var->name()));
2065 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2066 break;
2067 }
2068 __ bind(&skip);
2069
2070 } else if (var->mode() != Variable::CONST) {
2071 // Perform the assignment for non-const variables. Const assignments
2072 // are simply skipped.
1994 Slot* slot = var->AsSlot(); 2073 Slot* slot = var->AsSlot();
1995 switch (slot->type()) { 2074 switch (slot->type()) {
1996 case Slot::PARAMETER: 2075 case Slot::PARAMETER:
1997 case Slot::LOCAL: 2076 case Slot::LOCAL:
1998 if (op == Token::INIT_CONST) {
1999 // Detect const reinitialization by checking for the hole value.
2000 __ mov(edx, Operand(ebp, SlotOffset(slot)));
2001 __ cmp(edx, Factory::the_hole_value());
2002 __ j(not_equal, &done);
2003 }
2004 // Perform the assignment. 2077 // Perform the assignment.
2005 __ mov(Operand(ebp, SlotOffset(slot)), eax); 2078 __ mov(Operand(ebp, SlotOffset(slot)), eax);
2006 break; 2079 break;
2007 2080
2008 case Slot::CONTEXT: { 2081 case Slot::CONTEXT: {
2009 MemOperand target = EmitSlotSearch(slot, ecx); 2082 MemOperand target = EmitSlotSearch(slot, ecx);
2010 if (op == Token::INIT_CONST) {
2011 // Detect const reinitialization by checking for the hole value.
2012 __ mov(edx, target);
2013 __ cmp(edx, Factory::the_hole_value());
2014 __ j(not_equal, &done);
2015 }
2016 // Perform the assignment and issue the write barrier. 2083 // Perform the assignment and issue the write barrier.
2017 __ mov(target, eax); 2084 __ mov(target, eax);
2018 2085
2019 // The value of the assignment is in eax. RecordWrite clobbers its 2086 // The value of the assignment is in eax. RecordWrite clobbers its
2020 // register arguments. 2087 // register arguments.
2021 __ mov(edx, eax); 2088 __ mov(edx, eax);
2022 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; 2089 int offset = Context::SlotOffset(slot->index());
2023 __ RecordWrite(ecx, offset, edx, ebx, kDontSaveFPRegs); 2090 __ RecordWrite(ecx, offset, edx, ebx, kDontSaveFPRegs);
2024 break; 2091 break;
2025 } 2092 }
2026 2093
2027 case Slot::LOOKUP: 2094 case Slot::LOOKUP:
2028 // Call the runtime for the assignment. The runtime will ignore 2095 // Call the runtime for the assignment.
2029 // const reinitialization.
2030 __ push(eax); // Value. 2096 __ push(eax); // Value.
2031 __ push(esi); // Context. 2097 __ push(esi); // Context.
2032 __ push(Immediate(var->name())); 2098 __ push(Immediate(var->name()));
2033 if (op == Token::INIT_CONST) { 2099 __ CallRuntime(Runtime::kStoreContextSlot, 3);
2034 // The runtime will ignore const redeclaration.
2035 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2036 } else {
2037 __ CallRuntime(Runtime::kStoreContextSlot, 3);
2038 }
2039 break; 2100 break;
2040 } 2101 }
2041 __ bind(&done);
2042 } 2102 }
2043 } 2103 }
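
Note: the INIT_CONST case is now a separate arm: const slots start out holding the hole sentinel, and initialization stores only if the hole is still there, so re-running an initializer is silently skipped rather than an error. A small sketch of that protocol (the sentinel value and names are illustrative):

    #include <cstdint>

    // Illustrative hole sentinel; V8 uses a dedicated heap object.
    static void* const kTheHoleSketch =
        reinterpret_cast<void*>(~static_cast<uintptr_t>(0));

    // INIT_CONST semantics: store only into a still-uninitialized slot,
    // mirroring the cmp/j(not_equal, &skip) pattern above.
    inline void InitConstSketch(void** slot, void* value) {
      if (*slot == kTheHoleSketch) *slot = value;
    }
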
2044 2104
2045 2105
2046 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2106 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2047 // Assignment to a property, using a named store IC. 2107 // Assignment to a property, using a named store IC.
2048 Property* prop = expr->target()->AsProperty(); 2108 Property* prop = expr->target()->AsProperty();
2049 ASSERT(prop != NULL); 2109 ASSERT(prop != NULL);
2050 ASSERT(prop->key()->AsLiteral() != NULL); 2110 ASSERT(prop->key()->AsLiteral() != NULL);
2051 2111
(...skipping 199 matching lines...)
2251 2311
2252 // Push copy of the first argument or undefined if it doesn't exist. 2312 // Push copy of the first argument or undefined if it doesn't exist.
2253 if (arg_count > 0) { 2313 if (arg_count > 0) {
2254 __ push(Operand(esp, arg_count * kPointerSize)); 2314 __ push(Operand(esp, arg_count * kPointerSize));
2255 } else { 2315 } else {
2256 __ push(Immediate(Factory::undefined_value())); 2316 __ push(Immediate(Factory::undefined_value()));
2257 } 2317 }
2258 2318
2259 // Push the receiver of the enclosing function and do runtime call. 2319 // Push the receiver of the enclosing function and do runtime call.
2260 __ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize)); 2320 __ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize));
2261 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3); 2321 // Push the strict mode flag.
2322 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
2323 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
2262 2324
2263 // The runtime call returns a pair of values in eax (function) and 2325 // The runtime call returns a pair of values in eax (function) and
2264 // edx (receiver). Touch up the stack with the right values. 2326 // edx (receiver). Touch up the stack with the right values.
2265 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx); 2327 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2266 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax); 2328 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2267 } 2329 }
2268 // Record source position for debugger. 2330 // Record source position for debugger.
2269 SetSourcePosition(expr->position()); 2331 SetSourcePosition(expr->position());
2270 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; 2332 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2271 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); 2333 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
(...skipping 50 matching lines...)
2322 Property* prop = fun->AsProperty(); 2384 Property* prop = fun->AsProperty();
2323 Literal* key = prop->key()->AsLiteral(); 2385 Literal* key = prop->key()->AsLiteral();
2324 if (key != NULL && key->handle()->IsSymbol()) { 2386 if (key != NULL && key->handle()->IsSymbol()) {
2325 // Call to a named property, use call IC. 2387 // Call to a named property, use call IC.
2326 VisitForStackValue(prop->obj()); 2388 VisitForStackValue(prop->obj());
2327 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET); 2389 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2328 } else { 2390 } else {
2329 // Call to a keyed property. 2391 // Call to a keyed property.
2330 // For a synthetic property use keyed load IC followed by function call, 2392 // For a synthetic property use keyed load IC followed by function call,
2331 // for a regular property use keyed EmitCallIC. 2393 // for a regular property use keyed EmitCallIC.
2332 { PreservePositionScope scope(masm()->positions_recorder());
2333 VisitForStackValue(prop->obj());
2334 }
2335 if (prop->is_synthetic()) { 2394 if (prop->is_synthetic()) {
2336 { PreservePositionScope scope(masm()->positions_recorder()); 2395 // Do not visit the object and key subexpressions (they are shared
2337 VisitForAccumulatorValue(prop->key()); 2396 // by all occurrences of the same rewritten parameter).
2338 } 2397 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2398 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2399 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2400 MemOperand operand = EmitSlotSearch(slot, edx);
2401 __ mov(edx, operand);
2402
2403 ASSERT(prop->key()->AsLiteral() != NULL);
2404 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2405 __ mov(eax, prop->key()->AsLiteral()->handle());
2406
2339 // Record source code position for IC call. 2407 // Record source code position for IC call.
2340 SetSourcePosition(prop->position()); 2408 SetSourcePosition(prop->position());
2341 __ pop(edx); // We do not need to keep the receiver.
2342 2409
2343 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); 2410 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
2344 EmitCallIC(ic, RelocInfo::CODE_TARGET); 2411 EmitCallIC(ic, RelocInfo::CODE_TARGET);
2345 // Push result (function). 2412 // Push result (function).
2346 __ push(eax); 2413 __ push(eax);
2347 // Push Global receiver. 2414 // Push Global receiver.
2348 __ mov(ecx, GlobalObjectOperand()); 2415 __ mov(ecx, GlobalObjectOperand());
2349 __ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset)); 2416 __ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
2350 EmitCallWithStub(expr); 2417 EmitCallWithStub(expr);
2351 } else { 2418 } else {
2419 { PreservePositionScope scope(masm()->positions_recorder());
2420 VisitForStackValue(prop->obj());
2421 }
2352 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET); 2422 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
2353 } 2423 }
2354 } 2424 }
2355 } else { 2425 } else {
2356 // Call to some other expression. If the expression is an anonymous 2426 // Call to some other expression. If the expression is an anonymous
2357 // function literal not called in a loop, mark it as one that should 2427 // function literal not called in a loop, mark it as one that should
2358 // also use the full code generator. 2428 // also use the full code generator.
2359 FunctionLiteral* lit = fun->AsFunctionLiteral(); 2429 FunctionLiteral* lit = fun->AsFunctionLiteral();
2360 if (lit != NULL && 2430 if (lit != NULL &&
2361 lit->name()->Equals(Heap::empty_string()) && 2431 lit->name()->Equals(Heap::empty_string()) &&
(...skipping 974 matching lines...)
3336 } 3406 }
3337 3407
3338 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset)); 3408 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
3339 __ IndexFromHash(eax, eax); 3409 __ IndexFromHash(eax, eax);
3340 3410
3341 context()->Plug(eax); 3411 context()->Plug(eax);
3342 } 3412 }
3343 3413
3344 3414
3345 void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) { 3415 void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
3346 Label bailout; 3416 Label bailout, done, one_char_separator, long_separator,
3347 Label done; 3417 non_trivial_array, not_size_one_array, loop, loop_condition,
3418 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3348 3419
3349 ASSERT(args->length() == 2); 3420 ASSERT(args->length() == 2);
3350 // We will leave the separator on the stack until the end of the function. 3421 // We will leave the separator on the stack until the end of the function.
3351 VisitForStackValue(args->at(1)); 3422 VisitForStackValue(args->at(1));
3352 // Load this to eax (= array) 3423 // Load this to eax (= array)
3353 VisitForAccumulatorValue(args->at(0)); 3424 VisitForAccumulatorValue(args->at(0));
3354
3355 // All aliases of the same register have disjoint lifetimes. 3425 // All aliases of the same register have disjoint lifetimes.
3356 Register array = eax; 3426 Register array = eax;
3357 Register result_pos = no_reg; 3427 Register elements = no_reg; // Will be eax.
3358 3428
3359 Register index = edi; 3429 Register index = edx;
3360 3430
3361 Register current_string_length = ecx; // Will be ecx when live. 3431 Register string_length = ecx;
3362 3432
3363 Register current_string = edx; 3433 Register string = esi;
3364 3434
3365 Register scratch = ebx; 3435 Register scratch = ebx;
3366 3436
3367 Register scratch_2 = esi; 3437 Register array_length = edi;
3368 Register new_padding_chars = scratch_2; 3438 Register result_pos = no_reg; // Will be edi.
3369 3439
3370 Operand separator = Operand(esp, 4 * kPointerSize); // Already pushed. 3440 // Separator operand is already pushed.
3371 Operand elements = Operand(esp, 3 * kPointerSize); 3441 Operand separator_operand = Operand(esp, 2 * kPointerSize);
3372 Operand result = Operand(esp, 2 * kPointerSize); 3442 Operand result_operand = Operand(esp, 1 * kPointerSize);
3373 Operand padding_chars = Operand(esp, 1 * kPointerSize); 3443 Operand array_length_operand = Operand(esp, 0);
3374 Operand array_length = Operand(esp, 0); 3444 __ sub(Operand(esp), Immediate(2 * kPointerSize));
3375 __ sub(Operand(esp), Immediate(4 * kPointerSize)); 3445 __ cld();
3376 3446 // Check that the array is a JSArray
3377
3378 // Check that eax is a JSArray
3379 __ test(array, Immediate(kSmiTagMask)); 3447 __ test(array, Immediate(kSmiTagMask));
3380 __ j(zero, &bailout); 3448 __ j(zero, &bailout);
3381 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch); 3449 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3382 __ j(not_equal, &bailout); 3450 __ j(not_equal, &bailout);
3383 3451
3384 // Check that the array has fast elements. 3452 // Check that the array has fast elements.
3385 __ test_b(FieldOperand(scratch, Map::kBitField2Offset), 3453 __ test_b(FieldOperand(scratch, Map::kBitField2Offset),
3386 1 << Map::kHasFastElements); 3454 1 << Map::kHasFastElements);
3387 __ j(zero, &bailout); 3455 __ j(zero, &bailout);
3388 3456
3389 // If the array is empty, return the empty string. 3457 // If the array has length zero, return the empty string.
3390 __ mov(scratch, FieldOperand(array, JSArray::kLengthOffset)); 3458 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3391 __ sar(scratch, 1); 3459 __ sar(array_length, 1);
3392 Label non_trivial; 3460 __ j(not_zero, &non_trivial_array);
3393 __ j(not_zero, &non_trivial); 3461 __ mov(result_operand, Factory::empty_string());
3394 __ mov(result, Factory::empty_string()); 3462 __ jmp(&done);
3395 __ jmp(&done); 3463
3396 3464 // Save the array length.
3397 __ bind(&non_trivial); 3465 __ bind(&non_trivial_array);
3398 __ mov(array_length, scratch); 3466 __ mov(array_length_operand, array_length);
3399 3467
3400 __ mov(scratch, FieldOperand(array, JSArray::kElementsOffset)); 3468 // Save the FixedArray containing array's elements.
3401 __ mov(elements, scratch);
3402
3403 // End of array's live range. 3469 // End of array's live range.
3404 result_pos = array; 3470 elements = array;
3471 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3405 array = no_reg; 3472 array = no_reg;
3406 3473
3407 3474
3408 // Check that the separator is a flat ascii string. 3475 // Check that all array elements are sequential ASCII strings, and
3409 __ mov(current_string, separator); 3476 // accumulate the sum of their lengths, as a smi-encoded value.
3410 __ test(current_string, Immediate(kSmiTagMask)); 3477 __ Set(index, Immediate(0));
3478 __ Set(string_length, Immediate(0));
3479 // Loop condition: while (index < length).
3480 // Live loop registers: index, array_length, string,
3481 // scratch, string_length, elements.
3482 __ jmp(&loop_condition);
3483 __ bind(&loop);
3484 __ cmp(index, Operand(array_length));
3485 __ j(greater_equal, &done);
3486
3487 __ mov(string, FieldOperand(elements, index,
3488 times_pointer_size,
3489 FixedArray::kHeaderSize));
3490 __ test(string, Immediate(kSmiTagMask));
3411 __ j(zero, &bailout); 3491 __ j(zero, &bailout);
3412 __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset)); 3492 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3413 __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); 3493 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3414 __ and_(scratch, Immediate( 3494 __ and_(scratch, Immediate(
3415 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask)); 3495 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3416 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag); 3496 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
3417 __ j(not_equal, &bailout); 3497 __ j(not_equal, &bailout);
3418 // If the separator is the empty string, replace it with NULL. 3498 __ add(string_length,
3419 // The test for NULL is quicker than the empty string test, in a loop. 3499 FieldOperand(string, SeqAsciiString::kLengthOffset));
3420 __ cmp(FieldOperand(current_string, SeqAsciiString::kLengthOffset), 3500 __ j(overflow, &bailout);
3421 Immediate(0)); 3501 __ add(Operand(index), Immediate(1));
3422 Label separator_checked; 3502 __ bind(&loop_condition);
3423 __ j(not_zero, &separator_checked); 3503 __ cmp(index, Operand(array_length));
3424 __ mov(separator, Immediate(0)); 3504 __ j(less, &loop);
3425 __ bind(&separator_checked); 3505
3426 3506 // If array_length is 1, return elements[0], a string.
3427 // Check that elements[0] is a flat ascii string, and copy it in new space. 3507 __ cmp(array_length, 1);
3428 __ mov(scratch, elements); 3508 __ j(not_equal, &not_size_one_array);
3429 __ mov(current_string, FieldOperand(scratch, FixedArray::kHeaderSize)); 3509 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3430 __ test(current_string, Immediate(kSmiTagMask)); 3510 __ mov(result_operand, scratch);
3511 __ jmp(&done);
3512
3513 __ bind(&not_size_one_array);
3514
3515 // End of array_length live range.
3516 result_pos = array_length;
3517 array_length = no_reg;
3518
3519 // Live registers:
3520 // string_length: Sum of string lengths, as a smi.
3521 // elements: FixedArray of strings.
3522
3523 // Check that the separator is a flat ASCII string.
3524 __ mov(string, separator_operand);
3525 __ test(string, Immediate(kSmiTagMask));
3431 __ j(zero, &bailout); 3526 __ j(zero, &bailout);
3432 __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset)); 3527 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3433 __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); 3528 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3434 __ and_(scratch, Immediate( 3529 __ and_(scratch, Immediate(
3435 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask)); 3530 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3436 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag); 3531 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
3437 __ j(not_equal, &bailout); 3532 __ j(not_equal, &bailout);
3438 3533
3439 // Allocate space to copy it. Round up the size to the alignment granularity. 3534 // Add (separator length times array_length) - separator length
3440 __ mov(current_string_length, 3535 // to string_length.
3441 FieldOperand(current_string, String::kLengthOffset)); 3536 __ mov(scratch, separator_operand);
3442 __ shr(current_string_length, 1); 3537 __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
3443 3538 __ sub(string_length, Operand(scratch)); // May be negative, temporarily.
3539 __ imul(scratch, array_length_operand);
3540 __ j(overflow, &bailout);
3541 __ add(string_length, Operand(scratch));
3542 __ j(overflow, &bailout);
3543
3544 __ shr(string_length, 1);
3444 // Live registers and stack values: 3545 // Live registers and stack values:
3445 // current_string_length: length of elements[0]. 3546 // string_length
3446 3547 // elements
3447 // New string result in new space = elements[0] 3548 __ AllocateAsciiString(result_pos, string_length, scratch,
3448 __ AllocateAsciiString(result_pos, current_string_length, scratch_2, 3549 index, string, &bailout);
3449 index, no_reg, &bailout); 3550 __ mov(result_operand, result_pos);
3450 __ mov(result, result_pos); 3551 __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3451 3552
3452 // Adjust current_string_length to include padding bytes at end of string. 3553
3453 // Keep track of the number of padding bytes. 3554 __ mov(string, separator_operand);
3454 __ mov(new_padding_chars, current_string_length); 3555 __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
3455 __ add(Operand(current_string_length), Immediate(kObjectAlignmentMask)); 3556 Immediate(Smi::FromInt(1)));
3456 __ and_(Operand(current_string_length), Immediate(~kObjectAlignmentMask)); 3557 __ j(equal, &one_char_separator);
3457 __ sub(new_padding_chars, Operand(current_string_length)); 3558 __ j(greater, &long_separator);
3458 __ neg(new_padding_chars); 3559
3459 __ mov(padding_chars, new_padding_chars); 3560
3460 3561 // Empty separator case
3461 Label copy_loop_1_done; 3562 __ mov(index, Immediate(0));
3462 Label copy_loop_1; 3563 __ jmp(&loop_1_condition);
3463 __ test(current_string_length, Operand(current_string_length)); 3564 // Loop condition: while (index < length).
3464 __ j(zero, &copy_loop_1_done); 3565 __ bind(&loop_1);
3465 __ bind(&copy_loop_1); 3566 // Each iteration of the loop concatenates one string to the result.
3466 __ sub(Operand(current_string_length), Immediate(kPointerSize)); 3567 // Live values in registers:
3467 __ mov(scratch, FieldOperand(current_string, current_string_length, 3568 // index: which element of the elements array we are adding to the result.
3468 times_1, SeqAsciiString::kHeaderSize)); 3569 // result_pos: the position to which we are currently copying characters.
3469 __ mov(FieldOperand(result_pos, current_string_length, 3570 // elements: the FixedArray of strings we are joining.
3470 times_1, SeqAsciiString::kHeaderSize), 3571
3471 scratch); 3572 // Get string = array[index].
3472 __ j(not_zero, &copy_loop_1); 3573 __ mov(string, FieldOperand(elements, index,
3473 __ bind(&copy_loop_1_done); 3574 times_pointer_size,
3474 3575 FixedArray::kHeaderSize));
3475 __ mov(index, Immediate(1)); 3576 __ mov(string_length,
3476 // Loop condition: while (index < length). 3577 FieldOperand(string, String::kLengthOffset));
3477 Label loop; 3578 __ shr(string_length, 1);
3478 __ bind(&loop); 3579 __ lea(string,
3479 __ cmp(index, array_length); 3580 FieldOperand(string, SeqAsciiString::kHeaderSize));
3480 __ j(greater_equal, &done); 3581 __ CopyBytes(string, result_pos, string_length, scratch);
3481 3582 __ add(Operand(index), Immediate(1));
3482 // If the separator is the empty string, signalled by NULL, skip it. 3583 __ bind(&loop_1_condition);
3483 Label separator_done; 3584 __ cmp(index, array_length_operand);
3484 __ mov(current_string, separator); 3585 __ j(less, &loop_1); // End while (index < length).
3485 __ test(current_string, Operand(current_string)); 3586 __ jmp(&done);
3486 __ j(zero, &separator_done); 3587
3487 3588
3488 // Append separator to result. It is known to be a flat ascii string. 3589
3489 __ AppendStringToTopOfNewSpace(current_string, current_string_length, 3590 // One-character separator case
3490 result_pos, scratch, scratch_2, result, 3591 __ bind(&one_char_separator);
3491 padding_chars, &bailout); 3592 // Replace separator with its ascii character value.
3492 __ bind(&separator_done); 3593 __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3493 3594 __ mov_b(separator_operand, scratch);
3494 // Add next element of array to the end of the result. 3595
3495 // Get current_string = array[index]. 3596 __ Set(index, Immediate(0));
3496 __ mov(scratch, elements); 3597 // Jump into the loop after the code that copies the separator, so the first
3497 __ mov(current_string, FieldOperand(scratch, index, 3598 // element is not preceded by a separator
3498 times_pointer_size, 3599 __ jmp(&loop_2_entry);
3499 FixedArray::kHeaderSize)); 3600 // Loop condition: while (index < length).
3500 // If current != flat ascii string drop result, return undefined. 3601 __ bind(&loop_2);
3501 __ test(current_string, Immediate(kSmiTagMask)); 3602 // Each iteration of the loop concatenates one string to the result.
3502 __ j(zero, &bailout); 3603 // Live values in registers:
3503 __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset)); 3604 // index: which element of the elements array we are adding to the result.
3504 __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); 3605 // result_pos: the position to which we are currently copying characters.
3505 __ and_(scratch, Immediate( 3606
3506 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask)); 3607 // Copy the separator character to the result.
3507 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag); 3608 __ mov_b(scratch, separator_operand);
3508 __ j(not_equal, &bailout); 3609 __ mov_b(Operand(result_pos, 0), scratch);
3509 3610 __ inc(result_pos);
3510 // Append current to the result. 3611
3511 __ AppendStringToTopOfNewSpace(current_string, current_string_length, 3612 __ bind(&loop_2_entry);
3512 result_pos, scratch, scratch_2, result, 3613 // Get string = array[index].
3513 padding_chars, &bailout); 3614 __ mov(string, FieldOperand(elements, index,
3514 __ add(Operand(index), Immediate(1)); 3615 times_pointer_size,
3515 __ jmp(&loop); // End while (index < length). 3616 FixedArray::kHeaderSize));
3617 __ mov(string_length,
3618 FieldOperand(string, String::kLengthOffset));
3619 __ shr(string_length, 1);
3620 __ lea(string,
3621 FieldOperand(string, SeqAsciiString::kHeaderSize));
3622 __ CopyBytes(string, result_pos, string_length, scratch);
3623 __ add(Operand(index), Immediate(1));
3624
3625 __ cmp(index, array_length_operand);
3626 __ j(less, &loop_2); // End while (index < length).
3627 __ jmp(&done);
3628
3629
3630 // Long separator case (separator is more than one character).
3631 __ bind(&long_separator);
3632
3633 __ Set(index, Immediate(0));
3634 // Jump into the loop after the code that copies the separator, so the first
3635 // element is not preceded by a separator
3636 __ jmp(&loop_3_entry);
3637 // Loop condition: while (index < length).
3638 __ bind(&loop_3);
3639 // Each iteration of the loop concatenates one string to the result.
3640 // Live values in registers:
3641 // index: which element of the elements array we are adding to the result.
3642 // result_pos: the position to which we are currently copying characters.
3643
3644 // Copy the separator to the result.
3645 __ mov(string, separator_operand);
3646 __ mov(string_length,
3647 FieldOperand(string, String::kLengthOffset));
3648 __ shr(string_length, 1);
3649 __ lea(string,
3650 FieldOperand(string, SeqAsciiString::kHeaderSize));
3651 __ CopyBytes(string, result_pos, string_length, scratch);
3652
3653 __ bind(&loop_3_entry);
3654 // Get string = array[index].
3655 __ mov(string, FieldOperand(elements, index,
3656 times_pointer_size,
3657 FixedArray::kHeaderSize));
3658 __ mov(string_length,
3659 FieldOperand(string, String::kLengthOffset));
3660 __ shr(string_length, 1);
3661 __ lea(string,
3662 FieldOperand(string, SeqAsciiString::kHeaderSize));
3663 __ CopyBytes(string, result_pos, string_length, scratch);
3664 __ add(Operand(index), Immediate(1));
3665
3666 __ cmp(index, array_length_operand);
3667 __ j(less, &loop_3); // End while (index < length).
3668 __ jmp(&done);
3669
3516 3670
3517 __ bind(&bailout); 3671 __ bind(&bailout);
3518 __ mov(result, Factory::undefined_value()); 3672 __ mov(result_operand, Factory::undefined_value());
3519 __ bind(&done); 3673 __ bind(&done);
3520 __ mov(eax, result); 3674 __ mov(eax, result_operand);
3521 // Drop temp values from the stack, and restore context register. 3675 // Drop temp values from the stack, and restore context register.
3522 __ add(Operand(esp), Immediate(5 * kPointerSize)); 3676 __ add(Operand(esp), Immediate(3 * kPointerSize));
3523 3677
3524 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 3678 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3525 context()->Plug(eax); 3679 context()->Plug(eax);
3526 } 3680 }
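
Note: the rewritten stub first validates every element and accumulates the total length (bailing out on overflow or non-flat-ASCII strings), handles the length-0 and length-1 arrays specially, then copies with one of three loops keyed on separator length: empty, single character, or longer. A compact C++ sketch of the same join strategy, with std::string standing in for the allocated SeqAsciiString (illustrative, not V8 code):

    #include <string>
    #include <vector>

    // Pre-size the result from the summed lengths, then pick a copy loop
    // based on the separator; the reserve() mirrors AllocateAsciiString.
    inline std::string FastJoinSketch(const std::vector<std::string>& elems,
                                      const std::string& sep) {
      if (elems.empty()) return std::string();  // empty-array fast path
      if (elems.size() == 1) return elems[0];   // size-one fast path
      size_t total = sep.size() * (elems.size() - 1);
      for (const std::string& s : elems) total += s.size();
      std::string result;
      result.reserve(total);
      result += elems[0];
      for (size_t i = 1; i < elems.size(); i++) {  // loop_2/loop_3 analogue
        result += sep;
        result += elems[i];
      }
      return result;
    }
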
3527 3681
3528 3682
3529 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3683 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3530 Handle<String> name = expr->name(); 3684 Handle<String> name = expr->name();
3531 if (name->length() > 0 && name->Get(0) == '_') { 3685 if (name->length() > 0 && name->Get(0) == '_') {
3532 Comment cmnt(masm_, "[ InlineRuntimeCall"); 3686 Comment cmnt(masm_, "[ InlineRuntimeCall");
(...skipping 31 matching lines...)
3564 context()->Plug(eax); 3718 context()->Plug(eax);
3565 } 3719 }
3566 3720
3567 3721
3568 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3722 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3569 switch (expr->op()) { 3723 switch (expr->op()) {
3570 case Token::DELETE: { 3724 case Token::DELETE: {
3571 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3725 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3572 Property* prop = expr->expression()->AsProperty(); 3726 Property* prop = expr->expression()->AsProperty();
3573 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); 3727 Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
3574 if (prop == NULL && var == NULL) { 3728
3729 if (prop != NULL) {
3730 if (prop->is_synthetic()) {
3731 // Result of deleting parameters is false, even when they rewrite
3732 // to accesses on the arguments object.
3733 context()->Plug(false);
3734 } else {
3735 VisitForStackValue(prop->obj());
3736 VisitForStackValue(prop->key());
3737 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
3738 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3739 context()->Plug(eax);
3740 }
3741 } else if (var != NULL) {
3742 // Delete of an unqualified identifier is disallowed in strict mode,
3743 // so this code can only be reached in non-strict mode.
3744 ASSERT(strict_mode_flag() == kNonStrictMode);
3745 if (var->is_global()) {
3746 __ push(GlobalObjectOperand());
3747 __ push(Immediate(var->name()));
3748 __ push(Immediate(Smi::FromInt(kNonStrictMode)));
3749 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3750 context()->Plug(eax);
3751 } else if (var->AsSlot() != NULL &&
3752 var->AsSlot()->type() != Slot::LOOKUP) {
3753 // Result of deleting non-global, non-dynamic variables is false.
3754 // The subexpression does not have side effects.
3755 context()->Plug(false);
3756 } else {
3757 // Non-global variable. Call the runtime to try to delete from the
3758 // context where the variable was introduced.
3759 __ push(context_register());
3760 __ push(Immediate(var->name()));
3761 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3762 context()->Plug(eax);
3763 }
3764 } else {
3575 // Result of deleting non-property, non-variable reference is true. 3765 // Result of deleting non-property, non-variable reference is true.
3576 // The subexpression may have side effects. 3766 // The subexpression may have side effects.
3577 VisitForEffect(expr->expression()); 3767 VisitForEffect(expr->expression());
3578 context()->Plug(true); 3768 context()->Plug(true);
3579 } else if (var != NULL &&
3580 !var->is_global() &&
3581 var->AsSlot() != NULL &&
3582 var->AsSlot()->type() != Slot::LOOKUP) {
3583 // Result of deleting non-global, non-dynamic variables is false.
3584 // The subexpression does not have side effects.
3585 context()->Plug(false);
3586 } else {
3587 // Property or variable reference. Call the delete builtin with
3588 // object and property name as arguments.
3589 if (prop != NULL) {
3590 VisitForStackValue(prop->obj());
3591 VisitForStackValue(prop->key());
3592 } else if (var->is_global()) {
3593 __ push(GlobalObjectOperand());
3594 __ push(Immediate(var->name()));
3595 } else {
3596 // Non-global variable. Call the runtime to look up the context
3597 // where the variable was introduced.
3598 __ push(context_register());
3599 __ push(Immediate(var->name()));
3600 __ CallRuntime(Runtime::kLookupContext, 2);
3601 __ push(eax);
3602 __ push(Immediate(var->name()));
3603 }
3604 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3605 context()->Plug(eax);
3606 } 3769 }
3607 break; 3770 break;
3608 } 3771 }
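Because the rewritten DELETE case now enumerates each situation inline, the following hedged C++ summary may ease review. ClassifyDelete and the outcome names are hypothetical; Property, Variable, and Slot are the AST types already used above.

    // Sketch of the dispatch the DELETE case above implements.
    enum DeleteOutcome {
      kConstantFalse,            // context()->Plug(false)
      kConstantTrueAfterEffect,  // VisitForEffect(...), then Plug(true)
      kDeleteBuiltin,            // InvokeBuiltin(Builtins::DELETE, ...)
      kDeleteContextSlot         // CallRuntime(Runtime::kDeleteContextSlot, 2)
    };

    DeleteOutcome ClassifyDelete(Property* prop, Variable* var) {
      if (prop != NULL) {
        // Deleting a rewritten parameter (synthetic arguments access) is
        // always false; any other property goes to the DELETE builtin.
        return prop->is_synthetic() ? kConstantFalse : kDeleteBuiltin;
      }
      if (var != NULL) {  // only reachable in non-strict mode
        if (var->is_global()) return kDeleteBuiltin;  // with kNonStrictMode
        if (var->AsSlot() != NULL && var->AsSlot()->type() != Slot::LOOKUP) {
          return kConstantFalse;  // non-global, non-dynamic variable
        }
        return kDeleteContextSlot;  // delete from the introducing context
      }
      return kConstantTrueAfterEffect;  // non-property, non-variable reference
    }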
3609 3772
3610 case Token::VOID: { 3773 case Token::VOID: {
3611 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 3774 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3612 VisitForEffect(expr->expression()); 3775 VisitForEffect(expr->expression());
3613 context()->Plug(Factory::undefined_value()); 3776 context()->Plug(Factory::undefined_value());
3614 break; 3777 break;
3615 } 3778 }
(...skipping 23 matching lines...)
3639 context()->Plug(eax); 3802 context()->Plug(eax);
3640 break; 3803 break;
3641 } 3804 }
3642 3805
3643 case Token::ADD: { 3806 case Token::ADD: {
3644 Comment cmt(masm_, "[ UnaryOperation (ADD)"); 3807 Comment cmt(masm_, "[ UnaryOperation (ADD)");
3645 VisitForAccumulatorValue(expr->expression()); 3808 VisitForAccumulatorValue(expr->expression());
3646 Label no_conversion; 3809 Label no_conversion;
3647 __ test(result_register(), Immediate(kSmiTagMask)); 3810 __ test(result_register(), Immediate(kSmiTagMask));
3648 __ j(zero, &no_conversion); 3811 __ j(zero, &no_conversion);
3649 __ push(result_register()); 3812 ToNumberStub convert_stub;
3650 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); 3813 __ CallStub(&convert_stub);
3651 __ bind(&no_conversion); 3814 __ bind(&no_conversion);
3652 context()->Plug(result_register()); 3815 context()->Plug(result_register());
3653 break; 3816 break;
3654 } 3817 }
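One detail worth noting in the ADD case: the old code pushed eax and invoked the TO_NUMBER builtin, whereas the new code calls ToNumberStub directly; as far as the surrounding code shows, the stub takes its operand in eax and leaves the result in eax, which is why the push disappears. A hedged C++ analogue of the fast path, with IsSmi and ConvertToNumber as hypothetical stand-ins:

    // Illustrative analogue: smis are already numbers, so only non-smi
    // operands pay for a conversion call.
    Object* UnaryPlus(Object* value) {
      if (IsSmi(value)) return value;  // test eax, kSmiTagMask; jz no_conversion
      return ConvertToNumber(value);   // ToNumberStub leaves the result in eax
    }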
3655 3818
3656 case Token::SUB: { 3819 case Token::SUB: {
3657 Comment cmt(masm_, "[ UnaryOperation (SUB)"); 3820 Comment cmt(masm_, "[ UnaryOperation (SUB)");
3658 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); 3821 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3659 UnaryOverwriteMode overwrite = 3822 UnaryOverwriteMode overwrite =
3660 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; 3823 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
(...skipping 75 matching lines...)
3736 __ push(Immediate(Smi::FromInt(0))); 3899 __ push(Immediate(Smi::FromInt(0)));
3737 } 3900 }
3738 if (assign_type == NAMED_PROPERTY) { 3901 if (assign_type == NAMED_PROPERTY) {
3739 // Put the object both on the stack and in the accumulator. 3902 // Put the object both on the stack and in the accumulator.
3740 VisitForAccumulatorValue(prop->obj()); 3903 VisitForAccumulatorValue(prop->obj());
3741 __ push(eax); 3904 __ push(eax);
3742 EmitNamedPropertyLoad(prop); 3905 EmitNamedPropertyLoad(prop);
3743 } else { 3906 } else {
3744 if (prop->is_arguments_access()) { 3907 if (prop->is_arguments_access()) {
3745 VariableProxy* obj_proxy = prop->obj()->AsVariableProxy(); 3908 VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3746 __ push(EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx)); 3909 MemOperand slot_operand =
3910 EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
3911 __ push(slot_operand);
3747 __ mov(eax, Immediate(prop->key()->AsLiteral()->handle())); 3912 __ mov(eax, Immediate(prop->key()->AsLiteral()->handle()));
3748 } else { 3913 } else {
3749 VisitForStackValue(prop->obj()); 3914 VisitForStackValue(prop->obj());
3750 VisitForAccumulatorValue(prop->key()); 3915 VisitForAccumulatorValue(prop->key());
3751 } 3916 }
3752 __ mov(edx, Operand(esp, 0)); 3917 __ mov(edx, Operand(esp, 0));
3753 __ push(eax); 3918 __ push(eax);
3754 EmitKeyedPropertyLoad(prop); 3919 EmitKeyedPropertyLoad(prop);
3755 } 3920 }
3756 } 3921 }
3757 3922
3758 // We need a second deoptimization point after loading the value 3923 // We need a second deoptimization point after loading the value
3759 // in case evaluating the property load may have a side effect. 3924 // in case evaluating the property load may have a side effect.
3760 PrepareForBailout(expr->increment(), TOS_REG); 3925 PrepareForBailout(expr->increment(), TOS_REG);
3761 3926
3762 // Call ToNumber only if operand is not a smi. 3927 // Call ToNumber only if operand is not a smi.
3763 NearLabel no_conversion; 3928 NearLabel no_conversion;
3764 if (ShouldInlineSmiCase(expr->op())) { 3929 if (ShouldInlineSmiCase(expr->op())) {
3765 __ test(eax, Immediate(kSmiTagMask)); 3930 __ test(eax, Immediate(kSmiTagMask));
3766 __ j(zero, &no_conversion); 3931 __ j(zero, &no_conversion);
3767 } 3932 }
3768 __ push(eax); 3933 ToNumberStub convert_stub;
3769 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); 3934 __ CallStub(&convert_stub);
3770 __ bind(&no_conversion); 3935 __ bind(&no_conversion);
3771 3936
3772 // Save result for postfix expressions. 3937 // Save result for postfix expressions.
3773 if (expr->is_postfix()) { 3938 if (expr->is_postfix()) {
3774 if (!context()->IsEffect()) { 3939 if (!context()->IsEffect()) {
3775 // Save the result on the stack. If we have a named or keyed property, 3940 // Save the result on the stack. If we have a named or keyed property,
3776 // we store the result under the receiver that is currently on top 3941 // we store the result under the receiver that is currently on top
3777 // of the stack. 3942 // of the stack.
3778 switch (assign_type) { 3943 switch (assign_type) {
3779 case VARIABLE: 3944 case VARIABLE:
(...skipping 32 matching lines...)
3812 __ add(Operand(eax), Immediate(Smi::FromInt(1))); 3977 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
3813 } 3978 }
3814 } 3979 }
3815 3980
3816 // Record position before stub call. 3981 // Record position before stub call.
3817 SetSourcePosition(expr->position()); 3982 SetSourcePosition(expr->position());
3818 3983
3819 // Call stub for +1/-1. 3984 // Call stub for +1/-1.
3820 __ mov(edx, eax); 3985 __ mov(edx, eax);
3821 __ mov(eax, Immediate(Smi::FromInt(1))); 3986 __ mov(eax, Immediate(Smi::FromInt(1)));
3822 TypeRecordingBinaryOpStub stub(expr->binary_op(), 3987 TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
3823 NO_OVERWRITE);
3824 EmitCallIC(stub.GetCode(), &patch_site); 3988 EmitCallIC(stub.GetCode(), &patch_site);
3825 __ bind(&done); 3989 __ bind(&done);
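A condensed sketch of the increment sequence that ends at the label above; TrySmiAddOne and CallBinaryOpStub are hypothetical names, while the register assignments (old value in edx, Smi(1) in eax) are taken from the code just before the stub call.

    // Sketch: inline smi +1/-1 first; non-smi or overflowing values fall
    // back to TypeRecordingBinaryOpStub through the recorded patch site.
    Object* CountOperation(Object* old_value, Token::Value op) {
      Object* result;
      if (TrySmiAddOne(old_value, op, &result)) return result;  // fast case
      return CallBinaryOpStub(op, old_value, Smi::FromInt(1));  // edx, eax
    }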
3826 3990
3827 // Store the value returned in eax. 3991 // Store the value returned in eax.
3828 switch (assign_type) { 3992 switch (assign_type) {
3829 case VARIABLE: 3993 case VARIABLE:
3830 if (expr->is_postfix()) { 3994 if (expr->is_postfix()) {
3831 // Perform the assignment as if via '='. 3995 // Perform the assignment as if via '='.
3832 { EffectContext context(this); 3996 { EffectContext context(this);
3833 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3997 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
(...skipping 205 matching lines...)
4039 case Token::IN: 4203 case Token::IN:
4040 VisitForStackValue(expr->right()); 4204 VisitForStackValue(expr->right());
4041 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4205 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4042 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); 4206 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
4043 __ cmp(eax, Factory::true_value()); 4207 __ cmp(eax, Factory::true_value());
4044 Split(equal, if_true, if_false, fall_through); 4208 Split(equal, if_true, if_false, fall_through);
4045 break; 4209 break;
4046 4210
4047 case Token::INSTANCEOF: { 4211 case Token::INSTANCEOF: {
4048 VisitForStackValue(expr->right()); 4212 VisitForStackValue(expr->right());
4049 __ IncrementCounter(&Counters::instance_of_full, 1);
4050 InstanceofStub stub(InstanceofStub::kNoFlags); 4213 InstanceofStub stub(InstanceofStub::kNoFlags);
4051 __ CallStub(&stub); 4214 __ CallStub(&stub);
4052 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 4215 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4053 __ test(eax, Operand(eax)); 4216 __ test(eax, Operand(eax));
4054 // The stub returns 0 for true. 4217 // The stub returns 0 for true.
4055 Split(zero, if_true, if_false, fall_through); 4218 Split(zero, if_true, if_false, fall_through);
4056 break; 4219 break;
4057 } 4220 }
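Since the zero-for-true convention is easy to misread next to the IN case above (which compares eax against true_value), a one-line sketch of the mapping, illustrative only:

    // InstanceofStub signals 'x instanceof F' == true by returning 0 in eax.
    bool InstanceofResultIsTrue(int stub_return_value) {
      return stub_return_value == 0;
    }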
4058 4221
4059 default: { 4222 default: {
(...skipping 204 matching lines...)
4264 // And return. 4427 // And return.
4265 __ ret(0); 4428 __ ret(0);
4266 } 4429 }
4267 4430
4268 4431
4269 #undef __ 4432 #undef __
4270 4433
4271 } } // namespace v8::internal 4434 } } // namespace v8::internal
4272 4435
4273 #endif // V8_TARGET_ARCH_IA32 4436 #endif // V8_TARGET_ARCH_IA32