Chromium Code Reviews

Unified diff: src/a64/full-codegen-a64.cc

Issue 196133017: Experimental parser: merge r19949 (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 68 matching lines...)
     InstructionAccurateScope scope(masm_, 1);
     ASSERT(!info_emitted_);
     ASSERT(reg.Is64Bits());
     ASSERT(!reg.Is(csp));
     reg_ = reg;
     __ bind(&patch_site_);
     __ tbnz(xzr, 0, target);  // Never taken before patched.
   }
 
   void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
-    // We need to use ip0, so don't allow access to the MacroAssembler.
-    InstructionAccurateScope scope(masm_);
-    __ orr(ip0, reg1, reg2);
-    EmitJumpIfNotSmi(ip0, target);
+    UseScratchRegisterScope temps(masm_);
+    Register temp = temps.AcquireX();
+    __ Orr(temp, reg1, reg2);
+    EmitJumpIfNotSmi(temp, target);
   }
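
Note on the hunk above: instead of hard-coding ip0, the new code borrows a temporary from the assembler through UseScratchRegisterScope, which hands the register back automatically when the scope ends, so nested code can no longer silently clobber it. A minimal standalone sketch of that RAII pattern (a simplified model, not the real MacroAssembler API):

    #include <bitset>
    #include <cassert>

    // Simplified stand-in for the assembler's list of free scratch registers.
    struct ScratchPool {
      std::bitset<32> available;  // One bit per X register.
    };

    // Hypothetical RAII scope modelled on UseScratchRegisterScope: registers
    // acquired here are handed back when the scope is destroyed.
    class ScratchScope {
     public:
      explicit ScratchScope(ScratchPool* pool)
          : pool_(pool), old_available_(pool->available) {}
      ~ScratchScope() { pool_->available = old_available_; }  // Release all.

      int AcquireX() {
        for (int code = 0; code < 32; ++code) {
          if (pool_->available[code]) {
            pool_->available[code] = false;  // Mark as in use.
            return code;
          }
        }
        assert(false && "no scratch register free");
        return -1;
      }

     private:
      ScratchPool* pool_;
      std::bitset<32> old_available_;
    };

    int main() {
      ScratchPool pool;
      pool.available.set(16);  // ip0/ip1 are the usual A64 scratch registers.
      pool.available.set(17);
      ScratchScope temps(&pool);
      int temp = temps.AcquireX();  // Borrows x16; released at scope end.
      return temp == 16 ? 0 : 1;
    }
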
 
   void EmitPatchInfo() {
-    Assembler::BlockConstPoolScope scope(masm_);
+    Assembler::BlockPoolsScope scope(masm_);
     InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
 #ifdef DEBUG
     info_emitted_ = true;
 #endif
   }
 
  private:
   MacroAssembler* masm_;
   Label patch_site_;
   Register reg_;
(...skipping 15 matching lines...)
 //  - jssp: stack pointer.
 //  - lr: return address.
 //
 // The function builds a JS frame. See JavaScriptFrameConstants in
 // frames-arm.h for its layout.
 void FullCodeGenerator::Generate() {
   CompilationInfo* info = info_;
   handler_table_ =
       isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
 
-  InitializeFeedbackVector();
-
   profiling_counter_ = isolate()->factory()->NewCell(
       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
   SetFunctionPosition(function());
   Comment cmnt(masm_, "[ Function compiled by full code generator");
 
   ProfileEntryHookStub::MaybeCallEntryHook(masm_);
 
 #ifdef DEBUG
   if (strlen(FLAG_stop_at) > 0 &&
       info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
     __ Debug("stop-at", __LINE__, BREAK);
   }
 #endif
 
-  // Classic mode functions and builtins need to replace the receiver with the
+  // Sloppy mode functions and builtins need to replace the receiver with the
   // global proxy when called as functions (without an explicit receiver
   // object).
-  if (info->is_classic_mode() && !info->is_native()) {
+  if (info->strict_mode() == SLOPPY && !info->is_native()) {
     Label ok;
-    int receiver_offset = info->scope()->num_parameters() * kXRegSizeInBytes;
+    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
     __ Peek(x10, receiver_offset);
     __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
 
     __ Ldr(x10, GlobalObjectMemOperand());
     __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset));
     __ Poke(x10, receiver_offset);
 
     __ Bind(&ok);
   }
 
(...skipping 80 matching lines...)
     int offset = num_parameters * kPointerSize;
     __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
     __ Mov(x1, Operand(Smi::FromInt(num_parameters)));
     __ Push(x3, x2, x1);
 
     // Arguments to ArgumentsAccessStub:
     //   function, receiver address, parameter count.
     // The stub will rewrite receiver and parameter count if the previous
     // stack frame was an arguments adapter frame.
     ArgumentsAccessStub::Type type;
-    if (!is_classic_mode()) {
+    if (strict_mode() == STRICT) {
       type = ArgumentsAccessStub::NEW_STRICT;
     } else if (function()->has_duplicate_parameters()) {
-      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
+      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
     } else {
-      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
+      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
     }
     ArgumentsAccessStub stub(type);
     __ CallStub(&stub);
 
     SetVar(arguments, x0, x1, x2);
   }
 
   if (FLAG_trace) {
     __ CallRuntime(Runtime::kTraceEnter, 0);
   }
 
 
   // Visit the declarations and body unless there is an illegal
   // redeclaration.
   if (scope()->HasIllegalRedeclaration()) {
     Comment cmnt(masm_, "[ Declarations");
     scope()->VisitIllegalRedeclaration(this);
 
   } else {
     PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
     { Comment cmnt(masm_, "[ Declarations");
       if (scope()->is_function_scope() && scope()->function() != NULL) {
         VariableDeclaration* function = scope()->function();
         ASSERT(function->proxy()->var()->mode() == CONST ||
-               function->proxy()->var()->mode() == CONST_HARMONY);
+               function->proxy()->var()->mode() == CONST_LEGACY);
         ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
         VisitVariableDeclaration(function);
       }
       VisitDeclarations(scope()->declarations());
     }
   }
 
   { Comment cmnt(masm_, "[ Stack check");
     PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
     Label ok;
(...skipping 48 matching lines...)
   __ Mov(x3, Operand(Smi::FromInt(reset_value)));
   __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
 }
 
 
 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                 Label* back_edge_target) {
   ASSERT(jssp.Is(__ StackPointer()));
   Comment cmnt(masm_, "[ Back edge bookkeeping");
   // Block literal pools whilst emitting back edge code.
-  Assembler::BlockConstPoolScope block_const_pool(masm_);
+  Assembler::BlockPoolsScope block_const_pool(masm_);
   Label ok;
 
   ASSERT(back_edge_target->is_bound());
   int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
   int weight = Min(kMaxBackEdgeWeight,
                    Max(1, distance / kCodeSizeMultiplier));
   EmitProfilingCounterDecrement(weight);
   __ B(pl, &ok);
   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
 
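
The weight fed to EmitProfilingCounterDecrement above scales with the distance back to the loop header, clamped to kMaxBackEdgeWeight, so big loop bodies drain the interrupt budget faster. A standalone sketch of the arithmetic (the constant values here are illustrative; the real ones live in the V8 headers):

    #include <algorithm>
    #include <cstdio>

    int main() {
      // Illustrative values; the real constants live in the V8 sources.
      const int kMaxBackEdgeWeight = 127;
      const int kCodeSizeMultiplier = 149;  // Roughly: bytes of code per tick.

      // A back edge 1200 bytes after its target decrements the profiling
      // counter by 8; tiny loops still cost at least 1; huge ones are clamped.
      for (int distance : {40, 1200, 100000}) {
        int weight = std::min(kMaxBackEdgeWeight,
                              std::max(1, distance / kCodeSizeMultiplier));
        std::printf("distance=%6d -> weight=%d\n", distance, weight);
      }
      return 0;
    }
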
(...skipping 56 matching lines...)
       CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
       __ RecordJSReturn();
       // This code is generated using Assembler methods rather than Macro
       // Assembler methods because it will be patched later on, and so the size
       // of the generated code must be consistent.
       const Register& current_sp = __ StackPointer();
       // Nothing ensures 16 bytes alignment here.
       ASSERT(!current_sp.Is(csp));
       __ mov(current_sp, fp);
       int no_frame_start = masm_->pc_offset();
-      __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSizeInBytes, PostIndex));
+      __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
       // Drop the arguments and receiver and return.
       // TODO(all): This implementation is overkill as it supports 2**31+1
       // arguments, consider how to improve it without creating a security
       // hole.
       __ LoadLiteral(ip0, 3 * kInstructionSize);
       __ add(current_sp, current_sp, ip0);
       __ ret();
-      __ dc64(kXRegSizeInBytes * (info_->scope()->num_parameters() + 1));
+      __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1));
       info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
     }
   }
 }
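
In the return sequence above, the drop count is not encoded in the add instruction itself: LoadLiteral reads a 64-bit word placed 3 * kInstructionSize past the load, which is exactly the dc64 word emitted after ret. Keeping the sequence a fixed size is what makes it patchable. A rough standalone model of the layout arithmetic (illustrative only):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int kInstructionSize = 4;  // Every A64 instruction is 4 bytes.
      const int kXRegSize = 8;         // One stack slot per X register.
      const int num_parameters = 2;    // e.g. function f(a, b).

      // Layout of the patchable sequence emitted above:
      //   pc + 0  : LoadLiteral ip0, pc + 3 * kInstructionSize
      //   pc + 4  : add  sp, sp, ip0
      //   pc + 8  : ret
      //   pc + 12 : dc64 <bytes to drop>   <- the word the load reads
      uint64_t drop_bytes = kXRegSize * (num_parameters + 1);  // Args + receiver.
      std::printf("literal at pc+%d, drops %llu bytes\n",
                  3 * kInstructionSize, (unsigned long long)drop_bytes);
      return 0;
    }
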
 
 
 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
 }
 
(...skipping 238 matching lines...)
     __ B(InvertCondition(cond), if_false);
   } else {
     __ B(cond, if_true);
     __ B(if_false);
   }
 }
 
 
 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
   // Offset is negative because higher indexes are at lower addresses.
-  int offset = -var->index() * kXRegSizeInBytes;
+  int offset = -var->index() * kXRegSize;
   // Adjust by a (parameter or local) base offset.
   if (var->IsParameter()) {
     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
   } else {
     offset += JavaScriptFrameConstants::kLocal0Offset;
   }
   return MemOperand(fp, offset);
 }
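
A worked example of StackOperand's arithmetic may help: parameters end up at positive fp offsets (the caller's side of the frame, past the receiver), locals at negative ones. A standalone sketch, with an assumed stand-in value for JavaScriptFrameConstants::kLocal0Offset:

    #include <cstdio>

    // Standalone sketch of StackOperand()'s offset arithmetic. The local base
    // offset is an illustrative stand-in for JavaScriptFrameConstants.
    int StackOffset(bool is_parameter, int index, int num_parameters) {
      const int kXRegSize = 8;
      const int kPointerSize = 8;
      const int kLocal0Offset = -2 * kPointerSize;  // Assumed, below saved slots.

      // Higher indexes live at lower addresses, hence the negation.
      int offset = -index * kXRegSize;
      if (is_parameter) {
        offset += (num_parameters + 1) * kPointerSize;  // +1 skips the receiver.
      } else {
        offset += kLocal0Offset;
      }
      return offset;
    }

    int main() {
      // f(a, b): parameter 0 of 2 sits above fp; local 0 sits below it.
      std::printf("param0: fp%+d\n", StackOffset(true, 0, 2));
      std::printf("local0: fp%+d\n", StackOffset(false, 0, 2));
      return 0;
    }
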
 
 
(...skipping 77 matching lines...)
 
 
 void FullCodeGenerator::VisitVariableDeclaration(
     VariableDeclaration* declaration) {
   // If it was not possible to allocate the variable at compile time, we
   // need to "declare" it at runtime to make sure it actually exists in the
   // local context.
   VariableProxy* proxy = declaration->proxy();
   VariableMode mode = declaration->mode();
   Variable* variable = proxy->var();
-  bool hole_init = (mode == CONST) || (mode == CONST_HARMONY) || (mode == LET);
+  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
 
   switch (variable->location()) {
     case Variable::UNALLOCATED:
       globals_->Add(variable->name(), zone());
       globals_->Add(variable->binding_needs_init()
                         ? isolate()->factory()->the_hole_value()
                         : isolate()->factory()->undefined_value(),
                     zone());
       break;
 
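
hole_init is what makes the later read barrier work: let/const (and legacy const) slots start out holding the hole, and a load that sees the hole either yields undefined (legacy const) or throws a ReferenceError (let/const), as in the variable-load paths further down. A standalone model of that protocol (illustrative only, not the V8 API; 0 stands in for undefined):

    #include <optional>
    #include <stdexcept>
    #include <string>

    // Illustrative model of a hole-initialized binding: empty optional = hole.
    class Binding {
     public:
      enum Mode { LET, CONST, CONST_LEGACY };
      explicit Binding(Mode mode) : mode_(mode) {}  // Declared, uninitialized.

      void Initialize(int value) { value_ = value; }

      // The "read barrier": what a load does when it sees the hole.
      int Load(const std::string& name) const {
        if (value_) return *value_;
        if (mode_ == CONST_LEGACY) return 0;  // Legacy const reads as undefined.
        throw std::runtime_error(name + " is not initialized");  // TDZ error.
      }

     private:
      Mode mode_;
      std::optional<int> value_;
    };
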
(...skipping 349 matching lines...)
   __ Push(x2, x1, x0);
   __ B(&loop);
 
   __ Bind(&no_descriptors);
   __ Drop(1);
   __ B(&exit);
 
   // We got a fixed array in register x0. Iterate through that.
   __ Bind(&fixed_array);
 
-  Handle<Object> feedback = Handle<Object>(
-      Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
-      isolate());
-  StoreFeedbackVectorSlot(slot, feedback);
   __ LoadObject(x1, FeedbackVector());
-  __ Mov(x10, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
+  __ Mov(x10, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
   __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
 
   __ Mov(x1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
   __ Peek(x10, 0);  // Get enumerated object.
   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
   // TODO(all): similar check was done already. Can we avoid it here?
   __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
   ASSERT(Smi::FromInt(0) == 0);
   __ CzeroX(x1, le);  // Zero indicates proxy.
   __ Push(x1, x0);  // Smi and array
   __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset));
   __ Push(x1, xzr);  // Fixed array length (as smi) and initial index.
 
   // Generate code for doing the condition check.
   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
   __ Bind(&loop);
   // Load the current count to x0, load the length to x1.
   __ PeekPair(x0, x1, 0);
   __ Cmp(x0, x1);  // Compare to the array length.
   __ B(hs, loop_statement.break_label());
 
   // Get the current entry of the array into register x3.
-  __ Peek(x10, 2 * kXRegSizeInBytes);
+  __ Peek(x10, 2 * kXRegSize);
   __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
   __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
 
   // Get the expected map from the stack or a smi in the
   // permanent slow case into register x10.
-  __ Peek(x2, 3 * kXRegSizeInBytes);
+  __ Peek(x2, 3 * kXRegSize);
 
   // Check if the expected map still matches that of the enumerable.
   // If not, we may have to filter the key.
   Label update_each;
-  __ Peek(x1, 4 * kXRegSizeInBytes);
+  __ Peek(x1, 4 * kXRegSize);
   __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
   __ Cmp(x11, x2);
   __ B(eq, &update_each);
 
   // For proxies, no filtering is done.
   // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
   STATIC_ASSERT(kSmiTag == 0);
   __ Cbz(x2, &update_each);
 
   // Convert the entry to a string or (smi) 0 if it isn't a property
(...skipping 104 matching lines...)
   // nested functions that don't need literals cloning. If we're running with
   // the --always-opt or the --prepare-always-opt flag, we need to use the
   // runtime function so that the new function we are creating here gets a
   // chance to have its code optimized and doesn't just get a copy of the
   // existing unoptimized code.
   if (!FLAG_always_opt &&
       !FLAG_prepare_always_opt &&
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->language_mode(), info->is_generator());
+    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
     __ Mov(x2, Operand(info));
     __ CallStub(&stub);
   } else {
     __ Mov(x11, Operand(info));
     __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
                                : Heap::kFalseValueRootIndex);
     __ Push(cp, x11, x10);
     __ CallRuntime(Runtime::kNewClosure, 3);
   }
   context()->Plug(x0);
 }
 
 
 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
   Comment cmnt(masm_, "[ VariableProxy");
   EmitVariableLoad(expr);
 }
 
 
 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                       TypeofState typeof_state,
                                                       Label* slow) {
   Register current = cp;
   Register next = x10;
   Register temp = x11;
 
   Scope* s = scope();
   while (s != NULL) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_non_strict_eval()) {
+      if (s->calls_sloppy_eval()) {
         // Check that extension is NULL.
         __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
         __ Cbnz(temp, slow);
       }
       // Load next context in chain.
       __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
       // Walk the rest of the chain without clobbering cp.
       current = next;
     }
     // If no outer scope calls eval, we do not need to check more
     // context extensions.
-    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
+    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
     s = s->outer_scope();
   }
 
   if (s->is_eval_scope()) {
     Label loop, fast;
     __ Mov(next, current);
 
     __ Bind(&loop);
     // Terminate at native context.
     __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
(...skipping 17 matching lines...)
 
 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                 Label* slow) {
   ASSERT(var->IsContextSlot());
   Register context = cp;
   Register next = x10;
   Register temp = x11;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
     if (s->num_heap_slots() > 0) {
-      if (s->calls_non_strict_eval()) {
+      if (s->calls_sloppy_eval()) {
         // Check that extension is NULL.
         __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
         __ Cbnz(temp, slow);
       }
       __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
       // Walk the rest of the chain without clobbering cp.
       context = next;
     }
   }
   // Check that last extension is NULL.
(...skipping 15 matching lines...)
   // eval-introduced variables. Eval is used a lot without
   // introducing variables. In those cases, we do not want to
   // perform a runtime call for all variables in the scope
   // containing the eval.
   if (var->mode() == DYNAMIC_GLOBAL) {
     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
     __ B(done);
   } else if (var->mode() == DYNAMIC_LOCAL) {
     Variable* local = var->local_if_not_shadowed();
     __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
-    if (local->mode() == LET ||
-        local->mode() == CONST ||
-        local->mode() == CONST_HARMONY) {
+    if (local->mode() == LET || local->mode() == CONST ||
+        local->mode() == CONST_LEGACY) {
       __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
-      if (local->mode() == CONST) {
+      if (local->mode() == CONST_LEGACY) {
         __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
-      } else {  // LET || CONST_HARMONY
+      } else {  // LET || CONST
         __ Mov(x0, Operand(var->name()));
         __ Push(x0);
         __ CallRuntime(Runtime::kThrowReferenceError, 1);
       }
     }
     __ B(done);
   }
 }
 
 
(...skipping 46 matching lines...)
       // binding is initialized:
       //   function() { f(); let x = 1; function f() { x = 2; } }
       //
       bool skip_init_check;
       if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
         skip_init_check = false;
       } else {
         // Check that we always have valid source position.
         ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
         ASSERT(proxy->position() != RelocInfo::kNoPosition);
-        skip_init_check = var->mode() != CONST &&
+        skip_init_check = var->mode() != CONST_LEGACY &&
             var->initializer_position() < proxy->position();
       }
 
       if (!skip_init_check) {
         // Let and const need a read barrier.
         GetVar(x0, var);
         Label done;
         __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
-        if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+        if (var->mode() == LET || var->mode() == CONST) {
           // Throw a reference error when using an uninitialized let/const
           // binding in harmony mode.
           __ Mov(x0, Operand(var->name()));
           __ Push(x0);
           __ CallRuntime(Runtime::kThrowReferenceError, 1);
           __ Bind(&done);
         } else {
           // Uninitialized const bindings outside of harmony mode are unholed.
-          ASSERT(var->mode() == CONST);
+          ASSERT(var->mode() == CONST_LEGACY);
           __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
           __ Bind(&done);
         }
         context()->Plug(x0);
         break;
       }
     }
     context()->Plug(var);
     break;
   }
(...skipping 86 matching lines...)
   int flags = expr->fast_elements()
       ? ObjectLiteral::kFastElements
       : ObjectLiteral::kNoFlags;
   flags |= expr->has_function()
       ? ObjectLiteral::kHasFunction
       : ObjectLiteral::kNoFlags;
   __ Mov(x0, Operand(Smi::FromInt(flags)));
   int properties_count = constant_properties->length() / 2;
   const int max_cloned_properties =
       FastCloneShallowObjectStub::kMaximumClonedProperties;
-  if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
-      (expr->depth() > 1) || Serializer::enabled() ||
-      (flags != ObjectLiteral::kFastElements) ||
-      (properties_count > max_cloned_properties)) {
+  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
+      flags != ObjectLiteral::kFastElements ||
+      properties_count > max_cloned_properties) {
     __ Push(x3, x2, x1, x0);
     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
   } else {
     FastCloneShallowObjectStub stub(properties_count);
     __ CallStub(&stub);
   }
 
   // If result_saved is true the result is on top of the stack. If
   // result_saved is false the result is in x0.
   bool result_saved = false;
(...skipping 26 matching lines...)
             VisitForAccumulatorValue(value);
             __ Mov(x2, Operand(key->value()));
             __ Peek(x1, 0);
             CallStoreIC(key->LiteralFeedbackId());
             PrepareForBailoutForId(key->id(), NO_REGISTERS);
           } else {
             VisitForEffect(value);
           }
           break;
         }
-        // Duplicate receiver on stack.
-        __ Peek(x0, 0);
-        __ Push(x0);
-        VisitForStackValue(key);
-        VisitForStackValue(value);
         if (property->emit_store()) {
+          // Duplicate receiver on stack.
+          __ Peek(x0, 0);
+          __ Push(x0);
+          VisitForStackValue(key);
+          VisitForStackValue(value);
           __ Mov(x0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
           __ Push(x0);
           __ CallRuntime(Runtime::kSetProperty, 4);
         } else {
-          __ Drop(3);
+          VisitForEffect(key);
+          VisitForEffect(value);
         }
         break;
       case ObjectLiteral::Property::PROTOTYPE:
-        // Duplicate receiver on stack.
-        __ Peek(x0, 0);
-        // TODO(jbramley): This push shouldn't be necessary if we don't call the
-        // runtime below. In that case, skip it.
-        __ Push(x0);
-        VisitForStackValue(value);
         if (property->emit_store()) {
+          // Duplicate receiver on stack.
+          __ Peek(x0, 0);
+          __ Push(x0);
+          VisitForStackValue(value);
           __ CallRuntime(Runtime::kSetPrototype, 2);
         } else {
-          __ Drop(2);
+          VisitForEffect(value);
         }
         break;
       case ObjectLiteral::Property::GETTER:
         accessor_table.lookup(key)->second->getter = value;
         break;
       case ObjectLiteral::Property::SETTER:
         accessor_table.lookup(key)->second->setter = value;
         break;
     }
   }
(...skipping 273 matching lines...)
   __ Pop(left);
 
   // Perform combined smi check on both operands.
   __ Orr(x10, left, right);
   JumpPatchSite patch_site(masm_);
   patch_site.EmitJumpIfSmi(x10, &both_smis);
 
   __ Bind(&stub_call);
   BinaryOpICStub stub(op, mode);
   {
-    Assembler::BlockConstPoolScope scope(masm_);
+    Assembler::BlockPoolsScope scope(masm_);
     CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
     patch_site.EmitPatchInfo();
   }
   __ B(&done);
 
   __ Bind(&both_smis);
   // Smi case. This code works in the same way as the smi-smi case in the type
   // recording binary operation stub, see
   // BinaryOpStub::GenerateSmiSmiOperation for comments.
   // TODO(all): That doesn't exist any more. Where are the comments?
(...skipping 34 matching lines...)
       Label not_minus_zero, done;
       __ Smulh(x10, left, right);
       __ Cbnz(x10, &not_minus_zero);
       __ Eor(x11, left, right);
       __ Tbnz(x11, kXSignBit, &stub_call);
       STATIC_ASSERT(kSmiTag == 0);
       __ Mov(result, x10);
       __ B(&done);
       __ Bind(&not_minus_zero);
       __ Cls(x11, x10);
-      __ Cmp(x11, kXRegSize - kSmiShift);
+      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
       __ B(lt, &stub_call);
       __ SmiTag(result, x10);
       __ Bind(&done);
       break;
     }
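
The Smulh/Cls pair above is the overflow check for the inlined smi '*': on A64 a smi keeps its 32-bit payload in the upper word, so Smulh recovers the exact integer product, and counting leading sign bits tells whether that product still fits back into a smi. A standalone sketch of the same test on plain 64-bit integers (assumes the 32-bit payload layout):

    #include <cstdint>
    #include <cstdio>

    // Count leading sign bits (excluding the sign bit itself), like A64 CLS.
    int CountLeadingSignBits(int64_t x) {
      int count = 0;
      for (int bit = 62; bit >= 0; --bit) {
        if (((x >> bit) & 1) == ((x >> 63) & 1)) {
          ++count;
        } else {
          break;
        }
      }
      return count;
    }

    // Mimics the fast path: the product of two 32-bit smi payloads fits back
    // into a smi iff it has at least 32 redundant sign bits.
    bool SmiMulFits(int32_t v1, int32_t v2) {
      const int kXRegSizeInBits = 64;
      const int kSmiShift = 32;  // A64 smis keep the payload in the top word.
      int64_t product = (int64_t)v1 * v2;  // What Smulh recovers from the tags.
      return CountLeadingSignBits(product) >= kXRegSizeInBits - kSmiShift;
    }

    int main() {
      std::printf("%d\n", SmiMulFits(46341, 46341));  // 0: overflows 32 bits.
      std::printf("%d\n", SmiMulFits(-1000, 1000));   // 1: fits.
      return 0;
    }
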
     case Token::BIT_OR:
       __ Orr(result, left, right);
       break;
     case Token::BIT_AND:
       __ And(result, left, right);
(...skipping 10 matching lines...)
 }
 
 
 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                      Token::Value op,
                                      OverwriteMode mode) {
   __ Pop(x1);
   BinaryOpICStub stub(op, mode);
   JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
   {
-    Assembler::BlockConstPoolScope scope(masm_);
+    Assembler::BlockPoolsScope scope(masm_);
     CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
     patch_site.EmitPatchInfo();
   }
   context()->Plug(x0);
 }
 
 
 void FullCodeGenerator::EmitAssignment(Expression* expr) {
   // Invalid left-hand sides are rewritten to have a 'throw
   // ReferenceError' on the left-hand side.
(...skipping 30 matching lines...)
       __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
       CallStoreIC();
       break;
     }
     case KEYED_PROPERTY: {
       __ Push(x0);  // Preserve value.
       VisitForStackValue(prop->obj());
       VisitForAccumulatorValue(prop->key());
       __ Mov(x1, x0);
       __ Pop(x2, x0);
-      Handle<Code> ic = is_classic_mode()
+      Handle<Code> ic = strict_mode() == SLOPPY
           ? isolate()->builtins()->KeyedStoreIC_Initialize()
           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
       CallIC(ic);
       break;
     }
   }
   context()->Plug(x0);
 }
 
 
 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
     Variable* var, MemOperand location) {
   __ Str(result_register(), location);
   if (var->IsContextSlot()) {
     // RecordWrite may destroy all its register arguments.
     __ Mov(x10, result_register());
     int offset = Context::SlotOffset(var->index());
     __ RecordWriteContextSlot(
         x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
   }
 }
 
 
 void FullCodeGenerator::EmitCallStoreContextSlot(
-    Handle<String> name, LanguageMode mode) {
+    Handle<String> name, StrictMode strict_mode) {
   __ Mov(x11, Operand(name));
-  __ Mov(x10, Operand(Smi::FromInt(mode)));
+  __ Mov(x10, Operand(Smi::FromInt(strict_mode)));
   // jssp[0] : mode.
   // jssp[8] : name.
   // jssp[16] : context.
   // jssp[24] : value.
   __ Push(x0, cp, x11, x10);
   __ CallRuntime(Runtime::kStoreContextSlot, 4);
 }
 
 
 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                                Token::Value op) {
   ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
   if (var->IsUnallocated()) {
     // Global var, const, or let.
     __ Mov(x2, Operand(var->name()));
     __ Ldr(x1, GlobalObjectMemOperand());
     CallStoreIC();
 
-  } else if (op == Token::INIT_CONST) {
+  } else if (op == Token::INIT_CONST_LEGACY) {
     // Const initializers need a write barrier.
     ASSERT(!var->IsParameter());  // No const parameters.
     if (var->IsLookupSlot()) {
       __ Push(x0);
       __ Mov(x0, Operand(var->name()));
       __ Push(cp, x0);  // Context and name.
       __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
     } else {
       ASSERT(var->IsStackLocal() || var->IsContextSlot());
       Label skip;
       MemOperand location = VarOperand(var, x1);
       __ Ldr(x10, location);
       __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
       EmitStoreToStackLocalOrContextSlot(var, location);
       __ Bind(&skip);
     }
 
   } else if (var->mode() == LET && op != Token::INIT_LET) {
     // Non-initializing assignment to let variable needs a write barrier.
     if (var->IsLookupSlot()) {
-      EmitCallStoreContextSlot(var->name(), language_mode());
+      EmitCallStoreContextSlot(var->name(), strict_mode());
     } else {
       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
       Label assign;
       MemOperand location = VarOperand(var, x1);
       __ Ldr(x10, location);
       __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
       __ Mov(x10, Operand(var->name()));
       __ Push(x10);
       __ CallRuntime(Runtime::kThrowReferenceError, 1);
       // Perform the assignment.
       __ Bind(&assign);
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
 
-  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
+  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
     // Assignment to var or initializing assignment to let/const
     // in harmony mode.
     if (var->IsLookupSlot()) {
-      EmitCallStoreContextSlot(var->name(), language_mode());
+      EmitCallStoreContextSlot(var->name(), strict_mode());
     } else {
       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
       MemOperand location = VarOperand(var, x1);
       if (FLAG_debug_code && op == Token::INIT_LET) {
         __ Ldr(x10, location);
         __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
         __ Check(eq, kLetBindingReInitialization);
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
(...skipping 23 matching lines...)
 
 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
   ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
   // Assignment to a property, using a keyed store IC.
 
   // Record source code position before IC call.
   SetSourcePosition(expr->position());
   // TODO(all): Could we pass this in registers rather than on the stack?
   __ Pop(x1, x2);  // Key and object holding the property.
 
-  Handle<Code> ic = is_classic_mode()
+  Handle<Code> ic = strict_mode() == SLOPPY
       ? isolate()->builtins()->KeyedStoreIC_Initialize()
       : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
   CallIC(ic, expr->AssignmentFeedbackId());
 
   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
   context()->Plug(x0);
 }
 
 
 void FullCodeGenerator::VisitProperty(Property* expr) {
(...skipping 33 matching lines...)
   int arg_count = args->length();
 
   CallFunctionFlags flags;
   // Get the target function.
   if (callee->IsVariableProxy()) {
     { StackValueContext context(this);
       EmitVariableLoad(callee->AsVariableProxy());
       PrepareForBailout(callee, NO_REGISTERS);
     }
     // Push undefined as receiver. This is patched in the method prologue if it
-    // is a classic mode method.
+    // is a sloppy mode method.
     __ Push(isolate()->factory()->undefined_value());
     flags = NO_CALL_FUNCTION_FLAGS;
   } else {
     // Load the function from the receiver.
     ASSERT(callee->IsProperty());
     __ Peek(x0, 0);
     EmitNamedPropertyLoad(callee->AsProperty());
     PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
     // Push the target function under the receiver.
     __ Pop(x10);
(...skipping 68 matching lines...)
   ZoneList<Expression*>* args = expr->arguments();
   int arg_count = args->length();
   { PreservePositionScope scope(masm()->positions_recorder());
     for (int i = 0; i < arg_count; i++) {
       VisitForStackValue(args->at(i));
     }
   }
   // Record source position for debugger.
   SetSourcePosition(expr->position());
 
-  Handle<Object> uninitialized =
-      TypeFeedbackInfo::UninitializedSentinel(isolate());
-  StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
   __ LoadObject(x2, FeedbackVector());
   __ Mov(x3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
 
   // Record call targets in unoptimized code.
   CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
-  __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes);
+  __ Peek(x1, (arg_count + 1) * kXRegSize);
   __ CallStub(&stub);
   RecordJSReturnSite(expr);
   // Restore context register.
   __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
   context()->DropAndPlug(1, x0);
 }
 
 
 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
   ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
   // Prepare to push a copy of the first argument or undefined if it doesn't
   // exist.
   if (arg_count > 0) {
-    __ Peek(x10, arg_count * kXRegSizeInBytes);
+    __ Peek(x10, arg_count * kXRegSize);
   } else {
     __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
   }
 
   // Prepare to push the receiver of the enclosing function.
   int receiver_offset = 2 + info_->scope()->num_parameters();
   __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
 
   // Push.
   __ Push(x10, x11);
 
   // Prepare to push the language mode.
-  __ Mov(x10, Operand(Smi::FromInt(language_mode())));
+  __ Mov(x10, Operand(Smi::FromInt(strict_mode())));
   // Prepare to push the start position of the scope the call resides in.
   __ Mov(x11, Operand(Smi::FromInt(scope()->start_position())));
 
   // Push.
   __ Push(x10, x11);
 
   // Do the runtime call.
   __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
 }
 
(...skipping 37 matching lines...)
       // The runtime call returns a pair of values in x0 (function) and
       // x1 (receiver). Touch up the stack with the right values.
       __ PokePair(x1, x0, arg_count * kPointerSize);
     }
 
     // Record source position for debugger.
     SetSourcePosition(expr->position());
 
     // Call the evaluated function.
     CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
-    __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes);
+    __ Peek(x1, (arg_count + 1) * kXRegSize);
     __ CallStub(&stub);
     RecordJSReturnSite(expr);
     // Restore context register.
     __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
     context()->DropAndPlug(1, x0);
 
   } else if (call_type == Call::GLOBAL_CALL) {
     EmitCallWithIC(expr);
 
   } else if (call_type == Call::LOOKUP_SLOT_CALL) {
(...skipping 82 matching lines...)
2606 for (int i = 0; i < arg_count; i++) { 2594 for (int i = 0; i < arg_count; i++) {
2607 VisitForStackValue(args->at(i)); 2595 VisitForStackValue(args->at(i));
2608 } 2596 }
2609 2597
2610 // Call the construct call builtin that handles allocation and 2598 // Call the construct call builtin that handles allocation and
2611 // constructor invocation. 2599 // constructor invocation.
2612 SetSourcePosition(expr->position()); 2600 SetSourcePosition(expr->position());
2613 2601
2614 // Load function and argument count into x1 and x0. 2602 // Load function and argument count into x1 and x0.
2615 __ Mov(x0, arg_count); 2603 __ Mov(x0, arg_count);
2616 __ Peek(x1, arg_count * kXRegSizeInBytes); 2604 __ Peek(x1, arg_count * kXRegSize);
2617 2605
2618 // Record call targets in unoptimized code. 2606 // Record call targets in unoptimized code.
2619 Handle<Object> uninitialized =
2620 TypeFeedbackInfo::UninitializedSentinel(isolate());
2621 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2622 __ LoadObject(x2, FeedbackVector()); 2607 __ LoadObject(x2, FeedbackVector());
2623 __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); 2608 __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2624 2609
2625 CallConstructStub stub(RECORD_CALL_TARGET); 2610 CallConstructStub stub(RECORD_CALL_TARGET);
2626 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); 2611 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2627 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2612 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2628 context()->Plug(x0); 2613 context()->Plug(x0);
2629 } 2614 }
2630 2615
2631 2616
(...skipping 1253 matching lines...)
3885 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3870 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3886 switch (expr->op()) { 3871 switch (expr->op()) {
3887 case Token::DELETE: { 3872 case Token::DELETE: {
3888 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3873 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3889 Property* property = expr->expression()->AsProperty(); 3874 Property* property = expr->expression()->AsProperty();
3890 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 3875 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3891 3876
3892 if (property != NULL) { 3877 if (property != NULL) {
3893 VisitForStackValue(property->obj()); 3878 VisitForStackValue(property->obj());
3894 VisitForStackValue(property->key()); 3879 VisitForStackValue(property->key());
3895 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE) 3880 __ Mov(x10, Operand(Smi::FromInt(strict_mode())));
3896 ? kNonStrictMode : kStrictMode;
3897 __ Mov(x10, Operand(Smi::FromInt(strict_mode_flag)));
3898 __ Push(x10); 3881 __ Push(x10);
3899 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3882 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3900 context()->Plug(x0); 3883 context()->Plug(x0);
3901 } else if (proxy != NULL) { 3884 } else if (proxy != NULL) {
3902 Variable* var = proxy->var(); 3885 Variable* var = proxy->var();
3903 // Delete of an unqualified identifier is disallowed in strict mode 3886 // Delete of an unqualified identifier is disallowed in strict mode
3904 // but "delete this" is allowed. 3887 // but "delete this" is allowed.
3905 ASSERT(language_mode() == CLASSIC_MODE || var->is_this()); 3888 ASSERT(strict_mode() == SLOPPY || var->is_this());
3906 if (var->IsUnallocated()) { 3889 if (var->IsUnallocated()) {
3907 __ Ldr(x12, GlobalObjectMemOperand()); 3890 __ Ldr(x12, GlobalObjectMemOperand());
3908 __ Mov(x11, Operand(var->name())); 3891 __ Mov(x11, Operand(var->name()));
3909 __ Mov(x10, Operand(Smi::FromInt(kNonStrictMode))); 3892 __ Mov(x10, Operand(Smi::FromInt(SLOPPY)));
3910 __ Push(x12, x11, x10); 3893 __ Push(x12, x11, x10);
3911 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3894 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3912 context()->Plug(x0); 3895 context()->Plug(x0);
3913 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 3896 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3914 // Result of deleting non-global, non-dynamic variables is false. 3897 // Result of deleting non-global, non-dynamic variables is false.
3915 // The subexpression does not have side effects. 3898 // The subexpression does not have side effects.
3916 context()->Plug(var->is_this()); 3899 context()->Plug(var->is_this());
3917 } else { 3900 } else {
3918 // Non-global variable. Call the runtime to try to delete from the 3901 // Non-global variable. Call the runtime to try to delete from the
3919 // context where the variable was introduced. 3902 // context where the variable was introduced.
(...skipping 173 matching lines...)
4093 if (expr->is_postfix()) { 4076 if (expr->is_postfix()) {
4094 if (!context()->IsEffect()) { 4077 if (!context()->IsEffect()) {
4095 // Save the result on the stack. If we have a named or keyed property, 4078 // Save the result on the stack. If we have a named or keyed property,
4096 // we store the result under the receiver that is currently on top 4079 // we store the result under the receiver that is currently on top
4097 // of the stack. 4080 // of the stack.
4098 switch (assign_type) { 4081 switch (assign_type) {
4099 case VARIABLE: 4082 case VARIABLE:
4100 __ Push(x0); 4083 __ Push(x0);
4101 break; 4084 break;
4102 case NAMED_PROPERTY: 4085 case NAMED_PROPERTY:
4103 __ Poke(x0, kXRegSizeInBytes); 4086 __ Poke(x0, kXRegSize);
4104 break; 4087 break;
4105 case KEYED_PROPERTY: 4088 case KEYED_PROPERTY:
4106 __ Poke(x0, 2 * kXRegSizeInBytes); 4089 __ Poke(x0, 2 * kXRegSize);
4107 break; 4090 break;
4108 } 4091 }
4109 } 4092 }
4110 } 4093 }
4111 4094
4112 __ Bind(&stub_call); 4095 __ Bind(&stub_call);
4113 __ Mov(x1, x0); 4096 __ Mov(x1, x0);
4114 __ Mov(x0, Operand(Smi::FromInt(count_value))); 4097 __ Mov(x0, Operand(Smi::FromInt(count_value)));
4115 4098
4116 // Record position before stub call. 4099 // Record position before stub call.
4117 SetSourcePosition(expr->position()); 4100 SetSourcePosition(expr->position());
4118 4101
4119 { 4102 {
4120 Assembler::BlockConstPoolScope scope(masm_); 4103 Assembler::BlockPoolsScope scope(masm_);
4121 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); 4104 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
4122 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); 4105 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4123 patch_site.EmitPatchInfo(); 4106 patch_site.EmitPatchInfo();
4124 } 4107 }
4125 __ Bind(&done); 4108 __ Bind(&done);
4126 4109
4127 // Store the value returned in x0. 4110 // Store the value returned in x0.
4128 switch (assign_type) { 4111 switch (assign_type) {
4129 case VARIABLE: 4112 case VARIABLE:
4130 if (expr->is_postfix()) { 4113 if (expr->is_postfix()) {
(...skipping 25 matching lines...)
4156 context()->PlugTOS(); 4139 context()->PlugTOS();
4157 } 4140 }
4158 } else { 4141 } else {
4159 context()->Plug(x0); 4142 context()->Plug(x0);
4160 } 4143 }
4161 break; 4144 break;
4162 } 4145 }
4163 case KEYED_PROPERTY: { 4146 case KEYED_PROPERTY: {
4164 __ Pop(x1); // Key. 4147 __ Pop(x1); // Key.
4165 __ Pop(x2); // Receiver. 4148 __ Pop(x2); // Receiver.
4166 Handle<Code> ic = is_classic_mode() 4149 Handle<Code> ic = strict_mode() == SLOPPY
4167 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4150 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4168 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4151 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4169 CallIC(ic, expr->CountStoreFeedbackId()); 4152 CallIC(ic, expr->CountStoreFeedbackId());
4170 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4153 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4171 if (expr->is_postfix()) { 4154 if (expr->is_postfix()) {
4172 if (!context()->IsEffect()) { 4155 if (!context()->IsEffect()) {
4173 context()->PlugTOS(); 4156 context()->PlugTOS();
4174 } 4157 }
4175 } else { 4158 } else {
4176 context()->Plug(x0); 4159 context()->Plug(x0);
(...skipping 807 matching lines...)
4984 return previous_; 4967 return previous_;
4985 } 4968 }
4986 4969
4987 4970
4988 #undef __ 4971 #undef __
4989 4972
4990 4973
4991 } } // namespace v8::internal 4974 } } // namespace v8::internal
4992 4975
4993 #endif // V8_TARGET_ARCH_A64 4976 #endif // V8_TARGET_ARCH_A64