Chromium Code Reviews

Side by Side Diff: src/a64/full-codegen-a64.cc

Issue 148293020: Merge experimental/a64 to bleeding_edge. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Remove ARM from OWNERS (created 6 years, 10 months ago)
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if V8_TARGET_ARCH_ARM 30 #if V8_TARGET_ARCH_A64
31 31
32 #include "code-stubs.h" 32 #include "code-stubs.h"
33 #include "codegen.h" 33 #include "codegen.h"
34 #include "compiler.h" 34 #include "compiler.h"
35 #include "debug.h" 35 #include "debug.h"
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "isolate-inl.h" 37 #include "isolate-inl.h"
38 #include "parser.h" 38 #include "parser.h"
39 #include "scopes.h" 39 #include "scopes.h"
40 #include "stub-cache.h" 40 #include "stub-cache.h"
41 41
42 #include "arm/code-stubs-arm.h" 42 #include "a64/code-stubs-a64.h"
43 #include "arm/macro-assembler-arm.h" 43 #include "a64/macro-assembler-a64.h"
44 44
45 namespace v8 { 45 namespace v8 {
46 namespace internal { 46 namespace internal {
47 47
48 #define __ ACCESS_MASM(masm_) 48 #define __ ACCESS_MASM(masm_)
49 49
50
51 // A patch site is a location in the code that can be patched. This class
52 // has a number of methods to emit the code that is patchable, and the
53 // method EmitPatchInfo to record a marker back to the patchable code. This
54 // marker is a cmp rx, #yyy instruction, where x * 0x00000fff + yyy (using
55 // the raw 12-bit immediate value) is the delta from the pc to the first
56 // instruction of the patchable code.
57 class JumpPatchSite BASE_EMBEDDED { 50 class JumpPatchSite BASE_EMBEDDED {
58 public: 51 public:
59 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { 52 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
60 #ifdef DEBUG 53 #ifdef DEBUG
61 info_emitted_ = false; 54 info_emitted_ = false;
62 #endif 55 #endif
63 } 56 }
64 57
65 ~JumpPatchSite() { 58 ~JumpPatchSite() {
66 ASSERT(patch_site_.is_bound() == info_emitted_); 59 if (patch_site_.is_bound()) {
60 ASSERT(info_emitted_);
61 } else {
62 ASSERT(reg_.IsNone());
63 }
67 } 64 }
68 65
69 // When initially emitting this, ensure that a jump is always generated to skip
70 // the inlined smi code.
71 void EmitJumpIfNotSmi(Register reg, Label* target) { 66 void EmitJumpIfNotSmi(Register reg, Label* target) {
72 ASSERT(!patch_site_.is_bound() && !info_emitted_); 67 // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc.
73 Assembler::BlockConstPoolScope block_const_pool(masm_); 68 InstructionAccurateScope scope(masm_, 1);
69 ASSERT(!info_emitted_);
70 ASSERT(reg.Is64Bits());
71 ASSERT(!reg.Is(csp));
72 reg_ = reg;
74 __ bind(&patch_site_); 73 __ bind(&patch_site_);
75 __ cmp(reg, Operand(reg)); 74 __ tbz(xzr, 0, target); // Always taken before patched.
76 __ b(eq, target); // Always taken before patched.
77 } 75 }
78 76
79 // When initially emitting this, ensure that a jump is never generated to skip
80 // the inlined smi code.
81 void EmitJumpIfSmi(Register reg, Label* target) { 77 void EmitJumpIfSmi(Register reg, Label* target) {
82 ASSERT(!patch_site_.is_bound() && !info_emitted_); 78 // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc.
83 Assembler::BlockConstPoolScope block_const_pool(masm_); 79 InstructionAccurateScope scope(masm_, 1);
80 ASSERT(!info_emitted_);
81 ASSERT(reg.Is64Bits());
82 ASSERT(!reg.Is(csp));
83 reg_ = reg;
84 __ bind(&patch_site_); 84 __ bind(&patch_site_);
85 __ cmp(reg, Operand(reg)); 85 __ tbnz(xzr, 0, target); // Never taken before patched.
86 __ b(ne, target); // Never taken before patched. 86 }
87
88 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
89 // We need to use ip0, so don't allow access to the MacroAssembler.
90 InstructionAccurateScope scope(masm_);
91 __ orr(ip0, reg1, reg2);
92 EmitJumpIfNotSmi(ip0, target);
87 } 93 }
88 94
89 void EmitPatchInfo() { 95 void EmitPatchInfo() {
90 // Block literal pool emission whilst recording patch site information. 96 Assembler::BlockConstPoolScope scope(masm_);
91 Assembler::BlockConstPoolScope block_const_pool(masm_); 97 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
92 if (patch_site_.is_bound()) {
93 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
94 Register reg;
95 reg.set_code(delta_to_patch_site / kOff12Mask);
96 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
97 #ifdef DEBUG 98 #ifdef DEBUG
98 info_emitted_ = true; 99 info_emitted_ = true;
99 #endif 100 #endif
100 } else {
101 __ nop(); // Signals no inlined code.
102 }
103 } 101 }
104 102
105 private: 103 private:
106 MacroAssembler* masm_; 104 MacroAssembler* masm_;
107 Label patch_site_; 105 Label patch_site_;
106 Register reg_;
108 #ifdef DEBUG 107 #ifdef DEBUG
109 bool info_emitted_; 108 bool info_emitted_;
110 #endif 109 #endif
111 }; 110 };
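
The two JumpPatchSite implementations record the patchable smi check differently: the ARM version (left) encodes the distance back to the check directly into a cmp instruction, splitting the delta between the register code and the raw 12-bit immediate, while the A64 version (right) defers to InlineSmiCheckInfo::Emit and relies on the tbz/tbnz on xzr bit 0 (always/never taken) being rewritten later by PatchInlinedSmiCode in ic-a64.cc. A minimal standalone sketch of the ARM-side delta arithmetic, using hypothetical EncodeDelta/DecodeDelta helpers rather than V8 API:

    // Illustrative sketch (not V8 API) of the ARM EmitPatchInfo encoding: the
    // delta from the pc to the inlined smi check is split across the register
    // code and the raw 12-bit immediate of a "cmp rx, #yyy" marker instruction.
    #include <cassert>

    const int kOff12Mask = 0xfff;  // raw 12-bit immediate mask, as in the ARM backend

    struct PatchInfo { int reg_code; int raw_imm12; };

    PatchInfo EncodeDelta(int delta_to_patch_site) {
      PatchInfo info;
      info.reg_code = delta_to_patch_site / kOff12Mask;   // becomes the "rx" in cmp rx, #yyy
      info.raw_imm12 = delta_to_patch_site % kOff12Mask;  // becomes the "#yyy"
      return info;
    }

    int DecodeDelta(const PatchInfo& info) {
      // Inverse computation, used when locating the patchable code.
      return info.reg_code * kOff12Mask + info.raw_imm12;
    }

    int main() {
      PatchInfo info = EncodeDelta(8000);
      assert(DecodeDelta(info) == 8000);  // reg_code == 1, raw_imm12 == 3905
    }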
112 111
113 112
114 // Generate code for a JS function. On entry to the function the receiver 113 // Generate code for a JS function. On entry to the function the receiver
115 // and arguments have been pushed on the stack left to right. The actual 114 // and arguments have been pushed on the stack left to right. The actual
116 // argument count matches the formal parameter count expected by the 115 // argument count matches the formal parameter count expected by the
117 // function. 116 // function.
118 // 117 //
119 // The live registers are: 118 // The live registers are:
120 // o r1: the JS function object being called (i.e., ourselves) 119 // - x1: the JS function object being called (i.e. ourselves).
121 // o cp: our context 120 // - cp: our context.
122 // o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool) 121 // - fp: our caller's frame pointer.
123 // o fp: our caller's frame pointer 122 // - jssp: stack pointer.
124 // o sp: stack pointer 123 // - lr: return address.
125 // o lr: return address
126 // 124 //
127 // The function builds a JS frame. Please see JavaScriptFrameConstants in 125 // The function builds a JS frame. See JavaScriptFrameConstants in
128 // frames-arm.h for its layout. 126 // frames-a64.h for its layout.
129 void FullCodeGenerator::Generate() { 127 void FullCodeGenerator::Generate() {
130 CompilationInfo* info = info_; 128 CompilationInfo* info = info_;
131 handler_table_ = 129 handler_table_ =
132 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 130 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
133 131
134 InitializeFeedbackVector(); 132 InitializeFeedbackVector();
135 133
136 profiling_counter_ = isolate()->factory()->NewCell( 134 profiling_counter_ = isolate()->factory()->NewCell(
137 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 135 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
138 SetFunctionPosition(function()); 136 SetFunctionPosition(function());
139 Comment cmnt(masm_, "[ function compiled by full code generator"); 137 Comment cmnt(masm_, "[ Function compiled by full code generator");
140 138
141 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 139 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
142 140
143 #ifdef DEBUG 141 #ifdef DEBUG
144 if (strlen(FLAG_stop_at) > 0 && 142 if (strlen(FLAG_stop_at) > 0 &&
145 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 143 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
146 __ stop("stop-at"); 144 __ Debug("stop-at", __LINE__, BREAK);
147 } 145 }
148 #endif 146 #endif
149 147
150 // Classic mode functions and builtins need to replace the receiver with the 148 // Classic mode functions and builtins need to replace the receiver with the
151 // global proxy when called as functions (without an explicit receiver 149 // global proxy when called as functions (without an explicit receiver
152 // object). 150 // object).
153 if (info->is_classic_mode() && !info->is_native()) { 151 if (info->is_classic_mode() && !info->is_native()) {
154 Label ok; 152 Label ok;
155 int receiver_offset = info->scope()->num_parameters() * kPointerSize; 153 int receiver_offset = info->scope()->num_parameters() * kXRegSizeInBytes;
156 __ ldr(r2, MemOperand(sp, receiver_offset)); 154 __ Peek(x10, receiver_offset);
157 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); 155 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
158 __ b(ne, &ok);
159 156
160 __ ldr(r2, GlobalObjectOperand()); 157 __ Ldr(x10, GlobalObjectMemOperand());
161 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset)); 158 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset));
159 __ Poke(x10, receiver_offset);
162 160
163 __ str(r2, MemOperand(sp, receiver_offset)); 161 __ Bind(&ok);
164
165 __ bind(&ok);
166 } 162 }
167 163
168 // Open a frame scope to indicate that there is a frame on the stack. The 164
169 // MANUAL indicates that the scope shouldn't actually generate code to set up 165 // Open a frame scope to indicate that there is a frame on the stack.
170 // the frame (that is done below). 166 // The MANUAL indicates that the scope shouldn't actually generate code
167 // to set up the frame because we do it manually below.
171 FrameScope frame_scope(masm_, StackFrame::MANUAL); 168 FrameScope frame_scope(masm_, StackFrame::MANUAL);
172 169
170 // This call emits the following sequence in a way that can be patched for
171 // code ageing support:
172 // Push(lr, fp, cp, x1);
173 // Add(fp, jssp, 2 * kPointerSize);
173 info->set_prologue_offset(masm_->pc_offset()); 174 info->set_prologue_offset(masm_->pc_offset());
174 __ Prologue(BUILD_FUNCTION_FRAME); 175 __ Prologue(BUILD_FUNCTION_FRAME);
175 info->AddNoFrameRange(0, masm_->pc_offset()); 176 info->AddNoFrameRange(0, masm_->pc_offset());
176 __ LoadConstantPoolPointerRegister();
177 177
178 // Reserve space on the stack for locals.
178 { Comment cmnt(masm_, "[ Allocate locals"); 179 { Comment cmnt(masm_, "[ Allocate locals");
179 int locals_count = info->scope()->num_stack_slots(); 180 int locals_count = info->scope()->num_stack_slots();
180 // Generators allocate locals, if any, in context slots. 181 // Generators allocate locals, if any, in context slots.
181 ASSERT(!info->function()->is_generator() || locals_count == 0); 182 ASSERT(!info->function()->is_generator() || locals_count == 0);
183
182 if (locals_count > 0) { 184 if (locals_count > 0) {
183 // Emit a loop to initialize stack cells for locals when optimizing for 185 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
184 // size. Otherwise, unroll the loop for maximum performance. 186 __ PushMultipleTimes(locals_count, x10);
185 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
186 if (FLAG_optimize_for_size && locals_count > 4) {
187 Label loop;
188 __ mov(r2, Operand(locals_count));
189 __ bind(&loop);
190 __ sub(r2, r2, Operand(1), SetCC);
191 __ push(r9);
192 __ b(&loop, ne);
193 } else {
194 for (int i = 0; i < locals_count; i++) {
195 __ push(r9);
196 }
197 }
198 } 187 }
199 } 188 }
200 189
201 bool function_in_register = true; 190 bool function_in_register_x1 = true;
202 191
203 // Possibly allocate a local context.
204 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 192 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
205 if (heap_slots > 0) { 193 if (heap_slots > 0) {
206 // Argument to NewContext is the function, which is still in r1. 194 // Argument to NewContext is the function, which is still in x1.
207 Comment cmnt(masm_, "[ Allocate context"); 195 Comment cmnt(masm_, "[ Allocate context");
208 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { 196 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
209 __ push(r1); 197 __ Mov(x10, Operand(info->scope()->GetScopeInfo()));
210 __ Push(info->scope()->GetScopeInfo()); 198 __ Push(x1, x10);
211 __ CallRuntime(Runtime::kNewGlobalContext, 2); 199 __ CallRuntime(Runtime::kNewGlobalContext, 2);
212 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { 200 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
213 FastNewContextStub stub(heap_slots); 201 FastNewContextStub stub(heap_slots);
214 __ CallStub(&stub); 202 __ CallStub(&stub);
215 } else { 203 } else {
216 __ push(r1); 204 __ Push(x1);
217 __ CallRuntime(Runtime::kNewFunctionContext, 1); 205 __ CallRuntime(Runtime::kNewFunctionContext, 1);
218 } 206 }
219 function_in_register = false; 207 function_in_register_x1 = false;
220 // Context is returned in r0. It replaces the context passed to us. 208 // Context is returned in x0. It replaces the context passed to us.
221 // It's saved in the stack and kept live in cp. 209 // It's saved in the stack and kept live in cp.
222 __ mov(cp, r0); 210 __ Mov(cp, x0);
223 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset)); 211 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
224 // Copy any necessary parameters into the context. 212 // Copy any necessary parameters into the context.
225 int num_parameters = info->scope()->num_parameters(); 213 int num_parameters = info->scope()->num_parameters();
226 for (int i = 0; i < num_parameters; i++) { 214 for (int i = 0; i < num_parameters; i++) {
227 Variable* var = scope()->parameter(i); 215 Variable* var = scope()->parameter(i);
228 if (var->IsContextSlot()) { 216 if (var->IsContextSlot()) {
229 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 217 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
230 (num_parameters - 1 - i) * kPointerSize; 218 (num_parameters - 1 - i) * kPointerSize;
231 // Load parameter from stack. 219 // Load parameter from stack.
232 __ ldr(r0, MemOperand(fp, parameter_offset)); 220 __ Ldr(x10, MemOperand(fp, parameter_offset));
233 // Store it in the context. 221 // Store it in the context.
234 MemOperand target = ContextOperand(cp, var->index()); 222 MemOperand target = ContextMemOperand(cp, var->index());
235 __ str(r0, target); 223 __ Str(x10, target);
236 224
237 // Update the write barrier. 225 // Update the write barrier.
238 __ RecordWriteContextSlot( 226 __ RecordWriteContextSlot(
239 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs); 227 cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
240 } 228 }
241 } 229 }
242 } 230 }
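
A note on the parameter-copy loop above: arguments were pushed left to right, so parameter i sits (num_parameters - 1 - i) slots above the caller's stack pointer. A minimal sketch of that offset arithmetic, with illustrative stand-in constants rather than the real StandardFrameConstants:

    // Illustrative sketch of the parameter_offset computation above; the
    // constant values are stand-ins, not the real StandardFrameConstants.
    #include <cstdio>

    const int kPointerSize = 8;                    // 64-bit target
    const int kCallerSPOffset = 2 * kPointerSize;  // illustrative stand-in

    int ParameterOffset(int num_parameters, int i) {
      // Parameter i lives (num_parameters - 1 - i) slots above the
      // caller's stack pointer.
      return kCallerSPOffset + (num_parameters - 1 - i) * kPointerSize;
    }

    int main() {
      for (int i = 0; i < 3; i++) {
        std::printf("parameter %d: MemOperand(fp, %d)\n",
                    i, ParameterOffset(3, i));
      }
      // parameter 0: MemOperand(fp, 32)
      // parameter 1: MemOperand(fp, 24)
      // parameter 2: MemOperand(fp, 16)
    }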
243 231
244 Variable* arguments = scope()->arguments(); 232 Variable* arguments = scope()->arguments();
245 if (arguments != NULL) { 233 if (arguments != NULL) {
246 // Function uses arguments object. 234 // Function uses arguments object.
247 Comment cmnt(masm_, "[ Allocate arguments object"); 235 Comment cmnt(masm_, "[ Allocate arguments object");
248 if (!function_in_register) { 236 if (!function_in_register_x1) {
249 // Load this again, if it's used by the local context below. 237 // Load this again, if it's used by the local context below.
250 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 238 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
251 } else { 239 } else {
252 __ mov(r3, r1); 240 __ Mov(x3, x1);
253 } 241 }
254 // Receiver is just before the parameters on the caller's stack. 242 // Receiver is just before the parameters on the caller's stack.
255 int num_parameters = info->scope()->num_parameters(); 243 int num_parameters = info->scope()->num_parameters();
256 int offset = num_parameters * kPointerSize; 244 int offset = num_parameters * kPointerSize;
257 __ add(r2, fp, 245 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
258 Operand(StandardFrameConstants::kCallerSPOffset + offset)); 246 __ Mov(x1, Operand(Smi::FromInt(num_parameters)));
259 __ mov(r1, Operand(Smi::FromInt(num_parameters))); 247 __ Push(x3, x2, x1);
260 __ Push(r3, r2, r1);
261 248
262 // Arguments to ArgumentsAccessStub: 249 // Arguments to ArgumentsAccessStub:
263 // function, receiver address, parameter count. 250 // function, receiver address, parameter count.
264 // The stub will rewrite receiever and parameter count if the previous 251 // The stub will rewrite receiver and parameter count if the previous
265 // stack frame was an arguments adapter frame. 252 // stack frame was an arguments adapter frame.
266 ArgumentsAccessStub::Type type; 253 ArgumentsAccessStub::Type type;
267 if (!is_classic_mode()) { 254 if (!is_classic_mode()) {
268 type = ArgumentsAccessStub::NEW_STRICT; 255 type = ArgumentsAccessStub::NEW_STRICT;
269 } else if (function()->has_duplicate_parameters()) { 256 } else if (function()->has_duplicate_parameters()) {
270 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; 257 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
271 } else { 258 } else {
272 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; 259 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
273 } 260 }
274 ArgumentsAccessStub stub(type); 261 ArgumentsAccessStub stub(type);
275 __ CallStub(&stub); 262 __ CallStub(&stub);
276 263
277 SetVar(arguments, r0, r1, r2); 264 SetVar(arguments, x0, x1, x2);
278 } 265 }
279 266
280 if (FLAG_trace) { 267 if (FLAG_trace) {
281 __ CallRuntime(Runtime::kTraceEnter, 0); 268 __ CallRuntime(Runtime::kTraceEnter, 0);
282 } 269 }
283 270
271
284 // Visit the declarations and body unless there is an illegal 272 // Visit the declarations and body unless there is an illegal
285 // redeclaration. 273 // redeclaration.
286 if (scope()->HasIllegalRedeclaration()) { 274 if (scope()->HasIllegalRedeclaration()) {
287 Comment cmnt(masm_, "[ Declarations"); 275 Comment cmnt(masm_, "[ Declarations");
288 scope()->VisitIllegalRedeclaration(this); 276 scope()->VisitIllegalRedeclaration(this);
289 277
290 } else { 278 } else {
291 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 279 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
292 { Comment cmnt(masm_, "[ Declarations"); 280 { Comment cmnt(masm_, "[ Declarations");
293 // For named function expressions, declare the function name as a
294 // constant.
295 if (scope()->is_function_scope() && scope()->function() != NULL) { 281 if (scope()->is_function_scope() && scope()->function() != NULL) {
296 VariableDeclaration* function = scope()->function(); 282 VariableDeclaration* function = scope()->function();
297 ASSERT(function->proxy()->var()->mode() == CONST || 283 ASSERT(function->proxy()->var()->mode() == CONST ||
298 function->proxy()->var()->mode() == CONST_HARMONY); 284 function->proxy()->var()->mode() == CONST_HARMONY);
299 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); 285 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
300 VisitVariableDeclaration(function); 286 VisitVariableDeclaration(function);
301 } 287 }
302 VisitDeclarations(scope()->declarations()); 288 VisitDeclarations(scope()->declarations());
303 } 289 }
290 }
304 291
305 { Comment cmnt(masm_, "[ Stack check"); 292 { Comment cmnt(masm_, "[ Stack check");
306 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 293 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
307 Label ok; 294 Label ok;
308 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 295 ASSERT(jssp.Is(__ StackPointer()));
309 __ cmp(sp, Operand(ip)); 296 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
310 __ b(hs, &ok); 297 __ B(hs, &ok);
311 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); 298 PredictableCodeSizeScope predictable(masm_,
312 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); 299 Assembler::kCallSizeWithRelocation);
313 __ bind(&ok); 300 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
314 } 301 __ Bind(&ok);
302 }
315 303
316 { Comment cmnt(masm_, "[ Body"); 304 { Comment cmnt(masm_, "[ Body");
317 ASSERT(loop_depth() == 0); 305 ASSERT(loop_depth() == 0);
318 VisitStatements(function()->body()); 306 VisitStatements(function()->body());
319 ASSERT(loop_depth() == 0); 307 ASSERT(loop_depth() == 0);
320 }
321 } 308 }
322 309
323 // Always emit a 'return undefined' in case control fell off the end of 310 // Always emit a 'return undefined' in case control fell off the end of
324 // the body. 311 // the body.
325 { Comment cmnt(masm_, "[ return <undefined>;"); 312 { Comment cmnt(masm_, "[ return <undefined>;");
326 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 313 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
327 } 314 }
328 EmitReturnSequence(); 315 EmitReturnSequence();
329 316
330 // Force emit the constant pool, so it doesn't get emitted in the middle 317 // Force emit the constant pool, so it doesn't get emitted in the middle
331 // of the back edge table. 318 // of the back edge table.
332 masm()->CheckConstPool(true, false); 319 masm()->CheckConstPool(true, false);
333 } 320 }
334 321
335 322
336 void FullCodeGenerator::ClearAccumulator() { 323 void FullCodeGenerator::ClearAccumulator() {
337 __ mov(r0, Operand(Smi::FromInt(0))); 324 __ Mov(x0, Operand(Smi::FromInt(0)));
338 } 325 }
339 326
340 327
341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { 328 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
342 __ mov(r2, Operand(profiling_counter_)); 329 __ Mov(x2, Operand(profiling_counter_));
343 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); 330 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
344 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); 331 __ Subs(x3, x3, Operand(Smi::FromInt(delta)));
345 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); 332 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
346 } 333 }
347 334
348 335
349 void FullCodeGenerator::EmitProfilingCounterReset() { 336 void FullCodeGenerator::EmitProfilingCounterReset() {
350 int reset_value = FLAG_interrupt_budget; 337 int reset_value = FLAG_interrupt_budget;
351 if (isolate()->IsDebuggerActive()) { 338 if (isolate()->IsDebuggerActive()) {
352 // Detect debug break requests as soon as possible. 339 // Detect debug break requests as soon as possible.
353 reset_value = FLAG_interrupt_budget >> 4; 340 reset_value = FLAG_interrupt_budget >> 4;
354 } 341 }
355 __ mov(r2, Operand(profiling_counter_)); 342 __ Mov(x2, Operand(profiling_counter_));
356 __ mov(r3, Operand(Smi::FromInt(reset_value))); 343 __ Mov(x3, Operand(Smi::FromInt(reset_value)));
357 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); 344 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
358 } 345 }
359 346
360 347
361 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 348 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
362 Label* back_edge_target) { 349 Label* back_edge_target) {
350 ASSERT(jssp.Is(__ StackPointer()));
363 Comment cmnt(masm_, "[ Back edge bookkeeping"); 351 Comment cmnt(masm_, "[ Back edge bookkeeping");
364 // Block literal pools whilst emitting back edge code. 352 // Block literal pools whilst emitting back edge code.
365 Assembler::BlockConstPoolScope block_const_pool(masm_); 353 Assembler::BlockConstPoolScope block_const_pool(masm_);
366 Label ok; 354 Label ok;
367 355
368 ASSERT(back_edge_target->is_bound()); 356 ASSERT(back_edge_target->is_bound());
369 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); 357 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
370 int weight = Min(kMaxBackEdgeWeight, 358 int weight = Min(kMaxBackEdgeWeight,
371 Max(1, distance / kCodeSizeMultiplier)); 359 Max(1, distance / kCodeSizeMultiplier));
372 EmitProfilingCounterDecrement(weight); 360 EmitProfilingCounterDecrement(weight);
373 __ b(pl, &ok); 361 __ B(pl, &ok);
374 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); 362 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
375 363
376 // Record a mapping of this PC offset to the OSR id. This is used to find 364 // Record a mapping of this PC offset to the OSR id. This is used to find
377 // the AST id from the unoptimized code in order to use it as a key into 365 // the AST id from the unoptimized code in order to use it as a key into
378 // the deoptimization input data found in the optimized code. 366 // the deoptimization input data found in the optimized code.
379 RecordBackEdge(stmt->OsrEntryId()); 367 RecordBackEdge(stmt->OsrEntryId());
380 368
381 EmitProfilingCounterReset(); 369 EmitProfilingCounterReset();
382 370
383 __ bind(&ok); 371 __ Bind(&ok);
384 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 372 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
385 // Record a mapping of the OSR id to this PC. This is used if the OSR 373 // Record a mapping of the OSR id to this PC. This is used if the OSR
386 // entry becomes the target of a bailout. We don't expect it to be, but 374 // entry becomes the target of a bailout. We don't expect it to be, but
387 // we want it to work if it is. 375 // we want it to work if it is.
388 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 376 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
389 } 377 }
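
In plain terms, the bookkeeping above charges each back edge a weight proportional to the loop body's code size against a profiling budget, and calls the InterruptCheck builtin once the budget is exhausted (the `pl` branch is the budget-still-non-negative fast path). A rough C++ model, with illustrative constants and a stand-in for the builtin call:

    // Rough model of EmitBackEdgeBookkeeping plus the profiling counter
    // helpers above; constants and CallInterruptCheck() are stand-ins.
    #include <algorithm>

    const int kMaxBackEdgeWeight = 127;    // illustrative stand-in
    const int kCodeSizeMultiplier = 100;   // illustrative stand-in
    const int kInterruptBudget = 0x1800;   // stand-in for FLAG_interrupt_budget

    int profiling_counter = kInterruptBudget;

    void CallInterruptCheck() { /* would call the InterruptCheck builtin */ }

    void OnBackEdge(int distance_in_bytes) {
      // Bigger loop bodies are charged more per iteration.
      int weight = std::min(kMaxBackEdgeWeight,
                            std::max(1, distance_in_bytes / kCodeSizeMultiplier));
      profiling_counter -= weight;       // EmitProfilingCounterDecrement(weight)
      if (profiling_counter < 0) {       // "B(pl, &ok)" branches over this path
        CallInterruptCheck();            // otherwise, check for interrupts
        profiling_counter = kInterruptBudget;  // EmitProfilingCounterReset()
      }
    }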
390 378
391 379
392 void FullCodeGenerator::EmitReturnSequence() { 380 void FullCodeGenerator::EmitReturnSequence() {
393 Comment cmnt(masm_, "[ Return sequence"); 381 Comment cmnt(masm_, "[ Return sequence");
382
394 if (return_label_.is_bound()) { 383 if (return_label_.is_bound()) {
395 __ b(&return_label_); 384 __ B(&return_label_);
385
396 } else { 386 } else {
397 __ bind(&return_label_); 387 __ Bind(&return_label_);
398 if (FLAG_trace) { 388 if (FLAG_trace) {
399 // Push the return value on the stack as the parameter. 389 // Push the return value on the stack as the parameter.
400 // Runtime::TraceExit returns its parameter in r0. 390 // Runtime::TraceExit returns its parameter in x0.
401 __ push(r0); 391 __ Push(result_register());
402 __ CallRuntime(Runtime::kTraceExit, 1); 392 __ CallRuntime(Runtime::kTraceExit, 1);
393 ASSERT(x0.Is(result_register()));
403 } 394 }
404 // Pretend that the exit is a backwards jump to the entry. 395 // Pretend that the exit is a backwards jump to the entry.
405 int weight = 1; 396 int weight = 1;
406 if (info_->ShouldSelfOptimize()) { 397 if (info_->ShouldSelfOptimize()) {
407 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 398 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
408 } else { 399 } else {
409 int distance = masm_->pc_offset(); 400 int distance = masm_->pc_offset();
410 weight = Min(kMaxBackEdgeWeight, 401 weight = Min(kMaxBackEdgeWeight,
411 Max(1, distance / kCodeSizeMultiplier)); 402 Max(1, distance / kCodeSizeMultiplier));
412 } 403 }
413 EmitProfilingCounterDecrement(weight); 404 EmitProfilingCounterDecrement(weight);
414 Label ok; 405 Label ok;
415 __ b(pl, &ok); 406 __ B(pl, &ok);
416 __ push(r0); 407 __ Push(x0);
417 __ Call(isolate()->builtins()->InterruptCheck(), 408 __ Call(isolate()->builtins()->InterruptCheck(),
418 RelocInfo::CODE_TARGET); 409 RelocInfo::CODE_TARGET);
419 __ pop(r0); 410 __ Pop(x0);
420 EmitProfilingCounterReset(); 411 EmitProfilingCounterReset();
421 __ bind(&ok); 412 __ Bind(&ok);
422 413
423 #ifdef DEBUG
424 // Add a label for checking the size of the code used for returning.
425 Label check_exit_codesize;
426 __ bind(&check_exit_codesize);
427 #endif
428 // Make sure that the constant pool is not emitted inside of the return 414 // Make sure that the constant pool is not emitted inside of the return
429 // sequence. 415 // sequence. This sequence can get patched when the debugger is used. See
430 { Assembler::BlockConstPoolScope block_const_pool(masm_); 416 // debug-a64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
431 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; 417 {
418 InstructionAccurateScope scope(masm_,
419 Assembler::kJSRetSequenceInstructions);
432 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 420 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
433 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
434 PredictableCodeSizeScope predictable(masm_, -1);
435 __ RecordJSReturn(); 421 __ RecordJSReturn();
436 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT); 422 // This code is generated using Assembler methods rather than Macro
437 __ add(sp, sp, Operand(sp_delta)); 423 // Assembler methods because it will be patched later on, and so the size
438 __ Jump(lr); 424 // of the generated code must be consistent.
425 const Register& current_sp = __ StackPointer();
426 // Nothing ensures 16-byte alignment here.
427 ASSERT(!current_sp.Is(csp));
428 __ mov(current_sp, fp);
429 int no_frame_start = masm_->pc_offset();
430 __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSizeInBytes, PostIndex));
431 // Drop the arguments and receiver and return.
432 // TODO(all): This implementation is overkill as it supports 2**31+1
433 // arguments, consider how to improve it without creating a security
434 // hole.
435 __ LoadLiteral(ip0, 3 * kInstructionSize);
436 __ add(current_sp, current_sp, ip0);
437 __ ret();
438 __ dc64(kXRegSizeInBytes * (info_->scope()->num_parameters() + 1));
439 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 439 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
440 } 440 }
441
442 #ifdef DEBUG
443 // Check that the size of the code used for returning is large enough
444 // for the debugger's requirements.
445 ASSERT(Assembler::kJSReturnSequenceInstructions <=
446 masm_->InstructionsGeneratedSince(&check_exit_codesize));
447 #endif
448 } 441 }
449 } 442 }
450 443
451 444
452 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { 445 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
453 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 446 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
454 } 447 }
455 448
456 449
457 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { 450 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
458 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 451 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
459 codegen()->GetVar(result_register(), var); 452 codegen()->GetVar(result_register(), var);
460 } 453 }
461 454
462 455
463 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { 456 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
464 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 457 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
465 codegen()->GetVar(result_register(), var); 458 codegen()->GetVar(result_register(), var);
466 __ push(result_register()); 459 __ Push(result_register());
467 } 460 }
468 461
469 462
470 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 463 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
471 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 464 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
472 // For simplicity we always test the accumulator register. 465 // For simplicity we always test the accumulator register.
473 codegen()->GetVar(result_register(), var); 466 codegen()->GetVar(result_register(), var);
474 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 467 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
475 codegen()->DoTest(this); 468 codegen()->DoTest(this);
476 } 469 }
477 470
478 471
479 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 472 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
473 // Root values have no side effects.
480 } 474 }
481 475
482 476
483 void FullCodeGenerator::AccumulatorValueContext::Plug( 477 void FullCodeGenerator::AccumulatorValueContext::Plug(
484 Heap::RootListIndex index) const { 478 Heap::RootListIndex index) const {
485 __ LoadRoot(result_register(), index); 479 __ LoadRoot(result_register(), index);
486 } 480 }
487 481
488 482
489 void FullCodeGenerator::StackValueContext::Plug( 483 void FullCodeGenerator::StackValueContext::Plug(
490 Heap::RootListIndex index) const { 484 Heap::RootListIndex index) const {
491 __ LoadRoot(result_register(), index); 485 __ LoadRoot(result_register(), index);
492 __ push(result_register()); 486 __ Push(result_register());
493 } 487 }
494 488
495 489
496 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { 490 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
497 codegen()->PrepareForBailoutBeforeSplit(condition(), 491 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
498 true,
499 true_label_,
500 false_label_); 492 false_label_);
501 if (index == Heap::kUndefinedValueRootIndex || 493 if (index == Heap::kUndefinedValueRootIndex ||
502 index == Heap::kNullValueRootIndex || 494 index == Heap::kNullValueRootIndex ||
503 index == Heap::kFalseValueRootIndex) { 495 index == Heap::kFalseValueRootIndex) {
504 if (false_label_ != fall_through_) __ b(false_label_); 496 if (false_label_ != fall_through_) __ B(false_label_);
505 } else if (index == Heap::kTrueValueRootIndex) { 497 } else if (index == Heap::kTrueValueRootIndex) {
506 if (true_label_ != fall_through_) __ b(true_label_); 498 if (true_label_ != fall_through_) __ B(true_label_);
507 } else { 499 } else {
508 __ LoadRoot(result_register(), index); 500 __ LoadRoot(result_register(), index);
509 codegen()->DoTest(this); 501 codegen()->DoTest(this);
510 } 502 }
511 } 503 }
512 504
513 505
514 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const { 506 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
515 } 507 }
516 508
517 509
518 void FullCodeGenerator::AccumulatorValueContext::Plug( 510 void FullCodeGenerator::AccumulatorValueContext::Plug(
519 Handle<Object> lit) const { 511 Handle<Object> lit) const {
520 __ mov(result_register(), Operand(lit)); 512 __ Mov(result_register(), Operand(lit));
521 } 513 }
522 514
523 515
524 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { 516 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
525 // Immediates cannot be pushed directly. 517 // Immediates cannot be pushed directly.
526 __ mov(result_register(), Operand(lit)); 518 __ Mov(result_register(), Operand(lit));
527 __ push(result_register()); 519 __ Push(result_register());
528 } 520 }
529 521
530 522
531 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 523 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
532 codegen()->PrepareForBailoutBeforeSplit(condition(), 524 codegen()->PrepareForBailoutBeforeSplit(condition(),
533 true, 525 true,
534 true_label_, 526 true_label_,
535 false_label_); 527 false_label_);
536 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals. 528 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
537 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 529 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
538 if (false_label_ != fall_through_) __ b(false_label_); 530 if (false_label_ != fall_through_) __ B(false_label_);
539 } else if (lit->IsTrue() || lit->IsJSObject()) { 531 } else if (lit->IsTrue() || lit->IsJSObject()) {
540 if (true_label_ != fall_through_) __ b(true_label_); 532 if (true_label_ != fall_through_) __ B(true_label_);
541 } else if (lit->IsString()) { 533 } else if (lit->IsString()) {
542 if (String::cast(*lit)->length() == 0) { 534 if (String::cast(*lit)->length() == 0) {
543 if (false_label_ != fall_through_) __ b(false_label_); 535 if (false_label_ != fall_through_) __ B(false_label_);
544 } else { 536 } else {
545 if (true_label_ != fall_through_) __ b(true_label_); 537 if (true_label_ != fall_through_) __ B(true_label_);
546 } 538 }
547 } else if (lit->IsSmi()) { 539 } else if (lit->IsSmi()) {
548 if (Smi::cast(*lit)->value() == 0) { 540 if (Smi::cast(*lit)->value() == 0) {
549 if (false_label_ != fall_through_) __ b(false_label_); 541 if (false_label_ != fall_through_) __ B(false_label_);
550 } else { 542 } else {
551 if (true_label_ != fall_through_) __ b(true_label_); 543 if (true_label_ != fall_through_) __ B(true_label_);
552 } 544 }
553 } else { 545 } else {
554 // For simplicity we always test the accumulator register. 546 // For simplicity we always test the accumulator register.
555 __ mov(result_register(), Operand(lit)); 547 __ Mov(result_register(), Operand(lit));
556 codegen()->DoTest(this); 548 codegen()->DoTest(this);
557 } 549 }
558 } 550 }
559 551
560 552
561 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 553 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
562 Register reg) const { 554 Register reg) const {
563 ASSERT(count > 0); 555 ASSERT(count > 0);
564 __ Drop(count); 556 __ Drop(count);
565 } 557 }
566 558
567 559
568 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( 560 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
569 int count, 561 int count,
570 Register reg) const { 562 Register reg) const {
571 ASSERT(count > 0); 563 ASSERT(count > 0);
572 __ Drop(count); 564 __ Drop(count);
573 __ Move(result_register(), reg); 565 __ Move(result_register(), reg);
574 } 566 }
575 567
576 568
577 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 569 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
578 Register reg) const { 570 Register reg) const {
579 ASSERT(count > 0); 571 ASSERT(count > 0);
580 if (count > 1) __ Drop(count - 1); 572 if (count > 1) __ Drop(count - 1);
581 __ str(reg, MemOperand(sp, 0)); 573 __ Poke(reg, 0);
582 } 574 }
583 575
584 576
585 void FullCodeGenerator::TestContext::DropAndPlug(int count, 577 void FullCodeGenerator::TestContext::DropAndPlug(int count,
586 Register reg) const { 578 Register reg) const {
587 ASSERT(count > 0); 579 ASSERT(count > 0);
588 // For simplicity we always test the accumulator register. 580 // For simplicity we always test the accumulator register.
589 __ Drop(count); 581 __ Drop(count);
590 __ Move(result_register(), reg); 582 __ Mov(result_register(), reg);
591 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 583 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
592 codegen()->DoTest(this); 584 codegen()->DoTest(this);
593 } 585 }
594 586
595 587
596 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 588 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
597 Label* materialize_false) const { 589 Label* materialize_false) const {
598 ASSERT(materialize_true == materialize_false); 590 ASSERT(materialize_true == materialize_false);
599 __ bind(materialize_true); 591 __ Bind(materialize_true);
600 } 592 }
601 593
602 594
603 void FullCodeGenerator::AccumulatorValueContext::Plug( 595 void FullCodeGenerator::AccumulatorValueContext::Plug(
604 Label* materialize_true, 596 Label* materialize_true,
605 Label* materialize_false) const { 597 Label* materialize_false) const {
606 Label done; 598 Label done;
607 __ bind(materialize_true); 599 __ Bind(materialize_true);
608 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); 600 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
609 __ jmp(&done); 601 __ B(&done);
610 __ bind(materialize_false); 602 __ Bind(materialize_false);
611 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); 603 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
612 __ bind(&done); 604 __ Bind(&done);
613 } 605 }
614 606
615 607
616 void FullCodeGenerator::StackValueContext::Plug( 608 void FullCodeGenerator::StackValueContext::Plug(
617 Label* materialize_true, 609 Label* materialize_true,
618 Label* materialize_false) const { 610 Label* materialize_false) const {
619 Label done; 611 Label done;
620 __ bind(materialize_true); 612 __ Bind(materialize_true);
621 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 613 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
622 __ jmp(&done); 614 __ B(&done);
623 __ bind(materialize_false); 615 __ Bind(materialize_false);
624 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 616 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
625 __ bind(&done); 617 __ Bind(&done);
626 __ push(ip); 618 __ Push(x10);
627 } 619 }
628 620
629 621
630 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 622 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
631 Label* materialize_false) const { 623 Label* materialize_false) const {
632 ASSERT(materialize_true == true_label_); 624 ASSERT(materialize_true == true_label_);
633 ASSERT(materialize_false == false_label_); 625 ASSERT(materialize_false == false_label_);
634 } 626 }
635 627
636 628
637 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 629 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
638 } 630 }
639 631
640 632
641 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 633 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
642 Heap::RootListIndex value_root_index = 634 Heap::RootListIndex value_root_index =
643 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 635 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
644 __ LoadRoot(result_register(), value_root_index); 636 __ LoadRoot(result_register(), value_root_index);
645 } 637 }
646 638
647 639
648 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { 640 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
649 Heap::RootListIndex value_root_index = 641 Heap::RootListIndex value_root_index =
650 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 642 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
651 __ LoadRoot(ip, value_root_index); 643 __ LoadRoot(x10, value_root_index);
652 __ push(ip); 644 __ Push(x10);
653 } 645 }
654 646
655 647
656 void FullCodeGenerator::TestContext::Plug(bool flag) const { 648 void FullCodeGenerator::TestContext::Plug(bool flag) const {
657 codegen()->PrepareForBailoutBeforeSplit(condition(), 649 codegen()->PrepareForBailoutBeforeSplit(condition(),
658 true, 650 true,
659 true_label_, 651 true_label_,
660 false_label_); 652 false_label_);
661 if (flag) { 653 if (flag) {
662 if (true_label_ != fall_through_) __ b(true_label_); 654 if (true_label_ != fall_through_) {
655 __ B(true_label_);
656 }
663 } else { 657 } else {
664 if (false_label_ != fall_through_) __ b(false_label_); 658 if (false_label_ != fall_through_) {
659 __ B(false_label_);
660 }
665 } 661 }
666 } 662 }
667 663
668 664
669 void FullCodeGenerator::DoTest(Expression* condition, 665 void FullCodeGenerator::DoTest(Expression* condition,
670 Label* if_true, 666 Label* if_true,
671 Label* if_false, 667 Label* if_false,
672 Label* fall_through) { 668 Label* fall_through) {
673 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); 669 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
674 CallIC(ic, condition->test_id()); 670 CallIC(ic, condition->test_id());
675 __ tst(result_register(), result_register()); 671 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
676 Split(ne, if_true, if_false, fall_through);
677 } 672 }
678 673
679 674
675 // If (cond), branch to if_true.
676 // If (!cond), branch to if_false.
677 // fall_through is used as an optimization in cases where only one branch
678 // instruction is necessary.
680 void FullCodeGenerator::Split(Condition cond, 679 void FullCodeGenerator::Split(Condition cond,
681 Label* if_true, 680 Label* if_true,
682 Label* if_false, 681 Label* if_false,
683 Label* fall_through) { 682 Label* fall_through) {
684 if (if_false == fall_through) { 683 if (if_false == fall_through) {
685 __ b(cond, if_true); 684 __ B(cond, if_true);
686 } else if (if_true == fall_through) { 685 } else if (if_true == fall_through) {
687 __ b(NegateCondition(cond), if_false); 686 ASSERT(if_false != fall_through);
687 __ B(InvertCondition(cond), if_false);
688 } else { 688 } else {
689 __ b(cond, if_true); 689 __ B(cond, if_true);
690 __ b(if_false); 690 __ B(if_false);
691 } 691 }
692 } 692 }
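
The fall-through cases in Split deserve a second look: when one of the targets is the next emitted code anyway, no branch is needed for it. A compilable sketch, where Label, Condition, and the EmitBranch helpers are hypothetical stand-ins for the macro assembler:

    // Compilable sketch of Split's branch selection; the types and EmitBranch
    // helpers are hypothetical stand-ins, not V8 API.
    struct Label {};
    enum Condition { eq, ne };
    Condition InvertCondition(Condition cond) { return (cond == eq) ? ne : eq; }
    void EmitBranch(Condition cond, Label* target) { /* emit "B.cond target" */ }
    void EmitBranch(Label* target) { /* emit unconditional "B target" */ }

    void Split(Condition cond, Label* if_true, Label* if_false,
               Label* fall_through) {
      if (if_false == fall_through) {
        EmitBranch(cond, if_true);          // false case simply falls through
      } else if (if_true == fall_through) {
        EmitBranch(InvertCondition(cond), if_false);  // true case falls through
      } else {
        EmitBranch(cond, if_true);          // neither target is next in line:
        EmitBranch(if_false);               // branch both ways
      }
    }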
693 693
694 694
695 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 695 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
696 ASSERT(var->IsStackAllocated());
697 // Offset is negative because higher indexes are at lower addresses. 696 // Offset is negative because higher indexes are at lower addresses.
698 int offset = -var->index() * kPointerSize; 697 int offset = -var->index() * kXRegSizeInBytes;
699 // Adjust by a (parameter or local) base offset. 698 // Adjust by a (parameter or local) base offset.
700 if (var->IsParameter()) { 699 if (var->IsParameter()) {
701 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; 700 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
702 } else { 701 } else {
703 offset += JavaScriptFrameConstants::kLocal0Offset; 702 offset += JavaScriptFrameConstants::kLocal0Offset;
704 } 703 }
705 return MemOperand(fp, offset); 704 return MemOperand(fp, offset);
706 } 705 }
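
StackOperand's arithmetic is worth spelling out: slot indexes grow downward in memory, parameters sit above the frame pointer, and locals sit below the fixed frame header. A standalone sketch, with kLocal0Offset as an illustrative stand-in for JavaScriptFrameConstants:

    // Standalone sketch of StackOperand's arithmetic; kLocal0Offset's value
    // is an illustrative stand-in, not the real JavaScriptFrameConstants.
    const int kXRegSizeInBytes = 8;
    const int kPointerSize = 8;
    const int kLocal0Offset = -2 * kPointerSize;  // illustrative stand-in

    int StackSlotOffset(int index, bool is_parameter, int num_parameters) {
      // Offset is negative because higher indexes are at lower addresses.
      int offset = -index * kXRegSizeInBytes;
      if (is_parameter) {
        // Parameters live above fp: receiver plus num_parameters slots.
        offset += (num_parameters + 1) * kPointerSize;
      } else {
        // Locals start below the fixed frame header.
        offset += kLocal0Offset;
      }
      return offset;  // used as MemOperand(fp, offset)
    }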
707 706
708 707
709 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { 708 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
710 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 709 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
711 if (var->IsContextSlot()) { 710 if (var->IsContextSlot()) {
712 int context_chain_length = scope()->ContextChainLength(var->scope()); 711 int context_chain_length = scope()->ContextChainLength(var->scope());
713 __ LoadContext(scratch, context_chain_length); 712 __ LoadContext(scratch, context_chain_length);
714 return ContextOperand(scratch, var->index()); 713 return ContextMemOperand(scratch, var->index());
715 } else { 714 } else {
716 return StackOperand(var); 715 return StackOperand(var);
717 } 716 }
718 } 717 }
719 718
720 719
721 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 720 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
722 // Use destination as scratch. 721 // Use destination as scratch.
723 MemOperand location = VarOperand(var, dest); 722 MemOperand location = VarOperand(var, dest);
724 __ ldr(dest, location); 723 __ Ldr(dest, location);
725 } 724 }
726 725
727 726
728 void FullCodeGenerator::SetVar(Variable* var, 727 void FullCodeGenerator::SetVar(Variable* var,
729 Register src, 728 Register src,
730 Register scratch0, 729 Register scratch0,
731 Register scratch1) { 730 Register scratch1) {
732 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 731 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
733 ASSERT(!scratch0.is(src)); 732 ASSERT(!AreAliased(src, scratch0, scratch1));
734 ASSERT(!scratch0.is(scratch1));
735 ASSERT(!scratch1.is(src));
736 MemOperand location = VarOperand(var, scratch0); 733 MemOperand location = VarOperand(var, scratch0);
737 __ str(src, location); 734 __ Str(src, location);
738 735
739 // Emit the write barrier code if the location is in the heap. 736 // Emit the write barrier code if the location is in the heap.
740 if (var->IsContextSlot()) { 737 if (var->IsContextSlot()) {
738 // scratch0 contains the correct context.
741 __ RecordWriteContextSlot(scratch0, 739 __ RecordWriteContextSlot(scratch0,
742 location.offset(), 740 location.offset(),
743 src, 741 src,
744 scratch1, 742 scratch1,
745 kLRHasBeenSaved, 743 kLRHasBeenSaved,
746 kDontSaveFPRegs); 744 kDontSaveFPRegs);
747 } 745 }
748 } 746 }
749 747
750 748
751 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, 749 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
752 bool should_normalize, 750 bool should_normalize,
753 Label* if_true, 751 Label* if_true,
754 Label* if_false) { 752 Label* if_false) {
755 // Only prepare for bailouts before splits if we're in a test 753 // Only prepare for bailouts before splits if we're in a test
756 // context. Otherwise, we let the Visit function deal with the 754 // context. Otherwise, we let the Visit function deal with the
757 // preparation to avoid preparing with the same AST id twice. 755 // preparation to avoid preparing with the same AST id twice.
758 if (!context()->IsTest() || !info_->IsOptimizable()) return; 756 if (!context()->IsTest() || !info_->IsOptimizable()) return;
759 757
758 // TODO(all): Investigate to see if there is something to work on here.
760 Label skip; 759 Label skip;
761 if (should_normalize) __ b(&skip); 760 if (should_normalize) {
761 __ B(&skip);
762 }
762 PrepareForBailout(expr, TOS_REG); 763 PrepareForBailout(expr, TOS_REG);
763 if (should_normalize) { 764 if (should_normalize) {
764 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 765 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
765 __ cmp(r0, ip);
766 Split(eq, if_true, if_false, NULL); 766 Split(eq, if_true, if_false, NULL);
767 __ bind(&skip); 767 __ Bind(&skip);
768 } 768 }
769 } 769 }
770 770
771 771
772 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 772 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
773 // The variable in the declaration always resides in the current function 773 // The variable in the declaration always resides in the current function
774 // context. 774 // context.
775 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 775 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
776 if (generate_debug_code_) { 776 if (generate_debug_code_) {
777 // Check that we're not inside a with or catch context. 777 // Check that we're not inside a with or catch context.
778 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); 778 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
779 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); 779 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
780 __ Check(ne, kDeclarationInWithContext); 780 __ Check(ne, kDeclarationInWithContext);
781 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); 781 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
782 __ Check(ne, kDeclarationInCatchContext); 782 __ Check(ne, kDeclarationInCatchContext);
783 } 783 }
784 } 784 }
785 785
786 786
787 void FullCodeGenerator::VisitVariableDeclaration( 787 void FullCodeGenerator::VisitVariableDeclaration(
788 VariableDeclaration* declaration) { 788 VariableDeclaration* declaration) {
789 // If it was not possible to allocate the variable at compile time, we 789 // If it was not possible to allocate the variable at compile time, we
790 // need to "declare" it at runtime to make sure it actually exists in the 790 // need to "declare" it at runtime to make sure it actually exists in the
791 // local context. 791 // local context.
792 VariableProxy* proxy = declaration->proxy(); 792 VariableProxy* proxy = declaration->proxy();
793 VariableMode mode = declaration->mode(); 793 VariableMode mode = declaration->mode();
794 Variable* variable = proxy->var(); 794 Variable* variable = proxy->var();
795 bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET; 795 bool hole_init = (mode == CONST) || (mode == CONST_HARMONY) || (mode == LET);
796
796 switch (variable->location()) { 797 switch (variable->location()) {
797 case Variable::UNALLOCATED: 798 case Variable::UNALLOCATED:
798 globals_->Add(variable->name(), zone()); 799 globals_->Add(variable->name(), zone());
799 globals_->Add(variable->binding_needs_init() 800 globals_->Add(variable->binding_needs_init()
800 ? isolate()->factory()->the_hole_value() 801 ? isolate()->factory()->the_hole_value()
801 : isolate()->factory()->undefined_value(), 802 : isolate()->factory()->undefined_value(),
802 zone()); 803 zone());
803 break; 804 break;
804 805
805 case Variable::PARAMETER: 806 case Variable::PARAMETER:
806 case Variable::LOCAL: 807 case Variable::LOCAL:
807 if (hole_init) { 808 if (hole_init) {
808 Comment cmnt(masm_, "[ VariableDeclaration"); 809 Comment cmnt(masm_, "[ VariableDeclaration");
809 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 810 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
810 __ str(ip, StackOperand(variable)); 811 __ Str(x10, StackOperand(variable));
811 } 812 }
812 break; 813 break;
813 814
814 case Variable::CONTEXT: 815 case Variable::CONTEXT:
815 if (hole_init) { 816 if (hole_init) {
816 Comment cmnt(masm_, "[ VariableDeclaration"); 817 Comment cmnt(masm_, "[ VariableDeclaration");
817 EmitDebugCheckDeclarationContext(variable); 818 EmitDebugCheckDeclarationContext(variable);
818 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 819 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
819 __ str(ip, ContextOperand(cp, variable->index())); 820 __ Str(x10, ContextMemOperand(cp, variable->index()));
820 // No write barrier since the_hole_value is in old space. 821 // No write barrier since the_hole_value is in old space.
821 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 822 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
822 } 823 }
823 break; 824 break;
824 825
825 case Variable::LOOKUP: { 826 case Variable::LOOKUP: {
826 Comment cmnt(masm_, "[ VariableDeclaration"); 827 Comment cmnt(masm_, "[ VariableDeclaration");
827 __ mov(r2, Operand(variable->name())); 828 __ Mov(x2, Operand(variable->name()));
828 // Declaration nodes are always introduced in one of four modes. 829 // Declaration nodes are always introduced in one of four modes.
829 ASSERT(IsDeclaredVariableMode(mode)); 830 ASSERT(IsDeclaredVariableMode(mode));
830 PropertyAttributes attr = 831 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
831 IsImmutableVariableMode(mode) ? READ_ONLY : NONE; 832 : NONE;
832 __ mov(r1, Operand(Smi::FromInt(attr))); 833 __ Mov(x1, Operand(Smi::FromInt(attr)));
833 // Push initial value, if any. 834 // Push initial value, if any.
834 // Note: For variables we must not push an initial value (such as 835 // Note: For variables we must not push an initial value (such as
835 // 'undefined') because we may have a (legal) redeclaration and we 836 // 'undefined') because we may have a (legal) redeclaration and we
836 // must not destroy the current value. 837 // must not destroy the current value.
837 if (hole_init) { 838 if (hole_init) {
838 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); 839 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
839 __ Push(cp, r2, r1, r0); 840 __ Push(cp, x2, x1, x0);
840 } else { 841 } else {
841 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value. 842 // Pushing 0 (xzr) indicates no initial value.
842 __ Push(cp, r2, r1, r0); 843 __ Push(cp, x2, x1, xzr);
843 } 844 }
844 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 845 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
845 break; 846 break;
846 } 847 }
847 } 848 }
848 } 849 }
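Note on hole_init above: let and const bindings start out holding the the_hole sentinel so that a read before initialization is detectable. Restated as a standalone sketch (the VariableMode enumerators are assumed to match V8's):

enum VariableMode { VAR, LET, CONST, CONST_HARMONY };  // assumed subset

bool NeedsHoleInit(VariableMode mode) {
  // Only let and (harmony) const must be observably uninitialized
  // until their declaration statement executes.
  return (mode == CONST) || (mode == CONST_HARMONY) || (mode == LET);
}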
849 850
850 851
851 void FullCodeGenerator::VisitFunctionDeclaration( 852 void FullCodeGenerator::VisitFunctionDeclaration(
852 FunctionDeclaration* declaration) { 853 FunctionDeclaration* declaration) {
853 VariableProxy* proxy = declaration->proxy(); 854 VariableProxy* proxy = declaration->proxy();
854 Variable* variable = proxy->var(); 855 Variable* variable = proxy->var();
855 switch (variable->location()) { 856 switch (variable->location()) {
856 case Variable::UNALLOCATED: { 857 case Variable::UNALLOCATED: {
857 globals_->Add(variable->name(), zone()); 858 globals_->Add(variable->name(), zone());
858 Handle<SharedFunctionInfo> function = 859 Handle<SharedFunctionInfo> function =
859 Compiler::BuildFunctionInfo(declaration->fun(), script()); 860 Compiler::BuildFunctionInfo(declaration->fun(), script());
860 // Check for stack-overflow exception. 861 // Check for stack overflow exception.
861 if (function.is_null()) return SetStackOverflow(); 862 if (function.is_null()) return SetStackOverflow();
862 globals_->Add(function, zone()); 863 globals_->Add(function, zone());
863 break; 864 break;
864 } 865 }
865 866
866 case Variable::PARAMETER: 867 case Variable::PARAMETER:
867 case Variable::LOCAL: { 868 case Variable::LOCAL: {
868 Comment cmnt(masm_, "[ FunctionDeclaration"); 869 Comment cmnt(masm_, "[ Function Declaration");
869 VisitForAccumulatorValue(declaration->fun()); 870 VisitForAccumulatorValue(declaration->fun());
870 __ str(result_register(), StackOperand(variable)); 871 __ Str(result_register(), StackOperand(variable));
871 break; 872 break;
872 } 873 }
873 874
874 case Variable::CONTEXT: { 875 case Variable::CONTEXT: {
875 Comment cmnt(masm_, "[ FunctionDeclaration"); 876 Comment cmnt(masm_, "[ Function Declaration");
876 EmitDebugCheckDeclarationContext(variable); 877 EmitDebugCheckDeclarationContext(variable);
877 VisitForAccumulatorValue(declaration->fun()); 878 VisitForAccumulatorValue(declaration->fun());
878 __ str(result_register(), ContextOperand(cp, variable->index())); 879 __ Str(result_register(), ContextMemOperand(cp, variable->index()));
879 int offset = Context::SlotOffset(variable->index()); 880 int offset = Context::SlotOffset(variable->index());
880 // We know that we have written a function, which is not a smi. 881 // We know that we have written a function, which is not a smi.
881 __ RecordWriteContextSlot(cp, 882 __ RecordWriteContextSlot(cp,
882 offset, 883 offset,
883 result_register(), 884 result_register(),
884 r2, 885 x2,
885 kLRHasBeenSaved, 886 kLRHasBeenSaved,
886 kDontSaveFPRegs, 887 kDontSaveFPRegs,
887 EMIT_REMEMBERED_SET, 888 EMIT_REMEMBERED_SET,
888 OMIT_SMI_CHECK); 889 OMIT_SMI_CHECK);
889 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 890 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
890 break; 891 break;
891 } 892 }
892 893
893 case Variable::LOOKUP: { 894 case Variable::LOOKUP: {
894 Comment cmnt(masm_, "[ FunctionDeclaration"); 895 Comment cmnt(masm_, "[ Function Declaration");
895 __ mov(r2, Operand(variable->name())); 896 __ Mov(x2, Operand(variable->name()));
896 __ mov(r1, Operand(Smi::FromInt(NONE))); 897 __ Mov(x1, Operand(Smi::FromInt(NONE)));
897 __ Push(cp, r2, r1); 898 __ Push(cp, x2, x1);
898 // Push initial value for function declaration. 899 // Push initial value for function declaration.
899 VisitForStackValue(declaration->fun()); 900 VisitForStackValue(declaration->fun());
900 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 901 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
901 break; 902 break;
902 } 903 }
903 } 904 }
904 } 905 }
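Note on OMIT_SMI_CHECK above: the write barrier exists to record heap-pointer stores, and smis are filtered out first; since a closure is always a heap object, the filter can be skipped. A rough model, assuming the usual tagging scheme (smi tag 0, heap objects tagged with 1 in the low bit):

#include <cstdint>

bool MightNeedWriteBarrier(intptr_t value) {
  const intptr_t kSmiTagMask = 1;  // assumption: heap objects have low bit set
  // Smis carry no heap pointer, so storing one never needs a barrier.
  // The function stored above always passes this test, hence OMIT_SMI_CHECK.
  return (value & kSmiTagMask) != 0;
}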
905 906
906 907
907 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 908 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
908 Variable* variable = declaration->proxy()->var(); 909 Variable* variable = declaration->proxy()->var();
909 ASSERT(variable->location() == Variable::CONTEXT); 910 ASSERT(variable->location() == Variable::CONTEXT);
910 ASSERT(variable->interface()->IsFrozen()); 911 ASSERT(variable->interface()->IsFrozen());
911 912
912 Comment cmnt(masm_, "[ ModuleDeclaration"); 913 Comment cmnt(masm_, "[ ModuleDeclaration");
913 EmitDebugCheckDeclarationContext(variable); 914 EmitDebugCheckDeclarationContext(variable);
914 915
915 // Load instance object. 916 // Load instance object.
916 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope())); 917 __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
917 __ ldr(r1, ContextOperand(r1, variable->interface()->Index())); 918 __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
918 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX)); 919 __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));
919 920
920 // Assign it. 921 // Assign it.
921 __ str(r1, ContextOperand(cp, variable->index())); 922 __ Str(x1, ContextMemOperand(cp, variable->index()));
922 // We know that we have written a module, which is not a smi. 923 // We know that we have written a module, which is not a smi.
923 __ RecordWriteContextSlot(cp, 924 __ RecordWriteContextSlot(cp,
924 Context::SlotOffset(variable->index()), 925 Context::SlotOffset(variable->index()),
925 r1, 926 x1,
926 r3, 927 x3,
927 kLRHasBeenSaved, 928 kLRHasBeenSaved,
928 kDontSaveFPRegs, 929 kDontSaveFPRegs,
929 EMIT_REMEMBERED_SET, 930 EMIT_REMEMBERED_SET,
930 OMIT_SMI_CHECK); 931 OMIT_SMI_CHECK);
931 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); 932 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
932 933
933 // Traverse into body. 934 // Traverse into body.
934 Visit(declaration->module()); 935 Visit(declaration->module());
935 } 936 }
936 937
937 938
938 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) { 939 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
939 VariableProxy* proxy = declaration->proxy(); 940 VariableProxy* proxy = declaration->proxy();
940 Variable* variable = proxy->var(); 941 Variable* variable = proxy->var();
941 switch (variable->location()) { 942 switch (variable->location()) {
942 case Variable::UNALLOCATED: 943 case Variable::UNALLOCATED:
943 // TODO(rossberg) 944 // TODO(rossberg)
(...skipping 14 matching lines...)
958 } 959 }
959 960
960 961
961 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { 962 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
962 // TODO(rossberg) 963 // TODO(rossberg)
963 } 964 }
964 965
965 966
966 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 967 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
967 // Call the runtime to declare the globals. 968 // Call the runtime to declare the globals.
968 // The context is the first argument. 969 __ Mov(x11, Operand(pairs));
969 __ mov(r1, Operand(pairs)); 970 Register flags = xzr;
970 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); 971 if (Smi::FromInt(DeclareGlobalsFlags())) {
971 __ Push(cp, r1, r0); 972 flags = x10;
973 __ Mov(flags, Operand(Smi::FromInt(DeclareGlobalsFlags())));
974 }
975 __ Push(cp, x11, flags);
972 __ CallRuntime(Runtime::kDeclareGlobals, 3); 976 __ CallRuntime(Runtime::kDeclareGlobals, 3);
973 // Return value is ignored. 977 // Return value is ignored.
974 } 978 }
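Note on the xzr trick above: when DeclareGlobalsFlags() is zero, the code pushes the zero register instead of materializing a smi. That only works because Smi::FromInt(0) is the all-zero bit pattern. Minimal model (the 32-bit payload shift is an assumption about the 64-bit smi layout):

#include <cassert>
#include <cstdint>

intptr_t SmiFromInt(int value) {
  // assumed: payload in the upper word, tag 0 in the low bits
  return static_cast<intptr_t>(value) << 32;
}

int main() {
  assert(SmiFromInt(0) == 0);  // so xzr already holds Smi::FromInt(0)
  return 0;
}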
975 979
976 980
977 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { 981 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
978 // Call the runtime to declare the modules. 982 // Call the runtime to declare the modules.
979 __ Push(descriptions); 983 __ Push(descriptions);
980 __ CallRuntime(Runtime::kDeclareModules, 1); 984 __ CallRuntime(Runtime::kDeclareModules, 1);
981 // Return value is ignored. 985 // Return value is ignored.
982 } 986 }
983 987
984 988
985 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 989 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
990 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
986 Comment cmnt(masm_, "[ SwitchStatement"); 991 Comment cmnt(masm_, "[ SwitchStatement");
987 Breakable nested_statement(this, stmt); 992 Breakable nested_statement(this, stmt);
988 SetStatementPosition(stmt); 993 SetStatementPosition(stmt);
989 994
990 // Keep the switch value on the stack until a case matches. 995 // Keep the switch value on the stack until a case matches.
991 VisitForStackValue(stmt->tag()); 996 VisitForStackValue(stmt->tag());
992 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 997 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
993 998
994 ZoneList<CaseClause*>* clauses = stmt->cases(); 999 ZoneList<CaseClause*>* clauses = stmt->cases();
995 CaseClause* default_clause = NULL; // Can occur anywhere in the list. 1000 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
996 1001
997 Label next_test; // Recycled for each test. 1002 Label next_test; // Recycled for each test.
998 // Compile all the tests with branches to their bodies. 1003 // Compile all the tests with branches to their bodies.
999 for (int i = 0; i < clauses->length(); i++) { 1004 for (int i = 0; i < clauses->length(); i++) {
1000 CaseClause* clause = clauses->at(i); 1005 CaseClause* clause = clauses->at(i);
1001 clause->body_target()->Unuse(); 1006 clause->body_target()->Unuse();
1002 1007
1003 // The default is not a test, but remember it as final fall through. 1008 // The default is not a test, but remember it as final fall through.
1004 if (clause->is_default()) { 1009 if (clause->is_default()) {
1005 default_clause = clause; 1010 default_clause = clause;
1006 continue; 1011 continue;
1007 } 1012 }
1008 1013
1009 Comment cmnt(masm_, "[ Case comparison"); 1014 Comment cmnt(masm_, "[ Case comparison");
1010 __ bind(&next_test); 1015 __ Bind(&next_test);
1011 next_test.Unuse(); 1016 next_test.Unuse();
1012 1017
1013 // Compile the label expression. 1018 // Compile the label expression.
1014 VisitForAccumulatorValue(clause->label()); 1019 VisitForAccumulatorValue(clause->label());
1015 1020
1016 // Perform the comparison as if via '==='. 1021 // Perform the comparison as if via '==='.
1017 __ ldr(r1, MemOperand(sp, 0)); // Switch value. 1022 __ Peek(x1, 0); // Switch value.
1018 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 1023
1019 JumpPatchSite patch_site(masm_); 1024 JumpPatchSite patch_site(masm_);
1020 if (inline_smi_code) { 1025 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
1021 Label slow_case; 1026 Label slow_case;
1022 __ orr(r2, r1, r0); 1027 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
1023 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 1028 __ Cmp(x1, x0);
1024 1029 __ B(ne, &next_test);
1025 __ cmp(r1, r0);
1026 __ b(ne, &next_test);
1027 __ Drop(1); // Switch value is no longer needed. 1030 __ Drop(1); // Switch value is no longer needed.
1028 __ b(clause->body_target()); 1031 __ B(clause->body_target());
1029 __ bind(&slow_case); 1032 __ Bind(&slow_case);
1030 } 1033 }
1031 1034
1032 // Record position before stub call for type feedback. 1035 // Record position before stub call for type feedback.
1033 SetSourcePosition(clause->position()); 1036 SetSourcePosition(clause->position());
1034 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT); 1037 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1035 CallIC(ic, clause->CompareId()); 1038 CallIC(ic, clause->CompareId());
1036 patch_site.EmitPatchInfo(); 1039 patch_site.EmitPatchInfo();
1037 1040
1038 Label skip; 1041 Label skip;
1039 __ b(&skip); 1042 __ B(&skip);
1040 PrepareForBailout(clause, TOS_REG); 1043 PrepareForBailout(clause, TOS_REG);
1041 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1044 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1042 __ cmp(r0, ip);
1043 __ b(ne, &next_test);
1044 __ Drop(1); 1045 __ Drop(1);
1045 __ jmp(clause->body_target()); 1046 __ B(clause->body_target());
1046 __ bind(&skip); 1047 __ Bind(&skip);
1047 1048
1048 __ cmp(r0, Operand::Zero()); 1049 __ Cbnz(x0, &next_test);
1049 __ b(ne, &next_test);
1050 __ Drop(1); // Switch value is no longer needed. 1050 __ Drop(1); // Switch value is no longer needed.
1051 __ b(clause->body_target()); 1051 __ B(clause->body_target());
1052 } 1052 }
1053 1053
1054 // Discard the test value and jump to the default if present, otherwise to 1054 // Discard the test value and jump to the default if present, otherwise to
1055 // the end of the statement. 1055 // the end of the statement.
1056 __ bind(&next_test); 1056 __ Bind(&next_test);
1057 __ Drop(1); // Switch value is no longer needed. 1057 __ Drop(1); // Switch value is no longer needed.
1058 if (default_clause == NULL) { 1058 if (default_clause == NULL) {
1059 __ b(nested_statement.break_label()); 1059 __ B(nested_statement.break_label());
1060 } else { 1060 } else {
1061 __ b(default_clause->body_target()); 1061 __ B(default_clause->body_target());
1062 } 1062 }
1063 1063
1064 // Compile all the case bodies. 1064 // Compile all the case bodies.
1065 for (int i = 0; i < clauses->length(); i++) { 1065 for (int i = 0; i < clauses->length(); i++) {
1066 Comment cmnt(masm_, "[ Case body"); 1066 Comment cmnt(masm_, "[ Case body");
1067 CaseClause* clause = clauses->at(i); 1067 CaseClause* clause = clauses->at(i);
1068 __ bind(clause->body_target()); 1068 __ Bind(clause->body_target());
1069 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); 1069 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1070 VisitStatements(clause->statements()); 1070 VisitStatements(clause->statements());
1071 } 1071 }
1072 1072
1073 __ bind(nested_statement.break_label()); 1073 __ Bind(nested_statement.break_label());
1074 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1074 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1075 } 1075 }
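Note on the inline smi case above: strict equality on two smis reduces to a plain register compare, so the CompareIC is only reached when either operand is not a smi. A sketch of the guard (tag 0 in the low bit is an assumption):

#include <cstdint>

bool BothSmis(intptr_t a, intptr_t b) {
  // OR-ing the operands and testing the tag bit checks both at once,
  // which is what EmitJumpIfEitherNotSmi amounts to.
  return ((a | b) & 1) == 0;
}

bool StrictEqualsFastPath(intptr_t a, intptr_t b, bool* matched) {
  if (!BothSmis(a, b)) return false;  // fall through to the CompareIC
  *matched = (a == b);                // Cmp(x1, x0) on the fast path
  return true;
}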
1076 1076
1077 1077
1078 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 1078 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1079 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1079 Comment cmnt(masm_, "[ ForInStatement"); 1080 Comment cmnt(masm_, "[ ForInStatement");
1080 int slot = stmt->ForInFeedbackSlot(); 1081 int slot = stmt->ForInFeedbackSlot();
1082 // TODO(all): This visitor probably needs better comments and a revisit.
1081 SetStatementPosition(stmt); 1083 SetStatementPosition(stmt);
1082 1084
1083 Label loop, exit; 1085 Label loop, exit;
1084 ForIn loop_statement(this, stmt); 1086 ForIn loop_statement(this, stmt);
1085 increment_loop_depth(); 1087 increment_loop_depth();
1086 1088
1087 // Get the object to enumerate over. If the object is null or undefined, skip 1089 // Get the object to enumerate over. If the object is null or undefined, skip
1088 // over the loop. See ECMA-262 version 5, section 12.6.4. 1090 // over the loop. See ECMA-262 version 5, section 12.6.4.
1089 VisitForAccumulatorValue(stmt->enumerable()); 1091 VisitForAccumulatorValue(stmt->enumerable());
1090 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1092 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1091 __ cmp(r0, ip); 1093 Register null_value = x15;
1092 __ b(eq, &exit);
1093 Register null_value = r5;
1094 __ LoadRoot(null_value, Heap::kNullValueRootIndex); 1094 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1095 __ cmp(r0, null_value); 1095 __ Cmp(x0, null_value);
1096 __ b(eq, &exit); 1096 __ B(eq, &exit);
1097 1097
1098 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); 1098 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1099 1099
1100 // Convert the object to a JS object. 1100 // Convert the object to a JS object.
1101 Label convert, done_convert; 1101 Label convert, done_convert;
1102 __ JumpIfSmi(r0, &convert); 1102 __ JumpIfSmi(x0, &convert);
1103 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1103 __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
1104 __ b(ge, &done_convert); 1104 __ Bind(&convert);
1105 __ bind(&convert); 1105 __ Push(x0);
1106 __ push(r0);
1107 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1106 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1108 __ bind(&done_convert); 1107 __ Bind(&done_convert);
1109 __ push(r0); 1108 __ Push(x0);
1110 1109
1111 // Check for proxies. 1110 // Check for proxies.
1112 Label call_runtime; 1111 Label call_runtime;
1113 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1112 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1114 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE); 1113 __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
1115 __ b(le, &call_runtime);
1116 1114
1117 // Check cache validity in generated code. This is a fast case for 1115 // Check cache validity in generated code. This is a fast case for
1118 // the JSObject::IsSimpleEnum cache validity checks. If we cannot 1116 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1119 // guarantee cache validity, call the runtime system to check cache 1117 // guarantee cache validity, call the runtime system to check cache
1120 // validity or get the property names in a fixed array. 1118 // validity or get the property names in a fixed array.
1121 __ CheckEnumCache(null_value, &call_runtime); 1119 __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1122 1120
1123 // The enum cache is valid. Load the map of the object being 1121 // The enum cache is valid. Load the map of the object being
1124 // iterated over and use the cache for the iteration. 1122 // iterated over and use the cache for the iteration.
1125 Label use_cache; 1123 Label use_cache;
1126 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 1124 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1127 __ b(&use_cache); 1125 __ B(&use_cache);
1128 1126
1129 // Get the set of properties to enumerate. 1127 // Get the set of properties to enumerate.
1130 __ bind(&call_runtime); 1128 __ Bind(&call_runtime);
1131 __ push(r0); // Duplicate the enumerable object on the stack. 1129 __ Push(x0); // Duplicate the enumerable object on the stack.
1132 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); 1130 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1133 1131
1134 // If we got a map from the runtime call, we can do a fast 1132 // If we got a map from the runtime call, we can do a fast
1135 // modification check. Otherwise, we got a fixed array, and we have 1133 // modification check. Otherwise, we got a fixed array, and we have
1136 // to do a slow check. 1134 // to do a slow check.
1137 Label fixed_array; 1135 Label fixed_array, no_descriptors;
1138 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 1136 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1139 __ LoadRoot(ip, Heap::kMetaMapRootIndex); 1137 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1140 __ cmp(r2, ip);
1141 __ b(ne, &fixed_array);
1142 1138
1143 // We got a map in register r0. Get the enumeration cache from it. 1139 // We got a map in register x0. Get the enumeration cache from it.
1144 Label no_descriptors; 1140 __ Bind(&use_cache);
1145 __ bind(&use_cache);
1146 1141
1147 __ EnumLength(r1, r0); 1142 __ EnumLengthUntagged(x1, x0);
1148 __ cmp(r1, Operand(Smi::FromInt(0))); 1143 __ Cbz(x1, &no_descriptors);
1149 __ b(eq, &no_descriptors);
1150 1144
1151 __ LoadInstanceDescriptors(r0, r2); 1145 __ LoadInstanceDescriptors(x0, x2);
1152 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset)); 1146 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1153 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset)); 1147 __ Ldr(x2,
1148 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1154 1149
1155 // Set up the four remaining stack slots. 1150 // Set up the four remaining stack slots.
1156 __ push(r0); // Map. 1151 __ Push(x0); // Map.
1157 __ mov(r0, Operand(Smi::FromInt(0))); 1152 __ Mov(x0, Operand(Smi::FromInt(0)));
1158 // Push enumeration cache, enumeration cache length (as smi) and zero. 1153 // Push enumeration cache, enumeration cache length (as smi) and zero.
1159 __ Push(r2, r1, r0); 1154 __ SmiTag(x1);
1160 __ jmp(&loop); 1155 __ Push(x2, x1, x0);
1156 __ B(&loop);
1161 1157
1162 __ bind(&no_descriptors); 1158 __ Bind(&no_descriptors);
1163 __ Drop(1); 1159 __ Drop(1);
1164 __ jmp(&exit); 1160 __ B(&exit);
1165 1161
1166 // We got a fixed array in register r0. Iterate through that. 1162 // We got a fixed array in register x0. Iterate through that.
1167 Label non_proxy; 1163 __ Bind(&fixed_array);
1168 __ bind(&fixed_array);
1169 1164
1170 Handle<Object> feedback = Handle<Object>( 1165 Handle<Object> feedback = Handle<Object>(
1171 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), 1166 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
1172 isolate()); 1167 isolate());
1173 StoreFeedbackVectorSlot(slot, feedback); 1168 StoreFeedbackVectorSlot(slot, feedback);
1174 __ Move(r1, FeedbackVector()); 1169 __ LoadObject(x1, FeedbackVector());
1175 __ mov(r2, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker))); 1170 __ Mov(x10, Operand(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));
1176 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot))); 1171 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot)));
1177 1172
1178 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check 1173 __ Mov(x1, Operand(Smi::FromInt(1))); // Smi indicates slow check.
1179 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object 1174 __ Peek(x10, 0); // Get enumerated object.
1180 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1175 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1181 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); 1176 // TODO(all): A similar check was done already. Can we avoid it here?
1182 __ b(gt, &non_proxy); 1177 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1183 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy 1178 ASSERT(Smi::FromInt(0) == 0);
1184 __ bind(&non_proxy); 1179 __ CzeroX(x1, le); // Zero indicates proxy.
1185 __ Push(r1, r0); // Smi and array 1180 __ Push(x1, x0); // Smi and array
1186 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset)); 1181 __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset));
1187 __ mov(r0, Operand(Smi::FromInt(0))); 1182 __ Push(x1, xzr); // Fixed array length (as smi) and initial index.
1188 __ Push(r1, r0); // Fixed array length (as smi) and initial index.
1189 1183
1190 // Generate code for doing the condition check. 1184 // Generate code for doing the condition check.
1191 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1185 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1192 __ bind(&loop); 1186 __ Bind(&loop);
1193 // Load the current count to r0, load the length to r1. 1187 // Load the current count to x0, load the length to x1.
1194 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize)); 1188 __ PeekPair(x0, x1, 0);
1195 __ cmp(r0, r1); // Compare to the array length. 1189 __ Cmp(x0, x1); // Compare to the array length.
1196 __ b(hs, loop_statement.break_label()); 1190 __ B(hs, loop_statement.break_label());
1197 1191
1198 // Get the current entry of the array into register r3. 1192 // Get the current entry of the array into register x3.
1199 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); 1193 __ Peek(x10, 2 * kXRegSizeInBytes);
1200 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 1194 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1201 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0)); 1195 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1202 1196
1203 // Get the expected map from the stack or a smi in the 1197 // Get the expected map from the stack or a smi in the
1204 // permanent slow case into register r2. 1198 // permanent slow case into register x2.
1205 __ ldr(r2, MemOperand(sp, 3 * kPointerSize)); 1199 __ Peek(x2, 3 * kXRegSizeInBytes);
1206 1200
1207 // Check if the expected map still matches that of the enumerable. 1201 // Check if the expected map still matches that of the enumerable.
1208 // If not, we may have to filter the key. 1202 // If not, we may have to filter the key.
1209 Label update_each; 1203 Label update_each;
1210 __ ldr(r1, MemOperand(sp, 4 * kPointerSize)); 1204 __ Peek(x1, 4 * kXRegSizeInBytes);
1211 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset)); 1205 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1212 __ cmp(r4, Operand(r2)); 1206 __ Cmp(x11, x2);
1213 __ b(eq, &update_each); 1207 __ B(eq, &update_each);
1214 1208
1215 // For proxies, no filtering is done. 1209 // For proxies, no filtering is done.
1216 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. 1210 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1217 __ cmp(r2, Operand(Smi::FromInt(0))); 1211 STATIC_ASSERT(kSmiTag == 0);
1218 __ b(eq, &update_each); 1212 __ Cbz(x2, &update_each);
1219 1213
1220 // Convert the entry to a string or (smi) 0 if it isn't a property 1214 // Convert the entry to a string or (smi) 0 if it isn't a property
1221 // any more. If the property has been removed while iterating, we 1215 // any more. If the property has been removed while iterating, we
1222 // just skip it. 1216 // just skip it.
1223 __ push(r1); // Enumerable. 1217 __ Push(x1, x3);
1224 __ push(r3); // Current entry.
1225 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 1218 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1226 __ mov(r3, Operand(r0), SetCC); 1219 __ Mov(x3, x0);
1227 __ b(eq, loop_statement.continue_label()); 1220 __ Cbz(x0, loop_statement.continue_label());
1228 1221
1229 // Update the 'each' property or variable from the possibly filtered 1222 // Update the 'each' property or variable from the possibly filtered
1230 // entry in register r3. 1223 // entry in register x3.
1231 __ bind(&update_each); 1224 __ Bind(&update_each);
1232 __ mov(result_register(), r3); 1225 __ Mov(result_register(), x3);
1233 // Perform the assignment as if via '='. 1226 // Perform the assignment as if via '='.
1234 { EffectContext context(this); 1227 { EffectContext context(this);
1235 EmitAssignment(stmt->each()); 1228 EmitAssignment(stmt->each());
1236 } 1229 }
1237 1230
1238 // Generate code for the body of the loop. 1231 // Generate code for the body of the loop.
1239 Visit(stmt->body()); 1232 Visit(stmt->body());
1240 1233
1241 // Generate code for the going to the next element by incrementing 1234 // Generate code for going to the next element by incrementing
1242 // the index (smi) stored on top of the stack. 1235 // the index (smi) stored on top of the stack.
1243 __ bind(loop_statement.continue_label()); 1236 __ Bind(loop_statement.continue_label());
1244 __ pop(r0); 1237 // TODO(all): We could use a callee-saved register to avoid popping.
1245 __ add(r0, r0, Operand(Smi::FromInt(1))); 1238 __ Pop(x0);
1246 __ push(r0); 1239 __ Add(x0, x0, Operand(Smi::FromInt(1)));
1240 __ Push(x0);
1247 1241
1248 EmitBackEdgeBookkeeping(stmt, &loop); 1242 EmitBackEdgeBookkeeping(stmt, &loop);
1249 __ b(&loop); 1243 __ B(&loop);
1250 1244
1251 // Remove the pointers stored on the stack. 1245 // Remove the pointers stored on the stack.
1252 __ bind(loop_statement.break_label()); 1246 __ Bind(loop_statement.break_label());
1253 __ Drop(5); 1247 __ Drop(5);
1254 1248
1255 // Exit and decrement the loop depth. 1249 // Exit and decrement the loop depth.
1256 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1250 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1257 __ bind(&exit); 1251 __ Bind(&exit);
1258 decrement_loop_depth(); 1252 decrement_loop_depth();
1259 } 1253 }
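Note on the loop above: it relies on a fixed five-slot stack layout and on Operand::UntagSmiAndScale to turn the smi index directly into a byte offset. Both restated as a sketch (field names are descriptive, not V8 identifiers; 64-bit smi layout assumed):

#include <cstdint>

struct ForInSlots {        // top of stack first; one 8-byte x-register slot each
  intptr_t index;          // Peek(0): current index, as a smi
  intptr_t length;         // Peek(1): fixed-array length, as a smi
  intptr_t array;          // Peek(2): fixed array of enumerable keys
  intptr_t expected_map;   // Peek(3): receiver map, or smi 0 in the proxy case
  intptr_t enumerable;     // Peek(4): the object being iterated
};

intptr_t UntagSmiAndScale(intptr_t smi, int shift) {
  return (smi >> 32) << shift;  // untag (arithmetic shift), then scale to bytes
}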
1260 1254
1261 1255
1262 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) { 1256 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1263 Comment cmnt(masm_, "[ ForOfStatement"); 1257 Comment cmnt(masm_, "[ ForOfStatement");
1264 SetStatementPosition(stmt); 1258 SetStatementPosition(stmt);
1265 1259
1266 Iteration loop_statement(this, stmt); 1260 Iteration loop_statement(this, stmt);
1267 increment_loop_depth(); 1261 increment_loop_depth();
1268 1262
1269 // var iterator = iterable[@@iterator]() 1263 // var iterator = iterable[@@iterator]()
1270 VisitForAccumulatorValue(stmt->assign_iterator()); 1264 VisitForAccumulatorValue(stmt->assign_iterator());
1271 1265
1272 // As with for-in, skip the loop if the iterator is null or undefined. 1266 // As with for-in, skip the loop if the iterator is null or undefined.
1273 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); 1267 Register iterator = x0;
1274 __ b(eq, loop_statement.break_label()); 1268 __ JumpIfRoot(iterator, Heap::kUndefinedValueRootIndex,
1275 __ CompareRoot(r0, Heap::kNullValueRootIndex); 1269 loop_statement.break_label());
1276 __ b(eq, loop_statement.break_label()); 1270 __ JumpIfRoot(iterator, Heap::kNullValueRootIndex,
1271 loop_statement.break_label());
1277 1272
1278 // Convert the iterator to a JS object. 1273 // Convert the iterator to a JS object.
1279 Label convert, done_convert; 1274 Label convert, done_convert;
1280 __ JumpIfSmi(r0, &convert); 1275 __ JumpIfSmi(iterator, &convert);
1281 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1276 __ CompareObjectType(iterator, x1, x1, FIRST_SPEC_OBJECT_TYPE);
1282 __ b(ge, &done_convert); 1277 __ B(ge, &done_convert);
1283 __ bind(&convert); 1278 __ Bind(&convert);
1284 __ push(r0); 1279 __ Push(iterator);
1285 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1280 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1286 __ bind(&done_convert); 1281 __ Bind(&done_convert);
1287 __ push(r0); 1282 __ Push(iterator);
1288 1283
1289 // Loop entry. 1284 // Loop entry.
1290 __ bind(loop_statement.continue_label()); 1285 __ Bind(loop_statement.continue_label());
1291 1286
1292 // result = iterator.next() 1287 // result = iterator.next()
1293 VisitForEffect(stmt->next_result()); 1288 VisitForEffect(stmt->next_result());
1294 1289
1295 // if (result.done) break; 1290 // if (result.done) break;
1296 Label result_not_done; 1291 Label result_not_done;
1297 VisitForControl(stmt->result_done(), 1292 VisitForControl(stmt->result_done(),
1298 loop_statement.break_label(), 1293 loop_statement.break_label(),
1299 &result_not_done, 1294 &result_not_done,
1300 &result_not_done); 1295 &result_not_done);
1301 __ bind(&result_not_done); 1296 __ Bind(&result_not_done);
1302 1297
1303 // each = result.value 1298 // each = result.value
1304 VisitForEffect(stmt->assign_each()); 1299 VisitForEffect(stmt->assign_each());
1305 1300
1306 // Generate code for the body of the loop. 1301 // Generate code for the body of the loop.
1307 Visit(stmt->body()); 1302 Visit(stmt->body());
1308 1303
1309 // Check stack before looping. 1304 // Check stack before looping.
1310 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS); 1305 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1311 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label()); 1306 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1312 __ jmp(loop_statement.continue_label()); 1307 __ B(loop_statement.continue_label());
1313 1308
1314 // Exit and decrement the loop depth. 1309 // Exit and decrement the loop depth.
1315 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1310 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1316 __ bind(loop_statement.break_label()); 1311 __ Bind(loop_statement.break_label());
1317 decrement_loop_depth(); 1312 decrement_loop_depth();
1318 } 1313 }
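Note: the visitor above is a direct lowering of the iterator protocol; each sub-expression (assign_iterator, next_result, result_done, assign_each) is one protocol step. The control flow, modeled on a toy iterator:

struct IterResult { bool done; int value; };

struct CountingIterator {  // stand-in for a JS iterator object
  int i, limit;
  IterResult next() { IterResult r = { i >= limit, i }; ++i; return r; }
};

void ForOfShape(CountingIterator it) {
  for (;;) {
    IterResult result = it.next();  // result = iterator.next()
    if (result.done) break;         // if (result.done) break;
    int each = result.value;        // each = result.value
    (void)each;                     // loop body would run here
  }
}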
1319 1314
1320 1315
1321 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, 1316 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1322 bool pretenure) { 1317 bool pretenure) {
1323 // Use the fast case closure allocation code that allocates in new 1318 // Use the fast case closure allocation code that allocates in new space for
1324 // space for nested functions that don't need literals cloning. If 1319 // nested functions that don't need literals cloning. If we're running with
1325 // we're running with the --always-opt or the --prepare-always-opt 1320 // the --always-opt or the --prepare-always-opt flag, we need to use the
1326 // flag, we need to use the runtime function so that the new function 1321 // runtime function so that the new function we are creating here gets a
1327 // we are creating here gets a chance to have its code optimized and 1322 // chance to have its code optimized and doesn't just get a copy of the
1328 // doesn't just get a copy of the existing unoptimized code. 1323 // existing unoptimized code.
1329 if (!FLAG_always_opt && 1324 if (!FLAG_always_opt &&
1330 !FLAG_prepare_always_opt && 1325 !FLAG_prepare_always_opt &&
1331 !pretenure && 1326 !pretenure &&
1332 scope()->is_function_scope() && 1327 scope()->is_function_scope() &&
1333 info->num_literals() == 0) { 1328 info->num_literals() == 0) {
1334 FastNewClosureStub stub(info->language_mode(), info->is_generator()); 1329 FastNewClosureStub stub(info->language_mode(), info->is_generator());
1335 __ mov(r2, Operand(info)); 1330 __ Mov(x2, Operand(info));
1336 __ CallStub(&stub); 1331 __ CallStub(&stub);
1337 } else { 1332 } else {
1338 __ mov(r0, Operand(info)); 1333 __ Mov(x11, Operand(info));
1339 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex 1334 __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
1340 : Heap::kFalseValueRootIndex); 1335 : Heap::kFalseValueRootIndex);
1341 __ Push(cp, r0, r1); 1336 __ Push(cp, x11, x10);
1342 __ CallRuntime(Runtime::kNewClosure, 3); 1337 __ CallRuntime(Runtime::kNewClosure, 3);
1343 } 1338 }
1344 context()->Plug(r0); 1339 context()->Plug(x0);
1345 } 1340 }
1346 1341
1347 1342
1348 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1343 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1349 Comment cmnt(masm_, "[ VariableProxy"); 1344 Comment cmnt(masm_, "[ VariableProxy");
1350 EmitVariableLoad(expr); 1345 EmitVariableLoad(expr);
1351 } 1346 }
1352 1347
1353 1348
1354 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, 1349 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1355 TypeofState typeof_state, 1350 TypeofState typeof_state,
1356 Label* slow) { 1351 Label* slow) {
1357 Register current = cp; 1352 Register current = cp;
1358 Register next = r1; 1353 Register next = x10;
1359 Register temp = r2; 1354 Register temp = x11;
1360 1355
1361 Scope* s = scope(); 1356 Scope* s = scope();
1362 while (s != NULL) { 1357 while (s != NULL) {
1363 if (s->num_heap_slots() > 0) { 1358 if (s->num_heap_slots() > 0) {
1364 if (s->calls_non_strict_eval()) { 1359 if (s->calls_non_strict_eval()) {
1365 // Check that extension is NULL. 1360 // Check that extension is NULL.
1366 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); 1361 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1367 __ tst(temp, temp); 1362 __ Cbnz(temp, slow);
1368 __ b(ne, slow);
1369 } 1363 }
1370 // Load next context in chain. 1364 // Load next context in chain.
1371 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX)); 1365 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1372 // Walk the rest of the chain without clobbering cp. 1366 // Walk the rest of the chain without clobbering cp.
1373 current = next; 1367 current = next;
1374 } 1368 }
1375 // If no outer scope calls eval, we do not need to check more 1369 // If no outer scope calls eval, we do not need to check more
1376 // context extensions. 1370 // context extensions.
1377 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; 1371 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1378 s = s->outer_scope(); 1372 s = s->outer_scope();
1379 } 1373 }
1380 1374
1381 if (s->is_eval_scope()) { 1375 if (s->is_eval_scope()) {
1382 Label loop, fast; 1376 Label loop, fast;
1383 if (!current.is(next)) { 1377 __ Mov(next, current);
1384 __ Move(next, current); 1378
1385 } 1379 __ Bind(&loop);
1386 __ bind(&loop);
1387 // Terminate at native context. 1380 // Terminate at native context.
1388 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); 1381 __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1389 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); 1382 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1390 __ cmp(temp, ip);
1391 __ b(eq, &fast);
1392 // Check that extension is NULL. 1383 // Check that extension is NULL.
1393 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); 1384 __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1394 __ tst(temp, temp); 1385 __ Cbnz(temp, slow);
1395 __ b(ne, slow);
1396 // Load next context in chain. 1386 // Load next context in chain.
1397 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); 1387 __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1398 __ b(&loop); 1388 __ B(&loop);
1399 __ bind(&fast); 1389 __ Bind(&fast);
1400 } 1390 }
1401 1391
1402 __ ldr(r0, GlobalObjectOperand()); 1392 __ Ldr(x0, GlobalObjectMemOperand());
1403 __ mov(r2, Operand(var->name())); 1393 __ Mov(x2, Operand(var->name()));
1404 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) 1394 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL
1405 ? NOT_CONTEXTUAL 1395 : CONTEXTUAL;
1406 : CONTEXTUAL;
1407 CallLoadIC(mode); 1396 CallLoadIC(mode);
1408 } 1397 }
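Note: the scope walk above happens at compile time; only scopes that call non-strict eval get an extension check, and an eval scope at the end forces the runtime loop at &loop. A minimal model of that runtime loop (Context here is a stand-in struct, not V8's):

struct Context {
  Context* previous;
  void* extension;  // non-null if eval introduced bindings in this context
  bool is_native;   // terminates the chain
};

bool NoExtensionsUpToNativeContext(Context* c) {
  for (; !c->is_native; c = c->previous) {
    if (c->extension != nullptr) return false;  // the Cbnz(temp, slow) case
  }
  return true;  // the &fast case: safe to load the global directly
}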
1409 1398
1410 1399
1411 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1400 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1412 Label* slow) { 1401 Label* slow) {
1413 ASSERT(var->IsContextSlot()); 1402 ASSERT(var->IsContextSlot());
1414 Register context = cp; 1403 Register context = cp;
1415 Register next = r3; 1404 Register next = x10;
1416 Register temp = r4; 1405 Register temp = x11;
1417 1406
1418 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1407 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1419 if (s->num_heap_slots() > 0) { 1408 if (s->num_heap_slots() > 0) {
1420 if (s->calls_non_strict_eval()) { 1409 if (s->calls_non_strict_eval()) {
1421 // Check that extension is NULL. 1410 // Check that extension is NULL.
1422 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1411 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1423 __ tst(temp, temp); 1412 __ Cbnz(temp, slow);
1424 __ b(ne, slow);
1425 } 1413 }
1426 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); 1414 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1427 // Walk the rest of the chain without clobbering cp. 1415 // Walk the rest of the chain without clobbering cp.
1428 context = next; 1416 context = next;
1429 } 1417 }
1430 } 1418 }
1431 // Check that last extension is NULL. 1419 // Check that last extension is NULL.
1432 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1420 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1433 __ tst(temp, temp); 1421 __ Cbnz(temp, slow);
1434 __ b(ne, slow);
1435 1422
1436 // This function is used only for loads, not stores, so it's safe to 1423 // This function is used only for loads, not stores, so it's safe to
1437 // return a cp-based operand (the write barrier cannot be allowed to 1424 // return a cp-based operand (the write barrier cannot be allowed to
1438 // destroy the cp register). 1425 // destroy the cp register).
1439 return ContextOperand(context, var->index()); 1426 return ContextMemOperand(context, var->index());
1440 } 1427 }
1441 1428
1442 1429
1443 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, 1430 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1444 TypeofState typeof_state, 1431 TypeofState typeof_state,
1445 Label* slow, 1432 Label* slow,
1446 Label* done) { 1433 Label* done) {
1447 // Generate fast-case code for variables that might be shadowed by 1434 // Generate fast-case code for variables that might be shadowed by
1448 // eval-introduced variables. Eval is used a lot without 1435 // eval-introduced variables. Eval is used a lot without
1449 // introducing variables. In those cases, we do not want to 1436 // introducing variables. In those cases, we do not want to
1450 // perform a runtime call for all variables in the scope 1437 // perform a runtime call for all variables in the scope
1451 // containing the eval. 1438 // containing the eval.
1452 if (var->mode() == DYNAMIC_GLOBAL) { 1439 if (var->mode() == DYNAMIC_GLOBAL) {
1453 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); 1440 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1454 __ jmp(done); 1441 __ B(done);
1455 } else if (var->mode() == DYNAMIC_LOCAL) { 1442 } else if (var->mode() == DYNAMIC_LOCAL) {
1456 Variable* local = var->local_if_not_shadowed(); 1443 Variable* local = var->local_if_not_shadowed();
1457 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); 1444 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1458 if (local->mode() == LET || 1445 if (local->mode() == LET ||
1459 local->mode() == CONST || 1446 local->mode() == CONST ||
1460 local->mode() == CONST_HARMONY) { 1447 local->mode() == CONST_HARMONY) {
1461 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1448 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1462 if (local->mode() == CONST) { 1449 if (local->mode() == CONST) {
1463 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1450 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1464 } else { // LET || CONST_HARMONY 1451 } else { // LET || CONST_HARMONY
1465 __ b(ne, done); 1452 __ Mov(x0, Operand(var->name()));
1466 __ mov(r0, Operand(var->name())); 1453 __ Push(x0);
1467 __ push(r0);
1468 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1454 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1469 } 1455 }
1470 } 1456 }
1471 __ jmp(done); 1457 __ B(done);
1472 } 1458 }
1473 } 1459 }
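Note: the hole check above is the read barrier for let/const. A slot still holding the_hole means the binding is not yet initialized: harmony-mode let/const throws a ReferenceError, while legacy const silently reads as undefined. Sketch (the sentinel values are placeholders):

#include <cstdint>
#include <stdexcept>

const intptr_t kTheHole = -1;   // placeholder sentinel
const intptr_t kUndefined = 0;  // placeholder sentinel

intptr_t ReadBinding(intptr_t slot, bool harmony_mode) {
  if (slot != kTheHole) return slot;  // the JumpIfNotRoot(..., done) case
  if (harmony_mode) throw std::runtime_error("ReferenceError");
  return kUndefined;                  // legacy const: unholed to undefined
}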
1474 1460
1475 1461
1476 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1462 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1477 // Record position before possible IC call. 1463 // Record position before possible IC call.
1478 SetSourcePosition(proxy->position()); 1464 SetSourcePosition(proxy->position());
1479 Variable* var = proxy->var(); 1465 Variable* var = proxy->var();
1480 1466
1481 // Three cases: global variables, lookup variables, and all other types of 1467 // Three cases: global variables, lookup variables, and all other types of
1482 // variables. 1468 // variables.
1483 switch (var->location()) { 1469 switch (var->location()) {
1484 case Variable::UNALLOCATED: { 1470 case Variable::UNALLOCATED: {
1485 Comment cmnt(masm_, "Global variable"); 1471 Comment cmnt(masm_, "Global variable");
1486 // Use inline caching. Variable name is passed in r2 and the global 1472 // Use inline caching. Variable name is passed in x2 and the global
1487 // object (receiver) in r0. 1473 // object (receiver) in x0.
1488 __ ldr(r0, GlobalObjectOperand()); 1474 __ Ldr(x0, GlobalObjectMemOperand());
1489 __ mov(r2, Operand(var->name())); 1475 __ Mov(x2, Operand(var->name()));
1490 CallLoadIC(CONTEXTUAL); 1476 CallLoadIC(CONTEXTUAL);
1491 context()->Plug(r0); 1477 context()->Plug(x0);
1492 break; 1478 break;
1493 } 1479 }
1494 1480
1495 case Variable::PARAMETER: 1481 case Variable::PARAMETER:
1496 case Variable::LOCAL: 1482 case Variable::LOCAL:
1497 case Variable::CONTEXT: { 1483 case Variable::CONTEXT: {
1498 Comment cmnt(masm_, var->IsContextSlot() 1484 Comment cmnt(masm_, var->IsContextSlot()
1499 ? "Context variable" 1485 ? "Context variable"
1500 : "Stack variable"); 1486 : "Stack variable");
1501 if (var->binding_needs_init()) { 1487 if (var->binding_needs_init()) {
(...skipping 26 matching lines...)
1528 } else { 1514 } else {
1529 // Check that we always have valid source position. 1515 // Check that we always have valid source position.
1530 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); 1516 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1531 ASSERT(proxy->position() != RelocInfo::kNoPosition); 1517 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1532 skip_init_check = var->mode() != CONST && 1518 skip_init_check = var->mode() != CONST &&
1533 var->initializer_position() < proxy->position(); 1519 var->initializer_position() < proxy->position();
1534 } 1520 }
1535 1521
1536 if (!skip_init_check) { 1522 if (!skip_init_check) {
1537 // Let and const need a read barrier. 1523 // Let and const need a read barrier.
1538 GetVar(r0, var); 1524 GetVar(x0, var);
1539 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1525 Label done;
1526 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1540 if (var->mode() == LET || var->mode() == CONST_HARMONY) { 1527 if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1541 // Throw a reference error when using an uninitialized let/const 1528 // Throw a reference error when using an uninitialized let/const
1542 // binding in harmony mode. 1529 // binding in harmony mode.
1543 Label done; 1530 __ Mov(x0, Operand(var->name()));
1544 __ b(ne, &done); 1531 __ Push(x0);
1545 __ mov(r0, Operand(var->name()));
1546 __ push(r0);
1547 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1532 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1548 __ bind(&done); 1533 __ Bind(&done);
1549 } else { 1534 } else {
1550 // Uninitialized const bindings outside of harmony mode are unholed. 1535 // Uninitialized const bindings outside of harmony mode are unholed.
1551 ASSERT(var->mode() == CONST); 1536 ASSERT(var->mode() == CONST);
1552 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1537 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1538 __ Bind(&done);
1553 } 1539 }
1554 context()->Plug(r0); 1540 context()->Plug(x0);
1555 break; 1541 break;
1556 } 1542 }
1557 } 1543 }
1558 context()->Plug(var); 1544 context()->Plug(var);
1559 break; 1545 break;
1560 } 1546 }
1561 1547
1562 case Variable::LOOKUP: { 1548 case Variable::LOOKUP: {
1563 Label done, slow; 1549 Label done, slow;
1564 // Generate code for loading from variables potentially shadowed 1550 // Generate code for loading from variables potentially shadowed by
1565 // by eval-introduced variables. 1551 // eval-introduced variables.
1566 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); 1552 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1567 __ bind(&slow); 1553 __ Bind(&slow);
1568 Comment cmnt(masm_, "Lookup variable"); 1554 Comment cmnt(masm_, "Lookup variable");
1569 __ mov(r1, Operand(var->name())); 1555 __ Mov(x1, Operand(var->name()));
1570 __ Push(cp, r1); // Context and name. 1556 __ Push(cp, x1); // Context and name.
1571 __ CallRuntime(Runtime::kLoadContextSlot, 2); 1557 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1572 __ bind(&done); 1558 __ Bind(&done);
1573 context()->Plug(r0); 1559 context()->Plug(x0);
1560 break;
1574 } 1561 }
1575 } 1562 }
1576 } 1563 }
1577 1564
1578 1565
1579 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1566 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1580 Comment cmnt(masm_, "[ RegExpLiteral"); 1567 Comment cmnt(masm_, "[ RegExpLiteral");
1581 Label materialized; 1568 Label materialized;
1582 // Registers will be used as follows: 1569 // Registers will be used as follows:
1583 // r5 = materialized value (RegExp literal) 1570 // x5 = materialized value (RegExp literal)
1584 // r4 = JS function, literals array 1571 // x4 = JS function, literals array
1585 // r3 = literal index 1572 // x3 = literal index
1586 // r2 = RegExp pattern 1573 // x2 = RegExp pattern
1587 // r1 = RegExp flags 1574 // x1 = RegExp flags
1588 // r0 = RegExp literal clone 1575 // x0 = RegExp literal clone
1589 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1576 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1590 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); 1577 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
1591 int literal_offset = 1578 int literal_offset =
1592 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; 1579 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1593 __ ldr(r5, FieldMemOperand(r4, literal_offset)); 1580 __ Ldr(x5, FieldMemOperand(x4, literal_offset));
1594 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1581 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1595 __ cmp(r5, ip);
1596 __ b(ne, &materialized);
1597 1582
1598 // Create regexp literal using runtime function. 1583 // Create regexp literal using runtime function.
1599 // Result will be in r0. 1584 // Result will be in x0.
1600 __ mov(r3, Operand(Smi::FromInt(expr->literal_index()))); 1585 __ Mov(x3, Operand(Smi::FromInt(expr->literal_index())));
1601 __ mov(r2, Operand(expr->pattern())); 1586 __ Mov(x2, Operand(expr->pattern()));
1602 __ mov(r1, Operand(expr->flags())); 1587 __ Mov(x1, Operand(expr->flags()));
1603 __ Push(r4, r3, r2, r1); 1588 __ Push(x4, x3, x2, x1);
1604 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1589 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1605 __ mov(r5, r0); 1590 __ Mov(x5, x0);
1606 1591
1607 __ bind(&materialized); 1592 __ Bind(&materialized);
1608 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 1593 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1609 Label allocated, runtime_allocate; 1594 Label allocated, runtime_allocate;
1610 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); 1595 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1611 __ jmp(&allocated); 1596 __ B(&allocated);
1612 1597
1613 __ bind(&runtime_allocate); 1598 __ Bind(&runtime_allocate);
1614 __ mov(r0, Operand(Smi::FromInt(size))); 1599 __ Mov(x10, Operand(Smi::FromInt(size)));
1615 __ Push(r5, r0); 1600 __ Push(x5, x10);
1616 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 1601 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1617 __ pop(r5); 1602 __ Pop(x5);
1618 1603
1619 __ bind(&allocated); 1604 __ Bind(&allocated);
1620 // After this, registers are used as follows: 1605 // After this, registers are used as follows:
1621 // r0: Newly allocated regexp. 1606 // x0: Newly allocated regexp.
1622 // r5: Materialized regexp. 1607 // x5: Materialized regexp.
1623 // r2: temp. 1608 // x10, x11, x12: temps.
1624 __ CopyFields(r0, r5, d0, size / kPointerSize); 1609 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1625 context()->Plug(r0); 1610 context()->Plug(x0);
1626 } 1611 }
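Note: CopyFields above performs a shallow, word-by-word copy of the materialized regexp into the fresh allocation; the A64 version cycles through the listed temp registers where ARM used a scratch D register. Functionally it is just:

#include <cstdint>

void CopyFieldsModel(intptr_t* dst, const intptr_t* src, int field_count) {
  for (int i = 0; i < field_count; i++) {
    dst[i] = src[i];  // shallow copy; the new object is in new space,
  }                   // so no write barrier is required afterwards
}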
1627 1612
1628 1613
1629 void FullCodeGenerator::EmitAccessor(Expression* expression) { 1614 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1630 if (expression == NULL) { 1615 if (expression == NULL) {
1631 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1616 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1632 __ push(r1); 1617 __ Push(x10);
1633 } else { 1618 } else {
1634 VisitForStackValue(expression); 1619 VisitForStackValue(expression);
1635 } 1620 }
1636 } 1621 }
1637 1622
1638 1623
1639 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { 1624 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1640 Comment cmnt(masm_, "[ ObjectLiteral"); 1625 Comment cmnt(masm_, "[ ObjectLiteral");
1641 1626
1642 expr->BuildConstantProperties(isolate()); 1627 expr->BuildConstantProperties(isolate());
1643 Handle<FixedArray> constant_properties = expr->constant_properties(); 1628 Handle<FixedArray> constant_properties = expr->constant_properties();
1644 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1629 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1645 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1630 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1646 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1631 __ Mov(x2, Operand(Smi::FromInt(expr->literal_index())));
1647 __ mov(r1, Operand(constant_properties)); 1632 __ Mov(x1, Operand(constant_properties));
1648 int flags = expr->fast_elements() 1633 int flags = expr->fast_elements()
1649 ? ObjectLiteral::kFastElements 1634 ? ObjectLiteral::kFastElements
1650 : ObjectLiteral::kNoFlags; 1635 : ObjectLiteral::kNoFlags;
1651 flags |= expr->has_function() 1636 flags |= expr->has_function()
1652 ? ObjectLiteral::kHasFunction 1637 ? ObjectLiteral::kHasFunction
1653 : ObjectLiteral::kNoFlags; 1638 : ObjectLiteral::kNoFlags;
1654 __ mov(r0, Operand(Smi::FromInt(flags))); 1639 __ Mov(x0, Operand(Smi::FromInt(flags)));
1655 int properties_count = constant_properties->length() / 2; 1640 int properties_count = constant_properties->length() / 2;
1641 const int max_cloned_properties =
1642 FastCloneShallowObjectStub::kMaximumClonedProperties;
1656 if ((FLAG_track_double_fields && expr->may_store_doubles()) || 1643 if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
1657 expr->depth() > 1 || Serializer::enabled() || 1644 (expr->depth() > 1) || Serializer::enabled() ||
1658 flags != ObjectLiteral::kFastElements || 1645 (flags != ObjectLiteral::kFastElements) ||
1659 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1646 (properties_count > max_cloned_properties)) {
1660 __ Push(r3, r2, r1, r0); 1647 __ Push(x3, x2, x1, x0);
1661 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); 1648 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1662 } else { 1649 } else {
1663 FastCloneShallowObjectStub stub(properties_count); 1650 FastCloneShallowObjectStub stub(properties_count);
1664 __ CallStub(&stub); 1651 __ CallStub(&stub);
1665 } 1652 }
1666 1653
1667 // If result_saved is true the result is on top of the stack. If 1654 // If result_saved is true the result is on top of the stack. If
1668 // result_saved is false the result is in r0. 1655 // result_saved is false the result is in x0.
1669 bool result_saved = false; 1656 bool result_saved = false;
1670 1657
1671 // Mark all computed expressions that are bound to a key that 1658 // Mark all computed expressions that are bound to a key that
1672 // is shadowed by a later occurrence of the same key. For the 1659 // is shadowed by a later occurrence of the same key. For the
1673 // marked expressions, no store code is emitted. 1660 // marked expressions, no store code is emitted.
1674 expr->CalculateEmitStore(zone()); 1661 expr->CalculateEmitStore(zone());
1675 1662
1676 AccessorTable accessor_table(zone()); 1663 AccessorTable accessor_table(zone());
1677 for (int i = 0; i < expr->properties()->length(); i++) { 1664 for (int i = 0; i < expr->properties()->length(); i++) {
1678 ObjectLiteral::Property* property = expr->properties()->at(i); 1665 ObjectLiteral::Property* property = expr->properties()->at(i);
1679 if (property->IsCompileTimeValue()) continue; 1666 if (property->IsCompileTimeValue()) continue;
1680 1667
1681 Literal* key = property->key(); 1668 Literal* key = property->key();
1682 Expression* value = property->value(); 1669 Expression* value = property->value();
1683 if (!result_saved) { 1670 if (!result_saved) {
1684 __ push(r0); // Save result on stack 1671 __ Push(x0); // Save result on stack
1685 result_saved = true; 1672 result_saved = true;
1686 } 1673 }
1687 switch (property->kind()) { 1674 switch (property->kind()) {
1688 case ObjectLiteral::Property::CONSTANT: 1675 case ObjectLiteral::Property::CONSTANT:
1689 UNREACHABLE(); 1676 UNREACHABLE();
1690 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1677 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1691 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1678 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1692 // Fall through. 1679 // Fall through.
1693 case ObjectLiteral::Property::COMPUTED: 1680 case ObjectLiteral::Property::COMPUTED:
1694 if (key->value()->IsInternalizedString()) { 1681 if (key->value()->IsInternalizedString()) {
1695 if (property->emit_store()) { 1682 if (property->emit_store()) {
1696 VisitForAccumulatorValue(value); 1683 VisitForAccumulatorValue(value);
1697 __ mov(r2, Operand(key->value())); 1684 __ Mov(x2, Operand(key->value()));
1698 __ ldr(r1, MemOperand(sp)); 1685 __ Peek(x1, 0);
1699 CallStoreIC(key->LiteralFeedbackId()); 1686 CallStoreIC(key->LiteralFeedbackId());
1700 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1687 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1701 } else { 1688 } else {
1702 VisitForEffect(value); 1689 VisitForEffect(value);
1703 } 1690 }
1704 break; 1691 break;
1705 } 1692 }
1706 // Duplicate receiver on stack. 1693 // Duplicate receiver on stack.
1707 __ ldr(r0, MemOperand(sp)); 1694 __ Peek(x0, 0);
1708 __ push(r0); 1695 __ Push(x0);
1709 VisitForStackValue(key); 1696 VisitForStackValue(key);
1710 VisitForStackValue(value); 1697 VisitForStackValue(value);
1711 if (property->emit_store()) { 1698 if (property->emit_store()) {
1712 __ mov(r0, Operand(Smi::FromInt(NONE))); // PropertyAttributes 1699 __ Mov(x0, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1713 __ push(r0); 1700 __ Push(x0);
1714 __ CallRuntime(Runtime::kSetProperty, 4); 1701 __ CallRuntime(Runtime::kSetProperty, 4);
1715 } else { 1702 } else {
1716 __ Drop(3); 1703 __ Drop(3);
1717 } 1704 }
1718 break; 1705 break;
1719 case ObjectLiteral::Property::PROTOTYPE: 1706 case ObjectLiteral::Property::PROTOTYPE:
1720 // Duplicate receiver on stack. 1707 // Duplicate receiver on stack.
1721 __ ldr(r0, MemOperand(sp)); 1708 __ Peek(x0, 0);
1722 __ push(r0); 1709 // TODO(jbramley): This push shouldn't be necessary if we don't call the
1710 // runtime below. In that case, skip it.
1711 __ Push(x0);
1723 VisitForStackValue(value); 1712 VisitForStackValue(value);
1724 if (property->emit_store()) { 1713 if (property->emit_store()) {
1725 __ CallRuntime(Runtime::kSetPrototype, 2); 1714 __ CallRuntime(Runtime::kSetPrototype, 2);
1726 } else { 1715 } else {
1727 __ Drop(2); 1716 __ Drop(2);
1728 } 1717 }
1729 break; 1718 break;
1730
1731 case ObjectLiteral::Property::GETTER: 1719 case ObjectLiteral::Property::GETTER:
1732 accessor_table.lookup(key)->second->getter = value; 1720 accessor_table.lookup(key)->second->getter = value;
1733 break; 1721 break;
1734 case ObjectLiteral::Property::SETTER: 1722 case ObjectLiteral::Property::SETTER:
1735 accessor_table.lookup(key)->second->setter = value; 1723 accessor_table.lookup(key)->second->setter = value;
1736 break; 1724 break;
1737 } 1725 }
1738 } 1726 }
1739 1727
1740 // Emit code to define accessors, using only a single call to the runtime for 1728 // Emit code to define accessors, using only a single call to the runtime for
1741 // each pair of corresponding getters and setters. 1729 // each pair of corresponding getters and setters.
1742 for (AccessorTable::Iterator it = accessor_table.begin(); 1730 for (AccessorTable::Iterator it = accessor_table.begin();
1743 it != accessor_table.end(); 1731 it != accessor_table.end();
1744 ++it) { 1732 ++it) {
1745 __ ldr(r0, MemOperand(sp)); // Duplicate receiver. 1733 __ Peek(x10, 0); // Duplicate receiver.
1746 __ push(r0); 1734 __ Push(x10);
1747 VisitForStackValue(it->first); 1735 VisitForStackValue(it->first);
1748 EmitAccessor(it->second->getter); 1736 EmitAccessor(it->second->getter);
1749 EmitAccessor(it->second->setter); 1737 EmitAccessor(it->second->setter);
1750 __ mov(r0, Operand(Smi::FromInt(NONE))); 1738 __ Mov(x10, Operand(Smi::FromInt(NONE)));
1751 __ push(r0); 1739 __ Push(x10);
1752 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5); 1740 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1753 } 1741 }
1754 1742
1755 if (expr->has_function()) { 1743 if (expr->has_function()) {
1756 ASSERT(result_saved); 1744 ASSERT(result_saved);
1757 __ ldr(r0, MemOperand(sp)); 1745 __ Peek(x0, 0);
1758 __ push(r0); 1746 __ Push(x0);
1759 __ CallRuntime(Runtime::kToFastProperties, 1); 1747 __ CallRuntime(Runtime::kToFastProperties, 1);
1760 } 1748 }
1761 1749
1762 if (result_saved) { 1750 if (result_saved) {
1763 context()->PlugTOS(); 1751 context()->PlugTOS();
1764 } else { 1752 } else {
1765 context()->Plug(r0); 1753 context()->Plug(x0);
1766 } 1754 }
1767 } 1755 }
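
Note: the choice between FastCloneShallowObjectStub and the kCreateObjectLiteral
runtime call at the top of this function comes down to a single predicate. A
minimal restatement in plain C++ follows; the parameter names are paraphrases
of the flags in the listing, not V8 identifiers.

    // Returns true when the literal is too complex for the fast-clone stub.
    bool MustUseRuntimeForObjectLiteral(bool may_store_doubles, int depth,
                                        bool serializer_enabled,
                                        bool flags_are_fast_elements_only,
                                        int properties_count,
                                        int max_cloned_properties) {
      return may_store_doubles ||                 // doubles may be stored
             (depth > 1) ||                       // nested literals
             serializer_enabled ||                // building a snapshot
             !flags_are_fast_elements_only ||     // flags != kFastElements
             (properties_count > max_cloned_properties);
    }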
1768 1756
1769 1757
1770 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1758 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1771 Comment cmnt(masm_, "[ ArrayLiteral"); 1759 Comment cmnt(masm_, "[ ArrayLiteral");
1772 1760
1773 expr->BuildConstantElements(isolate()); 1761 expr->BuildConstantElements(isolate());
1774 int flags = expr->depth() == 1 1762 int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements
1775 ? ArrayLiteral::kShallowElements 1763 : ArrayLiteral::kNoFlags;
1776 : ArrayLiteral::kNoFlags;
1777 1764
1778 ZoneList<Expression*>* subexprs = expr->values(); 1765 ZoneList<Expression*>* subexprs = expr->values();
1779 int length = subexprs->length(); 1766 int length = subexprs->length();
1780 Handle<FixedArray> constant_elements = expr->constant_elements(); 1767 Handle<FixedArray> constant_elements = expr->constant_elements();
1781 ASSERT_EQ(2, constant_elements->length()); 1768 ASSERT_EQ(2, constant_elements->length());
1782 ElementsKind constant_elements_kind = 1769 ElementsKind constant_elements_kind =
1783 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1770 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1784 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); 1771 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1785 Handle<FixedArrayBase> constant_elements_values( 1772 Handle<FixedArrayBase> constant_elements_values(
1786 FixedArrayBase::cast(constant_elements->get(1))); 1773 FixedArrayBase::cast(constant_elements->get(1)));
1787 1774
1788 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; 1775 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1789 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { 1776 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1790 // If the only customer of allocation sites is transitioning, then 1777 // If the only customer of allocation sites is transitioning, then
1791 // we can turn it off if we don't have anywhere else to transition to. 1778 // we can turn it off if we don't have anywhere else to transition to.
1792 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; 1779 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1793 } 1780 }
1794 1781
1795 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1782 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1796 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1783 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1797 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1784 // TODO(jbramley): Can these Operand constructors be implicit?
1798 __ mov(r1, Operand(constant_elements)); 1785 __ Mov(x2, Operand(Smi::FromInt(expr->literal_index())));
1786 __ Mov(x1, Operand(constant_elements));
1799 if (has_fast_elements && constant_elements_values->map() == 1787 if (has_fast_elements && constant_elements_values->map() ==
1800 isolate()->heap()->fixed_cow_array_map()) { 1788 isolate()->heap()->fixed_cow_array_map()) {
1801 FastCloneShallowArrayStub stub( 1789 FastCloneShallowArrayStub stub(
1802 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, 1790 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1803 allocation_site_mode, 1791 allocation_site_mode,
1804 length); 1792 length);
1805 __ CallStub(&stub); 1793 __ CallStub(&stub);
1806 __ IncrementCounter( 1794 __ IncrementCounter(
1807 isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2); 1795 isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
1808 } else if (expr->depth() > 1 || Serializer::enabled() || 1796 } else if ((expr->depth() > 1) || Serializer::enabled() ||
1809 length > FastCloneShallowArrayStub::kMaximumClonedLength) { 1797 length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1810 __ mov(r0, Operand(Smi::FromInt(flags))); 1798 __ Mov(x0, Operand(Smi::FromInt(flags)));
1811 __ Push(r3, r2, r1, r0); 1799 __ Push(x3, x2, x1, x0);
1812 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); 1800 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1813 } else { 1801 } else {
1814 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || 1802 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1815 FLAG_smi_only_arrays); 1803 FLAG_smi_only_arrays);
1816 FastCloneShallowArrayStub::Mode mode = 1804 FastCloneShallowArrayStub::Mode mode =
1817 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; 1805 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1818 1806
1819 if (has_fast_elements) { 1807 if (has_fast_elements) {
1820 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; 1808 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
1821 } 1809 }
1822 1810
1823 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); 1811 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1824 __ CallStub(&stub); 1812 __ CallStub(&stub);
1825 } 1813 }
1826 1814
1827 bool result_saved = false; // Is the result saved to the stack? 1815 bool result_saved = false; // Is the result saved to the stack?
1828 1816
1829 // Emit code to evaluate all the non-constant subexpressions and to store 1817 // Emit code to evaluate all the non-constant subexpressions and to store
1830 // them into the newly cloned array. 1818 // them into the newly cloned array.
1831 for (int i = 0; i < length; i++) { 1819 for (int i = 0; i < length; i++) {
1832 Expression* subexpr = subexprs->at(i); 1820 Expression* subexpr = subexprs->at(i);
1833 // If the subexpression is a literal or a simple materialized literal it 1821 // If the subexpression is a literal or a simple materialized literal it
1834 // is already set in the cloned array. 1822 // is already set in the cloned array.
1835 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 1823 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1836 1824
1837 if (!result_saved) { 1825 if (!result_saved) {
1838 __ push(r0); 1826 __ Push(x0);
1839 __ Push(Smi::FromInt(expr->literal_index())); 1827 __ Push(Smi::FromInt(expr->literal_index()));
1840 result_saved = true; 1828 result_saved = true;
1841 } 1829 }
1842 VisitForAccumulatorValue(subexpr); 1830 VisitForAccumulatorValue(subexpr);
1843 1831
1844 if (IsFastObjectElementsKind(constant_elements_kind)) { 1832 if (IsFastObjectElementsKind(constant_elements_kind)) {
1845 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1833 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1846 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal. 1834 __ Peek(x6, kPointerSize); // Copy of array literal.
1847 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset)); 1835 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
1848 __ str(result_register(), FieldMemOperand(r1, offset)); 1836 __ Str(result_register(), FieldMemOperand(x1, offset));
1849 // Update the write barrier for the array store. 1837 // Update the write barrier for the array store.
1850 __ RecordWriteField(r1, offset, result_register(), r2, 1838 __ RecordWriteField(x1, offset, result_register(), x10,
1851 kLRHasBeenSaved, kDontSaveFPRegs, 1839 kLRHasBeenSaved, kDontSaveFPRegs,
1852 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); 1840 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1853 } else { 1841 } else {
1854 __ mov(r3, Operand(Smi::FromInt(i))); 1842 __ Mov(x3, Operand(Smi::FromInt(i)));
1855 StoreArrayLiteralElementStub stub; 1843 StoreArrayLiteralElementStub stub;
1856 __ CallStub(&stub); 1844 __ CallStub(&stub);
1857 } 1845 }
1858 1846
1859 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1847 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1860 } 1848 }
1861 1849
1862 if (result_saved) { 1850 if (result_saved) {
1863 __ pop(); // literal index 1851 __ Drop(1); // literal index
1864 context()->PlugTOS(); 1852 context()->PlugTOS();
1865 } else { 1853 } else {
1866 context()->Plug(r0); 1854 context()->Plug(x0);
1867 } 1855 }
1868 } 1856 }
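
Note: the RecordWriteField call in the element loop above maintains the
generational GC invariant: any object that receives a pointer to a new-space
object must be recorded so a minor GC can find the reference. A self-contained
sketch of that invariant; every name here is illustrative, not the V8 API.

    #include <cstdint>
    #include <unordered_set>

    struct WriteBarrierSketch {
      std::unordered_set<uintptr_t> new_space;  // stand-in for page checks
      std::unordered_set<uintptr_t> remembered_set;
      // Smis have a zero tag bit and are not heap pointers (INLINE_SMI_CHECK).
      static bool IsSmi(uintptr_t v) { return (v & 1) == 0; }
      bool InNewSpace(uintptr_t p) const { return new_space.count(p) != 0; }
      void RecordWrite(uintptr_t host, uintptr_t value) {
        if (IsSmi(value)) return;
        if (InNewSpace(value)) remembered_set.insert(host);  // EMIT_REMEMBERED_SET
      }
    };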
1869 1857
1870 1858
1871 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1859 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1872 Comment cmnt(masm_, "[ Assignment"); 1860 Comment cmnt(masm_, "[ Assignment");
1873 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' 1861 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1874 // on the left-hand side. 1862 // on the left-hand side.
1875 if (!expr->target()->IsValidLeftHandSide()) { 1863 if (!expr->target()->IsValidLeftHandSide()) {
1876 VisitForEffect(expr->target()); 1864 VisitForEffect(expr->target());
(...skipping 13 matching lines...)
1890 1878
1891 // Evaluate LHS expression. 1879 // Evaluate LHS expression.
1892 switch (assign_type) { 1880 switch (assign_type) {
1893 case VARIABLE: 1881 case VARIABLE:
1894 // Nothing to do here. 1882 // Nothing to do here.
1895 break; 1883 break;
1896 case NAMED_PROPERTY: 1884 case NAMED_PROPERTY:
1897 if (expr->is_compound()) { 1885 if (expr->is_compound()) {
1898 // We need the receiver both on the stack and in the accumulator. 1886 // We need the receiver both on the stack and in the accumulator.
1899 VisitForAccumulatorValue(property->obj()); 1887 VisitForAccumulatorValue(property->obj());
1900 __ push(result_register()); 1888 __ Push(result_register());
1901 } else { 1889 } else {
1902 VisitForStackValue(property->obj()); 1890 VisitForStackValue(property->obj());
1903 } 1891 }
1904 break; 1892 break;
1905 case KEYED_PROPERTY: 1893 case KEYED_PROPERTY:
1906 if (expr->is_compound()) { 1894 if (expr->is_compound()) {
1907 VisitForStackValue(property->obj()); 1895 VisitForStackValue(property->obj());
1908 VisitForAccumulatorValue(property->key()); 1896 VisitForAccumulatorValue(property->key());
1909 __ ldr(r1, MemOperand(sp, 0)); 1897 __ Peek(x1, 0);
1910 __ push(r0); 1898 __ Push(x0);
1911 } else { 1899 } else {
1912 VisitForStackValue(property->obj()); 1900 VisitForStackValue(property->obj());
1913 VisitForStackValue(property->key()); 1901 VisitForStackValue(property->key());
1914 } 1902 }
1915 break; 1903 break;
1916 } 1904 }
1917 1905
1918 // For compound assignments we need another deoptimization point after the 1906 // For compound assignments we need another deoptimization point after the
1919 // variable/property load. 1907 // variable/property load.
1920 if (expr->is_compound()) { 1908 if (expr->is_compound()) {
1921 { AccumulatorValueContext context(this); 1909 { AccumulatorValueContext context(this);
1922 switch (assign_type) { 1910 switch (assign_type) {
1923 case VARIABLE: 1911 case VARIABLE:
1924 EmitVariableLoad(expr->target()->AsVariableProxy()); 1912 EmitVariableLoad(expr->target()->AsVariableProxy());
1925 PrepareForBailout(expr->target(), TOS_REG); 1913 PrepareForBailout(expr->target(), TOS_REG);
1926 break; 1914 break;
1927 case NAMED_PROPERTY: 1915 case NAMED_PROPERTY:
1928 EmitNamedPropertyLoad(property); 1916 EmitNamedPropertyLoad(property);
1929 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1917 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1930 break; 1918 break;
1931 case KEYED_PROPERTY: 1919 case KEYED_PROPERTY:
1932 EmitKeyedPropertyLoad(property); 1920 EmitKeyedPropertyLoad(property);
1933 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1921 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1934 break; 1922 break;
1935 } 1923 }
1936 } 1924 }
1937 1925
1938 Token::Value op = expr->binary_op(); 1926 Token::Value op = expr->binary_op();
1939 __ push(r0); // Left operand goes on the stack. 1927 __ Push(x0); // Left operand goes on the stack.
1940 VisitForAccumulatorValue(expr->value()); 1928 VisitForAccumulatorValue(expr->value());
1941 1929
1942 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1930 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1943 ? OVERWRITE_RIGHT 1931 ? OVERWRITE_RIGHT
1944 : NO_OVERWRITE; 1932 : NO_OVERWRITE;
1945 SetSourcePosition(expr->position() + 1); 1933 SetSourcePosition(expr->position() + 1);
1946 AccumulatorValueContext context(this); 1934 AccumulatorValueContext context(this);
1947 if (ShouldInlineSmiCase(op)) { 1935 if (ShouldInlineSmiCase(op)) {
1948 EmitInlineSmiBinaryOp(expr->binary_operation(), 1936 EmitInlineSmiBinaryOp(expr->binary_operation(),
1949 op, 1937 op,
(...skipping 12 matching lines...)
1962 1950
1963 // Record source position before possible IC call. 1951 // Record source position before possible IC call.
1964 SetSourcePosition(expr->position()); 1952 SetSourcePosition(expr->position());
1965 1953
1966 // Store the value. 1954 // Store the value.
1967 switch (assign_type) { 1955 switch (assign_type) {
1968 case VARIABLE: 1956 case VARIABLE:
1969 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1957 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1970 expr->op()); 1958 expr->op());
1971 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1959 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1972 context()->Plug(r0); 1960 context()->Plug(x0);
1973 break; 1961 break;
1974 case NAMED_PROPERTY: 1962 case NAMED_PROPERTY:
1975 EmitNamedPropertyAssignment(expr); 1963 EmitNamedPropertyAssignment(expr);
1976 break; 1964 break;
1977 case KEYED_PROPERTY: 1965 case KEYED_PROPERTY:
1978 EmitKeyedPropertyAssignment(expr); 1966 EmitKeyedPropertyAssignment(expr);
1979 break; 1967 break;
1980 } 1968 }
1981 } 1969 }
1982 1970
1983 1971
1984 void FullCodeGenerator::VisitYield(Yield* expr) {
1985 Comment cmnt(masm_, "[ Yield");
1986 // Evaluate yielded value first; the initial iterator definition depends on
1987 // this. It stays on the stack while we update the iterator.
1988 VisitForStackValue(expr->expression());
1989
1990 switch (expr->yield_kind()) {
1991 case Yield::SUSPEND:
1992 // Pop value from top-of-stack slot; box result into result register.
1993 EmitCreateIteratorResult(false);
1994 __ push(result_register());
1995 // Fall through.
1996 case Yield::INITIAL: {
1997 Label suspend, continuation, post_runtime, resume;
1998
1999 __ jmp(&suspend);
2000
2001 __ bind(&continuation);
2002 __ jmp(&resume);
2003
2004 __ bind(&suspend);
2005 VisitForAccumulatorValue(expr->generator_object());
2006 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2007 __ mov(r1, Operand(Smi::FromInt(continuation.pos())));
2008 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2009 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2010 __ mov(r1, cp);
2011 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2012 kLRHasBeenSaved, kDontSaveFPRegs);
2013 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2014 __ cmp(sp, r1);
2015 __ b(eq, &post_runtime);
2016 __ push(r0); // generator object
2017 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2018 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2019 __ bind(&post_runtime);
2020 __ pop(result_register());
2021 EmitReturnSequence();
2022
2023 __ bind(&resume);
2024 context()->Plug(result_register());
2025 break;
2026 }
2027
2028 case Yield::FINAL: {
2029 VisitForAccumulatorValue(expr->generator_object());
2030 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2031 __ str(r1, FieldMemOperand(result_register(),
2032 JSGeneratorObject::kContinuationOffset));
2033 // Pop value from top-of-stack slot, box result into result register.
2034 EmitCreateIteratorResult(true);
2035 EmitUnwindBeforeReturn();
2036 EmitReturnSequence();
2037 break;
2038 }
2039
2040 case Yield::DELEGATING: {
2041 VisitForStackValue(expr->generator_object());
2042
2043 // Initial stack layout is as follows:
2044 // [sp + 1 * kPointerSize] iter
2045 // [sp + 0 * kPointerSize] g
2046
2047 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2048 Label l_next, l_call, l_loop;
2049 // Initial send value is undefined.
2050 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2051 __ b(&l_next);
2052
2053 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2054 __ bind(&l_catch);
2055 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2056 __ LoadRoot(r2, Heap::kthrow_stringRootIndex); // "throw"
2057 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2058 __ Push(r2, r3, r0); // "throw", iter, except
2059 __ jmp(&l_call);
2060
2061 // try { received = %yield result }
2062 // Shuffle the received result above a try handler and yield it without
2063 // re-boxing.
2064 __ bind(&l_try);
2065 __ pop(r0); // result
2066 __ PushTryHandler(StackHandler::CATCH, expr->index());
2067 const int handler_size = StackHandlerConstants::kSize;
2068 __ push(r0); // result
2069 __ jmp(&l_suspend);
2070 __ bind(&l_continuation);
2071 __ jmp(&l_resume);
2072 __ bind(&l_suspend);
2073 const int generator_object_depth = kPointerSize + handler_size;
2074 __ ldr(r0, MemOperand(sp, generator_object_depth));
2075 __ push(r0); // g
2076 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2077 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos())));
2078 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset));
2079 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset));
2080 __ mov(r1, cp);
2081 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2,
2082 kLRHasBeenSaved, kDontSaveFPRegs);
2083 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2084 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2085 __ pop(r0); // result
2086 EmitReturnSequence();
2087 __ bind(&l_resume); // received in r0
2088 __ PopTryHandler();
2089
2090 // receiver = iter; f = 'next'; arg = received;
2091 __ bind(&l_next);
2092 __ LoadRoot(r2, Heap::knext_stringRootIndex); // "next"
2093 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2094 __ Push(r2, r3, r0); // "next", iter, received
2095
2096 // result = receiver[f](arg);
2097 __ bind(&l_call);
2098 __ ldr(r1, MemOperand(sp, kPointerSize));
2099 __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
2100 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2101 CallIC(ic, TypeFeedbackId::None());
2102 __ mov(r1, r0);
2103 __ str(r1, MemOperand(sp, 2 * kPointerSize));
2104 CallFunctionStub stub(1, CALL_AS_METHOD);
2105 __ CallStub(&stub);
2106
2107 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2108 __ Drop(1); // The function is still on the stack; drop it.
2109
2110 // if (!result.done) goto l_try;
2111 __ bind(&l_loop);
2112 __ push(r0); // save result
2113 __ LoadRoot(r2, Heap::kdone_stringRootIndex); // "done"
2114 CallLoadIC(NOT_CONTEXTUAL); // result.done in r0
2115 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2116 CallIC(bool_ic);
2117 __ cmp(r0, Operand(0));
2118 __ b(eq, &l_try);
2119
2120 // result.value
2121 __ pop(r0); // result
2122 __ LoadRoot(r2, Heap::kvalue_stringRootIndex); // "value"
2123 CallLoadIC(NOT_CONTEXTUAL); // result.value in r0
2124 context()->DropAndPlug(2, r0); // drop iter and g
2125 break;
2126 }
2127 }
2128 }
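
Note: the l_next / l_call / l_loop labels in the DELEGATING case implement the
loop below. Iter and Result are illustrative stand-ins for the JS iterator
protocol, not V8 types; the catch handler simply swaps "next" for "throw"
before re-entering l_call.

    #include <functional>

    struct Result { int value; bool done; };
    struct Iter { std::function<Result(int)> next; };

    int DelegateYield(Iter iter,
                      const std::function<int(int)>& yield_to_caller) {
      Result result = iter.next(0);  // initial send value is "undefined"
      while (!result.done) {         // if (!result.done) goto l_try;
        // Re-yield the received value to our own caller without re-boxing.
        int received = yield_to_caller(result.value);
        result = iter.next(received);  // result = receiver[f](arg);
      }
      return result.value;
    }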
2129
2130
2131 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2132 Expression *value,
2133 JSGeneratorObject::ResumeMode resume_mode) {
2134 // The value stays in r0, and is ultimately read by the resumed generator, as
2135 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2136 // is read to throw the value when the resumed generator is already closed.
2137 // r1 will hold the generator object until the activation has been resumed.
2138 VisitForStackValue(generator);
2139 VisitForAccumulatorValue(value);
2140 __ pop(r1);
2141
2142 // Check generator state.
2143 Label wrong_state, closed_state, done;
2144 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2145 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2146 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2147 __ cmp(r3, Operand(Smi::FromInt(0)));
2148 __ b(eq, &closed_state);
2149 __ b(lt, &wrong_state);
2150
2151 // Load suspended function and context.
2152 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2153 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2154
2155 // Load receiver and store as the first argument.
2156 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2157 __ push(r2);
2158
2159 // Push holes for the rest of the arguments to the generator function.
2160 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2161 __ ldr(r3,
2162 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2163 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2164 Label push_argument_holes, push_frame;
2165 __ bind(&push_argument_holes);
2166 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
2167 __ b(mi, &push_frame);
2168 __ push(r2);
2169 __ jmp(&push_argument_holes);
2170
2171 // Enter a new JavaScript frame, and initialize its slots as they were when
2172 // the generator was suspended.
2173 Label resume_frame;
2174 __ bind(&push_frame);
2175 __ bl(&resume_frame);
2176 __ jmp(&done);
2177 __ bind(&resume_frame);
2178 // lr = return address.
2179 // fp = caller's frame pointer.
2180 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2181 // cp = callee's context,
2182 // r4 = callee's JS function.
2183 __ PushFixedFrame(r4);
2184 // Adjust FP to point to saved FP.
2185 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2186
2187 // Load the operand stack size.
2188 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2189 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2190 __ SmiUntag(r3);
2191
2192 // If we are sending a value and there is no operand stack, we can jump back
2193 // in directly.
2194 if (resume_mode == JSGeneratorObject::NEXT) {
2195 Label slow_resume;
2196 __ cmp(r3, Operand(0));
2197 __ b(ne, &slow_resume);
2198 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2199 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2200 __ SmiUntag(r2);
2201 __ add(r3, r3, r2);
2202 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2203 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2204 __ Jump(r3);
2205 __ bind(&slow_resume);
2206 }
2207
2208 // Otherwise, we push holes for the operand stack and call the runtime to fix
2209 // up the stack and the handlers.
2210 Label push_operand_holes, call_resume;
2211 __ bind(&push_operand_holes);
2212 __ sub(r3, r3, Operand(1), SetCC);
2213 __ b(mi, &call_resume);
2214 __ push(r2);
2215 __ b(&push_operand_holes);
2216 __ bind(&call_resume);
2217 ASSERT(!result_register().is(r1));
2218 __ Push(r1, result_register());
2219 __ Push(Smi::FromInt(resume_mode));
2220 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2221 // Not reached: the runtime call returns elsewhere.
2222 __ stop("not-reached");
2223
2224 // Reach here when generator is closed.
2225 __ bind(&closed_state);
2226 if (resume_mode == JSGeneratorObject::NEXT) {
2227 // Return completed iterator result when generator is closed.
2228 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2229 __ push(r2);
2230 // Pop value from top-of-stack slot; box result into result register.
2231 EmitCreateIteratorResult(true);
2232 } else {
2233 // Throw the provided value.
2234 __ push(r0);
2235 __ CallRuntime(Runtime::kThrow, 1);
2236 }
2237 __ jmp(&done);
2238
2239 // Throw error if we attempt to operate on a running generator.
2240 __ bind(&wrong_state);
2241 __ push(r1);
2242 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2243
2244 __ bind(&done);
2245 context()->Plug(result_register());
2246 }
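
Note: the continuation field checked at the top of this function encodes three
generator states, as the STATIC_ASSERTs spell out. A compact restatement:

    // A positive continuation is the code offset to resume at; zero
    // (kGeneratorClosed) means finished; negative (kGeneratorExecuting)
    // means the generator is already running.
    enum class GeneratorState { kSuspended, kClosed, kExecuting };

    GeneratorState ClassifyContinuation(int continuation) {
      if (continuation > 0) return GeneratorState::kSuspended;  // resume offset
      if (continuation == 0) return GeneratorState::kClosed;    // b(eq, ...)
      return GeneratorState::kExecuting;                        // b(lt, ...)
    }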
2247
2248
2249 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2250 Label gc_required;
2251 Label allocated;
2252
2253 Handle<Map> map(isolate()->native_context()->generator_result_map());
2254
2255 __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
2256 __ jmp(&allocated);
2257
2258 __ bind(&gc_required);
2259 __ Push(Smi::FromInt(map->instance_size()));
2260 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2261 __ ldr(context_register(),
2262 MemOperand(fp, StandardFrameConstants::kContextOffset));
2263
2264 __ bind(&allocated);
2265 __ mov(r1, Operand(map));
2266 __ pop(r2);
2267 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2268 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2269 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2270 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2271 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2272 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2273 __ str(r2,
2274 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2275 __ str(r3,
2276 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2277
2278 // Only the value field needs a write barrier, as the other values are in the
2279 // root set.
2280 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2281 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2282 }
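
Note: the ASSERT_EQ(map->instance_size(), 5 * kPointerSize) pins down the
object being filled in here: five tagged words, of which only "value" can
point into new space and therefore needs the write barrier. A sketch of that
layout (field names mirror the offsets used above):

    struct IteratorResultLayout {
      void* map;         // HeapObject::kMapOffset
      void* properties;  // JSObject::kPropertiesOffset (empty_fixed_array)
      void* elements;    // JSObject::kElementsOffset   (empty_fixed_array)
      void* value;       // kResultValuePropertyOffset  (needs RecordWriteField)
      void* done;        // kResultDonePropertyOffset   (boolean, in root set)
    };
    static_assert(sizeof(IteratorResultLayout) == 5 * sizeof(void*),
                  "five tagged slots");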
2283
2284
2285 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 1972 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2286 SetSourcePosition(prop->position()); 1973 SetSourcePosition(prop->position());
2287 Literal* key = prop->key()->AsLiteral(); 1974 Literal* key = prop->key()->AsLiteral();
2288 __ mov(r2, Operand(key->value())); 1975 __ Mov(x2, Operand(key->value()));
2289 // Call load IC. It has arguments receiver and property name in r0 and r2. 1976 // Call load IC. It has arguments receiver and property name in x0 and x2.
2290 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId()); 1977 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2291 } 1978 }
2292 1979
2293 1980
2294 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 1981 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2295 SetSourcePosition(prop->position()); 1982 SetSourcePosition(prop->position());
2296 // Call keyed load IC. It has arguments key and receiver in r0 and r1. 1983 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2297 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 1984 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2298 CallIC(ic, prop->PropertyFeedbackId()); 1985 CallIC(ic, prop->PropertyFeedbackId());
2299 } 1986 }
2300 1987
2301 1988
2302 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1989 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2303 Token::Value op, 1990 Token::Value op,
2304 OverwriteMode mode, 1991 OverwriteMode mode,
2305 Expression* left_expr, 1992 Expression* left_expr,
2306 Expression* right_expr) { 1993 Expression* right_expr) {
2307 Label done, smi_case, stub_call; 1994 Label done, both_smis, stub_call;
2308
2309 Register scratch1 = r2;
2310 Register scratch2 = r3;
2311 1995
2312 // Get the arguments. 1996 // Get the arguments.
2313 Register left = r1; 1997 Register left = x1;
2314 Register right = r0; 1998 Register right = x0;
2315 __ pop(left); 1999 Register result = x0;
2000 __ Pop(left);
2316 2001
2317 // Perform combined smi check on both operands. 2002 // Perform combined smi check on both operands.
2318 __ orr(scratch1, left, Operand(right)); 2003 __ Orr(x10, left, right);
2319 STATIC_ASSERT(kSmiTag == 0);
2320 JumpPatchSite patch_site(masm_); 2004 JumpPatchSite patch_site(masm_);
2321 patch_site.EmitJumpIfSmi(scratch1, &smi_case); 2005 patch_site.EmitJumpIfSmi(x10, &both_smis);
2322 2006
2323 __ bind(&stub_call); 2007 __ Bind(&stub_call);
2324 BinaryOpICStub stub(op, mode); 2008 BinaryOpICStub stub(op, mode);
2325 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2009 {
2326 patch_site.EmitPatchInfo(); 2010 Assembler::BlockConstPoolScope scope(masm_);
2327 __ jmp(&done); 2011 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2012 patch_site.EmitPatchInfo();
2013 }
2014 __ B(&done);
2328 2015
2329 __ bind(&smi_case); 2016 __ Bind(&both_smis);
2330 // Smi case. This code works the same way as the smi-smi case in the type 2017 // Smi case. This code works in the same way as the smi-smi case in the type
2331 // recording binary operation stub, see 2018 // recording binary operation stub, see
2019 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2020 // TODO(all): That doesn't exist any more. Where are the comments?
2021 //
2022 // The set of operations that needs to be supported here is controlled by
2023 // FullCodeGenerator::ShouldInlineSmiCase().
2332 switch (op) { 2024 switch (op) {
2333 case Token::SAR: 2025 case Token::SAR:
2334 __ GetLeastBitsFromSmi(scratch1, right, 5); 2026 __ Ubfx(right, right, kSmiShift, 5);
2335 __ mov(right, Operand(left, ASR, scratch1)); 2027 __ Asr(result, left, right);
2336 __ bic(right, right, Operand(kSmiTagMask)); 2028 __ Bic(result, result, kSmiShiftMask);
2337 break; 2029 break;
2338 case Token::SHL: { 2030 case Token::SHL:
2339 __ SmiUntag(scratch1, left); 2031 __ Ubfx(right, right, kSmiShift, 5);
2340 __ GetLeastBitsFromSmi(scratch2, right, 5); 2032 __ Lsl(result, left, right);
2341 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2342 __ TrySmiTag(right, scratch1, &stub_call);
2343 break; 2033 break;
2344 }
2345 case Token::SHR: { 2034 case Token::SHR: {
2346 __ SmiUntag(scratch1, left); 2035 Label right_not_zero;
2347 __ GetLeastBitsFromSmi(scratch2, right, 5); 2036 __ Cbnz(right, &right_not_zero);
2348 __ mov(scratch1, Operand(scratch1, LSR, scratch2)); 2037 __ Tbnz(left, kXSignBit, &stub_call);
2349 __ tst(scratch1, Operand(0xc0000000)); 2038 __ Bind(&right_not_zero);
2350 __ b(ne, &stub_call); 2039 __ Ubfx(right, right, kSmiShift, 5);
2351 __ SmiTag(right, scratch1); 2040 __ Lsr(result, left, right);
2041 __ Bic(result, result, kSmiShiftMask);
2352 break; 2042 break;
2353 } 2043 }
2354 case Token::ADD: 2044 case Token::ADD:
2355 __ add(scratch1, left, Operand(right), SetCC); 2045 __ Adds(x10, left, right);
2356 __ b(vs, &stub_call); 2046 __ B(vs, &stub_call);
2357 __ mov(right, scratch1); 2047 __ Mov(result, x10);
2358 break; 2048 break;
2359 case Token::SUB: 2049 case Token::SUB:
2360 __ sub(scratch1, left, Operand(right), SetCC); 2050 __ Subs(x10, left, right);
2361 __ b(vs, &stub_call); 2051 __ B(vs, &stub_call);
2362 __ mov(right, scratch1); 2052 __ Mov(result, x10);
2363 break; 2053 break;
2364 case Token::MUL: { 2054 case Token::MUL: {
2365 __ SmiUntag(ip, right); 2055 Label not_minus_zero, done;
2366 __ smull(scratch1, scratch2, left, ip); 2056 __ Smulh(x10, left, right);
2367 __ mov(ip, Operand(scratch1, ASR, 31)); 2057 __ Cbnz(x10, &not_minus_zero);
2368 __ cmp(ip, Operand(scratch2)); 2058 __ Eor(x11, left, right);
2369 __ b(ne, &stub_call); 2059 __ Tbnz(x11, kXSignBit, &stub_call);
2370 __ cmp(scratch1, Operand::Zero()); 2060 STATIC_ASSERT(kSmiTag == 0);
2371 __ mov(right, Operand(scratch1), LeaveCC, ne); 2061 __ Mov(result, x10);
2372 __ b(ne, &done); 2062 __ B(&done);
2373 __ add(scratch2, right, Operand(left), SetCC); 2063 __ Bind(&not_minus_zero);
2374 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl); 2064 __ Cls(x11, x10);
2375 __ b(mi, &stub_call); 2065 __ Cmp(x11, kXRegSize - kSmiShift);
2066 __ B(lt, &stub_call);
2067 __ SmiTag(result, x10);
2068 __ Bind(&done);
2376 break; 2069 break;
2377 } 2070 }
2378 case Token::BIT_OR: 2071 case Token::BIT_OR:
2379 __ orr(right, left, Operand(right)); 2072 __ Orr(result, left, right);
2380 break; 2073 break;
2381 case Token::BIT_AND: 2074 case Token::BIT_AND:
2382 __ and_(right, left, Operand(right)); 2075 __ And(result, left, right);
2383 break; 2076 break;
2384 case Token::BIT_XOR: 2077 case Token::BIT_XOR:
2385 __ eor(right, left, Operand(right)); 2078 __ Eor(result, left, right);
2386 break; 2079 break;
2387 default: 2080 default:
2388 UNREACHABLE(); 2081 UNREACHABLE();
2389 } 2082 }
2390 2083
2391 __ bind(&done); 2084 __ Bind(&done);
2392 context()->Plug(r0); 2085 context()->Plug(x0);
2393 } 2086 }
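
Note: the smi fast path above leans on the A64 smi representation, where the
32-bit payload lives in the upper half of the register and the low tag bit is
zero. A self-contained sketch of the tricks used; the helpers are illustrative,
not the V8 API.

    #include <cstdint>

    constexpr int kSmiShift = 32;
    int64_t SmiTag(int32_t v) { return static_cast<int64_t>(v) << kSmiShift; }
    int32_t SmiUntag(int64_t s) { return static_cast<int32_t>(s >> kSmiShift); }

    // Orr + EmitJumpIfSmi: OR-ing the operands keeps the tag bit zero only if
    // it was zero in both, i.e. only if both operands are smis.
    bool BothSmis(int64_t a, int64_t b) { return ((a | b) & 1) == 0; }

    // Adds + B(vs): tagged smi addition is exact unless the 64-bit signed add
    // overflows; returning false corresponds to taking the stub path.
    bool SmiAdd(int64_t a, int64_t b, int64_t* sum) {
      return !__builtin_add_overflow(a, b, sum);  // GCC/Clang builtin
    }

    // Smulh: with both operands tagged (v << 32), the 128-bit product equals
    // (a * b) << 64, so its high 64 bits are exactly the untagged product.
    // The listing then uses Cls to check the result re-tags into 32 bits, and
    // bails out on a zero product with differing operand signs (JS -0, which
    // no smi can represent).
    int64_t SmiMulUntagged(int64_t a, int64_t b) {
      return static_cast<int64_t>(
          (static_cast<__int128>(a) * static_cast<__int128>(b)) >> 64);
    }

    // SAR: shift the whole tagged word, then clear whatever was dragged into
    // the low (tag) half, mirroring Ubfx + Asr + Bic above.
    int64_t SmiSar(int64_t left, int64_t right) {
      unsigned shift = static_cast<unsigned>(SmiUntag(right)) & 31;
      return (left >> shift) & ~((int64_t{1} << kSmiShift) - 1);
    }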
2394 2087
2395 2088
2396 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2089 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2397 Token::Value op, 2090 Token::Value op,
2398 OverwriteMode mode) { 2091 OverwriteMode mode) {
2399 __ pop(r1); 2092 __ Pop(x1);
2400 BinaryOpICStub stub(op, mode); 2093 BinaryOpICStub stub(op, mode);
2401 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2094 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2402 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); 2095 {
2403 patch_site.EmitPatchInfo(); 2096 Assembler::BlockConstPoolScope scope(masm_);
2404 context()->Plug(r0); 2097 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2098 patch_site.EmitPatchInfo();
2099 }
2100 context()->Plug(x0);
2405 } 2101 }
2406 2102
2407 2103
2408 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2104 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2409 // Invalid left-hand sides are rewritten by the parser to have a 'throw 2105 // Invalid left-hand sides are rewritten to have a 'throw
2410 // ReferenceError' on the left-hand side. 2106 // ReferenceError' on the left-hand side.
2411 if (!expr->IsValidLeftHandSide()) { 2107 if (!expr->IsValidLeftHandSide()) {
2412 VisitForEffect(expr); 2108 VisitForEffect(expr);
2413 return; 2109 return;
2414 } 2110 }
2415 2111
2416 // Left-hand side can only be a property, a global or a (parameter or local) 2112 // Left-hand side can only be a property, a global or a (parameter or local)
2417 // slot. 2113 // slot.
2418 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2114 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2419 LhsKind assign_type = VARIABLE; 2115 LhsKind assign_type = VARIABLE;
2420 Property* prop = expr->AsProperty(); 2116 Property* prop = expr->AsProperty();
2421 if (prop != NULL) { 2117 if (prop != NULL) {
2422 assign_type = (prop->key()->IsPropertyName()) 2118 assign_type = (prop->key()->IsPropertyName())
2423 ? NAMED_PROPERTY 2119 ? NAMED_PROPERTY
2424 : KEYED_PROPERTY; 2120 : KEYED_PROPERTY;
2425 } 2121 }
2426 2122
2427 switch (assign_type) { 2123 switch (assign_type) {
2428 case VARIABLE: { 2124 case VARIABLE: {
2429 Variable* var = expr->AsVariableProxy()->var(); 2125 Variable* var = expr->AsVariableProxy()->var();
2430 EffectContext context(this); 2126 EffectContext context(this);
2431 EmitVariableAssignment(var, Token::ASSIGN); 2127 EmitVariableAssignment(var, Token::ASSIGN);
2432 break; 2128 break;
2433 } 2129 }
2434 case NAMED_PROPERTY: { 2130 case NAMED_PROPERTY: {
2435 __ push(r0); // Preserve value. 2131 __ Push(x0); // Preserve value.
2436 VisitForAccumulatorValue(prop->obj()); 2132 VisitForAccumulatorValue(prop->obj());
2437 __ mov(r1, r0); 2133 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2438 __ pop(r0); // Restore value. 2134 // this copy.
2439 __ mov(r2, Operand(prop->key()->AsLiteral()->value())); 2135 __ Mov(x1, x0);
2136 __ Pop(x0); // Restore value.
2137 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
2440 CallStoreIC(); 2138 CallStoreIC();
2441 break; 2139 break;
2442 } 2140 }
2443 case KEYED_PROPERTY: { 2141 case KEYED_PROPERTY: {
2444 __ push(r0); // Preserve value. 2142 __ Push(x0); // Preserve value.
2445 VisitForStackValue(prop->obj()); 2143 VisitForStackValue(prop->obj());
2446 VisitForAccumulatorValue(prop->key()); 2144 VisitForAccumulatorValue(prop->key());
2447 __ mov(r1, r0); 2145 __ Mov(x1, x0);
2448 __ Pop(r0, r2); // r0 = restored value. 2146 __ Pop(x2, x0); // x2 = object, x0 = restored value.
2449 Handle<Code> ic = is_classic_mode() 2147 Handle<Code> ic = is_classic_mode()
2450 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2148 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2451 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2149 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2452 CallIC(ic); 2150 CallIC(ic);
2453 break; 2151 break;
2454 } 2152 }
2455 } 2153 }
2456 context()->Plug(r0); 2154 context()->Plug(x0);
2457 } 2155 }
2458 2156
2459 2157
2460 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2158 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2461 Token::Value op) { 2159 Token::Value op) {
2160 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2462 if (var->IsUnallocated()) { 2161 if (var->IsUnallocated()) {
2463 // Global var, const, or let. 2162 // Global var, const, or let.
2464 __ mov(r2, Operand(var->name())); 2163 __ Mov(x2, Operand(var->name()));
2465 __ ldr(r1, GlobalObjectOperand()); 2164 __ Ldr(x1, GlobalObjectMemOperand());
2466 CallStoreIC(); 2165 CallStoreIC();
2166
2467 } else if (op == Token::INIT_CONST) { 2167 } else if (op == Token::INIT_CONST) {
2468 // Const initializers need a write barrier. 2168 // Const initializers need a write barrier.
2469 ASSERT(!var->IsParameter()); // No const parameters. 2169 ASSERT(!var->IsParameter()); // No const parameters.
2470 if (var->IsStackLocal()) { 2170 if (var->IsStackLocal()) {
2471 __ ldr(r1, StackOperand(var)); 2171 Label skip;
2472 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex); 2172 __ Ldr(x1, StackOperand(var));
2473 __ str(result_register(), StackOperand(var), eq); 2173 __ JumpIfNotRoot(x1, Heap::kTheHoleValueRootIndex, &skip);
2174 __ Str(result_register(), StackOperand(var));
2175 __ Bind(&skip);
2474 } else { 2176 } else {
2475 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); 2177 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2476 // Like var declarations, const declarations are hoisted to function 2178 // Like var declarations, const declarations are hoisted to function
2477 // scope. However, unlike var initializers, const initializers are 2179 // scope. However, unlike var initializers, const initializers are
2478 // able to drill a hole to that function context, even from inside a 2180 // able to drill a hole to that function context, even from inside a
2479 // 'with' context. We thus bypass the normal static scope lookup for 2181 // 'with' context. We thus bypass the normal static scope lookup for
2480 // var->IsContextSlot(). 2182 // var->IsContextSlot().
2481 __ push(r0); 2183 __ Push(x0);
2482 __ mov(r0, Operand(var->name())); 2184 __ Mov(x0, Operand(var->name()));
2483 __ Push(cp, r0); // Context and name. 2185 __ Push(cp, x0); // Context and name.
2484 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 2186 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2485 } 2187 }
2486 2188
2487 } else if (var->mode() == LET && op != Token::INIT_LET) { 2189 } else if (var->mode() == LET && op != Token::INIT_LET) {
2488 // Non-initializing assignment to let variable needs a write barrier. 2190 // Non-initializing assignment to let variable needs a write barrier.
2489 if (var->IsLookupSlot()) { 2191 if (var->IsLookupSlot()) {
2490 __ push(r0); // Value. 2192 __ Push(x0, cp); // Context, value.
2491 __ mov(r1, Operand(var->name())); 2193 __ Mov(x11, Operand(var->name()));
2492 __ mov(r0, Operand(Smi::FromInt(language_mode()))); 2194 __ Mov(x10, Operand(Smi::FromInt(language_mode())));
2493 __ Push(cp, r1, r0); // Context, name, strict mode. 2195 __ Push(x11, x10); // Strict mode, name.
2494 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2196 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2495 } else { 2197 } else {
2496 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2198 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2497 Label assign; 2199 Label assign;
2498 MemOperand location = VarOperand(var, r1); 2200 MemOperand location = VarOperand(var, x1);
2499 __ ldr(r3, location); 2201 __ Ldr(x10, location);
2500 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 2202 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2501 __ b(ne, &assign); 2203 __ Mov(x10, Operand(var->name()));
2502 __ mov(r3, Operand(var->name())); 2204 __ Push(x10);
2503 __ push(r3);
2504 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2205 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2505 // Perform the assignment. 2206 // Perform the assignment.
2506 __ bind(&assign); 2207 __ Bind(&assign);
2507 __ str(result_register(), location); 2208 __ Str(result_register(), location);
2508 if (var->IsContextSlot()) { 2209 if (var->IsContextSlot()) {
2509 // RecordWrite may destroy all its register arguments. 2210 // RecordWrite may destroy all its register arguments.
2510 __ mov(r3, result_register()); 2211 __ Mov(x10, result_register());
2511 int offset = Context::SlotOffset(var->index()); 2212 int offset = Context::SlotOffset(var->index());
2512 __ RecordWriteContextSlot( 2213 __ RecordWriteContextSlot(
2513 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 2214 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2514 } 2215 }
2515 } 2216 }
2516 2217
2517 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { 2218 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2518 // Assignment to var or initializing assignment to let/const 2219 // Assignment to var or initializing assignment to let/const
2519 // in harmony mode. 2220 // in harmony mode.
2520 if (var->IsStackAllocated() || var->IsContextSlot()) { 2221 if (var->IsStackAllocated() || var->IsContextSlot()) {
2521 MemOperand location = VarOperand(var, r1); 2222 MemOperand location = VarOperand(var, x1);
2522 if (generate_debug_code_ && op == Token::INIT_LET) { 2223 if (FLAG_debug_code && op == Token::INIT_LET) {
2523 // Check for an uninitialized let binding. 2224 __ Ldr(x10, location);
2524 __ ldr(r2, location); 2225 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2525 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2526 __ Check(eq, kLetBindingReInitialization); 2226 __ Check(eq, kLetBindingReInitialization);
2527 } 2227 }
2528 // Perform the assignment. 2228 // Perform the assignment.
2529 __ str(r0, location); 2229 __ Str(x0, location);
2530 if (var->IsContextSlot()) { 2230 if (var->IsContextSlot()) {
2531 __ mov(r3, r0); 2231 __ Mov(x10, x0);
2532 int offset = Context::SlotOffset(var->index()); 2232 int offset = Context::SlotOffset(var->index());
2533 __ RecordWriteContextSlot( 2233 __ RecordWriteContextSlot(
2534 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 2234 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2535 } 2235 }
2536 } else { 2236 } else {
2537 ASSERT(var->IsLookupSlot()); 2237 ASSERT(var->IsLookupSlot());
2538 __ push(r0); // Value. 2238 __ Mov(x11, Operand(var->name()));
2539 __ mov(r1, Operand(var->name())); 2239 __ Mov(x10, Operand(Smi::FromInt(language_mode())));
2540 __ mov(r0, Operand(Smi::FromInt(language_mode()))); 2240 // jssp[0] : mode.
2541 __ Push(cp, r1, r0); // Context, name, strict mode. 2241 // jssp[8] : name.
2242 // jssp[16] : context.
2243 // jssp[24] : value.
2244 __ Push(x0, cp, x11, x10);
2542 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2245 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2543 } 2246 }
2544 } 2247 }
2545 // Non-initializing assignments to consts are ignored. 2248 // Non-initializing assignments to consts are ignored.
2546 } 2249 }
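
Note: the JumpIfNotRoot(..., kTheHoleValueRootIndex, ...) test in the
INIT_CONST path gives a classic-mode const its "first write wins" behaviour:
the slot starts out holding the hole sentinel and is written only while it
still does. A sketch, with std::optional standing in for the hole:

    #include <optional>

    void InitializeConstSlot(std::optional<double>& slot, double value) {
      if (!slot.has_value()) slot = value;  // otherwise the store is skipped
    }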
2547 2250
2548 2251
2549 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2252 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2253 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2550 // Assignment to a property, using a named store IC. 2254 // Assignment to a property, using a named store IC.
2551 Property* prop = expr->target()->AsProperty(); 2255 Property* prop = expr->target()->AsProperty();
2552 ASSERT(prop != NULL); 2256 ASSERT(prop != NULL);
2553 ASSERT(prop->key()->AsLiteral() != NULL); 2257 ASSERT(prop->key()->AsLiteral() != NULL);
2554 2258
2555 // Record source code position before IC call. 2259 // Record source code position before IC call.
2556 SetSourcePosition(expr->position()); 2260 SetSourcePosition(expr->position());
2557 __ mov(r2, Operand(prop->key()->AsLiteral()->value())); 2261 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
2558 __ pop(r1); 2262 __ Pop(x1);
2559 2263
2560 CallStoreIC(expr->AssignmentFeedbackId()); 2264 CallStoreIC(expr->AssignmentFeedbackId());
2561 2265
2562 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2266 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2563 context()->Plug(r0); 2267 context()->Plug(x0);
2564 } 2268 }
2565 2269
2566 2270
2567 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2271 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2272 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2568 // Assignment to a property, using a keyed store IC. 2273 // Assignment to a property, using a keyed store IC.
2569 2274
2570 // Record source code position before IC call. 2275 // Record source code position before IC call.
2571 SetSourcePosition(expr->position()); 2276 SetSourcePosition(expr->position());
2572 __ Pop(r2, r1); // r1 = key. 2277 // TODO(all): Could we pass this in registers rather than on the stack?
2278 __ Pop(x1, x2); // Key and object holding the property.
2573 2279
2574 Handle<Code> ic = is_classic_mode() 2280 Handle<Code> ic = is_classic_mode()
2575 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2281 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2576 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2282 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2577 CallIC(ic, expr->AssignmentFeedbackId()); 2283 CallIC(ic, expr->AssignmentFeedbackId());
2578 2284
2579 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2285 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2580 context()->Plug(r0); 2286 context()->Plug(x0);
2581 } 2287 }
2582 2288
2583 2289
2584 void FullCodeGenerator::VisitProperty(Property* expr) { 2290 void FullCodeGenerator::VisitProperty(Property* expr) {
2585 Comment cmnt(masm_, "[ Property"); 2291 Comment cmnt(masm_, "[ Property");
2586 Expression* key = expr->key(); 2292 Expression* key = expr->key();
2587 2293
2588 if (key->IsPropertyName()) { 2294 if (key->IsPropertyName()) {
2589 VisitForAccumulatorValue(expr->obj()); 2295 VisitForAccumulatorValue(expr->obj());
2590 EmitNamedPropertyLoad(expr); 2296 EmitNamedPropertyLoad(expr);
2591 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2297 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2592 context()->Plug(r0); 2298 context()->Plug(x0);
2593 } else { 2299 } else {
2594 VisitForStackValue(expr->obj()); 2300 VisitForStackValue(expr->obj());
2595 VisitForAccumulatorValue(expr->key()); 2301 VisitForAccumulatorValue(expr->key());
2596 __ pop(r1); 2302 __ Pop(x1);
2597 EmitKeyedPropertyLoad(expr); 2303 EmitKeyedPropertyLoad(expr);
2598 context()->Plug(r0); 2304 context()->Plug(x0);
2599 } 2305 }
2600 } 2306 }
2601 2307
2602 2308
2603 void FullCodeGenerator::CallIC(Handle<Code> code, 2309 void FullCodeGenerator::CallIC(Handle<Code> code,
2604 TypeFeedbackId ast_id) { 2310 TypeFeedbackId ast_id) {
2605 ic_total_count_++; 2311 ic_total_count_++;
2606 // All calls must have a predictable size in full-codegen code to ensure that 2312 // All calls must have a predictable size in full-codegen code to ensure that
2607 // the debugger can patch them correctly. 2313 // the debugger can patch them correctly.
2608 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al, 2314 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2609 NEVER_INLINE_TARGET_ADDRESS);
2610 } 2315 }
2611 2316
2612 2317
2613 // Code common for calls using the IC. 2318 // Code common for calls using the IC.
2614 void FullCodeGenerator::EmitCallWithIC(Call* expr) { 2319 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2320 ASM_LOCATION("EmitCallWithIC");
2321
2615 Expression* callee = expr->expression(); 2322 Expression* callee = expr->expression();
2616 ZoneList<Expression*>* args = expr->arguments(); 2323 ZoneList<Expression*>* args = expr->arguments();
2617 int arg_count = args->length(); 2324 int arg_count = args->length();
2618 2325
2619 CallFunctionFlags flags; 2326 CallFunctionFlags flags;
2620 // Get the target function. 2327 // Get the target function.
2621 if (callee->IsVariableProxy()) { 2328 if (callee->IsVariableProxy()) {
2622 { StackValueContext context(this); 2329 { StackValueContext context(this);
2623 EmitVariableLoad(callee->AsVariableProxy()); 2330 EmitVariableLoad(callee->AsVariableProxy());
2624 PrepareForBailout(callee, NO_REGISTERS); 2331 PrepareForBailout(callee, NO_REGISTERS);
2625 } 2332 }
2626 // Push undefined as receiver. This is patched in the method prologue if it 2333 // Push undefined as receiver. This is patched in the method prologue if it
2627 // is a classic mode method. 2334 // is a classic mode method.
2628 __ Push(isolate()->factory()->undefined_value()); 2335 __ Push(isolate()->factory()->undefined_value());
2629 flags = NO_CALL_FUNCTION_FLAGS; 2336 flags = NO_CALL_FUNCTION_FLAGS;
2630 } else { 2337 } else {
2631 // Load the function from the receiver. 2338 // Load the function from the receiver.
2632 ASSERT(callee->IsProperty()); 2339 ASSERT(callee->IsProperty());
2633 __ ldr(r0, MemOperand(sp, 0)); 2340 __ Peek(x0, 0);
2634 EmitNamedPropertyLoad(callee->AsProperty()); 2341 EmitNamedPropertyLoad(callee->AsProperty());
2635 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2342 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2636 // Push the target function under the receiver. 2343 // Push the target function under the receiver.
2637 __ ldr(ip, MemOperand(sp, 0)); 2344 __ Pop(x10);
2638 __ push(ip); 2345 __ Push(x0, x10);
2639 __ str(r0, MemOperand(sp, kPointerSize));
2640 flags = CALL_AS_METHOD; 2346 flags = CALL_AS_METHOD;
2641 } 2347 }
2642 2348
2643 // Load the arguments. 2349 // Load the arguments.
2644 { PreservePositionScope scope(masm()->positions_recorder()); 2350 { PreservePositionScope scope(masm()->positions_recorder());
2645 for (int i = 0; i < arg_count; i++) { 2351 for (int i = 0; i < arg_count; i++) {
2646 VisitForStackValue(args->at(i)); 2352 VisitForStackValue(args->at(i));
2647 } 2353 }
2648 } 2354 }
2649 2355
2650 // Record source position for debugger. 2356 // Record source position for debugger.
2651 SetSourcePosition(expr->position()); 2357 SetSourcePosition(expr->position());
2652 CallFunctionStub stub(arg_count, flags); 2358 CallFunctionStub stub(arg_count, flags);
2653 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2359 __ Peek(x1, (arg_count + 1) * kPointerSize);
2654 __ CallStub(&stub); 2360 __ CallStub(&stub);
2655 2361
2656 RecordJSReturnSite(expr); 2362 RecordJSReturnSite(expr);
2657 2363
2658 // Restore context register. 2364 // Restore context register.
2659 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2365 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2660 2366
2661 context()->DropAndPlug(1, r0); 2367 context()->DropAndPlug(1, x0);
2662 } 2368 }
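
Note: "push the target function under the receiver" (here and in
EmitKeyedCallWithIC below) is a small stack shuffle: the receiver sits on top
and the freshly loaded function must land one slot beneath it. The old
ldr/push/str sequence and the new Pop/Push pair both implement:

    #include <cstdint>
    #include <vector>

    // Top of stack is the back of the vector.
    void PushFunctionUnderReceiver(std::vector<uint64_t>& stack, uint64_t fn) {
      uint64_t receiver = stack.back();  // Pop(x10)
      stack.back() = fn;                 // the function takes the receiver's slot
      stack.push_back(receiver);         // Push(x0, x10): receiver back on top
    }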
2663 2369
2664 2370
2665 // Code common for calls using the IC. 2371 // Code common for calls using the IC.
2666 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, 2372 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2667 Expression* key) { 2373 Expression* key) {
2668 // Load the key. 2374 // Load the key.
2669 VisitForAccumulatorValue(key); 2375 VisitForAccumulatorValue(key);
2670 2376
2671 Expression* callee = expr->expression(); 2377 Expression* callee = expr->expression();
2672 ZoneList<Expression*>* args = expr->arguments(); 2378 ZoneList<Expression*>* args = expr->arguments();
2673 int arg_count = args->length(); 2379 int arg_count = args->length();
2674 2380
2675 // Load the function from the receiver. 2381 // Load the function from the receiver.
2676 ASSERT(callee->IsProperty()); 2382 ASSERT(callee->IsProperty());
2677 __ ldr(r1, MemOperand(sp, 0)); 2383 __ Peek(x1, 0);
2678 EmitKeyedPropertyLoad(callee->AsProperty()); 2384 EmitKeyedPropertyLoad(callee->AsProperty());
2679 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2385 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2680 2386
2681 // Push the target function under the receiver. 2387 // Push the target function under the receiver.
2682 __ ldr(ip, MemOperand(sp, 0)); 2388 __ Pop(x10);
2683 __ push(ip); 2389 __ Push(x0, x10);
2684 __ str(r0, MemOperand(sp, kPointerSize));
2685 2390
2686 { PreservePositionScope scope(masm()->positions_recorder()); 2391 { PreservePositionScope scope(masm()->positions_recorder());
2687 for (int i = 0; i < arg_count; i++) { 2392 for (int i = 0; i < arg_count; i++) {
2688 VisitForStackValue(args->at(i)); 2393 VisitForStackValue(args->at(i));
2689 } 2394 }
2690 } 2395 }
2691 2396
2692 // Record source position for debugger. 2397 // Record source position for debugger.
2693 SetSourcePosition(expr->position()); 2398 SetSourcePosition(expr->position());
2694 CallFunctionStub stub(arg_count, CALL_AS_METHOD); 2399 CallFunctionStub stub(arg_count, CALL_AS_METHOD);
2695 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2400 __ Peek(x1, (arg_count + 1) * kPointerSize);
2696 __ CallStub(&stub); 2401 __ CallStub(&stub);
2697 2402
2698 RecordJSReturnSite(expr); 2403 RecordJSReturnSite(expr);
2699 // Restore context register. 2404 // Restore context register.
2700 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2405 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2701 2406
2702 context()->DropAndPlug(1, r0); 2407 context()->DropAndPlug(1, x0);
2703 } 2408 }
2704 2409
2705 2410
2706 void FullCodeGenerator::EmitCallWithStub(Call* expr) { 2411 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2707 // Code common for calls using the call stub. 2412 // Code common for calls using the call stub.
2708 ZoneList<Expression*>* args = expr->arguments(); 2413 ZoneList<Expression*>* args = expr->arguments();
2709 int arg_count = args->length(); 2414 int arg_count = args->length();
2710 { PreservePositionScope scope(masm()->positions_recorder()); 2415 { PreservePositionScope scope(masm()->positions_recorder());
2711 for (int i = 0; i < arg_count; i++) { 2416 for (int i = 0; i < arg_count; i++) {
2712 VisitForStackValue(args->at(i)); 2417 VisitForStackValue(args->at(i));
2713 } 2418 }
2714 } 2419 }
2715 // Record source position for debugger. 2420 // Record source position for debugger.
2716 SetSourcePosition(expr->position()); 2421 SetSourcePosition(expr->position());
2717 2422
2718 Handle<Object> uninitialized = 2423 Handle<Object> uninitialized =
2719 TypeFeedbackInfo::UninitializedSentinel(isolate()); 2424 TypeFeedbackInfo::UninitializedSentinel(isolate());
2720 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); 2425 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2721 __ Move(r2, FeedbackVector()); 2426 __ LoadObject(x2, FeedbackVector());
2722 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); 2427 __ Mov(x3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2723 2428
2724 // Record call targets in unoptimized code. 2429 // Record call targets in unoptimized code.
2725 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); 2430 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
2726 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2431 __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes);
2727 __ CallStub(&stub); 2432 __ CallStub(&stub);
2728 RecordJSReturnSite(expr); 2433 RecordJSReturnSite(expr);
2729 // Restore context register. 2434 // Restore context register.
2730 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2435 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2731 context()->DropAndPlug(1, r0); 2436 context()->DropAndPlug(1, x0);
2732 } 2437 }
2733 2438
2734 2439
2735 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2440 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2736 // r4: copy of the first argument or undefined if it doesn't exist. 2441 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2442 // Prepare to push a copy of the first argument or undefined if it doesn't
2443 // exist.
2737 if (arg_count > 0) { 2444 if (arg_count > 0) {
2738 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize)); 2445 __ Peek(x10, arg_count * kXRegSizeInBytes);
2739 } else { 2446 } else {
2740 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); 2447 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2741 } 2448 }
2742 2449
2743 // r3: the receiver of the enclosing function. 2450 // Prepare to push the receiver of the enclosing function.
2744 int receiver_offset = 2 + info_->scope()->num_parameters(); 2451 int receiver_offset = 2 + info_->scope()->num_parameters();
2745 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize)); 2452 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
2746 2453
2747 // r2: the language mode. 2454 // Push.
2748 __ mov(r2, Operand(Smi::FromInt(language_mode()))); 2455 __ Push(x10, x11);
2749 2456
2750 // r1: the start position of the scope the call resides in. 2457 // Prepare to push the language mode.
2751 __ mov(r1, Operand(Smi::FromInt(scope()->start_position()))); 2458 __ Mov(x10, Operand(Smi::FromInt(language_mode())));
2459 // Prepare to push the start position of the scope the call resides in.
2460 __ Mov(x11, Operand(Smi::FromInt(scope()->start_position())));
2461
2462 // Push.
2463 __ Push(x10, x11);
2752 2464
2753 // Do the runtime call. 2465 // Do the runtime call.
2754 __ Push(r4, r3, r2, r1);
2755 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); 2466 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2756 } 2467 }
2757 2468
2758 2469
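A note on the push pattern in EmitResolvePossiblyDirectEval: ARM assembled all four runtime arguments in registers and emitted one Push(r4, r3, r2, r1), while the A64 rewrite stages values through the x10/x11 temporaries and pushes them two at a time, which the A64 macro assembler can presumably lower to a single stp (store pair) each. The final layout is unchanged. A small runnable model of the paired push (all names and values are illustrative stand-ins, not V8 API):

    #include <cassert>
    #include <cstdint>

    // Hypothetical model of a two-register Push: the first operand lands
    // in the higher (deeper) slot, the second ends up on top.
    struct Stack {
      uint64_t slot[8];
      int sp = 8;  // grows downward, one 8-byte slot at a time
      void Push(uint64_t first, uint64_t second) {
        slot[--sp] = first;
        slot[--sp] = second;
      }
    };

    int main() {
      Stack s;
      s.Push(/* x10: first arg or undefined */ 1, /* x11: receiver */ 2);
      s.Push(/* x10: language mode */ 3, /* x11: start position */ 4);
      // Same final layout as ARM's single Push(r4, r3, r2, r1):
      assert(s.slot[7] == 1 && s.slot[6] == 2 &&
             s.slot[5] == 3 && s.slot[4] == 4);
    }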
2759 void FullCodeGenerator::VisitCall(Call* expr) { 2470 void FullCodeGenerator::VisitCall(Call* expr) {
2760 #ifdef DEBUG 2471 #ifdef DEBUG
2761 // We want to verify that RecordJSReturnSite gets called on all paths 2472 // We want to verify that RecordJSReturnSite gets called on all paths
2762 // through this function. Avoid early returns. 2473 // through this function. Avoid early returns.
2763 expr->return_is_recorded_ = false; 2474 expr->return_is_recorded_ = false;
2764 #endif 2475 #endif
2765 2476
2766 Comment cmnt(masm_, "[ Call"); 2477 Comment cmnt(masm_, "[ Call");
2767 Expression* callee = expr->expression(); 2478 Expression* callee = expr->expression();
2768 Call::CallType call_type = expr->GetCallType(isolate()); 2479 Call::CallType call_type = expr->GetCallType(isolate());
2769 2480
2770 if (call_type == Call::POSSIBLY_EVAL_CALL) { 2481 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2771 // In a call to eval, we first call %ResolvePossiblyDirectEval to 2482 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2772 // resolve the function we need to call and the receiver of the 2483 // resolve the function we need to call and the receiver of the
2773 // call. Then we call the resolved function using the given 2484 // call. Then we call the resolved function using the given
2774 // arguments. 2485 // arguments.
2775 ZoneList<Expression*>* args = expr->arguments(); 2486 ZoneList<Expression*>* args = expr->arguments();
2776 int arg_count = args->length(); 2487 int arg_count = args->length();
2777 2488
2778 { PreservePositionScope pos_scope(masm()->positions_recorder()); 2489 {
2490 PreservePositionScope pos_scope(masm()->positions_recorder());
2779 VisitForStackValue(callee); 2491 VisitForStackValue(callee);
2780 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2492 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2781 __ push(r2); // Reserved receiver slot. 2493 __ Push(x10); // Reserved receiver slot.
2782 2494
2783 // Push the arguments. 2495 // Push the arguments.
2784 for (int i = 0; i < arg_count; i++) { 2496 for (int i = 0; i < arg_count; i++) {
2785 VisitForStackValue(args->at(i)); 2497 VisitForStackValue(args->at(i));
2786 } 2498 }
2787 2499
2788 // Push a copy of the function (found below the arguments) and 2500 // Push a copy of the function (found below the arguments) and
2789 // resolve eval. 2501 // resolve eval.
2790 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2502 __ Peek(x10, (arg_count + 1) * kPointerSize);
2791 __ push(r1); 2503 __ Push(x10);
2792 EmitResolvePossiblyDirectEval(arg_count); 2504 EmitResolvePossiblyDirectEval(arg_count);
2793 2505
2794 // The runtime call returns a pair of values in r0 (function) and 2506 // The runtime call returns a pair of values in x0 (function) and
2795 // r1 (receiver). Touch up the stack with the right values. 2507 // x1 (receiver). Touch up the stack with the right values.
2796 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2508 __ PokePair(x1, x0, arg_count * kPointerSize);
2797 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2798 } 2509 }
2799 2510
2800 // Record source position for debugger. 2511 // Record source position for debugger.
2801 SetSourcePosition(expr->position()); 2512 SetSourcePosition(expr->position());
2513
2514 // Call the evaluated function.
2802 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); 2515 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
2803 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2516 __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes);
2804 __ CallStub(&stub); 2517 __ CallStub(&stub);
2805 RecordJSReturnSite(expr); 2518 RecordJSReturnSite(expr);
2806 // Restore context register. 2519 // Restore context register.
2807 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2520 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2808 context()->DropAndPlug(1, r0); 2521 context()->DropAndPlug(1, x0);
2522
2809 } else if (call_type == Call::GLOBAL_CALL) { 2523 } else if (call_type == Call::GLOBAL_CALL) {
2810 EmitCallWithIC(expr); 2524 EmitCallWithIC(expr);
2811 2525
2812 } else if (call_type == Call::LOOKUP_SLOT_CALL) { 2526 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2813 // Call to a lookup slot (dynamically introduced variable). 2527 // Call to a lookup slot (dynamically introduced variable).
2814 VariableProxy* proxy = callee->AsVariableProxy(); 2528 VariableProxy* proxy = callee->AsVariableProxy();
2815 Label slow, done; 2529 Label slow, done;
2816 2530
2817 { PreservePositionScope scope(masm()->positions_recorder()); 2531 { PreservePositionScope scope(masm()->positions_recorder());
2818 // Generate code for loading from variables potentially shadowed 2532 // Generate code for loading from variables potentially shadowed
2819 // by eval-introduced variables. 2533 // by eval-introduced variables.
2820 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); 2534 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2821 } 2535 }
2822 2536
2823 __ bind(&slow); 2537 __ Bind(&slow);
2824 // Call the runtime to find the function to call (returned in r0) 2538 // Call the runtime to find the function to call (returned in x0)
2825 // and the object holding it (returned in r1). 2539 // and the object holding it (returned in x1).
2826 ASSERT(!context_register().is(r2)); 2540 __ Push(context_register());
2827 __ mov(r2, Operand(proxy->name())); 2541 __ Mov(x10, Operand(proxy->name()));
2828 __ Push(context_register(), r2); 2542 __ Push(x10);
2829 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2543 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2830 __ Push(r0, r1); // Function, receiver. 2544 __ Push(x0, x1); // Receiver, function.
2831 2545
2832 // If fast case code has been generated, emit code to push the 2546 // If fast case code has been generated, emit code to push the
2833 // function and receiver and have the slow path jump around this 2547 // function and receiver and have the slow path jump around this
2834 // code. 2548 // code.
2835 if (done.is_linked()) { 2549 if (done.is_linked()) {
2836 Label call; 2550 Label call;
2837 __ b(&call); 2551 __ B(&call);
2838 __ bind(&done); 2552 __ Bind(&done);
2839 // Push function. 2553 // Push function.
2840 __ push(r0); 2554 __ Push(x0);
2841 // The receiver is implicitly the global receiver. Indicate this 2555 // The receiver is implicitly the global receiver. Indicate this
2842 // by passing the hole to the call function stub. 2556 // by passing undefined to the call function stub.
2843 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 2557 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2844 __ push(r1); 2558 __ Push(x1);
2845 __ bind(&call); 2559 __ Bind(&call);
2846 } 2560 }
2847 2561
2848 // The receiver is either the global receiver or an object found 2562 // The receiver is either the global receiver or an object found
2849 // by LoadContextSlot. 2563 // by LoadContextSlot.
2850 EmitCallWithStub(expr); 2564 EmitCallWithStub(expr);
2851 } else if (call_type == Call::PROPERTY_CALL) { 2565 } else if (call_type == Call::PROPERTY_CALL) {
2852 Property* property = callee->AsProperty(); 2566 Property* property = callee->AsProperty();
2853 { PreservePositionScope scope(masm()->positions_recorder()); 2567 { PreservePositionScope scope(masm()->positions_recorder());
2854 VisitForStackValue(property->obj()); 2568 VisitForStackValue(property->obj());
2855 } 2569 }
2856 if (property->key()->IsPropertyName()) { 2570 if (property->key()->IsPropertyName()) {
2857 EmitCallWithIC(expr); 2571 EmitCallWithIC(expr);
2858 } else { 2572 } else {
2859 EmitKeyedCallWithIC(expr, property->key()); 2573 EmitKeyedCallWithIC(expr, property->key());
2860 } 2574 }
2575
2861 } else { 2576 } else {
2862 ASSERT(call_type == Call::OTHER_CALL); 2577 ASSERT(call_type == Call::OTHER_CALL);
2863 // Call to an arbitrary expression not handled specially above. 2578 // Call to an arbitrary expression not handled specially above.
2864 { PreservePositionScope scope(masm()->positions_recorder()); 2579 { PreservePositionScope scope(masm()->positions_recorder());
2865 VisitForStackValue(callee); 2580 VisitForStackValue(callee);
2866 } 2581 }
2867 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 2582 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2868 __ push(r1); 2583 __ Push(x1);
2869 // Emit function call. 2584 // Emit function call.
2870 EmitCallWithStub(expr); 2585 EmitCallWithStub(expr);
2871 } 2586 }
2872 2587
2873 #ifdef DEBUG 2588 #ifdef DEBUG
2874 // RecordJSReturnSite should have been called. 2589 // RecordJSReturnSite should have been called.
2875 ASSERT(expr->return_is_recorded_); 2590 ASSERT(expr->return_is_recorded_);
2876 #endif 2591 #endif
2877 } 2592 }
2878 2593
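In the eval path of VisitCall above, the runtime call returns the resolved function in x0 and the receiver in x1, and PokePair(x1, x0, arg_count * kPointerSize) overwrites both stack slots in one paired store where ARM needed two separate str instructions. A hypothetical sketch of the slot arithmetic, with indices standing for kPointerSize-sized slots counted from the stack top:

    #include <cassert>
    #include <vector>

    int main() {
      const int arg_count = 2;
      // Slot 0 is the top of stack: [args..., reserved receiver, function].
      std::vector<int> slots = {/* arg1 */ 11, /* arg0 */ 10,
                                /* reserved receiver */ -1,
                                /* function copy */ -2};
      int x0 = 100;  // resolved function (runtime call result)
      int x1 = 200;  // resolved receiver

      // PokePair(x1, x0, arg_count * kPointerSize): one store pair fills
      // the receiver slot and the function slot just above it.
      slots[arg_count] = x1;
      slots[arg_count + 1] = x0;

      assert(slots[2] == 200 && slots[3] == 100);
    }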
(...skipping 13 matching lines...)
2892 ZoneList<Expression*>* args = expr->arguments(); 2607 ZoneList<Expression*>* args = expr->arguments();
2893 int arg_count = args->length(); 2608 int arg_count = args->length();
2894 for (int i = 0; i < arg_count; i++) { 2609 for (int i = 0; i < arg_count; i++) {
2895 VisitForStackValue(args->at(i)); 2610 VisitForStackValue(args->at(i));
2896 } 2611 }
2897 2612
2898 // Call the construct call builtin that handles allocation and 2613 // Call the construct call builtin that handles allocation and
2899 // constructor invocation. 2614 // constructor invocation.
2900 SetSourcePosition(expr->position()); 2615 SetSourcePosition(expr->position());
2901 2616
2902 // Load function and argument count into r1 and r0. 2617 // Load function and argument count into x1 and x0.
2903 __ mov(r0, Operand(arg_count)); 2618 __ Mov(x0, arg_count);
2904 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 2619 __ Peek(x1, arg_count * kXRegSizeInBytes);
2905 2620
2906 // Record call targets in unoptimized code. 2621 // Record call targets in unoptimized code.
2907 Handle<Object> uninitialized = 2622 Handle<Object> uninitialized =
2908 TypeFeedbackInfo::UninitializedSentinel(isolate()); 2623 TypeFeedbackInfo::UninitializedSentinel(isolate());
2909 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); 2624 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2910 __ Move(r2, FeedbackVector()); 2625 __ LoadObject(x2, FeedbackVector());
2911 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); 2626 __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2912 2627
2913 CallConstructStub stub(RECORD_CALL_TARGET); 2628 CallConstructStub stub(RECORD_CALL_TARGET);
2914 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); 2629 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2915 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2630 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2916 context()->Plug(r0); 2631 context()->Plug(x0);
2917 } 2632 }
2918 2633
2919 2634
2920 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2635 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2921 ZoneList<Expression*>* args = expr->arguments(); 2636 ZoneList<Expression*>* args = expr->arguments();
2922 ASSERT(args->length() == 1); 2637 ASSERT(args->length() == 1);
2923 2638
2924 VisitForAccumulatorValue(args->at(0)); 2639 VisitForAccumulatorValue(args->at(0));
2925 2640
2926 Label materialize_true, materialize_false; 2641 Label materialize_true, materialize_false;
2927 Label* if_true = NULL; 2642 Label* if_true = NULL;
2928 Label* if_false = NULL; 2643 Label* if_false = NULL;
2929 Label* fall_through = NULL; 2644 Label* fall_through = NULL;
2930 context()->PrepareTest(&materialize_true, &materialize_false, 2645 context()->PrepareTest(&materialize_true, &materialize_false,
2931 &if_true, &if_false, &fall_through); 2646 &if_true, &if_false, &fall_through);
2932 2647
2933 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2648 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2934 __ SmiTst(r0); 2649 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2935 Split(eq, if_true, if_false, fall_through);
2936 2650
2937 context()->Plug(if_true, if_false); 2651 context()->Plug(if_true, if_false);
2938 } 2652 }
2939 2653
2940 2654
2941 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 2655 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2942 ZoneList<Expression*>* args = expr->arguments(); 2656 ZoneList<Expression*>* args = expr->arguments();
2943 ASSERT(args->length() == 1); 2657 ASSERT(args->length() == 1);
2944 2658
2945 VisitForAccumulatorValue(args->at(0)); 2659 VisitForAccumulatorValue(args->at(0));
2946 2660
2947 Label materialize_true, materialize_false; 2661 Label materialize_true, materialize_false;
2948 Label* if_true = NULL; 2662 Label* if_true = NULL;
2949 Label* if_false = NULL; 2663 Label* if_false = NULL;
2950 Label* fall_through = NULL; 2664 Label* fall_through = NULL;
2951 context()->PrepareTest(&materialize_true, &materialize_false, 2665 context()->PrepareTest(&materialize_true, &materialize_false,
2952 &if_true, &if_false, &fall_through); 2666 &if_true, &if_false, &fall_through);
2953 2667
2954 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2668 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2955 __ NonNegativeSmiTst(r0); 2669 __ TestAndSplit(x0, kSmiTagMask | (0x80000000UL << kSmiShift), if_true,
2956 Split(eq, if_true, if_false, fall_through); 2670 if_false, fall_through);
2957 2671
2958 context()->Plug(if_true, if_false); 2672 context()->Plug(if_true, if_false);
2959 } 2673 }
2960 2674
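The two TestAndSplit masks above lean on V8's A64 smi encoding, in which a small integer n is stored as n << kSmiShift with kSmiShift == 32, so the tag bit (bit 0) is clear and bit 63 carries the sign. EmitIsSmi tests only kSmiTagMask; EmitIsNonNegativeSmi also tests the sign bit via 0x80000000UL << kSmiShift. A runnable sketch of both checks (the constants mimic the V8 ones and should be read as assumptions):

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint64_t kSmiShift = 32;
      const uint64_t kSmiTagMask = 1;
      const uint64_t kNonNegativeMask =
          kSmiTagMask | (UINT64_C(0x80000000) << kSmiShift);

      // Encode n the way the A64 port stores smis: payload in the top half.
      auto smi = [&](int32_t n) {
        return static_cast<uint64_t>(static_cast<int64_t>(n)) << kSmiShift;
      };

      assert((smi(7) & kSmiTagMask) == 0);        // EmitIsSmi: tag bit clear
      assert((smi(7) & kNonNegativeMask) == 0);   // non-negative smi passes
      assert((smi(-7) & kNonNegativeMask) != 0);  // sign bit trips the test
    }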
2961 2675
2962 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 2676 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2963 ZoneList<Expression*>* args = expr->arguments(); 2677 ZoneList<Expression*>* args = expr->arguments();
2964 ASSERT(args->length() == 1); 2678 ASSERT(args->length() == 1);
2965 2679
2966 VisitForAccumulatorValue(args->at(0)); 2680 VisitForAccumulatorValue(args->at(0));
2967 2681
2968 Label materialize_true, materialize_false; 2682 Label materialize_true, materialize_false;
2969 Label* if_true = NULL; 2683 Label* if_true = NULL;
2970 Label* if_false = NULL; 2684 Label* if_false = NULL;
2971 Label* fall_through = NULL; 2685 Label* fall_through = NULL;
2972 context()->PrepareTest(&materialize_true, &materialize_false, 2686 context()->PrepareTest(&materialize_true, &materialize_false,
2973 &if_true, &if_false, &fall_through); 2687 &if_true, &if_false, &fall_through);
2974 2688
2975 __ JumpIfSmi(r0, if_false); 2689 __ JumpIfSmi(x0, if_false);
2976 __ LoadRoot(ip, Heap::kNullValueRootIndex); 2690 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2977 __ cmp(r0, ip); 2691 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2978 __ b(eq, if_true);
2979 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2980 // Undetectable objects behave like undefined when tested with typeof. 2692 // Undetectable objects behave like undefined when tested with typeof.
2981 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); 2693 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2982 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 2694 __ Tbnz(x11, Map::kIsUndetectable, if_false);
2983 __ b(ne, if_false); 2695 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
2984 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); 2696 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2985 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 2697 __ B(lt, if_false);
2986 __ b(lt, if_false); 2698 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2987 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2988 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2699 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2989 Split(le, if_true, if_false, fall_through); 2700 Split(le, if_true, if_false, fall_through);
2990 2701
2991 context()->Plug(if_true, if_false); 2702 context()->Plug(if_true, if_false);
2992 } 2703 }
2993 2704
2994 2705
2995 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 2706 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2996 ZoneList<Expression*>* args = expr->arguments(); 2707 ZoneList<Expression*>* args = expr->arguments();
2997 ASSERT(args->length() == 1); 2708 ASSERT(args->length() == 1);
2998 2709
2999 VisitForAccumulatorValue(args->at(0)); 2710 VisitForAccumulatorValue(args->at(0));
3000 2711
3001 Label materialize_true, materialize_false; 2712 Label materialize_true, materialize_false;
3002 Label* if_true = NULL; 2713 Label* if_true = NULL;
3003 Label* if_false = NULL; 2714 Label* if_false = NULL;
3004 Label* fall_through = NULL; 2715 Label* fall_through = NULL;
3005 context()->PrepareTest(&materialize_true, &materialize_false, 2716 context()->PrepareTest(&materialize_true, &materialize_false,
3006 &if_true, &if_false, &fall_through); 2717 &if_true, &if_false, &fall_through);
3007 2718
3008 __ JumpIfSmi(r0, if_false); 2719 __ JumpIfSmi(x0, if_false);
3009 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 2720 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3010 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2721 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3011 Split(ge, if_true, if_false, fall_through); 2722 Split(ge, if_true, if_false, fall_through);
3012 2723
3013 context()->Plug(if_true, if_false); 2724 context()->Plug(if_true, if_false);
3014 } 2725 }
3015 2726
3016 2727
3017 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 2728 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2729 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
3018 ZoneList<Expression*>* args = expr->arguments(); 2730 ZoneList<Expression*>* args = expr->arguments();
3019 ASSERT(args->length() == 1); 2731 ASSERT(args->length() == 1);
3020 2732
3021 VisitForAccumulatorValue(args->at(0)); 2733 VisitForAccumulatorValue(args->at(0));
3022 2734
3023 Label materialize_true, materialize_false; 2735 Label materialize_true, materialize_false;
3024 Label* if_true = NULL; 2736 Label* if_true = NULL;
3025 Label* if_false = NULL; 2737 Label* if_false = NULL;
3026 Label* fall_through = NULL; 2738 Label* fall_through = NULL;
3027 context()->PrepareTest(&materialize_true, &materialize_false, 2739 context()->PrepareTest(&materialize_true, &materialize_false,
3028 &if_true, &if_false, &fall_through); 2740 &if_true, &if_false, &fall_through);
3029 2741
3030 __ JumpIfSmi(r0, if_false); 2742 __ JumpIfSmi(x0, if_false);
3031 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2743 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3032 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 2744 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3033 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 2745 __ Tst(x11, 1 << Map::kIsUndetectable);
3034 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2746 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3035 Split(ne, if_true, if_false, fall_through); 2747 Split(ne, if_true, if_false, fall_through);
3036 2748
3037 context()->Plug(if_true, if_false); 2749 context()->Plug(if_true, if_false);
3038 } 2750 }
3039 2751
3040 2752
3041 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 2753 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3042 CallRuntime* expr) { 2754 CallRuntime* expr) {
3043 ZoneList<Expression*>* args = expr->arguments(); 2755 ZoneList<Expression*>* args = expr->arguments();
3044 ASSERT(args->length() == 1); 2756 ASSERT(args->length() == 1);
3045
3046 VisitForAccumulatorValue(args->at(0)); 2757 VisitForAccumulatorValue(args->at(0));
3047 2758
3048 Label materialize_true, materialize_false, skip_lookup; 2759 Label materialize_true, materialize_false, skip_lookup;
3049 Label* if_true = NULL; 2760 Label* if_true = NULL;
3050 Label* if_false = NULL; 2761 Label* if_false = NULL;
3051 Label* fall_through = NULL; 2762 Label* fall_through = NULL;
3052 context()->PrepareTest(&materialize_true, &materialize_false, 2763 context()->PrepareTest(&materialize_true, &materialize_false,
3053 &if_true, &if_false, &fall_through); 2764 &if_true, &if_false, &fall_through);
3054 2765
3055 __ AssertNotSmi(r0); 2766 Register object = x0;
2767 __ AssertNotSmi(object);
3056 2768
3057 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2769 Register map = x10;
3058 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); 2770 Register bitfield2 = x11;
3059 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 2771 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
3060 __ b(ne, &skip_lookup); 2772 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
2773 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
3061 2774
3062 // Check for fast case object. Generate false result for slow case object. 2775 // Check for fast case object. Generate false result for slow case object.
3063 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 2776 Register props = x12;
3064 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 2777 Register props_map = x12;
3065 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 2778 Register hash_table_map = x13;
3066 __ cmp(r2, ip); 2779 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
3067 __ b(eq, if_false); 2780 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
2781 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
2782 __ Cmp(props_map, hash_table_map);
2783 __ B(eq, if_false);
3068 2784
3069 // Look for valueOf name in the descriptor array, and indicate false if 2785 // Look for valueOf name in the descriptor array, and indicate false if found.
3070 // found. Since we omit an enumeration index check, if it is added via a 2786 // Since we omit an enumeration index check, if it is added via a transition
3071 // transition that shares its descriptor array, this is a false positive. 2787 // that shares its descriptor array, this is a false positive.
3072 Label entry, loop, done; 2788 Label loop, done;
3073 2789
3074 // Skip loop if no descriptors are valid. 2790 // Skip loop if no descriptors are valid.
3075 __ NumberOfOwnDescriptors(r3, r1); 2791 Register descriptors = x12;
3076 __ cmp(r3, Operand::Zero()); 2792 Register descriptors_length = x13;
3077 __ b(eq, &done); 2793 __ NumberOfOwnDescriptors(descriptors_length, map);
2794 __ Cbz(descriptors_length, &done);
3078 2795
3079 __ LoadInstanceDescriptors(r1, r4); 2796 __ LoadInstanceDescriptors(map, descriptors);
3080 // r4: descriptor array. 2797
3081 // r3: valid entries in the descriptor array. 2798 // Calculate the end of the descriptor array.
3082 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); 2799 Register descriptors_end = x14;
3083 __ mul(r3, r3, ip); 2800 __ Mov(x15, DescriptorArray::kDescriptorSize);
2801 __ Mul(descriptors_length, descriptors_length, x15);
3084 // Calculate location of the first key name. 2802 // Calculate location of the first key name.
3085 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); 2803 __ Add(descriptors, descriptors,
2804 DescriptorArray::kFirstOffset - kHeapObjectTag);
3086 // Calculate the end of the descriptor array. 2805 // Calculate the end of the descriptor array.
3087 __ mov(r2, r4); 2806 __ Add(descriptors_end, descriptors,
3088 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); 2807 Operand(descriptors_length, LSL, kPointerSizeLog2));
3089 2808
3090 // Loop through all the keys in the descriptor array. If one of these is the 2809 // Loop through all the keys in the descriptor array. If one of these is the
3091 // string "valueOf" the result is false. 2810 // string "valueOf" the result is false.
3092 // The use of ip to store the valueOf string assumes that it is not otherwise 2811 // TODO(all): optimise this loop to combine the add and ldr into an
3093 // used in the loop below. 2812 // addressing mode.
3094 __ mov(ip, Operand(isolate()->factory()->value_of_string())); 2813 Register valueof_string = x1;
3095 __ jmp(&entry); 2814 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
3096 __ bind(&loop); 2815 __ Bind(&loop);
3097 __ ldr(r3, MemOperand(r4, 0)); 2816 __ Ldr(x15, MemOperand(descriptors));
3098 __ cmp(r3, ip); 2817 __ Cmp(x15, valueof_string);
3099 __ b(eq, if_false); 2818 __ B(eq, if_false);
3100 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); 2819 __ Add(descriptors, descriptors,
3101 __ bind(&entry); 2820 DescriptorArray::kDescriptorSize * kPointerSize);
3102 __ cmp(r4, Operand(r2)); 2821 __ Cmp(descriptors, descriptors_end);
3103 __ b(ne, &loop); 2822 __ B(ne, &loop);
3104 2823
3105 __ bind(&done); 2824 __ Bind(&done);
3106 2825
3107 // Set the bit in the map to indicate that there is no local valueOf field. 2826 // Set the bit in the map to indicate that there is no local valueOf field.
3108 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 2827 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3109 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 2828 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3110 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 2829 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3111 2830
3112 __ bind(&skip_lookup); 2831 __ Bind(&skip_lookup);
3113 2832
3114 // If a valueOf property is not found on the object, check that its 2833 // If a valueOf property is not found on the object, check that its prototype
3115 // prototype is the unmodified String prototype. If not, the result is false. 2834 // is the unmodified String prototype. If not, the result is false.
3116 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); 2835 Register prototype = x1;
3117 __ JumpIfSmi(r2, if_false); 2836 Register global_idx = x2;
3118 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 2837 Register native_context = x2;
3119 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 2838 Register string_proto = x3;
3120 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); 2839 Register proto_map = x4;
3121 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); 2840 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
3122 __ cmp(r2, r3); 2841 __ JumpIfSmi(prototype, if_false);
2842 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
2843 __ Ldr(global_idx, GlobalObjectMemOperand());
2844 __ Ldr(native_context,
2845 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
2846 __ Ldr(string_proto,
2847 ContextMemOperand(native_context,
2848 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2849 __ Cmp(proto_map, string_proto);
2850
3123 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2851 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3124 Split(eq, if_true, if_false, fall_through); 2852 Split(eq, if_true, if_false, fall_through);
3125 2853
3126 context()->Plug(if_true, if_false); 2854 context()->Plug(if_true, if_false);
3127 } 2855 }
3128 2856
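The descriptor-array walk in the function above is a plain linear scan: step through the key slot of each valid descriptor and bail out to if_false as soon as a key equals the value_of_string. The C++ shape of that loop, with illustrative stand-ins (the stride of 3 mirrors DescriptorArray::kDescriptorSize key/details/value triples and is an assumption here):

    #include <cstring>

    // Return true (-> if_false in the generated code) when any descriptor
    // key is "valueOf". keys[] is a flattened array with one key every
    // kDescriptorSize slots, as in the generated loop above.
    static bool HasValueOfKey(const char* const* keys, int valid) {
      const int kDescriptorSize = 3;  // assumed: key, details, value
      for (int i = 0; i < valid * kDescriptorSize; i += kDescriptorSize) {
        if (std::strcmp(keys[i], "valueOf") == 0) return true;
      }
      return false;
    }

    int main() {
      const char* keys[] = {"toString", "", "", "valueOf", "", ""};
      return HasValueOfKey(keys, 2) ? 0 : 1;  // finds "valueOf"
    }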
3129 2857
3130 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 2858 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3131 ZoneList<Expression*>* args = expr->arguments(); 2859 ZoneList<Expression*>* args = expr->arguments();
3132 ASSERT(args->length() == 1); 2860 ASSERT(args->length() == 1);
3133 2861
3134 VisitForAccumulatorValue(args->at(0)); 2862 VisitForAccumulatorValue(args->at(0));
3135 2863
3136 Label materialize_true, materialize_false; 2864 Label materialize_true, materialize_false;
3137 Label* if_true = NULL; 2865 Label* if_true = NULL;
3138 Label* if_false = NULL; 2866 Label* if_false = NULL;
3139 Label* fall_through = NULL; 2867 Label* fall_through = NULL;
3140 context()->PrepareTest(&materialize_true, &materialize_false, 2868 context()->PrepareTest(&materialize_true, &materialize_false,
3141 &if_true, &if_false, &fall_through); 2869 &if_true, &if_false, &fall_through);
3142 2870
3143 __ JumpIfSmi(r0, if_false); 2871 __ JumpIfSmi(x0, if_false);
3144 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 2872 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
3145 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2873 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3146 Split(eq, if_true, if_false, fall_through); 2874 Split(eq, if_true, if_false, fall_through);
3147 2875
3148 context()->Plug(if_true, if_false); 2876 context()->Plug(if_true, if_false);
3149 } 2877 }
3150 2878
3151 2879
3152 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 2880 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3153 ZoneList<Expression*>* args = expr->arguments(); 2881 ZoneList<Expression*>* args = expr->arguments();
3154 ASSERT(args->length() == 1); 2882 ASSERT(args->length() == 1);
3155 2883
3156 VisitForAccumulatorValue(args->at(0)); 2884 VisitForAccumulatorValue(args->at(0));
3157 2885
3158 Label materialize_true, materialize_false; 2886 Label materialize_true, materialize_false;
3159 Label* if_true = NULL; 2887 Label* if_true = NULL;
3160 Label* if_false = NULL; 2888 Label* if_false = NULL;
3161 Label* fall_through = NULL; 2889 Label* fall_through = NULL;
3162 context()->PrepareTest(&materialize_true, &materialize_false, 2890 context()->PrepareTest(&materialize_true, &materialize_false,
3163 &if_true, &if_false, &fall_through); 2891 &if_true, &if_false, &fall_through);
3164 2892
3165 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); 2893 // Only a HeapNumber can be -0.0, so return false if we have something else.
3166 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 2894 __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3167 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 2895
3168 __ cmp(r2, Operand(0x80000000)); 2896 // Test the bit pattern.
3169 __ cmp(r1, Operand(0x00000000), eq); 2897 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
2898 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
3170 2899
3171 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2900 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3172 Split(eq, if_true, if_false, fall_through); 2901 Split(vs, if_true, if_false, fall_through);
3173 2902
3174 context()->Plug(if_true, if_false); 2903 context()->Plug(if_true, if_false);
3175 } 2904 }
3176 2905
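The EmitIsMinusZero rewrite is worth unpacking: only a HeapNumber whose payload is the exact bit pattern 0x8000000000000000 is -0.0, and Cmp(x10, 1) computes x10 - 1, which overflows (sets the V flag) precisely when x10 is INT64_MIN, i.e. that bit pattern. Splitting on vs therefore selects -0.0 and nothing else. A runnable check of the two facts this relies on:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      // Fact 1: -0.0 is the only double with bit pattern 0x8000000000000000.
      uint64_t bits;
      double minus_zero = -0.0;
      std::memcpy(&bits, &minus_zero, sizeof(bits));
      assert(bits == UINT64_C(0x8000000000000000));

      // Fact 2: x - 1 overflows (the V flag on A64) exactly for INT64_MIN,
      // which is the same bit pattern reinterpreted as a signed integer.
      int64_t x10 = static_cast<int64_t>(bits);
      assert(x10 == INT64_MIN);

      double plus_zero = 0.0;
      std::memcpy(&bits, &plus_zero, sizeof(bits));
      assert(bits == 0);  // +0.0 fails the check, as intended
    }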
3177 2906
3178 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 2907 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3179 ZoneList<Expression*>* args = expr->arguments(); 2908 ZoneList<Expression*>* args = expr->arguments();
3180 ASSERT(args->length() == 1); 2909 ASSERT(args->length() == 1);
3181 2910
3182 VisitForAccumulatorValue(args->at(0)); 2911 VisitForAccumulatorValue(args->at(0));
3183 2912
3184 Label materialize_true, materialize_false; 2913 Label materialize_true, materialize_false;
3185 Label* if_true = NULL; 2914 Label* if_true = NULL;
3186 Label* if_false = NULL; 2915 Label* if_false = NULL;
3187 Label* fall_through = NULL; 2916 Label* fall_through = NULL;
3188 context()->PrepareTest(&materialize_true, &materialize_false, 2917 context()->PrepareTest(&materialize_true, &materialize_false,
3189 &if_true, &if_false, &fall_through); 2918 &if_true, &if_false, &fall_through);
3190 2919
3191 __ JumpIfSmi(r0, if_false); 2920 __ JumpIfSmi(x0, if_false);
3192 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 2921 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
3193 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2922 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3194 Split(eq, if_true, if_false, fall_through); 2923 Split(eq, if_true, if_false, fall_through);
3195 2924
3196 context()->Plug(if_true, if_false); 2925 context()->Plug(if_true, if_false);
3197 } 2926 }
3198 2927
3199 2928
3200 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 2929 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3201 ZoneList<Expression*>* args = expr->arguments(); 2930 ZoneList<Expression*>* args = expr->arguments();
3202 ASSERT(args->length() == 1); 2931 ASSERT(args->length() == 1);
3203 2932
3204 VisitForAccumulatorValue(args->at(0)); 2933 VisitForAccumulatorValue(args->at(0));
3205 2934
3206 Label materialize_true, materialize_false; 2935 Label materialize_true, materialize_false;
3207 Label* if_true = NULL; 2936 Label* if_true = NULL;
3208 Label* if_false = NULL; 2937 Label* if_false = NULL;
3209 Label* fall_through = NULL; 2938 Label* fall_through = NULL;
3210 context()->PrepareTest(&materialize_true, &materialize_false, 2939 context()->PrepareTest(&materialize_true, &materialize_false,
3211 &if_true, &if_false, &fall_through); 2940 &if_true, &if_false, &fall_through);
3212 2941
3213 __ JumpIfSmi(r0, if_false); 2942 __ JumpIfSmi(x0, if_false);
3214 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 2943 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3215 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2944 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3216 Split(eq, if_true, if_false, fall_through); 2945 Split(eq, if_true, if_false, fall_through);
3217 2946
3218 context()->Plug(if_true, if_false); 2947 context()->Plug(if_true, if_false);
3219 } 2948 }
3220 2949
3221 2950
3222 2951
3223 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 2952 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3224 ASSERT(expr->arguments()->length() == 0); 2953 ASSERT(expr->arguments()->length() == 0);
3225 2954
3226 Label materialize_true, materialize_false; 2955 Label materialize_true, materialize_false;
3227 Label* if_true = NULL; 2956 Label* if_true = NULL;
3228 Label* if_false = NULL; 2957 Label* if_false = NULL;
3229 Label* fall_through = NULL; 2958 Label* fall_through = NULL;
3230 context()->PrepareTest(&materialize_true, &materialize_false, 2959 context()->PrepareTest(&materialize_true, &materialize_false,
3231 &if_true, &if_false, &fall_through); 2960 &if_true, &if_false, &fall_through);
3232 2961
3233 // Get the frame pointer for the calling frame. 2962 // Get the frame pointer for the calling frame.
3234 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2963 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3235 2964
3236 // Skip the arguments adaptor frame if it exists. 2965 // Skip the arguments adaptor frame if it exists.
3237 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); 2966 Label check_frame_marker;
3238 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2967 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
3239 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq); 2968 __ Cmp(x1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2969 __ B(ne, &check_frame_marker);
2970 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
3240 2971
3241 // Check the marker in the calling frame. 2972 // Check the marker in the calling frame.
3242 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); 2973 __ Bind(&check_frame_marker);
3243 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 2974 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
2975 __ Cmp(x1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3244 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2976 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3245 Split(eq, if_true, if_false, fall_through); 2977 Split(eq, if_true, if_false, fall_through);
3246 2978
3247 context()->Plug(if_true, if_false); 2979 context()->Plug(if_true, if_false);
3248 } 2980 }
3249 2981
3250 2982
3251 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 2983 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3252 ZoneList<Expression*>* args = expr->arguments(); 2984 ZoneList<Expression*>* args = expr->arguments();
3253 ASSERT(args->length() == 2); 2985 ASSERT(args->length() == 2);
3254 2986
3255 // Load the two objects into registers and perform the comparison. 2987 // Load the two objects into registers and perform the comparison.
3256 VisitForStackValue(args->at(0)); 2988 VisitForStackValue(args->at(0));
3257 VisitForAccumulatorValue(args->at(1)); 2989 VisitForAccumulatorValue(args->at(1));
3258 2990
3259 Label materialize_true, materialize_false; 2991 Label materialize_true, materialize_false;
3260 Label* if_true = NULL; 2992 Label* if_true = NULL;
3261 Label* if_false = NULL; 2993 Label* if_false = NULL;
3262 Label* fall_through = NULL; 2994 Label* fall_through = NULL;
3263 context()->PrepareTest(&materialize_true, &materialize_false, 2995 context()->PrepareTest(&materialize_true, &materialize_false,
3264 &if_true, &if_false, &fall_through); 2996 &if_true, &if_false, &fall_through);
3265 2997
3266 __ pop(r1); 2998 __ Pop(x1);
3267 __ cmp(r0, r1); 2999 __ Cmp(x0, x1);
3268 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3000 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3269 Split(eq, if_true, if_false, fall_through); 3001 Split(eq, if_true, if_false, fall_through);
3270 3002
3271 context()->Plug(if_true, if_false); 3003 context()->Plug(if_true, if_false);
3272 } 3004 }
3273 3005
3274 3006
3275 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3007 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3276 ZoneList<Expression*>* args = expr->arguments(); 3008 ZoneList<Expression*>* args = expr->arguments();
3277 ASSERT(args->length() == 1); 3009 ASSERT(args->length() == 1);
3278 3010
3279 // ArgumentsAccessStub expects the key in r1 and the formal 3011 // ArgumentsAccessStub expects the key in x1 and the formal parameter count in x0.
3280 // parameter count in r0.
3281 VisitForAccumulatorValue(args->at(0)); 3012 VisitForAccumulatorValue(args->at(0));
3282 __ mov(r1, r0); 3013 __ Mov(x1, x0);
3283 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3014 __ Mov(x0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3284 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); 3015 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3285 __ CallStub(&stub); 3016 __ CallStub(&stub);
3286 context()->Plug(r0); 3017 context()->Plug(x0);
3287 } 3018 }
3288 3019
3289 3020
3290 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 3021 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3291 ASSERT(expr->arguments()->length() == 0); 3022 ASSERT(expr->arguments()->length() == 0);
3292 3023 Label exit;
3293 // Get the number of formal parameters. 3024 // Get the number of formal parameters.
3294 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3025 __ Mov(x0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3295 3026
3296 // Check if the calling frame is an arguments adaptor frame. 3027 // Check if the calling frame is an arguments adaptor frame.
3297 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3028 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3298 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3029 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3299 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3030 __ Cmp(x13, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3031 __ B(ne, &exit);
3300 3032
3301 // Arguments adaptor case: Read the arguments length from the 3033 // Arguments adaptor case: Read the arguments length from the
3302 // adaptor frame. 3034 // adaptor frame.
3303 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq); 3035 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3304 3036
3305 context()->Plug(r0); 3037 __ Bind(&exit);
3038 context()->Plug(x0);
3306 } 3039 }
3307 3040
3308 3041
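EmitArgumentsLength shows a recurring theme of the port: ARM could predicate the load itself (ldr ..., eq), but A64 has no conditionally executed loads, so the rewrite introduces an exit label and branches around the load with B(ne, &exit). The control flow, as a small illustrative C++ function (names and values are stand-ins, not V8 API):

    #include <cassert>

    enum FrameType { NORMAL, ARGUMENTS_ADAPTOR };

    static int ArgumentsLength(FrameType caller, int formal_count,
                               int adaptor_count) {
      int x0 = formal_count;              // default: formal parameter count
      if (caller != ARGUMENTS_ADAPTOR) {  // B(ne, &exit)
        return x0;                        // Bind(&exit)
      }
      x0 = adaptor_count;                 // Ldr from the adaptor frame
      return x0;
    }

    int main() {
      assert(ArgumentsLength(NORMAL, 2, 5) == 2);
      assert(ArgumentsLength(ARGUMENTS_ADAPTOR, 2, 5) == 5);
    }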
3309 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 3042 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3043 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3310 ZoneList<Expression*>* args = expr->arguments(); 3044 ZoneList<Expression*>* args = expr->arguments();
3311 ASSERT(args->length() == 1); 3045 ASSERT(args->length() == 1);
3312 Label done, null, function, non_function_constructor; 3046 Label done, null, function, non_function_constructor;
3313 3047
3314 VisitForAccumulatorValue(args->at(0)); 3048 VisitForAccumulatorValue(args->at(0));
3315 3049
3316 // If the object is a smi, we return null. 3050 // If the object is a smi, we return null.
3317 __ JumpIfSmi(r0, &null); 3051 __ JumpIfSmi(x0, &null);
3318 3052
3319 // Check that the object is a JS object but take special care of JS 3053 // Check that the object is a JS object but take special care of JS
3320 // functions to make sure they have 'Function' as their class. 3054 // functions to make sure they have 'Function' as their class.
3321 // Assume that there are only two callable types, and one of them is at 3055 // Assume that there are only two callable types, and one of them is at
3322 // either end of the type range for JS object types. Saves extra comparisons. 3056 // either end of the type range for JS object types. Saves extra comparisons.
3323 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 3057 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3324 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE); 3058 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3325 // Map is now in r0. 3059 // x10: object's map.
3326 __ b(lt, &null); 3060 // x11: object's type.
3061 __ B(lt, &null);
3327 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == 3062 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3328 FIRST_SPEC_OBJECT_TYPE + 1); 3063 FIRST_SPEC_OBJECT_TYPE + 1);
3329 __ b(eq, &function); 3064 __ B(eq, &function);
3330 3065
3331 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); 3066 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3332 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == 3067 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3333 LAST_SPEC_OBJECT_TYPE - 1); 3068 LAST_SPEC_OBJECT_TYPE - 1);
3334 __ b(eq, &function); 3069 __ B(eq, &function);
3335 // Assume that there is no larger type. 3070 // Assume that there is no larger type.
3336 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); 3071 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3337 3072
3338 // Check if the constructor in the map is a JS function. 3073 // Check if the constructor in the map is a JS function.
3339 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); 3074 __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
3340 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3075 __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
3341 __ b(ne, &non_function_constructor); 3076 &non_function_constructor);
3342 3077
3343 // r0 now contains the constructor function. Grab the 3078 // x12 now contains the constructor function. Grab the
3344 // instance class name from there. 3079 // instance class name from there.
3345 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 3080 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3346 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); 3081 __ Ldr(x0,
3347 __ b(&done); 3082 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
3083 __ B(&done);
3348 3084
3349 // Functions have class 'Function'. 3085 // Functions have class 'Function'.
3350 __ bind(&function); 3086 __ Bind(&function);
3351 __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex); 3087 __ LoadRoot(x0, Heap::kfunction_class_stringRootIndex);
3352 __ jmp(&done); 3088 __ B(&done);
3353 3089
3354 // Objects with a non-function constructor have class 'Object'. 3090 // Objects with a non-function constructor have class 'Object'.
3355 __ bind(&non_function_constructor); 3091 __ Bind(&non_function_constructor);
3356 __ LoadRoot(r0, Heap::kObject_stringRootIndex); 3092 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3357 __ jmp(&done); 3093 __ B(&done);
3358 3094
3359 // Non-JS objects have class null. 3095 // Non-JS objects have class null.
3360 __ bind(&null); 3096 __ Bind(&null);
3361 __ LoadRoot(r0, Heap::kNullValueRootIndex); 3097 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3362 3098
3363 // All done. 3099 // All done.
3364 __ bind(&done); 3100 __ Bind(&done);
3365 3101
3366 context()->Plug(r0); 3102 context()->Plug(x0);
3367 } 3103 }
3368 3104
3369 3105
3370 void FullCodeGenerator::EmitLog(CallRuntime* expr) { 3106 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3371 // Conditionally generate a log call. 3107 // Conditionally generate a log call.
3372 // Args: 3108 // Args:
3373 // 0 (literal string): The type of logging (corresponds to the flags). 3109 // 0 (literal string): The type of logging (corresponds to the flags).
3374 // This is used to determine whether or not to generate the log call. 3110 // This is used to determine whether or not to generate the log call.
3375 // 1 (string): Format string. Access the string at argument index 2 3111 // 1 (string): Format string. Access the string at argument index 2
3376 // with '%2s' (see Logger::LogRuntime for all the formats). 3112 // with '%2s' (see Logger::LogRuntime for all the formats).
3377 // 2 (array): Arguments to the format string. 3113 // 2 (array): Arguments to the format string.
3378 ZoneList<Expression*>* args = expr->arguments(); 3114 ZoneList<Expression*>* args = expr->arguments();
3379 ASSERT_EQ(args->length(), 3); 3115 ASSERT_EQ(args->length(), 3);
3380 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) { 3116 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3381 VisitForStackValue(args->at(1)); 3117 VisitForStackValue(args->at(1));
3382 VisitForStackValue(args->at(2)); 3118 VisitForStackValue(args->at(2));
3383 __ CallRuntime(Runtime::kLog, 2); 3119 __ CallRuntime(Runtime::kLog, 2);
3384 } 3120 }
3385 3121
3386 // Finally, we're expected to leave a value on the top of the stack. 3122 // Finally, we're expected to leave a value on the top of the stack.
3387 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3123 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3388 context()->Plug(r0); 3124 context()->Plug(x0);
3389 } 3125 }
3390 3126
3391 3127
3392 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3128 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3393 // Load the arguments on the stack and call the stub. 3129 // Load the arguments on the stack and call the stub.
3394 SubStringStub stub; 3130 SubStringStub stub;
3395 ZoneList<Expression*>* args = expr->arguments(); 3131 ZoneList<Expression*>* args = expr->arguments();
3396 ASSERT(args->length() == 3); 3132 ASSERT(args->length() == 3);
3397 VisitForStackValue(args->at(0)); 3133 VisitForStackValue(args->at(0));
3398 VisitForStackValue(args->at(1)); 3134 VisitForStackValue(args->at(1));
3399 VisitForStackValue(args->at(2)); 3135 VisitForStackValue(args->at(2));
3400 __ CallStub(&stub); 3136 __ CallStub(&stub);
3401 context()->Plug(r0); 3137 context()->Plug(x0);
3402 } 3138 }
3403 3139
3404 3140
3405 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3141 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3406 // Load the arguments on the stack and call the stub. 3142 // Load the arguments on the stack and call the stub.
3407 RegExpExecStub stub; 3143 RegExpExecStub stub;
3408 ZoneList<Expression*>* args = expr->arguments(); 3144 ZoneList<Expression*>* args = expr->arguments();
3409 ASSERT(args->length() == 4); 3145 ASSERT(args->length() == 4);
3410 VisitForStackValue(args->at(0)); 3146 VisitForStackValue(args->at(0));
3411 VisitForStackValue(args->at(1)); 3147 VisitForStackValue(args->at(1));
3412 VisitForStackValue(args->at(2)); 3148 VisitForStackValue(args->at(2));
3413 VisitForStackValue(args->at(3)); 3149 VisitForStackValue(args->at(3));
3414 __ CallStub(&stub); 3150 __ CallStub(&stub);
3415 context()->Plug(r0); 3151 context()->Plug(x0);
3416 } 3152 }
3417 3153
3418 3154
3419 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3155 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3156 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3420 ZoneList<Expression*>* args = expr->arguments(); 3157 ZoneList<Expression*>* args = expr->arguments();
3421 ASSERT(args->length() == 1); 3158 ASSERT(args->length() == 1);
3422 VisitForAccumulatorValue(args->at(0)); // Load the object. 3159 VisitForAccumulatorValue(args->at(0)); // Load the object.
3423 3160
3424 Label done; 3161 Label done;
3425 // If the object is a smi return the object. 3162 // If the object is a smi return the object.
3426 __ JumpIfSmi(r0, &done); 3163 __ JumpIfSmi(x0, &done);
3427 // If the object is not a value type, return the object. 3164 // If the object is not a value type, return the object.
3428 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 3165 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3429 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq); 3166 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3430 3167
3431 __ bind(&done); 3168 __ Bind(&done);
3432 context()->Plug(r0); 3169 context()->Plug(x0);
3433 } 3170 }
3434 3171
3435 3172
3436 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3173 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3437 ZoneList<Expression*>* args = expr->arguments(); 3174 ZoneList<Expression*>* args = expr->arguments();
3438 ASSERT(args->length() == 2); 3175 ASSERT(args->length() == 2);
3439 ASSERT_NE(NULL, args->at(1)->AsLiteral()); 3176 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3440 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3177 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3441 3178
3442 VisitForAccumulatorValue(args->at(0)); // Load the object. 3179 VisitForAccumulatorValue(args->at(0)); // Load the object.
3443 3180
3444 Label runtime, done, not_date_object; 3181 Label runtime, done, not_date_object;
3445 Register object = r0; 3182 Register object = x0;
3446 Register result = r0; 3183 Register result = x0;
3447 Register scratch0 = r9; 3184 Register stamp_addr = x10;
3448 Register scratch1 = r1; 3185 Register stamp_cache = x11;
3449 3186
3450 __ JumpIfSmi(object, &not_date_object); 3187 __ JumpIfSmi(object, &not_date_object);
3451 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE); 3188 __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
3452 __ b(ne, &not_date_object);
3453 3189
3454 if (index->value() == 0) { 3190 if (index->value() == 0) {
3455 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); 3191 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3456 __ jmp(&done); 3192 __ B(&done);
3457 } else { 3193 } else {
3458 if (index->value() < JSDate::kFirstUncachedField) { 3194 if (index->value() < JSDate::kFirstUncachedField) {
3459 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 3195 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3460 __ mov(scratch1, Operand(stamp)); 3196 __ Mov(x10, Operand(stamp));
3461 __ ldr(scratch1, MemOperand(scratch1)); 3197 __ Ldr(stamp_addr, MemOperand(x10));
3462 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); 3198 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3463 __ cmp(scratch1, scratch0); 3199 __ Cmp(stamp_addr, stamp_cache);
3464 __ b(ne, &runtime); 3200 __ B(ne, &runtime);
3465 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + 3201 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3466 kPointerSize * index->value())); 3202 kPointerSize * index->value()));
3467 __ jmp(&done); 3203 __ B(&done);
3468 } 3204 }
3469 __ bind(&runtime); 3205
3470 __ PrepareCallCFunction(2, scratch1); 3206 __ Bind(&runtime);
3471 __ mov(r1, Operand(index)); 3207 __ Mov(x1, Operand(index));
3472 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 3208 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3473 __ jmp(&done); 3209 __ B(&done);
3474 } 3210 }
3475 3211
3476 __ bind(&not_date_object); 3212 __ Bind(&not_date_object);
3477 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3213 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3478 __ bind(&done); 3214 __ Bind(&done);
3479 context()->Plug(r0); 3215 context()->Plug(x0);
3480 } 3216 }
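The stamp comparison above is the date-cache invalidation test. A hedged sketch, assuming the field layout implied by the offsets used above and a placeholder value for kFirstUncachedField:

#include <cstdint>
struct JSDateSketch {
  uint64_t value;        // JSDate::kValueOffset
  uint64_t cache_stamp;  // JSDate::kCacheStampOffset
  uint64_t fields[8];    // cached fields, assumed layout
};
uint64_t DateFieldSketch(JSDateSketch* d, int index, uint64_t isolate_stamp,
                         uint64_t (*get_date_field)(JSDateSketch*, int)) {
  const int kFirstUncachedFieldSketch = 8;          // assumed value
  if (index == 0) return d->value;                  // raw time value
  if (index < kFirstUncachedFieldSketch &&
      d->cache_stamp == isolate_stamp) {            // stamp still valid?
    return d->fields[index];                        // cached fast path
  }
  return get_date_field(d, index);                  // CallCFunction fallback
}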
3481 3217
3482 3218
3483 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3219 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3484 ZoneList<Expression*>* args = expr->arguments(); 3220 ZoneList<Expression*>* args = expr->arguments();
3485 ASSERT_EQ(3, args->length()); 3221 ASSERT_EQ(3, args->length());
3486 3222
3487 Register string = r0; 3223 Register string = x0;
3488 Register index = r1; 3224 Register index = x1;
3489 Register value = r2; 3225 Register value = x2;
3226 Register scratch = x10;
3490 3227
3491 VisitForStackValue(args->at(1)); // index 3228 VisitForStackValue(args->at(1)); // index
3492 VisitForStackValue(args->at(2)); // value 3229 VisitForStackValue(args->at(2)); // value
3493 VisitForAccumulatorValue(args->at(0)); // string 3230 VisitForAccumulatorValue(args->at(0)); // string
3494 __ Pop(index, value); 3231 __ Pop(value, index);
3495 3232
3496 if (FLAG_debug_code) { 3233 if (FLAG_debug_code) {
3497 __ SmiTst(value); 3234 __ AssertSmi(value, kNonSmiValue);
3498 __ Check(eq, kNonSmiValue); 3235 __ AssertSmi(index, kNonSmiIndex);
3499 __ SmiTst(index);
3500 __ Check(eq, kNonSmiIndex);
3501 __ SmiUntag(index, index);
3502 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 3236 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3503 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); 3237 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3504 __ SmiTag(index, index); 3238 one_byte_seq_type);
3505 } 3239 }
3506 3240
3507 __ SmiUntag(value, value); 3241 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3508 __ add(ip, 3242 __ SmiUntag(value);
3509 string, 3243 __ SmiUntag(index);
3510 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3244 __ Strb(value, MemOperand(scratch, index));
3511 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3512 context()->Plug(string); 3245 context()->Plug(string);
3513 } 3246 }
3514 3247
3515 3248
3516 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3249 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3517 ZoneList<Expression*>* args = expr->arguments(); 3250 ZoneList<Expression*>* args = expr->arguments();
3518 ASSERT_EQ(3, args->length()); 3251 ASSERT_EQ(3, args->length());
3519 3252
3520 Register string = r0; 3253 Register string = x0;
3521 Register index = r1; 3254 Register index = x1;
3522 Register value = r2; 3255 Register value = x2;
3256 Register scratch = x10;
3523 3257
3524 VisitForStackValue(args->at(1)); // index 3258 VisitForStackValue(args->at(1)); // index
3525 VisitForStackValue(args->at(2)); // value 3259 VisitForStackValue(args->at(2)); // value
3526 VisitForAccumulatorValue(args->at(0)); // string 3260 VisitForAccumulatorValue(args->at(0)); // string
3527 __ Pop(index, value); 3261 __ Pop(value, index);
3528 3262
3529 if (FLAG_debug_code) { 3263 if (FLAG_debug_code) {
3530 __ SmiTst(value); 3264 __ AssertSmi(value, kNonSmiValue);
3531 __ Check(eq, kNonSmiValue); 3265 __ AssertSmi(index, kNonSmiIndex);
3532 __ SmiTst(index);
3533 __ Check(eq, kNonSmiIndex);
3534 __ SmiUntag(index, index);
3535 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 3266 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3536 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); 3267 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3537 __ SmiTag(index, index); 3268 two_byte_seq_type);
3538 } 3269 }
3539 3270
3540 __ SmiUntag(value, value); 3271 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3541 __ add(ip, 3272 __ SmiUntag(value);
3542 string, 3273 __ SmiUntag(index);
3543 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 3274 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3544 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3545 __ strh(value, MemOperand(ip, index));
3546 context()->Plug(string); 3275 context()->Plug(string);
3547 } 3276 }
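Both SetChar helpers reduce to one untagged indexed store past the string header. A sketch of the Strb addressing; the two constants are the ones named above, their concrete values assumed here for illustration only:

#include <cstdint>
void SeqStringSetCharSketch(intptr_t tagged_string, intptr_t index,
                            uint8_t value) {
  const intptr_t kHeapObjectTagSketch = 1;   // assumed tag value
  const intptr_t kHeaderSizeSketch = 16;     // SeqOneByteString::kHeaderSize
  uint8_t* chars = reinterpret_cast<uint8_t*>(
      tagged_string - kHeapObjectTagSketch + kHeaderSizeSketch);
  chars[index] = value;  // Strb; the two-byte variant scales index by 2 (LSL 1)
}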
3548 3277
3549 3278
3550
3551 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3279 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3552 // Load the arguments on the stack and call the runtime function. 3280 // Load the arguments on the stack and call the MathPow stub.
3553 ZoneList<Expression*>* args = expr->arguments(); 3281 ZoneList<Expression*>* args = expr->arguments();
3554 ASSERT(args->length() == 2); 3282 ASSERT(args->length() == 2);
3555 VisitForStackValue(args->at(0)); 3283 VisitForStackValue(args->at(0));
3556 VisitForStackValue(args->at(1)); 3284 VisitForStackValue(args->at(1));
3557 MathPowStub stub(MathPowStub::ON_STACK); 3285 MathPowStub stub(MathPowStub::ON_STACK);
3558 __ CallStub(&stub); 3286 __ CallStub(&stub);
3559 context()->Plug(r0); 3287 context()->Plug(x0);
3560 } 3288 }
3561 3289
3562 3290
3563 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3291 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3564 ZoneList<Expression*>* args = expr->arguments(); 3292 ZoneList<Expression*>* args = expr->arguments();
3565 ASSERT(args->length() == 2); 3293 ASSERT(args->length() == 2);
3566 VisitForStackValue(args->at(0)); // Load the object. 3294 VisitForStackValue(args->at(0)); // Load the object.
3567 VisitForAccumulatorValue(args->at(1)); // Load the value. 3295 VisitForAccumulatorValue(args->at(1)); // Load the value.
3568 __ pop(r1); // r0 = value. r1 = object. 3296 __ Pop(x1);
3297 // x0 = value.
3298 // x1 = object.
3569 3299
3570 Label done; 3300 Label done;
3571 // If the object is a smi, return the value. 3301 // If the object is a smi, return the value.
3572 __ JumpIfSmi(r1, &done); 3302 __ JumpIfSmi(x1, &done);
3573 3303
3574 // If the object is not a value type, return the value. 3304 // If the object is not a value type, return the value.
3575 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); 3305 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3576 __ b(ne, &done);
3577 3306
3578 // Store the value. 3307 // Store the value.
3579 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); 3308 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3580 // Update the write barrier. Save the value as it will be 3309 // Update the write barrier. Save the value as it will be
3581 // overwritten by the write barrier code and is needed afterward. 3310 // overwritten by the write barrier code and is needed afterward.
3582 __ mov(r2, r0); 3311 __ Mov(x10, x0);
3583 __ RecordWriteField( 3312 __ RecordWriteField(
3584 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 3313 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3585 3314
3586 __ bind(&done); 3315 __ Bind(&done);
3587 context()->Plug(r0); 3316 context()->Plug(x0);
3588 } 3317 }
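Why the Mov(x10, x0) before the barrier: the write-barrier helper may clobber the value register it is handed, and x0 must survive for context()->Plug(x0). A minimal sketch, with the barrier internals elided:

#include <cstdint>
struct ObjSketch { int64_t value_field; };
int64_t SetValueOfSketch(ObjSketch* obj, int64_t value,
                         void (*record_write)(ObjSketch*, int64_t)) {
  obj->value_field = value;  // Str(x0, FieldMemOperand(x1, kValueOffset))
  int64_t copy = value;      // Mov(x10, x0)
  record_write(obj, copy);   // RecordWriteField may trash copy's register
  return value;              // x0 still intact for the plug
}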
3589 3318
3590 3319
3591 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3320 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3592 ZoneList<Expression*>* args = expr->arguments(); 3321 ZoneList<Expression*>* args = expr->arguments();
3593 ASSERT_EQ(args->length(), 1); 3322 ASSERT_EQ(args->length(), 1);
3594 // Load the argument into r0 and call the stub. 3323
3324 // Load the argument into x0 and call the stub.
3595 VisitForAccumulatorValue(args->at(0)); 3325 VisitForAccumulatorValue(args->at(0));
3596 3326
3597 NumberToStringStub stub; 3327 NumberToStringStub stub;
3598 __ CallStub(&stub); 3328 __ CallStub(&stub);
3599 context()->Plug(r0); 3329 context()->Plug(x0);
3600 } 3330 }
3601 3331
3602 3332
3603 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3333 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3604 ZoneList<Expression*>* args = expr->arguments(); 3334 ZoneList<Expression*>* args = expr->arguments();
3605 ASSERT(args->length() == 1); 3335 ASSERT(args->length() == 1);
3336
3606 VisitForAccumulatorValue(args->at(0)); 3337 VisitForAccumulatorValue(args->at(0));
3607 3338
3608 Label done; 3339 Label done;
3609 StringCharFromCodeGenerator generator(r0, r1); 3340 Register code = x0;
3341 Register result = x1;
3342
3343 StringCharFromCodeGenerator generator(code, result);
3610 generator.GenerateFast(masm_); 3344 generator.GenerateFast(masm_);
3611 __ jmp(&done); 3345 __ B(&done);
3612 3346
3613 NopRuntimeCallHelper call_helper; 3347 NopRuntimeCallHelper call_helper;
3614 generator.GenerateSlow(masm_, call_helper); 3348 generator.GenerateSlow(masm_, call_helper);
3615 3349
3616 __ bind(&done); 3350 __ Bind(&done);
3617 context()->Plug(r1); 3351 context()->Plug(result);
3618 } 3352 }
3619 3353
3620 3354
3621 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3355 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3622 ZoneList<Expression*>* args = expr->arguments(); 3356 ZoneList<Expression*>* args = expr->arguments();
3623 ASSERT(args->length() == 2); 3357 ASSERT(args->length() == 2);
3358
3624 VisitForStackValue(args->at(0)); 3359 VisitForStackValue(args->at(0));
3625 VisitForAccumulatorValue(args->at(1)); 3360 VisitForAccumulatorValue(args->at(1));
3626 3361
3627 Register object = r1; 3362 Register object = x1;
3628 Register index = r0; 3363 Register index = x0;
3629 Register result = r3; 3364 Register result = x3;
3630 3365
3631 __ pop(object); 3366 __ Pop(object);
3632 3367
3633 Label need_conversion; 3368 Label need_conversion;
3634 Label index_out_of_range; 3369 Label index_out_of_range;
3635 Label done; 3370 Label done;
3636 StringCharCodeAtGenerator generator(object, 3371 StringCharCodeAtGenerator generator(object,
3637 index, 3372 index,
3638 result, 3373 result,
3639 &need_conversion, 3374 &need_conversion,
3640 &need_conversion, 3375 &need_conversion,
3641 &index_out_of_range, 3376 &index_out_of_range,
3642 STRING_INDEX_IS_NUMBER); 3377 STRING_INDEX_IS_NUMBER);
3643 generator.GenerateFast(masm_); 3378 generator.GenerateFast(masm_);
3644 __ jmp(&done); 3379 __ B(&done);
3645 3380
3646 __ bind(&index_out_of_range); 3381 __ Bind(&index_out_of_range);
3647 // When the index is out of range, the spec requires us to return 3382 // When the index is out of range, the spec requires us to return NaN.
3648 // NaN.
3649 __ LoadRoot(result, Heap::kNanValueRootIndex); 3383 __ LoadRoot(result, Heap::kNanValueRootIndex);
3650 __ jmp(&done); 3384 __ B(&done);
3651 3385
3652 __ bind(&need_conversion); 3386 __ Bind(&need_conversion);
3653 // Load the undefined value into the result register, which will 3387 // Load the undefined value into the result register, which will
3654 // trigger conversion. 3388 // trigger conversion.
3655 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 3389 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3656 __ jmp(&done); 3390 __ B(&done);
3657 3391
3658 NopRuntimeCallHelper call_helper; 3392 NopRuntimeCallHelper call_helper;
3659 generator.GenerateSlow(masm_, call_helper); 3393 generator.GenerateSlow(masm_, call_helper);
3660 3394
3661 __ bind(&done); 3395 __ Bind(&done);
3662 context()->Plug(result); 3396 context()->Plug(result);
3663 } 3397 }
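The three labels encode the spec behaviour for charCodeAt. A sketch of the generated result paths; the need_conversion label (non-number index or unconverted receiver) is the slow path and is not modelled here:

#include <cmath>
#include <string>
double CharCodeAtSketch(const std::string& s, long index) {
  if (index < 0 || index >= static_cast<long>(s.size())) {
    return std::nan("");  // index_out_of_range: spec requires NaN
  }
  return static_cast<unsigned char>(s[static_cast<size_t>(index)]);
}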
3664 3398
3665 3399
3666 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3400 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3667 ZoneList<Expression*>* args = expr->arguments(); 3401 ZoneList<Expression*>* args = expr->arguments();
3668 ASSERT(args->length() == 2); 3402 ASSERT(args->length() == 2);
3403
3669 VisitForStackValue(args->at(0)); 3404 VisitForStackValue(args->at(0));
3670 VisitForAccumulatorValue(args->at(1)); 3405 VisitForAccumulatorValue(args->at(1));
3671 3406
3672 Register object = r1; 3407 Register object = x1;
3673 Register index = r0; 3408 Register index = x0;
3674 Register scratch = r3; 3409 Register result = x0;
3675 Register result = r0;
3676 3410
3677 __ pop(object); 3411 __ Pop(object);
3678 3412
3679 Label need_conversion; 3413 Label need_conversion;
3680 Label index_out_of_range; 3414 Label index_out_of_range;
3681 Label done; 3415 Label done;
3682 StringCharAtGenerator generator(object, 3416 StringCharAtGenerator generator(object,
3683 index, 3417 index,
3684 scratch, 3418 x3,
3685 result, 3419 result,
3686 &need_conversion, 3420 &need_conversion,
3687 &need_conversion, 3421 &need_conversion,
3688 &index_out_of_range, 3422 &index_out_of_range,
3689 STRING_INDEX_IS_NUMBER); 3423 STRING_INDEX_IS_NUMBER);
3690 generator.GenerateFast(masm_); 3424 generator.GenerateFast(masm_);
3691 __ jmp(&done); 3425 __ B(&done);
3692 3426
3693 __ bind(&index_out_of_range); 3427 __ Bind(&index_out_of_range);
3694 // When the index is out of range, the spec requires us to return 3428 // When the index is out of range, the spec requires us to return
3695 // the empty string. 3429 // the empty string.
3696 __ LoadRoot(result, Heap::kempty_stringRootIndex); 3430 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3697 __ jmp(&done); 3431 __ B(&done);
3698 3432
3699 __ bind(&need_conversion); 3433 __ Bind(&need_conversion);
3700 // Move smi zero into the result register, which will trigger 3434 // Move smi zero into the result register, which will trigger conversion.
3701 // conversion. 3435 __ Mov(result, Operand(Smi::FromInt(0)));
3702 __ mov(result, Operand(Smi::FromInt(0))); 3436 __ B(&done);
3703 __ jmp(&done);
3704 3437
3705 NopRuntimeCallHelper call_helper; 3438 NopRuntimeCallHelper call_helper;
3706 generator.GenerateSlow(masm_, call_helper); 3439 generator.GenerateSlow(masm_, call_helper);
3707 3440
3708 __ bind(&done); 3441 __ Bind(&done);
3709 context()->Plug(result); 3442 context()->Plug(result);
3710 } 3443 }
3711 3444
3712 3445
3713 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3446 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3447 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3714 ZoneList<Expression*>* args = expr->arguments(); 3448 ZoneList<Expression*>* args = expr->arguments();
3715 ASSERT_EQ(2, args->length()); 3449 ASSERT_EQ(2, args->length());
3450
3716 VisitForStackValue(args->at(0)); 3451 VisitForStackValue(args->at(0));
3717 VisitForAccumulatorValue(args->at(1)); 3452 VisitForAccumulatorValue(args->at(1));
3718 3453
3719 __ pop(r1); 3454 __ Pop(x1);
3720 StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED); 3455 StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
3721 __ CallStub(&stub); 3456 __ CallStub(&stub);
3722 context()->Plug(r0); 3457
3458 context()->Plug(x0);
3723 } 3459 }
3724 3460
3725 3461
3726 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3462 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3727 ZoneList<Expression*>* args = expr->arguments(); 3463 ZoneList<Expression*>* args = expr->arguments();
3728 ASSERT_EQ(2, args->length()); 3464 ASSERT_EQ(2, args->length());
3729 VisitForStackValue(args->at(0)); 3465 VisitForStackValue(args->at(0));
3730 VisitForStackValue(args->at(1)); 3466 VisitForStackValue(args->at(1));
3731 3467
3732 StringCompareStub stub; 3468 StringCompareStub stub;
3733 __ CallStub(&stub); 3469 __ CallStub(&stub);
3734 context()->Plug(r0); 3470 context()->Plug(x0);
3735 } 3471 }
3736 3472
3737 3473
3738 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { 3474 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3739 // Load the argument on the stack and call the runtime function. 3475 // Load the argument on the stack and call the runtime function.
3740 ZoneList<Expression*>* args = expr->arguments(); 3476 ZoneList<Expression*>* args = expr->arguments();
3741 ASSERT(args->length() == 1); 3477 ASSERT(args->length() == 1);
3742 VisitForStackValue(args->at(0)); 3478 VisitForStackValue(args->at(0));
3743 __ CallRuntime(Runtime::kMath_log, 1); 3479 __ CallRuntime(Runtime::kMath_log, 1);
3744 context()->Plug(r0); 3480 context()->Plug(x0);
3745 } 3481 }
3746 3482
3747 3483
3748 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { 3484 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3749 // Load the argument on the stack and call the runtime function. 3485 // Load the argument on the stack and call the runtime function.
3750 ZoneList<Expression*>* args = expr->arguments(); 3486 ZoneList<Expression*>* args = expr->arguments();
3751 ASSERT(args->length() == 1); 3487 ASSERT(args->length() == 1);
3752 VisitForStackValue(args->at(0)); 3488 VisitForStackValue(args->at(0));
3753 __ CallRuntime(Runtime::kMath_sqrt, 1); 3489 __ CallRuntime(Runtime::kMath_sqrt, 1);
3754 context()->Plug(r0); 3490 context()->Plug(x0);
3755 } 3491 }
3756 3492
3757 3493
3758 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3494 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3495 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3759 ZoneList<Expression*>* args = expr->arguments(); 3496 ZoneList<Expression*>* args = expr->arguments();
3760 ASSERT(args->length() >= 2); 3497 ASSERT(args->length() >= 2);
3761 3498
3762 int arg_count = args->length() - 2; // 2 ~ receiver and function. 3499 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3763 for (int i = 0; i < arg_count + 1; i++) { 3500 for (int i = 0; i < arg_count + 1; i++) {
3764 VisitForStackValue(args->at(i)); 3501 VisitForStackValue(args->at(i));
3765 } 3502 }
3766 VisitForAccumulatorValue(args->last()); // Function. 3503 VisitForAccumulatorValue(args->last()); // Function.
3767 3504
3768 Label runtime, done; 3505 Label runtime, done;
3769 // Check for non-function argument (including proxy). 3506 // Check for non-function argument (including proxy).
3770 __ JumpIfSmi(r0, &runtime); 3507 __ JumpIfSmi(x0, &runtime);
3771 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3508 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3772 __ b(ne, &runtime);
3773 3509
3774 // InvokeFunction requires the function in r1. Move it in there. 3510 // InvokeFunction requires the function in x1. Move it in there.
3775 __ mov(r1, result_register()); 3511 __ Mov(x1, x0);
3776 ParameterCount count(arg_count); 3512 ParameterCount count(arg_count);
3777 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper()); 3513 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3778 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3514 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3779 __ jmp(&done); 3515 __ B(&done);
3780 3516
3781 __ bind(&runtime); 3517 __ Bind(&runtime);
3782 __ push(r0); 3518 __ Push(x0);
3783 __ CallRuntime(Runtime::kCall, args->length()); 3519 __ CallRuntime(Runtime::kCall, args->length());
3784 __ bind(&done); 3520 __ Bind(&done);
3785 3521
3786 context()->Plug(r0); 3522 context()->Plug(x0);
3787 } 3523 }
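The runtime label above is the catch-all for non-JSFunction callables (smis, proxies). A hedged dispatch sketch with stand-in types; the smi check is folded into is_js_function here:

#include <cstdint>
struct TargetSketch { bool is_js_function; };
int64_t CallFunctionSketch(TargetSketch* t,
                           int64_t (*invoke_function)(TargetSketch*),
                           int64_t (*runtime_kCall)(TargetSketch*)) {
  if (t->is_js_function) return invoke_function(t);  // InvokeFunction fast path
  return runtime_kCall(t);                           // Runtime::kCall fallback
}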
3788 3524
3789 3525
3790 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3526 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3791 RegExpConstructResultStub stub; 3527 RegExpConstructResultStub stub;
3792 ZoneList<Expression*>* args = expr->arguments(); 3528 ZoneList<Expression*>* args = expr->arguments();
3793 ASSERT(args->length() == 3); 3529 ASSERT(args->length() == 3);
3794 VisitForStackValue(args->at(0)); 3530 VisitForStackValue(args->at(0));
3795 VisitForStackValue(args->at(1)); 3531 VisitForStackValue(args->at(1));
3796 VisitForAccumulatorValue(args->at(2)); 3532 VisitForAccumulatorValue(args->at(2));
3797 __ pop(r1); 3533 __ Pop(x1, x2);
3798 __ pop(r2);
3799 __ CallStub(&stub); 3534 __ CallStub(&stub);
3800 context()->Plug(r0); 3535 context()->Plug(x0);
3801 } 3536 }
3802 3537
3803 3538
3804 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 3539 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3805 ZoneList<Expression*>* args = expr->arguments(); 3540 ZoneList<Expression*>* args = expr->arguments();
3806 ASSERT_EQ(2, args->length()); 3541 ASSERT_EQ(2, args->length());
3807 ASSERT_NE(NULL, args->at(0)->AsLiteral()); 3542 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3808 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); 3543 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3809 3544
3810 Handle<FixedArray> jsfunction_result_caches( 3545 Handle<FixedArray> jsfunction_result_caches(
3811 isolate()->native_context()->jsfunction_result_caches()); 3546 isolate()->native_context()->jsfunction_result_caches());
3812 if (jsfunction_result_caches->length() <= cache_id) { 3547 if (jsfunction_result_caches->length() <= cache_id) {
3813 __ Abort(kAttemptToUseUndefinedCache); 3548 __ Abort(kAttemptToUseUndefinedCache);
3814 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3549 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3815 context()->Plug(r0); 3550 context()->Plug(x0);
3816 return; 3551 return;
3817 } 3552 }
3818 3553
3819 VisitForAccumulatorValue(args->at(1)); 3554 VisitForAccumulatorValue(args->at(1));
3820 3555
3821 Register key = r0; 3556 Register key = x0;
3822 Register cache = r1; 3557 Register cache = x1;
3823 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3558 __ Ldr(cache, GlobalObjectMemOperand());
3824 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); 3559 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3825 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3560 __ Ldr(cache, ContextMemOperand(cache,
3826 __ ldr(cache, 3561 Context::JSFUNCTION_RESULT_CACHES_INDEX));
3562 __ Ldr(cache,
3827 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3563 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3828 3564
3565 Label done;
3566 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
3567 JSFunctionResultCache::kFingerOffset));
3568 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
3569 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
3829 3570
3830 Label done, not_found; 3571 // Load the key and data from the cache.
3831 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); 3572 __ Ldp(x2, x3, MemOperand(x3));
3832 // r2 now holds finger offset as a smi.
3833 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3834 // r3 now points to the start of fixed array elements.
3835 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
3836 // Note side effect of PreIndex: r3 now points to the key of the pair.
3837 __ cmp(key, r2);
3838 __ b(ne, &not_found);
3839 3573
3840 __ ldr(r0, MemOperand(r3, kPointerSize)); 3574 __ Cmp(key, x2);
3841 __ b(&done); 3575 __ CmovX(x0, x3, eq);
3576 __ B(eq, &done);
3842 3577
3843 __ bind(&not_found);
3844 // Call runtime to perform the lookup. 3578 // Call runtime to perform the lookup.
3845 __ Push(cache, key); 3579 __ Push(cache, key);
3846 __ CallRuntime(Runtime::kGetFromCache, 2); 3580 __ CallRuntime(Runtime::kGetFromCache, 2);
3847 3581
3848 __ bind(&done); 3582 __ Bind(&done);
3849 context()->Plug(r0); 3583 context()->Plug(x0);
3850 } 3584 }
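The Ldp loads the (key, value) pair at the cache finger in one instruction. A sketch of the lookup, assuming the cache layout implied by the offsets above: the finger indexes the most recently hit key, whose value sits one slot later.

#include <cstdint>
#include <vector>
int64_t GetFromCacheSketch(const std::vector<int64_t>& elements, size_t finger,
                           int64_t key, int64_t (*runtime_lookup)(int64_t)) {
  if (elements[finger] == key) {
    return elements[finger + 1];  // Cmp(key, x2) + CmovX(x0, x3, eq)
  }
  return runtime_lookup(key);     // Push(cache, key); Runtime::kGetFromCache
}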
3851 3585
3852 3586
3853 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 3587 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3854 ZoneList<Expression*>* args = expr->arguments(); 3588 ZoneList<Expression*>* args = expr->arguments();
3855 VisitForAccumulatorValue(args->at(0)); 3589 VisitForAccumulatorValue(args->at(0));
3856 3590
3857 Label materialize_true, materialize_false; 3591 Label materialize_true, materialize_false;
3858 Label* if_true = NULL; 3592 Label* if_true = NULL;
3859 Label* if_false = NULL; 3593 Label* if_false = NULL;
3860 Label* fall_through = NULL; 3594 Label* fall_through = NULL;
3861 context()->PrepareTest(&materialize_true, &materialize_false, 3595 context()->PrepareTest(&materialize_true, &materialize_false,
3862 &if_true, &if_false, &fall_through); 3596 &if_true, &if_false, &fall_through);
3863 3597
3864 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3598 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3865 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask)); 3599 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3866 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3600 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3867 Split(eq, if_true, if_false, fall_through); 3601 Split(eq, if_true, if_false, fall_through);
3868 3602
3869 context()->Plug(if_true, if_false); 3603 context()->Plug(if_true, if_false);
3870 } 3604 }
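The Tst/Split pair is a single bit-test: eq (mask bits clear) splits to if_true. Sketch; the mask constant is V8's kContainsCachedArrayIndexMask, and the value used here is an assumed placeholder:

#include <cstdint>
bool HasCachedArrayIndexSketch(uint32_t hash_field) {
  const uint32_t kMaskSketch = 1u << 30;    // assumed placeholder value
  return (hash_field & kMaskSketch) == 0;   // Tst + Split(eq, ...)
}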
3871 3605
3872 3606
3873 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3607 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3874 ZoneList<Expression*>* args = expr->arguments(); 3608 ZoneList<Expression*>* args = expr->arguments();
3875 ASSERT(args->length() == 1); 3609 ASSERT(args->length() == 1);
3876 VisitForAccumulatorValue(args->at(0)); 3610 VisitForAccumulatorValue(args->at(0));
3877 3611
3878 __ AssertString(r0); 3612 __ AssertString(x0);
3879 3613
3880 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3614 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3881 __ IndexFromHash(r0, r0); 3615 __ IndexFromHash(x10, x0);
3882 3616
3883 context()->Plug(r0); 3617 context()->Plug(x0);
3884 } 3618 }
3885 3619
3886 3620
3887 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { 3621 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3888 Label bailout, done, one_char_separator, long_separator, non_trivial_array, 3622 ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin");
3889 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop, 3623
3890 one_char_separator_loop_entry, long_separator_loop;
3891 ZoneList<Expression*>* args = expr->arguments(); 3624 ZoneList<Expression*>* args = expr->arguments();
3892 ASSERT(args->length() == 2); 3625 ASSERT(args->length() == 2);
3893 VisitForStackValue(args->at(1)); 3626 VisitForStackValue(args->at(1));
3894 VisitForAccumulatorValue(args->at(0)); 3627 VisitForAccumulatorValue(args->at(0));
3895 3628
3896 // All aliases of the same register have disjoint lifetimes. 3629 Register array = x0;
3897 Register array = r0; 3630 Register result = x0;
3898 Register elements = no_reg; // Will be r0. 3631 Register elements = x1;
3899 Register result = no_reg; // Will be r0. 3632 Register element = x2;
3900 Register separator = r1; 3633 Register separator = x3;
3901 Register array_length = r2; 3634 Register array_length = x4;
3902 Register result_pos = no_reg; // Will be r2 3635 Register result_pos = x5;
3903 Register string_length = r3; 3636 Register map = x6;
3904 Register string = r4; 3637 Register string_length = x10;
3905 Register element = r5; 3638 Register elements_end = x11;
3906 Register elements_end = r6; 3639 Register string = x12;
3907 Register scratch = r9; 3640 Register scratch1 = x13;
3641 Register scratch2 = x14;
3642 Register scratch3 = x7;
3643 Register separator_length = x15;
3908 3644
3909 // Separator operand is on the stack. 3645 Label bailout, done, one_char_separator, long_separator,
3910 __ pop(separator); 3646 non_trivial_array, not_size_one_array, loop,
3647 empty_separator_loop, one_char_separator_loop,
3648 one_char_separator_loop_entry, long_separator_loop;
3649
3650 // The separator operand is on the stack.
3651 __ Pop(separator);
3911 3652
3912 // Check that the array is a JSArray. 3653 // Check that the array is a JSArray.
3913 __ JumpIfSmi(array, &bailout); 3654 __ JumpIfSmi(array, &bailout);
3914 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE); 3655 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
3915 __ b(ne, &bailout);
3916 3656
3917 // Check that the array has fast elements. 3657 // Check that the array has fast elements.
3918 __ CheckFastElements(scratch, array_length, &bailout); 3658 __ CheckFastElements(map, scratch1, &bailout);
3919 3659
3920 // If the array has length zero, return the empty string. 3660 // If the array has length zero, return the empty string.
3921 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); 3661 // Load and untag the length of the array.
3922 __ SmiUntag(array_length, SetCC); 3662 // It is an unsigned value, so we can skip sign extension.
3923 __ b(ne, &non_trivial_array); 3663 // We assume little endianness.
3924 __ LoadRoot(r0, Heap::kempty_stringRootIndex); 3664 __ Ldrsw(array_length,
3925 __ b(&done); 3665 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
3666 __ Cbnz(array_length, &non_trivial_array);
3667 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3668 __ B(&done);
3926 3669
3927 __ bind(&non_trivial_array); 3670 __ Bind(&non_trivial_array);
3928
3929 // Get the FixedArray containing array's elements. 3671 // Get the FixedArray containing array's elements.
3930 elements = array; 3672 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3931 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3932 array = no_reg; // End of array's live range.
3933 3673
3934 // Check that all array elements are sequential ASCII strings, and 3674 // Check that all array elements are sequential ASCII strings, and
3935 // accumulate the sum of their lengths, as a smi-encoded value. 3675 // accumulate the sum of their lengths.
3936 __ mov(string_length, Operand::Zero()); 3676 __ Mov(string_length, 0);
3937 __ add(element, 3677 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
3938 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3678 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3939 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3940 // Loop condition: while (element < elements_end). 3679 // Loop condition: while (element < elements_end).
3941 // Live values in registers: 3680 // Live values in registers:
3942 // elements: Fixed array of strings. 3681 // elements: Fixed array of strings.
3943 // array_length: Length of the fixed array of strings (not smi) 3682 // array_length: Length of the fixed array of strings (not smi)
3944 // separator: Separator string 3683 // separator: Separator string
3945 // string_length: Accumulated sum of string lengths (smi). 3684 // string_length: Accumulated sum of string lengths (not smi).
3946 // element: Current array element. 3685 // element: Current array element.
3947 // elements_end: Array end. 3686 // elements_end: Array end.
3948 if (generate_debug_code_) { 3687 if (FLAG_debug_code) {
3949 __ cmp(array_length, Operand::Zero()); 3688 __ Cmp(array_length, Operand(0));
3950 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin); 3689 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
3951 } 3690 }
3952 __ bind(&loop); 3691 __ Bind(&loop);
3953 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3692 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
3954 __ JumpIfSmi(string, &bailout); 3693 __ JumpIfSmi(string, &bailout);
3955 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); 3694 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3956 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 3695 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3957 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout); 3696 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3958 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); 3697 __ Ldrsw(scratch1,
3959 __ add(string_length, string_length, Operand(scratch), SetCC); 3698 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
3960 __ b(vs, &bailout); 3699 __ Adds(string_length, string_length, scratch1);
3961 __ cmp(element, elements_end); 3700 __ B(vs, &bailout);
3962 __ b(lt, &loop); 3701 __ Cmp(element, elements_end);
3702 __ B(lt, &loop);
3963 3703
3964 // If array_length is 1, return elements[0], a string. 3704 // If array_length is 1, return elements[0], a string.
3965 __ cmp(array_length, Operand(1)); 3705 __ Cmp(array_length, 1);
3966 __ b(ne, &not_size_one_array); 3706 __ B(ne, &not_size_one_array);
3967 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); 3707 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
3968 __ b(&done); 3708 __ B(&done);
3969 3709
3970 __ bind(&not_size_one_array); 3710 __ Bind(&not_size_one_array);
3971 3711
3972 // Live values in registers: 3712 // Live values in registers:
3973 // separator: Separator string 3713 // separator: Separator string
3974 // array_length: Length of the array. 3714 // array_length: Length of the array (not smi).
3975 // string_length: Sum of string lengths (smi). 3715 // string_length: Sum of string lengths (not smi).
3976 // elements: FixedArray of strings. 3716 // elements: FixedArray of strings.
3977 3717
3978 // Check that the separator is a flat ASCII string. 3718 // Check that the separator is a flat ASCII string.
3979 __ JumpIfSmi(separator, &bailout); 3719 __ JumpIfSmi(separator, &bailout);
3980 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset)); 3720 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3981 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 3721 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3982 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout); 3722 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3983 3723
3984 // Add (separator length times array_length) - separator length to the 3724 // Add (separator length times array_length) - separator length to the
3985 // string_length to get the length of the result string. array_length is not 3725 // string_length to get the length of the result string.
3986 // smi but the other values are, so the result is a smi 3726 // Load the separator length as untagged.
3987 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 3727 // We assume little endianness, and that the length is positive.
3988 __ sub(string_length, string_length, Operand(scratch)); 3728 __ Ldrsw(separator_length,
3989 __ smull(scratch, ip, array_length, scratch); 3729 UntagSmiFieldMemOperand(separator,
3990 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are 3730 SeqOneByteString::kLengthOffset));
3991 // zero. 3731 __ Sub(string_length, string_length, separator_length);
3992 __ cmp(ip, Operand::Zero()); 3732 __ Umaddl(string_length, array_length.W(), separator_length.W(),
3993 __ b(ne, &bailout); 3733 string_length);
3994 __ tst(scratch, Operand(0x80000000));
3995 __ b(ne, &bailout);
3996 __ add(string_length, string_length, Operand(scratch), SetCC);
3997 __ b(vs, &bailout);
3998 __ SmiUntag(string_length);
3999 3734
4000 // Get first element in the array to free up the elements register to be used 3735 // Get first element in the array.
4001 // for the result. 3736 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4002 __ add(element,
4003 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4004 result = elements; // End of live range for elements.
4005 elements = no_reg;
4006 // Live values in registers: 3737 // Live values in registers:
4007 // element: First array element 3738 // element: First array element
4008 // separator: Separator string 3739 // separator: Separator string
4009 // string_length: Length of result string (not smi) 3740 // string_length: Length of result string (not smi)
4010 // array_length: Length of the array. 3741 // array_length: Length of the array (not smi).
4011 __ AllocateAsciiString(result, 3742 __ AllocateAsciiString(result, string_length, scratch1, scratch2, scratch3,
4012 string_length,
4013 scratch,
4014 string, // used as scratch
4015 elements_end, // used as scratch
4016 &bailout); 3743 &bailout);
3744
4017 // Prepare for looping. Set up elements_end to end of the array. Set 3745 // Prepare for looping. Set up elements_end to end of the array. Set
4018 // result_pos to the position in the result where the first 3746 // result_pos to the position in the result where the first
4019 // character is written. 3747 // character is written.
4020 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 3748 // TODO(all): useless unless AllocateAsciiString trashes the register.
4021 result_pos = array_length; // End of live range for array_length. 3749 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4022 array_length = no_reg; 3750 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4023 __ add(result_pos,
4024 result,
4025 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4026 3751
4027 // Check the length of the separator. 3752 // Check the length of the separator.
4028 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 3753 __ Cmp(separator_length, 1);
4029 __ cmp(scratch, Operand(Smi::FromInt(1))); 3754 __ B(eq, &one_char_separator);
4030 __ b(eq, &one_char_separator); 3755 __ B(gt, &long_separator);
4031 __ b(gt, &long_separator);
4032 3756
4033 // Empty separator case 3757 // Empty separator case
4034 __ bind(&empty_separator_loop); 3758 __ Bind(&empty_separator_loop);
4035 // Live values in registers: 3759 // Live values in registers:
4036 // result_pos: the position to which we are currently copying characters. 3760 // result_pos: the position to which we are currently copying characters.
4037 // element: Current array element. 3761 // element: Current array element.
4038 // elements_end: Array end. 3762 // elements_end: Array end.
4039 3763
4040 // Copy next array element to the result. 3764 // Copy next array element to the result.
4041 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3765 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4042 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3766 __ Ldrsw(string_length,
4043 __ SmiUntag(string_length); 3767 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4044 __ add(string, 3768 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4045 string, 3769 __ CopyBytes(result_pos, string, string_length, scratch1);
4046 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3770 __ Cmp(element, elements_end);
4047 __ CopyBytes(string, result_pos, string_length, scratch); 3771 __ B(lt, &empty_separator_loop); // End while (element < elements_end).
4048 __ cmp(element, elements_end); 3772 __ B(&done);
4049 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
4050 ASSERT(result.is(r0));
4051 __ b(&done);
4052 3773
4053 // One-character separator case 3774 // One-character separator case
4054 __ bind(&one_char_separator); 3775 __ Bind(&one_char_separator);
4055 // Replace separator with its ASCII character value. 3776 // Replace separator with its ASCII character value.
4056 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 3777 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4057 // Jump into the loop after the code that copies the separator, so the first 3778 // Jump into the loop after the code that copies the separator, so the first
4058 // element is not preceded by a separator. 3779 // element is not preceded by a separator.
4059 __ jmp(&one_char_separator_loop_entry); 3780 __ B(&one_char_separator_loop_entry);
4060 3781
4061 __ bind(&one_char_separator_loop); 3782 __ Bind(&one_char_separator_loop);
4062 // Live values in registers: 3783 // Live values in registers:
4063 // result_pos: the position to which we are currently copying characters. 3784 // result_pos: the position to which we are currently copying characters.
4064 // element: Current array element. 3785 // element: Current array element.
4065 // elements_end: Array end. 3786 // elements_end: Array end.
4066 // separator: Single separator ASCII char (in lower byte). 3787 // separator: Single separator ASCII char (in lower byte).
4067 3788
4068 // Copy the separator character to the result. 3789 // Copy the separator character to the result.
4069 __ strb(separator, MemOperand(result_pos, 1, PostIndex)); 3790 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
4070 3791
4071 // Copy next array element to the result. 3792 // Copy next array element to the result.
4072 __ bind(&one_char_separator_loop_entry); 3793 __ Bind(&one_char_separator_loop_entry);
4073 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3794 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4074 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3795 __ Ldrsw(string_length,
4075 __ SmiUntag(string_length); 3796 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4076 __ add(string, 3797 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4077 string, 3798 __ CopyBytes(result_pos, string, string_length, scratch1);
4078 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3799 __ Cmp(element, elements_end);
4079 __ CopyBytes(string, result_pos, string_length, scratch); 3800 __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
4080 __ cmp(element, elements_end); 3801 __ B(&done);
4081 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4082 ASSERT(result.is(r0));
4083 __ b(&done);
4084 3802
4085 // Long separator case (separator is more than one character). Entry is at the 3803 // Long separator case (separator is more than one character). Entry is at the
4086 // label long_separator below. 3804 // label long_separator below.
4087 __ bind(&long_separator_loop); 3805 __ Bind(&long_separator_loop);
4088 // Live values in registers: 3806 // Live values in registers:
4089 // result_pos: the position to which we are currently copying characters. 3807 // result_pos: the position to which we are currently copying characters.
4090 // element: Current array element. 3808 // element: Current array element.
4091 // elements_end: Array end. 3809 // elements_end: Array end.
4092 // separator: Separator string. 3810 // separator: Separator string.
4093 3811
4094 // Copy the separator to the result. 3812 // Copy the separator to the result.
4095 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); 3813 // TODO(all): hoist next two instructions.
4096 __ SmiUntag(string_length); 3814 __ Ldrsw(string_length,
4097 __ add(string, 3815 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4098 separator, 3816 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4099 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3817 __ CopyBytes(result_pos, string, string_length, scratch1);
4100 __ CopyBytes(string, result_pos, string_length, scratch);
4101 3818
4102 __ bind(&long_separator); 3819 __ Bind(&long_separator);
4103 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3820 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4104 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3821 __ Ldrsw(string_length,
4105 __ SmiUntag(string_length); 3822 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4106 __ add(string, 3823 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4107 string, 3824 __ CopyBytes(result_pos, string, string_length, scratch1);
4108 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3825 __ Cmp(element, elements_end);
4109 __ CopyBytes(string, result_pos, string_length, scratch); 3826 __ B(lt, &long_separator_loop); // End while (element < elements_end).
4110 __ cmp(element, elements_end); 3827 __ B(&done);
4111 __ b(lt, &long_separator_loop); // End while (element < elements_end).
4112 ASSERT(result.is(r0));
4113 __ b(&done);
4114 3828
4115 __ bind(&bailout); 3829 __ Bind(&bailout);
4116 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3830 // Returning undefined will force slower code to handle it.
4117 __ bind(&done); 3831 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4118 context()->Plug(r0); 3832 __ Bind(&done);
3833 context()->Plug(result);
4119 } 3834 }
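End to end, the fast join computes total = sum(len(e)) + (n - 1) * len(sep), which is exactly what the Sub/Umaddl pair evaluates, allocates once, then copies with the separator handling specialised for lengths 0, 1, and greater. A compact sketch; any non-flat-ASCII input or length overflow corresponds to the bailout label:

#include <string>
#include <vector>
std::string FastAsciiArrayJoinSketch(const std::vector<std::string>& elems,
                                     const std::string& sep) {
  if (elems.empty()) return std::string();      // empty array => empty string
  if (elems.size() == 1) return elems[0];       // size-one fast return
  size_t total = sep.size() * (elems.size() - 1);  // Sub + Umaddl
  for (const std::string& e : elems) total += e.size();
  std::string result;
  result.reserve(total);                        // AllocateAsciiString
  for (size_t i = 0; i < elems.size(); ++i) {
    if (i != 0) result += sep;                  // empty/one-char/long sep loops
    result += elems[i];                         // CopyBytes
  }
  return result;
}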
4120 3835
4121 3836
4122 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3837 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4123 Handle<String> name = expr->name(); 3838 Handle<String> name = expr->name();
4124 if (name->length() > 0 && name->Get(0) == '_') { 3839 if (name->length() > 0 && name->Get(0) == '_') {
4125 Comment cmnt(masm_, "[ InlineRuntimeCall"); 3840 Comment cmnt(masm_, "[ InlineRuntimeCall");
4126 EmitInlineRuntimeCall(expr); 3841 EmitInlineRuntimeCall(expr);
4127 return; 3842 return;
4128 } 3843 }
4129 3844
4130 Comment cmnt(masm_, "[ CallRuntime"); 3845 Comment cmnt(masm_, "[ CallRuntime");
4131 ZoneList<Expression*>* args = expr->arguments(); 3846 ZoneList<Expression*>* args = expr->arguments();
4132 int arg_count = args->length(); 3847 int arg_count = args->length();
4133 3848
4134 if (expr->is_jsruntime()) { 3849 if (expr->is_jsruntime()) {
4135 // Push the builtins object as the receiver. 3850 // Push the builtins object as the receiver.
4136 __ ldr(r0, GlobalObjectOperand()); 3851 __ Ldr(x10, GlobalObjectMemOperand());
4137 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset)); 3852 __ Ldr(x0, FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
4138 __ push(r0); 3853 __ Push(x0);
4139 3854
4140 // Load the function from the receiver. 3855 // Load the function from the receiver.
4141 __ mov(r2, Operand(expr->name())); 3856 __ Mov(x2, Operand(name));
4142 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); 3857 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4143 3858
4144 // Push the target function under the receiver. 3859 // Push the target function under the receiver.
4145 __ ldr(ip, MemOperand(sp, 0)); 3860 __ Pop(x10);
4146 __ push(ip); 3861 __ Push(x0, x10);
4147 __ str(r0, MemOperand(sp, kPointerSize));
4148 3862
4149 // Push the arguments ("left-to-right").
4150 int arg_count = args->length(); 3863 int arg_count = args->length();
4151 for (int i = 0; i < arg_count; i++) { 3864 for (int i = 0; i < arg_count; i++) {
4152 VisitForStackValue(args->at(i)); 3865 VisitForStackValue(args->at(i));
4153 } 3866 }
4154 3867
4155 // Record source position of the IC call. 3868 // Record source position of the IC call.
4156 SetSourcePosition(expr->position()); 3869 SetSourcePosition(expr->position());
4157 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); 3870 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
4158 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 3871 __ Peek(x1, (arg_count + 1) * kPointerSize);
4159 __ CallStub(&stub); 3872 __ CallStub(&stub);
4160 3873
4161 // Restore context register. 3874 // Restore context register.
4162 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3875 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4163 3876
4164 context()->DropAndPlug(1, r0); 3877 context()->DropAndPlug(1, x0);
4165 } else { 3878 } else {
4166 // Push the arguments ("left-to-right"). 3879 // Push the arguments ("left-to-right").
4167 for (int i = 0; i < arg_count; i++) { 3880 for (int i = 0; i < arg_count; i++) {
4168 VisitForStackValue(args->at(i)); 3881 VisitForStackValue(args->at(i));
4169 } 3882 }
4170 3883
4171 // Call the C runtime function. 3884 // Call the C runtime function.
4172 __ CallRuntime(expr->function(), arg_count); 3885 __ CallRuntime(expr->function(), arg_count);
4173 context()->Plug(r0); 3886 context()->Plug(x0);
4174 } 3887 }
4175 } 3888 }
4176 3889
4177 3890
4178 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3891 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4179 switch (expr->op()) { 3892 switch (expr->op()) {
4180 case Token::DELETE: { 3893 case Token::DELETE: {
4181 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3894 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4182 Property* property = expr->expression()->AsProperty(); 3895 Property* property = expr->expression()->AsProperty();
4183 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 3896 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4184 3897
4185 if (property != NULL) { 3898 if (property != NULL) {
4186 VisitForStackValue(property->obj()); 3899 VisitForStackValue(property->obj());
4187 VisitForStackValue(property->key()); 3900 VisitForStackValue(property->key());
4188 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE) 3901 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
4189 ? kNonStrictMode : kStrictMode; 3902 ? kNonStrictMode : kStrictMode;
4190 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag))); 3903 __ Mov(x10, Operand(Smi::FromInt(strict_mode_flag)));
4191 __ push(r1); 3904 __ Push(x10);
4192 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3905 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4193 context()->Plug(r0); 3906 context()->Plug(x0);
4194 } else if (proxy != NULL) { 3907 } else if (proxy != NULL) {
4195 Variable* var = proxy->var(); 3908 Variable* var = proxy->var();
4196 // Delete of an unqualified identifier is disallowed in strict mode 3909 // Delete of an unqualified identifier is disallowed in strict mode
4197 // but "delete this" is allowed. 3910 // but "delete this" is allowed.
4198 ASSERT(language_mode() == CLASSIC_MODE || var->is_this()); 3911 ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
4199 if (var->IsUnallocated()) { 3912 if (var->IsUnallocated()) {
4200 __ ldr(r2, GlobalObjectOperand()); 3913 __ Ldr(x12, GlobalObjectMemOperand());
4201 __ mov(r1, Operand(var->name())); 3914 __ Mov(x11, Operand(var->name()));
4202 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode))); 3915 __ Mov(x10, Operand(Smi::FromInt(kNonStrictMode)));
4203 __ Push(r2, r1, r0); 3916 __ Push(x12, x11, x10);
4204 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3917 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4205 context()->Plug(r0); 3918 context()->Plug(x0);
4206 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 3919 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4207 // Result of deleting non-global, non-dynamic variables is false. 3920 // Result of deleting non-global, non-dynamic variables is false.
4208 // The subexpression does not have side effects. 3921 // The subexpression does not have side effects.
4209 context()->Plug(var->is_this()); 3922 context()->Plug(var->is_this());
4210 } else { 3923 } else {
4211 // Non-global variable. Call the runtime to try to delete from the 3924 // Non-global variable. Call the runtime to try to delete from the
4212 // context where the variable was introduced. 3925 // context where the variable was introduced.
4213 ASSERT(!context_register().is(r2)); 3926 __ Mov(x2, Operand(var->name()));
4214 __ mov(r2, Operand(var->name())); 3927 __ Push(context_register(), x2);
4215 __ Push(context_register(), r2);
4216 __ CallRuntime(Runtime::kDeleteContextSlot, 2); 3928 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
4217 context()->Plug(r0); 3929 context()->Plug(x0);
4218 } 3930 }
4219 } else { 3931 } else {
4220 // Result of deleting non-property, non-variable reference is true. 3932 // Result of deleting non-property, non-variable reference is true.
4221 // The subexpression may have side effects. 3933 // The subexpression may have side effects.
4222 VisitForEffect(expr->expression()); 3934 VisitForEffect(expr->expression());
4223 context()->Plug(true); 3935 context()->Plug(true);
4224 } 3936 }
4225 break; 3937 break;
4226 } 3939 }
4227
4228 case Token::VOID: { 3940 case Token::VOID: {
4229 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 3941 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4230 VisitForEffect(expr->expression()); 3942 VisitForEffect(expr->expression());
4231 context()->Plug(Heap::kUndefinedValueRootIndex); 3943 context()->Plug(Heap::kUndefinedValueRootIndex);
4232 break; 3944 break;
4233 } 3945 }
4234
4235 case Token::NOT: { 3946 case Token::NOT: {
4236 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 3947 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4237 if (context()->IsEffect()) { 3948 if (context()->IsEffect()) {
4238 // Unary NOT has no side effects so it's only necessary to visit the 3949 // Unary NOT has no side effects so it's only necessary to visit the
4239 // subexpression. Match the optimizing compiler by not branching. 3950 // subexpression. Match the optimizing compiler by not branching.
4240 VisitForEffect(expr->expression()); 3951 VisitForEffect(expr->expression());
4241 } else if (context()->IsTest()) { 3952 } else if (context()->IsTest()) {
4242 const TestContext* test = TestContext::cast(context()); 3953 const TestContext* test = TestContext::cast(context());
4243 // The labels are swapped for the recursive call. 3954 // The labels are swapped for the recursive call.
4244 VisitForControl(expr->expression(), 3955 VisitForControl(expr->expression(),
4245 test->false_label(), 3956 test->false_label(),
4246 test->true_label(), 3957 test->true_label(),
4247 test->fall_through()); 3958 test->fall_through());
4248 context()->Plug(test->true_label(), test->false_label()); 3959 context()->Plug(test->true_label(), test->false_label());
4249 } else { 3960 } else {
4250 // We handle value contexts explicitly rather than simply visiting
4251 // for control and plugging the control flow into the context,
4252 // because we need to prepare a pair of extra administrative AST ids
4253 // for the optimizing compiler.
4254 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue()); 3961 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3962 // TODO(jbramley): This could be much more efficient using (for
3963 // example) the CSEL instruction.
4255 Label materialize_true, materialize_false, done; 3964 Label materialize_true, materialize_false, done;
4256 VisitForControl(expr->expression(), 3965 VisitForControl(expr->expression(),
4257 &materialize_false, 3966 &materialize_false,
4258 &materialize_true, 3967 &materialize_true,
4259 &materialize_true); 3968 &materialize_true);
4260 __ bind(&materialize_true); 3969
3970 __ Bind(&materialize_true);
4261 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 3971 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4262 __ LoadRoot(r0, Heap::kTrueValueRootIndex); 3972 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
4263 if (context()->IsStackValue()) __ push(r0); 3973 __ B(&done);
4264 __ jmp(&done); 3974
4265 __ bind(&materialize_false); 3975 __ Bind(&materialize_false);
4266 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); 3976 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4267 __ LoadRoot(r0, Heap::kFalseValueRootIndex); 3977 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4268 if (context()->IsStackValue()) __ push(r0); 3978 __ B(&done);
4269 __ bind(&done); 3979
3980 __ Bind(&done);
3981 if (context()->IsStackValue()) {
3982 __ Push(result_register());
3983 }
4270 } 3984 }
4271 break; 3985 break;
4272 } 3986 }
4273
4274 case Token::TYPEOF: { 3987 case Token::TYPEOF: {
4275 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 3988 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4276 { StackValueContext context(this); 3989 {
3990 StackValueContext context(this);
4277 VisitForTypeofValue(expr->expression()); 3991 VisitForTypeofValue(expr->expression());
4278 } 3992 }
4279 __ CallRuntime(Runtime::kTypeof, 1); 3993 __ CallRuntime(Runtime::kTypeof, 1);
4280 context()->Plug(r0); 3994 context()->Plug(x0);
4281 break; 3995 break;
4282 } 3996 }
4283
4284 default: 3997 default:
4285 UNREACHABLE(); 3998 UNREACHABLE();
4286 } 3999 }
4287 } 4000 }
4288 4001
4289 4002
4290 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4003 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4291 Comment cmnt(masm_, "[ CountOperation"); 4004 Comment cmnt(masm_, "[ CountOperation");
4292 SetSourcePosition(expr->position()); 4005 SetSourcePosition(expr->position());
4293 4006
(...skipping 17 matching lines...)
4311 } 4024 }
4312 4025
4313 // Evaluate expression and get value. 4026 // Evaluate expression and get value.
4314 if (assign_type == VARIABLE) { 4027 if (assign_type == VARIABLE) {
4315 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); 4028 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4316 AccumulatorValueContext context(this); 4029 AccumulatorValueContext context(this);
4317 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4030 EmitVariableLoad(expr->expression()->AsVariableProxy());
4318 } else { 4031 } else {
4319 // Reserve space for result of postfix operation. 4032 // Reserve space for result of postfix operation.
4320 if (expr->is_postfix() && !context()->IsEffect()) { 4033 if (expr->is_postfix() && !context()->IsEffect()) {
4321 __ mov(ip, Operand(Smi::FromInt(0))); 4034 __ Push(xzr);
4322 __ push(ip);
4323 } 4035 }
4324 if (assign_type == NAMED_PROPERTY) { 4036 if (assign_type == NAMED_PROPERTY) {
4325 // Put the object both on the stack and in the accumulator. 4037 // Put the object both on the stack and in the accumulator.
4326 VisitForAccumulatorValue(prop->obj()); 4038 VisitForAccumulatorValue(prop->obj());
4327 __ push(r0); 4039 __ Push(x0);
4328 EmitNamedPropertyLoad(prop); 4040 EmitNamedPropertyLoad(prop);
4329 } else { 4041 } else {
4042 // KEYED_PROPERTY
4330 VisitForStackValue(prop->obj()); 4043 VisitForStackValue(prop->obj());
4331 VisitForAccumulatorValue(prop->key()); 4044 VisitForAccumulatorValue(prop->key());
4332 __ ldr(r1, MemOperand(sp, 0)); 4045 __ Peek(x1, 0);
4333 __ push(r0); 4046 __ Push(x0);
4334 EmitKeyedPropertyLoad(prop); 4047 EmitKeyedPropertyLoad(prop);
4335 } 4048 }
4336 } 4049 }
4337 4050
4338 // We need a second deoptimization point after loading the value 4051 // We need a second deoptimization point after loading the value
4339 // in case evaluating the property load may have a side effect. 4052 // in case evaluating the property load may have a side effect.
4340 if (assign_type == VARIABLE) { 4053 if (assign_type == VARIABLE) {
4341 PrepareForBailout(expr->expression(), TOS_REG); 4054 PrepareForBailout(expr->expression(), TOS_REG);
4342 } else { 4055 } else {
4343 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 4056 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4344 } 4057 }
4345 4058
4346 // Inline smi case if we are in a loop. 4059 // Inline smi case if we are in a loop.
4347 Label stub_call, done; 4060 Label stub_call, done;
4348 JumpPatchSite patch_site(masm_); 4061 JumpPatchSite patch_site(masm_);
4349 4062
4350 int count_value = expr->op() == Token::INC ? 1 : -1; 4063 int count_value = expr->op() == Token::INC ? 1 : -1;
4351 if (ShouldInlineSmiCase(expr->op())) { 4064 if (ShouldInlineSmiCase(expr->op())) {
4352 Label slow; 4065 Label slow;
4353 patch_site.EmitJumpIfNotSmi(r0, &slow); 4066 patch_site.EmitJumpIfNotSmi(x0, &slow);
4354 4067
4355 // Save result for postfix expressions. 4068 // Save result for postfix expressions.
4356 if (expr->is_postfix()) { 4069 if (expr->is_postfix()) {
4357 if (!context()->IsEffect()) { 4070 if (!context()->IsEffect()) {
4358 // Save the result on the stack. If we have a named or keyed property 4071 // Save the result on the stack. If we have a named or keyed property we
4359 // we store the result under the receiver that is currently on top 4072 // store the result under the receiver that is currently on top of the
4360 // of the stack. 4073 // stack.
4361 switch (assign_type) { 4074 switch (assign_type) {
4362 case VARIABLE: 4075 case VARIABLE:
4363 __ push(r0); 4076 __ Push(x0);
4364 break; 4077 break;
4365 case NAMED_PROPERTY: 4078 case NAMED_PROPERTY:
4366 __ str(r0, MemOperand(sp, kPointerSize)); 4079 __ Poke(x0, kPointerSize);
4367 break; 4080 break;
4368 case KEYED_PROPERTY: 4081 case KEYED_PROPERTY:
4369 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4082 __ Poke(x0, kPointerSize * 2);
4370 break; 4083 break;
4371 } 4084 }
4372 } 4085 }
4373 } 4086 }
4374 4087
4375 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); 4088 __ Adds(x0, x0, Operand(Smi::FromInt(count_value)));
4376 __ b(vc, &done); 4089 __ B(vc, &done);
4377 // Call stub. Undo operation first. 4090 // Call stub. Undo operation first.
4378 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 4091 __ Sub(x0, x0, Operand(Smi::FromInt(count_value)));
4379 __ jmp(&stub_call); 4092 __ B(&stub_call);
4380 __ bind(&slow); 4093 __ Bind(&slow);
4381 } 4094 }
4382 ToNumberStub convert_stub; 4095 ToNumberStub convert_stub;
4383 __ CallStub(&convert_stub); 4096 __ CallStub(&convert_stub);
4384 4097
4385 // Save result for postfix expressions. 4098 // Save result for postfix expressions.
4386 if (expr->is_postfix()) { 4099 if (expr->is_postfix()) {
4387 if (!context()->IsEffect()) { 4100 if (!context()->IsEffect()) {
4388 // Save the result on the stack. If we have a named or keyed property 4101 // Save the result on the stack. If we have a named or keyed property
4389 // we store the result under the receiver that is currently on top 4102 // we store the result under the receiver that is currently on top
4390 // of the stack. 4103 // of the stack.
4391 switch (assign_type) { 4104 switch (assign_type) {
4392 case VARIABLE: 4105 case VARIABLE:
4393 __ push(r0); 4106 __ Push(x0);
4394 break; 4107 break;
4395 case NAMED_PROPERTY: 4108 case NAMED_PROPERTY:
4396 __ str(r0, MemOperand(sp, kPointerSize)); 4109 __ Poke(x0, kXRegSizeInBytes);
4397 break; 4110 break;
4398 case KEYED_PROPERTY: 4111 case KEYED_PROPERTY:
4399 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4112 __ Poke(x0, 2 * kXRegSizeInBytes);
4400 break; 4113 break;
4401 } 4114 }
4402 } 4115 }
4403 } 4116 }
4404 4117
4405 4118 __ Bind(&stub_call);
4406 __ bind(&stub_call); 4119 __ Mov(x1, x0);
4407 __ mov(r1, r0); 4120 __ Mov(x0, Operand(Smi::FromInt(count_value)));
4408 __ mov(r0, Operand(Smi::FromInt(count_value)));
4409 4121
4410 // Record position before stub call. 4122 // Record position before stub call.
4411 SetSourcePosition(expr->position()); 4123 SetSourcePosition(expr->position());
4412 4124
4413 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); 4125 {
4414 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); 4126 Assembler::BlockConstPoolScope scope(masm_);
4415 patch_site.EmitPatchInfo(); 4127 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
4416 __ bind(&done); 4128 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
4129 patch_site.EmitPatchInfo();
4130 }
4131 __ Bind(&done);
4417 4132
4418 // Store the value returned in r0. 4133 // Store the value returned in x0.
4419 switch (assign_type) { 4134 switch (assign_type) {
4420 case VARIABLE: 4135 case VARIABLE:
4421 if (expr->is_postfix()) { 4136 if (expr->is_postfix()) {
4422 { EffectContext context(this); 4137 { EffectContext context(this);
4423 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4138 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4424 Token::ASSIGN); 4139 Token::ASSIGN);
4425 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4140 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4426 context.Plug(r0); 4141 context.Plug(x0);
4427 } 4142 }
4428 // For all contexts except EffectContext, we have the result on 4143 // For all contexts except EffectContext, we have the result on
4429 // top of the stack. 4144 // top of the stack.
4430 if (!context()->IsEffect()) { 4145 if (!context()->IsEffect()) {
4431 context()->PlugTOS(); 4146 context()->PlugTOS();
4432 } 4147 }
4433 } else { 4148 } else {
4434 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4149 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4435 Token::ASSIGN); 4150 Token::ASSIGN);
4436 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4151 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4437 context()->Plug(r0); 4152 context()->Plug(x0);
4438 } 4153 }
4439 break; 4154 break;
4440 case NAMED_PROPERTY: { 4155 case NAMED_PROPERTY: {
4441 __ mov(r2, Operand(prop->key()->AsLiteral()->value())); 4156 __ Mov(x2, Operand(prop->key()->AsLiteral()->value()));
4442 __ pop(r1); 4157 __ Pop(x1);
4443 CallStoreIC(expr->CountStoreFeedbackId()); 4158 CallStoreIC(expr->CountStoreFeedbackId());
4444 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4159 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4445 if (expr->is_postfix()) { 4160 if (expr->is_postfix()) {
4446 if (!context()->IsEffect()) { 4161 if (!context()->IsEffect()) {
4447 context()->PlugTOS(); 4162 context()->PlugTOS();
4448 } 4163 }
4449 } else { 4164 } else {
4450 context()->Plug(r0); 4165 context()->Plug(x0);
4451 } 4166 }
4452 break; 4167 break;
4453 } 4168 }
4454 case KEYED_PROPERTY: { 4169 case KEYED_PROPERTY: {
4455 __ Pop(r2, r1); // r1 = key. r2 = receiver. 4170 __ Pop(x1); // Key.
4171 __ Pop(x2); // Receiver.
4456 Handle<Code> ic = is_classic_mode() 4172 Handle<Code> ic = is_classic_mode()
4457 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4173 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4458 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4174 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4459 CallIC(ic, expr->CountStoreFeedbackId()); 4175 CallIC(ic, expr->CountStoreFeedbackId());
4460 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4176 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4461 if (expr->is_postfix()) { 4177 if (expr->is_postfix()) {
4462 if (!context()->IsEffect()) { 4178 if (!context()->IsEffect()) {
4463 context()->PlugTOS(); 4179 context()->PlugTOS();
4464 } 4180 }
4465 } else { 4181 } else {
4466 context()->Plug(r0); 4182 context()->Plug(x0);
4467 } 4183 }
4468 break; 4184 break;
4469 } 4185 }
4470 } 4186 }
4471 } 4187 }
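Note: a sketch of the inlined smi fast path above, for INC (count_value == 1); the point is that adding a tagged smi sets the overflow flag exactly when the untagged addition would overflow:

    patch_site.EmitJumpIfNotSmi(x0, &slow);      // non-smis take the stub
    __ Adds(x0, x0, Operand(Smi::FromInt(1)));   // tagged add, sets flags
    __ B(vc, &done);                             // no signed overflow: done
    __ Sub(x0, x0, Operand(Smi::FromInt(1)));    // undo before the stub call

The undo is needed because BinaryOpICStub is handed the original left operand in x1 and re-performs the whole addition.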
4472 4188
4473 4189
4474 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4190 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4475 ASSERT(!context()->IsEffect()); 4191 ASSERT(!context()->IsEffect());
4476 ASSERT(!context()->IsTest()); 4192 ASSERT(!context()->IsTest());
4477 VariableProxy* proxy = expr->AsVariableProxy(); 4193 VariableProxy* proxy = expr->AsVariableProxy();
4478 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4194 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4479 Comment cmnt(masm_, "Global variable"); 4195 Comment cmnt(masm_, "Global variable");
4480 __ ldr(r0, GlobalObjectOperand()); 4196 __ Ldr(x0, GlobalObjectMemOperand());
4481 __ mov(r2, Operand(proxy->name())); 4197 __ Mov(x2, Operand(proxy->name()));
4482 // Use a regular load, not a contextual load, to avoid a reference 4198 // Use a regular load, not a contextual load, to avoid a reference
4483 // error. 4199 // error.
4484 CallLoadIC(NOT_CONTEXTUAL); 4200 CallLoadIC(NOT_CONTEXTUAL);
4485 PrepareForBailout(expr, TOS_REG); 4201 PrepareForBailout(expr, TOS_REG);
4486 context()->Plug(r0); 4202 context()->Plug(x0);
4487 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4203 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4488 Label done, slow; 4204 Label done, slow;
4489 4205
4490 // Generate code for loading from variables potentially shadowed 4206 // Generate code for loading from variables potentially shadowed
4491 // by eval-introduced variables. 4207 // by eval-introduced variables.
4492 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4208 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4493 4209
4494 __ bind(&slow); 4210 __ Bind(&slow);
4495 __ mov(r0, Operand(proxy->name())); 4211 __ Mov(x0, Operand(proxy->name()));
4496 __ Push(cp, r0); 4212 __ Push(cp, x0);
4497 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 4213 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4498 PrepareForBailout(expr, TOS_REG); 4214 PrepareForBailout(expr, TOS_REG);
4499 __ bind(&done); 4215 __ Bind(&done);
4500 4216
4501 context()->Plug(r0); 4217 context()->Plug(x0);
4502 } else { 4218 } else {
4503 // This expression cannot throw a reference error at the top level. 4219 // This expression cannot throw a reference error at the top level.
4504 VisitInDuplicateContext(expr); 4220 VisitInDuplicateContext(expr);
4505 } 4221 }
4506 } 4222 }
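Note: a source-level example (as comments) of why the global-variable path above must use a regular, non-contextual load:

    // typeof not_declared  -> "undefined"; must not throw, so the load is
    //                         CallLoadIC(NOT_CONTEXTUAL).
    // not_declared;        -> a contextual load, which throws a
    //                         ReferenceError for unresolvable names.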
4507 4223
4508 4224
4509 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 4225 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4510 Expression* sub_expr, 4226 Expression* sub_expr,
4511 Handle<String> check) { 4227 Handle<String> check) {
4228 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4229 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4512 Label materialize_true, materialize_false; 4230 Label materialize_true, materialize_false;
4513 Label* if_true = NULL; 4231 Label* if_true = NULL;
4514 Label* if_false = NULL; 4232 Label* if_false = NULL;
4515 Label* fall_through = NULL; 4233 Label* fall_through = NULL;
4516 context()->PrepareTest(&materialize_true, &materialize_false, 4234 context()->PrepareTest(&materialize_true, &materialize_false,
4517 &if_true, &if_false, &fall_through); 4235 &if_true, &if_false, &fall_through);
4518 4236
4519 { AccumulatorValueContext context(this); 4237 { AccumulatorValueContext context(this);
4520 VisitForTypeofValue(sub_expr); 4238 VisitForTypeofValue(sub_expr);
4521 } 4239 }
4522 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4240 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4523 4241
4524 if (check->Equals(isolate()->heap()->number_string())) { 4242 if (check->Equals(isolate()->heap()->number_string())) {
4525 __ JumpIfSmi(r0, if_true); 4243 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4526 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4244 __ JumpIfSmi(x0, if_true);
4527 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 4245 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4528 __ cmp(r0, ip); 4246 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4529 Split(eq, if_true, if_false, fall_through); 4247 Split(eq, if_true, if_false, fall_through);
4530 } else if (check->Equals(isolate()->heap()->string_string())) { 4248 } else if (check->Equals(isolate()->heap()->string_string())) {
4531 __ JumpIfSmi(r0, if_false); 4249 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4250 __ JumpIfSmi(x0, if_false);
4532 // Check for undetectable objects => false. 4251 // Check for undetectable objects => false.
4533 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); 4252 __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4534 __ b(ge, if_false); 4253 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4535 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4254 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4536 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4255 fall_through);
4537 Split(eq, if_true, if_false, fall_through);
4538 } else if (check->Equals(isolate()->heap()->symbol_string())) { 4256 } else if (check->Equals(isolate()->heap()->symbol_string())) {
4539 __ JumpIfSmi(r0, if_false); 4257 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4540 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE); 4258 __ JumpIfSmi(x0, if_false);
4259 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4541 Split(eq, if_true, if_false, fall_through); 4260 Split(eq, if_true, if_false, fall_through);
4542 } else if (check->Equals(isolate()->heap()->boolean_string())) { 4261 } else if (check->Equals(isolate()->heap()->boolean_string())) {
4543 __ CompareRoot(r0, Heap::kTrueValueRootIndex); 4262 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4544 __ b(eq, if_true); 4263 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4545 __ CompareRoot(r0, Heap::kFalseValueRootIndex); 4264 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4546 Split(eq, if_true, if_false, fall_through); 4265 Split(eq, if_true, if_false, fall_through);
4547 } else if (FLAG_harmony_typeof && 4266 } else if (FLAG_harmony_typeof &&
4548 check->Equals(isolate()->heap()->null_string())) { 4267 check->Equals(isolate()->heap()->null_string())) {
4549 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4268 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof null_string");
4269 __ CompareRoot(x0, Heap::kNullValueRootIndex);
4550 Split(eq, if_true, if_false, fall_through); 4270 Split(eq, if_true, if_false, fall_through);
4551 } else if (check->Equals(isolate()->heap()->undefined_string())) { 4271 } else if (check->Equals(isolate()->heap()->undefined_string())) {
4552 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); 4272 ASM_LOCATION(
4553 __ b(eq, if_true); 4273 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4554 __ JumpIfSmi(r0, if_false); 4274 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4275 __ JumpIfSmi(x0, if_false);
4555 // Check for undetectable objects => true. 4276 // Check for undetectable objects => true.
4556 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4277 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4557 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4278 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4558 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4279 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4559 Split(ne, if_true, if_false, fall_through); 4280 fall_through);
4281 } else if (check->Equals(isolate()->heap()->function_string())) {
4282 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4283 __ JumpIfSmi(x0, if_false);
4284 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4285 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4286 __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4287 fall_through);
4560 4288
4561 } else if (check->Equals(isolate()->heap()->function_string())) {
4562 __ JumpIfSmi(r0, if_false);
4563 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4564 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4565 __ b(eq, if_true);
4566 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4567 Split(eq, if_true, if_false, fall_through);
4568 } else if (check->Equals(isolate()->heap()->object_string())) { 4289 } else if (check->Equals(isolate()->heap()->object_string())) {
4569 __ JumpIfSmi(r0, if_false); 4290 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4291 __ JumpIfSmi(x0, if_false);
4570 if (!FLAG_harmony_typeof) { 4292 if (!FLAG_harmony_typeof) {
4571 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4293 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4572 __ b(eq, if_true);
4573 } 4294 }
4574 // Check for JS objects => true. 4295 // Check for JS objects => true.
4575 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 4296 Register map = x10;
4576 __ b(lt, if_false); 4297 __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4577 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4298 if_false, lt);
4578 __ b(gt, if_false); 4299 __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4300 __ B(gt, if_false);
4579 // Check for undetectable objects => false. 4301 // Check for undetectable objects => false.
4580 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4302 __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
4581 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4303
4582 Split(eq, if_true, if_false, fall_through); 4304 __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4305 fall_through);
4306
4583 } else { 4307 } else {
4584 if (if_false != fall_through) __ jmp(if_false); 4308 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4309 if (if_false != fall_through) __ B(if_false);
4585 } 4310 }
4586 context()->Plug(if_true, if_false); 4311 context()->Plug(if_true, if_false);
4587 } 4312 }
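Note: TestAndSplit condenses the ARM-side tst/Split pair on the left; for example, the string_string case is equivalent to this longhand form (a sketch, assuming an A64 Tst helper with the usual register/immediate signature):

    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ Tst(x1, 1 << Map::kIsUndetectable);
    Split(eq, if_true, if_false, fall_through);  // undetectable => false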
4588 4313
4589 4314
4590 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 4315 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4591 Comment cmnt(masm_, "[ CompareOperation"); 4316 Comment cmnt(masm_, "[ CompareOperation");
4592 SetSourcePosition(expr->position()); 4317 SetSourcePosition(expr->position());
4593 4318
4594 // First we try a fast inlined version of the compare when one of 4319 // Try to generate an optimized comparison with a literal value.
4595 // the operands is a literal. 4320 // TODO(jbramley): This only checks common values like NaN or undefined.
4596 if (TryLiteralCompare(expr)) return; 4321 // Should it also handle A64 immediate operands?
4322 if (TryLiteralCompare(expr)) {
4323 return;
4324 }
4597 4325
4598 // Always perform the comparison for its control flow. Pack the result 4326 // Assign labels according to context()->PrepareTest.
4599 // into the expression's context after the comparison is performed. 4327 Label materialize_true;
4600 Label materialize_true, materialize_false; 4328 Label materialize_false;
4601 Label* if_true = NULL; 4329 Label* if_true = NULL;
4602 Label* if_false = NULL; 4330 Label* if_false = NULL;
4603 Label* fall_through = NULL; 4331 Label* fall_through = NULL;
4604 context()->PrepareTest(&materialize_true, &materialize_false, 4332 context()->PrepareTest(&materialize_true, &materialize_false,
4605 &if_true, &if_false, &fall_through); 4333 &if_true, &if_false, &fall_through);
4606 4334
4607 Token::Value op = expr->op(); 4335 Token::Value op = expr->op();
4608 VisitForStackValue(expr->left()); 4336 VisitForStackValue(expr->left());
4609 switch (op) { 4337 switch (op) {
4610 case Token::IN: 4338 case Token::IN:
4611 VisitForStackValue(expr->right()); 4339 VisitForStackValue(expr->right());
4612 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4340 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4613 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); 4341 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4614 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 4342 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4615 __ cmp(r0, ip);
4616 Split(eq, if_true, if_false, fall_through); 4343 Split(eq, if_true, if_false, fall_through);
4617 break; 4344 break;
4618 4345
4619 case Token::INSTANCEOF: { 4346 case Token::INSTANCEOF: {
4620 VisitForStackValue(expr->right()); 4347 VisitForStackValue(expr->right());
4621 InstanceofStub stub(InstanceofStub::kNoFlags); 4348 InstanceofStub stub(InstanceofStub::kNoFlags);
4622 __ CallStub(&stub); 4349 __ CallStub(&stub);
4623 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4350 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4624 // The stub returns 0 for true. 4351 // The stub returns 0 for true.
4625 __ tst(r0, r0); 4352 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4626 Split(eq, if_true, if_false, fall_through);
4627 break; 4353 break;
4628 } 4354 }
4629 4355
4630 default: { 4356 default: {
4631 VisitForAccumulatorValue(expr->right()); 4357 VisitForAccumulatorValue(expr->right());
4632 Condition cond = CompareIC::ComputeCondition(op); 4358 Condition cond = CompareIC::ComputeCondition(op);
4633 __ pop(r1);
4634 4359
4635 bool inline_smi_code = ShouldInlineSmiCase(op); 4360 // Pop the stack value.
4361 __ Pop(x1);
4362
4636 JumpPatchSite patch_site(masm_); 4363 JumpPatchSite patch_site(masm_);
4637 if (inline_smi_code) { 4364 if (ShouldInlineSmiCase(op)) {
4638 Label slow_case; 4365 Label slow_case;
4639 __ orr(r2, r0, Operand(r1)); 4366 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4640 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 4367 __ Cmp(x1, x0);
4641 __ cmp(r1, r0);
4642 Split(cond, if_true, if_false, NULL); 4368 Split(cond, if_true, if_false, NULL);
4643 __ bind(&slow_case); 4369 __ Bind(&slow_case);
4644 } 4370 }
4645 4371
4646 // Record position and call the compare IC. 4372 // Record position and call the compare IC.
4647 SetSourcePosition(expr->position()); 4373 SetSourcePosition(expr->position());
4648 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 4374 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4649 CallIC(ic, expr->CompareOperationFeedbackId()); 4375 CallIC(ic, expr->CompareOperationFeedbackId());
4650 patch_site.EmitPatchInfo(); 4376 patch_site.EmitPatchInfo();
4651 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4377 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4652 __ cmp(r0, Operand::Zero()); 4378 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4653 Split(cond, if_true, if_false, fall_through);
4654 } 4379 }
4655 } 4380 }
4656 4381
4657 // Convert the result of the comparison into one expected for this 4382 // Convert the result of the comparison into one expected for this
4658 // expression's context. 4383 // expression's context.
4659 context()->Plug(if_true, if_false); 4384 context()->Plug(if_true, if_false);
4660 } 4385 }
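Note: similarly, CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through) above appears to be shorthand for the two-step ARM sequence; a sketch of the equivalence:

    __ Cmp(x0, 0);
    Split(cond, if_true, if_false, fall_through);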
4661 4386
4662 4387
4663 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, 4388 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4664 Expression* sub_expr, 4389 Expression* sub_expr,
4665 NilValue nil) { 4390 NilValue nil) {
4391 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4666 Label materialize_true, materialize_false; 4392 Label materialize_true, materialize_false;
4667 Label* if_true = NULL; 4393 Label* if_true = NULL;
4668 Label* if_false = NULL; 4394 Label* if_false = NULL;
4669 Label* fall_through = NULL; 4395 Label* fall_through = NULL;
4670 context()->PrepareTest(&materialize_true, &materialize_false, 4396 context()->PrepareTest(&materialize_true, &materialize_false,
4671 &if_true, &if_false, &fall_through); 4397 &if_true, &if_false, &fall_through);
4672 4398
4673 VisitForAccumulatorValue(sub_expr); 4399 VisitForAccumulatorValue(sub_expr);
4674 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4400 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4401
4675 if (expr->op() == Token::EQ_STRICT) { 4402 if (expr->op() == Token::EQ_STRICT) {
4676 Heap::RootListIndex nil_value = nil == kNullValue ? 4403 Heap::RootListIndex nil_value = nil == kNullValue ?
4677 Heap::kNullValueRootIndex : 4404 Heap::kNullValueRootIndex :
4678 Heap::kUndefinedValueRootIndex; 4405 Heap::kUndefinedValueRootIndex;
4679 __ LoadRoot(r1, nil_value); 4406 __ CompareRoot(x0, nil_value);
4680 __ cmp(r0, r1);
4681 Split(eq, if_true, if_false, fall_through); 4407 Split(eq, if_true, if_false, fall_through);
4682 } else { 4408 } else {
4683 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); 4409 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4684 CallIC(ic, expr->CompareOperationFeedbackId()); 4410 CallIC(ic, expr->CompareOperationFeedbackId());
4685 __ cmp(r0, Operand(0)); 4411 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4686 Split(ne, if_true, if_false, fall_through);
4687 } 4412 }
4413
4688 context()->Plug(if_true, if_false); 4414 context()->Plug(if_true, if_false);
4689 } 4415 }
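Note: source-level examples (as comments) of the two paths above:

    // x === null  -> EQ_STRICT: a single CompareRoot against the null root.
    // x == null   -> generic: CompareNilICStub, which also accepts undefined
    //                and undetectable objects under abstract equality.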
4690 4416
4691 4417
4692 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 4418 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4693 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4419 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4694 context()->Plug(r0); 4420 context()->Plug(x0);
4695 } 4421 }
4696 4422
4697 4423
4424 void FullCodeGenerator::VisitYield(Yield* expr) {
4425 Comment cmnt(masm_, "[ Yield");
4426 // Evaluate yielded value first; the initial iterator definition depends on
4427 // this. It stays on the stack while we update the iterator.
4428 VisitForStackValue(expr->expression());
4429
4430 // TODO(jbramley): Tidy this up once the merge is done, using named registers
4431 // and suchlike. The implementation changes a little by bleeding_edge so I
4432 // don't want to spend too much time on it now.
4433
4434 switch (expr->yield_kind()) {
4435 case Yield::SUSPEND:
4436 // Pop value from top-of-stack slot; box result into result register.
4437 EmitCreateIteratorResult(false);
4438 __ Push(result_register());
4439 // Fall through.
4440 case Yield::INITIAL: {
4441 Label suspend, continuation, post_runtime, resume;
4442
4443 __ B(&suspend);
4444
4445 // TODO(jbramley): This label is bound here because the following code
4446 // looks at its pos(). Is it possible to do something more efficient here,
4447 // perhaps using Adr?
4448 __ Bind(&continuation);
4449 __ B(&resume);
4450
4451 __ Bind(&suspend);
4452 VisitForAccumulatorValue(expr->generator_object());
4453 ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4454 __ Mov(x1, Operand(Smi::FromInt(continuation.pos())));
4455 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4456 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4457 __ Mov(x1, cp);
4458 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4459 kLRHasBeenSaved, kDontSaveFPRegs);
4460 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4461 __ Cmp(__ StackPointer(), x1);
4462 __ B(eq, &post_runtime);
4463 __ Push(x0); // generator object
4464 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4465 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4466 __ Bind(&post_runtime);
4467 __ Pop(result_register());
4468 EmitReturnSequence();
4469
4470 __ Bind(&resume);
4471 context()->Plug(result_register());
4472 break;
4473 }
4474
4475 case Yield::FINAL: {
4476 VisitForAccumulatorValue(expr->generator_object());
4477 __ Mov(x1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
4478 __ Str(x1, FieldMemOperand(result_register(),
4479 JSGeneratorObject::kContinuationOffset));
4480 // Pop value from top-of-stack slot, box result into result register.
4481 EmitCreateIteratorResult(true);
4482 EmitUnwindBeforeReturn();
4483 EmitReturnSequence();
4484 break;
4485 }
4486
4487 case Yield::DELEGATING: {
4488 VisitForStackValue(expr->generator_object());
4489
4490 // Initial stack layout is as follows:
4491 // [sp + 1 * kPointerSize] iter
4492 // [sp + 0 * kPointerSize] g
4493
4494 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
4495 Label l_next, l_call, l_loop;
4496 // Initial send value is undefined.
4497 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4498 __ B(&l_next);
4499
4500 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
4501 __ Bind(&l_catch);
4502 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
4503 __ LoadRoot(x2, Heap::kthrow_stringRootIndex); // "throw"
4504 __ Peek(x3, 1 * kPointerSize); // iter
4505 __ Push(x2, x3, x0); // "throw", iter, except
4506 __ B(&l_call);
4507
4508 // try { received = %yield result }
4509 // Shuffle the received result above a try handler and yield it without
4510 // re-boxing.
4511 __ Bind(&l_try);
4512 __ Pop(x0); // result
4513 __ PushTryHandler(StackHandler::CATCH, expr->index());
4514 const int handler_size = StackHandlerConstants::kSize;
4515 __ Push(x0); // result
4516 __ B(&l_suspend);
4517
4518 // TODO(jbramley): This label is bound here because the following code
4519 // looks at its pos(). Is it possible to do something more efficient here,
4520 // perhaps using Adr?
4521 __ Bind(&l_continuation);
4522 __ B(&l_resume);
4523
4524 __ Bind(&l_suspend);
4525 const int generator_object_depth = kPointerSize + handler_size;
4526 __ Peek(x0, generator_object_depth);
4527 __ Push(x0); // g
4528 ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
4529 __ Mov(x1, Operand(Smi::FromInt(l_continuation.pos())));
4530 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4531 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4532 __ Mov(x1, cp);
4533 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4534 kLRHasBeenSaved, kDontSaveFPRegs);
4535 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4536 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4537 __ Pop(x0); // result
4538 EmitReturnSequence();
4539 __ Bind(&l_resume); // received in x0
4540 __ PopTryHandler();
4541
4542 // receiver = iter; f = 'next'; arg = received;
4543 __ Bind(&l_next);
4544 __ LoadRoot(x2, Heap::knext_stringRootIndex); // "next"
4545 __ Peek(x3, 1 * kPointerSize); // iter
4546 __ Push(x2, x3, x0); // "next", iter, received
4547
4548 // result = receiver[f](arg);
4549 __ Bind(&l_call);
4550 __ Peek(x1, 1 * kPointerSize);
4551 __ Peek(x0, 2 * kPointerSize);
4552 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
4553 CallIC(ic, TypeFeedbackId::None());
4554 __ Mov(x1, x0);
4555 __ Poke(x1, 2 * kPointerSize);
4556 CallFunctionStub stub(1, CALL_AS_METHOD);
4557 __ CallStub(&stub);
4558
4559 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4560 __ Drop(1); // The function is still on the stack; drop it.
4561
4562 // if (!result.done) goto l_try;
4563 __ Bind(&l_loop);
4564 __ Push(x0); // save result
4565 __ LoadRoot(x2, Heap::kdone_stringRootIndex); // "done"
4566 CallLoadIC(NOT_CONTEXTUAL); // result.done in x0
4567 // The ToBooleanStub argument (result.done) is in x0.
4568 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
4569 CallIC(bool_ic);
4570 __ Cbz(x0, &l_try);
4571
4572 // result.value
4573 __ Pop(x0); // result
4574 __ LoadRoot(x2, Heap::kvalue_stringRootIndex); // "value"
4575 CallLoadIC(NOT_CONTEXTUAL); // result.value in x0
4576 context()->DropAndPlug(2, x0); // drop iter and g
4577 break;
4578 }
4579 }
4580 }
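Note: the DELEGATING case compiles roughly to the following source-level loop (a sketch in comments; the generated code shuffles the boxed result on the stack instead of re-boxing it):

    // let received = undefined;
    // while (true) {
    //   let result = iter.next(received);  // or iter.throw(e) after a catch
    //   if (result.done) break;            // yield* evaluates to result.value
    //   received = yield result;           // suspends via
    //                                      // kSuspendJSGeneratorObject
    // }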
4581
4582
4583 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
4584 Expression *value,
4585 JSGeneratorObject::ResumeMode resume_mode) {
4586 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
4587 Register value_reg = x0;
4588 Register generator_object = x1;
4589 Register the_hole = x2;
4590 Register operand_stack_size = w3;
4591 Register function = x4;
4592
4593 // The value stays in x0, and is ultimately read by the resumed generator, as
4594 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
4595 // is read to throw the value when the resumed generator is already closed. x1
4596 // will hold the generator object until the activation has been resumed.
4597 VisitForStackValue(generator);
4598 VisitForAccumulatorValue(value);
4599 __ Pop(generator_object);
4600
4601 // Check generator state.
4602 Label wrong_state, closed_state, done;
4603 __ Ldr(x10, FieldMemOperand(generator_object,
4604 JSGeneratorObject::kContinuationOffset));
4605 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
4606 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
4607 __ CompareAndBranch(x10, Operand(Smi::FromInt(0)), eq, &closed_state);
4608 __ CompareAndBranch(x10, Operand(Smi::FromInt(0)), lt, &wrong_state);
4609
4610 // Load suspended function and context.
4611 __ Ldr(cp, FieldMemOperand(generator_object,
4612 JSGeneratorObject::kContextOffset));
4613 __ Ldr(function, FieldMemOperand(generator_object,
4614 JSGeneratorObject::kFunctionOffset));
4615
4616 // Load receiver and store as the first argument.
4617 __ Ldr(x10, FieldMemOperand(generator_object,
4618 JSGeneratorObject::kReceiverOffset));
4619 __ Push(x10);
4620
4621 // Push holes for the rest of the arguments to the generator function.
4622 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
4623
4624 // The number of arguments is stored as an int32_t, and -1 is a marker
4625 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign
4626 // extension to correctly handle it. However, in this case, we operate on
4627 // 32-bit W registers, so extension isn't required.
4628 __ Ldr(w10, FieldMemOperand(x10,
4629 SharedFunctionInfo::kFormalParameterCountOffset));
4630 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
4631
4632 // TODO(jbramley): Write a variant of PushMultipleTimes which takes a register
4633 // instead of a constant count, and use it to replace this loop.
4634 Label push_argument_holes, push_frame;
4635 __ Bind(&push_argument_holes);
4636 __ Subs(w10, w10, 1);
4637 __ B(mi, &push_frame);
4638 __ Push(the_hole);
4639 __ B(&push_argument_holes);
4640
4641 // Enter a new JavaScript frame, and initialize its slots as they were when
4642 // the generator was suspended.
4643 Label resume_frame;
4644 __ Bind(&push_frame);
4645 __ Bl(&resume_frame);
4646 __ B(&done);
4647
4648 __ Bind(&resume_frame);
4649 __ Push(lr, // Return address.
4650 fp, // Caller's frame pointer.
4651 cp, // Callee's context.
4652 function); // Callee's JS Function.
4653 __ Add(fp, __ StackPointer(), kPointerSize * 2);
4654
4655 // Load and untag the operand stack size.
4656 __ Ldr(x10, FieldMemOperand(generator_object,
4657 JSGeneratorObject::kOperandStackOffset));
4658 __ Ldr(operand_stack_size,
4659 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
4660
4661 // If we are sending a value and there is no operand stack, we can jump back
4662 // in directly.
4663 if (resume_mode == JSGeneratorObject::NEXT) {
4664 Label slow_resume;
4665 __ Cbnz(operand_stack_size, &slow_resume);
4666 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
4667 __ Ldrsw(x11,
4668 UntagSmiFieldMemOperand(generator_object,
4669 JSGeneratorObject::kContinuationOffset));
4670 __ Add(x10, x10, x11);
4671 __ Mov(x12, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
4672 __ Str(x12, FieldMemOperand(generator_object,
4673 JSGeneratorObject::kContinuationOffset));
4674 __ Br(x10);
4675
4676 __ Bind(&slow_resume);
4677 }
4678
4679 // Otherwise, we push holes for the operand stack and call the runtime to fix
4680 // up the stack and the handlers.
4681 // TODO(jbramley): Write a variant of PushMultipleTimes which takes a register
4682 // instead of a constant count, and use it to replace this loop.
4683 Label push_operand_holes, call_resume;
4684 __ Bind(&push_operand_holes);
4685 __ Subs(operand_stack_size, operand_stack_size, 1);
4686 __ B(mi, &call_resume);
4687 __ Push(the_hole);
4688 __ B(&push_operand_holes);
4689
4690 __ Bind(&call_resume);
4691 __ Mov(x10, Operand(Smi::FromInt(resume_mode)));
4692 __ Push(generator_object, result_register(), x10);
4693 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
4694 // Not reached: the runtime call returns elsewhere.
4695 __ Unreachable();
4696
4697 // Reach here when generator is closed.
4698 __ Bind(&closed_state);
4699 if (resume_mode == JSGeneratorObject::NEXT) {
4700 // Return completed iterator result when generator is closed.
4701 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
4702 __ Push(x10);
4703 // Pop value from top-of-stack slot; box result into result register.
4704 EmitCreateIteratorResult(true);
4705 } else {
4706 // Throw the provided value.
4707 __ Push(value_reg);
4708 __ CallRuntime(Runtime::kThrow, 1);
4709 }
4710 __ B(&done);
4711
4712 // Throw error if we attempt to operate on a running generator.
4713 __ Bind(&wrong_state);
4714 __ Push(generator_object);
4715 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
4716
4717 __ Bind(&done);
4718 context()->Plug(result_register());
4719 }
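Note: for reference, the frame built at resume_frame above has the standard JS frame shape (a sketch; kPointerSize is 8 on A64, and fp is set to sp + 16 by the Add):

    //   fp + 8  : lr (return address from the Bl above)
    //   fp + 0  : caller's fp
    //   fp - 8  : cp (callee context)
    //   fp - 16 : JS function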
4720
4721
4722 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
4723 Label gc_required;
4724 Label allocated;
4725
4726 Handle<Map> map(isolate()->native_context()->generator_result_map());
4727
4728 // Allocate and populate an object with this form: { value: VAL, done: DONE }
4729
4730 Register result = x0;
4731 __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
4732 __ B(&allocated);
4733
4734 __ Bind(&gc_required);
4735 __ Push(Smi::FromInt(map->instance_size()));
4736 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
4737 __ Ldr(context_register(),
4738 MemOperand(fp, StandardFrameConstants::kContextOffset));
4739
4740 __ Bind(&allocated);
4741 Register map_reg = x1;
4742 Register result_value = x2;
4743 Register boolean_done = x3;
4744 Register empty_fixed_array = x4;
4745 __ Mov(map_reg, Operand(map));
4746 __ Pop(result_value);
4747 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
4748 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
4749 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
4750 // TODO(jbramley): Use Stp if possible.
4751 __ Str(map_reg, FieldMemOperand(result, HeapObject::kMapOffset));
4752 __ Str(empty_fixed_array,
4753 FieldMemOperand(result, JSObject::kPropertiesOffset));
4754 __ Str(empty_fixed_array, FieldMemOperand(result, JSObject::kElementsOffset));
4755 __ Str(result_value,
4756 FieldMemOperand(result,
4757 JSGeneratorObject::kResultValuePropertyOffset));
4758 __ Str(boolean_done,
4759 FieldMemOperand(result,
4760 JSGeneratorObject::kResultDonePropertyOffset));
4761
4762 // Only the value field needs a write barrier, as the other values are in the
4763 // root set.
4764 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
4765 x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
4766 }
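Note: a sketch of the Stp idea from the TODO above, assuming the map and properties fields occupy adjacent slots (hypothetical, not part of this patch):

    __ Stp(map_reg, empty_fixed_array,
           FieldMemOperand(result, HeapObject::kMapOffset));
    // ...remaining fields stored as before.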
4767
4768
4769 // TODO(all): I don't like this method.
4770 // It seems to me that in too many places x0 is used in place of this.
4771 // Also, this function is not suitable for all places where x0 should be
4772 // abstracted (e.g. when used as an argument). But some places assume that the
4773 // first argument register is x0, and use this function instead.
4774 // Considering that most of the register allocation is hard-coded in the
4775 // FullCodeGen, that it is unlikely we will need to change it extensively, and
4776 // that abstracting the allocation through functions would not yield any
4777 // performance benefit, I think the existence of this function is debatable.
4698 Register FullCodeGenerator::result_register() { 4778 Register FullCodeGenerator::result_register() {
4699 return r0; 4779 return x0;
4700 } 4780 }
4701 4781
4702 4782
4703 Register FullCodeGenerator::context_register() { 4783 Register FullCodeGenerator::context_register() {
4704 return cp; 4784 return cp;
4705 } 4785 }
4706 4786
4707 4787
4708 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4788 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4709 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 4789 ASSERT(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
4710 __ str(value, MemOperand(fp, frame_offset)); 4790 __ Str(value, MemOperand(fp, frame_offset));
4711 } 4791 }
4712 4792
4713 4793
4714 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4794 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4715 __ ldr(dst, ContextOperand(cp, context_index)); 4795 __ Ldr(dst, ContextMemOperand(cp, context_index));
4716 } 4796 }
4717 4797
4718 4798
4719 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 4799 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4720 Scope* declaration_scope = scope()->DeclarationScope(); 4800 Scope* declaration_scope = scope()->DeclarationScope();
4721 if (declaration_scope->is_global_scope() || 4801 if (declaration_scope->is_global_scope() ||
4722 declaration_scope->is_module_scope()) { 4802 declaration_scope->is_module_scope()) {
4723 // Contexts nested in the native context have a canonical empty function 4803 // Contexts nested in the native context have a canonical empty function
4724 // as their closure, not the anonymous closure containing the global 4804 // as their closure, not the anonymous closure containing the global
4725 // code. Pass a smi sentinel and let the runtime look up the empty 4805 // code. Pass a smi sentinel and let the runtime look up the empty
4726 // function. 4806 // function.
4727 __ mov(ip, Operand(Smi::FromInt(0))); 4807 ASSERT(kSmiTag == 0);
4808 __ Push(xzr);
4728 } else if (declaration_scope->is_eval_scope()) { 4809 } else if (declaration_scope->is_eval_scope()) {
4729 // Contexts created by a call to eval have the same closure as the 4810 // Contexts created by a call to eval have the same closure as the
4730 // context calling eval, not the anonymous closure containing the eval 4811 // context calling eval, not the anonymous closure containing the eval
4731 // code. Fetch it from the context. 4812 // code. Fetch it from the context.
4732 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX)); 4813 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4814 __ Push(x10);
4733 } else { 4815 } else {
4734 ASSERT(declaration_scope->is_function_scope()); 4816 ASSERT(declaration_scope->is_function_scope());
4735 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4817 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4818 __ Push(x10);
4736 } 4819 }
4737 __ push(ip);
4738 } 4820 }
4739 4821
4740 4822
4741 // ----------------------------------------------------------------------------
4742 // Non-local control flow support.
4743
4744 void FullCodeGenerator::EnterFinallyBlock() { 4823 void FullCodeGenerator::EnterFinallyBlock() {
4745 ASSERT(!result_register().is(r1)); 4824 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
4746 // Store result register while executing finally block. 4825 ASSERT(!result_register().is(x10));
4747 __ push(result_register()); 4826 // Preserve the result register while executing finally block.
4748 // Cook return address in link register to stack (smi encoded Code* delta) 4827 // Also cook the return address in lr to the stack (smi encoded Code* delta).
4749 __ sub(r1, lr, Operand(masm_->CodeObject())); 4828 __ Sub(x10, lr, Operand(masm_->CodeObject()));
4750 __ SmiTag(r1); 4829 __ SmiTag(x10);
4751 4830 __ Push(result_register(), x10);
4752 // Store result register while executing finally block.
4753 __ push(r1);
4754 4831
4755 // Store pending message while executing finally block. 4832 // Store pending message while executing finally block.
4756 ExternalReference pending_message_obj = 4833 ExternalReference pending_message_obj =
4757 ExternalReference::address_of_pending_message_obj(isolate()); 4834 ExternalReference::address_of_pending_message_obj(isolate());
4758 __ mov(ip, Operand(pending_message_obj)); 4835 __ Mov(x10, Operand(pending_message_obj));
4759 __ ldr(r1, MemOperand(ip)); 4836 __ Ldr(x10, MemOperand(x10));
4760 __ push(r1);
4761 4837
4762 ExternalReference has_pending_message = 4838 ExternalReference has_pending_message =
4763 ExternalReference::address_of_has_pending_message(isolate()); 4839 ExternalReference::address_of_has_pending_message(isolate());
4764 __ mov(ip, Operand(has_pending_message)); 4840 __ Mov(x11, Operand(has_pending_message));
4765 __ ldr(r1, MemOperand(ip)); 4841 __ Ldr(x11, MemOperand(x11));
4766 __ SmiTag(r1); 4842 __ SmiTag(x11);
4767 __ push(r1); 4843
4844 __ Push(x10, x11);
4768 4845
4769 ExternalReference pending_message_script = 4846 ExternalReference pending_message_script =
4770 ExternalReference::address_of_pending_message_script(isolate()); 4847 ExternalReference::address_of_pending_message_script(isolate());
4771 __ mov(ip, Operand(pending_message_script)); 4848 __ Mov(x10, Operand(pending_message_script));
4772 __ ldr(r1, MemOperand(ip)); 4849 __ Ldr(x10, MemOperand(x10));
4773 __ push(r1); 4850 __ Push(x10);
4774 } 4851 }
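Note: the "cooking" above stores the return address as a GC-safe, smi-encoded offset from the code object, so the code can move without breaking the saved lr; in pseudo-form:

    // cooked   = SmiTag(lr - CodeObject)         (EnterFinallyBlock)
    // restored = SmiUntag(cooked) + CodeObject   (ExitFinallyBlock, then Br)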
4775 4852
4776 4853
4777 void FullCodeGenerator::ExitFinallyBlock() { 4854 void FullCodeGenerator::ExitFinallyBlock() {
4778 ASSERT(!result_register().is(r1)); 4855 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
4856 ASSERT(!result_register().is(x10));
4857
4779 // Restore pending message from stack. 4858 // Restore pending message from stack.
4780 __ pop(r1); 4859 __ Pop(x10, x11, x12);
4781 ExternalReference pending_message_script = 4860 ExternalReference pending_message_script =
4782 ExternalReference::address_of_pending_message_script(isolate()); 4861 ExternalReference::address_of_pending_message_script(isolate());
4783 __ mov(ip, Operand(pending_message_script)); 4862 __ Mov(x13, Operand(pending_message_script));
4784 __ str(r1, MemOperand(ip)); 4863 __ Str(x10, MemOperand(x13));
4785 4864
4786 __ pop(r1); 4865 __ SmiUntag(x11);
4787 __ SmiUntag(r1);
4788 ExternalReference has_pending_message = 4866 ExternalReference has_pending_message =
4789 ExternalReference::address_of_has_pending_message(isolate()); 4867 ExternalReference::address_of_has_pending_message(isolate());
4790 __ mov(ip, Operand(has_pending_message)); 4868 __ Mov(x13, Operand(has_pending_message));
4791 __ str(r1, MemOperand(ip)); 4869 __ Str(x11, MemOperand(x13));
4792 4870
4793 __ pop(r1);
4794 ExternalReference pending_message_obj = 4871 ExternalReference pending_message_obj =
4795 ExternalReference::address_of_pending_message_obj(isolate()); 4872 ExternalReference::address_of_pending_message_obj(isolate());
4796 __ mov(ip, Operand(pending_message_obj)); 4873 __ Mov(x13, Operand(pending_message_obj));
4797 __ str(r1, MemOperand(ip)); 4874 __ Str(x12, MemOperand(x13));
4798 4875
4799 // Restore result register from stack. 4876 // Restore result register and cooked return address from the stack.
4800 __ pop(r1); 4877 __ Pop(x10, result_register());
4801 4878
4802 // Uncook return address and return. 4879 // Uncook the return address (see EnterFinallyBlock).
4803 __ pop(result_register()); 4880 __ SmiUntag(x10);
4804 __ SmiUntag(r1); 4881 __ Add(x11, x10, Operand(masm_->CodeObject()));
4805 __ add(pc, r1, Operand(masm_->CodeObject())); 4882 __ Br(x11);
4806 }
4807
4808
4809 #undef __
4810
4811 #define __ ACCESS_MASM(masm())
4812
4813 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4814 int* stack_depth,
4815 int* context_length) {
4816 // The macros used here must preserve the result register.
4817
4818 // Because the handler block contains the context of the finally
4819 // code, we can restore it directly from there for the finally code
4820 // rather than iteratively unwinding contexts via their previous
4821 // links.
4822 __ Drop(*stack_depth); // Down to the handler block.
4823 if (*context_length > 0) {
4824 // Restore the context to its dedicated register and the stack.
4825 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4826 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4827 }
4828 __ PopTryHandler();
4829 __ bl(finally_entry_);
4830
4831 *stack_depth = 0;
4832 *context_length = 0;
4833 return previous_;
4834 } 4883 }
4835 4884
4836 4885
4837 #undef __ 4886 #undef __
4838 4887
4839 4888
4840 static const int32_t kBranchBeforeInterrupt = 0x5a000004;
4841
4842
4843 void BackEdgeTable::PatchAt(Code* unoptimized_code, 4889 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4844 Address pc, 4890 Address pc,
4845 BackEdgeState target_state, 4891 BackEdgeState target_state,
4846 Code* replacement_code) { 4892 Code* replacement_code) {
4847 static const int kInstrSize = Assembler::kInstrSize; 4893 // Turn the jump into a nop.
4848 Address branch_address = pc - 3 * kInstrSize; 4894 Address branch_address = pc - 3 * kInstructionSize;
4849 CodePatcher patcher(branch_address, 1); 4895 PatchingAssembler patcher(branch_address, 1);
4850 4896
4851 switch (target_state) { 4897 switch (target_state) {
4852 case INTERRUPT: 4898 case INTERRUPT:
4853 // <decrement profiling counter> 4899 // <decrement profiling counter>
4854 // 2a 00 00 01 bpl ok 4900 // .. .. .. .. b.pl ok
4855 // e5 9f c? ?? ldr ip, [pc, <interrupt stub address>] 4901 // .. .. .. .. ldr x16, pc+<interrupt stub address>
4856 // e1 2f ff 3c blx ip 4902 // .. .. .. .. blr x16
4903 // ... more instructions.
4857 // ok-label 4904 // ok-label
4858 patcher.masm()->b(4 * kInstrSize, pl); // Jump offset is 4 instructions. 4905 // Jump offset is 6 instructions.
4859 ASSERT_EQ(kBranchBeforeInterrupt, Memory::int32_at(branch_address)); 4906 ASSERT(Instruction::Cast(branch_address)
4907 ->IsNop(Assembler::INTERRUPT_CODE_NOP));
4908 patcher.b(6, pl);
4860 break; 4909 break;
4861 case ON_STACK_REPLACEMENT: 4910 case ON_STACK_REPLACEMENT:
4862 case OSR_AFTER_STACK_CHECK: 4911 case OSR_AFTER_STACK_CHECK:
4863 // <decrement profiling counter> 4912 // <decrement profiling counter>
4864 // e1 a0 00 00 mov r0, r0 (NOP) 4913 // .. .. .. .. mov x0, x0 (NOP)
4865 // e5 9f c? ?? ldr ip, [pc, <on-stack replacement address>] 4914 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
4866 // e1 2f ff 3c blx ip 4915 // .. .. .. .. blr x16
4867 // ok-label 4916 ASSERT(Instruction::Cast(branch_address)->IsCondBranchImm());
4868 patcher.masm()->nop(); 4917 ASSERT(Instruction::Cast(branch_address)->ImmPCOffset() ==
4918 6 * kInstructionSize);
4919 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
4869 break; 4920 break;
4870 } 4921 }
4871 4922
4872 Address pc_immediate_load_address = pc - 2 * kInstrSize;
4873 // Replace the call address. 4923 // Replace the call address.
4874 uint32_t interrupt_address_offset = 4924 Instruction* load = Instruction::Cast(pc)->preceding(2);
4875 Memory::uint16_at(pc_immediate_load_address) & 0xfff; 4925 Address interrupt_address_pointer =
4876 Address interrupt_address_pointer = pc + interrupt_address_offset; 4926 reinterpret_cast<Address>(load) + load->ImmPCOffset();
4877 Memory::uint32_at(interrupt_address_pointer) = 4927 ASSERT((Memory::uint64_at(interrupt_address_pointer) ==
4878 reinterpret_cast<uint32_t>(replacement_code->entry()); 4928 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4929 ->builtins()
4930 ->OnStackReplacement()
4931 ->entry())) ||
4932 (Memory::uint64_at(interrupt_address_pointer) ==
4933 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4934 ->builtins()
4935 ->InterruptCheck()
4936 ->entry())) ||
4937 (Memory::uint64_at(interrupt_address_pointer) ==
4938 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4939 ->builtins()
4940 ->OsrAfterStackCheck()
4941 ->entry())) ||
4942 (Memory::uint64_at(interrupt_address_pointer) ==
4943 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
4944 ->builtins()
4945 ->OnStackReplacement()
4946 ->entry())));
4947 Memory::uint64_at(interrupt_address_pointer) =
4948 reinterpret_cast<uint64_t>(replacement_code->entry());
4879 4949
4880 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( 4950 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4881 unoptimized_code, pc_immediate_load_address, replacement_code); 4951 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
4882 } 4952 }
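Note: reconstructed layout of the back-edge site that PatchAt toggles (a sketch derived from the offsets used above):

    //   pc - 3 * kInstructionSize : b.pl ok   <->  nop   (patched here)
    //   pc - 2 * kInstructionSize : ldr x16, <pc-relative 64-bit literal>
    //   pc - 1 * kInstructionSize : blr x16
    // The literal holds the stub entry point; it is what
    // Memory::uint64_at(interrupt_address_pointer) reads and rewrites.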
4883 4953
4884 4954
4885 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( 4955 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4886 Isolate* isolate, 4956 Isolate* isolate,
4887 Code* unoptimized_code, 4957 Code* unoptimized_code,
4888 Address pc) { 4958 Address pc) {
4889 static const int kInstrSize = Assembler::kInstrSize; 4959 // TODO(jbramley): There should be some extra assertions here (as in the ARM
4890 ASSERT(Memory::int32_at(pc - kInstrSize) == kBlxIp); 4960 // back-end), but this function is gone in bleeding_edge so it might not
4961 // matter anyway.
4962 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
4891 4963
4892 Address branch_address = pc - 3 * kInstrSize; 4964 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
4893 Address pc_immediate_load_address = pc - 2 * kInstrSize; 4965 Instruction* load = Instruction::Cast(pc)->preceding(2);
4894 uint32_t interrupt_address_offset = 4966 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
4895 Memory::uint16_at(pc_immediate_load_address) & 0xfff; 4967 load->ImmPCOffset());
4896 Address interrupt_address_pointer = pc + interrupt_address_offset; 4968 if (entry == reinterpret_cast<uint64_t>(
4897 4969 isolate->builtins()->OnStackReplacement()->entry())) {
4898 if (Memory::int32_at(branch_address) == kBranchBeforeInterrupt) { 4970 return ON_STACK_REPLACEMENT;
4899 ASSERT(Memory::uint32_at(interrupt_address_pointer) == 4971 } else if (entry == reinterpret_cast<uint64_t>(
4900 reinterpret_cast<uint32_t>( 4972 isolate->builtins()->OsrAfterStackCheck()->entry())) {
4901 isolate->builtins()->InterruptCheck()->entry())); 4973 return OSR_AFTER_STACK_CHECK;
4902 ASSERT(Assembler::IsLdrPcImmediateOffset( 4974 } else {
4903 Assembler::instr_at(pc_immediate_load_address))); 4975 UNREACHABLE();
4904 return INTERRUPT; 4976 }
4905 } 4977 }
4906 4978
4907 ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address))); 4979 return INTERRUPT;
4908 ASSERT(Assembler::IsLdrPcImmediateOffset( 4980 }
4909 Assembler::instr_at(pc_immediate_load_address)));
4910 4981
4911 if (Memory::uint32_at(interrupt_address_pointer) == 4982
4912 reinterpret_cast<uint32_t>( 4983 #define __ ACCESS_MASM(masm())
4913 isolate->builtins()->OnStackReplacement()->entry())) { 4984
4914 return ON_STACK_REPLACEMENT; 4985
4986 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4987 int* stack_depth,
4988 int* context_length) {
4989 ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
4990 // The macros used here must preserve the result register.
4991
4992 // Because the handler block contains the context of the finally
4993 // code, we can restore it directly from there for the finally code
4994 // rather than iteratively unwinding contexts via their previous
4995 // links.
4996 __ Drop(*stack_depth); // Down to the handler block.
4997 if (*context_length > 0) {
4998 // Restore the context to its dedicated register and the stack.
4999 __ Peek(cp, StackHandlerConstants::kContextOffset);
5000 __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4915 } 5001 }
5002 __ PopTryHandler();
5003 __ Bl(finally_entry_);
4916 5004
4917 ASSERT(Memory::uint32_at(interrupt_address_pointer) == 5005 *stack_depth = 0;
4918 reinterpret_cast<uint32_t>( 5006 *context_length = 0;
4919 isolate->builtins()->OsrAfterStackCheck()->entry())); 5007 return previous_;
4920 return OSR_AFTER_STACK_CHECK;
4921 } 5008 }
4922 5009
4923 5010
5011 #undef __
5012
5013
4924 } } // namespace v8::internal 5014 } } // namespace v8::internal
4925 5015
4926 #endif // V8_TARGET_ARCH_ARM 5016 #endif // V8_TARGET_ARCH_A64