Chromium Code Reviews

Side by Side Diff: src/a64/full-codegen-a64.cc

Issue 144963003: A64: add missing files. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 11 months ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_ARM) 30 #if defined(V8_TARGET_ARCH_A64)
31 31
32 #include "code-stubs.h" 32 #include "code-stubs.h"
33 #include "codegen.h" 33 #include "codegen.h"
34 #include "compiler.h" 34 #include "compiler.h"
35 #include "debug.h" 35 #include "debug.h"
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "isolate-inl.h" 37 #include "isolate-inl.h"
38 #include "parser.h" 38 #include "parser.h"
39 #include "scopes.h" 39 #include "scopes.h"
40 #include "stub-cache.h" 40 #include "stub-cache.h"
41 41
42 #include "arm/code-stubs-arm.h" 42 #include "a64/code-stubs-a64.h"
43 #include "arm/macro-assembler-arm.h" 43 #include "a64/macro-assembler-a64.h"
44 44
45 namespace v8 { 45 namespace v8 {
46 namespace internal { 46 namespace internal {
47 47
48 #define __ ACCESS_MASM(masm_) 48 #define __ ACCESS_MASM(masm_)
49 49
50
51 // A patch site is a location in the code which it is possible to patch. This
52 // class has a number of methods to emit the code which is patchable and the
53 // method EmitPatchInfo to record a marker back to the patchable code. This
54 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit
55 // immediate value is used) is the delta from the pc to the first instruction of
56 // the patchable code.
57 class JumpPatchSite BASE_EMBEDDED { 50 class JumpPatchSite BASE_EMBEDDED {
58 public: 51 public:
59 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { 52 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
60 #ifdef DEBUG 53 #ifdef DEBUG
61 info_emitted_ = false; 54 info_emitted_ = false;
62 #endif 55 #endif
63 } 56 }
64 57
65 ~JumpPatchSite() { 58 ~JumpPatchSite() {
66 ASSERT(patch_site_.is_bound() == info_emitted_); 59 if (patch_site_.is_bound()) {
60 ASSERT(info_emitted_);
61 } else {
62 ASSERT(reg_.IsNone());
63 }
67 } 64 }
68 65
69 // When initially emitting this ensure that a jump is always generated to skip
70 // the inlined smi code.
71 void EmitJumpIfNotSmi(Register reg, Label* target) { 66 void EmitJumpIfNotSmi(Register reg, Label* target) {
72 ASSERT(!patch_site_.is_bound() && !info_emitted_); 67 // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc.
73 Assembler::BlockConstPoolScope block_const_pool(masm_); 68 InstructionAccurateScope scope(masm_, 1);
69 ASSERT(!info_emitted_);
70 ASSERT(reg.Is64Bits());
71 ASSERT(!reg.Is(csp));
72 reg_ = reg;
74 __ bind(&patch_site_); 73 __ bind(&patch_site_);
75 __ cmp(reg, Operand(reg)); 74 __ tbz(xzr, 0, target); // Always taken before patched.
76 __ b(eq, target); // Always taken before patched.
77 } 75 }
78 76
79 // When initially emitting this ensure that a jump is never generated to skip
80 // the inlined smi code.
81 void EmitJumpIfSmi(Register reg, Label* target) { 77 void EmitJumpIfSmi(Register reg, Label* target) {
82 ASSERT(!patch_site_.is_bound() && !info_emitted_); 78 // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc.
83 Assembler::BlockConstPoolScope block_const_pool(masm_); 79 InstructionAccurateScope scope(masm_, 1);
80 ASSERT(!info_emitted_);
81 ASSERT(reg.Is64Bits());
82 ASSERT(!reg.Is(csp));
83 reg_ = reg;
84 __ bind(&patch_site_); 84 __ bind(&patch_site_);
85 __ cmp(reg, Operand(reg)); 85 __ tbnz(xzr, 0, target); // Never taken before patched.
86 __ b(ne, target); // Never taken before patched. 86 }
87
88 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
89 // We need to use ip0, so don't allow access to the MacroAssembler.
90 InstructionAccurateScope scope(masm_);
91 __ orr(ip0, reg1, reg2);
92 EmitJumpIfNotSmi(ip0, target);
87 } 93 }
88 94
89 void EmitPatchInfo() { 95 void EmitPatchInfo() {
90 // Block literal pool emission whilst recording patch site information. 96 Assembler::BlockConstPoolScope scope(masm_);
91 Assembler::BlockConstPoolScope block_const_pool(masm_); 97 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
92 if (patch_site_.is_bound()) {
93 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
94 Register reg;
95 reg.set_code(delta_to_patch_site / kOff12Mask);
96 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
97 #ifdef DEBUG 98 #ifdef DEBUG
98 info_emitted_ = true; 99 info_emitted_ = true;
99 #endif 100 #endif
100 } else {
101 __ nop(); // Signals no inlined code.
102 }
103 } 101 }
104 102
105 private: 103 private:
106 MacroAssembler* masm_; 104 MacroAssembler* masm_;
107 Label patch_site_; 105 Label patch_site_;
106 Register reg_;
108 #ifdef DEBUG 107 #ifdef DEBUG
109 bool info_emitted_; 108 bool info_emitted_;
110 #endif 109 #endif
111 }; 110 };
112 111
113 112
114 // Generate code for a JS function. On entry to the function the receiver 113 // Generate code for a JS function. On entry to the function the receiver
115 // and arguments have been pushed on the stack left to right. The actual 114 // and arguments have been pushed on the stack left to right. The actual
116 // argument count matches the formal parameter count expected by the 115 // argument count matches the formal parameter count expected by the
117 // function. 116 // function.
118 // 117 //
119 // The live registers are: 118 // The live registers are:
120 // o r1: the JS function object being called (i.e., ourselves) 119 // - x1: the JS function object being called (i.e. ourselves).
121 // o cp: our context 120 // - x5: call kind or strict mode.
122 // o fp: our caller's frame pointer 121 // - cp: our context.
123 // o sp: stack pointer 122 // - fp: our caller's frame pointer.
124 // o lr: return address 123 // - jssp: stack pointer.
124 // - lr: return address.
125 // 125 //
126 // The function builds a JS frame. Please see JavaScriptFrameConstants in 126 // The function builds a JS frame. See JavaScriptFrameConstants in
127 // frames-arm.h for its layout. 127 // frames-arm.h for its layout.
128 void FullCodeGenerator::Generate() { 128 void FullCodeGenerator::Generate() {
129 CompilationInfo* info = info_; 129 CompilationInfo* info = info_;
130 handler_table_ = 130 handler_table_ =
131 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 131 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
132 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell( 132 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
133 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 133 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
134 SetFunctionPosition(function()); 134 SetFunctionPosition(function());
135 Comment cmnt(masm_, "[ function compiled by full code generator"); 135 Comment cmnt(masm_, "[ Function compiled by full code generator");
136 136
137 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 137 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
138 138
139 #ifdef DEBUG 139 #ifdef DEBUG
140 if (strlen(FLAG_stop_at) > 0 && 140 if (strlen(FLAG_stop_at) > 0 &&
141 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 141 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
142 __ stop("stop-at"); 142 __ Debug("stop-at", __LINE__, BREAK);
143 } 143 }
144 #endif 144 #endif
145 145
146 // Strict mode functions and builtins need to replace the receiver 146 // Strict mode functions and builtins need to replace the receiver
147 // with undefined when called as functions (without an explicit 147 // with undefined when called as functions (without an explicit
148 // receiver object). r5 is zero for method calls and non-zero for 148 // receiver object). x5 is zero for method calls and non-zero for
149 // function calls. 149 // function calls.
150 if (!info->is_classic_mode() || info->is_native()) { 150 if (!info->is_classic_mode() || info->is_native()) {
151 Label ok; 151 Label ok;
152 __ cmp(r5, Operand::Zero()); 152 __ Cbz(x5, &ok);
153 __ b(eq, &ok); 153 int receiver_offset = info->scope()->num_parameters() * kXRegSizeInBytes;
154 int receiver_offset = info->scope()->num_parameters() * kPointerSize; 154 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
155 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 155 __ Poke(x10, receiver_offset);
156 __ str(r2, MemOperand(sp, receiver_offset)); 156 __ Bind(&ok);
157 __ bind(&ok);
158 } 157 }
159 158
160 // Open a frame scope to indicate that there is a frame on the stack. The 159
161 // MANUAL indicates that the scope shouldn't actually generate code to set up 160 // Open a frame scope to indicate that there is a frame on the stack.
162 // the frame (that is done below). 161 // The MANUAL indicates that the scope shouldn't actually generate code
162 // to set up the frame because we do it manually below.
163 FrameScope frame_scope(masm_, StackFrame::MANUAL); 163 FrameScope frame_scope(masm_, StackFrame::MANUAL);
164 164
165 // This call emits the following sequence in a way that can be patched for
166 // code ageing support:
167 // Push(lr, fp, cp, x1);
168 // Add(fp, jssp, 2 * kPointerSize);
165 info->set_prologue_offset(masm_->pc_offset()); 169 info->set_prologue_offset(masm_->pc_offset());
166 { 170 __ EmitFrameSetupForCodeAgePatching();
167 PredictableCodeSizeScope predictible_code_size_scope(
168 masm_, kNoCodeAgeSequenceLength * Assembler::kInstrSize);
169 // The following three instructions must remain together and unmodified
170 // for code aging to work properly.
171 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
172 // Load undefined value here, so the value is ready for the loop
173 // below.
174 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
175 // Adjust FP to point to saved FP.
176 __ add(fp, sp, Operand(2 * kPointerSize));
177 }
178 info->AddNoFrameRange(0, masm_->pc_offset()); 171 info->AddNoFrameRange(0, masm_->pc_offset());
179 172
173 // Reserve space on the stack for locals.
180 { Comment cmnt(masm_, "[ Allocate locals"); 174 { Comment cmnt(masm_, "[ Allocate locals");
181 int locals_count = info->scope()->num_stack_slots(); 175 int locals_count = info->scope()->num_stack_slots();
182 // Generators allocate locals, if any, in context slots. 176 // Generators allocate locals, if any, in context slots.
183 ASSERT(!info->function()->is_generator() || locals_count == 0); 177 ASSERT(!info->function()->is_generator() || locals_count == 0);
184 for (int i = 0; i < locals_count; i++) { 178
185 __ push(ip); 179 if (locals_count > 0) {
180 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
181 __ PushMultipleTimes(locals_count, x10);
186 } 182 }
187 } 183 }
188 184
189 bool function_in_register = true; 185 bool function_in_register_x1 = true;
190 186
191 // Possibly allocate a local context.
192 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 187 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
193 if (heap_slots > 0) { 188 if (heap_slots > 0) {
194 // Argument to NewContext is the function, which is still in r1. 189 // Argument to NewContext is the function, which is still in x1.
195 Comment cmnt(masm_, "[ Allocate context"); 190 Comment cmnt(masm_, "[ Allocate context");
196 __ push(r1); 191 __ Push(x1);
197 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { 192 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
198 __ Push(info->scope()->GetScopeInfo()); 193 __ Push(info->scope()->GetScopeInfo());
199 __ CallRuntime(Runtime::kNewGlobalContext, 2); 194 __ CallRuntime(Runtime::kNewGlobalContext, 2);
200 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { 195 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
201 FastNewContextStub stub(heap_slots); 196 FastNewContextStub stub(heap_slots);
202 __ CallStub(&stub); 197 __ CallStub(&stub);
203 } else { 198 } else {
204 __ CallRuntime(Runtime::kNewFunctionContext, 1); 199 __ CallRuntime(Runtime::kNewFunctionContext, 1);
205 } 200 }
206 function_in_register = false; 201 function_in_register_x1 = false;
207 // Context is returned in both r0 and cp. It replaces the context 202 // Context is returned in both x0 and cp. It replaces the context
208 // passed to us. It's saved in the stack and kept live in cp. 203 // passed to us. It's saved in the stack and kept live in cp.
209 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 204 __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
210 // Copy any necessary parameters into the context. 205 // Copy any necessary parameters into the context.
211 int num_parameters = info->scope()->num_parameters(); 206 int num_parameters = info->scope()->num_parameters();
212 for (int i = 0; i < num_parameters; i++) { 207 for (int i = 0; i < num_parameters; i++) {
213 Variable* var = scope()->parameter(i); 208 Variable* var = scope()->parameter(i);
214 if (var->IsContextSlot()) { 209 if (var->IsContextSlot()) {
215 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 210 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
216 (num_parameters - 1 - i) * kPointerSize; 211 (num_parameters - 1 - i) * kPointerSize;
217 // Load parameter from stack. 212 // Load parameter from stack.
218 __ ldr(r0, MemOperand(fp, parameter_offset)); 213 __ Ldr(x10, MemOperand(fp, parameter_offset));
219 // Store it in the context. 214 // Store it in the context.
220 MemOperand target = ContextOperand(cp, var->index()); 215 MemOperand target = ContextMemOperand(cp, var->index());
221 __ str(r0, target); 216 __ Str(x10, target);
222 217
223 // Update the write barrier. 218 // Update the write barrier.
224 __ RecordWriteContextSlot( 219 __ RecordWriteContextSlot(
225 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs); 220 cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
226 } 221 }
227 } 222 }
228 } 223 }
229 224
230 Variable* arguments = scope()->arguments(); 225 Variable* arguments = scope()->arguments();
231 if (arguments != NULL) { 226 if (arguments != NULL) {
232 // Function uses arguments object. 227 // Function uses arguments object.
233 Comment cmnt(masm_, "[ Allocate arguments object"); 228 Comment cmnt(masm_, "[ Allocate arguments object");
234 if (!function_in_register) { 229 if (!function_in_register_x1) {
235 // Load this again, if it's used by the local context below. 230 // Load this again, if it's used by the local context below.
236 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 231 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
237 } else { 232 } else {
238 __ mov(r3, r1); 233 __ Mov(x3, x1);
239 } 234 }
240 // Receiver is just before the parameters on the caller's stack. 235 // Receiver is just before the parameters on the caller's stack.
241 int num_parameters = info->scope()->num_parameters(); 236 int num_parameters = info->scope()->num_parameters();
242 int offset = num_parameters * kPointerSize; 237 int offset = num_parameters * kPointerSize;
243 __ add(r2, fp, 238 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
244 Operand(StandardFrameConstants::kCallerSPOffset + offset)); 239 __ Mov(x1, Operand(Smi::FromInt(num_parameters)));
245 __ mov(r1, Operand(Smi::FromInt(num_parameters))); 240 __ Push(x3, x2, x1);
246 __ Push(r3, r2, r1);
247 241
248 // Arguments to ArgumentsAccessStub: 242 // Arguments to ArgumentsAccessStub:
249 // function, receiver address, parameter count. 243 // function, receiver address, parameter count.
250 // The stub will rewrite receiever and parameter count if the previous 244 // The stub will rewrite receiver and parameter count if the previous
251 // stack frame was an arguments adapter frame. 245 // stack frame was an arguments adapter frame.
252 ArgumentsAccessStub::Type type; 246 ArgumentsAccessStub::Type type;
253 if (!is_classic_mode()) { 247 if (!is_classic_mode()) {
254 type = ArgumentsAccessStub::NEW_STRICT; 248 type = ArgumentsAccessStub::NEW_STRICT;
255 } else if (function()->has_duplicate_parameters()) { 249 } else if (function()->has_duplicate_parameters()) {
256 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW; 250 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
257 } else { 251 } else {
258 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST; 252 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
259 } 253 }
260 ArgumentsAccessStub stub(type); 254 ArgumentsAccessStub stub(type);
261 __ CallStub(&stub); 255 __ CallStub(&stub);
262 256
263 SetVar(arguments, r0, r1, r2); 257 SetVar(arguments, x0, x1, x2);
264 } 258 }
265 259
266 if (FLAG_trace) { 260 if (FLAG_trace) {
267 __ CallRuntime(Runtime::kTraceEnter, 0); 261 __ CallRuntime(Runtime::kTraceEnter, 0);
268 } 262 }
269 263
264
270 // Visit the declarations and body unless there is an illegal 265 // Visit the declarations and body unless there is an illegal
271 // redeclaration. 266 // redeclaration.
272 if (scope()->HasIllegalRedeclaration()) { 267 if (scope()->HasIllegalRedeclaration()) {
273 Comment cmnt(masm_, "[ Declarations"); 268 Comment cmnt(masm_, "[ Declarations");
274 scope()->VisitIllegalRedeclaration(this); 269 scope()->VisitIllegalRedeclaration(this);
275 270
276 } else { 271 } else {
277 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 272 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
278 { Comment cmnt(masm_, "[ Declarations"); 273 { Comment cmnt(masm_, "[ Declarations");
279 // For named function expressions, declare the function name as a
280 // constant.
281 if (scope()->is_function_scope() && scope()->function() != NULL) { 274 if (scope()->is_function_scope() && scope()->function() != NULL) {
282 VariableDeclaration* function = scope()->function(); 275 VariableDeclaration* function = scope()->function();
283 ASSERT(function->proxy()->var()->mode() == CONST || 276 ASSERT(function->proxy()->var()->mode() == CONST ||
284 function->proxy()->var()->mode() == CONST_HARMONY); 277 function->proxy()->var()->mode() == CONST_HARMONY);
285 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); 278 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
286 VisitVariableDeclaration(function); 279 VisitVariableDeclaration(function);
287 } 280 }
288 VisitDeclarations(scope()->declarations()); 281 VisitDeclarations(scope()->declarations());
289 } 282 }
283 }
290 284
291 { Comment cmnt(masm_, "[ Stack check"); 285 // TODO(jbramley): Why not call EmitBackEdgeBookkeeping()?
292 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 286 { Comment cmnt(masm_, "[ Stack check");
293 Label ok; 287 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
294 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 288 Label ok;
295 __ cmp(sp, Operand(ip)); 289 ASSERT(jssp.Is(__ StackPointer()));
296 __ b(hs, &ok); 290 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
297 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); 291 __ B(hs, &ok);
298 StackCheckStub stub; 292 PredictableCodeSizeScope predictable(masm_,
299 __ CallStub(&stub); 293 Assembler::kCallSizeWithRelocation);
300 __ bind(&ok); 294 StackCheckStub stub;
301 } 295 __ CallStub(&stub);
296 __ Bind(&ok);
297 }
302 298
303 { Comment cmnt(masm_, "[ Body"); 299 { Comment cmnt(masm_, "[ Body");
304 ASSERT(loop_depth() == 0); 300 ASSERT(loop_depth() == 0);
305 VisitStatements(function()->body()); 301 VisitStatements(function()->body());
306 ASSERT(loop_depth() == 0); 302 ASSERT(loop_depth() == 0);
307 }
308 } 303 }
309 304
310 // Always emit a 'return undefined' in case control fell off the end of 305 // Always emit a 'return undefined' in case control fell off the end of
311 // the body. 306 // the body.
312 { Comment cmnt(masm_, "[ return <undefined>;"); 307 { Comment cmnt(masm_, "[ return <undefined>;");
313 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 308 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
314 } 309 }
315 EmitReturnSequence(); 310 EmitReturnSequence();
316 311
317 // Force emit the constant pool, so it doesn't get emitted in the middle 312 // Force emit the constant pool, so it doesn't get emitted in the middle
318 // of the back edge table. 313 // of the back edge table.
319 masm()->CheckConstPool(true, false); 314 masm()->CheckConstPool(true, false);
320 } 315 }
321 316
322 317
323 void FullCodeGenerator::ClearAccumulator() { 318 void FullCodeGenerator::ClearAccumulator() {
324 __ mov(r0, Operand(Smi::FromInt(0))); 319 __ Mov(x0, Operand(Smi::FromInt(0)));
325 } 320 }
326 321
327 322
323 // TODO(mcapewel): untested, ported as part of merge.
328 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { 324 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
329 __ mov(r2, Operand(profiling_counter_)); 325 __ Mov(x2, Operand(profiling_counter_));
330 __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); 326 __ Ldr(x3, FieldMemOperand(x2, JSGlobalPropertyCell::kValueOffset));
331 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); 327 __ Subs(x3, x3, Operand(Smi::FromInt(delta)));
332 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); 328 __ Str(x3, FieldMemOperand(x2, JSGlobalPropertyCell::kValueOffset));
333 } 329 }
334 330
335 331
332 // TODO(mcapewel): untested, ported as part of merge.
336 void FullCodeGenerator::EmitProfilingCounterReset() { 333 void FullCodeGenerator::EmitProfilingCounterReset() {
337 int reset_value = FLAG_interrupt_budget; 334 int reset_value = FLAG_interrupt_budget;
338 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { 335 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
 339 // Self-optimization is a one-off thing: if it fails, don't try again. 336 // Self-optimization is a one-off thing: if it fails, don't try again.
340 reset_value = Smi::kMaxValue; 337 reset_value = Smi::kMaxValue;
341 } 338 }
342 if (isolate()->IsDebuggerActive()) { 339 if (isolate()->IsDebuggerActive()) {
343 // Detect debug break requests as soon as possible. 340 // Detect debug break requests as soon as possible.
344 reset_value = FLAG_interrupt_budget >> 4; 341 reset_value = FLAG_interrupt_budget >> 4;
345 } 342 }
346 __ mov(r2, Operand(profiling_counter_)); 343 __ Mov(x2, Operand(profiling_counter_));
347 __ mov(r3, Operand(Smi::FromInt(reset_value))); 344 __ Mov(x3, Operand(Smi::FromInt(reset_value)));
348 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); 345 __ Str(x3, FieldMemOperand(x2, JSGlobalPropertyCell::kValueOffset));
349 } 346 }
350 347
351 348
352 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 349 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
353 Label* back_edge_target) { 350 Label* back_edge_target) {
351 ASSERT(jssp.Is(__ StackPointer()));
354 Comment cmnt(masm_, "[ Back edge bookkeeping"); 352 Comment cmnt(masm_, "[ Back edge bookkeeping");
355 // Block literal pools whilst emitting back edge code. 353 // Block literal pools whilst emitting back edge code.
356 Assembler::BlockConstPoolScope block_const_pool(masm_); 354 Assembler::BlockConstPoolScope block_const_pool(masm_);
357 Label ok; 355 Label ok;
358 356
359 int weight = 1; 357 int weight = 1;
360 if (FLAG_weighted_back_edges) { 358 if (FLAG_weighted_back_edges) {
361 ASSERT(back_edge_target->is_bound()); 359 ASSERT(back_edge_target->is_bound());
362 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); 360 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
363 weight = Min(kMaxBackEdgeWeight, 361 weight = Min(kMaxBackEdgeWeight,
364 Max(1, distance / kBackEdgeDistanceUnit)); 362 Max(1, distance / kBackEdgeDistanceUnit));
365 } 363 }
366 EmitProfilingCounterDecrement(weight); 364 EmitProfilingCounterDecrement(weight);
367 __ b(pl, &ok); 365 __ B(pl, &ok);
368 InterruptStub stub; 366 InterruptStub stub;
369 __ CallStub(&stub); 367 __ CallStub(&stub);
370 368
371 // Record a mapping of this PC offset to the OSR id. This is used to find 369 // TODO(all): Implement OSR/Crankshaft code.
372 // the AST id from the unoptimized code in order to use it as a key into
373 // the deoptimization input data found in the optimized code.
374 RecordBackEdge(stmt->OsrEntryId());
375 370
376 EmitProfilingCounterReset(); 371 EmitProfilingCounterReset();
377 372
378 __ bind(&ok); 373 __ Bind(&ok);
379 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 374 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
380 // Record a mapping of the OSR id to this PC. This is used if the OSR 375
381 // entry becomes the target of a bailout. We don't expect it to be, but 376 // TODO(all): Implement OSR/Crankshaft code.
382 // we want it to work if it is. 377 ASM_UNIMPLEMENTED(
383 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 378 "FullCodeGenerator::EmitBackEdgeBookkeeping "
379 "Implement OSR/Crankshaft code.");
384 } 380 }
385 381
386 382
387 void FullCodeGenerator::EmitReturnSequence() { 383 void FullCodeGenerator::EmitReturnSequence() {
388 Comment cmnt(masm_, "[ Return sequence"); 384 Comment cmnt(masm_, "[ Return sequence");
385
389 if (return_label_.is_bound()) { 386 if (return_label_.is_bound()) {
390 __ b(&return_label_); 387 __ B(&return_label_);
388
391 } else { 389 } else {
392 __ bind(&return_label_); 390 __ Bind(&return_label_);
393 if (FLAG_trace) { 391 if (FLAG_trace) {
394 // Push the return value on the stack as the parameter. 392 // Push the return value on the stack as the parameter.
395 // Runtime::TraceExit returns its parameter in r0. 393 // Runtime::TraceExit returns its parameter in x0.
396 __ push(r0); 394 __ Push(result_register());
397 __ CallRuntime(Runtime::kTraceExit, 1); 395 __ CallRuntime(Runtime::kTraceExit, 1);
396 ASSERT(x0.Is(result_register()));
398 } 397 }
399 if (FLAG_interrupt_at_exit || FLAG_self_optimization) { 398 if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
400 // Pretend that the exit is a backwards jump to the entry. 399 // Pretend that the exit is a backwards jump to the entry.
401 int weight = 1; 400 int weight = 1;
402 if (info_->ShouldSelfOptimize()) { 401 if (info_->ShouldSelfOptimize()) {
403 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 402 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
404 } else if (FLAG_weighted_back_edges) { 403 } else if (FLAG_weighted_back_edges) {
405 int distance = masm_->pc_offset(); 404 int distance = masm_->pc_offset();
406 weight = Min(kMaxBackEdgeWeight, 405 weight = Min(kMaxBackEdgeWeight,
407 Max(1, distance / kBackEdgeDistanceUnit)); 406 Max(1, distance / kBackEdgeDistanceUnit));
408 } 407 }
409 EmitProfilingCounterDecrement(weight); 408 EmitProfilingCounterDecrement(weight);
410 Label ok; 409 Label ok;
411 __ b(pl, &ok); 410 __ B(pl, &ok);
412 __ push(r0); 411 __ Push(x0);
413 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { 412 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
414 __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 413 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
415 __ push(r2); 414 __ Push(x10);
416 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); 415 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
417 } else { 416 } else {
418 InterruptStub stub; 417 InterruptStub stub;
419 __ CallStub(&stub); 418 __ CallStub(&stub);
420 } 419 }
421 __ pop(r0); 420 __ Pop(x0);
422 EmitProfilingCounterReset(); 421 EmitProfilingCounterReset();
423 __ bind(&ok); 422 __ Bind(&ok);
424 } 423 }
425 424
426 #ifdef DEBUG
427 // Add a label for checking the size of the code used for returning.
428 Label check_exit_codesize;
429 masm_->bind(&check_exit_codesize);
430 #endif
431 // Make sure that the constant pool is not emitted inside of the return 425 // Make sure that the constant pool is not emitted inside of the return
432 // sequence. 426 // sequence. This sequence can get patched when the debugger is used. See
433 { Assembler::BlockConstPoolScope block_const_pool(masm_); 427 // debug-a64.cc:BreakLocationIterator::SetDebugBreakAtReturn().
434 // Here we use masm_-> instead of the __ macro to avoid the code coverage 428 {
435 // tool from instrumenting as we rely on the code size here. 429 InstructionAccurateScope scope(masm_,
436 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; 430 Assembler::kJSRetSequenceInstructions);
437 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 431 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
438 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
439 PredictableCodeSizeScope predictable(masm_, -1);
440 __ RecordJSReturn(); 432 __ RecordJSReturn();
441 masm_->mov(sp, fp); 433 // This code is generated using Assembler methods rather than Macro
434 // Assembler methods because it will be patched later on, and so the size
435 // of the generated code must be consistent.
436 const Register& current_sp = __ StackPointer();
437 // Nothing ensures 16 bytes alignment here.
438 ASSERT(!current_sp.Is(csp));
439 __ mov(current_sp, fp);
442 int no_frame_start = masm_->pc_offset(); 440 int no_frame_start = masm_->pc_offset();
443 masm_->ldm(ia_w, sp, fp.bit() | lr.bit()); 441 __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSizeInBytes, PostIndex));
444 masm_->add(sp, sp, Operand(sp_delta)); 442 // Drop the arguments and receiver and return.
445 masm_->Jump(lr); 443 // TODO(all): This implementation is overkill as it supports 2**31+1
444 // arguments, consider how to improve it without creating a security
445 // hole.
446 __ LoadLiteral(ip0, 3 * kInstructionSize);
447 __ add(current_sp, current_sp, ip0);
448 __ ret();
449 __ dc64(kXRegSizeInBytes * (info_->scope()->num_parameters() + 1));
446 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 450 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
447 } 451 }
448
449 #ifdef DEBUG
450 // Check that the size of the code used for returning is large enough
451 // for the debugger's requirements.
452 ASSERT(Assembler::kJSReturnSequenceInstructions <=
453 masm_->InstructionsGeneratedSince(&check_exit_codesize));
454 #endif
455 } 452 }
456 } 453 }
457 454
458 455
459 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { 456 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
460 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 457 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
461 } 458 }
462 459
463 460
464 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { 461 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
465 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 462 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
466 codegen()->GetVar(result_register(), var); 463 codegen()->GetVar(result_register(), var);
467 } 464 }
468 465
469 466
470 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { 467 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
471 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 468 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
472 codegen()->GetVar(result_register(), var); 469 codegen()->GetVar(result_register(), var);
473 __ push(result_register()); 470 __ Push(result_register());
474 } 471 }
475 472
476 473
477 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 474 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
478 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 475 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
479 // For simplicity we always test the accumulator register. 476 // For simplicity we always test the accumulator register.
480 codegen()->GetVar(result_register(), var); 477 codegen()->GetVar(result_register(), var);
481 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 478 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
482 codegen()->DoTest(this); 479 codegen()->DoTest(this);
483 } 480 }
484 481
485 482
486 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 483 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
484 // Root values have no side effects.
487 } 485 }
488 486
489 487
490 void FullCodeGenerator::AccumulatorValueContext::Plug( 488 void FullCodeGenerator::AccumulatorValueContext::Plug(
491 Heap::RootListIndex index) const { 489 Heap::RootListIndex index) const {
492 __ LoadRoot(result_register(), index); 490 __ LoadRoot(result_register(), index);
493 } 491 }
494 492
495 493
496 void FullCodeGenerator::StackValueContext::Plug( 494 void FullCodeGenerator::StackValueContext::Plug(
497 Heap::RootListIndex index) const { 495 Heap::RootListIndex index) const {
498 __ LoadRoot(result_register(), index); 496 __ LoadRoot(result_register(), index);
499 __ push(result_register()); 497 __ Push(result_register());
500 } 498 }
501 499
502 500
503 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { 501 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
504 codegen()->PrepareForBailoutBeforeSplit(condition(), 502 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
505 true,
506 true_label_,
507 false_label_); 503 false_label_);
508 if (index == Heap::kUndefinedValueRootIndex || 504 if (index == Heap::kUndefinedValueRootIndex ||
509 index == Heap::kNullValueRootIndex || 505 index == Heap::kNullValueRootIndex ||
510 index == Heap::kFalseValueRootIndex) { 506 index == Heap::kFalseValueRootIndex) {
511 if (false_label_ != fall_through_) __ b(false_label_); 507 if (false_label_ != fall_through_) __ B(false_label_);
512 } else if (index == Heap::kTrueValueRootIndex) { 508 } else if (index == Heap::kTrueValueRootIndex) {
513 if (true_label_ != fall_through_) __ b(true_label_); 509 if (true_label_ != fall_through_) __ B(true_label_);
514 } else { 510 } else {
515 __ LoadRoot(result_register(), index); 511 __ LoadRoot(result_register(), index);
516 codegen()->DoTest(this); 512 codegen()->DoTest(this);
517 } 513 }
518 } 514 }
519 515
520 516
521 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const { 517 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
522 } 518 }
523 519
524 520
525 void FullCodeGenerator::AccumulatorValueContext::Plug( 521 void FullCodeGenerator::AccumulatorValueContext::Plug(
526 Handle<Object> lit) const { 522 Handle<Object> lit) const {
527 __ mov(result_register(), Operand(lit)); 523 __ Mov(result_register(), Operand(lit));
528 } 524 }
529 525
530 526
531 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { 527 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
532 // Immediates cannot be pushed directly. 528 // Immediates cannot be pushed directly.
533 __ mov(result_register(), Operand(lit)); 529 __ Mov(result_register(), Operand(lit));
534 __ push(result_register()); 530 __ Push(result_register());
535 } 531 }
536 532
537 533
538 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 534 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
539 codegen()->PrepareForBailoutBeforeSplit(condition(), 535 codegen()->PrepareForBailoutBeforeSplit(condition(),
540 true, 536 true,
541 true_label_, 537 true_label_,
542 false_label_); 538 false_label_);
543 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals. 539 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
544 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 540 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
545 if (false_label_ != fall_through_) __ b(false_label_); 541 if (false_label_ != fall_through_) __ B(false_label_);
546 } else if (lit->IsTrue() || lit->IsJSObject()) { 542 } else if (lit->IsTrue() || lit->IsJSObject()) {
547 if (true_label_ != fall_through_) __ b(true_label_); 543 if (true_label_ != fall_through_) __ B(true_label_);
548 } else if (lit->IsString()) { 544 } else if (lit->IsString()) {
549 if (String::cast(*lit)->length() == 0) { 545 if (String::cast(*lit)->length() == 0) {
550 if (false_label_ != fall_through_) __ b(false_label_); 546 if (false_label_ != fall_through_) __ B(false_label_);
551 } else { 547 } else {
552 if (true_label_ != fall_through_) __ b(true_label_); 548 if (true_label_ != fall_through_) __ B(true_label_);
553 } 549 }
554 } else if (lit->IsSmi()) { 550 } else if (lit->IsSmi()) {
555 if (Smi::cast(*lit)->value() == 0) { 551 if (Smi::cast(*lit)->value() == 0) {
556 if (false_label_ != fall_through_) __ b(false_label_); 552 if (false_label_ != fall_through_) __ B(false_label_);
557 } else { 553 } else {
558 if (true_label_ != fall_through_) __ b(true_label_); 554 if (true_label_ != fall_through_) __ B(true_label_);
559 } 555 }
560 } else { 556 } else {
561 // For simplicity we always test the accumulator register. 557 // For simplicity we always test the accumulator register.
562 __ mov(result_register(), Operand(lit)); 558 __ Mov(result_register(), Operand(lit));
563 codegen()->DoTest(this); 559 codegen()->DoTest(this);
564 } 560 }
565 } 561 }
566 562
567 563
568 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 564 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
569 Register reg) const { 565 Register reg) const {
570 ASSERT(count > 0); 566 ASSERT(count > 0);
571 __ Drop(count); 567 __ Drop(count);
572 } 568 }
573 569
574 570
575 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( 571 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
576 int count, 572 int count,
577 Register reg) const { 573 Register reg) const {
578 ASSERT(count > 0); 574 ASSERT(count > 0);
579 __ Drop(count); 575 __ Drop(count);
580 __ Move(result_register(), reg); 576 __ Move(result_register(), reg);
581 } 577 }
582 578
583 579
584 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 580 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
585 Register reg) const { 581 Register reg) const {
586 ASSERT(count > 0); 582 ASSERT(count > 0);
587 if (count > 1) __ Drop(count - 1); 583 if (count > 1) __ Drop(count - 1);
588 __ str(reg, MemOperand(sp, 0)); 584 __ Poke(reg, 0);
589 } 585 }
590 586
591 587
592 void FullCodeGenerator::TestContext::DropAndPlug(int count, 588 void FullCodeGenerator::TestContext::DropAndPlug(int count,
593 Register reg) const { 589 Register reg) const {
594 ASSERT(count > 0); 590 ASSERT(count > 0);
595 // For simplicity we always test the accumulator register. 591 // For simplicity we always test the accumulator register.
596 __ Drop(count); 592 __ Drop(count);
597 __ Move(result_register(), reg); 593 __ Mov(result_register(), reg);
598 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 594 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
599 codegen()->DoTest(this); 595 codegen()->DoTest(this);
600 } 596 }
601 597
602 598
603 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 599 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
604 Label* materialize_false) const { 600 Label* materialize_false) const {
605 ASSERT(materialize_true == materialize_false); 601 ASSERT(materialize_true == materialize_false);
606 __ bind(materialize_true); 602 __ Bind(materialize_true);
607 } 603 }
608 604
609 605
610 void FullCodeGenerator::AccumulatorValueContext::Plug( 606 void FullCodeGenerator::AccumulatorValueContext::Plug(
611 Label* materialize_true, 607 Label* materialize_true,
612 Label* materialize_false) const { 608 Label* materialize_false) const {
613 Label done; 609 Label done;
614 __ bind(materialize_true); 610 __ Bind(materialize_true);
615 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); 611 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
616 __ jmp(&done); 612 __ B(&done);
617 __ bind(materialize_false); 613 __ Bind(materialize_false);
618 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); 614 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
619 __ bind(&done); 615 __ Bind(&done);
620 } 616 }
621 617
622 618
623 void FullCodeGenerator::StackValueContext::Plug( 619 void FullCodeGenerator::StackValueContext::Plug(
624 Label* materialize_true, 620 Label* materialize_true,
625 Label* materialize_false) const { 621 Label* materialize_false) const {
626 Label done; 622 Label done;
627 __ bind(materialize_true); 623 __ Bind(materialize_true);
628 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 624 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
629 __ push(ip); 625 __ B(&done);
630 __ jmp(&done); 626 __ Bind(materialize_false);
631 __ bind(materialize_false); 627 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
632 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 628 __ Bind(&done);
633 __ push(ip); 629 __ Push(x10);
634 __ bind(&done);
635 } 630 }
636 631
637 632
638 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 633 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
639 Label* materialize_false) const { 634 Label* materialize_false) const {
640 ASSERT(materialize_true == true_label_); 635 ASSERT(materialize_true == true_label_);
641 ASSERT(materialize_false == false_label_); 636 ASSERT(materialize_false == false_label_);
642 } 637 }
643 638
644 639
645 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 640 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
646 } 641 }
647 642
648 643
649 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 644 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
650 Heap::RootListIndex value_root_index = 645 Heap::RootListIndex value_root_index =
651 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 646 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
652 __ LoadRoot(result_register(), value_root_index); 647 __ LoadRoot(result_register(), value_root_index);
653 } 648 }
654 649
655 650
656 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { 651 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
657 Heap::RootListIndex value_root_index = 652 Heap::RootListIndex value_root_index =
658 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 653 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
659 __ LoadRoot(ip, value_root_index); 654 __ LoadRoot(x10, value_root_index);
660 __ push(ip); 655 __ Push(x10);
661 } 656 }
662 657
663 658
664 void FullCodeGenerator::TestContext::Plug(bool flag) const { 659 void FullCodeGenerator::TestContext::Plug(bool flag) const {
665 codegen()->PrepareForBailoutBeforeSplit(condition(), 660 codegen()->PrepareForBailoutBeforeSplit(condition(),
666 true, 661 true,
667 true_label_, 662 true_label_,
668 false_label_); 663 false_label_);
669 if (flag) { 664 if (flag) {
670 if (true_label_ != fall_through_) __ b(true_label_); 665 if (true_label_ != fall_through_) {
666 __ B(true_label_);
667 }
671 } else { 668 } else {
672 if (false_label_ != fall_through_) __ b(false_label_); 669 if (false_label_ != fall_through_) {
670 __ B(false_label_);
671 }
673 } 672 }
674 } 673 }
675 674
676 675
677 void FullCodeGenerator::DoTest(Expression* condition, 676 void FullCodeGenerator::DoTest(Expression* condition,
678 Label* if_true, 677 Label* if_true,
679 Label* if_false, 678 Label* if_false,
680 Label* fall_through) { 679 Label* fall_through) {
681 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); 680 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
682 CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id()); 681 CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
683 __ tst(result_register(), result_register()); 682 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
684 Split(ne, if_true, if_false, fall_through);
685 } 683 }
686 684
687 685
686 // If (cond), branch to if_true.
687 // If (!cond), branch to if_false.
688 // fall_through is used as an optimization in cases where only one branch
689 // instruction is necessary.
688 void FullCodeGenerator::Split(Condition cond, 690 void FullCodeGenerator::Split(Condition cond,
689 Label* if_true, 691 Label* if_true,
690 Label* if_false, 692 Label* if_false,
691 Label* fall_through) { 693 Label* fall_through) {
692 if (if_false == fall_through) { 694 if (if_false == fall_through) {
693 __ b(cond, if_true); 695 __ B(cond, if_true);
694 } else if (if_true == fall_through) { 696 } else if (if_true == fall_through) {
695 __ b(NegateCondition(cond), if_false); 697 ASSERT(if_false != fall_through);
698 __ B(InvertCondition(cond), if_false);
696 } else { 699 } else {
697 __ b(cond, if_true); 700 __ B(cond, if_true);
698 __ b(if_false); 701 __ B(if_false);
699 } 702 }
700 } 703 }
701 704
702 705
703 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 706 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
704 ASSERT(var->IsStackAllocated());
705 // Offset is negative because higher indexes are at lower addresses. 707 // Offset is negative because higher indexes are at lower addresses.
706 int offset = -var->index() * kPointerSize; 708 int offset = -var->index() * kXRegSizeInBytes;
707 // Adjust by a (parameter or local) base offset. 709 // Adjust by a (parameter or local) base offset.
708 if (var->IsParameter()) { 710 if (var->IsParameter()) {
709 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; 711 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
710 } else { 712 } else {
711 offset += JavaScriptFrameConstants::kLocal0Offset; 713 offset += JavaScriptFrameConstants::kLocal0Offset;
712 } 714 }
713 return MemOperand(fp, offset); 715 return MemOperand(fp, offset);
714 } 716 }
715 717
716 718
717 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { 719 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
718 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 720 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
719 if (var->IsContextSlot()) { 721 if (var->IsContextSlot()) {
720 int context_chain_length = scope()->ContextChainLength(var->scope()); 722 int context_chain_length = scope()->ContextChainLength(var->scope());
721 __ LoadContext(scratch, context_chain_length); 723 __ LoadContext(scratch, context_chain_length);
722 return ContextOperand(scratch, var->index()); 724 return ContextMemOperand(scratch, var->index());
723 } else { 725 } else {
724 return StackOperand(var); 726 return StackOperand(var);
725 } 727 }
726 } 728 }
727 729
728 730
729 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 731 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
730 // Use destination as scratch. 732 // Use destination as scratch.
731 MemOperand location = VarOperand(var, dest); 733 MemOperand location = VarOperand(var, dest);
732 __ ldr(dest, location); 734 __ Ldr(dest, location);
733 } 735 }
734 736
735 737
736 void FullCodeGenerator::SetVar(Variable* var, 738 void FullCodeGenerator::SetVar(Variable* var,
737 Register src, 739 Register src,
738 Register scratch0, 740 Register scratch0,
739 Register scratch1) { 741 Register scratch1) {
740 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); 742 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
741 ASSERT(!scratch0.is(src)); 743 ASSERT(!AreAliased(src, scratch0, scratch1));
742 ASSERT(!scratch0.is(scratch1));
743 ASSERT(!scratch1.is(src));
744 MemOperand location = VarOperand(var, scratch0); 744 MemOperand location = VarOperand(var, scratch0);
745 __ str(src, location); 745 __ Str(src, location);
746 746
747 // Emit the write barrier code if the location is in the heap. 747 // Emit the write barrier code if the location is in the heap.
748 if (var->IsContextSlot()) { 748 if (var->IsContextSlot()) {
749 // scratch0 contains the correct context.
749 __ RecordWriteContextSlot(scratch0, 750 __ RecordWriteContextSlot(scratch0,
750 location.offset(), 751 location.offset(),
751 src, 752 src,
752 scratch1, 753 scratch1,
753 kLRHasBeenSaved, 754 kLRHasBeenSaved,
754 kDontSaveFPRegs); 755 kDontSaveFPRegs);
755 } 756 }
756 } 757 }
757 758
758 759
759 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, 760 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
760 bool should_normalize, 761 bool should_normalize,
761 Label* if_true, 762 Label* if_true,
762 Label* if_false) { 763 Label* if_false) {
763 // Only prepare for bailouts before splits if we're in a test 764 // Only prepare for bailouts before splits if we're in a test
764 // context. Otherwise, we let the Visit function deal with the 765 // context. Otherwise, we let the Visit function deal with the
765 // preparation to avoid preparing with the same AST id twice. 766 // preparation to avoid preparing with the same AST id twice.
766 if (!context()->IsTest() || !info_->IsOptimizable()) return; 767 if (!context()->IsTest() || !info_->IsOptimizable()) return;
767 768
769 // TODO(all): Investigate to see if there is something to work on here.
768 Label skip; 770 Label skip;
769 if (should_normalize) __ b(&skip); 771 if (should_normalize) {
772 __ B(&skip);
773 }
770 PrepareForBailout(expr, TOS_REG); 774 PrepareForBailout(expr, TOS_REG);
771 if (should_normalize) { 775 if (should_normalize) {
772 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 776 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
773 __ cmp(r0, ip);
774 Split(eq, if_true, if_false, NULL); 777 Split(eq, if_true, if_false, NULL);
775 __ bind(&skip); 778 __ Bind(&skip);
776 } 779 }
777 } 780 }
778 781
779 782
780 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 783 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
781 // The variable in the declaration always resides in the current function 784 // The variable in the declaration always resides in the current function
782 // context. 785 // context.
783 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); 786 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
784 if (generate_debug_code_) { 787 if (generate_debug_code_) {
785 // Check that we're not inside a with or catch context. 788 // Check that we're not inside a with or catch context.
786 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); 789 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
787 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); 790 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
788 __ Check(ne, "Declaration in with context."); 791 __ Check(ne, "Declaration in with context.");
789 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); 792 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
790 __ Check(ne, "Declaration in catch context."); 793 __ Check(ne, "Declaration in catch context.");
791 } 794 }
792 } 795 }
793 796
794 797
795 void FullCodeGenerator::VisitVariableDeclaration( 798 void FullCodeGenerator::VisitVariableDeclaration(
796 VariableDeclaration* declaration) { 799 VariableDeclaration* declaration) {
797 // If it was not possible to allocate the variable at compile time, we 800 // If it was not possible to allocate the variable at compile time, we
798 // need to "declare" it at runtime to make sure it actually exists in the 801 // need to "declare" it at runtime to make sure it actually exists in the
799 // local context. 802 // local context.
800 VariableProxy* proxy = declaration->proxy(); 803 VariableProxy* proxy = declaration->proxy();
801 VariableMode mode = declaration->mode(); 804 VariableMode mode = declaration->mode();
802 Variable* variable = proxy->var(); 805 Variable* variable = proxy->var();
803 bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET; 806 bool hole_init = (mode == CONST) || (mode == CONST_HARMONY) || (mode == LET);
807
804 switch (variable->location()) { 808 switch (variable->location()) {
805 case Variable::UNALLOCATED: 809 case Variable::UNALLOCATED:
806 globals_->Add(variable->name(), zone()); 810 globals_->Add(variable->name(), zone());
807 globals_->Add(variable->binding_needs_init() 811 globals_->Add(variable->binding_needs_init()
808 ? isolate()->factory()->the_hole_value() 812 ? isolate()->factory()->the_hole_value()
809 : isolate()->factory()->undefined_value(), 813 : isolate()->factory()->undefined_value(),
810 zone()); 814 zone());
811 break; 815 break;
812 816
813 case Variable::PARAMETER: 817 case Variable::PARAMETER:
814 case Variable::LOCAL: 818 case Variable::LOCAL:
815 if (hole_init) { 819 if (hole_init) {
816 Comment cmnt(masm_, "[ VariableDeclaration"); 820 Comment cmnt(masm_, "[ VariableDeclaration");
817 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 821 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
818 __ str(ip, StackOperand(variable)); 822 __ Str(x10, StackOperand(variable));
819 } 823 }
820 break; 824 break;
821 825
822 case Variable::CONTEXT: 826 case Variable::CONTEXT:
823 if (hole_init) { 827 if (hole_init) {
824 Comment cmnt(masm_, "[ VariableDeclaration"); 828 Comment cmnt(masm_, "[ VariableDeclaration");
825 EmitDebugCheckDeclarationContext(variable); 829 EmitDebugCheckDeclarationContext(variable);
826 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 830 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
827 __ str(ip, ContextOperand(cp, variable->index())); 831 __ Str(x10, ContextMemOperand(cp, variable->index()));
828 // No write barrier since the_hole_value is in old space. 832 // No write barrier since the_hole_value is in old space.
829 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 833 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
830 } 834 }
831 break; 835 break;
832 836
833 case Variable::LOOKUP: { 837 case Variable::LOOKUP: {
834 Comment cmnt(masm_, "[ VariableDeclaration"); 838 Comment cmnt(masm_, "[ VariableDeclaration");
835 __ mov(r2, Operand(variable->name())); 839 __ Mov(x2, Operand(variable->name()));
836 // Declaration nodes are always introduced in one of four modes. 840 // Declaration nodes are always introduced in one of four modes.
837 ASSERT(IsDeclaredVariableMode(mode)); 841 ASSERT(IsDeclaredVariableMode(mode));
838 PropertyAttributes attr = 842 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
839 IsImmutableVariableMode(mode) ? READ_ONLY : NONE; 843 : NONE;
840 __ mov(r1, Operand(Smi::FromInt(attr))); 844 __ Mov(x1, Operand(Smi::FromInt(attr)));
841 // Push initial value, if any. 845 // Push initial value, if any.
842 // Note: For variables we must not push an initial value (such as 846 // Note: For variables we must not push an initial value (such as
843 // 'undefined') because we may have a (legal) redeclaration and we 847 // 'undefined') because we may have a (legal) redeclaration and we
844 // must not destroy the current value. 848 // must not destroy the current value.
845 if (hole_init) { 849 if (hole_init) {
846 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); 850 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
847 __ Push(cp, r2, r1, r0); 851 __ Push(cp, x2, x1, x0);
848 } else { 852 } else {
849 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value. 853 // Pushing 0 (xzr) indicates no initial value.
850 __ Push(cp, r2, r1, r0); 854 __ Push(cp, x2, x1, xzr);
851 } 855 }
852 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 856 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
853 break; 857 break;
854 } 858 }
855 } 859 }
856 } 860 }
857 861
858 862
859 void FullCodeGenerator::VisitFunctionDeclaration( 863 void FullCodeGenerator::VisitFunctionDeclaration(
860 FunctionDeclaration* declaration) { 864 FunctionDeclaration* declaration) {
861 VariableProxy* proxy = declaration->proxy(); 865 VariableProxy* proxy = declaration->proxy();
862 Variable* variable = proxy->var(); 866 Variable* variable = proxy->var();
863 switch (variable->location()) { 867 switch (variable->location()) {
864 case Variable::UNALLOCATED: { 868 case Variable::UNALLOCATED: {
865 globals_->Add(variable->name(), zone()); 869 globals_->Add(variable->name(), zone());
866 Handle<SharedFunctionInfo> function = 870 Handle<SharedFunctionInfo> function =
867 Compiler::BuildFunctionInfo(declaration->fun(), script()); 871 Compiler::BuildFunctionInfo(declaration->fun(), script());
868 // Check for stack-overflow exception. 872 // Check for stack overflow exception.
869 if (function.is_null()) return SetStackOverflow(); 873 if (function.is_null()) return SetStackOverflow();
870 globals_->Add(function, zone()); 874 globals_->Add(function, zone());
871 break; 875 break;
872 } 876 }
873 877
874 case Variable::PARAMETER: 878 case Variable::PARAMETER:
875 case Variable::LOCAL: { 879 case Variable::LOCAL: {
876 Comment cmnt(masm_, "[ FunctionDeclaration"); 880 Comment cmnt(masm_, "[ Function Declaration");
877 VisitForAccumulatorValue(declaration->fun()); 881 VisitForAccumulatorValue(declaration->fun());
878 __ str(result_register(), StackOperand(variable)); 882 __ Str(result_register(), StackOperand(variable));
879 break; 883 break;
880 } 884 }
881 885
882 case Variable::CONTEXT: { 886 case Variable::CONTEXT: {
883 Comment cmnt(masm_, "[ FunctionDeclaration"); 887 Comment cmnt(masm_, "[ Function Declaration");
884 EmitDebugCheckDeclarationContext(variable); 888 EmitDebugCheckDeclarationContext(variable);
885 VisitForAccumulatorValue(declaration->fun()); 889 VisitForAccumulatorValue(declaration->fun());
886 __ str(result_register(), ContextOperand(cp, variable->index())); 890 __ Str(result_register(), ContextMemOperand(cp, variable->index()));
887 int offset = Context::SlotOffset(variable->index()); 891 int offset = Context::SlotOffset(variable->index());
888 // We know that we have written a function, which is not a smi. 892 // We know that we have written a function, which is not a smi.
889 __ RecordWriteContextSlot(cp, 893 __ RecordWriteContextSlot(cp,
890 offset, 894 offset,
891 result_register(), 895 result_register(),
892 r2, 896 x2,
893 kLRHasBeenSaved, 897 kLRHasBeenSaved,
894 kDontSaveFPRegs, 898 kDontSaveFPRegs,
895 EMIT_REMEMBERED_SET, 899 EMIT_REMEMBERED_SET,
896 OMIT_SMI_CHECK); 900 OMIT_SMI_CHECK);
897 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 901 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
898 break; 902 break;
899 } 903 }
900 904
901 case Variable::LOOKUP: { 905 case Variable::LOOKUP: {
902 Comment cmnt(masm_, "[ FunctionDeclaration"); 906 Comment cmnt(masm_, "[ Function Declaration");
903 __ mov(r2, Operand(variable->name())); 907 __ Mov(x2, Operand(variable->name()));
904 __ mov(r1, Operand(Smi::FromInt(NONE))); 908 __ Mov(x1, Operand(Smi::FromInt(NONE)));
905 __ Push(cp, r2, r1); 909 __ Push(cp, x2, x1);
906 // Push initial value for function declaration. 910 // Push initial value for function declaration.
907 VisitForStackValue(declaration->fun()); 911 VisitForStackValue(declaration->fun());
908 __ CallRuntime(Runtime::kDeclareContextSlot, 4); 912 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
909 break; 913 break;
910 } 914 }
911 } 915 }
912 } 916 }
913 917
914 918
915 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 919 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
916 Variable* variable = declaration->proxy()->var(); 920 Variable* variable = declaration->proxy()->var();
917 ASSERT(variable->location() == Variable::CONTEXT); 921 ASSERT(variable->location() == Variable::CONTEXT);
918 ASSERT(variable->interface()->IsFrozen()); 922 ASSERT(variable->interface()->IsFrozen());
919 923
920 Comment cmnt(masm_, "[ ModuleDeclaration"); 924 Comment cmnt(masm_, "[ ModuleDeclaration");
921 EmitDebugCheckDeclarationContext(variable); 925 EmitDebugCheckDeclarationContext(variable);
922 926
923 // Load instance object. 927 // Load instance object.
924 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope())); 928 __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope()));
925 __ ldr(r1, ContextOperand(r1, variable->interface()->Index())); 929 __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index()));
926 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX)); 930 __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX));
927 931
928 // Assign it. 932 // Assign it.
929 __ str(r1, ContextOperand(cp, variable->index())); 933 __ Str(x1, ContextMemOperand(cp, variable->index()));
930 // We know that we have written a module, which is not a smi. 934 // We know that we have written a module, which is not a smi.
931 __ RecordWriteContextSlot(cp, 935 __ RecordWriteContextSlot(cp,
932 Context::SlotOffset(variable->index()), 936 Context::SlotOffset(variable->index()),
933 r1, 937 x1,
934 r3, 938 x3,
935 kLRHasBeenSaved, 939 kLRHasBeenSaved,
936 kDontSaveFPRegs, 940 kDontSaveFPRegs,
937 EMIT_REMEMBERED_SET, 941 EMIT_REMEMBERED_SET,
938 OMIT_SMI_CHECK); 942 OMIT_SMI_CHECK);
939 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); 943 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
940 944
941 // Traverse into body. 945 // Traverse into body.
942 Visit(declaration->module()); 946 Visit(declaration->module());
943 } 947 }
944 948
945 949
946 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) { 950 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
947 VariableProxy* proxy = declaration->proxy(); 951 VariableProxy* proxy = declaration->proxy();
948 Variable* variable = proxy->var(); 952 Variable* variable = proxy->var();
949 switch (variable->location()) { 953 switch (variable->location()) {
950 case Variable::UNALLOCATED: 954 case Variable::UNALLOCATED:
951 // TODO(rossberg) 955 // TODO(rossberg)
(...skipping 14 matching lines...)
966 } 970 }
967 971
968 972
969 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { 973 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
970 // TODO(rossberg) 974 // TODO(rossberg)
971 } 975 }
972 976
973 977
974 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 978 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
975 // Call the runtime to declare the globals. 979 // Call the runtime to declare the globals.
976 // The context is the first argument. 980 __ Mov(x11, Operand(pairs));
977 __ mov(r1, Operand(pairs)); 981 Register flags = xzr;
978 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); 982 if (Smi::FromInt(DeclareGlobalsFlags())) {
979 __ Push(cp, r1, r0); 983 flags = x10;
984 __ Mov(flags, Operand(Smi::FromInt(DeclareGlobalsFlags())));
985 }
986 __ Push(cp, x11, flags);
980 __ CallRuntime(Runtime::kDeclareGlobals, 3); 987 __ CallRuntime(Runtime::kDeclareGlobals, 3);
981 // Return value is ignored. 988 // Return value is ignored.
982 } 989 }
983 990
984 991
985 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { 992 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
986 // Call the runtime to declare the modules. 993 // Call the runtime to declare the modules.
987 __ Push(descriptions); 994 __ Push(descriptions);
988 __ CallRuntime(Runtime::kDeclareModules, 1); 995 __ CallRuntime(Runtime::kDeclareModules, 1);
989 // Return value is ignored. 996 // Return value is ignored.
990 } 997 }
991 998
992 999
993 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 1000 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1001 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
994 Comment cmnt(masm_, "[ SwitchStatement"); 1002 Comment cmnt(masm_, "[ SwitchStatement");
995 Breakable nested_statement(this, stmt); 1003 Breakable nested_statement(this, stmt);
996 SetStatementPosition(stmt); 1004 SetStatementPosition(stmt);
997 1005
998 // Keep the switch value on the stack until a case matches. 1006 // Keep the switch value on the stack until a case matches.
999 VisitForStackValue(stmt->tag()); 1007 VisitForStackValue(stmt->tag());
1000 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 1008 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1001 1009
1002 ZoneList<CaseClause*>* clauses = stmt->cases(); 1010 ZoneList<CaseClause*>* clauses = stmt->cases();
1003 CaseClause* default_clause = NULL; // Can occur anywhere in the list. 1011 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1004 1012
1005 Label next_test; // Recycled for each test. 1013 Label next_test; // Recycled for each test.
1006 // Compile all the tests with branches to their bodies. 1014 // Compile all the tests with branches to their bodies.
1007 for (int i = 0; i < clauses->length(); i++) { 1015 for (int i = 0; i < clauses->length(); i++) {
1008 CaseClause* clause = clauses->at(i); 1016 CaseClause* clause = clauses->at(i);
1009 clause->body_target()->Unuse(); 1017 clause->body_target()->Unuse();
1010 1018
1011 // The default is not a test, but remember it as the final fall-through. 1019 // The default is not a test, but remember it as the final fall-through.
1012 if (clause->is_default()) { 1020 if (clause->is_default()) {
1013 default_clause = clause; 1021 default_clause = clause;
1014 continue; 1022 continue;
1015 } 1023 }
1016 1024
1017 Comment cmnt(masm_, "[ Case comparison"); 1025 Comment cmnt(masm_, "[ Case comparison");
1018 __ bind(&next_test); 1026 __ Bind(&next_test);
1019 next_test.Unuse(); 1027 next_test.Unuse();
1020 1028
1021 // Compile the label expression. 1029 // Compile the label expression.
1022 VisitForAccumulatorValue(clause->label()); 1030 VisitForAccumulatorValue(clause->label());
1023 1031
1024 // Perform the comparison as if via '==='. 1032 // Perform the comparison as if via '==='.
1025 __ ldr(r1, MemOperand(sp, 0)); // Switch value. 1033 __ Peek(x1, 0); // Switch value.
1026 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 1034
1027 JumpPatchSite patch_site(masm_); 1035 JumpPatchSite patch_site(masm_);
1028 if (inline_smi_code) { 1036 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
1029 Label slow_case; 1037 Label slow_case;
1030 __ orr(r2, r1, r0); 1038 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
1031 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 1039 __ Cmp(x1, x0);
1032 1040 __ B(ne, &next_test);
1033 __ cmp(r1, r0);
1034 __ b(ne, &next_test);
1035 __ Drop(1); // Switch value is no longer needed. 1041 __ Drop(1); // Switch value is no longer needed.
1036 __ b(clause->body_target()); 1042 __ B(clause->body_target());
1037 __ bind(&slow_case); 1043 __ Bind(&slow_case);
1038 } 1044 }
1039 1045
1040 // Record position before stub call for type feedback. 1046 // Record position before stub call for type feedback.
1041 SetSourcePosition(clause->position()); 1047 SetSourcePosition(clause->position());
1042 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT); 1048 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
1043 CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId()); 1049 CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1044 patch_site.EmitPatchInfo(); 1050 patch_site.EmitPatchInfo();
1045 1051
1046 __ cmp(r0, Operand::Zero()); 1052 __ Cbnz(x0, &next_test);
1047 __ b(ne, &next_test);
1048 __ Drop(1); // Switch value is no longer needed. 1053 __ Drop(1); // Switch value is no longer needed.
1049 __ b(clause->body_target()); 1054 __ B(clause->body_target());
1050 } 1055 }
1051 1056
1052 // Discard the test value and jump to the default if present, otherwise to 1057 // Discard the test value and jump to the default if present, otherwise to
1053 // the end of the statement. 1058 // the end of the statement.
1054 __ bind(&next_test); 1059 __ Bind(&next_test);
1055 __ Drop(1); // Switch value is no longer needed. 1060 __ Drop(1); // Switch value is no longer needed.
1056 if (default_clause == NULL) { 1061 if (default_clause == NULL) {
1057 __ b(nested_statement.break_label()); 1062 __ B(nested_statement.break_label());
1058 } else { 1063 } else {
1059 __ b(default_clause->body_target()); 1064 __ B(default_clause->body_target());
1060 } 1065 }
1061 1066
1062 // Compile all the case bodies. 1067 // Compile all the case bodies.
1063 for (int i = 0; i < clauses->length(); i++) { 1068 for (int i = 0; i < clauses->length(); i++) {
1064 Comment cmnt(masm_, "[ Case body"); 1069 Comment cmnt(masm_, "[ Case body");
1065 CaseClause* clause = clauses->at(i); 1070 CaseClause* clause = clauses->at(i);
1066 __ bind(clause->body_target()); 1071 __ Bind(clause->body_target());
1067 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); 1072 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1068 VisitStatements(clause->statements()); 1073 VisitStatements(clause->statements());
1069 } 1074 }
1070 1075
1071 __ bind(nested_statement.break_label()); 1076 __ Bind(nested_statement.break_label());
1072 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1077 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1073 } 1078 }
1074 1079
1075 1080
1076 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { 1081 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1082 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1077 Comment cmnt(masm_, "[ ForInStatement"); 1083 Comment cmnt(masm_, "[ ForInStatement");
1084 // TODO(all): This visitor probably needs better comments and a revisit.
1078 SetStatementPosition(stmt); 1085 SetStatementPosition(stmt);
1079 1086
1080 Label loop, exit; 1087 Label loop, exit;
1081 ForIn loop_statement(this, stmt); 1088 ForIn loop_statement(this, stmt);
1082 increment_loop_depth(); 1089 increment_loop_depth();
1083 1090
1084 // Get the object to enumerate over. If the object is null or undefined, skip 1091 // Get the object to enumerate over. If the object is null or undefined, skip
1085 // over the loop. See ECMA-262 version 5, section 12.6.4. 1092 // over the loop. See ECMA-262 version 5, section 12.6.4.
1086 VisitForAccumulatorValue(stmt->enumerable()); 1093 VisitForAccumulatorValue(stmt->enumerable());
1087 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1094 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1088 __ cmp(r0, ip); 1095 Register null_value = x15;
1089 __ b(eq, &exit);
1090 Register null_value = r5;
1091 __ LoadRoot(null_value, Heap::kNullValueRootIndex); 1096 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1092 __ cmp(r0, null_value); 1097 __ Cmp(x0, null_value);
1093 __ b(eq, &exit); 1098 __ B(eq, &exit);
1094 1099
1095 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); 1100 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1096 1101
1097 // Convert the object to a JS object. 1102 // Convert the object to a JS object.
1098 Label convert, done_convert; 1103 Label convert, done_convert;
1099 __ JumpIfSmi(r0, &convert); 1104 __ JumpIfSmi(x0, &convert);
1100 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1105 __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
1101 __ b(ge, &done_convert); 1106 __ Bind(&convert);
1102 __ bind(&convert); 1107 __ Push(x0);
1103 __ push(r0);
1104 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1108 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1105 __ bind(&done_convert); 1109 __ Bind(&done_convert);
1106 __ push(r0); 1110 __ Push(x0);
1107 1111
1108 // Check for proxies. 1112 // Check for proxies.
1109 Label call_runtime; 1113 Label call_runtime;
1110 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1114 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1111 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE); 1115 __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
1112 __ b(le, &call_runtime);
1113 1116
1114 // Check cache validity in generated code. This is a fast case for 1117 // Check cache validity in generated code. This is a fast case for
1115 // the JSObject::IsSimpleEnum cache validity checks. If we cannot 1118 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1116 // guarantee cache validity, call the runtime system to check cache 1119 // guarantee cache validity, call the runtime system to check cache
1117 // validity or get the property names in a fixed array. 1120 // validity or get the property names in a fixed array.
1118 __ CheckEnumCache(null_value, &call_runtime); 1121 __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1119 1122
1120 // The enum cache is valid. Load the map of the object being 1123 // The enum cache is valid. Load the map of the object being
1121 // iterated over and use the cache for the iteration. 1124 // iterated over and use the cache for the iteration.
1122 Label use_cache; 1125 Label use_cache;
1123 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 1126 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1124 __ b(&use_cache); 1127 __ B(&use_cache);
1125 1128
1126 // Get the set of properties to enumerate. 1129 // Get the set of properties to enumerate.
1127 __ bind(&call_runtime); 1130 __ Bind(&call_runtime);
1128 __ push(r0); // Duplicate the enumerable object on the stack. 1131 __ Push(x0); // Duplicate the enumerable object on the stack.
1129 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); 1132 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1130 1133
1131 // If we got a map from the runtime call, we can do a fast 1134 // If we got a map from the runtime call, we can do a fast
1132 // modification check. Otherwise, we got a fixed array, and we have 1135 // modification check. Otherwise, we got a fixed array, and we have
1133 // to do a slow check. 1136 // to do a slow check.
1134 Label fixed_array; 1137 Label fixed_array, no_descriptors;
1135 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 1138 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1136 __ LoadRoot(ip, Heap::kMetaMapRootIndex); 1139 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1137 __ cmp(r2, ip);
1138 __ b(ne, &fixed_array);
1139 1140
1140 // We got a map in register r0. Get the enumeration cache from it. 1141 // We got a map in register x0. Get the enumeration cache from it.
1141 Label no_descriptors; 1142 __ Bind(&use_cache);
1142 __ bind(&use_cache);
1143 1143
1144 __ EnumLength(r1, r0); 1144 __ EnumLengthUntagged(x1, x0);
1145 __ cmp(r1, Operand(Smi::FromInt(0))); 1145 __ Cbz(x1, &no_descriptors);
1146 __ b(eq, &no_descriptors);
1147 1146
1148 __ LoadInstanceDescriptors(r0, r2); 1147 __ LoadInstanceDescriptors(x0, x2);
1149 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset)); 1148 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1150 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset)); 1149 __ Ldr(x2,
1150 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1151 1151
1152 // Set up the four remaining stack slots. 1152 // Set up the four remaining stack slots.
1153 __ push(r0); // Map. 1153 __ Push(x0); // Map.
1154 __ mov(r0, Operand(Smi::FromInt(0))); 1154 __ Mov(x0, Operand(Smi::FromInt(0)));
1155 // Push enumeration cache, enumeration cache length (as smi) and zero. 1155 // Push enumeration cache, enumeration cache length (as smi) and zero.
1156 __ Push(r2, r1, r0); 1156 __ SmiTag(x1);
1157 __ jmp(&loop); 1157 __ Push(x2, x1, x0);
1158 __ B(&loop);
1158 1159
1159 __ bind(&no_descriptors); 1160 __ Bind(&no_descriptors);
1160 __ Drop(1); 1161 __ Drop(1);
1161 __ jmp(&exit); 1162 __ B(&exit);
1162 1163
1163 // We got a fixed array in register r0. Iterate through that. 1164 // We got a fixed array in register x0. Iterate through that.
1164 Label non_proxy; 1165 Label non_proxy;
1165 __ bind(&fixed_array); 1166 __ Bind(&fixed_array);
1166 1167
1167 Handle<JSGlobalPropertyCell> cell = 1168 Handle<JSGlobalPropertyCell> cell =
1168 isolate()->factory()->NewJSGlobalPropertyCell( 1169 isolate()->factory()->NewJSGlobalPropertyCell(
1169 Handle<Object>( 1170 Handle<Object>(
1170 Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker), 1171 Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
1171 isolate())); 1172 isolate()));
1172 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); 1173 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
1173 __ LoadHeapObject(r1, cell); 1174 __ LoadHeapObject(x1, cell);
1174 __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker))); 1175 __ Mov(x10, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
1175 __ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset)); 1176 __ Str(x10, FieldMemOperand(x1, JSGlobalPropertyCell::kValueOffset));
1176 1177
1177 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check 1178 __ Mov(x1, Operand(Smi::FromInt(1))); // Smi indicates slow check.
1178 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object 1179 __ Peek(x10, 0); // Get enumerated object.
1179 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1180 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1180 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); 1181 // TODO(all): similar check was done already. Can we avoid it here?
1181 __ b(gt, &non_proxy); 1182 __ JumpIfObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE, &non_proxy, gt);
1182 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy 1183 // TODO(all): use csel here
1183 __ bind(&non_proxy); 1184 __ Mov(x1, Operand(Smi::FromInt(0))); // Zero indicates proxy.
1184 __ Push(r1, r0); // Smi and array 1185 __ Bind(&non_proxy);
1185 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset)); 1186 __ Push(x1, x0); // Smi and array
1186 __ mov(r0, Operand(Smi::FromInt(0))); 1187 __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset));
1187 __ Push(r1, r0); // Fixed array length (as smi) and initial index. 1188 __ Push(x1, xzr); // Fixed array length (as smi) and initial index.
1188 1189
1189 // Generate code for doing the condition check. 1190 // Generate code for doing the condition check.
1190 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1191 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1191 __ bind(&loop); 1192 __ Bind(&loop);
1192 // Load the current count to r0, load the length to r1. 1193 // Load the current count to x0, load the length to x1.
1193 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize)); 1194 // TODO(jbramley): Consider making something like PeekPair.
1194 __ cmp(r0, r1); // Compare to the array length. 1195 __ Ldp(x0, x1, MemOperand(jssp));
1195 __ b(hs, loop_statement.break_label()); 1196 __ Cmp(x0, x1); // Compare to the array length.
1197 __ B(hs, loop_statement.break_label());
1196 1198
1197 // Get the current entry of the array into register r3. 1199 // Get the current entry of the array into register x3.
1198 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); 1200 __ Peek(x10, 2 * kXRegSizeInBytes);
1199 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 1201 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1200 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0)); 1202 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1201 1203
1202 // Get the expected map from the stack or a smi in the 1204 // Get the expected map from the stack or a smi in the
1203 // permanent slow case into register r2. 1205 // permanent slow case into register x2.
1204 __ ldr(r2, MemOperand(sp, 3 * kPointerSize)); 1206 __ Peek(x2, 3 * kXRegSizeInBytes);
1205 1207
1206 // Check if the expected map still matches that of the enumerable. 1208 // Check if the expected map still matches that of the enumerable.
1207 // If not, we may have to filter the key. 1209 // If not, we may have to filter the key.
1208 Label update_each; 1210 Label update_each;
1209 __ ldr(r1, MemOperand(sp, 4 * kPointerSize)); 1211 __ Peek(x1, 4 * kXRegSizeInBytes);
1210 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset)); 1212 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1211 __ cmp(r4, Operand(r2)); 1213 __ Cmp(x11, x2);
1212 __ b(eq, &update_each); 1214 __ B(eq, &update_each);
1213 1215
1214 // For proxies, no filtering is done. 1216 // For proxies, no filtering is done.
1215 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. 1217 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1216 __ cmp(r2, Operand(Smi::FromInt(0))); 1218 STATIC_ASSERT(kSmiTag == 0);
1217 __ b(eq, &update_each); 1219 __ Cbz(x2, &update_each);
1218 1220
1219 // Convert the entry to a string or (smi) 0 if it isn't a property 1221 // Convert the entry to a string or (smi) 0 if it isn't a property
1220 // any more. If the property has been removed while iterating, we 1222 // any more. If the property has been removed while iterating, we
1221 // just skip it. 1223 // just skip it.
1222 __ push(r1); // Enumerable. 1224 __ Push(x1, x3);
1223 __ push(r3); // Current entry.
1224 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 1225 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1225 __ mov(r3, Operand(r0), SetCC); 1226 __ Mov(x3, x0);
1226 __ b(eq, loop_statement.continue_label()); 1227 __ Cbz(x0, loop_statement.continue_label());
1227 1228
1228 // Update the 'each' property or variable from the possibly filtered 1229 // Update the 'each' property or variable from the possibly filtered
1229 // entry in register r3. 1230 // entry in register x3.
1230 __ bind(&update_each); 1231 __ Bind(&update_each);
1231 __ mov(result_register(), r3); 1232 __ Mov(result_register(), x3);
1232 // Perform the assignment as if via '='. 1233 // Perform the assignment as if via '='.
1233 { EffectContext context(this); 1234 { EffectContext context(this);
1234 EmitAssignment(stmt->each()); 1235 EmitAssignment(stmt->each());
1235 } 1236 }
1236 1237
1237 // Generate code for the body of the loop. 1238 // Generate code for the body of the loop.
1238 Visit(stmt->body()); 1239 Visit(stmt->body());
1239 1240
1240 // Generate code for the going to the next element by incrementing 1241 // Generate code for going to the next element by incrementing
1241 // the index (smi) stored on top of the stack. 1242 // the index (smi) stored on top of the stack.
1242 __ bind(loop_statement.continue_label()); 1243 __ Bind(loop_statement.continue_label());
1243 __ pop(r0); 1244 // TODO(all): We could use a callee saved register to avoid popping.
1244 __ add(r0, r0, Operand(Smi::FromInt(1))); 1245 __ Pop(x0);
1245 __ push(r0); 1246 __ Add(x0, x0, Operand(Smi::FromInt(1)));
1247 __ Push(x0);
1246 1248
1247 EmitBackEdgeBookkeeping(stmt, &loop); 1249 EmitBackEdgeBookkeeping(stmt, &loop);
1248 __ b(&loop); 1250 __ B(&loop);
1249 1251
1250 // Remove the pointers stored on the stack. 1252 // Remove the pointers stored on the stack.
1251 __ bind(loop_statement.break_label()); 1253 __ Bind(loop_statement.break_label());
1252 __ Drop(5); 1254 __ Drop(5);
1253 1255
1254 // Exit and decrement the loop depth. 1256 // Exit and decrement the loop depth.
1255 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1257 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1256 __ bind(&exit); 1258 __ Bind(&exit);
1257 decrement_loop_depth(); 1259 decrement_loop_depth();
1258 } 1260 }
1259 1261
1260 1262
1261 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) { 1263 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1262 Comment cmnt(masm_, "[ ForOfStatement"); 1264 UNIMPLEMENTED();
1263 SetStatementPosition(stmt);
1264
1265 Iteration loop_statement(this, stmt);
1266 increment_loop_depth();
1267
1268 // var iterator = iterable[@@iterator]()
1269 VisitForAccumulatorValue(stmt->assign_iterator());
1270
1271 // As with for-in, skip the loop if the iterator is null or undefined.
1272 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
1273 __ b(eq, loop_statement.break_label());
1274 __ CompareRoot(r0, Heap::kNullValueRootIndex);
1275 __ b(eq, loop_statement.break_label());
1276
1277 // Convert the iterator to a JS object.
1278 Label convert, done_convert;
1279 __ JumpIfSmi(r0, &convert);
1280 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1281 __ b(ge, &done_convert);
1282 __ bind(&convert);
1283 __ push(r0);
1284 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1285 __ bind(&done_convert);
1286 __ push(r0);
1287
1288 // Loop entry.
1289 __ bind(loop_statement.continue_label());
1290
1291 // result = iterator.next()
1292 VisitForEffect(stmt->next_result());
1293
1294 // if (result.done) break;
1295 Label result_not_done;
1296 VisitForControl(stmt->result_done(),
1297 loop_statement.break_label(),
1298 &result_not_done,
1299 &result_not_done);
1300 __ bind(&result_not_done);
1301
1302 // each = result.value
1303 VisitForEffect(stmt->assign_each());
1304
1305 // Generate code for the body of the loop.
1306 Visit(stmt->body());
1307
1308 // Check stack before looping.
1309 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1310 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1311 __ jmp(loop_statement.continue_label());
1312
1313 // Exit and decrement the loop depth.
1314 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1315 __ bind(loop_statement.break_label());
1316 decrement_loop_depth();
1317 } 1265 }
1318 1266
1319 1267
1320 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, 1268 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1321 bool pretenure) { 1269 bool pretenure) {
1322 // Use the fast case closure allocation code that allocates in new 1270 // Use the fast case closure allocation code that allocates in new space for
1323 // space for nested functions that don't need literals cloning. If 1271 // nested functions that don't need literals cloning. If we're running with
1324 // we're running with the --always-opt or the --prepare-always-opt 1272 // the --always-opt or the --prepare-always-opt flag, we need to use the
1325 // flag, we need to use the runtime function so that the new function 1273 // runtime function so that the new function we are creating here gets a
1326 // we are creating here gets a chance to have its code optimized and 1274 // chance to have its code optimized and doesn't just get a copy of the
1327 // doesn't just get a copy of the existing unoptimized code. 1275 // existing unoptimized code.
1328 if (!FLAG_always_opt && 1276 if (!FLAG_always_opt &&
1329 !FLAG_prepare_always_opt && 1277 !FLAG_prepare_always_opt &&
1330 !pretenure && 1278 !pretenure &&
1331 scope()->is_function_scope() && 1279 scope()->is_function_scope() &&
1332 info->num_literals() == 0) { 1280 info->num_literals() == 0) {
1333 FastNewClosureStub stub(info->language_mode(), info->is_generator()); 1281 FastNewClosureStub stub(info->language_mode(), info->is_generator());
1334 __ mov(r0, Operand(info)); 1282 __ Mov(x11, Operand(info));
1335 __ push(r0); 1283 __ Push(x11);
1336 __ CallStub(&stub); 1284 __ CallStub(&stub);
1337 } else { 1285 } else {
1338 __ mov(r0, Operand(info)); 1286 __ Mov(x11, Operand(info));
1339 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex 1287 __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
1340 : Heap::kFalseValueRootIndex); 1288 : Heap::kFalseValueRootIndex);
1341 __ Push(cp, r0, r1); 1289 __ Push(cp, x11, x10);
1342 __ CallRuntime(Runtime::kNewClosure, 3); 1290 __ CallRuntime(Runtime::kNewClosure, 3);
1343 } 1291 }
1344 context()->Plug(r0); 1292 context()->Plug(x0);
1345 } 1293 }
1346 1294
1347 1295
1348 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1296 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1349 Comment cmnt(masm_, "[ VariableProxy"); 1297 Comment cmnt(masm_, "[ VariableProxy");
1350 EmitVariableLoad(expr); 1298 EmitVariableLoad(expr);
1351 } 1299 }
1352 1300
1353 1301
1354 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, 1302 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1355 TypeofState typeof_state, 1303 TypeofState typeof_state,
1356 Label* slow) { 1304 Label* slow) {
1357 Register current = cp; 1305 Register current = cp;
1358 Register next = r1; 1306 Register next = x10;
1359 Register temp = r2; 1307 Register temp = x11;
1360 1308
1361 Scope* s = scope(); 1309 Scope* s = scope();
1362 while (s != NULL) { 1310 while (s != NULL) {
1363 if (s->num_heap_slots() > 0) { 1311 if (s->num_heap_slots() > 0) {
1364 if (s->calls_non_strict_eval()) { 1312 if (s->calls_non_strict_eval()) {
1365 // Check that extension is NULL. 1313 // Check that extension is NULL.
1366 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); 1314 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1367 __ tst(temp, temp); 1315 __ Cbnz(temp, slow);
1368 __ b(ne, slow);
1369 } 1316 }
1370 // Load next context in chain. 1317 // Load next context in chain.
1371 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX)); 1318 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1372 // Walk the rest of the chain without clobbering cp. 1319 // Walk the rest of the chain without clobbering cp.
1373 current = next; 1320 current = next;
1374 } 1321 }
1375 // If no outer scope calls eval, we do not need to check more 1322 // If no outer scope calls eval, we do not need to check more
1376 // context extensions. 1323 // context extensions.
1377 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break; 1324 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1378 s = s->outer_scope(); 1325 s = s->outer_scope();
1379 } 1326 }
1380 1327
1381 if (s->is_eval_scope()) { 1328 if (s->is_eval_scope()) {
1382 Label loop, fast; 1329 Label loop, fast;
1383 if (!current.is(next)) { 1330 __ Mov(next, current);
1384 __ Move(next, current); 1331
1385 } 1332 __ Bind(&loop);
1386 __ bind(&loop);
1387 // Terminate at native context. 1333 // Terminate at native context.
1388 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); 1334 __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1389 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); 1335 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1390 __ cmp(temp, ip);
1391 __ b(eq, &fast);
1392 // Check that extension is NULL. 1336 // Check that extension is NULL.
1393 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); 1337 __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1394 __ tst(temp, temp); 1338 __ Cbnz(temp, slow);
1395 __ b(ne, slow);
1396 // Load next context in chain. 1339 // Load next context in chain.
1397 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); 1340 __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1398 __ b(&loop); 1341 __ B(&loop);
1399 __ bind(&fast); 1342 __ Bind(&fast);
1400 } 1343 }
1401 1344
1402 __ ldr(r0, GlobalObjectOperand()); 1345 __ Ldr(x0, GlobalObjectMemOperand());
1403 __ mov(r2, Operand(var->name())); 1346 __ Mov(x2, Operand(var->name()));
1404 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) 1347 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1405 ? RelocInfo::CODE_TARGET 1348 ? RelocInfo::CODE_TARGET
1406 : RelocInfo::CODE_TARGET_CONTEXT; 1349 : RelocInfo::CODE_TARGET_CONTEXT;
1407 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1350 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1408 CallIC(ic, mode); 1351 CallIC(ic, mode);
1409 } 1352 }
1410 1353
1411 1354
1412 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1355 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1413 Label* slow) { 1356 Label* slow) {
1414 ASSERT(var->IsContextSlot()); 1357 ASSERT(var->IsContextSlot());
1415 Register context = cp; 1358 Register context = cp;
1416 Register next = r3; 1359 Register next = x10;
1417 Register temp = r4; 1360 Register temp = x11;
1418 1361
1419 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1362 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1420 if (s->num_heap_slots() > 0) { 1363 if (s->num_heap_slots() > 0) {
1421 if (s->calls_non_strict_eval()) { 1364 if (s->calls_non_strict_eval()) {
1422 // Check that extension is NULL. 1365 // Check that extension is NULL.
1423 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1366 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1424 __ tst(temp, temp); 1367 __ Cbnz(temp, slow);
1425 __ b(ne, slow);
1426 } 1368 }
1427 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); 1369 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1428 // Walk the rest of the chain without clobbering cp. 1370 // Walk the rest of the chain without clobbering cp.
1429 context = next; 1371 context = next;
1430 } 1372 }
1431 } 1373 }
1432 // Check that last extension is NULL. 1374 // Check that last extension is NULL.
1433 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1375 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1434 __ tst(temp, temp); 1376 __ Cbnz(temp, slow);
1435 __ b(ne, slow);
1436 1377
1437 // This function is used only for loads, not stores, so it's safe to 1378 // This function is used only for loads, not stores, so it's safe to
1438 // return a cp-based operand (the write barrier cannot be allowed to 1379 // return a cp-based operand (the write barrier cannot be allowed to
1439 // destroy the cp register). 1380 // destroy the cp register).
1440 return ContextOperand(context, var->index()); 1381 return ContextMemOperand(context, var->index());
1441 } 1382 }
1442 1383
1443 1384
1444 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, 1385 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1445 TypeofState typeof_state, 1386 TypeofState typeof_state,
1446 Label* slow, 1387 Label* slow,
1447 Label* done) { 1388 Label* done) {
1448 // Generate fast-case code for variables that might be shadowed by 1389 // Generate fast-case code for variables that might be shadowed by
1449 // eval-introduced variables. Eval is used a lot without 1390 // eval-introduced variables. Eval is used a lot without
1450 // introducing variables. In those cases, we do not want to 1391 // introducing variables. In those cases, we do not want to
1451 // perform a runtime call for all variables in the scope 1392 // perform a runtime call for all variables in the scope
1452 // containing the eval. 1393 // containing the eval.
1453 if (var->mode() == DYNAMIC_GLOBAL) { 1394 if (var->mode() == DYNAMIC_GLOBAL) {
1454 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); 1395 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1455 __ jmp(done); 1396 __ B(done);
1456 } else if (var->mode() == DYNAMIC_LOCAL) { 1397 } else if (var->mode() == DYNAMIC_LOCAL) {
1457 Variable* local = var->local_if_not_shadowed(); 1398 Variable* local = var->local_if_not_shadowed();
1458 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); 1399 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1459 if (local->mode() == LET || 1400 if (local->mode() == LET ||
1460 local->mode() == CONST || 1401 local->mode() == CONST ||
1461 local->mode() == CONST_HARMONY) { 1402 local->mode() == CONST_HARMONY) {
1462 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1403 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1463 if (local->mode() == CONST) { 1404 if (local->mode() == CONST) {
1464 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1405 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1465 } else { // LET || CONST_HARMONY 1406 } else { // LET || CONST_HARMONY
1466 __ b(ne, done); 1407 __ Mov(x0, Operand(var->name()));
1467 __ mov(r0, Operand(var->name())); 1408 __ Push(x0);
1468 __ push(r0);
1469 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1409 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1470 } 1410 }
1471 } 1411 }
1472 __ jmp(done); 1412 __ B(done);
1473 } 1413 }
1474 } 1414 }
1475 1415
1476 1416
1477 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1417 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1478 // Record position before possible IC call. 1418 // Record position before possible IC call.
1479 SetSourcePosition(proxy->position()); 1419 SetSourcePosition(proxy->position());
1480 Variable* var = proxy->var(); 1420 Variable* var = proxy->var();
1481 1421
1482 // Three cases: global variables, lookup variables, and all other types of 1422 // Three cases: global variables, lookup variables, and all other types of
1483 // variables. 1423 // variables.
1484 switch (var->location()) { 1424 switch (var->location()) {
1485 case Variable::UNALLOCATED: { 1425 case Variable::UNALLOCATED: {
1486 Comment cmnt(masm_, "Global variable"); 1426 Comment cmnt(masm_, "Global variable");
1487 // Use inline caching. Variable name is passed in r2 and the global 1427 // Use inline caching. Variable name is passed in x2 and the global
1488 // object (receiver) in r0. 1428 // object (receiver) in x0.
1489 __ ldr(r0, GlobalObjectOperand()); 1429 __ Ldr(x0, GlobalObjectMemOperand());
1490 __ mov(r2, Operand(var->name())); 1430 __ Mov(x2, Operand(var->name()));
1491 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1431 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1492 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT); 1432 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1493 context()->Plug(r0); 1433 context()->Plug(x0);
1494 break; 1434 break;
1495 } 1435 }
1496 1436
1497 case Variable::PARAMETER: 1437 case Variable::PARAMETER:
1498 case Variable::LOCAL: 1438 case Variable::LOCAL:
1499 case Variable::CONTEXT: { 1439 case Variable::CONTEXT: {
1500 Comment cmnt(masm_, var->IsContextSlot() 1440 Comment cmnt(masm_, var->IsContextSlot()
1501 ? "Context variable" 1441 ? "Context variable"
1502 : "Stack variable"); 1442 : "Stack variable");
1503 if (var->binding_needs_init()) { 1443 if (var->binding_needs_init()) {
(...skipping 26 matching lines...)
1530 } else { 1470 } else {
1531 // Check that we always have valid source position. 1471 // Check that we always have valid source position.
1532 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); 1472 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1533 ASSERT(proxy->position() != RelocInfo::kNoPosition); 1473 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1534 skip_init_check = var->mode() != CONST && 1474 skip_init_check = var->mode() != CONST &&
1535 var->initializer_position() < proxy->position(); 1475 var->initializer_position() < proxy->position();
1536 } 1476 }
1537 1477
1538 if (!skip_init_check) { 1478 if (!skip_init_check) {
1539 // Let and const need a read barrier. 1479 // Let and const need a read barrier.
1540 GetVar(r0, var); 1480 GetVar(x0, var);
1541 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1481 Label done;
1482 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1542 if (var->mode() == LET || var->mode() == CONST_HARMONY) { 1483 if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1543 // Throw a reference error when using an uninitialized let/const 1484 // Throw a reference error when using an uninitialized let/const
1544 // binding in harmony mode. 1485 // binding in harmony mode.
1545 Label done; 1486 __ Mov(x0, Operand(var->name()));
1546 __ b(ne, &done); 1487 __ Push(x0);
1547 __ mov(r0, Operand(var->name()));
1548 __ push(r0);
1549 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1488 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1550 __ bind(&done); 1489 __ Bind(&done);
1551 } else { 1490 } else {
1552 // Uninitialized const bindings outside of harmony mode are unholed. 1491 // Uninitialized const bindings outside of harmony mode are unholed.
1553 ASSERT(var->mode() == CONST); 1492 ASSERT(var->mode() == CONST);
1554 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1493 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1494 __ Bind(&done);
1555 } 1495 }
1556 context()->Plug(r0); 1496 context()->Plug(x0);
1557 break; 1497 break;
1558 } 1498 }
1559 } 1499 }
1560 context()->Plug(var); 1500 context()->Plug(var);
1561 break; 1501 break;
1562 } 1502 }
1563 1503
1564 case Variable::LOOKUP: { 1504 case Variable::LOOKUP: {
1565 Label done, slow; 1505 Label done, slow;
1566 // Generate code for loading from variables potentially shadowed 1506 // Generate code for loading from variables potentially shadowed by
1567 // by eval-introduced variables. 1507 // eval-introduced variables.
1568 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); 1508 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1569 __ bind(&slow); 1509 __ Bind(&slow);
1570 Comment cmnt(masm_, "Lookup variable"); 1510 Comment cmnt(masm_, "Lookup variable");
1571 __ mov(r1, Operand(var->name())); 1511 __ Mov(x1, Operand(var->name()));
1572 __ Push(cp, r1); // Context and name. 1512 __ Push(cp, x1); // Context and name.
1573 __ CallRuntime(Runtime::kLoadContextSlot, 2); 1513 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1574 __ bind(&done); 1514 __ Bind(&done);
1575 context()->Plug(r0); 1515 context()->Plug(x0);
1516 break;
1576 } 1517 }
1577 } 1518 }
1578 } 1519 }
1579 1520
1580 1521
1581 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1522 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1582 Comment cmnt(masm_, "[ RegExpLiteral"); 1523 Comment cmnt(masm_, "[ RegExpLiteral");
1583 Label materialized; 1524 Label materialized;
1584 // Registers will be used as follows: 1525 // Registers will be used as follows:
1585 // r5 = materialized value (RegExp literal) 1526 // x5 = materialized value (RegExp literal)
1586 // r4 = JS function, literals array 1527 // x4 = JS function, literals array
1587 // r3 = literal index 1528 // x3 = literal index
1588 // r2 = RegExp pattern 1529 // x2 = RegExp pattern
1589 // r1 = RegExp flags 1530 // x1 = RegExp flags
1590 // r0 = RegExp literal clone 1531 // x0 = RegExp literal clone
1591 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1532 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1592 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); 1533 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
1593 int literal_offset = 1534 int literal_offset =
1594 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; 1535 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1595 __ ldr(r5, FieldMemOperand(r4, literal_offset)); 1536 __ Ldr(x5, FieldMemOperand(x4, literal_offset));
1596 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1537 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1597 __ cmp(r5, ip);
1598 __ b(ne, &materialized);
1599 1538
1600 // Create regexp literal using runtime function. 1539 // Create regexp literal using runtime function.
1601 // Result will be in r0. 1540 // Result will be in x0.
1602 __ mov(r3, Operand(Smi::FromInt(expr->literal_index()))); 1541 __ Mov(x3, Operand(Smi::FromInt(expr->literal_index())));
1603 __ mov(r2, Operand(expr->pattern())); 1542 __ Mov(x2, Operand(expr->pattern()));
1604 __ mov(r1, Operand(expr->flags())); 1543 __ Mov(x1, Operand(expr->flags()));
1605 __ Push(r4, r3, r2, r1); 1544 __ Push(x4, x3, x2, x1);
1606 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1545 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1607 __ mov(r5, r0); 1546 __ Mov(x5, x0);
1608 1547
1609 __ bind(&materialized); 1548 __ Bind(&materialized);
1610 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 1549 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1611 Label allocated, runtime_allocate; 1550 Label allocated, runtime_allocate;
1612 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); 1551 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1613 __ jmp(&allocated); 1552 __ B(&allocated);
1614 1553
1615 __ bind(&runtime_allocate); 1554 __ Bind(&runtime_allocate);
1616 __ push(r5); 1555 __ Mov(x10, Operand(Smi::FromInt(size)));
1617 __ mov(r0, Operand(Smi::FromInt(size))); 1556 __ Push(x5, x10);
1618 __ push(r0);
1619 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 1557 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1620 __ pop(r5); 1558 __ Pop(x5);
1621 1559
1622 __ bind(&allocated); 1560 __ Bind(&allocated);
1623 // After this, registers are used as follows: 1561 // After this, registers are used as follows:
1624 // r0: Newly allocated regexp. 1562 // x0: Newly allocated regexp.
1625 // r5: Materialized regexp. 1563 // x5: Materialized regexp.
1626 // r2: temp. 1564 // x10, x11, x12: temps.
1627 __ CopyFields(r0, r5, d0, s0, size / kPointerSize); 1565 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1628 context()->Plug(r0); 1566 context()->Plug(x0);
1629 } 1567 }
1630 1568
1631 1569
1632 void FullCodeGenerator::EmitAccessor(Expression* expression) { 1570 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1633 if (expression == NULL) { 1571 if (expression == NULL) {
1634 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1572 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1635 __ push(r1); 1573 __ Push(x10);
1636 } else { 1574 } else {
1637 VisitForStackValue(expression); 1575 VisitForStackValue(expression);
1638 } 1576 }
1639 } 1577 }
1640 1578
1641 1579
1642 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { 1580 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1643 Comment cmnt(masm_, "[ ObjectLiteral"); 1581 Comment cmnt(masm_, "[ ObjectLiteral");
1644 Handle<FixedArray> constant_properties = expr->constant_properties(); 1582 Handle<FixedArray> constant_properties = expr->constant_properties();
1645 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1583 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1646 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1584 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1647 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1585 __ Mov(x2, Operand(Smi::FromInt(expr->literal_index())));
1648 __ mov(r1, Operand(constant_properties)); 1586 __ Mov(x1, Operand(constant_properties));
1649 int flags = expr->fast_elements() 1587 int flags = expr->fast_elements()
1650 ? ObjectLiteral::kFastElements 1588 ? ObjectLiteral::kFastElements
1651 : ObjectLiteral::kNoFlags; 1589 : ObjectLiteral::kNoFlags;
1652 flags |= expr->has_function() 1590 flags |= expr->has_function()
1653 ? ObjectLiteral::kHasFunction 1591 ? ObjectLiteral::kHasFunction
1654 : ObjectLiteral::kNoFlags; 1592 : ObjectLiteral::kNoFlags;
1655 __ mov(r0, Operand(Smi::FromInt(flags))); 1593 __ Mov(x0, Operand(Smi::FromInt(flags)));
1656 int properties_count = constant_properties->length() / 2; 1594 int properties_count = constant_properties->length() / 2;
1595 const int max_cloned_properties =
1596 FastCloneShallowObjectStub::kMaximumClonedProperties;
1657 if ((FLAG_track_double_fields && expr->may_store_doubles()) || 1597 if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
1658 expr->depth() > 1) { 1598 expr->depth() > 1) {
1659 __ Push(r3, r2, r1, r0); 1599 __ Push(x3, x2, x1, x0);
1660 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); 1600 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1661 } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements || 1601 } else if (Serializer::enabled() || (flags != ObjectLiteral::kFastElements) ||
1662 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1602 (properties_count > max_cloned_properties)) {
1663 __ Push(r3, r2, r1, r0); 1603 __ Push(x3, x2, x1, x0);
1664 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4); 1604 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1665 } else { 1605 } else {
1666 FastCloneShallowObjectStub stub(properties_count); 1606 FastCloneShallowObjectStub stub(properties_count);
1667 __ CallStub(&stub); 1607 __ CallStub(&stub);
1668 } 1608 }
1669 1609
1670 // If result_saved is true the result is on top of the stack. If 1610 // If result_saved is true the result is on top of the stack. If
1671 // result_saved is false the result is in r0. 1611 // result_saved is false the result is in x0.
1672 bool result_saved = false; 1612 bool result_saved = false;
1673 1613
1674 // Mark all computed expressions that are bound to a key that 1614 // Mark all computed expressions that are bound to a key that
1675 // is shadowed by a later occurrence of the same key. For the 1615 // is shadowed by a later occurrence of the same key. For the
1676 // marked expressions, no store code is emitted. 1616 // marked expressions, no store code is emitted.
1677 expr->CalculateEmitStore(zone()); 1617 expr->CalculateEmitStore(zone());
1678 1618
1679 AccessorTable accessor_table(zone()); 1619 AccessorTable accessor_table(zone());
1680 for (int i = 0; i < expr->properties()->length(); i++) { 1620 for (int i = 0; i < expr->properties()->length(); i++) {
1681 ObjectLiteral::Property* property = expr->properties()->at(i); 1621 ObjectLiteral::Property* property = expr->properties()->at(i);
1682 if (property->IsCompileTimeValue()) continue; 1622 if (property->IsCompileTimeValue()) continue;
1683 1623
1684 Literal* key = property->key(); 1624 Literal* key = property->key();
1685 Expression* value = property->value(); 1625 Expression* value = property->value();
1686 if (!result_saved) { 1626 if (!result_saved) {
1687 __ push(r0); // Save result on stack 1627 __ Push(x0); // Save result on stack
1688 result_saved = true; 1628 result_saved = true;
1689 } 1629 }
1690 switch (property->kind()) { 1630 switch (property->kind()) {
1691 case ObjectLiteral::Property::CONSTANT: 1631 case ObjectLiteral::Property::CONSTANT:
1692 UNREACHABLE(); 1632 UNREACHABLE();
1693 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1633 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1694 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1634 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1695 // Fall through. 1635 // Fall through.
1696 case ObjectLiteral::Property::COMPUTED: 1636 case ObjectLiteral::Property::COMPUTED:
1697 if (key->handle()->IsInternalizedString()) { 1637 if (key->handle()->IsInternalizedString()) {
1698 if (property->emit_store()) { 1638 if (property->emit_store()) {
1699 VisitForAccumulatorValue(value); 1639 VisitForAccumulatorValue(value);
1700 __ mov(r2, Operand(key->handle())); 1640 __ Mov(x2, Operand(key->handle()));
1701 __ ldr(r1, MemOperand(sp)); 1641 __ Peek(x1, 0);
1702 Handle<Code> ic = is_classic_mode() 1642 Handle<Code> ic = is_classic_mode()
1703 ? isolate()->builtins()->StoreIC_Initialize() 1643 ? isolate()->builtins()->StoreIC_Initialize()
1704 : isolate()->builtins()->StoreIC_Initialize_Strict(); 1644 : isolate()->builtins()->StoreIC_Initialize_Strict();
1705 CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId()); 1645 CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
1706 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1646 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1707 } else { 1647 } else {
1708 VisitForEffect(value); 1648 VisitForEffect(value);
1709 } 1649 }
1710 break; 1650 break;
1711 } 1651 }
1712 // Duplicate receiver on stack. 1652 // Duplicate receiver on stack.
1713 __ ldr(r0, MemOperand(sp)); 1653 __ Peek(x0, 0);
1714 __ push(r0); 1654 __ Push(x0);
1715 VisitForStackValue(key); 1655 VisitForStackValue(key);
1716 VisitForStackValue(value); 1656 VisitForStackValue(value);
1717 if (property->emit_store()) { 1657 if (property->emit_store()) {
1718 __ mov(r0, Operand(Smi::FromInt(NONE))); // PropertyAttributes 1658 __ Mov(x0, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1719 __ push(r0); 1659 __ Push(x0);
1720 __ CallRuntime(Runtime::kSetProperty, 4); 1660 __ CallRuntime(Runtime::kSetProperty, 4);
1721 } else { 1661 } else {
1722 __ Drop(3); 1662 __ Drop(3);
1723 } 1663 }
1724 break; 1664 break;
1725 case ObjectLiteral::Property::PROTOTYPE: 1665 case ObjectLiteral::Property::PROTOTYPE:
1726 // Duplicate receiver on stack. 1666 // Duplicate receiver on stack.
1727 __ ldr(r0, MemOperand(sp)); 1667 __ Peek(x0, 0);
1728 __ push(r0); 1668 // TODO(jbramley): This push shouldn't be necessary if we don't call the
1669 // runtime below. In that case, skip it.
1670 __ Push(x0);
1729 VisitForStackValue(value); 1671 VisitForStackValue(value);
1730 if (property->emit_store()) { 1672 if (property->emit_store()) {
1731 __ CallRuntime(Runtime::kSetPrototype, 2); 1673 __ CallRuntime(Runtime::kSetPrototype, 2);
1732 } else { 1674 } else {
1733 __ Drop(2); 1675 __ Drop(2);
1734 } 1676 }
1735 break; 1677 break;
1736
1737 case ObjectLiteral::Property::GETTER: 1678 case ObjectLiteral::Property::GETTER:
1738 accessor_table.lookup(key)->second->getter = value; 1679 accessor_table.lookup(key)->second->getter = value;
1739 break; 1680 break;
1740 case ObjectLiteral::Property::SETTER: 1681 case ObjectLiteral::Property::SETTER:
1741 accessor_table.lookup(key)->second->setter = value; 1682 accessor_table.lookup(key)->second->setter = value;
1742 break; 1683 break;
1743 } 1684 }
1744 } 1685 }
1745 1686
1746 // Emit code to define accessors, using only a single call to the runtime for 1687 // Emit code to define accessors, using only a single call to the runtime for
1747 // each pair of corresponding getters and setters. 1688 // each pair of corresponding getters and setters.
1748 for (AccessorTable::Iterator it = accessor_table.begin(); 1689 for (AccessorTable::Iterator it = accessor_table.begin();
1749 it != accessor_table.end(); 1690 it != accessor_table.end();
1750 ++it) { 1691 ++it) {
1751 __ ldr(r0, MemOperand(sp)); // Duplicate receiver. 1692 __ Peek(x10, 0); // Duplicate receiver.
1752 __ push(r0); 1693 __ Push(x10);
1753 VisitForStackValue(it->first); 1694 VisitForStackValue(it->first);
1754 EmitAccessor(it->second->getter); 1695 EmitAccessor(it->second->getter);
1755 EmitAccessor(it->second->setter); 1696 EmitAccessor(it->second->setter);
1756 __ mov(r0, Operand(Smi::FromInt(NONE))); 1697 __ Mov(x10, Operand(Smi::FromInt(NONE)));
1757 __ push(r0); 1698 __ Push(x10);
1758 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5); 1699 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1759 } 1700 }
1760 1701
1761 if (expr->has_function()) { 1702 if (expr->has_function()) {
1762 ASSERT(result_saved); 1703 ASSERT(result_saved);
1763 __ ldr(r0, MemOperand(sp)); 1704 __ Peek(x0, 0);
1764 __ push(r0); 1705 __ Push(x0);
1765 __ CallRuntime(Runtime::kToFastProperties, 1); 1706 __ CallRuntime(Runtime::kToFastProperties, 1);
1766 } 1707 }
1767 1708
1768 if (result_saved) { 1709 if (result_saved) {
1769 context()->PlugTOS(); 1710 context()->PlugTOS();
1770 } else { 1711 } else {
1771 context()->Plug(r0); 1712 context()->Plug(x0);
1772 } 1713 }
1773 } 1714 }
1774 1715
1775 1716
1776 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1717 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1777 Comment cmnt(masm_, "[ ArrayLiteral"); 1718 Comment cmnt(masm_, "[ ArrayLiteral");
1778 1719
1779 ZoneList<Expression*>* subexprs = expr->values(); 1720 ZoneList<Expression*>* subexprs = expr->values();
1780 int length = subexprs->length(); 1721 int length = subexprs->length();
1781 Handle<FixedArray> constant_elements = expr->constant_elements(); 1722 Handle<FixedArray> constant_elements = expr->constant_elements();
1782 ASSERT_EQ(2, constant_elements->length()); 1723 ASSERT_EQ(2, constant_elements->length());
1783 ElementsKind constant_elements_kind = 1724 ElementsKind constant_elements_kind =
1784 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1725 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1785 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); 1726 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1786 Handle<FixedArrayBase> constant_elements_values( 1727 Handle<FixedArrayBase> constant_elements_values(
1787 FixedArrayBase::cast(constant_elements->get(1))); 1728 FixedArrayBase::cast(constant_elements->get(1)));
1788 1729
1789 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1730 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1790 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1731 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1791 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1732 // TODO(jbramley): Can these Operand constructors be implicit?
1792 __ mov(r1, Operand(constant_elements)); 1733 __ Mov(x2, Operand(Smi::FromInt(expr->literal_index())));
1734 __ Mov(x1, Operand(constant_elements));
1793 if (has_fast_elements && constant_elements_values->map() == 1735 if (has_fast_elements && constant_elements_values->map() ==
1794 isolate()->heap()->fixed_cow_array_map()) { 1736 isolate()->heap()->fixed_cow_array_map()) {
1795 FastCloneShallowArrayStub stub( 1737 FastCloneShallowArrayStub stub(
1796 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, 1738 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1797 DONT_TRACK_ALLOCATION_SITE, 1739 DONT_TRACK_ALLOCATION_SITE,
1798 length); 1740 length);
1799 __ CallStub(&stub); 1741 __ CallStub(&stub);
1800 __ IncrementCounter( 1742 __ IncrementCounter(
1801 isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2); 1743 isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11);
1802 } else if (expr->depth() > 1) { 1744 } else if (expr->depth() > 1) {
1803 __ Push(r3, r2, r1); 1745 __ Push(x3, x2, x1);
1804 __ CallRuntime(Runtime::kCreateArrayLiteral, 3); 1746 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1805 } else if (Serializer::enabled() || 1747 } else if (Serializer::enabled() ||
1806 length > FastCloneShallowArrayStub::kMaximumClonedLength) { 1748 length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1807 __ Push(r3, r2, r1); 1749 __ Push(x3, x2, x1);
1808 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); 1750 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1809 } else { 1751 } else {
1810 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || 1752 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1811 FLAG_smi_only_arrays); 1753 FLAG_smi_only_arrays);
1812 FastCloneShallowArrayStub::Mode mode = 1754 FastCloneShallowArrayStub::Mode mode =
1813 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; 1755 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1814 AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites 1756 AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
1815 ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE; 1757 ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
1816 1758
1817 if (has_fast_elements) { 1759 if (has_fast_elements) {
(...skipping 12 matching lines...)
1830 for (int i = 0; i < length; i++) { 1772 for (int i = 0; i < length; i++) {
1831 Expression* subexpr = subexprs->at(i); 1773 Expression* subexpr = subexprs->at(i);
1832 // If the subexpression is a literal or a simple materialized literal it 1774 // If the subexpression is a literal or a simple materialized literal it
1833 // is already set in the cloned array. 1775 // is already set in the cloned array.
1834 if (subexpr->AsLiteral() != NULL || 1776 if (subexpr->AsLiteral() != NULL ||
1835 CompileTimeValue::IsCompileTimeValue(subexpr)) { 1777 CompileTimeValue::IsCompileTimeValue(subexpr)) {
1836 continue; 1778 continue;
1837 } 1779 }
1838 1780
1839 if (!result_saved) { 1781 if (!result_saved) {
1840 __ push(r0); 1782 __ Push(x0);
1841 result_saved = true; 1783 result_saved = true;
1842 } 1784 }
1843 VisitForAccumulatorValue(subexpr); 1785 VisitForAccumulatorValue(subexpr);
1844 1786
1845 if (IsFastObjectElementsKind(constant_elements_kind)) { 1787 if (IsFastObjectElementsKind(constant_elements_kind)) {
1846 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1788 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1847 __ ldr(r6, MemOperand(sp)); // Copy of array literal. 1789 __ Peek(x6, 0); // Copy of array literal.
1848 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset)); 1790 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
1849 __ str(result_register(), FieldMemOperand(r1, offset)); 1791 __ Str(result_register(), FieldMemOperand(x1, offset));
1850 // Update the write barrier for the array store. 1792 // Update the write barrier for the array store.
1851 __ RecordWriteField(r1, offset, result_register(), r2, 1793 __ RecordWriteField(x1, offset, result_register(), x10,
1852 kLRHasBeenSaved, kDontSaveFPRegs, 1794 kLRHasBeenSaved, kDontSaveFPRegs,
1853 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); 1795 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1854 } else { 1796 } else {
1855 __ ldr(r1, MemOperand(sp)); // Copy of array literal. 1797 __ Peek(x1, 0); // Copy of array literal.
1856 __ ldr(r2, FieldMemOperand(r1, JSObject::kMapOffset)); 1798 __ Ldr(x2, FieldMemOperand(x1, JSObject::kMapOffset));
1857 __ mov(r3, Operand(Smi::FromInt(i))); 1799 __ Mov(x3, Operand(Smi::FromInt(i)));
1858 __ mov(r4, Operand(Smi::FromInt(expr->literal_index()))); 1800 __ Mov(x4, Operand(Smi::FromInt(expr->literal_index())));
1859 StoreArrayLiteralElementStub stub; 1801 StoreArrayLiteralElementStub stub;
1860 __ CallStub(&stub); 1802 __ CallStub(&stub);
1861 } 1803 }
1862 1804
1863 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1805 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1864 } 1806 }
1865 1807
1866 if (result_saved) { 1808 if (result_saved) {
1867 context()->PlugTOS(); 1809 context()->PlugTOS();
1868 } else { 1810 } else {
1869 context()->Plug(r0); 1811 context()->Plug(x0);
1870 } 1812 }
1871 } 1813 }
1872 1814
1873 1815
1874 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1816 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1875 Comment cmnt(masm_, "[ Assignment"); 1817 Comment cmnt(masm_, "[ Assignment");
1876 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' 1818 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1877 // on the left-hand side. 1819 // on the left-hand side.
1878 if (!expr->target()->IsValidLeftHandSide()) { 1820 if (!expr->target()->IsValidLeftHandSide()) {
1879 VisitForEffect(expr->target()); 1821 VisitForEffect(expr->target());
(...skipping 13 matching lines...)
1893 1835
1894 // Evaluate LHS expression. 1836 // Evaluate LHS expression.
1895 switch (assign_type) { 1837 switch (assign_type) {
1896 case VARIABLE: 1838 case VARIABLE:
1897 // Nothing to do here. 1839 // Nothing to do here.
1898 break; 1840 break;
1899 case NAMED_PROPERTY: 1841 case NAMED_PROPERTY:
1900 if (expr->is_compound()) { 1842 if (expr->is_compound()) {
1901 // We need the receiver both on the stack and in the accumulator. 1843 // We need the receiver both on the stack and in the accumulator.
1902 VisitForAccumulatorValue(property->obj()); 1844 VisitForAccumulatorValue(property->obj());
1903 __ push(result_register()); 1845 __ Push(result_register());
1904 } else { 1846 } else {
1905 VisitForStackValue(property->obj()); 1847 VisitForStackValue(property->obj());
1906 } 1848 }
1907 break; 1849 break;
1908 case KEYED_PROPERTY: 1850 case KEYED_PROPERTY:
1909 if (expr->is_compound()) { 1851 if (expr->is_compound()) {
1910 VisitForStackValue(property->obj()); 1852 VisitForStackValue(property->obj());
1911 VisitForAccumulatorValue(property->key()); 1853 VisitForAccumulatorValue(property->key());
1912 __ ldr(r1, MemOperand(sp, 0)); 1854 __ Peek(x1, 0);
1913 __ push(r0); 1855 __ Push(x0);
1914 } else { 1856 } else {
1915 VisitForStackValue(property->obj()); 1857 VisitForStackValue(property->obj());
1916 VisitForStackValue(property->key()); 1858 VisitForStackValue(property->key());
1917 } 1859 }
1918 break; 1860 break;
1919 } 1861 }
1920 1862
1921 // For compound assignments we need another deoptimization point after the 1863 // For compound assignments we need another deoptimization point after the
1922 // variable/property load. 1864 // variable/property load.
1923 if (expr->is_compound()) { 1865 if (expr->is_compound()) {
1924 { AccumulatorValueContext context(this); 1866 { AccumulatorValueContext context(this);
1925 switch (assign_type) { 1867 switch (assign_type) {
1926 case VARIABLE: 1868 case VARIABLE:
1927 EmitVariableLoad(expr->target()->AsVariableProxy()); 1869 EmitVariableLoad(expr->target()->AsVariableProxy());
1928 PrepareForBailout(expr->target(), TOS_REG); 1870 PrepareForBailout(expr->target(), TOS_REG);
1929 break; 1871 break;
1930 case NAMED_PROPERTY: 1872 case NAMED_PROPERTY:
1931 EmitNamedPropertyLoad(property); 1873 EmitNamedPropertyLoad(property);
1932 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1874 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1933 break; 1875 break;
1934 case KEYED_PROPERTY: 1876 case KEYED_PROPERTY:
1935 EmitKeyedPropertyLoad(property); 1877 EmitKeyedPropertyLoad(property);
1936 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1878 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1937 break; 1879 break;
1938 } 1880 }
1939 } 1881 }
1940 1882
1941 Token::Value op = expr->binary_op(); 1883 Token::Value op = expr->binary_op();
1942 __ push(r0); // Left operand goes on the stack. 1884 __ Push(x0); // Left operand goes on the stack.
1943 VisitForAccumulatorValue(expr->value()); 1885 VisitForAccumulatorValue(expr->value());
1944 1886
1945 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1887 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1946 ? OVERWRITE_RIGHT 1888 ? OVERWRITE_RIGHT
1947 : NO_OVERWRITE; 1889 : NO_OVERWRITE;
1948 SetSourcePosition(expr->position() + 1); 1890 SetSourcePosition(expr->position() + 1);
1949 AccumulatorValueContext context(this); 1891 AccumulatorValueContext context(this);
1950 if (ShouldInlineSmiCase(op)) { 1892 if (ShouldInlineSmiCase(op)) {
1951 EmitInlineSmiBinaryOp(expr->binary_operation(), 1893 EmitInlineSmiBinaryOp(expr->binary_operation(),
1952 op, 1894 op,
(...skipping 12 matching lines...)
1965 1907
1966 // Record source position before possible IC call. 1908 // Record source position before possible IC call.
1967 SetSourcePosition(expr->position()); 1909 SetSourcePosition(expr->position());
1968 1910
1969 // Store the value. 1911 // Store the value.
1970 switch (assign_type) { 1912 switch (assign_type) {
1971 case VARIABLE: 1913 case VARIABLE:
1972 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1914 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1973 expr->op()); 1915 expr->op());
1974 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1916 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1975 context()->Plug(r0); 1917 context()->Plug(x0);
1976 break; 1918 break;
1977 case NAMED_PROPERTY: 1919 case NAMED_PROPERTY:
1978 EmitNamedPropertyAssignment(expr); 1920 EmitNamedPropertyAssignment(expr);
1979 break; 1921 break;
1980 case KEYED_PROPERTY: 1922 case KEYED_PROPERTY:
1981 EmitKeyedPropertyAssignment(expr); 1923 EmitKeyedPropertyAssignment(expr);
1982 break; 1924 break;
1983 } 1925 }
1984 } 1926 }
1985 1927
1986 1928
1987 void FullCodeGenerator::VisitYield(Yield* expr) {
1988 Comment cmnt(masm_, "[ Yield");
1989 // Evaluate yielded value first; the initial iterator definition depends on
1990 // this. It stays on the stack while we update the iterator.
1991 VisitForStackValue(expr->expression());
1992
1993 switch (expr->yield_kind()) {
1994 case Yield::INITIAL:
1995 case Yield::SUSPEND: {
1996 VisitForStackValue(expr->generator_object());
1997 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1998 __ ldr(context_register(),
1999 MemOperand(fp, StandardFrameConstants::kContextOffset));
2000
2001 Label resume;
2002 __ CompareRoot(result_register(), Heap::kTheHoleValueRootIndex);
2003 __ b(ne, &resume);
2004 if (expr->yield_kind() == Yield::SUSPEND) {
2005 EmitReturnIteratorResult(false);
2006 } else {
2007 __ pop(result_register());
2008 EmitReturnSequence();
2009 }
2010
2011 __ bind(&resume);
2012 context()->Plug(result_register());
2013 break;
2014 }
2015
2016 case Yield::FINAL: {
2017 VisitForAccumulatorValue(expr->generator_object());
2018 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2019 __ str(r1, FieldMemOperand(result_register(),
2020 JSGeneratorObject::kContinuationOffset));
2021 EmitReturnIteratorResult(true);
2022 break;
2023 }
2024
2025 case Yield::DELEGATING: {
2026 VisitForStackValue(expr->generator_object());
2027
2028 // Initial stack layout is as follows:
2029 // [sp + 1 * kPointerSize] iter
2030 // [sp + 0 * kPointerSize] g
2031
2032 Label l_catch, l_try, l_resume, l_next, l_call, l_loop;
2033 // Initial send value is undefined.
2034 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2035 __ b(&l_next);
2036
2037 // catch (e) { receiver = iter; f = iter.throw; arg = e; goto l_call; }
2038 __ bind(&l_catch);
2039 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2040 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2041 __ push(r3); // iter
2042 __ push(r0); // exception
2043 __ mov(r0, r3); // iter
2044 __ LoadRoot(r2, Heap::kthrow_stringRootIndex); // "throw"
2045 Handle<Code> throw_ic = isolate()->builtins()->LoadIC_Initialize();
2046 CallIC(throw_ic); // iter.throw in r0
2047 __ jmp(&l_call);
2048
2049 // try { received = yield result.value }
2050 __ bind(&l_try);
2051 __ pop(r0); // result.value
2052 __ PushTryHandler(StackHandler::CATCH, expr->index());
2053 const int handler_size = StackHandlerConstants::kSize;
2054 __ push(r0); // result.value
2055 __ ldr(r3, MemOperand(sp, (0 + 1) * kPointerSize + handler_size)); // g
2056 __ push(r3); // g
2057 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2058 __ ldr(context_register(),
2059 MemOperand(fp, StandardFrameConstants::kContextOffset));
2060 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
2061 __ b(ne, &l_resume);
2062 EmitReturnIteratorResult(false);
2063 __ bind(&l_resume); // received in r0
2064 __ PopTryHandler();
2065
2066 // receiver = iter; f = iter.next; arg = received;
2067 __ bind(&l_next);
2068 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
2069 __ push(r3); // iter
2070 __ push(r0); // received
2071 __ mov(r0, r3); // iter
2072 __ LoadRoot(r2, Heap::knext_stringRootIndex); // "next"
2073 Handle<Code> next_ic = isolate()->builtins()->LoadIC_Initialize();
2074 CallIC(next_ic); // iter.next in r0
2075
2076 // result = f.call(receiver, arg);
2077 __ bind(&l_call);
2078 Label l_call_runtime;
2079 __ JumpIfSmi(r0, &l_call_runtime);
2080 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2081 __ b(ne, &l_call_runtime);
2082 __ mov(r1, r0);
2083 ParameterCount count(1);
2084 __ InvokeFunction(r1, count, CALL_FUNCTION,
2085 NullCallWrapper(), CALL_AS_METHOD);
2086 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2087 __ jmp(&l_loop);
2088 __ bind(&l_call_runtime);
2089 __ push(r0);
2090 __ CallRuntime(Runtime::kCall, 3);
2091
2092 // val = result.value; if (!result.done) goto l_try;
2093 __ bind(&l_loop);
2094 // result.value
2095 __ push(r0); // save result
2096 __ LoadRoot(r2, Heap::kvalue_stringRootIndex); // "value"
2097 Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
2098 CallIC(value_ic); // result.value in r0
2099 __ pop(r1); // result
2100 __ push(r0); // result.value
2101 __ mov(r0, r1); // result
2102 __ LoadRoot(r2, Heap::kdone_stringRootIndex); // "done"
2103 Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
2104 CallIC(done_ic); // result.done in r0
2105 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2106 CallIC(bool_ic);
2107 __ cmp(r0, Operand(0));
2108 __ b(eq, &l_try);
2109
2110 // result.value
2111 __ pop(r0); // result.value
2112 context()->DropAndPlug(2, r0); // drop iter and g
2113 break;
2114 }
2115 }
2116 }
2117
2118
2119 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2120 Expression *value,
2121 JSGeneratorObject::ResumeMode resume_mode) {
2122 // The value stays in r0, and is ultimately read by the resumed generator, as
2123 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. r1
2124 // will hold the generator object until the activation has been resumed.
2125 VisitForStackValue(generator);
2126 VisitForAccumulatorValue(value);
2127 __ pop(r1);
2128
2129 // Check generator state.
2130 Label wrong_state, done;
2131 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2132 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
2133 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
2134 __ cmp(r3, Operand(Smi::FromInt(0)));
2135 __ b(le, &wrong_state);
2136
2137 // Load suspended function and context.
2138 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
2139 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
2140
2141 // Load receiver and store as the first argument.
2142 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
2143 __ push(r2);
2144
2145 // Push holes for the rest of the arguments to the generator function.
2146 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2147 __ ldr(r3,
2148 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
2149 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
2150 Label push_argument_holes, push_frame;
2151 __ bind(&push_argument_holes);
2152 __ sub(r3, r3, Operand(1), SetCC);
2153 __ b(mi, &push_frame);
2154 __ push(r2);
2155 __ jmp(&push_argument_holes);
2156
2157 // Enter a new JavaScript frame, and initialize its slots as they were when
2158 // the generator was suspended.
2159 Label resume_frame;
2160 __ bind(&push_frame);
2161 __ bl(&resume_frame);
2162 __ jmp(&done);
2163 __ bind(&resume_frame);
2164 __ push(lr); // Return address.
2165 __ push(fp); // Caller's frame pointer.
2166 __ mov(fp, sp);
2167 __ push(cp); // Callee's context.
2168 __ push(r4); // Callee's JS Function.
2169
2170 // Load the operand stack size.
2171 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
2172 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset));
2173 __ SmiUntag(r3);
2174
2175 // If we are sending a value and there is no operand stack, we can jump back
2176 // in directly.
2177 if (resume_mode == JSGeneratorObject::NEXT) {
2178 Label slow_resume;
2179 __ cmp(r3, Operand(0));
2180 __ b(ne, &slow_resume);
2181 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2182 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2183 __ SmiUntag(r2);
2184 __ add(r3, r3, r2);
2185 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2186 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
2187 __ Jump(r3);
2188 __ bind(&slow_resume);
2189 }
2190
2191 // Otherwise, we push holes for the operand stack and call the runtime to fix
2192 // up the stack and the handlers.
2193 Label push_operand_holes, call_resume;
2194 __ bind(&push_operand_holes);
2195 __ sub(r3, r3, Operand(1), SetCC);
2196 __ b(mi, &call_resume);
2197 __ push(r2);
2198 __ b(&push_operand_holes);
2199 __ bind(&call_resume);
2200 __ push(r1);
2201 __ push(result_register());
2202 __ Push(Smi::FromInt(resume_mode));
2203 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2204 // Not reached: the runtime call returns elsewhere.
2205 __ stop("not-reached");
2206
2207 // Throw error if we attempt to operate on a running generator.
2208 __ bind(&wrong_state);
2209 __ push(r1);
2210 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2211
2212 __ bind(&done);
2213 context()->Plug(result_register());
2214 }
2215
2216
2217 void FullCodeGenerator::EmitReturnIteratorResult(bool done) {
2218 Label gc_required;
2219 Label allocated;
2220
2221 Handle<Map> map(isolate()->native_context()->generator_result_map());
2222
2223 __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT);
2224
2225 __ bind(&allocated);
2226 __ mov(r1, Operand(map));
2227 __ pop(r2);
2228 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done)));
2229 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array()));
2230 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2231 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2232 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2233 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2234 __ str(r2,
2235 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset));
2236 __ str(r3,
2237 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset));
2238
2239 // Only the value field needs a write barrier, as the other values are in the
2240 // root set.
2241 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset,
2242 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
2243
2244 if (done) {
2245 // Exit all nested statements.
2246 NestedStatement* current = nesting_stack_;
2247 int stack_depth = 0;
2248 int context_length = 0;
2249 while (current != NULL) {
2250 current = current->Exit(&stack_depth, &context_length);
2251 }
2252 __ Drop(stack_depth);
2253 }
2254
2255 EmitReturnSequence();
2256
2257 __ bind(&gc_required);
2258 __ Push(Smi::FromInt(map->instance_size()));
2259 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2260 __ ldr(context_register(),
2261 MemOperand(fp, StandardFrameConstants::kContextOffset));
2262 __ jmp(&allocated);
2263 }
2264
2265
2266 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 1929 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2267 SetSourcePosition(prop->position()); 1930 SetSourcePosition(prop->position());
2268 Literal* key = prop->key()->AsLiteral(); 1931 Literal* key = prop->key()->AsLiteral();
2269 __ mov(r2, Operand(key->handle())); 1932 __ Mov(x2, Operand(key->handle()));
2270 // Call load IC. It has arguments receiver and property name in r0 and r2. 1933 // Call load IC. It has arguments receiver and property name in x0 and x2.
2271 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1934 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2272 CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId()); 1935 CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
2273 } 1936 }
2274 1937
2275 1938
2276 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 1939 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2277 SetSourcePosition(prop->position()); 1940 SetSourcePosition(prop->position());
2278 // Call keyed load IC. It has arguments key and receiver in r0 and r1. 1941 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2279 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 1942 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2280 CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId()); 1943 CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
2281 } 1944 }
2282 1945
2283 1946
2284 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1947 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2285 Token::Value op, 1948 Token::Value op,
2286 OverwriteMode mode, 1949 OverwriteMode mode,
2287 Expression* left_expr, 1950 Expression* left_expr,
2288 Expression* right_expr) { 1951 Expression* right_expr) {
2289 Label done, smi_case, stub_call; 1952 Label done, both_smis, stub_call;
2290
2291 Register scratch1 = r2;
2292 Register scratch2 = r3;
2293 1953
2294 // Get the arguments. 1954 // Get the arguments.
2295 Register left = r1; 1955 Register left = x1;
2296 Register right = r0; 1956 Register right = x0;
2297 __ pop(left); 1957 Register result = x0;
1958 __ Pop(left);
2298 1959
2299 // Perform combined smi check on both operands. 1960 // Perform combined smi check on both operands.
2300 __ orr(scratch1, left, Operand(right)); 1961 __ Orr(x10, left, right);
2301 STATIC_ASSERT(kSmiTag == 0);
2302 JumpPatchSite patch_site(masm_); 1962 JumpPatchSite patch_site(masm_);
2303 patch_site.EmitJumpIfSmi(scratch1, &smi_case); 1963 patch_site.EmitJumpIfSmi(x10, &both_smis);
2304 1964
2305 __ bind(&stub_call); 1965 __ Bind(&stub_call);
2306 BinaryOpStub stub(op, mode); 1966 BinaryOpStub stub(op, mode);
2307 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 1967 {
2308 expr->BinaryOperationFeedbackId()); 1968 Assembler::BlockConstPoolScope scope(masm_);
2309 patch_site.EmitPatchInfo(); 1969 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
2310 __ jmp(&done); 1970 expr->BinaryOperationFeedbackId());
1971 patch_site.EmitPatchInfo();
1972 }
1973 __ B(&done);
2311 1974
2312 __ bind(&smi_case); 1975 __ Bind(&both_smis);
2313 // Smi case. This code works the same way as the smi-smi case in the type 1976 // Smi case. This code works in the same way as the smi-smi case in the type
2314 // recording binary operation stub, see 1977 // recording binary operation stub, see
2315 // BinaryOpStub::GenerateSmiSmiOperation for comments. 1978 // BinaryOpStub::GenerateSmiSmiOperation for comments.
1979 //
1980 // The set of operations that needs to be supported here is controlled by
1981 // FullCodeGenerator::ShouldInlineSmiCase().
2316 switch (op) { 1982 switch (op) {
2317 case Token::SAR: 1983 case Token::SAR:
2318 __ GetLeastBitsFromSmi(scratch1, right, 5); 1984 __ Ubfx(right, right, kSmiShift, 5);
2319 __ mov(right, Operand(left, ASR, scratch1)); 1985 __ Asr(result, left, right);
2320 __ bic(right, right, Operand(kSmiTagMask)); 1986 __ Bic(result, result, kSmiShiftMask);
2321 break; 1987 break;
2322 case Token::SHL: { 1988 case Token::SHL:
2323 __ SmiUntag(scratch1, left); 1989 __ Ubfx(right, right, kSmiShift, 5);
2324 __ GetLeastBitsFromSmi(scratch2, right, 5); 1990 __ Lsl(result, left, right);
2325 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
2326 __ TrySmiTag(right, scratch1, &stub_call);
2327 break; 1991 break;
2328 }
2329 case Token::SHR: { 1992 case Token::SHR: {
2330 __ SmiUntag(scratch1, left); 1993 Label right_not_zero;
2331 __ GetLeastBitsFromSmi(scratch2, right, 5); 1994 __ Cbnz(right, &right_not_zero);
2332 __ mov(scratch1, Operand(scratch1, LSR, scratch2)); 1995 __ Tbnz(left, kXSignBit, &stub_call);
2333 __ tst(scratch1, Operand(0xc0000000)); 1996 __ Bind(&right_not_zero);
2334 __ b(ne, &stub_call); 1997 __ Ubfx(right, right, kSmiShift, 5);
2335 __ SmiTag(right, scratch1); 1998 __ Lsr(result, left, right);
1999 __ Bic(result, result, kSmiShiftMask);
2336 break; 2000 break;
2337 } 2001 }
2338 case Token::ADD: 2002 case Token::ADD:
2339 __ add(scratch1, left, Operand(right), SetCC); 2003 __ Adds(x10, left, right);
2340 __ b(vs, &stub_call); 2004 __ B(vs, &stub_call);
2341 __ mov(right, scratch1); 2005 __ Mov(result, x10);
2342 break; 2006 break;
2343 case Token::SUB: 2007 case Token::SUB:
2344 __ sub(scratch1, left, Operand(right), SetCC); 2008 __ Subs(x10, left, right);
2345 __ b(vs, &stub_call); 2009 __ B(vs, &stub_call);
2346 __ mov(right, scratch1); 2010 __ Mov(result, x10);
2347 break; 2011 break;
2348 case Token::MUL: { 2012 case Token::MUL: {
2349 __ SmiUntag(ip, right); 2013 Label not_minus_zero, done;
2350 __ smull(scratch1, scratch2, left, ip); 2014 __ Smulh(x10, left, right);
2351 __ mov(ip, Operand(scratch1, ASR, 31)); 2015 __ Cbnz(x10, &not_minus_zero);
2352 __ cmp(ip, Operand(scratch2)); 2016 __ Eor(x11, left, right);
2353 __ b(ne, &stub_call); 2017 __ Tbnz(x11, kXSignBit, &stub_call);
2354 __ cmp(scratch1, Operand::Zero()); 2018 STATIC_ASSERT(kSmiTag == 0);
2355 __ mov(right, Operand(scratch1), LeaveCC, ne); 2019 __ Mov(result, x10);
2356 __ b(ne, &done); 2020 __ B(&done);
2357 __ add(scratch2, right, Operand(left), SetCC); 2021 __ Bind(&not_minus_zero);
2358 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl); 2022 __ Cls(x11, x10);
2359 __ b(mi, &stub_call); 2023 __ Cmp(x11, kXRegSize - kSmiShift);
2024 __ B(lt, &stub_call);
2025 __ SmiTag(result, x10);
2026 __ Bind(&done);
2360 break; 2027 break;
2361 } 2028 }
2362 case Token::BIT_OR: 2029 case Token::BIT_OR:
2363 __ orr(right, left, Operand(right)); 2030 __ Orr(result, left, right);
2364 break; 2031 break;
2365 case Token::BIT_AND: 2032 case Token::BIT_AND:
2366 __ and_(right, left, Operand(right)); 2033 __ And(result, left, right);
2367 break; 2034 break;
2368 case Token::BIT_XOR: 2035 case Token::BIT_XOR:
2369 __ eor(right, left, Operand(right)); 2036 __ Eor(result, left, right);
2370 break; 2037 break;
2371 default: 2038 default:
2372 UNREACHABLE(); 2039 UNREACHABLE();
2373 } 2040 }
2374 2041
2375 __ bind(&done); 2042 __ Bind(&done);
2376 context()->Plug(r0); 2043 context()->Plug(x0);
2377 } 2044 }
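
For readers less familiar with V8's pointer tagging, two details of EmitInlineSmiBinaryOp above are worth spelling out: the single Orr-then-test sequence that checks both operands at once, and the bail-out in the MUL case when the product is zero. The standalone C++ sketch below illustrates both; it is not V8 code, and the 1-bit tag mask and payload shift are simplified assumptions (the A64 port shifts smi payloads by 32 bits, which leaves bit 0 clear just the same).

    // Standalone illustration (not V8 code) of two tricks used in
    // EmitInlineSmiBinaryOp. Assumptions: smis carry tag bit 0 == 0, heap
    // pointers carry tag bit 0 == 1, and the payload shift is simplified to 1.
    #include <cassert>
    #include <cmath>
    #include <cstdint>

    constexpr uint64_t kSmiTagMask = 1;  // assumed 1-bit tag, smi tag value 0

    // Both operands are smis exactly when the OR of the two tagged words still
    // has a clear tag bit: a set tag bit in either operand survives the OR.
    // This is what the single Orr + EmitJumpIfSmi pair checks.
    bool BothSmis(uint64_t left, uint64_t right) {
      return ((left | right) & kSmiTagMask) == 0;
    }

    int main() {
      uint64_t smi_a = 42u << 1;         // tagged smi: payload shifted, bit 0 clear
      uint64_t smi_b = 7u << 1;
      uint64_t heap_obj = 0x1000u | 1u;  // pretend heap pointer: bit 0 set
      assert(BothSmis(smi_a, smi_b));
      assert(!BothSmis(smi_a, heap_obj));

      // Why the MUL fast path bails out when the product is zero and the operand
      // signs differ: JavaScript distinguishes +0 from -0, a smi can only encode
      // +0, so (-5) * 0 must produce a heap number holding -0.0.
      double product = -5.0 * 0.0;
      assert(std::signbit(product));     // the sign of zero survives in IEEE 754
      return 0;
    }
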
2378 2045
2379 2046
2380 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2047 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2381 Token::Value op, 2048 Token::Value op,
2382 OverwriteMode mode) { 2049 OverwriteMode mode) {
2383 __ pop(r1); 2050 __ Pop(x1);
2384 BinaryOpStub stub(op, mode); 2051 BinaryOpStub stub(op, mode);
2385 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2052 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2386 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 2053 {
2387 expr->BinaryOperationFeedbackId()); 2054 Assembler::BlockConstPoolScope scope(masm_);
2388 patch_site.EmitPatchInfo(); 2055 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
2389 context()->Plug(r0); 2056 expr->BinaryOperationFeedbackId());
2057 patch_site.EmitPatchInfo();
2058 }
2059 context()->Plug(x0);
2390 } 2060 }
2391 2061
2392 2062
2393 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2063 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2394 // Invalid left-hand sides are rewritten by the parser to have a 'throw 2064 // Invalid left-hand sides are rewritten to have a 'throw
2395 // ReferenceError' on the left-hand side. 2065 // ReferenceError' on the left-hand side.
2396 if (!expr->IsValidLeftHandSide()) { 2066 if (!expr->IsValidLeftHandSide()) {
2397 VisitForEffect(expr); 2067 VisitForEffect(expr);
2398 return; 2068 return;
2399 } 2069 }
2400 2070
2401 // Left-hand side can only be a property, a global or a (parameter or local) 2071 // Left-hand side can only be a property, a global or a (parameter or local)
2402 // slot. 2072 // slot.
2403 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2073 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2404 LhsKind assign_type = VARIABLE; 2074 LhsKind assign_type = VARIABLE;
2405 Property* prop = expr->AsProperty(); 2075 Property* prop = expr->AsProperty();
2406 if (prop != NULL) { 2076 if (prop != NULL) {
2407 assign_type = (prop->key()->IsPropertyName()) 2077 assign_type = (prop->key()->IsPropertyName())
2408 ? NAMED_PROPERTY 2078 ? NAMED_PROPERTY
2409 : KEYED_PROPERTY; 2079 : KEYED_PROPERTY;
2410 } 2080 }
2411 2081
2412 switch (assign_type) { 2082 switch (assign_type) {
2413 case VARIABLE: { 2083 case VARIABLE: {
2414 Variable* var = expr->AsVariableProxy()->var(); 2084 Variable* var = expr->AsVariableProxy()->var();
2415 EffectContext context(this); 2085 EffectContext context(this);
2416 EmitVariableAssignment(var, Token::ASSIGN); 2086 EmitVariableAssignment(var, Token::ASSIGN);
2417 break; 2087 break;
2418 } 2088 }
2419 case NAMED_PROPERTY: { 2089 case NAMED_PROPERTY: {
2420 __ push(r0); // Preserve value. 2090 __ Push(x0); // Preserve value.
2421 VisitForAccumulatorValue(prop->obj()); 2091 VisitForAccumulatorValue(prop->obj());
2422 __ mov(r1, r0); 2092 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2423 __ pop(r0); // Restore value. 2093 // this copy.
2424 __ mov(r2, Operand(prop->key()->AsLiteral()->handle())); 2094 __ Mov(x1, x0);
2095 __ Pop(x0); // Restore value.
2096 __ Mov(x2, Operand(prop->key()->AsLiteral()->handle()));
2425 Handle<Code> ic = is_classic_mode() 2097 Handle<Code> ic = is_classic_mode()
2426 ? isolate()->builtins()->StoreIC_Initialize() 2098 ? isolate()->builtins()->StoreIC_Initialize()
2427 : isolate()->builtins()->StoreIC_Initialize_Strict(); 2099 : isolate()->builtins()->StoreIC_Initialize_Strict();
2428 CallIC(ic); 2100 CallIC(ic);
2429 break; 2101 break;
2430 } 2102 }
2431 case KEYED_PROPERTY: { 2103 case KEYED_PROPERTY: {
2432 __ push(r0); // Preserve value. 2104 __ Push(x0); // Preserve value.
2433 VisitForStackValue(prop->obj()); 2105 VisitForStackValue(prop->obj());
2434 VisitForAccumulatorValue(prop->key()); 2106 VisitForAccumulatorValue(prop->key());
2435 __ mov(r1, r0); 2107 __ Mov(x1, x0);
2436 __ pop(r2); 2108 __ Pop(x2, x0);
2437 __ pop(r0); // Restore value.
2438 Handle<Code> ic = is_classic_mode() 2109 Handle<Code> ic = is_classic_mode()
2439 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2110 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2440 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2111 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2441 CallIC(ic); 2112 CallIC(ic);
2442 break; 2113 break;
2443 } 2114 }
2444 } 2115 }
2445 context()->Plug(r0); 2116 context()->Plug(x0);
2446 } 2117 }
2447 2118
2448 2119
2449 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2120 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2450 Token::Value op) { 2121 Token::Value op) {
2122 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2451 if (var->IsUnallocated()) { 2123 if (var->IsUnallocated()) {
2452 // Global var, const, or let. 2124 // Global var, const, or let.
2453 __ mov(r2, Operand(var->name())); 2125 __ Mov(x2, Operand(var->name()));
2454 __ ldr(r1, GlobalObjectOperand()); 2126 __ Ldr(x1, GlobalObjectMemOperand());
2455 Handle<Code> ic = is_classic_mode() 2127 Handle<Code> ic = is_classic_mode()
2456 ? isolate()->builtins()->StoreIC_Initialize() 2128 ? isolate()->builtins()->StoreIC_Initialize()
2457 : isolate()->builtins()->StoreIC_Initialize_Strict(); 2129 : isolate()->builtins()->StoreIC_Initialize_Strict();
2458 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT); 2130 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2459 2131
2460 } else if (op == Token::INIT_CONST) { 2132 } else if (op == Token::INIT_CONST) {
2461 // Const initializers need a write barrier. 2133 // Const initializers need a write barrier.
2462 ASSERT(!var->IsParameter()); // No const parameters. 2134 ASSERT(!var->IsParameter()); // No const parameters.
2463 if (var->IsStackLocal()) { 2135 if (var->IsStackLocal()) {
2464 Label skip; 2136 Label skip;
2465 __ ldr(r1, StackOperand(var)); 2137 __ Ldr(x1, StackOperand(var));
2466 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex); 2138 __ JumpIfNotRoot(x1, Heap::kTheHoleValueRootIndex, &skip);
2467 __ b(ne, &skip); 2139 __ Str(result_register(), StackOperand(var));
2468 __ str(result_register(), StackOperand(var)); 2140 __ Bind(&skip);
2469 __ bind(&skip);
2470 } else { 2141 } else {
2471 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); 2142 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2472 // Like var declarations, const declarations are hoisted to function 2143 // Like var declarations, const declarations are hoisted to function
2473 // scope. However, unlike var initializers, const initializers are 2144 // scope. However, unlike var initializers, const initializers are
2474 // able to drill a hole to that function context, even from inside a 2145 // able to drill a hole to that function context, even from inside a
2475 // 'with' context. We thus bypass the normal static scope lookup for 2146 // 'with' context. We thus bypass the normal static scope lookup for
2476 // var->IsContextSlot(). 2147 // var->IsContextSlot().
2477 __ push(r0); 2148 __ Push(x0);
2478 __ mov(r0, Operand(var->name())); 2149 __ Mov(x0, Operand(var->name()));
2479 __ Push(cp, r0); // Context and name. 2150 __ Push(cp, x0); // Context and name.
2480 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 2151 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2481 } 2152 }
2482 2153
2483 } else if (var->mode() == LET && op != Token::INIT_LET) { 2154 } else if (var->mode() == LET && op != Token::INIT_LET) {
2484 // Non-initializing assignment to let variable needs a write barrier. 2155 // Non-initializing assignment to let variable needs a write barrier.
2485 if (var->IsLookupSlot()) { 2156 if (var->IsLookupSlot()) {
2486 __ push(r0); // Value. 2157 __ Push(x0, cp); // Context, value.
2487 __ mov(r1, Operand(var->name())); 2158 __ Mov(x11, Operand(var->name()));
2488 __ mov(r0, Operand(Smi::FromInt(language_mode()))); 2159 __ Mov(x10, Operand(Smi::FromInt(language_mode())));
2489 __ Push(cp, r1, r0); // Context, name, strict mode. 2160 __ Push(x11, x10); // Strict mode, name.
2490 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2161 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2491 } else { 2162 } else {
2492 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2163 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2493 Label assign; 2164 Label assign;
2494 MemOperand location = VarOperand(var, r1); 2165 MemOperand location = VarOperand(var, x1);
2495 __ ldr(r3, location); 2166 __ Ldr(x10, location);
2496 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 2167 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2497 __ b(ne, &assign); 2168 __ Mov(x10, Operand(var->name()));
2498 __ mov(r3, Operand(var->name())); 2169 __ Push(x10);
2499 __ push(r3);
2500 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2170 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2501 // Perform the assignment. 2171 // Perform the assignment.
2502 __ bind(&assign); 2172 __ Bind(&assign);
2503 __ str(result_register(), location); 2173 __ Str(result_register(), location);
2504 if (var->IsContextSlot()) { 2174 if (var->IsContextSlot()) {
2505 // RecordWrite may destroy all its register arguments. 2175 // RecordWrite may destroy all its register arguments.
2506 __ mov(r3, result_register()); 2176 __ Mov(x10, result_register());
2507 int offset = Context::SlotOffset(var->index()); 2177 int offset = Context::SlotOffset(var->index());
2508 __ RecordWriteContextSlot( 2178 __ RecordWriteContextSlot(
2509 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 2179 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2510 } 2180 }
2511 } 2181 }
2512 2182
2513 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { 2183 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2514 // Assignment to var or initializing assignment to let/const 2184 // Assignment to var or initializing assignment to let/const
2515 // in harmony mode. 2185 // in harmony mode.
2516 if (var->IsStackAllocated() || var->IsContextSlot()) { 2186 if (var->IsStackAllocated() || var->IsContextSlot()) {
2517 MemOperand location = VarOperand(var, r1); 2187 MemOperand location = VarOperand(var, x1);
2518 if (generate_debug_code_ && op == Token::INIT_LET) { 2188 if (FLAG_debug_code && op == Token::INIT_LET) {
2519 // Check for an uninitialized let binding. 2189 __ Ldr(x10, location);
2520 __ ldr(r2, location); 2190 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2521 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2522 __ Check(eq, "Let binding re-initialization."); 2191 __ Check(eq, "Let binding re-initialization.");
2523 } 2192 }
2524 // Perform the assignment. 2193 // Perform the assignment.
2525 __ str(r0, location); 2194 __ Str(x0, location);
2526 if (var->IsContextSlot()) { 2195 if (var->IsContextSlot()) {
2527 __ mov(r3, r0); 2196 __ Mov(x10, x0);
2528 int offset = Context::SlotOffset(var->index()); 2197 int offset = Context::SlotOffset(var->index());
2529 __ RecordWriteContextSlot( 2198 __ RecordWriteContextSlot(
2530 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 2199 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2531 } 2200 }
2532 } else { 2201 } else {
2533 ASSERT(var->IsLookupSlot()); 2202 ASSERT(var->IsLookupSlot());
2534 __ push(r0); // Value. 2203 __ Mov(x11, Operand(var->name()));
2535 __ mov(r1, Operand(var->name())); 2204 __ Mov(x10, Operand(Smi::FromInt(language_mode())));
2536 __ mov(r0, Operand(Smi::FromInt(language_mode()))); 2205 // jssp[0] : mode.
2537 __ Push(cp, r1, r0); // Context, name, strict mode. 2206 // jssp[8] : name.
2207 // jssp[16] : context.
2208 // jssp[24] : value.
2209 __ Push(x0, cp, x11, x10);
2538 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2210 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2539 } 2211 }
2540 } 2212 }
2541 // Non-initializing assignments to consts are ignored. 2213 // Non-initializing assignments to consts are ignored.
2542 } 2214 }
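
The hole comparisons in EmitVariableAssignment above (the const initializer and the let assignment paths) implement the rule that a let or const context slot starts out holding a dedicated hole sentinel, and an ordinary assignment that still sees the hole must throw a ReferenceError. A minimal sketch of that idea, assuming a simplified slot representation rather than V8's real hole value:

    #include <cstdint>
    #include <stdexcept>

    // Simplified model of a context slot for a 'let' binding; V8 marks an
    // uninitialized slot with the hole value, modelled here with a flag.
    struct Slot {
      bool is_hole = true;   // freshly created binding: not yet initialized
      int64_t value = 0;
    };

    // Mirrors the non-initializing 'let' assignment path above: if the slot
    // still holds the hole, throw; otherwise store the new value.
    void AssignLet(Slot* slot, int64_t v) {
      if (slot->is_hole) {
        throw std::runtime_error("ReferenceError: assignment before declaration");
      }
      slot->value = v;
    }

    int main() {
      Slot s;
      try {
        AssignLet(&s, 1);        // before initialization: throws
      } catch (const std::runtime_error&) { /* expected */ }
      s.is_hole = false;         // the INIT_LET path clears the hole
      AssignLet(&s, 2);          // now stores normally
      return 0;
    }
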
2543 2215
2544 2216
2545 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2217 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2218 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2546 // Assignment to a property, using a named store IC. 2219 // Assignment to a property, using a named store IC.
2547 Property* prop = expr->target()->AsProperty(); 2220 Property* prop = expr->target()->AsProperty();
2548 ASSERT(prop != NULL); 2221 ASSERT(prop != NULL);
2549 ASSERT(prop->key()->AsLiteral() != NULL); 2222 ASSERT(prop->key()->AsLiteral() != NULL);
2550 2223
2551 // Record source code position before IC call. 2224 // Record source code position before IC call.
2552 SetSourcePosition(expr->position()); 2225 SetSourcePosition(expr->position());
2553 __ mov(r2, Operand(prop->key()->AsLiteral()->handle())); 2226 __ Mov(x2, Operand(prop->key()->AsLiteral()->handle()));
2554 __ pop(r1); 2227 __ Pop(x1);
2555 2228
2556 Handle<Code> ic = is_classic_mode() 2229 Handle<Code> ic = is_classic_mode()
2557 ? isolate()->builtins()->StoreIC_Initialize() 2230 ? isolate()->builtins()->StoreIC_Initialize()
2558 : isolate()->builtins()->StoreIC_Initialize_Strict(); 2231 : isolate()->builtins()->StoreIC_Initialize_Strict();
2559 CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId()); 2232 CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2560 2233
2561 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2234 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2562 context()->Plug(r0); 2235 context()->Plug(x0);
2563 } 2236 }
2564 2237
2565 2238
2566 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2239 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2240 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2567 // Assignment to a property, using a keyed store IC. 2241 // Assignment to a property, using a keyed store IC.
2568 2242
2569 // Record source code position before IC call. 2243 // Record source code position before IC call.
2570 SetSourcePosition(expr->position()); 2244 SetSourcePosition(expr->position());
2571 __ pop(r1); // Key. 2245 // TODO(all): Could we pass this in registers rather than on the stack?
2572 __ pop(r2); 2246 __ Pop(x1, x2); // Key and object holding the property.
2573 2247
2574 Handle<Code> ic = is_classic_mode() 2248 Handle<Code> ic = is_classic_mode()
2575 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2249 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2576 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2250 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2577 CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId()); 2251 CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2578 2252
2579 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2253 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2580 context()->Plug(r0); 2254 context()->Plug(x0);
2581 } 2255 }
2582 2256
2583 2257
2584 void FullCodeGenerator::VisitProperty(Property* expr) { 2258 void FullCodeGenerator::VisitProperty(Property* expr) {
2585 Comment cmnt(masm_, "[ Property"); 2259 Comment cmnt(masm_, "[ Property");
2586 Expression* key = expr->key(); 2260 Expression* key = expr->key();
2587 2261
2588 if (key->IsPropertyName()) { 2262 if (key->IsPropertyName()) {
2589 VisitForAccumulatorValue(expr->obj()); 2263 VisitForAccumulatorValue(expr->obj());
2590 EmitNamedPropertyLoad(expr); 2264 EmitNamedPropertyLoad(expr);
2591 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2265 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2592 context()->Plug(r0); 2266 context()->Plug(x0);
2593 } else { 2267 } else {
2594 VisitForStackValue(expr->obj()); 2268 VisitForStackValue(expr->obj());
2595 VisitForAccumulatorValue(expr->key()); 2269 VisitForAccumulatorValue(expr->key());
2596 __ pop(r1); 2270 __ Pop(x1);
2597 EmitKeyedPropertyLoad(expr); 2271 EmitKeyedPropertyLoad(expr);
2598 context()->Plug(r0); 2272 context()->Plug(x0);
2599 } 2273 }
2600 } 2274 }
2601 2275
2602 2276
2603 void FullCodeGenerator::CallIC(Handle<Code> code, 2277 void FullCodeGenerator::CallIC(Handle<Code> code,
2604 RelocInfo::Mode rmode, 2278 RelocInfo::Mode rmode,
2605 TypeFeedbackId ast_id) { 2279 TypeFeedbackId ast_id) {
2606 ic_total_count_++; 2280 ic_total_count_++;
2607 // All calls must have a predictable size in full-codegen code to ensure that 2281 // All calls must have a predictable size in full-codegen code to ensure that
2608 // the debugger can patch them correctly. 2282 // the debugger can patch them correctly.
2609 __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS); 2283 __ Call(code, rmode, ast_id);
2610 } 2284 }
2611 2285
2286
2612 void FullCodeGenerator::EmitCallWithIC(Call* expr, 2287 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2613 Handle<Object> name, 2288 Handle<Object> name,
2614 RelocInfo::Mode mode) { 2289 RelocInfo::Mode mode) {
2290 ASM_LOCATION("EmitCallWithIC");
2615 // Code common for calls using the IC. 2291 // Code common for calls using the IC.
2616 ZoneList<Expression*>* args = expr->arguments(); 2292 ZoneList<Expression*>* args = expr->arguments();
2617 int arg_count = args->length(); 2293 int arg_count = args->length();
2618 { PreservePositionScope scope(masm()->positions_recorder()); 2294 { PreservePositionScope scope(masm()->positions_recorder());
2619 for (int i = 0; i < arg_count; i++) { 2295 for (int i = 0; i < arg_count; i++) {
2620 VisitForStackValue(args->at(i)); 2296 VisitForStackValue(args->at(i));
2621 } 2297 }
2622 __ mov(r2, Operand(name)); 2298 __ Mov(x2, Operand(name));
2623 } 2299 }
2624 // Record source position for debugger. 2300 // Record source position for debugger.
2625 SetSourcePosition(expr->position()); 2301 SetSourcePosition(expr->position());
2626 // Call the IC initialization code. 2302 // Call the IC initialization code.
2627 Handle<Code> ic = 2303 Handle<Code> ic =
2628 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 2304 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2629 CallIC(ic, mode, expr->CallFeedbackId()); 2305 CallIC(ic, mode, expr->CallFeedbackId());
2630 RecordJSReturnSite(expr); 2306 RecordJSReturnSite(expr);
2631 // Restore context register. 2307 // Restore context register.
2632 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2308 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2633 context()->Plug(r0); 2309 context()->Plug(x0);
2634 } 2310 }
2635 2311
2636 2312
2637 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, 2313 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2638 Expression* key) { 2314 Expression* key) {
2639 // Load the key. 2315 // Load the key.
2640 VisitForAccumulatorValue(key); 2316 VisitForAccumulatorValue(key);
2317 // Load the key.
2641 2318
2642 // Swap the name of the function and the receiver on the stack to follow 2319 // Swap the name of the function and the receiver on the stack to follow
2643 // the calling convention for call ICs. 2320 // the calling convention for call ICs.
2644 __ pop(r1); 2321 __ Pop(x1);
2645 __ push(r0); 2322 __ Push(x0, x1);
2646 __ push(r1);
2647 2323
2648 // Code common for calls using the IC. 2324 // Code common for calls using the IC.
2649 ZoneList<Expression*>* args = expr->arguments(); 2325 ZoneList<Expression*>* args = expr->arguments();
2650 int arg_count = args->length(); 2326 int arg_count = args->length();
2651 { PreservePositionScope scope(masm()->positions_recorder()); 2327 { PreservePositionScope scope(masm()->positions_recorder());
2652 for (int i = 0; i < arg_count; i++) { 2328 for (int i = 0; i < arg_count; i++) {
2653 VisitForStackValue(args->at(i)); 2329 VisitForStackValue(args->at(i));
2654 } 2330 }
2655 } 2331 }
2656 // Record source position for debugger. 2332 // Record source position for debugger.
2657 SetSourcePosition(expr->position()); 2333 SetSourcePosition(expr->position());
2658 // Call the IC initialization code. 2334 // Call the IC initialization code.
2659 Handle<Code> ic = 2335 Handle<Code> ic =
2660 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count); 2336 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2661 __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key. 2337 __ Peek(x2, (arg_count + 1) * kXRegSizeInBytes); // Key.
2662 CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId()); 2338 CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
2663 RecordJSReturnSite(expr); 2339 RecordJSReturnSite(expr);
2664 // Restore context register. 2340 // Restore context register.
2665 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2341 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2666 context()->DropAndPlug(1, r0); // Drop the key still on the stack. 2342 context()->DropAndPlug(1, x0); // Drop the key still on the stack.
2667 } 2343 }
2668 2344
2669 2345
2670 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { 2346 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2671 // Code common for calls using the call stub. 2347 // Code common for calls using the call stub.
2672 ZoneList<Expression*>* args = expr->arguments(); 2348 ZoneList<Expression*>* args = expr->arguments();
2673 int arg_count = args->length(); 2349 int arg_count = args->length();
2674 { PreservePositionScope scope(masm()->positions_recorder()); 2350 { PreservePositionScope scope(masm()->positions_recorder());
2675 for (int i = 0; i < arg_count; i++) { 2351 for (int i = 0; i < arg_count; i++) {
2676 VisitForStackValue(args->at(i)); 2352 VisitForStackValue(args->at(i));
2677 } 2353 }
2678 } 2354 }
2679 // Record source position for debugger. 2355 // Record source position for debugger.
2680 SetSourcePosition(expr->position()); 2356 SetSourcePosition(expr->position());
2681 2357
2682 // Record call targets in unoptimized code. 2358 // Record call targets in unoptimized code.
2683 flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET); 2359 flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2684 Handle<Object> uninitialized = 2360 Handle<Object> uninitialized =
2685 TypeFeedbackCells::UninitializedSentinel(isolate()); 2361 TypeFeedbackCells::UninitializedSentinel(isolate());
2686 Handle<JSGlobalPropertyCell> cell = 2362 Handle<JSGlobalPropertyCell> cell =
2687 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized); 2363 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2688 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell); 2364 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2689 __ mov(r2, Operand(cell)); 2365 __ Mov(x2, Operand(cell));
2690 2366
2691 CallFunctionStub stub(arg_count, flags); 2367 CallFunctionStub stub(arg_count, flags);
2692 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2368 __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes);
2693 __ CallStub(&stub, expr->CallFeedbackId()); 2369 __ CallStub(&stub, expr->CallFeedbackId());
2694 RecordJSReturnSite(expr); 2370 RecordJSReturnSite(expr);
2695 // Restore context register. 2371 // Restore context register.
2696 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2372 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2697 context()->DropAndPlug(1, r0); 2373 context()->DropAndPlug(1, x0);
2698 } 2374 }
2699 2375
2700 2376
2701 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2377 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2702 // Push copy of the first argument or undefined if it doesn't exist. 2378 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2379 // Prepare to push a copy of the first argument or undefined if it doesn't
2380 // exist.
2703 if (arg_count > 0) { 2381 if (arg_count > 0) {
2704 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 2382 __ Peek(x10, arg_count * kXRegSizeInBytes);
2705 } else { 2383 } else {
2706 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 2384 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2707 } 2385 }
2708 __ push(r1);
2709 2386
2710 // Push the receiver of the enclosing function. 2387 // Prepare to push the receiver of the enclosing function.
2711 int receiver_offset = 2 + info_->scope()->num_parameters(); 2388 int receiver_offset = 2 + info_->scope()->num_parameters();
2712 __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize)); 2389 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize));
2713 __ push(r1);
2714 // Push the language mode.
2715 __ mov(r1, Operand(Smi::FromInt(language_mode())));
2716 __ push(r1);
2717 2390
2718 // Push the start position of the scope the call resides in. 2391 // Push.
2719 __ mov(r1, Operand(Smi::FromInt(scope()->start_position()))); 2392 __ Push(x10, x11);
2720 __ push(r1); 2393
2394 // Prepare to push the language mode.
2395 __ Mov(x10, Operand(Smi::FromInt(language_mode())));
2396 // Prepare to push the start position of the scope the call resides in.
2397 __ Mov(x11, Operand(Smi::FromInt(scope()->start_position())));
2398
2399 // Push.
2400 __ Push(x10, x11);
2721 2401
2722 // Do the runtime call. 2402 // Do the runtime call.
2723 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); 2403 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2724 } 2404 }
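A reading aid for the reordered pushes above, assuming the A64 MacroAssembler's two-register Push stores its first operand deeper on the stack (so each pair can be emitted as a single stp):

// Arguments to Runtime::kResolvePossiblyDirectEval (5 in total), deepest first:
//   copy of the function being called         (pushed by VisitCall before this helper)
//   copy of the first argument, or undefined  (x10, first Push pair)
//   receiver of the enclosing function        (x11, first Push pair)
//   language mode, as a smi                   (x10, second Push pair)
//   start position of the scope, as a smi     (x11, second Push pair)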
2725 2405
2726 2406
2727 void FullCodeGenerator::VisitCall(Call* expr) { 2407 void FullCodeGenerator::VisitCall(Call* expr) {
2728 #ifdef DEBUG 2408 #ifdef DEBUG
2729 // We want to verify that RecordJSReturnSite gets called on all paths 2409 // We want to verify that RecordJSReturnSite gets called on all paths
2730 // through this function. Avoid early returns. 2410 // through this function. Avoid early returns.
2731 expr->return_is_recorded_ = false; 2411 expr->return_is_recorded_ = false;
2732 #endif 2412 #endif
2733 2413
2734 Comment cmnt(masm_, "[ Call"); 2414 Comment cmnt(masm_, "[ Call");
2735 Expression* callee = expr->expression(); 2415 Expression* callee = expr->expression();
2736 VariableProxy* proxy = callee->AsVariableProxy(); 2416 VariableProxy* proxy = callee->AsVariableProxy();
2737 Property* property = callee->AsProperty(); 2417 Property* property = callee->AsProperty();
2738 2418
2739 if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) { 2419 if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
2740 // In a call to eval, we first call %ResolvePossiblyDirectEval to 2420 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2741 // resolve the function we need to call and the receiver of the 2421 // resolve the function we need to call and the receiver of the
2742 // call. Then we call the resolved function using the given 2422 // call. Then we call the resolved function using the given
2743 // arguments. 2423 // arguments.
2744 ZoneList<Expression*>* args = expr->arguments(); 2424 ZoneList<Expression*>* args = expr->arguments();
2745 int arg_count = args->length(); 2425 int arg_count = args->length();
2746 2426
2747 { PreservePositionScope pos_scope(masm()->positions_recorder()); 2427 {
2428 PreservePositionScope pos_scope(masm()->positions_recorder());
2748 VisitForStackValue(callee); 2429 VisitForStackValue(callee);
2749 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2430 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2750 __ push(r2); // Reserved receiver slot. 2431 __ Push(x10); // Reserved receiver slot.
2751 2432
2752 // Push the arguments. 2433 // Push the arguments.
2753 for (int i = 0; i < arg_count; i++) { 2434 for (int i = 0; i < arg_count; i++) {
2754 VisitForStackValue(args->at(i)); 2435 VisitForStackValue(args->at(i));
2755 } 2436 }
2756 2437
2757 // Push a copy of the function (found below the arguments) and 2438 // Push a copy of the function (found below the arguments) and
2758 // resolve eval. 2439 // resolve eval.
2759 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2440 __ Peek(x10, (arg_count + 1) * kPointerSize);
2760 __ push(r1); 2441 __ Push(x10);
2761 EmitResolvePossiblyDirectEval(arg_count); 2442 EmitResolvePossiblyDirectEval(arg_count);
2762 2443
2763 // The runtime call returns a pair of values in r0 (function) and 2444 // The runtime call returns a pair of values in x0 (function) and
2764 // r1 (receiver). Touch up the stack with the right values. 2445 // x1 (receiver). Touch up the stack with the right values.
2765 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2446 // TODO(jbramley): Consider adding PokePair.
2766 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); 2447 __ Stp(x1, x0, MemOperand(jssp, arg_count * kPointerSize));
2767 } 2448 }
2768 2449
2769 // Record source position for debugger. 2450 // Record source position for debugger.
2770 SetSourcePosition(expr->position()); 2451 SetSourcePosition(expr->position());
2452
2453 // Call the evaluated function.
2771 CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT); 2454 CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2772 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2455 __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes);
2773 __ CallStub(&stub); 2456 __ CallStub(&stub);
2774 RecordJSReturnSite(expr); 2457 RecordJSReturnSite(expr);
2775 // Restore context register. 2458 // Restore context register.
2776 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2459 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2777 context()->DropAndPlug(1, r0); 2460 context()->DropAndPlug(1, x0);
2461
2778 } else if (proxy != NULL && proxy->var()->IsUnallocated()) { 2462 } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2779 // Push global object as receiver for the call IC. 2463 // Push global object as receiver for the call IC.
2780 __ ldr(r0, GlobalObjectOperand()); 2464 __ Ldr(x10, GlobalObjectMemOperand());
2781 __ push(r0); 2465 __ Push(x10);
2782 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT); 2466 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2467
2783 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 2468 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2784 // Call to a lookup slot (dynamically introduced variable). 2469 // Call to a lookup slot (dynamically introduced variable).
2785 Label slow, done; 2470 Label slow, done;
2786 2471
2787 { PreservePositionScope scope(masm()->positions_recorder()); 2472 { PreservePositionScope scope(masm()->positions_recorder());
2788 // Generate code for loading from variables potentially shadowed 2473 // Generate code for loading from variables potentially shadowed
2789 // by eval-introduced variables. 2474 // by eval-introduced variables.
2790 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); 2475 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2791 } 2476 }
2792 2477
2793 __ bind(&slow); 2478 __ Bind(&slow);
2794 // Call the runtime to find the function to call (returned in r0) 2479 // Call the runtime to find the function to call (returned in x0)
2795 // and the object holding it (returned in edx). 2480 // and the object holding it (returned in x1).
2796 __ push(context_register()); 2481 __ Push(context_register());
2797 __ mov(r2, Operand(proxy->name())); 2482 __ Mov(x10, Operand(proxy->name()));
2798 __ push(r2); 2483 __ Push(x10);
2799 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2484 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2800 __ Push(r0, r1); // Function, receiver. 2485 __ Push(x0, x1); // Receiver, function.
2801 2486
2802 // If fast case code has been generated, emit code to push the 2487 // If fast case code has been generated, emit code to push the
2803 // function and receiver and have the slow path jump around this 2488 // function and receiver and have the slow path jump around this
2804 // code. 2489 // code.
2805 if (done.is_linked()) { 2490 if (done.is_linked()) {
2806 Label call; 2491 Label call;
2807 __ b(&call); 2492 __ B(&call);
2808 __ bind(&done); 2493 __ Bind(&done);
2809 // Push function. 2494 // Push function.
2810 __ push(r0); 2495 __ Push(x0);
2811 // The receiver is implicitly the global receiver. Indicate this 2496 // The receiver is implicitly the global receiver. Indicate this
2812 // by passing the hole to the call function stub. 2497 // by passing the hole to the call function stub.
2813 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex); 2498 __ LoadRoot(x1, Heap::kTheHoleValueRootIndex);
2814 __ push(r1); 2499 __ Push(x1);
2815 __ bind(&call); 2500 __ Bind(&call);
2816 } 2501 }
2817 2502
2818 // The receiver is either the global receiver or an object found 2503 // The receiver is either the global receiver or an object found
2819 // by LoadContextSlot. That object could be the hole if the 2504 // by LoadContextSlot. That object could be the hole if the
2820 // receiver is implicitly the global object. 2505 // receiver is implicitly the global object.
2821 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); 2506 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2822 } else if (property != NULL) { 2507 } else if (property != NULL) {
2823 { PreservePositionScope scope(masm()->positions_recorder()); 2508 { PreservePositionScope scope(masm()->positions_recorder());
2824 VisitForStackValue(property->obj()); 2509 VisitForStackValue(property->obj());
2825 } 2510 }
2826 if (property->key()->IsPropertyName()) { 2511 if (property->key()->IsPropertyName()) {
2827 EmitCallWithIC(expr, 2512 EmitCallWithIC(expr,
2828 property->key()->AsLiteral()->handle(), 2513 property->key()->AsLiteral()->handle(),
2829 RelocInfo::CODE_TARGET); 2514 RelocInfo::CODE_TARGET);
2830 } else { 2515 } else {
2831 EmitKeyedCallWithIC(expr, property->key()); 2516 EmitKeyedCallWithIC(expr, property->key());
2832 } 2517 }
2518
2833 } else { 2519 } else {
2834 // Call to an arbitrary expression not handled specially above. 2520 // Call to an arbitrary expression not handled specially above.
2835 { PreservePositionScope scope(masm()->positions_recorder()); 2521 { PreservePositionScope scope(masm()->positions_recorder());
2836 VisitForStackValue(callee); 2522 VisitForStackValue(callee);
2837 } 2523 }
2838 // Load global receiver object. 2524 // Load global receiver object.
2839 __ ldr(r1, GlobalObjectOperand()); 2525 __ Ldr(x1, GlobalObjectMemOperand());
2840 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset)); 2526 __ Ldr(x1, FieldMemOperand(x1, GlobalObject::kGlobalReceiverOffset));
2841 __ push(r1); 2527 __ Push(x1);
2842 // Emit function call. 2528 // Emit function call.
2843 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); 2529 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2844 } 2530 }
2845 2531
2846 #ifdef DEBUG 2532 #ifdef DEBUG
2847 // RecordJSReturnSite should have been called. 2533 // RecordJSReturnSite should have been called.
2848 ASSERT(expr->return_is_recorded_); 2534 ASSERT(expr->return_is_recorded_);
2849 #endif 2535 #endif
2850 } 2536 }
2851 2537
(...skipping 13 matching lines...)
2865 ZoneList<Expression*>* args = expr->arguments(); 2551 ZoneList<Expression*>* args = expr->arguments();
2866 int arg_count = args->length(); 2552 int arg_count = args->length();
2867 for (int i = 0; i < arg_count; i++) { 2553 for (int i = 0; i < arg_count; i++) {
2868 VisitForStackValue(args->at(i)); 2554 VisitForStackValue(args->at(i));
2869 } 2555 }
2870 2556
2871 // Call the construct call builtin that handles allocation and 2557 // Call the construct call builtin that handles allocation and
2872 // constructor invocation. 2558 // constructor invocation.
2873 SetSourcePosition(expr->position()); 2559 SetSourcePosition(expr->position());
2874 2560
2875 // Load function and argument count into r1 and r0. 2561 // Load function and argument count into x1 and x0.
2876 __ mov(r0, Operand(arg_count)); 2562 __ Mov(x0, arg_count);
2877 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 2563 __ Peek(x1, arg_count * kXRegSizeInBytes);
2878 2564
2879 // Record call targets in unoptimized code. 2565 // Record call targets in unoptimized code.
2880 Handle<Object> uninitialized = 2566 Handle<Object> uninitialized =
2881 TypeFeedbackCells::UninitializedSentinel(isolate()); 2567 TypeFeedbackCells::UninitializedSentinel(isolate());
2882 Handle<JSGlobalPropertyCell> cell = 2568 Handle<JSGlobalPropertyCell> cell =
2883 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized); 2569 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2884 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell); 2570 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2885 __ mov(r2, Operand(cell)); 2571 __ Mov(x2, Operand(cell));
2886 2572
2887 CallConstructStub stub(RECORD_CALL_TARGET); 2573 CallConstructStub stub(RECORD_CALL_TARGET);
2888 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); 2574 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2889 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2575 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2890 context()->Plug(r0); 2576 context()->Plug(x0);
2891 } 2577 }
2892 2578
2893 2579
2894 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2580 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2895 ZoneList<Expression*>* args = expr->arguments(); 2581 ZoneList<Expression*>* args = expr->arguments();
2896 ASSERT(args->length() == 1); 2582 ASSERT(args->length() == 1);
2897 2583
2898 VisitForAccumulatorValue(args->at(0)); 2584 VisitForAccumulatorValue(args->at(0));
2899 2585
2900 Label materialize_true, materialize_false; 2586 Label materialize_true, materialize_false;
2901 Label* if_true = NULL; 2587 Label* if_true = NULL;
2902 Label* if_false = NULL; 2588 Label* if_false = NULL;
2903 Label* fall_through = NULL; 2589 Label* fall_through = NULL;
2904 context()->PrepareTest(&materialize_true, &materialize_false, 2590 context()->PrepareTest(&materialize_true, &materialize_false,
2905 &if_true, &if_false, &fall_through); 2591 &if_true, &if_false, &fall_through);
2906 2592
2907 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2593 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2908 __ SmiTst(r0); 2594 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
2909 Split(eq, if_true, if_false, fall_through);
2910 2595
2911 context()->Plug(if_true, if_false); 2596 context()->Plug(if_true, if_false);
2912 } 2597 }
2913 2598
2914 2599
2915 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 2600 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2916 ZoneList<Expression*>* args = expr->arguments(); 2601 ZoneList<Expression*>* args = expr->arguments();
2917 ASSERT(args->length() == 1); 2602 ASSERT(args->length() == 1);
2918 2603
2919 VisitForAccumulatorValue(args->at(0)); 2604 VisitForAccumulatorValue(args->at(0));
2920 2605
2921 Label materialize_true, materialize_false; 2606 Label materialize_true, materialize_false;
2922 Label* if_true = NULL; 2607 Label* if_true = NULL;
2923 Label* if_false = NULL; 2608 Label* if_false = NULL;
2924 Label* fall_through = NULL; 2609 Label* fall_through = NULL;
2925 context()->PrepareTest(&materialize_true, &materialize_false, 2610 context()->PrepareTest(&materialize_true, &materialize_false,
2926 &if_true, &if_false, &fall_through); 2611 &if_true, &if_false, &fall_through);
2927 2612
2928 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2613 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2929 __ NonNegativeSmiTst(r0); 2614 __ TestAndSplit(x0, kSmiTagMask | (0x80000000UL << kSmiShift), if_true,
2930 Split(eq, if_true, if_false, fall_through); 2615 if_false, fall_through);
2931 2616
2932 context()->Plug(if_true, if_false); 2617 context()->Plug(if_true, if_false);
2933 } 2618 }
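The single TestAndSplit above folds the ARM version's tag test and sign test into one mask; a sketch of what that mask evaluates to, assuming the A64 smi layout (kSmiTag == 0, kSmiTagMask == 1, kSmiShift == 32):

//   kSmiTagMask | (0x80000000UL << kSmiShift)
//       == (1UL << 0) | (1UL << 63)   // smi tag bit | sign bit of the smi payload
// TestAndSplit presumably branches to if_true only when (x0 & mask) == 0,
// i.e. when x0 is tagged as a smi and its 32-bit payload is non-negative.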
2934 2619
2935 2620
2936 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 2621 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2937 ZoneList<Expression*>* args = expr->arguments(); 2622 ZoneList<Expression*>* args = expr->arguments();
2938 ASSERT(args->length() == 1); 2623 ASSERT(args->length() == 1);
2939 2624
2940 VisitForAccumulatorValue(args->at(0)); 2625 VisitForAccumulatorValue(args->at(0));
2941 2626
2942 Label materialize_true, materialize_false; 2627 Label materialize_true, materialize_false;
2943 Label* if_true = NULL; 2628 Label* if_true = NULL;
2944 Label* if_false = NULL; 2629 Label* if_false = NULL;
2945 Label* fall_through = NULL; 2630 Label* fall_through = NULL;
2946 context()->PrepareTest(&materialize_true, &materialize_false, 2631 context()->PrepareTest(&materialize_true, &materialize_false,
2947 &if_true, &if_false, &fall_through); 2632 &if_true, &if_false, &fall_through);
2948 2633
2949 __ JumpIfSmi(r0, if_false); 2634 __ JumpIfSmi(x0, if_false);
2950 __ LoadRoot(ip, Heap::kNullValueRootIndex); 2635 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
2951 __ cmp(r0, ip); 2636 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
2952 __ b(eq, if_true);
2953 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2954 // Undetectable objects behave like undefined when tested with typeof. 2637 // Undetectable objects behave like undefined when tested with typeof.
2955 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); 2638 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
2956 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 2639 __ Tbnz(x11, Map::kIsUndetectable, if_false);
2957 __ b(ne, if_false); 2640 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
2958 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); 2641 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2959 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 2642 __ B(lt, if_false);
2960 __ b(lt, if_false); 2643 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2961 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2962 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2644 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2963 Split(le, if_true, if_false, fall_through); 2645 Split(le, if_true, if_false, fall_through);
2964 2646
2965 context()->Plug(if_true, if_false); 2647 context()->Plug(if_true, if_false);
2966 } 2648 }
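One detail for readers coming from the ARM version of the undetectable check: Tbnz takes a bit index, not a mask.

//   __ Tbnz(x11, Map::kIsUndetectable, if_false);
// tests bit number Map::kIsUndetectable of x11 and branches if it is set,
// replacing the old "tst r1, 1 << Map::kIsUndetectable; b ne" pair.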
2967 2649
2968 2650
2969 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 2651 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2970 ZoneList<Expression*>* args = expr->arguments(); 2652 ZoneList<Expression*>* args = expr->arguments();
2971 ASSERT(args->length() == 1); 2653 ASSERT(args->length() == 1);
2972 2654
2973 VisitForAccumulatorValue(args->at(0)); 2655 VisitForAccumulatorValue(args->at(0));
2974 2656
2975 Label materialize_true, materialize_false; 2657 Label materialize_true, materialize_false;
2976 Label* if_true = NULL; 2658 Label* if_true = NULL;
2977 Label* if_false = NULL; 2659 Label* if_false = NULL;
2978 Label* fall_through = NULL; 2660 Label* fall_through = NULL;
2979 context()->PrepareTest(&materialize_true, &materialize_false, 2661 context()->PrepareTest(&materialize_true, &materialize_false,
2980 &if_true, &if_false, &fall_through); 2662 &if_true, &if_false, &fall_through);
2981 2663
2982 __ JumpIfSmi(r0, if_false); 2664 __ JumpIfSmi(x0, if_false);
2983 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 2665 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
2984 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2666 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2985 Split(ge, if_true, if_false, fall_through); 2667 Split(ge, if_true, if_false, fall_through);
2986 2668
2987 context()->Plug(if_true, if_false); 2669 context()->Plug(if_true, if_false);
2988 } 2670 }
2989 2671
2990 2672
2991 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 2673 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2674 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
2992 ZoneList<Expression*>* args = expr->arguments(); 2675 ZoneList<Expression*>* args = expr->arguments();
2993 ASSERT(args->length() == 1); 2676 ASSERT(args->length() == 1);
2994 2677
2995 VisitForAccumulatorValue(args->at(0)); 2678 VisitForAccumulatorValue(args->at(0));
2996 2679
2997 Label materialize_true, materialize_false; 2680 Label materialize_true, materialize_false;
2998 Label* if_true = NULL; 2681 Label* if_true = NULL;
2999 Label* if_false = NULL; 2682 Label* if_false = NULL;
3000 Label* fall_through = NULL; 2683 Label* fall_through = NULL;
3001 context()->PrepareTest(&materialize_true, &materialize_false, 2684 context()->PrepareTest(&materialize_true, &materialize_false,
3002 &if_true, &if_false, &fall_through); 2685 &if_true, &if_false, &fall_through);
3003 2686
3004 __ JumpIfSmi(r0, if_false); 2687 __ JumpIfSmi(x0, if_false);
3005 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2688 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3006 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 2689 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3007 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 2690 __ Tst(x11, 1 << Map::kIsUndetectable);
3008 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2691 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3009 Split(ne, if_true, if_false, fall_through); 2692 Split(ne, if_true, if_false, fall_through);
3010 2693
3011 context()->Plug(if_true, if_false); 2694 context()->Plug(if_true, if_false);
3012 } 2695 }
3013 2696
3014 2697
3015 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 2698 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3016 CallRuntime* expr) { 2699 CallRuntime* expr) {
3017 ZoneList<Expression*>* args = expr->arguments(); 2700 ZoneList<Expression*>* args = expr->arguments();
3018 ASSERT(args->length() == 1); 2701 ASSERT(args->length() == 1);
3019
3020 VisitForAccumulatorValue(args->at(0)); 2702 VisitForAccumulatorValue(args->at(0));
3021 2703
3022 Label materialize_true, materialize_false; 2704 Label materialize_true, materialize_false;
3023 Label* if_true = NULL; 2705 Label* if_true = NULL;
3024 Label* if_false = NULL; 2706 Label* if_false = NULL;
3025 Label* fall_through = NULL; 2707 Label* fall_through = NULL;
3026 context()->PrepareTest(&materialize_true, &materialize_false, 2708 context()->PrepareTest(&materialize_true, &materialize_false,
3027 &if_true, &if_false, &fall_through); 2709 &if_true, &if_false, &fall_through);
3028 2710
3029 __ AssertNotSmi(r0); 2711 Register object = x0;
2712 __ AssertNotSmi(object);
3030 2713
3031 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2714 Register map = x10;
3032 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); 2715 Register bitfield2 = x11;
3033 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 2716 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
3034 __ b(ne, if_true); 2717 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
2718 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, if_true);
3035 2719
3036 // Check for fast case object. Generate false result for slow case object. 2720 // Check for fast case object. Generate false result for slow case object.
3037 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 2721 Register props = x12;
3038 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 2722 Register props_map = x12;
3039 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 2723 Register hash_table_map = x13;
3040 __ cmp(r2, ip); 2724 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
3041 __ b(eq, if_false); 2725 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
2726 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
2727 __ Cmp(props_map, hash_table_map);
2728 __ B(eq, if_false);
3042 2729
3043 // Look for valueOf name in the descriptor array, and indicate false if 2730 // Look for valueOf name in the descriptor array, and indicate false if found.
3044 // found. Since we omit an enumeration index check, if it is added via a 2731 // Since we omit an enumeration index check, if it is added via a transition
3045 // transition that shares its descriptor array, this is a false positive. 2732 // that shares its descriptor array, this is a false positive.
3046 Label entry, loop, done; 2733 Label loop, done;
3047 2734
3048 // Skip loop if no descriptors are valid. 2735 // Skip loop if no descriptors are valid.
3049 __ NumberOfOwnDescriptors(r3, r1); 2736 Register descriptors = x12;
3050 __ cmp(r3, Operand::Zero()); 2737 Register descriptors_length = x13;
3051 __ b(eq, &done); 2738 __ NumberOfOwnDescriptors(descriptors_length, map);
2739 __ Cbz(descriptors_length, &done);
3052 2740
3053 __ LoadInstanceDescriptors(r1, r4); 2741 __ LoadInstanceDescriptors(map, descriptors);
3054 // r4: descriptor array. 2742
3055 // r3: valid entries in the descriptor array. 2743 // Calculate the end of the descriptor array.
3056 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); 2744 Register descriptors_end = x14;
3057 __ mul(r3, r3, ip); 2745 __ Mov(x15, DescriptorArray::kDescriptorSize);
2746 __ Mul(descriptors_length, descriptors_length, x15);
3058 // Calculate location of the first key name. 2747 // Calculate location of the first key name.
3059 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); 2748 __ Add(descriptors, descriptors,
2749 DescriptorArray::kFirstOffset - kHeapObjectTag);
3060 // Calculate the end of the descriptor array. 2750 // Calculate the end of the descriptor array.
3061 __ mov(r2, r4); 2751 __ Add(descriptors_end, descriptors,
3062 __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3)); 2752 Operand(descriptors_length, LSL, kPointerSizeLog2));
3063 2753
3064 // Loop through all the keys in the descriptor array. If one of these is the 2754 // Loop through all the keys in the descriptor array. If one of these is the
3065 // string "valueOf" the result is false. 2755 // string "valueOf" the result is false.
3066 // The use of ip to store the valueOf string assumes that it is not otherwise 2756 // TODO(all): optimise this loop to combine the add and ldr into an
3067 // used in the loop below. 2757 // addressing mode.
3068 __ mov(ip, Operand(isolate()->factory()->value_of_string())); 2758 Register valueof_string = x1;
3069 __ jmp(&entry); 2759 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
3070 __ bind(&loop); 2760 __ Bind(&loop);
3071 __ ldr(r3, MemOperand(r4, 0)); 2761 __ Ldr(x15, MemOperand(descriptors));
3072 __ cmp(r3, ip); 2762 __ Cmp(x15, valueof_string);
3073 __ b(eq, if_false); 2763 __ B(eq, if_false);
3074 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); 2764 __ Add(descriptors, descriptors,
3075 __ bind(&entry); 2765 DescriptorArray::kDescriptorSize * kPointerSize);
3076 __ cmp(r4, Operand(r2)); 2766 __ Cmp(descriptors, descriptors_end);
3077 __ b(ne, &loop); 2767 __ B(ne, &loop);
3078 2768
3079 __ bind(&done); 2769 __ Bind(&done);
3080 // If a valueOf property is not found on the object, check that its 2770 // If a valueOf property is not found on the object, check that its prototype
3081 // prototype is the un-modified String prototype. If not, the result is false. 2771 // is the unmodified String prototype. If not, the result is false.
3082 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); 2772 Register prototype = x1;
3083 __ JumpIfSmi(r2, if_false); 2773 Register global_idx = x2;
3084 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 2774 Register native_context = x2;
3085 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 2775 Register string_proto = x3;
3086 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); 2776 Register proto_map = x4;
3087 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); 2777 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
3088 __ cmp(r2, r3); 2778 __ JumpIfSmi(prototype, if_false);
3089 __ b(ne, if_false); 2779 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
2780 __ Ldr(global_idx, GlobalObjectMemOperand());
2781 __ Ldr(native_context,
2782 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
2783 __ Ldr(string_proto,
2784 ContextMemOperand(native_context,
2785 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2786 __ Cmp(proto_map, string_proto);
2787 __ B(ne, if_false);
3090 2788
3091 // Set the bit in the map to indicate that it has been checked safe for 2789 // Set the bit in the map to indicate that it has been checked safe for
3092 // default valueOf and set true result. 2790 // default valueOf and set true result.
3093 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 2791 __ Orr(bitfield2, bitfield2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3094 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 2792 __ Strb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
3095 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 2793 __ B(if_true);
3096 __ jmp(if_true);
3097 2794
3098 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2795 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3099 context()->Plug(if_true, if_false); 2796 context()->Plug(if_true, if_false);
3100 } 2797 }
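A sketch of the loop bounds computed above, using the register names introduced in this patch (derived from the code, not additional behaviour):

//   descriptors_length = NumberOfOwnDescriptors(map) * DescriptorArray::kDescriptorSize
//   descriptors        = instance descriptors + DescriptorArray::kFirstOffset - kHeapObjectTag
//   descriptors_end    = descriptors + descriptors_length * kPointerSize
// so the loop inspects one key slot per descriptor entry, advancing by
// DescriptorArray::kDescriptorSize * kPointerSize bytes per iteration.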
3101 2798
3102 2799
3103 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 2800 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3104 ZoneList<Expression*>* args = expr->arguments(); 2801 ZoneList<Expression*>* args = expr->arguments();
3105 ASSERT(args->length() == 1); 2802 ASSERT(args->length() == 1);
3106 2803
3107 VisitForAccumulatorValue(args->at(0)); 2804 VisitForAccumulatorValue(args->at(0));
3108 2805
3109 Label materialize_true, materialize_false; 2806 Label materialize_true, materialize_false;
3110 Label* if_true = NULL; 2807 Label* if_true = NULL;
3111 Label* if_false = NULL; 2808 Label* if_false = NULL;
3112 Label* fall_through = NULL; 2809 Label* fall_through = NULL;
3113 context()->PrepareTest(&materialize_true, &materialize_false, 2810 context()->PrepareTest(&materialize_true, &materialize_false,
3114 &if_true, &if_false, &fall_through); 2811 &if_true, &if_false, &fall_through);
3115 2812
3116 __ JumpIfSmi(r0, if_false); 2813 __ JumpIfSmi(x0, if_false);
3117 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 2814 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
3118 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2815 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3119 Split(eq, if_true, if_false, fall_through); 2816 Split(eq, if_true, if_false, fall_through);
3120 2817
3121 context()->Plug(if_true, if_false); 2818 context()->Plug(if_true, if_false);
3122 } 2819 }
3123 2820
3124 2821
3125 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 2822 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3126 ZoneList<Expression*>* args = expr->arguments(); 2823 ZoneList<Expression*>* args = expr->arguments();
3127 ASSERT(args->length() == 1); 2824 ASSERT(args->length() == 1);
3128 2825
3129 VisitForAccumulatorValue(args->at(0)); 2826 VisitForAccumulatorValue(args->at(0));
3130 2827
3131 Label materialize_true, materialize_false; 2828 Label materialize_true, materialize_false;
3132 Label* if_true = NULL; 2829 Label* if_true = NULL;
3133 Label* if_false = NULL; 2830 Label* if_false = NULL;
3134 Label* fall_through = NULL; 2831 Label* fall_through = NULL;
3135 context()->PrepareTest(&materialize_true, &materialize_false, 2832 context()->PrepareTest(&materialize_true, &materialize_false,
3136 &if_true, &if_false, &fall_through); 2833 &if_true, &if_false, &fall_through);
3137 2834
3138 __ JumpIfSmi(r0, if_false); 2835 __ JumpIfSmi(x0, if_false);
3139 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 2836 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
3140 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2837 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3141 Split(eq, if_true, if_false, fall_through); 2838 Split(eq, if_true, if_false, fall_through);
3142 2839
3143 context()->Plug(if_true, if_false); 2840 context()->Plug(if_true, if_false);
3144 } 2841 }
3145 2842
3146 2843
3147 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 2844 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3148 ZoneList<Expression*>* args = expr->arguments(); 2845 ZoneList<Expression*>* args = expr->arguments();
3149 ASSERT(args->length() == 1); 2846 ASSERT(args->length() == 1);
3150 2847
3151 VisitForAccumulatorValue(args->at(0)); 2848 VisitForAccumulatorValue(args->at(0));
3152 2849
3153 Label materialize_true, materialize_false; 2850 Label materialize_true, materialize_false;
3154 Label* if_true = NULL; 2851 Label* if_true = NULL;
3155 Label* if_false = NULL; 2852 Label* if_false = NULL;
3156 Label* fall_through = NULL; 2853 Label* fall_through = NULL;
3157 context()->PrepareTest(&materialize_true, &materialize_false, 2854 context()->PrepareTest(&materialize_true, &materialize_false,
3158 &if_true, &if_false, &fall_through); 2855 &if_true, &if_false, &fall_through);
3159 2856
3160 __ JumpIfSmi(r0, if_false); 2857 __ JumpIfSmi(x0, if_false);
3161 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 2858 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3162 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2859 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3163 Split(eq, if_true, if_false, fall_through); 2860 Split(eq, if_true, if_false, fall_through);
3164 2861
3165 context()->Plug(if_true, if_false); 2862 context()->Plug(if_true, if_false);
3166 } 2863 }
3167 2864
3168 2865
3169 2866
3170 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 2867 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3171 ASSERT(expr->arguments()->length() == 0); 2868 ASSERT(expr->arguments()->length() == 0);
3172 2869
3173 Label materialize_true, materialize_false; 2870 Label materialize_true, materialize_false;
3174 Label* if_true = NULL; 2871 Label* if_true = NULL;
3175 Label* if_false = NULL; 2872 Label* if_false = NULL;
3176 Label* fall_through = NULL; 2873 Label* fall_through = NULL;
3177 context()->PrepareTest(&materialize_true, &materialize_false, 2874 context()->PrepareTest(&materialize_true, &materialize_false,
3178 &if_true, &if_false, &fall_through); 2875 &if_true, &if_false, &fall_through);
3179 2876
3180 // Get the frame pointer for the calling frame. 2877 // Get the frame pointer for the calling frame.
3181 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2878 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3182 2879
3183 // Skip the arguments adaptor frame if it exists. 2880 // Skip the arguments adaptor frame if it exists.
3184 Label check_frame_marker; 2881 Label check_frame_marker;
3185 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); 2882 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
3186 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2883 __ Cmp(x1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3187 __ b(ne, &check_frame_marker); 2884 __ B(ne, &check_frame_marker);
3188 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset)); 2885 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
3189 2886
3190 // Check the marker in the calling frame. 2887 // Check the marker in the calling frame.
3191 __ bind(&check_frame_marker); 2888 __ Bind(&check_frame_marker);
3192 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); 2889 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
3193 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 2890 __ Cmp(x1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
3194 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2891 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3195 Split(eq, if_true, if_false, fall_through); 2892 Split(eq, if_true, if_false, fall_through);
3196 2893
3197 context()->Plug(if_true, if_false); 2894 context()->Plug(if_true, if_false);
3198 } 2895 }
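In pseudo-C, the frame walk above amounts to the following (a sketch only; the offsets are the StandardFrameConstants used in the code):

//   caller_fp = Memory[fp + kCallerFPOffset];
//   if (Memory[caller_fp + kContextOffset] == Smi(ARGUMENTS_ADAPTOR))
//     caller_fp = Memory[caller_fp + kCallerFPOffset];   // skip the adaptor frame
//   result = (Memory[caller_fp + kMarkerOffset] == Smi(CONSTRUCT));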
3199 2896
3200 2897
3201 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 2898 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3202 ZoneList<Expression*>* args = expr->arguments(); 2899 ZoneList<Expression*>* args = expr->arguments();
3203 ASSERT(args->length() == 2); 2900 ASSERT(args->length() == 2);
3204 2901
3205 // Load the two objects into registers and perform the comparison. 2902 // Load the two objects into registers and perform the comparison.
3206 VisitForStackValue(args->at(0)); 2903 VisitForStackValue(args->at(0));
3207 VisitForAccumulatorValue(args->at(1)); 2904 VisitForAccumulatorValue(args->at(1));
3208 2905
3209 Label materialize_true, materialize_false; 2906 Label materialize_true, materialize_false;
3210 Label* if_true = NULL; 2907 Label* if_true = NULL;
3211 Label* if_false = NULL; 2908 Label* if_false = NULL;
3212 Label* fall_through = NULL; 2909 Label* fall_through = NULL;
3213 context()->PrepareTest(&materialize_true, &materialize_false, 2910 context()->PrepareTest(&materialize_true, &materialize_false,
3214 &if_true, &if_false, &fall_through); 2911 &if_true, &if_false, &fall_through);
3215 2912
3216 __ pop(r1); 2913 __ Pop(x1);
3217 __ cmp(r0, r1); 2914 __ Cmp(x0, x1);
3218 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2915 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3219 Split(eq, if_true, if_false, fall_through); 2916 Split(eq, if_true, if_false, fall_through);
3220 2917
3221 context()->Plug(if_true, if_false); 2918 context()->Plug(if_true, if_false);
3222 } 2919 }
3223 2920
3224 2921
3225 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 2922 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3226 ZoneList<Expression*>* args = expr->arguments(); 2923 ZoneList<Expression*>* args = expr->arguments();
3227 ASSERT(args->length() == 1); 2924 ASSERT(args->length() == 1);
3228 2925
3229 // ArgumentsAccessStub expects the key in edx and the formal 2926 // ArgumentsAccessStub expects the key in x1.
3230 // parameter count in r0.
3231 VisitForAccumulatorValue(args->at(0)); 2927 VisitForAccumulatorValue(args->at(0));
3232 __ mov(r1, r0); 2928 __ Mov(x1, x0);
3233 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 2929 __ Mov(x0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3234 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); 2930 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3235 __ CallStub(&stub); 2931 __ CallStub(&stub);
3236 context()->Plug(r0); 2932 context()->Plug(x0);
3237 } 2933 }
3238 2934
3239 2935
3240 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 2936 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3241 ASSERT(expr->arguments()->length() == 0); 2937 ASSERT(expr->arguments()->length() == 0);
3242 Label exit; 2938 Label exit;
3243 // Get the number of formal parameters. 2939 // Get the number of formal parameters.
3244 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 2940 __ Mov(x0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3245 2941
3246 // Check if the calling frame is an arguments adaptor frame. 2942 // Check if the calling frame is an arguments adaptor frame.
3247 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 2943 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3248 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); 2944 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3249 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 2945 __ Cmp(x13, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3250 __ b(ne, &exit); 2946 __ B(ne, &exit);
3251 2947
3252 // Arguments adaptor case: Read the arguments length from the 2948 // Arguments adaptor case: Read the arguments length from the
3253 // adaptor frame. 2949 // adaptor frame.
3254 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2950 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3255 2951
3256 __ bind(&exit); 2952 __ Bind(&exit);
3257 context()->Plug(r0); 2953 context()->Plug(x0);
3258 } 2954 }
3259 2955
3260 2956
3261 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 2957 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2958 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3262 ZoneList<Expression*>* args = expr->arguments(); 2959 ZoneList<Expression*>* args = expr->arguments();
3263 ASSERT(args->length() == 1); 2960 ASSERT(args->length() == 1);
3264 Label done, null, function, non_function_constructor; 2961 Label done, null, function, non_function_constructor;
3265 2962
3266 VisitForAccumulatorValue(args->at(0)); 2963 VisitForAccumulatorValue(args->at(0));
3267 2964
3268 // If the object is a smi, we return null. 2965 // If the object is a smi, we return null.
3269 __ JumpIfSmi(r0, &null); 2966 __ JumpIfSmi(x0, &null);
3270 2967
3271 // Check that the object is a JS object but take special care of JS 2968 // Check that the object is a JS object but take special care of JS
3272 // functions to make sure they have 'Function' as their class. 2969 // functions to make sure they have 'Function' as their class.
3273 // Assume that there are only two callable types, and one of them is at 2970 // Assume that there are only two callable types, and one of them is at
3274 // either end of the type range for JS object types. Saves extra comparisons. 2971 // either end of the type range for JS object types. Saves extra comparisons.
3275 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 2972 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3276 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE); 2973 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3277 // Map is now in r0. 2974 // x10: object's map.
3278 __ b(lt, &null); 2975 // x11: object's type.
2976 __ B(lt, &null);
3279 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == 2977 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3280 FIRST_SPEC_OBJECT_TYPE + 1); 2978 FIRST_SPEC_OBJECT_TYPE + 1);
3281 __ b(eq, &function); 2979 __ B(eq, &function);
3282 2980
3283 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); 2981 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3284 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == 2982 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3285 LAST_SPEC_OBJECT_TYPE - 1); 2983 LAST_SPEC_OBJECT_TYPE - 1);
3286 __ b(eq, &function); 2984 __ B(eq, &function);
3287 // Assume that there is no larger type. 2985 // Assume that there is no larger type.
3288 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); 2986 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3289 2987
3290 // Check if the constructor in the map is a JS function. 2988 // Check if the constructor in the map is a JS function.
3291 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); 2989 __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset));
3292 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 2990 __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE,
3293 __ b(ne, &non_function_constructor); 2991 &non_function_constructor);
3294 2992
3295 // r0 now contains the constructor function. Grab the 2993 // x12 now contains the constructor function. Grab the
3296 // instance class name from there. 2994 // instance class name from there.
3297 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 2995 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3298 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); 2996 __ Ldr(x0,
3299 __ b(&done); 2997 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
2998 __ B(&done);
3300 2999
3301 // Functions have class 'Function'. 3000 // Functions have class 'Function'.
3302 __ bind(&function); 3001 __ Bind(&function);
3303 __ LoadRoot(r0, Heap::kfunction_class_stringRootIndex); 3002 __ LoadRoot(x0, Heap::kfunction_class_stringRootIndex);
3304 __ jmp(&done); 3003 __ B(&done);
3305 3004
3306 // Objects with a non-function constructor have class 'Object'. 3005 // Objects with a non-function constructor have class 'Object'.
3307 __ bind(&non_function_constructor); 3006 __ Bind(&non_function_constructor);
3308 __ LoadRoot(r0, Heap::kObject_stringRootIndex); 3007 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3309 __ jmp(&done); 3008 __ B(&done);
3310 3009
3311 // Non-JS objects have class null. 3010 // Non-JS objects have class null.
3312 __ bind(&null); 3011 __ Bind(&null);
3313 __ LoadRoot(r0, Heap::kNullValueRootIndex); 3012 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3314 3013
3315 // All done. 3014 // All done.
3316 __ bind(&done); 3015 __ Bind(&done);
3317 3016
3318 context()->Plug(r0); 3017 context()->Plug(x0);
3319 } 3018 }
3320 3019
3321 3020
3322 void FullCodeGenerator::EmitLog(CallRuntime* expr) { 3021 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3323 // Conditionally generate a log call. 3022 // Conditionally generate a log call.
3324 // Args: 3023 // Args:
3325 // 0 (literal string): The type of logging (corresponds to the flags). 3024 // 0 (literal string): The type of logging (corresponds to the flags).
3326 // This is used to determine whether or not to generate the log call. 3025 // This is used to determine whether or not to generate the log call.
3327 // 1 (string): Format string. Access the string at argument index 2 3026 // 1 (string): Format string. Access the string at argument index 2
3328 // with '%2s' (see Logger::LogRuntime for all the formats). 3027 // with '%2s' (see Logger::LogRuntime for all the formats).
3329 // 2 (array): Arguments to the format string. 3028 // 2 (array): Arguments to the format string.
3330 ZoneList<Expression*>* args = expr->arguments(); 3029 ZoneList<Expression*>* args = expr->arguments();
3331 ASSERT_EQ(args->length(), 3); 3030 ASSERT_EQ(args->length(), 3);
3332 if (CodeGenerator::ShouldGenerateLog(args->at(0))) { 3031 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
3333 VisitForStackValue(args->at(1)); 3032 VisitForStackValue(args->at(1));
3334 VisitForStackValue(args->at(2)); 3033 VisitForStackValue(args->at(2));
3335 __ CallRuntime(Runtime::kLog, 2); 3034 __ CallRuntime(Runtime::kLog, 2);
3336 } 3035 }
3337 3036
3338 // Finally, we're expected to leave a value on the top of the stack. 3037 // Finally, we're expected to leave a value on the top of the stack.
3339 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3038 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3340 context()->Plug(r0); 3039 context()->Plug(x0);
3341 } 3040 }
3342 3041
3343 3042
3344 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) { 3043 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
3345 ASSERT(expr->arguments()->length() == 0); 3044 ASSERT(expr->arguments()->length() == 0);
3346 Label slow_allocate_heapnumber; 3045 Label slow_allocate_heapnumber;
3347 Label heapnumber_allocated; 3046 Label heapnumber_allocated;
3047 Register heap_num = x19; // Callee-saved register.
3348 3048
3349 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); 3049 __ LoadRoot(x5, Heap::kHeapNumberMapRootIndex);
3350 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber); 3050 __ AllocateHeapNumber(heap_num, &slow_allocate_heapnumber, x10, x11, x5);
3351 __ jmp(&heapnumber_allocated); 3051 __ B(&heapnumber_allocated);
3352 3052
3353 __ bind(&slow_allocate_heapnumber); 3053 __ Bind(&slow_allocate_heapnumber);
3354 // Allocate a heap number. 3054 // Call the runtime to allocate the heap number.
3355 __ CallRuntime(Runtime::kNumberAlloc, 0); 3055 __ CallRuntime(Runtime::kNumberAlloc, 0);
3356 __ mov(r4, Operand(r0)); 3056 __ Mov(heap_num, x0);
3357 3057
3358 __ bind(&heapnumber_allocated); 3058 __ Bind(&heapnumber_allocated);
3359 3059
3360 // Convert 32 random bits in r0 to 0.(32 random bits) in a double 3060 // Get 32 random bits.
3361 // by computing: 3061 __ Ldr(x0,
3362 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20). 3062 ContextMemOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
3363 __ PrepareCallCFunction(1, r0); 3063 __ Ldr(x0, FieldMemOperand(x0, GlobalObject::kNativeContextOffset));
3364 __ ldr(r0,
3365 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
3366 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
3367 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); 3064 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
3368 3065
3369 // 0x41300000 is the top half of 1.0 x 2^20 as a double. 3066 // Interpret the 32 random bits as a 0.32 fixed point number, and convert to
3370 // Create this constant using mov/orr to avoid PC relative load. 3067 // a double in the range 0.0 <= number < 1.0.
3371 __ mov(r1, Operand(0x41000000)); 3068 __ Ucvtf(d0, x0, 32);
3372 __ orr(r1, r1, Operand(0x300000)); 3069 __ Str(d0, FieldMemOperand(heap_num, HeapNumber::kValueOffset));
3373 // Move 0x41300000xxxxxxxx (x = random bits) to VFP. 3070 __ Mov(x0, heap_num);
3374 __ vmov(d7, r0, r1);
3375 // Move 0x4130000000000000 to VFP.
3376 __ mov(r0, Operand::Zero());
3377 __ vmov(d8, r0, r1);
3378 // Subtract and store the result in the heap number.
3379 __ vsub(d7, d7, d8);
3380 __ sub(r0, r4, Operand(kHeapObjectTag));
3381 __ vstr(d7, r0, HeapNumber::kValueOffset);
3382 __ mov(r0, r4);
3383 3071
3384 context()->Plug(r0); 3072 context()->Plug(x0);
3385 } 3073 }
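The fixed-point convert replaces the old exponent trick; a worked sketch of the equivalence, assuming Ucvtf(d, x, fbits) computes x * 2^-fbits:

//   r  = 32 random bits returned in x0,   0 <= r < 2^32
//   d0 = Ucvtf(x0, 32) = r * 2^-32,       0.0 <= d0 < 1.0
// which is the same value the ARM sequence produced via
//   (1.(20 zeros)(32 random bits) x 2^20) - (1.0 x 2^20) = r * 2^-32.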
3386 3074
3387 3075
3388 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3076 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3389 // Load the arguments on the stack and call the stub. 3077 // Load the arguments on the stack and call the stub.
3390 SubStringStub stub; 3078 SubStringStub stub;
3391 ZoneList<Expression*>* args = expr->arguments(); 3079 ZoneList<Expression*>* args = expr->arguments();
3392 ASSERT(args->length() == 3); 3080 ASSERT(args->length() == 3);
3393 VisitForStackValue(args->at(0)); 3081 VisitForStackValue(args->at(0));
3394 VisitForStackValue(args->at(1)); 3082 VisitForStackValue(args->at(1));
3395 VisitForStackValue(args->at(2)); 3083 VisitForStackValue(args->at(2));
3396 __ CallStub(&stub); 3084 __ CallStub(&stub);
3397 context()->Plug(r0); 3085 context()->Plug(x0);
3398 } 3086 }
3399 3087
3400 3088
3401 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3089 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3402 // Load the arguments on the stack and call the stub. 3090 // Load the arguments on the stack and call the stub.
3403 RegExpExecStub stub; 3091 RegExpExecStub stub;
3404 ZoneList<Expression*>* args = expr->arguments(); 3092 ZoneList<Expression*>* args = expr->arguments();
3405 ASSERT(args->length() == 4); 3093 ASSERT(args->length() == 4);
3406 VisitForStackValue(args->at(0)); 3094 VisitForStackValue(args->at(0));
3407 VisitForStackValue(args->at(1)); 3095 VisitForStackValue(args->at(1));
3408 VisitForStackValue(args->at(2)); 3096 VisitForStackValue(args->at(2));
3409 VisitForStackValue(args->at(3)); 3097 VisitForStackValue(args->at(3));
3410 __ CallStub(&stub); 3098 __ CallStub(&stub);
3411 context()->Plug(r0); 3099 context()->Plug(x0);
3412 } 3100 }
3413 3101
3414 3102
3415 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3103 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3104 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3416 ZoneList<Expression*>* args = expr->arguments(); 3105 ZoneList<Expression*>* args = expr->arguments();
3417 ASSERT(args->length() == 1); 3106 ASSERT(args->length() == 1);
3418 VisitForAccumulatorValue(args->at(0)); // Load the object. 3107 VisitForAccumulatorValue(args->at(0)); // Load the object.
3419 3108
3420 Label done; 3109 Label done;
3421 // If the object is a smi return the object. 3110 // If the object is a smi return the object.
3422 __ JumpIfSmi(r0, &done); 3111 __ JumpIfSmi(x0, &done);
3423 // If the object is not a value type, return the object. 3112 // If the object is not a value type, return the object.
3424 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 3113 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3425 __ b(ne, &done); 3114 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3426 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
3427 3115
3428 __ bind(&done); 3116 __ Bind(&done);
3429 context()->Plug(r0); 3117 context()->Plug(x0);
3430 } 3118 }
3431 3119
3432 3120
3433 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3121 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3434 ZoneList<Expression*>* args = expr->arguments(); 3122 ZoneList<Expression*>* args = expr->arguments();
3435 ASSERT(args->length() == 2); 3123 ASSERT(args->length() == 2);
3436 ASSERT_NE(NULL, args->at(1)->AsLiteral()); 3124 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3437 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle())); 3125 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3438 3126
3439 VisitForAccumulatorValue(args->at(0)); // Load the object. 3127 VisitForAccumulatorValue(args->at(0)); // Load the object.
3440 3128
3441 Label runtime, done, not_date_object; 3129 Label runtime, done, not_date_object;
3442 Register object = r0; 3130 Register object = x0;
3443 Register result = r0; 3131 Register result = x0;
3444 Register scratch0 = r9; 3132 Register stamp_addr = x10;
3445 Register scratch1 = r1; 3133 Register stamp_cache = x11;
3446 3134
3447 __ JumpIfSmi(object, &not_date_object); 3135 __ JumpIfSmi(object, &not_date_object);
3448 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE); 3136 __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, &not_date_object);
3449 __ b(ne, &not_date_object);
3450 3137
3451 if (index->value() == 0) { 3138 if (index->value() == 0) {
3452 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); 3139 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3453 __ jmp(&done); 3140 __ B(&done);
3454 } else { 3141 } else {
3455 if (index->value() < JSDate::kFirstUncachedField) { 3142 if (index->value() < JSDate::kFirstUncachedField) {
3456 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 3143 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3457 __ mov(scratch1, Operand(stamp)); 3144 __ Mov(x10, Operand(stamp));
3458 __ ldr(scratch1, MemOperand(scratch1)); 3145 __ Ldr(stamp_addr, MemOperand(x10));
3459 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); 3146 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3460 __ cmp(scratch1, scratch0); 3147 __ Cmp(stamp_addr, stamp_cache);
3461 __ b(ne, &runtime); 3148 __ B(ne, &runtime);
3462 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + 3149 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3463 kPointerSize * index->value())); 3150 kPointerSize * index->value()));
3464 __ jmp(&done); 3151 __ B(&done);
3465 } 3152 }
3466 __ bind(&runtime); 3153
3467 __ PrepareCallCFunction(2, scratch1); 3154 __ Bind(&runtime);
3468 __ mov(r1, Operand(index)); 3155 __ Mov(x1, Operand(index));
3469 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 3156 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3470 __ jmp(&done); 3157 __ B(&done);
3471 } 3158 }
3472 3159
3473 __ bind(&not_date_object); 3160 __ Bind(&not_date_object);
3474 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3161 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3475 __ bind(&done); 3162 __ Bind(&done);
3476 context()->Plug(r0); 3163 context()->Plug(x0);
3477 } 3164 }
3478 3165
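The fast path above only trusts a cached date field while the isolate-wide date cache stamp still matches the stamp recorded in the JSDate object; otherwise it falls through to the C call. A standalone model of that logic, with invented names and an assumed number of cached fields (this is not V8 code):

  #include <cstdint>

  // Model of the lookup performed after the JS_DATE_TYPE check succeeds.
  struct DateModel {
    double value;         // field 0, returned without any cache check
    int64_t cache_stamp;  // stamp copied from the isolate when fields were cached
    double cached[8];     // fields below kFirstUncachedField (count assumed)
  };

  double GetDateField(const DateModel& date, int index, int64_t isolate_stamp,
                      double (*slow_path)(const DateModel&, int)) {
    if (index == 0) return date.value;
    if (index < 8 && date.cache_stamp == isolate_stamp) {
      return date.cached[index];      // cache still valid: fast path
    }
    return slow_path(date, index);    // stale cache or uncached field: runtime call
  }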
3479 3166
3480 void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string, 3167 void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
3481 Register index, 3168 Register index,
3482 Register value, 3169 Register value,
3483 uint32_t encoding_mask) { 3170 uint32_t encoding_mask) {
3484 __ SmiTst(index); 3171 // TODO(jbramley): Use a safer scratch register.
3485 __ Check(eq, "Non-smi index"); 3172 Register scratch = __ Tmp1();
3486 __ SmiTst(value); 3173 ASSERT(!AreAliased(string, index, value, scratch));
3487 __ Check(eq, "Non-smi value");
3488 3174
3489 __ ldr(ip, FieldMemOperand(string, String::kLengthOffset)); 3175 __ AssertSmi(index, "Non-smi index");
3490 __ cmp(index, ip); 3176 __ AssertSmi(value, "Non-smi value");
3177
3178 __ Ldr(scratch, FieldMemOperand(string, String::kLengthOffset));
3179 __ Cmp(index, scratch);
3491 __ Check(lt, "Index is too large"); 3180 __ Check(lt, "Index is too large");
3492 3181
3493 __ cmp(index, Operand(Smi::FromInt(0))); 3182 __ Cmp(index, Operand(Smi::FromInt(0)));
3494 __ Check(ge, "Index is negative"); 3183 __ Check(ge, "Index is negative");
3495 3184
3496 __ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset)); 3185 __ Ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
3497 __ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset)); 3186 __ Ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
3187 __ And(scratch, scratch, kStringRepresentationMask | kStringEncodingMask);
3498 3188
3499 __ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask)); 3189 __ Cmp(scratch, encoding_mask);
3500 __ cmp(ip, Operand(encoding_mask));
3501 __ Check(eq, "Unexpected string type"); 3190 __ Check(eq, "Unexpected string type");
3502 } 3191 }
3503 3192
3504 3193
3505 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3194 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3506 ZoneList<Expression*>* args = expr->arguments(); 3195 ZoneList<Expression*>* args = expr->arguments();
3507 ASSERT_EQ(3, args->length()); 3196 ASSERT_EQ(3, args->length());
3508 3197
3509 Register string = r0; 3198 Register string = x0;
3510 Register index = r1; 3199 Register index = x1;
3511 Register value = r2; 3200 Register value = x2;
3201 Register scratch = x10;
3512 3202
3513 VisitForStackValue(args->at(1)); // index 3203 VisitForStackValue(args->at(1)); // index
3514 VisitForStackValue(args->at(2)); // value 3204 VisitForStackValue(args->at(2)); // value
3515 __ pop(value);
3516 __ pop(index);
3517 VisitForAccumulatorValue(args->at(0)); // string 3205 VisitForAccumulatorValue(args->at(0)); // string
3206 // TODO(jbramley): This is broken on ARM, but fixed on bleeding_edge.
3207 __ Pop(value, index);
3518 3208
3519 if (FLAG_debug_code) { 3209 if (FLAG_debug_code) {
3520 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 3210 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3521 EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); 3211 EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3522 } 3212 }
3523 3213
3524 __ SmiUntag(value, value); 3214 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3525 __ add(ip, 3215 __ SmiUntag(value);
3526 string, 3216 __ SmiUntag(index);
3527 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3217 __ Strb(value, MemOperand(scratch, index));
3528 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3529 context()->Plug(string); 3218 context()->Plug(string);
3530 } 3219 }
3531 3220
3532 3221
3533 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3222 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3534 ZoneList<Expression*>* args = expr->arguments(); 3223 ZoneList<Expression*>* args = expr->arguments();
3535 ASSERT_EQ(3, args->length()); 3224 ASSERT_EQ(3, args->length());
3536 3225
3537 Register string = r0; 3226 Register string = x0;
3538 Register index = r1; 3227 Register index = x1;
3539 Register value = r2; 3228 Register value = x2;
3229 Register scratch = x10;
3540 3230
3541 VisitForStackValue(args->at(1)); // index 3231 VisitForStackValue(args->at(1)); // index
3542 VisitForStackValue(args->at(2)); // value 3232 VisitForStackValue(args->at(2)); // value
3543 __ pop(value);
3544 __ pop(index);
3545 VisitForAccumulatorValue(args->at(0)); // string 3233 VisitForAccumulatorValue(args->at(0)); // string
3234 // TODO(jbramley): This is broken on ARM, but fixed on bleeding_edge.
3235 __ Pop(value, index);
3546 3236
3547 if (FLAG_debug_code) { 3237 if (FLAG_debug_code) {
3548 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 3238 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3549 EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); 3239 EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3550 } 3240 }
3551 3241
3552 __ SmiUntag(value, value); 3242 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3553 __ add(ip, 3243 __ SmiUntag(value);
3554 string, 3244 __ SmiUntag(index);
3555 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 3245 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3556 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3557 __ strh(value, MemOperand(ip, index));
3558 context()->Plug(string); 3246 context()->Plug(string);
3559 } 3247 }
3560 3248
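Both SetChar helpers form the destination address the same way; written out (illustrative, using the constants named in the code):

  // address = string + kHeaderSize - kHeapObjectTag + untagged_index * char_size
  //
  // where char_size is 1 for the one-byte case (Strb, unscaled index) and 2
  // for the two-byte case (Strh with the index shifted left by one). The smi
  // index and value are untagged explicitly here; the ARM version instead
  // exploited the smi tag in its addressing (LSR #kSmiTagSize for the
  // one-byte store, and the tag itself acting as the times-two scale for the
  // two-byte store).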
3561 3249
3562
3563 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3250 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3564 // Load the arguments on the stack and call the runtime function. 3251 // Load the arguments on the stack and call the MathPow stub.
3565 ZoneList<Expression*>* args = expr->arguments(); 3252 ZoneList<Expression*>* args = expr->arguments();
3566 ASSERT(args->length() == 2); 3253 ASSERT(args->length() == 2);
3567 VisitForStackValue(args->at(0)); 3254 VisitForStackValue(args->at(0));
3568 VisitForStackValue(args->at(1)); 3255 VisitForStackValue(args->at(1));
3569 MathPowStub stub(MathPowStub::ON_STACK); 3256 MathPowStub stub(MathPowStub::ON_STACK);
3570 __ CallStub(&stub); 3257 __ CallStub(&stub);
3571 context()->Plug(r0); 3258 context()->Plug(x0);
3572 } 3259 }
3573 3260
3574 3261
3575 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3262 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3576 ZoneList<Expression*>* args = expr->arguments(); 3263 ZoneList<Expression*>* args = expr->arguments();
3577 ASSERT(args->length() == 2); 3264 ASSERT(args->length() == 2);
3578 VisitForStackValue(args->at(0)); // Load the object. 3265 VisitForStackValue(args->at(0)); // Load the object.
3579 VisitForAccumulatorValue(args->at(1)); // Load the value. 3266 VisitForAccumulatorValue(args->at(1)); // Load the value.
3580 __ pop(r1); // r0 = value. r1 = object. 3267 __ Pop(x1);
3268 // x0 = value.
3269 // x1 = object.
3581 3270
3582 Label done; 3271 Label done;
3583 // If the object is a smi, return the value. 3272 // If the object is a smi, return the value.
3584 __ JumpIfSmi(r1, &done); 3273 __ JumpIfSmi(x1, &done);
3585 3274
3586 // If the object is not a value type, return the value. 3275 // If the object is not a value type, return the value.
3587 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); 3276 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3588 __ b(ne, &done);
3589 3277
3590 // Store the value. 3278 // Store the value.
3591 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); 3279 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3592 // Update the write barrier. Save the value as it will be 3280 // Update the write barrier. Save the value as it will be
3593 // overwritten by the write barrier code and is needed afterward. 3281 // overwritten by the write barrier code and is needed afterward.
3594 __ mov(r2, r0); 3282 __ Mov(x10, x0);
3595 __ RecordWriteField( 3283 __ RecordWriteField(
3596 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 3284 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3597 3285
3598 __ bind(&done); 3286 __ Bind(&done);
3599 context()->Plug(r0); 3287 context()->Plug(x0);
3600 } 3288 }
3601 3289
3602 3290
3603 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3291 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3604 ZoneList<Expression*>* args = expr->arguments(); 3292 ZoneList<Expression*>* args = expr->arguments();
3605 ASSERT_EQ(args->length(), 1); 3293 ASSERT_EQ(args->length(), 1);
3606 // Load the argument on the stack and call the stub. 3294 // Load the argument on the stack and call the stub.
3607 VisitForStackValue(args->at(0)); 3295 VisitForStackValue(args->at(0));
3608 3296
3609 NumberToStringStub stub; 3297 NumberToStringStub stub;
3610 __ CallStub(&stub); 3298 __ CallStub(&stub);
3611 context()->Plug(r0); 3299 context()->Plug(x0);
3612 } 3300 }
3613 3301
3614 3302
3615 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3303 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3616 ZoneList<Expression*>* args = expr->arguments(); 3304 ZoneList<Expression*>* args = expr->arguments();
3617 ASSERT(args->length() == 1); 3305 ASSERT(args->length() == 1);
3306
3618 VisitForAccumulatorValue(args->at(0)); 3307 VisitForAccumulatorValue(args->at(0));
3619 3308
3620 Label done; 3309 Label done;
3621 StringCharFromCodeGenerator generator(r0, r1); 3310 Register code = x0;
3311 Register result = x1;
3312
3313 StringCharFromCodeGenerator generator(code, result);
3622 generator.GenerateFast(masm_); 3314 generator.GenerateFast(masm_);
3623 __ jmp(&done); 3315 __ B(&done);
3624 3316
3625 NopRuntimeCallHelper call_helper; 3317 NopRuntimeCallHelper call_helper;
3626 generator.GenerateSlow(masm_, call_helper); 3318 generator.GenerateSlow(masm_, call_helper);
3627 3319
3628 __ bind(&done); 3320 __ Bind(&done);
3629 context()->Plug(r1); 3321 context()->Plug(result);
3630 } 3322 }
3631 3323
3632 3324
3633 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3325 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3634 ZoneList<Expression*>* args = expr->arguments(); 3326 ZoneList<Expression*>* args = expr->arguments();
3635 ASSERT(args->length() == 2); 3327 ASSERT(args->length() == 2);
3328
3636 VisitForStackValue(args->at(0)); 3329 VisitForStackValue(args->at(0));
3637 VisitForAccumulatorValue(args->at(1)); 3330 VisitForAccumulatorValue(args->at(1));
3638 3331
3639 Register object = r1; 3332 Register object = x1;
3640 Register index = r0; 3333 Register index = x0;
3641 Register result = r3; 3334 Register result = x3;
3642 3335
3643 __ pop(object); 3336 __ Pop(object);
3644 3337
3645 Label need_conversion; 3338 Label need_conversion;
3646 Label index_out_of_range; 3339 Label index_out_of_range;
3647 Label done; 3340 Label done;
3648 StringCharCodeAtGenerator generator(object, 3341 StringCharCodeAtGenerator generator(object,
3649 index, 3342 index,
3650 result, 3343 result,
3651 &need_conversion, 3344 &need_conversion,
3652 &need_conversion, 3345 &need_conversion,
3653 &index_out_of_range, 3346 &index_out_of_range,
3654 STRING_INDEX_IS_NUMBER); 3347 STRING_INDEX_IS_NUMBER);
3655 generator.GenerateFast(masm_); 3348 generator.GenerateFast(masm_);
3656 __ jmp(&done); 3349 __ B(&done);
3657 3350
3658 __ bind(&index_out_of_range); 3351 __ Bind(&index_out_of_range);
3659 // When the index is out of range, the spec requires us to return 3352 // When the index is out of range, the spec requires us to return NaN.
3660 // NaN.
3661 __ LoadRoot(result, Heap::kNanValueRootIndex); 3353 __ LoadRoot(result, Heap::kNanValueRootIndex);
3662 __ jmp(&done); 3354 __ B(&done);
3663 3355
3664 __ bind(&need_conversion); 3356 __ Bind(&need_conversion);
3665 // Load the undefined value into the result register, which will 3357 // Load the undefined value into the result register, which will
3666 // trigger conversion. 3358 // trigger conversion.
3667 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 3359 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3668 __ jmp(&done); 3360 __ B(&done);
3669 3361
3670 NopRuntimeCallHelper call_helper; 3362 NopRuntimeCallHelper call_helper;
3671 generator.GenerateSlow(masm_, call_helper); 3363 generator.GenerateSlow(masm_, call_helper);
3672 3364
3673 __ bind(&done); 3365 __ Bind(&done);
3674 context()->Plug(result); 3366 context()->Plug(result);
3675 } 3367 }
3676 3368
3677 3369
3678 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3370 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3679 ZoneList<Expression*>* args = expr->arguments(); 3371 ZoneList<Expression*>* args = expr->arguments();
3680 ASSERT(args->length() == 2); 3372 ASSERT(args->length() == 2);
3373
3681 VisitForStackValue(args->at(0)); 3374 VisitForStackValue(args->at(0));
3682 VisitForAccumulatorValue(args->at(1)); 3375 VisitForAccumulatorValue(args->at(1));
3683 3376
3684 Register object = r1; 3377 Register object = x1;
3685 Register index = r0; 3378 Register index = x0;
3686 Register scratch = r3; 3379 Register result = x0;
3687 Register result = r0;
3688 3380
3689 __ pop(object); 3381 __ Pop(object);
3690 3382
3691 Label need_conversion; 3383 Label need_conversion;
3692 Label index_out_of_range; 3384 Label index_out_of_range;
3693 Label done; 3385 Label done;
3694 StringCharAtGenerator generator(object, 3386 StringCharAtGenerator generator(object,
3695 index, 3387 index,
3696 scratch, 3388 x3,
3697 result, 3389 result,
3698 &need_conversion, 3390 &need_conversion,
3699 &need_conversion, 3391 &need_conversion,
3700 &index_out_of_range, 3392 &index_out_of_range,
3701 STRING_INDEX_IS_NUMBER); 3393 STRING_INDEX_IS_NUMBER);
3702 generator.GenerateFast(masm_); 3394 generator.GenerateFast(masm_);
3703 __ jmp(&done); 3395 __ B(&done);
3704 3396
3705 __ bind(&index_out_of_range); 3397 __ Bind(&index_out_of_range);
3706 // When the index is out of range, the spec requires us to return 3398 // When the index is out of range, the spec requires us to return
3707 // the empty string. 3399 // the empty string.
3708 __ LoadRoot(result, Heap::kempty_stringRootIndex); 3400 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3709 __ jmp(&done); 3401 __ B(&done);
3710 3402
3711 __ bind(&need_conversion); 3403 __ Bind(&need_conversion);
3712 // Move smi zero into the result register, which will trigger 3404 // Move smi zero into the result register, which will trigger conversion.
3713 // conversion. 3405 __ Mov(result, Operand(Smi::FromInt(0)));
3714 __ mov(result, Operand(Smi::FromInt(0))); 3406 __ B(&done);
3715 __ jmp(&done);
3716 3407
3717 NopRuntimeCallHelper call_helper; 3408 NopRuntimeCallHelper call_helper;
3718 generator.GenerateSlow(masm_, call_helper); 3409 generator.GenerateSlow(masm_, call_helper);
3719 3410
3720 __ bind(&done); 3411 __ Bind(&done);
3721 context()->Plug(result); 3412 context()->Plug(result);
3722 } 3413 }
3723 3414
3724 3415
3725 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3416 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3417 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3726 ZoneList<Expression*>* args = expr->arguments(); 3418 ZoneList<Expression*>* args = expr->arguments();
3727 ASSERT_EQ(2, args->length()); 3419 ASSERT_EQ(2, args->length());
3728 VisitForStackValue(args->at(0)); 3420 VisitForStackValue(args->at(0));
3729 VisitForStackValue(args->at(1)); 3421 VisitForStackValue(args->at(1));
3730 3422
3731 StringAddStub stub(NO_STRING_ADD_FLAGS); 3423 StringAddStub stub(NO_STRING_ADD_FLAGS);
3732 __ CallStub(&stub); 3424 __ CallStub(&stub);
3733 context()->Plug(r0); 3425 context()->Plug(x0);
3734 } 3426 }
3735 3427
3736 3428
3737 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3429 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3738 ZoneList<Expression*>* args = expr->arguments(); 3430 ZoneList<Expression*>* args = expr->arguments();
3739 ASSERT_EQ(2, args->length()); 3431 ASSERT_EQ(2, args->length());
3740 VisitForStackValue(args->at(0)); 3432 VisitForStackValue(args->at(0));
3741 VisitForStackValue(args->at(1)); 3433 VisitForStackValue(args->at(1));
3742 3434
3743 StringCompareStub stub; 3435 StringCompareStub stub;
3744 __ CallStub(&stub); 3436 __ CallStub(&stub);
3745 context()->Plug(r0); 3437 context()->Plug(x0);
3746 } 3438 }
3747 3439
3748 3440
3749 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) { 3441 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3750 // Load the argument on the stack and call the stub. 3442 // Load the argument on the stack and call the stub.
3751 TranscendentalCacheStub stub(TranscendentalCache::SIN, 3443 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3752 TranscendentalCacheStub::TAGGED); 3444 TranscendentalCacheStub::TAGGED);
3753 ZoneList<Expression*>* args = expr->arguments(); 3445 ZoneList<Expression*>* args = expr->arguments();
3754 ASSERT(args->length() == 1); 3446 ASSERT(args->length() == 1);
3755 VisitForStackValue(args->at(0)); 3447 VisitForStackValue(args->at(0));
3756 __ CallStub(&stub); 3448 __ CallStub(&stub);
3757 context()->Plug(r0); 3449 context()->Plug(x0);
3758 } 3450 }
3759 3451
3760 3452
3761 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) { 3453 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3762 // Load the argument on the stack and call the stub. 3454 // Load the argument on the stack and call the stub.
3763 TranscendentalCacheStub stub(TranscendentalCache::COS, 3455 TranscendentalCacheStub stub(TranscendentalCache::COS,
3764 TranscendentalCacheStub::TAGGED); 3456 TranscendentalCacheStub::TAGGED);
3765 ZoneList<Expression*>* args = expr->arguments(); 3457 ZoneList<Expression*>* args = expr->arguments();
3766 ASSERT(args->length() == 1); 3458 ASSERT(args->length() == 1);
3767 VisitForStackValue(args->at(0)); 3459 VisitForStackValue(args->at(0));
3768 __ CallStub(&stub); 3460 __ CallStub(&stub);
3769 context()->Plug(r0); 3461 context()->Plug(x0);
3770 } 3462 }
3771 3463
3772 3464
3773 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) { 3465 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3774 // Load the argument on the stack and call the stub. 3466 // Load the argument on the stack and call the stub.
3775 TranscendentalCacheStub stub(TranscendentalCache::TAN, 3467 TranscendentalCacheStub stub(TranscendentalCache::TAN,
3776 TranscendentalCacheStub::TAGGED); 3468 TranscendentalCacheStub::TAGGED);
3777 ZoneList<Expression*>* args = expr->arguments(); 3469 ZoneList<Expression*>* args = expr->arguments();
3778 ASSERT(args->length() == 1); 3470 ASSERT(args->length() == 1);
3779 VisitForStackValue(args->at(0)); 3471 VisitForStackValue(args->at(0));
3780 __ CallStub(&stub); 3472 __ CallStub(&stub);
3781 context()->Plug(r0); 3473 context()->Plug(x0);
3782 } 3474 }
3783 3475
3784 3476
3785 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { 3477 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3786 // Load the argument on the stack and call the stub. 3478 // Load the argument on the stack and call the stub.
3787 TranscendentalCacheStub stub(TranscendentalCache::LOG, 3479 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3788 TranscendentalCacheStub::TAGGED); 3480 TranscendentalCacheStub::TAGGED);
3789 ZoneList<Expression*>* args = expr->arguments(); 3481 ZoneList<Expression*>* args = expr->arguments();
3790 ASSERT(args->length() == 1); 3482 ASSERT(args->length() == 1);
3791 VisitForStackValue(args->at(0)); 3483 VisitForStackValue(args->at(0));
3792 __ CallStub(&stub); 3484 __ CallStub(&stub);
3793 context()->Plug(r0); 3485 context()->Plug(x0);
3794 } 3486 }
3795 3487
3796 3488
3797 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { 3489 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3798 // Load the argument on the stack and call the runtime function. 3490 // Load the argument on the stack and call the runtime function.
3799 ZoneList<Expression*>* args = expr->arguments(); 3491 ZoneList<Expression*>* args = expr->arguments();
3800 ASSERT(args->length() == 1); 3492 ASSERT(args->length() == 1);
3801 VisitForStackValue(args->at(0)); 3493 VisitForStackValue(args->at(0));
3802 __ CallRuntime(Runtime::kMath_sqrt, 1); 3494 __ CallRuntime(Runtime::kMath_sqrt, 1);
3803 context()->Plug(r0); 3495 context()->Plug(x0);
3804 } 3496 }
3805 3497
3806 3498
3807 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3499 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3500 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3808 ZoneList<Expression*>* args = expr->arguments(); 3501 ZoneList<Expression*>* args = expr->arguments();
3809 ASSERT(args->length() >= 2); 3502 ASSERT(args->length() >= 2);
3810 3503
3811 int arg_count = args->length() - 2; // 2 ~ receiver and function. 3504 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3812 for (int i = 0; i < arg_count + 1; i++) { 3505 for (int i = 0; i < arg_count + 1; i++) {
3813 VisitForStackValue(args->at(i)); 3506 VisitForStackValue(args->at(i));
3814 } 3507 }
3815 VisitForAccumulatorValue(args->last()); // Function. 3508 VisitForAccumulatorValue(args->last()); // Function.
3816 3509
3817 Label runtime, done; 3510 Label runtime, done;
3818 // Check for non-function argument (including proxy). 3511 // Check for non-function argument (including proxy).
3819 __ JumpIfSmi(r0, &runtime); 3512 __ JumpIfSmi(x0, &runtime);
3820 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3513 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3821 __ b(ne, &runtime);
3822 3514
3823 // InvokeFunction requires the function in r1. Move it in there. 3515 // InvokeFunction requires the function in x1. Move it in there.
3824 __ mov(r1, result_register()); 3516 __ Mov(x1, x0);
3825 ParameterCount count(arg_count); 3517 ParameterCount count(arg_count);
3826 __ InvokeFunction(r1, count, CALL_FUNCTION, 3518 __ InvokeFunction(x1, count, CALL_FUNCTION,
3827 NullCallWrapper(), CALL_AS_METHOD); 3519 NullCallWrapper(), CALL_AS_METHOD);
3828 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3520 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3829 __ jmp(&done); 3521 __ B(&done);
3830 3522
3831 __ bind(&runtime); 3523 __ Bind(&runtime);
3832 __ push(r0); 3524 __ Push(x0);
3833 __ CallRuntime(Runtime::kCall, args->length()); 3525 __ CallRuntime(Runtime::kCall, args->length());
3834 __ bind(&done); 3526 __ Bind(&done);
3835 3527
3836 context()->Plug(r0); 3528 context()->Plug(x0);
3837 } 3529 }
3838 3530
3839 3531
3840 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3532 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3841 RegExpConstructResultStub stub; 3533 RegExpConstructResultStub stub;
3842 ZoneList<Expression*>* args = expr->arguments(); 3534 ZoneList<Expression*>* args = expr->arguments();
3843 ASSERT(args->length() == 3); 3535 ASSERT(args->length() == 3);
3844 VisitForStackValue(args->at(0)); 3536 VisitForStackValue(args->at(0));
3845 VisitForStackValue(args->at(1)); 3537 VisitForStackValue(args->at(1));
3846 VisitForStackValue(args->at(2)); 3538 VisitForStackValue(args->at(2));
3847 __ CallStub(&stub); 3539 __ CallStub(&stub);
3848 context()->Plug(r0); 3540 context()->Plug(x0);
3849 } 3541 }
3850 3542
3851 3543
3852 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 3544 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3853 ZoneList<Expression*>* args = expr->arguments(); 3545 ZoneList<Expression*>* args = expr->arguments();
3854 ASSERT_EQ(2, args->length()); 3546 ASSERT_EQ(2, args->length());
3855 ASSERT_NE(NULL, args->at(0)->AsLiteral()); 3547 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3856 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); 3548 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3857 3549
3858 Handle<FixedArray> jsfunction_result_caches( 3550 Handle<FixedArray> jsfunction_result_caches(
3859 isolate()->native_context()->jsfunction_result_caches()); 3551 isolate()->native_context()->jsfunction_result_caches());
3860 if (jsfunction_result_caches->length() <= cache_id) { 3552 if (jsfunction_result_caches->length() <= cache_id) {
3861 __ Abort("Attempt to use undefined cache."); 3553 __ Abort("Attempt to use undefined cache.");
3862 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3554 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3863 context()->Plug(r0); 3555 context()->Plug(x0);
3864 return; 3556 return;
3865 } 3557 }
3866 3558
3867 VisitForAccumulatorValue(args->at(1)); 3559 VisitForAccumulatorValue(args->at(1));
3868 3560
3869 Register key = r0; 3561 Register key = x0;
3870 Register cache = r1; 3562 Register cache = x1;
3871 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3563 __ Ldr(cache, GlobalObjectMemOperand());
3872 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); 3564 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3873 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3565 __ Ldr(cache, ContextMemOperand(cache,
3874 __ ldr(cache, 3566 Context::JSFUNCTION_RESULT_CACHES_INDEX));
3567 __ Ldr(cache,
3875 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3568 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3876 3569
3570 Label done;
3571 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
3572 JSFunctionResultCache::kFingerOffset));
3573 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
3574 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
3877 3575
3878 Label done, not_found; 3576 // Load the key and data from the cache.
3879 // tmp now holds finger offset as a smi. 3577 __ Ldp(x2, x3, MemOperand(x3));
3880 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3881 // r2 now holds finger offset as a smi.
3882 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3883 // r3 now points to the start of fixed array elements.
3884 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
3885 // Note side effect of PreIndex: r3 now points to the key of the pair.
3886 __ cmp(key, r2);
3887 __ b(ne, &not_found);
3888 3578
3889 __ ldr(r0, MemOperand(r3, kPointerSize)); 3579 __ Cmp(key, x2);
3890 __ b(&done); 3580 __ CmovX(x0, x3, eq);
3581 __ B(eq, &done);
3891 3582
3892 __ bind(&not_found);
3893 // Call runtime to perform the lookup. 3583 // Call runtime to perform the lookup.
3894 __ Push(cache, key); 3584 __ Push(cache, key);
3895 __ CallRuntime(Runtime::kGetFromCache, 2); 3585 __ CallRuntime(Runtime::kGetFromCache, 2);
3896 3586
3897 __ bind(&done); 3587 __ Bind(&done);
3898 context()->Plug(r0); 3588 context()->Plug(x0);
3899 } 3589 }
3900 3590
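The Ldp above fetches the key/value pair at the cache's finger in one load, and CmovX selects the cached value when the key matches. A standalone model of the fast path (layout and names are assumptions of this sketch, not V8 declarations):

  // JSFunctionResultCache modelled as a flat array of alternating key/value
  // slots, with 'finger' indexing the most recently hit key slot.
  struct ResultCacheModel {
    int finger;
    const void* entries[64];   // size illustrative
  };

  const void* GetFromCache(ResultCacheModel* cache, const void* key,
                           const void* (*runtime_lookup)(ResultCacheModel*,
                                                         const void*)) {
    const void* cached_key = cache->entries[cache->finger];
    const void* cached_value = cache->entries[cache->finger + 1];
    if (cached_key == key) return cached_value;  // Ldp + Cmp + CmovX fast path
    return runtime_lookup(cache, key);           // Runtime::kGetFromCache otherwise
  }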
3901 3591
3902 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) { 3592 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3593 ASM_UNIMPLEMENTED("EmitIsRegExpEquivalent has not been tested");
3594
3903 ZoneList<Expression*>* args = expr->arguments(); 3595 ZoneList<Expression*>* args = expr->arguments();
3904 ASSERT_EQ(2, args->length()); 3596 ASSERT_EQ(2, args->length());
3905 3597
3906 Register right = r0; 3598 Register right = x2;
3907 Register left = r1; 3599 Register left = x1;
3908 Register tmp = r2;
3909 Register tmp2 = r3;
3910 3600
3911 VisitForStackValue(args->at(0)); 3601 VisitForStackValue(args->at(0));
3912 VisitForAccumulatorValue(args->at(1)); 3602 VisitForAccumulatorValue(args->at(1));
3913 __ pop(left); 3603 __ Pop(left);
3604 __ Mov(right, x0);
3914 3605
3915 Label done, fail, ok; 3606 // Speculatively set result to true.
3916 __ cmp(left, Operand(right)); 3607 Register result = x0;
3917 __ b(eq, &ok); 3608 __ LoadRoot(result, Heap::kTrueValueRootIndex);
3609
3610 Label fail, ok;
3611 __ Cmp(left, right);
3612 __ B(eq, &ok);
3613
3918 // Fail if either is a non-HeapObject. 3614 // Fail if either is a non-HeapObject.
3919 __ and_(tmp, left, Operand(right)); 3615 __ JumpIfEitherSmi(left, right, &fail);
3920 __ JumpIfSmi(tmp, &fail);
3921 __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3922 __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3923 __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
3924 __ b(ne, &fail);
3925 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3926 __ cmp(tmp, Operand(tmp2));
3927 __ b(ne, &fail);
3928 __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3929 __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3930 __ cmp(tmp, tmp2);
3931 __ b(eq, &ok);
3932 __ bind(&fail);
3933 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3934 __ jmp(&done);
3935 __ bind(&ok);
3936 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3937 __ bind(&done);
3938 3616
3939 context()->Plug(r0); 3617 Register left_map = x10;
3618 __ Ldr(left_map, FieldMemOperand(left, HeapObject::kMapOffset));
3619 __ Ldrb(x11, FieldMemOperand(left_map, Map::kInstanceTypeOffset));
3620 __ Cmp(x11, JS_REGEXP_TYPE);
3621 __ B(ne, &fail);
3622
3623 Register right_map = x11;
3624 __ Ldr(right_map, FieldMemOperand(right, HeapObject::kMapOffset));
3625 __ Cmp(left_map, right_map);
3626 __ B(ne, &fail);
3627
3628 __ Ldr(x10, FieldMemOperand(left, JSRegExp::kDataOffset));
3629 __ Ldr(x11, FieldMemOperand(right, JSRegExp::kDataOffset));
3630 __ Cmp(x10, x11);
3631 __ B(eq, &ok);
3632
3633 __ Bind(&fail);
3634 __ LoadRoot(result, Heap::kFalseValueRootIndex);
3635
3636 __ Bind(&ok);
3637 context()->Plug(result);
3940 } 3638 }
3941 3639
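Spelled out as plain C++, the checks above amount to the following predicate; this is a simplified standalone sketch, not the generated code or V8's object model:

  // Two regexps are equivalent when they are the same object, or when both
  // are JSRegExp instances that share the same map and the same data array
  // (pattern, flags and compiled code).
  struct RegExpModel {
    const void* map;      // hidden class; equal maps imply equal instance types
    bool is_js_regexp;    // stands in for the JS_REGEXP_TYPE check
    const void* data;     // contents of the JSRegExp::kDataOffset slot
  };

  static bool IsRegExpEquivalent(const RegExpModel* left,
                                 const RegExpModel* right) {
    if (left == right) return true;                          // identical objects
    if (left == nullptr || right == nullptr) return false;   // models the smi fast-fail
    if (!left->is_js_regexp) return false;                   // left must be a JSRegExp
    if (left->map != right->map) return false;               // maps must match
    return left->data == right->data;                        // same data array
  }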
3942 3640
3943 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 3641 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3944 ZoneList<Expression*>* args = expr->arguments(); 3642 ZoneList<Expression*>* args = expr->arguments();
3945 VisitForAccumulatorValue(args->at(0)); 3643 VisitForAccumulatorValue(args->at(0));
3946 3644
3947 Label materialize_true, materialize_false; 3645 Label materialize_true, materialize_false;
3948 Label* if_true = NULL; 3646 Label* if_true = NULL;
3949 Label* if_false = NULL; 3647 Label* if_false = NULL;
3950 Label* fall_through = NULL; 3648 Label* fall_through = NULL;
3951 context()->PrepareTest(&materialize_true, &materialize_false, 3649 context()->PrepareTest(&materialize_true, &materialize_false,
3952 &if_true, &if_false, &fall_through); 3650 &if_true, &if_false, &fall_through);
3953 3651
3954 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3652 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3955 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask)); 3653 __ Tst(x10, String::kContainsCachedArrayIndexMask);
3956 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3654 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3957 Split(eq, if_true, if_false, fall_through); 3655 Split(eq, if_true, if_false, fall_through);
3958 3656
3959 context()->Plug(if_true, if_false); 3657 context()->Plug(if_true, if_false);
3960 } 3658 }
3961 3659
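The Tst/Split pair implements a simple predicate on the string's hash field (illustrative formula, using the mask named in the code):

  // has_cached_array_index(string) :=
  //   (string->hash_field & String::kContainsCachedArrayIndexMask) == 0
  //
  // The mask bits are clear when the hash field really does cache an array
  // index, which is why the split treats the 'eq' (zero) condition as true.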
3962 3660
3963 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3661 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3964 ZoneList<Expression*>* args = expr->arguments(); 3662 ZoneList<Expression*>* args = expr->arguments();
3965 ASSERT(args->length() == 1); 3663 ASSERT(args->length() == 1);
3966 VisitForAccumulatorValue(args->at(0)); 3664 VisitForAccumulatorValue(args->at(0));
3967 3665
3968 __ AssertString(r0); 3666 __ AssertString(x0);
3969 3667
3970 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3668 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
3971 __ IndexFromHash(r0, r0); 3669 __ IndexFromHash(x10, x0);
3972 3670
3973 context()->Plug(r0); 3671 context()->Plug(x0);
3974 } 3672 }
3975 3673
3976 3674
3977 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { 3675 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3978 Label bailout, done, one_char_separator, long_separator, 3676 ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin");
3979 non_trivial_array, not_size_one_array, loop, 3677
3980 empty_separator_loop, one_char_separator_loop,
3981 one_char_separator_loop_entry, long_separator_loop;
3982 ZoneList<Expression*>* args = expr->arguments(); 3678 ZoneList<Expression*>* args = expr->arguments();
3983 ASSERT(args->length() == 2); 3679 ASSERT(args->length() == 2);
3984 VisitForStackValue(args->at(1)); 3680 VisitForStackValue(args->at(1));
3985 VisitForAccumulatorValue(args->at(0)); 3681 VisitForAccumulatorValue(args->at(0));
3986 3682
3987 // All aliases of the same register have disjoint lifetimes. 3683 Register array = x0;
3988 Register array = r0; 3684 Register result = x0;
3989 Register elements = no_reg; // Will be r0. 3685 Register elements = x1;
3990 Register result = no_reg; // Will be r0. 3686 Register element = x2;
3991 Register separator = r1; 3687 Register separator = x3;
3992 Register array_length = r2; 3688 Register array_length = x4;
3993 Register result_pos = no_reg; // Will be r2 3689 Register result_pos = x5;
3994 Register string_length = r3; 3690 Register map = x6;
3995 Register string = r4; 3691 Register string_length = x10;
3996 Register element = r5; 3692 Register elements_end = x11;
3997 Register elements_end = r6; 3693 Register string = x12;
3998 Register scratch1 = r7; 3694 Register scratch1 = x13;
3999 Register scratch2 = r9; 3695 Register scratch2 = x14;
3696 Register scratch3 = x7;
3697 Register separator_length = x15;
4000 3698
4001 // Separator operand is on the stack. 3699 Label bailout, done, one_char_separator, long_separator,
4002 __ pop(separator); 3700 non_trivial_array, not_size_one_array, loop,
3701 empty_separator_loop, one_char_separator_loop,
3702 one_char_separator_loop_entry, long_separator_loop;
3703
3704 // The separator operand is on the stack.
3705 __ Pop(separator);
4003 3706
4004 // Check that the array is a JSArray. 3707 // Check that the array is a JSArray.
4005 __ JumpIfSmi(array, &bailout); 3708 __ JumpIfSmi(array, &bailout);
4006 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE); 3709 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
4007 __ b(ne, &bailout);
4008 3710
4009 // Check that the array has fast elements. 3711 // Check that the array has fast elements.
4010 __ CheckFastElements(scratch1, scratch2, &bailout); 3712 __ CheckFastElements(map, scratch1, &bailout);
4011 3713
4012 // If the array has length zero, return the empty string. 3714 // If the array has length zero, return the empty string.
4013 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); 3715 // Load and untag the length of the array.
4014 __ SmiUntag(array_length, SetCC); 3716 // It is an unsigned value, so we can skip sign extension.
4015 __ b(ne, &non_trivial_array); 3717 // We assume little endianness.
4016 __ LoadRoot(r0, Heap::kempty_stringRootIndex); 3718 __ Ldrsw(array_length,
4017 __ b(&done); 3719 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
3720 __ Cbnz(array_length, &non_trivial_array);
3721 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3722 __ B(&done);
4018 3723
4019 __ bind(&non_trivial_array); 3724 __ Bind(&non_trivial_array);
4020
4021 // Get the FixedArray containing array's elements. 3725 // Get the FixedArray containing array's elements.
4022 elements = array; 3726 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4023 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4024 array = no_reg; // End of array's live range.
4025 3727
4026 // Check that all array elements are sequential ASCII strings, and 3728 // Check that all array elements are sequential ASCII strings, and
4027 // accumulate the sum of their lengths, as a smi-encoded value. 3729 // accumulate the sum of their lengths.
4028 __ mov(string_length, Operand::Zero()); 3730 __ Mov(string_length, 0);
4029 __ add(element, 3731 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4030 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3732 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4031 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4032 // Loop condition: while (element < elements_end). 3733 // Loop condition: while (element < elements_end).
4033 // Live values in registers: 3734 // Live values in registers:
4034 // elements: Fixed array of strings. 3735 // elements: Fixed array of strings.
4035 // array_length: Length of the fixed array of strings (not smi) 3736 // array_length: Length of the fixed array of strings (not smi)
4036 // separator: Separator string 3737 // separator: Separator string
4037 // string_length: Accumulated sum of string lengths (smi). 3738 // string_length: Accumulated sum of string lengths (not smi).
4038 // element: Current array element. 3739 // element: Current array element.
4039 // elements_end: Array end. 3740 // elements_end: Array end.
4040 if (generate_debug_code_) { 3741 if (FLAG_debug_code) {
4041 __ cmp(array_length, Operand::Zero()); 3742 __ Cmp(array_length, Operand(0));
4042 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin"); 3743 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
4043 } 3744 }
4044 __ bind(&loop); 3745 __ Bind(&loop);
4045 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3746 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4046 __ JumpIfSmi(string, &bailout); 3747 __ JumpIfSmi(string, &bailout);
4047 __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); 3748 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4048 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); 3749 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4049 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); 3750 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4050 __ ldr(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); 3751 __ Ldrsw(scratch1,
4051 __ add(string_length, string_length, Operand(scratch1), SetCC); 3752 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
4052 __ b(vs, &bailout); 3753 __ Adds(string_length, string_length, scratch1);
4053 __ cmp(element, elements_end); 3754 __ B(vs, &bailout);
4054 __ b(lt, &loop); 3755 __ Cmp(element, elements_end);
3756 __ B(lt, &loop);
4055 3757
4056 // If array_length is 1, return elements[0], a string. 3758 // If array_length is 1, return elements[0], a string.
4057 __ cmp(array_length, Operand(1)); 3759 __ Cmp(array_length, 1);
4058 __ b(ne, &not_size_one_array); 3760 __ B(ne, &not_size_one_array);
4059 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); 3761 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
4060 __ b(&done); 3762 __ B(&done);
4061 3763
4062 __ bind(&not_size_one_array); 3764 __ Bind(&not_size_one_array);
4063 3765
4064 // Live values in registers: 3766 // Live values in registers:
4065 // separator: Separator string 3767 // separator: Separator string
4066 // array_length: Length of the array. 3768 // array_length: Length of the array (not smi).
4067 // string_length: Sum of string lengths (smi). 3769 // string_length: Sum of string lengths (not smi).
4068 // elements: FixedArray of strings. 3770 // elements: FixedArray of strings.
4069 3771
4070 // Check that the separator is a flat ASCII string. 3772 // Check that the separator is a flat ASCII string.
4071 __ JumpIfSmi(separator, &bailout); 3773 __ JumpIfSmi(separator, &bailout);
4072 __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset)); 3774 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4073 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); 3775 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4074 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); 3776 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4075 3777
4076 // Add (separator length times array_length) - separator length to the 3778 // Add (separator length times array_length) - separator length to the
4077 // string_length to get the length of the result string. array_length is not 3779 // string_length to get the length of the result string.
4078 // smi but the other values are, so the result is a smi 3780 // Load the separator length as untagged.
4079 __ ldr(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 3781 // We assume little endianness, and that the length is positive.
4080 __ sub(string_length, string_length, Operand(scratch1)); 3782 __ Ldrsw(separator_length,
4081 __ smull(scratch2, ip, array_length, scratch1); 3783 UntagSmiFieldMemOperand(separator,
4082 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are 3784 SeqOneByteString::kLengthOffset));
4083 // zero. 3785 __ Sub(string_length, string_length, separator_length);
4084 __ cmp(ip, Operand::Zero()); 3786 __ Umaddl(string_length, array_length.W(), separator_length.W(),
4085 __ b(ne, &bailout); 3787 string_length);
4086 __ tst(scratch2, Operand(0x80000000));
4087 __ b(ne, &bailout);
4088 __ add(string_length, string_length, Operand(scratch2), SetCC);
4089 __ b(vs, &bailout);
4090 __ SmiUntag(string_length);
4091 3788
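  // Note on the length computation above (illustrative): after the Sub and
  // Umaddl, string_length holds
  //   sum_of_element_lengths - separator_length + array_length * separator_length
  // i.e. array_length elements joined by (array_length - 1) separators.
  // Worked example with assumed values: three strings totalling 10 characters
  // joined by a 2-character separator give 10 - 2 + 3 * 2 = 14.
  // Umaddl multiplies the 32-bit array_length and separator_length and
  // accumulates into the 64-bit string_length, so the explicit smi-overflow
  // bailouts of the ARM version are not needed on this path.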
4092 // Get first element in the array to free up the elements register to be used 3789 // Get first element in the array.
4093 // for the result. 3790 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4094 __ add(element,
4095 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4096 result = elements; // End of live range for elements.
4097 elements = no_reg;
4098 // Live values in registers: 3791 // Live values in registers:
4099 // element: First array element 3792 // element: First array element
4100 // separator: Separator string 3793 // separator: Separator string
4101 // string_length: Length of result string (not smi) 3794 // string_length: Length of result string (not smi)
4102 // array_length: Length of the array. 3795 // array_length: Length of the array (not smi).
4103 __ AllocateAsciiString(result, 3796 __ AllocateAsciiString(result, string_length, scratch1, scratch2, scratch3,
4104 string_length,
4105 scratch1,
4106 scratch2,
4107 elements_end,
4108 &bailout); 3797 &bailout);
3798
4109 // Prepare for looping. Set up elements_end to end of the array. Set 3799 // Prepare for looping. Set up elements_end to end of the array. Set
4110 // result_pos to the position of the result where to write the first 3800 // result_pos to the position of the result where to write the first
4111 // character. 3801 // character.
4112 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 3802 // TODO(all): useless unless AllocateAsciiString trashes the register.
4113 result_pos = array_length; // End of live range for array_length. 3803 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4114 array_length = no_reg; 3804 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4115 __ add(result_pos,
4116 result,
4117 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4118 3805
4119 // Check the length of the separator. 3806 // Check the length of the separator.
4120 __ ldr(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 3807 __ Cmp(separator_length, 1);
4121 __ cmp(scratch1, Operand(Smi::FromInt(1))); 3808 __ B(eq, &one_char_separator);
4122 __ b(eq, &one_char_separator); 3809 __ B(gt, &long_separator);
4123 __ b(gt, &long_separator);
4124 3810
4125 // Empty separator case 3811 // Empty separator case
4126 __ bind(&empty_separator_loop); 3812 __ Bind(&empty_separator_loop);
4127 // Live values in registers: 3813 // Live values in registers:
4128 // result_pos: the position to which we are currently copying characters. 3814 // result_pos: the position to which we are currently copying characters.
4129 // element: Current array element. 3815 // element: Current array element.
4130 // elements_end: Array end. 3816 // elements_end: Array end.
4131 3817
4132 // Copy next array element to the result. 3818 // Copy next array element to the result.
4133 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3819 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4134 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3820 __ Ldrsw(string_length,
4135 __ SmiUntag(string_length); 3821 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4136 __ add(string, 3822 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4137 string, 3823 __ CopyBytes(result_pos, string, string_length, scratch1);
4138 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3824 __ Cmp(element, elements_end);
4139 __ CopyBytes(string, result_pos, string_length, scratch1); 3825 __ B(lt, &empty_separator_loop); // End while (element < elements_end).
4140 __ cmp(element, elements_end); 3826 __ B(&done);
4141 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
4142 ASSERT(result.is(r0));
4143 __ b(&done);
4144 3827
4145 // One-character separator case 3828 // One-character separator case
4146 __ bind(&one_char_separator); 3829 __ Bind(&one_char_separator);
4147 // Replace separator with its ASCII character value. 3830 // Replace separator with its ASCII character value.
4148 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 3831 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4149 // Jump into the loop after the code that copies the separator, so the first 3832 // Jump into the loop after the code that copies the separator, so the first
4150 // element is not preceded by a separator 3833 // element is not preceded by a separator
4151 __ jmp(&one_char_separator_loop_entry); 3834 __ B(&one_char_separator_loop_entry);
4152 3835
4153 __ bind(&one_char_separator_loop); 3836 __ Bind(&one_char_separator_loop);
4154 // Live values in registers: 3837 // Live values in registers:
4155 // result_pos: the position to which we are currently copying characters. 3838 // result_pos: the position to which we are currently copying characters.
4156 // element: Current array element. 3839 // element: Current array element.
4157 // elements_end: Array end. 3840 // elements_end: Array end.
4158 // separator: Single separator ASCII char (in lower byte). 3841 // separator: Single separator ASCII char (in lower byte).
4159 3842
4160 // Copy the separator character to the result. 3843 // Copy the separator character to the result.
4161 __ strb(separator, MemOperand(result_pos, 1, PostIndex)); 3844 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
4162 3845
4163 // Copy next array element to the result. 3846 // Copy next array element to the result.
4164 __ bind(&one_char_separator_loop_entry); 3847 __ Bind(&one_char_separator_loop_entry);
4165 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3848 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4166 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3849 __ Ldrsw(string_length,
4167 __ SmiUntag(string_length); 3850 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4168 __ add(string, 3851 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4169 string, 3852 __ CopyBytes(result_pos, string, string_length, scratch1);
4170 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3853 __ Cmp(element, elements_end);
4171 __ CopyBytes(string, result_pos, string_length, scratch1); 3854 __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
4172 __ cmp(element, elements_end); 3855 __ B(&done);
4173 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
4174 ASSERT(result.is(r0));
4175 __ b(&done);
4176 3856
4177 // Long separator case (separator is more than one character). Entry is at the 3857 // Long separator case (separator is more than one character). Entry is at the
4178 // label long_separator below. 3858 // label long_separator below.
4179 __ bind(&long_separator_loop); 3859 __ Bind(&long_separator_loop);
4180 // Live values in registers: 3860 // Live values in registers:
4181 // result_pos: the position to which we are currently copying characters. 3861 // result_pos: the position to which we are currently copying characters.
4182 // element: Current array element. 3862 // element: Current array element.
4183 // elements_end: Array end. 3863 // elements_end: Array end.
4184 // separator: Separator string. 3864 // separator: Separator string.
4185 3865
4186 // Copy the separator to the result. 3866 // Copy the separator to the result.
4187 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); 3867 // TODO(all): hoist next two instructions.
4188 __ SmiUntag(string_length); 3868 __ Ldrsw(string_length,
4189 __ add(string, 3869 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4190 separator, 3870 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4191 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3871 __ CopyBytes(result_pos, string, string_length, scratch1);
4192 __ CopyBytes(string, result_pos, string_length, scratch1);
4193 3872
4194 __ bind(&long_separator); 3873 __ Bind(&long_separator);
4195 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3874 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4196 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 3875 __ Ldrsw(string_length,
4197 __ SmiUntag(string_length); 3876 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4198 __ add(string, 3877 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4199 string, 3878 __ CopyBytes(result_pos, string, string_length, scratch1);
4200 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3879 __ Cmp(element, elements_end);
4201 __ CopyBytes(string, result_pos, string_length, scratch1); 3880 __ B(lt, &long_separator_loop); // End while (element < elements_end).
4202 __ cmp(element, elements_end); 3881 __ B(&done);
4203 __ b(lt, &long_separator_loop); // End while (element < elements_end).
4204 ASSERT(result.is(r0));
4205 __ b(&done);
4206 3882
4207 __ bind(&bailout); 3883 __ Bind(&bailout);
4208 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3884 // Returning undefined will force slower code to handle it.
4209 __ bind(&done); 3885 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4210 context()->Plug(r0); 3886 __ Bind(&done);
3887 context()->Plug(result);
4211 } 3888 }
4212 3889
4213 3890
4214 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 3891 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4215 Handle<String> name = expr->name(); 3892 Handle<String> name = expr->name();
4216 if (name->length() > 0 && name->Get(0) == '_') { 3893 if (name->length() > 0 && name->Get(0) == '_') {
4217 Comment cmnt(masm_, "[ InlineRuntimeCall"); 3894 Comment cmnt(masm_, "[ InlineRuntimeCall");
4218 EmitInlineRuntimeCall(expr); 3895 EmitInlineRuntimeCall(expr);
4219 return; 3896 return;
4220 } 3897 }
4221 3898
4222 Comment cmnt(masm_, "[ CallRuntime"); 3899 Comment cmnt(masm_, "[ CallRuntime");
4223 ZoneList<Expression*>* args = expr->arguments(); 3900 ZoneList<Expression*>* args = expr->arguments();
4224 3901
4225 if (expr->is_jsruntime()) { 3902 if (expr->is_jsruntime()) {
4226 // Prepare for calling JS runtime function. 3903 // Prepare for calling JS runtime function.
4227 __ ldr(r0, GlobalObjectOperand()); 3904 __ Ldr(x10, GlobalObjectMemOperand());
4228 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset)); 3905 __ Ldr(x11, FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
4229 __ push(r0); 3906 __ Push(x11);
4230 } 3907 }
4231 3908
4232 // Push the arguments ("left-to-right").
4233 int arg_count = args->length(); 3909 int arg_count = args->length();
4234 for (int i = 0; i < arg_count; i++) { 3910 for (int i = 0; i < arg_count; i++) {
4235 VisitForStackValue(args->at(i)); 3911 VisitForStackValue(args->at(i));
4236 } 3912 }
4237 3913
4238 if (expr->is_jsruntime()) { 3914 if (expr->is_jsruntime()) {
4239 // Call the JS runtime function. 3915 // Call the JS runtime function.
4240 __ mov(r2, Operand(expr->name())); 3916 __ Mov(x2, Operand(expr->name()));
4241 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 3917 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
4242 Handle<Code> ic = 3918 Handle<Code> ic =
4243 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 3919 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
4244 CallIC(ic, mode, expr->CallRuntimeFeedbackId()); 3920 CallIC(ic, mode, expr->CallRuntimeFeedbackId());
4245 // Restore context register. 3921 // Restore context register.
4246 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3922 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4247 } else { 3923 } else {
4248 // Call the C runtime function. 3924 // Call the C runtime function.
4249 __ CallRuntime(expr->function(), arg_count); 3925 __ CallRuntime(expr->function(), arg_count);
4250 } 3926 }
4251 context()->Plug(r0); 3927
3928 context()->Plug(x0);
4252 } 3929 }
4253 3930
4254 3931
4255 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3932 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4256 switch (expr->op()) { 3933 switch (expr->op()) {
4257 case Token::DELETE: { 3934 case Token::DELETE: {
4258 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 3935 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4259 Property* property = expr->expression()->AsProperty(); 3936 Property* property = expr->expression()->AsProperty();
4260 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 3937 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4261 3938
4262 if (property != NULL) { 3939 if (property != NULL) {
4263 VisitForStackValue(property->obj()); 3940 VisitForStackValue(property->obj());
4264 VisitForStackValue(property->key()); 3941 VisitForStackValue(property->key());
4265 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE) 3942 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
4266 ? kNonStrictMode : kStrictMode; 3943 ? kNonStrictMode : kStrictMode;
4267 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag))); 3944 __ Mov(x10, Operand(Smi::FromInt(strict_mode_flag)));
4268 __ push(r1); 3945 __ Push(x10);
4269 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3946 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4270 context()->Plug(r0); 3947 context()->Plug(x0);
4271 } else if (proxy != NULL) { 3948 } else if (proxy != NULL) {
4272 Variable* var = proxy->var(); 3949 Variable* var = proxy->var();
4273 // Delete of an unqualified identifier is disallowed in strict mode 3950 // Delete of an unqualified identifier is disallowed in strict mode
4274 // but "delete this" is allowed. 3951 // but "delete this" is allowed.
4275 ASSERT(language_mode() == CLASSIC_MODE || var->is_this()); 3952 ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
4276 if (var->IsUnallocated()) { 3953 if (var->IsUnallocated()) {
4277 __ ldr(r2, GlobalObjectOperand()); 3954 __ Ldr(x12, GlobalObjectMemOperand());
4278 __ mov(r1, Operand(var->name())); 3955 __ Mov(x11, Operand(var->name()));
4279 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode))); 3956 __ Mov(x10, Operand(Smi::FromInt(kNonStrictMode)));
4280 __ Push(r2, r1, r0); 3957 __ Push(x12, x11, x10);
4281 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 3958 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4282 context()->Plug(r0); 3959 context()->Plug(x0);
4283 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 3960 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4284 // Result of deleting non-global, non-dynamic variables is false. 3961 // Result of deleting non-global, non-dynamic variables is false.
4285 // The subexpression does not have side effects. 3962 // The subexpression does not have side effects.
4286 context()->Plug(var->is_this()); 3963 context()->Plug(var->is_this());
4287 } else { 3964 } else {
4288 // Non-global variable. Call the runtime to try to delete from the 3965 // Non-global variable. Call the runtime to try to delete from the
4289 // context where the variable was introduced. 3966 // context where the variable was introduced.
4290 __ push(context_register()); 3967 __ Mov(x2, Operand(var->name()));
4291 __ mov(r2, Operand(var->name())); 3968 __ Push(context_register(), x2);
4292 __ push(r2);
4293 __ CallRuntime(Runtime::kDeleteContextSlot, 2); 3969 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
4294 context()->Plug(r0); 3970 context()->Plug(x0);
4295 } 3971 }
4296 } else { 3972 } else {
4297 // Result of deleting non-property, non-variable reference is true. 3973 // Result of deleting non-property, non-variable reference is true.
4298 // The subexpression may have side effects. 3974 // The subexpression may have side effects.
4299 VisitForEffect(expr->expression()); 3975 VisitForEffect(expr->expression());
4300 context()->Plug(true); 3976 context()->Plug(true);
4301 } 3977 }
4302 break; 3978 break;
4303 } 3980 }
4304
4305 case Token::VOID: { 3981 case Token::VOID: {
4306 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 3982 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4307 VisitForEffect(expr->expression()); 3983 VisitForEffect(expr->expression());
4308 context()->Plug(Heap::kUndefinedValueRootIndex); 3984 context()->Plug(Heap::kUndefinedValueRootIndex);
4309 break; 3985 break;
4310 } 3986 }
4311
4312 case Token::NOT: { 3987 case Token::NOT: {
4313 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 3988 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4314 if (context()->IsEffect()) { 3989 if (context()->IsEffect()) {
4315 // Unary NOT has no side effects so it's only necessary to visit the 3990 // Unary NOT has no side effects so it's only necessary to visit the
4316 // subexpression. Match the optimizing compiler by not branching. 3991 // subexpression. Match the optimizing compiler by not branching.
4317 VisitForEffect(expr->expression()); 3992 VisitForEffect(expr->expression());
4318 } else if (context()->IsTest()) { 3993 } else if (context()->IsTest()) {
4319 const TestContext* test = TestContext::cast(context()); 3994 const TestContext* test = TestContext::cast(context());
4320 // The labels are swapped for the recursive call. 3995 // The labels are swapped for the recursive call.
4321 VisitForControl(expr->expression(), 3996 VisitForControl(expr->expression(),
4322 test->false_label(), 3997 test->false_label(),
4323 test->true_label(), 3998 test->true_label(),
4324 test->fall_through()); 3999 test->fall_through());
4325 context()->Plug(test->true_label(), test->false_label()); 4000 context()->Plug(test->true_label(), test->false_label());
4326 } else { 4001 } else {
4327 // We handle value contexts explicitly rather than simply visiting
4328 // for control and plugging the control flow into the context,
4329 // because we need to prepare a pair of extra administrative AST ids
4330 // for the optimizing compiler.
4331 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue()); 4002 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4003 // TODO(jbramley): This could be much more efficient using (for
4004 // example) the CSEL instruction.
4332 Label materialize_true, materialize_false, done; 4005 Label materialize_true, materialize_false, done;
4333 VisitForControl(expr->expression(), 4006 VisitForControl(expr->expression(),
4334 &materialize_false, 4007 &materialize_false,
4335 &materialize_true, 4008 &materialize_true,
4336 &materialize_true); 4009 &materialize_true);
4337 __ bind(&materialize_true); 4010
4011 __ Bind(&materialize_true);
4338 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 4012 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4339 __ LoadRoot(r0, Heap::kTrueValueRootIndex); 4013 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
4340 if (context()->IsStackValue()) __ push(r0); 4014 __ B(&done);
4341 __ jmp(&done); 4015
4342 __ bind(&materialize_false); 4016 __ Bind(&materialize_false);
4343 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); 4017 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4344 __ LoadRoot(r0, Heap::kFalseValueRootIndex); 4018 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4345 if (context()->IsStackValue()) __ push(r0); 4019 __ B(&done);
4346 __ bind(&done); 4020
4021 __ Bind(&done);
4022 if (context()->IsStackValue()) {
4023 __ Push(result_register());
4024 }
4347 } 4025 }
4348 break; 4026 break;
4349 } 4027 }
4350
4351 case Token::TYPEOF: { 4028 case Token::TYPEOF: {
4352 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 4029 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4353 { StackValueContext context(this); 4030 {
4031 StackValueContext context(this);
4354 VisitForTypeofValue(expr->expression()); 4032 VisitForTypeofValue(expr->expression());
4355 } 4033 }
4356 __ CallRuntime(Runtime::kTypeof, 1); 4034 __ CallRuntime(Runtime::kTypeof, 1);
4357 context()->Plug(r0); 4035 context()->Plug(x0);
4358 break; 4036 break;
4359 } 4037 }
4360 4038 case Token::SUB: {
4361 case Token::SUB:
4362 EmitUnaryOperation(expr, "[ UnaryOperation (SUB)"); 4039 EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
4363 break; 4040 break;
4364 4041 }
4365 case Token::BIT_NOT: 4042 case Token::BIT_NOT: {
4366 EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)"); 4043 EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
4367 break; 4044 break;
4368 4045 }
4369 default: 4046 default:
4370 UNREACHABLE(); 4047 UNREACHABLE();
4371 } 4048 }
4372 } 4049 }
4373 4050
4374 4051
4375 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr, 4052 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
4376 const char* comment) { 4053 const char* comment) {
4377 // TODO(svenpanne): Allowing format strings in Comment would be nice here...
4378 Comment cmt(masm_, comment); 4054 Comment cmt(masm_, comment);
4379 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); 4055 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
4380 UnaryOverwriteMode overwrite = 4056 UnaryOverwriteMode overwrite =
4381 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; 4057 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
4382 UnaryOpStub stub(expr->op(), overwrite); 4058 UnaryOpStub stub(expr->op(), overwrite);
4383 // UnaryOpStub expects the argument to be in the 4059 // UnaryOpStub expects the argument to be in the accumulator register x0.
4384 // accumulator register r0.
4385 VisitForAccumulatorValue(expr->expression()); 4060 VisitForAccumulatorValue(expr->expression());
4386 SetSourcePosition(expr->position()); 4061 SetSourcePosition(expr->position());
4387 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, 4062 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
4388 expr->UnaryOperationFeedbackId()); 4063 expr->UnaryOperationFeedbackId());
4389 context()->Plug(r0); 4064 context()->Plug(x0);
4390 } 4065 }
4391 4066
4392 4067
4393 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4068 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4394 Comment cmnt(masm_, "[ CountOperation"); 4069 Comment cmnt(masm_, "[ CountOperation");
4395 SetSourcePosition(expr->position()); 4070 SetSourcePosition(expr->position());
4396 4071
4397 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' 4072 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
4398 // as the left-hand side. 4073 // as the left-hand side.
4399 if (!expr->expression()->IsValidLeftHandSide()) { 4074 if (!expr->expression()->IsValidLeftHandSide()) {
(...skipping 14 matching lines...)
4414 } 4089 }
4415 4090
4416 // Evaluate expression and get value. 4091 // Evaluate expression and get value.
4417 if (assign_type == VARIABLE) { 4092 if (assign_type == VARIABLE) {
4418 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); 4093 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4419 AccumulatorValueContext context(this); 4094 AccumulatorValueContext context(this);
4420 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4095 EmitVariableLoad(expr->expression()->AsVariableProxy());
4421 } else { 4096 } else {
4422 // Reserve space for result of postfix operation. 4097 // Reserve space for result of postfix operation.
4423 if (expr->is_postfix() && !context()->IsEffect()) { 4098 if (expr->is_postfix() && !context()->IsEffect()) {
4424 __ mov(ip, Operand(Smi::FromInt(0))); 4099 __ Push(xzr);
4425 __ push(ip);
4426 } 4100 }
4427 if (assign_type == NAMED_PROPERTY) { 4101 if (assign_type == NAMED_PROPERTY) {
4428 // Put the object both on the stack and in the accumulator. 4102 // Put the object both on the stack and in the accumulator.
4429 VisitForAccumulatorValue(prop->obj()); 4103 VisitForAccumulatorValue(prop->obj());
4430 __ push(r0); 4104 __ Push(x0);
4431 EmitNamedPropertyLoad(prop); 4105 EmitNamedPropertyLoad(prop);
4432 } else { 4106 } else {
4107 // KEYED_PROPERTY
4433 VisitForStackValue(prop->obj()); 4108 VisitForStackValue(prop->obj());
4434 VisitForAccumulatorValue(prop->key()); 4109 VisitForAccumulatorValue(prop->key());
4435 __ ldr(r1, MemOperand(sp, 0)); 4110 __ Peek(x1, 0);
4436 __ push(r0); 4111 __ Push(x0);
4437 EmitKeyedPropertyLoad(prop); 4112 EmitKeyedPropertyLoad(prop);
4438 } 4113 }
4439 } 4114 }
4440 4115
4441 // We need a second deoptimization point after loading the value 4116 // We need a second deoptimization point after loading the value
4442 // in case evaluating the property load may have a side effect. 4117 // in case evaluating the property load may have a side effect.
4443 if (assign_type == VARIABLE) { 4118 if (assign_type == VARIABLE) {
4444 PrepareForBailout(expr->expression(), TOS_REG); 4119 PrepareForBailout(expr->expression(), TOS_REG);
4445 } else { 4120 } else {
4446 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 4121 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4447 } 4122 }
4448 4123
4449 // Call ToNumber only if operand is not a smi. 4124 // Call ToNumber only if operand is not a smi.
4450 Label no_conversion; 4125 Label no_conversion;
4451 __ JumpIfSmi(r0, &no_conversion); 4126 __ JumpIfSmi(x0, &no_conversion);
4452 ToNumberStub convert_stub; 4127 ToNumberStub convert_stub;
4453 __ CallStub(&convert_stub); 4128 __ CallStub(&convert_stub);
4454 __ bind(&no_conversion); 4129 __ Bind(&no_conversion);
4455 4130
4456 // Save result for postfix expressions. 4131 // Save result for postfix expressions.
4457 if (expr->is_postfix()) { 4132 if (expr->is_postfix()) {
4458 if (!context()->IsEffect()) { 4133 if (!context()->IsEffect()) {
4459 // Save the result on the stack. If we have a named or keyed property 4134 // Save the result on the stack. If we have a named or keyed property
4460 // we store the result under the receiver that is currently on top 4135 // we store the result under the receiver that is currently on top
4461 // of the stack. 4136 // of the stack.
4462 switch (assign_type) { 4137 switch (assign_type) {
4463 case VARIABLE: 4138 case VARIABLE:
4464 __ push(r0); 4139 __ Push(x0);
4465 break; 4140 break;
4466 case NAMED_PROPERTY: 4141 case NAMED_PROPERTY:
4467 __ str(r0, MemOperand(sp, kPointerSize)); 4142 __ Poke(x0, kXRegSizeInBytes);
4468 break; 4143 break;
4469 case KEYED_PROPERTY: 4144 case KEYED_PROPERTY:
4470 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4145 __ Poke(x0, 2 * kXRegSizeInBytes);
4471 break; 4146 break;
4472 } 4147 }
4473 } 4148 }
4474 } 4149 }
4475 4150
4476 4151
4477 // Inline smi case if we are in a loop. 4152 // Inline smi case if we are in a loop.
4478 Label stub_call, done; 4153 Label done;
4479 JumpPatchSite patch_site(masm_); 4154 JumpPatchSite patch_site(masm_);
4480 4155
4481 int count_value = expr->op() == Token::INC ? 1 : -1; 4156 int count_value = expr->op() == Token::INC ? 1 : -1;
4482 if (ShouldInlineSmiCase(expr->op())) { 4157 if (ShouldInlineSmiCase(expr->op())) {
4483 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); 4158 Label stub_call;
4484 __ b(vs, &stub_call); 4159 // Try the add using SMI operations.
4160 __ Adds(x0, x0, Operand(Smi::FromInt(count_value)));
4161 __ B(vs, &stub_call);
4485 // We could eliminate this smi check if we split the code at 4162 // We could eliminate this smi check if we split the code at
4486 // the first smi check before calling ToNumber. 4163 // the first smi check before calling ToNumber.
4487 patch_site.EmitJumpIfSmi(r0, &done); 4164 patch_site.EmitJumpIfSmi(x0, &done);
4488 4165
4489 __ bind(&stub_call); 4166 // Reverse the speculative add, then fall back to a stub call.
4490 // Call stub. Undo operation first. 4167 __ Bind(&stub_call);
4491 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 4168 __ Sub(x0, x0, Operand(Smi::FromInt(count_value)));
4492 } 4169 }
4493 __ mov(r1, r0); 4170 __ Mov(x1, x0);
4494 __ mov(r0, Operand(Smi::FromInt(count_value))); 4171 __ Mov(x0, Operand(Smi::FromInt(count_value)));
4495 4172
4496 // Record position before stub call. 4173 // Record position before stub call.
4497 SetSourcePosition(expr->position()); 4174 SetSourcePosition(expr->position());
4498 4175
4499 BinaryOpStub stub(Token::ADD, NO_OVERWRITE); 4176 {
4500 CallIC(stub.GetCode(isolate()), 4177 Assembler::BlockConstPoolScope scope(masm_);
4501 RelocInfo::CODE_TARGET, 4178 BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4502 expr->CountBinOpFeedbackId()); 4179 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
4503 patch_site.EmitPatchInfo(); 4180 expr->CountBinOpFeedbackId());
4504 __ bind(&done); 4181 patch_site.EmitPatchInfo();
4182 }
4183 __ Bind(&done);
4505 4184
4506 // Store the value returned in r0. 4185 // Store the value returned in x0.
4507 switch (assign_type) { 4186 switch (assign_type) {
4508 case VARIABLE: 4187 case VARIABLE:
4509 if (expr->is_postfix()) { 4188 if (expr->is_postfix()) {
4510 { EffectContext context(this); 4189 { EffectContext context(this);
4511 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4190 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4512 Token::ASSIGN); 4191 Token::ASSIGN);
4513 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4192 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4514 context.Plug(r0); 4193 context.Plug(x0);
4515 } 4194 }
4516 // For all contexts except EffectContext we have the result on 4195 // For all contexts except EffectContext we have the result on
4517 // top of the stack. 4196 // top of the stack.
4518 if (!context()->IsEffect()) { 4197 if (!context()->IsEffect()) {
4519 context()->PlugTOS(); 4198 context()->PlugTOS();
4520 } 4199 }
4521 } else { 4200 } else {
4522 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4201 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4523 Token::ASSIGN); 4202 Token::ASSIGN);
4524 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4203 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4525 context()->Plug(r0); 4204 context()->Plug(x0);
4526 } 4205 }
4527 break; 4206 break;
4528 case NAMED_PROPERTY: { 4207 case NAMED_PROPERTY: {
4529 __ mov(r2, Operand(prop->key()->AsLiteral()->handle())); 4208 __ Mov(x2, Operand(prop->key()->AsLiteral()->handle()));
4530 __ pop(r1); 4209 __ Pop(x1);
4531 Handle<Code> ic = is_classic_mode() 4210 Handle<Code> ic = is_classic_mode()
4532 ? isolate()->builtins()->StoreIC_Initialize() 4211 ? isolate()->builtins()->StoreIC_Initialize()
4533 : isolate()->builtins()->StoreIC_Initialize_Strict(); 4212 : isolate()->builtins()->StoreIC_Initialize_Strict();
4534 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId()); 4213 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4535 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4214 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4536 if (expr->is_postfix()) { 4215 if (expr->is_postfix()) {
4537 if (!context()->IsEffect()) { 4216 if (!context()->IsEffect()) {
4538 context()->PlugTOS(); 4217 context()->PlugTOS();
4539 } 4218 }
4540 } else { 4219 } else {
4541 context()->Plug(r0); 4220 context()->Plug(x0);
4542 } 4221 }
4543 break; 4222 break;
4544 } 4223 }
4545 case KEYED_PROPERTY: { 4224 case KEYED_PROPERTY: {
4546 __ pop(r1); // Key. 4225 __ Pop(x1); // Key.
4547 __ pop(r2); // Receiver. 4226 __ Pop(x2); // Receiver.
4548 Handle<Code> ic = is_classic_mode() 4227 Handle<Code> ic = is_classic_mode()
4549 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4228 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4550 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4229 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4551 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId()); 4230 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4552 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4231 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4553 if (expr->is_postfix()) { 4232 if (expr->is_postfix()) {
4554 if (!context()->IsEffect()) { 4233 if (!context()->IsEffect()) {
4555 context()->PlugTOS(); 4234 context()->PlugTOS();
4556 } 4235 }
4557 } else { 4236 } else {
4558 context()->Plug(r0); 4237 context()->Plug(x0);
4559 } 4238 }
4560 break; 4239 break;
4561 } 4240 }
4562 } 4241 }
4563 } 4242 }
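Note on the smi fast path above: a minimal sketch (not V8 code) of why a single Adds both performs the count operation and detects overflow, assuming this port's smi encoding (a 32-bit payload stored in the upper half of the 64-bit tagged word). The helper name and the use of __builtin_add_overflow are illustrative only.

#include <cstdint>

// Emulates: Adds x0, x0, Operand(Smi::FromInt(count_value)); B(vs, &stub_call).
static bool SmiAddOverflows(int64_t tagged_operand, int32_t count_value,
                            int64_t* result) {
  // Smi::FromInt(count_value) is the count shifted into smi form (assumed
  // here to be value << 32), so a plain 64-bit add performs the operation.
  int64_t tagged_count = static_cast<int64_t>(count_value) << 32;
  // Signed 64-bit overflow corresponds to the V flag tested by B(vs, ...);
  // on overflow the generated code undoes the add and falls back to the stub.
  return __builtin_add_overflow(tagged_operand, tagged_count, result);
}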
4564 4243
4565 4244
4566 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4245 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4567 ASSERT(!context()->IsEffect()); 4246 ASSERT(!context()->IsEffect());
4568 ASSERT(!context()->IsTest()); 4247 ASSERT(!context()->IsTest());
4569 VariableProxy* proxy = expr->AsVariableProxy(); 4248 VariableProxy* proxy = expr->AsVariableProxy();
4570 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4249 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4571 Comment cmnt(masm_, "Global variable"); 4250 Comment cmnt(masm_, "Global variable");
4572 __ ldr(r0, GlobalObjectOperand()); 4251 __ Ldr(x0, GlobalObjectMemOperand());
4573 __ mov(r2, Operand(proxy->name())); 4252 __ Mov(x2, Operand(proxy->name()));
4574 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 4253 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4575 // Use a regular load, not a contextual load, to avoid a reference 4254 // Use a regular load, not a contextual load, to avoid a reference
4576 // error. 4255 // error.
4577 CallIC(ic); 4256 CallIC(ic);
4578 PrepareForBailout(expr, TOS_REG); 4257 PrepareForBailout(expr, TOS_REG);
4579 context()->Plug(r0); 4258 context()->Plug(x0);
4580 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4259 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4581 Label done, slow; 4260 Label done, slow;
4582 4261
4583 // Generate code for loading from variables potentially shadowed 4262 // Generate code for loading from variables potentially shadowed
4584 // by eval-introduced variables. 4263 // by eval-introduced variables.
4585 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4264 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4586 4265
4587 __ bind(&slow); 4266 __ Bind(&slow);
4588 __ mov(r0, Operand(proxy->name())); 4267 __ Mov(x0, Operand(proxy->name()));
4589 __ Push(cp, r0); 4268 __ Push(cp, x0);
4590 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 4269 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4591 PrepareForBailout(expr, TOS_REG); 4270 PrepareForBailout(expr, TOS_REG);
4592 __ bind(&done); 4271 __ Bind(&done);
4593 4272
4594 context()->Plug(r0); 4273 context()->Plug(x0);
4595 } else { 4274 } else {
4596 // This expression cannot throw a reference error at the top level. 4275 // This expression cannot throw a reference error at the top level.
4597 VisitInDuplicateContext(expr); 4276 VisitInDuplicateContext(expr);
4598 } 4277 }
4599 } 4278 }
4600 4279
4601 4280
4602 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 4281 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4603 Expression* sub_expr, 4282 Expression* sub_expr,
4604 Handle<String> check) { 4283 Handle<String> check) {
4284 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4285 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4605 Label materialize_true, materialize_false; 4286 Label materialize_true, materialize_false;
4606 Label* if_true = NULL; 4287 Label* if_true = NULL;
4607 Label* if_false = NULL; 4288 Label* if_false = NULL;
4608 Label* fall_through = NULL; 4289 Label* fall_through = NULL;
4609 context()->PrepareTest(&materialize_true, &materialize_false, 4290 context()->PrepareTest(&materialize_true, &materialize_false,
4610 &if_true, &if_false, &fall_through); 4291 &if_true, &if_false, &fall_through);
4611 4292
4612 { AccumulatorValueContext context(this); 4293 { AccumulatorValueContext context(this);
4613 VisitForTypeofValue(sub_expr); 4294 VisitForTypeofValue(sub_expr);
4614 } 4295 }
4615 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4296 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4616 4297
4617 if (check->Equals(isolate()->heap()->number_string())) { 4298 if (check->Equals(isolate()->heap()->number_string())) {
4618 __ JumpIfSmi(r0, if_true); 4299 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4619 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4300 __ JumpIfSmi(x0, if_true);
4620 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 4301 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4621 __ cmp(r0, ip); 4302 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4622 Split(eq, if_true, if_false, fall_through); 4303 Split(eq, if_true, if_false, fall_through);
4623 } else if (check->Equals(isolate()->heap()->string_string())) { 4304 } else if (check->Equals(isolate()->heap()->string_string())) {
4624 __ JumpIfSmi(r0, if_false); 4305 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4306 __ JumpIfSmi(x0, if_false);
4625 // Check for undetectable objects => false. 4307 // Check for undetectable objects => false.
4626 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); 4308 __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4627 __ b(ge, if_false); 4309 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4628 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4310 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4629 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4311 fall_through);
4630 Split(eq, if_true, if_false, fall_through);
4631 } else if (check->Equals(isolate()->heap()->symbol_string())) { 4312 } else if (check->Equals(isolate()->heap()->symbol_string())) {
4632 __ JumpIfSmi(r0, if_false); 4313 __ JumpIfSmi(x0, if_false);
4633 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE); 4314 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4634 Split(eq, if_true, if_false, fall_through); 4315 Split(eq, if_true, if_false, fall_through);
4635 } else if (check->Equals(isolate()->heap()->boolean_string())) { 4316 } else if (check->Equals(isolate()->heap()->boolean_string())) {
4636 __ CompareRoot(r0, Heap::kTrueValueRootIndex); 4317 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4637 __ b(eq, if_true); 4318 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4638 __ CompareRoot(r0, Heap::kFalseValueRootIndex); 4319 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4639 Split(eq, if_true, if_false, fall_through); 4320 Split(eq, if_true, if_false, fall_through);
4640 } else if (FLAG_harmony_typeof && 4321 } else if (FLAG_harmony_typeof &&
4641 check->Equals(isolate()->heap()->null_string())) { 4322 check->Equals(isolate()->heap()->null_string())) {
4642 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4323 __ CompareRoot(x0, Heap::kNullValueRootIndex);
4643 Split(eq, if_true, if_false, fall_through); 4324 Split(eq, if_true, if_false, fall_through);
4644 } else if (check->Equals(isolate()->heap()->undefined_string())) { 4325 } else if (check->Equals(isolate()->heap()->undefined_string())) {
4645 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); 4326 ASM_LOCATION(
4646 __ b(eq, if_true); 4327 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4647 __ JumpIfSmi(r0, if_false); 4328 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4329 __ JumpIfSmi(x0, if_false);
4648 // Check for undetectable objects => true. 4330 // Check for undetectable objects => true.
4649 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4331 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4650 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4332 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4651 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4333 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4652 Split(ne, if_true, if_false, fall_through); 4334 fall_through);
4335 } else if (check->Equals(isolate()->heap()->function_string())) {
4336 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4337 __ JumpIfSmi(x0, if_false);
4338 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4339 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4340 __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4341 fall_through);
4653 4342
4654 } else if (check->Equals(isolate()->heap()->function_string())) {
4655 __ JumpIfSmi(r0, if_false);
4656 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4657 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4658 __ b(eq, if_true);
4659 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4660 Split(eq, if_true, if_false, fall_through);
4661 } else if (check->Equals(isolate()->heap()->object_string())) { 4343 } else if (check->Equals(isolate()->heap()->object_string())) {
4662 __ JumpIfSmi(r0, if_false); 4344 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4345 __ JumpIfSmi(x0, if_false);
4663 if (!FLAG_harmony_typeof) { 4346 if (!FLAG_harmony_typeof) {
4664 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4347 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4665 __ b(eq, if_true);
4666 } 4348 }
4667 // Check for JS objects => true. 4349 // Check for JS objects => true.
4668 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 4350 Register map = x10;
4669 __ b(lt, if_false); 4351 __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4670 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4352 if_false, lt);
4671 __ b(gt, if_false); 4353 __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4354 __ B(gt, if_false);
4672 // Check for undetectable objects => false. 4355 // Check for undetectable objects => false.
4673 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4356 __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
4674 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4357
4675 Split(eq, if_true, if_false, fall_through); 4358 __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4359 fall_through);
4360
4676 } else { 4361 } else {
4677 if (if_false != fall_through) __ jmp(if_false); 4362 if (if_false != fall_through) __ B(if_false);
4678 } 4363 }
4679 context()->Plug(if_true, if_false); 4364 context()->Plug(if_true, if_false);
4680 } 4365 }
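The "string", "undefined" and "object" branches above all load Map::kBitFieldOffset and test the kIsUndetectable bit; undetectable objects report typeof "undefined", so they must be excluded from "string" and "object". A minimal sketch (not V8 code) of that test, with the bit position passed in rather than assumed and an illustrative helper name:

#include <cstdint>

// Mirrors: Ldrb x1, FieldMemOperand(map, Map::kBitFieldOffset);
//          TestAndSplit(x1, 1 << Map::kIsUndetectable, ...).
static bool IsUndetectable(uint8_t map_bit_field, int is_undetectable_bit) {
  return (map_bit_field & (1u << is_undetectable_bit)) != 0;
}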
4681 4366
4682 4367
4683 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 4368 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4684 Comment cmnt(masm_, "[ CompareOperation"); 4369 Comment cmnt(masm_, "[ CompareOperation");
4685 SetSourcePosition(expr->position()); 4370 SetSourcePosition(expr->position());
4686 4371
4687 // First we try a fast inlined version of the compare when one of 4372 // Try to generate an optimized comparison with a literal value.
4688 // the operands is a literal. 4373 // TODO(jbramley): This only checks common values like NaN or undefined.
4689 if (TryLiteralCompare(expr)) return; 4374 // Should it also handle A64 immediate operands?
4375 if (TryLiteralCompare(expr)) {
4376 return;
4377 }
4690 4378
4691 // Always perform the comparison for its control flow. Pack the result 4379 // Assign labels according to context()->PrepareTest.
4692 // into the expression's context after the comparison is performed. 4380 Label materialize_true;
4693 Label materialize_true, materialize_false; 4381 Label materialize_false;
4694 Label* if_true = NULL; 4382 Label* if_true = NULL;
4695 Label* if_false = NULL; 4383 Label* if_false = NULL;
4696 Label* fall_through = NULL; 4384 Label* fall_through = NULL;
4697 context()->PrepareTest(&materialize_true, &materialize_false, 4385 context()->PrepareTest(&materialize_true, &materialize_false,
4698 &if_true, &if_false, &fall_through); 4386 &if_true, &if_false, &fall_through);
4699 4387
4700 Token::Value op = expr->op(); 4388 Token::Value op = expr->op();
4701 VisitForStackValue(expr->left()); 4389 VisitForStackValue(expr->left());
4702 switch (op) { 4390 switch (op) {
4703 case Token::IN: 4391 case Token::IN:
4704 VisitForStackValue(expr->right()); 4392 VisitForStackValue(expr->right());
4705 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4393 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4706 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); 4394 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4707 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 4395 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4708 __ cmp(r0, ip);
4709 Split(eq, if_true, if_false, fall_through); 4396 Split(eq, if_true, if_false, fall_through);
4710 break; 4397 break;
4711 4398
4712 case Token::INSTANCEOF: { 4399 case Token::INSTANCEOF: {
4713 VisitForStackValue(expr->right()); 4400 VisitForStackValue(expr->right());
4714 InstanceofStub stub(InstanceofStub::kNoFlags); 4401 InstanceofStub stub(InstanceofStub::kNoFlags);
4715 __ CallStub(&stub); 4402 __ CallStub(&stub);
4716 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4403 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4717 // The stub returns 0 for true. 4404 // The stub returns 0 for true.
4718 __ tst(r0, r0); 4405 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4719 Split(eq, if_true, if_false, fall_through);
4720 break; 4406 break;
4721 } 4407 }
4722 4408
4723 default: { 4409 default: {
4724 VisitForAccumulatorValue(expr->right()); 4410 VisitForAccumulatorValue(expr->right());
4725 Condition cond = CompareIC::ComputeCondition(op); 4411 Condition cond = CompareIC::ComputeCondition(op);
4726 __ pop(r1);
4727 4412
4728 bool inline_smi_code = ShouldInlineSmiCase(op); 4413 // Pop the stack value.
4414 __ Pop(x1);
4415
4729 JumpPatchSite patch_site(masm_); 4416 JumpPatchSite patch_site(masm_);
4730 if (inline_smi_code) { 4417 if (ShouldInlineSmiCase(op)) {
4731 Label slow_case; 4418 Label slow_case;
4732 __ orr(r2, r0, Operand(r1)); 4419 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4733 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 4420 __ Cmp(x1, x0);
4734 __ cmp(r1, r0);
4735 Split(cond, if_true, if_false, NULL); 4421 Split(cond, if_true, if_false, NULL);
4736 __ bind(&slow_case); 4422 __ Bind(&slow_case);
4737 } 4423 }
4738 4424
4739 // Record position and call the compare IC. 4425 // Record position and call the compare IC.
4740 SetSourcePosition(expr->position()); 4426 SetSourcePosition(expr->position());
4741 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 4427 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
4742 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId()); 4428 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4743 patch_site.EmitPatchInfo(); 4429 patch_site.EmitPatchInfo();
4744 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4430 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4745 __ cmp(r0, Operand::Zero()); 4431 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4746 Split(cond, if_true, if_false, fall_through);
4747 } 4432 }
4748 } 4433 }
4749 4434
4750 // Convert the result of the comparison into one expected for this 4435 // Convert the result of the comparison into one expected for this
4751 // expression's context. 4436 // expression's context.
4752 context()->Plug(if_true, if_false); 4437 context()->Plug(if_true, if_false);
4753 } 4438 }
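The inline fast case above only compares the operands directly when both are smis; EmitJumpIfEitherNotSmi otherwise falls through to the patchable CompareIC call. A minimal sketch (not V8 code) of that guard, assuming the usual V8 smi tag (bit 0 of a tagged word is 0 for a smi); the helper name is illustrative:

#include <cstdint>

static bool BothAreSmis(uint64_t lhs, uint64_t rhs) {
  // Or-ing the operands keeps a set tag bit from either value, so a single
  // bit test covers both operands at once.
  return ((lhs | rhs) & 1) == 0;
}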
4754 4439
4755 4440
4756 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, 4441 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4757 Expression* sub_expr, 4442 Expression* sub_expr,
4758 NilValue nil) { 4443 NilValue nil) {
4444 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4759 Label materialize_true, materialize_false; 4445 Label materialize_true, materialize_false;
4760 Label* if_true = NULL; 4446 Label* if_true = NULL;
4761 Label* if_false = NULL; 4447 Label* if_false = NULL;
4762 Label* fall_through = NULL; 4448 Label* fall_through = NULL;
4763 context()->PrepareTest(&materialize_true, &materialize_false, 4449 context()->PrepareTest(&materialize_true, &materialize_false,
4764 &if_true, &if_false, &fall_through); 4450 &if_true, &if_false, &fall_through);
4765 4451
4766 VisitForAccumulatorValue(sub_expr); 4452 VisitForAccumulatorValue(sub_expr);
4767 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4453 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4454
4768 if (expr->op() == Token::EQ_STRICT) { 4455 if (expr->op() == Token::EQ_STRICT) {
4769 Heap::RootListIndex nil_value = nil == kNullValue ? 4456 Heap::RootListIndex nil_value = nil == kNullValue ?
4770 Heap::kNullValueRootIndex : 4457 Heap::kNullValueRootIndex :
4771 Heap::kUndefinedValueRootIndex; 4458 Heap::kUndefinedValueRootIndex;
4772 __ LoadRoot(r1, nil_value); 4459 __ CompareRoot(x0, nil_value);
4773 __ cmp(r0, r1);
4774 Split(eq, if_true, if_false, fall_through); 4460 Split(eq, if_true, if_false, fall_through);
4775 } else { 4461 } else {
4776 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); 4462 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4777 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId()); 4463 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4778 __ cmp(r0, Operand(0)); 4464 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4779 Split(ne, if_true, if_false, fall_through);
4780 } 4465 }
4466
4781 context()->Plug(if_true, if_false); 4467 context()->Plug(if_true, if_false);
4782 } 4468 }
4783 4469
4784 4470
4785 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 4471 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4786 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4472 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4787 context()->Plug(r0); 4473 context()->Plug(x0);
4788 } 4474 }
4789 4475
4790 4476
4477 void FullCodeGenerator::VisitYield(Yield* expr) {
4478 Comment cmnt(masm_, "[ Yield");
4479 // Evaluate yielded value first; the initial iterator definition depends on
4480 // this. It stays on the stack while we update the iterator.
4481 VisitForStackValue(expr->expression());
4482
4483 switch (expr->yield_kind()) {
4484 case Yield::INITIAL:
4485 case Yield::SUSPEND: {
4486 VisitForStackValue(expr->generator_object());
4487 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4488 __ Ldr(context_register(),
4489 MemOperand(fp, StandardFrameConstants::kContextOffset));
4490
4491 Label resume;
4492 __ JumpIfNotRoot(result_register(), Heap::kTheHoleValueRootIndex,
4493 &resume);
4494 if (expr->yield_kind() == Yield::SUSPEND) {
4495 EmitReturnIteratorResult(false);
4496 } else {
4497 __ Pop(result_register());
4498 EmitReturnSequence();
4499 }
4500
4501 __ Bind(&resume);
4502 context()->Plug(result_register());
4503 break;
4504 }
4505
4506 case Yield::FINAL: {
4507 VisitForAccumulatorValue(expr->generator_object());
4508 __ Mov(x1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
4509 __ Str(x1, FieldMemOperand(result_register(),
4510 JSGeneratorObject::kContinuationOffset));
4511 EmitReturnIteratorResult(true);
4512 break;
4513 }
4514
4515 case Yield::DELEGATING: {
4516 VisitForStackValue(expr->generator_object());
4517
4518 // Initial stack layout is as follows:
4519 // [sp + 1 * kPointerSize] iter
4520 // [sp + 0 * kPointerSize] g
4521
4522 // TODO(jbramley): Tidy this up once the merge is done, using named
4523 // registers and suchlike. The implementation changes a little by
4524 // bleeding_edge so I don't want to spend too much time on it now.
4525
4526 Label l_catch, l_try, l_resume, l_next, l_call, l_loop;
4527 // Initial send value is undefined.
4528 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
4529 __ B(&l_next);
4530
4531 // catch (e) { receiver = iter; f = iter.throw; arg = e; goto l_call; }
4532 __ Bind(&l_catch);
4533 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
4534 __ Peek(x3, 1 * kPointerSize); // iter
4535 __ Push(x3); // iter
4536 __ Push(x0); // exception
4537 __ Mov(x0, x3); // iter
4538 __ LoadRoot(x2, Heap::kthrow_stringRootIndex); // "throw"
4539 Handle<Code> throw_ic = isolate()->builtins()->LoadIC_Initialize();
4540 CallIC(throw_ic); // iter.throw in x0
4541 __ B(&l_call);
4542
4543 // try { received = yield result.value }
4544 __ Bind(&l_try);
4545 __ Pop(x0); // result.value
4546 __ PushTryHandler(StackHandler::CATCH, expr->index());
4547 const int handler_size = StackHandlerConstants::kSize;
4548 __ Push(x0); // result.value
4549 __ Peek(x3, (1 * kPointerSize) + handler_size); // g
4550 __ Push(x3); // g
4551 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4552 __ Ldr(context_register(),
4553 MemOperand(fp, StandardFrameConstants::kContextOffset));
4554 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &l_resume);
4555 EmitReturnIteratorResult(false);
4556 __ Bind(&l_resume); // received in x0
4557 __ PopTryHandler();
4558
4559 // receiver = iter; f = iter.next; arg = received;
4560 __ Bind(&l_next);
4561 __ Peek(x3, 1 * kPointerSize); // iter
4562 __ Push(x3); // iter
4563 __ Push(x0); // received
4564 __ Mov(x0, x3); // iter
4565 __ LoadRoot(x2, Heap::knext_stringRootIndex); // "next"
4566 Handle<Code> next_ic = isolate()->builtins()->LoadIC_Initialize();
4567 CallIC(next_ic); // iter.next in x0
4568
4569 // result = f.call(receiver, arg);
4570 __ Bind(&l_call);
4571 Label l_call_runtime;
4572 __ JumpIfSmi(x0, &l_call_runtime);
4573 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &l_call_runtime);
4574 __ Mov(x1, x0);
4575 ParameterCount count(1);
4576 __ InvokeFunction(x1, count, CALL_FUNCTION,
4577 NullCallWrapper(), CALL_AS_METHOD);
4578 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4579 __ B(&l_loop);
4580 __ Bind(&l_call_runtime);
4581 __ Push(x0);
4582 __ CallRuntime(Runtime::kCall, 3);
4583
4584 // val = result.value; if (!result.done) goto l_try;
4585 __ Bind(&l_loop);
4586 // result.value
4587 __ Push(x0); // save result
4588 __ LoadRoot(x2, Heap::kvalue_stringRootIndex); // "value"
4589 Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize();
4590 CallIC(value_ic); // result.value in x0
4591 __ Pop(x1); // result
4592 __ Push(x0); // result.value
4593 __ Mov(x0, x1); // result
4594 __ LoadRoot(x2, Heap::kdone_stringRootIndex); // "done"
4595 Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize();
4596 CallIC(done_ic); // result.done in x0
4597 // The ToBooleanStub argument (result.done) is in x0.
4598 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
4599 CallIC(bool_ic);
4600 __ Cbz(x0, &l_try);
4601
4602 // result.value
4603 __ Pop(x0); // result.value
4604 context()->DropAndPlug(2, x0); // drop iter and g
4605 break;
4606 }
4607 }
4608 }
4609
4610
4611 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
4612 Expression *value,
4613 JSGeneratorObject::ResumeMode resume_mode) {
4614 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
4615 Register generator_object = x1;
4616 Register the_hole = x2;
4617 Register operand_stack_size = w3;
4618 Register function = x4;
4619
4620 // The value stays in x0, and is ultimately read by the resumed generator, as
4621 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. x1
4622 // will hold the generator object until the activation has been resumed.
4623 VisitForStackValue(generator);
4624 VisitForAccumulatorValue(value);
4625 __ Pop(generator_object);
4626
4627 // Check generator state.
4628 Label wrong_state, done;
4629 __ Ldr(x10, FieldMemOperand(generator_object,
4630 JSGeneratorObject::kContinuationOffset));
4631 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
4632 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
4633 __ CompareAndBranch(x10, Operand(Smi::FromInt(0)), le, &wrong_state);
4634
4635 // Load suspended function and context.
4636 __ Ldr(cp, FieldMemOperand(generator_object,
4637 JSGeneratorObject::kContextOffset));
4638 __ Ldr(function, FieldMemOperand(generator_object,
4639 JSGeneratorObject::kFunctionOffset));
4640
4641 // Load receiver and store as the first argument.
4642 __ Ldr(x10, FieldMemOperand(generator_object,
4643 JSGeneratorObject::kReceiverOffset));
4644 __ Push(x10);
4645
4646 // Push holes for the rest of the arguments to the generator function.
4647 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
4648
4649 // The number of arguments is stored as an int32_t, and -1 is a marker
4650 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so it would normally
4651 // need sign extension to be handled correctly. Here, however, we operate
4652 // on 32-bit W registers, so no extension is required.
4653 __ Ldr(w10, FieldMemOperand(x10,
4654 SharedFunctionInfo::kFormalParameterCountOffset));
4655 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
4656
4657 // TODO(jbramley): Write a variant of PushMultipleTimes which takes a register
4658 // instead of a constant count, and use it to replace this loop.
4659 // TODO(jbramley): ARM doesn't seem to untag its smis here, so it pushes twice
4660 // as many holes as it needs to.
4661 Label push_argument_holes, push_frame;
4662 __ Bind(&push_argument_holes);
4663 __ Subs(w10, w10, 1);
4664 __ B(mi, &push_frame);
4665 __ Push(the_hole);
4666 __ B(&push_argument_holes);
4667
4668 // Enter a new JavaScript frame, and initialize its slots as they were when
4669 // the generator was suspended.
4670 Label resume_frame;
4671 __ Bind(&push_frame);
4672 __ Bl(&resume_frame);
4673 __ B(&done);
4674
4675 __ Bind(&resume_frame);
4676 __ Push(lr, // Return address.
4677 fp, // Caller's frame pointer.
4678 cp, // Callee's context.
4679 function); // Callee's JS Function.
4680 __ Add(fp, __ StackPointer(), kPointerSize * 2);
4681
4682 // Load and untag the operand stack size.
4683 __ Ldr(x10, FieldMemOperand(generator_object,
4684 JSGeneratorObject::kOperandStackOffset));
4685 __ Ldr(operand_stack_size,
4686 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
4687
4688 // If we are sending a value and there is no operand stack, we can jump back
4689 // in directly.
4690 if (resume_mode == JSGeneratorObject::NEXT) {
4691 Label slow_resume;
4692 __ Cbnz(operand_stack_size, &slow_resume);
4693 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
4694 __ Ldrsw(x11,
4695 UntagSmiFieldMemOperand(generator_object,
4696 JSGeneratorObject::kContinuationOffset));
4697 __ Add(x10, x10, x11);
4698 __ Mov(x12, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
4699 __ Str(x12, FieldMemOperand(generator_object,
4700 JSGeneratorObject::kContinuationOffset));
4701 __ Br(x10);
4702
4703 __ Bind(&slow_resume);
4704 }
4705
4706 // Otherwise, we push holes for the operand stack and call the runtime to fix
4707 // up the stack and the handlers.
4708 // TODO(jbramley): Write a variant of PushMultipleTimes which takes a register
4709 // instead of a constant count, and use it to replace this loop.
4710 Label push_operand_holes, call_resume;
4711 __ Bind(&push_operand_holes);
4712 __ Subs(operand_stack_size, operand_stack_size, 1);
4713 __ B(mi, &call_resume);
4714 __ Push(the_hole);
4715 __ B(&push_operand_holes);
4716
4717 __ Bind(&call_resume);
4718 __ Mov(x10, Operand(Smi::FromInt(resume_mode)));
4719 __ Push(generator_object, result_register(), x10);
4720 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
4721 // Not reached: the runtime call returns elsewhere.
4722 __ Unreachable();
4723
4724 // Throw error if we attempt to operate on a running generator.
4725 __ Bind(&wrong_state);
4726 __ Push(generator_object);
4727 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
4728
4729 __ Bind(&done);
4730 context()->Plug(result_register());
4731 }
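The push_argument_holes loop above decrements the 32-bit formal parameter count until it goes negative, so a count of n pushes n holes and the -1 kDontAdaptArgumentsSentinel pushes none, which is why no sign extension is needed. A minimal sketch (not V8 code) of that behaviour; the helper name is illustrative:

#include <cstdint>
#include <vector>

// Mirrors: Subs w10, w10, 1; B(mi, &push_frame); Push(the_hole); loop.
static void PushArgumentHoles(std::vector<uint64_t>* stack,
                              int32_t formal_parameter_count,
                              uint64_t the_hole) {
  for (int32_t n = formal_parameter_count; n > 0; --n) {
    stack->push_back(the_hole);
  }
}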
4732
4733
4734 void FullCodeGenerator::EmitReturnIteratorResult(bool done) {
4735 Label gc_required;
4736 Label allocated;
4737
4738 Handle<Map> map(isolate()->native_context()->generator_result_map());
4739
4740 // Allocate and populate an object with this form: { value: VAL, done: DONE }
4741
4742 Register result = x0;
4743 __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT);
4744 __ Bind(&allocated);
4745
4746 Register map_reg = x1;
4747 Register result_value = x2;
4748 Register boolean_done = x3;
4749 Register empty_fixed_array = x4;
4750 __ Mov(map_reg, Operand(map));
4751 __ Pop(result_value);
4752 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
4753 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
4754 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
4755 // TODO(jbramley): Use Stp if possible.
4756 __ Str(map_reg, FieldMemOperand(result, HeapObject::kMapOffset));
4757 __ Str(empty_fixed_array,
4758 FieldMemOperand(result, JSObject::kPropertiesOffset));
4759 __ Str(empty_fixed_array, FieldMemOperand(result, JSObject::kElementsOffset));
4760 __ Str(result_value,
4761 FieldMemOperand(result,
4762 JSGeneratorObject::kResultValuePropertyOffset));
4763 __ Str(boolean_done,
4764 FieldMemOperand(result,
4765 JSGeneratorObject::kResultDonePropertyOffset));
4766
4767 // Only the value field needs a write barrier, as the other values are in the
4768 // root set.
4769 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
4770 x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
4771
4772 if (done) {
4773 // Exit all nested statements.
4774 NestedStatement* current = nesting_stack_;
4775 int stack_depth = 0;
4776 int context_length = 0;
4777 while (current != NULL) {
4778 current = current->Exit(&stack_depth, &context_length);
4779 }
4780 __ Drop(stack_depth);
4781 }
4782
4783 EmitReturnSequence();
4784
4785 __ Bind(&gc_required);
4786 __ Push(Smi::FromInt(map->instance_size()));
4787 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
4788 __ Ldr(context_register(),
4789 MemOperand(fp, StandardFrameConstants::kContextOffset));
4790 __ B(&allocated);
4791 }
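EmitReturnIteratorResult above fills in a five-word object, which is what the ASSERT_EQ(map->instance_size(), 5 * kPointerSize) checks. A minimal sketch (not V8 code) of that layout, assuming 64-bit tagged words:

#include <cstdint>

struct IteratorResultLayout {
  uint64_t map;         // HeapObject::kMapOffset
  uint64_t properties;  // JSObject::kPropertiesOffset (empty fixed array)
  uint64_t elements;    // JSObject::kElementsOffset (empty fixed array)
  uint64_t value;       // kResultValuePropertyOffset (needs a write barrier)
  uint64_t done;        // kResultDonePropertyOffset (true/false root, no barrier)
};

static_assert(sizeof(IteratorResultLayout) == 5 * sizeof(uint64_t),
              "five tagged words, mirroring the instance_size assertion");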
4792
4793
4794 // TODO(all): I don't like this method.
4795 // It seems to me that in too many places x0 is used in place of this.
4796 // Also, this function is not suitable for all places where x0 should be
4797 // abstracted (eg. when used as an argument). But some places assume that the
4798 // first argument register is x0, and use this function instead.
4799 // Considering that most of the register allocation is hard-coded in the
4800 // FullCodeGen, that it is unlikely we will need to change it extensively, and
4801 // that abstracting the allocation through functions would not yield any
4802 // performance benefit, I think the existence of this function is debatable.
4791 Register FullCodeGenerator::result_register() { 4803 Register FullCodeGenerator::result_register() {
4792 return r0; 4804 return x0;
4793 } 4805 }
4794 4806
4795 4807
4796 Register FullCodeGenerator::context_register() { 4808 Register FullCodeGenerator::context_register() {
4797 return cp; 4809 return cp;
4798 } 4810 }
4799 4811
4800 4812
4801 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4813 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4802 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 4814 ASSERT(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
4803 __ str(value, MemOperand(fp, frame_offset)); 4815 __ Str(value, MemOperand(fp, frame_offset));
4804 } 4816 }
4805 4817
4806 4818
4807 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4819 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4808 __ ldr(dst, ContextOperand(cp, context_index)); 4820 __ Ldr(dst, ContextMemOperand(cp, context_index));
4809 } 4821 }
4810 4822
4811 4823
4812 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 4824 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4813 Scope* declaration_scope = scope()->DeclarationScope(); 4825 Scope* declaration_scope = scope()->DeclarationScope();
4814 if (declaration_scope->is_global_scope() || 4826 if (declaration_scope->is_global_scope() ||
4815 declaration_scope->is_module_scope()) { 4827 declaration_scope->is_module_scope()) {
4816 // Contexts nested in the native context have a canonical empty function 4828 // Contexts nested in the native context have a canonical empty function
4817 // as their closure, not the anonymous closure containing the global 4829 // as their closure, not the anonymous closure containing the global
4818 // code. Pass a smi sentinel and let the runtime look up the empty 4830 // code. Pass a smi sentinel and let the runtime look up the empty
4819 // function. 4831 // function.
4820 __ mov(ip, Operand(Smi::FromInt(0))); 4832 ASSERT(kSmiTag == 0);
4833 __ Push(xzr);
4821 } else if (declaration_scope->is_eval_scope()) { 4834 } else if (declaration_scope->is_eval_scope()) {
4822 // Contexts created by a call to eval have the same closure as the 4835 // Contexts created by a call to eval have the same closure as the
4823 // context calling eval, not the anonymous closure containing the eval 4836 // context calling eval, not the anonymous closure containing the eval
4824 // code. Fetch it from the context. 4837 // code. Fetch it from the context.
4825 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX)); 4838 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4839 __ Push(x10);
4826 } else { 4840 } else {
4827 ASSERT(declaration_scope->is_function_scope()); 4841 ASSERT(declaration_scope->is_function_scope());
4828 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4842 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4843 __ Push(x10);
4829 } 4844 }
4830 __ push(ip);
4831 } 4845 }
4832 4846
4833 4847
4834 // ----------------------------------------------------------------------------
4835 // Non-local control flow support.
4836
4837 void FullCodeGenerator::EnterFinallyBlock() { 4848 void FullCodeGenerator::EnterFinallyBlock() {
4838 ASSERT(!result_register().is(r1)); 4849 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
4839 // Store result register while executing finally block. 4850 ASSERT(!result_register().is(x10));
4840 __ push(result_register()); 4851 // Preserve the result register while executing finally block.
4841 // Cook return address in link register to stack (smi encoded Code* delta) 4852 // Also cook the return address in lr to the stack (smi encoded Code* delta).
4842 __ sub(r1, lr, Operand(masm_->CodeObject())); 4853 __ Sub(x10, lr, Operand(masm_->CodeObject()));
4843 __ SmiTag(r1); 4854 __ SmiTag(x10);
4844 4855 __ Push(result_register(), x10);
4845 // Store result register while executing finally block.
4846 __ push(r1);
4847 4856
4848 // Store pending message while executing finally block. 4857 // Store pending message while executing finally block.
4849 ExternalReference pending_message_obj = 4858 ExternalReference pending_message_obj =
4850 ExternalReference::address_of_pending_message_obj(isolate()); 4859 ExternalReference::address_of_pending_message_obj(isolate());
4851 __ mov(ip, Operand(pending_message_obj)); 4860 __ Mov(x10, Operand(pending_message_obj));
4852 __ ldr(r1, MemOperand(ip)); 4861 __ Ldr(x10, MemOperand(x10));
4853 __ push(r1);
4854 4862
4855 ExternalReference has_pending_message = 4863 ExternalReference has_pending_message =
4856 ExternalReference::address_of_has_pending_message(isolate()); 4864 ExternalReference::address_of_has_pending_message(isolate());
4857 __ mov(ip, Operand(has_pending_message)); 4865 __ Mov(x11, Operand(has_pending_message));
4858 __ ldr(r1, MemOperand(ip)); 4866 __ Ldr(x11, MemOperand(x11));
4859 __ SmiTag(r1); 4867 __ SmiTag(x11);
4860 __ push(r1); 4868
4869 __ Push(x10, x11);
4861 4870
4862 ExternalReference pending_message_script = 4871 ExternalReference pending_message_script =
4863 ExternalReference::address_of_pending_message_script(isolate()); 4872 ExternalReference::address_of_pending_message_script(isolate());
4864 __ mov(ip, Operand(pending_message_script)); 4873 __ Mov(x10, Operand(pending_message_script));
4865 __ ldr(r1, MemOperand(ip)); 4874 __ Ldr(x10, MemOperand(x10));
4866 __ push(r1); 4875 __ Push(x10);
4867 } 4876 }
4868 4877
4869 4878
4870 void FullCodeGenerator::ExitFinallyBlock() { 4879 void FullCodeGenerator::ExitFinallyBlock() {
4871 ASSERT(!result_register().is(r1)); 4880 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
4881 ASSERT(!result_register().is(x10));
4882
4872 // Restore pending message from stack. 4883 // Restore pending message from stack.
4873 __ pop(r1); 4884 __ Pop(x10, x11, x12);
4874 ExternalReference pending_message_script = 4885 ExternalReference pending_message_script =
4875 ExternalReference::address_of_pending_message_script(isolate()); 4886 ExternalReference::address_of_pending_message_script(isolate());
4876 __ mov(ip, Operand(pending_message_script)); 4887 __ Mov(x13, Operand(pending_message_script));
4877 __ str(r1, MemOperand(ip)); 4888 __ Str(x10, MemOperand(x13));
4878 4889
4879 __ pop(r1); 4890 __ SmiUntag(x11);
4880 __ SmiUntag(r1);
4881 ExternalReference has_pending_message = 4891 ExternalReference has_pending_message =
4882 ExternalReference::address_of_has_pending_message(isolate()); 4892 ExternalReference::address_of_has_pending_message(isolate());
4883 __ mov(ip, Operand(has_pending_message)); 4893 __ Mov(x13, Operand(has_pending_message));
4884 __ str(r1, MemOperand(ip)); 4894 __ Str(x11, MemOperand(x13));
4885 4895
4886 __ pop(r1);
4887 ExternalReference pending_message_obj = 4896 ExternalReference pending_message_obj =
4888 ExternalReference::address_of_pending_message_obj(isolate()); 4897 ExternalReference::address_of_pending_message_obj(isolate());
4889 __ mov(ip, Operand(pending_message_obj)); 4898 __ Mov(x13, Operand(pending_message_obj));
4890 __ str(r1, MemOperand(ip)); 4899 __ Str(x12, MemOperand(x13));
4891 4900
4892 // Restore result register from stack. 4901 // Restore result register and cooked return address from the stack.
4893 __ pop(r1); 4902 __ Pop(x10, result_register());
4894 4903
4895 // Uncook return address and return. 4904 // Uncook the return address (see EnterFinallyBlock).
4896 __ pop(result_register()); 4905 __ SmiUntag(x10);
4897 __ SmiUntag(r1); 4906 __ Add(x11, x10, Operand(masm_->CodeObject()));
4898 __ add(pc, r1, Operand(masm_->CodeObject())); 4907 __ Br(x11);
4899 } 4908 }
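EnterFinallyBlock and ExitFinallyBlock above "cook" the return address: instead of a raw pc, the stack holds a smi-encoded offset from the code object, so the slot stays valid if the GC moves the code. A minimal sketch (not V8 code) of the round trip, assuming this port's value << 32 smi encoding; the helper names are illustrative:

#include <cstdint>

static uint64_t CookReturnAddress(uint64_t lr, uint64_t code_object) {
  return (lr - code_object) << 32;                              // Sub + SmiTag
}

static uint64_t UncookReturnAddress(uint64_t cooked, uint64_t code_object) {
  // SmiUntag (arithmetic shift right), then the Add + Br in ExitFinallyBlock.
  return static_cast<uint64_t>(static_cast<int64_t>(cooked) >> 32) + code_object;
}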
4900 4909
4901 4910
4902 #undef __ 4911 #undef __
4903 4912
4913
4904 #define __ ACCESS_MASM(masm()) 4914 #define __ ACCESS_MASM(masm())
4905 4915
4916
4906 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( 4917 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4907 int* stack_depth, 4918 int* stack_depth,
4908 int* context_length) { 4919 int* context_length) {
4920 ASM_LOCATION("FullCodeGenerator::TryFinally::Exit");
4909 // The macros used here must preserve the result register. 4921 // The macros used here must preserve the result register.
4910 4922
4911 // Because the handler block contains the context of the finally 4923 // Because the handler block contains the context of the finally
4912 // code, we can restore it directly from there for the finally code 4924 // code, we can restore it directly from there for the finally code
4913 // rather than iteratively unwinding contexts via their previous 4925 // rather than iteratively unwinding contexts via their previous
4914 // links. 4926 // links.
4915 __ Drop(*stack_depth); // Down to the handler block. 4927 __ Drop(*stack_depth); // Down to the handler block.
4916 if (*context_length > 0) { 4928 if (*context_length > 0) {
4917 // Restore the context to its dedicated register and the stack. 4929 // Restore the context to its dedicated register and the stack.
4918 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); 4930 __ Peek(cp, StackHandlerConstants::kContextOffset);
4919 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4931 __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4920 } 4932 }
4921 __ PopTryHandler(); 4933 __ PopTryHandler();
4922 __ bl(finally_entry_); 4934 __ Bl(finally_entry_);
4923 4935
4924 *stack_depth = 0; 4936 *stack_depth = 0;
4925 *context_length = 0; 4937 *context_length = 0;
4926 return previous_; 4938 return previous_;
4927 } 4939 }
4928 4940
4929 4941
4930 #undef __ 4942 #undef __
4931 4943
4944
4932 } } // namespace v8::internal 4945 } } // namespace v8::internal
4933 4946
4934 #endif // V8_TARGET_ARCH_ARM 4947 #endif // V8_TARGET_ARCH_A64