Chromium Code Reviews
Side by Side Diff: src/ppc/full-codegen-ppc.cc

Issue 571173003: PowerPC specific sub-directories (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Remove IBM copyright, update code to later level (created 6 years, 2 months ago)
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM 7 #if V8_TARGET_ARCH_PPC
8 8
9 #include "src/code-factory.h" 9 #include "src/code-factory.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
11 #include "src/codegen.h" 11 #include "src/codegen.h"
12 #include "src/compiler.h" 12 #include "src/compiler.h"
13 #include "src/debug.h" 13 #include "src/debug.h"
14 #include "src/full-codegen.h" 14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h" 15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h" 16 #include "src/isolate-inl.h"
17 #include "src/parser.h" 17 #include "src/parser.h"
18 #include "src/scopes.h" 18 #include "src/scopes.h"
19 19
20 #include "src/arm/code-stubs-arm.h" 20 #include "src/ppc/code-stubs-ppc.h"
21 #include "src/arm/macro-assembler-arm.h" 21 #include "src/ppc/macro-assembler-ppc.h"
22 22
23 namespace v8 { 23 namespace v8 {
24 namespace internal { 24 namespace internal {
25 25
26 #define __ ACCESS_MASM(masm_) 26 #define __ ACCESS_MASM(masm_)
27 27
28
29 // A patch site is a location in the code which it is possible to patch. This 28 // A patch site is a location in the code which it is possible to patch. This
30 // class has a number of methods to emit the code which is patchable and the 29 // class has a number of methods to emit the code which is patchable and the
31 // method EmitPatchInfo to record a marker back to the patchable code. This 30 // method EmitPatchInfo to record a marker back to the patchable code. This
32 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit 31 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit
33 // immediate value is used) is the delta from the pc to the first instruction of 32 // immediate value is used) is the delta from the pc to the first instruction of
34 // the patchable code. 33 // the patchable code.
34 // See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
35 class JumpPatchSite BASE_EMBEDDED { 35 class JumpPatchSite BASE_EMBEDDED {
36 public: 36 public:
37 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { 37 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
38 #ifdef DEBUG 38 #ifdef DEBUG
39 info_emitted_ = false; 39 info_emitted_ = false;
40 #endif 40 #endif
41 } 41 }
42 42
43 ~JumpPatchSite() { 43 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
44 DCHECK(patch_site_.is_bound() == info_emitted_);
45 }
46 44
47 // When initially emitting this ensure that a jump is always generated to skip 45 // When initially emitting this ensure that a jump is always generated to skip
48 // the inlined smi code. 46 // the inlined smi code.
49 void EmitJumpIfNotSmi(Register reg, Label* target) { 47 void EmitJumpIfNotSmi(Register reg, Label* target) {
50 DCHECK(!patch_site_.is_bound() && !info_emitted_); 48 DCHECK(!patch_site_.is_bound() && !info_emitted_);
51 Assembler::BlockConstPoolScope block_const_pool(masm_); 49 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
52 __ bind(&patch_site_); 50 __ bind(&patch_site_);
53 __ cmp(reg, Operand(reg)); 51 __ cmp(reg, reg, cr0);
54 __ b(eq, target); // Always taken before patched. 52 __ beq(target, cr0); // Always taken before patched.
55 } 53 }
56 54
57 // When initially emitting this ensure that a jump is never generated to skip 55 // When initially emitting this ensure that a jump is never generated to skip
58 // the inlined smi code. 56 // the inlined smi code.
59 void EmitJumpIfSmi(Register reg, Label* target) { 57 void EmitJumpIfSmi(Register reg, Label* target) {
58 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
60 DCHECK(!patch_site_.is_bound() && !info_emitted_); 59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
61 Assembler::BlockConstPoolScope block_const_pool(masm_);
62 __ bind(&patch_site_); 60 __ bind(&patch_site_);
63 __ cmp(reg, Operand(reg)); 61 __ cmp(reg, reg, cr0);
64 __ b(ne, target); // Never taken before patched. 62 __ bne(target, cr0); // Never taken before patched.
65 } 63 }
66 64
67 void EmitPatchInfo() { 65 void EmitPatchInfo() {
68 // Block literal pool emission whilst recording patch site information.
69 Assembler::BlockConstPoolScope block_const_pool(masm_);
70 if (patch_site_.is_bound()) { 66 if (patch_site_.is_bound()) {
71 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); 67 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
72 Register reg; 68 Register reg;
73 reg.set_code(delta_to_patch_site / kOff12Mask); 69 // I believe this is using reg as the high bits of the offset
74 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask); 70 reg.set_code(delta_to_patch_site / kOff16Mask);
71 __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
75 #ifdef DEBUG 72 #ifdef DEBUG
76 info_emitted_ = true; 73 info_emitted_ = true;
77 #endif 74 #endif
78 } else { 75 } else {
79 __ nop(); // Signals no inlined code. 76 __ nop(); // Signals no inlined code.
80 } 77 }
81 } 78 }
82 79
83 private: 80 private:
84 MacroAssembler* masm_; 81 MacroAssembler* masm_;
85 Label patch_site_; 82 Label patch_site_;
86 #ifdef DEBUG 83 #ifdef DEBUG
87 bool info_emitted_; 84 bool info_emitted_;
88 #endif 85 #endif
89 }; 86 };
90 87
91 88
92 // Generate code for a JS function. On entry to the function the receiver 89 // Generate code for a JS function. On entry to the function the receiver
93 // and arguments have been pushed on the stack left to right. The actual 90 // and arguments have been pushed on the stack left to right. The actual
94 // argument count matches the formal parameter count expected by the 91 // argument count matches the formal parameter count expected by the
95 // function. 92 // function.
96 // 93 //
97 // The live registers are: 94 // The live registers are:
98 // o r1: the JS function object being called (i.e., ourselves) 95 // o r4: the JS function object being called (i.e., ourselves)
99 // o cp: our context 96 // o cp: our context
100 // o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool) 97 // o fp: our caller's frame pointer (aka r31)
101 // o fp: our caller's frame pointer
102 // o sp: stack pointer 98 // o sp: stack pointer
103 // o lr: return address 99 // o lr: return address
100 // o ip: our own function entry (required by the prologue)
104 // 101 //
105 // The function builds a JS frame. Please see JavaScriptFrameConstants in 102 // The function builds a JS frame. Please see JavaScriptFrameConstants in
106 // frames-arm.h for its layout. 103 // frames-ppc.h for its layout.
107 void FullCodeGenerator::Generate() { 104 void FullCodeGenerator::Generate() {
108 CompilationInfo* info = info_; 105 CompilationInfo* info = info_;
109 handler_table_ = 106 handler_table_ =
110 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 107 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
111 108
112 profiling_counter_ = isolate()->factory()->NewCell( 109 profiling_counter_ = isolate()->factory()->NewCell(
113 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 110 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
114 SetFunctionPosition(function()); 111 SetFunctionPosition(function());
115 Comment cmnt(masm_, "[ function compiled by full code generator"); 112 Comment cmnt(masm_, "[ function compiled by full code generator");
116 113
117 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 114 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
118 115
119 #ifdef DEBUG 116 #ifdef DEBUG
120 if (strlen(FLAG_stop_at) > 0 && 117 if (strlen(FLAG_stop_at) > 0 &&
121 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 118 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
122 __ stop("stop-at"); 119 __ stop("stop-at");
123 } 120 }
124 #endif 121 #endif
125 122
126 // Sloppy mode functions and builtins need to replace the receiver with the 123 // Sloppy mode functions and builtins need to replace the receiver with the
127 // global proxy when called as functions (without an explicit receiver 124 // global proxy when called as functions (without an explicit receiver
128 // object). 125 // object).
129 if (info->strict_mode() == SLOPPY && !info->is_native()) { 126 if (info->strict_mode() == SLOPPY && !info->is_native()) {
130 Label ok; 127 Label ok;
131 int receiver_offset = info->scope()->num_parameters() * kPointerSize; 128 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
132 __ ldr(r2, MemOperand(sp, receiver_offset)); 129 __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
133 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); 130 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
134 __ b(ne, &ok); 131 __ bne(&ok);
135 132
136 __ ldr(r2, GlobalObjectOperand()); 133 __ LoadP(r5, GlobalObjectOperand());
137 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset)); 134 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
138 135
139 __ str(r2, MemOperand(sp, receiver_offset)); 136 __ StoreP(r5, MemOperand(sp, receiver_offset), r0);
140 137
141 __ bind(&ok); 138 __ bind(&ok);
142 } 139 }
143 140
144 // Open a frame scope to indicate that there is a frame on the stack. The 141 // Open a frame scope to indicate that there is a frame on the stack. The
145 // MANUAL indicates that the scope shouldn't actually generate code to set up 142 // MANUAL indicates that the scope shouldn't actually generate code to set up
146 // the frame (that is done below). 143 // the frame (that is done below).
147 FrameScope frame_scope(masm_, StackFrame::MANUAL); 144 FrameScope frame_scope(masm_, StackFrame::MANUAL);
145 int prologue_offset = masm_->pc_offset();
148 146
149 info->set_prologue_offset(masm_->pc_offset()); 147 if (prologue_offset) {
150 __ Prologue(info->IsCodePreAgingActive()); 148 // Prologue logic requires it's starting address in ip and the
149 // corresponding offset from the function entry.
150 prologue_offset += Instruction::kInstrSize;
151 __ addi(ip, ip, Operand(prologue_offset));
152 }
153 info->set_prologue_offset(prologue_offset);
154 __ Prologue(info->IsCodePreAgingActive(), prologue_offset);
151 info->AddNoFrameRange(0, masm_->pc_offset()); 155 info->AddNoFrameRange(0, masm_->pc_offset());
152 156
153 { Comment cmnt(masm_, "[ Allocate locals"); 157 {
158 Comment cmnt(masm_, "[ Allocate locals");
154 int locals_count = info->scope()->num_stack_slots(); 159 int locals_count = info->scope()->num_stack_slots();
155 // Generators allocate locals, if any, in context slots. 160 // Generators allocate locals, if any, in context slots.
156 DCHECK(!info->function()->is_generator() || locals_count == 0); 161 DCHECK(!info->function()->is_generator() || locals_count == 0);
157 if (locals_count > 0) { 162 if (locals_count > 0) {
158 if (locals_count >= 128) { 163 if (locals_count >= 128) {
159 Label ok; 164 Label ok;
160 __ sub(r9, sp, Operand(locals_count * kPointerSize)); 165 __ Add(ip, sp, -(locals_count * kPointerSize), r0);
161 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 166 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
162 __ cmp(r9, Operand(r2)); 167 __ cmpl(ip, r5);
163 __ b(hs, &ok); 168 __ bc_short(ge, &ok);
164 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); 169 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
165 __ bind(&ok); 170 __ bind(&ok);
166 } 171 }
167 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex); 172 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
168 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; 173 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
169 if (locals_count >= kMaxPushes) { 174 if (locals_count >= kMaxPushes) {
170 int loop_iterations = locals_count / kMaxPushes; 175 int loop_iterations = locals_count / kMaxPushes;
171 __ mov(r2, Operand(loop_iterations)); 176 __ mov(r5, Operand(loop_iterations));
177 __ mtctr(r5);
172 Label loop_header; 178 Label loop_header;
173 __ bind(&loop_header); 179 __ bind(&loop_header);
174 // Do pushes. 180 // Do pushes.
175 for (int i = 0; i < kMaxPushes; i++) { 181 for (int i = 0; i < kMaxPushes; i++) {
176 __ push(r9); 182 __ push(ip);
177 } 183 }
178 // Continue loop if not done. 184 // Continue loop if not done.
179 __ sub(r2, r2, Operand(1), SetCC); 185 __ bdnz(&loop_header);
180 __ b(&loop_header, ne);
181 } 186 }
182 int remaining = locals_count % kMaxPushes; 187 int remaining = locals_count % kMaxPushes;
183 // Emit the remaining pushes. 188 // Emit the remaining pushes.
184 for (int i = 0; i < remaining; i++) { 189 for (int i = 0; i < remaining; i++) {
185 __ push(r9); 190 __ push(ip);
186 } 191 }
187 } 192 }
188 } 193 }
189 194
190 bool function_in_register = true; 195 bool function_in_register = true;
191 196
192 // Possibly allocate a local context. 197 // Possibly allocate a local context.
193 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 198 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
194 if (heap_slots > 0) { 199 if (heap_slots > 0) {
195 // Argument to NewContext is the function, which is still in r1. 200 // Argument to NewContext is the function, which is still in r4.
196 Comment cmnt(masm_, "[ Allocate context"); 201 Comment cmnt(masm_, "[ Allocate context");
197 bool need_write_barrier = true; 202 bool need_write_barrier = true;
198 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { 203 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
199 __ push(r1); 204 __ push(r4);
200 __ Push(info->scope()->GetScopeInfo()); 205 __ Push(info->scope()->GetScopeInfo());
201 __ CallRuntime(Runtime::kNewGlobalContext, 2); 206 __ CallRuntime(Runtime::kNewGlobalContext, 2);
202 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { 207 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
203 FastNewContextStub stub(isolate(), heap_slots); 208 FastNewContextStub stub(isolate(), heap_slots);
204 __ CallStub(&stub); 209 __ CallStub(&stub);
205 // Result of FastNewContextStub is always in new space. 210 // Result of FastNewContextStub is always in new space.
206 need_write_barrier = false; 211 need_write_barrier = false;
207 } else { 212 } else {
208 __ push(r1); 213 __ push(r4);
209 __ CallRuntime(Runtime::kNewFunctionContext, 1); 214 __ CallRuntime(Runtime::kNewFunctionContext, 1);
210 } 215 }
211 function_in_register = false; 216 function_in_register = false;
212 // Context is returned in r0. It replaces the context passed to us. 217 // Context is returned in r3. It replaces the context passed to us.
213 // It's saved in the stack and kept live in cp. 218 // It's saved in the stack and kept live in cp.
214 __ mov(cp, r0); 219 __ mr(cp, r3);
215 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset)); 220 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
216 // Copy any necessary parameters into the context. 221 // Copy any necessary parameters into the context.
217 int num_parameters = info->scope()->num_parameters(); 222 int num_parameters = info->scope()->num_parameters();
218 for (int i = 0; i < num_parameters; i++) { 223 for (int i = 0; i < num_parameters; i++) {
219 Variable* var = scope()->parameter(i); 224 Variable* var = scope()->parameter(i);
220 if (var->IsContextSlot()) { 225 if (var->IsContextSlot()) {
221 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 226 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
222 (num_parameters - 1 - i) * kPointerSize; 227 (num_parameters - 1 - i) * kPointerSize;
223 // Load parameter from stack. 228 // Load parameter from stack.
224 __ ldr(r0, MemOperand(fp, parameter_offset)); 229 __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
225 // Store it in the context. 230 // Store it in the context.
226 MemOperand target = ContextOperand(cp, var->index()); 231 MemOperand target = ContextOperand(cp, var->index());
227 __ str(r0, target); 232 __ StoreP(r3, target, r0);
228 233
229 // Update the write barrier. 234 // Update the write barrier.
230 if (need_write_barrier) { 235 if (need_write_barrier) {
231 __ RecordWriteContextSlot( 236 __ RecordWriteContextSlot(cp, target.offset(), r3, r6,
232 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs); 237 kLRHasBeenSaved, kDontSaveFPRegs);
233 } else if (FLAG_debug_code) { 238 } else if (FLAG_debug_code) {
234 Label done; 239 Label done;
235 __ JumpIfInNewSpace(cp, r0, &done); 240 __ JumpIfInNewSpace(cp, r3, &done);
236 __ Abort(kExpectedNewSpaceObject); 241 __ Abort(kExpectedNewSpaceObject);
237 __ bind(&done); 242 __ bind(&done);
238 } 243 }
239 } 244 }
240 } 245 }
241 } 246 }
242 247
243 Variable* arguments = scope()->arguments(); 248 Variable* arguments = scope()->arguments();
244 if (arguments != NULL) { 249 if (arguments != NULL) {
245 // Function uses arguments object. 250 // Function uses arguments object.
246 Comment cmnt(masm_, "[ Allocate arguments object"); 251 Comment cmnt(masm_, "[ Allocate arguments object");
247 if (!function_in_register) { 252 if (!function_in_register) {
248 // Load this again, if it's used by the local context below. 253 // Load this again, if it's used by the local context below.
249 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 254 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
250 } else { 255 } else {
251 __ mov(r3, r1); 256 __ mr(r6, r4);
252 } 257 }
253 // Receiver is just before the parameters on the caller's stack. 258 // Receiver is just before the parameters on the caller's stack.
254 int num_parameters = info->scope()->num_parameters(); 259 int num_parameters = info->scope()->num_parameters();
255 int offset = num_parameters * kPointerSize; 260 int offset = num_parameters * kPointerSize;
256 __ add(r2, fp, 261 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
257 Operand(StandardFrameConstants::kCallerSPOffset + offset)); 262 __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
258 __ mov(r1, Operand(Smi::FromInt(num_parameters))); 263 __ Push(r6, r5, r4);
259 __ Push(r3, r2, r1);
260 264
261 // Arguments to ArgumentsAccessStub: 265 // Arguments to ArgumentsAccessStub:
262 // function, receiver address, parameter count. 266 // function, receiver address, parameter count.
 263 // The stub will rewrite receiver and parameter count if the previous 267 // The stub will rewrite receiver and parameter count if the previous
264 // stack frame was an arguments adapter frame. 268 // stack frame was an arguments adapter frame.
265 ArgumentsAccessStub::Type type; 269 ArgumentsAccessStub::Type type;
266 if (strict_mode() == STRICT) { 270 if (strict_mode() == STRICT) {
267 type = ArgumentsAccessStub::NEW_STRICT; 271 type = ArgumentsAccessStub::NEW_STRICT;
268 } else if (function()->has_duplicate_parameters()) { 272 } else if (function()->has_duplicate_parameters()) {
269 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; 273 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
270 } else { 274 } else {
271 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; 275 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
272 } 276 }
273 ArgumentsAccessStub stub(isolate(), type); 277 ArgumentsAccessStub stub(isolate(), type);
274 __ CallStub(&stub); 278 __ CallStub(&stub);
275 279
276 SetVar(arguments, r0, r1, r2); 280 SetVar(arguments, r3, r4, r5);
277 } 281 }
278 282
279 if (FLAG_trace) { 283 if (FLAG_trace) {
280 __ CallRuntime(Runtime::kTraceEnter, 0); 284 __ CallRuntime(Runtime::kTraceEnter, 0);
281 } 285 }
282 286
283 // Visit the declarations and body unless there is an illegal 287 // Visit the declarations and body unless there is an illegal
284 // redeclaration. 288 // redeclaration.
285 if (scope()->HasIllegalRedeclaration()) { 289 if (scope()->HasIllegalRedeclaration()) {
286 Comment cmnt(masm_, "[ Declarations"); 290 Comment cmnt(masm_, "[ Declarations");
287 scope()->VisitIllegalRedeclaration(this); 291 scope()->VisitIllegalRedeclaration(this);
288 292
289 } else { 293 } else {
290 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 294 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
291 { Comment cmnt(masm_, "[ Declarations"); 295 {
296 Comment cmnt(masm_, "[ Declarations");
292 // For named function expressions, declare the function name as a 297 // For named function expressions, declare the function name as a
293 // constant. 298 // constant.
294 if (scope()->is_function_scope() && scope()->function() != NULL) { 299 if (scope()->is_function_scope() && scope()->function() != NULL) {
295 VariableDeclaration* function = scope()->function(); 300 VariableDeclaration* function = scope()->function();
296 DCHECK(function->proxy()->var()->mode() == CONST || 301 DCHECK(function->proxy()->var()->mode() == CONST ||
297 function->proxy()->var()->mode() == CONST_LEGACY); 302 function->proxy()->var()->mode() == CONST_LEGACY);
298 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED); 303 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
299 VisitVariableDeclaration(function); 304 VisitVariableDeclaration(function);
300 } 305 }
301 VisitDeclarations(scope()->declarations()); 306 VisitDeclarations(scope()->declarations());
302 } 307 }
303 308
304 { Comment cmnt(masm_, "[ Stack check"); 309 {
310 Comment cmnt(masm_, "[ Stack check");
305 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 311 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
306 Label ok; 312 Label ok;
307 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 313 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
308 __ cmp(sp, Operand(ip)); 314 __ cmpl(sp, ip);
309 __ b(hs, &ok); 315 __ bc_short(ge, &ok);
310 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); 316 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
311 PredictableCodeSizeScope predictable(masm_,
312 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
313 __ Call(stack_check, RelocInfo::CODE_TARGET);
314 __ bind(&ok); 317 __ bind(&ok);
315 } 318 }
316 319
317 { Comment cmnt(masm_, "[ Body"); 320 {
321 Comment cmnt(masm_, "[ Body");
318 DCHECK(loop_depth() == 0); 322 DCHECK(loop_depth() == 0);
319 VisitStatements(function()->body()); 323 VisitStatements(function()->body());
320 DCHECK(loop_depth() == 0); 324 DCHECK(loop_depth() == 0);
321 } 325 }
322 } 326 }
323 327
324 // Always emit a 'return undefined' in case control fell off the end of 328 // Always emit a 'return undefined' in case control fell off the end of
325 // the body. 329 // the body.
326 { Comment cmnt(masm_, "[ return <undefined>;"); 330 {
327 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 331 Comment cmnt(masm_, "[ return <undefined>;");
332 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
328 } 333 }
329 EmitReturnSequence(); 334 EmitReturnSequence();
330
331 // Force emit the constant pool, so it doesn't get emitted in the middle
332 // of the back edge table.
333 masm()->CheckConstPool(true, false);
334 } 335 }
335 336
336 337
337 void FullCodeGenerator::ClearAccumulator() { 338 void FullCodeGenerator::ClearAccumulator() {
338 __ mov(r0, Operand(Smi::FromInt(0))); 339 __ LoadSmiLiteral(r3, Smi::FromInt(0));
339 } 340 }
340 341
341 342
342 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { 343 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
343 __ mov(r2, Operand(profiling_counter_)); 344 __ mov(r5, Operand(profiling_counter_));
344 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); 345 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
345 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); 346 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
346 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); 347 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
347 } 348 }
348 349
349 350
350 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
351 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
352 #else
353 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
354 #endif
355
356
357 void FullCodeGenerator::EmitProfilingCounterReset() { 351 void FullCodeGenerator::EmitProfilingCounterReset() {
358 Assembler::BlockConstPoolScope block_const_pool(masm_);
359 PredictableCodeSizeScope predictable_code_size_scope(
360 masm_, kProfileCounterResetSequenceLength);
361 Label start;
362 __ bind(&start);
363 int reset_value = FLAG_interrupt_budget; 352 int reset_value = FLAG_interrupt_budget;
364 if (info_->is_debug()) { 353 if (info_->is_debug()) {
365 // Detect debug break requests as soon as possible. 354 // Detect debug break requests as soon as possible.
366 reset_value = FLAG_interrupt_budget >> 4; 355 reset_value = FLAG_interrupt_budget >> 4;
367 } 356 }
368 __ mov(r2, Operand(profiling_counter_)); 357 __ mov(r5, Operand(profiling_counter_));
369 // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5 358 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
370 // instructions (for ARMv6) depending upon whether it is an extended constant 359 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
371 // pool - insert nop to compensate.
372 int expected_instr_count =
373 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
374 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
375 while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
376 __ nop();
377 }
378 __ mov(r3, Operand(Smi::FromInt(reset_value)));
379 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
380 } 360 }
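
Together, the decrement/reset pair implements a countdown budget held in a heap Cell: back edges and returns subtract a weight, and when the counter goes negative the code calls the InterruptCheck builtin and resets. A hedged model in plain C++ (names illustrative, not V8 API):

    // Illustrative model of the interrupt budget.
    struct ProfilingCounterModel {
      int value;
      bool Decrement(int weight) {  // true once the budget is exhausted
        value -= weight;
        return value < 0;
      }
      void Reset(int interrupt_budget, bool is_debug) {
        // Debug code shrinks the budget to notice break requests sooner.
        value = is_debug ? (interrupt_budget >> 4) : interrupt_budget;
      }
    };
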
381 361
382 362
383 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 363 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
384 Label* back_edge_target) { 364 Label* back_edge_target) {
385 Comment cmnt(masm_, "[ Back edge bookkeeping"); 365 Comment cmnt(masm_, "[ Back edge bookkeeping");
386 // Block literal pools whilst emitting back edge code.
387 Assembler::BlockConstPoolScope block_const_pool(masm_);
388 Label ok; 366 Label ok;
389 367
390 DCHECK(back_edge_target->is_bound()); 368 DCHECK(back_edge_target->is_bound());
391 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); 369 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
392 int weight = Min(kMaxBackEdgeWeight, 370 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
393 Max(1, distance / kCodeSizeMultiplier));
394 EmitProfilingCounterDecrement(weight); 371 EmitProfilingCounterDecrement(weight);
395 __ b(pl, &ok); 372 {
396 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); 373 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
374 // BackEdgeTable::PatchAt manipulates this sequence.
375 __ cmpi(r6, Operand::Zero());
376 __ bc_short(ge, &ok);
377 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
397 378
398 // Record a mapping of this PC offset to the OSR id. This is used to find 379 // Record a mapping of this PC offset to the OSR id. This is used to find
399 // the AST id from the unoptimized code in order to use it as a key into 380 // the AST id from the unoptimized code in order to use it as a key into
400 // the deoptimization input data found in the optimized code. 381 // the deoptimization input data found in the optimized code.
401 RecordBackEdge(stmt->OsrEntryId()); 382 RecordBackEdge(stmt->OsrEntryId());
402 383 }
403 EmitProfilingCounterReset(); 384 EmitProfilingCounterReset();
404 385
405 __ bind(&ok); 386 __ bind(&ok);
406 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 387 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
407 // Record a mapping of the OSR id to this PC. This is used if the OSR 388 // Record a mapping of the OSR id to this PC. This is used if the OSR
408 // entry becomes the target of a bailout. We don't expect it to be, but 389 // entry becomes the target of a bailout. We don't expect it to be, but
409 // we want it to work if it is. 390 // we want it to work if it is.
410 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 391 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
411 } 392 }
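
The per-back-edge weight scales with the loop body size, clamped to kMaxBackEdgeWeight, so large loops exhaust the budget in fewer iterations. Restating the formula from the code above as a standalone helper (illustrative):

    // weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier))
    static int BackEdgeWeight(int distance, int max_weight,
                              int code_size_multiplier) {
      int w = distance / code_size_multiplier;
      if (w < 1) w = 1;
      return w < max_weight ? w : max_weight;
    }
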
412 393
413 394
414 void FullCodeGenerator::EmitReturnSequence() { 395 void FullCodeGenerator::EmitReturnSequence() {
415 Comment cmnt(masm_, "[ Return sequence"); 396 Comment cmnt(masm_, "[ Return sequence");
416 if (return_label_.is_bound()) { 397 if (return_label_.is_bound()) {
417 __ b(&return_label_); 398 __ b(&return_label_);
418 } else { 399 } else {
419 __ bind(&return_label_); 400 __ bind(&return_label_);
420 if (FLAG_trace) { 401 if (FLAG_trace) {
421 // Push the return value on the stack as the parameter. 402 // Push the return value on the stack as the parameter.
422 // Runtime::TraceExit returns its parameter in r0. 403 // Runtime::TraceExit returns its parameter in r3
423 __ push(r0); 404 __ push(r3);
424 __ CallRuntime(Runtime::kTraceExit, 1); 405 __ CallRuntime(Runtime::kTraceExit, 1);
425 } 406 }
426 // Pretend that the exit is a backwards jump to the entry. 407 // Pretend that the exit is a backwards jump to the entry.
427 int weight = 1; 408 int weight = 1;
428 if (info_->ShouldSelfOptimize()) { 409 if (info_->ShouldSelfOptimize()) {
429 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 410 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
430 } else { 411 } else {
431 int distance = masm_->pc_offset(); 412 int distance = masm_->pc_offset();
432 weight = Min(kMaxBackEdgeWeight, 413 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
433 Max(1, distance / kCodeSizeMultiplier));
434 } 414 }
435 EmitProfilingCounterDecrement(weight); 415 EmitProfilingCounterDecrement(weight);
436 Label ok; 416 Label ok;
437 __ b(pl, &ok); 417 __ cmpi(r6, Operand::Zero());
438 __ push(r0); 418 __ bge(&ok);
439 __ Call(isolate()->builtins()->InterruptCheck(), 419 __ push(r3);
440 RelocInfo::CODE_TARGET); 420 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
441 __ pop(r0); 421 __ pop(r3);
442 EmitProfilingCounterReset(); 422 EmitProfilingCounterReset();
443 __ bind(&ok); 423 __ bind(&ok);
444 424
445 #ifdef DEBUG 425 #ifdef DEBUG
446 // Add a label for checking the size of the code used for returning. 426 // Add a label for checking the size of the code used for returning.
447 Label check_exit_codesize; 427 Label check_exit_codesize;
448 __ bind(&check_exit_codesize); 428 __ bind(&check_exit_codesize);
449 #endif 429 #endif
450 // Make sure that the constant pool is not emitted inside of the return 430 // Make sure that the constant pool is not emitted inside of the return
451 // sequence. 431 // sequence.
452 { Assembler::BlockConstPoolScope block_const_pool(masm_); 432 {
433 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
453 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; 434 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
454 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 435 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
455 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
456 PredictableCodeSizeScope predictable(masm_, -1);
457 __ RecordJSReturn(); 436 __ RecordJSReturn();
458 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT); 437 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
459 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); 438 #if V8_TARGET_ARCH_PPC64
460 __ add(sp, sp, Operand(sp_delta)); 439 // With 64bit we may need nop() instructions to ensure we have
461 __ Jump(lr); 440 // enough space to SetDebugBreakAtReturn()
462 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 441 if (is_int16(sp_delta)) {
442 #if !V8_OOL_CONSTANT_POOL
443 masm_->nop();
444 #endif
445 masm_->nop();
463 } 446 }
447 #endif
448 __ blr();
449 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
464 } 450 }
465 451
466 #ifdef DEBUG 452 #ifdef DEBUG
467 // Check that the size of the code used for returning is large enough 453 // Check that the size of the code used for returning is large enough
468 // for the debugger's requirements. 454 // for the debugger's requirements.
469 DCHECK(Assembler::kJSReturnSequenceInstructions <= 455 DCHECK(Assembler::kJSReturnSequenceInstructions <=
470 masm_->InstructionsGeneratedSince(&check_exit_codesize)); 456 masm_->InstructionsGeneratedSince(&check_exit_codesize));
471 #endif 457 #endif
472 } 458 }
473 } 459 }
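
The PPC64 nop padding above exists because SetDebugBreakAtReturn later patches a fixed-size window (kJSReturnSequenceInstructions) at this site, and a 16-bit sp_delta lets LeaveFrame emit a shorter epilogue. A sketch of the padding rule as implied by the #if blocks (inferred, not confirmed against the macro-assembler internals):

    // Inferred from the #ifdefs: pad only when the short epilogue is used.
    static int PaddingNops(bool sp_delta_is_int16, bool ool_constant_pool) {
      if (!sp_delta_is_int16) return 0;  // long form is already long enough
      return ool_constant_pool ? 1 : 2;  // one fewer nop with an OOL pool
    }
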
(...skipping 19 matching lines...)
493 479
494 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 480 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
495 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 481 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
496 // For simplicity we always test the accumulator register. 482 // For simplicity we always test the accumulator register.
497 codegen()->GetVar(result_register(), var); 483 codegen()->GetVar(result_register(), var);
498 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 484 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
499 codegen()->DoTest(this); 485 codegen()->DoTest(this);
500 } 486 }
501 487
502 488
503 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 489 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
504 }
505 490
506 491
507 void FullCodeGenerator::AccumulatorValueContext::Plug( 492 void FullCodeGenerator::AccumulatorValueContext::Plug(
508 Heap::RootListIndex index) const { 493 Heap::RootListIndex index) const {
509 __ LoadRoot(result_register(), index); 494 __ LoadRoot(result_register(), index);
510 } 495 }
511 496
512 497
513 void FullCodeGenerator::StackValueContext::Plug( 498 void FullCodeGenerator::StackValueContext::Plug(
514 Heap::RootListIndex index) const { 499 Heap::RootListIndex index) const {
515 __ LoadRoot(result_register(), index); 500 __ LoadRoot(result_register(), index);
516 __ push(result_register()); 501 __ push(result_register());
517 } 502 }
518 503
519 504
520 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { 505 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
521 codegen()->PrepareForBailoutBeforeSplit(condition(), 506 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
522 true,
523 true_label_,
524 false_label_); 507 false_label_);
525 if (index == Heap::kUndefinedValueRootIndex || 508 if (index == Heap::kUndefinedValueRootIndex ||
526 index == Heap::kNullValueRootIndex || 509 index == Heap::kNullValueRootIndex ||
527 index == Heap::kFalseValueRootIndex) { 510 index == Heap::kFalseValueRootIndex) {
528 if (false_label_ != fall_through_) __ b(false_label_); 511 if (false_label_ != fall_through_) __ b(false_label_);
529 } else if (index == Heap::kTrueValueRootIndex) { 512 } else if (index == Heap::kTrueValueRootIndex) {
530 if (true_label_ != fall_through_) __ b(true_label_); 513 if (true_label_ != fall_through_) __ b(true_label_);
531 } else { 514 } else {
532 __ LoadRoot(result_register(), index); 515 __ LoadRoot(result_register(), index);
533 codegen()->DoTest(this); 516 codegen()->DoTest(this);
534 } 517 }
535 } 518 }
536 519
537 520
538 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const { 521 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
539 }
540 522
541 523
542 void FullCodeGenerator::AccumulatorValueContext::Plug( 524 void FullCodeGenerator::AccumulatorValueContext::Plug(
543 Handle<Object> lit) const { 525 Handle<Object> lit) const {
544 __ mov(result_register(), Operand(lit)); 526 __ mov(result_register(), Operand(lit));
545 } 527 }
546 528
547 529
548 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { 530 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
549 // Immediates cannot be pushed directly. 531 // Immediates cannot be pushed directly.
550 __ mov(result_register(), Operand(lit)); 532 __ mov(result_register(), Operand(lit));
551 __ push(result_register()); 533 __ push(result_register());
552 } 534 }
553 535
554 536
555 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 537 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
556 codegen()->PrepareForBailoutBeforeSplit(condition(), 538 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
557 true,
558 true_label_,
559 false_label_); 539 false_label_);
560 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals. 540 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
561 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 541 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
562 if (false_label_ != fall_through_) __ b(false_label_); 542 if (false_label_ != fall_through_) __ b(false_label_);
563 } else if (lit->IsTrue() || lit->IsJSObject()) { 543 } else if (lit->IsTrue() || lit->IsJSObject()) {
564 if (true_label_ != fall_through_) __ b(true_label_); 544 if (true_label_ != fall_through_) __ b(true_label_);
565 } else if (lit->IsString()) { 545 } else if (lit->IsString()) {
566 if (String::cast(*lit)->length() == 0) { 546 if (String::cast(*lit)->length() == 0) {
567 if (false_label_ != fall_through_) __ b(false_label_); 547 if (false_label_ != fall_through_) __ b(false_label_);
568 } else { 548 } else {
(...skipping 14 matching lines...)
583 563
584 564
585 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 565 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
586 Register reg) const { 566 Register reg) const {
587 DCHECK(count > 0); 567 DCHECK(count > 0);
588 __ Drop(count); 568 __ Drop(count);
589 } 569 }
590 570
591 571
592 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( 572 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
593 int count, 573 int count, Register reg) const {
594 Register reg) const {
595 DCHECK(count > 0); 574 DCHECK(count > 0);
596 __ Drop(count); 575 __ Drop(count);
597 __ Move(result_register(), reg); 576 __ Move(result_register(), reg);
598 } 577 }
599 578
600 579
601 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 580 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
602 Register reg) const { 581 Register reg) const {
603 DCHECK(count > 0); 582 DCHECK(count > 0);
604 if (count > 1) __ Drop(count - 1); 583 if (count > 1) __ Drop(count - 1);
605 __ str(reg, MemOperand(sp, 0)); 584 __ StoreP(reg, MemOperand(sp, 0));
606 } 585 }
607 586
608 587
609 void FullCodeGenerator::TestContext::DropAndPlug(int count, 588 void FullCodeGenerator::TestContext::DropAndPlug(int count,
610 Register reg) const { 589 Register reg) const {
611 DCHECK(count > 0); 590 DCHECK(count > 0);
612 // For simplicity we always test the accumulator register. 591 // For simplicity we always test the accumulator register.
613 __ Drop(count); 592 __ Drop(count);
614 __ Move(result_register(), reg); 593 __ Move(result_register(), reg);
615 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 594 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
616 codegen()->DoTest(this); 595 codegen()->DoTest(this);
617 } 596 }
618 597
619 598
620 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 599 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
621 Label* materialize_false) const { 600 Label* materialize_false) const {
622 DCHECK(materialize_true == materialize_false); 601 DCHECK(materialize_true == materialize_false);
623 __ bind(materialize_true); 602 __ bind(materialize_true);
624 } 603 }
625 604
626 605
627 void FullCodeGenerator::AccumulatorValueContext::Plug( 606 void FullCodeGenerator::AccumulatorValueContext::Plug(
628 Label* materialize_true, 607 Label* materialize_true, Label* materialize_false) const {
629 Label* materialize_false) const {
630 Label done; 608 Label done;
631 __ bind(materialize_true); 609 __ bind(materialize_true);
632 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); 610 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
633 __ jmp(&done); 611 __ b(&done);
634 __ bind(materialize_false); 612 __ bind(materialize_false);
635 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); 613 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
636 __ bind(&done); 614 __ bind(&done);
637 } 615 }
638 616
639 617
640 void FullCodeGenerator::StackValueContext::Plug( 618 void FullCodeGenerator::StackValueContext::Plug(
641 Label* materialize_true, 619 Label* materialize_true, Label* materialize_false) const {
642 Label* materialize_false) const {
643 Label done; 620 Label done;
644 __ bind(materialize_true); 621 __ bind(materialize_true);
645 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 622 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
646 __ jmp(&done); 623 __ b(&done);
647 __ bind(materialize_false); 624 __ bind(materialize_false);
648 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 625 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
649 __ bind(&done); 626 __ bind(&done);
650 __ push(ip); 627 __ push(ip);
651 } 628 }
652 629
653 630
654 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 631 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
655 Label* materialize_false) const { 632 Label* materialize_false) const {
656 DCHECK(materialize_true == true_label_); 633 DCHECK(materialize_true == true_label_);
657 DCHECK(materialize_false == false_label_); 634 DCHECK(materialize_false == false_label_);
658 } 635 }
659 636
660 637
661 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 638 void FullCodeGenerator::EffectContext::Plug(bool flag) const {}
662 }
663 639
664 640
665 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 641 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
666 Heap::RootListIndex value_root_index = 642 Heap::RootListIndex value_root_index =
667 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 643 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
668 __ LoadRoot(result_register(), value_root_index); 644 __ LoadRoot(result_register(), value_root_index);
669 } 645 }
670 646
671 647
672 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { 648 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
673 Heap::RootListIndex value_root_index = 649 Heap::RootListIndex value_root_index =
674 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 650 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
675 __ LoadRoot(ip, value_root_index); 651 __ LoadRoot(ip, value_root_index);
676 __ push(ip); 652 __ push(ip);
677 } 653 }
678 654
679 655
680 void FullCodeGenerator::TestContext::Plug(bool flag) const { 656 void FullCodeGenerator::TestContext::Plug(bool flag) const {
681 codegen()->PrepareForBailoutBeforeSplit(condition(), 657 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
682 true,
683 true_label_,
684 false_label_); 658 false_label_);
685 if (flag) { 659 if (flag) {
686 if (true_label_ != fall_through_) __ b(true_label_); 660 if (true_label_ != fall_through_) __ b(true_label_);
687 } else { 661 } else {
688 if (false_label_ != fall_through_) __ b(false_label_); 662 if (false_label_ != fall_through_) __ b(false_label_);
689 } 663 }
690 } 664 }
691 665
692 666
693 void FullCodeGenerator::DoTest(Expression* condition, 667 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
694 Label* if_true, 668 Label* if_false, Label* fall_through) {
695 Label* if_false,
696 Label* fall_through) {
697 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); 669 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
698 CallIC(ic, condition->test_id()); 670 CallIC(ic, condition->test_id());
699 __ tst(result_register(), result_register()); 671 __ cmpi(result_register(), Operand::Zero());
700 Split(ne, if_true, if_false, fall_through); 672 Split(ne, if_true, if_false, fall_through);
701 } 673 }
702 674
703 675
704 void FullCodeGenerator::Split(Condition cond, 676 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
705 Label* if_true, 677 Label* fall_through, CRegister cr) {
706 Label* if_false,
707 Label* fall_through) {
708 if (if_false == fall_through) { 678 if (if_false == fall_through) {
709 __ b(cond, if_true); 679 __ b(cond, if_true, cr);
710 } else if (if_true == fall_through) { 680 } else if (if_true == fall_through) {
711 __ b(NegateCondition(cond), if_false); 681 __ b(NegateCondition(cond), if_false, cr);
712 } else { 682 } else {
713 __ b(cond, if_true); 683 __ b(cond, if_true, cr);
714 __ b(if_false); 684 __ b(if_false);
715 } 685 }
716 } 686 }
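
DoTest leaves zero/non-zero in the result register via the ToBoolean IC, and Split then emits the minimal branch pattern for whichever label falls through. A compact model of that selection (hypothetical helper, mirroring the logic above):

    // Illustrative model of Split()'s branch selection (not V8 API):
    // returns how many branch instructions the real Split() emits.
    static int SplitBranchCount(const void* if_true, const void* if_false,
                                const void* fall_through) {
      if (if_false == fall_through || if_true == fall_through) return 1;
      return 2;  // conditional to if_true plus unconditional to if_false
    }
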
717 687
718 688
719 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 689 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
720 DCHECK(var->IsStackAllocated()); 690 DCHECK(var->IsStackAllocated());
721 // Offset is negative because higher indexes are at lower addresses. 691 // Offset is negative because higher indexes are at lower addresses.
722 int offset = -var->index() * kPointerSize; 692 int offset = -var->index() * kPointerSize;
723 // Adjust by a (parameter or local) base offset. 693 // Adjust by a (parameter or local) base offset.
(...skipping 14 matching lines...)
738 return ContextOperand(scratch, var->index()); 708 return ContextOperand(scratch, var->index());
739 } else { 709 } else {
740 return StackOperand(var); 710 return StackOperand(var);
741 } 711 }
742 } 712 }
743 713
744 714
745 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 715 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
746 // Use destination as scratch. 716 // Use destination as scratch.
747 MemOperand location = VarOperand(var, dest); 717 MemOperand location = VarOperand(var, dest);
748 __ ldr(dest, location); 718 __ LoadP(dest, location, r0);
749 } 719 }
750 720
751 721
752 void FullCodeGenerator::SetVar(Variable* var, 722 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
753 Register src,
754 Register scratch0,
755 Register scratch1) { 723 Register scratch1) {
756 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); 724 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
757 DCHECK(!scratch0.is(src)); 725 DCHECK(!scratch0.is(src));
758 DCHECK(!scratch0.is(scratch1)); 726 DCHECK(!scratch0.is(scratch1));
759 DCHECK(!scratch1.is(src)); 727 DCHECK(!scratch1.is(src));
760 MemOperand location = VarOperand(var, scratch0); 728 MemOperand location = VarOperand(var, scratch0);
761 __ str(src, location); 729 __ StoreP(src, location, r0);
762 730
763 // Emit the write barrier code if the location is in the heap. 731 // Emit the write barrier code if the location is in the heap.
764 if (var->IsContextSlot()) { 732 if (var->IsContextSlot()) {
765 __ RecordWriteContextSlot(scratch0, 733 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
766 location.offset(), 734 kLRHasBeenSaved, kDontSaveFPRegs);
767 src,
768 scratch1,
769 kLRHasBeenSaved,
770 kDontSaveFPRegs);
771 } 735 }
772 } 736 }
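
SetVar stores through VarOperand and, for context slots, records the store with the write barrier so the GC tracks the new reference; stack slots need no barrier. A usage sketch mirroring the arguments-object store earlier in Generate() (register choices as in that call):

    // As used earlier in Generate(): value in r3, scratches r4/r5.
    SetVar(arguments, r3, r4, r5);  // StoreP + RecordWriteContextSlot when
                                    // 'arguments' resides in the context
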
773 737
774 738
775 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, 739 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
776 bool should_normalize, 740 bool should_normalize,
777 Label* if_true, 741 Label* if_true,
778 Label* if_false) { 742 Label* if_false) {
779 // Only prepare for bailouts before splits if we're in a test 743 // Only prepare for bailouts before splits if we're in a test
780 // context. Otherwise, we let the Visit function deal with the 744 // context. Otherwise, we let the Visit function deal with the
781 // preparation to avoid preparing with the same AST id twice. 745 // preparation to avoid preparing with the same AST id twice.
782 if (!context()->IsTest() || !info_->IsOptimizable()) return; 746 if (!context()->IsTest() || !info_->IsOptimizable()) return;
783 747
784 Label skip; 748 Label skip;
785 if (should_normalize) __ b(&skip); 749 if (should_normalize) __ b(&skip);
786 PrepareForBailout(expr, TOS_REG); 750 PrepareForBailout(expr, TOS_REG);
787 if (should_normalize) { 751 if (should_normalize) {
788 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 752 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
789 __ cmp(r0, ip); 753 __ cmp(r3, ip);
790 Split(eq, if_true, if_false, NULL); 754 Split(eq, if_true, if_false, NULL);
791 __ bind(&skip); 755 __ bind(&skip);
792 } 756 }
793 } 757 }
794 758
795 759
796 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 760 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
797 // The variable in the declaration always resides in the current function 761 // The variable in the declaration always resides in the current function
798 // context. 762 // context.
799 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope())); 763 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
800 if (generate_debug_code_) { 764 if (generate_debug_code_) {
801 // Check that we're not inside a with or catch context. 765 // Check that we're not inside a with or catch context.
802 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); 766 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
803 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); 767 __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
804 __ Check(ne, kDeclarationInWithContext); 768 __ Check(ne, kDeclarationInWithContext);
805 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); 769 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
806 __ Check(ne, kDeclarationInCatchContext); 770 __ Check(ne, kDeclarationInCatchContext);
807 } 771 }
808 } 772 }
809 773
810 774
811 void FullCodeGenerator::VisitVariableDeclaration( 775 void FullCodeGenerator::VisitVariableDeclaration(
812 VariableDeclaration* declaration) { 776 VariableDeclaration* declaration) {
813 // If it was not possible to allocate the variable at compile time, we 777 // If it was not possible to allocate the variable at compile time, we
814 // need to "declare" it at runtime to make sure it actually exists in the 778 // need to "declare" it at runtime to make sure it actually exists in the
815 // local context. 779 // local context.
816 VariableProxy* proxy = declaration->proxy(); 780 VariableProxy* proxy = declaration->proxy();
817 VariableMode mode = declaration->mode(); 781 VariableMode mode = declaration->mode();
818 Variable* variable = proxy->var(); 782 Variable* variable = proxy->var();
819 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; 783 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
820 switch (variable->location()) { 784 switch (variable->location()) {
821 case Variable::UNALLOCATED: 785 case Variable::UNALLOCATED:
822 globals_->Add(variable->name(), zone()); 786 globals_->Add(variable->name(), zone());
823 globals_->Add(variable->binding_needs_init() 787 globals_->Add(variable->binding_needs_init()
824 ? isolate()->factory()->the_hole_value() 788 ? isolate()->factory()->the_hole_value()
825 : isolate()->factory()->undefined_value(), 789 : isolate()->factory()->undefined_value(),
826 zone()); 790 zone());
827 break; 791 break;
828 792
829 case Variable::PARAMETER: 793 case Variable::PARAMETER:
830 case Variable::LOCAL: 794 case Variable::LOCAL:
831 if (hole_init) { 795 if (hole_init) {
832 Comment cmnt(masm_, "[ VariableDeclaration"); 796 Comment cmnt(masm_, "[ VariableDeclaration");
833 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 797 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
834 __ str(ip, StackOperand(variable)); 798 __ StoreP(ip, StackOperand(variable));
835 } 799 }
836 break; 800 break;
837 801
838 case Variable::CONTEXT: 802 case Variable::CONTEXT:
839 if (hole_init) { 803 if (hole_init) {
840 Comment cmnt(masm_, "[ VariableDeclaration"); 804 Comment cmnt(masm_, "[ VariableDeclaration");
841 EmitDebugCheckDeclarationContext(variable); 805 EmitDebugCheckDeclarationContext(variable);
842 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 806 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
843 __ str(ip, ContextOperand(cp, variable->index())); 807 __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
844 // No write barrier since the_hole_value is in old space. 808 // No write barrier since the_hole_value is in old space.
845 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 809 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
846 } 810 }
847 break; 811 break;
848 812
849 case Variable::LOOKUP: { 813 case Variable::LOOKUP: {
850 Comment cmnt(masm_, "[ VariableDeclaration"); 814 Comment cmnt(masm_, "[ VariableDeclaration");
851 __ mov(r2, Operand(variable->name())); 815 __ mov(r5, Operand(variable->name()));
852 // Declaration nodes are always introduced in one of four modes. 816 // Declaration nodes are always introduced in one of four modes.
853 DCHECK(IsDeclaredVariableMode(mode)); 817 DCHECK(IsDeclaredVariableMode(mode));
854 PropertyAttributes attr = 818 PropertyAttributes attr =
855 IsImmutableVariableMode(mode) ? READ_ONLY : NONE; 819 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
856 __ mov(r1, Operand(Smi::FromInt(attr))); 820 __ LoadSmiLiteral(r4, Smi::FromInt(attr));
857 // Push initial value, if any. 821 // Push initial value, if any.
858 // Note: For variables we must not push an initial value (such as 822 // Note: For variables we must not push an initial value (such as
859 // 'undefined') because we may have a (legal) redeclaration and we 823 // 'undefined') because we may have a (legal) redeclaration and we
860 // must not destroy the current value. 824 // must not destroy the current value.
861 if (hole_init) { 825 if (hole_init) {
862 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); 826 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
863 __ Push(cp, r2, r1, r0); 827 __ Push(cp, r5, r4, r3);
864 } else { 828 } else {
865 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value. 829 __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value.
866 __ Push(cp, r2, r1, r0); 830 __ Push(cp, r5, r4, r3);
867 } 831 }
868 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 832 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
869 break; 833 break;
870 } 834 }
871 } 835 }
872 } 836 }
873 837
874 838
875 void FullCodeGenerator::VisitFunctionDeclaration( 839 void FullCodeGenerator::VisitFunctionDeclaration(
876 FunctionDeclaration* declaration) { 840 FunctionDeclaration* declaration) {
877 VariableProxy* proxy = declaration->proxy(); 841 VariableProxy* proxy = declaration->proxy();
878 Variable* variable = proxy->var(); 842 Variable* variable = proxy->var();
879 switch (variable->location()) { 843 switch (variable->location()) {
880 case Variable::UNALLOCATED: { 844 case Variable::UNALLOCATED: {
881 globals_->Add(variable->name(), zone()); 845 globals_->Add(variable->name(), zone());
882 Handle<SharedFunctionInfo> function = 846 Handle<SharedFunctionInfo> function =
883 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_); 847 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
884 // Check for stack-overflow exception. 848 // Check for stack-overflow exception.
885 if (function.is_null()) return SetStackOverflow(); 849 if (function.is_null()) return SetStackOverflow();
886 globals_->Add(function, zone()); 850 globals_->Add(function, zone());
887 break; 851 break;
888 } 852 }
889 853
890 case Variable::PARAMETER: 854 case Variable::PARAMETER:
891 case Variable::LOCAL: { 855 case Variable::LOCAL: {
892 Comment cmnt(masm_, "[ FunctionDeclaration"); 856 Comment cmnt(masm_, "[ FunctionDeclaration");
893 VisitForAccumulatorValue(declaration->fun()); 857 VisitForAccumulatorValue(declaration->fun());
894 __ str(result_register(), StackOperand(variable)); 858 __ StoreP(result_register(), StackOperand(variable));
895 break; 859 break;
896 } 860 }
897 861
898 case Variable::CONTEXT: { 862 case Variable::CONTEXT: {
899 Comment cmnt(masm_, "[ FunctionDeclaration"); 863 Comment cmnt(masm_, "[ FunctionDeclaration");
900 EmitDebugCheckDeclarationContext(variable); 864 EmitDebugCheckDeclarationContext(variable);
901 VisitForAccumulatorValue(declaration->fun()); 865 VisitForAccumulatorValue(declaration->fun());
902 __ str(result_register(), ContextOperand(cp, variable->index())); 866 __ StoreP(result_register(), ContextOperand(cp, variable->index()), r0);
903 int offset = Context::SlotOffset(variable->index()); 867 int offset = Context::SlotOffset(variable->index());
904 // We know that we have written a function, which is not a smi. 868 // We know that we have written a function, which is not a smi.
905 __ RecordWriteContextSlot(cp, 869 __ RecordWriteContextSlot(cp, offset, result_register(), r5,
906 offset, 870 kLRHasBeenSaved, kDontSaveFPRegs,
907 result_register(), 871 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
908 r2,
909 kLRHasBeenSaved,
910 kDontSaveFPRegs,
911 EMIT_REMEMBERED_SET,
912 OMIT_SMI_CHECK);
913 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 872 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
914 break; 873 break;
915 } 874 }
916 875
917 case Variable::LOOKUP: { 876 case Variable::LOOKUP: {
918 Comment cmnt(masm_, "[ FunctionDeclaration"); 877 Comment cmnt(masm_, "[ FunctionDeclaration");
919 __ mov(r2, Operand(variable->name())); 878 __ mov(r5, Operand(variable->name()));
920 __ mov(r1, Operand(Smi::FromInt(NONE))); 879 __ LoadSmiLiteral(r4, Smi::FromInt(NONE));
921 __ Push(cp, r2, r1); 880 __ Push(cp, r5, r4);
922 // Push initial value for function declaration. 881 // Push initial value for function declaration.
923 VisitForStackValue(declaration->fun()); 882 VisitForStackValue(declaration->fun());
924 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 883 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
925 break; 884 break;
926 } 885 }
927 } 886 }
928 } 887 }
929 888
930 889
931 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 890 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
932 Variable* variable = declaration->proxy()->var(); 891 Variable* variable = declaration->proxy()->var();
933 DCHECK(variable->location() == Variable::CONTEXT); 892 DCHECK(variable->location() == Variable::CONTEXT);
934 DCHECK(variable->interface()->IsFrozen()); 893 DCHECK(variable->interface()->IsFrozen());
935 894
936 Comment cmnt(masm_, "[ ModuleDeclaration"); 895 Comment cmnt(masm_, "[ ModuleDeclaration");
937 EmitDebugCheckDeclarationContext(variable); 896 EmitDebugCheckDeclarationContext(variable);
938 897
939 // Load instance object. 898 // Load instance object.
940 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope())); 899 __ LoadContext(r4, scope_->ContextChainLength(scope_->GlobalScope()));
941 __ ldr(r1, ContextOperand(r1, variable->interface()->Index())); 900 __ LoadP(r4, ContextOperand(r4, variable->interface()->Index()));
942 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX)); 901 __ LoadP(r4, ContextOperand(r4, Context::EXTENSION_INDEX));
943 902
944 // Assign it. 903 // Assign it.
945 __ str(r1, ContextOperand(cp, variable->index())); 904 __ StoreP(r4, ContextOperand(cp, variable->index()), r0);
946 // We know that we have written a module, which is not a smi. 905 // We know that we have written a module, which is not a smi.
947 __ RecordWriteContextSlot(cp, 906 __ RecordWriteContextSlot(cp, Context::SlotOffset(variable->index()), r4, r6,
948 Context::SlotOffset(variable->index()), 907 kLRHasBeenSaved, kDontSaveFPRegs,
949 r1, 908 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
950 r3,
951 kLRHasBeenSaved,
952 kDontSaveFPRegs,
953 EMIT_REMEMBERED_SET,
954 OMIT_SMI_CHECK);
955 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); 909 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
956 910
957 // Traverse into body. 911 // Traverse into body.
958 Visit(declaration->module()); 912 Visit(declaration->module());
959 } 913 }
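
RecordWriteContextSlot above passes OMIT_SMI_CHECK because the stored value is known to be a module object, never a smi. A standalone sketch (not V8 code) of the check being skipped: smis carry no pointer, so only heap-object stores need the barrier at all.

    #include <cstdint>

    constexpr intptr_t kHeapObjectTagMask = 1;  // Low bit 1 => heap object.

    // Illustrative predicate: does this store need remembered-set logic?
    bool BarrierNeeded(intptr_t stored_value, bool omit_smi_check) {
      if (!omit_smi_check && (stored_value & kHeapObjectTagMask) == 0) {
        return false;  // A smi is not a pointer; nothing to record.
      }
      return true;  // Heap object stored: run the barrier machinery.
    }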
960 914
961 915
962 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) { 916 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
963 VariableProxy* proxy = declaration->proxy(); 917 VariableProxy* proxy = declaration->proxy();
964 Variable* variable = proxy->var(); 918 Variable* variable = proxy->var();
(...skipping 18 matching lines...)
983 937
984 938
985 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { 939 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
986 // TODO(rossberg) 940 // TODO(rossberg)
987 } 941 }
988 942
989 943
990 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 944 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
991 // Call the runtime to declare the globals. 945 // Call the runtime to declare the globals.
992 // The context is the first argument. 946 // The context is the first argument.
993 __ mov(r1, Operand(pairs)); 947 __ mov(r4, Operand(pairs));
994 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); 948 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
995 __ Push(cp, r1, r0); 949 __ Push(cp, r4, r3);
996 __ CallRuntime(Runtime::kDeclareGlobals, 3); 950 __ CallRuntime(Runtime::kDeclareGlobals, 3);
997 // Return value is ignored. 951 // Return value is ignored.
998 } 952 }
999 953
1000 954
1001 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { 955 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1002 // Call the runtime to declare the modules. 956 // Call the runtime to declare the modules.
1003 __ Push(descriptions); 957 __ Push(descriptions);
1004 __ CallRuntime(Runtime::kDeclareModules, 1); 958 __ CallRuntime(Runtime::kDeclareModules, 1);
1005 // Return value is ignored. 959 // Return value is ignored.
(...skipping 25 matching lines...)
1031 } 985 }
1032 986
1033 Comment cmnt(masm_, "[ Case comparison"); 987 Comment cmnt(masm_, "[ Case comparison");
1034 __ bind(&next_test); 988 __ bind(&next_test);
1035 next_test.Unuse(); 989 next_test.Unuse();
1036 990
1037 // Compile the label expression. 991 // Compile the label expression.
1038 VisitForAccumulatorValue(clause->label()); 992 VisitForAccumulatorValue(clause->label());
1039 993
1040 // Perform the comparison as if via '==='. 994 // Perform the comparison as if via '==='.
1041 __ ldr(r1, MemOperand(sp, 0)); // Switch value. 995 __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
1042 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 996 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1043 JumpPatchSite patch_site(masm_); 997 JumpPatchSite patch_site(masm_);
1044 if (inline_smi_code) { 998 if (inline_smi_code) {
1045 Label slow_case; 999 Label slow_case;
1046 __ orr(r2, r1, r0); 1000 __ orx(r5, r4, r3);
1047 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 1001 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
1048 1002
1049 __ cmp(r1, r0); 1003 __ cmp(r4, r3);
1050 __ b(ne, &next_test); 1004 __ bne(&next_test);
1051 __ Drop(1); // Switch value is no longer needed. 1005 __ Drop(1); // Switch value is no longer needed.
1052 __ b(clause->body_target()); 1006 __ b(clause->body_target());
1053 __ bind(&slow_case); 1007 __ bind(&slow_case);
1054 } 1008 }
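
The orx/EmitJumpIfNotSmi pair above is the usual combined smi test: smis are tagged with a low 0 bit, so OR-ing the two operands and testing that single bit verifies both at once. A standalone illustration (not V8 code), assuming only the low-bit tagging scheme:

    #include <cassert>
    #include <cstdint>

    constexpr intptr_t kSmiTagMask = 1;  // Low bit: 0 => smi, 1 => heap object.

    bool BothSmis(intptr_t a, intptr_t b) {
      return ((a | b) & kSmiTagMask) == 0;  // One test covers both operands.
    }

    int main() {
      intptr_t smi_a = 7 * 2, smi_b = 21 * 2;  // Tagged smis: low bit clear.
      intptr_t heap_object = smi_a | 1;        // Tagged pointer: low bit set.
      assert(BothSmis(smi_a, smi_b));
      assert(!BothSmis(smi_a, heap_object));
      return 0;
    }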
1055 1009
1056 // Record position before stub call for type feedback. 1010 // Record position before stub call for type feedback.
1057 SetSourcePosition(clause->position()); 1011 SetSourcePosition(clause->position());
1058 Handle<Code> ic = 1012 Handle<Code> ic =
1059 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); 1013 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1060 CallIC(ic, clause->CompareId()); 1014 CallIC(ic, clause->CompareId());
1061 patch_site.EmitPatchInfo(); 1015 patch_site.EmitPatchInfo();
1062 1016
1063 Label skip; 1017 Label skip;
1064 __ b(&skip); 1018 __ b(&skip);
1065 PrepareForBailout(clause, TOS_REG); 1019 PrepareForBailout(clause, TOS_REG);
1066 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1020 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1067 __ cmp(r0, ip); 1021 __ cmp(r3, ip);
1068 __ b(ne, &next_test); 1022 __ bne(&next_test);
1069 __ Drop(1); 1023 __ Drop(1);
1070 __ jmp(clause->body_target()); 1024 __ b(clause->body_target());
1071 __ bind(&skip); 1025 __ bind(&skip);
1072 1026
1073 __ cmp(r0, Operand::Zero()); 1027 __ cmpi(r3, Operand::Zero());
1074 __ b(ne, &next_test); 1028 __ bne(&next_test);
1075 __ Drop(1); // Switch value is no longer needed. 1029 __ Drop(1); // Switch value is no longer needed.
1076 __ b(clause->body_target()); 1030 __ b(clause->body_target());
1077 } 1031 }
1078 1032
1079 // Discard the test value and jump to the default if present, otherwise to 1033 // Discard the test value and jump to the default if present, otherwise to
1080 // the end of the statement. 1034 // the end of the statement.
1081 __ bind(&next_test); 1035 __ bind(&next_test);
1082 __ Drop(1); // Switch value is no longer needed. 1036 __ Drop(1); // Switch value is no longer needed.
1083 if (default_clause == NULL) { 1037 if (default_clause == NULL) {
1084 __ b(nested_statement.break_label()); 1038 __ b(nested_statement.break_label());
(...skipping 21 matching lines...)
1106 SetStatementPosition(stmt); 1060 SetStatementPosition(stmt);
1107 1061
1108 Label loop, exit; 1062 Label loop, exit;
1109 ForIn loop_statement(this, stmt); 1063 ForIn loop_statement(this, stmt);
1110 increment_loop_depth(); 1064 increment_loop_depth();
1111 1065
1112 // Get the object to enumerate over. If the object is null or undefined, skip 1066 // Get the object to enumerate over. If the object is null or undefined, skip
1113 // over the loop. See ECMA-262 version 5, section 12.6.4. 1067 // over the loop. See ECMA-262 version 5, section 12.6.4.
1114 VisitForAccumulatorValue(stmt->enumerable()); 1068 VisitForAccumulatorValue(stmt->enumerable());
1115 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1069 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1116 __ cmp(r0, ip); 1070 __ cmp(r3, ip);
1117 __ b(eq, &exit); 1071 __ beq(&exit);
1118 Register null_value = r5; 1072 Register null_value = r7;
1119 __ LoadRoot(null_value, Heap::kNullValueRootIndex); 1073 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1120 __ cmp(r0, null_value); 1074 __ cmp(r3, null_value);
1121 __ b(eq, &exit); 1075 __ beq(&exit);
1122 1076
1123 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); 1077 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1124 1078
1125 // Convert the object to a JS object. 1079 // Convert the object to a JS object.
1126 Label convert, done_convert; 1080 Label convert, done_convert;
1127 __ JumpIfSmi(r0, &convert); 1081 __ JumpIfSmi(r3, &convert);
1128 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1082 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
1129 __ b(ge, &done_convert); 1083 __ bge(&done_convert);
1130 __ bind(&convert); 1084 __ bind(&convert);
1131 __ push(r0); 1085 __ push(r3);
1132 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1086 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1133 __ bind(&done_convert); 1087 __ bind(&done_convert);
1134 __ push(r0); 1088 __ push(r3);
1135 1089
1136 // Check for proxies. 1090 // Check for proxies.
1137 Label call_runtime; 1091 Label call_runtime;
1138 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1092 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1139 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE); 1093 __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
1140 __ b(le, &call_runtime); 1094 __ ble(&call_runtime);
1141 1095
1142 // Check cache validity in generated code. This is a fast case for 1096 // Check cache validity in generated code. This is a fast case for
1143 // the JSObject::IsSimpleEnum cache validity checks. If we cannot 1097 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1144 // guarantee cache validity, call the runtime system to check cache 1098 // guarantee cache validity, call the runtime system to check cache
1145 // validity or get the property names in a fixed array. 1099 // validity or get the property names in a fixed array.
1146 __ CheckEnumCache(null_value, &call_runtime); 1100 __ CheckEnumCache(null_value, &call_runtime);
1147 1101
1148 // The enum cache is valid. Load the map of the object being 1102 // The enum cache is valid. Load the map of the object being
1149 // iterated over and use the cache for the iteration. 1103 // iterated over and use the cache for the iteration.
1150 Label use_cache; 1104 Label use_cache;
1151 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 1105 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
1152 __ b(&use_cache); 1106 __ b(&use_cache);
1153 1107
1154 // Get the set of properties to enumerate. 1108 // Get the set of properties to enumerate.
1155 __ bind(&call_runtime); 1109 __ bind(&call_runtime);
1156 __ push(r0); // Duplicate the enumerable object on the stack. 1110 __ push(r3); // Duplicate the enumerable object on the stack.
1157 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); 1111 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1158 1112
1159 // If we got a map from the runtime call, we can do a fast 1113 // If we got a map from the runtime call, we can do a fast
1160 // modification check. Otherwise, we got a fixed array, and we have 1114 // modification check. Otherwise, we got a fixed array, and we have
1161 // to do a slow check. 1115 // to do a slow check.
1162 Label fixed_array; 1116 Label fixed_array;
1163 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 1117 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
1164 __ LoadRoot(ip, Heap::kMetaMapRootIndex); 1118 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1165 __ cmp(r2, ip); 1119 __ cmp(r5, ip);
1166 __ b(ne, &fixed_array); 1120 __ bne(&fixed_array);
1167 1121
1168 // We got a map in register r0. Get the enumeration cache from it. 1122 // We got a map in register r3. Get the enumeration cache from it.
1169 Label no_descriptors; 1123 Label no_descriptors;
1170 __ bind(&use_cache); 1124 __ bind(&use_cache);
1171 1125
1172 __ EnumLength(r1, r0); 1126 __ EnumLength(r4, r3);
1173 __ cmp(r1, Operand(Smi::FromInt(0))); 1127 __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
1174 __ b(eq, &no_descriptors); 1128 __ beq(&no_descriptors);
1175 1129
1176 __ LoadInstanceDescriptors(r0, r2); 1130 __ LoadInstanceDescriptors(r3, r5);
1177 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset)); 1131 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
1178 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset)); 1132 __ LoadP(r5,
1133 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
1179 1134
1180 // Set up the four remaining stack slots. 1135 // Set up the four remaining stack slots.
1181 __ push(r0); // Map. 1136 __ push(r3); // Map.
1182 __ mov(r0, Operand(Smi::FromInt(0))); 1137 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1183 // Push enumeration cache, enumeration cache length (as smi) and zero. 1138 // Push enumeration cache, enumeration cache length (as smi) and zero.
1184 __ Push(r2, r1, r0); 1139 __ Push(r5, r4, r3);
1185 __ jmp(&loop); 1140 __ b(&loop);
1186 1141
1187 __ bind(&no_descriptors); 1142 __ bind(&no_descriptors);
1188 __ Drop(1); 1143 __ Drop(1);
1189 __ jmp(&exit); 1144 __ b(&exit);
1190 1145
1191 // We got a fixed array in register r0. Iterate through that. 1146 // We got a fixed array in register r3. Iterate through that.
1192 Label non_proxy; 1147 Label non_proxy;
1193 __ bind(&fixed_array); 1148 __ bind(&fixed_array);
1194 1149
1195 __ Move(r1, FeedbackVector()); 1150 __ Move(r4, FeedbackVector());
1196 __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); 1151 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1197 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot))); 1152 __ StoreP(r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(slot)), r0);
1198 1153
1199 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check 1154 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi indicates slow check
1200 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object 1155 __ LoadP(r5, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1201 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1156 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1202 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); 1157 __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
1203 __ b(gt, &non_proxy); 1158 __ bgt(&non_proxy);
1204 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy 1159 __ LoadSmiLiteral(r4, Smi::FromInt(0)); // Zero indicates proxy
1205 __ bind(&non_proxy); 1160 __ bind(&non_proxy);
1206 __ Push(r1, r0); // Smi and array 1161 __ Push(r4, r3); // Smi and array
1207 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset)); 1162 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
1208 __ mov(r0, Operand(Smi::FromInt(0))); 1163 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1209 __ Push(r1, r0); // Fixed array length (as smi) and initial index. 1164 __ Push(r4, r3); // Fixed array length (as smi) and initial index.
1210 1165
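
At this point the loop has five words on the stack, and the loads below address them by fixed offsets from sp. A hypothetical struct restating that layout (illustration only; the field names are invented):

    #include <cstdint>

    // Stack picture for the loop body below (field n lives at
    // sp + n * kPointerSize).
    struct ForInFrame {
      intptr_t index;         // sp + 0: current index, as a smi.
      intptr_t length;        // sp + 1: cache/array length, as a smi.
      intptr_t array;         // sp + 2: enum cache or fixed array of keys.
      intptr_t expected_map;  // sp + 3: receiver map, or smi 0 for proxies.
      intptr_t enumerable;    // sp + 4: the object being iterated over.
    };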
1211 // Generate code for doing the condition check. 1166 // Generate code for doing the condition check.
1212 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1167 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1213 __ bind(&loop); 1168 __ bind(&loop);
1214 // Load the current count to r0, load the length to r1. 1169 // Load the current count to r3, load the length to r4.
1215 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize)); 1170 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1216 __ cmp(r0, r1); // Compare to the array length. 1171 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1217 __ b(hs, loop_statement.break_label()); 1172 __ cmpl(r3, r4); // Compare to the array length.
1173 __ bge(loop_statement.break_label());
1218 1174
1219 // Get the current entry of the array into register r3. 1175 // Get the current entry of the array into register r6.
1220 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); 1176 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1221 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 1177 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1222 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0)); 1178 __ SmiToPtrArrayOffset(r6, r3);
1179 __ LoadPX(r6, MemOperand(r6, r5));
1223 1180
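
SmiToPtrArrayOffset above turns the tagged smi index directly into a byte offset. Assuming the 64-bit smi layout (payload in the upper 32 bits) and 8-byte pointers, a single arithmetic shift replaces untag-then-scale; a standalone check (not V8 code):

    #include <cassert>
    #include <cstdint>

    constexpr int kSmiShift = 32;        // 64-bit V8: payload stored << 32.
    constexpr int kPointerSizeLog2 = 3;  // 8-byte pointers.

    intptr_t SmiToPtrArrayOffset(intptr_t tagged_smi) {
      return tagged_smi >> (kSmiShift - kPointerSizeLog2);  // index * 8.
    }

    int main() {
      intptr_t index = 5;
      intptr_t tagged = index << kSmiShift;
      assert(SmiToPtrArrayOffset(tagged) == index * 8);
      return 0;
    }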
1224 // Get the expected map from the stack or a smi in the 1181 // Get the expected map from the stack or a smi in the
1225 // permanent slow case into register r2. 1182 // permanent slow case into register r5.
1226 __ ldr(r2, MemOperand(sp, 3 * kPointerSize)); 1183 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1227 1184
1228 // Check if the expected map still matches that of the enumerable. 1185 // Check if the expected map still matches that of the enumerable.
1229 // If not, we may have to filter the key. 1186 // If not, we may have to filter the key.
1230 Label update_each; 1187 Label update_each;
1231 __ ldr(r1, MemOperand(sp, 4 * kPointerSize)); 1188 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1232 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset)); 1189 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1233 __ cmp(r4, Operand(r2)); 1190 __ cmp(r7, r5);
1234 __ b(eq, &update_each); 1191 __ beq(&update_each);
1235 1192
1236 // For proxies, no filtering is done. 1193 // For proxies, no filtering is done.
1237 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. 1194 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1238 __ cmp(r2, Operand(Smi::FromInt(0))); 1195 __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
1239 __ b(eq, &update_each); 1196 __ beq(&update_each);
1240 1197
1241 // Convert the entry to a string or (smi) 0 if it isn't a property 1198 // Convert the entry to a string or (smi) 0 if it isn't a property
1242 // any more. If the property has been removed while iterating, we 1199 // any more. If the property has been removed while iterating, we
1243 // just skip it. 1200 // just skip it.
1244 __ push(r1); // Enumerable. 1201 __ Push(r4, r6); // Enumerable and current entry.
1245 __ push(r3); // Current entry.
1246 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 1202 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1247 __ mov(r3, Operand(r0), SetCC); 1203 __ mr(r6, r3);
1248 __ b(eq, loop_statement.continue_label()); 1204 __ cmpi(r6, Operand::Zero());
1205 __ beq(loop_statement.continue_label());
1249 1206
1250 // Update the 'each' property or variable from the possibly filtered 1207 // Update the 'each' property or variable from the possibly filtered
1251 // entry in register r3. 1208 // entry in register r6.
1252 __ bind(&update_each); 1209 __ bind(&update_each);
1253 __ mov(result_register(), r3); 1210 __ mr(result_register(), r6);
1254 // Perform the assignment as if via '='. 1211 // Perform the assignment as if via '='.
1255 { EffectContext context(this); 1212 {
1213 EffectContext context(this);
1256 EmitAssignment(stmt->each()); 1214 EmitAssignment(stmt->each());
1257 } 1215 }
1258 1216
1259 // Generate code for the body of the loop. 1217 // Generate code for the body of the loop.
1260 Visit(stmt->body()); 1218 Visit(stmt->body());
1261 1219
1262 // Generate code for going to the next element by incrementing 1220 // Generate code for going to the next element by incrementing
1263 // the index (smi) stored on top of the stack. 1221 // the index (smi) stored on top of the stack.
1264 __ bind(loop_statement.continue_label()); 1222 __ bind(loop_statement.continue_label());
1265 __ pop(r0); 1223 __ pop(r3);
1266 __ add(r0, r0, Operand(Smi::FromInt(1))); 1224 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1267 __ push(r0); 1225 __ push(r3);
1268 1226
1269 EmitBackEdgeBookkeeping(stmt, &loop); 1227 EmitBackEdgeBookkeeping(stmt, &loop);
1270 __ b(&loop); 1228 __ b(&loop);
1271 1229
1272 // Remove the pointers stored on the stack. 1230 // Remove the pointers stored on the stack.
1273 __ bind(loop_statement.break_label()); 1231 __ bind(loop_statement.break_label());
1274 __ Drop(5); 1232 __ Drop(5);
1275 1233
1276 // Exit and decrement the loop depth. 1234 // Exit and decrement the loop depth.
1277 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1235 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
(...skipping 13 matching lines...)
1291 VisitForEffect(stmt->assign_iterator()); 1249 VisitForEffect(stmt->assign_iterator());
1292 1250
1293 // Loop entry. 1251 // Loop entry.
1294 __ bind(loop_statement.continue_label()); 1252 __ bind(loop_statement.continue_label());
1295 1253
1296 // result = iterator.next() 1254 // result = iterator.next()
1297 VisitForEffect(stmt->next_result()); 1255 VisitForEffect(stmt->next_result());
1298 1256
1299 // if (result.done) break; 1257 // if (result.done) break;
1300 Label result_not_done; 1258 Label result_not_done;
1301 VisitForControl(stmt->result_done(), 1259 VisitForControl(stmt->result_done(), loop_statement.break_label(),
1302 loop_statement.break_label(), 1260 &result_not_done, &result_not_done);
1303 &result_not_done,
1304 &result_not_done);
1305 __ bind(&result_not_done); 1261 __ bind(&result_not_done);
1306 1262
1307 // each = result.value 1263 // each = result.value
1308 VisitForEffect(stmt->assign_each()); 1264 VisitForEffect(stmt->assign_each());
1309 1265
1310 // Generate code for the body of the loop. 1266 // Generate code for the body of the loop.
1311 Visit(stmt->body()); 1267 Visit(stmt->body());
1312 1268
1313 // Check stack before looping. 1269 // Check stack before looping.
1314 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS); 1270 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1315 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label()); 1271 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1316 __ jmp(loop_statement.continue_label()); 1272 __ b(loop_statement.continue_label());
1317 1273
1318 // Exit and decrement the loop depth. 1274 // Exit and decrement the loop depth.
1319 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1275 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1320 __ bind(loop_statement.break_label()); 1276 __ bind(loop_statement.break_label());
1321 decrement_loop_depth(); 1277 decrement_loop_depth();
1322 } 1278 }
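
The for-of code above is driven entirely by the desugared AST nodes (assign_iterator, next_result, result_done, assign_each), so the emitted control flow reduces to the plain iterator protocol. A standalone restatement (not V8 code; the types are invented for illustration):

    struct IterResult { bool done; int value; };
    struct Iterator {
      IterResult (*next)(void* state);
      void* state;
    };

    void ForOfLoop(Iterator it, void (*body)(int each)) {
      for (;;) {
        IterResult result = it.next(it.state);  // result = iterator.next()
        if (result.done) break;                 // if (result.done) break;
        body(result.value);                     // each = result.value; body.
      }
    }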
1323 1279
1324 1280
1325 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, 1281 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1326 bool pretenure) { 1282 bool pretenure) {
1327 // Use the fast case closure allocation code that allocates in new 1283 // Use the fast case closure allocation code that allocates in new
1328 // space for nested functions that don't need literals cloning. If 1284 // space for nested functions that don't need literals cloning. If
1329 // we're running with the --always-opt or the --prepare-always-opt 1285 // we're running with the --always-opt or the --prepare-always-opt
1330 // flag, we need to use the runtime function so that the new function 1286 // flag, we need to use the runtime function so that the new function
1331 // we are creating here gets a chance to have its code optimized and 1287 // we are creating here gets a chance to have its code optimized and
1332 // doesn't just get a copy of the existing unoptimized code. 1288 // doesn't just get a copy of the existing unoptimized code.
1333 if (!FLAG_always_opt && 1289 if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
1334 !FLAG_prepare_always_opt && 1290 scope()->is_function_scope() && info->num_literals() == 0) {
1335 !pretenure &&
1336 scope()->is_function_scope() &&
1337 info->num_literals() == 0) {
1338 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind()); 1291 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1339 __ mov(r2, Operand(info)); 1292 __ mov(r5, Operand(info));
1340 __ CallStub(&stub); 1293 __ CallStub(&stub);
1341 } else { 1294 } else {
1342 __ mov(r0, Operand(info)); 1295 __ mov(r3, Operand(info));
1343 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex 1296 __ LoadRoot(
1344 : Heap::kFalseValueRootIndex); 1297 r4, pretenure ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1345 __ Push(cp, r0, r1); 1298 __ Push(cp, r3, r4);
1346 __ CallRuntime(Runtime::kNewClosure, 3); 1299 __ CallRuntime(Runtime::kNewClosure, 3);
1347 } 1300 }
1348 context()->Plug(r0); 1301 context()->Plug(r3);
1349 } 1302 }
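
The branch above chooses between FastNewClosureStub and the kNewClosure runtime call. The condition, restated as a standalone predicate (a hypothetical helper; the parameters stand in for the real flag and scope accessors):

    // True when the fast, new-space closure allocation stub may be used.
    bool CanUseFastNewClosureStub(bool always_opt, bool prepare_always_opt,
                                  bool pretenure, bool is_function_scope,
                                  int num_literals) {
      return !always_opt && !prepare_always_opt && !pretenure &&
             is_function_scope && num_literals == 0;
    }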
1350 1303
1351 1304
1352 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1305 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1353 Comment cmnt(masm_, "[ VariableProxy"); 1306 Comment cmnt(masm_, "[ VariableProxy");
1354 EmitVariableLoad(expr); 1307 EmitVariableLoad(expr);
1355 } 1308 }
1356 1309
1357 1310
1358 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) { 1311 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1359 Comment cnmt(masm_, "[ SuperReference "); 1312 Comment cnmt(masm_, "[ SuperReference ");
1360 1313
1361 __ ldr(LoadDescriptor::ReceiverRegister(), 1314 __ LoadP(LoadDescriptor::ReceiverRegister(),
1362 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1315 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1363 1316
1364 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol()); 1317 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1365 __ Move(LoadDescriptor::NameRegister(), home_object_symbol); 1318 __ Move(LoadDescriptor::NameRegister(), home_object_symbol);
1366 1319
1367 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId()); 1320 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1368 1321
1369 __ cmp(r0, Operand(isolate()->factory()->undefined_value())); 1322 __ Cmpi(r3, Operand(isolate()->factory()->undefined_value()), r0);
1370 Label done; 1323 Label done;
1371 __ b(ne, &done); 1324 __ bne(&done);
1372 __ CallRuntime(Runtime::kThrowNonMethodError, 0); 1325 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1373 __ bind(&done); 1326 __ bind(&done);
1374 } 1327 }
1375 1328
1376 1329
1377 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy, 1330 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1378 TypeofState typeof_state, 1331 TypeofState typeof_state,
1379 Label* slow) { 1332 Label* slow) {
1380 Register current = cp; 1333 Register current = cp;
1381 Register next = r1; 1334 Register next = r4;
1382 Register temp = r2; 1335 Register temp = r5;
1383 1336
1384 Scope* s = scope(); 1337 Scope* s = scope();
1385 while (s != NULL) { 1338 while (s != NULL) {
1386 if (s->num_heap_slots() > 0) { 1339 if (s->num_heap_slots() > 0) {
1387 if (s->calls_sloppy_eval()) { 1340 if (s->calls_sloppy_eval()) {
1388 // Check that extension is NULL. 1341 // Check that extension is NULL.
1389 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); 1342 __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1390 __ tst(temp, temp); 1343 __ cmpi(temp, Operand::Zero());
1391 __ b(ne, slow); 1344 __ bne(slow);
1392 } 1345 }
1393 // Load next context in chain. 1346 // Load next context in chain.
1394 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX)); 1347 __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1395 // Walk the rest of the chain without clobbering cp. 1348 // Walk the rest of the chain without clobbering cp.
1396 current = next; 1349 current = next;
1397 } 1350 }
1398 // If no outer scope calls eval, we do not need to check more 1351 // If no outer scope calls eval, we do not need to check more
1399 // context extensions. 1352 // context extensions.
1400 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; 1353 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1401 s = s->outer_scope(); 1354 s = s->outer_scope();
1402 } 1355 }
1403 1356
1404 if (s->is_eval_scope()) { 1357 if (s->is_eval_scope()) {
1405 Label loop, fast; 1358 Label loop, fast;
1406 if (!current.is(next)) { 1359 if (!current.is(next)) {
1407 __ Move(next, current); 1360 __ Move(next, current);
1408 } 1361 }
1409 __ bind(&loop); 1362 __ bind(&loop);
1410 // Terminate at native context. 1363 // Terminate at native context.
1411 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); 1364 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1412 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); 1365 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1413 __ cmp(temp, ip); 1366 __ cmp(temp, ip);
1414 __ b(eq, &fast); 1367 __ beq(&fast);
1415 // Check that extension is NULL. 1368 // Check that extension is NULL.
1416 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); 1369 __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1417 __ tst(temp, temp); 1370 __ cmpi(temp, Operand::Zero());
1418 __ b(ne, slow); 1371 __ bne(slow);
1419 // Load next context in chain. 1372 // Load next context in chain.
1420 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); 1373 __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1421 __ b(&loop); 1374 __ b(&loop);
1422 __ bind(&fast); 1375 __ bind(&fast);
1423 } 1376 }
1424 1377
1425 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 1378 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1426 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name())); 1379 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1427 if (FLAG_vector_ics) { 1380 if (FLAG_vector_ics) {
1428 __ mov(VectorLoadICDescriptor::SlotRegister(), 1381 __ mov(VectorLoadICDescriptor::SlotRegister(),
1429 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 1382 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1430 } 1383 }
1431 1384
1432 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) 1385 ContextualMode mode =
1433 ? NOT_CONTEXTUAL 1386 (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL : CONTEXTUAL;
1434 : CONTEXTUAL;
1435 CallLoadIC(mode); 1387 CallLoadIC(mode);
1436 } 1388 }
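
Both loops above walk the context chain and bail to the slow path whenever a sloppy-eval scope may have installed an extension object. A minimal standalone model of that walk (not V8 code; the struct is invented):

    struct Context {
      Context* previous;  // Enclosing context; nullptr past the native context.
      void* extension;    // Non-null once sloppy eval has added bindings.
    };

    // True if the fast (IC-based) global load may be used for this chain.
    bool FastGlobalLoadPossible(const Context* current) {
      for (const Context* c = current; c != nullptr; c = c->previous) {
        if (c->extension != nullptr) return false;  // Take the slow path.
      }
      return true;
    }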
1437 1389
1438 1390
1439 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1391 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1440 Label* slow) { 1392 Label* slow) {
1441 DCHECK(var->IsContextSlot()); 1393 DCHECK(var->IsContextSlot());
1442 Register context = cp; 1394 Register context = cp;
1443 Register next = r3; 1395 Register next = r6;
1444 Register temp = r4; 1396 Register temp = r7;
1445 1397
1446 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1398 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1447 if (s->num_heap_slots() > 0) { 1399 if (s->num_heap_slots() > 0) {
1448 if (s->calls_sloppy_eval()) { 1400 if (s->calls_sloppy_eval()) {
1449 // Check that extension is NULL. 1401 // Check that extension is NULL.
1450 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1402 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1451 __ tst(temp, temp); 1403 __ cmpi(temp, Operand::Zero());
1452 __ b(ne, slow); 1404 __ bne(slow);
1453 } 1405 }
1454 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); 1406 __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1455 // Walk the rest of the chain without clobbering cp. 1407 // Walk the rest of the chain without clobbering cp.
1456 context = next; 1408 context = next;
1457 } 1409 }
1458 } 1410 }
1459 // Check that last extension is NULL. 1411 // Check that last extension is NULL.
1460 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1412 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1461 __ tst(temp, temp); 1413 __ cmpi(temp, Operand::Zero());
1462 __ b(ne, slow); 1414 __ bne(slow);
1463 1415
1464 // This function is used only for loads, not stores, so it's safe to 1416 // This function is used only for loads, not stores, so it's safe to
1465 // return a cp-based operand (the write barrier cannot be allowed to 1417 // return a cp-based operand (the write barrier cannot be allowed to
1466 // destroy the cp register). 1418 // destroy the cp register).
1467 return ContextOperand(context, var->index()); 1419 return ContextOperand(context, var->index());
1468 } 1420 }
1469 1421
1470 1422
1471 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, 1423 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1472 TypeofState typeof_state, 1424 TypeofState typeof_state,
1473 Label* slow, 1425 Label* slow, Label* done) {
1474 Label* done) {
1475 // Generate fast-case code for variables that might be shadowed by 1426 // Generate fast-case code for variables that might be shadowed by
1476 // eval-introduced variables. Eval is used a lot without 1427 // eval-introduced variables. Eval is used a lot without
1477 // introducing variables. In those cases, we do not want to 1428 // introducing variables. In those cases, we do not want to
1478 // perform a runtime call for all variables in the scope 1429 // perform a runtime call for all variables in the scope
1479 // containing the eval. 1430 // containing the eval.
1480 Variable* var = proxy->var(); 1431 Variable* var = proxy->var();
1481 if (var->mode() == DYNAMIC_GLOBAL) { 1432 if (var->mode() == DYNAMIC_GLOBAL) {
1482 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow); 1433 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1483 __ jmp(done); 1434 __ b(done);
1484 } else if (var->mode() == DYNAMIC_LOCAL) { 1435 } else if (var->mode() == DYNAMIC_LOCAL) {
1485 Variable* local = var->local_if_not_shadowed(); 1436 Variable* local = var->local_if_not_shadowed();
1486 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); 1437 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
1487 if (local->mode() == LET || local->mode() == CONST || 1438 if (local->mode() == LET || local->mode() == CONST ||
1488 local->mode() == CONST_LEGACY) { 1439 local->mode() == CONST_LEGACY) {
1489 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1440 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1441 __ bne(done);
1490 if (local->mode() == CONST_LEGACY) { 1442 if (local->mode() == CONST_LEGACY) {
1491 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1443 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1492 } else { // LET || CONST 1444 } else { // LET || CONST
1493 __ b(ne, done); 1445 __ mov(r3, Operand(var->name()));
1494 __ mov(r0, Operand(var->name())); 1446 __ push(r3);
1495 __ push(r0);
1496 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1447 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1497 } 1448 }
1498 } 1449 }
1499 __ jmp(done); 1450 __ b(done);
1500 } 1451 }
1501 } 1452 }
1502 1453
1503 1454
1504 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1455 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1505 // Record position before possible IC call. 1456 // Record position before possible IC call.
1506 SetSourcePosition(proxy->position()); 1457 SetSourcePosition(proxy->position());
1507 Variable* var = proxy->var(); 1458 Variable* var = proxy->var();
1508 1459
1509 // Three cases: global variables, lookup variables, and all other types of 1460 // Three cases: global variables, lookup variables, and all other types of
1510 // variables. 1461 // variables.
1511 switch (var->location()) { 1462 switch (var->location()) {
1512 case Variable::UNALLOCATED: { 1463 case Variable::UNALLOCATED: {
1513 Comment cmnt(masm_, "[ Global variable"); 1464 Comment cmnt(masm_, "[ Global variable");
1514 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 1465 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1515 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); 1466 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1516 if (FLAG_vector_ics) { 1467 if (FLAG_vector_ics) {
1517 __ mov(VectorLoadICDescriptor::SlotRegister(), 1468 __ mov(VectorLoadICDescriptor::SlotRegister(),
1518 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 1469 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1519 } 1470 }
1520 CallLoadIC(CONTEXTUAL); 1471 CallLoadIC(CONTEXTUAL);
1521 context()->Plug(r0); 1472 context()->Plug(r3);
1522 break; 1473 break;
1523 } 1474 }
1524 1475
1525 case Variable::PARAMETER: 1476 case Variable::PARAMETER:
1526 case Variable::LOCAL: 1477 case Variable::LOCAL:
1527 case Variable::CONTEXT: { 1478 case Variable::CONTEXT: {
1528 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" 1479 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1529 : "[ Stack variable"); 1480 : "[ Stack variable");
1530 if (var->binding_needs_init()) { 1481 if (var->binding_needs_init()) {
1531 // var->scope() may be NULL when the proxy is located in eval code and 1482 // var->scope() may be NULL when the proxy is located in eval code and
(...skipping 20 matching lines...)
1552 // function() { f(); let x = 1; function f() { x = 2; } } 1503 // function() { f(); let x = 1; function f() { x = 2; } }
1553 // 1504 //
1554 bool skip_init_check; 1505 bool skip_init_check;
1555 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { 1506 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1556 skip_init_check = false; 1507 skip_init_check = false;
1557 } else { 1508 } else {
1558 // Check that we always have valid source position. 1509 // Check that we always have valid source position.
1559 DCHECK(var->initializer_position() != RelocInfo::kNoPosition); 1510 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1560 DCHECK(proxy->position() != RelocInfo::kNoPosition); 1511 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1561 skip_init_check = var->mode() != CONST_LEGACY && 1512 skip_init_check = var->mode() != CONST_LEGACY &&
1562 var->initializer_position() < proxy->position(); 1513 var->initializer_position() < proxy->position();
1563 } 1514 }
1564 1515
1565 if (!skip_init_check) { 1516 if (!skip_init_check) {
1517 Label done;
1566 // Let and const need a read barrier. 1518 // Let and const need a read barrier.
1567 GetVar(r0, var); 1519 GetVar(r3, var);
1568 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1520 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1521 __ bne(&done);
1569 if (var->mode() == LET || var->mode() == CONST) { 1522 if (var->mode() == LET || var->mode() == CONST) {
1570 // Throw a reference error when using an uninitialized let/const 1523 // Throw a reference error when using an uninitialized let/const
1571 // binding in harmony mode. 1524 // binding in harmony mode.
1572 Label done; 1525 __ mov(r3, Operand(var->name()));
1573 __ b(ne, &done); 1526 __ push(r3);
1574 __ mov(r0, Operand(var->name()));
1575 __ push(r0);
1576 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1527 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1577 __ bind(&done);
1578 } else { 1528 } else {
1579 // Uninitialized const bindings outside of harmony mode are unholed. 1529 // Uninitialized const bindings outside of harmony mode are unholed.
1580 DCHECK(var->mode() == CONST_LEGACY); 1530 DCHECK(var->mode() == CONST_LEGACY);
1581 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1531 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1582 } 1532 }
1583 context()->Plug(r0); 1533 __ bind(&done);
1534 context()->Plug(r3);
1584 break; 1535 break;
1585 } 1536 }
1586 } 1537 }
1587 context()->Plug(var); 1538 context()->Plug(var);
1588 break; 1539 break;
1589 } 1540 }
1590 1541
1591 case Variable::LOOKUP: { 1542 case Variable::LOOKUP: {
1592 Comment cmnt(masm_, "[ Lookup variable"); 1543 Comment cmnt(masm_, "[ Lookup variable");
1593 Label done, slow; 1544 Label done, slow;
1594 // Generate code for loading from variables potentially shadowed 1545 // Generate code for loading from variables potentially shadowed
1595 // by eval-introduced variables. 1546 // by eval-introduced variables.
1596 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done); 1547 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1597 __ bind(&slow); 1548 __ bind(&slow);
1598 __ mov(r1, Operand(var->name())); 1549 __ mov(r4, Operand(var->name()));
1599 __ Push(cp, r1); // Context and name. 1550 __ Push(cp, r4); // Context and name.
1600 __ CallRuntime(Runtime::kLoadLookupSlot, 2); 1551 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1601 __ bind(&done); 1552 __ bind(&done);
1602 context()->Plug(r0); 1553 context()->Plug(r3);
1603 } 1554 }
1604 } 1555 }
1605 } 1556 }
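
The hole checks above encode the binding-initialization rules: a LET or CONST slot still holding the hole throws a ReferenceError, while a CONST_LEGACY slot silently reads as undefined. A standalone model (not V8 code; the enums are invented):

    #include <stdexcept>

    enum class Mode { LET, CONST, CONST_LEGACY };
    enum class Value { kTheHole, kUndefined, kSomeValue };

    Value LoadBinding(Value slot, Mode mode) {
      if (slot == Value::kTheHole) {
        if (mode == Mode::CONST_LEGACY) return Value::kUndefined;  // Unholed.
        throw std::runtime_error("ReferenceError");  // Read before init.
      }
      return slot;
    }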
1606 1557
1607 1558
1608 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1559 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1609 Comment cmnt(masm_, "[ RegExpLiteral"); 1560 Comment cmnt(masm_, "[ RegExpLiteral");
1610 Label materialized; 1561 Label materialized;
1611 // Registers will be used as follows: 1562 // Registers will be used as follows:
1612 // r5 = materialized value (RegExp literal) 1563 // r8 = materialized value (RegExp literal)
1613 // r4 = JS function, literals array 1564 // r7 = JS function, literals array
1614 // r3 = literal index 1565 // r6 = literal index
1615 // r2 = RegExp pattern 1566 // r5 = RegExp pattern
1616 // r1 = RegExp flags 1567 // r4 = RegExp flags
1617 // r0 = RegExp literal clone 1568 // r3 = RegExp literal clone
1618 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1569 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1619 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); 1570 __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1620 int literal_offset = 1571 int literal_offset =
1621 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; 1572 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1622 __ ldr(r5, FieldMemOperand(r4, literal_offset)); 1573 __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
1623 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1574 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1624 __ cmp(r5, ip); 1575 __ cmp(r8, ip);
1625 __ b(ne, &materialized); 1576 __ bne(&materialized);
1626 1577
1627 // Create regexp literal using runtime function. 1578 // Create regexp literal using runtime function.
1628 // Result will be in r0. 1579 // Result will be in r3.
1629 __ mov(r3, Operand(Smi::FromInt(expr->literal_index()))); 1580 __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
1630 __ mov(r2, Operand(expr->pattern())); 1581 __ mov(r5, Operand(expr->pattern()));
1631 __ mov(r1, Operand(expr->flags())); 1582 __ mov(r4, Operand(expr->flags()));
1632 __ Push(r4, r3, r2, r1); 1583 __ Push(r7, r6, r5, r4);
1633 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1584 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1634 __ mov(r5, r0); 1585 __ mr(r8, r3);
1635 1586
1636 __ bind(&materialized); 1587 __ bind(&materialized);
1637 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 1588 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1638 Label allocated, runtime_allocate; 1589 Label allocated, runtime_allocate;
1639 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); 1590 __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
1640 __ jmp(&allocated); 1591 __ b(&allocated);
1641 1592
1642 __ bind(&runtime_allocate); 1593 __ bind(&runtime_allocate);
1643 __ mov(r0, Operand(Smi::FromInt(size))); 1594 __ LoadSmiLiteral(r3, Smi::FromInt(size));
1644 __ Push(r5, r0); 1595 __ Push(r8, r3);
1645 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 1596 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1646 __ pop(r5); 1597 __ pop(r8);
1647 1598
1648 __ bind(&allocated); 1599 __ bind(&allocated);
1649 // After this, registers are used as follows: 1600 // After this, registers are used as follows:
1650 // r0: Newly allocated regexp. 1601 // r3: Newly allocated regexp.
1651 // r5: Materialized regexp. 1602 // r8: Materialized regexp.
1652 // r2: temp. 1603 // r5: temp.
1653 __ CopyFields(r0, r5, d0, size / kPointerSize); 1604 __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
1654 context()->Plug(r0); 1605 context()->Plug(r3);
1655 } 1606 }
1656 1607
1657 1608
1658 void FullCodeGenerator::EmitAccessor(Expression* expression) { 1609 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1659 if (expression == NULL) { 1610 if (expression == NULL) {
1660 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1611 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1661 __ push(r1); 1612 __ push(r4);
1662 } else { 1613 } else {
1663 VisitForStackValue(expression); 1614 VisitForStackValue(expression);
1664 } 1615 }
1665 } 1616 }
1666 1617
1667 1618
1668 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { 1619 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1669 Comment cmnt(masm_, "[ ObjectLiteral"); 1620 Comment cmnt(masm_, "[ ObjectLiteral");
1670 1621
1671 expr->BuildConstantProperties(isolate()); 1622 expr->BuildConstantProperties(isolate());
1672 Handle<FixedArray> constant_properties = expr->constant_properties(); 1623 Handle<FixedArray> constant_properties = expr->constant_properties();
1673 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1624 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1674 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1625 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1675 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1626 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1676 __ mov(r1, Operand(constant_properties)); 1627 __ mov(r4, Operand(constant_properties));
1677 int flags = expr->fast_elements() 1628 int flags = expr->fast_elements() ? ObjectLiteral::kFastElements
1678 ? ObjectLiteral::kFastElements 1629 : ObjectLiteral::kNoFlags;
1679 : ObjectLiteral::kNoFlags; 1630 flags |= expr->has_function() ? ObjectLiteral::kHasFunction
1680 flags |= expr->has_function() 1631 : ObjectLiteral::kNoFlags;
1681 ? ObjectLiteral::kHasFunction 1632 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1682 : ObjectLiteral::kNoFlags;
1683 __ mov(r0, Operand(Smi::FromInt(flags)));
1684 int properties_count = constant_properties->length() / 2; 1633 int properties_count = constant_properties->length() / 2;
1685 if (expr->may_store_doubles() || expr->depth() > 1 || 1634 if (expr->may_store_doubles() || expr->depth() > 1 ||
1686 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements || 1635 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1687 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1636 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1688 __ Push(r3, r2, r1, r0); 1637 __ Push(r6, r5, r4, r3);
1689 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); 1638 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1690 } else { 1639 } else {
1691 FastCloneShallowObjectStub stub(isolate(), properties_count); 1640 FastCloneShallowObjectStub stub(isolate(), properties_count);
1692 __ CallStub(&stub); 1641 __ CallStub(&stub);
1693 } 1642 }
1694 1643
1695 // If result_saved is true the result is on top of the stack. If 1644 // If result_saved is true the result is on top of the stack. If
1696 // result_saved is false the result is in r0. 1645 // result_saved is false the result is in r3.
1697 bool result_saved = false; 1646 bool result_saved = false;
1698 1647
1699 // Mark all computed expressions that are bound to a key that 1648 // Mark all computed expressions that are bound to a key that
1700 // is shadowed by a later occurrence of the same key. For the 1649 // is shadowed by a later occurrence of the same key. For the
1701 // marked expressions, no store code is emitted. 1650 // marked expressions, no store code is emitted.
1702 expr->CalculateEmitStore(zone()); 1651 expr->CalculateEmitStore(zone());
1703 1652
1704 AccessorTable accessor_table(zone()); 1653 AccessorTable accessor_table(zone());
1705 for (int i = 0; i < expr->properties()->length(); i++) { 1654 for (int i = 0; i < expr->properties()->length(); i++) {
1706 ObjectLiteral::Property* property = expr->properties()->at(i); 1655 ObjectLiteral::Property* property = expr->properties()->at(i);
1707 if (property->IsCompileTimeValue()) continue; 1656 if (property->IsCompileTimeValue()) continue;
1708 1657
1709 Literal* key = property->key(); 1658 Literal* key = property->key();
1710 Expression* value = property->value(); 1659 Expression* value = property->value();
1711 if (!result_saved) { 1660 if (!result_saved) {
1712 __ push(r0); // Save result on stack 1661 __ push(r3); // Save result on stack
1713 result_saved = true; 1662 result_saved = true;
1714 } 1663 }
1715 switch (property->kind()) { 1664 switch (property->kind()) {
1716 case ObjectLiteral::Property::CONSTANT: 1665 case ObjectLiteral::Property::CONSTANT:
1717 UNREACHABLE(); 1666 UNREACHABLE();
1718 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1667 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1719 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); 1668 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1720 // Fall through. 1669 // Fall through.
1721 case ObjectLiteral::Property::COMPUTED: 1670 case ObjectLiteral::Property::COMPUTED:
1722 if (key->value()->IsInternalizedString()) { 1671 if (key->value()->IsInternalizedString()) {
1723 if (property->emit_store()) { 1672 if (property->emit_store()) {
1724 VisitForAccumulatorValue(value); 1673 VisitForAccumulatorValue(value);
1725 DCHECK(StoreDescriptor::ValueRegister().is(r0)); 1674 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1726 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); 1675 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1727 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1676 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1728 CallStoreIC(key->LiteralFeedbackId()); 1677 CallStoreIC(key->LiteralFeedbackId());
1729 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1678 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1730 } else { 1679 } else {
1731 VisitForEffect(value); 1680 VisitForEffect(value);
1732 } 1681 }
1733 break; 1682 break;
1734 } 1683 }
1735 // Duplicate receiver on stack. 1684 // Duplicate receiver on stack.
1736 __ ldr(r0, MemOperand(sp)); 1685 __ LoadP(r3, MemOperand(sp));
1737 __ push(r0); 1686 __ push(r3);
1738 VisitForStackValue(key); 1687 VisitForStackValue(key);
1739 VisitForStackValue(value); 1688 VisitForStackValue(value);
1740 if (property->emit_store()) { 1689 if (property->emit_store()) {
1741 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes 1690 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
1742 __ push(r0); 1691 __ push(r3);
1743 __ CallRuntime(Runtime::kSetProperty, 4); 1692 __ CallRuntime(Runtime::kSetProperty, 4);
1744 } else { 1693 } else {
1745 __ Drop(3); 1694 __ Drop(3);
1746 } 1695 }
1747 break; 1696 break;
1748 case ObjectLiteral::Property::PROTOTYPE: 1697 case ObjectLiteral::Property::PROTOTYPE:
1749 // Duplicate receiver on stack. 1698 // Duplicate receiver on stack.
1750 __ ldr(r0, MemOperand(sp)); 1699 __ LoadP(r3, MemOperand(sp));
1751 __ push(r0); 1700 __ push(r3);
1752 VisitForStackValue(value); 1701 VisitForStackValue(value);
1753 if (property->emit_store()) { 1702 if (property->emit_store()) {
1754 __ CallRuntime(Runtime::kSetPrototype, 2); 1703 __ CallRuntime(Runtime::kSetPrototype, 2);
1755 } else { 1704 } else {
1756 __ Drop(2); 1705 __ Drop(2);
1757 } 1706 }
1758 break; 1707 break;
1759
1760 case ObjectLiteral::Property::GETTER: 1708 case ObjectLiteral::Property::GETTER:
1761 accessor_table.lookup(key)->second->getter = value; 1709 accessor_table.lookup(key)->second->getter = value;
1762 break; 1710 break;
1763 case ObjectLiteral::Property::SETTER: 1711 case ObjectLiteral::Property::SETTER:
1764 accessor_table.lookup(key)->second->setter = value; 1712 accessor_table.lookup(key)->second->setter = value;
1765 break; 1713 break;
1766 } 1714 }
1767 } 1715 }
1768 1716
1769 // Emit code to define accessors, using only a single call to the runtime for 1717 // Emit code to define accessors, using only a single call to the runtime for
1770 // each pair of corresponding getters and setters. 1718 // each pair of corresponding getters and setters.
1771 for (AccessorTable::Iterator it = accessor_table.begin(); 1719 for (AccessorTable::Iterator it = accessor_table.begin();
1772 it != accessor_table.end(); 1720 it != accessor_table.end(); ++it) {
1773 ++it) { 1721 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1774 __ ldr(r0, MemOperand(sp)); // Duplicate receiver. 1722 __ push(r3);
1775 __ push(r0);
1776 VisitForStackValue(it->first); 1723 VisitForStackValue(it->first);
1777 EmitAccessor(it->second->getter); 1724 EmitAccessor(it->second->getter);
1778 EmitAccessor(it->second->setter); 1725 EmitAccessor(it->second->setter);
1779 __ mov(r0, Operand(Smi::FromInt(NONE))); 1726 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1780 __ push(r0); 1727 __ push(r3);
1781 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); 1728 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1782 } 1729 }
1783 1730
1784 if (expr->has_function()) { 1731 if (expr->has_function()) {
1785 DCHECK(result_saved); 1732 DCHECK(result_saved);
1786 __ ldr(r0, MemOperand(sp)); 1733 __ LoadP(r3, MemOperand(sp));
1787 __ push(r0); 1734 __ push(r3);
1788 __ CallRuntime(Runtime::kToFastProperties, 1); 1735 __ CallRuntime(Runtime::kToFastProperties, 1);
1789 } 1736 }
1790 1737
1791 if (result_saved) { 1738 if (result_saved) {
1792 context()->PlugTOS(); 1739 context()->PlugTOS();
1793 } else { 1740 } else {
1794 context()->Plug(r0); 1741 context()->Plug(r3);
1795 } 1742 }
1796 } 1743 }
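
The accessor_table loop above batches getters and setters: literals that share a key land in one table entry, so each getter/setter pair costs a single kDefineAccessorPropertyUnchecked call. A standalone sketch of the pairing (not V8 code; std::map stands in for the real AccessorTable):

    #include <map>
    #include <string>

    struct AccessorPair {
      const void* getter = nullptr;
      const void* setter = nullptr;
    };

    void RecordAccessor(std::map<std::string, AccessorPair>* table,
                        const std::string& key, const void* fn,
                        bool is_getter) {
      AccessorPair& entry = (*table)[key];  // First use of a key makes the pair.
      if (is_getter) {
        entry.getter = fn;
      } else {
        entry.setter = fn;
      }
      // A single runtime call later defines both halves of each entry.
    }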
1797 1744
1798 1745
1799 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1746 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1800 Comment cmnt(masm_, "[ ArrayLiteral"); 1747 Comment cmnt(masm_, "[ ArrayLiteral");
1801 1748
1802 expr->BuildConstantElements(isolate()); 1749 expr->BuildConstantElements(isolate());
1803 int flags = expr->depth() == 1 1750 int flags = expr->depth() == 1 ? ArrayLiteral::kShallowElements
1804 ? ArrayLiteral::kShallowElements 1751 : ArrayLiteral::kNoFlags;
1805 : ArrayLiteral::kNoFlags;
1806 1752
1807 ZoneList<Expression*>* subexprs = expr->values(); 1753 ZoneList<Expression*>* subexprs = expr->values();
1808 int length = subexprs->length(); 1754 int length = subexprs->length();
1809 Handle<FixedArray> constant_elements = expr->constant_elements(); 1755 Handle<FixedArray> constant_elements = expr->constant_elements();
1810 DCHECK_EQ(2, constant_elements->length()); 1756 DCHECK_EQ(2, constant_elements->length());
1811 ElementsKind constant_elements_kind = 1757 ElementsKind constant_elements_kind =
1812 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1758 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1813 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); 1759 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1814 Handle<FixedArrayBase> constant_elements_values( 1760 Handle<FixedArrayBase> constant_elements_values(
1815 FixedArrayBase::cast(constant_elements->get(1))); 1761 FixedArrayBase::cast(constant_elements->get(1)));
1816 1762
1817 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; 1763 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1818 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { 1764 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1819 // If the only customer of allocation sites is transitioning, then 1765 // If the only customer of allocation sites is transitioning, then
1820 // we can turn it off if we don't have anywhere else to transition to. 1766 // we can turn it off if we don't have anywhere else to transition to.
1821 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; 1767 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1822 } 1768 }
1823 1769
1824 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1770 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1825 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1771 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1826 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1772 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1827 __ mov(r1, Operand(constant_elements)); 1773 __ mov(r4, Operand(constant_elements));
1828 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) { 1774 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1829 __ mov(r0, Operand(Smi::FromInt(flags))); 1775 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1830 __ Push(r3, r2, r1, r0); 1776 __ Push(r6, r5, r4, r3);
1831 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); 1777 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1832 } else { 1778 } else {
1833 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); 1779 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1834 __ CallStub(&stub); 1780 __ CallStub(&stub);
1835 } 1781 }
1836 1782
1837 bool result_saved = false; // Is the result saved to the stack? 1783 bool result_saved = false; // Is the result saved to the stack?
1838 1784
1839 // Emit code to evaluate all the non-constant subexpressions and to store 1785 // Emit code to evaluate all the non-constant subexpressions and to store
1840 // them into the newly cloned array. 1786 // them into the newly cloned array.
1841 for (int i = 0; i < length; i++) { 1787 for (int i = 0; i < length; i++) {
1842 Expression* subexpr = subexprs->at(i); 1788 Expression* subexpr = subexprs->at(i);
1843 // If the subexpression is a literal or a simple materialized literal it 1789 // If the subexpression is a literal or a simple materialized literal it
1844 // is already set in the cloned array. 1790 // is already set in the cloned array.
1845 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 1791 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1846 1792
1847 if (!result_saved) { 1793 if (!result_saved) {
1848 __ push(r0); 1794 __ push(r3);
1849 __ Push(Smi::FromInt(expr->literal_index())); 1795 __ Push(Smi::FromInt(expr->literal_index()));
1850 result_saved = true; 1796 result_saved = true;
1851 } 1797 }
1852 VisitForAccumulatorValue(subexpr); 1798 VisitForAccumulatorValue(subexpr);
1853 1799
1854 if (IsFastObjectElementsKind(constant_elements_kind)) { 1800 if (IsFastObjectElementsKind(constant_elements_kind)) {
1855 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1801 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1856 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal. 1802 __ LoadP(r8, MemOperand(sp, kPointerSize)); // Copy of array literal.
1857 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset)); 1803 __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
1858 __ str(result_register(), FieldMemOperand(r1, offset)); 1804 __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
1859 // Update the write barrier for the array store. 1805 // Update the write barrier for the array store.
1860 __ RecordWriteField(r1, offset, result_register(), r2, 1806 __ RecordWriteField(r4, offset, result_register(), r5, kLRHasBeenSaved,
1861 kLRHasBeenSaved, kDontSaveFPRegs, 1807 kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1862 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); 1808 INLINE_SMI_CHECK);
1863 } else { 1809 } else {
1864 __ mov(r3, Operand(Smi::FromInt(i))); 1810 __ LoadSmiLiteral(r6, Smi::FromInt(i));
1865 StoreArrayLiteralElementStub stub(isolate()); 1811 StoreArrayLiteralElementStub stub(isolate());
1866 __ CallStub(&stub); 1812 __ CallStub(&stub);
1867 } 1813 }
1868 1814
1869 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1815 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1870 } 1816 }
1871 1817
1872 if (result_saved) { 1818 if (result_saved) {
1873 __ pop(); // literal index 1819 __ pop(); // literal index
1874 context()->PlugTOS(); 1820 context()->PlugTOS();
1875 } else { 1821 } else {
1876 context()->Plug(r0); 1822 context()->Plug(r3);
1877 } 1823 }
1878 } 1824 }
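A note on the fast-elements path above: the store goes directly into the literal's FixedArray backing store, so the slot offset is plain pointer arithmetic followed by a write barrier for the GC. A sketch of that computation (constants as in V8's heap layout; FieldMemOperand folds in the -kHeapObjectTag adjustment):

    // Byte offset of element i inside the elements FixedArray.
    int offset = FixedArray::kHeaderSize + i * kPointerSize;
    // StoreP writes result_register() at elements + offset - tag, and
    // RecordWriteField then updates the remembered set for that slot.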
1879 1825
1880 1826
1881 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1827 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1882 DCHECK(expr->target()->IsValidReferenceExpression()); 1828 DCHECK(expr->target()->IsValidReferenceExpression());
1883 1829
1884 Comment cmnt(masm_, "[ Assignment"); 1830 Comment cmnt(masm_, "[ Assignment");
1885 1831
1886 // Left-hand side can only be a property, a global or a (parameter or local) 1832 // Left-hand side can only be a property, a global or a (parameter or local)
1887 // slot. 1833 // slot.
1888 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 1834 enum LhsKind {
1835 VARIABLE,
1836 NAMED_PROPERTY,
1837 KEYED_PROPERTY,
1838 NAMED_SUPER_PROPERTY
1839 };
1889 LhsKind assign_type = VARIABLE; 1840 LhsKind assign_type = VARIABLE;
1890 Property* property = expr->target()->AsProperty(); 1841 Property* property = expr->target()->AsProperty();
1891 if (property != NULL) { 1842 if (property != NULL) {
1892 assign_type = (property->key()->IsPropertyName()) 1843 assign_type = (property->key()->IsPropertyName())
1893 ? NAMED_PROPERTY 1844 ? (property->IsSuperAccess() ? NAMED_SUPER_PROPERTY
1894 : KEYED_PROPERTY; 1845 : NAMED_PROPERTY)
1846 : KEYED_PROPERTY;
1895 } 1847 }
1896 1848
1897 // Evaluate LHS expression. 1849 // Evaluate LHS expression.
1898 switch (assign_type) { 1850 switch (assign_type) {
1899 case VARIABLE: 1851 case VARIABLE:
1900 // Nothing to do here. 1852 // Nothing to do here.
1901 break; 1853 break;
1902 case NAMED_PROPERTY: 1854 case NAMED_PROPERTY:
1903 if (expr->is_compound()) { 1855 if (expr->is_compound()) {
1904 // We need the receiver both on the stack and in the register. 1856 // We need the receiver both on the stack and in the register.
1905 VisitForStackValue(property->obj()); 1857 VisitForStackValue(property->obj());
1906 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1858 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1907 } else { 1859 } else {
1908 VisitForStackValue(property->obj()); 1860 VisitForStackValue(property->obj());
1909 } 1861 }
1910 break; 1862 break;
1863 case NAMED_SUPER_PROPERTY:
1864 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1865 EmitLoadHomeObject(property->obj()->AsSuperReference());
1866 __ Push(result_register());
1867 if (expr->is_compound()) {
1868 const Register scratch = r4;
1869 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1870 __ Push(scratch);
1871 __ Push(result_register());
1872 }
1873 break;
1911 case KEYED_PROPERTY: 1874 case KEYED_PROPERTY:
1912 if (expr->is_compound()) { 1875 if (expr->is_compound()) {
1913 VisitForStackValue(property->obj()); 1876 VisitForStackValue(property->obj());
1914 VisitForStackValue(property->key()); 1877 VisitForStackValue(property->key());
1915 __ ldr(LoadDescriptor::ReceiverRegister(), 1878 __ LoadP(LoadDescriptor::ReceiverRegister(),
1916 MemOperand(sp, 1 * kPointerSize)); 1879 MemOperand(sp, 1 * kPointerSize));
1917 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 1880 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1918 } else { 1881 } else {
1919 VisitForStackValue(property->obj()); 1882 VisitForStackValue(property->obj());
1920 VisitForStackValue(property->key()); 1883 VisitForStackValue(property->key());
1921 } 1884 }
1922 break; 1885 break;
1923 } 1886 }
1924 1887
1925 // For compound assignments we need another deoptimization point after the 1888 // For compound assignments we need another deoptimization point after the
1926 // variable/property load. 1889 // variable/property load.
1927 if (expr->is_compound()) { 1890 if (expr->is_compound()) {
1928 { AccumulatorValueContext context(this); 1891 {
1892 AccumulatorValueContext context(this);
1929 switch (assign_type) { 1893 switch (assign_type) {
1930 case VARIABLE: 1894 case VARIABLE:
1931 EmitVariableLoad(expr->target()->AsVariableProxy()); 1895 EmitVariableLoad(expr->target()->AsVariableProxy());
1932 PrepareForBailout(expr->target(), TOS_REG); 1896 PrepareForBailout(expr->target(), TOS_REG);
1933 break; 1897 break;
1934 case NAMED_PROPERTY: 1898 case NAMED_PROPERTY:
1935 EmitNamedPropertyLoad(property); 1899 EmitNamedPropertyLoad(property);
1936 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1900 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1937 break; 1901 break;
1902 case NAMED_SUPER_PROPERTY:
1903 EmitNamedSuperPropertyLoad(property);
1904 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1905 break;
1938 case KEYED_PROPERTY: 1906 case KEYED_PROPERTY:
1939 EmitKeyedPropertyLoad(property); 1907 EmitKeyedPropertyLoad(property);
1940 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1908 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1941 break; 1909 break;
1942 } 1910 }
1943 } 1911 }
1944 1912
1945 Token::Value op = expr->binary_op(); 1913 Token::Value op = expr->binary_op();
1946 __ push(r0); // Left operand goes on the stack. 1914 __ push(r3); // Left operand goes on the stack.
1947 VisitForAccumulatorValue(expr->value()); 1915 VisitForAccumulatorValue(expr->value());
1948 1916
1949 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1917 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1950 ? OVERWRITE_RIGHT 1918 ? OVERWRITE_RIGHT
1951 : NO_OVERWRITE; 1919 : NO_OVERWRITE;
1952 SetSourcePosition(expr->position() + 1); 1920 SetSourcePosition(expr->position() + 1);
1953 AccumulatorValueContext context(this); 1921 AccumulatorValueContext context(this);
1954 if (ShouldInlineSmiCase(op)) { 1922 if (ShouldInlineSmiCase(op)) {
1955 EmitInlineSmiBinaryOp(expr->binary_operation(), 1923 EmitInlineSmiBinaryOp(expr->binary_operation(), op, mode, expr->target(),
1956 op,
1957 mode,
1958 expr->target(),
1959 expr->value()); 1924 expr->value());
1960 } else { 1925 } else {
1961 EmitBinaryOp(expr->binary_operation(), op, mode); 1926 EmitBinaryOp(expr->binary_operation(), op, mode);
1962 } 1927 }
1963 1928
1964 // Deoptimization point in case the binary operation may have side effects. 1929 // Deoptimization point in case the binary operation may have side effects.
1965 PrepareForBailout(expr->binary_operation(), TOS_REG); 1930 PrepareForBailout(expr->binary_operation(), TOS_REG);
1966 } else { 1931 } else {
1967 VisitForAccumulatorValue(expr->value()); 1932 VisitForAccumulatorValue(expr->value());
1968 } 1933 }
1969 1934
1970 // Record source position before possible IC call. 1935 // Record source position before possible IC call.
1971 SetSourcePosition(expr->position()); 1936 SetSourcePosition(expr->position());
1972 1937
1973 // Store the value. 1938 // Store the value.
1974 switch (assign_type) { 1939 switch (assign_type) {
1975 case VARIABLE: 1940 case VARIABLE:
1976 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1941 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1977 expr->op()); 1942 expr->op());
1978 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1943 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1979 context()->Plug(r0); 1944 context()->Plug(r3);
1980 break; 1945 break;
1981 case NAMED_PROPERTY: 1946 case NAMED_PROPERTY:
1982 EmitNamedPropertyAssignment(expr); 1947 EmitNamedPropertyAssignment(expr);
1983 break; 1948 break;
1949 case NAMED_SUPER_PROPERTY:
1950 EmitNamedSuperPropertyStore(property);
1951 context()->Plug(r3);
1952 break;
1984 case KEYED_PROPERTY: 1953 case KEYED_PROPERTY:
1985 EmitKeyedPropertyAssignment(expr); 1954 EmitKeyedPropertyAssignment(expr);
1986 break; 1955 break;
1987 } 1956 }
1988 } 1957 }
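The compound-assignment path in VisitAssignment follows a fixed recipe; a pseudocode summary of the emitted sequence (a sketch of control flow, not an API):

    // acc = Load(lhs);     // VARIABLE / NAMED / KEYED / NAMED_SUPER
    // push(acc);           // left operand parked on the stack (r3)
    // acc = Eval(rhs);     // right operand into the accumulator
    // acc = left op acc;   // inline smi fast path, else BinaryOpIC
    // Store(lhs, acc);     // bailout points recorded at each step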
1989 1958
1990 1959
1991 void FullCodeGenerator::VisitYield(Yield* expr) { 1960 void FullCodeGenerator::VisitYield(Yield* expr) {
1992 Comment cmnt(masm_, "[ Yield"); 1961 Comment cmnt(masm_, "[ Yield");
1993 // Evaluate yielded value first; the initial iterator definition depends on 1962 // Evaluate yielded value first; the initial iterator definition depends on
1994 // this. It stays on the stack while we update the iterator. 1963 // this. It stays on the stack while we update the iterator.
1995 VisitForStackValue(expr->expression()); 1964 VisitForStackValue(expr->expression());
1996 1965
1997 switch (expr->yield_kind()) { 1966 switch (expr->yield_kind()) {
1998 case Yield::kSuspend: 1967 case Yield::kSuspend:
1999 // Pop value from top-of-stack slot; box result into result register. 1968 // Pop value from top-of-stack slot; box result into result register.
2000 EmitCreateIteratorResult(false); 1969 EmitCreateIteratorResult(false);
2001 __ push(result_register()); 1970 __ push(result_register());
2002 // Fall through. 1971 // Fall through.
2003 case Yield::kInitial: { 1972 case Yield::kInitial: {
2004 Label suspend, continuation, post_runtime, resume; 1973 Label suspend, continuation, post_runtime, resume;
2005 1974
2006 __ jmp(&suspend); 1975 __ b(&suspend);
2007 1976
2008 __ bind(&continuation); 1977 __ bind(&continuation);
2009 __ jmp(&resume); 1978 __ b(&resume);
2010 1979
2011 __ bind(&suspend); 1980 __ bind(&suspend);
2012 VisitForAccumulatorValue(expr->generator_object()); 1981 VisitForAccumulatorValue(expr->generator_object());
2013 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 1982 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2014 __ mov(r1, Operand(Smi::FromInt(continuation.pos()))); 1983 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
2015 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 1984 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2016 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 1985 r0);
2017 __ mov(r1, cp); 1986 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2018 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 1987 __ mr(r4, cp);
1988 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2019 kLRHasBeenSaved, kDontSaveFPRegs); 1989 kLRHasBeenSaved, kDontSaveFPRegs);
2020 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); 1990 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2021 __ cmp(sp, r1); 1991 __ cmp(sp, r4);
2022 __ b(eq, &post_runtime); 1992 __ beq(&post_runtime);
2023 __ push(r0); // generator object 1993 __ push(r3); // generator object
2024 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 1994 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2025 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1995 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2026 __ bind(&post_runtime); 1996 __ bind(&post_runtime);
2027 __ pop(result_register()); 1997 __ pop(result_register());
2028 EmitReturnSequence(); 1998 EmitReturnSequence();
2029 1999
2030 __ bind(&resume); 2000 __ bind(&resume);
2031 context()->Plug(result_register()); 2001 context()->Plug(result_register());
2032 break; 2002 break;
2033 } 2003 }
2034 2004
2035 case Yield::kFinal: { 2005 case Yield::kFinal: {
2036 VisitForAccumulatorValue(expr->generator_object()); 2006 VisitForAccumulatorValue(expr->generator_object());
2037 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); 2007 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2038 __ str(r1, FieldMemOperand(result_register(), 2008 __ StoreP(r4, FieldMemOperand(result_register(),
2039 JSGeneratorObject::kContinuationOffset)); 2009 JSGeneratorObject::kContinuationOffset),
2010 r0);
2040 // Pop value from top-of-stack slot, box result into result register. 2011 // Pop value from top-of-stack slot, box result into result register.
2041 EmitCreateIteratorResult(true); 2012 EmitCreateIteratorResult(true);
2042 EmitUnwindBeforeReturn(); 2013 EmitUnwindBeforeReturn();
2043 EmitReturnSequence(); 2014 EmitReturnSequence();
2044 break; 2015 break;
2045 } 2016 }
2046 2017
2047 case Yield::kDelegating: { 2018 case Yield::kDelegating: {
2048 VisitForStackValue(expr->generator_object()); 2019 VisitForStackValue(expr->generator_object());
2049 2020
2050 // Initial stack layout is as follows: 2021 // Initial stack layout is as follows:
2051 // [sp + 1 * kPointerSize] iter 2022 // [sp + 1 * kPointerSize] iter
2052 // [sp + 0 * kPointerSize] g 2023 // [sp + 0 * kPointerSize] g
2053 2024
2054 Label l_catch, l_try, l_suspend, l_continuation, l_resume; 2025 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2055 Label l_next, l_call, l_loop; 2026 Label l_next, l_call;
2056 Register load_receiver = LoadDescriptor::ReceiverRegister(); 2027 Register load_receiver = LoadDescriptor::ReceiverRegister();
2057 Register load_name = LoadDescriptor::NameRegister(); 2028 Register load_name = LoadDescriptor::NameRegister();
2058 2029
2059 // Initial send value is undefined. 2030 // Initial send value is undefined.
2060 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 2031 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2061 __ b(&l_next); 2032 __ b(&l_next);
2062 2033
2063 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } 2034 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2064 __ bind(&l_catch); 2035 __ bind(&l_catch);
2065 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); 2036 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2066 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw" 2037 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2067 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2038 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2068 __ Push(load_name, r3, r0); // "throw", iter, except 2039 __ Push(load_name, r6, r3); // "throw", iter, except
2069 __ jmp(&l_call); 2040 __ b(&l_call);
2070 2041
2071 // try { received = %yield result } 2042 // try { received = %yield result }
2072 // Shuffle the received result above a try handler and yield it without 2043 // Shuffle the received result above a try handler and yield it without
2073 // re-boxing. 2044 // re-boxing.
2074 __ bind(&l_try); 2045 __ bind(&l_try);
2075 __ pop(r0); // result 2046 __ pop(r3); // result
2076 __ PushTryHandler(StackHandler::CATCH, expr->index()); 2047 __ PushTryHandler(StackHandler::CATCH, expr->index());
2077 const int handler_size = StackHandlerConstants::kSize; 2048 const int handler_size = StackHandlerConstants::kSize;
2078 __ push(r0); // result 2049 __ push(r3); // result
2079 __ jmp(&l_suspend); 2050 __ b(&l_suspend);
2080 __ bind(&l_continuation); 2051 __ bind(&l_continuation);
2081 __ jmp(&l_resume); 2052 __ b(&l_resume);
2082 __ bind(&l_suspend); 2053 __ bind(&l_suspend);
2083 const int generator_object_depth = kPointerSize + handler_size; 2054 const int generator_object_depth = kPointerSize + handler_size;
2084 __ ldr(r0, MemOperand(sp, generator_object_depth)); 2055 __ LoadP(r3, MemOperand(sp, generator_object_depth));
2085 __ push(r0); // g 2056 __ push(r3); // g
2086 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2057 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2087 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos()))); 2058 __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
2088 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 2059 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2089 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 2060 r0);
2090 __ mov(r1, cp); 2061 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2091 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 2062 __ mr(r4, cp);
2063 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2092 kLRHasBeenSaved, kDontSaveFPRegs); 2064 kLRHasBeenSaved, kDontSaveFPRegs);
2093 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2065 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2094 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2066 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2095 __ pop(r0); // result 2067 __ pop(r3); // result
2096 EmitReturnSequence(); 2068 EmitReturnSequence();
2097 __ bind(&l_resume); // received in r0 2069 __ bind(&l_resume); // received in r3
2098 __ PopTryHandler(); 2070 __ PopTryHandler();
2099 2071
2100 // receiver = iter; f = 'next'; arg = received; 2072 // receiver = iter; f = 'next'; arg = received;
2101 __ bind(&l_next); 2073 __ bind(&l_next);
2102 2074
2103 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next" 2075 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2104 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2076 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2105 __ Push(load_name, r3, r0); // "next", iter, received 2077 __ Push(load_name, r6, r3); // "next", iter, received
2106 2078
2107 // result = receiver[f](arg); 2079 // result = receiver[f](arg);
2108 __ bind(&l_call); 2080 __ bind(&l_call);
2109 __ ldr(load_receiver, MemOperand(sp, kPointerSize)); 2081 __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2110 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize)); 2082 __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2111 if (FLAG_vector_ics) { 2083 if (FLAG_vector_ics) {
2112 __ mov(VectorLoadICDescriptor::SlotRegister(), 2084 __ mov(VectorLoadICDescriptor::SlotRegister(),
2113 Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot()))); 2085 Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2114 } 2086 }
2115 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code(); 2087 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2116 CallIC(ic, TypeFeedbackId::None()); 2088 CallIC(ic, TypeFeedbackId::None());
2117 __ mov(r1, r0); 2089 __ mr(r4, r3);
2118 __ str(r1, MemOperand(sp, 2 * kPointerSize)); 2090 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2119 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); 2091 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2120 __ CallStub(&stub); 2092 __ CallStub(&stub);
2121 2093
2122 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2094 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2123 __ Drop(1); // The function is still on the stack; drop it. 2095 __ Drop(1); // The function is still on the stack; drop it.
2124 2096
2125 // if (!result.done) goto l_try; 2097 // if (!result.done) goto l_try;
2126 __ bind(&l_loop); 2098 __ Move(load_receiver, r3);
2127 __ Move(load_receiver, r0);
2128 2099
2129 __ push(load_receiver); // save result 2100 __ push(load_receiver); // save result
2130 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" 2101 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2131 if (FLAG_vector_ics) { 2102 if (FLAG_vector_ics) {
2132 __ mov(VectorLoadICDescriptor::SlotRegister(), 2103 __ mov(VectorLoadICDescriptor::SlotRegister(),
2133 Operand(Smi::FromInt(expr->DoneFeedbackSlot()))); 2104 Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
2134 } 2105 }
2135 CallLoadIC(NOT_CONTEXTUAL); // r0=result.done 2106 CallLoadIC(NOT_CONTEXTUAL); // r3=result.done

2136 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); 2107 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2137 CallIC(bool_ic); 2108 CallIC(bool_ic);
2138 __ cmp(r0, Operand(0)); 2109 __ cmpi(r3, Operand::Zero());
2139 __ b(eq, &l_try); 2110 __ beq(&l_try);
2140 2111
2141 // result.value 2112 // result.value
2142 __ pop(load_receiver); // result 2113 __ pop(load_receiver); // result
2143 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" 2114 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2144 if (FLAG_vector_ics) { 2115 if (FLAG_vector_ics) {
2145 __ mov(VectorLoadICDescriptor::SlotRegister(), 2116 __ mov(VectorLoadICDescriptor::SlotRegister(),
2146 Operand(Smi::FromInt(expr->ValueFeedbackSlot()))); 2117 Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
2147 } 2118 }
2148 CallLoadIC(NOT_CONTEXTUAL); // r0=result.value 2119 CallLoadIC(NOT_CONTEXTUAL); // r3=result.value
2149 context()->DropAndPlug(2, r0); // drop iter and g 2120 context()->DropAndPlug(2, r3); // drop iter and g
2150 break; 2121 break;
2151 } 2122 }
2152 } 2123 }
2153 } 2124 }
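The delegating case (yield*) is easiest to read against the desugaring its labels implement; a reconstruction in pseudocode, matching the in-line comments above:

    // received = undefined;                  // initial send value
    // for (;;) {
    //   result = iter[f](received);          // l_next/l_catch -> l_call,
    //                                        // f is 'next' or 'throw'
    //   if (ToBoolean(result.done)) break;   // via ToBooleanStub
    //   received = yield result;             // l_try .. l_suspend/l_resume
    // }
    // result.value is left as the value of the whole expression.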
2154 2125
2155 2126
2156 void FullCodeGenerator::EmitGeneratorResume(Expression *generator, 2127 void FullCodeGenerator::EmitGeneratorResume(
2157 Expression *value, 2128 Expression* generator, Expression* value,
2158 JSGeneratorObject::ResumeMode resume_mode) { 2129 JSGeneratorObject::ResumeMode resume_mode) {
2159 // The value stays in r0, and is ultimately read by the resumed generator, as 2130 // The value stays in r3, and is ultimately read by the resumed generator, as
2160 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it 2131 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2161 // is read to throw the value when the resumed generator is already closed. 2132 // is read to throw the value when the resumed generator is already closed.
2162 // r1 will hold the generator object until the activation has been resumed. 2133 // r4 will hold the generator object until the activation has been resumed.
2163 VisitForStackValue(generator); 2134 VisitForStackValue(generator);
2164 VisitForAccumulatorValue(value); 2135 VisitForAccumulatorValue(value);
2165 __ pop(r1); 2136 __ pop(r4);
2166 2137
2167 // Check generator state. 2138 // Check generator state.
2168 Label wrong_state, closed_state, done; 2139 Label wrong_state, closed_state, done;
2169 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2140 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2170 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); 2141 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2171 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); 2142 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2172 __ cmp(r3, Operand(Smi::FromInt(0))); 2143 __ CmpSmiLiteral(r6, Smi::FromInt(0), r0);
2173 __ b(eq, &closed_state); 2144 __ beq(&closed_state);
2174 __ b(lt, &wrong_state); 2145 __ blt(&wrong_state);
2175 2146
2176 // Load suspended function and context. 2147 // Load suspended function and context.
2177 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset)); 2148 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2178 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset)); 2149 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2179 2150
2180 // Load receiver and store as the first argument. 2151 // Load receiver and store as the first argument.
2181 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset)); 2152 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2182 __ push(r2); 2153 __ push(r5);
2183 2154
2184 // Push holes for the rest of the arguments to the generator function. 2155 // Push holes for the rest of the arguments to the generator function.
2185 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); 2156 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2186 __ ldr(r3, 2157 __ LoadWordArith(
2187 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); 2158 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2188 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); 2159 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2189 Label push_argument_holes, push_frame; 2160 Label argument_loop, push_frame;
2190 __ bind(&push_argument_holes); 2161 #if V8_TARGET_ARCH_PPC64
2191 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC); 2162 __ cmpi(r6, Operand::Zero());
2192 __ b(mi, &push_frame); 2163 __ beq(&push_frame);
2193 __ push(r2); 2164 #else
2194 __ jmp(&push_argument_holes); 2165 __ SmiUntag(r6, SetRC);
2166 __ beq(&push_frame, cr0);
2167 #endif
2168 __ mtctr(r6);
2169 __ bind(&argument_loop);
2170 __ push(r5);
2171 __ bdnz(&argument_loop);
2195 2172
2196 // Enter a new JavaScript frame, and initialize its slots as they were when 2173 // Enter a new JavaScript frame, and initialize its slots as they were when
2197 // the generator was suspended. 2174 // the generator was suspended.
2198 Label resume_frame; 2175 Label resume_frame;
2199 __ bind(&push_frame); 2176 __ bind(&push_frame);
2200 __ bl(&resume_frame); 2177 __ b(&resume_frame, SetLK);
2201 __ jmp(&done); 2178 __ b(&done);
2202 __ bind(&resume_frame); 2179 __ bind(&resume_frame);
2203 // lr = return address. 2180 // lr = return address.
2204 // fp = caller's frame pointer. 2181 // fp = caller's frame pointer.
2205 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2206 // cp = callee's context, 2182 // cp = callee's context,
2207 // r4 = callee's JS function. 2183 // r7 = callee's JS function.
2208 __ PushFixedFrame(r4); 2184 __ PushFixedFrame(r7);
2209 // Adjust FP to point to saved FP. 2185 // Adjust FP to point to saved FP.
2210 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); 2186 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2211 2187
2212 // Load the operand stack size. 2188 // Load the operand stack size.
2213 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset)); 2189 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2214 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset)); 2190 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2215 __ SmiUntag(r3); 2191 __ SmiUntag(r6, SetRC);
2216 2192
2217 // If we are sending a value and there is no operand stack, we can jump back 2193 // If we are sending a value and there is no operand stack, we can jump back
2218 // in directly. 2194 // in directly.
2195 Label call_resume;
2219 if (resume_mode == JSGeneratorObject::NEXT) { 2196 if (resume_mode == JSGeneratorObject::NEXT) {
2220 Label slow_resume; 2197 Label slow_resume;
2221 __ cmp(r3, Operand(0)); 2198 __ bne(&slow_resume, cr0);
2222 __ b(ne, &slow_resume); 2199 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2223 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); 2200 #if V8_OOL_CONSTANT_POOL
2224 2201 {
2225 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); 2202 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2226 if (FLAG_enable_ool_constant_pool) { 2203 // Load the new code object's constant pool pointer.
2227 // Load the new code object's constant pool pointer. 2204 __ LoadP(kConstantPoolRegister,
2228 __ ldr(pp, 2205 MemOperand(ip, Code::kConstantPoolOffset - Code::kHeaderSize));
2229 MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize)); 2206 #endif
2230 } 2207 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2231 2208 __ SmiUntag(r5);
2232 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2209 __ add(ip, ip, r5);
2233 __ SmiUntag(r2); 2210 __ LoadSmiLiteral(r5,
2234 __ add(r3, r3, r2); 2211 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2235 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); 2212 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2236 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2213 r0);
2237 __ Jump(r3); 2214 __ Jump(ip);
2215 __ bind(&slow_resume);
2216 #if V8_OOL_CONSTANT_POOL
2238 } 2217 }
2239 __ bind(&slow_resume); 2218 #endif
2219 } else {
2220 __ beq(&call_resume, cr0);
2240 } 2221 }
2241 2222
2242 // Otherwise, we push holes for the operand stack and call the runtime to fix 2223 // Otherwise, we push holes for the operand stack and call the runtime to fix
2243 // up the stack and the handlers. 2224 // up the stack and the handlers.
2244 Label push_operand_holes, call_resume; 2225 Label operand_loop;
2245 __ bind(&push_operand_holes); 2226 __ mtctr(r6);
2246 __ sub(r3, r3, Operand(1), SetCC); 2227 __ bind(&operand_loop);
2247 __ b(mi, &call_resume); 2228 __ push(r5);
2248 __ push(r2); 2229 __ bdnz(&operand_loop);
2249 __ b(&push_operand_holes); 2230
2250 __ bind(&call_resume); 2231 __ bind(&call_resume);
2251 DCHECK(!result_register().is(r1)); 2232 DCHECK(!result_register().is(r4));
2252 __ Push(r1, result_register()); 2233 __ Push(r4, result_register());
2253 __ Push(Smi::FromInt(resume_mode)); 2234 __ Push(Smi::FromInt(resume_mode));
2254 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); 2235 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2255 // Not reached: the runtime call returns elsewhere. 2236 // Not reached: the runtime call returns elsewhere.
2256 __ stop("not-reached"); 2237 __ stop("not-reached");
2257 2238
2258 // Reach here when generator is closed. 2239 // Reach here when generator is closed.
2259 __ bind(&closed_state); 2240 __ bind(&closed_state);
2260 if (resume_mode == JSGeneratorObject::NEXT) { 2241 if (resume_mode == JSGeneratorObject::NEXT) {
2261 // Return completed iterator result when generator is closed. 2242 // Return completed iterator result when generator is closed.
2262 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2243 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2263 __ push(r2); 2244 __ push(r5);
2264 // Pop value from top-of-stack slot; box result into result register. 2245 // Pop value from top-of-stack slot; box result into result register.
2265 EmitCreateIteratorResult(true); 2246 EmitCreateIteratorResult(true);
2266 } else { 2247 } else {
2267 // Throw the provided value. 2248 // Throw the provided value.
2268 __ push(r0); 2249 __ push(r3);
2269 __ CallRuntime(Runtime::kThrow, 1); 2250 __ CallRuntime(Runtime::kThrow, 1);
2270 } 2251 }
2271 __ jmp(&done); 2252 __ b(&done);
2272 2253
2273 // Throw error if we attempt to operate on a running generator. 2254 // Throw error if we attempt to operate on a running generator.
2274 __ bind(&wrong_state); 2255 __ bind(&wrong_state);
2275 __ push(r1); 2256 __ push(r4);
2276 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1); 2257 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2277 2258
2278 __ bind(&done); 2259 __ bind(&done);
2279 context()->Plug(result_register()); 2260 context()->Plug(result_register());
2280 } 2261 }
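The #if split when loading the formal parameter count in EmitGeneratorResume reflects how that field is read; a hedged sketch, assuming (as the code suggests) that LoadWordArith yields a ready-to-use integer on PPC64 while the 32-bit path reads a smi:

    #if V8_TARGET_ARCH_PPC64
    int count = raw_count;       // sign-extending 32-bit load, no tag
    #else
    int count = smi_count >> 1;  // SmiUntag: drop the kSmiTagSize bit
    #endif
    // Either way, 'count' holes are then pushed with an mtctr/bdnz loop.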
2281 2262
2282 2263
2283 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { 2264 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2284 Label gc_required; 2265 Label gc_required;
2285 Label allocated; 2266 Label allocated;
2286 2267
2287 Handle<Map> map(isolate()->native_context()->iterator_result_map()); 2268 Handle<Map> map(isolate()->native_context()->iterator_result_map());
2288 2269
2289 __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT); 2270 __ Allocate(map->instance_size(), r3, r5, r6, &gc_required, TAG_OBJECT);
2290 __ jmp(&allocated); 2271 __ b(&allocated);
2291 2272
2292 __ bind(&gc_required); 2273 __ bind(&gc_required);
2293 __ Push(Smi::FromInt(map->instance_size())); 2274 __ Push(Smi::FromInt(map->instance_size()));
2294 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 2275 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2295 __ ldr(context_register(), 2276 __ LoadP(context_register(),
2296 MemOperand(fp, StandardFrameConstants::kContextOffset)); 2277 MemOperand(fp, StandardFrameConstants::kContextOffset));
2297 2278
2298 __ bind(&allocated); 2279 __ bind(&allocated);
2299 __ mov(r1, Operand(map)); 2280 __ mov(r4, Operand(map));
2300 __ pop(r2); 2281 __ pop(r5);
2301 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done))); 2282 __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
2302 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array())); 2283 __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
2303 DCHECK_EQ(map->instance_size(), 5 * kPointerSize); 2284 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2304 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2285 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2305 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 2286 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2306 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); 2287 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2307 __ str(r2, 2288 __ StoreP(r5,
2308 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset)); 2289 FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
2309 __ str(r3, 2290 r0);
2310 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset)); 2291 __ StoreP(r6,
2292 FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
2293 r0);
2311 2294
2312 // Only the value field needs a write barrier, as the other values are in the 2295 // Only the value field needs a write barrier, as the other values are in the
2313 // root set. 2296 // root set.
2314 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset, 2297 __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset, r5, r6,
2315 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 2298 kLRHasBeenSaved, kDontSaveFPRegs);
2316 } 2299 }
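For orientation: EmitCreateIteratorResult assembles the standard iterator result object, and the single write barrier is deliberate. A sketch of the shape and the reasoning:

    // result = { value: <popped TOS>, done: <true|false> }
    // The map, empty_fixed_array and the boolean are all in the root set,
    // so the GC always knows them; only 'value' may point into new space,
    // hence one RecordWriteField on kResultValuePropertyOffset.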
2317 2300
2318 2301
2319 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 2302 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2320 SetSourcePosition(prop->position()); 2303 SetSourcePosition(prop->position());
2321 Literal* key = prop->key()->AsLiteral(); 2304 Literal* key = prop->key()->AsLiteral();
2305 DCHECK(!prop->IsSuperAccess());
2322 2306
2323 __ mov(LoadDescriptor::NameRegister(), Operand(key->value())); 2307 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2324 if (FLAG_vector_ics) { 2308 if (FLAG_vector_ics) {
2325 __ mov(VectorLoadICDescriptor::SlotRegister(), 2309 __ mov(VectorLoadICDescriptor::SlotRegister(),
2326 Operand(Smi::FromInt(prop->PropertyFeedbackSlot()))); 2310 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2327 CallLoadIC(NOT_CONTEXTUAL); 2311 CallLoadIC(NOT_CONTEXTUAL);
2328 } else { 2312 } else {
2329 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId()); 2313 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2330 } 2314 }
2331 } 2315 }
2332 2316
2333 2317
2334 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) { 2318 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2319 // Stack: receiver, home_object.
2335 SetSourcePosition(prop->position()); 2320 SetSourcePosition(prop->position());
2336 Literal* key = prop->key()->AsLiteral(); 2321 Literal* key = prop->key()->AsLiteral();
2337 DCHECK(!key->value()->IsSmi()); 2322 DCHECK(!key->value()->IsSmi());
2338 DCHECK(prop->IsSuperAccess()); 2323 DCHECK(prop->IsSuperAccess());
2339 2324
2340 SuperReference* super_ref = prop->obj()->AsSuperReference();
2341 EmitLoadHomeObject(super_ref);
2342 __ Push(r0);
2343 VisitForStackValue(super_ref->this_var());
2344 __ Push(key->value()); 2325 __ Push(key->value());
2345 __ CallRuntime(Runtime::kLoadFromSuper, 3); 2326 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2346 } 2327 }
2347 2328
2348 2329
2349 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 2330 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2350 SetSourcePosition(prop->position()); 2331 SetSourcePosition(prop->position());
2351 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code(); 2332 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2352 if (FLAG_vector_ics) { 2333 if (FLAG_vector_ics) {
2353 __ mov(VectorLoadICDescriptor::SlotRegister(), 2334 __ mov(VectorLoadICDescriptor::SlotRegister(),
2354 Operand(Smi::FromInt(prop->PropertyFeedbackSlot()))); 2335 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2355 CallIC(ic); 2336 CallIC(ic);
2356 } else { 2337 } else {
2357 CallIC(ic, prop->PropertyFeedbackId()); 2338 CallIC(ic, prop->PropertyFeedbackId());
2358 } 2339 }
2359 } 2340 }
2360 2341
2361 2342
2343 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2344 // Stack: receiver, home_object, key.
2345 SetSourcePosition(prop->position());
2346
2347 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2348 }
2349
2350
2362 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 2351 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2363 Token::Value op, 2352 Token::Value op,
2364 OverwriteMode mode, 2353 OverwriteMode mode,
2365 Expression* left_expr, 2354 Expression* left_expr,
2366 Expression* right_expr) { 2355 Expression* right_expr) {
2367 Label done, smi_case, stub_call; 2356 Label done, smi_case, stub_call;
2368 2357
2369 Register scratch1 = r2; 2358 Register scratch1 = r5;
2370 Register scratch2 = r3; 2359 Register scratch2 = r6;
2371 2360
2372 // Get the arguments. 2361 // Get the arguments.
2373 Register left = r1; 2362 Register left = r4;
2374 Register right = r0; 2363 Register right = r3;
2375 __ pop(left); 2364 __ pop(left);
2376 2365
2377 // Perform combined smi check on both operands. 2366 // Perform combined smi check on both operands.
2378 __ orr(scratch1, left, Operand(right)); 2367 __ orx(scratch1, left, right);
2379 STATIC_ASSERT(kSmiTag == 0); 2368 STATIC_ASSERT(kSmiTag == 0);
2380 JumpPatchSite patch_site(masm_); 2369 JumpPatchSite patch_site(masm_);
2381 patch_site.EmitJumpIfSmi(scratch1, &smi_case); 2370 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2382 2371
2383 __ bind(&stub_call); 2372 __ bind(&stub_call);
2384 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code(); 2373 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2385 CallIC(code, expr->BinaryOperationFeedbackId()); 2374 CallIC(code, expr->BinaryOperationFeedbackId());
2386 patch_site.EmitPatchInfo(); 2375 patch_site.EmitPatchInfo();
2387 __ jmp(&done); 2376 __ b(&done);
2388 2377
2389 __ bind(&smi_case); 2378 __ bind(&smi_case);
2390 // Smi case. This code works the same way as the smi-smi case in the type 2379 // Smi case. This code works the same way as the smi-smi case in the type
2391 // recording binary operation stub, see 2380 // recording binary operation stub.
2392 switch (op) { 2381 switch (op) {
2393 case Token::SAR: 2382 case Token::SAR:
2394 __ GetLeastBitsFromSmi(scratch1, right, 5); 2383 __ GetLeastBitsFromSmi(scratch1, right, 5);
2395 __ mov(right, Operand(left, ASR, scratch1)); 2384 __ ShiftRightArith(right, left, scratch1);
2396 __ bic(right, right, Operand(kSmiTagMask)); 2385 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2397 break; 2386 break;
2398 case Token::SHL: { 2387 case Token::SHL: {
2388 __ GetLeastBitsFromSmi(scratch2, right, 5);
2389 #if V8_TARGET_ARCH_PPC64
2390 __ ShiftLeft_(right, left, scratch2);
2391 #else
2399 __ SmiUntag(scratch1, left); 2392 __ SmiUntag(scratch1, left);
2400 __ GetLeastBitsFromSmi(scratch2, right, 5); 2393 __ ShiftLeft_(scratch1, scratch1, scratch2);
2401 __ mov(scratch1, Operand(scratch1, LSL, scratch2)); 2394 // Check that the *signed* result fits in a smi
2402 __ TrySmiTag(right, scratch1, &stub_call); 2395 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2396 __ SmiTag(right, scratch1);
2397 #endif
2403 break; 2398 break;
2404 } 2399 }
2405 case Token::SHR: { 2400 case Token::SHR: {
2406 __ SmiUntag(scratch1, left); 2401 __ SmiUntag(scratch1, left);
2407 __ GetLeastBitsFromSmi(scratch2, right, 5); 2402 __ GetLeastBitsFromSmi(scratch2, right, 5);
2408 __ mov(scratch1, Operand(scratch1, LSR, scratch2)); 2403 __ srw(scratch1, scratch1, scratch2);
2409 __ tst(scratch1, Operand(0xc0000000)); 2404 // Unsigned shift is not allowed to produce a negative number.
2410 __ b(ne, &stub_call); 2405 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2411 __ SmiTag(right, scratch1); 2406 __ SmiTag(right, scratch1);
2412 break; 2407 break;
2413 } 2408 }
2414 case Token::ADD: 2409 case Token::ADD: {
2415 __ add(scratch1, left, Operand(right), SetCC); 2410 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2416 __ b(vs, &stub_call); 2411 __ bne(&stub_call, cr0);
2417 __ mov(right, scratch1); 2412 __ mr(right, scratch1);
2418 break; 2413 break;
2419 case Token::SUB: 2414 }
2420 __ sub(scratch1, left, Operand(right), SetCC); 2415 case Token::SUB: {
2421 __ b(vs, &stub_call); 2416 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2422 __ mov(right, scratch1); 2417 __ bne(&stub_call, cr0);
2418 __ mr(right, scratch1);
2423 break; 2419 break;
2420 }
2424 case Token::MUL: { 2421 case Token::MUL: {
2422 Label mul_zero;
2423 #if V8_TARGET_ARCH_PPC64
2424 // Remove tag from both operands.
2425 __ SmiUntag(ip, right); 2425 __ SmiUntag(ip, right);
2426 __ smull(scratch1, scratch2, left, ip); 2426 __ SmiUntag(r0, left);
2427 __ mov(ip, Operand(scratch1, ASR, 31)); 2427 __ Mul(scratch1, r0, ip);
2428 __ cmp(ip, Operand(scratch2)); 2428 // Check for overflowing the smi range - no overflow if higher 33 bits of
2429 __ b(ne, &stub_call); 2429 // the result are identical.
2430 __ cmp(scratch1, Operand::Zero()); 2430 __ TestIfInt32(scratch1, scratch2, ip);
2431 __ mov(right, Operand(scratch1), LeaveCC, ne); 2431 __ bne(&stub_call);
2432 __ b(ne, &done); 2432 #else
2433 __ add(scratch2, right, Operand(left), SetCC); 2433 __ SmiUntag(ip, right);
2434 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl); 2434 __ mullw(scratch1, left, ip);
2435 __ b(mi, &stub_call); 2435 __ mulhw(scratch2, left, ip);
2436 // Check for overflowing the smi range - no overflow if higher 33 bits of
2437 // the result are identical.
2438 __ TestIfInt32(scratch2, scratch1, ip);
2439 __ bne(&stub_call);
2440 #endif
2441 // Go slow on zero result to handle -0.
2442 __ cmpi(scratch1, Operand::Zero());
2443 __ beq(&mul_zero);
2444 #if V8_TARGET_ARCH_PPC64
2445 __ SmiTag(right, scratch1);
2446 #else
2447 __ mr(right, scratch1);
2448 #endif
2449 __ b(&done);
2450 // The result is 0 and we know one operand was zero; if the other
2451 // operand was negative the correct result is -0, which is not a smi.
2452 __ bind(&mul_zero);
2453 __ add(scratch2, right, left);
2454 __ cmpi(scratch2, Operand::Zero());
2455 __ blt(&stub_call);
2456 __ LoadSmiLiteral(right, Smi::FromInt(0));
2436 break; 2457 break;
2437 } 2458 }
2438 case Token::BIT_OR: 2459 case Token::BIT_OR:
2439 __ orr(right, left, Operand(right)); 2460 __ orx(right, left, right);
2440 break; 2461 break;
2441 case Token::BIT_AND: 2462 case Token::BIT_AND:
2442 __ and_(right, left, Operand(right)); 2463 __ and_(right, left, right);
2443 break; 2464 break;
2444 case Token::BIT_XOR: 2465 case Token::BIT_XOR:
2445 __ eor(right, left, Operand(right)); 2466 __ xor_(right, left, right);
2446 break; 2467 break;
2447 default: 2468 default:
2448 UNREACHABLE(); 2469 UNREACHABLE();
2449 } 2470 }
2450 2471
2451 __ bind(&done); 2472 __ bind(&done);
2452 context()->Plug(r0); 2473 context()->Plug(r3);
2453 } 2474 }
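The ADD/SUB fast paths depend on AddAndCheckForOverflow / SubAndCheckForOverflow setting CR0. The underlying two's-complement identity, as a standalone sketch (not the literal PPC implementation):

    #include <cstdint>

    // Signed overflow occurred iff the sum's sign differs from the sign
    // of both operands.
    bool AddOverflows(intptr_t a, intptr_t b, intptr_t* sum) {
      uintptr_t u = static_cast<uintptr_t>(a) + static_cast<uintptr_t>(b);
      *sum = static_cast<intptr_t>(u);       // wraps instead of trapping
      return ((a ^ *sum) & (b ^ *sum)) < 0;  // sign-bit test
    }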
2454 2475
2455 2476
2456 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2477 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op,
2457 Token::Value op,
2458 OverwriteMode mode) { 2478 OverwriteMode mode) {
2459 __ pop(r1); 2479 __ pop(r4);
2460 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code(); 2480 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2461 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2481 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2462 CallIC(code, expr->BinaryOperationFeedbackId()); 2482 CallIC(code, expr->BinaryOperationFeedbackId());
2463 patch_site.EmitPatchInfo(); 2483 patch_site.EmitPatchInfo();
2464 context()->Plug(r0); 2484 context()->Plug(r3);
2465 } 2485 }
2466 2486
2467 2487
2468 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2488 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2469 DCHECK(expr->IsValidReferenceExpression()); 2489 DCHECK(expr->IsValidReferenceExpression());
2470 2490
2471 // Left-hand side can only be a property, a global or a (parameter or local) 2491 // Left-hand side can only be a property, a global or a (parameter or local)
2472 // slot. 2492 // slot.
2473 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2493 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2474 LhsKind assign_type = VARIABLE; 2494 LhsKind assign_type = VARIABLE;
2475 Property* prop = expr->AsProperty(); 2495 Property* prop = expr->AsProperty();
2476 if (prop != NULL) { 2496 if (prop != NULL) {
2477 assign_type = (prop->key()->IsPropertyName()) 2497 assign_type =
2478 ? NAMED_PROPERTY 2498 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
2479 : KEYED_PROPERTY;
2480 } 2499 }
2481 2500
2482 switch (assign_type) { 2501 switch (assign_type) {
2483 case VARIABLE: { 2502 case VARIABLE: {
2484 Variable* var = expr->AsVariableProxy()->var(); 2503 Variable* var = expr->AsVariableProxy()->var();
2485 EffectContext context(this); 2504 EffectContext context(this);
2486 EmitVariableAssignment(var, Token::ASSIGN); 2505 EmitVariableAssignment(var, Token::ASSIGN);
2487 break; 2506 break;
2488 } 2507 }
2489 case NAMED_PROPERTY: { 2508 case NAMED_PROPERTY: {
2490 __ push(r0); // Preserve value. 2509 __ push(r3); // Preserve value.
2491 VisitForAccumulatorValue(prop->obj()); 2510 VisitForAccumulatorValue(prop->obj());
2492 __ Move(StoreDescriptor::ReceiverRegister(), r0); 2511 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2493 __ pop(StoreDescriptor::ValueRegister()); // Restore value. 2512 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2494 __ mov(StoreDescriptor::NameRegister(), 2513 __ mov(StoreDescriptor::NameRegister(),
2495 Operand(prop->key()->AsLiteral()->value())); 2514 Operand(prop->key()->AsLiteral()->value()));
2496 CallStoreIC(); 2515 CallStoreIC();
2497 break; 2516 break;
2498 } 2517 }
2499 case KEYED_PROPERTY: { 2518 case KEYED_PROPERTY: {
2500 __ push(r0); // Preserve value. 2519 __ push(r3); // Preserve value.
2501 VisitForStackValue(prop->obj()); 2520 VisitForStackValue(prop->obj());
2502 VisitForAccumulatorValue(prop->key()); 2521 VisitForAccumulatorValue(prop->key());
2503 __ Move(StoreDescriptor::NameRegister(), r0); 2522 __ Move(StoreDescriptor::NameRegister(), r3);
2504 __ Pop(StoreDescriptor::ValueRegister(), 2523 __ Pop(StoreDescriptor::ValueRegister(),
2505 StoreDescriptor::ReceiverRegister()); 2524 StoreDescriptor::ReceiverRegister());
2506 Handle<Code> ic = 2525 Handle<Code> ic =
2507 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 2526 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2508 CallIC(ic); 2527 CallIC(ic);
2509 break; 2528 break;
2510 } 2529 }
2511 } 2530 }
2512 context()->Plug(r0); 2531 context()->Plug(r3);
2513 } 2532 }
2514 2533
2515 2534
2516 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 2535 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2517 Variable* var, MemOperand location) { 2536 Variable* var, MemOperand location) {
2518 __ str(result_register(), location); 2537 __ StoreP(result_register(), location, r0);
2519 if (var->IsContextSlot()) { 2538 if (var->IsContextSlot()) {
2520 // RecordWrite may destroy all its register arguments. 2539 // RecordWrite may destroy all its register arguments.
2521 __ mov(r3, result_register()); 2540 __ mr(r6, result_register());
2522 int offset = Context::SlotOffset(var->index()); 2541 int offset = Context::SlotOffset(var->index());
2523 __ RecordWriteContextSlot( 2542 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2524 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 2543 kDontSaveFPRegs);
2525 } 2544 }
2526 } 2545 }
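Context::SlotOffset(var->index()) used above is plain pointer arithmetic; a sketch with the constants as in V8's Context layout (fixed header, then pointer-sized slots, minus the heap-object tag):

    int SlotOffsetSketch(int index) {
      return Context::kHeaderSize + index * kPointerSize - kHeapObjectTag;
    }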
2527 2546
2528 2547
2529 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { 2548 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2530 if (var->IsUnallocated()) { 2549 if (var->IsUnallocated()) {
2531 // Global var, const, or let. 2550 // Global var, const, or let.
2532 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); 2551 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2533 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand()); 2552 __ LoadP(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2534 CallStoreIC(); 2553 CallStoreIC();
2535 2554
2536 } else if (op == Token::INIT_CONST_LEGACY) { 2555 } else if (op == Token::INIT_CONST_LEGACY) {
2537 // Const initializers need a write barrier. 2556 // Const initializers need a write barrier.
2538 DCHECK(!var->IsParameter()); // No const parameters. 2557 DCHECK(!var->IsParameter()); // No const parameters.
2539 if (var->IsLookupSlot()) { 2558 if (var->IsLookupSlot()) {
2540 __ push(r0); 2559 __ push(r3);
2541 __ mov(r0, Operand(var->name())); 2560 __ mov(r3, Operand(var->name()));
2542 __ Push(cp, r0); // Context and name. 2561 __ Push(cp, r3); // Context and name.
2543 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); 2562 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2544 } else { 2563 } else {
2545 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2564 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2546 Label skip; 2565 Label skip;
2547 MemOperand location = VarOperand(var, r1); 2566 MemOperand location = VarOperand(var, r4);
2548 __ ldr(r2, location); 2567 __ LoadP(r5, location);
2549 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2568 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2550 __ b(ne, &skip); 2569 __ bne(&skip);
2551 EmitStoreToStackLocalOrContextSlot(var, location); 2570 EmitStoreToStackLocalOrContextSlot(var, location);
2552 __ bind(&skip); 2571 __ bind(&skip);
2553 } 2572 }
2554 2573
2555 } else if (var->mode() == LET && op != Token::INIT_LET) { 2574 } else if (var->mode() == LET && op != Token::INIT_LET) {
2556 // Non-initializing assignment to let variable needs a write barrier. 2575 // Non-initializing assignment to let variable needs a write barrier.
2557 DCHECK(!var->IsLookupSlot()); 2576 DCHECK(!var->IsLookupSlot());
2558 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2577 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2559 Label assign; 2578 Label assign;
2560 MemOperand location = VarOperand(var, r1); 2579 MemOperand location = VarOperand(var, r4);
2561 __ ldr(r3, location); 2580 __ LoadP(r6, location);
2562 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 2581 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2563 __ b(ne, &assign); 2582 __ bne(&assign);
2564 __ mov(r3, Operand(var->name())); 2583 __ mov(r6, Operand(var->name()));
2565 __ push(r3); 2584 __ push(r6);
2566 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2585 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2567 // Perform the assignment. 2586 // Perform the assignment.
2568 __ bind(&assign); 2587 __ bind(&assign);
2569 EmitStoreToStackLocalOrContextSlot(var, location); 2588 EmitStoreToStackLocalOrContextSlot(var, location);
2570 2589
2571 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { 2590 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2572 if (var->IsLookupSlot()) { 2591 if (var->IsLookupSlot()) {
2573 // Assignment to var. 2592 // Assignment to var.
2574 __ push(r0); // Value. 2593 __ push(r3); // Value.
2575 __ mov(r1, Operand(var->name())); 2594 __ mov(r4, Operand(var->name()));
2576 __ mov(r0, Operand(Smi::FromInt(strict_mode()))); 2595 __ mov(r3, Operand(Smi::FromInt(strict_mode())));
2577 __ Push(cp, r1, r0); // Context, name, strict mode. 2596 __ Push(cp, r4, r3); // Context, name, strict mode.
2578 __ CallRuntime(Runtime::kStoreLookupSlot, 4); 2597 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2579 } else { 2598 } else {
2580 // Assignment to var or initializing assignment to let/const in harmony 2599 // Assignment to var or initializing assignment to let/const in harmony
2581 // mode. 2600 // mode.
2582 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); 2601 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2583 MemOperand location = VarOperand(var, r1); 2602 MemOperand location = VarOperand(var, r4);
2584 if (generate_debug_code_ && op == Token::INIT_LET) { 2603 if (generate_debug_code_ && op == Token::INIT_LET) {
2585 // Check for an uninitialized let binding. 2604 // Check for an uninitialized let binding.
2586 __ ldr(r2, location); 2605 __ LoadP(r5, location);
2587 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2606 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2588 __ Check(eq, kLetBindingReInitialization); 2607 __ Check(eq, kLetBindingReInitialization);
2589 } 2608 }
2590 EmitStoreToStackLocalOrContextSlot(var, location); 2609 EmitStoreToStackLocalOrContextSlot(var, location);
2591 } 2610 }
2592 } 2611 }
2593 // Non-initializing assignments to consts are ignored. 2612 // Non-initializing assignments to consts are ignored.
2594 } 2613 }
2595 2614
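The cases above all hinge on the hole sentinel: an uninitialized let or legacy-const slot holds the-hole, and the CompareRoot/branch pairs decide between throwing, skipping, and storing. A minimal standalone C++ sketch of the same pattern, using std::optional as a stand-in for the hole (illustrative only, not V8 API):

    #include <optional>
    #include <stdexcept>
    #include <string>

    // Empty optional == the-hole (binding not yet initialized).
    using Slot = std::optional<int>;

    void AssignToLet(Slot& slot, int value, const std::string& name) {
      // Mirrors the LET branch: a non-initializing store to an
      // uninitialized binding throws a ReferenceError.
      if (!slot.has_value())
        throw std::runtime_error("ReferenceError: " + name);
      slot = value;  // EmitStoreToStackLocalOrContextSlot equivalent
    }

    void InitConstLegacy(Slot& slot, int value) {
      // Mirrors the INIT_CONST_LEGACY branch: only the first store
      // lands; re-initialization branches to &skip and is ignored.
      if (!slot.has_value()) slot = value;
    }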
2596 2615
2597 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2616 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2598 // Assignment to a property, using a named store IC. 2617 // Assignment to a property, using a named store IC.
2599 Property* prop = expr->target()->AsProperty(); 2618 Property* prop = expr->target()->AsProperty();
2600 DCHECK(prop != NULL); 2619 DCHECK(prop != NULL);
2601 DCHECK(prop->key()->IsLiteral()); 2620 DCHECK(prop->key()->IsLiteral());
2602 2621
2603 // Record source code position before IC call. 2622 // Record source code position before IC call.
2604 SetSourcePosition(expr->position()); 2623 SetSourcePosition(expr->position());
2605 __ mov(StoreDescriptor::NameRegister(), 2624 __ mov(StoreDescriptor::NameRegister(),
2606 Operand(prop->key()->AsLiteral()->value())); 2625 Operand(prop->key()->AsLiteral()->value()));
2607 __ pop(StoreDescriptor::ReceiverRegister()); 2626 __ pop(StoreDescriptor::ReceiverRegister());
2608 CallStoreIC(expr->AssignmentFeedbackId()); 2627 CallStoreIC(expr->AssignmentFeedbackId());
2609 2628
2610 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2629 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2611 context()->Plug(r0); 2630 context()->Plug(r3);
2612 } 2631 }
2613 2632
2614 2633
2634 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2635 // Assignment to named property of super.
2636 // r3 : value
2637 // stack : receiver ('this'), home_object
2638 DCHECK(prop != NULL);
2639 Literal* key = prop->key()->AsLiteral();
2640 DCHECK(key != NULL);
2641
2642 __ Push(r3);
2643 __ Push(key->value());
2644 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2645 : Runtime::kStoreToSuper_Sloppy),
2646 4);
2647 }
2648
2649
2615 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2650 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2616 // Assignment to a property, using a keyed store IC. 2651 // Assignment to a property, using a keyed store IC.
2617 2652
2618 // Record source code position before IC call. 2653 // Record source code position before IC call.
2619 SetSourcePosition(expr->position()); 2654 SetSourcePosition(expr->position());
2620 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister()); 2655 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2621 DCHECK(StoreDescriptor::ValueRegister().is(r0)); 2656 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2622 2657
2623 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 2658 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2624 CallIC(ic, expr->AssignmentFeedbackId()); 2659 CallIC(ic, expr->AssignmentFeedbackId());
2625 2660
2626 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2661 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2627 context()->Plug(r0); 2662 context()->Plug(r3);
2628 } 2663 }
2629 2664
2630 2665
2631 void FullCodeGenerator::VisitProperty(Property* expr) { 2666 void FullCodeGenerator::VisitProperty(Property* expr) {
2632 Comment cmnt(masm_, "[ Property"); 2667 Comment cmnt(masm_, "[ Property");
2633 Expression* key = expr->key(); 2668 Expression* key = expr->key();
2634 2669
2635 if (key->IsPropertyName()) { 2670 if (key->IsPropertyName()) {
2636 if (!expr->IsSuperAccess()) { 2671 if (!expr->IsSuperAccess()) {
2637 VisitForAccumulatorValue(expr->obj()); 2672 VisitForAccumulatorValue(expr->obj());
2638 __ Move(LoadDescriptor::ReceiverRegister(), r0); 2673 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2639 EmitNamedPropertyLoad(expr); 2674 EmitNamedPropertyLoad(expr);
2640 } else { 2675 } else {
2676 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2677 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2678 __ Push(result_register());
2641 EmitNamedSuperPropertyLoad(expr); 2679 EmitNamedSuperPropertyLoad(expr);
2642 } 2680 }
2643 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2681 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2644 context()->Plug(r0); 2682 context()->Plug(r3);
2645 } else { 2683 } else {
2646 VisitForStackValue(expr->obj()); 2684 if (!expr->IsSuperAccess()) {
2647 VisitForAccumulatorValue(expr->key()); 2685 VisitForStackValue(expr->obj());
2648 __ Move(LoadDescriptor::NameRegister(), r0); 2686 VisitForAccumulatorValue(expr->key());
2649 __ pop(LoadDescriptor::ReceiverRegister()); 2687 __ Move(LoadDescriptor::NameRegister(), r3);
2650 EmitKeyedPropertyLoad(expr); 2688 __ pop(LoadDescriptor::ReceiverRegister());
2651 context()->Plug(r0); 2689 EmitKeyedPropertyLoad(expr);
2690 } else {
2691 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2692 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2693 __ Push(result_register());
2694 VisitForStackValue(expr->key());
2695 EmitKeyedSuperPropertyLoad(expr);
2696 }
2697 context()->Plug(r3);
2652 } 2698 }
2653 } 2699 }
2654 2700
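VisitProperty therefore dispatches four ways on (named vs. keyed) × (normal vs. super): the normal paths go through load ICs, the super paths through runtime calls. A compact sketch of that dispatch with placeholder handlers (illustrative; the real paths are the Emit* helpers above):

    #include <cstdio>

    enum class Key { kNamed, kKeyed };

    // Placeholders for the four load paths.
    void LoadViaNamedIC()     { std::puts("named IC"); }
    void LoadViaKeyedIC()     { std::puts("keyed IC"); }
    void LoadNamedFromSuper() { std::puts("Runtime::kLoadFromSuper"); }
    void LoadKeyedFromSuper() { std::puts("Runtime::kLoadKeyedFromSuper"); }

    void VisitPropertySketch(Key key, bool is_super_access) {
      if (key == Key::kNamed)
        is_super_access ? LoadNamedFromSuper() : LoadViaNamedIC();
      else
        is_super_access ? LoadKeyedFromSuper() : LoadViaKeyedIC();
    }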
2655 2701
2656 void FullCodeGenerator::CallIC(Handle<Code> code, 2702 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2657 TypeFeedbackId ast_id) {
2658 ic_total_count_++; 2703 ic_total_count_++;
2659 // All calls must have a predictable size in full-codegen code to ensure that 2704 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2660 // the debugger can patch them correctly.
2661 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2662 NEVER_INLINE_TARGET_ADDRESS);
2663 } 2705 }
2664 2706
2665 2707
2666 // Code common for calls using the IC. 2708 // Code common for calls using the IC.
2667 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { 2709 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2668 Expression* callee = expr->expression(); 2710 Expression* callee = expr->expression();
2669 2711
2670 CallICState::CallType call_type = 2712 CallICState::CallType call_type =
2671 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD; 2713 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2672 2714
2673 // Get the target function. 2715 // Get the target function.
2674 if (call_type == CallICState::FUNCTION) { 2716 if (call_type == CallICState::FUNCTION) {
2675 { StackValueContext context(this); 2717 {
2718 StackValueContext context(this);
2676 EmitVariableLoad(callee->AsVariableProxy()); 2719 EmitVariableLoad(callee->AsVariableProxy());
2677 PrepareForBailout(callee, NO_REGISTERS); 2720 PrepareForBailout(callee, NO_REGISTERS);
2678 } 2721 }
2679 // Push undefined as receiver. This is patched in the method prologue if it 2722 // Push undefined as receiver. This is patched in the method prologue if it
2680 // is a sloppy mode method. 2723 // is a sloppy mode method.
2681 __ Push(isolate()->factory()->undefined_value()); 2724 __ Push(isolate()->factory()->undefined_value());
2682 } else { 2725 } else {
2683 // Load the function from the receiver. 2726 // Load the function from the receiver.
2684 DCHECK(callee->IsProperty()); 2727 DCHECK(callee->IsProperty());
2685 DCHECK(!callee->AsProperty()->IsSuperAccess()); 2728 DCHECK(!callee->AsProperty()->IsSuperAccess());
2686 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2729 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2687 EmitNamedPropertyLoad(callee->AsProperty()); 2730 EmitNamedPropertyLoad(callee->AsProperty());
2688 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2731 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2689 // Push the target function under the receiver. 2732 // Push the target function under the receiver.
2690 __ ldr(ip, MemOperand(sp, 0)); 2733 __ LoadP(ip, MemOperand(sp, 0));
2691 __ push(ip); 2734 __ push(ip);
2692 __ str(r0, MemOperand(sp, kPointerSize)); 2735 __ StoreP(r3, MemOperand(sp, kPointerSize));
2693 } 2736 }
2694 2737
2695 EmitCall(expr, call_type); 2738 EmitCall(expr, call_type);
2696 } 2739 }
2697 2740
2698 2741
2699 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { 2742 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2700 Expression* callee = expr->expression(); 2743 Expression* callee = expr->expression();
2701 DCHECK(callee->IsProperty()); 2744 DCHECK(callee->IsProperty());
2702 Property* prop = callee->AsProperty(); 2745 Property* prop = callee->AsProperty();
2703 DCHECK(prop->IsSuperAccess()); 2746 DCHECK(prop->IsSuperAccess());
2704 2747
2705 SetSourcePosition(prop->position()); 2748 SetSourcePosition(prop->position());
2706 Literal* key = prop->key()->AsLiteral(); 2749 Literal* key = prop->key()->AsLiteral();
2707 DCHECK(!key->value()->IsSmi()); 2750 DCHECK(!key->value()->IsSmi());
2708 // Load the function from the receiver. 2751 // Load the function from the receiver.
2709 const Register scratch = r1; 2752 const Register scratch = r4;
2710 SuperReference* super_ref = prop->obj()->AsSuperReference(); 2753 SuperReference* super_ref = prop->obj()->AsSuperReference();
2711 EmitLoadHomeObject(super_ref); 2754 EmitLoadHomeObject(super_ref);
2712 __ Push(r0); 2755 __ Push(r3);
2713 VisitForAccumulatorValue(super_ref->this_var()); 2756 VisitForAccumulatorValue(super_ref->this_var());
2714 __ Push(r0); 2757 __ Push(r3);
2715 __ ldr(scratch, MemOperand(sp, kPointerSize)); 2758 __ Push(r3);
2759 __ LoadP(scratch, MemOperand(sp, kPointerSize * 2));
2716 __ Push(scratch); 2760 __ Push(scratch);
2717 __ Push(r0);
2718 __ Push(key->value()); 2761 __ Push(key->value());
2719 2762
2720 // Stack here: 2763 // Stack here:
2721 // - home_object 2764 // - home_object
2722 // - this (receiver) 2765 // - this (receiver)
2723 // - home_object <-- LoadFromSuper will pop here and below. 2766 // - this (receiver) <-- LoadFromSuper will pop here and below.
2724 // - this (receiver) 2767 // - home_object
2725 // - key 2768 // - key
2726 __ CallRuntime(Runtime::kLoadFromSuper, 3); 2769 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2727 2770
2728 // Replace home_object with target function. 2771 // Replace home_object with target function.
2729 __ str(r0, MemOperand(sp, kPointerSize)); 2772 __ StoreP(r3, MemOperand(sp, kPointerSize));
2730 2773
2731 // Stack here: 2774 // Stack here:
2732 // - target function 2775 // - target function
2733 // - this (receiver) 2776 // - this (receiver)
2734 EmitCall(expr, CallICState::METHOD); 2777 EmitCall(expr, CallICState::METHOD);
2735 } 2778 }
2736 2779
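The push choreography above is easiest to check as plain stack operations: the receiver is pushed twice so Runtime::kLoadFromSuper can consume (this, home_object, key) while a receiver copy survives underneath, and the surviving home_object slot is then overwritten with the target function. A toy model (std::vector as the stack, strings as tagged values; illustrative only):

    #include <cassert>
    #include <string>
    #include <vector>

    int main() {
      std::vector<std::string> sp;            // back() is the stack top
      sp.push_back("home_object");
      sp.push_back("this");
      sp.push_back("this");                   // extra copy for the runtime
      sp.push_back(sp[sp.size() - 3]);        // scratch reload: home_object
      sp.push_back("key");
      sp.resize(sp.size() - 3);               // kLoadFromSuper pops 3 args
      sp[sp.size() - 2] = "target function";  // StoreP(r3, sp + kPointerSize)
      assert(sp.back() == "this");            // [target, this]: EmitCall-ready
      return 0;
    }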
2737 2780
2738 // Code common for calls using the IC. 2781 // Code common for calls using the IC.
2739 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2782 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2740 Expression* key) {
2741 // Load the key. 2783 // Load the key.
2742 VisitForAccumulatorValue(key); 2784 VisitForAccumulatorValue(key);
2743 2785
2744 Expression* callee = expr->expression(); 2786 Expression* callee = expr->expression();
2745 2787
2746 // Load the function from the receiver. 2788 // Load the function from the receiver.
2747 DCHECK(callee->IsProperty()); 2789 DCHECK(callee->IsProperty());
2748 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2790 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2749 __ Move(LoadDescriptor::NameRegister(), r0); 2791 __ Move(LoadDescriptor::NameRegister(), r3);
2750 EmitKeyedPropertyLoad(callee->AsProperty()); 2792 EmitKeyedPropertyLoad(callee->AsProperty());
2751 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2793 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2752 2794
2753 // Push the target function under the receiver. 2795 // Push the target function under the receiver.
2754 __ ldr(ip, MemOperand(sp, 0)); 2796 __ LoadP(ip, MemOperand(sp, 0));
2755 __ push(ip); 2797 __ push(ip);
2756 __ str(r0, MemOperand(sp, kPointerSize)); 2798 __ StoreP(r3, MemOperand(sp, kPointerSize));
2757 2799
2758 EmitCall(expr, CallICState::METHOD); 2800 EmitCall(expr, CallICState::METHOD);
2759 } 2801 }
2760 2802
2761 2803
2804 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2805 Expression* callee = expr->expression();
2806 DCHECK(callee->IsProperty());
2807 Property* prop = callee->AsProperty();
2808 DCHECK(prop->IsSuperAccess());
2809
2810 SetSourcePosition(prop->position());
2811 // Load the function from the receiver.
2812 const Register scratch = r4;
2813 SuperReference* super_ref = prop->obj()->AsSuperReference();
2814 EmitLoadHomeObject(super_ref);
2815 __ Push(r3);
2816 VisitForAccumulatorValue(super_ref->this_var());
2817 __ Push(r3);
2818 __ Push(r3);
2819 __ LoadP(scratch, MemOperand(sp, kPointerSize * 2));
2820 __ Push(scratch);
2821 VisitForStackValue(prop->key());
2822
2823 // Stack here:
2824 // - home_object
2825 // - this (receiver)
2826 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2827 // - home_object
2828 // - key
2829 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2830
2831 // Replace home_object with target function.
2832 __ StoreP(r3, MemOperand(sp, kPointerSize));
2833
2834 // Stack here:
2835 // - target function
2836 // - this (receiver)
2837 EmitCall(expr, CallICState::METHOD);
2838 }
2839
2840
2762 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) { 2841 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2763 // Load the arguments. 2842 // Load the arguments.
2764 ZoneList<Expression*>* args = expr->arguments(); 2843 ZoneList<Expression*>* args = expr->arguments();
2765 int arg_count = args->length(); 2844 int arg_count = args->length();
2766 { PreservePositionScope scope(masm()->positions_recorder()); 2845 {
2846 PreservePositionScope scope(masm()->positions_recorder());
2767 for (int i = 0; i < arg_count; i++) { 2847 for (int i = 0; i < arg_count; i++) {
2768 VisitForStackValue(args->at(i)); 2848 VisitForStackValue(args->at(i));
2769 } 2849 }
2770 } 2850 }
2771 2851
2772 // Record source position of the IC call. 2852 // Record source position of the IC call.
2773 SetSourcePosition(expr->position()); 2853 SetSourcePosition(expr->position());
2774 Handle<Code> ic = CallIC::initialize_stub( 2854 Handle<Code> ic = CallIC::initialize_stub(isolate(), arg_count, call_type);
2775 isolate(), arg_count, call_type); 2855 __ LoadSmiLiteral(r6, Smi::FromInt(expr->CallFeedbackSlot()));
2776 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); 2856 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2777 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2778 // Don't assign a type feedback id to the IC, since type feedback is provided 2857 // Don't assign a type feedback id to the IC, since type feedback is provided
2779 // by the vector above. 2858 // by the vector above.
2780 CallIC(ic); 2859 CallIC(ic);
2781 2860
2782 RecordJSReturnSite(expr); 2861 RecordJSReturnSite(expr);
2783 // Restore context register. 2862 // Restore context register.
2784 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2863 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2785 context()->DropAndPlug(1, r0); 2864 context()->DropAndPlug(1, r3);
2786 } 2865 }
2787 2866
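EmitCall threads type feedback through a vector slot (r6) instead of attaching an ast id to the IC. A toy monomorphic-to-megamorphic cache showing the kind of state such a slot accumulates (purely illustrative, not V8's CallIC):

    #include <cstdint>
    #include <vector>

    class FeedbackVector {
     public:
      explicit FeedbackVector(int slots) : slots_(slots, 0) {}  // 0 == uninitialized
      void Record(int slot, uintptr_t target) {
        uintptr_t& s = slots_[slot];
        if (s == 0) s = target;                  // first call: monomorphic
        else if (s != target) s = kMegamorphic;  // mixed targets: generalize
      }
     private:
      static constexpr uintptr_t kMegamorphic = ~uintptr_t{0};
      std::vector<uintptr_t> slots_;
    };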
2788 2867
2789 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2868 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2790 // r4: copy of the first argument or undefined if it doesn't exist. 2869 // r7: copy of the first argument or undefined if it doesn't exist.
2791 if (arg_count > 0) { 2870 if (arg_count > 0) {
2792 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize)); 2871 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
2793 } else { 2872 } else {
2794 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); 2873 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
2795 } 2874 }
2796 2875
2797 // r3: the receiver of the enclosing function. 2876 // r6: the receiver of the enclosing function.
2798 int receiver_offset = 2 + info_->scope()->num_parameters(); 2877 int receiver_offset = 2 + info_->scope()->num_parameters();
2799 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize)); 2878 __ LoadP(r6, MemOperand(fp, receiver_offset * kPointerSize), r0);
2800 2879
2801 // r2: strict mode. 2880 // r5: strict mode.
2802 __ mov(r2, Operand(Smi::FromInt(strict_mode()))); 2881 __ LoadSmiLiteral(r5, Smi::FromInt(strict_mode()));
2803 2882
2804 // r1: the start position of the scope the call resides in. 2883 // r4: the start position of the scope the call resides in.
2805 __ mov(r1, Operand(Smi::FromInt(scope()->start_position()))); 2884 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
2806 2885
2807 // Do the runtime call. 2886 // Do the runtime call.
2808 __ Push(r4, r3, r2, r1); 2887 __ Push(r7, r6, r5, r4);
2809 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); 2888 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2810 } 2889 }
2811 2890
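The receiver_offset arithmetic assumes the standard JS frame shape, where two words (the saved frame pointer and the return address) separate fp from the incoming parameters and the receiver sits just above the last declared parameter. A one-liner restating that computation (the frame shape here is an assumption for illustration, not the exact PPC layout):

    #include <cstddef>

    // Word index of the receiver relative to fp: skip saved fp and the
    // return address (2 words), then the declared parameters.
    constexpr ptrdiff_t ReceiverSlot(int num_parameters) {
      return 2 + num_parameters;
    }
    // A two-parameter function reads its receiver at fp + ReceiverSlot(2)
    // words, matching "2 + info_->scope()->num_parameters()" above.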
2812 2891
2813 void FullCodeGenerator::VisitCall(Call* expr) { 2892 void FullCodeGenerator::VisitCall(Call* expr) {
2814 #ifdef DEBUG 2893 #ifdef DEBUG
2815 // We want to verify that RecordJSReturnSite gets called on all paths 2894 // We want to verify that RecordJSReturnSite gets called on all paths
2816 // through this function. Avoid early returns. 2895 // through this function. Avoid early returns.
2817 expr->return_is_recorded_ = false; 2896 expr->return_is_recorded_ = false;
2818 #endif 2897 #endif
2819 2898
2820 Comment cmnt(masm_, "[ Call"); 2899 Comment cmnt(masm_, "[ Call");
2821 Expression* callee = expr->expression(); 2900 Expression* callee = expr->expression();
2822 Call::CallType call_type = expr->GetCallType(isolate()); 2901 Call::CallType call_type = expr->GetCallType(isolate());
2823 2902
2824 if (call_type == Call::POSSIBLY_EVAL_CALL) { 2903 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2825 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval 2904 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2826 // to resolve the function we need to call and the receiver of the 2905 // to resolve the function we need to call and the receiver of the
2827 // call. Then we call the resolved function using the given 2906 // call. Then we call the resolved function using the given
2828 // arguments. 2907 // arguments.
2829 ZoneList<Expression*>* args = expr->arguments(); 2908 ZoneList<Expression*>* args = expr->arguments();
2830 int arg_count = args->length(); 2909 int arg_count = args->length();
2831 2910
2832 { PreservePositionScope pos_scope(masm()->positions_recorder()); 2911 {
2912 PreservePositionScope pos_scope(masm()->positions_recorder());
2833 VisitForStackValue(callee); 2913 VisitForStackValue(callee);
2834 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2914 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2835 __ push(r2); // Reserved receiver slot. 2915 __ push(r5); // Reserved receiver slot.
2836 2916
2837 // Push the arguments. 2917 // Push the arguments.
2838 for (int i = 0; i < arg_count; i++) { 2918 for (int i = 0; i < arg_count; i++) {
2839 VisitForStackValue(args->at(i)); 2919 VisitForStackValue(args->at(i));
2840 } 2920 }
2841 2921
2842 // Push a copy of the function (found below the arguments) and 2922 // Push a copy of the function (found below the arguments) and
2843 // resolve eval. 2923 // resolve eval.
2844 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2924 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2845 __ push(r1); 2925 __ push(r4);
2846 EmitResolvePossiblyDirectEval(arg_count); 2926 EmitResolvePossiblyDirectEval(arg_count);
2847 2927
2848 // The runtime call returns a pair of values in r0 (function) and 2928 // The runtime call returns a pair of values in r3 (function) and
2849 // r1 (receiver). Touch up the stack with the right values. 2929 // r4 (receiver). Touch up the stack with the right values.
2850 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2930 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2851 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); 2931 __ StoreP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
2932
2933 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
2852 } 2934 }
2853 2935
2854 // Record source position for debugger. 2936 // Record source position for debugger.
2855 SetSourcePosition(expr->position()); 2937 SetSourcePosition(expr->position());
2856 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); 2938 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2857 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2939 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2858 __ CallStub(&stub); 2940 __ CallStub(&stub);
2859 RecordJSReturnSite(expr); 2941 RecordJSReturnSite(expr);
2860 // Restore context register. 2942 // Restore context register.
2861 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2943 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2862 context()->DropAndPlug(1, r0); 2944 context()->DropAndPlug(1, r3);
2863 } else if (call_type == Call::GLOBAL_CALL) { 2945 } else if (call_type == Call::GLOBAL_CALL) {
2864 EmitCallWithLoadIC(expr); 2946 EmitCallWithLoadIC(expr);
2865 2947
2866 } else if (call_type == Call::LOOKUP_SLOT_CALL) { 2948 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2867 // Call to a lookup slot (dynamically introduced variable). 2949 // Call to a lookup slot (dynamically introduced variable).
2868 VariableProxy* proxy = callee->AsVariableProxy(); 2950 VariableProxy* proxy = callee->AsVariableProxy();
2869 Label slow, done; 2951 Label slow, done;
2870 2952
2871 { PreservePositionScope scope(masm()->positions_recorder()); 2953 {
2954 PreservePositionScope scope(masm()->positions_recorder());
2872 // Generate code for loading from variables potentially shadowed 2955 // Generate code for loading from variables potentially shadowed
2873 // by eval-introduced variables. 2956 // by eval-introduced variables.
2874 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done); 2957 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2875 } 2958 }
2876 2959
2877 __ bind(&slow); 2960 __ bind(&slow);
2878 // Call the runtime to find the function to call (returned in r0) 2961 // Call the runtime to find the function to call (returned in r3)
2879 // and the object holding it (returned in r1). 2962 // and the object holding it (returned in r4).
2880 DCHECK(!context_register().is(r2)); 2963 DCHECK(!context_register().is(r5));
2881 __ mov(r2, Operand(proxy->name())); 2964 __ mov(r5, Operand(proxy->name()));
2882 __ Push(context_register(), r2); 2965 __ Push(context_register(), r5);
2883 __ CallRuntime(Runtime::kLoadLookupSlot, 2); 2966 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2884 __ Push(r0, r1); // Function, receiver. 2967 __ Push(r3, r4); // Function, receiver.
2968 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
2885 2969
2886 // If fast case code has been generated, emit code to push the 2970 // If fast case code has been generated, emit code to push the
2887 // function and receiver and have the slow path jump around this 2971 // function and receiver and have the slow path jump around this
2888 // code. 2972 // code.
2889 if (done.is_linked()) { 2973 if (done.is_linked()) {
2890 Label call; 2974 Label call;
2891 __ b(&call); 2975 __ b(&call);
2892 __ bind(&done); 2976 __ bind(&done);
2893 // Push function. 2977 // Push function.
2894 __ push(r0); 2978 __ push(r3);
2895 // The receiver is implicitly the global receiver. Indicate this 2979 // The receiver is implicitly the global receiver. Indicate this
2896 // by passing undefined to the call function stub. 2980 // by passing undefined to the call function stub.
2897 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 2981 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2898 __ push(r1); 2982 __ push(r4);
2899 __ bind(&call); 2983 __ bind(&call);
2900 } 2984 }
2901 2985
2902 // The receiver is either the global receiver or an object found 2986 // The receiver is either the global receiver or an object found
2903 // by LoadContextSlot. 2987 // by LoadContextSlot.
2904 EmitCall(expr); 2988 EmitCall(expr);
2905 } else if (call_type == Call::PROPERTY_CALL) { 2989 } else if (call_type == Call::PROPERTY_CALL) {
2906 Property* property = callee->AsProperty(); 2990 Property* property = callee->AsProperty();
2907 bool is_named_call = property->key()->IsPropertyName(); 2991 bool is_named_call = property->key()->IsPropertyName();
2908 // super.x() is handled in EmitCallWithLoadIC. 2992 if (property->IsSuperAccess()) {
2909 if (property->IsSuperAccess() && is_named_call) { 2993 if (is_named_call) {
2910 EmitSuperCallWithLoadIC(expr); 2994 EmitSuperCallWithLoadIC(expr);
2995 } else {
2996 EmitKeyedSuperCallWithLoadIC(expr);
2997 }
2911 } else { 2998 } else {
2912 { 2999 {
2913 PreservePositionScope scope(masm()->positions_recorder()); 3000 PreservePositionScope scope(masm()->positions_recorder());
2914 VisitForStackValue(property->obj()); 3001 VisitForStackValue(property->obj());
2915 } 3002 }
2916 if (is_named_call) { 3003 if (is_named_call) {
2917 EmitCallWithLoadIC(expr); 3004 EmitCallWithLoadIC(expr);
2918 } else { 3005 } else {
2919 EmitKeyedCallWithLoadIC(expr, property->key()); 3006 EmitKeyedCallWithLoadIC(expr, property->key());
2920 } 3007 }
2921 } 3008 }
2922 } else { 3009 } else {
2923 DCHECK(call_type == Call::OTHER_CALL); 3010 DCHECK(call_type == Call::OTHER_CALL);
2924 // Call to an arbitrary expression not handled specially above. 3011 // Call to an arbitrary expression not handled specially above.
2925 { PreservePositionScope scope(masm()->positions_recorder()); 3012 {
3013 PreservePositionScope scope(masm()->positions_recorder());
2926 VisitForStackValue(callee); 3014 VisitForStackValue(callee);
2927 } 3015 }
2928 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 3016 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2929 __ push(r1); 3017 __ push(r4);
2930 // Emit function call. 3018 // Emit function call.
2931 EmitCall(expr); 3019 EmitCall(expr);
2932 } 3020 }
2933 3021
2934 #ifdef DEBUG 3022 #ifdef DEBUG
2935 // RecordJSReturnSite should have been called. 3023 // RecordJSReturnSite should have been called.
2936 DCHECK(expr->return_is_recorded_); 3024 DCHECK(expr->return_is_recorded_);
2937 #endif 3025 #endif
2938 } 3026 }
2939 3027
(...skipping 13 matching lines...)
2953 ZoneList<Expression*>* args = expr->arguments(); 3041 ZoneList<Expression*>* args = expr->arguments();
2954 int arg_count = args->length(); 3042 int arg_count = args->length();
2955 for (int i = 0; i < arg_count; i++) { 3043 for (int i = 0; i < arg_count; i++) {
2956 VisitForStackValue(args->at(i)); 3044 VisitForStackValue(args->at(i));
2957 } 3045 }
2958 3046
2959 // Call the construct call builtin that handles allocation and 3047 // Call the construct call builtin that handles allocation and
2960 // constructor invocation. 3048 // constructor invocation.
2961 SetSourcePosition(expr->position()); 3049 SetSourcePosition(expr->position());
2962 3050
2963 // Load function and argument count into r1 and r0. 3051 // Load function and argument count into r4 and r3.
2964 __ mov(r0, Operand(arg_count)); 3052 __ mov(r3, Operand(arg_count));
2965 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 3053 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
2966 3054
2967 // Record call targets in unoptimized code. 3055 // Record call targets in unoptimized code.
2968 if (FLAG_pretenuring_call_new) { 3056 if (FLAG_pretenuring_call_new) {
2969 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); 3057 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2970 DCHECK(expr->AllocationSiteFeedbackSlot() == 3058 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2971 expr->CallNewFeedbackSlot() + 1); 3059 expr->CallNewFeedbackSlot() + 1);
2972 } 3060 }
2973 3061
2974 __ Move(r2, FeedbackVector()); 3062 __ Move(r5, FeedbackVector());
2975 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); 3063 __ LoadSmiLiteral(r6, Smi::FromInt(expr->CallNewFeedbackSlot()));
2976 3064
2977 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); 3065 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2978 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); 3066 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2979 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 3067 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2980 context()->Plug(r0); 3068 context()->Plug(r3);
2981 } 3069 }
2982 3070
2983 3071
2984 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 3072 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2985 ZoneList<Expression*>* args = expr->arguments(); 3073 ZoneList<Expression*>* args = expr->arguments();
2986 DCHECK(args->length() == 1); 3074 DCHECK(args->length() == 1);
2987 3075
2988 VisitForAccumulatorValue(args->at(0)); 3076 VisitForAccumulatorValue(args->at(0));
2989 3077
2990 Label materialize_true, materialize_false; 3078 Label materialize_true, materialize_false;
2991 Label* if_true = NULL; 3079 Label* if_true = NULL;
2992 Label* if_false = NULL; 3080 Label* if_false = NULL;
2993 Label* fall_through = NULL; 3081 Label* fall_through = NULL;
2994 context()->PrepareTest(&materialize_true, &materialize_false, 3082 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2995 &if_true, &if_false, &fall_through); 3083 &if_false, &fall_through);
2996 3084
2997 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3085 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2998 __ SmiTst(r0); 3086 __ TestIfSmi(r3, r0);
2999 Split(eq, if_true, if_false, fall_through); 3087 Split(eq, if_true, if_false, fall_through, cr0);
3000 3088
3001 context()->Plug(if_true, if_false); 3089 context()->Plug(if_true, if_false);
3002 } 3090 }
3003 3091
3004 3092
3005 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 3093 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3006 ZoneList<Expression*>* args = expr->arguments(); 3094 ZoneList<Expression*>* args = expr->arguments();
3007 DCHECK(args->length() == 1); 3095 DCHECK(args->length() == 1);
3008 3096
3009 VisitForAccumulatorValue(args->at(0)); 3097 VisitForAccumulatorValue(args->at(0));
3010 3098
3011 Label materialize_true, materialize_false; 3099 Label materialize_true, materialize_false;
3012 Label* if_true = NULL; 3100 Label* if_true = NULL;
3013 Label* if_false = NULL; 3101 Label* if_false = NULL;
3014 Label* fall_through = NULL; 3102 Label* fall_through = NULL;
3015 context()->PrepareTest(&materialize_true, &materialize_false, 3103 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3016 &if_true, &if_false, &fall_through); 3104 &if_false, &fall_through);
3017 3105
3018 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3106 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3019 __ NonNegativeSmiTst(r0); 3107 __ TestIfPositiveSmi(r3, r0);
3020 Split(eq, if_true, if_false, fall_through); 3108 Split(eq, if_true, if_false, fall_through, cr0);
3021 3109
3022 context()->Plug(if_true, if_false); 3110 context()->Plug(if_true, if_false);
3023 } 3111 }
3024 3112
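TestIfSmi and TestIfPositiveSmi compile down to a single andi.-plus-branch because of pointer tagging: a Smi has a clear low bit, and a non-negative Smi additionally has a clear sign bit; the trailing cr0 argument to Split reflects that andi. records its result in condition-register field 0. Both predicates in standalone C++ (tag layout per V8's convention; constants written out for illustration):

    #include <cstdint>

    // Low bit 0 => Smi, 1 => heap-object pointer.
    constexpr intptr_t kSmiTagMask = 1;
    constexpr intptr_t kSignMask =
        static_cast<intptr_t>(uintptr_t{1} << (8 * sizeof(intptr_t) - 1));

    bool IsSmi(intptr_t value) {          // TestIfSmi
      return (value & kSmiTagMask) == 0;
    }

    bool IsPositiveSmi(intptr_t value) {  // TestIfPositiveSmi
      return (value & (kSmiTagMask | kSignMask)) == 0;
    }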
3025 3113
3026 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 3114 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3027 ZoneList<Expression*>* args = expr->arguments(); 3115 ZoneList<Expression*>* args = expr->arguments();
3028 DCHECK(args->length() == 1); 3116 DCHECK(args->length() == 1);
3029 3117
3030 VisitForAccumulatorValue(args->at(0)); 3118 VisitForAccumulatorValue(args->at(0));
3031 3119
3032 Label materialize_true, materialize_false; 3120 Label materialize_true, materialize_false;
3033 Label* if_true = NULL; 3121 Label* if_true = NULL;
3034 Label* if_false = NULL; 3122 Label* if_false = NULL;
3035 Label* fall_through = NULL; 3123 Label* fall_through = NULL;
3036 context()->PrepareTest(&materialize_true, &materialize_false, 3124 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3037 &if_true, &if_false, &fall_through); 3125 &if_false, &fall_through);
3038 3126
3039 __ JumpIfSmi(r0, if_false); 3127 __ JumpIfSmi(r3, if_false);
3040 __ LoadRoot(ip, Heap::kNullValueRootIndex); 3128 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3041 __ cmp(r0, ip); 3129 __ cmp(r3, ip);
3042 __ b(eq, if_true); 3130 __ beq(if_true);
3043 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 3131 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
3044 // Undetectable objects behave like undefined when tested with typeof. 3132 // Undetectable objects behave like undefined when tested with typeof.
3045 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); 3133 __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
3046 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3134 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3047 __ b(ne, if_false); 3135 __ bne(if_false, cr0);
3048 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); 3136 __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
3049 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 3137 __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3050 __ b(lt, if_false); 3138 __ blt(if_false);
3051 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); 3139 __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3052 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3140 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3053 Split(le, if_true, if_false, fall_through); 3141 Split(le, if_true, if_false, fall_through);
3054 3142
3055 context()->Plug(if_true, if_false); 3143 context()->Plug(if_true, if_false);
3056 } 3144 }
3057 3145
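The IsObject test composes four checks: not a Smi, or exactly null, not marked undetectable in the map's bit field, and an instance type inside the non-callable object range. The same decision ladder in plain C++ (the enumerators and bit position are illustrative stand-ins for V8's values):

    enum InstanceType { FIRST_NONCALLABLE = 100, LAST_NONCALLABLE = 200 };
    constexpr unsigned kIsUndetectableBit = 1u << 4;  // position assumed

    struct Value {
      bool is_smi;
      bool is_null;
      unsigned map_bit_field;
      int instance_type;
    };

    bool IsObject(const Value& v) {
      if (v.is_smi) return false;
      if (v.is_null) return true;  // typeof null is "object"
      if (v.map_bit_field & kIsUndetectableBit) return false;
      return v.instance_type >= FIRST_NONCALLABLE &&
             v.instance_type <= LAST_NONCALLABLE;
    }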
3058 3146
3059 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 3147 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3060 ZoneList<Expression*>* args = expr->arguments(); 3148 ZoneList<Expression*>* args = expr->arguments();
3061 DCHECK(args->length() == 1); 3149 DCHECK(args->length() == 1);
3062 3150
3063 VisitForAccumulatorValue(args->at(0)); 3151 VisitForAccumulatorValue(args->at(0));
3064 3152
3065 Label materialize_true, materialize_false; 3153 Label materialize_true, materialize_false;
3066 Label* if_true = NULL; 3154 Label* if_true = NULL;
3067 Label* if_false = NULL; 3155 Label* if_false = NULL;
3068 Label* fall_through = NULL; 3156 Label* fall_through = NULL;
3069 context()->PrepareTest(&materialize_true, &materialize_false, 3157 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3070 &if_true, &if_false, &fall_through); 3158 &if_false, &fall_through);
3071 3159
3072 __ JumpIfSmi(r0, if_false); 3160 __ JumpIfSmi(r3, if_false);
3073 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 3161 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3074 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3162 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3075 Split(ge, if_true, if_false, fall_through); 3163 Split(ge, if_true, if_false, fall_through);
3076 3164
3077 context()->Plug(if_true, if_false); 3165 context()->Plug(if_true, if_false);
3078 } 3166 }
3079 3167
3080 3168
3081 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 3169 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3082 ZoneList<Expression*>* args = expr->arguments(); 3170 ZoneList<Expression*>* args = expr->arguments();
3083 DCHECK(args->length() == 1); 3171 DCHECK(args->length() == 1);
3084 3172
3085 VisitForAccumulatorValue(args->at(0)); 3173 VisitForAccumulatorValue(args->at(0));
3086 3174
3087 Label materialize_true, materialize_false; 3175 Label materialize_true, materialize_false;
3088 Label* if_true = NULL; 3176 Label* if_true = NULL;
3089 Label* if_false = NULL; 3177 Label* if_false = NULL;
3090 Label* fall_through = NULL; 3178 Label* fall_through = NULL;
3091 context()->PrepareTest(&materialize_true, &materialize_false, 3179 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3092 &if_true, &if_false, &fall_through); 3180 &if_false, &fall_through);
3093 3181
3094 __ JumpIfSmi(r0, if_false); 3182 __ JumpIfSmi(r3, if_false);
3095 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3183 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3096 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 3184 __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
3097 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3185 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3098 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3186 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3099 Split(ne, if_true, if_false, fall_through); 3187 Split(ne, if_true, if_false, fall_through, cr0);
3100 3188
3101 context()->Plug(if_true, if_false); 3189 context()->Plug(if_true, if_false);
3102 } 3190 }
3103 3191
3104 3192
3105 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 3193 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3106 CallRuntime* expr) { 3194 CallRuntime* expr) {
3107 ZoneList<Expression*>* args = expr->arguments(); 3195 ZoneList<Expression*>* args = expr->arguments();
3108 DCHECK(args->length() == 1); 3196 DCHECK(args->length() == 1);
3109 3197
3110 VisitForAccumulatorValue(args->at(0)); 3198 VisitForAccumulatorValue(args->at(0));
3111 3199
3112 Label materialize_true, materialize_false, skip_lookup; 3200 Label materialize_true, materialize_false, skip_lookup;
3113 Label* if_true = NULL; 3201 Label* if_true = NULL;
3114 Label* if_false = NULL; 3202 Label* if_false = NULL;
3115 Label* fall_through = NULL; 3203 Label* fall_through = NULL;
3116 context()->PrepareTest(&materialize_true, &materialize_false, 3204 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3117 &if_true, &if_false, &fall_through); 3205 &if_false, &fall_through);
3118 3206
3119 __ AssertNotSmi(r0); 3207 __ AssertNotSmi(r3);
3120 3208
3121 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3209 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3122 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); 3210 __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
3123 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3211 __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3124 __ b(ne, &skip_lookup); 3212 __ bne(&skip_lookup, cr0);
3125 3213
3126 // Check for fast case object. Generate false result for slow case object. 3214 // Check for fast case object. Generate false result for slow case object.
3127 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 3215 __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
3128 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 3216 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3129 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 3217 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3130 __ cmp(r2, ip); 3218 __ cmp(r5, ip);
3131 __ b(eq, if_false); 3219 __ beq(if_false);
3132 3220
3133 // Look for valueOf name in the descriptor array, and indicate false if 3221 // Look for valueOf name in the descriptor array, and indicate false if
3134 // found. Since we omit an enumeration index check, if it is added via a 3222 // found. Since we omit an enumeration index check, if it is added via a
3135 // transition that shares its descriptor array, this is a false positive. 3223 // transition that shares its descriptor array, this is a false positive.
3136 Label entry, loop, done; 3224 Label entry, loop, done;
3137 3225
3138 // Skip loop if no descriptors are valid. 3226 // Skip loop if no descriptors are valid.
3139 __ NumberOfOwnDescriptors(r3, r1); 3227 __ NumberOfOwnDescriptors(r6, r4);
3140 __ cmp(r3, Operand::Zero()); 3228 __ cmpi(r6, Operand::Zero());
3141 __ b(eq, &done); 3229 __ beq(&done);
3142 3230
3143 __ LoadInstanceDescriptors(r1, r4); 3231 __ LoadInstanceDescriptors(r4, r7);
3144 // r4: descriptor array. 3232 // r7: descriptor array.
3145 // r3: valid entries in the descriptor array. 3233 // r6: valid entries in the descriptor array.
3146 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); 3234 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3147 __ mul(r3, r3, ip); 3235 __ Mul(r6, r6, ip);
3148 // Calculate location of the first key name. 3236 // Calculate location of the first key name.
3149 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); 3237 __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3150 // Calculate the end of the descriptor array. 3238 // Calculate the end of the descriptor array.
3151 __ mov(r2, r4); 3239 __ mr(r5, r7);
3152 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2)); 3240 __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
3241 __ add(r5, r5, ip);
3153 3242
3154 // Loop through all the keys in the descriptor array. If one of these is the 3243 // Loop through all the keys in the descriptor array. If one of these is the
3155 // string "valueOf" the result is false. 3244 // string "valueOf" the result is false.
3156 // The use of ip to store the valueOf string assumes that it is not otherwise 3245 // The use of ip to store the valueOf string assumes that it is not otherwise
3157 // used in the loop below. 3246 // used in the loop below.
3158 __ mov(ip, Operand(isolate()->factory()->value_of_string())); 3247 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3159 __ jmp(&entry); 3248 __ b(&entry);
3160 __ bind(&loop); 3249 __ bind(&loop);
3161 __ ldr(r3, MemOperand(r4, 0)); 3250 __ LoadP(r6, MemOperand(r7, 0));
3162 __ cmp(r3, ip); 3251 __ cmp(r6, ip);
3163 __ b(eq, if_false); 3252 __ beq(if_false);
3164 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); 3253 __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3165 __ bind(&entry); 3254 __ bind(&entry);
3166 __ cmp(r4, Operand(r2)); 3255 __ cmp(r7, r5);
3167 __ b(ne, &loop); 3256 __ bne(&loop);
3168 3257
3169 __ bind(&done); 3258 __ bind(&done);
3170 3259
3171 // Set the bit in the map to indicate that there is no local valueOf field. 3260 // Set the bit in the map to indicate that there is no local valueOf field.
3172 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 3261 __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3173 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3262 __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3174 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 3263 __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3175 3264
3176 __ bind(&skip_lookup); 3265 __ bind(&skip_lookup);
3177 3266
3178 // If a valueOf property is not found on the object, check that its 3267 // If a valueOf property is not found on the object, check that its
3179 // prototype is the unmodified String prototype. If it is not, the result is false. 3268 // prototype is the unmodified String prototype. If it is not, the result is false.
3180 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); 3269 __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
3181 __ JumpIfSmi(r2, if_false); 3270 __ JumpIfSmi(r5, if_false);
3182 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 3271 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3183 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3272 __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3184 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); 3273 __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
3185 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); 3274 __ LoadP(r6,
3186 __ cmp(r2, r3); 3275 ContextOperand(r6, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3276 __ cmp(r5, r6);
3187 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3277 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3188 Split(eq, if_true, if_false, fall_through); 3278 Split(eq, if_true, if_false, fall_through);
3189 3279
3190 context()->Plug(if_true, if_false); 3280 context()->Plug(if_true, if_false);
3191 } 3281 }
3192 3282
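The valueOf search is a bounded linear scan: keys sit at a fixed stride inside the descriptor array, so the loop steps one descriptor at a time from the first key to a precomputed end pointer, and a hit routes straight to if_false. The same loop shape in portable C++ (the stride constant is an illustrative stand-in):

    #include <cstring>

    constexpr int kDescriptorSize = 3;  // stand-in for the real stride

    bool HasValueOf(const char* const* first_key, int valid_descriptors) {
      const char* const* end = first_key + valid_descriptors * kDescriptorSize;
      for (const char* const* p = first_key; p != end; p += kDescriptorSize) {
        if (std::strcmp(*p, "valueOf") == 0) return true;  // => result false
      }
      return false;
    }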
3193 3283
3194 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 3284 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3195 ZoneList<Expression*>* args = expr->arguments(); 3285 ZoneList<Expression*>* args = expr->arguments();
3196 DCHECK(args->length() == 1); 3286 DCHECK(args->length() == 1);
3197 3287
3198 VisitForAccumulatorValue(args->at(0)); 3288 VisitForAccumulatorValue(args->at(0));
3199 3289
3200 Label materialize_true, materialize_false; 3290 Label materialize_true, materialize_false;
3201 Label* if_true = NULL; 3291 Label* if_true = NULL;
3202 Label* if_false = NULL; 3292 Label* if_false = NULL;
3203 Label* fall_through = NULL; 3293 Label* fall_through = NULL;
3204 context()->PrepareTest(&materialize_true, &materialize_false, 3294 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3205 &if_true, &if_false, &fall_through); 3295 &if_false, &fall_through);
3206 3296
3207 __ JumpIfSmi(r0, if_false); 3297 __ JumpIfSmi(r3, if_false);
3208 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 3298 __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
3209 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3299 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3210 Split(eq, if_true, if_false, fall_through); 3300 Split(eq, if_true, if_false, fall_through);
3211 3301
3212 context()->Plug(if_true, if_false); 3302 context()->Plug(if_true, if_false);
3213 } 3303 }
3214 3304
3215 3305
3216 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 3306 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3217 ZoneList<Expression*>* args = expr->arguments(); 3307 ZoneList<Expression*>* args = expr->arguments();
3218 DCHECK(args->length() == 1); 3308 DCHECK(args->length() == 1);
3219 3309
3220 VisitForAccumulatorValue(args->at(0)); 3310 VisitForAccumulatorValue(args->at(0));
3221 3311
3222 Label materialize_true, materialize_false; 3312 Label materialize_true, materialize_false;
3223 Label* if_true = NULL; 3313 Label* if_true = NULL;
3224 Label* if_false = NULL; 3314 Label* if_false = NULL;
3225 Label* fall_through = NULL; 3315 Label* fall_through = NULL;
3226 context()->PrepareTest(&materialize_true, &materialize_false, 3316 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3227 &if_true, &if_false, &fall_through); 3317 &if_false, &fall_through);
3228 3318
3229 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); 3319 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3230 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 3320 #if V8_TARGET_ARCH_PPC64
3231 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 3321 __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3232 __ cmp(r2, Operand(0x80000000)); 3322 __ li(r5, Operand(1));
3233 __ cmp(r1, Operand(0x00000000), eq); 3323 __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
3324 __ cmp(r4, r5);
3325 #else
3326 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3327 __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3328 Label skip;
3329 __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3330 __ cmp(r5, r0);
3331 __ bne(&skip);
3332 __ cmpi(r4, Operand::Zero());
3333 __ bind(&skip);
3334 #endif
3234 3335
3235 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3336 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3236 Split(eq, if_true, if_false, fall_through); 3337 Split(eq, if_true, if_false, fall_through);
3237 3338
3238 context()->Plug(if_true, if_false); 3339 context()->Plug(if_true, if_false);
3239 } 3340 }
3240 3341
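Minus zero has to be caught bit-exactly: its IEEE-754 pattern is the sign bit over 63 zeros, which is why the 64-bit path materializes 0x8000000000000000 with a 1-bit rotate and the 32-bit path compares the exponent word against 0x80000000 and the mantissa word against zero. The same predicate host-side:

    #include <cstdint>
    #include <cstring>

    bool IsMinusZero(double x) {
      uint64_t bits;
      std::memcpy(&bits, &x, sizeof(bits));  // type-pun safely via memcpy
      return bits == 0x8000000000000000ull;  // sign bit only
    }
    // IsMinusZero(-0.0) is true, IsMinusZero(0.0) is false; a plain
    // "x == 0.0" comparison cannot tell the two apart.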
3241 3342
3242 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 3343 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3243 ZoneList<Expression*>* args = expr->arguments(); 3344 ZoneList<Expression*>* args = expr->arguments();
3244 DCHECK(args->length() == 1); 3345 DCHECK(args->length() == 1);
3245 3346
3246 VisitForAccumulatorValue(args->at(0)); 3347 VisitForAccumulatorValue(args->at(0));
3247 3348
3248 Label materialize_true, materialize_false; 3349 Label materialize_true, materialize_false;
3249 Label* if_true = NULL; 3350 Label* if_true = NULL;
3250 Label* if_false = NULL; 3351 Label* if_false = NULL;
3251 Label* fall_through = NULL; 3352 Label* fall_through = NULL;
3252 context()->PrepareTest(&materialize_true, &materialize_false, 3353 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3253 &if_true, &if_false, &fall_through); 3354 &if_false, &fall_through);
3254 3355
3255 __ JumpIfSmi(r0, if_false); 3356 __ JumpIfSmi(r3, if_false);
3256 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 3357 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3257 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3358 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3258 Split(eq, if_true, if_false, fall_through); 3359 Split(eq, if_true, if_false, fall_through);
3259 3360
3260 context()->Plug(if_true, if_false); 3361 context()->Plug(if_true, if_false);
3261 } 3362 }
3262 3363
3263 3364
3264 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 3365 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3265 ZoneList<Expression*>* args = expr->arguments(); 3366 ZoneList<Expression*>* args = expr->arguments();
3266 DCHECK(args->length() == 1); 3367 DCHECK(args->length() == 1);
3267 3368
3268 VisitForAccumulatorValue(args->at(0)); 3369 VisitForAccumulatorValue(args->at(0));
3269 3370
3270 Label materialize_true, materialize_false; 3371 Label materialize_true, materialize_false;
3271 Label* if_true = NULL; 3372 Label* if_true = NULL;
3272 Label* if_false = NULL; 3373 Label* if_false = NULL;
3273 Label* fall_through = NULL; 3374 Label* fall_through = NULL;
3274 context()->PrepareTest(&materialize_true, &materialize_false, 3375 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3275 &if_true, &if_false, &fall_through); 3376 &if_false, &fall_through);
3276 3377
3277 __ JumpIfSmi(r0, if_false); 3378 __ JumpIfSmi(r3, if_false);
3278 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 3379 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3279 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3380 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3280 Split(eq, if_true, if_false, fall_through); 3381 Split(eq, if_true, if_false, fall_through);
3281 3382
3282 context()->Plug(if_true, if_false); 3383 context()->Plug(if_true, if_false);
3283 } 3384 }
3284 3385
3285 3386
3286
3287 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 3387 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3288 DCHECK(expr->arguments()->length() == 0); 3388 DCHECK(expr->arguments()->length() == 0);
3289 3389
3290 Label materialize_true, materialize_false; 3390 Label materialize_true, materialize_false;
3291 Label* if_true = NULL; 3391 Label* if_true = NULL;
3292 Label* if_false = NULL; 3392 Label* if_false = NULL;
3293 Label* fall_through = NULL; 3393 Label* fall_through = NULL;
3294 context()->PrepareTest(&materialize_true, &materialize_false, 3394 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3295 &if_true, &if_false, &fall_through); 3395 &if_false, &fall_through);
3296 3396
3297 // Get the frame pointer for the calling frame. 3397 // Get the frame pointer for the calling frame.
3298 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3398 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3299 3399
3300 // Skip the arguments adaptor frame if it exists. 3400 // Skip the arguments adaptor frame if it exists.
3301 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3401 Label check_frame_marker;
3302 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3402 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
3303 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq); 3403 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3404 __ bne(&check_frame_marker);
3405 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
3304 3406
3305 // Check the marker in the calling frame. 3407 // Check the marker in the calling frame.
3306 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); 3408 __ bind(&check_frame_marker);
3307 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 3409 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
3410 STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
3411 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
3308 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3412 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3309 Split(eq, if_true, if_false, fall_through); 3413 Split(eq, if_true, if_false, fall_through);
3310 3414
3311 context()->Plug(if_true, if_false); 3415 context()->Plug(if_true, if_false);
3312 } 3416 }
3313 3417
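IsConstructCall walks one frame up the fp chain, hops over an arguments-adaptor frame if one is present, and then compares the frame marker against StackFrame::CONSTRUCT. A pointer-chasing sketch of the walk (the Frame record and enum are illustrative, not V8's frame layout):

    enum FrameType { kOrdinary, kArgumentsAdaptor, kConstruct };

    struct Frame {
      const Frame* caller_fp;  // read at kCallerFPOffset
      FrameType type;          // stands in for the context/marker slots
    };

    bool IsConstructCall(const Frame* fp) {
      const Frame* caller = fp->caller_fp;
      if (caller->type == kArgumentsAdaptor)  // skip the adaptor frame
        caller = caller->caller_fp;
      return caller->type == kConstruct;      // the marker check
    }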
3314 3418
3315 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 3419 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3316 ZoneList<Expression*>* args = expr->arguments(); 3420 ZoneList<Expression*>* args = expr->arguments();
3317 DCHECK(args->length() == 2); 3421 DCHECK(args->length() == 2);
3318 3422
3319 // Load the two objects into registers and perform the comparison. 3423 // Load the two objects into registers and perform the comparison.
3320 VisitForStackValue(args->at(0)); 3424 VisitForStackValue(args->at(0));
3321 VisitForAccumulatorValue(args->at(1)); 3425 VisitForAccumulatorValue(args->at(1));
3322 3426
3323 Label materialize_true, materialize_false; 3427 Label materialize_true, materialize_false;
3324 Label* if_true = NULL; 3428 Label* if_true = NULL;
3325 Label* if_false = NULL; 3429 Label* if_false = NULL;
3326 Label* fall_through = NULL; 3430 Label* fall_through = NULL;
3327 context()->PrepareTest(&materialize_true, &materialize_false, 3431 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3328 &if_true, &if_false, &fall_through); 3432 &if_false, &fall_through);
3329 3433
3330 __ pop(r1); 3434 __ pop(r4);
3331 __ cmp(r0, r1); 3435 __ cmp(r3, r4);
3332 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3436 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3333 Split(eq, if_true, if_false, fall_through); 3437 Split(eq, if_true, if_false, fall_through);
3334 3438
3335 context()->Plug(if_true, if_false); 3439 context()->Plug(if_true, if_false);
3336 } 3440 }
3337 3441
3338 3442
3339 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3443 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3340 ZoneList<Expression*>* args = expr->arguments(); 3444 ZoneList<Expression*>* args = expr->arguments();
3341 DCHECK(args->length() == 1); 3445 DCHECK(args->length() == 1);
3342 3446
3343 // ArgumentsAccessStub expects the key in edx and the formal 3447 // ArgumentsAccessStub expects the key in r4 and the formal

3344 // parameter count in r0. 3448 // parameter count in r3.
3345 VisitForAccumulatorValue(args->at(0)); 3449 VisitForAccumulatorValue(args->at(0));
3346 __ mov(r1, r0); 3450 __ mr(r4, r3);
3347 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3451 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3348 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); 3452 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3349 __ CallStub(&stub); 3453 __ CallStub(&stub);
3350 context()->Plug(r0); 3454 context()->Plug(r3);
3351 } 3455 }
3352 3456
3353 3457
3354 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 3458 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3355 DCHECK(expr->arguments()->length() == 0); 3459 DCHECK(expr->arguments()->length() == 0);
3356 3460 Label exit;
3357 // Get the number of formal parameters. 3461 // Get the number of formal parameters.
3358 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3462 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3359 3463
3360 // Check if the calling frame is an arguments adaptor frame. 3464 // Check if the calling frame is an arguments adaptor frame.
3361 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3465 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3362 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3466 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
3363 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3467 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3468 __ bne(&exit);
3364 3469
3365 // Arguments adaptor case: Read the arguments length from the 3470 // Arguments adaptor case: Read the arguments length from the
3366 // adaptor frame. 3471 // adaptor frame.
3367 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq); 3472 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
3368 3473
3369 context()->Plug(r0); 3474 __ bind(&exit);
3475 context()->Plug(r3);
3370 } 3476 }
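An arguments adaptor frame is interposed only when a call site's actual argument count differs from the callee's formal count, which is why the formal count is the correct fallback here. In outline (same pseudocode conventions as the sketch above; kLengthOffset is ArgumentsAdaptorFrameConstants::kLengthOffset):

    Object* n = Smi(num_formal_parameters);   // no adaptor: actuals == formals
    Address caller_fp = load(fp + kCallerFPOffset);
    if (load(caller_fp + kContextOffset) == Smi(StackFrame::ARGUMENTS_ADAPTOR)) {
      n = load(caller_fp + kLengthOffset);    // actual count recorded by the adaptor
    }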
3371 3477
3372 3478
3373 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 3479 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3374 ZoneList<Expression*>* args = expr->arguments(); 3480 ZoneList<Expression*>* args = expr->arguments();
3375 DCHECK(args->length() == 1); 3481 DCHECK(args->length() == 1);
3376 Label done, null, function, non_function_constructor; 3482 Label done, null, function, non_function_constructor;
3377 3483
3378 VisitForAccumulatorValue(args->at(0)); 3484 VisitForAccumulatorValue(args->at(0));
3379 3485
3380 // If the object is a smi, we return null. 3486 // If the object is a smi, we return null.
3381 __ JumpIfSmi(r0, &null); 3487 __ JumpIfSmi(r3, &null);
3382 3488
3383 // Check that the object is a JS object but take special care of JS 3489 // Check that the object is a JS object but take special care of JS
3384 // functions to make sure they have 'Function' as their class. 3490 // functions to make sure they have 'Function' as their class.
3385 // Assume that there are only two callable types, and one of them is at 3491 // Assume that there are only two callable types, and one of them is at
3386 // either end of the type range for JS object types. Saves extra comparisons. 3492 // either end of the type range for JS object types. Saves extra comparisons.
3387 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 3493 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3388 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE); 3494 __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
3389 // Map is now in r0. 3495 // Map is now in r3.
3390 __ b(lt, &null); 3496 __ blt(&null);
3391 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == 3497 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3392 FIRST_SPEC_OBJECT_TYPE + 1); 3498 FIRST_SPEC_OBJECT_TYPE + 1);
3393 __ b(eq, &function); 3499 __ beq(&function);
3394 3500
3395 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); 3501 __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
3396 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == 3502 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1);
3397 LAST_SPEC_OBJECT_TYPE - 1); 3503 __ beq(&function);
3398 __ b(eq, &function);
3399 // Assume that there is no larger type. 3504 // Assume that there is no larger type.
3400 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); 3505 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3401 3506
3402 // Check if the constructor in the map is a JS function. 3507 // Check if the constructor in the map is a JS function.
3403 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); 3508 __ LoadP(r3, FieldMemOperand(r3, Map::kConstructorOffset));
3404 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3509 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
3405 __ b(ne, &non_function_constructor); 3510 __ bne(&non_function_constructor);
3406 3511
3407 // r0 now contains the constructor function. Grab the 3512 // r3 now contains the constructor function. Grab the
3408 // instance class name from there. 3513 // instance class name from there.
3409 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 3514 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3410 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); 3515 __ LoadP(r3,
3516 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3411 __ b(&done); 3517 __ b(&done);
3412 3518
3413 // Functions have class 'Function'. 3519 // Functions have class 'Function'.
3414 __ bind(&function); 3520 __ bind(&function);
3415 __ LoadRoot(r0, Heap::kFunction_stringRootIndex); 3521 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3416 __ jmp(&done); 3522 __ b(&done);
3417 3523
3418 // Objects with a non-function constructor have class 'Object'. 3524 // Objects with a non-function constructor have class 'Object'.
3419 __ bind(&non_function_constructor); 3525 __ bind(&non_function_constructor);
3420 __ LoadRoot(r0, Heap::kObject_stringRootIndex); 3526 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3421 __ jmp(&done); 3527 __ b(&done);
3422 3528
3423 // Non-JS objects have class null. 3529 // Non-JS objects have class null.
3424 __ bind(&null); 3530 __ bind(&null);
3425 __ LoadRoot(r0, Heap::kNullValueRootIndex); 3531 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3426 3532
3427 // All done. 3533 // All done.
3428 __ bind(&done); 3534 __ bind(&done);
3429 3535
3430 context()->Plug(r0); 3536 context()->Plug(r3);
3431 } 3537 }
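The STATIC_ASSERTs are what make three comparisons sufficient: they pin the two callable instance types to the extreme ends of the spec-object type range, with every non-callable type in between. As a sketch over the raw instance type (illustrative pseudocode only):

    if (is_smi(obj) || type < FIRST_SPEC_OBJECT_TYPE) return null_value;
    if (type == FIRST_SPEC_OBJECT_TYPE) return "Function";  // callable, low end
    if (type == LAST_SPEC_OBJECT_TYPE) return "Function";   // callable, high end
    Object* ctor = map->constructor;
    return is_js_function(ctor) ? ctor->shared()->instance_class_name()
                                : "Object";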
3432 3538
3433 3539
3434 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3540 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3435 // Load the arguments on the stack and call the stub. 3541 // Load the arguments on the stack and call the stub.
3436 SubStringStub stub(isolate()); 3542 SubStringStub stub(isolate());
3437 ZoneList<Expression*>* args = expr->arguments(); 3543 ZoneList<Expression*>* args = expr->arguments();
3438 DCHECK(args->length() == 3); 3544 DCHECK(args->length() == 3);
3439 VisitForStackValue(args->at(0)); 3545 VisitForStackValue(args->at(0));
3440 VisitForStackValue(args->at(1)); 3546 VisitForStackValue(args->at(1));
3441 VisitForStackValue(args->at(2)); 3547 VisitForStackValue(args->at(2));
3442 __ CallStub(&stub); 3548 __ CallStub(&stub);
3443 context()->Plug(r0); 3549 context()->Plug(r3);
3444 } 3550 }
3445 3551
3446 3552
3447 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3553 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3448 // Load the arguments on the stack and call the stub. 3554 // Load the arguments on the stack and call the stub.
3449 RegExpExecStub stub(isolate()); 3555 RegExpExecStub stub(isolate());
3450 ZoneList<Expression*>* args = expr->arguments(); 3556 ZoneList<Expression*>* args = expr->arguments();
3451 DCHECK(args->length() == 4); 3557 DCHECK(args->length() == 4);
3452 VisitForStackValue(args->at(0)); 3558 VisitForStackValue(args->at(0));
3453 VisitForStackValue(args->at(1)); 3559 VisitForStackValue(args->at(1));
3454 VisitForStackValue(args->at(2)); 3560 VisitForStackValue(args->at(2));
3455 VisitForStackValue(args->at(3)); 3561 VisitForStackValue(args->at(3));
3456 __ CallStub(&stub); 3562 __ CallStub(&stub);
3457 context()->Plug(r0); 3563 context()->Plug(r3);
3458 } 3564 }
3459 3565
3460 3566
3461 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3567 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3462 ZoneList<Expression*>* args = expr->arguments(); 3568 ZoneList<Expression*>* args = expr->arguments();
3463 DCHECK(args->length() == 1); 3569 DCHECK(args->length() == 1);
3464 VisitForAccumulatorValue(args->at(0)); // Load the object. 3570 VisitForAccumulatorValue(args->at(0)); // Load the object.
3465 3571
3466 Label done; 3572 Label done;
3467 // If the object is a smi return the object. 3573 // If the object is a smi return the object.
3468 __ JumpIfSmi(r0, &done); 3574 __ JumpIfSmi(r3, &done);
3469 // If the object is not a value type, return the object. 3575 // If the object is not a value type, return the object.
3470 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 3576 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3471 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq); 3577 __ bne(&done);
3578 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3472 3579
3473 __ bind(&done); 3580 __ bind(&done);
3474 context()->Plug(r0); 3581 context()->Plug(r3);
3475 } 3582 }
3476 3583
3477 3584
3478 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3585 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3479 ZoneList<Expression*>* args = expr->arguments(); 3586 ZoneList<Expression*>* args = expr->arguments();
3480 DCHECK(args->length() == 2); 3587 DCHECK(args->length() == 2);
3481 DCHECK_NE(NULL, args->at(1)->AsLiteral()); 3588 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3482 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3589 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3483 3590
3484 VisitForAccumulatorValue(args->at(0)); // Load the object. 3591 VisitForAccumulatorValue(args->at(0)); // Load the object.
3485 3592
3486 Label runtime, done, not_date_object; 3593 Label runtime, done, not_date_object;
3487 Register object = r0; 3594 Register object = r3;
3488 Register result = r0; 3595 Register result = r3;
3489 Register scratch0 = r9; 3596 Register scratch0 = r11;
3490 Register scratch1 = r1; 3597 Register scratch1 = r4;
3491 3598
3492 __ JumpIfSmi(object, &not_date_object); 3599 __ JumpIfSmi(object, &not_date_object);
3493 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE); 3600 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3494 __ b(ne, &not_date_object); 3601 __ bne(&not_date_object);
3495 3602
3496 if (index->value() == 0) { 3603 if (index->value() == 0) {
3497 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); 3604 __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
3498 __ jmp(&done); 3605 __ b(&done);
3499 } else { 3606 } else {
3500 if (index->value() < JSDate::kFirstUncachedField) { 3607 if (index->value() < JSDate::kFirstUncachedField) {
3501 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 3608 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3502 __ mov(scratch1, Operand(stamp)); 3609 __ mov(scratch1, Operand(stamp));
3503 __ ldr(scratch1, MemOperand(scratch1)); 3610 __ LoadP(scratch1, MemOperand(scratch1));
3504 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); 3611 __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3505 __ cmp(scratch1, scratch0); 3612 __ cmp(scratch1, scratch0);
3506 __ b(ne, &runtime); 3613 __ bne(&runtime);
3507 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + 3614 __ LoadP(result,
3508 kPointerSize * index->value())); 3615 FieldMemOperand(object, JSDate::kValueOffset +
3509 __ jmp(&done); 3616 kPointerSize * index->value()),
3617 scratch0);
3618 __ b(&done);
3510 } 3619 }
3511 __ bind(&runtime); 3620 __ bind(&runtime);
3512 __ PrepareCallCFunction(2, scratch1); 3621 __ PrepareCallCFunction(2, scratch1);
3513 __ mov(r1, Operand(index)); 3622 __ LoadSmiLiteral(r4, index);
3514 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 3623 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3515 __ jmp(&done); 3624 __ b(&done);
3516 } 3625 }
3517 3626
3518 __ bind(&not_date_object); 3627 __ bind(&not_date_object);
3519 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3628 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3520 __ bind(&done); 3629 __ bind(&done);
3521 context()->Plug(r0); 3630 context()->Plug(r3);
3522 } 3631 }
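Date field caching hinges on a single per-isolate stamp: any invalidation of the date cache bumps the stamp, so a JSDate whose kCacheStampOffset still matches may serve its cached fields directly. A sketch of the decision tree above (names mirror the offsets used, not a real accessor API):

    if (index == 0) return date->value;                 // time value, never cached
    if (index < JSDate::kFirstUncachedField &&
        date->cache_stamp == *date_cache_stamp(isolate)) {
      return date->field[index];                        // stamp matches: cache valid
    }
    return get_date_field_function(date, Smi(index));   // C call recomputes fields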
3523 3632
3524 3633
3525 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3634 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3526 ZoneList<Expression*>* args = expr->arguments(); 3635 ZoneList<Expression*>* args = expr->arguments();
3527 DCHECK_EQ(3, args->length()); 3636 DCHECK_EQ(3, args->length());
3528 3637
3529 Register string = r0; 3638 Register string = r3;
3530 Register index = r1; 3639 Register index = r4;
3531 Register value = r2; 3640 Register value = r5;
3532 3641
3533 VisitForStackValue(args->at(0)); // index 3642 VisitForStackValue(args->at(0)); // index
3534 VisitForStackValue(args->at(1)); // value 3643 VisitForStackValue(args->at(1)); // value
3535 VisitForAccumulatorValue(args->at(2)); // string 3644 VisitForAccumulatorValue(args->at(2)); // string
3536 __ Pop(index, value); 3645 __ Pop(index, value);
3537 3646
3538 if (FLAG_debug_code) { 3647 if (FLAG_debug_code) {
3539 __ SmiTst(value); 3648 __ TestIfSmi(value, r0);
3540 __ Check(eq, kNonSmiValue); 3649 __ Check(eq, kNonSmiValue, cr0);
3541 __ SmiTst(index); 3650 __ TestIfSmi(index, r0);
3542 __ Check(eq, kNonSmiIndex); 3651 __ Check(eq, kNonSmiIndex, cr0);
3543 __ SmiUntag(index, index); 3652 __ SmiUntag(index, index);
3544 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 3653 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3545 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); 3654 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3546 __ SmiTag(index, index); 3655 __ SmiTag(index, index);
3547 } 3656 }
3548 3657
3549 __ SmiUntag(value, value); 3658 __ SmiUntag(value);
3550 __ add(ip, 3659 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3551 string, 3660 __ SmiToByteArrayOffset(r0, index);
3552 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3661 __ stbx(value, MemOperand(ip, r0));
3553 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3554 context()->Plug(string); 3662 context()->Plug(string);
3555 } 3663 }
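SmiToByteArrayOffset converts the tagged index into an element offset; with the common 32-bit smi encoding that is an arithmetic shift right by kSmiTagSize, exactly the untagging the ARM store folded into its `LSR, kSmiTagSize` addressing mode. PPC indexed stores cannot embed a shift, hence the separate r0 computation and stbx. A sketch, assuming 32-bit smis (PPC64 shifts the value down from the upper word instead):

    byte_offset = index_smi >> kSmiTagSize;   // smi == value << 1, so >> 1 untags

The two-byte variant below uses SmiToShortArrayOffset, the same conversion scaled for uc16 elements.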
3556 3664
3557 3665
3558 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3666 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3559 ZoneList<Expression*>* args = expr->arguments(); 3667 ZoneList<Expression*>* args = expr->arguments();
3560 DCHECK_EQ(3, args->length()); 3668 DCHECK_EQ(3, args->length());
3561 3669
3562 Register string = r0; 3670 Register string = r3;
3563 Register index = r1; 3671 Register index = r4;
3564 Register value = r2; 3672 Register value = r5;
3565 3673
3566 VisitForStackValue(args->at(0)); // index 3674 VisitForStackValue(args->at(0)); // index
3567 VisitForStackValue(args->at(1)); // value 3675 VisitForStackValue(args->at(1)); // value
3568 VisitForAccumulatorValue(args->at(2)); // string 3676 VisitForAccumulatorValue(args->at(2)); // string
3569 __ Pop(index, value); 3677 __ Pop(index, value);
3570 3678
3571 if (FLAG_debug_code) { 3679 if (FLAG_debug_code) {
3572 __ SmiTst(value); 3680 __ TestIfSmi(value, r0);
3573 __ Check(eq, kNonSmiValue); 3681 __ Check(eq, kNonSmiValue, cr0);
3574 __ SmiTst(index); 3682 __ TestIfSmi(index, r0);
3575 __ Check(eq, kNonSmiIndex); 3683 __ Check(eq, kNonSmiIndex, cr0);
3576 __ SmiUntag(index, index); 3684 __ SmiUntag(index, index);
3577 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 3685 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3578 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); 3686 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3579 __ SmiTag(index, index); 3687 __ SmiTag(index, index);
3580 } 3688 }
3581 3689
3582 __ SmiUntag(value, value); 3690 __ SmiUntag(value);
3583 __ add(ip, 3691 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3584 string, 3692 __ SmiToShortArrayOffset(r0, index);
3585 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 3693 __ sthx(value, MemOperand(ip, r0));
3586 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3587 __ strh(value, MemOperand(ip, index));
3588 context()->Plug(string); 3694 context()->Plug(string);
3589 } 3695 }
3590 3696
3591 3697
3592
3593 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3698 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3594 // Load the arguments on the stack and call the runtime function. 3699 // Load the arguments on the stack and call the runtime function.
3595 ZoneList<Expression*>* args = expr->arguments(); 3700 ZoneList<Expression*>* args = expr->arguments();
3596 DCHECK(args->length() == 2); 3701 DCHECK(args->length() == 2);
3597 VisitForStackValue(args->at(0)); 3702 VisitForStackValue(args->at(0));
3598 VisitForStackValue(args->at(1)); 3703 VisitForStackValue(args->at(1));
3599 MathPowStub stub(isolate(), MathPowStub::ON_STACK); 3704 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3600 __ CallStub(&stub); 3705 __ CallStub(&stub);
3601 context()->Plug(r0); 3706 context()->Plug(r3);
3602 } 3707 }
3603 3708
3604 3709
3605 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3710 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3606 ZoneList<Expression*>* args = expr->arguments(); 3711 ZoneList<Expression*>* args = expr->arguments();
3607 DCHECK(args->length() == 2); 3712 DCHECK(args->length() == 2);
3608 VisitForStackValue(args->at(0)); // Load the object. 3713 VisitForStackValue(args->at(0)); // Load the object.
3609 VisitForAccumulatorValue(args->at(1)); // Load the value. 3714 VisitForAccumulatorValue(args->at(1)); // Load the value.
3610 __ pop(r1); // r0 = value. r1 = object. 3715 __ pop(r4); // r3 = value. r4 = object.
3611 3716
3612 Label done; 3717 Label done;
3613 // If the object is a smi, return the value. 3718 // If the object is a smi, return the value.
3614 __ JumpIfSmi(r1, &done); 3719 __ JumpIfSmi(r4, &done);
3615 3720
3616 // If the object is not a value type, return the value. 3721 // If the object is not a value type, return the value.
3617 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); 3722 __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
3618 __ b(ne, &done); 3723 __ bne(&done);
3619 3724
3620 // Store the value. 3725 // Store the value.
3621 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); 3726 __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
3622 // Update the write barrier. Save the value as it will be 3727 // Update the write barrier. Save the value as it will be
3623 // overwritten by the write barrier code and is needed afterward. 3728 // overwritten by the write barrier code and is needed afterward.
3624 __ mov(r2, r0); 3729 __ mr(r5, r3);
3625 __ RecordWriteField( 3730 __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
3626 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 3731 kDontSaveFPRegs);
3627 3732
3628 __ bind(&done); 3733 __ bind(&done);
3629 context()->Plug(r0); 3734 context()->Plug(r3);
3630 } 3735 }
3631 3736
3632 3737
3633 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3738 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3634 ZoneList<Expression*>* args = expr->arguments(); 3739 ZoneList<Expression*>* args = expr->arguments();
3635 DCHECK_EQ(args->length(), 1); 3740 DCHECK_EQ(args->length(), 1);
3636 // Load the argument into r0 and call the stub. 3741 // Load the argument into r3 and call the stub.
3637 VisitForAccumulatorValue(args->at(0)); 3742 VisitForAccumulatorValue(args->at(0));
3638 3743
3639 NumberToStringStub stub(isolate()); 3744 NumberToStringStub stub(isolate());
3640 __ CallStub(&stub); 3745 __ CallStub(&stub);
3641 context()->Plug(r0); 3746 context()->Plug(r3);
3642 } 3747 }
3643 3748
3644 3749
3645 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3750 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3646 ZoneList<Expression*>* args = expr->arguments(); 3751 ZoneList<Expression*>* args = expr->arguments();
3647 DCHECK(args->length() == 1); 3752 DCHECK(args->length() == 1);
3648 VisitForAccumulatorValue(args->at(0)); 3753 VisitForAccumulatorValue(args->at(0));
3649 3754
3650 Label done; 3755 Label done;
3651 StringCharFromCodeGenerator generator(r0, r1); 3756 StringCharFromCodeGenerator generator(r3, r4);
3652 generator.GenerateFast(masm_); 3757 generator.GenerateFast(masm_);
3653 __ jmp(&done); 3758 __ b(&done);
3654 3759
3655 NopRuntimeCallHelper call_helper; 3760 NopRuntimeCallHelper call_helper;
3656 generator.GenerateSlow(masm_, call_helper); 3761 generator.GenerateSlow(masm_, call_helper);
3657 3762
3658 __ bind(&done); 3763 __ bind(&done);
3659 context()->Plug(r1); 3764 context()->Plug(r4);
3660 } 3765 }
3661 3766
3662 3767
3663 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3768 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3664 ZoneList<Expression*>* args = expr->arguments(); 3769 ZoneList<Expression*>* args = expr->arguments();
3665 DCHECK(args->length() == 2); 3770 DCHECK(args->length() == 2);
3666 VisitForStackValue(args->at(0)); 3771 VisitForStackValue(args->at(0));
3667 VisitForAccumulatorValue(args->at(1)); 3772 VisitForAccumulatorValue(args->at(1));
3668 3773
3669 Register object = r1; 3774 Register object = r4;
3670 Register index = r0; 3775 Register index = r3;
3671 Register result = r3; 3776 Register result = r6;
3672 3777
3673 __ pop(object); 3778 __ pop(object);
3674 3779
3675 Label need_conversion; 3780 Label need_conversion;
3676 Label index_out_of_range; 3781 Label index_out_of_range;
3677 Label done; 3782 Label done;
3678 StringCharCodeAtGenerator generator(object, 3783 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
3679 index, 3784 &need_conversion, &index_out_of_range,
3680 result,
3681 &need_conversion,
3682 &need_conversion,
3683 &index_out_of_range,
3684 STRING_INDEX_IS_NUMBER); 3785 STRING_INDEX_IS_NUMBER);
3685 generator.GenerateFast(masm_); 3786 generator.GenerateFast(masm_);
3686 __ jmp(&done); 3787 __ b(&done);
3687 3788
3688 __ bind(&index_out_of_range); 3789 __ bind(&index_out_of_range);
3689 // When the index is out of range, the spec requires us to return 3790 // When the index is out of range, the spec requires us to return
3690 // NaN. 3791 // NaN.
3691 __ LoadRoot(result, Heap::kNanValueRootIndex); 3792 __ LoadRoot(result, Heap::kNanValueRootIndex);
3692 __ jmp(&done); 3793 __ b(&done);
3693 3794
3694 __ bind(&need_conversion); 3795 __ bind(&need_conversion);
3695 // Load the undefined value into the result register, which will 3796 // Load the undefined value into the result register, which will
3696 // trigger conversion. 3797 // trigger conversion.
3697 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 3798 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3698 __ jmp(&done); 3799 __ b(&done);
3699 3800
3700 NopRuntimeCallHelper call_helper; 3801 NopRuntimeCallHelper call_helper;
3701 generator.GenerateSlow(masm_, call_helper); 3802 generator.GenerateSlow(masm_, call_helper);
3702 3803
3703 __ bind(&done); 3804 __ bind(&done);
3704 context()->Plug(result); 3805 context()->Plug(result);
3705 } 3806 }
3706 3807
3707 3808
3708 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3809 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3709 ZoneList<Expression*>* args = expr->arguments(); 3810 ZoneList<Expression*>* args = expr->arguments();
3710 DCHECK(args->length() == 2); 3811 DCHECK(args->length() == 2);
3711 VisitForStackValue(args->at(0)); 3812 VisitForStackValue(args->at(0));
3712 VisitForAccumulatorValue(args->at(1)); 3813 VisitForAccumulatorValue(args->at(1));
3713 3814
3714 Register object = r1; 3815 Register object = r4;
3715 Register index = r0; 3816 Register index = r3;
3716 Register scratch = r3; 3817 Register scratch = r6;
3717 Register result = r0; 3818 Register result = r3;
3718 3819
3719 __ pop(object); 3820 __ pop(object);
3720 3821
3721 Label need_conversion; 3822 Label need_conversion;
3722 Label index_out_of_range; 3823 Label index_out_of_range;
3723 Label done; 3824 Label done;
3724 StringCharAtGenerator generator(object, 3825 StringCharAtGenerator generator(object, index, scratch, result,
3725 index, 3826 &need_conversion, &need_conversion,
3726 scratch, 3827 &index_out_of_range, STRING_INDEX_IS_NUMBER);
3727 result,
3728 &need_conversion,
3729 &need_conversion,
3730 &index_out_of_range,
3731 STRING_INDEX_IS_NUMBER);
3732 generator.GenerateFast(masm_); 3828 generator.GenerateFast(masm_);
3733 __ jmp(&done); 3829 __ b(&done);
3734 3830
3735 __ bind(&index_out_of_range); 3831 __ bind(&index_out_of_range);
3736 // When the index is out of range, the spec requires us to return 3832 // When the index is out of range, the spec requires us to return
3737 // the empty string. 3833 // the empty string.
3738 __ LoadRoot(result, Heap::kempty_stringRootIndex); 3834 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3739 __ jmp(&done); 3835 __ b(&done);
3740 3836
3741 __ bind(&need_conversion); 3837 __ bind(&need_conversion);
3742 // Move smi zero into the result register, which will trigger 3838 // Move smi zero into the result register, which will trigger
3743 // conversion. 3839 // conversion.
3744 __ mov(result, Operand(Smi::FromInt(0))); 3840 __ LoadSmiLiteral(result, Smi::FromInt(0));
3745 __ jmp(&done); 3841 __ b(&done);
3746 3842
3747 NopRuntimeCallHelper call_helper; 3843 NopRuntimeCallHelper call_helper;
3748 generator.GenerateSlow(masm_, call_helper); 3844 generator.GenerateSlow(masm_, call_helper);
3749 3845
3750 __ bind(&done); 3846 __ bind(&done);
3751 context()->Plug(result); 3847 context()->Plug(result);
3752 } 3848 }
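EmitStringCharCodeAt and EmitStringCharAt share the generator's fast/slow split; they differ only in the out-of-range result and in the sentinel loaded to make GenerateSlow perform conversion. Summarized as a behavioral sketch:

    // charCodeAt: out of range -> NaN;  non-smi index -> undefined, then slow path
    // charAt:     out of range -> "";   non-smi index -> Smi(0),    then slow path
    // otherwise GenerateFast loads the character (code) directly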
3753 3849
3754 3850
3755 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3851 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3756 ZoneList<Expression*>* args = expr->arguments(); 3852 ZoneList<Expression*>* args = expr->arguments();
3757 DCHECK_EQ(2, args->length()); 3853 DCHECK_EQ(2, args->length());
3758 VisitForStackValue(args->at(0)); 3854 VisitForStackValue(args->at(0));
3759 VisitForAccumulatorValue(args->at(1)); 3855 VisitForAccumulatorValue(args->at(1));
3760 3856
3761 __ pop(r1); 3857 __ pop(r4);
3762 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); 3858 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3763 __ CallStub(&stub); 3859 __ CallStub(&stub);
3764 context()->Plug(r0); 3860 context()->Plug(r3);
3765 } 3861 }
3766 3862
3767 3863
3768 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3864 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3769 ZoneList<Expression*>* args = expr->arguments(); 3865 ZoneList<Expression*>* args = expr->arguments();
3770 DCHECK_EQ(2, args->length()); 3866 DCHECK_EQ(2, args->length());
3771 VisitForStackValue(args->at(0)); 3867 VisitForStackValue(args->at(0));
3772 VisitForStackValue(args->at(1)); 3868 VisitForStackValue(args->at(1));
3773 3869
3774 StringCompareStub stub(isolate()); 3870 StringCompareStub stub(isolate());
3775 __ CallStub(&stub); 3871 __ CallStub(&stub);
3776 context()->Plug(r0); 3872 context()->Plug(r3);
3777 } 3873 }
3778 3874
3779 3875
3780 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3876 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3781 ZoneList<Expression*>* args = expr->arguments(); 3877 ZoneList<Expression*>* args = expr->arguments();
3782 DCHECK(args->length() >= 2); 3878 DCHECK(args->length() >= 2);
3783 3879
3784 int arg_count = args->length() - 2; // 2 ~ receiver and function. 3880 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3785 for (int i = 0; i < arg_count + 1; i++) { 3881 for (int i = 0; i < arg_count + 1; i++) {
3786 VisitForStackValue(args->at(i)); 3882 VisitForStackValue(args->at(i));
3787 } 3883 }
3788 VisitForAccumulatorValue(args->last()); // Function. 3884 VisitForAccumulatorValue(args->last()); // Function.
3789 3885
3790 Label runtime, done; 3886 Label runtime, done;
3791 // Check for non-function argument (including proxy). 3887 // Check for non-function argument (including proxy).
3792 __ JumpIfSmi(r0, &runtime); 3888 __ JumpIfSmi(r3, &runtime);
3793 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3889 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
3794 __ b(ne, &runtime); 3890 __ bne(&runtime);
3795 3891
3796 // InvokeFunction requires the function in r1. Move it in there. 3892 // InvokeFunction requires the function in r4. Move it in there.
3797 __ mov(r1, result_register()); 3893 __ mr(r4, result_register());
3798 ParameterCount count(arg_count); 3894 ParameterCount count(arg_count);
3799 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper()); 3895 __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
3800 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3896 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3801 __ jmp(&done); 3897 __ b(&done);
3802 3898
3803 __ bind(&runtime); 3899 __ bind(&runtime);
3804 __ push(r0); 3900 __ push(r3);
3805 __ CallRuntime(Runtime::kCall, args->length()); 3901 __ CallRuntime(Runtime::kCall, args->length());
3806 __ bind(&done); 3902 __ bind(&done);
3807 3903
3808 context()->Plug(r0); 3904 context()->Plug(r3);
3809 } 3905 }
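%_CallFunction's fast path fires only for bona fide JSFunctions; smis, function proxies, and any other callables fall back to Runtime::kCall, which dispatches generically. In outline (pseudocode):

    if (is_smi(target) || instance_type(target) != JS_FUNCTION_TYPE) {
      return Runtime_Call(receiver, arg_1, ..., arg_n, target);  // general case
    }
    return InvokeFunction(target, arg_count);                    // direct invoke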
3810 3906
3811 3907
3812 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3908 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3813 RegExpConstructResultStub stub(isolate()); 3909 RegExpConstructResultStub stub(isolate());
3814 ZoneList<Expression*>* args = expr->arguments(); 3910 ZoneList<Expression*>* args = expr->arguments();
3815 DCHECK(args->length() == 3); 3911 DCHECK(args->length() == 3);
3816 VisitForStackValue(args->at(0)); 3912 VisitForStackValue(args->at(0));
3817 VisitForStackValue(args->at(1)); 3913 VisitForStackValue(args->at(1));
3818 VisitForAccumulatorValue(args->at(2)); 3914 VisitForAccumulatorValue(args->at(2));
3819 __ pop(r1); 3915 __ Pop(r5, r4);
3820 __ pop(r2);
3821 __ CallStub(&stub); 3916 __ CallStub(&stub);
3822 context()->Plug(r0); 3917 context()->Plug(r3);
3823 } 3918 }
3824 3919
3825 3920
3826 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 3921 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3827 ZoneList<Expression*>* args = expr->arguments(); 3922 ZoneList<Expression*>* args = expr->arguments();
3828 DCHECK_EQ(2, args->length()); 3923 DCHECK_EQ(2, args->length());
3829 DCHECK_NE(NULL, args->at(0)->AsLiteral()); 3924 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3830 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); 3925 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3831 3926
3832 Handle<FixedArray> jsfunction_result_caches( 3927 Handle<FixedArray> jsfunction_result_caches(
3833 isolate()->native_context()->jsfunction_result_caches()); 3928 isolate()->native_context()->jsfunction_result_caches());
3834 if (jsfunction_result_caches->length() <= cache_id) { 3929 if (jsfunction_result_caches->length() <= cache_id) {
3835 __ Abort(kAttemptToUseUndefinedCache); 3930 __ Abort(kAttemptToUseUndefinedCache);
3836 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3931 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
3837 context()->Plug(r0); 3932 context()->Plug(r3);
3838 return; 3933 return;
3839 } 3934 }
3840 3935
3841 VisitForAccumulatorValue(args->at(1)); 3936 VisitForAccumulatorValue(args->at(1));
3842 3937
3843 Register key = r0; 3938 Register key = r3;
3844 Register cache = r1; 3939 Register cache = r4;
3845 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3940 __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3846 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); 3941 __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3847 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3942 __ LoadP(cache,
3848 __ ldr(cache, 3943 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3849 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3944 __ LoadP(cache,
3850 3945 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
3851 3946
3852 Label done, not_found; 3947 Label done, not_found;
3853 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); 3948 __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3854 // r2 now holds finger offset as a smi. 3949 // r5 now holds finger offset as a smi.
3855 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3950 __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3856 // r3 now points to the start of fixed array elements. 3951 // r6 now points to the start of fixed array elements.
3857 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex)); 3952 __ SmiToPtrArrayOffset(r5, r5);
3858 // Note side effect of PreIndex: r3 now points to the key of the pair. 3953 __ LoadPUX(r5, MemOperand(r6, r5));
3859 __ cmp(key, r2); 3954 // r6 now points to the key of the pair.
3860 __ b(ne, &not_found); 3955 __ cmp(key, r5);
3956 __ bne(&not_found);
3861 3957
3862 __ ldr(r0, MemOperand(r3, kPointerSize)); 3958 __ LoadP(r3, MemOperand(r6, kPointerSize));
3863 __ b(&done); 3959 __ b(&done);
3864 3960
3865 __ bind(&not_found); 3961 __ bind(&not_found);
3866 // Call runtime to perform the lookup. 3962 // Call runtime to perform the lookup.
3867 __ Push(cache, key); 3963 __ Push(cache, key);
3868 __ CallRuntime(Runtime::kGetFromCache, 2); 3964 __ CallRuntime(Runtime::kGetFromCache, 2);
3869 3965
3870 __ bind(&done); 3966 __ bind(&done);
3871 context()->Plug(r0); 3967 context()->Plug(r3);
3872 } 3968 }
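A JSFunctionResultCache is a FixedArray of key/value pairs plus a "finger", a smi element offset remembering the most recently hit key; the inline path probes only the finger entry and leaves everything else to Runtime::kGetFromCache, which searches the table and advances the finger. Sketched against the offsets used above (not the real heap accessors):

    finger = cache[kFingerOffset];               // element index of the last-hit key
    if (cache[finger] == key) return cache[finger + 1];  // value sits beside its key
    return Runtime_GetFromCache(cache_id, key);          // slow path updates finger

The port also keeps the ARM trick of a single updating load: LoadPUX fetches the key and leaves r6 pointing at it, mirroring ARM's PreIndex addressing.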
3873 3969
3874 3970
3875 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 3971 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3876 ZoneList<Expression*>* args = expr->arguments(); 3972 ZoneList<Expression*>* args = expr->arguments();
3877 VisitForAccumulatorValue(args->at(0)); 3973 VisitForAccumulatorValue(args->at(0));
3878 3974
3879 Label materialize_true, materialize_false; 3975 Label materialize_true, materialize_false;
3880 Label* if_true = NULL; 3976 Label* if_true = NULL;
3881 Label* if_false = NULL; 3977 Label* if_false = NULL;
3882 Label* fall_through = NULL; 3978 Label* fall_through = NULL;
3883 context()->PrepareTest(&materialize_true, &materialize_false, 3979 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3884 &if_true, &if_false, &fall_through); 3980 &if_false, &fall_through);
3885 3981
3886 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3982 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3887 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask)); 3983 // PPC - mask exceeds a 16-bit immediate; materialize it in ip (assumed free here)
3984 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
3985 __ and_(r0, r3, ip);
3986 __ cmpi(r0, Operand::Zero());
3888 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3987 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3889 Split(eq, if_true, if_false, fall_through); 3988 Split(eq, if_true, if_false, fall_through);
3890 3989
3891 context()->Plug(if_true, if_false); 3990 context()->Plug(if_true, if_false);
3892 } 3991 }
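The hash field does double duty, holding either the computed hash or a cached array index, with flag bits to tell them apart; String::kContainsCachedArrayIndexMask is zero exactly in the cached-index case, so the whole test is:

    has_cached_index = (hash_field & String::kContainsCachedArrayIndexMask) == 0;

The mask is materialized in ip on PPC because and/compare immediates are limited to 16 bits there, whereas the ARM encoding let tst take it as an immediate operand.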
3893 3992
3894 3993
3895 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3994 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3896 ZoneList<Expression*>* args = expr->arguments(); 3995 ZoneList<Expression*>* args = expr->arguments();
3897 DCHECK(args->length() == 1); 3996 DCHECK(args->length() == 1);
3898 VisitForAccumulatorValue(args->at(0)); 3997 VisitForAccumulatorValue(args->at(0));
3899 3998
3900 __ AssertString(r0); 3999 __ AssertString(r3);
3901 4000
3902 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 4001 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3903 __ IndexFromHash(r0, r0); 4002 __ IndexFromHash(r3, r3);
3904 4003
3905 context()->Plug(r0); 4004 context()->Plug(r3);
3906 } 4005 }
3907 4006
3908 4007
3909 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) { 4008 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3910 Label bailout, done, one_char_separator, long_separator, non_trivial_array, 4009 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3911 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop, 4010 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3912 one_char_separator_loop_entry, long_separator_loop; 4011 one_char_separator_loop_entry, long_separator_loop;
3913 ZoneList<Expression*>* args = expr->arguments(); 4012 ZoneList<Expression*>* args = expr->arguments();
3914 DCHECK(args->length() == 2); 4013 DCHECK(args->length() == 2);
3915 VisitForStackValue(args->at(1)); 4014 VisitForStackValue(args->at(1));
3916 VisitForAccumulatorValue(args->at(0)); 4015 VisitForAccumulatorValue(args->at(0));
3917 4016
3918 // All aliases of the same register have disjoint lifetimes. 4017 // All aliases of the same register have disjoint lifetimes.
3919 Register array = r0; 4018 Register array = r3;
3920 Register elements = no_reg; // Will be r0. 4019 Register elements = no_reg; // Will be r3.
3921 Register result = no_reg; // Will be r0. 4020 Register result = no_reg; // Will be r3.
3922 Register separator = r1; 4021 Register separator = r4;
3923 Register array_length = r2; 4022 Register array_length = r5;
3924 Register result_pos = no_reg; // Will be r2 4023 Register result_pos = no_reg; // Will be r5
3925 Register string_length = r3; 4024 Register string_length = r6;
3926 Register string = r4; 4025 Register string = r7;
3927 Register element = r5; 4026 Register element = r8;
3928 Register elements_end = r6; 4027 Register elements_end = r9;
3929 Register scratch = r9; 4028 Register scratch1 = r10;
4029 Register scratch2 = r11;
3930 4030
3931 // Separator operand is on the stack. 4031 // Separator operand is on the stack.
3932 __ pop(separator); 4032 __ pop(separator);
3933 4033
3934 // Check that the array is a JSArray. 4034 // Check that the array is a JSArray.
3935 __ JumpIfSmi(array, &bailout); 4035 __ JumpIfSmi(array, &bailout);
3936 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE); 4036 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
3937 __ b(ne, &bailout); 4037 __ bne(&bailout);
3938 4038
3939 // Check that the array has fast elements. 4039 // Check that the array has fast elements.
3940 __ CheckFastElements(scratch, array_length, &bailout); 4040 __ CheckFastElements(scratch1, scratch2, &bailout);
3941 4041
3942 // If the array has length zero, return the empty string. 4042 // If the array has length zero, return the empty string.
3943 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); 4043 __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3944 __ SmiUntag(array_length, SetCC); 4044 __ SmiUntag(array_length);
3945 __ b(ne, &non_trivial_array); 4045 __ cmpi(array_length, Operand::Zero());
3946 __ LoadRoot(r0, Heap::kempty_stringRootIndex); 4046 __ bne(&non_trivial_array);
4047 __ LoadRoot(r3, Heap::kempty_stringRootIndex);
3947 __ b(&done); 4048 __ b(&done);
3948 4049
3949 __ bind(&non_trivial_array); 4050 __ bind(&non_trivial_array);
3950 4051
3951 // Get the FixedArray containing array's elements. 4052 // Get the FixedArray containing array's elements.
3952 elements = array; 4053 elements = array;
3953 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset)); 4054 __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3954 array = no_reg; // End of array's live range. 4055 array = no_reg; // End of array's live range.
3955 4056
3956 // Check that all array elements are sequential one-byte strings, and 4057 // Check that all array elements are sequential one-byte strings, and
3957 // accumulate the sum of their lengths, as a smi-encoded value. 4058 // accumulate the sum of their lengths, as a smi-encoded value.
3958 __ mov(string_length, Operand::Zero()); 4059 __ li(string_length, Operand::Zero());
3959 __ add(element, 4060 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3960 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 4061 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
3961 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 4062 __ add(elements_end, element, elements_end);
3962 // Loop condition: while (element < elements_end). 4063 // Loop condition: while (element < elements_end).
3963 // Live values in registers: 4064 // Live values in registers:
3964 // elements: Fixed array of strings. 4065 // elements: Fixed array of strings.
3965 // array_length: Length of the fixed array of strings (not smi) 4066 // array_length: Length of the fixed array of strings (not smi)
3966 // separator: Separator string 4067 // separator: Separator string
3967 // string_length: Accumulated sum of string lengths (smi). 4068 // string_length: Accumulated sum of string lengths (smi).
3968 // element: Current array element. 4069 // element: Current array element.
3969 // elements_end: Array end. 4070 // elements_end: Array end.
3970 if (generate_debug_code_) { 4071 if (generate_debug_code_) {
3971 __ cmp(array_length, Operand::Zero()); 4072 __ cmpi(array_length, Operand::Zero());
3972 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin); 4073 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3973 } 4074 }
3974 __ bind(&loop); 4075 __ bind(&loop);
3975 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4076 __ LoadP(string, MemOperand(element));
4077 __ addi(element, element, Operand(kPointerSize));
3976 __ JumpIfSmi(string, &bailout); 4078 __ JumpIfSmi(string, &bailout);
3977 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); 4079 __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3978 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 4080 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3979 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout); 4081 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3980 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); 4082 __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3981 __ add(string_length, string_length, Operand(scratch), SetCC); 4083
3982 __ b(vs, &bailout); 4084 __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
4085 r0);
4086 __ BranchOnOverflow(&bailout);
4087
3983 __ cmp(element, elements_end); 4088 __ cmp(element, elements_end);
3984 __ b(lt, &loop); 4089 __ blt(&loop);
3985 4090
3986 // If array_length is 1, return elements[0], a string. 4091 // If array_length is 1, return elements[0], a string.
3987 __ cmp(array_length, Operand(1)); 4092 __ cmpi(array_length, Operand(1));
3988 __ b(ne, &not_size_one_array); 4093 __ bne(&not_size_one_array);
3989 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); 4094 __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
3990 __ b(&done); 4095 __ b(&done);
3991 4096
3992 __ bind(&not_size_one_array); 4097 __ bind(&not_size_one_array);
3993 4098
3994 // Live values in registers: 4099 // Live values in registers:
3995 // separator: Separator string 4100 // separator: Separator string
3996 // array_length: Length of the array. 4101 // array_length: Length of the array.
3997 // string_length: Sum of string lengths (smi). 4102 // string_length: Sum of string lengths (smi).
3998 // elements: FixedArray of strings. 4103 // elements: FixedArray of strings.
3999 4104
4000 // Check that the separator is a flat one-byte string. 4105 // Check that the separator is a flat one-byte string.
4001 __ JumpIfSmi(separator, &bailout); 4106 __ JumpIfSmi(separator, &bailout);
4002 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset)); 4107 __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4003 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 4108 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4004 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout); 4109 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4005 4110
4006 // Add (separator length times array_length) - separator length to the 4111 // Add (separator length times array_length) - separator length to the
4007 // string_length to get the length of the result string. array_length is not 4112 // string_length to get the length of the result string.
4008 // smi but the other values are, so the result is a smi 4113 __ LoadP(scratch1,
4009 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 4114 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4010 __ sub(string_length, string_length, Operand(scratch)); 4115 __ sub(string_length, string_length, scratch1);
4011 __ smull(scratch, ip, array_length, scratch); 4116 #if V8_TARGET_ARCH_PPC64
4117 __ SmiUntag(scratch1, scratch1);
4118 __ Mul(scratch2, array_length, scratch1);
4012 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are 4119 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4013 // zero. 4120 // zero.
4014 __ cmp(ip, Operand::Zero()); 4121 __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
4015 __ b(ne, &bailout); 4122 __ bne(&bailout, cr0);
4016 __ tst(scratch, Operand(0x80000000)); 4123 __ SmiTag(scratch2, scratch2);
4017 __ b(ne, &bailout); 4124 #else
4018 __ add(string_length, string_length, Operand(scratch), SetCC); 4125 // array_length is not a smi but the other values are, so the result is a smi
4019 __ b(vs, &bailout); 4126 __ mullw(scratch2, array_length, scratch1);
4127 __ mulhw(ip, array_length, scratch1);
4128 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4129 // zero.
4130 __ cmpi(ip, Operand::Zero());
4131 __ bne(&bailout);
4132 __ cmpwi(scratch2, Operand::Zero());
4133 __ blt(&bailout);
4134 #endif
4135
4136 __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
4137 r0);
4138 __ BranchOnOverflow(&bailout);
4020 __ SmiUntag(string_length); 4139 __ SmiUntag(string_length);
4021 4140
4022 // Get first element in the array to free up the elements register to be used 4141 // Get first element in the array to free up the elements register to be used
4023 // for the result. 4142 // for the result.
4024 __ add(element, 4143 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4025 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4026 result = elements; // End of live range for elements. 4144 result = elements; // End of live range for elements.
4027 elements = no_reg; 4145 elements = no_reg;
4028 // Live values in registers: 4146 // Live values in registers:
4029 // element: First array element 4147 // element: First array element
4030 // separator: Separator string 4148 // separator: Separator string
4031 // string_length: Length of result string (not smi) 4149 // string_length: Length of result string (not smi)
4032 // array_length: Length of the array. 4150 // array_length: Length of the array.
4033 __ AllocateOneByteString(result, string_length, scratch, 4151 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4034 string, // used as scratch 4152 elements_end, &bailout);
4035 elements_end, // used as scratch
4036 &bailout);
4037 // Prepare for looping. Set up elements_end to end of the array. Set 4153 // Prepare for looping. Set up elements_end to end of the array. Set
4038 // result_pos to the position of the result where to write the first 4154 // result_pos to the position of the result where to write the first
4039 // character. 4155 // character.
4040 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 4156 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4157 __ add(elements_end, element, elements_end);
4041 result_pos = array_length; // End of live range for array_length. 4158 result_pos = array_length; // End of live range for array_length.
4042 array_length = no_reg; 4159 array_length = no_reg;
4043 __ add(result_pos, 4160 __ addi(result_pos, result,
4044 result, 4161 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4045 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4046 4162
4047 // Check the length of the separator. 4163 // Check the length of the separator.
4048 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 4164 __ LoadP(scratch1,
4049 __ cmp(scratch, Operand(Smi::FromInt(1))); 4165 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4050 __ b(eq, &one_char_separator); 4166 __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
4051 __ b(gt, &long_separator); 4167 __ beq(&one_char_separator);
4168 __ bgt(&long_separator);
4052 4169
4053 // Empty separator case 4170 // Empty separator case
4054 __ bind(&empty_separator_loop); 4171 __ bind(&empty_separator_loop);
4055 // Live values in registers: 4172 // Live values in registers:
4056 // result_pos: the position to which we are currently copying characters. 4173 // result_pos: the position to which we are currently copying characters.
4057 // element: Current array element. 4174 // element: Current array element.
4058 // elements_end: Array end. 4175 // elements_end: Array end.
4059 4176
4060 // Copy next array element to the result. 4177 // Copy next array element to the result.
4061 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4178 __ LoadP(string, MemOperand(element));
4062 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4179 __ addi(element, element, Operand(kPointerSize));
4180 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4063 __ SmiUntag(string_length); 4181 __ SmiUntag(string_length);
4064 __ add(string, 4182 __ addi(string, string,
4065 string, 4183 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4066 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4184 __ CopyBytes(string, result_pos, string_length, scratch1);
4067 __ CopyBytes(string, result_pos, string_length, scratch);
4068 __ cmp(element, elements_end); 4185 __ cmp(element, elements_end);
4069 __ b(lt, &empty_separator_loop); // End while (element < elements_end). 4186 __ blt(&empty_separator_loop); // End while (element < elements_end).
4070 DCHECK(result.is(r0)); 4187 DCHECK(result.is(r3));
4071 __ b(&done); 4188 __ b(&done);
4072 4189
4073 // One-character separator case 4190 // One-character separator case
4074 __ bind(&one_char_separator); 4191 __ bind(&one_char_separator);
4075 // Replace separator with its one-byte character value. 4192 // Replace separator with its one-byte character value.
4076 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 4193 __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4077 // Jump into the loop after the code that copies the separator, so the first 4194 // Jump into the loop after the code that copies the separator, so the first
4078 // element is not preceded by a separator 4195 // element is not preceded by a separator
4079 __ jmp(&one_char_separator_loop_entry); 4196 __ b(&one_char_separator_loop_entry);
4080 4197
4081 __ bind(&one_char_separator_loop); 4198 __ bind(&one_char_separator_loop);
4082 // Live values in registers: 4199 // Live values in registers:
4083 // result_pos: the position to which we are currently copying characters. 4200 // result_pos: the position to which we are currently copying characters.
4084 // element: Current array element. 4201 // element: Current array element.
4085 // elements_end: Array end. 4202 // elements_end: Array end.
4086 // separator: Single separator one-byte char (in lower byte). 4203 // separator: Single separator one-byte char (in lower byte).
4087 4204
4088 // Copy the separator character to the result. 4205 // Copy the separator character to the result.
4089 __ strb(separator, MemOperand(result_pos, 1, PostIndex)); 4206 __ stb(separator, MemOperand(result_pos));
4207 __ addi(result_pos, result_pos, Operand(1));
4090 4208
4091 // Copy next array element to the result. 4209 // Copy next array element to the result.
4092 __ bind(&one_char_separator_loop_entry); 4210 __ bind(&one_char_separator_loop_entry);
4093 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4211 __ LoadP(string, MemOperand(element));
4094 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4212 __ addi(element, element, Operand(kPointerSize));
4213 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4095 __ SmiUntag(string_length); 4214 __ SmiUntag(string_length);
4096 __ add(string, 4215 __ addi(string, string,
4097 string, 4216 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4098 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4217 __ CopyBytes(string, result_pos, string_length, scratch1);
4099 __ CopyBytes(string, result_pos, string_length, scratch); 4218 __ cmpl(element, elements_end);
4100 __ cmp(element, elements_end); 4219 __ blt(&one_char_separator_loop); // End while (element < elements_end).
4101 __ b(lt, &one_char_separator_loop); // End while (element < elements_end). 4220 DCHECK(result.is(r3));
4102 DCHECK(result.is(r0));
4103 __ b(&done); 4221 __ b(&done);
4104 4222
4105 // Long separator case (separator is more than one character). Entry is at the 4223 // Long separator case (separator is more than one character). Entry is at the
4106 // label long_separator below. 4224 // label long_separator below.
4107 __ bind(&long_separator_loop); 4225 __ bind(&long_separator_loop);
4108 // Live values in registers: 4226 // Live values in registers:
4109 // result_pos: the position to which we are currently copying characters. 4227 // result_pos: the position to which we are currently copying characters.
4110 // element: Current array element. 4228 // element: Current array element.
4111 // elements_end: Array end. 4229 // elements_end: Array end.
4112 // separator: Separator string. 4230 // separator: Separator string.
4113 4231
4114 // Copy the separator to the result. 4232 // Copy the separator to the result.
4115 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); 4233 __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
4116 __ SmiUntag(string_length); 4234 __ SmiUntag(string_length);
4117 __ add(string, 4235 __ addi(string, separator,
4118 separator, 4236 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4119 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4237 __ CopyBytes(string, result_pos, string_length, scratch1);
4120 __ CopyBytes(string, result_pos, string_length, scratch);
4121 4238
4122 __ bind(&long_separator); 4239 __ bind(&long_separator);
4123 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4240 __ LoadP(string, MemOperand(element));
4124 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4241 __ addi(element, element, Operand(kPointerSize));
4242 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4125 __ SmiUntag(string_length); 4243 __ SmiUntag(string_length);
4126 __ add(string, 4244 __ addi(string, string,
4127 string, 4245 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4128 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4246 __ CopyBytes(string, result_pos, string_length, scratch1);
4129 __ CopyBytes(string, result_pos, string_length, scratch); 4247 __ cmpl(element, elements_end);
4130 __ cmp(element, elements_end); 4248 __ blt(&long_separator_loop); // End while (element < elements_end).
4131 __ b(lt, &long_separator_loop); // End while (element < elements_end). 4249 DCHECK(result.is(r3));
4132 DCHECK(result.is(r0));
4133 __ b(&done); 4250 __ b(&done);
4134 4251
4135 __ bind(&bailout); 4252 __ bind(&bailout);
4136 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 4253 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4137 __ bind(&done); 4254 __ bind(&done);
4138 context()->Plug(r0); 4255 context()->Plug(r3);
4139 } 4256 }
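Note on the addressing-mode changes in this join sequence: PPC has no post-indexed memory operands, so each ARM strb/ldr with PostIndex becomes an explicit stb/LoadP followed by an addi that advances the pointer. A minimal C++ sketch of the semantics being preserved (hypothetical helper name, not V8 code):

    // ARM:  strb separator, [result_pos], #1   (store, then bump the pointer)
    // PPC:  stb separator, 0(result_pos) ; addi result_pos, result_pos, 1
    inline void StoreBytePostIndex(unsigned char*& result_pos,
                                   unsigned char separator) {
      *result_pos = separator;  // the stb
      ++result_pos;             // the addi
    }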
4140 4257
4141 4258
4142 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 4259 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4143 DCHECK(expr->arguments()->length() == 0); 4260 DCHECK(expr->arguments()->length() == 0);
4144 ExternalReference debug_is_active = 4261 ExternalReference debug_is_active =
4145 ExternalReference::debug_is_active_address(isolate()); 4262 ExternalReference::debug_is_active_address(isolate());
4146 __ mov(ip, Operand(debug_is_active)); 4263 __ mov(ip, Operand(debug_is_active));
4147 __ ldrb(r0, MemOperand(ip)); 4264 __ lbz(r3, MemOperand(ip));
4148 __ SmiTag(r0); 4265 __ SmiTag(r3);
4149 context()->Plug(r0); 4266 context()->Plug(r3);
4150 } 4267 }
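EmitDebugIsActive reads the isolate's debug-is-active byte through an external reference (lbz replacing ARM's ldrb) and returns it smi-tagged in r3. A sketch of the tagging step, assuming a smi tag shift of 1 as on 32-bit targets; ppc64 builds with 32-bit smis shift by 32 instead:

    #include <cstdint>
    // Hypothetical illustration, not the V8 API.
    inline intptr_t SmiTagByte(uint8_t flag) {
      return static_cast<intptr_t>(flag) << 1;  // kSmiTag == 0, kSmiTagSize == 1
    }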
4151 4268
4152 4269
4153 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 4270 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4154 if (expr->function() != NULL && 4271 if (expr->function() != NULL &&
4155 expr->function()->intrinsic_type == Runtime::INLINE) { 4272 expr->function()->intrinsic_type == Runtime::INLINE) {
4156 Comment cmnt(masm_, "[ InlineRuntimeCall"); 4273 Comment cmnt(masm_, "[ InlineRuntimeCall");
4157 EmitInlineRuntimeCall(expr); 4274 EmitInlineRuntimeCall(expr);
4158 return; 4275 return;
4159 } 4276 }
4160 4277
4161 Comment cmnt(masm_, "[ CallRuntime"); 4278 Comment cmnt(masm_, "[ CallRuntime");
4162 ZoneList<Expression*>* args = expr->arguments(); 4279 ZoneList<Expression*>* args = expr->arguments();
4163 int arg_count = args->length(); 4280 int arg_count = args->length();
4164 4281
4165 if (expr->is_jsruntime()) { 4282 if (expr->is_jsruntime()) {
4166 // Push the builtins object as the receiver. 4283 // Push the builtins object as the receiver.
4167 Register receiver = LoadDescriptor::ReceiverRegister(); 4284 Register receiver = LoadDescriptor::ReceiverRegister();
4168 __ ldr(receiver, GlobalObjectOperand()); 4285 __ LoadP(receiver, GlobalObjectOperand());
4169 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset)); 4286 __ LoadP(receiver,
4287 FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4170 __ push(receiver); 4288 __ push(receiver);
4171 4289
4172 // Load the function from the receiver. 4290 // Load the function from the receiver.
4173 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name())); 4291 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4174 if (FLAG_vector_ics) { 4292 if (FLAG_vector_ics) {
4175 __ mov(VectorLoadICDescriptor::SlotRegister(), 4293 __ mov(VectorLoadICDescriptor::SlotRegister(),
4176 Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot()))); 4294 Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4177 CallLoadIC(NOT_CONTEXTUAL); 4295 CallLoadIC(NOT_CONTEXTUAL);
4178 } else { 4296 } else {
4179 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); 4297 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4180 } 4298 }
4181 4299
4182 // Push the target function under the receiver. 4300 // Push the target function under the receiver.
4183 __ ldr(ip, MemOperand(sp, 0)); 4301 __ LoadP(ip, MemOperand(sp, 0));
4184 __ push(ip); 4302 __ push(ip);
4185 __ str(r0, MemOperand(sp, kPointerSize)); 4303 __ StoreP(r3, MemOperand(sp, kPointerSize));
4186 4304
4187 // Push the arguments ("left-to-right"). 4305 // Push the arguments ("left-to-right").
4188 int arg_count = args->length(); 4306 int arg_count = args->length();
4189 for (int i = 0; i < arg_count; i++) { 4307 for (int i = 0; i < arg_count; i++) {
4190 VisitForStackValue(args->at(i)); 4308 VisitForStackValue(args->at(i));
4191 } 4309 }
4192 4310
4193 // Record source position of the IC call. 4311 // Record source position of the IC call.
4194 SetSourcePosition(expr->position()); 4312 SetSourcePosition(expr->position());
4195 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); 4313 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4196 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 4314 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
4197 __ CallStub(&stub); 4315 __ CallStub(&stub);
4198 4316
4199 // Restore context register. 4317 // Restore context register.
4200 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4318 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4201 4319
4202 context()->DropAndPlug(1, r0); 4320 context()->DropAndPlug(1, r3);
4203 } else { 4321 } else {
4204 // Push the arguments ("left-to-right"). 4322 // Push the arguments ("left-to-right").
4205 for (int i = 0; i < arg_count; i++) { 4323 for (int i = 0; i < arg_count; i++) {
4206 VisitForStackValue(args->at(i)); 4324 VisitForStackValue(args->at(i));
4207 } 4325 }
4208 4326
4209 // Call the C runtime function. 4327 // Call the C runtime function.
4210 __ CallRuntime(expr->function(), arg_count); 4328 __ CallRuntime(expr->function(), arg_count);
4211 context()->Plug(r0); 4329 context()->Plug(r3);
4212 } 4330 }
4213 } 4331 }
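In the jsruntime path above, the target function is stored under the receiver, so once arg_count arguments are on the stack it sits (arg_count + 1) slots below sp; that is the offset the LoadP into r4 uses to reload it before CallFunctionStub. A small sketch of the layout arithmetic (hypothetical names, not the V8 constants):

    #include <cstddef>
    constexpr std::size_t kPtrSize = sizeof(void*);
    // Pushed in order: function, receiver, then arg_count arguments,
    // so the function ends up deepest on the stack.
    constexpr std::size_t TargetFunctionOffset(int arg_count) {
      return (arg_count + 1) * kPtrSize;
    }
    static_assert(TargetFunctionOffset(0) == kPtrSize, "only the receiver above it");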
4214 4332
4215 4333
4216 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 4334 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4217 switch (expr->op()) { 4335 switch (expr->op()) {
4218 case Token::DELETE: { 4336 case Token::DELETE: {
4219 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 4337 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4220 Property* property = expr->expression()->AsProperty(); 4338 Property* property = expr->expression()->AsProperty();
4221 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 4339 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4222 4340
4223 if (property != NULL) { 4341 if (property != NULL) {
4224 VisitForStackValue(property->obj()); 4342 VisitForStackValue(property->obj());
4225 VisitForStackValue(property->key()); 4343 VisitForStackValue(property->key());
4226 __ mov(r1, Operand(Smi::FromInt(strict_mode()))); 4344 __ LoadSmiLiteral(r4, Smi::FromInt(strict_mode()));
4227 __ push(r1); 4345 __ push(r4);
4228 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4346 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4229 context()->Plug(r0); 4347 context()->Plug(r3);
4230 } else if (proxy != NULL) { 4348 } else if (proxy != NULL) {
4231 Variable* var = proxy->var(); 4349 Variable* var = proxy->var();
4232 // Delete of an unqualified identifier is disallowed in strict mode 4350 // Delete of an unqualified identifier is disallowed in strict mode
4233 // but "delete this" is allowed. 4351 // but "delete this" is allowed.
4234 DCHECK(strict_mode() == SLOPPY || var->is_this()); 4352 DCHECK(strict_mode() == SLOPPY || var->is_this());
4235 if (var->IsUnallocated()) { 4353 if (var->IsUnallocated()) {
4236 __ ldr(r2, GlobalObjectOperand()); 4354 __ LoadP(r5, GlobalObjectOperand());
4237 __ mov(r1, Operand(var->name())); 4355 __ mov(r4, Operand(var->name()));
4238 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); 4356 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
4239 __ Push(r2, r1, r0); 4357 __ Push(r5, r4, r3);
4240 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4358 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4241 context()->Plug(r0); 4359 context()->Plug(r3);
4242 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4360 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4243 // Result of deleting non-global, non-dynamic variables is false. 4361 // Result of deleting non-global, non-dynamic variables is false.
4244 // The subexpression does not have side effects. 4362 // The subexpression does not have side effects.
4245 context()->Plug(var->is_this()); 4363 context()->Plug(var->is_this());
4246 } else { 4364 } else {
4247 // Non-global variable. Call the runtime to try to delete from the 4365 // Non-global variable. Call the runtime to try to delete from the
4248 // context where the variable was introduced. 4366 // context where the variable was introduced.
4249 DCHECK(!context_register().is(r2)); 4367 DCHECK(!context_register().is(r5));
4250 __ mov(r2, Operand(var->name())); 4368 __ mov(r5, Operand(var->name()));
4251 __ Push(context_register(), r2); 4369 __ Push(context_register(), r5);
4252 __ CallRuntime(Runtime::kDeleteLookupSlot, 2); 4370 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4253 context()->Plug(r0); 4371 context()->Plug(r3);
4254 } 4372 }
4255 } else { 4373 } else {
4256 // Result of deleting non-property, non-variable reference is true. 4374 // Result of deleting non-property, non-variable reference is true.
4257 // The subexpression may have side effects. 4375 // The subexpression may have side effects.
4258 VisitForEffect(expr->expression()); 4376 VisitForEffect(expr->expression());
4259 context()->Plug(true); 4377 context()->Plug(true);
4260 } 4378 }
4261 break; 4379 break;
4262 } 4380 }
4263 4381
4264 case Token::VOID: { 4382 case Token::VOID: {
4265 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 4383 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4266 VisitForEffect(expr->expression()); 4384 VisitForEffect(expr->expression());
4267 context()->Plug(Heap::kUndefinedValueRootIndex); 4385 context()->Plug(Heap::kUndefinedValueRootIndex);
4268 break; 4386 break;
4269 } 4387 }
4270 4388
4271 case Token::NOT: { 4389 case Token::NOT: {
4272 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 4390 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4273 if (context()->IsEffect()) { 4391 if (context()->IsEffect()) {
4274 // Unary NOT has no side effects so it's only necessary to visit the 4392 // Unary NOT has no side effects so it's only necessary to visit the
4275 // subexpression. Match the optimizing compiler by not branching. 4393 // subexpression. Match the optimizing compiler by not branching.
4276 VisitForEffect(expr->expression()); 4394 VisitForEffect(expr->expression());
4277 } else if (context()->IsTest()) { 4395 } else if (context()->IsTest()) {
4278 const TestContext* test = TestContext::cast(context()); 4396 const TestContext* test = TestContext::cast(context());
4279 // The labels are swapped for the recursive call. 4397 // The labels are swapped for the recursive call.
4280 VisitForControl(expr->expression(), 4398 VisitForControl(expr->expression(), test->false_label(),
4281 test->false_label(), 4399 test->true_label(), test->fall_through());
4282 test->true_label(),
4283 test->fall_through());
4284 context()->Plug(test->true_label(), test->false_label()); 4400 context()->Plug(test->true_label(), test->false_label());
4285 } else { 4401 } else {
4286 // We handle value contexts explicitly rather than simply visiting 4402 // We handle value contexts explicitly rather than simply visiting
4287 // for control and plugging the control flow into the context, 4403 // for control and plugging the control flow into the context,
4288 // because we need to prepare a pair of extra administrative AST ids 4404 // because we need to prepare a pair of extra administrative AST ids
4289 // for the optimizing compiler. 4405 // for the optimizing compiler.
4290 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); 4406 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4291 Label materialize_true, materialize_false, done; 4407 Label materialize_true, materialize_false, done;
4292 VisitForControl(expr->expression(), 4408 VisitForControl(expr->expression(), &materialize_false,
4293 &materialize_false, 4409 &materialize_true, &materialize_true);
4294 &materialize_true,
4295 &materialize_true);
4296 __ bind(&materialize_true); 4410 __ bind(&materialize_true);
4297 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 4411 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4298 __ LoadRoot(r0, Heap::kTrueValueRootIndex); 4412 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
4299 if (context()->IsStackValue()) __ push(r0); 4413 if (context()->IsStackValue()) __ push(r3);
4300 __ jmp(&done); 4414 __ b(&done);
4301 __ bind(&materialize_false); 4415 __ bind(&materialize_false);
4302 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); 4416 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4303 __ LoadRoot(r0, Heap::kFalseValueRootIndex); 4417 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
4304 if (context()->IsStackValue()) __ push(r0); 4418 if (context()->IsStackValue()) __ push(r3);
4305 __ bind(&done); 4419 __ bind(&done);
4306 } 4420 }
4307 break; 4421 break;
4308 } 4422 }
4309 4423
4310 case Token::TYPEOF: { 4424 case Token::TYPEOF: {
4311 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 4425 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4312 { StackValueContext context(this); 4426 {
4427 StackValueContext context(this);
4313 VisitForTypeofValue(expr->expression()); 4428 VisitForTypeofValue(expr->expression());
4314 } 4429 }
4315 __ CallRuntime(Runtime::kTypeof, 1); 4430 __ CallRuntime(Runtime::kTypeof, 1);
4316 context()->Plug(r0); 4431 context()->Plug(r3);
4317 break; 4432 break;
4318 } 4433 }
4319 4434
4320 default: 4435 default:
4321 UNREACHABLE(); 4436 UNREACHABLE();
4322 } 4437 }
4323 } 4438 }
4324 4439
4325 4440
4326 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4441 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4327 DCHECK(expr->expression()->IsValidReferenceExpression()); 4442 DCHECK(expr->expression()->IsValidReferenceExpression());
4328 4443
4329 Comment cmnt(masm_, "[ CountOperation"); 4444 Comment cmnt(masm_, "[ CountOperation");
4330 SetSourcePosition(expr->position()); 4445 SetSourcePosition(expr->position());
4331 4446
4332 // Expression can only be a property, a global or a (parameter or local) 4447 // Expression can only be a property, a global or a (parameter or local)
4333 // slot. 4448 // slot.
4334 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 4449 enum LhsKind {
4450 VARIABLE,
4451 NAMED_PROPERTY,
4452 KEYED_PROPERTY,
4453 NAMED_SUPER_PROPERTY
4454 };
4335 LhsKind assign_type = VARIABLE; 4455 LhsKind assign_type = VARIABLE;
4336 Property* prop = expr->expression()->AsProperty(); 4456 Property* prop = expr->expression()->AsProperty();
4337 // In case of a property we use the uninitialized expression context 4457 // In case of a property we use the uninitialized expression context
4338 // of the key to detect a named property. 4458 // of the key to detect a named property.
4339 if (prop != NULL) { 4459 if (prop != NULL) {
4340 assign_type = 4460 assign_type =
4341 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY; 4461 (prop->key()->IsPropertyName())
4462 ? (prop->IsSuperAccess() ? NAMED_SUPER_PROPERTY : NAMED_PROPERTY)
4463 : KEYED_PROPERTY;
4342 } 4464 }
4343 4465
4344 // Evaluate expression and get value. 4466 // Evaluate expression and get value.
4345 if (assign_type == VARIABLE) { 4467 if (assign_type == VARIABLE) {
4346 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL); 4468 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4347 AccumulatorValueContext context(this); 4469 AccumulatorValueContext context(this);
4348 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4470 EmitVariableLoad(expr->expression()->AsVariableProxy());
4349 } else { 4471 } else {
4350 // Reserve space for result of postfix operation. 4472 // Reserve space for result of postfix operation.
4351 if (expr->is_postfix() && !context()->IsEffect()) { 4473 if (expr->is_postfix() && !context()->IsEffect()) {
4352 __ mov(ip, Operand(Smi::FromInt(0))); 4474 __ LoadSmiLiteral(ip, Smi::FromInt(0));
4353 __ push(ip); 4475 __ push(ip);
4354 } 4476 }
4355 if (assign_type == NAMED_PROPERTY) { 4477 if (assign_type == NAMED_PROPERTY) {
4356 // Put the object both on the stack and in the register. 4478 // Put the object both on the stack and in the register.
4357 VisitForStackValue(prop->obj()); 4479 VisitForStackValue(prop->obj());
4358 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 4480 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4359 EmitNamedPropertyLoad(prop); 4481 EmitNamedPropertyLoad(prop);
4482 } else if (assign_type == NAMED_SUPER_PROPERTY) {
4483 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4484 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4485 __ Push(result_register());
4486 const Register scratch = r4;
4487 __ LoadP(scratch, MemOperand(sp, kPointerSize));
4488 __ Push(scratch);
4489 __ Push(result_register());
4490 EmitNamedSuperPropertyLoad(prop);
4360 } else { 4491 } else {
4361 VisitForStackValue(prop->obj()); 4492 VisitForStackValue(prop->obj());
4362 VisitForStackValue(prop->key()); 4493 VisitForStackValue(prop->key());
4363 __ ldr(LoadDescriptor::ReceiverRegister(), 4494 __ LoadP(LoadDescriptor::ReceiverRegister(),
4364 MemOperand(sp, 1 * kPointerSize)); 4495 MemOperand(sp, 1 * kPointerSize));
4365 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 4496 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4366 EmitKeyedPropertyLoad(prop); 4497 EmitKeyedPropertyLoad(prop);
4367 } 4498 }
4368 } 4499 }
4369 4500
4370 // We need a second deoptimization point after loading the value 4501 // We need a second deoptimization point after loading the value
4371 // in case evaluating the property load may have a side effect. 4502 // in case evaluating the property load may have a side effect.
4372 if (assign_type == VARIABLE) { 4503 if (assign_type == VARIABLE) {
4373 PrepareForBailout(expr->expression(), TOS_REG); 4504 PrepareForBailout(expr->expression(), TOS_REG);
4374 } else { 4505 } else {
4375 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 4506 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4376 } 4507 }
4377 4508
4378 // Inline smi case if we are in a loop. 4509 // Inline smi case if we are in a loop.
4379 Label stub_call, done; 4510 Label stub_call, done;
4380 JumpPatchSite patch_site(masm_); 4511 JumpPatchSite patch_site(masm_);
4381 4512
4382 int count_value = expr->op() == Token::INC ? 1 : -1; 4513 int count_value = expr->op() == Token::INC ? 1 : -1;
4383 if (ShouldInlineSmiCase(expr->op())) { 4514 if (ShouldInlineSmiCase(expr->op())) {
4384 Label slow; 4515 Label slow;
4385 patch_site.EmitJumpIfNotSmi(r0, &slow); 4516 patch_site.EmitJumpIfNotSmi(r3, &slow);
4386 4517
4387 // Save result for postfix expressions. 4518 // Save result for postfix expressions.
4388 if (expr->is_postfix()) { 4519 if (expr->is_postfix()) {
4389 if (!context()->IsEffect()) { 4520 if (!context()->IsEffect()) {
4390 // Save the result on the stack. If we have a named or keyed property 4521 // Save the result on the stack. If we have a named or keyed property
4391 // we store the result under the receiver that is currently on top 4522 // we store the result under the receiver that is currently on top
4392 // of the stack. 4523 // of the stack.
4393 switch (assign_type) { 4524 switch (assign_type) {
4394 case VARIABLE: 4525 case VARIABLE:
4395 __ push(r0); 4526 __ push(r3);
4396 break; 4527 break;
4397 case NAMED_PROPERTY: 4528 case NAMED_PROPERTY:
4398 __ str(r0, MemOperand(sp, kPointerSize)); 4529 __ StoreP(r3, MemOperand(sp, kPointerSize));
4530 break;
4531 case NAMED_SUPER_PROPERTY:
4532 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4399 break; 4533 break;
4400 case KEYED_PROPERTY: 4534 case KEYED_PROPERTY:
4401 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4535 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4402 break; 4536 break;
4403 } 4537 }
4404 } 4538 }
4405 } 4539 }
4406 4540
4407 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); 4541 Register scratch1 = r4;
4408 __ b(vc, &done); 4542 Register scratch2 = r5;
4543 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
4544 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
4545 __ BranchOnNoOverflow(&done);
4409 // Call stub. Undo operation first. 4546 // Call stub. Undo operation first.
4410 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 4547 __ sub(r3, r3, scratch1);
4411 __ jmp(&stub_call); 4548 __ b(&stub_call);
4412 __ bind(&slow); 4549 __ bind(&slow);
4413 } 4550 }
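ARM's add with SetCC plus b(vc, ...) has no direct PPC counterpart, so the inline smi increment materializes the smi delta and goes through the AddAndCheckForOverflow/BranchOnNoOverflow macros, undoing the add with sub before falling back to the stub. Conceptually (a portable C++ sketch using a GCC/Clang builtin, not the macro-assembler API):

    #include <cstdint>
    // Returns true and updates *smi when the add does not overflow; on
    // overflow *smi is left unchanged, matching the undo `sub` above.
    inline bool SmiAddNoOverflow(intptr_t* smi, intptr_t smi_delta) {
      intptr_t result;
      if (__builtin_add_overflow(*smi, smi_delta, &result)) return false;
      *smi = result;
      return true;
    }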
4414 ToNumberStub convert_stub(isolate()); 4551 ToNumberStub convert_stub(isolate());
4415 __ CallStub(&convert_stub); 4552 __ CallStub(&convert_stub);
4416 4553
4417 // Save result for postfix expressions. 4554 // Save result for postfix expressions.
4418 if (expr->is_postfix()) { 4555 if (expr->is_postfix()) {
4419 if (!context()->IsEffect()) { 4556 if (!context()->IsEffect()) {
4420 // Save the result on the stack. If we have a named or keyed property 4557 // Save the result on the stack. If we have a named or keyed property
4421 // we store the result under the receiver that is currently on top 4558 // we store the result under the receiver that is currently on top
4422 // of the stack. 4559 // of the stack.
4423 switch (assign_type) { 4560 switch (assign_type) {
4424 case VARIABLE: 4561 case VARIABLE:
4425 __ push(r0); 4562 __ push(r3);
4426 break; 4563 break;
4427 case NAMED_PROPERTY: 4564 case NAMED_PROPERTY:
4428 __ str(r0, MemOperand(sp, kPointerSize)); 4565 __ StoreP(r3, MemOperand(sp, kPointerSize));
4566 break;
4567 case NAMED_SUPER_PROPERTY:
4568 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4429 break; 4569 break;
4430 case KEYED_PROPERTY: 4570 case KEYED_PROPERTY:
4431 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4571 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4432 break; 4572 break;
4433 } 4573 }
4434 } 4574 }
4435 } 4575 }
4436 4576
4437
4438 __ bind(&stub_call); 4577 __ bind(&stub_call);
4439 __ mov(r1, r0); 4578 __ mr(r4, r3);
4440 __ mov(r0, Operand(Smi::FromInt(count_value))); 4579 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
4441 4580
4442 // Record position before stub call. 4581 // Record position before stub call.
4443 SetSourcePosition(expr->position()); 4582 SetSourcePosition(expr->position());
4444 4583
4445 Handle<Code> code = 4584 Handle<Code> code =
4446 CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code(); 4585 CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4447 CallIC(code, expr->CountBinOpFeedbackId()); 4586 CallIC(code, expr->CountBinOpFeedbackId());
4448 patch_site.EmitPatchInfo(); 4587 patch_site.EmitPatchInfo();
4449 __ bind(&done); 4588 __ bind(&done);
4450 4589
4451 // Store the value returned in r0. 4590 // Store the value returned in r3.
4452 switch (assign_type) { 4591 switch (assign_type) {
4453 case VARIABLE: 4592 case VARIABLE:
4454 if (expr->is_postfix()) { 4593 if (expr->is_postfix()) {
4455 { EffectContext context(this); 4594 {
4595 EffectContext context(this);
4456 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4596 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4457 Token::ASSIGN); 4597 Token::ASSIGN);
4458 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4598 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4459 context.Plug(r0); 4599 context.Plug(r3);
4460 } 4600 }
4461 // For all contexts except EffectContext we have the result on 4601 // For all contexts except EffectContext we have the result on
4462 // top of the stack. 4602 // top of the stack.
4463 if (!context()->IsEffect()) { 4603 if (!context()->IsEffect()) {
4464 context()->PlugTOS(); 4604 context()->PlugTOS();
4465 } 4605 }
4466 } else { 4606 } else {
4467 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4607 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4468 Token::ASSIGN); 4608 Token::ASSIGN);
4469 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4609 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4470 context()->Plug(r0); 4610 context()->Plug(r3);
4471 } 4611 }
4472 break; 4612 break;
4473 case NAMED_PROPERTY: { 4613 case NAMED_PROPERTY: {
4474 __ mov(StoreDescriptor::NameRegister(), 4614 __ mov(StoreDescriptor::NameRegister(),
4475 Operand(prop->key()->AsLiteral()->value())); 4615 Operand(prop->key()->AsLiteral()->value()));
4476 __ pop(StoreDescriptor::ReceiverRegister()); 4616 __ pop(StoreDescriptor::ReceiverRegister());
4477 CallStoreIC(expr->CountStoreFeedbackId()); 4617 CallStoreIC(expr->CountStoreFeedbackId());
4478 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4618 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4479 if (expr->is_postfix()) { 4619 if (expr->is_postfix()) {
4480 if (!context()->IsEffect()) { 4620 if (!context()->IsEffect()) {
4481 context()->PlugTOS(); 4621 context()->PlugTOS();
4482 } 4622 }
4483 } else { 4623 } else {
4484 context()->Plug(r0); 4624 context()->Plug(r3);
4485 } 4625 }
4486 break; 4626 break;
4487 } 4627 }
4628 case NAMED_SUPER_PROPERTY: {
4629 EmitNamedSuperPropertyStore(prop);
4630 if (expr->is_postfix()) {
4631 if (!context()->IsEffect()) {
4632 context()->PlugTOS();
4633 }
4634 } else {
4635 context()->Plug(r3);
4636 }
4637 break;
4638 }
4488 case KEYED_PROPERTY: { 4639 case KEYED_PROPERTY: {
4489 __ Pop(StoreDescriptor::ReceiverRegister(), 4640 __ Pop(StoreDescriptor::ReceiverRegister(),
4490 StoreDescriptor::NameRegister()); 4641 StoreDescriptor::NameRegister());
4491 Handle<Code> ic = 4642 Handle<Code> ic =
4492 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 4643 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4493 CallIC(ic, expr->CountStoreFeedbackId()); 4644 CallIC(ic, expr->CountStoreFeedbackId());
4494 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4645 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4495 if (expr->is_postfix()) { 4646 if (expr->is_postfix()) {
4496 if (!context()->IsEffect()) { 4647 if (!context()->IsEffect()) {
4497 context()->PlugTOS(); 4648 context()->PlugTOS();
4498 } 4649 }
4499 } else { 4650 } else {
4500 context()->Plug(r0); 4651 context()->Plug(r3);
4501 } 4652 }
4502 break; 4653 break;
4503 } 4654 }
4504 } 4655 }
4505 } 4656 }
4506 4657
4507 4658
4508 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4659 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4509 DCHECK(!context()->IsEffect()); 4660 DCHECK(!context()->IsEffect());
4510 DCHECK(!context()->IsTest()); 4661 DCHECK(!context()->IsTest());
4511 VariableProxy* proxy = expr->AsVariableProxy(); 4662 VariableProxy* proxy = expr->AsVariableProxy();
4512 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4663 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4513 Comment cmnt(masm_, "[ Global variable"); 4664 Comment cmnt(masm_, "[ Global variable");
4514 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 4665 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4515 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name())); 4666 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4516 if (FLAG_vector_ics) { 4667 if (FLAG_vector_ics) {
4517 __ mov(VectorLoadICDescriptor::SlotRegister(), 4668 __ mov(VectorLoadICDescriptor::SlotRegister(),
4518 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 4669 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4519 } 4670 }
4520 // Use a regular load, not a contextual load, to avoid a reference 4671 // Use a regular load, not a contextual load, to avoid a reference
4521 // error. 4672 // error.
4522 CallLoadIC(NOT_CONTEXTUAL); 4673 CallLoadIC(NOT_CONTEXTUAL);
4523 PrepareForBailout(expr, TOS_REG); 4674 PrepareForBailout(expr, TOS_REG);
4524 context()->Plug(r0); 4675 context()->Plug(r3);
4525 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4676 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4526 Comment cmnt(masm_, "[ Lookup slot"); 4677 Comment cmnt(masm_, "[ Lookup slot");
4527 Label done, slow; 4678 Label done, slow;
4528 4679
4529 // Generate code for loading from variables potentially shadowed 4680 // Generate code for loading from variables potentially shadowed
4530 // by eval-introduced variables. 4681 // by eval-introduced variables.
4531 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done); 4682 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4532 4683
4533 __ bind(&slow); 4684 __ bind(&slow);
4534 __ mov(r0, Operand(proxy->name())); 4685 __ mov(r3, Operand(proxy->name()));
4535 __ Push(cp, r0); 4686 __ Push(cp, r3);
4536 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2); 4687 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4537 PrepareForBailout(expr, TOS_REG); 4688 PrepareForBailout(expr, TOS_REG);
4538 __ bind(&done); 4689 __ bind(&done);
4539 4690
4540 context()->Plug(r0); 4691 context()->Plug(r3);
4541 } else { 4692 } else {
4542 // This expression cannot throw a reference error at the top level. 4693 // This expression cannot throw a reference error at the top level.
4543 VisitInDuplicateContext(expr); 4694 VisitInDuplicateContext(expr);
4544 } 4695 }
4545 } 4696 }
4546 4697
4547 4698
4548 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 4699 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4549 Expression* sub_expr, 4700 Expression* sub_expr,
4550 Handle<String> check) { 4701 Handle<String> check) {
4551 Label materialize_true, materialize_false; 4702 Label materialize_true, materialize_false;
4552 Label* if_true = NULL; 4703 Label* if_true = NULL;
4553 Label* if_false = NULL; 4704 Label* if_false = NULL;
4554 Label* fall_through = NULL; 4705 Label* fall_through = NULL;
4555 context()->PrepareTest(&materialize_true, &materialize_false, 4706 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4556 &if_true, &if_false, &fall_through); 4707 &if_false, &fall_through);
4557 4708
4558 { AccumulatorValueContext context(this); 4709 {
4710 AccumulatorValueContext context(this);
4559 VisitForTypeofValue(sub_expr); 4711 VisitForTypeofValue(sub_expr);
4560 } 4712 }
4561 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4713 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4562 4714
4563 Factory* factory = isolate()->factory(); 4715 Factory* factory = isolate()->factory();
4564 if (String::Equals(check, factory->number_string())) { 4716 if (String::Equals(check, factory->number_string())) {
4565 __ JumpIfSmi(r0, if_true); 4717 __ JumpIfSmi(r3, if_true);
4566 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4718 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4567 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 4719 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4568 __ cmp(r0, ip); 4720 __ cmp(r3, ip);
4569 Split(eq, if_true, if_false, fall_through); 4721 Split(eq, if_true, if_false, fall_through);
4570 } else if (String::Equals(check, factory->string_string())) { 4722 } else if (String::Equals(check, factory->string_string())) {
4571 __ JumpIfSmi(r0, if_false); 4723 __ JumpIfSmi(r3, if_false);
4572 // Check for undetectable objects => false. 4724 // Check for undetectable objects => false.
4573 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); 4725 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
4574 __ b(ge, if_false); 4726 __ bge(if_false);
4575 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4727 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4576 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4728 STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
4577 Split(eq, if_true, if_false, fall_through); 4729 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4730 Split(eq, if_true, if_false, fall_through, cr0);
4578 } else if (String::Equals(check, factory->symbol_string())) { 4731 } else if (String::Equals(check, factory->symbol_string())) {
4579 __ JumpIfSmi(r0, if_false); 4732 __ JumpIfSmi(r3, if_false);
4580 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE); 4733 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
4581 Split(eq, if_true, if_false, fall_through); 4734 Split(eq, if_true, if_false, fall_through);
4582 } else if (String::Equals(check, factory->boolean_string())) { 4735 } else if (String::Equals(check, factory->boolean_string())) {
4583 __ CompareRoot(r0, Heap::kTrueValueRootIndex); 4736 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
4584 __ b(eq, if_true); 4737 __ beq(if_true);
4585 __ CompareRoot(r0, Heap::kFalseValueRootIndex); 4738 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
4586 Split(eq, if_true, if_false, fall_through); 4739 Split(eq, if_true, if_false, fall_through);
4587 } else if (String::Equals(check, factory->undefined_string())) { 4740 } else if (String::Equals(check, factory->undefined_string())) {
4588 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); 4741 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
4589 __ b(eq, if_true); 4742 __ beq(if_true);
4590 __ JumpIfSmi(r0, if_false); 4743 __ JumpIfSmi(r3, if_false);
4591 // Check for undetectable objects => true. 4744 // Check for undetectable objects => true.
4592 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4745 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4593 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4746 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4594 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4747 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4595 Split(ne, if_true, if_false, fall_through); 4748 Split(ne, if_true, if_false, fall_through, cr0);
4596 4749
4597 } else if (String::Equals(check, factory->function_string())) { 4750 } else if (String::Equals(check, factory->function_string())) {
4598 __ JumpIfSmi(r0, if_false); 4751 __ JumpIfSmi(r3, if_false);
4599 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 4752 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4600 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE); 4753 __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
4601 __ b(eq, if_true); 4754 __ beq(if_true);
4602 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE)); 4755 __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
4603 Split(eq, if_true, if_false, fall_through); 4756 Split(eq, if_true, if_false, fall_through);
4604 } else if (String::Equals(check, factory->object_string())) { 4757 } else if (String::Equals(check, factory->object_string())) {
4605 __ JumpIfSmi(r0, if_false); 4758 __ JumpIfSmi(r3, if_false);
4606 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4759 __ CompareRoot(r3, Heap::kNullValueRootIndex);
4607 __ b(eq, if_true); 4760 __ beq(if_true);
4608 // Check for JS objects => true. 4761 // Check for JS objects => true.
4609 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 4762 __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4610 __ b(lt, if_false); 4763 __ blt(if_false);
4611 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4764 __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4612 __ b(gt, if_false); 4765 __ bgt(if_false);
4613 // Check for undetectable objects => false. 4766 // Check for undetectable objects => false.
4614 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4767 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4615 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4768 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4616 Split(eq, if_true, if_false, fall_through); 4769 Split(eq, if_true, if_false, fall_through, cr0);
4617 } else { 4770 } else {
4618 if (if_false != fall_through) __ jmp(if_false); 4771 if (if_false != fall_through) __ b(if_false);
4619 } 4772 }
4620 context()->Plug(if_true, if_false); 4773 context()->Plug(if_true, if_false);
4621 } 4774 }
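The undetectable-object tests above now go through andi, which on PPC takes an unsigned 16-bit immediate and always records its result in cr0; that is why the mask must fit in 16 bits (the STATIC_ASSERT) and why Split is passed cr0 explicitly. The predicate being computed is just the following (a sketch; the bit index is a stand-in, not the real Map constant):

    #include <cstdint>
    constexpr int kIsUndetectableBit = 4;  // hypothetical stand-in for Map::kIsUndetectable
    inline bool IsUndetectable(uint8_t map_bit_field) {
      return (map_bit_field & (1u << kIsUndetectableBit)) != 0;  // andi. sets cr0
    }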
4622 4775
4623 4776
4624 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 4777 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4625 Comment cmnt(masm_, "[ CompareOperation"); 4778 Comment cmnt(masm_, "[ CompareOperation");
4626 SetSourcePosition(expr->position()); 4779 SetSourcePosition(expr->position());
4627 4780
4628 // First we try a fast inlined version of the compare when one of 4781 // First we try a fast inlined version of the compare when one of
4629 // the operands is a literal. 4782 // the operands is a literal.
4630 if (TryLiteralCompare(expr)) return; 4783 if (TryLiteralCompare(expr)) return;
4631 4784
4632 // Always perform the comparison for its control flow. Pack the result 4785 // Always perform the comparison for its control flow. Pack the result
4633 // into the expression's context after the comparison is performed. 4786 // into the expression's context after the comparison is performed.
4634 Label materialize_true, materialize_false; 4787 Label materialize_true, materialize_false;
4635 Label* if_true = NULL; 4788 Label* if_true = NULL;
4636 Label* if_false = NULL; 4789 Label* if_false = NULL;
4637 Label* fall_through = NULL; 4790 Label* fall_through = NULL;
4638 context()->PrepareTest(&materialize_true, &materialize_false, 4791 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4639 &if_true, &if_false, &fall_through); 4792 &if_false, &fall_through);
4640 4793
4641 Token::Value op = expr->op(); 4794 Token::Value op = expr->op();
4642 VisitForStackValue(expr->left()); 4795 VisitForStackValue(expr->left());
4643 switch (op) { 4796 switch (op) {
4644 case Token::IN: 4797 case Token::IN:
4645 VisitForStackValue(expr->right()); 4798 VisitForStackValue(expr->right());
4646 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4799 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4647 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); 4800 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4648 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 4801 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4649 __ cmp(r0, ip); 4802 __ cmp(r3, ip);
4650 Split(eq, if_true, if_false, fall_through); 4803 Split(eq, if_true, if_false, fall_through);
4651 break; 4804 break;
4652 4805
4653 case Token::INSTANCEOF: { 4806 case Token::INSTANCEOF: {
4654 VisitForStackValue(expr->right()); 4807 VisitForStackValue(expr->right());
4655 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); 4808 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4656 __ CallStub(&stub); 4809 __ CallStub(&stub);
4657 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4810 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4658 // The stub returns 0 for true. 4811 // The stub returns 0 for true.
4659 __ tst(r0, r0); 4812 __ cmpi(r3, Operand::Zero());
4660 Split(eq, if_true, if_false, fall_through); 4813 Split(eq, if_true, if_false, fall_through);
4661 break; 4814 break;
4662 } 4815 }
4663 4816
4664 default: { 4817 default: {
4665 VisitForAccumulatorValue(expr->right()); 4818 VisitForAccumulatorValue(expr->right());
4666 Condition cond = CompareIC::ComputeCondition(op); 4819 Condition cond = CompareIC::ComputeCondition(op);
4667 __ pop(r1); 4820 __ pop(r4);
4668 4821
4669 bool inline_smi_code = ShouldInlineSmiCase(op); 4822 bool inline_smi_code = ShouldInlineSmiCase(op);
4670 JumpPatchSite patch_site(masm_); 4823 JumpPatchSite patch_site(masm_);
4671 if (inline_smi_code) { 4824 if (inline_smi_code) {
4672 Label slow_case; 4825 Label slow_case;
4673 __ orr(r2, r0, Operand(r1)); 4826 __ orx(r5, r3, r4);
4674 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 4827 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
4675 __ cmp(r1, r0); 4828 __ cmp(r4, r3);
4676 Split(cond, if_true, if_false, NULL); 4829 Split(cond, if_true, if_false, NULL);
4677 __ bind(&slow_case); 4830 __ bind(&slow_case);
4678 } 4831 }
4679 4832
4680 // Record position and call the compare IC. 4833 // Record position and call the compare IC.
4681 SetSourcePosition(expr->position()); 4834 SetSourcePosition(expr->position());
4682 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); 4835 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4683 CallIC(ic, expr->CompareOperationFeedbackId()); 4836 CallIC(ic, expr->CompareOperationFeedbackId());
4684 patch_site.EmitPatchInfo(); 4837 patch_site.EmitPatchInfo();
4685 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4838 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4686 __ cmp(r0, Operand::Zero()); 4839 __ cmpi(r3, Operand::Zero());
4687 Split(cond, if_true, if_false, fall_through); 4840 Split(cond, if_true, if_false, fall_through);
4688 } 4841 }
4689 } 4842 }
4690 4843
4691 // Convert the result of the comparison into one expected for this 4844 // Convert the result of the comparison into one expected for this
4692 // expression's context. 4845 // expression's context.
4693 context()->Plug(if_true, if_false); 4846 context()->Plug(if_true, if_false);
4694 } 4847 }
4695 4848
4696 4849
4697 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, 4850 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4698 Expression* sub_expr, 4851 Expression* sub_expr,
4699 NilValue nil) { 4852 NilValue nil) {
4700 Label materialize_true, materialize_false; 4853 Label materialize_true, materialize_false;
4701 Label* if_true = NULL; 4854 Label* if_true = NULL;
4702 Label* if_false = NULL; 4855 Label* if_false = NULL;
4703 Label* fall_through = NULL; 4856 Label* fall_through = NULL;
4704 context()->PrepareTest(&materialize_true, &materialize_false, 4857 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4705 &if_true, &if_false, &fall_through); 4858 &if_false, &fall_through);
4706 4859
4707 VisitForAccumulatorValue(sub_expr); 4860 VisitForAccumulatorValue(sub_expr);
4708 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4861 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4709 if (expr->op() == Token::EQ_STRICT) { 4862 if (expr->op() == Token::EQ_STRICT) {
4710 Heap::RootListIndex nil_value = nil == kNullValue ? 4863 Heap::RootListIndex nil_value = nil == kNullValue
4711 Heap::kNullValueRootIndex : 4864 ? Heap::kNullValueRootIndex
4712 Heap::kUndefinedValueRootIndex; 4865 : Heap::kUndefinedValueRootIndex;
4713 __ LoadRoot(r1, nil_value); 4866 __ LoadRoot(r4, nil_value);
4714 __ cmp(r0, r1); 4867 __ cmp(r3, r4);
4715 Split(eq, if_true, if_false, fall_through); 4868 Split(eq, if_true, if_false, fall_through);
4716 } else { 4869 } else {
4717 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); 4870 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4718 CallIC(ic, expr->CompareOperationFeedbackId()); 4871 CallIC(ic, expr->CompareOperationFeedbackId());
4719 __ cmp(r0, Operand(0)); 4872 __ cmpi(r3, Operand::Zero());
4720 Split(ne, if_true, if_false, fall_through); 4873 Split(ne, if_true, if_false, fall_through);
4721 } 4874 }
4722 context()->Plug(if_true, if_false); 4875 context()->Plug(if_true, if_false);
4723 } 4876 }
4724 4877
4725 4878
4726 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 4879 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4727 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4880 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4728 context()->Plug(r0); 4881 context()->Plug(r3);
4729 } 4882 }
4730 4883
4731 4884
4732 Register FullCodeGenerator::result_register() { 4885 Register FullCodeGenerator::result_register() { return r3; }
4733 return r0;
4734 }
4735 4886
4736 4887
4737 Register FullCodeGenerator::context_register() { 4888 Register FullCodeGenerator::context_register() { return cp; }
4738 return cp;
4739 }
4740 4889
4741 4890
4742 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4891 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4743 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 4892 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
4744 __ str(value, MemOperand(fp, frame_offset)); 4893 __ StoreP(value, MemOperand(fp, frame_offset), r0);
4745 } 4894 }
4746 4895
4747 4896
4748 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4897 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4749 __ ldr(dst, ContextOperand(cp, context_index)); 4898 __ LoadP(dst, ContextOperand(cp, context_index), r0);
4750 } 4899 }
4751 4900
4752 4901
4753 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 4902 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4754 Scope* declaration_scope = scope()->DeclarationScope(); 4903 Scope* declaration_scope = scope()->DeclarationScope();
4755 if (declaration_scope->is_global_scope() || 4904 if (declaration_scope->is_global_scope() ||
4756 declaration_scope->is_module_scope()) { 4905 declaration_scope->is_module_scope()) {
4757 // Contexts nested in the native context have a canonical empty function 4906 // Contexts nested in the native context have a canonical empty function
4758 // as their closure, not the anonymous closure containing the global 4907 // as their closure, not the anonymous closure containing the global
4759 // code. Pass a smi sentinel and let the runtime look up the empty 4908 // code. Pass a smi sentinel and let the runtime look up the empty
4760 // function. 4909 // function.
4761 __ mov(ip, Operand(Smi::FromInt(0))); 4910 __ LoadSmiLiteral(ip, Smi::FromInt(0));
4762 } else if (declaration_scope->is_eval_scope()) { 4911 } else if (declaration_scope->is_eval_scope()) {
4763 // Contexts created by a call to eval have the same closure as the 4912 // Contexts created by a call to eval have the same closure as the
4764 // context calling eval, not the anonymous closure containing the eval 4913 // context calling eval, not the anonymous closure containing the eval
4765 // code. Fetch it from the context. 4914 // code. Fetch it from the context.
4766 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX)); 4915 __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4767 } else { 4916 } else {
4768 DCHECK(declaration_scope->is_function_scope()); 4917 DCHECK(declaration_scope->is_function_scope());
4769 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4918 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4770 } 4919 }
4771 __ push(ip); 4920 __ push(ip);
4772 } 4921 }
4773 4922
4774 4923
4775 // ---------------------------------------------------------------------------- 4924 // ----------------------------------------------------------------------------
4776 // Non-local control flow support. 4925 // Non-local control flow support.
4777 4926
4778 void FullCodeGenerator::EnterFinallyBlock() { 4927 void FullCodeGenerator::EnterFinallyBlock() {
4779 DCHECK(!result_register().is(r1)); 4928 DCHECK(!result_register().is(r4));
4780 // Store result register while executing finally block. 4929 // Store result register while executing finally block.
4781 __ push(result_register()); 4930 __ push(result_register());
4782 // Cook return address in link register to stack (smi encoded Code* delta) 4931 // Cook return address in link register to stack (smi encoded Code* delta)
4783 __ sub(r1, lr, Operand(masm_->CodeObject())); 4932 __ mflr(r4);
4784 __ SmiTag(r1); 4933 __ mov(ip, Operand(masm_->CodeObject()));
4934 __ sub(r4, r4, ip);
4935 __ SmiTag(r4);
4785 4936
4786 // Store the cooked return address while executing finally block. 4937 // Store the cooked return address while executing finally block.
4787 __ push(r1); 4938 __ push(r4);
4788 4939
4789 // Store pending message while executing finally block. 4940 // Store pending message while executing finally block.
4790 ExternalReference pending_message_obj = 4941 ExternalReference pending_message_obj =
4791 ExternalReference::address_of_pending_message_obj(isolate()); 4942 ExternalReference::address_of_pending_message_obj(isolate());
4792 __ mov(ip, Operand(pending_message_obj)); 4943 __ mov(ip, Operand(pending_message_obj));
4793 __ ldr(r1, MemOperand(ip)); 4944 __ LoadP(r4, MemOperand(ip));
4794 __ push(r1); 4945 __ push(r4);
4795 4946
4796 ExternalReference has_pending_message = 4947 ExternalReference has_pending_message =
4797 ExternalReference::address_of_has_pending_message(isolate()); 4948 ExternalReference::address_of_has_pending_message(isolate());
4798 __ mov(ip, Operand(has_pending_message)); 4949 __ mov(ip, Operand(has_pending_message));
4799 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) 4950 __ lbz(r4, MemOperand(ip));
4800 __ ldrb(r1, MemOperand(ip)); 4951 __ SmiTag(r4);
4801 __ SmiTag(r1); 4952 __ push(r4);
4802 __ push(r1);
4803 4953
4804 ExternalReference pending_message_script = 4954 ExternalReference pending_message_script =
4805 ExternalReference::address_of_pending_message_script(isolate()); 4955 ExternalReference::address_of_pending_message_script(isolate());
4806 __ mov(ip, Operand(pending_message_script)); 4956 __ mov(ip, Operand(pending_message_script));
4807 __ ldr(r1, MemOperand(ip)); 4957 __ LoadP(r4, MemOperand(ip));
4808 __ push(r1); 4958 __ push(r4);
4809 } 4959 }
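EnterFinallyBlock cooks the return address by moving it out of the link register (mflr), rebasing it against the code object's start, and smi-tagging the delta, so the stack slot stays valid even if the GC relocates the code object; ExitFinallyBlock reverses the transformation and returns through mtctr/bctr. A round-trip sketch (assuming a smi tag shift of 1; ppc64 builds with 32-bit smis shift by 32):

    #include <cstdint>
    inline intptr_t CookReturnAddress(uintptr_t lr, uintptr_t code_start) {
      return static_cast<intptr_t>(lr - code_start) << 1;  // SmiTag(delta)
    }
    inline uintptr_t UncookReturnAddress(intptr_t cooked, uintptr_t code_start) {
      return code_start + static_cast<uintptr_t>(cooked >> 1);  // SmiUntag, rebase
    }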
4810 4960
4811 4961
4812 void FullCodeGenerator::ExitFinallyBlock() { 4962 void FullCodeGenerator::ExitFinallyBlock() {
4813 DCHECK(!result_register().is(r1)); 4963 DCHECK(!result_register().is(r4));
4814 // Restore pending message from stack. 4964 // Restore pending message from stack.
4815 __ pop(r1); 4965 __ pop(r4);
4816 ExternalReference pending_message_script = 4966 ExternalReference pending_message_script =
4817 ExternalReference::address_of_pending_message_script(isolate()); 4967 ExternalReference::address_of_pending_message_script(isolate());
4818 __ mov(ip, Operand(pending_message_script)); 4968 __ mov(ip, Operand(pending_message_script));
4819 __ str(r1, MemOperand(ip)); 4969 __ StoreP(r4, MemOperand(ip));
4820 4970
4821 __ pop(r1); 4971 __ pop(r4);
4822 __ SmiUntag(r1); 4972 __ SmiUntag(r4);
4823 ExternalReference has_pending_message = 4973 ExternalReference has_pending_message =
4824 ExternalReference::address_of_has_pending_message(isolate()); 4974 ExternalReference::address_of_has_pending_message(isolate());
4825 __ mov(ip, Operand(has_pending_message)); 4975 __ mov(ip, Operand(has_pending_message));
4826 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) 4976 __ stb(r4, MemOperand(ip));
4827 __ strb(r1, MemOperand(ip));
4828 4977
4829 __ pop(r1); 4978 __ pop(r4);
4830 ExternalReference pending_message_obj = 4979 ExternalReference pending_message_obj =
4831 ExternalReference::address_of_pending_message_obj(isolate()); 4980 ExternalReference::address_of_pending_message_obj(isolate());
4832 __ mov(ip, Operand(pending_message_obj)); 4981 __ mov(ip, Operand(pending_message_obj));
4833 __ str(r1, MemOperand(ip)); 4982 __ StoreP(r4, MemOperand(ip));
4834 4983
4835 // Restore result register from stack. 4984 // Restore result register from stack.
4836 __ pop(r1); 4985 __ pop(r4);
4837 4986
4838 // Uncook return address and return. 4987 // Uncook return address and return.
4839 __ pop(result_register()); 4988 __ pop(result_register());
4840 __ SmiUntag(r1); 4989 __ SmiUntag(r4);
4841 __ add(pc, r1, Operand(masm_->CodeObject())); 4990 __ mov(ip, Operand(masm_->CodeObject()));
4991 __ add(ip, ip, r4);
4992 __ mtctr(ip);
4993 __ bctr();
4842 } 4994 }
4843 4995
4844 4996
4845 #undef __ 4997 #undef __
4846 4998
4847 #define __ ACCESS_MASM(masm()) 4999 #define __ ACCESS_MASM(masm())
4848 5000
4849 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( 5001 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4850 int* stack_depth, 5002 int* stack_depth, int* context_length) {
4851 int* context_length) {
4852 // The macros used here must preserve the result register. 5003 // The macros used here must preserve the result register.
4853 5004
4854 // Because the handler block contains the context of the finally 5005 // Because the handler block contains the context of the finally
4855 // code, we can restore it directly from there for the finally code 5006 // code, we can restore it directly from there for the finally code
4856 // rather than iteratively unwinding contexts via their previous 5007 // rather than iteratively unwinding contexts via their previous
4857 // links. 5008 // links.
4858 __ Drop(*stack_depth); // Down to the handler block. 5009 __ Drop(*stack_depth); // Down to the handler block.
4859 if (*context_length > 0) { 5010 if (*context_length > 0) {
4860 // Restore the context to its dedicated register and the stack. 5011 // Restore the context to its dedicated register and the stack.
4861 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); 5012 __ LoadP(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4862 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 5013 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4863 } 5014 }
4864 __ PopTryHandler(); 5015 __ PopTryHandler();
4865 __ bl(finally_entry_); 5016 __ b(finally_entry_, SetLK);
4866 5017
4867 *stack_depth = 0; 5018 *stack_depth = 0;
4868 *context_length = 0; 5019 *context_length = 0;
4869 return previous_; 5020 return previous_;
4870 } 5021 }
4871 5022
4872
4873 #undef __ 5023 #undef __
4874 5024
4875 5025
4876 static Address GetInterruptImmediateLoadAddress(Address pc) { 5026 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
4877 Address load_address = pc - 2 * Assembler::kInstrSize;
4878 if (!FLAG_enable_ool_constant_pool) {
4879 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4880 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
4881 // This is an extended constant pool lookup.
4882 if (CpuFeatures::IsSupported(ARMv7)) {
4883 load_address -= 2 * Assembler::kInstrSize;
4884 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4885 DCHECK(Assembler::IsMovT(
4886 Memory::int32_at(load_address + Assembler::kInstrSize)));
4887 } else {
4888 load_address -= 4 * Assembler::kInstrSize;
4889 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4890 DCHECK(Assembler::IsOrrImmed(
4891 Memory::int32_at(load_address + Assembler::kInstrSize)));
4892 DCHECK(Assembler::IsOrrImmed(
4893 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4894 DCHECK(Assembler::IsOrrImmed(
4895 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
4896 }
4897 } else if (CpuFeatures::IsSupported(ARMv7) &&
4898 Assembler::IsMovT(Memory::int32_at(load_address))) {
4899 // This is a movw / movt immediate load.
4900 load_address -= Assembler::kInstrSize;
4901 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4902 } else if (!CpuFeatures::IsSupported(ARMv7) &&
4903 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
4904 // This is a mov / orr immediate load.
4905 load_address -= 3 * Assembler::kInstrSize;
4906 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4907 DCHECK(Assembler::IsOrrImmed(
4908 Memory::int32_at(load_address + Assembler::kInstrSize)));
4909 DCHECK(Assembler::IsOrrImmed(
4910 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4911 } else {
4912 // This is a small constant pool lookup.
4913 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
4914 }
4915 return load_address;
4916 }
4917
4918
4919 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4920 Address pc,
4921 BackEdgeState target_state, 5027 BackEdgeState target_state,
4922 Code* replacement_code) { 5028 Code* replacement_code) {
4923 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); 5029 Address mov_address = Assembler::target_address_from_return_address(pc);
4924 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; 5030 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
4925 CodePatcher patcher(branch_address, 1); 5031 CodePatcher patcher(cmp_address, 1);
5032
4926 switch (target_state) { 5033 switch (target_state) {
4927 case INTERRUPT: 5034 case INTERRUPT: {
4928 {
4929 // <decrement profiling counter> 5035 // <decrement profiling counter>
4930 // bpl ok 5036 // cmpi r6, 0
4931 // ; load interrupt stub address into ip - either of (for ARMv7): 5037 // bge <ok> ;; not changed
4932 // ; <small cp load> | <extended cp load> | <immediate load> 5038 // mov r12, <interrupt stub address>
4933 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm 5039 // mtlr r12
4934 // | movt ip, #imm | movw ip, #imm 5040 // blrl
4935 // | ldr ip, [pp, ip]
4936 // ; or (for ARMv6):
4937 // ; <small cp load> | <extended cp load> | <immediate load>
4938 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4939 // | orr ip, ip, #imm> | orr ip, ip, #imm
4940 // | orr ip, ip, #imm> | orr ip, ip, #imm
4941 // | orr ip, ip, #imm> | orr ip, ip, #imm
4942 // blx ip
4943 // <reset profiling counter> 5041 // <reset profiling counter>
4944 // ok-label 5042 // ok-label
4945 5043 patcher.masm()->cmpi(r6, Operand::Zero());
4946 // Calculate branch offset to the ok-label - this is the difference
4947 // between the branch address and |pc| (which points at <blx ip>) plus
4948 // kProfileCounterResetSequence instructions
4949 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
4950 kProfileCounterResetSequenceLength;
4951 patcher.masm()->b(branch_offset, pl);
4952 break; 5044 break;
4953 } 5045 }
4954 case ON_STACK_REPLACEMENT: 5046 case ON_STACK_REPLACEMENT:
4955 case OSR_AFTER_STACK_CHECK: 5047 case OSR_AFTER_STACK_CHECK:
4956 // <decrement profiling counter> 5048 // <decrement profiling counter>
4957 // mov r0, r0 (NOP) 5049 // crset
4958 // ; load on-stack replacement address into ip - either of (for ARMv7): 5050 // bge <ok> ;; not changed
4959 // ; <small cp load> | <extended cp load> | <immediate load> 5051 // mov r12, <on-stack replacement address>
4960 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm 5052 // mtlr r12
4961 // | movt ip, #imm> | movw ip, #imm 5053 // blrl
4962 // | ldr ip, [pp, ip]
4963 // ; or (for ARMv6):
4964 // ; <small cp load> | <extended cp load> | <immediate load>
4965 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4966 // | orr ip, ip, #imm> | orr ip, ip, #imm
4967 // | orr ip, ip, #imm> | orr ip, ip, #imm
4968 // | orr ip, ip, #imm> | orr ip, ip, #imm
4969 // blx ip
4970 // <reset profiling counter> 5054 // <reset profiling counter>
4971 // ok-label 5055 // ok-label ----- pc_after points here
4972 patcher.masm()->nop(); 5056
5057 // Set the LT bit so that the following bge is never taken (a NOP)
5058 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
4973 break; 5059 break;
4974 } 5060 }
4975 5061
4976 // Replace the call address. 5062 // Replace the stack check address in the mov sequence with the
4977 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, 5063 // entry address of the replacement code.
4978 replacement_code->entry()); 5064 Assembler::set_target_address_at(mov_address, unoptimized_code,
5065 replacement_code->entry());
4979 5066
4980 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( 5067 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4981 unoptimized_code, pc_immediate_load_address, replacement_code); 5068 unoptimized_code, mov_address, replacement_code);
4982 } 5069 }
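
Taken together, the new PatchAt changes exactly two things: the instruction at cmp_address (cmpi keeps the interrupt check live; crset forces the following bge to fall through, disabling it) and the address the mov sequence materializes (interrupt stub vs. an OSR builtin). A minimal standalone sketch of that two-field state machine; kCmpi, kCrset, and BackEdgeSite are made-up illustrations, not V8 encodings:

#include <cassert>
#include <cstdint>

enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

// Stand-ins for the two words PatchAt can write at cmp_address.
constexpr uint32_t kCmpi = 1;   // cmpi r6, 0 -> interrupt check is live
constexpr uint32_t kCrset = 2;  // crset      -> bge never taken, check is off

struct BackEdgeSite {
  uint32_t cmp_word;     // the patchable instruction at cmp_address
  uintptr_t mov_target;  // the address materialized by the mov sequence
};

// Models PatchAt: one instruction flip plus one call-target swap.
void Patch(BackEdgeSite* site, BackEdgeState state, uintptr_t stub_entry) {
  site->cmp_word = (state == INTERRUPT) ? kCmpi : kCrset;
  site->mov_target = stub_entry;  // interrupt stub or an OSR builtin entry
}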
4983 5070
4984 5071
4985 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( 5072 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4986 Isolate* isolate, 5073 Isolate* isolate, Code* unoptimized_code, Address pc) {
4987 Code* unoptimized_code, 5074 Address mov_address = Assembler::target_address_from_return_address(pc);
4988 Address pc) { 5075 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
4989 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize))); 5076 Address interrupt_address =
5077 Assembler::target_address_at(mov_address, unoptimized_code);
4990 5078
4991 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); 5079 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
4992 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; 5080 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
4993 Address interrupt_address = Assembler::target_address_at(
4994 pc_immediate_load_address, unoptimized_code);
4995
4996 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
4997 DCHECK(interrupt_address ==
4998 isolate->builtins()->InterruptCheck()->entry());
4999 return INTERRUPT; 5081 return INTERRUPT;
5000 } 5082 }
5001 5083
5002 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address))); 5084 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
5003 5085
5004 if (interrupt_address == 5086 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
5005 isolate->builtins()->OnStackReplacement()->entry()) {
5006 return ON_STACK_REPLACEMENT; 5087 return ON_STACK_REPLACEMENT;
5007 } 5088 }
5008 5089
5009 DCHECK(interrupt_address == 5090 DCHECK(interrupt_address ==
5010 isolate->builtins()->OsrAfterStackCheck()->entry()); 5091 isolate->builtins()->OsrAfterStackCheck()->entry());
5011 return OSR_AFTER_STACK_CHECK; 5092 return OSR_AFTER_STACK_CHECK;
5012 } 5093 }
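
GetBackEdgeState recovers the state by inverting the patch: the instruction word says whether the check is live, and when it is not, the current mov target tells the two OSR flavors apart. Continuing the sketch after PatchAt (reusing its BackEdgeSite, kCmpi, and Patch):

// Models GetBackEdgeState: read back what Patch wrote.
BackEdgeState Read(const BackEdgeSite& site, uintptr_t interrupt_entry,
                   uintptr_t osr_entry) {
  if (site.cmp_word == kCmpi) {
    assert(site.mov_target == interrupt_entry);  // mirrors the DCHECK
    return INTERRUPT;
  }
  return site.mov_target == osr_entry ? ON_STACK_REPLACEMENT
                                      : OSR_AFTER_STACK_CHECK;
}

int main() {
  BackEdgeSite site = {kCmpi, 0x100};         // 0x100: made-up interrupt entry
  Patch(&site, ON_STACK_REPLACEMENT, 0x200);  // 0x200: made-up OSR entry
  return Read(site, 0x100, 0x200) == ON_STACK_REPLACEMENT ? 0 : 1;
}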
5094 }
5095 } // namespace v8::internal
5013 5096
5014 5097 #endif // V8_TARGET_ARCH_PPC
5015 } } // namespace v8::internal
5016
5017 #endif // V8_TARGET_ARCH_ARM