Chromium Code Reviews

Side by Side Diff: src/ppc/full-codegen-ppc.cc

Issue 571173003: PowerPC specific sub-directories (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Updated ppc sub-dirs to current V8 code levels Created 6 years, 2 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 //
3 // Copyright IBM Corp. 2012, 2013. All rights reserved.
4 //
2 // Use of this source code is governed by a BSD-style license that can be 5 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 6 // found in the LICENSE file.
4 7
5 #include "src/v8.h" 8 #include "src/v8.h"
6 9
7 #if V8_TARGET_ARCH_ARM 10 #if V8_TARGET_ARCH_PPC
8 11
9 #include "src/code-factory.h" 12 #include "src/code-factory.h"
10 #include "src/code-stubs.h" 13 #include "src/code-stubs.h"
11 #include "src/codegen.h" 14 #include "src/codegen.h"
12 #include "src/compiler.h" 15 #include "src/compiler.h"
13 #include "src/debug.h" 16 #include "src/debug.h"
14 #include "src/full-codegen.h" 17 #include "src/full-codegen.h"
18 #include "src/ic/ic.h"
15 #include "src/isolate-inl.h" 19 #include "src/isolate-inl.h"
16 #include "src/parser.h" 20 #include "src/parser.h"
17 #include "src/scopes.h" 21 #include "src/scopes.h"
18 22
19 #include "src/arm/code-stubs-arm.h" 23 #include "src/ppc/code-stubs-ppc.h"
20 #include "src/arm/macro-assembler-arm.h" 24 #include "src/ppc/macro-assembler-ppc.h"
21 25
22 namespace v8 { 26 namespace v8 {
23 namespace internal { 27 namespace internal {
24 28
25 #define __ ACCESS_MASM(masm_) 29 #define __ ACCESS_MASM(masm_)
26 30
27
28 // A patch site is a location in the code which it is possible to patch. This 31 // A patch site is a location in the code which it is possible to patch. This
29 // class has a number of methods to emit the code which is patchable and the 32 // class has a number of methods to emit the code which is patchable and the
30 // method EmitPatchInfo to record a marker back to the patchable code. This 33 // method EmitPatchInfo to record a marker back to the patchable code. This
31 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit 34 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit
32 // immediate value is used) is the delta from the pc to the first instruction of 35 // immediate value is used) is the delta from the pc to the first instruction of
33 // the patchable code. 36 // the patchable code.
 37 // See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
34 class JumpPatchSite BASE_EMBEDDED { 38 class JumpPatchSite BASE_EMBEDDED {
35 public: 39 public:
36 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { 40 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
37 #ifdef DEBUG 41 #ifdef DEBUG
38 info_emitted_ = false; 42 info_emitted_ = false;
39 #endif 43 #endif
40 } 44 }
41 45
42 ~JumpPatchSite() { 46 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
43 DCHECK(patch_site_.is_bound() == info_emitted_);
44 }
45 47
46 // When initially emitting this ensure that a jump is always generated to skip 48 // When initially emitting this ensure that a jump is always generated to skip
47 // the inlined smi code. 49 // the inlined smi code.
48 void EmitJumpIfNotSmi(Register reg, Label* target) { 50 void EmitJumpIfNotSmi(Register reg, Label* target) {
49 DCHECK(!patch_site_.is_bound() && !info_emitted_); 51 DCHECK(!patch_site_.is_bound() && !info_emitted_);
50 Assembler::BlockConstPoolScope block_const_pool(masm_); 52 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
51 __ bind(&patch_site_); 53 __ bind(&patch_site_);
52 __ cmp(reg, Operand(reg)); 54 __ cmp(reg, reg, cr0);
53 __ b(eq, target); // Always taken before patched. 55 __ beq(target, cr0); // Always taken before patched.
54 } 56 }
55 57
56 // When initially emitting this ensure that a jump is never generated to skip 58 // When initially emitting this ensure that a jump is never generated to skip
57 // the inlined smi code. 59 // the inlined smi code.
58 void EmitJumpIfSmi(Register reg, Label* target) { 60 void EmitJumpIfSmi(Register reg, Label* target) {
61 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
59 DCHECK(!patch_site_.is_bound() && !info_emitted_); 62 DCHECK(!patch_site_.is_bound() && !info_emitted_);
60 Assembler::BlockConstPoolScope block_const_pool(masm_);
61 __ bind(&patch_site_); 63 __ bind(&patch_site_);
62 __ cmp(reg, Operand(reg)); 64 __ cmp(reg, reg, cr0);
63 __ b(ne, target); // Never taken before patched. 65 __ bne(target, cr0); // Never taken before patched.
64 } 66 }
65 67
66 void EmitPatchInfo() { 68 void EmitPatchInfo() {
67 // Block literal pool emission whilst recording patch site information.
68 Assembler::BlockConstPoolScope block_const_pool(masm_);
69 if (patch_site_.is_bound()) { 69 if (patch_site_.is_bound()) {
70 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); 70 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
71 Register reg; 71 Register reg;
 72 reg.set_code(delta_to_patch_site / kOff12Mask); 72 // I believe this is using reg as the high bits of the offset
73 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask); 73 reg.set_code(delta_to_patch_site / kOff16Mask);
74 __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
74 #ifdef DEBUG 75 #ifdef DEBUG
75 info_emitted_ = true; 76 info_emitted_ = true;
76 #endif 77 #endif
77 } else { 78 } else {
78 __ nop(); // Signals no inlined code. 79 __ nop(); // Signals no inlined code.
79 } 80 }
80 } 81 }
81 82
82 private: 83 private:
83 MacroAssembler* masm_; 84 MacroAssembler* masm_;
84 Label patch_site_; 85 Label patch_site_;
85 #ifdef DEBUG 86 #ifdef DEBUG
86 bool info_emitted_; 87 bool info_emitted_;
87 #endif 88 #endif
88 }; 89 };
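
Two details of the scheme above are worth spelling out. First, cmp(reg, reg, cr0) compares a register with itself, so eq is always set: the unpatched beq is always taken and the unpatched bne never is, exactly as the comments say. Second, EmitPatchInfo packs the instruction delta into the cmpi marker: the register field carries the high bits and the 16-bit immediate the low bits. A minimal sketch of the inverse mapping, assuming kOff16Mask is 0xffff as the class comment implies (the real decoder is PatchInlinedSmiCode in ic-ppc.cc, not shown in this diff):

  // Hypothetical decoder, mirroring EmitPatchInfo's encoding (illustrative only).
  int DecodePatchSiteDelta(int reg_code, int raw_immediate) {
    // EmitPatchInfo emitted: reg.set_code(delta / kOff16Mask);
    //                        cmpi(reg, Operand(delta % kOff16Mask));
    return reg_code * 0xffff /* kOff16Mask, assumed */ + raw_immediate;
  }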
89 90
90 91
91 // Generate code for a JS function. On entry to the function the receiver 92 // Generate code for a JS function. On entry to the function the receiver
92 // and arguments have been pushed on the stack left to right. The actual 93 // and arguments have been pushed on the stack left to right. The actual
93 // argument count matches the formal parameter count expected by the 94 // argument count matches the formal parameter count expected by the
94 // function. 95 // function.
95 // 96 //
96 // The live registers are: 97 // The live registers are:
97 // o r1: the JS function object being called (i.e., ourselves) 98 // o r4: the JS function object being called (i.e., ourselves)
98 // o cp: our context 99 // o cp: our context
99 // o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool) 100 // o fp: our caller's frame pointer (aka r31)
100 // o fp: our caller's frame pointer
101 // o sp: stack pointer 101 // o sp: stack pointer
 102 // o lr: return address 102 // o lr: return address (bogus: PPC has no lr GPR; the link register is a special-purpose register)
103 // 103 //
104 // The function builds a JS frame. Please see JavaScriptFrameConstants in 104 // The function builds a JS frame. Please see JavaScriptFrameConstants in
105 // frames-arm.h for its layout. 105 // frames-ppc.h for its layout.
106 void FullCodeGenerator::Generate() { 106 void FullCodeGenerator::Generate() {
107 CompilationInfo* info = info_; 107 CompilationInfo* info = info_;
108 handler_table_ = 108 handler_table_ =
109 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 109 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
110 110
111 profiling_counter_ = isolate()->factory()->NewCell( 111 profiling_counter_ = isolate()->factory()->NewCell(
112 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 112 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
113 SetFunctionPosition(function()); 113 SetFunctionPosition(function());
114 Comment cmnt(masm_, "[ function compiled by full code generator"); 114 Comment cmnt(masm_, "[ function compiled by full code generator");
115 115
116 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 116 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
117 117
118 #ifdef DEBUG 118 #ifdef DEBUG
119 if (strlen(FLAG_stop_at) > 0 && 119 if (strlen(FLAG_stop_at) > 0 &&
120 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 120 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
121 __ stop("stop-at"); 121 __ stop("stop-at");
122 } 122 }
123 #endif 123 #endif
124 124
125 // Sloppy mode functions and builtins need to replace the receiver with the 125 // Sloppy mode functions and builtins need to replace the receiver with the
126 // global proxy when called as functions (without an explicit receiver 126 // global proxy when called as functions (without an explicit receiver
127 // object). 127 // object).
128 if (info->strict_mode() == SLOPPY && !info->is_native()) { 128 if (info->strict_mode() == SLOPPY && !info->is_native()) {
129 Label ok; 129 Label ok;
130 int receiver_offset = info->scope()->num_parameters() * kPointerSize; 130 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
131 __ ldr(r2, MemOperand(sp, receiver_offset)); 131 __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
132 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); 132 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
133 __ b(ne, &ok); 133 __ bne(&ok);
134 134
135 __ ldr(r2, GlobalObjectOperand()); 135 __ LoadP(r5, GlobalObjectOperand());
136 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset)); 136 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
137 137
138 __ str(r2, MemOperand(sp, receiver_offset)); 138 __ StoreP(r5, MemOperand(sp, receiver_offset), r0);
139 139
140 __ bind(&ok); 140 __ bind(&ok);
141 } 141 }
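
As C-like pseudocode, the sloppy-mode fixup above is simply the following (a sketch; the stack and global accessors are placeholders, not V8 API):

  // receiver_offset = num_parameters * kPointerSize (see above)
  if (stack[receiver_offset] == undefined_value) {
    // Replace the undefined receiver with the global proxy.
    stack[receiver_offset] = global_object->global_proxy();
  }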
142 142
143 // Open a frame scope to indicate that there is a frame on the stack. The 143 // Open a frame scope to indicate that there is a frame on the stack. The
144 // MANUAL indicates that the scope shouldn't actually generate code to set up 144 // MANUAL indicates that the scope shouldn't actually generate code to set up
145 // the frame (that is done below). 145 // the frame (that is done below).
146 FrameScope frame_scope(masm_, StackFrame::MANUAL); 146 FrameScope frame_scope(masm_, StackFrame::MANUAL);
147 147
148 info->set_prologue_offset(masm_->pc_offset()); 148 info->set_prologue_offset(masm_->pc_offset());
149 __ Prologue(info->IsCodePreAgingActive()); 149 __ Prologue(info->IsCodePreAgingActive());
150 info->AddNoFrameRange(0, masm_->pc_offset()); 150 info->AddNoFrameRange(0, masm_->pc_offset());
151 151
152 { Comment cmnt(masm_, "[ Allocate locals"); 152 {
153 Comment cmnt(masm_, "[ Allocate locals");
153 int locals_count = info->scope()->num_stack_slots(); 154 int locals_count = info->scope()->num_stack_slots();
154 // Generators allocate locals, if any, in context slots. 155 // Generators allocate locals, if any, in context slots.
155 DCHECK(!info->function()->is_generator() || locals_count == 0); 156 DCHECK(!info->function()->is_generator() || locals_count == 0);
156 if (locals_count > 0) { 157 if (locals_count > 0) {
157 if (locals_count >= 128) { 158 if (locals_count >= 128) {
158 Label ok; 159 Label ok;
159 __ sub(r9, sp, Operand(locals_count * kPointerSize)); 160 __ Add(ip, sp, -(locals_count * kPointerSize), r0);
160 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 161 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
161 __ cmp(r9, Operand(r2)); 162 __ cmpl(ip, r5);
162 __ b(hs, &ok); 163 __ bc_short(ge, &ok);
163 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); 164 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
164 __ bind(&ok); 165 __ bind(&ok);
165 } 166 }
166 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex); 167 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
167 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; 168 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
168 if (locals_count >= kMaxPushes) { 169 if (locals_count >= kMaxPushes) {
169 int loop_iterations = locals_count / kMaxPushes; 170 int loop_iterations = locals_count / kMaxPushes;
170 __ mov(r2, Operand(loop_iterations)); 171 __ mov(r5, Operand(loop_iterations));
172 __ mtctr(r5);
171 Label loop_header; 173 Label loop_header;
172 __ bind(&loop_header); 174 __ bind(&loop_header);
173 // Do pushes. 175 // Do pushes.
174 for (int i = 0; i < kMaxPushes; i++) { 176 for (int i = 0; i < kMaxPushes; i++) {
175 __ push(r9); 177 __ push(ip);
176 } 178 }
177 // Continue loop if not done. 179 // Continue loop if not done.
178 __ sub(r2, r2, Operand(1), SetCC); 180 __ bdnz(&loop_header);
179 __ b(&loop_header, ne);
180 } 181 }
181 int remaining = locals_count % kMaxPushes; 182 int remaining = locals_count % kMaxPushes;
182 // Emit the remaining pushes. 183 // Emit the remaining pushes.
183 for (int i = 0; i < remaining; i++) { 184 for (int i = 0; i < remaining; i++) {
184 __ push(r9); 185 __ push(ip);
185 } 186 }
186 } 187 }
187 } 188 }
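
Note how the loop changed: ARM decremented r2 with SetCC and branched on ne, while PPC loads the count register with mtctr and lets bdnz decrement-and-branch in one instruction, freeing a GPR and dropping the explicit compare. A C equivalent of the emitted push loop, using the names from the code above:

  // C-equivalent of the locals-initialization loop (illustrative only).
  for (int ctr = locals_count / kMaxPushes; ctr != 0; ctr--) {  // mtctr/bdnz
    for (int i = 0; i < kMaxPushes; i++) push(undefined);       // unrolled pushes
  }
  for (int i = 0; i < locals_count % kMaxPushes; i++) push(undefined);  // remainder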
188 189
189 bool function_in_register = true; 190 bool function_in_register = true;
190 191
191 // Possibly allocate a local context. 192 // Possibly allocate a local context.
192 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 193 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
193 if (heap_slots > 0) { 194 if (heap_slots > 0) {
194 // Argument to NewContext is the function, which is still in r1. 195 // Argument to NewContext is the function, which is still in r4.
195 Comment cmnt(masm_, "[ Allocate context"); 196 Comment cmnt(masm_, "[ Allocate context");
196 bool need_write_barrier = true; 197 bool need_write_barrier = true;
197 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { 198 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
198 __ push(r1); 199 __ push(r4);
199 __ Push(info->scope()->GetScopeInfo()); 200 __ Push(info->scope()->GetScopeInfo());
200 __ CallRuntime(Runtime::kNewGlobalContext, 2); 201 __ CallRuntime(Runtime::kNewGlobalContext, 2);
201 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { 202 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
202 FastNewContextStub stub(isolate(), heap_slots); 203 FastNewContextStub stub(isolate(), heap_slots);
203 __ CallStub(&stub); 204 __ CallStub(&stub);
204 // Result of FastNewContextStub is always in new space. 205 // Result of FastNewContextStub is always in new space.
205 need_write_barrier = false; 206 need_write_barrier = false;
206 } else { 207 } else {
207 __ push(r1); 208 __ push(r4);
208 __ CallRuntime(Runtime::kNewFunctionContext, 1); 209 __ CallRuntime(Runtime::kNewFunctionContext, 1);
209 } 210 }
210 function_in_register = false; 211 function_in_register = false;
211 // Context is returned in r0. It replaces the context passed to us. 212 // Context is returned in r3. It replaces the context passed to us.
212 // It's saved in the stack and kept live in cp. 213 // It's saved in the stack and kept live in cp.
213 __ mov(cp, r0); 214 __ mr(cp, r3);
214 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset)); 215 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
215 // Copy any necessary parameters into the context. 216 // Copy any necessary parameters into the context.
216 int num_parameters = info->scope()->num_parameters(); 217 int num_parameters = info->scope()->num_parameters();
217 for (int i = 0; i < num_parameters; i++) { 218 for (int i = 0; i < num_parameters; i++) {
218 Variable* var = scope()->parameter(i); 219 Variable* var = scope()->parameter(i);
219 if (var->IsContextSlot()) { 220 if (var->IsContextSlot()) {
220 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 221 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
221 (num_parameters - 1 - i) * kPointerSize; 222 (num_parameters - 1 - i) * kPointerSize;
222 // Load parameter from stack. 223 // Load parameter from stack.
223 __ ldr(r0, MemOperand(fp, parameter_offset)); 224 __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
224 // Store it in the context. 225 // Store it in the context.
225 MemOperand target = ContextOperand(cp, var->index()); 226 MemOperand target = ContextOperand(cp, var->index());
226 __ str(r0, target); 227 __ StoreP(r3, target, r0);
227 228
228 // Update the write barrier. 229 // Update the write barrier.
229 if (need_write_barrier) { 230 if (need_write_barrier) {
230 __ RecordWriteContextSlot( 231 __ RecordWriteContextSlot(cp, target.offset(), r3, r6,
231 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs); 232 kLRHasBeenSaved, kDontSaveFPRegs);
232 } else if (FLAG_debug_code) { 233 } else if (FLAG_debug_code) {
233 Label done; 234 Label done;
234 __ JumpIfInNewSpace(cp, r0, &done); 235 __ JumpIfInNewSpace(cp, r3, &done);
235 __ Abort(kExpectedNewSpaceObject); 236 __ Abort(kExpectedNewSpaceObject);
236 __ bind(&done); 237 __ bind(&done);
237 } 238 }
238 } 239 }
239 } 240 }
240 } 241 }
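
A worked example of the parameter_offset arithmetic above, assuming 64-bit pointers (kPointerSize == 8) and num_parameters == 3:

  // i == 0: offset = kCallerSPOffset + (3 - 1 - 0) * 8 = kCallerSPOffset + 16
  // i == 2: offset = kCallerSPOffset + (3 - 1 - 2) * 8 = kCallerSPOffset + 0
  // Arguments were pushed left to right, so the last parameter sits nearest
  // the caller's SP and the first one furthest from it.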
241 242
242 Variable* arguments = scope()->arguments(); 243 Variable* arguments = scope()->arguments();
243 if (arguments != NULL) { 244 if (arguments != NULL) {
244 // Function uses arguments object. 245 // Function uses arguments object.
245 Comment cmnt(masm_, "[ Allocate arguments object"); 246 Comment cmnt(masm_, "[ Allocate arguments object");
246 if (!function_in_register) { 247 if (!function_in_register) {
247 // Load this again, if it's used by the local context below. 248 // Load this again, if it's used by the local context below.
248 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 249 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
249 } else { 250 } else {
250 __ mov(r3, r1); 251 __ mr(r6, r4);
251 } 252 }
252 // Receiver is just before the parameters on the caller's stack. 253 // Receiver is just before the parameters on the caller's stack.
253 int num_parameters = info->scope()->num_parameters(); 254 int num_parameters = info->scope()->num_parameters();
254 int offset = num_parameters * kPointerSize; 255 int offset = num_parameters * kPointerSize;
255 __ add(r2, fp, 256 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
256 Operand(StandardFrameConstants::kCallerSPOffset + offset)); 257 __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
257 __ mov(r1, Operand(Smi::FromInt(num_parameters))); 258 __ Push(r6, r5, r4);
258 __ Push(r3, r2, r1);
259 259
260 // Arguments to ArgumentsAccessStub: 260 // Arguments to ArgumentsAccessStub:
261 // function, receiver address, parameter count. 261 // function, receiver address, parameter count.
 262 // The stub will rewrite receiver and parameter count if the previous 262 // The stub will rewrite receiver and parameter count if the previous
263 // stack frame was an arguments adapter frame. 263 // stack frame was an arguments adapter frame.
264 ArgumentsAccessStub::Type type; 264 ArgumentsAccessStub::Type type;
265 if (strict_mode() == STRICT) { 265 if (strict_mode() == STRICT) {
266 type = ArgumentsAccessStub::NEW_STRICT; 266 type = ArgumentsAccessStub::NEW_STRICT;
267 } else if (function()->has_duplicate_parameters()) { 267 } else if (function()->has_duplicate_parameters()) {
268 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; 268 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
269 } else { 269 } else {
270 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; 270 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
271 } 271 }
272 ArgumentsAccessStub stub(isolate(), type); 272 ArgumentsAccessStub stub(isolate(), type);
273 __ CallStub(&stub); 273 __ CallStub(&stub);
274 274
275 SetVar(arguments, r0, r1, r2); 275 SetVar(arguments, r3, r4, r5);
276 } 276 }
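
For reference, the three values pushed for ArgumentsAccessStub, with the receiver address worked through under the same 64-bit, three-parameter assumption:

  // r6: the function (reloaded if no longer live in r4)
  // r5: receiver address = fp + kCallerSPOffset + num_parameters * kPointerSize
  //     (e.g. fp + kCallerSPOffset + 24 for 3 parameters on 64-bit)
  // r4: Smi::FromInt(num_parameters)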
277 277
278 if (FLAG_trace) { 278 if (FLAG_trace) {
279 __ CallRuntime(Runtime::kTraceEnter, 0); 279 __ CallRuntime(Runtime::kTraceEnter, 0);
280 } 280 }
281 281
282 // Visit the declarations and body unless there is an illegal 282 // Visit the declarations and body unless there is an illegal
283 // redeclaration. 283 // redeclaration.
284 if (scope()->HasIllegalRedeclaration()) { 284 if (scope()->HasIllegalRedeclaration()) {
285 Comment cmnt(masm_, "[ Declarations"); 285 Comment cmnt(masm_, "[ Declarations");
286 scope()->VisitIllegalRedeclaration(this); 286 scope()->VisitIllegalRedeclaration(this);
287 287
288 } else { 288 } else {
289 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 289 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
290 { Comment cmnt(masm_, "[ Declarations"); 290 {
291 Comment cmnt(masm_, "[ Declarations");
291 // For named function expressions, declare the function name as a 292 // For named function expressions, declare the function name as a
292 // constant. 293 // constant.
293 if (scope()->is_function_scope() && scope()->function() != NULL) { 294 if (scope()->is_function_scope() && scope()->function() != NULL) {
294 VariableDeclaration* function = scope()->function(); 295 VariableDeclaration* function = scope()->function();
295 DCHECK(function->proxy()->var()->mode() == CONST || 296 DCHECK(function->proxy()->var()->mode() == CONST ||
296 function->proxy()->var()->mode() == CONST_LEGACY); 297 function->proxy()->var()->mode() == CONST_LEGACY);
297 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED); 298 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
298 VisitVariableDeclaration(function); 299 VisitVariableDeclaration(function);
299 } 300 }
300 VisitDeclarations(scope()->declarations()); 301 VisitDeclarations(scope()->declarations());
301 } 302 }
302 303
303 { Comment cmnt(masm_, "[ Stack check"); 304 {
305 Comment cmnt(masm_, "[ Stack check");
304 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 306 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
305 Label ok; 307 Label ok;
306 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 308 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
307 __ cmp(sp, Operand(ip)); 309 __ cmpl(sp, ip);
308 __ b(hs, &ok); 310 __ bc_short(ge, &ok);
309 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); 311 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
310 PredictableCodeSizeScope predictable(masm_,
311 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
312 __ Call(stack_check, RelocInfo::CODE_TARGET);
313 __ bind(&ok); 312 __ bind(&ok);
314 } 313 }
315 314
316 { Comment cmnt(masm_, "[ Body"); 315 {
316 Comment cmnt(masm_, "[ Body");
317 DCHECK(loop_depth() == 0); 317 DCHECK(loop_depth() == 0);
318 VisitStatements(function()->body()); 318 VisitStatements(function()->body());
319 DCHECK(loop_depth() == 0); 319 DCHECK(loop_depth() == 0);
320 } 320 }
321 } 321 }
322 322
323 // Always emit a 'return undefined' in case control fell off the end of 323 // Always emit a 'return undefined' in case control fell off the end of
324 // the body. 324 // the body.
325 { Comment cmnt(masm_, "[ return <undefined>;"); 325 {
326 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 326 Comment cmnt(masm_, "[ return <undefined>;");
327 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
327 } 328 }
328 EmitReturnSequence(); 329 EmitReturnSequence();
329
330 // Force emit the constant pool, so it doesn't get emitted in the middle
331 // of the back edge table.
332 masm()->CheckConstPool(true, false);
333 } 330 }
334 331
335 332
336 void FullCodeGenerator::ClearAccumulator() { 333 void FullCodeGenerator::ClearAccumulator() {
337 __ mov(r0, Operand(Smi::FromInt(0))); 334 __ LoadSmiLiteral(r3, Smi::FromInt(0));
338 } 335 }
339 336
340 337
341 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { 338 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
342 __ mov(r2, Operand(profiling_counter_)); 339 __ mov(r5, Operand(profiling_counter_));
343 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); 340 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
344 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); 341 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
345 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); 342 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
346 } 343 }
347 344
348 345
349 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
350 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
351 #else
352 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
353 #endif
354
355
356 void FullCodeGenerator::EmitProfilingCounterReset() { 346 void FullCodeGenerator::EmitProfilingCounterReset() {
357 Assembler::BlockConstPoolScope block_const_pool(masm_);
358 PredictableCodeSizeScope predictable_code_size_scope(
359 masm_, kProfileCounterResetSequenceLength);
360 Label start;
361 __ bind(&start);
362 int reset_value = FLAG_interrupt_budget; 347 int reset_value = FLAG_interrupt_budget;
363 if (info_->is_debug()) { 348 if (info_->is_debug()) {
364 // Detect debug break requests as soon as possible. 349 // Detect debug break requests as soon as possible.
365 reset_value = FLAG_interrupt_budget >> 4; 350 reset_value = FLAG_interrupt_budget >> 4;
366 } 351 }
367 __ mov(r2, Operand(profiling_counter_)); 352 __ mov(r5, Operand(profiling_counter_));
368 // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5 353 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
369 // instructions (for ARMv6) depending upon whether it is an extended constant 354 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
370 // pool - insert nop to compensate.
371 int expected_instr_count =
372 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
373 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
374 while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
375 __ nop();
376 }
377 __ mov(r3, Operand(Smi::FromInt(reset_value)));
378 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
379 } 355 }
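
To see the debug shift in action: under an assumed example budget (the FLAG_interrupt_budget default is not shown in this diff), a debug build trips the interrupt check about sixteen times sooner:

  // Illustrative only; 6144 is an assumed budget, not a value from this CL.
  int reset_value = 6144;            // FLAG_interrupt_budget (assumed)
  if (info_is_debug) {
    reset_value >>= 4;               // 6144 >> 4 == 384
  }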
380 356
381 357
382 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 358 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
383 Label* back_edge_target) { 359 Label* back_edge_target) {
384 Comment cmnt(masm_, "[ Back edge bookkeeping"); 360 Comment cmnt(masm_, "[ Back edge bookkeeping");
385 // Block literal pools whilst emitting back edge code.
386 Assembler::BlockConstPoolScope block_const_pool(masm_);
387 Label ok; 361 Label ok;
388 362
389 DCHECK(back_edge_target->is_bound()); 363 DCHECK(back_edge_target->is_bound());
390 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); 364 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
391 int weight = Min(kMaxBackEdgeWeight, 365 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
392 Max(1, distance / kCodeSizeMultiplier));
393 EmitProfilingCounterDecrement(weight); 366 EmitProfilingCounterDecrement(weight);
394 __ b(pl, &ok); 367 {
395 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); 368 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
369 // BackEdgeTable::PatchAt manipulates this sequence.
370 __ cmpi(r6, Operand::Zero());
371 __ bc_short(ge, &ok);
372 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
396 373
397 // Record a mapping of this PC offset to the OSR id. This is used to find 374 // Record a mapping of this PC offset to the OSR id. This is used to find
398 // the AST id from the unoptimized code in order to use it as a key into 375 // the AST id from the unoptimized code in order to use it as a key into
399 // the deoptimization input data found in the optimized code. 376 // the deoptimization input data found in the optimized code.
400 RecordBackEdge(stmt->OsrEntryId()); 377 RecordBackEdge(stmt->OsrEntryId());
401 378 }
402 EmitProfilingCounterReset(); 379 EmitProfilingCounterReset();
403 380
404 __ bind(&ok); 381 __ bind(&ok);
405 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 382 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
406 // Record a mapping of the OSR id to this PC. This is used if the OSR 383 // Record a mapping of the OSR id to this PC. This is used if the OSR
407 // entry becomes the target of a bailout. We don't expect it to be, but 384 // entry becomes the target of a bailout. We don't expect it to be, but
408 // we want it to work if it is. 385 // we want it to work if it is.
409 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 386 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
410 } 387 }
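
Taken together, the back-edge accounting behaves like this model (a sketch using the names from this file; Min/Max as in V8's utils):

  // Model of the back-edge counter protocol (illustrative only).
  void OnBackEdge(int distance) {
    int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
    counter -= weight;               // EmitProfilingCounterDecrement
    if (counter < 0) {               // the cmpi/bc_short(ge) pair above
      CallInterruptCheck();          // may trigger OSR of the hot loop
      counter = reset_value;         // EmitProfilingCounterReset
    }
  }

The cmpi/bc_short pair is kept inside a BlockTrampolinePoolScope because BackEdgeTable::PatchAt rewrites exactly that sequence, so its shape and length must stay stable.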
411 388
412 389
413 void FullCodeGenerator::EmitReturnSequence() { 390 void FullCodeGenerator::EmitReturnSequence() {
414 Comment cmnt(masm_, "[ Return sequence"); 391 Comment cmnt(masm_, "[ Return sequence");
415 if (return_label_.is_bound()) { 392 if (return_label_.is_bound()) {
416 __ b(&return_label_); 393 __ b(&return_label_);
417 } else { 394 } else {
418 __ bind(&return_label_); 395 __ bind(&return_label_);
419 if (FLAG_trace) { 396 if (FLAG_trace) {
420 // Push the return value on the stack as the parameter. 397 // Push the return value on the stack as the parameter.
 421 // Runtime::TraceExit returns its parameter in r0. 398 // Runtime::TraceExit returns its parameter in r3.
422 __ push(r0); 399 __ push(r3);
423 __ CallRuntime(Runtime::kTraceExit, 1); 400 __ CallRuntime(Runtime::kTraceExit, 1);
424 } 401 }
425 // Pretend that the exit is a backwards jump to the entry. 402 // Pretend that the exit is a backwards jump to the entry.
426 int weight = 1; 403 int weight = 1;
427 if (info_->ShouldSelfOptimize()) { 404 if (info_->ShouldSelfOptimize()) {
428 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 405 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
429 } else { 406 } else {
430 int distance = masm_->pc_offset(); 407 int distance = masm_->pc_offset();
431 weight = Min(kMaxBackEdgeWeight, 408 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
432 Max(1, distance / kCodeSizeMultiplier));
433 } 409 }
434 EmitProfilingCounterDecrement(weight); 410 EmitProfilingCounterDecrement(weight);
435 Label ok; 411 Label ok;
436 __ b(pl, &ok); 412 __ cmpi(r6, Operand::Zero());
437 __ push(r0); 413 __ bge(&ok);
438 __ Call(isolate()->builtins()->InterruptCheck(), 414 __ push(r3);
439 RelocInfo::CODE_TARGET); 415 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
440 __ pop(r0); 416 __ pop(r3);
441 EmitProfilingCounterReset(); 417 EmitProfilingCounterReset();
442 __ bind(&ok); 418 __ bind(&ok);
443 419
444 #ifdef DEBUG 420 #ifdef DEBUG
445 // Add a label for checking the size of the code used for returning. 421 // Add a label for checking the size of the code used for returning.
446 Label check_exit_codesize; 422 Label check_exit_codesize;
447 __ bind(&check_exit_codesize); 423 __ bind(&check_exit_codesize);
448 #endif 424 #endif
449 // Make sure that the constant pool is not emitted inside of the return 425 // Make sure that the constant pool is not emitted inside of the return
450 // sequence. 426 // sequence.
451 { Assembler::BlockConstPoolScope block_const_pool(masm_); 427 {
428 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
429 #if V8_OOL_CONSTANT_POOL
430 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
431 #endif
452 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; 432 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
453 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 433 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
454 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
455 PredictableCodeSizeScope predictable(masm_, -1);
456 __ RecordJSReturn(); 434 __ RecordJSReturn();
457 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT); 435 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
458 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); 436 __ Add(sp, sp, sp_delta, r0);
459 __ add(sp, sp, Operand(sp_delta)); 437 __ blr();
460 __ Jump(lr); 438 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
461 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 439 #if V8_TARGET_ARCH_PPC64 && !V8_OOL_CONSTANT_POOL
 462 } 440 // On 64-bit we need a nop() instruction to ensure we have
 441 // enough space for SetDebugBreakAtReturn().
442 masm_->nop();
443 #endif
463 } 444 }
464 445
465 #ifdef DEBUG 446 #ifdef DEBUG
466 // Check that the size of the code used for returning is large enough 447 // Check that the size of the code used for returning is large enough
467 // for the debugger's requirements. 448 // for the debugger's requirements.
468 DCHECK(Assembler::kJSReturnSequenceInstructions <= 449 DCHECK(Assembler::kJSReturnSequenceInstructions <=
469 masm_->InstructionsGeneratedSince(&check_exit_codesize)); 450 masm_->InstructionsGeneratedSince(&check_exit_codesize));
470 #endif 451 #endif
471 } 452 }
472 } 453 }
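
The epilogue's stack math, worked through under a 64-bit assumption: sp_delta = (num_parameters + 1) * kPointerSize pops the parameters plus the receiver, e.g. (2 + 1) * 8 = 24 bytes for two parameters. The sequence shape is roughly:

  // Shape of the emitted return sequence (illustrative):
  //   LeaveFrame(JAVA_SCRIPT)    ; tear down the JS frame
  //   addi sp, sp, sp_delta      ; pop receiver + parameters
  //   blr                        ; return via the link register
  //   nop                        ; PPC64-only padding (see above) so the
  //                              ; debugger can patch the return in place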
(...skipping 19 matching lines...)
492 473
493 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 474 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
494 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 475 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
495 // For simplicity we always test the accumulator register. 476 // For simplicity we always test the accumulator register.
496 codegen()->GetVar(result_register(), var); 477 codegen()->GetVar(result_register(), var);
497 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 478 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
498 codegen()->DoTest(this); 479 codegen()->DoTest(this);
499 } 480 }
500 481
501 482
502 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 483 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
503 }
504 484
505 485
506 void FullCodeGenerator::AccumulatorValueContext::Plug( 486 void FullCodeGenerator::AccumulatorValueContext::Plug(
507 Heap::RootListIndex index) const { 487 Heap::RootListIndex index) const {
508 __ LoadRoot(result_register(), index); 488 __ LoadRoot(result_register(), index);
509 } 489 }
510 490
511 491
512 void FullCodeGenerator::StackValueContext::Plug( 492 void FullCodeGenerator::StackValueContext::Plug(
513 Heap::RootListIndex index) const { 493 Heap::RootListIndex index) const {
514 __ LoadRoot(result_register(), index); 494 __ LoadRoot(result_register(), index);
515 __ push(result_register()); 495 __ push(result_register());
516 } 496 }
517 497
518 498
519 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { 499 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
520 codegen()->PrepareForBailoutBeforeSplit(condition(), 500 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
521 true,
522 true_label_,
523 false_label_); 501 false_label_);
524 if (index == Heap::kUndefinedValueRootIndex || 502 if (index == Heap::kUndefinedValueRootIndex ||
525 index == Heap::kNullValueRootIndex || 503 index == Heap::kNullValueRootIndex ||
526 index == Heap::kFalseValueRootIndex) { 504 index == Heap::kFalseValueRootIndex) {
527 if (false_label_ != fall_through_) __ b(false_label_); 505 if (false_label_ != fall_through_) __ b(false_label_);
528 } else if (index == Heap::kTrueValueRootIndex) { 506 } else if (index == Heap::kTrueValueRootIndex) {
529 if (true_label_ != fall_through_) __ b(true_label_); 507 if (true_label_ != fall_through_) __ b(true_label_);
530 } else { 508 } else {
531 __ LoadRoot(result_register(), index); 509 __ LoadRoot(result_register(), index);
532 codegen()->DoTest(this); 510 codegen()->DoTest(this);
533 } 511 }
534 } 512 }
535 513
536 514
537 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const { 515 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
538 }
539 516
540 517
541 void FullCodeGenerator::AccumulatorValueContext::Plug( 518 void FullCodeGenerator::AccumulatorValueContext::Plug(
542 Handle<Object> lit) const { 519 Handle<Object> lit) const {
543 __ mov(result_register(), Operand(lit)); 520 __ mov(result_register(), Operand(lit));
544 } 521 }
545 522
546 523
547 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { 524 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
548 // Immediates cannot be pushed directly. 525 // Immediates cannot be pushed directly.
549 __ mov(result_register(), Operand(lit)); 526 __ mov(result_register(), Operand(lit));
550 __ push(result_register()); 527 __ push(result_register());
551 } 528 }
552 529
553 530
554 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 531 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
555 codegen()->PrepareForBailoutBeforeSplit(condition(), 532 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
556 true,
557 true_label_,
558 false_label_); 533 false_label_);
559 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals. 534 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
560 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 535 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
561 if (false_label_ != fall_through_) __ b(false_label_); 536 if (false_label_ != fall_through_) __ b(false_label_);
562 } else if (lit->IsTrue() || lit->IsJSObject()) { 537 } else if (lit->IsTrue() || lit->IsJSObject()) {
563 if (true_label_ != fall_through_) __ b(true_label_); 538 if (true_label_ != fall_through_) __ b(true_label_);
564 } else if (lit->IsString()) { 539 } else if (lit->IsString()) {
565 if (String::cast(*lit)->length() == 0) { 540 if (String::cast(*lit)->length() == 0) {
566 if (false_label_ != fall_through_) __ b(false_label_); 541 if (false_label_ != fall_through_) __ b(false_label_);
567 } else { 542 } else {
(...skipping 14 matching lines...)
582 557
583 558
584 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 559 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
585 Register reg) const { 560 Register reg) const {
586 DCHECK(count > 0); 561 DCHECK(count > 0);
587 __ Drop(count); 562 __ Drop(count);
588 } 563 }
589 564
590 565
591 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( 566 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
592 int count, 567 int count, Register reg) const {
593 Register reg) const {
594 DCHECK(count > 0); 568 DCHECK(count > 0);
595 __ Drop(count); 569 __ Drop(count);
596 __ Move(result_register(), reg); 570 __ Move(result_register(), reg);
597 } 571 }
598 572
599 573
600 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 574 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
601 Register reg) const { 575 Register reg) const {
602 DCHECK(count > 0); 576 DCHECK(count > 0);
603 if (count > 1) __ Drop(count - 1); 577 if (count > 1) __ Drop(count - 1);
604 __ str(reg, MemOperand(sp, 0)); 578 __ StoreP(reg, MemOperand(sp, 0));
605 } 579 }
606 580
607 581
608 void FullCodeGenerator::TestContext::DropAndPlug(int count, 582 void FullCodeGenerator::TestContext::DropAndPlug(int count,
609 Register reg) const { 583 Register reg) const {
610 DCHECK(count > 0); 584 DCHECK(count > 0);
611 // For simplicity we always test the accumulator register. 585 // For simplicity we always test the accumulator register.
612 __ Drop(count); 586 __ Drop(count);
613 __ Move(result_register(), reg); 587 __ Move(result_register(), reg);
614 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 588 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
615 codegen()->DoTest(this); 589 codegen()->DoTest(this);
616 } 590 }
617 591
618 592
619 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 593 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
620 Label* materialize_false) const { 594 Label* materialize_false) const {
621 DCHECK(materialize_true == materialize_false); 595 DCHECK(materialize_true == materialize_false);
622 __ bind(materialize_true); 596 __ bind(materialize_true);
623 } 597 }
624 598
625 599
626 void FullCodeGenerator::AccumulatorValueContext::Plug( 600 void FullCodeGenerator::AccumulatorValueContext::Plug(
627 Label* materialize_true, 601 Label* materialize_true, Label* materialize_false) const {
628 Label* materialize_false) const {
629 Label done; 602 Label done;
630 __ bind(materialize_true); 603 __ bind(materialize_true);
631 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); 604 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
632 __ jmp(&done); 605 __ b(&done);
633 __ bind(materialize_false); 606 __ bind(materialize_false);
634 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); 607 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
635 __ bind(&done); 608 __ bind(&done);
636 } 609 }
637 610
638 611
639 void FullCodeGenerator::StackValueContext::Plug( 612 void FullCodeGenerator::StackValueContext::Plug(
640 Label* materialize_true, 613 Label* materialize_true, Label* materialize_false) const {
641 Label* materialize_false) const {
642 Label done; 614 Label done;
643 __ bind(materialize_true); 615 __ bind(materialize_true);
644 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 616 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
645 __ jmp(&done); 617 __ b(&done);
646 __ bind(materialize_false); 618 __ bind(materialize_false);
647 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 619 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
648 __ bind(&done); 620 __ bind(&done);
649 __ push(ip); 621 __ push(ip);
650 } 622 }
651 623
652 624
653 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 625 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
654 Label* materialize_false) const { 626 Label* materialize_false) const {
655 DCHECK(materialize_true == true_label_); 627 DCHECK(materialize_true == true_label_);
656 DCHECK(materialize_false == false_label_); 628 DCHECK(materialize_false == false_label_);
657 } 629 }
658 630
659 631
660 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 632 void FullCodeGenerator::EffectContext::Plug(bool flag) const {}
661 }
662 633
663 634
664 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 635 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
665 Heap::RootListIndex value_root_index = 636 Heap::RootListIndex value_root_index =
666 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 637 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
667 __ LoadRoot(result_register(), value_root_index); 638 __ LoadRoot(result_register(), value_root_index);
668 } 639 }
669 640
670 641
671 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { 642 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
672 Heap::RootListIndex value_root_index = 643 Heap::RootListIndex value_root_index =
673 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 644 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
674 __ LoadRoot(ip, value_root_index); 645 __ LoadRoot(ip, value_root_index);
675 __ push(ip); 646 __ push(ip);
676 } 647 }
677 648
678 649
679 void FullCodeGenerator::TestContext::Plug(bool flag) const { 650 void FullCodeGenerator::TestContext::Plug(bool flag) const {
680 codegen()->PrepareForBailoutBeforeSplit(condition(), 651 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
681 true,
682 true_label_,
683 false_label_); 652 false_label_);
684 if (flag) { 653 if (flag) {
685 if (true_label_ != fall_through_) __ b(true_label_); 654 if (true_label_ != fall_through_) __ b(true_label_);
686 } else { 655 } else {
687 if (false_label_ != fall_through_) __ b(false_label_); 656 if (false_label_ != fall_through_) __ b(false_label_);
688 } 657 }
689 } 658 }
690 659
691 660
692 void FullCodeGenerator::DoTest(Expression* condition, 661 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
693 Label* if_true, 662 Label* if_false, Label* fall_through) {
694 Label* if_false,
695 Label* fall_through) {
696 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); 663 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
697 CallIC(ic, condition->test_id()); 664 CallIC(ic, condition->test_id());
698 __ tst(result_register(), result_register()); 665 __ cmpi(result_register(), Operand::Zero());
699 Split(ne, if_true, if_false, fall_through); 666 Split(ne, if_true, if_false, fall_through);
700 } 667 }
701 668
702 669
703 void FullCodeGenerator::Split(Condition cond, 670 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
704 Label* if_true, 671 Label* fall_through, CRegister cr) {
705 Label* if_false,
706 Label* fall_through) {
707 if (if_false == fall_through) { 672 if (if_false == fall_through) {
708 __ b(cond, if_true); 673 __ b(cond, if_true, cr);
709 } else if (if_true == fall_through) { 674 } else if (if_true == fall_through) {
710 __ b(NegateCondition(cond), if_false); 675 __ b(NegateCondition(cond), if_false, cr);
711 } else { 676 } else {
712 __ b(cond, if_true); 677 __ b(cond, if_true, cr);
713 __ b(if_false); 678 __ b(if_false);
714 } 679 }
715 } 680 }
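
A hypothetical call site showing why the fall-through cases matter: when if_false is the fall-through, Split emits a single conditional branch and no unconditional one.

  // Hypothetical usage (illustrative only):
  Label if_true, if_false;
  __ cmp(r3, r4);
  Split(eq, &if_true, &if_false, &if_false);  // emits just "beq if_true";
                                              // ne falls straight through
  __ bind(&if_false);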
716 681
717 682
718 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 683 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
719 DCHECK(var->IsStackAllocated()); 684 DCHECK(var->IsStackAllocated());
720 // Offset is negative because higher indexes are at lower addresses. 685 // Offset is negative because higher indexes are at lower addresses.
721 int offset = -var->index() * kPointerSize; 686 int offset = -var->index() * kPointerSize;
722 // Adjust by a (parameter or local) base offset. 687 // Adjust by a (parameter or local) base offset.
(...skipping 14 matching lines...)
737 return ContextOperand(scratch, var->index()); 702 return ContextOperand(scratch, var->index());
738 } else { 703 } else {
739 return StackOperand(var); 704 return StackOperand(var);
740 } 705 }
741 } 706 }
742 707
743 708
744 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 709 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
745 // Use destination as scratch. 710 // Use destination as scratch.
746 MemOperand location = VarOperand(var, dest); 711 MemOperand location = VarOperand(var, dest);
747 __ ldr(dest, location); 712 __ LoadP(dest, location, r0);
748 } 713 }
749 714
750 715
751 void FullCodeGenerator::SetVar(Variable* var, 716 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
752 Register src,
753 Register scratch0,
754 Register scratch1) { 717 Register scratch1) {
755 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); 718 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
756 DCHECK(!scratch0.is(src)); 719 DCHECK(!scratch0.is(src));
757 DCHECK(!scratch0.is(scratch1)); 720 DCHECK(!scratch0.is(scratch1));
758 DCHECK(!scratch1.is(src)); 721 DCHECK(!scratch1.is(src));
759 MemOperand location = VarOperand(var, scratch0); 722 MemOperand location = VarOperand(var, scratch0);
760 __ str(src, location); 723 __ StoreP(src, location, r0);
761 724
762 // Emit the write barrier code if the location is in the heap. 725 // Emit the write barrier code if the location is in the heap.
763 if (var->IsContextSlot()) { 726 if (var->IsContextSlot()) {
764 __ RecordWriteContextSlot(scratch0, 727 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
765 location.offset(), 728 kLRHasBeenSaved, kDontSaveFPRegs);
766 src,
767 scratch1,
768 kLRHasBeenSaved,
769 kDontSaveFPRegs);
770 } 729 }
771 } 730 }
772 731
773 732
774 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, 733 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
775 bool should_normalize, 734 bool should_normalize,
776 Label* if_true, 735 Label* if_true,
777 Label* if_false) { 736 Label* if_false) {
778 // Only prepare for bailouts before splits if we're in a test 737 // Only prepare for bailouts before splits if we're in a test
779 // context. Otherwise, we let the Visit function deal with the 738 // context. Otherwise, we let the Visit function deal with the
780 // preparation to avoid preparing with the same AST id twice. 739 // preparation to avoid preparing with the same AST id twice.
781 if (!context()->IsTest() || !info_->IsOptimizable()) return; 740 if (!context()->IsTest() || !info_->IsOptimizable()) return;
782 741
783 Label skip; 742 Label skip;
784 if (should_normalize) __ b(&skip); 743 if (should_normalize) __ b(&skip);
785 PrepareForBailout(expr, TOS_REG); 744 PrepareForBailout(expr, TOS_REG);
786 if (should_normalize) { 745 if (should_normalize) {
787 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 746 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
788 __ cmp(r0, ip); 747 __ cmp(r3, ip);
789 Split(eq, if_true, if_false, NULL); 748 Split(eq, if_true, if_false, NULL);
790 __ bind(&skip); 749 __ bind(&skip);
791 } 750 }
792 } 751 }
793 752
794 753
795 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 754 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
796 // The variable in the declaration always resides in the current function 755 // The variable in the declaration always resides in the current function
797 // context. 756 // context.
798 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope())); 757 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
799 if (generate_debug_code_) { 758 if (generate_debug_code_) {
800 // Check that we're not inside a with or catch context. 759 // Check that we're not inside a with or catch context.
801 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); 760 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
802 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); 761 __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
803 __ Check(ne, kDeclarationInWithContext); 762 __ Check(ne, kDeclarationInWithContext);
804 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); 763 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
805 __ Check(ne, kDeclarationInCatchContext); 764 __ Check(ne, kDeclarationInCatchContext);
806 } 765 }
807 } 766 }
808 767
809 768
810 void FullCodeGenerator::VisitVariableDeclaration( 769 void FullCodeGenerator::VisitVariableDeclaration(
811 VariableDeclaration* declaration) { 770 VariableDeclaration* declaration) {
812 // If it was not possible to allocate the variable at compile time, we 771 // If it was not possible to allocate the variable at compile time, we
813 // need to "declare" it at runtime to make sure it actually exists in the 772 // need to "declare" it at runtime to make sure it actually exists in the
814 // local context. 773 // local context.
815 VariableProxy* proxy = declaration->proxy(); 774 VariableProxy* proxy = declaration->proxy();
816 VariableMode mode = declaration->mode(); 775 VariableMode mode = declaration->mode();
817 Variable* variable = proxy->var(); 776 Variable* variable = proxy->var();
818 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; 777 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
819 switch (variable->location()) { 778 switch (variable->location()) {
820 case Variable::UNALLOCATED: 779 case Variable::UNALLOCATED:
821 globals_->Add(variable->name(), zone()); 780 globals_->Add(variable->name(), zone());
822 globals_->Add(variable->binding_needs_init() 781 globals_->Add(variable->binding_needs_init()
823 ? isolate()->factory()->the_hole_value() 782 ? isolate()->factory()->the_hole_value()
824 : isolate()->factory()->undefined_value(), 783 : isolate()->factory()->undefined_value(),
825 zone()); 784 zone());
826 break; 785 break;
827 786
828 case Variable::PARAMETER: 787 case Variable::PARAMETER:
829 case Variable::LOCAL: 788 case Variable::LOCAL:
830 if (hole_init) { 789 if (hole_init) {
831 Comment cmnt(masm_, "[ VariableDeclaration"); 790 Comment cmnt(masm_, "[ VariableDeclaration");
832 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 791 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
833 __ str(ip, StackOperand(variable)); 792 __ StoreP(ip, StackOperand(variable));
834 } 793 }
835 break; 794 break;
836 795
837 case Variable::CONTEXT: 796 case Variable::CONTEXT:
838 if (hole_init) { 797 if (hole_init) {
839 Comment cmnt(masm_, "[ VariableDeclaration"); 798 Comment cmnt(masm_, "[ VariableDeclaration");
840 EmitDebugCheckDeclarationContext(variable); 799 EmitDebugCheckDeclarationContext(variable);
841 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 800 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
842 __ str(ip, ContextOperand(cp, variable->index())); 801 __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
843 // No write barrier since the_hole_value is in old space. 802 // No write barrier since the_hole_value is in old space.
844 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 803 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
845 } 804 }
846 break; 805 break;
847 806
848 case Variable::LOOKUP: { 807 case Variable::LOOKUP: {
849 Comment cmnt(masm_, "[ VariableDeclaration"); 808 Comment cmnt(masm_, "[ VariableDeclaration");
850 __ mov(r2, Operand(variable->name())); 809 __ mov(r5, Operand(variable->name()));
851 // Declaration nodes are always introduced in one of four modes. 810 // Declaration nodes are always introduced in one of four modes.
852 DCHECK(IsDeclaredVariableMode(mode)); 811 DCHECK(IsDeclaredVariableMode(mode));
853 PropertyAttributes attr = 812 PropertyAttributes attr =
854 IsImmutableVariableMode(mode) ? READ_ONLY : NONE; 813 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
855 __ mov(r1, Operand(Smi::FromInt(attr))); 814 __ LoadSmiLiteral(r4, Smi::FromInt(attr));
856 // Push initial value, if any. 815 // Push initial value, if any.
857 // Note: For variables we must not push an initial value (such as 816 // Note: For variables we must not push an initial value (such as
858 // 'undefined') because we may have a (legal) redeclaration and we 817 // 'undefined') because we may have a (legal) redeclaration and we
859 // must not destroy the current value. 818 // must not destroy the current value.
860 if (hole_init) { 819 if (hole_init) {
861 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); 820 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
862 __ Push(cp, r2, r1, r0); 821 __ Push(cp, r5, r4, r3);
863 } else { 822 } else {
864 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value. 823 __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value.
865 __ Push(cp, r2, r1, r0); 824 __ Push(cp, r5, r4, r3);
866 } 825 }
867 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 826 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
868 break; 827 break;
869 } 828 }
870 } 829 }
871 } 830 }
872 831
873 832
874 void FullCodeGenerator::VisitFunctionDeclaration( 833 void FullCodeGenerator::VisitFunctionDeclaration(
875 FunctionDeclaration* declaration) { 834 FunctionDeclaration* declaration) {
876 VariableProxy* proxy = declaration->proxy(); 835 VariableProxy* proxy = declaration->proxy();
877 Variable* variable = proxy->var(); 836 Variable* variable = proxy->var();
878 switch (variable->location()) { 837 switch (variable->location()) {
879 case Variable::UNALLOCATED: { 838 case Variable::UNALLOCATED: {
880 globals_->Add(variable->name(), zone()); 839 globals_->Add(variable->name(), zone());
881 Handle<SharedFunctionInfo> function = 840 Handle<SharedFunctionInfo> function =
882 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_); 841 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
883 // Check for stack-overflow exception. 842 // Check for stack-overflow exception.
884 if (function.is_null()) return SetStackOverflow(); 843 if (function.is_null()) return SetStackOverflow();
885 globals_->Add(function, zone()); 844 globals_->Add(function, zone());
886 break; 845 break;
887 } 846 }
888 847
889 case Variable::PARAMETER: 848 case Variable::PARAMETER:
890 case Variable::LOCAL: { 849 case Variable::LOCAL: {
891 Comment cmnt(masm_, "[ FunctionDeclaration"); 850 Comment cmnt(masm_, "[ FunctionDeclaration");
892 VisitForAccumulatorValue(declaration->fun()); 851 VisitForAccumulatorValue(declaration->fun());
893 __ str(result_register(), StackOperand(variable)); 852 __ StoreP(result_register(), StackOperand(variable));
894 break; 853 break;
895 } 854 }
896 855
897 case Variable::CONTEXT: { 856 case Variable::CONTEXT: {
898 Comment cmnt(masm_, "[ FunctionDeclaration"); 857 Comment cmnt(masm_, "[ FunctionDeclaration");
899 EmitDebugCheckDeclarationContext(variable); 858 EmitDebugCheckDeclarationContext(variable);
900 VisitForAccumulatorValue(declaration->fun()); 859 VisitForAccumulatorValue(declaration->fun());
901 __ str(result_register(), ContextOperand(cp, variable->index())); 860 __ StoreP(result_register(), ContextOperand(cp, variable->index()), r0);
902 int offset = Context::SlotOffset(variable->index()); 861 int offset = Context::SlotOffset(variable->index());
903 // We know that we have written a function, which is not a smi. 862 // We know that we have written a function, which is not a smi.
904 __ RecordWriteContextSlot(cp, 863 __ RecordWriteContextSlot(cp, offset, result_register(), r5,
905 offset, 864 kLRHasBeenSaved, kDontSaveFPRegs,
906 result_register(), 865 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
907 r2,
908 kLRHasBeenSaved,
909 kDontSaveFPRegs,
910 EMIT_REMEMBERED_SET,
911 OMIT_SMI_CHECK);
912 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 866 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
913 break; 867 break;
914 } 868 }
915 869
916 case Variable::LOOKUP: { 870 case Variable::LOOKUP: {
917 Comment cmnt(masm_, "[ FunctionDeclaration"); 871 Comment cmnt(masm_, "[ FunctionDeclaration");
918 __ mov(r2, Operand(variable->name())); 872 __ mov(r5, Operand(variable->name()));
919 __ mov(r1, Operand(Smi::FromInt(NONE))); 873 __ LoadSmiLiteral(r4, Smi::FromInt(NONE));
920 __ Push(cp, r2, r1); 874 __ Push(cp, r5, r4);
921 // Push initial value for function declaration. 875 // Push initial value for function declaration.
922 VisitForStackValue(declaration->fun()); 876 VisitForStackValue(declaration->fun());
923 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 877 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
924 break; 878 break;
925 } 879 }
926 } 880 }
927 } 881 }
928 882
929 883
930 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 884 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
931 Variable* variable = declaration->proxy()->var(); 885 Variable* variable = declaration->proxy()->var();
932 DCHECK(variable->location() == Variable::CONTEXT); 886 DCHECK(variable->location() == Variable::CONTEXT);
933 DCHECK(variable->interface()->IsFrozen()); 887 DCHECK(variable->interface()->IsFrozen());
934 888
935 Comment cmnt(masm_, "[ ModuleDeclaration"); 889 Comment cmnt(masm_, "[ ModuleDeclaration");
936 EmitDebugCheckDeclarationContext(variable); 890 EmitDebugCheckDeclarationContext(variable);
937 891
938 // Load instance object. 892 // Load instance object.
939 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope())); 893 __ LoadContext(r4, scope_->ContextChainLength(scope_->GlobalScope()));
940 __ ldr(r1, ContextOperand(r1, variable->interface()->Index())); 894 __ LoadP(r4, ContextOperand(r4, variable->interface()->Index()));
941 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX)); 895 __ LoadP(r4, ContextOperand(r4, Context::EXTENSION_INDEX));
942 896
943 // Assign it. 897 // Assign it.
944 __ str(r1, ContextOperand(cp, variable->index())); 898 __ StoreP(r4, ContextOperand(cp, variable->index()), r0);
945 // We know that we have written a module, which is not a smi. 899 // We know that we have written a module, which is not a smi.
946 __ RecordWriteContextSlot(cp, 900 __ RecordWriteContextSlot(cp, Context::SlotOffset(variable->index()), r4, r6,
947 Context::SlotOffset(variable->index()), 901 kLRHasBeenSaved, kDontSaveFPRegs,
948 r1, 902 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
949 r3,
950 kLRHasBeenSaved,
951 kDontSaveFPRegs,
952 EMIT_REMEMBERED_SET,
953 OMIT_SMI_CHECK);
954 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); 903 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
955 904
956 // Traverse into body. 905 // Traverse into body.
957 Visit(declaration->module()); 906 Visit(declaration->module());
958 } 907 }
959 908
960 909
961 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) { 910 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
962 VariableProxy* proxy = declaration->proxy(); 911 VariableProxy* proxy = declaration->proxy();
963 Variable* variable = proxy->var(); 912 Variable* variable = proxy->var();
(...skipping 18 matching lines...)
982 931
983 932
984 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { 933 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
985 // TODO(rossberg) 934 // TODO(rossberg)
986 } 935 }
987 936
988 937
989 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 938 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
990 // Call the runtime to declare the globals. 939 // Call the runtime to declare the globals.
991 // The context is the first argument. 940 // The context is the first argument.
992 __ mov(r1, Operand(pairs)); 941 __ mov(r4, Operand(pairs));
993 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); 942 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
994 __ Push(cp, r1, r0); 943 __ Push(cp, r4, r3);
995 __ CallRuntime(Runtime::kDeclareGlobals, 3); 944 __ CallRuntime(Runtime::kDeclareGlobals, 3);
996 // Return value is ignored. 945 // Return value is ignored.
997 } 946 }
998 947
999 948
1000 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { 949 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1001 // Call the runtime to declare the modules. 950 // Call the runtime to declare the modules.
1002 __ Push(descriptions); 951 __ Push(descriptions);
1003 __ CallRuntime(Runtime::kDeclareModules, 1); 952 __ CallRuntime(Runtime::kDeclareModules, 1);
1004 // Return value is ignored. 953 // Return value is ignored.
(...skipping 25 matching lines...)
1030 } 979 }
1031 980
1032 Comment cmnt(masm_, "[ Case comparison"); 981 Comment cmnt(masm_, "[ Case comparison");
1033 __ bind(&next_test); 982 __ bind(&next_test);
1034 next_test.Unuse(); 983 next_test.Unuse();
1035 984
1036 // Compile the label expression. 985 // Compile the label expression.
1037 VisitForAccumulatorValue(clause->label()); 986 VisitForAccumulatorValue(clause->label());
1038 987
1039 // Perform the comparison as if via '==='. 988 // Perform the comparison as if via '==='.
1040 __ ldr(r1, MemOperand(sp, 0)); // Switch value. 989 __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
1041 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 990 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1042 JumpPatchSite patch_site(masm_); 991 JumpPatchSite patch_site(masm_);
1043 if (inline_smi_code) { 992 if (inline_smi_code) {
1044 Label slow_case; 993 Label slow_case;
1045 __ orr(r2, r1, r0); 994 __ orx(r5, r4, r3);
1046 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 995 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
1047 996
1048 __ cmp(r1, r0); 997 __ cmp(r4, r3);
1049 __ b(ne, &next_test); 998 __ bne(&next_test);
1050 __ Drop(1); // Switch value is no longer needed. 999 __ Drop(1); // Switch value is no longer needed.
1051 __ b(clause->body_target()); 1000 __ b(clause->body_target());
1052 __ bind(&slow_case); 1001 __ bind(&slow_case);
1053 } 1002 }
1054 1003
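Why the orx/EmitJumpIfNotSmi pair above suffices to guard the fast path: V8 smis keep the low tag bit clear (kSmiTag == 0, kSmiTagMask == 1), so OR-ing the two operands and testing that single bit checks both values at once. A minimal sketch of the check, assuming only those standard tagging constants:

    // Both words are smis iff the low tag bit of their bitwise OR is clear.
    inline bool BothSmi(intptr_t a, intptr_t b) {
      return ((a | b) & kSmiTagMask) == 0;  // kSmiTag == 0
    }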
1055 // Record position before stub call for type feedback. 1004 // Record position before stub call for type feedback.
1056 SetSourcePosition(clause->position()); 1005 SetSourcePosition(clause->position());
1057 Handle<Code> ic = 1006 Handle<Code> ic =
1058 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); 1007 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1059 CallIC(ic, clause->CompareId()); 1008 CallIC(ic, clause->CompareId());
1060 patch_site.EmitPatchInfo(); 1009 patch_site.EmitPatchInfo();
1061 1010
1062 Label skip; 1011 Label skip;
1063 __ b(&skip); 1012 __ b(&skip);
1064 PrepareForBailout(clause, TOS_REG); 1013 PrepareForBailout(clause, TOS_REG);
1065 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1014 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1066 __ cmp(r0, ip); 1015 __ cmp(r3, ip);
1067 __ b(ne, &next_test); 1016 __ bne(&next_test);
1068 __ Drop(1); 1017 __ Drop(1);
1069 __ jmp(clause->body_target()); 1018 __ b(clause->body_target());
1070 __ bind(&skip); 1019 __ bind(&skip);
1071 1020
1072 __ cmp(r0, Operand::Zero()); 1021 __ cmpi(r3, Operand::Zero());
1073 __ b(ne, &next_test); 1022 __ bne(&next_test);
1074 __ Drop(1); // Switch value is no longer needed. 1023 __ Drop(1); // Switch value is no longer needed.
1075 __ b(clause->body_target()); 1024 __ b(clause->body_target());
1076 } 1025 }
1077 1026
1078 // Discard the test value and jump to the default if present, otherwise to 1027 // Discard the test value and jump to the default if present, otherwise to
1079 // the end of the statement. 1028 // the end of the statement.
1080 __ bind(&next_test); 1029 __ bind(&next_test);
1081 __ Drop(1); // Switch value is no longer needed. 1030 __ Drop(1); // Switch value is no longer needed.
1082 if (default_clause == NULL) { 1031 if (default_clause == NULL) {
1083 __ b(nested_statement.break_label()); 1032 __ b(nested_statement.break_label());
(...skipping 21 matching lines...)
1105 SetStatementPosition(stmt); 1054 SetStatementPosition(stmt);
1106 1055
1107 Label loop, exit; 1056 Label loop, exit;
1108 ForIn loop_statement(this, stmt); 1057 ForIn loop_statement(this, stmt);
1109 increment_loop_depth(); 1058 increment_loop_depth();
1110 1059
1111 // Get the object to enumerate over. If the object is null or undefined, skip 1060 // Get the object to enumerate over. If the object is null or undefined, skip
1112 // over the loop. See ECMA-262 version 5, section 12.6.4. 1061 // over the loop. See ECMA-262 version 5, section 12.6.4.
1113 VisitForAccumulatorValue(stmt->enumerable()); 1062 VisitForAccumulatorValue(stmt->enumerable());
1114 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1063 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1115 __ cmp(r0, ip); 1064 __ cmp(r3, ip);
1116 __ b(eq, &exit); 1065 __ beq(&exit);
1117 Register null_value = r5; 1066 Register null_value = r7;
1118 __ LoadRoot(null_value, Heap::kNullValueRootIndex); 1067 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1119 __ cmp(r0, null_value); 1068 __ cmp(r3, null_value);
1120 __ b(eq, &exit); 1069 __ beq(&exit);
1121 1070
1122 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); 1071 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1123 1072
1124 // Convert the object to a JS object. 1073 // Convert the object to a JS object.
1125 Label convert, done_convert; 1074 Label convert, done_convert;
1126 __ JumpIfSmi(r0, &convert); 1075 __ JumpIfSmi(r3, &convert);
1127 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1076 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
1128 __ b(ge, &done_convert); 1077 __ bge(&done_convert);
1129 __ bind(&convert); 1078 __ bind(&convert);
1130 __ push(r0); 1079 __ push(r3);
1131 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1080 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1132 __ bind(&done_convert); 1081 __ bind(&done_convert);
1133 __ push(r0); 1082 __ push(r3);
1134 1083
1135 // Check for proxies. 1084 // Check for proxies.
1136 Label call_runtime; 1085 Label call_runtime;
1137 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1086 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1138 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE); 1087 __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
1139 __ b(le, &call_runtime); 1088 __ ble(&call_runtime);
1140 1089
1141 // Check cache validity in generated code. This is a fast case for 1090 // Check cache validity in generated code. This is a fast case for
1142 // the JSObject::IsSimpleEnum cache validity checks. If we cannot 1091 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1143 // guarantee cache validity, call the runtime system to check cache 1092 // guarantee cache validity, call the runtime system to check cache
1144 // validity or get the property names in a fixed array. 1093 // validity or get the property names in a fixed array.
1145 __ CheckEnumCache(null_value, &call_runtime); 1094 __ CheckEnumCache(null_value, &call_runtime);
1146 1095
1147 // The enum cache is valid. Load the map of the object being 1096 // The enum cache is valid. Load the map of the object being
1148 // iterated over and use the cache for the iteration. 1097 // iterated over and use the cache for the iteration.
1149 Label use_cache; 1098 Label use_cache;
1150 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 1099 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
1151 __ b(&use_cache); 1100 __ b(&use_cache);
1152 1101
1153 // Get the set of properties to enumerate. 1102 // Get the set of properties to enumerate.
1154 __ bind(&call_runtime); 1103 __ bind(&call_runtime);
1155 __ push(r0); // Duplicate the enumerable object on the stack. 1104 __ push(r3); // Duplicate the enumerable object on the stack.
1156 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); 1105 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1157 1106
1158 // If we got a map from the runtime call, we can do a fast 1107 // If we got a map from the runtime call, we can do a fast
1159 // modification check. Otherwise, we got a fixed array, and we have 1108 // modification check. Otherwise, we got a fixed array, and we have
1160 // to do a slow check. 1109 // to do a slow check.
1161 Label fixed_array; 1110 Label fixed_array;
1162 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 1111 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
1163 __ LoadRoot(ip, Heap::kMetaMapRootIndex); 1112 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1164 __ cmp(r2, ip); 1113 __ cmp(r5, ip);
1165 __ b(ne, &fixed_array); 1114 __ bne(&fixed_array);
1166 1115
1167 // We got a map in register r0. Get the enumeration cache from it. 1116 // We got a map in register r3. Get the enumeration cache from it.
1168 Label no_descriptors; 1117 Label no_descriptors;
1169 __ bind(&use_cache); 1118 __ bind(&use_cache);
1170 1119
1171 __ EnumLength(r1, r0); 1120 __ EnumLength(r4, r3);
1172 __ cmp(r1, Operand(Smi::FromInt(0))); 1121 __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
1173 __ b(eq, &no_descriptors); 1122 __ beq(&no_descriptors);
1174 1123
1175 __ LoadInstanceDescriptors(r0, r2); 1124 __ LoadInstanceDescriptors(r3, r5);
1176 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset)); 1125 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
1177 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset)); 1126 __ LoadP(r5,
1127 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
1178 1128
1179 // Set up the four remaining stack slots. 1129 // Set up the four remaining stack slots.
1180 __ push(r0); // Map. 1130 __ push(r3); // Map.
1181 __ mov(r0, Operand(Smi::FromInt(0))); 1131 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1182 // Push enumeration cache, enumeration cache length (as smi) and zero. 1132 // Push enumeration cache, enumeration cache length (as smi) and zero.
1183 __ Push(r2, r1, r0); 1133 __ Push(r5, r4, r3);
1184 __ jmp(&loop); 1134 __ b(&loop);
1185 1135
1186 __ bind(&no_descriptors); 1136 __ bind(&no_descriptors);
1187 __ Drop(1); 1137 __ Drop(1);
1188 __ jmp(&exit); 1138 __ b(&exit);
1189 1139
1190 // We got a fixed array in register r0. Iterate through that. 1140 // We got a fixed array in register r3. Iterate through that.
1191 Label non_proxy; 1141 Label non_proxy;
1192 __ bind(&fixed_array); 1142 __ bind(&fixed_array);
1193 1143
1194 __ Move(r1, FeedbackVector()); 1144 __ Move(r4, FeedbackVector());
1195 __ mov(r2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); 1145 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1196 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(slot))); 1146 __ StoreP(r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(slot)), r0);
1197 1147
1198 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check 1148 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi indicates slow check
1199 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object 1149 __ LoadP(r5, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1200 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1150 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1201 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); 1151 __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
1202 __ b(gt, &non_proxy); 1152 __ bgt(&non_proxy);
1203 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy 1153 __ LoadSmiLiteral(r4, Smi::FromInt(0)); // Zero indicates proxy
1204 __ bind(&non_proxy); 1154 __ bind(&non_proxy);
1205 __ Push(r1, r0); // Smi and array 1155 __ Push(r4, r3); // Smi and array
1206 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset)); 1156 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
1207 __ mov(r0, Operand(Smi::FromInt(0))); 1157 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1208 __ Push(r1, r0); // Fixed array length (as smi) and initial index. 1158 __ Push(r4, r3); // Fixed array length (as smi) and initial index.
1209 1159
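At this point the for-in loop has five values on the stack, matching the __ Drop(5) at the break label below. A sketch of the layout the loop body's sp-relative loads assume:

    // sp[4]: the enumerable object itself
    // sp[3]: its map (fast path), or Smi(1)/Smi(0) marking slow/proxy cases
    // sp[2]: the enum cache bridge cache, or the fixed array of keys
    // sp[1]: the number of keys, as a smi
    // sp[0]: the current index, as a smi, starting at 0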
1210 // Generate code for doing the condition check. 1160 // Generate code for doing the condition check.
1211 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1161 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1212 __ bind(&loop); 1162 __ bind(&loop);
1213 // Load the current count to r0, load the length to r1. 1163 // Load the current count to r3, load the length to r4.
1214 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize)); 1164 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1215 __ cmp(r0, r1); // Compare to the array length. 1165 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1216 __ b(hs, loop_statement.break_label()); 1166 __ cmpl(r3, r4); // Compare to the array length.
1167 __ bge(loop_statement.break_label());
1217 1168
1218 // Get the current entry of the array into register r3. 1169 // Get the current entry of the array into register r6.
1219 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); 1170 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1220 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 1171 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1221 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0)); 1172 __ SmiToPtrArrayOffset(r6, r3);
1173 __ LoadPX(r6, MemOperand(r6, r5));
1222 1174
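The SmiToPtrArrayOffset/LoadPX pair above turns the smi index straight into a byte offset, folding the untag and the scale by kPointerSize into one shift. A hedged sketch of that arithmetic, assuming the usual V8 smi encodings (value << 1 on 32-bit targets, value << 32 on 64-bit):

    // byte_offset = untagged_index * kPointerSize, as a single shift.
    intptr_t SmiIndexToByteOffset(intptr_t smi) {
    #if V8_TARGET_ARCH_PPC64
      return smi >> (kSmiShift - kPointerSizeLog2);   // 32 - 3
    #else
      return smi << (kPointerSizeLog2 - kSmiTagSize); //  2 - 1
    #endif
    }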
1223 // Get the expected map from the stack or a smi in the 1175 // Get the expected map from the stack or a smi in the
1224 // permanent slow case into register r2. 1176 // permanent slow case into register r5.
1225 __ ldr(r2, MemOperand(sp, 3 * kPointerSize)); 1177 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1226 1178
1227 // Check if the expected map still matches that of the enumerable. 1179 // Check if the expected map still matches that of the enumerable.
1228 // If not, we may have to filter the key. 1180 // If not, we may have to filter the key.
1229 Label update_each; 1181 Label update_each;
1230 __ ldr(r1, MemOperand(sp, 4 * kPointerSize)); 1182 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1231 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset)); 1183 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1232 __ cmp(r4, Operand(r2)); 1184 __ cmp(r7, r5);
1233 __ b(eq, &update_each); 1185 __ beq(&update_each);
1234 1186
1235 // For proxies, no filtering is done. 1187 // For proxies, no filtering is done.
1236 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. 1188 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1237 __ cmp(r2, Operand(Smi::FromInt(0))); 1189 __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
1238 __ b(eq, &update_each); 1190 __ beq(&update_each);
1239 1191
1240 // Convert the entry to a string or (smi) 0 if it isn't a property 1192 // Convert the entry to a string or (smi) 0 if it isn't a property
1241 // any more. If the property has been removed while iterating, we 1193 // any more. If the property has been removed while iterating, we
1242 // just skip it. 1194 // just skip it.
1243 __ push(r1); // Enumerable. 1195 __ Push(r4, r6); // Enumerable and current entry.
1244 __ push(r3); // Current entry.
1245 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 1196 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1246 __ mov(r3, Operand(r0), SetCC); 1197 __ mr(r6, r3);
1247 __ b(eq, loop_statement.continue_label()); 1198 __ cmpi(r6, Operand::Zero());
1199 __ beq(loop_statement.continue_label());
1248 1200
1249 // Update the 'each' property or variable from the possibly filtered 1201 // Update the 'each' property or variable from the possibly filtered
1250 // entry in register r3. 1202 // entry in register r6.
1251 __ bind(&update_each); 1203 __ bind(&update_each);
1252 __ mov(result_register(), r3); 1204 __ mr(result_register(), r6);
1253 // Perform the assignment as if via '='. 1205 // Perform the assignment as if via '='.
1254 { EffectContext context(this); 1206 {
1207 EffectContext context(this);
1255 EmitAssignment(stmt->each()); 1208 EmitAssignment(stmt->each());
1256 } 1209 }
1257 1210
1258 // Generate code for the body of the loop. 1211 // Generate code for the body of the loop.
1259 Visit(stmt->body()); 1212 Visit(stmt->body());
1260 1213
1261 // Generate code for going to the next element by incrementing 1214 // Generate code for going to the next element by incrementing
1262 // the index (smi) stored on top of the stack. 1215 // the index (smi) stored on top of the stack.
1263 __ bind(loop_statement.continue_label()); 1216 __ bind(loop_statement.continue_label());
1264 __ pop(r0); 1217 __ pop(r3);
1265 __ add(r0, r0, Operand(Smi::FromInt(1))); 1218 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1266 __ push(r0); 1219 __ push(r3);
1267 1220
1268 EmitBackEdgeBookkeeping(stmt, &loop); 1221 EmitBackEdgeBookkeeping(stmt, &loop);
1269 __ b(&loop); 1222 __ b(&loop);
1270 1223
1271 // Remove the pointers stored on the stack. 1224 // Remove the pointers stored on the stack.
1272 __ bind(loop_statement.break_label()); 1225 __ bind(loop_statement.break_label());
1273 __ Drop(5); 1226 __ Drop(5);
1274 1227
1275 // Exit and decrement the loop depth. 1228 // Exit and decrement the loop depth.
1276 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1229 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
(...skipping 13 matching lines...)
1290 VisitForEffect(stmt->assign_iterator()); 1243 VisitForEffect(stmt->assign_iterator());
1291 1244
1292 // Loop entry. 1245 // Loop entry.
1293 __ bind(loop_statement.continue_label()); 1246 __ bind(loop_statement.continue_label());
1294 1247
1295 // result = iterator.next() 1248 // result = iterator.next()
1296 VisitForEffect(stmt->next_result()); 1249 VisitForEffect(stmt->next_result());
1297 1250
1298 // if (result.done) break; 1251 // if (result.done) break;
1299 Label result_not_done; 1252 Label result_not_done;
1300 VisitForControl(stmt->result_done(), 1253 VisitForControl(stmt->result_done(), loop_statement.break_label(),
1301 loop_statement.break_label(), 1254 &result_not_done, &result_not_done);
1302 &result_not_done,
1303 &result_not_done);
1304 __ bind(&result_not_done); 1255 __ bind(&result_not_done);
1305 1256
1306 // each = result.value 1257 // each = result.value
1307 VisitForEffect(stmt->assign_each()); 1258 VisitForEffect(stmt->assign_each());
1308 1259
1309 // Generate code for the body of the loop. 1260 // Generate code for the body of the loop.
1310 Visit(stmt->body()); 1261 Visit(stmt->body());
1311 1262
1312 // Check stack before looping. 1263 // Check stack before looping.
1313 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS); 1264 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1314 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label()); 1265 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1315 __ jmp(loop_statement.continue_label()); 1266 __ b(loop_statement.continue_label());
1316 1267
1317 // Exit and decrement the loop depth. 1268 // Exit and decrement the loop depth.
1318 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1269 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1319 __ bind(loop_statement.break_label()); 1270 __ bind(loop_statement.break_label());
1320 decrement_loop_depth(); 1271 decrement_loop_depth();
1321 } 1272 }
1322 1273
1323 1274
1324 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, 1275 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1325 bool pretenure) { 1276 bool pretenure) {
1326 // Use the fast case closure allocation code that allocates in new 1277 // Use the fast case closure allocation code that allocates in new
1327 // space for nested functions that don't need literals cloning. If 1278 // space for nested functions that don't need literals cloning. If
1328 // we're running with the --always-opt or the --prepare-always-opt 1279 // we're running with the --always-opt or the --prepare-always-opt
1329 // flag, we need to use the runtime function so that the new function 1280 // flag, we need to use the runtime function so that the new function
1330 // we are creating here gets a chance to have its code optimized and 1281 // we are creating here gets a chance to have its code optimized and
1331 // doesn't just get a copy of the existing unoptimized code. 1282 // doesn't just get a copy of the existing unoptimized code.
1332 if (!FLAG_always_opt && 1283 if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
1333 !FLAG_prepare_always_opt && 1284 scope()->is_function_scope() && info->num_literals() == 0) {
1334 !pretenure &&
1335 scope()->is_function_scope() &&
1336 info->num_literals() == 0) {
1337 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind()); 1285 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1338 __ mov(r2, Operand(info)); 1286 __ mov(r5, Operand(info));
1339 __ CallStub(&stub); 1287 __ CallStub(&stub);
1340 } else { 1288 } else {
1341 __ mov(r0, Operand(info)); 1289 __ mov(r3, Operand(info));
1342 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex 1290 __ LoadRoot(
1343 : Heap::kFalseValueRootIndex); 1291 r4, pretenure ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1344 __ Push(cp, r0, r1); 1292 __ Push(cp, r3, r4);
1345 __ CallRuntime(Runtime::kNewClosure, 3); 1293 __ CallRuntime(Runtime::kNewClosure, 3);
1346 } 1294 }
1347 context()->Plug(r0); 1295 context()->Plug(r3);
1348 } 1296 }
1349 1297
1350 1298
1351 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1299 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1352 Comment cmnt(masm_, "[ VariableProxy"); 1300 Comment cmnt(masm_, "[ VariableProxy");
1353 EmitVariableLoad(expr); 1301 EmitVariableLoad(expr);
1354 } 1302 }
1355 1303
1356 1304
1305 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1306 Comment cnmt(masm_, "[ SuperReference ");
1307
1308 __ LoadP(LoadDescriptor::ReceiverRegister(),
1309 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1310
1311 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1312 __ Move(LoadDescriptor::NameRegister(), home_object_symbol);
1313
1314 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1315
1316 __ Cmpi(r3, Operand(isolate()->factory()->undefined_value()), r0);
1317 Label done;
1318 __ bne(&done);
1319 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1320 __ bind(&done);
1321 }
1322
1323
1357 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy, 1324 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1358 TypeofState typeof_state, 1325 TypeofState typeof_state,
1359 Label* slow) { 1326 Label* slow) {
1360 Register current = cp; 1327 Register current = cp;
1361 Register next = r1; 1328 Register next = r4;
1362 Register temp = r2; 1329 Register temp = r5;
1363 1330
1364 Scope* s = scope(); 1331 Scope* s = scope();
1365 while (s != NULL) { 1332 while (s != NULL) {
1366 if (s->num_heap_slots() > 0) { 1333 if (s->num_heap_slots() > 0) {
1367 if (s->calls_sloppy_eval()) { 1334 if (s->calls_sloppy_eval()) {
1368 // Check that extension is NULL. 1335 // Check that extension is NULL.
1369 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); 1336 __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1370 __ tst(temp, temp); 1337 __ cmpi(temp, Operand::Zero());
1371 __ b(ne, slow); 1338 __ bne(slow);
1372 } 1339 }
1373 // Load next context in chain. 1340 // Load next context in chain.
1374 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX)); 1341 __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1375 // Walk the rest of the chain without clobbering cp. 1342 // Walk the rest of the chain without clobbering cp.
1376 current = next; 1343 current = next;
1377 } 1344 }
1378 // If no outer scope calls eval, we do not need to check more 1345 // If no outer scope calls eval, we do not need to check more
1379 // context extensions. 1346 // context extensions.
1380 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; 1347 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1381 s = s->outer_scope(); 1348 s = s->outer_scope();
1382 } 1349 }
1383 1350
1384 if (s->is_eval_scope()) { 1351 if (s->is_eval_scope()) {
1385 Label loop, fast; 1352 Label loop, fast;
1386 if (!current.is(next)) { 1353 if (!current.is(next)) {
1387 __ Move(next, current); 1354 __ Move(next, current);
1388 } 1355 }
1389 __ bind(&loop); 1356 __ bind(&loop);
1390 // Terminate at native context. 1357 // Terminate at native context.
1391 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); 1358 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1392 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); 1359 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1393 __ cmp(temp, ip); 1360 __ cmp(temp, ip);
1394 __ b(eq, &fast); 1361 __ beq(&fast);
1395 // Check that extension is NULL. 1362 // Check that extension is NULL.
1396 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); 1363 __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1397 __ tst(temp, temp); 1364 __ cmpi(temp, Operand::Zero());
1398 __ b(ne, slow); 1365 __ bne(slow);
1399 // Load next context in chain. 1366 // Load next context in chain.
1400 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); 1367 __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1401 __ b(&loop); 1368 __ b(&loop);
1402 __ bind(&fast); 1369 __ bind(&fast);
1403 } 1370 }
1404 1371
1405 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 1372 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1406 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name())); 1373 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1407 if (FLAG_vector_ics) { 1374 if (FLAG_vector_ics) {
1408 __ mov(VectorLoadICDescriptor::SlotRegister(), 1375 __ mov(VectorLoadICDescriptor::SlotRegister(),
1409 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 1376 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1410 } 1377 }
1411 1378
1412 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) 1379 ContextualMode mode =
1413 ? NOT_CONTEXTUAL 1380 (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL : CONTEXTUAL;
1414 : CONTEXTUAL;
1415 CallLoadIC(mode); 1381 CallLoadIC(mode);
1416 } 1382 }
1417 1383
1418 1384
1419 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1385 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1420 Label* slow) { 1386 Label* slow) {
1421 DCHECK(var->IsContextSlot()); 1387 DCHECK(var->IsContextSlot());
1422 Register context = cp; 1388 Register context = cp;
1423 Register next = r3; 1389 Register next = r6;
1424 Register temp = r4; 1390 Register temp = r7;
1425 1391
1426 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1392 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1427 if (s->num_heap_slots() > 0) { 1393 if (s->num_heap_slots() > 0) {
1428 if (s->calls_sloppy_eval()) { 1394 if (s->calls_sloppy_eval()) {
1429 // Check that extension is NULL. 1395 // Check that extension is NULL.
1430 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1396 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1431 __ tst(temp, temp); 1397 __ cmpi(temp, Operand::Zero());
1432 __ b(ne, slow); 1398 __ bne(slow);
1433 } 1399 }
1434 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); 1400 __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1435 // Walk the rest of the chain without clobbering cp. 1401 // Walk the rest of the chain without clobbering cp.
1436 context = next; 1402 context = next;
1437 } 1403 }
1438 } 1404 }
1439 // Check that last extension is NULL. 1405 // Check that last extension is NULL.
1440 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1406 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1441 __ tst(temp, temp); 1407 __ cmpi(temp, Operand::Zero());
1442 __ b(ne, slow); 1408 __ bne(slow);
1443 1409
1444 // This function is used only for loads, not stores, so it's safe to 1410 // This function is used only for loads, not stores, so it's safe to
1445 // return a cp-based operand (the write barrier cannot be allowed to 1411 // return a cp-based operand (the write barrier cannot be allowed to
1446 // destroy the cp register). 1412 // destroy the cp register).
1447 return ContextOperand(context, var->index()); 1413 return ContextOperand(context, var->index());
1448 } 1414 }
1449 1415
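Both extension walks above (EmitLoadGlobalCheckExtensions and ContextSlotOperandCheckExtensions) implement the same conceptual loop, sketched here with V8's Context accessors; the helper itself is hypothetical:

    // Walk from the current context toward the target scope's context,
    // bailing out if any intervening context has acquired an extension
    // object (possible only through sloppy-mode eval).
    bool CanTakeFastPath(Context* current, Context* target) {
      for (Context* c = current; c != target; c = c->previous()) {
        if (c->extension() != NULL) return false;  // take the slow path
      }
      return true;
    }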
1450 1416
1451 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, 1417 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1452 TypeofState typeof_state, 1418 TypeofState typeof_state,
1453 Label* slow, 1419 Label* slow, Label* done) {
1454 Label* done) {
1455 // Generate fast-case code for variables that might be shadowed by 1420 // Generate fast-case code for variables that might be shadowed by
1456 // eval-introduced variables. Eval is used a lot without 1421 // eval-introduced variables. Eval is used a lot without
1457 // introducing variables. In those cases, we do not want to 1422 // introducing variables. In those cases, we do not want to
1458 // perform a runtime call for all variables in the scope 1423 // perform a runtime call for all variables in the scope
1459 // containing the eval. 1424 // containing the eval.
1460 Variable* var = proxy->var(); 1425 Variable* var = proxy->var();
1461 if (var->mode() == DYNAMIC_GLOBAL) { 1426 if (var->mode() == DYNAMIC_GLOBAL) {
1462 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow); 1427 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1463 __ jmp(done); 1428 __ b(done);
1464 } else if (var->mode() == DYNAMIC_LOCAL) { 1429 } else if (var->mode() == DYNAMIC_LOCAL) {
1465 Variable* local = var->local_if_not_shadowed(); 1430 Variable* local = var->local_if_not_shadowed();
1466 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); 1431 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
1467 if (local->mode() == LET || local->mode() == CONST || 1432 if (local->mode() == LET || local->mode() == CONST ||
1468 local->mode() == CONST_LEGACY) { 1433 local->mode() == CONST_LEGACY) {
1469 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1434 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1435 __ bne(done);
1470 if (local->mode() == CONST_LEGACY) { 1436 if (local->mode() == CONST_LEGACY) {
1471 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1437 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1472 } else { // LET || CONST 1438 } else { // LET || CONST
1473 __ b(ne, done); 1439 __ mov(r3, Operand(var->name()));
1474 __ mov(r0, Operand(var->name())); 1440 __ push(r3);
1475 __ push(r0);
1476 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1441 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1477 } 1442 }
1478 } 1443 }
1479 __ jmp(done); 1444 __ b(done);
1480 } 1445 }
1481 } 1446 }
1482 1447
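The CompareRoot-against-the-hole pattern above is V8's read barrier for let/const bindings; conceptually, under the usual temporal-dead-zone semantics (pseudo-C++, hypothetical names):

    // A minimal sketch of the hole check emitted above.
    if (value == the_hole) {
      if (mode == CONST_LEGACY) value = undefined;  // legacy const: silently unholed
      else ThrowReferenceError(name);               // let/const: TDZ violation
    }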
1483 1448
1484 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1449 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1485 // Record position before possible IC call. 1450 // Record position before possible IC call.
1486 SetSourcePosition(proxy->position()); 1451 SetSourcePosition(proxy->position());
1487 Variable* var = proxy->var(); 1452 Variable* var = proxy->var();
1488 1453
1489 // Three cases: global variables, lookup variables, and all other types of 1454 // Three cases: global variables, lookup variables, and all other types of
1490 // variables. 1455 // variables.
1491 switch (var->location()) { 1456 switch (var->location()) {
1492 case Variable::UNALLOCATED: { 1457 case Variable::UNALLOCATED: {
1493 Comment cmnt(masm_, "[ Global variable"); 1458 Comment cmnt(masm_, "[ Global variable");
1494 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 1459 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1495 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); 1460 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1496 if (FLAG_vector_ics) { 1461 if (FLAG_vector_ics) {
1497 __ mov(VectorLoadICDescriptor::SlotRegister(), 1462 __ mov(VectorLoadICDescriptor::SlotRegister(),
1498 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 1463 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1499 } 1464 }
1500 CallLoadIC(CONTEXTUAL); 1465 CallLoadIC(CONTEXTUAL);
1501 context()->Plug(r0); 1466 context()->Plug(r3);
1502 break; 1467 break;
1503 } 1468 }
1504 1469
1505 case Variable::PARAMETER: 1470 case Variable::PARAMETER:
1506 case Variable::LOCAL: 1471 case Variable::LOCAL:
1507 case Variable::CONTEXT: { 1472 case Variable::CONTEXT: {
1508 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" 1473 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1509 : "[ Stack variable"); 1474 : "[ Stack variable");
1510 if (var->binding_needs_init()) { 1475 if (var->binding_needs_init()) {
1511 // var->scope() may be NULL when the proxy is located in eval code and 1476 // var->scope() may be NULL when the proxy is located in eval code and
(...skipping 20 matching lines...)
1532 // function() { f(); let x = 1; function f() { x = 2; } } 1497 // function() { f(); let x = 1; function f() { x = 2; } }
1533 // 1498 //
1534 bool skip_init_check; 1499 bool skip_init_check;
1535 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { 1500 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1536 skip_init_check = false; 1501 skip_init_check = false;
1537 } else { 1502 } else {
1538 // Check that we always have a valid source position. 1503 // Check that we always have a valid source position.
1539 DCHECK(var->initializer_position() != RelocInfo::kNoPosition); 1504 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1540 DCHECK(proxy->position() != RelocInfo::kNoPosition); 1505 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1541 skip_init_check = var->mode() != CONST_LEGACY && 1506 skip_init_check = var->mode() != CONST_LEGACY &&
1542 var->initializer_position() < proxy->position(); 1507 var->initializer_position() < proxy->position();
1543 } 1508 }
1544 1509
1545 if (!skip_init_check) { 1510 if (!skip_init_check) {
1511 Label done;
1546 // Let and const need a read barrier. 1512 // Let and const need a read barrier.
1547 GetVar(r0, var); 1513 GetVar(r3, var);
1548 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1514 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1515 __ bne(&done);
1549 if (var->mode() == LET || var->mode() == CONST) { 1516 if (var->mode() == LET || var->mode() == CONST) {
1550 // Throw a reference error when using an uninitialized let/const 1517 // Throw a reference error when using an uninitialized let/const
1551 // binding in harmony mode. 1518 // binding in harmony mode.
1552 Label done; 1519 __ mov(r3, Operand(var->name()));
1553 __ b(ne, &done); 1520 __ push(r3);
1554 __ mov(r0, Operand(var->name()));
1555 __ push(r0);
1556 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1521 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1557 __ bind(&done);
1558 } else { 1522 } else {
1559 // Uninitialized const bindings outside of harmony mode are unholed. 1523 // Uninitialized const bindings outside of harmony mode are unholed.
1560 DCHECK(var->mode() == CONST_LEGACY); 1524 DCHECK(var->mode() == CONST_LEGACY);
1561 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1525 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1562 } 1526 }
1563 context()->Plug(r0); 1527 __ bind(&done);
1528 context()->Plug(r3);
1564 break; 1529 break;
1565 } 1530 }
1566 } 1531 }
1567 context()->Plug(var); 1532 context()->Plug(var);
1568 break; 1533 break;
1569 } 1534 }
1570 1535
1571 case Variable::LOOKUP: { 1536 case Variable::LOOKUP: {
1572 Comment cmnt(masm_, "[ Lookup variable"); 1537 Comment cmnt(masm_, "[ Lookup variable");
1573 Label done, slow; 1538 Label done, slow;
1574 // Generate code for loading from variables potentially shadowed 1539 // Generate code for loading from variables potentially shadowed
1575 // by eval-introduced variables. 1540 // by eval-introduced variables.
1576 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done); 1541 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1577 __ bind(&slow); 1542 __ bind(&slow);
1578 __ mov(r1, Operand(var->name())); 1543 __ mov(r4, Operand(var->name()));
1579 __ Push(cp, r1); // Context and name. 1544 __ Push(cp, r4); // Context and name.
1580 __ CallRuntime(Runtime::kLoadLookupSlot, 2); 1545 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1581 __ bind(&done); 1546 __ bind(&done);
1582 context()->Plug(r0); 1547 context()->Plug(r3);
1583 } 1548 }
1584 } 1549 }
1585 } 1550 }
1586 1551
1587 1552
1588 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1553 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1589 Comment cmnt(masm_, "[ RegExpLiteral"); 1554 Comment cmnt(masm_, "[ RegExpLiteral");
1590 Label materialized; 1555 Label materialized;
1591 // Registers will be used as follows: 1556 // Registers will be used as follows:
1592 // r5 = materialized value (RegExp literal) 1557 // r8 = materialized value (RegExp literal)
1593 // r4 = JS function, literals array 1558 // r7 = JS function, literals array
1594 // r3 = literal index 1559 // r6 = literal index
1595 // r2 = RegExp pattern 1560 // r5 = RegExp pattern
1596 // r1 = RegExp flags 1561 // r4 = RegExp flags
1597 // r0 = RegExp literal clone 1562 // r3 = RegExp literal clone
1598 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1563 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1599 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); 1564 __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1600 int literal_offset = 1565 int literal_offset =
1601 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; 1566 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1602 __ ldr(r5, FieldMemOperand(r4, literal_offset)); 1567 __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
1603 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1568 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1604 __ cmp(r5, ip); 1569 __ cmp(r8, ip);
1605 __ b(ne, &materialized); 1570 __ bne(&materialized);
1606 1571
1607 // Create regexp literal using runtime function. 1572 // Create regexp literal using runtime function.
1608 // Result will be in r0. 1573 // Result will be in r3.
1609 __ mov(r3, Operand(Smi::FromInt(expr->literal_index()))); 1574 __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
1610 __ mov(r2, Operand(expr->pattern())); 1575 __ mov(r5, Operand(expr->pattern()));
1611 __ mov(r1, Operand(expr->flags())); 1576 __ mov(r4, Operand(expr->flags()));
1612 __ Push(r4, r3, r2, r1); 1577 __ Push(r7, r6, r5, r4);
1613 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1578 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1614 __ mov(r5, r0); 1579 __ mr(r8, r3);
1615 1580
1616 __ bind(&materialized); 1581 __ bind(&materialized);
1617 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 1582 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1618 Label allocated, runtime_allocate; 1583 Label allocated, runtime_allocate;
1619 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); 1584 __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
1620 __ jmp(&allocated); 1585 __ b(&allocated);
1621 1586
1622 __ bind(&runtime_allocate); 1587 __ bind(&runtime_allocate);
1623 __ mov(r0, Operand(Smi::FromInt(size))); 1588 __ LoadSmiLiteral(r3, Smi::FromInt(size));
1624 __ Push(r5, r0); 1589 __ Push(r8, r3);
1625 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 1590 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1626 __ pop(r5); 1591 __ pop(r8);
1627 1592
1628 __ bind(&allocated); 1593 __ bind(&allocated);
1629 // After this, registers are used as follows: 1594 // After this, registers are used as follows:
1630 // r0: Newly allocated regexp. 1595 // r3: Newly allocated regexp.
1631 // r5: Materialized regexp. 1596 // r8: Materialized regexp.
1632 // r2: temp. 1597 // r5: temp.
1633 __ CopyFields(r0, r5, d0, size / kPointerSize); 1598 __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
1634 context()->Plug(r0); 1599 context()->Plug(r3);
1635 } 1600 }
1636 1601
1637 1602
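In outline, VisitRegExpLiteral above implements a per-closure cache (pseudo-C++, hypothetical helpers):

    // Materialized regexp literals are cached in the closure's literals array.
    Object* cached = closure->literals()->get(literal_index);
    if (cached->IsUndefined()) {
      cached = MaterializeRegExpLiteral(literals, index, pattern, flags);  // runtime call
    }
    // Each evaluation still yields a fresh object: shallow-copy the cached
    // regexp, including its in-object fields (e.g. lastIndex).
    JSRegExp* result = AllocateAndCopy(cached, JSRegExp::kSize + in_object_fields);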
1638 void FullCodeGenerator::EmitAccessor(Expression* expression) { 1603 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1639 if (expression == NULL) { 1604 if (expression == NULL) {
1640 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1605 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1641 __ push(r1); 1606 __ push(r4);
1642 } else { 1607 } else {
1643 VisitForStackValue(expression); 1608 VisitForStackValue(expression);
1644 } 1609 }
1645 } 1610 }
1646 1611
1647 1612
1648 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { 1613 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1649 Comment cmnt(masm_, "[ ObjectLiteral"); 1614 Comment cmnt(masm_, "[ ObjectLiteral");
1650 1615
1651 expr->BuildConstantProperties(isolate()); 1616 expr->BuildConstantProperties(isolate());
1652 Handle<FixedArray> constant_properties = expr->constant_properties(); 1617 Handle<FixedArray> constant_properties = expr->constant_properties();
1653 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1618 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1654 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1619 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1655 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1620 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1656 __ mov(r1, Operand(constant_properties)); 1621 __ mov(r4, Operand(constant_properties));
1657 int flags = expr->fast_elements() 1622 int flags = expr->fast_elements() ? ObjectLiteral::kFastElements
1658 ? ObjectLiteral::kFastElements 1623 : ObjectLiteral::kNoFlags;
1659 : ObjectLiteral::kNoFlags; 1624 flags |= expr->has_function() ? ObjectLiteral::kHasFunction
1660 flags |= expr->has_function() 1625 : ObjectLiteral::kNoFlags;
1661 ? ObjectLiteral::kHasFunction 1626 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1662 : ObjectLiteral::kNoFlags;
1663 __ mov(r0, Operand(Smi::FromInt(flags)));
1664 int properties_count = constant_properties->length() / 2; 1627 int properties_count = constant_properties->length() / 2;
1665 if (expr->may_store_doubles() || expr->depth() > 1 || 1628 if (expr->may_store_doubles() || expr->depth() > 1 ||
1666 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements || 1629 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1667 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1630 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1668 __ Push(r3, r2, r1, r0); 1631 __ Push(r6, r5, r4, r3);
1669 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); 1632 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1670 } else { 1633 } else {
1671 FastCloneShallowObjectStub stub(isolate(), properties_count); 1634 FastCloneShallowObjectStub stub(isolate(), properties_count);
1672 __ CallStub(&stub); 1635 __ CallStub(&stub);
1673 } 1636 }
1674 1637
1675 // If result_saved is true the result is on top of the stack. If 1638 // If result_saved is true the result is on top of the stack. If
1676 // result_saved is false the result is in r0. 1639 // result_saved is false the result is in r3.
1677 bool result_saved = false; 1640 bool result_saved = false;
1678 1641
1679 // Mark all computed expressions that are bound to a key that 1642 // Mark all computed expressions that are bound to a key that
1680 // is shadowed by a later occurrence of the same key. For the 1643 // is shadowed by a later occurrence of the same key. For the
1681 // marked expressions, no store code is emitted. 1644 // marked expressions, no store code is emitted.
1682 expr->CalculateEmitStore(zone()); 1645 expr->CalculateEmitStore(zone());
1683 1646
1684 AccessorTable accessor_table(zone()); 1647 AccessorTable accessor_table(zone());
1685 for (int i = 0; i < expr->properties()->length(); i++) { 1648 for (int i = 0; i < expr->properties()->length(); i++) {
1686 ObjectLiteral::Property* property = expr->properties()->at(i); 1649 ObjectLiteral::Property* property = expr->properties()->at(i);
1687 if (property->IsCompileTimeValue()) continue; 1650 if (property->IsCompileTimeValue()) continue;
1688 1651
1689 Literal* key = property->key(); 1652 Literal* key = property->key();
1690 Expression* value = property->value(); 1653 Expression* value = property->value();
1691 if (!result_saved) { 1654 if (!result_saved) {
1692 __ push(r0); // Save result on stack 1655 __ push(r3); // Save result on stack
1693 result_saved = true; 1656 result_saved = true;
1694 } 1657 }
1695 switch (property->kind()) { 1658 switch (property->kind()) {
1696 case ObjectLiteral::Property::CONSTANT: 1659 case ObjectLiteral::Property::CONSTANT:
1697 UNREACHABLE(); 1660 UNREACHABLE();
1698 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1661 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1699 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); 1662 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1700 // Fall through. 1663 // Fall through.
1701 case ObjectLiteral::Property::COMPUTED: 1664 case ObjectLiteral::Property::COMPUTED:
1702 if (key->value()->IsInternalizedString()) { 1665 if (key->value()->IsInternalizedString()) {
1703 if (property->emit_store()) { 1666 if (property->emit_store()) {
1704 VisitForAccumulatorValue(value); 1667 VisitForAccumulatorValue(value);
1705 DCHECK(StoreDescriptor::ValueRegister().is(r0)); 1668 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1706 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); 1669 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1707 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1670 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1708 CallStoreIC(key->LiteralFeedbackId()); 1671 CallStoreIC(key->LiteralFeedbackId());
1709 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1672 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1710 } else { 1673 } else {
1711 VisitForEffect(value); 1674 VisitForEffect(value);
1712 } 1675 }
1713 break; 1676 break;
1714 } 1677 }
1715 // Duplicate receiver on stack. 1678 // Duplicate receiver on stack.
1716 __ ldr(r0, MemOperand(sp)); 1679 __ LoadP(r3, MemOperand(sp));
1717 __ push(r0); 1680 __ push(r3);
1718 VisitForStackValue(key); 1681 VisitForStackValue(key);
1719 VisitForStackValue(value); 1682 VisitForStackValue(value);
1720 if (property->emit_store()) { 1683 if (property->emit_store()) {
1721 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes 1684 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
1722 __ push(r0); 1685 __ push(r3);
1723 __ CallRuntime(Runtime::kSetProperty, 4); 1686 __ CallRuntime(Runtime::kSetProperty, 4);
1724 } else { 1687 } else {
1725 __ Drop(3); 1688 __ Drop(3);
1726 } 1689 }
1727 break; 1690 break;
1728 case ObjectLiteral::Property::PROTOTYPE: 1691 case ObjectLiteral::Property::PROTOTYPE:
1729 // Duplicate receiver on stack. 1692 // Duplicate receiver on stack.
1730 __ ldr(r0, MemOperand(sp)); 1693 __ LoadP(r3, MemOperand(sp));
1731 __ push(r0); 1694 __ push(r3);
1732 VisitForStackValue(value); 1695 VisitForStackValue(value);
1733 if (property->emit_store()) { 1696 if (property->emit_store()) {
1734 __ CallRuntime(Runtime::kSetPrototype, 2); 1697 __ CallRuntime(Runtime::kSetPrototype, 2);
1735 } else { 1698 } else {
1736 __ Drop(2); 1699 __ Drop(2);
1737 } 1700 }
1738 break; 1701 break;
1739
1740 case ObjectLiteral::Property::GETTER: 1702 case ObjectLiteral::Property::GETTER:
1741 accessor_table.lookup(key)->second->getter = value; 1703 accessor_table.lookup(key)->second->getter = value;
1742 break; 1704 break;
1743 case ObjectLiteral::Property::SETTER: 1705 case ObjectLiteral::Property::SETTER:
1744 accessor_table.lookup(key)->second->setter = value; 1706 accessor_table.lookup(key)->second->setter = value;
1745 break; 1707 break;
1746 } 1708 }
1747 } 1709 }
1748 1710
1749 // Emit code to define accessors, using only a single call to the runtime for 1711 // Emit code to define accessors, using only a single call to the runtime for
1750 // each pair of corresponding getters and setters. 1712 // each pair of corresponding getters and setters.
1751 for (AccessorTable::Iterator it = accessor_table.begin(); 1713 for (AccessorTable::Iterator it = accessor_table.begin();
1752 it != accessor_table.end(); 1714 it != accessor_table.end(); ++it) {
1753 ++it) { 1715 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1754 __ ldr(r0, MemOperand(sp)); // Duplicate receiver. 1716 __ push(r3);
1755 __ push(r0);
1756 VisitForStackValue(it->first); 1717 VisitForStackValue(it->first);
1757 EmitAccessor(it->second->getter); 1718 EmitAccessor(it->second->getter);
1758 EmitAccessor(it->second->setter); 1719 EmitAccessor(it->second->setter);
1759 __ mov(r0, Operand(Smi::FromInt(NONE))); 1720 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1760 __ push(r0); 1721 __ push(r3);
1761 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); 1722 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1762 } 1723 }
1763 1724
1764 if (expr->has_function()) { 1725 if (expr->has_function()) {
1765 DCHECK(result_saved); 1726 DCHECK(result_saved);
1766 __ ldr(r0, MemOperand(sp)); 1727 __ LoadP(r3, MemOperand(sp));
1767 __ push(r0); 1728 __ push(r3);
1768 __ CallRuntime(Runtime::kToFastProperties, 1); 1729 __ CallRuntime(Runtime::kToFastProperties, 1);
1769 } 1730 }
1770 1731
1771 if (result_saved) { 1732 if (result_saved) {
1772 context()->PlugTOS(); 1733 context()->PlugTOS();
1773 } else { 1734 } else {
1774 context()->Plug(r0); 1735 context()->Plug(r3);
1775 } 1736 }
1776 } 1737 }
1777 1738
1778 1739
1779 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1740 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1780 Comment cmnt(masm_, "[ ArrayLiteral"); 1741 Comment cmnt(masm_, "[ ArrayLiteral");
1781 1742
1782 expr->BuildConstantElements(isolate()); 1743 expr->BuildConstantElements(isolate());
1783 int flags = expr->depth() == 1 1744 int flags = expr->depth() == 1 ? ArrayLiteral::kShallowElements
1784 ? ArrayLiteral::kShallowElements 1745 : ArrayLiteral::kNoFlags;
1785 : ArrayLiteral::kNoFlags;
1786 1746
1787 ZoneList<Expression*>* subexprs = expr->values(); 1747 ZoneList<Expression*>* subexprs = expr->values();
1788 int length = subexprs->length(); 1748 int length = subexprs->length();
1789 Handle<FixedArray> constant_elements = expr->constant_elements(); 1749 Handle<FixedArray> constant_elements = expr->constant_elements();
1790 DCHECK_EQ(2, constant_elements->length()); 1750 DCHECK_EQ(2, constant_elements->length());
1791 ElementsKind constant_elements_kind = 1751 ElementsKind constant_elements_kind =
1792 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1752 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1793 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); 1753 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1794 Handle<FixedArrayBase> constant_elements_values( 1754 Handle<FixedArrayBase> constant_elements_values(
1795 FixedArrayBase::cast(constant_elements->get(1))); 1755 FixedArrayBase::cast(constant_elements->get(1)));
1796 1756
1797 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; 1757 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1798 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { 1758 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1799 // If the only customer of allocation sites is transitioning, then 1759 // If the only customer of allocation sites is transitioning, then
1800 // we can turn it off if we don't have anywhere else to transition to. 1760 // we can turn it off if we don't have anywhere else to transition to.
1801 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; 1761 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1802 } 1762 }
1803 1763
1804 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1764 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1805 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1765 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1806 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1766 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1807 __ mov(r1, Operand(constant_elements)); 1767 __ mov(r4, Operand(constant_elements));
1808 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) { 1768 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1809 __ mov(r0, Operand(Smi::FromInt(flags))); 1769 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1810 __ Push(r3, r2, r1, r0); 1770 __ Push(r6, r5, r4, r3);
1811 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); 1771 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1812 } else { 1772 } else {
1813 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); 1773 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1814 __ CallStub(&stub); 1774 __ CallStub(&stub);
1815 } 1775 }
1816 1776
1817 bool result_saved = false; // Is the result saved to the stack? 1777 bool result_saved = false; // Is the result saved to the stack?
1818 1778
1819 // Emit code to evaluate all the non-constant subexpressions and to store 1779 // Emit code to evaluate all the non-constant subexpressions and to store
1820 // them into the newly cloned array. 1780 // them into the newly cloned array.
1821 for (int i = 0; i < length; i++) { 1781 for (int i = 0; i < length; i++) {
1822 Expression* subexpr = subexprs->at(i); 1782 Expression* subexpr = subexprs->at(i);
1823 // If the subexpression is a literal or a simple materialized literal it 1783 // If the subexpression is a literal or a simple materialized literal it
1824 // is already set in the cloned array. 1784 // is already set in the cloned array.
1825 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 1785 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1826 1786
1827 if (!result_saved) { 1787 if (!result_saved) {
1828 __ push(r0); 1788 __ push(r3);
1829 __ Push(Smi::FromInt(expr->literal_index())); 1789 __ Push(Smi::FromInt(expr->literal_index()));
1830 result_saved = true; 1790 result_saved = true;
1831 } 1791 }
1832 VisitForAccumulatorValue(subexpr); 1792 VisitForAccumulatorValue(subexpr);
1833 1793
1834 if (IsFastObjectElementsKind(constant_elements_kind)) { 1794 if (IsFastObjectElementsKind(constant_elements_kind)) {
1835 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1795 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1836 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal. 1796 __ LoadP(r8, MemOperand(sp, kPointerSize)); // Copy of array literal.
1837 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset)); 1797 __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
1838 __ str(result_register(), FieldMemOperand(r1, offset)); 1798 __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
1839 // Update the write barrier for the array store. 1799 // Update the write barrier for the array store.
1840 __ RecordWriteField(r1, offset, result_register(), r2, 1800 __ RecordWriteField(r4, offset, result_register(), r5, kLRHasBeenSaved,
1841 kLRHasBeenSaved, kDontSaveFPRegs, 1801 kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1842 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); 1802 INLINE_SMI_CHECK);
1843 } else { 1803 } else {
1844 __ mov(r3, Operand(Smi::FromInt(i))); 1804 __ LoadSmiLiteral(r6, Smi::FromInt(i));
1845 StoreArrayLiteralElementStub stub(isolate()); 1805 StoreArrayLiteralElementStub stub(isolate());
1846 __ CallStub(&stub); 1806 __ CallStub(&stub);
1847 } 1807 }
1848 1808
1849 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1809 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1850 } 1810 }
1851 1811
1852 if (result_saved) { 1812 if (result_saved) {
1853 __ pop(); // literal index 1813 __ pop(); // literal index
1854 context()->PlugTOS(); 1814 context()->PlugTOS();
1855 } else { 1815 } else {
1856 context()->Plug(r0); 1816 context()->Plug(r3);
1857 } 1817 }
1858 } 1818 }
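In the fast-elements path above, the store targets FieldMemOperand(r4, offset), which folds the heap-object pointer tag into the displacement. A sketch of that arithmetic, assuming the usual one-bit tag and a map-plus-length FixedArray header (both assumptions, not read off this patch):

#include <cstdint>

constexpr intptr_t kPtrSize = sizeof(void*);
constexpr intptr_t kHeapObjTag = 1;                   // assumed pointer tag
constexpr intptr_t kFixedArrayHeader = 2 * kPtrSize;  // assumed: map + length

// Displacement actually encoded for element i: header plus slot, minus tag.
intptr_t ElementDisplacement(int i) {
  intptr_t offset = kFixedArrayHeader + i * kPtrSize;  // as in the loop above
  return offset - kHeapObjTag;                         // FieldMemOperand
}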
1859 1819
1860 1820
1861 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1821 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1862 DCHECK(expr->target()->IsValidReferenceExpression()); 1822 DCHECK(expr->target()->IsValidReferenceExpression());
1863 1823
1864 Comment cmnt(masm_, "[ Assignment"); 1824 Comment cmnt(masm_, "[ Assignment");
1865 1825
1866 // Left-hand side can only be a property, a global or a (parameter or local) 1826 // Left-hand side can only be a property, a global or a (parameter or local)
1867 // slot. 1827 // slot.
1868 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 1828 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1869 LhsKind assign_type = VARIABLE; 1829 LhsKind assign_type = VARIABLE;
1870 Property* property = expr->target()->AsProperty(); 1830 Property* property = expr->target()->AsProperty();
1871 if (property != NULL) { 1831 if (property != NULL) {
1872 assign_type = (property->key()->IsPropertyName()) 1832 assign_type =
1873 ? NAMED_PROPERTY 1833 (property->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
1874 : KEYED_PROPERTY;
1875 } 1834 }
1876 1835
1877 // Evaluate LHS expression. 1836 // Evaluate LHS expression.
1878 switch (assign_type) { 1837 switch (assign_type) {
1879 case VARIABLE: 1838 case VARIABLE:
1880 // Nothing to do here. 1839 // Nothing to do here.
1881 break; 1840 break;
1882 case NAMED_PROPERTY: 1841 case NAMED_PROPERTY:
1883 if (expr->is_compound()) { 1842 if (expr->is_compound()) {
1884 // We need the receiver both on the stack and in the register. 1843 // We need the receiver both on the stack and in the register.
1885 VisitForStackValue(property->obj()); 1844 VisitForStackValue(property->obj());
1886 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1845 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1887 } else { 1846 } else {
1888 VisitForStackValue(property->obj()); 1847 VisitForStackValue(property->obj());
1889 } 1848 }
1890 break; 1849 break;
1891 case KEYED_PROPERTY: 1850 case KEYED_PROPERTY:
1892 if (expr->is_compound()) { 1851 if (expr->is_compound()) {
1893 VisitForStackValue(property->obj()); 1852 VisitForStackValue(property->obj());
1894 VisitForStackValue(property->key()); 1853 VisitForStackValue(property->key());
1895 __ ldr(LoadDescriptor::ReceiverRegister(), 1854 __ LoadP(LoadDescriptor::ReceiverRegister(),
1896 MemOperand(sp, 1 * kPointerSize)); 1855 MemOperand(sp, 1 * kPointerSize));
1897 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 1856 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1898 } else { 1857 } else {
1899 VisitForStackValue(property->obj()); 1858 VisitForStackValue(property->obj());
1900 VisitForStackValue(property->key()); 1859 VisitForStackValue(property->key());
1901 } 1860 }
1902 break; 1861 break;
1903 } 1862 }
1904 1863
1905 // For compound assignments we need another deoptimization point after the 1864 // For compound assignments we need another deoptimization point after the
1906 // variable/property load. 1865 // variable/property load.
1907 if (expr->is_compound()) { 1866 if (expr->is_compound()) {
1908 { AccumulatorValueContext context(this); 1867 {
1868 AccumulatorValueContext context(this);
1909 switch (assign_type) { 1869 switch (assign_type) {
1910 case VARIABLE: 1870 case VARIABLE:
1911 EmitVariableLoad(expr->target()->AsVariableProxy()); 1871 EmitVariableLoad(expr->target()->AsVariableProxy());
1912 PrepareForBailout(expr->target(), TOS_REG); 1872 PrepareForBailout(expr->target(), TOS_REG);
1913 break; 1873 break;
1914 case NAMED_PROPERTY: 1874 case NAMED_PROPERTY:
1915 EmitNamedPropertyLoad(property); 1875 EmitNamedPropertyLoad(property);
1916 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1876 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1917 break; 1877 break;
1918 case KEYED_PROPERTY: 1878 case KEYED_PROPERTY:
1919 EmitKeyedPropertyLoad(property); 1879 EmitKeyedPropertyLoad(property);
1920 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1880 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1921 break; 1881 break;
1922 } 1882 }
1923 } 1883 }
1924 1884
1925 Token::Value op = expr->binary_op(); 1885 Token::Value op = expr->binary_op();
1926 __ push(r0); // Left operand goes on the stack. 1886 __ push(r3); // Left operand goes on the stack.
1927 VisitForAccumulatorValue(expr->value()); 1887 VisitForAccumulatorValue(expr->value());
1928 1888
1929 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1889 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1930 ? OVERWRITE_RIGHT 1890 ? OVERWRITE_RIGHT
1931 : NO_OVERWRITE; 1891 : NO_OVERWRITE;
1932 SetSourcePosition(expr->position() + 1); 1892 SetSourcePosition(expr->position() + 1);
1933 AccumulatorValueContext context(this); 1893 AccumulatorValueContext context(this);
1934 if (ShouldInlineSmiCase(op)) { 1894 if (ShouldInlineSmiCase(op)) {
1935 EmitInlineSmiBinaryOp(expr->binary_operation(), 1895 EmitInlineSmiBinaryOp(expr->binary_operation(), op, mode, expr->target(),
1936 op,
1937 mode,
1938 expr->target(),
1939 expr->value()); 1896 expr->value());
1940 } else { 1897 } else {
1941 EmitBinaryOp(expr->binary_operation(), op, mode); 1898 EmitBinaryOp(expr->binary_operation(), op, mode);
1942 } 1899 }
1943 1900
1944 // Deoptimization point in case the binary operation may have side effects. 1901 // Deoptimization point in case the binary operation may have side effects.
1945 PrepareForBailout(expr->binary_operation(), TOS_REG); 1902 PrepareForBailout(expr->binary_operation(), TOS_REG);
1946 } else { 1903 } else {
1947 VisitForAccumulatorValue(expr->value()); 1904 VisitForAccumulatorValue(expr->value());
1948 } 1905 }
1949 1906
1950 // Record source position before possible IC call. 1907 // Record source position before possible IC call.
1951 SetSourcePosition(expr->position()); 1908 SetSourcePosition(expr->position());
1952 1909
1953 // Store the value. 1910 // Store the value.
1954 switch (assign_type) { 1911 switch (assign_type) {
1955 case VARIABLE: 1912 case VARIABLE:
1956 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1913 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1957 expr->op()); 1914 expr->op());
1958 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1915 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1959 context()->Plug(r0); 1916 context()->Plug(r3);
1960 break; 1917 break;
1961 case NAMED_PROPERTY: 1918 case NAMED_PROPERTY:
1962 EmitNamedPropertyAssignment(expr); 1919 EmitNamedPropertyAssignment(expr);
1963 break; 1920 break;
1964 case KEYED_PROPERTY: 1921 case KEYED_PROPERTY:
1965 EmitKeyedPropertyAssignment(expr); 1922 EmitKeyedPropertyAssignment(expr);
1966 break; 1923 break;
1967 } 1924 }
1968 } 1925 }
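The compound case above fixes the evaluation order: the receiver is evaluated once, the old value is loaded and pushed as the left operand, the RHS lands in the accumulator, and the binop result is stored back, with bailout points after the load and after the binop. A rough host-language model of that schedule (a std::map stands in for a JS object; purely illustrative):

#include <map>
#include <string>

int CompoundAssign(std::map<std::string, int>& obj, const std::string& key,
                   int rhs) {
  int left = obj[key];      // EmitNamedPropertyLoad (+ bailout point)
  int result = left + rhs;  // EmitBinaryOp / EmitInlineSmiBinaryOp
  obj[key] = result;        // EmitNamedPropertyAssignment
  return result;            // the assignment's value, Plug(r3)
}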
1969 1926
1970 1927
1971 void FullCodeGenerator::VisitYield(Yield* expr) { 1928 void FullCodeGenerator::VisitYield(Yield* expr) {
1972 Comment cmnt(masm_, "[ Yield"); 1929 Comment cmnt(masm_, "[ Yield");
1973 // Evaluate yielded value first; the initial iterator definition depends on 1930 // Evaluate yielded value first; the initial iterator definition depends on
1974 // this. It stays on the stack while we update the iterator. 1931 // this. It stays on the stack while we update the iterator.
1975 VisitForStackValue(expr->expression()); 1932 VisitForStackValue(expr->expression());
1976 1933
1977 switch (expr->yield_kind()) { 1934 switch (expr->yield_kind()) {
1978 case Yield::kSuspend: 1935 case Yield::kSuspend:
1979 // Pop value from top-of-stack slot; box result into result register. 1936 // Pop value from top-of-stack slot; box result into result register.
1980 EmitCreateIteratorResult(false); 1937 EmitCreateIteratorResult(false);
1981 __ push(result_register()); 1938 __ push(result_register());
1982 // Fall through. 1939 // Fall through.
1983 case Yield::kInitial: { 1940 case Yield::kInitial: {
1984 Label suspend, continuation, post_runtime, resume; 1941 Label suspend, continuation, post_runtime, resume;
1985 1942
1986 __ jmp(&suspend); 1943 __ b(&suspend);
1987 1944
1988 __ bind(&continuation); 1945 __ bind(&continuation);
1989 __ jmp(&resume); 1946 __ b(&resume);
1990 1947
1991 __ bind(&suspend); 1948 __ bind(&suspend);
1992 VisitForAccumulatorValue(expr->generator_object()); 1949 VisitForAccumulatorValue(expr->generator_object());
1993 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 1950 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1994 __ mov(r1, Operand(Smi::FromInt(continuation.pos()))); 1951 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
1995 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 1952 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
1996 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 1953 r0);
1997 __ mov(r1, cp); 1954 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
1998 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 1955 __ mr(r4, cp);
1956 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
1999 kLRHasBeenSaved, kDontSaveFPRegs); 1957 kLRHasBeenSaved, kDontSaveFPRegs);
2000 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); 1958 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2001 __ cmp(sp, r1); 1959 __ cmp(sp, r4);
2002 __ b(eq, &post_runtime); 1960 __ beq(&post_runtime);
2003 __ push(r0); // generator object 1961 __ push(r3); // generator object
2004 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 1962 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2005 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1963 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2006 __ bind(&post_runtime); 1964 __ bind(&post_runtime);
2007 __ pop(result_register()); 1965 __ pop(result_register());
2008 EmitReturnSequence(); 1966 EmitReturnSequence();
2009 1967
2010 __ bind(&resume); 1968 __ bind(&resume);
2011 context()->Plug(result_register()); 1969 context()->Plug(result_register());
2012 break; 1970 break;
2013 } 1971 }
2014 1972
2015 case Yield::kFinal: { 1973 case Yield::kFinal: {
2016 VisitForAccumulatorValue(expr->generator_object()); 1974 VisitForAccumulatorValue(expr->generator_object());
2017 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); 1975 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2018 __ str(r1, FieldMemOperand(result_register(), 1976 __ StoreP(r4, FieldMemOperand(result_register(),
2019 JSGeneratorObject::kContinuationOffset)); 1977 JSGeneratorObject::kContinuationOffset),
1978 r0);
2020 // Pop value from top-of-stack slot, box result into result register. 1979 // Pop value from top-of-stack slot, box result into result register.
2021 EmitCreateIteratorResult(true); 1980 EmitCreateIteratorResult(true);
2022 EmitUnwindBeforeReturn(); 1981 EmitUnwindBeforeReturn();
2023 EmitReturnSequence(); 1982 EmitReturnSequence();
2024 break; 1983 break;
2025 } 1984 }
2026 1985
2027 case Yield::kDelegating: { 1986 case Yield::kDelegating: {
2028 VisitForStackValue(expr->generator_object()); 1987 VisitForStackValue(expr->generator_object());
2029 1988
2030 // Initial stack layout is as follows: 1989 // Initial stack layout is as follows:
2031 // [sp + 1 * kPointerSize] iter 1990 // [sp + 1 * kPointerSize] iter
2032 // [sp + 0 * kPointerSize] g 1991 // [sp + 0 * kPointerSize] g
2033 1992
2034 Label l_catch, l_try, l_suspend, l_continuation, l_resume; 1993 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2035 Label l_next, l_call, l_loop; 1994 Label l_next, l_call;
2036 Register load_receiver = LoadDescriptor::ReceiverRegister(); 1995 Register load_receiver = LoadDescriptor::ReceiverRegister();
2037 Register load_name = LoadDescriptor::NameRegister(); 1996 Register load_name = LoadDescriptor::NameRegister();
2038 1997
2039 // Initial send value is undefined. 1998 // Initial send value is undefined.
2040 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 1999 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2041 __ b(&l_next); 2000 __ b(&l_next);
2042 2001
2043 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } 2002 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2044 __ bind(&l_catch); 2003 __ bind(&l_catch);
2045 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); 2004 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2046 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw" 2005 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2047 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2006 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2048 __ Push(load_name, r3, r0); // "throw", iter, except 2007 __ Push(load_name, r6, r3); // "throw", iter, except
2049 __ jmp(&l_call); 2008 __ b(&l_call);
2050 2009
2051 // try { received = %yield result } 2010 // try { received = %yield result }
2052 // Shuffle the received result above a try handler and yield it without 2011 // Shuffle the received result above a try handler and yield it without
2053 // re-boxing. 2012 // re-boxing.
2054 __ bind(&l_try); 2013 __ bind(&l_try);
2055 __ pop(r0); // result 2014 __ pop(r3); // result
2056 __ PushTryHandler(StackHandler::CATCH, expr->index()); 2015 __ PushTryHandler(StackHandler::CATCH, expr->index());
2057 const int handler_size = StackHandlerConstants::kSize; 2016 const int handler_size = StackHandlerConstants::kSize;
2058 __ push(r0); // result 2017 __ push(r3); // result
2059 __ jmp(&l_suspend); 2018 __ b(&l_suspend);
2060 __ bind(&l_continuation); 2019 __ bind(&l_continuation);
2061 __ jmp(&l_resume); 2020 __ b(&l_resume);
2062 __ bind(&l_suspend); 2021 __ bind(&l_suspend);
2063 const int generator_object_depth = kPointerSize + handler_size; 2022 const int generator_object_depth = kPointerSize + handler_size;
2064 __ ldr(r0, MemOperand(sp, generator_object_depth)); 2023 __ LoadP(r3, MemOperand(sp, generator_object_depth));
2065 __ push(r0); // g 2024 __ push(r3); // g
2066 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2025 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2067 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos()))); 2026 __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
2068 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 2027 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2069 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 2028 r0);
2070 __ mov(r1, cp); 2029 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2071 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 2030 __ mr(r4, cp);
2031 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2072 kLRHasBeenSaved, kDontSaveFPRegs); 2032 kLRHasBeenSaved, kDontSaveFPRegs);
2073 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2033 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2074 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2034 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2075 __ pop(r0); // result 2035 __ pop(r3); // result
2076 EmitReturnSequence(); 2036 EmitReturnSequence();
2077 __ bind(&l_resume); // received in r0 2037 __ bind(&l_resume); // received in r3
2078 __ PopTryHandler(); 2038 __ PopTryHandler();
2079 2039
2080 // receiver = iter; f = 'next'; arg = received; 2040 // receiver = iter; f = 'next'; arg = received;
2081 __ bind(&l_next); 2041 __ bind(&l_next);
2082 2042
2083 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next" 2043 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2084 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2044 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2085 __ Push(load_name, r3, r0); // "next", iter, received 2045 __ Push(load_name, r6, r3); // "next", iter, received
2086 2046
2087 // result = receiver[f](arg); 2047 // result = receiver[f](arg);
2088 __ bind(&l_call); 2048 __ bind(&l_call);
2089 __ ldr(load_receiver, MemOperand(sp, kPointerSize)); 2049 __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2090 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize)); 2050 __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2091 if (FLAG_vector_ics) { 2051 if (FLAG_vector_ics) {
2092 __ mov(VectorLoadICDescriptor::SlotRegister(), 2052 __ mov(VectorLoadICDescriptor::SlotRegister(),
2093 Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot()))); 2053 Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2094 } 2054 }
2095 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code(); 2055 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2096 CallIC(ic, TypeFeedbackId::None()); 2056 CallIC(ic, TypeFeedbackId::None());
2097 __ mov(r1, r0); 2057 __ mr(r4, r3);
2098 __ str(r1, MemOperand(sp, 2 * kPointerSize)); 2058 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2099 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); 2059 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2100 __ CallStub(&stub); 2060 __ CallStub(&stub);
2101 2061
2102 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2062 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2103 __ Drop(1); // The function is still on the stack; drop it. 2063 __ Drop(1); // The function is still on the stack; drop it.
2104 2064
2105 // if (!result.done) goto l_try; 2065 // if (!result.done) goto l_try;
2106 __ bind(&l_loop); 2066 __ Move(load_receiver, r3);
2107 __ Move(load_receiver, r0);
2108 2067
2109 __ push(load_receiver); // save result 2068 __ push(load_receiver); // save result
2110 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" 2069 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2111 if (FLAG_vector_ics) { 2070 if (FLAG_vector_ics) {
2112 __ mov(VectorLoadICDescriptor::SlotRegister(), 2071 __ mov(VectorLoadICDescriptor::SlotRegister(),
2113 Operand(Smi::FromInt(expr->DoneFeedbackSlot()))); 2072 Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
2114 } 2073 }
2115 CallLoadIC(NOT_CONTEXTUAL); // r0=result.done 2074 CallLoadIC(NOT_CONTEXTUAL); // r3=result.done
2116 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); 2075 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2117 CallIC(bool_ic); 2076 CallIC(bool_ic);
2118 __ cmp(r0, Operand(0)); 2077 __ cmpi(r3, Operand::Zero());
2119 __ b(eq, &l_try); 2078 __ beq(&l_try);
2120 2079
2121 // result.value 2080 // result.value
2122 __ pop(load_receiver); // result 2081 __ pop(load_receiver); // result
2123 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" 2082 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2124 if (FLAG_vector_ics) { 2083 if (FLAG_vector_ics) {
2125 __ mov(VectorLoadICDescriptor::SlotRegister(), 2084 __ mov(VectorLoadICDescriptor::SlotRegister(),
2126 Operand(Smi::FromInt(expr->ValueFeedbackSlot()))); 2085 Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
2127 } 2086 }
2128 CallLoadIC(NOT_CONTEXTUAL); // r0=result.value 2087 CallLoadIC(NOT_CONTEXTUAL); // r3=result.value
2129 context()->DropAndPlug(2, r0); // drop iter and g 2088 context()->DropAndPlug(2, r3); // drop iter and g
2130 break; 2089 break;
2131 } 2090 }
2132 } 2091 }
2133 } 2092 }
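The delegating case is the desugared yield* loop its comments spell out: call result = receiver[f](arg), loop back through l_try while !result.done, and finally take result.value. A self-contained sketch of that protocol with a stand-in iterator (names are illustrative, not V8 API):

#include <cstdio>
#include <vector>

struct IterResult { int value; bool done; };

struct Iter {  // minimal stand-in for the delegated-to iterator
  std::vector<int> items;
  size_t pos = 0;
  IterResult next(int /*received*/) {
    if (pos >= items.size()) return {0, true};
    return {items[pos++], false};
  }
};

int main() {
  Iter iter{{1, 2, 3}};
  int received = 0;  // "Initial send value is undefined."
  for (;;) {
    IterResult result = iter.next(received);  // result = receiver[f](arg)
    if (result.done) break;                   // if (!result.done) goto l_try
    std::printf("yield %d\n", result.value);  // %yield result, then resume
    received = result.value;                  // stand-in for the sent value
  }
  return 0;  // result.value is the completion value of the yield*
}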
2134 2093
2135 2094
2136 void FullCodeGenerator::EmitGeneratorResume(Expression *generator, 2095 void FullCodeGenerator::EmitGeneratorResume(
2137 Expression *value, 2096 Expression* generator, Expression* value,
2138 JSGeneratorObject::ResumeMode resume_mode) { 2097 JSGeneratorObject::ResumeMode resume_mode) {
2139 // The value stays in r0, and is ultimately read by the resumed generator, as 2098 // The value stays in r3, and is ultimately read by the resumed generator, as
2140 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it 2099 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2141 // is read to throw the value when the resumed generator is already closed. 2100 // is read to throw the value when the resumed generator is already closed.
2142 // r1 will hold the generator object until the activation has been resumed. 2101 // r4 will hold the generator object until the activation has been resumed.
2143 VisitForStackValue(generator); 2102 VisitForStackValue(generator);
2144 VisitForAccumulatorValue(value); 2103 VisitForAccumulatorValue(value);
2145 __ pop(r1); 2104 __ pop(r4);
2146 2105
2147 // Check generator state. 2106 // Check generator state.
2148 Label wrong_state, closed_state, done; 2107 Label wrong_state, closed_state, done;
2149 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2108 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2150 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); 2109 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2151 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); 2110 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2152 __ cmp(r3, Operand(Smi::FromInt(0))); 2111 __ CmpSmiLiteral(r6, Smi::FromInt(0), r0);
2153 __ b(eq, &closed_state); 2112 __ beq(&closed_state);
2154 __ b(lt, &wrong_state); 2113 __ blt(&wrong_state);
2155 2114
2156 // Load suspended function and context. 2115 // Load suspended function and context.
2157 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset)); 2116 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2158 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset)); 2117 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2159 2118
2160 // Load receiver and store as the first argument. 2119 // Load receiver and store as the first argument.
2161 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset)); 2120 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2162 __ push(r2); 2121 __ push(r5);
2163 2122
2164 // Push holes for the rest of the arguments to the generator function. 2123 // Push holes for the rest of the arguments to the generator function.
2165 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); 2124 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2166 __ ldr(r3, 2125 __ LoadWordArith(
2167 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); 2126 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2168 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); 2127 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2169 Label push_argument_holes, push_frame; 2128 Label argument_loop, push_frame;
2170 __ bind(&push_argument_holes); 2129 #if V8_TARGET_ARCH_PPC64
2171 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC); 2130 __ cmpi(r6, Operand::Zero());
2172 __ b(mi, &push_frame); 2131 __ beq(&push_frame);
2173 __ push(r2); 2132 #else
2174 __ jmp(&push_argument_holes); 2133 __ SmiUntag(r6, SetRC);
2134 __ beq(&push_frame, cr0);
2135 #endif
2136 __ mtctr(r6);
2137 __ bind(&argument_loop);
2138 __ push(r5);
2139 __ bdnz(&argument_loop);
2175 2140
2176 // Enter a new JavaScript frame, and initialize its slots as they were when 2141 // Enter a new JavaScript frame, and initialize its slots as they were when
2177 // the generator was suspended. 2142 // the generator was suspended.
2178 Label resume_frame; 2143 Label resume_frame;
2179 __ bind(&push_frame); 2144 __ bind(&push_frame);
2180 __ bl(&resume_frame); 2145 __ b(&resume_frame, SetLK);
2181 __ jmp(&done); 2146 __ b(&done);
2182 __ bind(&resume_frame); 2147 __ bind(&resume_frame);
2183 // lr = return address. 2148 // lr = return address.
2184 // fp = caller's frame pointer. 2149 // fp = caller's frame pointer.
2185 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2186 // cp = callee's context, 2150 // cp = callee's context,
2187 // r4 = callee's JS function. 2151 // r7 = callee's JS function.
2188 __ PushFixedFrame(r4); 2152 __ PushFixedFrame(r7);
2189 // Adjust FP to point to saved FP. 2153 // Adjust FP to point to saved FP.
2190 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); 2154 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2191 2155
2192 // Load the operand stack size. 2156 // Load the operand stack size.
2193 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset)); 2157 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2194 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset)); 2158 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2195 __ SmiUntag(r3); 2159 __ SmiUntag(r6, SetRC);
2196 2160
2197 // If we are sending a value and there is no operand stack, we can jump back 2161 // If we are sending a value and there is no operand stack, we can jump back
2198 // in directly. 2162 // in directly.
2163 Label call_resume;
2199 if (resume_mode == JSGeneratorObject::NEXT) { 2164 if (resume_mode == JSGeneratorObject::NEXT) {
2200 Label slow_resume; 2165 Label slow_resume;
2201 __ cmp(r3, Operand(0)); 2166 __ bne(&slow_resume, cr0);
2202 __ b(ne, &slow_resume); 2167 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2203 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); 2168 #if V8_OOL_CONSTANT_POOL
2204 2169 {
2205 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); 2170 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2206 if (FLAG_enable_ool_constant_pool) { 2171 // Load the new code object's constant pool pointer.
2207 // Load the new code object's constant pool pointer. 2172 __ LoadP(kConstantPoolRegister,
2208 __ ldr(pp, 2173 MemOperand(r6, Code::kConstantPoolOffset - Code::kHeaderSize));
2209 MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize)); 2174 #endif
2210 } 2175 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2211 2176 __ SmiUntag(r5);
2212 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2177 __ add(r6, r6, r5);
2213 __ SmiUntag(r2); 2178 __ LoadSmiLiteral(r5,
2214 __ add(r3, r3, r2); 2179 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2215 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); 2180 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2216 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2181 r0);
2217 __ Jump(r3); 2182 __ Jump(r6);
2183 __ bind(&slow_resume);
2184 #if V8_OOL_CONSTANT_POOL
2218 } 2185 }
2219 __ bind(&slow_resume); 2186 #endif
2187 } else {
2188 __ beq(&call_resume, cr0);
2220 } 2189 }
2221 2190
2222 // Otherwise, we push holes for the operand stack and call the runtime to fix 2191 // Otherwise, we push holes for the operand stack and call the runtime to fix
2223 // up the stack and the handlers. 2192 // up the stack and the handlers.
2224 Label push_operand_holes, call_resume; 2193 Label operand_loop;
2225 __ bind(&push_operand_holes); 2194 __ mtctr(r6);
2226 __ sub(r3, r3, Operand(1), SetCC); 2195 __ bind(&operand_loop);
2227 __ b(mi, &call_resume); 2196 __ push(r5);
2228 __ push(r2); 2197 __ bdnz(&operand_loop);
2229 __ b(&push_operand_holes); 2198
2230 __ bind(&call_resume); 2199 __ bind(&call_resume);
2231 DCHECK(!result_register().is(r1)); 2200 DCHECK(!result_register().is(r4));
2232 __ Push(r1, result_register()); 2201 __ Push(r4, result_register());
2233 __ Push(Smi::FromInt(resume_mode)); 2202 __ Push(Smi::FromInt(resume_mode));
2234 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); 2203 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2235 // Not reached: the runtime call returns elsewhere. 2204 // Not reached: the runtime call returns elsewhere.
2236 __ stop("not-reached"); 2205 __ stop("not-reached");
2237 2206
2238 // Reach here when generator is closed. 2207 // Reach here when generator is closed.
2239 __ bind(&closed_state); 2208 __ bind(&closed_state);
2240 if (resume_mode == JSGeneratorObject::NEXT) { 2209 if (resume_mode == JSGeneratorObject::NEXT) {
2241 // Return completed iterator result when generator is closed. 2210 // Return completed iterator result when generator is closed.
2242 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2211 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2243 __ push(r2); 2212 __ push(r5);
2244 // Pop value from top-of-stack slot; box result into result register. 2213 // Pop value from top-of-stack slot; box result into result register.
2245 EmitCreateIteratorResult(true); 2214 EmitCreateIteratorResult(true);
2246 } else { 2215 } else {
2247 // Throw the provided value. 2216 // Throw the provided value.
2248 __ push(r0); 2217 __ push(r3);
2249 __ CallRuntime(Runtime::kThrow, 1); 2218 __ CallRuntime(Runtime::kThrow, 1);
2250 } 2219 }
2251 __ jmp(&done); 2220 __ b(&done);
2252 2221
2253 // Throw error if we attempt to operate on a running generator. 2222 // Throw error if we attempt to operate on a running generator.
2254 __ bind(&wrong_state); 2223 __ bind(&wrong_state);
2255 __ push(r1); 2224 __ push(r4);
2256 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1); 2225 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2257 2226
2258 __ bind(&done); 2227 __ bind(&done);
2259 context()->Plug(result_register()); 2228 context()->Plug(result_register());
2260 } 2229 }
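Both hole-pushing loops above are PPC counted loops: mtctr loads the count register and bdnz decrements it and branches while it is non-zero. That is why a zero count is branched around first (the cmpi/beq and the cr0 tests); bdnz entered with CTR == 0 would wrap and loop. A C++ sketch of the equivalent loop:

#include <cstdint>
#include <vector>

void PushHoles(std::vector<void*>* stack, intptr_t count, void* hole) {
  // mtctr(count); loop: push(hole); bdnz(loop);  -- count == 0 pushes nothing
  for (intptr_t i = count; i > 0; --i) {
    stack->push_back(hole);
  }
}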
2261 2230
2262 2231
2263 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { 2232 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2264 Label gc_required; 2233 Label gc_required;
2265 Label allocated; 2234 Label allocated;
2266 2235
2267 Handle<Map> map(isolate()->native_context()->iterator_result_map()); 2236 Handle<Map> map(isolate()->native_context()->iterator_result_map());
2268 2237
2269 __ Allocate(map->instance_size(), r0, r2, r3, &gc_required, TAG_OBJECT); 2238 __ Allocate(map->instance_size(), r3, r5, r6, &gc_required, TAG_OBJECT);
2270 __ jmp(&allocated); 2239 __ b(&allocated);
2271 2240
2272 __ bind(&gc_required); 2241 __ bind(&gc_required);
2273 __ Push(Smi::FromInt(map->instance_size())); 2242 __ Push(Smi::FromInt(map->instance_size()));
2274 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 2243 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2275 __ ldr(context_register(), 2244 __ LoadP(context_register(),
2276 MemOperand(fp, StandardFrameConstants::kContextOffset)); 2245 MemOperand(fp, StandardFrameConstants::kContextOffset));
2277 2246
2278 __ bind(&allocated); 2247 __ bind(&allocated);
2279 __ mov(r1, Operand(map)); 2248 __ mov(r4, Operand(map));
2280 __ pop(r2); 2249 __ pop(r5);
2281 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done))); 2250 __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
2282 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array())); 2251 __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
2283 DCHECK_EQ(map->instance_size(), 5 * kPointerSize); 2252 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2284 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2253 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2285 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 2254 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2286 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); 2255 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2287 __ str(r2, 2256 __ StoreP(r5,
2288 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset)); 2257 FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
2289 __ str(r3, 2258 r0);
2290 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset)); 2259 __ StoreP(r6,
2260 FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
2261 r0);
2291 2262
2292 // Only the value field needs a write barrier, as the other values are in the 2263 // Only the value field needs a write barrier, as the other values are in the
2293 // root set. 2264 // root set.
2294 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset, 2265 __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset, r5, r6,
2295 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 2266 kLRHasBeenSaved, kDontSaveFPRegs);
2296 } 2267 }
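A sketch of the object this function fills in, assuming the field order implied by the offsets used above. Only value can refer to an arbitrary freshly-computed object, so only its store gets the write barrier; the map, the empty fixed array, and the boolean are all root-set values:

struct IteratorResultLayout {  // hedged sketch, not the V8 declaration
  void* map;         // HeapObject::kMapOffset       <- iterator_result_map
  void* properties;  // JSObject::kPropertiesOffset  <- empty_fixed_array
  void* elements;    // JSObject::kElementsOffset    <- empty_fixed_array
  void* value;       // kResultValuePropertyOffset   <- popped value (barrier)
  void* done;        // kResultDonePropertyOffset    <- ToBoolean(done)
};
static_assert(sizeof(IteratorResultLayout) == 5 * sizeof(void*),
              "mirrors the instance-size DCHECK above");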
2297 2268
2298 2269
2299 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 2270 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2300 SetSourcePosition(prop->position()); 2271 SetSourcePosition(prop->position());
2301 Literal* key = prop->key()->AsLiteral(); 2272 Literal* key = prop->key()->AsLiteral();
2273
2302 __ mov(LoadDescriptor::NameRegister(), Operand(key->value())); 2274 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2303 if (FLAG_vector_ics) { 2275 if (FLAG_vector_ics) {
2304 __ mov(VectorLoadICDescriptor::SlotRegister(), 2276 __ mov(VectorLoadICDescriptor::SlotRegister(),
2305 Operand(Smi::FromInt(prop->PropertyFeedbackSlot()))); 2277 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2306 CallLoadIC(NOT_CONTEXTUAL); 2278 CallLoadIC(NOT_CONTEXTUAL);
2307 } else { 2279 } else {
2308 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId()); 2280 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2309 } 2281 }
2310 } 2282 }
2311 2283
2312 2284
2285 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2286 SetSourcePosition(prop->position());
2287 Literal* key = prop->key()->AsLiteral();
2288 DCHECK(!key->value()->IsSmi());
2289 DCHECK(prop->IsSuperAccess());
2290
2291 SuperReference* super_ref = prop->obj()->AsSuperReference();
2292 EmitLoadHomeObject(super_ref);
2293 __ Push(r3);
2294 VisitForStackValue(super_ref->this_var());
2295 __ Push(key->value());
2296 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2297 }
2298
2299
2313 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 2300 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2314 SetSourcePosition(prop->position()); 2301 SetSourcePosition(prop->position());
2315 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code(); 2302 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2316 if (FLAG_vector_ics) { 2303 if (FLAG_vector_ics) {
2317 __ mov(VectorLoadICDescriptor::SlotRegister(), 2304 __ mov(VectorLoadICDescriptor::SlotRegister(),
2318 Operand(Smi::FromInt(prop->PropertyFeedbackSlot()))); 2305 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2319 CallIC(ic); 2306 CallIC(ic);
2320 } else { 2307 } else {
2321 CallIC(ic, prop->PropertyFeedbackId()); 2308 CallIC(ic, prop->PropertyFeedbackId());
2322 } 2309 }
2323 } 2310 }
2324 2311
2325 2312
2326 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 2313 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2327 Token::Value op, 2314 Token::Value op,
2328 OverwriteMode mode, 2315 OverwriteMode mode,
2329 Expression* left_expr, 2316 Expression* left_expr,
2330 Expression* right_expr) { 2317 Expression* right_expr) {
2331 Label done, smi_case, stub_call; 2318 Label done, smi_case, stub_call;
2332 2319
2333 Register scratch1 = r2; 2320 Register scratch1 = r5;
2334 Register scratch2 = r3; 2321 Register scratch2 = r6;
2335 2322
2336 // Get the arguments. 2323 // Get the arguments.
2337 Register left = r1; 2324 Register left = r4;
2338 Register right = r0; 2325 Register right = r3;
2339 __ pop(left); 2326 __ pop(left);
2340 2327
2341 // Perform combined smi check on both operands. 2328 // Perform combined smi check on both operands.
2342 __ orr(scratch1, left, Operand(right)); 2329 __ orx(scratch1, left, right);
2343 STATIC_ASSERT(kSmiTag == 0); 2330 STATIC_ASSERT(kSmiTag == 0);
2344 JumpPatchSite patch_site(masm_); 2331 JumpPatchSite patch_site(masm_);
2345 patch_site.EmitJumpIfSmi(scratch1, &smi_case); 2332 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2346 2333
2347 __ bind(&stub_call); 2334 __ bind(&stub_call);
2348 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code(); 2335 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2349 CallIC(code, expr->BinaryOperationFeedbackId()); 2336 CallIC(code, expr->BinaryOperationFeedbackId());
2350 patch_site.EmitPatchInfo(); 2337 patch_site.EmitPatchInfo();
2351 __ jmp(&done); 2338 __ b(&done);
2352 2339
2353 __ bind(&smi_case); 2340 __ bind(&smi_case);
2354 // Smi case. This code works the same way as the smi-smi case in the type 2341 // Smi case. This code works the same way as the smi-smi case in the type
2355 // recording binary operation stub, see 2342 // recording binary operation stub.
2356 switch (op) { 2343 switch (op) {
2357 case Token::SAR: 2344 case Token::SAR:
2358 __ GetLeastBitsFromSmi(scratch1, right, 5); 2345 __ GetLeastBitsFromSmi(scratch1, right, 5);
2359 __ mov(right, Operand(left, ASR, scratch1)); 2346 __ ShiftRightArith(right, left, scratch1);
2360 __ bic(right, right, Operand(kSmiTagMask)); 2347 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2361 break; 2348 break;
2362 case Token::SHL: { 2349 case Token::SHL: {
2350 __ GetLeastBitsFromSmi(scratch2, right, 5);
2351 #if V8_TARGET_ARCH_PPC64
2352 __ ShiftLeft(right, left, scratch2);
2353 #else
2363 __ SmiUntag(scratch1, left); 2354 __ SmiUntag(scratch1, left);
2364 __ GetLeastBitsFromSmi(scratch2, right, 5); 2355 __ ShiftLeft(scratch1, scratch1, scratch2);
2365 __ mov(scratch1, Operand(scratch1, LSL, scratch2)); 2356 // Check that the *signed* result fits in a smi
2366 __ TrySmiTag(right, scratch1, &stub_call); 2357 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2358 __ SmiTag(right, scratch1);
2359 #endif
2367 break; 2360 break;
2368 } 2361 }
2369 case Token::SHR: { 2362 case Token::SHR: {
2370 __ SmiUntag(scratch1, left); 2363 __ SmiUntag(scratch1, left);
2371 __ GetLeastBitsFromSmi(scratch2, right, 5); 2364 __ GetLeastBitsFromSmi(scratch2, right, 5);
2372 __ mov(scratch1, Operand(scratch1, LSR, scratch2)); 2365 __ srw(scratch1, scratch1, scratch2);
2373 __ tst(scratch1, Operand(0xc0000000)); 2366 // Unsigned shift is not allowed to produce a negative number.
2374 __ b(ne, &stub_call); 2367 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2375 __ SmiTag(right, scratch1); 2368 __ SmiTag(right, scratch1);
2376 break; 2369 break;
2377 } 2370 }
2378 case Token::ADD: 2371 case Token::ADD: {
2379 __ add(scratch1, left, Operand(right), SetCC); 2372 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2380 __ b(vs, &stub_call); 2373 __ bne(&stub_call, cr0);
2381 __ mov(right, scratch1); 2374 __ mr(right, scratch1);
2382 break; 2375 break;
2383 case Token::SUB: 2376 }
2384 __ sub(scratch1, left, Operand(right), SetCC); 2377 case Token::SUB: {
2385 __ b(vs, &stub_call); 2378 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2386 __ mov(right, scratch1); 2379 __ bne(&stub_call, cr0);
2380 __ mr(right, scratch1);
2387 break; 2381 break;
2382 }
2388 case Token::MUL: { 2383 case Token::MUL: {
2384 Label mul_zero;
2385 #if V8_TARGET_ARCH_PPC64
2386 // Remove tag from both operands.
2389 __ SmiUntag(ip, right); 2387 __ SmiUntag(ip, right);
2390 __ smull(scratch1, scratch2, left, ip); 2388 __ SmiUntag(r0, left);
2391 __ mov(ip, Operand(scratch1, ASR, 31)); 2389 __ Mul(scratch1, r0, ip);
2392 __ cmp(ip, Operand(scratch2)); 2390 // Check for overflowing the smi range - no overflow if higher 33 bits of
2393 __ b(ne, &stub_call); 2391 // the result are identical.
2394 __ cmp(scratch1, Operand::Zero()); 2392 __ TestIfInt32(scratch1, scratch2, ip);
2395 __ mov(right, Operand(scratch1), LeaveCC, ne); 2393 __ bne(&stub_call);
2396 __ b(ne, &done); 2394 #else
2397 __ add(scratch2, right, Operand(left), SetCC); 2395 __ SmiUntag(ip, right);
2398 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl); 2396 __ mullw(scratch1, left, ip);
2399 __ b(mi, &stub_call); 2397 __ mulhw(scratch2, left, ip);
2398 // Check for overflowing the smi range - no overflow if higher 33 bits of
2399 // the result are identical.
2400 __ TestIfInt32(scratch2, scratch1, ip);
2401 __ bne(&stub_call);
2402 #endif
2403 // Go slow on zero result to handle -0.
2404 __ cmpi(scratch1, Operand::Zero());
2405 __ beq(&mul_zero);
2406 #if V8_TARGET_ARCH_PPC64
2407 __ SmiTag(right, scratch1);
2408 #else
2409 __ mr(right, scratch1);
2410 #endif
2411 __ b(&done);
2412 // The result is zero, but if either operand was negative the correct result is -0, which is not a smi.
2413 // One operand is known to be zero, so the sign of (left + right) is the sign of the other operand.
2414 __ bind(&mul_zero);
2415 __ add(scratch2, right, left);
2416 __ cmpi(scratch2, Operand::Zero());
2417 __ blt(&stub_call);
2418 __ LoadSmiLiteral(right, Smi::FromInt(0));
2400 break; 2419 break;
2401 } 2420 }
2402 case Token::BIT_OR: 2421 case Token::BIT_OR:
2403 __ orr(right, left, Operand(right)); 2422 __ orx(right, left, right);
2404 break; 2423 break;
2405 case Token::BIT_AND: 2424 case Token::BIT_AND:
2406 __ and_(right, left, Operand(right)); 2425 __ and_(right, left, right);
2407 break; 2426 break;
2408 case Token::BIT_XOR: 2427 case Token::BIT_XOR:
2409 __ eor(right, left, Operand(right)); 2428 __ xor_(right, left, right);
2410 break; 2429 break;
2411 default: 2430 default:
2412 UNREACHABLE(); 2431 UNREACHABLE();
2413 } 2432 }
2414 2433
2415 __ bind(&done); 2434 __ bind(&done);
2416 context()->Plug(r0); 2435 context()->Plug(r3);
2417 } 2436 }
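For reference, a host-side model of the 32-bit smi fast paths above, assuming kSmiTag == 0 with a one-bit tag: the orx trick tests both tags at once, ADD works directly on tagged words because tagging is linear, and MUL untags only one operand so the 64-bit product is already tagged. __builtin_add_overflow is a GCC/Clang stand-in for the overflow-checked sequences; returning false means "fall back to the stub":

#include <cstdint>

constexpr int32_t kSmiTagMask = 1;

// Combined check: the OR of two words has the tag bit clear iff both do.
bool BothSmis(int32_t a, int32_t b) { return ((a | b) & kSmiTagMask) == 0; }

// ADD: (x << 1) + (y << 1) == (x + y) << 1, so add the tagged values and
// let the overflow check stand in for AddAndCheckForOverflow.
bool SmiAdd(int32_t left, int32_t right, int32_t* out) {
  return !__builtin_add_overflow(left, right, out);
}

// MUL: untag one operand only; TestIfInt32 corresponds to the check that
// the product sign-extends from 32 bits. A zero product may really be -0
// (negative operand times zero), which is not representable as a smi.
bool SmiMul(int32_t left, int32_t right, int32_t* out) {
  int64_t product = static_cast<int64_t>(left) * (right >> 1);
  if (static_cast<int32_t>(product) != product) return false;  // overflow
  if (product == 0 && static_cast<int64_t>(left) + right < 0) return false;
  *out = static_cast<int32_t>(product);
  return true;
}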
2418 2437
2419 2438
2420 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2439 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op,
2421 Token::Value op,
2422 OverwriteMode mode) { 2440 OverwriteMode mode) {
2423 __ pop(r1); 2441 __ pop(r4);
2424 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code(); 2442 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2425 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2443 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2426 CallIC(code, expr->BinaryOperationFeedbackId()); 2444 CallIC(code, expr->BinaryOperationFeedbackId());
2427 patch_site.EmitPatchInfo(); 2445 patch_site.EmitPatchInfo();
2428 context()->Plug(r0); 2446 context()->Plug(r3);
2429 } 2447 }
2430 2448
2431 2449
2432 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2450 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2433 DCHECK(expr->IsValidReferenceExpression()); 2451 DCHECK(expr->IsValidReferenceExpression());
2434 2452
2435 // Left-hand side can only be a property, a global or a (parameter or local) 2453 // Left-hand side can only be a property, a global or a (parameter or local)
2436 // slot. 2454 // slot.
2437 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2455 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2438 LhsKind assign_type = VARIABLE; 2456 LhsKind assign_type = VARIABLE;
2439 Property* prop = expr->AsProperty(); 2457 Property* prop = expr->AsProperty();
2440 if (prop != NULL) { 2458 if (prop != NULL) {
2441 assign_type = (prop->key()->IsPropertyName()) 2459 assign_type =
2442 ? NAMED_PROPERTY 2460 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
2443 : KEYED_PROPERTY;
2444 } 2461 }
2445 2462
2446 switch (assign_type) { 2463 switch (assign_type) {
2447 case VARIABLE: { 2464 case VARIABLE: {
2448 Variable* var = expr->AsVariableProxy()->var(); 2465 Variable* var = expr->AsVariableProxy()->var();
2449 EffectContext context(this); 2466 EffectContext context(this);
2450 EmitVariableAssignment(var, Token::ASSIGN); 2467 EmitVariableAssignment(var, Token::ASSIGN);
2451 break; 2468 break;
2452 } 2469 }
2453 case NAMED_PROPERTY: { 2470 case NAMED_PROPERTY: {
2454 __ push(r0); // Preserve value. 2471 __ push(r3); // Preserve value.
2455 VisitForAccumulatorValue(prop->obj()); 2472 VisitForAccumulatorValue(prop->obj());
2456 __ Move(StoreDescriptor::ReceiverRegister(), r0); 2473 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2457 __ pop(StoreDescriptor::ValueRegister()); // Restore value. 2474 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2458 __ mov(StoreDescriptor::NameRegister(), 2475 __ mov(StoreDescriptor::NameRegister(),
2459 Operand(prop->key()->AsLiteral()->value())); 2476 Operand(prop->key()->AsLiteral()->value()));
2460 CallStoreIC(); 2477 CallStoreIC();
2461 break; 2478 break;
2462 } 2479 }
2463 case KEYED_PROPERTY: { 2480 case KEYED_PROPERTY: {
2464 __ push(r0); // Preserve value. 2481 __ push(r3); // Preserve value.
2465 VisitForStackValue(prop->obj()); 2482 VisitForStackValue(prop->obj());
2466 VisitForAccumulatorValue(prop->key()); 2483 VisitForAccumulatorValue(prop->key());
2467 __ Move(StoreDescriptor::NameRegister(), r0); 2484 __ Move(StoreDescriptor::NameRegister(), r3);
2468 __ Pop(StoreDescriptor::ValueRegister(), 2485 __ Pop(StoreDescriptor::ValueRegister(),
2469 StoreDescriptor::ReceiverRegister()); 2486 StoreDescriptor::ReceiverRegister());
2470 Handle<Code> ic = 2487 Handle<Code> ic =
2471 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 2488 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2472 CallIC(ic); 2489 CallIC(ic);
2473 break; 2490 break;
2474 } 2491 }
2475 } 2492 }
2476 context()->Plug(r0); 2493 context()->Plug(r3);
2477 } 2494 }
2478 2495
2479 2496
2480 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 2497 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2481 Variable* var, MemOperand location) { 2498 Variable* var, MemOperand location) {
2482 __ str(result_register(), location); 2499 __ StoreP(result_register(), location, r0);
2483 if (var->IsContextSlot()) { 2500 if (var->IsContextSlot()) {
2484 // RecordWrite may destroy all its register arguments. 2501 // RecordWrite may destroy all its register arguments.
2485 __ mov(r3, result_register()); 2502 __ mr(r6, result_register());
2486 int offset = Context::SlotOffset(var->index()); 2503 int offset = Context::SlotOffset(var->index());
2487 __ RecordWriteContextSlot( 2504 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2488 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 2505 kDontSaveFPRegs);
2489 } 2506 }
2490 } 2507 }
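A minimal model of what the RecordWrite* calls above guarantee: once a pointer is stored into a heap object, the slot is published to the collector's remembered set (EMIT_REMEMBERED_SET), and smi stores are filtered out the way INLINE_SMI_CHECK does. Names below are illustrative, not the V8 API:

#include <cstdint>
#include <unordered_set>

using TaggedWord = uintptr_t;
constexpr TaggedWord kHeapObjectTagBit = 1;  // assumed: smis have bit 0 clear

struct RememberedSet {
  std::unordered_set<TaggedWord*> slots;
};

void StoreWithWriteBarrier(TaggedWord* slot, TaggedWord value,
                           RememberedSet* remembered) {
  *slot = value;  // the StoreP
  // Smi stores never create pointers the GC must trace.
  if (value & kHeapObjectTagBit) remembered->slots.insert(slot);
}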
2491 2508
2492 2509
2493 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { 2510 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2494 if (var->IsUnallocated()) { 2511 if (var->IsUnallocated()) {
2495 // Global var, const, or let. 2512 // Global var, const, or let.
2496 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); 2513 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2497 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand()); 2514 __ LoadP(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2498 CallStoreIC(); 2515 CallStoreIC();
2499 2516
2500 } else if (op == Token::INIT_CONST_LEGACY) { 2517 } else if (op == Token::INIT_CONST_LEGACY) {
2501 // Const initializers need a write barrier. 2518 // Const initializers need a write barrier.
2502 DCHECK(!var->IsParameter()); // No const parameters. 2519 DCHECK(!var->IsParameter()); // No const parameters.
2503 if (var->IsLookupSlot()) { 2520 if (var->IsLookupSlot()) {
2504 __ push(r0); 2521 __ push(r3);
2505 __ mov(r0, Operand(var->name())); 2522 __ mov(r3, Operand(var->name()));
2506 __ Push(cp, r0); // Context and name. 2523 __ Push(cp, r3); // Context and name.
2507 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); 2524 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2508 } else { 2525 } else {
2509 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2526 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2510 Label skip; 2527 Label skip;
2511 MemOperand location = VarOperand(var, r1); 2528 MemOperand location = VarOperand(var, r4);
2512 __ ldr(r2, location); 2529 __ LoadP(r5, location);
2513 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2530 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2514 __ b(ne, &skip); 2531 __ bne(&skip);
2515 EmitStoreToStackLocalOrContextSlot(var, location); 2532 EmitStoreToStackLocalOrContextSlot(var, location);
2516 __ bind(&skip); 2533 __ bind(&skip);
2517 } 2534 }
2518 2535
2519 } else if (var->mode() == LET && op != Token::INIT_LET) { 2536 } else if (var->mode() == LET && op != Token::INIT_LET) {
2520 // Non-initializing assignment to let variable needs a write barrier. 2537 // Non-initializing assignment to let variable needs a write barrier.
2521 DCHECK(!var->IsLookupSlot()); 2538 DCHECK(!var->IsLookupSlot());
2522 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2539 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2523 Label assign; 2540 Label assign;
2524 MemOperand location = VarOperand(var, r1); 2541 MemOperand location = VarOperand(var, r4);
2525 __ ldr(r3, location); 2542 __ LoadP(r6, location);
2526 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 2543 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2527 __ b(ne, &assign); 2544 __ bne(&assign);
2528 __ mov(r3, Operand(var->name())); 2545 __ mov(r6, Operand(var->name()));
2529 __ push(r3); 2546 __ push(r6);
2530 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2547 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2531 // Perform the assignment. 2548 // Perform the assignment.
2532 __ bind(&assign); 2549 __ bind(&assign);
2533 EmitStoreToStackLocalOrContextSlot(var, location); 2550 EmitStoreToStackLocalOrContextSlot(var, location);
2534 2551
2535 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { 2552 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2536 if (var->IsLookupSlot()) { 2553 if (var->IsLookupSlot()) {
2537 // Assignment to var. 2554 // Assignment to var.
2538 __ push(r0); // Value. 2555 __ push(r3); // Value.
2539 __ mov(r1, Operand(var->name())); 2556 __ mov(r4, Operand(var->name()));
2540 __ mov(r0, Operand(Smi::FromInt(strict_mode()))); 2557 __ mov(r3, Operand(Smi::FromInt(strict_mode())));
2541 __ Push(cp, r1, r0); // Context, name, strict mode. 2558 __ Push(cp, r4, r3); // Context, name, strict mode.
2542 __ CallRuntime(Runtime::kStoreLookupSlot, 4); 2559 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2543 } else { 2560 } else {
2544 // Assignment to var or initializing assignment to let/const in harmony 2561 // Assignment to var or initializing assignment to let/const in harmony
2545 // mode. 2562 // mode.
2546 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); 2563 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2547 MemOperand location = VarOperand(var, r1); 2564 MemOperand location = VarOperand(var, r4);
2548 if (generate_debug_code_ && op == Token::INIT_LET) { 2565 if (generate_debug_code_ && op == Token::INIT_LET) {
2549 // Check for an uninitialized let binding. 2566 // Check for an uninitialized let binding.
2550 __ ldr(r2, location); 2567 __ LoadP(r5, location);
2551 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2568 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2552 __ Check(eq, kLetBindingReInitialization); 2569 __ Check(eq, kLetBindingReInitialization);
2553 } 2570 }
2554 EmitStoreToStackLocalOrContextSlot(var, location); 2571 EmitStoreToStackLocalOrContextSlot(var, location);
2555 } 2572 }
2556 } 2573 }
2557 // Non-initializing assignments to consts are ignored. 2574 // Non-initializing assignments to consts are ignored.
2558 } 2575 }
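The hole checks above implement two policies with one sentinel: a non-initializing store to a let throws if the binding is still the hole, while a legacy const keeps only its first store and silently ignores the rest. A sketch, with kTheHole standing in for the value at Heap::kTheHoleValueRootIndex:

#include <stdexcept>

static const void* const kTheHole = &kTheHole;  // illustrative sentinel

// LET, non-initializing: the hole means "not yet initialized", so throw
// (the CompareRoot / bne(&assign) / kThrowReferenceError path).
void AssignLet(const void** slot, const void* value) {
  if (*slot == kTheHole) throw std::runtime_error("ReferenceError");
  *slot = value;
}

// Legacy const: only the initializing store takes effect (the bne(&skip)).
void InitLegacyConst(const void** slot, const void* value) {
  if (*slot != kTheHole) return;
  *slot = value;
}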
2559 2576
2560 2577
2561 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2578 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2562 // Assignment to a property, using a named store IC. 2579 // Assignment to a property, using a named store IC.
2563 Property* prop = expr->target()->AsProperty(); 2580 Property* prop = expr->target()->AsProperty();
2564 DCHECK(prop != NULL); 2581 DCHECK(prop != NULL);
2565 DCHECK(prop->key()->IsLiteral()); 2582 DCHECK(prop->key()->IsLiteral());
2566 2583
2567 // Record source code position before IC call. 2584 // Record source code position before IC call.
2568 SetSourcePosition(expr->position()); 2585 SetSourcePosition(expr->position());
2569 __ mov(StoreDescriptor::NameRegister(), 2586 __ mov(StoreDescriptor::NameRegister(),
2570 Operand(prop->key()->AsLiteral()->value())); 2587 Operand(prop->key()->AsLiteral()->value()));
2571 __ pop(StoreDescriptor::ReceiverRegister()); 2588 __ pop(StoreDescriptor::ReceiverRegister());
2572 CallStoreIC(expr->AssignmentFeedbackId()); 2589 CallStoreIC(expr->AssignmentFeedbackId());
2573 2590
2574 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2591 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2575 context()->Plug(r0); 2592 context()->Plug(r3);
2576 } 2593 }
2577 2594
2578 2595
2579 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2596 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2580 // Assignment to a property, using a keyed store IC. 2597 // Assignment to a property, using a keyed store IC.
2581 2598
2582 // Record source code position before IC call. 2599 // Record source code position before IC call.
2583 SetSourcePosition(expr->position()); 2600 SetSourcePosition(expr->position());
2584 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister()); 2601 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2585 DCHECK(StoreDescriptor::ValueRegister().is(r0)); 2602 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2586 2603
2587 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 2604 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2588 CallIC(ic, expr->AssignmentFeedbackId()); 2605 CallIC(ic, expr->AssignmentFeedbackId());
2589 2606
2590 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2607 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2591 context()->Plug(r0); 2608 context()->Plug(r3);
2592 } 2609 }
2593 2610
2594 2611
2595 void FullCodeGenerator::VisitProperty(Property* expr) { 2612 void FullCodeGenerator::VisitProperty(Property* expr) {
2596 Comment cmnt(masm_, "[ Property"); 2613 Comment cmnt(masm_, "[ Property");
2597 Expression* key = expr->key(); 2614 Expression* key = expr->key();
2598 2615
2599 if (key->IsPropertyName()) { 2616 if (key->IsPropertyName()) {
2600 VisitForAccumulatorValue(expr->obj()); 2617 if (!expr->IsSuperAccess()) {
2601 __ Move(LoadDescriptor::ReceiverRegister(), r0); 2618 VisitForAccumulatorValue(expr->obj());
2602 EmitNamedPropertyLoad(expr); 2619 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2620 EmitNamedPropertyLoad(expr);
2621 } else {
2622 EmitNamedSuperPropertyLoad(expr);
2623 }
2603 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2624 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2604 context()->Plug(r0); 2625 context()->Plug(r3);
2605 } else { 2626 } else {
2606 VisitForStackValue(expr->obj()); 2627 VisitForStackValue(expr->obj());
2607 VisitForAccumulatorValue(expr->key()); 2628 VisitForAccumulatorValue(expr->key());
2608 __ Move(LoadDescriptor::NameRegister(), r0); 2629 __ Move(LoadDescriptor::NameRegister(), r3);
2609 __ pop(LoadDescriptor::ReceiverRegister()); 2630 __ pop(LoadDescriptor::ReceiverRegister());
2610 EmitKeyedPropertyLoad(expr); 2631 EmitKeyedPropertyLoad(expr);
2611 context()->Plug(r0); 2632 context()->Plug(r3);
2612 } 2633 }
2613 } 2634 }
2614 2635
2615 2636
2616 void FullCodeGenerator::CallIC(Handle<Code> code, 2637 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2617 TypeFeedbackId ast_id) {
2618 ic_total_count_++; 2638 ic_total_count_++;
2619 // All calls must have a predictable size in full-codegen code to ensure that 2639 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2620 // the debugger can patch them correctly.
2621 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2622 NEVER_INLINE_TARGET_ADDRESS);
2623 } 2640 }
2624 2641
2625 2642
2626 // Code common for calls using the IC. 2643 // Code common for calls using the IC.
2627 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { 2644 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2628 Expression* callee = expr->expression(); 2645 Expression* callee = expr->expression();
2629 2646
2630 CallIC::CallType call_type = callee->IsVariableProxy() 2647 CallICState::CallType call_type =
2631 ? CallIC::FUNCTION 2648 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2632 : CallIC::METHOD;
2633 2649
2634 // Get the target function. 2650 // Get the target function.
2635 if (call_type == CallIC::FUNCTION) { 2651 if (call_type == CallICState::FUNCTION) {
2636 { StackValueContext context(this); 2652 {
2653 StackValueContext context(this);
2637 EmitVariableLoad(callee->AsVariableProxy()); 2654 EmitVariableLoad(callee->AsVariableProxy());
2638 PrepareForBailout(callee, NO_REGISTERS); 2655 PrepareForBailout(callee, NO_REGISTERS);
2639 } 2656 }
2640 // Push undefined as receiver. This is patched in the method prologue if it 2657 // Push undefined as receiver. This is patched in the method prologue if it
2641 // is a sloppy mode method. 2658 // is a sloppy mode method.
2642 __ Push(isolate()->factory()->undefined_value()); 2659 __ Push(isolate()->factory()->undefined_value());
2643 } else { 2660 } else {
2644 // Load the function from the receiver. 2661 // Load the function from the receiver.
2645 DCHECK(callee->IsProperty()); 2662 DCHECK(callee->IsProperty());
2646 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2663 DCHECK(!callee->AsProperty()->IsSuperAccess());
2664 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2647 EmitNamedPropertyLoad(callee->AsProperty()); 2665 EmitNamedPropertyLoad(callee->AsProperty());
2648 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2666 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2649 // Push the target function under the receiver. 2667 // Push the target function under the receiver.
2650 __ ldr(ip, MemOperand(sp, 0)); 2668 __ LoadP(ip, MemOperand(sp, 0));
2651 __ push(ip); 2669 __ push(ip);
2652 __ str(r0, MemOperand(sp, kPointerSize)); 2670 __ StoreP(r3, MemOperand(sp, kPointerSize));
2653 } 2671 }
2654 2672
2655 EmitCall(expr, call_type); 2673 EmitCall(expr, call_type);
2656 } 2674 }
2657 2675
2658 2676
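The three-instruction shuffle at the end of the method path (load the stack top into ip, push it, store r3 one slot down) leaves the loaded callee under the receiver. The same effect modeled against a std::vector whose back() is the stack top (illustrative only, not V8 code):

    #include <cstdint>
    #include <vector>

    // Before: [..., receiver]   After: [..., callee, receiver]
    void PushFunctionUnderReceiver(std::vector<intptr_t>& stack,
                                   intptr_t callee /* accumulator, r3 */) {
      intptr_t receiver = stack.back();  // LoadP ip, MemOperand(sp, 0)
      stack.push_back(receiver);         // push ip
      stack[stack.size() - 2] = callee;  // StoreP r3, MemOperand(sp, kPointerSize)
    }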
2677 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2678 Expression* callee = expr->expression();
2679 DCHECK(callee->IsProperty());
2680 Property* prop = callee->AsProperty();
2681 DCHECK(prop->IsSuperAccess());
2682
2683 SetSourcePosition(prop->position());
2684 Literal* key = prop->key()->AsLiteral();
2685 DCHECK(!key->value()->IsSmi());
2686 // Load the function from the receiver.
2687 const Register scratch = r4;
2688 SuperReference* super_ref = prop->obj()->AsSuperReference();
2689 EmitLoadHomeObject(super_ref);
2690 __ Push(r3);
2691 VisitForAccumulatorValue(super_ref->this_var());
2692 __ Push(r3);
2693 __ LoadP(scratch, MemOperand(sp, kPointerSize));
2694 __ Push(scratch);
2695 __ Push(r3);
2696 __ Push(key->value());
2697
2698 // Stack here:
2699 // - home_object
2700 // - this (receiver)
2701 // - home_object <-- LoadFromSuper will pop here and below.
2702 // - this (receiver)
2703 // - key
2704 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2705
2706 // Replace home_object with target function.
2707 __ StoreP(r3, MemOperand(sp, kPointerSize));
2708
2709 // Stack here:
2710 // - target function
2711 // - this (receiver)
2712 EmitCall(expr, CallICState::METHOD);
2713 }
2714
2715
2659 // Code common for calls using the IC. 2716 // Code common for calls using the IC.
2660 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2717 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2661 Expression* key) {
2662 // Load the key. 2718 // Load the key.
2663 VisitForAccumulatorValue(key); 2719 VisitForAccumulatorValue(key);
2664 2720
2665 Expression* callee = expr->expression(); 2721 Expression* callee = expr->expression();
2666 2722
2667 // Load the function from the receiver. 2723 // Load the function from the receiver.
2668 DCHECK(callee->IsProperty()); 2724 DCHECK(callee->IsProperty());
2669 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2725 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2670 __ Move(LoadDescriptor::NameRegister(), r0); 2726 __ Move(LoadDescriptor::NameRegister(), r3);
2671 EmitKeyedPropertyLoad(callee->AsProperty()); 2727 EmitKeyedPropertyLoad(callee->AsProperty());
2672 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2728 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2673 2729
2674 // Push the target function under the receiver. 2730 // Push the target function under the receiver.
2675 __ ldr(ip, MemOperand(sp, 0)); 2731 __ LoadP(ip, MemOperand(sp, 0));
2676 __ push(ip); 2732 __ push(ip);
2677 __ str(r0, MemOperand(sp, kPointerSize)); 2733 __ StoreP(r3, MemOperand(sp, kPointerSize));
2678 2734
2679 EmitCall(expr, CallIC::METHOD); 2735 EmitCall(expr, CallICState::METHOD);
2680 } 2736 }
2681 2737
2682 2738
2683 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) { 2739 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2684 // Load the arguments. 2740 // Load the arguments.
2685 ZoneList<Expression*>* args = expr->arguments(); 2741 ZoneList<Expression*>* args = expr->arguments();
2686 int arg_count = args->length(); 2742 int arg_count = args->length();
2687 { PreservePositionScope scope(masm()->positions_recorder()); 2743 {
2744 PreservePositionScope scope(masm()->positions_recorder());
2688 for (int i = 0; i < arg_count; i++) { 2745 for (int i = 0; i < arg_count; i++) {
2689 VisitForStackValue(args->at(i)); 2746 VisitForStackValue(args->at(i));
2690 } 2747 }
2691 } 2748 }
2692 2749
2693 // Record source position of the IC call. 2750 // Record source position of the IC call.
2694 SetSourcePosition(expr->position()); 2751 SetSourcePosition(expr->position());
2695 Handle<Code> ic = CallIC::initialize_stub( 2752 Handle<Code> ic = CallIC::initialize_stub(isolate(), arg_count, call_type);
2696 isolate(), arg_count, call_type); 2753 __ LoadSmiLiteral(r6, Smi::FromInt(expr->CallFeedbackSlot()));
2697 __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); 2754 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2698 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2699 // Don't assign a type feedback id to the IC, since type feedback is provided 2755 // Don't assign a type feedback id to the IC, since type feedback is provided
2700 // by the vector above. 2756 // by the vector above.
2701 CallIC(ic); 2757 CallIC(ic);
2702 2758
2703 RecordJSReturnSite(expr); 2759 RecordJSReturnSite(expr);
2704 // Restore context register. 2760 // Restore context register.
2705 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2761 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2706 context()->DropAndPlug(1, r0); 2762 context()->DropAndPlug(1, r3);
2707 } 2763 }
2708 2764
2709 2765
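For the LoadP of r4 above: once the receiver and arg_count arguments have been pushed, the callee sits (arg_count + 1) slots below the stack top. The same address computation against a vector-backed stack (sketch under that layout assumption):

    #include <cstdint>
    #include <vector>

    // Layout, top of stack first: arg_{n-1} ... arg_0, receiver, callee.
    intptr_t LoadCallee(const std::vector<intptr_t>& stack, int arg_count) {
      // MemOperand(sp, (arg_count + 1) * kPointerSize)
      return stack[stack.size() - 1 - (arg_count + 1)];
    }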
2710 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2766 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2711 // r4: copy of the first argument or undefined if it doesn't exist. 2767 // r7: copy of the first argument or undefined if it doesn't exist.
2712 if (arg_count > 0) { 2768 if (arg_count > 0) {
2713 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize)); 2769 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
2714 } else { 2770 } else {
2715 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); 2771 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
2716 } 2772 }
2717 2773
2718 // r3: the receiver of the enclosing function. 2774 // r6: the receiver of the enclosing function.
2719 int receiver_offset = 2 + info_->scope()->num_parameters(); 2775 int receiver_offset = 2 + info_->scope()->num_parameters();
2720 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize)); 2776 __ LoadP(r6, MemOperand(fp, receiver_offset * kPointerSize), r0);
2721 2777
2722 // r2: strict mode. 2778 // r5: strict mode.
2723 __ mov(r2, Operand(Smi::FromInt(strict_mode()))); 2779 __ LoadSmiLiteral(r5, Smi::FromInt(strict_mode()));
2724 2780
2725 // r1: the start position of the scope the call resides in. 2781 // r4: the start position of the scope the call resides in.
2726 __ mov(r1, Operand(Smi::FromInt(scope()->start_position()))); 2782 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
2727 2783
2728 // Do the runtime call. 2784 // Do the runtime call.
2729 __ Push(r4, r3, r2, r1); 2785 __ Push(r7, r6, r5, r4);
2730 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); 2786 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2731 } 2787 }
2732 2788
2733 2789
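The five arguments consumed by the kResolvePossiblyDirectEval call are the function copy pushed by the caller plus the four registers pushed here. As a record, with descriptive stand-in field names:

    #include <cstdint>

    struct ResolveEvalArgs {
      intptr_t callee;           // pushed by VisitCall before this helper runs
      intptr_t first_argument;   // r7: first arg, or undefined if there is none
      intptr_t receiver;         // r6: receiver of the enclosing function
      intptr_t strict_mode_smi;  // r5: Smi-encoded strict mode
      intptr_t start_position;   // r4: Smi-encoded scope start position
    };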
2734 void FullCodeGenerator::VisitCall(Call* expr) { 2790 void FullCodeGenerator::VisitCall(Call* expr) {
2735 #ifdef DEBUG 2791 #ifdef DEBUG
2736 // We want to verify that RecordJSReturnSite gets called on all paths 2792 // We want to verify that RecordJSReturnSite gets called on all paths
2737 // through this function. Avoid early returns. 2793 // through this function. Avoid early returns.
2738 expr->return_is_recorded_ = false; 2794 expr->return_is_recorded_ = false;
2739 #endif 2795 #endif
2740 2796
2741 Comment cmnt(masm_, "[ Call"); 2797 Comment cmnt(masm_, "[ Call");
2742 Expression* callee = expr->expression(); 2798 Expression* callee = expr->expression();
2743 Call::CallType call_type = expr->GetCallType(isolate()); 2799 Call::CallType call_type = expr->GetCallType(isolate());
2744 2800
2745 if (call_type == Call::POSSIBLY_EVAL_CALL) { 2801 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2746 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval 2802 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2747 // to resolve the function we need to call and the receiver of the 2803 // to resolve the function we need to call and the receiver of the
2748 // call. Then we call the resolved function using the given 2804 // call. Then we call the resolved function using the given
2749 // arguments. 2805 // arguments.
2750 ZoneList<Expression*>* args = expr->arguments(); 2806 ZoneList<Expression*>* args = expr->arguments();
2751 int arg_count = args->length(); 2807 int arg_count = args->length();
2752 2808
2753 { PreservePositionScope pos_scope(masm()->positions_recorder()); 2809 {
2810 PreservePositionScope pos_scope(masm()->positions_recorder());
2754 VisitForStackValue(callee); 2811 VisitForStackValue(callee);
2755 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2812 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2756 __ push(r2); // Reserved receiver slot. 2813 __ push(r5); // Reserved receiver slot.
2757 2814
2758 // Push the arguments. 2815 // Push the arguments.
2759 for (int i = 0; i < arg_count; i++) { 2816 for (int i = 0; i < arg_count; i++) {
2760 VisitForStackValue(args->at(i)); 2817 VisitForStackValue(args->at(i));
2761 } 2818 }
2762 2819
2763 // Push a copy of the function (found below the arguments) and 2820 // Push a copy of the function (found below the arguments) and
2764 // resolve eval. 2821 // resolve eval.
2765 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2822 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2766 __ push(r1); 2823 __ push(r4);
2767 EmitResolvePossiblyDirectEval(arg_count); 2824 EmitResolvePossiblyDirectEval(arg_count);
2768 2825
2769 // The runtime call returns a pair of values in r0 (function) and 2826 // The runtime call returns a pair of values in r3 (function) and
2770 // r1 (receiver). Touch up the stack with the right values. 2827 // r4 (receiver). Touch up the stack with the right values.
2771 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2828 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2772 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); 2829 __ StoreP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
2773 } 2830 }
2774 2831
2775 // Record source position for debugger. 2832 // Record source position for debugger.
2776 SetSourcePosition(expr->position()); 2833 SetSourcePosition(expr->position());
2777 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); 2834 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2778 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2835 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2779 __ CallStub(&stub); 2836 __ CallStub(&stub);
2780 RecordJSReturnSite(expr); 2837 RecordJSReturnSite(expr);
2781 // Restore context register. 2838 // Restore context register.
2782 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2839 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2783 context()->DropAndPlug(1, r0); 2840 context()->DropAndPlug(1, r3);
2784 } else if (call_type == Call::GLOBAL_CALL) { 2841 } else if (call_type == Call::GLOBAL_CALL) {
2785 EmitCallWithLoadIC(expr); 2842 EmitCallWithLoadIC(expr);
2786 2843
2787 } else if (call_type == Call::LOOKUP_SLOT_CALL) { 2844 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2788 // Call to a lookup slot (dynamically introduced variable). 2845 // Call to a lookup slot (dynamically introduced variable).
2789 VariableProxy* proxy = callee->AsVariableProxy(); 2846 VariableProxy* proxy = callee->AsVariableProxy();
2790 Label slow, done; 2847 Label slow, done;
2791 2848
2792 { PreservePositionScope scope(masm()->positions_recorder()); 2849 {
2850 PreservePositionScope scope(masm()->positions_recorder());
2793 // Generate code for loading from variables potentially shadowed 2851 // Generate code for loading from variables potentially shadowed
2794 // by eval-introduced variables. 2852 // by eval-introduced variables.
2795 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done); 2853 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2796 } 2854 }
2797 2855
2798 __ bind(&slow); 2856 __ bind(&slow);
2799 // Call the runtime to find the function to call (returned in r0) 2857 // Call the runtime to find the function to call (returned in r3)
2800 // and the object holding it (returned in r1). 2858 // and the object holding it (returned in r4).
2801 DCHECK(!context_register().is(r2)); 2859 DCHECK(!context_register().is(r5));
2802 __ mov(r2, Operand(proxy->name())); 2860 __ mov(r5, Operand(proxy->name()));
2803 __ Push(context_register(), r2); 2861 __ Push(context_register(), r5);
2804 __ CallRuntime(Runtime::kLoadLookupSlot, 2); 2862 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2805 __ Push(r0, r1); // Function, receiver. 2863 __ Push(r3, r4); // Function, receiver.
2806 2864
2807 // If fast case code has been generated, emit code to push the 2865 // If fast case code has been generated, emit code to push the
2808 // function and receiver and have the slow path jump around this 2866 // function and receiver and have the slow path jump around this
2809 // code. 2867 // code.
2810 if (done.is_linked()) { 2868 if (done.is_linked()) {
2811 Label call; 2869 Label call;
2812 __ b(&call); 2870 __ b(&call);
2813 __ bind(&done); 2871 __ bind(&done);
2814 // Push function. 2872 // Push function.
2815 __ push(r0); 2873 __ push(r3);
2816 // The receiver is implicitly the global receiver. Indicate this 2874 // The receiver is implicitly the global receiver. Indicate this
2817 // by passing undefined to the call function stub. 2875 // by passing undefined to the call function stub.
2818 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 2876 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2819 __ push(r1); 2877 __ push(r4);
2820 __ bind(&call); 2878 __ bind(&call);
2821 } 2879 }
2822 2880
2823 // The receiver is either the global receiver or an object found 2881 // The receiver is either the global receiver or an object found
2824 // by LoadContextSlot. 2882 // by LoadContextSlot.
2825 EmitCall(expr); 2883 EmitCall(expr);
2826 } else if (call_type == Call::PROPERTY_CALL) { 2884 } else if (call_type == Call::PROPERTY_CALL) {
2827 Property* property = callee->AsProperty(); 2885 Property* property = callee->AsProperty();
2828 { PreservePositionScope scope(masm()->positions_recorder()); 2886 bool is_named_call = property->key()->IsPropertyName();
2829 VisitForStackValue(property->obj()); 2887 // super.x() is handled in EmitCallWithLoadIC.
2830 } 2888 if (property->IsSuperAccess() && is_named_call) {
2831 if (property->key()->IsPropertyName()) { 2889 EmitSuperCallWithLoadIC(expr);
2832 EmitCallWithLoadIC(expr);
2833 } else { 2890 } else {
2834 EmitKeyedCallWithLoadIC(expr, property->key()); 2891 {
2892 PreservePositionScope scope(masm()->positions_recorder());
2893 VisitForStackValue(property->obj());
2894 }
2895 if (is_named_call) {
2896 EmitCallWithLoadIC(expr);
2897 } else {
2898 EmitKeyedCallWithLoadIC(expr, property->key());
2899 }
2835 } 2900 }
2836 } else { 2901 } else {
2837 DCHECK(call_type == Call::OTHER_CALL); 2902 DCHECK(call_type == Call::OTHER_CALL);
2838 // Call to an arbitrary expression not handled specially above. 2903 // Call to an arbitrary expression not handled specially above.
2839 { PreservePositionScope scope(masm()->positions_recorder()); 2904 {
2905 PreservePositionScope scope(masm()->positions_recorder());
2840 VisitForStackValue(callee); 2906 VisitForStackValue(callee);
2841 } 2907 }
2842 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 2908 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2843 __ push(r1); 2909 __ push(r4);
2844 // Emit function call. 2910 // Emit function call.
2845 EmitCall(expr); 2911 EmitCall(expr);
2846 } 2912 }
2847 2913
2848 #ifdef DEBUG 2914 #ifdef DEBUG
2849 // RecordJSReturnSite should have been called. 2915 // RecordJSReturnSite should have been called.
2850 DCHECK(expr->return_is_recorded_); 2916 DCHECK(expr->return_is_recorded_);
2851 #endif 2917 #endif
2852 } 2918 }
2853 2919
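VisitCall is a five-way dispatch on Call::GetCallType. Its shape, reduced to a switch whose bodies are summarized as strings (enum values mirror the ones used above; this is a reading aid, not V8 code):

    enum CallType { POSSIBLY_EVAL_CALL, GLOBAL_CALL, LOOKUP_SLOT_CALL,
                    PROPERTY_CALL, OTHER_CALL };

    const char* DispatchDescription(CallType t, bool is_named, bool is_super) {
      switch (t) {
        case POSSIBLY_EVAL_CALL: return "resolve eval, then CallFunctionStub";
        case GLOBAL_CALL:        return "EmitCallWithLoadIC";
        case LOOKUP_SLOT_CALL:   return "fast/slow lookup, then EmitCall";
        case PROPERTY_CALL:
          if (is_super && is_named) return "EmitSuperCallWithLoadIC";
          return is_named ? "EmitCallWithLoadIC" : "EmitKeyedCallWithLoadIC";
        case OTHER_CALL:         return "push undefined receiver, then EmitCall";
      }
      return "";
    }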
(...skipping 13 matching lines...)
2867 ZoneList<Expression*>* args = expr->arguments(); 2933 ZoneList<Expression*>* args = expr->arguments();
2868 int arg_count = args->length(); 2934 int arg_count = args->length();
2869 for (int i = 0; i < arg_count; i++) { 2935 for (int i = 0; i < arg_count; i++) {
2870 VisitForStackValue(args->at(i)); 2936 VisitForStackValue(args->at(i));
2871 } 2937 }
2872 2938
2873 // Call the construct call builtin that handles allocation and 2939 // Call the construct call builtin that handles allocation and
2874 // constructor invocation. 2940 // constructor invocation.
2875 SetSourcePosition(expr->position()); 2941 SetSourcePosition(expr->position());
2876 2942
2877 // Load function and argument count into r1 and r0. 2943 // Load function and argument count into r4 and r3.
2878 __ mov(r0, Operand(arg_count)); 2944 __ mov(r3, Operand(arg_count));
2879 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 2945 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
2880 2946
2881 // Record call targets in unoptimized code. 2947 // Record call targets in unoptimized code.
2882 if (FLAG_pretenuring_call_new) { 2948 if (FLAG_pretenuring_call_new) {
2883 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); 2949 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2884 DCHECK(expr->AllocationSiteFeedbackSlot() == 2950 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2885 expr->CallNewFeedbackSlot() + 1); 2951 expr->CallNewFeedbackSlot() + 1);
2886 } 2952 }
2887 2953
2888 __ Move(r2, FeedbackVector()); 2954 __ Move(r5, FeedbackVector());
2889 __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); 2955 __ LoadSmiLiteral(r6, Smi::FromInt(expr->CallNewFeedbackSlot()));
2890 2956
2891 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); 2957 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2892 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); 2958 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2893 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2959 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2894 context()->Plug(r0); 2960 context()->Plug(r3);
2895 } 2961 }
2896 2962
2897 2963
2898 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2964 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2899 ZoneList<Expression*>* args = expr->arguments(); 2965 ZoneList<Expression*>* args = expr->arguments();
2900 DCHECK(args->length() == 1); 2966 DCHECK(args->length() == 1);
2901 2967
2902 VisitForAccumulatorValue(args->at(0)); 2968 VisitForAccumulatorValue(args->at(0));
2903 2969
2904 Label materialize_true, materialize_false; 2970 Label materialize_true, materialize_false;
2905 Label* if_true = NULL; 2971 Label* if_true = NULL;
2906 Label* if_false = NULL; 2972 Label* if_false = NULL;
2907 Label* fall_through = NULL; 2973 Label* fall_through = NULL;
2908 context()->PrepareTest(&materialize_true, &materialize_false, 2974 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2909 &if_true, &if_false, &fall_through); 2975 &if_false, &fall_through);
2910 2976
2911 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2977 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2912 __ SmiTst(r0); 2978 __ TestIfSmi(r3, r0);
2913 Split(eq, if_true, if_false, fall_through); 2979 Split(eq, if_true, if_false, fall_through, cr0);
2914 2980
2915 context()->Plug(if_true, if_false); 2981 context()->Plug(if_true, if_false);
2916 } 2982 }
2917 2983
2918 2984
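TestIfSmi works because V8 tags small integers with a zero low bit and heap pointers with a one. A 32-bit-flavoured sketch (on 64-bit targets the payload occupies the upper word, but the tag test is the same bit-0 check):

    #include <cstdint>

    inline bool IsSmi(intptr_t value) { return (value & 1) == 0; }
    inline intptr_t ToSmi(int32_t n) { return static_cast<intptr_t>(n) << 1; }
    inline int32_t SmiValue(intptr_t v) {
      return static_cast<int32_t>(v >> 1);  // arithmetic shift undoes the tag
    }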
2919 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 2985 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2920 ZoneList<Expression*>* args = expr->arguments(); 2986 ZoneList<Expression*>* args = expr->arguments();
2921 DCHECK(args->length() == 1); 2987 DCHECK(args->length() == 1);
2922 2988
2923 VisitForAccumulatorValue(args->at(0)); 2989 VisitForAccumulatorValue(args->at(0));
2924 2990
2925 Label materialize_true, materialize_false; 2991 Label materialize_true, materialize_false;
2926 Label* if_true = NULL; 2992 Label* if_true = NULL;
2927 Label* if_false = NULL; 2993 Label* if_false = NULL;
2928 Label* fall_through = NULL; 2994 Label* fall_through = NULL;
2929 context()->PrepareTest(&materialize_true, &materialize_false, 2995 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2930 &if_true, &if_false, &fall_through); 2996 &if_false, &fall_through);
2931 2997
2932 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2998 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2933 __ NonNegativeSmiTst(r0); 2999 __ TestIfPositiveSmi(r3, r0);
2934 Split(eq, if_true, if_false, fall_through); 3000 Split(eq, if_true, if_false, fall_through, cr0);
2935 3001
2936 context()->Plug(if_true, if_false); 3002 context()->Plug(if_true, if_false);
2937 } 3003 }
2938 3004
2939 3005
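TestIfPositiveSmi folds two checks into one mask: the value is a smi (bit 0 clear) and its sign bit is clear. On a 32-bit layout:

    #include <cstdint>

    inline bool IsNonNegativeSmi(uint32_t value) {
      // sign bit | smi tag bit; both must be zero
      return (value & 0x80000001u) == 0;
    }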
2940 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 3006 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2941 ZoneList<Expression*>* args = expr->arguments(); 3007 ZoneList<Expression*>* args = expr->arguments();
2942 DCHECK(args->length() == 1); 3008 DCHECK(args->length() == 1);
2943 3009
2944 VisitForAccumulatorValue(args->at(0)); 3010 VisitForAccumulatorValue(args->at(0));
2945 3011
2946 Label materialize_true, materialize_false; 3012 Label materialize_true, materialize_false;
2947 Label* if_true = NULL; 3013 Label* if_true = NULL;
2948 Label* if_false = NULL; 3014 Label* if_false = NULL;
2949 Label* fall_through = NULL; 3015 Label* fall_through = NULL;
2950 context()->PrepareTest(&materialize_true, &materialize_false, 3016 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2951 &if_true, &if_false, &fall_through); 3017 &if_false, &fall_through);
2952 3018
2953 __ JumpIfSmi(r0, if_false); 3019 __ JumpIfSmi(r3, if_false);
2954 __ LoadRoot(ip, Heap::kNullValueRootIndex); 3020 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2955 __ cmp(r0, ip); 3021 __ cmp(r3, ip);
2956 __ b(eq, if_true); 3022 __ beq(if_true);
2957 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 3023 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
2958 // Undetectable objects behave like undefined when tested with typeof. 3024 // Undetectable objects behave like undefined when tested with typeof.
2959 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); 3025 __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
2960 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3026 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
2961 __ b(ne, if_false); 3027 __ bne(if_false, cr0);
2962 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); 3028 __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
2963 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 3029 __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2964 __ b(lt, if_false); 3030 __ blt(if_false);
2965 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); 3031 __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2966 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3032 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2967 Split(le, if_true, if_false, fall_through); 3033 Split(le, if_true, if_false, fall_through);
2968 3034
2969 context()->Plug(if_true, if_false); 3035 context()->Plug(if_true, if_false);
2970 } 3036 }
2971 3037
2972 3038
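The %_IsObject test above, restated: null counts as an object; any other candidate must be a non-smi heap object that is not undetectable and whose instance type lies in the non-callable spec-object range. A boolean restatement (the constants are parameters here because the real ones live in V8 headers):

    #include <cstdint>

    bool IsObjectResult(bool is_null, uint8_t map_bit_field,
                        uint8_t instance_type, uint8_t first_noncallable,
                        uint8_t last_noncallable, uint8_t is_undetectable_mask) {
      if (is_null) return true;
      if (map_bit_field & is_undetectable_mask) return false;  // acts like undefined
      return first_noncallable <= instance_type &&
             instance_type <= last_noncallable;
    }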
2973 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 3039 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2974 ZoneList<Expression*>* args = expr->arguments(); 3040 ZoneList<Expression*>* args = expr->arguments();
2975 DCHECK(args->length() == 1); 3041 DCHECK(args->length() == 1);
2976 3042
2977 VisitForAccumulatorValue(args->at(0)); 3043 VisitForAccumulatorValue(args->at(0));
2978 3044
2979 Label materialize_true, materialize_false; 3045 Label materialize_true, materialize_false;
2980 Label* if_true = NULL; 3046 Label* if_true = NULL;
2981 Label* if_false = NULL; 3047 Label* if_false = NULL;
2982 Label* fall_through = NULL; 3048 Label* fall_through = NULL;
2983 context()->PrepareTest(&materialize_true, &materialize_false, 3049 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2984 &if_true, &if_false, &fall_through); 3050 &if_false, &fall_through);
2985 3051
2986 __ JumpIfSmi(r0, if_false); 3052 __ JumpIfSmi(r3, if_false);
2987 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 3053 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
2988 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3054 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2989 Split(ge, if_true, if_false, fall_through); 3055 Split(ge, if_true, if_false, fall_through);
2990 3056
2991 context()->Plug(if_true, if_false); 3057 context()->Plug(if_true, if_false);
2992 } 3058 }
2993 3059
2994 3060
2995 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 3061 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2996 ZoneList<Expression*>* args = expr->arguments(); 3062 ZoneList<Expression*>* args = expr->arguments();
2997 DCHECK(args->length() == 1); 3063 DCHECK(args->length() == 1);
2998 3064
2999 VisitForAccumulatorValue(args->at(0)); 3065 VisitForAccumulatorValue(args->at(0));
3000 3066
3001 Label materialize_true, materialize_false; 3067 Label materialize_true, materialize_false;
3002 Label* if_true = NULL; 3068 Label* if_true = NULL;
3003 Label* if_false = NULL; 3069 Label* if_false = NULL;
3004 Label* fall_through = NULL; 3070 Label* fall_through = NULL;
3005 context()->PrepareTest(&materialize_true, &materialize_false, 3071 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3006 &if_true, &if_false, &fall_through); 3072 &if_false, &fall_through);
3007 3073
3008 __ JumpIfSmi(r0, if_false); 3074 __ JumpIfSmi(r3, if_false);
3009 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3075 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3010 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 3076 __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
3011 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3077 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3012 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3078 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3013 Split(ne, if_true, if_false, fall_through); 3079 Split(ne, if_true, if_false, fall_through, cr0);
3014 3080
3015 context()->Plug(if_true, if_false); 3081 context()->Plug(if_true, if_false);
3016 } 3082 }
3017 3083
3018 3084
3019 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 3085 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3020 CallRuntime* expr) { 3086 CallRuntime* expr) {
3021 ZoneList<Expression*>* args = expr->arguments(); 3087 ZoneList<Expression*>* args = expr->arguments();
3022 DCHECK(args->length() == 1); 3088 DCHECK(args->length() == 1);
3023 3089
3024 VisitForAccumulatorValue(args->at(0)); 3090 VisitForAccumulatorValue(args->at(0));
3025 3091
3026 Label materialize_true, materialize_false, skip_lookup; 3092 Label materialize_true, materialize_false, skip_lookup;
3027 Label* if_true = NULL; 3093 Label* if_true = NULL;
3028 Label* if_false = NULL; 3094 Label* if_false = NULL;
3029 Label* fall_through = NULL; 3095 Label* fall_through = NULL;
3030 context()->PrepareTest(&materialize_true, &materialize_false, 3096 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3031 &if_true, &if_false, &fall_through); 3097 &if_false, &fall_through);
3032 3098
3033 __ AssertNotSmi(r0); 3099 __ AssertNotSmi(r3);
3034 3100
3035 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3101 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3036 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); 3102 __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
3037 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3103 __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3038 __ b(ne, &skip_lookup); 3104 __ bne(&skip_lookup, cr0);
3039 3105
3040 // Check for fast case object. Generate false result for slow case object. 3106 // Check for fast case object. Generate false result for slow case object.
3041 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 3107 __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
3042 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 3108 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3043 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 3109 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3044 __ cmp(r2, ip); 3110 __ cmp(r5, ip);
3045 __ b(eq, if_false); 3111 __ beq(if_false);
3046 3112
3047 // Look for valueOf name in the descriptor array, and indicate false if 3113 // Look for valueOf name in the descriptor array, and indicate false if
3048 // found. Since we omit an enumeration index check, if it is added via a 3114 // found. Since we omit an enumeration index check, if it is added via a
3049 // transition that shares its descriptor array, this is a false positive. 3115 // transition that shares its descriptor array, this is a false positive.
3050 Label entry, loop, done; 3116 Label entry, loop, done;
3051 3117
3052 // Skip loop if no descriptors are valid. 3118 // Skip loop if no descriptors are valid.
3053 __ NumberOfOwnDescriptors(r3, r1); 3119 __ NumberOfOwnDescriptors(r6, r4);
3054 __ cmp(r3, Operand::Zero()); 3120 __ cmpi(r6, Operand::Zero());
3055 __ b(eq, &done); 3121 __ beq(&done);
3056 3122
3057 __ LoadInstanceDescriptors(r1, r4); 3123 __ LoadInstanceDescriptors(r4, r7);
3058 // r4: descriptor array. 3124 // r7: descriptor array.
3059 // r3: valid entries in the descriptor array. 3125 // r6: valid entries in the descriptor array.
3060 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); 3126 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3061 __ mul(r3, r3, ip); 3127 __ Mul(r6, r6, ip);
3062 // Calculate location of the first key name. 3128 // Calculate location of the first key name.
3063 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); 3129 __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3064 // Calculate the end of the descriptor array. 3130 // Calculate the end of the descriptor array.
3065 __ mov(r2, r4); 3131 __ mr(r5, r7);
3066 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2)); 3132 __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
3133 __ add(r5, r5, ip);
3067 3134
3068 // Loop through all the keys in the descriptor array. If one of these is the 3135 // Loop through all the keys in the descriptor array. If one of these is the
3069 // string "valueOf" the result is false. 3136 // string "valueOf" the result is false.
3070 // The use of ip to store the valueOf string assumes that it is not otherwise 3137 // The use of ip to store the valueOf string assumes that it is not otherwise
3071 // used in the loop below. 3138 // used in the loop below.
3072 __ mov(ip, Operand(isolate()->factory()->value_of_string())); 3139 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3073 __ jmp(&entry); 3140 __ b(&entry);
3074 __ bind(&loop); 3141 __ bind(&loop);
3075 __ ldr(r3, MemOperand(r4, 0)); 3142 __ LoadP(r6, MemOperand(r7, 0));
3076 __ cmp(r3, ip); 3143 __ cmp(r6, ip);
3077 __ b(eq, if_false); 3144 __ beq(if_false);
3078 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); 3145 __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3079 __ bind(&entry); 3146 __ bind(&entry);
3080 __ cmp(r4, Operand(r2)); 3147 __ cmp(r7, r5);
3081 __ b(ne, &loop); 3148 __ bne(&loop);
3082 3149
3083 __ bind(&done); 3150 __ bind(&done);
3084 3151
3085 // Set the bit in the map to indicate that there is no local valueOf field. 3152 // Set the bit in the map to indicate that there is no local valueOf field.
3086 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 3153 __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3087 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3154 __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3088 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 3155 __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3089 3156
3090 __ bind(&skip_lookup); 3157 __ bind(&skip_lookup);
3091 3158
3092 // If a valueOf property is not found on the object check that its 3159 // If a valueOf property is not found on the object check that its
3093 // prototype is the unmodified String prototype. If not, the result is false. 3160 // prototype is the unmodified String prototype. If not, the result is false.
3094 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); 3161 __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
3095 __ JumpIfSmi(r2, if_false); 3162 __ JumpIfSmi(r5, if_false);
3096 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 3163 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3097 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3164 __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3098 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); 3165 __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
3099 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); 3166 __ LoadP(r6,
3100 __ cmp(r2, r3); 3167 ContextOperand(r6, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3168 __ cmp(r5, r6);
3101 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3169 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3102 Split(eq, if_true, if_false, fall_through); 3170 Split(eq, if_true, if_false, fall_through);
3103 3171
3104 context()->Plug(if_true, if_false); 3172 context()->Plug(if_true, if_false);
3105 } 3173 }
3106 3174
3107 3175
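The descriptor-array loop above strides kDescriptorSize pointers per entry; flattening that layout detail away, the scan asks one question: does any of the first NumberOfOwnDescriptors keys equal "valueOf"? Sketch with std types:

    #include <string>
    #include <vector>

    bool HasOwnValueOf(const std::vector<std::string>& keys,
                       size_t valid_entries) {
      for (size_t i = 0; i < valid_entries; ++i) {
        if (keys[i] == "valueOf") return true;  // the branch to if_false above
      }
      return false;
    }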
3108 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 3176 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3109 ZoneList<Expression*>* args = expr->arguments(); 3177 ZoneList<Expression*>* args = expr->arguments();
3110 DCHECK(args->length() == 1); 3178 DCHECK(args->length() == 1);
3111 3179
3112 VisitForAccumulatorValue(args->at(0)); 3180 VisitForAccumulatorValue(args->at(0));
3113 3181
3114 Label materialize_true, materialize_false; 3182 Label materialize_true, materialize_false;
3115 Label* if_true = NULL; 3183 Label* if_true = NULL;
3116 Label* if_false = NULL; 3184 Label* if_false = NULL;
3117 Label* fall_through = NULL; 3185 Label* fall_through = NULL;
3118 context()->PrepareTest(&materialize_true, &materialize_false, 3186 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3119 &if_true, &if_false, &fall_through); 3187 &if_false, &fall_through);
3120 3188
3121 __ JumpIfSmi(r0, if_false); 3189 __ JumpIfSmi(r3, if_false);
3122 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 3190 __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
3123 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3191 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3124 Split(eq, if_true, if_false, fall_through); 3192 Split(eq, if_true, if_false, fall_through);
3125 3193
3126 context()->Plug(if_true, if_false); 3194 context()->Plug(if_true, if_false);
3127 } 3195 }
3128 3196
3129 3197
3130 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 3198 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3131 ZoneList<Expression*>* args = expr->arguments(); 3199 ZoneList<Expression*>* args = expr->arguments();
3132 DCHECK(args->length() == 1); 3200 DCHECK(args->length() == 1);
3133 3201
3134 VisitForAccumulatorValue(args->at(0)); 3202 VisitForAccumulatorValue(args->at(0));
3135 3203
3136 Label materialize_true, materialize_false; 3204 Label materialize_true, materialize_false;
3137 Label* if_true = NULL; 3205 Label* if_true = NULL;
3138 Label* if_false = NULL; 3206 Label* if_false = NULL;
3139 Label* fall_through = NULL; 3207 Label* fall_through = NULL;
3140 context()->PrepareTest(&materialize_true, &materialize_false, 3208 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3141 &if_true, &if_false, &fall_through); 3209 &if_false, &fall_through);
3142 3210
3143 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); 3211 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3144 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 3212 #if V8_TARGET_ARCH_PPC64
3145 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 3213 __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3146 __ cmp(r2, Operand(0x80000000)); 3214 __ li(r5, Operand(1));
3147 __ cmp(r1, Operand(0x00000000), eq); 3215 __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
3216 __ cmp(r4, r5);
3217 #else
3218 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3219 __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3220 Label skip;
3221 __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3222 __ cmp(r5, r0);
3223 __ bne(&skip);
3224 __ cmpi(r4, Operand::Zero());
3225 __ bind(&skip);
3226 #endif
3148 3227
3149 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3228 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3150 Split(eq, if_true, if_false, fall_through); 3229 Split(eq, if_true, if_false, fall_through);
3151 3230
3152 context()->Plug(if_true, if_false); 3231 context()->Plug(if_true, if_false);
3153 } 3232 }
3154 3233
3155 3234
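Both the 64-bit path (a single doubleword compare) and the 32-bit path (exponent word 0x80000000, mantissa word 0) test the same fact: -0.0 is the unique double whose bit pattern is the sign bit alone. A portable C++ equivalent:

    #include <cstdint>
    #include <cstring>

    bool IsMinusZero(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);
      return bits == 0x8000000000000000ULL;
    }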
3156 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 3235 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3157 ZoneList<Expression*>* args = expr->arguments(); 3236 ZoneList<Expression*>* args = expr->arguments();
3158 DCHECK(args->length() == 1); 3237 DCHECK(args->length() == 1);
3159 3238
3160 VisitForAccumulatorValue(args->at(0)); 3239 VisitForAccumulatorValue(args->at(0));
3161 3240
3162 Label materialize_true, materialize_false; 3241 Label materialize_true, materialize_false;
3163 Label* if_true = NULL; 3242 Label* if_true = NULL;
3164 Label* if_false = NULL; 3243 Label* if_false = NULL;
3165 Label* fall_through = NULL; 3244 Label* fall_through = NULL;
3166 context()->PrepareTest(&materialize_true, &materialize_false, 3245 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3167 &if_true, &if_false, &fall_through); 3246 &if_false, &fall_through);
3168 3247
3169 __ JumpIfSmi(r0, if_false); 3248 __ JumpIfSmi(r3, if_false);
3170 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 3249 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3171 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3250 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3172 Split(eq, if_true, if_false, fall_through); 3251 Split(eq, if_true, if_false, fall_through);
3173 3252
3174 context()->Plug(if_true, if_false); 3253 context()->Plug(if_true, if_false);
3175 } 3254 }
3176 3255
3177 3256
3178 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 3257 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3179 ZoneList<Expression*>* args = expr->arguments(); 3258 ZoneList<Expression*>* args = expr->arguments();
3180 DCHECK(args->length() == 1); 3259 DCHECK(args->length() == 1);
3181 3260
3182 VisitForAccumulatorValue(args->at(0)); 3261 VisitForAccumulatorValue(args->at(0));
3183 3262
3184 Label materialize_true, materialize_false; 3263 Label materialize_true, materialize_false;
3185 Label* if_true = NULL; 3264 Label* if_true = NULL;
3186 Label* if_false = NULL; 3265 Label* if_false = NULL;
3187 Label* fall_through = NULL; 3266 Label* fall_through = NULL;
3188 context()->PrepareTest(&materialize_true, &materialize_false, 3267 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3189 &if_true, &if_false, &fall_through); 3268 &if_false, &fall_through);
3190 3269
3191 __ JumpIfSmi(r0, if_false); 3270 __ JumpIfSmi(r3, if_false);
3192 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 3271 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3193 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3272 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3194 Split(eq, if_true, if_false, fall_through); 3273 Split(eq, if_true, if_false, fall_through);
3195 3274
3196 context()->Plug(if_true, if_false); 3275 context()->Plug(if_true, if_false);
3197 } 3276 }
3198 3277
3199 3278
3200
3201 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 3279 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3202 DCHECK(expr->arguments()->length() == 0); 3280 DCHECK(expr->arguments()->length() == 0);
3203 3281
3204 Label materialize_true, materialize_false; 3282 Label materialize_true, materialize_false;
3205 Label* if_true = NULL; 3283 Label* if_true = NULL;
3206 Label* if_false = NULL; 3284 Label* if_false = NULL;
3207 Label* fall_through = NULL; 3285 Label* fall_through = NULL;
3208 context()->PrepareTest(&materialize_true, &materialize_false, 3286 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3209 &if_true, &if_false, &fall_through); 3287 &if_false, &fall_through);
3210 3288
3211 // Get the frame pointer for the calling frame. 3289 // Get the frame pointer for the calling frame.
3212 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3290 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3213 3291
3214 // Skip the arguments adaptor frame if it exists. 3292 // Skip the arguments adaptor frame if it exists.
3215 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3293 Label check_frame_marker;
3216 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3294 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
3217 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq); 3295 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3296 __ bne(&check_frame_marker);
3297 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
3218 3298
3219 // Check the marker in the calling frame. 3299 // Check the marker in the calling frame.
3220 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); 3300 __ bind(&check_frame_marker);
3221 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 3301 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
3302 STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
3303 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
3222 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3304 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3223 Split(eq, if_true, if_false, fall_through); 3305 Split(eq, if_true, if_false, fall_through);
3224 3306
3225 context()->Plug(if_true, if_false); 3307 context()->Plug(if_true, if_false);
3226 } 3308 }
3227 3309
3228 3310
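The construct-call test walks one frame up, skips an arguments-adaptor frame if one is present, then compares the frame marker. The same walk over a toy frame struct (field names are stand-ins for the StandardFrameConstants offsets, not real V8 declarations):

    #include <cstdint>

    struct Frame {
      Frame* caller_fp;  // StandardFrameConstants::kCallerFPOffset
      intptr_t context;  // kContextOffset; holds the adaptor sentinel in adaptors
      intptr_t marker;   // kMarkerOffset
    };

    bool IsConstructCall(const Frame* fp, intptr_t kAdaptorSentinel,
                         intptr_t kConstructMarker) {
      const Frame* caller = fp->caller_fp;
      if (caller->context == kAdaptorSentinel)
        caller = caller->caller_fp;  // skip the arguments adaptor frame
      return caller->marker == kConstructMarker;
    }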
3229 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 3311 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3230 ZoneList<Expression*>* args = expr->arguments(); 3312 ZoneList<Expression*>* args = expr->arguments();
3231 DCHECK(args->length() == 2); 3313 DCHECK(args->length() == 2);
3232 3314
3233 // Load the two objects into registers and perform the comparison. 3315 // Load the two objects into registers and perform the comparison.
3234 VisitForStackValue(args->at(0)); 3316 VisitForStackValue(args->at(0));
3235 VisitForAccumulatorValue(args->at(1)); 3317 VisitForAccumulatorValue(args->at(1));
3236 3318
3237 Label materialize_true, materialize_false; 3319 Label materialize_true, materialize_false;
3238 Label* if_true = NULL; 3320 Label* if_true = NULL;
3239 Label* if_false = NULL; 3321 Label* if_false = NULL;
3240 Label* fall_through = NULL; 3322 Label* fall_through = NULL;
3241 context()->PrepareTest(&materialize_true, &materialize_false, 3323 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3242 &if_true, &if_false, &fall_through); 3324 &if_false, &fall_through);
3243 3325
3244 __ pop(r1); 3326 __ pop(r4);
3245 __ cmp(r0, r1); 3327 __ cmp(r3, r4);
3246 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3328 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3247 Split(eq, if_true, if_false, fall_through); 3329 Split(eq, if_true, if_false, fall_through);
3248 3330
3249 context()->Plug(if_true, if_false); 3331 context()->Plug(if_true, if_false);
3250 } 3332 }
3251 3333
3252 3334
3253 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3335 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3254 ZoneList<Expression*>* args = expr->arguments(); 3336 ZoneList<Expression*>* args = expr->arguments();
3255 DCHECK(args->length() == 1); 3337 DCHECK(args->length() == 1);
3256 3338
3257 // ArgumentsAccessStub expects the key in r1 and the formal 3339 // ArgumentsAccessStub expects the key in r4 and the formal
3258 // parameter count in r0. 3340 // parameter count in r3.
3259 VisitForAccumulatorValue(args->at(0)); 3341 VisitForAccumulatorValue(args->at(0));
3260 __ mov(r1, r0); 3342 __ mr(r4, r3);
3261 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3343 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3262 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); 3344 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3263 __ CallStub(&stub); 3345 __ CallStub(&stub);
3264 context()->Plug(r0); 3346 context()->Plug(r3);
3265 } 3347 }
3266 3348
3267 3349
3268 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 3350 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3269 DCHECK(expr->arguments()->length() == 0); 3351 DCHECK(expr->arguments()->length() == 0);
3270 3352 Label exit;
3271 // Get the number of formal parameters. 3353 // Get the number of formal parameters.
3272 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3354 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3273 3355
3274 // Check if the calling frame is an arguments adaptor frame. 3356 // Check if the calling frame is an arguments adaptor frame.
3275 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3357 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3276 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3358 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
3277 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3359 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3360 __ bne(&exit);
3278 3361
3279 // Arguments adaptor case: Read the arguments length from the 3362 // Arguments adaptor case: Read the arguments length from the
3280 // adaptor frame. 3363 // adaptor frame.
3281 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq); 3364 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
3282 3365
3283 context()->Plug(r0); 3366 __ bind(&exit);
3367 context()->Plug(r3);
3284 } 3368 }
3285 3369
3286 3370
3287 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 3371 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3288 ZoneList<Expression*>* args = expr->arguments(); 3372 ZoneList<Expression*>* args = expr->arguments();
3289 DCHECK(args->length() == 1); 3373 DCHECK(args->length() == 1);
3290 Label done, null, function, non_function_constructor; 3374 Label done, null, function, non_function_constructor;
3291 3375
3292 VisitForAccumulatorValue(args->at(0)); 3376 VisitForAccumulatorValue(args->at(0));
3293 3377
3294 // If the object is a smi, we return null. 3378 // If the object is a smi, we return null.
3295 __ JumpIfSmi(r0, &null); 3379 __ JumpIfSmi(r3, &null);
3296 3380
3297 // Check that the object is a JS object but take special care of JS 3381 // Check that the object is a JS object but take special care of JS
3298 // functions to make sure they have 'Function' as their class. 3382 // functions to make sure they have 'Function' as their class.
3299 // Assume that there are only two callable types, and one of them is at 3383 // Assume that there are only two callable types, and one of them is at
3300 // either end of the type range for JS object types. Saves extra comparisons. 3384 // either end of the type range for JS object types. Saves extra comparisons.
3301 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 3385 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3302 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE); 3386 __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
3303 // Map is now in r0. 3387 // Map is now in r3.
3304 __ b(lt, &null); 3388 __ blt(&null);
3305 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == 3389 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3306 FIRST_SPEC_OBJECT_TYPE + 1); 3390 FIRST_SPEC_OBJECT_TYPE + 1);
3307 __ b(eq, &function); 3391 __ beq(&function);
3308 3392
3309 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); 3393 __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
3310 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == 3394 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1);
3311 LAST_SPEC_OBJECT_TYPE - 1); 3395 __ beq(&function);
3312 __ b(eq, &function);
3313 // Assume that there is no larger type. 3396 // Assume that there is no larger type.
3314 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); 3397 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3315 3398
3316 // Check if the constructor in the map is a JS function. 3399 // Check if the constructor in the map is a JS function.
3317 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); 3400 __ LoadP(r3, FieldMemOperand(r3, Map::kConstructorOffset));
3318 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3401 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
3319 __ b(ne, &non_function_constructor); 3402 __ bne(&non_function_constructor);
3320 3403
3321 // r0 now contains the constructor function. Grab the 3404 // r3 now contains the constructor function. Grab the
3322 // instance class name from there. 3405 // instance class name from there.
3323 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 3406 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3324 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); 3407 __ LoadP(r3,
3408 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3325 __ b(&done); 3409 __ b(&done);
3326 3410
3327 // Functions have class 'Function'. 3411 // Functions have class 'Function'.
3328 __ bind(&function); 3412 __ bind(&function);
3329 __ LoadRoot(r0, Heap::kFunction_stringRootIndex); 3413 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3330 __ jmp(&done); 3414 __ b(&done);
3331 3415
3332 // Objects with a non-function constructor have class 'Object'. 3416 // Objects with a non-function constructor have class 'Object'.
3333 __ bind(&non_function_constructor); 3417 __ bind(&non_function_constructor);
3334 __ LoadRoot(r0, Heap::kObject_stringRootIndex); 3418 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3335 __ jmp(&done); 3419 __ b(&done);
3336 3420
3337 // Non-JS objects have class null. 3421 // Non-JS objects have class null.
3338 __ bind(&null); 3422 __ bind(&null);
3339 __ LoadRoot(r0, Heap::kNullValueRootIndex); 3423 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3340 3424
3341 // All done. 3425 // All done.
3342 __ bind(&done); 3426 __ bind(&done);
3343 3427
3344 context()->Plug(r0); 3428 context()->Plug(r3);
3345 } 3429 }
3346 3430
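The range check above leans on a layout guarantee that is easy to miss: V8 places its only two callable spec-object types at the two ends of the spec-object range, so classifying an object needs just one lower-bound check plus two equality checks. A minimal C++ sketch of the same decision tree, with made-up values standing in for V8's real instance types:

  // Illustrative layout only; the STATIC_ASSERTs above pin down the real one.
  enum FakeInstanceType {
    FIRST_SPEC_OBJECT = 100,  // callable, low end of the spec-object range
    FIRST_NONCALLABLE = 101,  // == FIRST_SPEC_OBJECT + 1
    LAST_NONCALLABLE = 105,   // == LAST_SPEC_OBJECT - 1
    LAST_SPEC_OBJECT = 106    // callable, high end; also LAST_TYPE
  };

  const char* ClassOfSketch(int type) {
    if (type < FIRST_SPEC_OBJECT) return "null";       // blt(&null)
    if (type == FIRST_SPEC_OBJECT) return "Function";  // first beq(&function)
    if (type == LAST_SPEC_OBJECT) return "Function";   // cmpi + beq(&function)
    return "<constructor's instance class name>";      // fall-through path
  }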
3347 3431
3348 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3432 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3349 // Load the arguments on the stack and call the stub. 3433 // Load the arguments on the stack and call the stub.
3350 SubStringStub stub(isolate()); 3434 SubStringStub stub(isolate());
3351 ZoneList<Expression*>* args = expr->arguments(); 3435 ZoneList<Expression*>* args = expr->arguments();
3352 DCHECK(args->length() == 3); 3436 DCHECK(args->length() == 3);
3353 VisitForStackValue(args->at(0)); 3437 VisitForStackValue(args->at(0));
3354 VisitForStackValue(args->at(1)); 3438 VisitForStackValue(args->at(1));
3355 VisitForStackValue(args->at(2)); 3439 VisitForStackValue(args->at(2));
3356 __ CallStub(&stub); 3440 __ CallStub(&stub);
3357 context()->Plug(r0); 3441 context()->Plug(r3);
3358 } 3442 }
3359 3443
3360 3444
3361 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3445 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3362 // Load the arguments on the stack and call the stub. 3446 // Load the arguments on the stack and call the stub.
3363 RegExpExecStub stub(isolate()); 3447 RegExpExecStub stub(isolate());
3364 ZoneList<Expression*>* args = expr->arguments(); 3448 ZoneList<Expression*>* args = expr->arguments();
3365 DCHECK(args->length() == 4); 3449 DCHECK(args->length() == 4);
3366 VisitForStackValue(args->at(0)); 3450 VisitForStackValue(args->at(0));
3367 VisitForStackValue(args->at(1)); 3451 VisitForStackValue(args->at(1));
3368 VisitForStackValue(args->at(2)); 3452 VisitForStackValue(args->at(2));
3369 VisitForStackValue(args->at(3)); 3453 VisitForStackValue(args->at(3));
3370 __ CallStub(&stub); 3454 __ CallStub(&stub);
3371 context()->Plug(r0); 3455 context()->Plug(r3);
3372 } 3456 }
3373 3457
3374 3458
3375 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3459 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3376 ZoneList<Expression*>* args = expr->arguments(); 3460 ZoneList<Expression*>* args = expr->arguments();
3377 DCHECK(args->length() == 1); 3461 DCHECK(args->length() == 1);
3378 VisitForAccumulatorValue(args->at(0)); // Load the object. 3462 VisitForAccumulatorValue(args->at(0)); // Load the object.
3379 3463
3380 Label done; 3464 Label done;
3381 // If the object is a smi return the object. 3465 // If the object is a smi return the object.
3382 __ JumpIfSmi(r0, &done); 3466 __ JumpIfSmi(r3, &done);
3383 // If the object is not a value type, return the object. 3467 // If the object is not a value type, return the object.
3384 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 3468 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3385 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq); 3469 __ bne(&done);
3470 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3386 3471
3387 __ bind(&done); 3472 __ bind(&done);
3388 context()->Plug(r0); 3473 context()->Plug(r3);
3389 } 3474 }
3390 3475
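For readers new to JSValue wrappers: %_ValueOf leaves smis and ordinary objects untouched and only unwraps wrapper objects such as new Number(1). A toy sketch of the three-way control flow above (the struct is invented for illustration; real V8 uses maps and tag bits):

  struct Tagged {
    bool is_smi;
    bool is_js_value;  // a wrapper like new Number(1) or new String("x")
    Tagged* wrapped;   // meaningful only when is_js_value is true
  };

  Tagged* ValueOfSketch(Tagged* obj) {
    if (obj->is_smi) return obj;        // JumpIfSmi(r3, &done)
    if (!obj->is_js_value) return obj;  // bne(&done) after the type check
    return obj->wrapped;                // LoadP(JSValue::kValueOffset)
  }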
3391 3476
3392 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3477 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3393 ZoneList<Expression*>* args = expr->arguments(); 3478 ZoneList<Expression*>* args = expr->arguments();
3394 DCHECK(args->length() == 2); 3479 DCHECK(args->length() == 2);
3395 DCHECK_NE(NULL, args->at(1)->AsLiteral()); 3480 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3396 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3481 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3397 3482
3398 VisitForAccumulatorValue(args->at(0)); // Load the object. 3483 VisitForAccumulatorValue(args->at(0)); // Load the object.
3399 3484
3400 Label runtime, done, not_date_object; 3485 Label runtime, done, not_date_object;
3401 Register object = r0; 3486 Register object = r3;
3402 Register result = r0; 3487 Register result = r3;
3403 Register scratch0 = r9; 3488 Register scratch0 = r11;
3404 Register scratch1 = r1; 3489 Register scratch1 = r4;
3405 3490
3406 __ JumpIfSmi(object, &not_date_object); 3491 __ JumpIfSmi(object, &not_date_object);
3407 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE); 3492 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3408 __ b(ne, &not_date_object); 3493 __ bne(&not_date_object);
3409 3494
3410 if (index->value() == 0) { 3495 if (index->value() == 0) {
3411 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); 3496 __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
3412 __ jmp(&done); 3497 __ b(&done);
3413 } else { 3498 } else {
3414 if (index->value() < JSDate::kFirstUncachedField) { 3499 if (index->value() < JSDate::kFirstUncachedField) {
3415 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 3500 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3416 __ mov(scratch1, Operand(stamp)); 3501 __ mov(scratch1, Operand(stamp));
3417 __ ldr(scratch1, MemOperand(scratch1)); 3502 __ LoadP(scratch1, MemOperand(scratch1));
3418 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); 3503 __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3419 __ cmp(scratch1, scratch0); 3504 __ cmp(scratch1, scratch0);
3420 __ b(ne, &runtime); 3505 __ bne(&runtime);
3421 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + 3506 __ LoadP(result,
3422 kPointerSize * index->value())); 3507 FieldMemOperand(object, JSDate::kValueOffset +
3423 __ jmp(&done); 3508 kPointerSize * index->value()),
3509 scratch0);
3510 __ b(&done);
3424 } 3511 }
3425 __ bind(&runtime); 3512 __ bind(&runtime);
3426 __ PrepareCallCFunction(2, scratch1); 3513 __ PrepareCallCFunction(2, scratch1);
3427 __ mov(r1, Operand(index)); 3514 __ LoadSmiLiteral(r4, index);
3428 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 3515 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3429 __ jmp(&done); 3516 __ b(&done);
3430 } 3517 }
3431 3518
3432 __ bind(&not_date_object); 3519 __ bind(&not_date_object);
3433 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3520 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3434 __ bind(&done); 3521 __ bind(&done);
3435 context()->Plug(r0); 3522 context()->Plug(r3);
3436 } 3523 }
3437 3524
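The fast path above hinges on the date cache stamp: cached fields on a JSDate stay valid only while the object's stamp equals the isolate-wide stamp, which the runtime bumps whenever timezone or DST data may have changed. A hedged sketch of that protocol (the field layout and names are assumptions, not V8's exact ones):

  // Assumed layout: slot 0 is the raw time value, slots 1..7 are cached
  // fields; kFirstUncachedField is taken to be 8 here.
  struct JSDateSketch {
    double fields[8];
    int cache_stamp;  // copy of the isolate stamp at last recomputation
  };

  double GetDateFieldSketch(JSDateSketch* d, int index, int isolate_stamp,
                            double (*slow_path)(JSDateSketch*, int)) {
    if (index == 0) return d->fields[0];  // the value itself, never stale
    if (index < 8 && d->cache_stamp == isolate_stamp) {
      return d->fields[index];            // cache hit: one load, no call
    }
    return slow_path(d, index);           // get_date_field_function via C
  }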
3438 3525
3439 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3526 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3440 ZoneList<Expression*>* args = expr->arguments(); 3527 ZoneList<Expression*>* args = expr->arguments();
3441 DCHECK_EQ(3, args->length()); 3528 DCHECK_EQ(3, args->length());
3442 3529
3443 Register string = r0; 3530 Register string = r3;
3444 Register index = r1; 3531 Register index = r4;
3445 Register value = r2; 3532 Register value = r5;
3446 3533
3447 VisitForStackValue(args->at(0)); // index 3534 VisitForStackValue(args->at(0)); // index
3448 VisitForStackValue(args->at(1)); // value 3535 VisitForStackValue(args->at(1)); // value
3449 VisitForAccumulatorValue(args->at(2)); // string 3536 VisitForAccumulatorValue(args->at(2)); // string
3450 __ Pop(index, value); 3537 __ Pop(index, value);
3451 3538
3452 if (FLAG_debug_code) { 3539 if (FLAG_debug_code) {
3453 __ SmiTst(value); 3540 __ TestIfSmi(value, r0);
3454 __ Check(eq, kNonSmiValue); 3541 __ Check(eq, kNonSmiValue, cr0);
3455 __ SmiTst(index); 3542 __ TestIfSmi(index, r0);
3456 __ Check(eq, kNonSmiIndex); 3543 __ Check(eq, kNonSmiIndex, cr0);
3457 __ SmiUntag(index, index); 3544 __ SmiUntag(index, index);
3458 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 3545 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3459 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); 3546 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3460 __ SmiTag(index, index); 3547 __ SmiTag(index, index);
3461 } 3548 }
3462 3549
3463 __ SmiUntag(value, value); 3550 __ SmiUntag(value);
3464 __ add(ip, 3551 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3465 string, 3552 __ SmiToByteArrayOffset(r0, index);
3466 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3553 __ stbx(value, MemOperand(ip, r0));
3467 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3468 context()->Plug(string); 3554 context()->Plug(string);
3469 } 3555 }
3470 3556
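The main porting change in this function is addressing: ARM folded the smi-to-offset shift into the store's addressing mode (LSR kSmiTagSize), while PPC first materializes a byte offset with SmiToByteArrayOffset and then uses an indexed store (stbx). Assuming V8's usual smi encodings, the helper reduces to a single shift:

  #include <cstdint>

  // Sketch of SmiToByteArrayOffset under the assumed smi encodings:
  // 32-bit: smi == index << 1,  so the byte offset is smi >> 1.
  // 64-bit: smi == index << 32, so the byte offset is smi >> 32.
  intptr_t SmiToByteArrayOffsetSketch(intptr_t smi) {
  #if defined(V8_TARGET_ARCH_PPC64)
    return smi >> 32;  // kSmiShift == 32
  #else
    return smi >> 1;   // kSmiTagSize == 1
  #endif
  }

The two-byte variant below uses SmiToShortArrayOffset, which shifts one bit less so the resulting offset is scaled by the 2-byte element size.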
3471 3557
3472 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3558 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3473 ZoneList<Expression*>* args = expr->arguments(); 3559 ZoneList<Expression*>* args = expr->arguments();
3474 DCHECK_EQ(3, args->length()); 3560 DCHECK_EQ(3, args->length());
3475 3561
3476 Register string = r0; 3562 Register string = r3;
3477 Register index = r1; 3563 Register index = r4;
3478 Register value = r2; 3564 Register value = r5;
3479 3565
3480 VisitForStackValue(args->at(0)); // index 3566 VisitForStackValue(args->at(0)); // index
3481 VisitForStackValue(args->at(1)); // value 3567 VisitForStackValue(args->at(1)); // value
3482 VisitForAccumulatorValue(args->at(2)); // string 3568 VisitForAccumulatorValue(args->at(2)); // string
3483 __ Pop(index, value); 3569 __ Pop(index, value);
3484 3570
3485 if (FLAG_debug_code) { 3571 if (FLAG_debug_code) {
3486 __ SmiTst(value); 3572 __ TestIfSmi(value, r0);
3487 __ Check(eq, kNonSmiValue); 3573 __ Check(eq, kNonSmiValue, cr0);
3488 __ SmiTst(index); 3574 __ TestIfSmi(index, r0);
3489 __ Check(eq, kNonSmiIndex); 3575 __ Check(eq, kNonSmiIndex, cr0);
3490 __ SmiUntag(index, index); 3576 __ SmiUntag(index, index);
3491 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 3577 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3492 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); 3578 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3493 __ SmiTag(index, index); 3579 __ SmiTag(index, index);
3494 } 3580 }
3495 3581
3496 __ SmiUntag(value, value); 3582 __ SmiUntag(value);
3497 __ add(ip, 3583 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3498 string, 3584 __ SmiToShortArrayOffset(r0, index);
3499 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 3585 __ sthx(value, MemOperand(ip, r0));
3500 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3501 __ strh(value, MemOperand(ip, index));
3502 context()->Plug(string); 3586 context()->Plug(string);
3503 } 3587 }
3504 3588
3505 3589
3506
3507 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3590 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3508 // Load the arguments on the stack and call the runtime function. 3591 // Load the arguments on the stack and call the runtime function.
3509 ZoneList<Expression*>* args = expr->arguments(); 3592 ZoneList<Expression*>* args = expr->arguments();
3510 DCHECK(args->length() == 2); 3593 DCHECK(args->length() == 2);
3511 VisitForStackValue(args->at(0)); 3594 VisitForStackValue(args->at(0));
3512 VisitForStackValue(args->at(1)); 3595 VisitForStackValue(args->at(1));
3513 MathPowStub stub(isolate(), MathPowStub::ON_STACK); 3596 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3514 __ CallStub(&stub); 3597 __ CallStub(&stub);
3515 context()->Plug(r0); 3598 context()->Plug(r3);
3516 } 3599 }
3517 3600
3518 3601
3519 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3602 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3520 ZoneList<Expression*>* args = expr->arguments(); 3603 ZoneList<Expression*>* args = expr->arguments();
3521 DCHECK(args->length() == 2); 3604 DCHECK(args->length() == 2);
3522 VisitForStackValue(args->at(0)); // Load the object. 3605 VisitForStackValue(args->at(0)); // Load the object.
3523 VisitForAccumulatorValue(args->at(1)); // Load the value. 3606 VisitForAccumulatorValue(args->at(1)); // Load the value.
3524 __ pop(r1); // r0 = value. r1 = object. 3607 __ pop(r4); // r3 = value. r4 = object.
3525 3608
3526 Label done; 3609 Label done;
3527 // If the object is a smi, return the value. 3610 // If the object is a smi, return the value.
3528 __ JumpIfSmi(r1, &done); 3611 __ JumpIfSmi(r4, &done);
3529 3612
3530 // If the object is not a value type, return the value. 3613 // If the object is not a value type, return the value.
3531 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); 3614 __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
3532 __ b(ne, &done); 3615 __ bne(&done);
3533 3616
3534 // Store the value. 3617 // Store the value.
3535 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); 3618 __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
3536 // Update the write barrier. Save the value as it will be 3619 // Update the write barrier. Save the value as it will be
3537 // overwritten by the write barrier code and is needed afterward. 3620 // overwritten by the write barrier code and is needed afterward.
3538 __ mov(r2, r0); 3621 __ mr(r5, r3);
3539 __ RecordWriteField( 3622 __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
3540 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 3623 kDontSaveFPRegs);
3541 3624
3542 __ bind(&done); 3625 __ bind(&done);
3543 context()->Plug(r0); 3626 context()->Plug(r3);
3544 } 3627 }
3545 3628
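The StoreP/RecordWriteField pair is the standard generational write-barrier idiom: once a heap pointer is written into an object, the collector must learn about the possible old-to-new edge before the next scavenge. A toy model of the invariant being maintained (the remembered-set API here is invented; V8's real barrier also filters by page flags):

  #include <unordered_set>

  struct HeapSketch {
    std::unordered_set<void**> remembered;  // slots to rescan at GC time
    bool InNewSpace(void* p) {
      return false;  // stub; the real check tests the page the pointer is on
    }
  };

  void BarrieredStore(HeapSketch* heap, void** slot, void* value) {
    *slot = value;                    // the StoreP above
    if (heap->InNewSpace(value)) {
      heap->remembered.insert(slot);  // RecordWriteField's bookkeeping
    }
  }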
3546 3629
3547 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3630 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3548 ZoneList<Expression*>* args = expr->arguments(); 3631 ZoneList<Expression*>* args = expr->arguments();
3549 DCHECK_EQ(args->length(), 1); 3632 DCHECK_EQ(args->length(), 1);
3550 // Load the argument into r0 and call the stub. 3633 // Load the argument into r3 and call the stub.
3551 VisitForAccumulatorValue(args->at(0)); 3634 VisitForAccumulatorValue(args->at(0));
3552 3635
3553 NumberToStringStub stub(isolate()); 3636 NumberToStringStub stub(isolate());
3554 __ CallStub(&stub); 3637 __ CallStub(&stub);
3555 context()->Plug(r0); 3638 context()->Plug(r3);
3556 } 3639 }
3557 3640
3558 3641
3559 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3642 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3560 ZoneList<Expression*>* args = expr->arguments(); 3643 ZoneList<Expression*>* args = expr->arguments();
3561 DCHECK(args->length() == 1); 3644 DCHECK(args->length() == 1);
3562 VisitForAccumulatorValue(args->at(0)); 3645 VisitForAccumulatorValue(args->at(0));
3563 3646
3564 Label done; 3647 Label done;
3565 StringCharFromCodeGenerator generator(r0, r1); 3648 StringCharFromCodeGenerator generator(r3, r4);
3566 generator.GenerateFast(masm_); 3649 generator.GenerateFast(masm_);
3567 __ jmp(&done); 3650 __ b(&done);
3568 3651
3569 NopRuntimeCallHelper call_helper; 3652 NopRuntimeCallHelper call_helper;
3570 generator.GenerateSlow(masm_, call_helper); 3653 generator.GenerateSlow(masm_, call_helper);
3571 3654
3572 __ bind(&done); 3655 __ bind(&done);
3573 context()->Plug(r1); 3656 context()->Plug(r4);
3574 } 3657 }
3575 3658
3576 3659
3577 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3660 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3578 ZoneList<Expression*>* args = expr->arguments(); 3661 ZoneList<Expression*>* args = expr->arguments();
3579 DCHECK(args->length() == 2); 3662 DCHECK(args->length() == 2);
3580 VisitForStackValue(args->at(0)); 3663 VisitForStackValue(args->at(0));
3581 VisitForAccumulatorValue(args->at(1)); 3664 VisitForAccumulatorValue(args->at(1));
3582 3665
3583 Register object = r1; 3666 Register object = r4;
3584 Register index = r0; 3667 Register index = r3;
3585 Register result = r3; 3668 Register result = r6;
3586 3669
3587 __ pop(object); 3670 __ pop(object);
3588 3671
3589 Label need_conversion; 3672 Label need_conversion;
3590 Label index_out_of_range; 3673 Label index_out_of_range;
3591 Label done; 3674 Label done;
3592 StringCharCodeAtGenerator generator(object, 3675 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
3593 index, 3676 &need_conversion, &index_out_of_range,
3594 result,
3595 &need_conversion,
3596 &need_conversion,
3597 &index_out_of_range,
3598 STRING_INDEX_IS_NUMBER); 3677 STRING_INDEX_IS_NUMBER);
3599 generator.GenerateFast(masm_); 3678 generator.GenerateFast(masm_);
3600 __ jmp(&done); 3679 __ b(&done);
3601 3680
3602 __ bind(&index_out_of_range); 3681 __ bind(&index_out_of_range);
3603 // When the index is out of range, the spec requires us to return 3682 // When the index is out of range, the spec requires us to return
3604 // NaN. 3683 // NaN.
3605 __ LoadRoot(result, Heap::kNanValueRootIndex); 3684 __ LoadRoot(result, Heap::kNanValueRootIndex);
3606 __ jmp(&done); 3685 __ b(&done);
3607 3686
3608 __ bind(&need_conversion); 3687 __ bind(&need_conversion);
3609 // Load the undefined value into the result register, which will 3688 // Load the undefined value into the result register, which will
3610 // trigger conversion. 3689 // trigger conversion.
3611 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 3690 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3612 __ jmp(&done); 3691 __ b(&done);
3613 3692
3614 NopRuntimeCallHelper call_helper; 3693 NopRuntimeCallHelper call_helper;
3615 generator.GenerateSlow(masm_, call_helper); 3694 generator.GenerateSlow(masm_, call_helper);
3616 3695
3617 __ bind(&done); 3696 __ bind(&done);
3618 context()->Plug(result); 3697 context()->Plug(result);
3619 } 3698 }
3620 3699
3621 3700
3622 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3701 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3623 ZoneList<Expression*>* args = expr->arguments(); 3702 ZoneList<Expression*>* args = expr->arguments();
3624 DCHECK(args->length() == 2); 3703 DCHECK(args->length() == 2);
3625 VisitForStackValue(args->at(0)); 3704 VisitForStackValue(args->at(0));
3626 VisitForAccumulatorValue(args->at(1)); 3705 VisitForAccumulatorValue(args->at(1));
3627 3706
3628 Register object = r1; 3707 Register object = r4;
3629 Register index = r0; 3708 Register index = r3;
3630 Register scratch = r3; 3709 Register scratch = r6;
3631 Register result = r0; 3710 Register result = r3;
3632 3711
3633 __ pop(object); 3712 __ pop(object);
3634 3713
3635 Label need_conversion; 3714 Label need_conversion;
3636 Label index_out_of_range; 3715 Label index_out_of_range;
3637 Label done; 3716 Label done;
3638 StringCharAtGenerator generator(object, 3717 StringCharAtGenerator generator(object, index, scratch, result,
3639 index, 3718 &need_conversion, &need_conversion,
3640 scratch, 3719 &index_out_of_range, STRING_INDEX_IS_NUMBER);
3641 result,
3642 &need_conversion,
3643 &need_conversion,
3644 &index_out_of_range,
3645 STRING_INDEX_IS_NUMBER);
3646 generator.GenerateFast(masm_); 3720 generator.GenerateFast(masm_);
3647 __ jmp(&done); 3721 __ b(&done);
3648 3722
3649 __ bind(&index_out_of_range); 3723 __ bind(&index_out_of_range);
3650 // When the index is out of range, the spec requires us to return 3724 // When the index is out of range, the spec requires us to return
3651 // the empty string. 3725 // the empty string.
3652 __ LoadRoot(result, Heap::kempty_stringRootIndex); 3726 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3653 __ jmp(&done); 3727 __ b(&done);
3654 3728
3655 __ bind(&need_conversion); 3729 __ bind(&need_conversion);
3656 // Move smi zero into the result register, which will trigger 3730 // Move smi zero into the result register, which will trigger
3657 // conversion. 3731 // conversion.
3658 __ mov(result, Operand(Smi::FromInt(0))); 3732 __ LoadSmiLiteral(result, Smi::FromInt(0));
3659 __ jmp(&done); 3733 __ b(&done);
3660 3734
3661 NopRuntimeCallHelper call_helper; 3735 NopRuntimeCallHelper call_helper;
3662 generator.GenerateSlow(masm_, call_helper); 3736 generator.GenerateSlow(masm_, call_helper);
3663 3737
3664 __ bind(&done); 3738 __ bind(&done);
3665 context()->Plug(result); 3739 context()->Plug(result);
3666 } 3740 }
3667 3741
3668 3742
3669 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3743 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3670 ZoneList<Expression*>* args = expr->arguments(); 3744 ZoneList<Expression*>* args = expr->arguments();
3671 DCHECK_EQ(2, args->length()); 3745 DCHECK_EQ(2, args->length());
3672 VisitForStackValue(args->at(0)); 3746 VisitForStackValue(args->at(0));
3673 VisitForAccumulatorValue(args->at(1)); 3747 VisitForAccumulatorValue(args->at(1));
3674 3748
3675 __ pop(r1); 3749 __ pop(r4);
3676 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); 3750 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3677 __ CallStub(&stub); 3751 __ CallStub(&stub);
3678 context()->Plug(r0); 3752 context()->Plug(r3);
3679 } 3753 }
3680 3754
3681 3755
3682 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3756 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3683 ZoneList<Expression*>* args = expr->arguments(); 3757 ZoneList<Expression*>* args = expr->arguments();
3684 DCHECK_EQ(2, args->length()); 3758 DCHECK_EQ(2, args->length());
3685 VisitForStackValue(args->at(0)); 3759 VisitForStackValue(args->at(0));
3686 VisitForStackValue(args->at(1)); 3760 VisitForStackValue(args->at(1));
3687 3761
3688 StringCompareStub stub(isolate()); 3762 StringCompareStub stub(isolate());
3689 __ CallStub(&stub); 3763 __ CallStub(&stub);
3690 context()->Plug(r0); 3764 context()->Plug(r3);
3691 } 3765 }
3692 3766
3693 3767
3694 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3768 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3695 ZoneList<Expression*>* args = expr->arguments(); 3769 ZoneList<Expression*>* args = expr->arguments();
3696 DCHECK(args->length() >= 2); 3770 DCHECK(args->length() >= 2);
3697 3771
3698 int arg_count = args->length() - 2; // 2 ~ receiver and function. 3772 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3699 for (int i = 0; i < arg_count + 1; i++) { 3773 for (int i = 0; i < arg_count + 1; i++) {
3700 VisitForStackValue(args->at(i)); 3774 VisitForStackValue(args->at(i));
3701 } 3775 }
3702 VisitForAccumulatorValue(args->last()); // Function. 3776 VisitForAccumulatorValue(args->last()); // Function.
3703 3777
3704 Label runtime, done; 3778 Label runtime, done;
3705 // Check for non-function argument (including proxy). 3779 // Check for non-function argument (including proxy).
3706 __ JumpIfSmi(r0, &runtime); 3780 __ JumpIfSmi(r3, &runtime);
3707 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3781 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
3708 __ b(ne, &runtime); 3782 __ bne(&runtime);
3709 3783
3710 // InvokeFunction requires the function in r1. Move it there. 3784 // InvokeFunction requires the function in r4. Move it there.
3711 __ mov(r1, result_register()); 3785 __ mr(r4, result_register());
3712 ParameterCount count(arg_count); 3786 ParameterCount count(arg_count);
3713 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper()); 3787 __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
3714 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3788 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3715 __ jmp(&done); 3789 __ b(&done);
3716 3790
3717 __ bind(&runtime); 3791 __ bind(&runtime);
3718 __ push(r0); 3792 __ push(r3);
3719 __ CallRuntime(Runtime::kCall, args->length()); 3793 __ CallRuntime(Runtime::kCall, args->length());
3720 __ bind(&done); 3794 __ bind(&done);
3721 3795
3722 context()->Plug(r0); 3796 context()->Plug(r3);
3723 } 3797 }
3724 3798
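%_CallFunction fast-paths only genuine JSFunctions; anything else, proxies included, is pushed back and handled by Runtime::kCall. In sketch form (all names here are invented for illustration):

  struct Obj { bool is_js_function; };

  Obj* CallFunctionSketch(Obj* target, Obj* args,
                          Obj* (*invoke)(Obj*, Obj*),          // fast path
                          Obj* (*runtime_call)(Obj*, Obj*)) {  // Runtime::kCall
    if (target->is_js_function) return invoke(target, args);
    return runtime_call(target, args);
  }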
3725 3799
3726 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 3800 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3727 RegExpConstructResultStub stub(isolate()); 3801 RegExpConstructResultStub stub(isolate());
3728 ZoneList<Expression*>* args = expr->arguments(); 3802 ZoneList<Expression*>* args = expr->arguments();
3729 DCHECK(args->length() == 3); 3803 DCHECK(args->length() == 3);
3730 VisitForStackValue(args->at(0)); 3804 VisitForStackValue(args->at(0));
3731 VisitForStackValue(args->at(1)); 3805 VisitForStackValue(args->at(1));
3732 VisitForAccumulatorValue(args->at(2)); 3806 VisitForAccumulatorValue(args->at(2));
3733 __ pop(r1); 3807 __ Pop(r5, r4);
3734 __ pop(r2);
3735 __ CallStub(&stub); 3808 __ CallStub(&stub);
3736 context()->Plug(r0); 3809 context()->Plug(r3);
3737 } 3810 }
3738 3811
3739 3812
3740 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 3813 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3741 ZoneList<Expression*>* args = expr->arguments(); 3814 ZoneList<Expression*>* args = expr->arguments();
3742 DCHECK_EQ(2, args->length()); 3815 DCHECK_EQ(2, args->length());
3743 DCHECK_NE(NULL, args->at(0)->AsLiteral()); 3816 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3744 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); 3817 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3745 3818
3746 Handle<FixedArray> jsfunction_result_caches( 3819 Handle<FixedArray> jsfunction_result_caches(
3747 isolate()->native_context()->jsfunction_result_caches()); 3820 isolate()->native_context()->jsfunction_result_caches());
3748 if (jsfunction_result_caches->length() <= cache_id) { 3821 if (jsfunction_result_caches->length() <= cache_id) {
3749 __ Abort(kAttemptToUseUndefinedCache); 3822 __ Abort(kAttemptToUseUndefinedCache);
3750 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 3823 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
3751 context()->Plug(r0); 3824 context()->Plug(r3);
3752 return; 3825 return;
3753 } 3826 }
3754 3827
3755 VisitForAccumulatorValue(args->at(1)); 3828 VisitForAccumulatorValue(args->at(1));
3756 3829
3757 Register key = r0; 3830 Register key = r3;
3758 Register cache = r1; 3831 Register cache = r4;
3759 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3832 __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3760 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); 3833 __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3761 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3834 __ LoadP(cache,
3762 __ ldr(cache, 3835 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3763 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3836 __ LoadP(cache,
3764 3837 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
3765 3838
3766 Label done, not_found; 3839 Label done, not_found;
3767 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); 3840 __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3768 // r2 now holds finger offset as a smi. 3841 // r5 now holds finger offset as a smi.
3769 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3842 __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3770 // r3 now points to the start of fixed array elements. 3843 // r6 now points to the start of fixed array elements.
3771 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex)); 3844 __ SmiToPtrArrayOffset(r5, r5);
3772 // Note side effect of PreIndex: r3 now points to the key of the pair. 3845 __ LoadPUX(r5, MemOperand(r6, r5));
3773 __ cmp(key, r2); 3846 // r6 now points to the key of the pair.
3774 __ b(ne, &not_found); 3847 __ cmp(key, r5);
3848 __ bne(&not_found);
3775 3849
3776 __ ldr(r0, MemOperand(r3, kPointerSize)); 3850 __ LoadP(r3, MemOperand(r6, kPointerSize));
3777 __ b(&done); 3851 __ b(&done);
3778 3852
3779 __ bind(&not_found); 3853 __ bind(&not_found);
3780 // Call runtime to perform the lookup. 3854 // Call runtime to perform the lookup.
3781 __ Push(cache, key); 3855 __ Push(cache, key);
3782 __ CallRuntime(Runtime::kGetFromCache, 2); 3856 __ CallRuntime(Runtime::kGetFromCache, 2);
3783 3857
3784 __ bind(&done); 3858 __ bind(&done);
3785 context()->Plug(r0); 3859 context()->Plug(r3);
3786 } 3860 }
3787 3861
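The JSFunctionResultCache probe above only looks at the pair under the cache's "finger", i.e. the most recently used entry; a miss falls through to the runtime, which does the full search and moves the finger. A sketch of the probe under the layout the loads imply (finger offset in a fixed slot, then key/value pairs):

  // cache[finger] holds a key and cache[finger + 1] its value (assumed).
  void* GetFromCacheSketch(void** cache, int finger, void* key,
                           void* (*runtime_lookup)(void*)) {
    if (cache[finger] == key) {   // LoadPUX + cmp above
      return cache[finger + 1];   // LoadP(MemOperand(r6, kPointerSize))
    }
    return runtime_lookup(key);   // Runtime::kGetFromCache
  }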
3788 3862
3789 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 3863 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3790 ZoneList<Expression*>* args = expr->arguments(); 3864 ZoneList<Expression*>* args = expr->arguments();
3791 VisitForAccumulatorValue(args->at(0)); 3865 VisitForAccumulatorValue(args->at(0));
3792 3866
3793 Label materialize_true, materialize_false; 3867 Label materialize_true, materialize_false;
3794 Label* if_true = NULL; 3868 Label* if_true = NULL;
3795 Label* if_false = NULL; 3869 Label* if_false = NULL;
3796 Label* fall_through = NULL; 3870 Label* fall_through = NULL;
3797 context()->PrepareTest(&materialize_true, &materialize_false, 3871 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3798 &if_true, &if_false, &fall_through); 3872 &if_false, &fall_through);
3799 3873
3800 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3874 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3801 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask)); 3875 // PPC: assume ip is free for use as a scratch register here.
3876 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
3877 __ and_(r0, r3, ip);
3878 __ cmpi(r0, Operand::Zero());
3802 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3879 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3803 Split(eq, if_true, if_false, fall_through); 3880 Split(eq, if_true, if_false, fall_through);
3804 3881
3805 context()->Plug(if_true, if_false); 3882 context()->Plug(if_true, if_false);
3806 } 3883 }
3807 3884
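A string's hash field packs flags alongside either a computed hash or a cached array index; the test above ANDs with String::kContainsCachedArrayIndexMask and treats a zero result as "an index is cached". The PPC side needs the extra mov into ip, presumably because the mask does not fit the 16-bit immediate of andi., where ARM's tst could encode it directly. In sketch form (the mask value is illustrative only):

  #include <cstdint>

  const uint32_t kMaskSketch = 0x80000002u;  // stand-in for the real mask

  bool HasCachedArrayIndexSketch(uint32_t hash_field) {
    // and_ + cmpi Operand::Zero() above; eq selects the true branch.
    return (hash_field & kMaskSketch) == 0;
  }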
3808 3885
3809 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3886 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3810 ZoneList<Expression*>* args = expr->arguments(); 3887 ZoneList<Expression*>* args = expr->arguments();
3811 DCHECK(args->length() == 1); 3888 DCHECK(args->length() == 1);
3812 VisitForAccumulatorValue(args->at(0)); 3889 VisitForAccumulatorValue(args->at(0));
3813 3890
3814 __ AssertString(r0); 3891 __ AssertString(r3);
3815 3892
3816 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 3893 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3817 __ IndexFromHash(r0, r0); 3894 __ IndexFromHash(r3, r3);
3818 3895
3819 context()->Plug(r0); 3896 context()->Plug(r3);
3820 } 3897 }
3821 3898
3822 3899
3823 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) { 3900 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3824 Label bailout, done, one_char_separator, long_separator, non_trivial_array, 3901 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3825 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop, 3902 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3826 one_char_separator_loop_entry, long_separator_loop; 3903 one_char_separator_loop_entry, long_separator_loop;
3827 ZoneList<Expression*>* args = expr->arguments(); 3904 ZoneList<Expression*>* args = expr->arguments();
3828 DCHECK(args->length() == 2); 3905 DCHECK(args->length() == 2);
3829 VisitForStackValue(args->at(1)); 3906 VisitForStackValue(args->at(1));
3830 VisitForAccumulatorValue(args->at(0)); 3907 VisitForAccumulatorValue(args->at(0));
3831 3908
3832 // All aliases of the same register have disjoint lifetimes. 3909 // All aliases of the same register have disjoint lifetimes.
3833 Register array = r0; 3910 Register array = r3;
3834 Register elements = no_reg; // Will be r0. 3911 Register elements = no_reg; // Will be r3.
3835 Register result = no_reg; // Will be r0. 3912 Register result = no_reg; // Will be r3.
3836 Register separator = r1; 3913 Register separator = r4;
3837 Register array_length = r2; 3914 Register array_length = r5;
3838 Register result_pos = no_reg; // Will be r2 3915 Register result_pos = no_reg; // Will be r5
3839 Register string_length = r3; 3916 Register string_length = r6;
3840 Register string = r4; 3917 Register string = r7;
3841 Register element = r5; 3918 Register element = r8;
3842 Register elements_end = r6; 3919 Register elements_end = r9;
3843 Register scratch = r9; 3920 Register scratch1 = r10;
3921 Register scratch2 = r11;
3844 3922
3845 // Separator operand is on the stack. 3923 // Separator operand is on the stack.
3846 __ pop(separator); 3924 __ pop(separator);
3847 3925
3848 // Check that the array is a JSArray. 3926 // Check that the array is a JSArray.
3849 __ JumpIfSmi(array, &bailout); 3927 __ JumpIfSmi(array, &bailout);
3850 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE); 3928 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
3851 __ b(ne, &bailout); 3929 __ bne(&bailout);
3852 3930
3853 // Check that the array has fast elements. 3931 // Check that the array has fast elements.
3854 __ CheckFastElements(scratch, array_length, &bailout); 3932 __ CheckFastElements(scratch1, scratch2, &bailout);
3855 3933
3856 // If the array has length zero, return the empty string. 3934 // If the array has length zero, return the empty string.
3857 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); 3935 __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3858 __ SmiUntag(array_length, SetCC); 3936 __ SmiUntag(array_length);
3859 __ b(ne, &non_trivial_array); 3937 __ cmpi(array_length, Operand::Zero());
3860 __ LoadRoot(r0, Heap::kempty_stringRootIndex); 3938 __ bne(&non_trivial_array);
3939 __ LoadRoot(r3, Heap::kempty_stringRootIndex);
3861 __ b(&done); 3940 __ b(&done);
3862 3941
3863 __ bind(&non_trivial_array); 3942 __ bind(&non_trivial_array);
3864 3943
3865 // Get the FixedArray containing array's elements. 3944 // Get the FixedArray containing array's elements.
3866 elements = array; 3945 elements = array;
3867 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset)); 3946 __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3868 array = no_reg; // End of array's live range. 3947 array = no_reg; // End of array's live range.
3869 3948
3870 // Check that all array elements are sequential one-byte strings, and 3949 // Check that all array elements are sequential one-byte strings, and
3871 // accumulate the sum of their lengths, as a smi-encoded value. 3950 // accumulate the sum of their lengths, as a smi-encoded value.
3872 __ mov(string_length, Operand::Zero()); 3951 __ li(string_length, Operand::Zero());
3873 __ add(element, 3952 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3874 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 3953 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
3875 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 3954 __ add(elements_end, element, elements_end);
3876 // Loop condition: while (element < elements_end). 3955 // Loop condition: while (element < elements_end).
3877 // Live values in registers: 3956 // Live values in registers:
3878 // elements: Fixed array of strings. 3957 // elements: Fixed array of strings.
3879 // array_length: Length of the fixed array of strings (not smi) 3958 // array_length: Length of the fixed array of strings (not smi)
3880 // separator: Separator string 3959 // separator: Separator string
3881 // string_length: Accumulated sum of string lengths (smi). 3960 // string_length: Accumulated sum of string lengths (smi).
3882 // element: Current array element. 3961 // element: Current array element.
3883 // elements_end: Array end. 3962 // elements_end: Array end.
3884 if (generate_debug_code_) { 3963 if (generate_debug_code_) {
3885 __ cmp(array_length, Operand::Zero()); 3964 __ cmpi(array_length, Operand::Zero());
3886 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin); 3965 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3887 } 3966 }
3888 __ bind(&loop); 3967 __ bind(&loop);
3889 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 3968 __ LoadP(string, MemOperand(element));
3969 __ addi(element, element, Operand(kPointerSize));
3890 __ JumpIfSmi(string, &bailout); 3970 __ JumpIfSmi(string, &bailout);
3891 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); 3971 __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3892 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 3972 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3893 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout); 3973 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3894 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); 3974 __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3895 __ add(string_length, string_length, Operand(scratch), SetCC); 3975
3896 __ b(vs, &bailout); 3976 __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
3977 r0);
3978 __ BranchOnOverflow(&bailout);
3979
3897 __ cmp(element, elements_end); 3980 __ cmp(element, elements_end);
3898 __ b(lt, &loop); 3981 __ blt(&loop);
3899 3982
3900 // If array_length is 1, return elements[0], a string. 3983 // If array_length is 1, return elements[0], a string.
3901 __ cmp(array_length, Operand(1)); 3984 __ cmpi(array_length, Operand(1));
3902 __ b(ne, &not_size_one_array); 3985 __ bne(&not_size_one_array);
3903 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); 3986 __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
3904 __ b(&done); 3987 __ b(&done);
3905 3988
3906 __ bind(&not_size_one_array); 3989 __ bind(&not_size_one_array);
3907 3990
3908 // Live values in registers: 3991 // Live values in registers:
3909 // separator: Separator string 3992 // separator: Separator string
3910 // array_length: Length of the array. 3993 // array_length: Length of the array.
3911 // string_length: Sum of string lengths (smi). 3994 // string_length: Sum of string lengths (smi).
3912 // elements: FixedArray of strings. 3995 // elements: FixedArray of strings.
3913 3996
3914 // Check that the separator is a flat one-byte string. 3997 // Check that the separator is a flat one-byte string.
3915 __ JumpIfSmi(separator, &bailout); 3998 __ JumpIfSmi(separator, &bailout);
3916 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset)); 3999 __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3917 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 4000 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3918 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout); 4001 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3919 4002
3920 // Add (separator length times array_length) - separator length to the 4003 // Add (separator length times array_length) - separator length to the
3921 // string_length to get the length of the result string. array_length is not 4004 // string_length to get the length of the result string.
3922 // smi but the other values are, so the result is a smi 4005 __ LoadP(scratch1,
3923 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 4006 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3924 __ sub(string_length, string_length, Operand(scratch)); 4007 __ sub(string_length, string_length, scratch1);
3925 __ smull(scratch, ip, array_length, scratch); 4008 #if V8_TARGET_ARCH_PPC64
4009 __ SmiUntag(scratch1, scratch1);
4010 __ Mul(scratch2, array_length, scratch1);
3926 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are 4011 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3927 // zero. 4012 // zero.
3928 __ cmp(ip, Operand::Zero()); 4013 __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
3929 __ b(ne, &bailout); 4014 __ bne(&bailout, cr0);
3930 __ tst(scratch, Operand(0x80000000)); 4015 __ SmiTag(scratch2, scratch2);
3931 __ b(ne, &bailout); 4016 #else
3932 __ add(string_length, string_length, Operand(scratch), SetCC); 4017 // array_length is not smi but the other values are, so the result is a smi
3933 __ b(vs, &bailout); 4018 __ mullw(scratch2, array_length, scratch1);
4019 __ mulhw(ip, array_length, scratch1);
4020 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4021 // zero.
4022 __ cmpi(ip, Operand::Zero());
4023 __ bne(&bailout);
4024 __ cmpwi(scratch2, Operand::Zero());
4025 __ blt(&bailout);
4026 #endif
4027
4028 __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
4029 r0);
4030 __ BranchOnOverflow(&bailout);
3934 __ SmiUntag(string_length); 4031 __ SmiUntag(string_length);
3935 4032
3936 // Get first element in the array to free up the elements register to be used 4033 // Get first element in the array to free up the elements register to be used
3937 // for the result. 4034 // for the result.
3938 __ add(element, 4035 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3939 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3940 result = elements; // End of live range for elements. 4036 result = elements; // End of live range for elements.
3941 elements = no_reg; 4037 elements = no_reg;
3942 // Live values in registers: 4038 // Live values in registers:
3943 // element: First array element 4039 // element: First array element
3944 // separator: Separator string 4040 // separator: Separator string
3945 // string_length: Length of result string (not smi) 4041 // string_length: Length of result string (not smi)
3946 // array_length: Length of the array. 4042 // array_length: Length of the array.
3947 __ AllocateOneByteString(result, string_length, scratch, 4043 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
3948 string, // used as scratch 4044 elements_end, &bailout);
3949 elements_end, // used as scratch
3950 &bailout);
3951 // Prepare for looping. Set up elements_end to end of the array. Set 4045 // Prepare for looping. Set up elements_end to end of the array. Set
3952 // result_pos to the position in the result where the first character 4046 // result_pos to the position in the result where the first character
3953 // will be written. 4047 // will be written.
3954 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 4048 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4049 __ add(elements_end, element, elements_end);
3955 result_pos = array_length; // End of live range for array_length. 4050 result_pos = array_length; // End of live range for array_length.
3956 array_length = no_reg; 4051 array_length = no_reg;
3957 __ add(result_pos, 4052 __ addi(result_pos, result,
3958 result, 4053 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3959 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3960 4054
3961 // Check the length of the separator. 4055 // Check the length of the separator.
3962 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 4056 __ LoadP(scratch1,
3963 __ cmp(scratch, Operand(Smi::FromInt(1))); 4057 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3964 __ b(eq, &one_char_separator); 4058 __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
3965 __ b(gt, &long_separator); 4059 __ beq(&one_char_separator);
4060 __ bgt(&long_separator);
3966 4061
3967 // Empty separator case 4062 // Empty separator case
3968 __ bind(&empty_separator_loop); 4063 __ bind(&empty_separator_loop);
3969 // Live values in registers: 4064 // Live values in registers:
3970 // result_pos: the position to which we are currently copying characters. 4065 // result_pos: the position to which we are currently copying characters.
3971 // element: Current array element. 4066 // element: Current array element.
3972 // elements_end: Array end. 4067 // elements_end: Array end.
3973 4068
3974 // Copy next array element to the result. 4069 // Copy next array element to the result.
3975 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4070 __ LoadP(string, MemOperand(element));
3976 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4071 __ addi(element, element, Operand(kPointerSize));
4072 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
3977 __ SmiUntag(string_length); 4073 __ SmiUntag(string_length);
3978 __ add(string, 4074 __ addi(string, string,
3979 string, 4075 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3980 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4076 __ CopyBytes(string, result_pos, string_length, scratch1);
3981 __ CopyBytes(string, result_pos, string_length, scratch);
3982 __ cmp(element, elements_end); 4077 __ cmp(element, elements_end);
3983 __ b(lt, &empty_separator_loop); // End while (element < elements_end). 4078 __ blt(&empty_separator_loop); // End while (element < elements_end).
3984 DCHECK(result.is(r0)); 4079 DCHECK(result.is(r3));
3985 __ b(&done); 4080 __ b(&done);
3986 4081
3987 // One-character separator case 4082 // One-character separator case
3988 __ bind(&one_char_separator); 4083 __ bind(&one_char_separator);
3989 // Replace separator with its one-byte character value. 4084 // Replace separator with its one-byte character value.
3990 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 4085 __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3991 // Jump into the loop after the code that copies the separator, so the first 4086 // Jump into the loop after the code that copies the separator, so the first
3992 // element is not preceded by a separator. 4087 // element is not preceded by a separator.
3993 __ jmp(&one_char_separator_loop_entry); 4088 __ b(&one_char_separator_loop_entry);
3994 4089
3995 __ bind(&one_char_separator_loop); 4090 __ bind(&one_char_separator_loop);
3996 // Live values in registers: 4091 // Live values in registers:
3997 // result_pos: the position to which we are currently copying characters. 4092 // result_pos: the position to which we are currently copying characters.
3998 // element: Current array element. 4093 // element: Current array element.
3999 // elements_end: Array end. 4094 // elements_end: Array end.
4000 // separator: Single separator one-byte char (in lower byte). 4095 // separator: Single separator one-byte char (in lower byte).
4001 4096
4002 // Copy the separator character to the result. 4097 // Copy the separator character to the result.
4003 __ strb(separator, MemOperand(result_pos, 1, PostIndex)); 4098 __ stb(separator, MemOperand(result_pos));
4099 __ addi(result_pos, result_pos, Operand(1));
4004 4100
4005 // Copy next array element to the result. 4101 // Copy next array element to the result.
4006 __ bind(&one_char_separator_loop_entry); 4102 __ bind(&one_char_separator_loop_entry);
4007 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4103 __ LoadP(string, MemOperand(element));
4008 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4104 __ addi(element, element, Operand(kPointerSize));
4105 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4009 __ SmiUntag(string_length); 4106 __ SmiUntag(string_length);
4010 __ add(string, 4107 __ addi(string, string,
4011 string, 4108 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4012 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4109 __ CopyBytes(string, result_pos, string_length, scratch1);
4013 __ CopyBytes(string, result_pos, string_length, scratch); 4110 __ cmpl(element, elements_end);
4014 __ cmp(element, elements_end); 4111 __ blt(&one_char_separator_loop); // End while (element < elements_end).
4015 __ b(lt, &one_char_separator_loop); // End while (element < elements_end). 4112 DCHECK(result.is(r3));
4016 DCHECK(result.is(r0));
4017 __ b(&done); 4113 __ b(&done);
4018 4114
4019 // Long separator case (separator is more than one character). Entry is at the 4115 // Long separator case (separator is more than one character). Entry is at the
4020 // label long_separator below. 4116 // label long_separator below.
4021 __ bind(&long_separator_loop); 4117 __ bind(&long_separator_loop);
4022 // Live values in registers: 4118 // Live values in registers:
4023 // result_pos: the position to which we are currently copying characters. 4119 // result_pos: the position to which we are currently copying characters.
4024 // element: Current array element. 4120 // element: Current array element.
4025 // elements_end: Array end. 4121 // elements_end: Array end.
4026 // separator: Separator string. 4122 // separator: Separator string.
4027 4123
4028 // Copy the separator to the result. 4124 // Copy the separator to the result.
4029 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); 4125 __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
4030 __ SmiUntag(string_length); 4126 __ SmiUntag(string_length);
4031 __ add(string, 4127 __ addi(string, separator,
4032 separator, 4128 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4033 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4129 __ CopyBytes(string, result_pos, string_length, scratch1);
4034 __ CopyBytes(string, result_pos, string_length, scratch);
4035 4130
4036 __ bind(&long_separator); 4131 __ bind(&long_separator);
4037 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4132 __ LoadP(string, MemOperand(element));
4038 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4133 __ addi(element, element, Operand(kPointerSize));
4134 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4039 __ SmiUntag(string_length); 4135 __ SmiUntag(string_length);
4040 __ add(string, 4136 __ addi(string, string,
4041 string, 4137 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4042 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4138 __ CopyBytes(string, result_pos, string_length, scratch1);
4043 __ CopyBytes(string, result_pos, string_length, scratch); 4139 __ cmpl(element, elements_end);
4044 __ cmp(element, elements_end); 4140 __ blt(&long_separator_loop); // End while (element < elements_end).
4045 __ b(lt, &long_separator_loop); // End while (element < elements_end). 4141 DCHECK(result.is(r3));
4046 DCHECK(result.is(r0));
4047 __ b(&done); 4142 __ b(&done);
4048 4143
4049 __ bind(&bailout); 4144 __ bind(&bailout);
4050 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 4145 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4051 __ bind(&done); 4146 __ bind(&done);
4052 context()->Plug(r0); 4147 context()->Plug(r3);
4053 } 4148 }
4054 4149
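The length bookkeeping is where the two ports diverge most: the result length is the sum of the element lengths plus separator length times (array_length - 1), every add is overflow-checked (AddAndCheckForOverflow/BranchOnOverflow replacing ARM's SetCC plus b(vs)), and the product must stay in smi range, which is what the "higher 33 bits zero" checks enforce. In plain integers the computation is:

  #include <cstdint>

  // Sketch of the overflow-checked length computation; the real code works
  // on tagged smis, so "fits in a smi" stands where INT32_MAX is used here.
  // Returns -1 where the assembly would branch to &bailout.
  int64_t JoinedLengthSketch(const int32_t* lengths, int32_t n,
                             int32_t sep_len) {
    int64_t total = 0;
    for (int32_t i = 0; i < n; i++) total += lengths[i];  // per-element adds
    total += static_cast<int64_t>(sep_len) * (n - 1);     // separators
    return (total >= 0 && total <= INT32_MAX) ? total : -1;
  }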
4055 4150
4056 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 4151 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4057 DCHECK(expr->arguments()->length() == 0); 4152 DCHECK(expr->arguments()->length() == 0);
4058 ExternalReference debug_is_active = 4153 ExternalReference debug_is_active =
4059 ExternalReference::debug_is_active_address(isolate()); 4154 ExternalReference::debug_is_active_address(isolate());
4060 __ mov(ip, Operand(debug_is_active)); 4155 __ mov(ip, Operand(debug_is_active));
4061 __ ldrb(r0, MemOperand(ip)); 4156 __ lbz(r3, MemOperand(ip));
4062 __ SmiTag(r0); 4157 __ SmiTag(r3);
4063 context()->Plug(r0); 4158 context()->Plug(r3);
4064 } 4159 }
4065 4160
4066 4161
4067 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 4162 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4068 if (expr->function() != NULL && 4163 if (expr->function() != NULL &&
4069 expr->function()->intrinsic_type == Runtime::INLINE) { 4164 expr->function()->intrinsic_type == Runtime::INLINE) {
4070 Comment cmnt(masm_, "[ InlineRuntimeCall"); 4165 Comment cmnt(masm_, "[ InlineRuntimeCall");
4071 EmitInlineRuntimeCall(expr); 4166 EmitInlineRuntimeCall(expr);
4072 return; 4167 return;
4073 } 4168 }
4074 4169
4075 Comment cmnt(masm_, "[ CallRuntime"); 4170 Comment cmnt(masm_, "[ CallRuntime");
4076 ZoneList<Expression*>* args = expr->arguments(); 4171 ZoneList<Expression*>* args = expr->arguments();
4077 int arg_count = args->length(); 4172 int arg_count = args->length();
4078 4173
4079 if (expr->is_jsruntime()) { 4174 if (expr->is_jsruntime()) {
4080 // Push the builtins object as the receiver. 4175 // Push the builtins object as the receiver.
4081 Register receiver = LoadDescriptor::ReceiverRegister(); 4176 Register receiver = LoadDescriptor::ReceiverRegister();
4082 __ ldr(receiver, GlobalObjectOperand()); 4177 __ LoadP(receiver, GlobalObjectOperand());
4083 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset)); 4178 __ LoadP(receiver,
4179 FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4084 __ push(receiver); 4180 __ push(receiver);
4085 4181
4086 // Load the function from the receiver. 4182 // Load the function from the receiver.
4087 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name())); 4183 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4088 if (FLAG_vector_ics) { 4184 if (FLAG_vector_ics) {
4089 __ mov(VectorLoadICDescriptor::SlotRegister(), 4185 __ mov(VectorLoadICDescriptor::SlotRegister(),
4090 Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot()))); 4186 Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4091 CallLoadIC(NOT_CONTEXTUAL); 4187 CallLoadIC(NOT_CONTEXTUAL);
4092 } else { 4188 } else {
4093 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); 4189 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4094 } 4190 }
4095 4191
4096 // Push the target function under the receiver. 4192 // Push the target function under the receiver.
4097 __ ldr(ip, MemOperand(sp, 0)); 4193 __ LoadP(ip, MemOperand(sp, 0));
4098 __ push(ip); 4194 __ push(ip);
4099 __ str(r0, MemOperand(sp, kPointerSize)); 4195 __ StoreP(r3, MemOperand(sp, kPointerSize));
4100 4196
4101 // Push the arguments ("left-to-right"). 4197 // Push the arguments ("left-to-right").
4102 int arg_count = args->length(); 4198 int arg_count = args->length();
4103 for (int i = 0; i < arg_count; i++) { 4199 for (int i = 0; i < arg_count; i++) {
4104 VisitForStackValue(args->at(i)); 4200 VisitForStackValue(args->at(i));
4105 } 4201 }
4106 4202
4107 // Record source position of the IC call. 4203 // Record source position of the IC call.
4108 SetSourcePosition(expr->position()); 4204 SetSourcePosition(expr->position());
4109 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); 4205 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4110 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 4206 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
4111 __ CallStub(&stub); 4207 __ CallStub(&stub);
4112 4208
4113 // Restore context register. 4209 // Restore context register.
4114 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4210 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4115 4211
4116 context()->DropAndPlug(1, r0); 4212 context()->DropAndPlug(1, r3);
4117 } else { 4213 } else {
4118 // Push the arguments ("left-to-right"). 4214 // Push the arguments ("left-to-right").
4119 for (int i = 0; i < arg_count; i++) { 4215 for (int i = 0; i < arg_count; i++) {
4120 VisitForStackValue(args->at(i)); 4216 VisitForStackValue(args->at(i));
4121 } 4217 }
4122 4218
4123 // Call the C runtime function. 4219 // Call the C runtime function.
4124 __ CallRuntime(expr->function(), arg_count); 4220 __ CallRuntime(expr->function(), arg_count);
4125 context()->Plug(r0); 4221 context()->Plug(r3);
4126 } 4222 }
4127 } 4223 }
4128 4224
4129 4225
4130 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 4226 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4131 switch (expr->op()) { 4227 switch (expr->op()) {
4132 case Token::DELETE: { 4228 case Token::DELETE: {
4133 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 4229 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4134 Property* property = expr->expression()->AsProperty(); 4230 Property* property = expr->expression()->AsProperty();
4135 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 4231 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4136 4232
4137 if (property != NULL) { 4233 if (property != NULL) {
4138 VisitForStackValue(property->obj()); 4234 VisitForStackValue(property->obj());
4139 VisitForStackValue(property->key()); 4235 VisitForStackValue(property->key());
4140 __ mov(r1, Operand(Smi::FromInt(strict_mode()))); 4236 __ LoadSmiLiteral(r4, Smi::FromInt(strict_mode()));
4141 __ push(r1); 4237 __ push(r4);
4142 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4238 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4143 context()->Plug(r0); 4239 context()->Plug(r3);
4144 } else if (proxy != NULL) { 4240 } else if (proxy != NULL) {
4145 Variable* var = proxy->var(); 4241 Variable* var = proxy->var();
4146 // Delete of an unqualified identifier is disallowed in strict mode 4242 // Delete of an unqualified identifier is disallowed in strict mode
4147 // but "delete this" is allowed. 4243 // but "delete this" is allowed.
4148 DCHECK(strict_mode() == SLOPPY || var->is_this()); 4244 DCHECK(strict_mode() == SLOPPY || var->is_this());
4149 if (var->IsUnallocated()) { 4245 if (var->IsUnallocated()) {
4150 __ ldr(r2, GlobalObjectOperand()); 4246 __ LoadP(r5, GlobalObjectOperand());
4151 __ mov(r1, Operand(var->name())); 4247 __ mov(r4, Operand(var->name()));
4152 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); 4248 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
4153 __ Push(r2, r1, r0); 4249 __ Push(r5, r4, r3);
4154 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4250 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4155 context()->Plug(r0); 4251 context()->Plug(r3);
4156 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4252 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4157 // Result of deleting non-global, non-dynamic variables is false. 4253 // Result of deleting non-global, non-dynamic variables is false.
4158 // The subexpression does not have side effects. 4254 // The subexpression does not have side effects.
4159 context()->Plug(var->is_this()); 4255 context()->Plug(var->is_this());
4160 } else { 4256 } else {
4161 // Non-global variable. Call the runtime to try to delete from the 4257 // Non-global variable. Call the runtime to try to delete from the
4162 // context where the variable was introduced. 4258 // context where the variable was introduced.
4163 DCHECK(!context_register().is(r2)); 4259 DCHECK(!context_register().is(r5));
4164 __ mov(r2, Operand(var->name())); 4260 __ mov(r5, Operand(var->name()));
4165 __ Push(context_register(), r2); 4261 __ Push(context_register(), r5);
4166 __ CallRuntime(Runtime::kDeleteLookupSlot, 2); 4262 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4167 context()->Plug(r0); 4263 context()->Plug(r3);
4168 } 4264 }
4169 } else { 4265 } else {
4170 // Result of deleting non-property, non-variable reference is true. 4266 // Result of deleting non-property, non-variable reference is true.
4171 // The subexpression may have side effects. 4267 // The subexpression may have side effects.
4172 VisitForEffect(expr->expression()); 4268 VisitForEffect(expr->expression());
4173 context()->Plug(true); 4269 context()->Plug(true);
4174 } 4270 }
4175 break; 4271 break;
4176 } 4272 }
4177 4273
4178 case Token::VOID: { 4274 case Token::VOID: {
4179 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 4275 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4180 VisitForEffect(expr->expression()); 4276 VisitForEffect(expr->expression());
4181 context()->Plug(Heap::kUndefinedValueRootIndex); 4277 context()->Plug(Heap::kUndefinedValueRootIndex);
4182 break; 4278 break;
4183 } 4279 }
4184 4280
4185 case Token::NOT: { 4281 case Token::NOT: {
4186 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 4282 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4187 if (context()->IsEffect()) { 4283 if (context()->IsEffect()) {
4188 // Unary NOT has no side effects so it's only necessary to visit the 4284 // Unary NOT has no side effects so it's only necessary to visit the
4189 // subexpression. Match the optimizing compiler by not branching. 4285 // subexpression. Match the optimizing compiler by not branching.
4190 VisitForEffect(expr->expression()); 4286 VisitForEffect(expr->expression());
4191 } else if (context()->IsTest()) { 4287 } else if (context()->IsTest()) {
4192 const TestContext* test = TestContext::cast(context()); 4288 const TestContext* test = TestContext::cast(context());
4193 // The labels are swapped for the recursive call. 4289 // The labels are swapped for the recursive call.
4194 VisitForControl(expr->expression(), 4290 VisitForControl(expr->expression(), test->false_label(),
4195 test->false_label(), 4291 test->true_label(), test->fall_through());
4196 test->true_label(),
4197 test->fall_through());
4198 context()->Plug(test->true_label(), test->false_label()); 4292 context()->Plug(test->true_label(), test->false_label());
4199 } else { 4293 } else {
4200 // We handle value contexts explicitly rather than simply visiting 4294 // We handle value contexts explicitly rather than simply visiting
4201 // for control and plugging the control flow into the context, 4295 // for control and plugging the control flow into the context,
4202 // because we need to prepare a pair of extra administrative AST ids 4296 // because we need to prepare a pair of extra administrative AST ids
4203 // for the optimizing compiler. 4297 // for the optimizing compiler.
4204 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); 4298 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4205 Label materialize_true, materialize_false, done; 4299 Label materialize_true, materialize_false, done;
4206 VisitForControl(expr->expression(), 4300 VisitForControl(expr->expression(), &materialize_false,
4207 &materialize_false, 4301 &materialize_true, &materialize_true);
4208 &materialize_true,
4209 &materialize_true);
4210 __ bind(&materialize_true); 4302 __ bind(&materialize_true);
4211 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 4303 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4212 __ LoadRoot(r0, Heap::kTrueValueRootIndex); 4304 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
4213 if (context()->IsStackValue()) __ push(r0); 4305 if (context()->IsStackValue()) __ push(r3);
4214 __ jmp(&done); 4306 __ b(&done);
4215 __ bind(&materialize_false); 4307 __ bind(&materialize_false);
4216 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); 4308 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4217 __ LoadRoot(r0, Heap::kFalseValueRootIndex); 4309 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
4218 if (context()->IsStackValue()) __ push(r0); 4310 if (context()->IsStackValue()) __ push(r3);
4219 __ bind(&done); 4311 __ bind(&done);
4220 } 4312 }
4221 break; 4313 break;
4222 } 4314 }
4223 4315
4224 case Token::TYPEOF: { 4316 case Token::TYPEOF: {
4225 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 4317 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4226 { StackValueContext context(this); 4318 {
4319 StackValueContext context(this);
4227 VisitForTypeofValue(expr->expression()); 4320 VisitForTypeofValue(expr->expression());
4228 } 4321 }
4229 __ CallRuntime(Runtime::kTypeof, 1); 4322 __ CallRuntime(Runtime::kTypeof, 1);
4230 context()->Plug(r0); 4323 context()->Plug(r3);
4231 break; 4324 break;
4232 } 4325 }
4233 4326
4234 default: 4327 default:
4235 UNREACHABLE(); 4328 UNREACHABLE();
4236 } 4329 }
4237 } 4330 }
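
The "labels are swapped" comment in the NOT case above is the whole optimization: in a test context, negation costs no instructions because the recursive visit simply exchanges the branch targets. A minimal sketch (not V8 code) with a pretend emitter:

#include <cstdio>

struct Label { const char* name; };

// Pretend emitter: branch to if_true when expr is truthy, else to if_false.
void VisitForControl(const char* expr, Label* if_true, Label* if_false) {
  std::printf("test %s ? goto %s : goto %s\n", expr, if_true->name,
              if_false->name);
}

// Unary NOT emits nothing of its own; it only swaps the targets.
void VisitNotForControl(const char* expr, Label* if_true, Label* if_false) {
  VisitForControl(expr, if_false, if_true);
}

int main() {
  Label t = {"L_true"}, f = {"L_false"};
  VisitNotForControl("x", &t, &f);  // test x ? goto L_false : goto L_true
  return 0;
}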
4238 4331
4239 4332
4240 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4333 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
(...skipping 15 matching lines...)
4256 } 4349 }
4257 4350
4258 // Evaluate expression and get value. 4351 // Evaluate expression and get value.
4259 if (assign_type == VARIABLE) { 4352 if (assign_type == VARIABLE) {
4260 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL); 4353 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4261 AccumulatorValueContext context(this); 4354 AccumulatorValueContext context(this);
4262 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4355 EmitVariableLoad(expr->expression()->AsVariableProxy());
4263 } else { 4356 } else {
4264 // Reserve space for result of postfix operation. 4357 // Reserve space for result of postfix operation.
4265 if (expr->is_postfix() && !context()->IsEffect()) { 4358 if (expr->is_postfix() && !context()->IsEffect()) {
4266 __ mov(ip, Operand(Smi::FromInt(0))); 4359 __ LoadSmiLiteral(ip, Smi::FromInt(0));
4267 __ push(ip); 4360 __ push(ip);
4268 } 4361 }
4269 if (assign_type == NAMED_PROPERTY) { 4362 if (assign_type == NAMED_PROPERTY) {
4270 // Put the object both on the stack and in the register. 4363 // Put the object both on the stack and in the register.
4271 VisitForStackValue(prop->obj()); 4364 VisitForStackValue(prop->obj());
4272 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 4365 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4273 EmitNamedPropertyLoad(prop); 4366 EmitNamedPropertyLoad(prop);
4274 } else { 4367 } else {
4275 VisitForStackValue(prop->obj()); 4368 VisitForStackValue(prop->obj());
4276 VisitForStackValue(prop->key()); 4369 VisitForStackValue(prop->key());
4277 __ ldr(LoadDescriptor::ReceiverRegister(), 4370 __ LoadP(LoadDescriptor::ReceiverRegister(),
4278 MemOperand(sp, 1 * kPointerSize)); 4371 MemOperand(sp, 1 * kPointerSize));
4279 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 4372 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4280 EmitKeyedPropertyLoad(prop); 4373 EmitKeyedPropertyLoad(prop);
4281 } 4374 }
4282 } 4375 }
4283 4376
4284 // We need a second deoptimization point after loading the value 4377 // We need a second deoptimization point after loading the value
4285 // in case evaluating the property load may have a side effect. 4378 // in case evaluating the property load may have a side effect.
4286 if (assign_type == VARIABLE) { 4379 if (assign_type == VARIABLE) {
4287 PrepareForBailout(expr->expression(), TOS_REG); 4380 PrepareForBailout(expr->expression(), TOS_REG);
4288 } else { 4381 } else {
4289 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 4382 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4290 } 4383 }
4291 4384
4292 // Inline smi case if we are in a loop. 4385 // Inline smi case if we are in a loop.
4293 Label stub_call, done; 4386 Label stub_call, done;
4294 JumpPatchSite patch_site(masm_); 4387 JumpPatchSite patch_site(masm_);
4295 4388
4296 int count_value = expr->op() == Token::INC ? 1 : -1; 4389 int count_value = expr->op() == Token::INC ? 1 : -1;
4297 if (ShouldInlineSmiCase(expr->op())) { 4390 if (ShouldInlineSmiCase(expr->op())) {
4298 Label slow; 4391 Label slow;
4299 patch_site.EmitJumpIfNotSmi(r0, &slow); 4392 patch_site.EmitJumpIfNotSmi(r3, &slow);
4300 4393
4301 // Save result for postfix expressions. 4394 // Save result for postfix expressions.
4302 if (expr->is_postfix()) { 4395 if (expr->is_postfix()) {
4303 if (!context()->IsEffect()) { 4396 if (!context()->IsEffect()) {
4304 // Save the result on the stack. If we have a named or keyed property 4397 // Save the result on the stack. If we have a named or keyed property
4305 // we store the result under the receiver that is currently on top 4398 // we store the result under the receiver that is currently on top
4306 // of the stack. 4399 // of the stack.
4307 switch (assign_type) { 4400 switch (assign_type) {
4308 case VARIABLE: 4401 case VARIABLE:
4309 __ push(r0); 4402 __ push(r3);
4310 break; 4403 break;
4311 case NAMED_PROPERTY: 4404 case NAMED_PROPERTY:
4312 __ str(r0, MemOperand(sp, kPointerSize)); 4405 __ StoreP(r3, MemOperand(sp, kPointerSize));
4313 break; 4406 break;
4314 case KEYED_PROPERTY: 4407 case KEYED_PROPERTY:
4315 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4408 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4316 break; 4409 break;
4317 } 4410 }
4318 } 4411 }
4319 } 4412 }
4320 4413
4321 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); 4414 Register scratch1 = r4;
4322 __ b(vc, &done); 4415 Register scratch2 = r5;
4416 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
4417 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
4418 __ BranchOnNoOverflow(&done);
4323 // Call stub. Undo operation first. 4419 // Call stub. Undo operation first.
4324 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 4420 __ sub(r3, r3, scratch1);
4325 __ jmp(&stub_call); 4421 __ b(&stub_call);
4326 __ bind(&slow); 4422 __ bind(&slow);
4327 } 4423 }
4328 ToNumberStub convert_stub(isolate()); 4424 ToNumberStub convert_stub(isolate());
4329 __ CallStub(&convert_stub); 4425 __ CallStub(&convert_stub);
4330 4426
4331 // Save result for postfix expressions. 4427 // Save result for postfix expressions.
4332 if (expr->is_postfix()) { 4428 if (expr->is_postfix()) {
4333 if (!context()->IsEffect()) { 4429 if (!context()->IsEffect()) {
4334 // Save the result on the stack. If we have a named or keyed property 4430 // Save the result on the stack. If we have a named or keyed property
4335 // we store the result under the receiver that is currently on top 4431 // we store the result under the receiver that is currently on top
4336 // of the stack. 4432 // of the stack.
4337 switch (assign_type) { 4433 switch (assign_type) {
4338 case VARIABLE: 4434 case VARIABLE:
4339 __ push(r0); 4435 __ push(r3);
4340 break; 4436 break;
4341 case NAMED_PROPERTY: 4437 case NAMED_PROPERTY:
4342 __ str(r0, MemOperand(sp, kPointerSize)); 4438 __ StoreP(r3, MemOperand(sp, kPointerSize));
4343 break; 4439 break;
4344 case KEYED_PROPERTY: 4440 case KEYED_PROPERTY:
4345 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4441 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4346 break; 4442 break;
4347 } 4443 }
4348 } 4444 }
4349 } 4445 }
4350 4446
4351
4352 __ bind(&stub_call); 4447 __ bind(&stub_call);
4353 __ mov(r1, r0); 4448 __ mr(r4, r3);
4354 __ mov(r0, Operand(Smi::FromInt(count_value))); 4449 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
4355 4450
4356 // Record position before stub call. 4451 // Record position before stub call.
4357 SetSourcePosition(expr->position()); 4452 SetSourcePosition(expr->position());
4358 4453
4359 Handle<Code> code = 4454 Handle<Code> code =
4360 CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code(); 4455 CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4361 CallIC(code, expr->CountBinOpFeedbackId()); 4456 CallIC(code, expr->CountBinOpFeedbackId());
4362 patch_site.EmitPatchInfo(); 4457 patch_site.EmitPatchInfo();
4363 __ bind(&done); 4458 __ bind(&done);
4364 4459
4365 // Store the value returned in r0. 4460 // Store the value returned in r3.
4366 switch (assign_type) { 4461 switch (assign_type) {
4367 case VARIABLE: 4462 case VARIABLE:
4368 if (expr->is_postfix()) { 4463 if (expr->is_postfix()) {
4369 { EffectContext context(this); 4464 {
4465 EffectContext context(this);
4370 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4466 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4371 Token::ASSIGN); 4467 Token::ASSIGN);
4372 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4468 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4373 context.Plug(r0); 4469 context.Plug(r3);
4374 } 4470 }
4375 // For all contexts except EffectContext we have the result on 4471 // For all contexts except EffectContext we have the result on
4376 // top of the stack. 4472 // top of the stack.
4377 if (!context()->IsEffect()) { 4473 if (!context()->IsEffect()) {
4378 context()->PlugTOS(); 4474 context()->PlugTOS();
4379 } 4475 }
4380 } else { 4476 } else {
4381 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4477 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4382 Token::ASSIGN); 4478 Token::ASSIGN);
4383 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4479 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4384 context()->Plug(r0); 4480 context()->Plug(r3);
4385 } 4481 }
4386 break; 4482 break;
4387 case NAMED_PROPERTY: { 4483 case NAMED_PROPERTY: {
4388 __ mov(StoreDescriptor::NameRegister(), 4484 __ mov(StoreDescriptor::NameRegister(),
4389 Operand(prop->key()->AsLiteral()->value())); 4485 Operand(prop->key()->AsLiteral()->value()));
4390 __ pop(StoreDescriptor::ReceiverRegister()); 4486 __ pop(StoreDescriptor::ReceiverRegister());
4391 CallStoreIC(expr->CountStoreFeedbackId()); 4487 CallStoreIC(expr->CountStoreFeedbackId());
4392 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4488 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4393 if (expr->is_postfix()) { 4489 if (expr->is_postfix()) {
4394 if (!context()->IsEffect()) { 4490 if (!context()->IsEffect()) {
4395 context()->PlugTOS(); 4491 context()->PlugTOS();
4396 } 4492 }
4397 } else { 4493 } else {
4398 context()->Plug(r0); 4494 context()->Plug(r3);
4399 } 4495 }
4400 break; 4496 break;
4401 } 4497 }
4402 case KEYED_PROPERTY: { 4498 case KEYED_PROPERTY: {
4403 __ Pop(StoreDescriptor::ReceiverRegister(), 4499 __ Pop(StoreDescriptor::ReceiverRegister(),
4404 StoreDescriptor::NameRegister()); 4500 StoreDescriptor::NameRegister());
4405 Handle<Code> ic = 4501 Handle<Code> ic =
4406 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 4502 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4407 CallIC(ic, expr->CountStoreFeedbackId()); 4503 CallIC(ic, expr->CountStoreFeedbackId());
4408 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4504 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4409 if (expr->is_postfix()) { 4505 if (expr->is_postfix()) {
4410 if (!context()->IsEffect()) { 4506 if (!context()->IsEffect()) {
4411 context()->PlugTOS(); 4507 context()->PlugTOS();
4412 } 4508 }
4413 } else { 4509 } else {
4414 context()->Plug(r0); 4510 context()->Plug(r3);
4415 } 4511 }
4416 break; 4512 break;
4417 } 4513 }
4418 } 4514 }
4419 } 4515 }
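
On the inline smi path above, the ARM add with SetCC plus b(vc, ...) becomes AddAndCheckForOverflow/BranchOnNoOverflow, since the PPC sequence branches on a condition-register bit rather than an overflow flag. Assuming the macro uses the usual sign-comparison trick (an assumption about its internals, not taken from this patch), the arithmetic it relies on looks like this standalone sketch (not V8 code):

#include <cstdint>
#include <cstdio>

// Returns true when the signed addition a + b overflows; *sum receives the
// wrapped result either way.
bool AddOverflows(int64_t a, int64_t b, int64_t* sum) {
  uint64_t ua = static_cast<uint64_t>(a);
  uint64_t ub = static_cast<uint64_t>(b);
  uint64_t us = ua + ub;  // unsigned wraparound is well defined
  *sum = static_cast<int64_t>(us);
  // The sign bit of (a ^ sum) & (b ^ sum) is set exactly when both operands
  // agreed in sign and the sum disagrees, i.e. on overflow.
  return (((ua ^ us) & (ub ^ us)) >> 63) != 0;
}

int main() {
  int64_t sum;
  std::printf("%d\n", AddOverflows(INT64_MAX, 1, &sum));  // 1: overflow
  std::printf("%d\n", AddOverflows(40, 2, &sum));         // 0: sum == 42
  return 0;
}

On overflow the generated code subtracts the literal back out and falls through to the BinaryOpIC stub call.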
4420 4516
4421 4517
4422 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4518 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4423 DCHECK(!context()->IsEffect()); 4519 DCHECK(!context()->IsEffect());
4424 DCHECK(!context()->IsTest()); 4520 DCHECK(!context()->IsTest());
4425 VariableProxy* proxy = expr->AsVariableProxy(); 4521 VariableProxy* proxy = expr->AsVariableProxy();
4426 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4522 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4427 Comment cmnt(masm_, "[ Global variable"); 4523 Comment cmnt(masm_, "[ Global variable");
4428 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 4524 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4429 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name())); 4525 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4430 if (FLAG_vector_ics) { 4526 if (FLAG_vector_ics) {
4431 __ mov(VectorLoadICDescriptor::SlotRegister(), 4527 __ mov(VectorLoadICDescriptor::SlotRegister(),
4432 Operand(Smi::FromInt(proxy->VariableFeedbackSlot()))); 4528 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4433 } 4529 }
4434 // Use a regular load, not a contextual load, to avoid a reference 4530 // Use a regular load, not a contextual load, to avoid a reference
4435 // error. 4531 // error.
4436 CallLoadIC(NOT_CONTEXTUAL); 4532 CallLoadIC(NOT_CONTEXTUAL);
4437 PrepareForBailout(expr, TOS_REG); 4533 PrepareForBailout(expr, TOS_REG);
4438 context()->Plug(r0); 4534 context()->Plug(r3);
4439 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4535 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4440 Comment cmnt(masm_, "[ Lookup slot"); 4536 Comment cmnt(masm_, "[ Lookup slot");
4441 Label done, slow; 4537 Label done, slow;
4442 4538
4443 // Generate code for loading from variables potentially shadowed 4539 // Generate code for loading from variables potentially shadowed
4444 // by eval-introduced variables. 4540 // by eval-introduced variables.
4445 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done); 4541 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4446 4542
4447 __ bind(&slow); 4543 __ bind(&slow);
4448 __ mov(r0, Operand(proxy->name())); 4544 __ mov(r3, Operand(proxy->name()));
4449 __ Push(cp, r0); 4545 __ Push(cp, r3);
4450 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2); 4546 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4451 PrepareForBailout(expr, TOS_REG); 4547 PrepareForBailout(expr, TOS_REG);
4452 __ bind(&done); 4548 __ bind(&done);
4453 4549
4454 context()->Plug(r0); 4550 context()->Plug(r3);
4455 } else { 4551 } else {
4456 // This expression cannot throw a reference error at the top level. 4552 // This expression cannot throw a reference error at the top level.
4457 VisitInDuplicateContext(expr); 4553 VisitInDuplicateContext(expr);
4458 } 4554 }
4459 } 4555 }
4460 4556
4461 4557
4462 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 4558 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4463 Expression* sub_expr, 4559 Expression* sub_expr,
4464 Handle<String> check) { 4560 Handle<String> check) {
4465 Label materialize_true, materialize_false; 4561 Label materialize_true, materialize_false;
4466 Label* if_true = NULL; 4562 Label* if_true = NULL;
4467 Label* if_false = NULL; 4563 Label* if_false = NULL;
4468 Label* fall_through = NULL; 4564 Label* fall_through = NULL;
4469 context()->PrepareTest(&materialize_true, &materialize_false, 4565 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4470 &if_true, &if_false, &fall_through); 4566 &if_false, &fall_through);
4471 4567
4472 { AccumulatorValueContext context(this); 4568 {
4569 AccumulatorValueContext context(this);
4473 VisitForTypeofValue(sub_expr); 4570 VisitForTypeofValue(sub_expr);
4474 } 4571 }
4475 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4572 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4476 4573
4477 Factory* factory = isolate()->factory(); 4574 Factory* factory = isolate()->factory();
4478 if (String::Equals(check, factory->number_string())) { 4575 if (String::Equals(check, factory->number_string())) {
4479 __ JumpIfSmi(r0, if_true); 4576 __ JumpIfSmi(r3, if_true);
4480 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4577 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4481 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 4578 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4482 __ cmp(r0, ip); 4579 __ cmp(r3, ip);
4483 Split(eq, if_true, if_false, fall_through); 4580 Split(eq, if_true, if_false, fall_through);
4484 } else if (String::Equals(check, factory->string_string())) { 4581 } else if (String::Equals(check, factory->string_string())) {
4485 __ JumpIfSmi(r0, if_false); 4582 __ JumpIfSmi(r3, if_false);
4486 // Check for undetectable objects => false. 4583 // Check for undetectable objects => false.
4487 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); 4584 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
4488 __ b(ge, if_false); 4585 __ bge(if_false);
4489 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4586 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4490 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4587 STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
4491 Split(eq, if_true, if_false, fall_through); 4588 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4589 Split(eq, if_true, if_false, fall_through, cr0);
4492 } else if (String::Equals(check, factory->symbol_string())) { 4590 } else if (String::Equals(check, factory->symbol_string())) {
4493 __ JumpIfSmi(r0, if_false); 4591 __ JumpIfSmi(r3, if_false);
4494 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE); 4592 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
4495 Split(eq, if_true, if_false, fall_through); 4593 Split(eq, if_true, if_false, fall_through);
4496 } else if (String::Equals(check, factory->boolean_string())) { 4594 } else if (String::Equals(check, factory->boolean_string())) {
4497 __ CompareRoot(r0, Heap::kTrueValueRootIndex); 4595 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
4498 __ b(eq, if_true); 4596 __ beq(if_true);
4499 __ CompareRoot(r0, Heap::kFalseValueRootIndex); 4597 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
4500 Split(eq, if_true, if_false, fall_through); 4598 Split(eq, if_true, if_false, fall_through);
4501 } else if (String::Equals(check, factory->undefined_string())) { 4599 } else if (String::Equals(check, factory->undefined_string())) {
4502 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); 4600 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
4503 __ b(eq, if_true); 4601 __ beq(if_true);
4504 __ JumpIfSmi(r0, if_false); 4602 __ JumpIfSmi(r3, if_false);
4505 // Check for undetectable objects => true. 4603 // Check for undetectable objects => true.
4506 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4604 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4507 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4605 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4508 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4606 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4509 Split(ne, if_true, if_false, fall_through); 4607 Split(ne, if_true, if_false, fall_through, cr0);
4510 4608
4511 } else if (String::Equals(check, factory->function_string())) { 4609 } else if (String::Equals(check, factory->function_string())) {
4512 __ JumpIfSmi(r0, if_false); 4610 __ JumpIfSmi(r3, if_false);
4513 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 4611 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4514 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE); 4612 __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
4515 __ b(eq, if_true); 4613 __ beq(if_true);
4516 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE)); 4614 __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
4517 Split(eq, if_true, if_false, fall_through); 4615 Split(eq, if_true, if_false, fall_through);
4518 } else if (String::Equals(check, factory->object_string())) { 4616 } else if (String::Equals(check, factory->object_string())) {
4519 __ JumpIfSmi(r0, if_false); 4617 __ JumpIfSmi(r3, if_false);
4520 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4618 __ CompareRoot(r3, Heap::kNullValueRootIndex);
4521 __ b(eq, if_true); 4619 __ beq(if_true);
4522 // Check for JS objects => true. 4620 // Check for JS objects => true.
4523 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 4621 __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4524 __ b(lt, if_false); 4622 __ blt(if_false);
4525 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4623 __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4526 __ b(gt, if_false); 4624 __ bgt(if_false);
4527 // Check for undetectable objects => false. 4625 // Check for undetectable objects => false.
4528 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4626 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4529 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4627 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4530 Split(eq, if_true, if_false, fall_through); 4628 Split(eq, if_true, if_false, fall_through, cr0);
4531 } else { 4629 } else {
4532 if (if_false != fall_through) __ jmp(if_false); 4630 if (if_false != fall_through) __ b(if_false);
4533 } 4631 }
4534 context()->Plug(if_true, if_false); 4632 context()->Plug(if_true, if_false);
4535 } 4633 }
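
For readers tracing the branches above, here is a rough restatement (not V8 code) of the classification they implement. The struct fields are illustrative stand-ins for the smi check, the map's instance type, and the Map::kIsUndetectable bit that the andi/cr0 pairs test:

struct HeapValue {
  bool is_smi;
  enum Kind { kHeapNumber, kString, kSymbol, kBoolean, kUndefined, kNull,
              kFunction, kFunctionProxy, kOtherObject } kind;
  bool undetectable;  // document.all-style host objects
};

const char* TypeofString(const HeapValue& v) {
  if (v.is_smi || v.kind == HeapValue::kHeapNumber) return "number";
  if (v.undetectable) return "undefined";  // checked in several branches
  switch (v.kind) {
    case HeapValue::kString: return "string";
    case HeapValue::kSymbol: return "symbol";
    case HeapValue::kBoolean: return "boolean";
    case HeapValue::kUndefined: return "undefined";
    case HeapValue::kFunction:
    case HeapValue::kFunctionProxy: return "function";
    default: return "object";  // null and non-callable spec objects
  }
}

The main PPC-specific wrinkle is that andi. records its result in cr0, so those tests pass cr0 to Split instead of relying on an ARM-style flags register.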
4536 4634
4537 4635
4538 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 4636 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4539 Comment cmnt(masm_, "[ CompareOperation"); 4637 Comment cmnt(masm_, "[ CompareOperation");
4540 SetSourcePosition(expr->position()); 4638 SetSourcePosition(expr->position());
4541 4639
4542 // First we try a fast inlined version of the compare when one of 4640 // First we try a fast inlined version of the compare when one of
4543 // the operands is a literal. 4641 // the operands is a literal.
4544 if (TryLiteralCompare(expr)) return; 4642 if (TryLiteralCompare(expr)) return;
4545 4643
4546 // Always perform the comparison for its control flow. Pack the result 4644 // Always perform the comparison for its control flow. Pack the result
4547 // into the expression's context after the comparison is performed. 4645 // into the expression's context after the comparison is performed.
4548 Label materialize_true, materialize_false; 4646 Label materialize_true, materialize_false;
4549 Label* if_true = NULL; 4647 Label* if_true = NULL;
4550 Label* if_false = NULL; 4648 Label* if_false = NULL;
4551 Label* fall_through = NULL; 4649 Label* fall_through = NULL;
4552 context()->PrepareTest(&materialize_true, &materialize_false, 4650 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4553 &if_true, &if_false, &fall_through); 4651 &if_false, &fall_through);
4554 4652
4555 Token::Value op = expr->op(); 4653 Token::Value op = expr->op();
4556 VisitForStackValue(expr->left()); 4654 VisitForStackValue(expr->left());
4557 switch (op) { 4655 switch (op) {
4558 case Token::IN: 4656 case Token::IN:
4559 VisitForStackValue(expr->right()); 4657 VisitForStackValue(expr->right());
4560 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4658 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4561 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); 4659 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4562 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 4660 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4563 __ cmp(r0, ip); 4661 __ cmp(r3, ip);
4564 Split(eq, if_true, if_false, fall_through); 4662 Split(eq, if_true, if_false, fall_through);
4565 break; 4663 break;
4566 4664
4567 case Token::INSTANCEOF: { 4665 case Token::INSTANCEOF: {
4568 VisitForStackValue(expr->right()); 4666 VisitForStackValue(expr->right());
4569 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); 4667 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4570 __ CallStub(&stub); 4668 __ CallStub(&stub);
4571 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4669 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4572 // The stub returns 0 for true. 4670 // The stub returns 0 for true.
4573 __ tst(r0, r0); 4671 __ cmpi(r3, Operand::Zero());
4574 Split(eq, if_true, if_false, fall_through); 4672 Split(eq, if_true, if_false, fall_through);
4575 break; 4673 break;
4576 } 4674 }
4577 4675
4578 default: { 4676 default: {
4579 VisitForAccumulatorValue(expr->right()); 4677 VisitForAccumulatorValue(expr->right());
4580 Condition cond = CompareIC::ComputeCondition(op); 4678 Condition cond = CompareIC::ComputeCondition(op);
4581 __ pop(r1); 4679 __ pop(r4);
4582 4680
4583 bool inline_smi_code = ShouldInlineSmiCase(op); 4681 bool inline_smi_code = ShouldInlineSmiCase(op);
4584 JumpPatchSite patch_site(masm_); 4682 JumpPatchSite patch_site(masm_);
4585 if (inline_smi_code) { 4683 if (inline_smi_code) {
4586 Label slow_case; 4684 Label slow_case;
4587 __ orr(r2, r0, Operand(r1)); 4685 __ orx(r5, r3, r4);
4588 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 4686 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
4589 __ cmp(r1, r0); 4687 __ cmp(r4, r3);
4590 Split(cond, if_true, if_false, NULL); 4688 Split(cond, if_true, if_false, NULL);
4591 __ bind(&slow_case); 4689 __ bind(&slow_case);
4592 } 4690 }
4593 4691
4594 // Record position and call the compare IC. 4692 // Record position and call the compare IC.
4595 SetSourcePosition(expr->position()); 4693 SetSourcePosition(expr->position());
4596 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); 4694 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
4597 CallIC(ic, expr->CompareOperationFeedbackId()); 4695 CallIC(ic, expr->CompareOperationFeedbackId());
4598 patch_site.EmitPatchInfo(); 4696 patch_site.EmitPatchInfo();
4599 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4697 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4600 __ cmp(r0, Operand::Zero()); 4698 __ cmpi(r3, Operand::Zero());
4601 Split(cond, if_true, if_false, fall_through); 4699 Split(cond, if_true, if_false, fall_through);
4602 } 4700 }
4603 } 4701 }
4604 4702
4605 // Convert the result of the comparison into one expected for this 4703 // Convert the result of the comparison into one expected for this
4606 // expression's context. 4704 // expression's context.
4607 context()->Plug(if_true, if_false); 4705 context()->Plug(if_true, if_false);
4608 } 4706 }
4609 4707
4610 4708
4611 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, 4709 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4612 Expression* sub_expr, 4710 Expression* sub_expr,
4613 NilValue nil) { 4711 NilValue nil) {
4614 Label materialize_true, materialize_false; 4712 Label materialize_true, materialize_false;
4615 Label* if_true = NULL; 4713 Label* if_true = NULL;
4616 Label* if_false = NULL; 4714 Label* if_false = NULL;
4617 Label* fall_through = NULL; 4715 Label* fall_through = NULL;
4618 context()->PrepareTest(&materialize_true, &materialize_false, 4716 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4619 &if_true, &if_false, &fall_through); 4717 &if_false, &fall_through);
4620 4718
4621 VisitForAccumulatorValue(sub_expr); 4719 VisitForAccumulatorValue(sub_expr);
4622 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4720 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4623 if (expr->op() == Token::EQ_STRICT) { 4721 if (expr->op() == Token::EQ_STRICT) {
4624 Heap::RootListIndex nil_value = nil == kNullValue ? 4722 Heap::RootListIndex nil_value = nil == kNullValue
4625 Heap::kNullValueRootIndex : 4723 ? Heap::kNullValueRootIndex
4626 Heap::kUndefinedValueRootIndex; 4724 : Heap::kUndefinedValueRootIndex;
4627 __ LoadRoot(r1, nil_value); 4725 __ LoadRoot(r4, nil_value);
4628 __ cmp(r0, r1); 4726 __ cmp(r3, r4);
4629 Split(eq, if_true, if_false, fall_through); 4727 Split(eq, if_true, if_false, fall_through);
4630 } else { 4728 } else {
4631 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); 4729 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4632 CallIC(ic, expr->CompareOperationFeedbackId()); 4730 CallIC(ic, expr->CompareOperationFeedbackId());
4633 __ cmp(r0, Operand(0)); 4731 __ cmpi(r3, Operand::Zero());
4634 Split(ne, if_true, if_false, fall_through); 4732 Split(ne, if_true, if_false, fall_through);
4635 } 4733 }
4636 context()->Plug(if_true, if_false); 4734 context()->Plug(if_true, if_false);
4637 } 4735 }
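
As context for the two paths above: strict equality against a nil literal reduces to one pointer compare with the corresponding root value, while sloppy equality also has to accept the other nil and undetectable objects, which is why it goes through CompareNilICStub. A sketch of the semantics (not V8 code):

struct Value { bool is_null, is_undefined, undetectable; };

enum Nil { kNullLiteral, kUndefinedLiteral };

bool StrictEqualsNil(const Value& v, Nil nil) {
  // One root compare, as in the EQ_STRICT branch.
  return nil == kNullLiteral ? v.is_null : v.is_undefined;
}

bool SloppyEqualsNil(const Value& v) {
  // null == undefined, and undetectable host objects compare equal to both.
  return v.is_null || v.is_undefined || v.undetectable;
}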
4638 4736
4639 4737
4640 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 4738 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4641 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4739 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4642 context()->Plug(r0); 4740 context()->Plug(r3);
4643 } 4741 }
4644 4742
4645 4743
4646 Register FullCodeGenerator::result_register() { 4744 Register FullCodeGenerator::result_register() { return r3; }
4647 return r0;
4648 }
4649 4745
4650 4746
4651 Register FullCodeGenerator::context_register() { 4747 Register FullCodeGenerator::context_register() { return cp; }
4652 return cp;
4653 }
4654 4748
4655 4749
4656 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4750 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4657 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 4751 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
4658 __ str(value, MemOperand(fp, frame_offset)); 4752 __ StoreP(value, MemOperand(fp, frame_offset), r0);
4659 } 4753 }
4660 4754
4661 4755
4662 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4756 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4663 __ ldr(dst, ContextOperand(cp, context_index)); 4757 __ LoadP(dst, ContextOperand(cp, context_index), r0);
4664 } 4758 }
4665 4759
4666 4760
4667 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 4761 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4668 Scope* declaration_scope = scope()->DeclarationScope(); 4762 Scope* declaration_scope = scope()->DeclarationScope();
4669 if (declaration_scope->is_global_scope() || 4763 if (declaration_scope->is_global_scope() ||
4670 declaration_scope->is_module_scope()) { 4764 declaration_scope->is_module_scope()) {
4671 // Contexts nested in the native context have a canonical empty function 4765 // Contexts nested in the native context have a canonical empty function
4672 // as their closure, not the anonymous closure containing the global 4766 // as their closure, not the anonymous closure containing the global
4673 // code. Pass a smi sentinel and let the runtime look up the empty 4767 // code. Pass a smi sentinel and let the runtime look up the empty
4674 // function. 4768 // function.
4675 __ mov(ip, Operand(Smi::FromInt(0))); 4769 __ LoadSmiLiteral(ip, Smi::FromInt(0));
4676 } else if (declaration_scope->is_eval_scope()) { 4770 } else if (declaration_scope->is_eval_scope()) {
4677 // Contexts created by a call to eval have the same closure as the 4771 // Contexts created by a call to eval have the same closure as the
4678 // context calling eval, not the anonymous closure containing the eval 4772 // context calling eval, not the anonymous closure containing the eval
4679 // code. Fetch it from the context. 4773 // code. Fetch it from the context.
4680 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX)); 4774 __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4681 } else { 4775 } else {
4682 DCHECK(declaration_scope->is_function_scope()); 4776 DCHECK(declaration_scope->is_function_scope());
4683 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 4777 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4684 } 4778 }
4685 __ push(ip); 4779 __ push(ip);
4686 } 4780 }
4687 4781
4688 4782
4689 // ---------------------------------------------------------------------------- 4783 // ----------------------------------------------------------------------------
4690 // Non-local control flow support. 4784 // Non-local control flow support.
4691 4785
4692 void FullCodeGenerator::EnterFinallyBlock() { 4786 void FullCodeGenerator::EnterFinallyBlock() {
4693 DCHECK(!result_register().is(r1)); 4787 DCHECK(!result_register().is(r4));
4694 // Store result register while executing finally block. 4788 // Store result register while executing finally block.
4695 __ push(result_register()); 4789 __ push(result_register());
4696 // Cook return address in link register onto the stack (smi-encoded Code* delta) 4790 // Cook return address in link register onto the stack (smi-encoded Code* delta)
4697 __ sub(r1, lr, Operand(masm_->CodeObject())); 4791 __ mflr(r4);
4698 __ SmiTag(r1); 4792 __ mov(ip, Operand(masm_->CodeObject()));
4793 __ sub(r4, r4, ip);
4794 __ SmiTag(r4);
4699 4795
4700 // Store the cooked return address while executing the finally block. 4796 // Store the cooked return address while executing the finally block.
4701 __ push(r1); 4797 __ push(r4);
4702 4798
4703 // Store pending message while executing finally block. 4799 // Store pending message while executing finally block.
4704 ExternalReference pending_message_obj = 4800 ExternalReference pending_message_obj =
4705 ExternalReference::address_of_pending_message_obj(isolate()); 4801 ExternalReference::address_of_pending_message_obj(isolate());
4706 __ mov(ip, Operand(pending_message_obj)); 4802 __ mov(ip, Operand(pending_message_obj));
4707 __ ldr(r1, MemOperand(ip)); 4803 __ LoadP(r4, MemOperand(ip));
4708 __ push(r1); 4804 __ push(r4);
4709 4805
4710 ExternalReference has_pending_message = 4806 ExternalReference has_pending_message =
4711 ExternalReference::address_of_has_pending_message(isolate()); 4807 ExternalReference::address_of_has_pending_message(isolate());
4712 __ mov(ip, Operand(has_pending_message)); 4808 __ mov(ip, Operand(has_pending_message));
4713 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) 4809 __ lbz(r4, MemOperand(ip));
4714 __ ldrb(r1, MemOperand(ip)); 4810 __ SmiTag(r4);
4715 __ SmiTag(r1); 4811 __ push(r4);
4716 __ push(r1);
4717 4812
4718 ExternalReference pending_message_script = 4813 ExternalReference pending_message_script =
4719 ExternalReference::address_of_pending_message_script(isolate()); 4814 ExternalReference::address_of_pending_message_script(isolate());
4720 __ mov(ip, Operand(pending_message_script)); 4815 __ mov(ip, Operand(pending_message_script));
4721 __ ldr(r1, MemOperand(ip)); 4816 __ LoadP(r4, MemOperand(ip));
4722 __ push(r1); 4817 __ push(r4);
4723 } 4818 }
4724 4819
4725 4820
4726 void FullCodeGenerator::ExitFinallyBlock() { 4821 void FullCodeGenerator::ExitFinallyBlock() {
4727 DCHECK(!result_register().is(r1)); 4822 DCHECK(!result_register().is(r4));
4728 // Restore pending message from stack. 4823 // Restore pending message from stack.
4729 __ pop(r1); 4824 __ pop(r4);
4730 ExternalReference pending_message_script = 4825 ExternalReference pending_message_script =
4731 ExternalReference::address_of_pending_message_script(isolate()); 4826 ExternalReference::address_of_pending_message_script(isolate());
4732 __ mov(ip, Operand(pending_message_script)); 4827 __ mov(ip, Operand(pending_message_script));
4733 __ str(r1, MemOperand(ip)); 4828 __ StoreP(r4, MemOperand(ip));
4734 4829
4735 __ pop(r1); 4830 __ pop(r4);
4736 __ SmiUntag(r1); 4831 __ SmiUntag(r4);
4737 ExternalReference has_pending_message = 4832 ExternalReference has_pending_message =
4738 ExternalReference::address_of_has_pending_message(isolate()); 4833 ExternalReference::address_of_has_pending_message(isolate());
4739 __ mov(ip, Operand(has_pending_message)); 4834 __ mov(ip, Operand(has_pending_message));
4740 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) 4835 __ stb(r4, MemOperand(ip));
4741 __ strb(r1, MemOperand(ip));
4742 4836
4743 __ pop(r1); 4837 __ pop(r4);
4744 ExternalReference pending_message_obj = 4838 ExternalReference pending_message_obj =
4745 ExternalReference::address_of_pending_message_obj(isolate()); 4839 ExternalReference::address_of_pending_message_obj(isolate());
4746 __ mov(ip, Operand(pending_message_obj)); 4840 __ mov(ip, Operand(pending_message_obj));
4747 __ str(r1, MemOperand(ip)); 4841 __ StoreP(r4, MemOperand(ip));
4748 4842
4749 // Restore result register from stack. 4843 // Restore result register from stack.
4750 __ pop(r1); 4844 __ pop(r4);
4751 4845
4752 // Uncook return address and return. 4846 // Uncook return address and return.
4753 __ pop(result_register()); 4847 __ pop(result_register());
4754 __ SmiUntag(r1); 4848 __ SmiUntag(r4);
4755 __ add(pc, r1, Operand(masm_->CodeObject())); 4849 __ mov(ip, Operand(masm_->CodeObject()));
4850 __ add(ip, ip, r4);
4851 __ mtctr(ip);
4852 __ bctr();
4756 } 4853 }
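
The Enter/ExitFinallyBlock pair above "cooks" the return address: a raw lr value on the stack would look like an interior code pointer to the GC, so the code stores a smi-tagged offset from the code object instead and rebuilds the address (via mtctr/bctr on PPC) on the way out. A minimal sketch of the encoding (not V8 code; it assumes a one-bit smi tag in the low bit, a simplification of V8's actual smi layouts):

#include <cassert>
#include <cstdint>

const int kSmiTagShift = 1;  // assumed tag: low bit zero

uintptr_t Cook(uintptr_t return_address, uintptr_t code_object) {
  return (return_address - code_object) << kSmiTagShift;  // sub + SmiTag
}

uintptr_t Uncook(uintptr_t cooked, uintptr_t code_object) {
  return (cooked >> kSmiTagShift) + code_object;  // SmiUntag + add
}

int main() {
  uintptr_t code = 0x10000, ret = 0x10af4;
  assert(Uncook(Cook(ret, code), code) == ret);
  return 0;
}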
4757 4854
4758 4855
4759 #undef __ 4856 #undef __
4760 4857
4761 #define __ ACCESS_MASM(masm()) 4858 #define __ ACCESS_MASM(masm())
4762 4859
4763 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( 4860 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4764 int* stack_depth, 4861 int* stack_depth, int* context_length) {
4765 int* context_length) {
4766 // The macros used here must preserve the result register. 4862 // The macros used here must preserve the result register.
4767 4863
4768 // Because the handler block contains the context of the finally 4864 // Because the handler block contains the context of the finally
4769 // code, we can restore it directly from there for the finally code 4865 // code, we can restore it directly from there for the finally code
4770 // rather than iteratively unwinding contexts via their previous 4866 // rather than iteratively unwinding contexts via their previous
4771 // links. 4867 // links.
4772 __ Drop(*stack_depth); // Down to the handler block. 4868 __ Drop(*stack_depth); // Down to the handler block.
4773 if (*context_length > 0) { 4869 if (*context_length > 0) {
4774 // Restore the context to its dedicated register and the stack. 4870 // Restore the context to its dedicated register and the stack.
4775 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); 4871 __ LoadP(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4776 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4872 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4777 } 4873 }
4778 __ PopTryHandler(); 4874 __ PopTryHandler();
4779 __ bl(finally_entry_); 4875 __ b(finally_entry_, SetLK);
4780 4876
4781 *stack_depth = 0; 4877 *stack_depth = 0;
4782 *context_length = 0; 4878 *context_length = 0;
4783 return previous_; 4879 return previous_;
4784 } 4880 }
4785 4881
4786
4787 #undef __ 4882 #undef __
4788 4883
4789 4884
4790 static Address GetInterruptImmediateLoadAddress(Address pc) { 4885 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
4791 Address load_address = pc - 2 * Assembler::kInstrSize;
4792 if (!FLAG_enable_ool_constant_pool) {
4793 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
4794 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
4795 // This is an extended constant pool lookup.
4796 if (CpuFeatures::IsSupported(ARMv7)) {
4797 load_address -= 2 * Assembler::kInstrSize;
4798 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4799 DCHECK(Assembler::IsMovT(
4800 Memory::int32_at(load_address + Assembler::kInstrSize)));
4801 } else {
4802 load_address -= 4 * Assembler::kInstrSize;
4803 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4804 DCHECK(Assembler::IsOrrImmed(
4805 Memory::int32_at(load_address + Assembler::kInstrSize)));
4806 DCHECK(Assembler::IsOrrImmed(
4807 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4808 DCHECK(Assembler::IsOrrImmed(
4809 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
4810 }
4811 } else if (CpuFeatures::IsSupported(ARMv7) &&
4812 Assembler::IsMovT(Memory::int32_at(load_address))) {
4813 // This is a movw / movt immediate load.
4814 load_address -= Assembler::kInstrSize;
4815 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
4816 } else if (!CpuFeatures::IsSupported(ARMv7) &&
4817 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
4818 // This is a mov / orr immediate load.
4819 load_address -= 3 * Assembler::kInstrSize;
4820 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
4821 DCHECK(Assembler::IsOrrImmed(
4822 Memory::int32_at(load_address + Assembler::kInstrSize)));
4823 DCHECK(Assembler::IsOrrImmed(
4824 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
4825 } else {
4826 // This is a small constant pool lookup.
4827 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
4828 }
4829 return load_address;
4830 }
4831
4832
4833 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4834 Address pc,
4835 BackEdgeState target_state, 4886 BackEdgeState target_state,
4836 Code* replacement_code) { 4887 Code* replacement_code) {
4837 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); 4888 Address mov_address = Assembler::target_address_from_return_address(pc);
4838 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; 4889 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
4839 CodePatcher patcher(branch_address, 1); 4890 CodePatcher patcher(cmp_address, 1);
4891
4840 switch (target_state) { 4892 switch (target_state) {
4841 case INTERRUPT: 4893 case INTERRUPT: {
4842 {
4843 // <decrement profiling counter> 4894 // <decrement profiling counter>
4844 // bpl ok 4895 // cmpi r6, 0
4845 // ; load interrupt stub address into ip - either of (for ARMv7): 4896 // bge <ok> ;; not changed
4846 // ; <small cp load> | <extended cp load> | <immediate load> 4897 // mov r12, <interrupt stub address>
4847 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm 4898 // mtlr r12
4848 // | movt ip, #imm | movw ip, #imm 4899 // blrl
4849 // | ldr ip, [pp, ip]
4850 // ; or (for ARMv6):
4851 // ; <small cp load> | <extended cp load> | <immediate load>
4852 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4853 // | orr ip, ip, #imm> | orr ip, ip, #imm
4854 // | orr ip, ip, #imm> | orr ip, ip, #imm
4855 // | orr ip, ip, #imm> | orr ip, ip, #imm
4856 // blx ip
4857 // <reset profiling counter> 4900 // <reset profiling counter>
4858 // ok-label 4901 // ok-label
4859 4902 patcher.masm()->cmpi(r6, Operand::Zero());
4860 // Calculate branch offset to the ok-label - this is the difference
4861 // between the branch address and |pc| (which points at <blx ip>) plus
4862 // kProfileCounterResetSequence instructions
4863 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
4864 kProfileCounterResetSequenceLength;
4865 patcher.masm()->b(branch_offset, pl);
4866 break; 4903 break;
4867 } 4904 }
4868 case ON_STACK_REPLACEMENT: 4905 case ON_STACK_REPLACEMENT:
4869 case OSR_AFTER_STACK_CHECK: 4906 case OSR_AFTER_STACK_CHECK:
4870 // <decrement profiling counter> 4907 // <decrement profiling counter>
4871 // mov r0, r0 (NOP) 4908 // crset
4872 // ; load on-stack replacement address into ip - either of (for ARMv7): 4909 // bge <ok> ;; not changed
4873 // ; <small cp load> | <extended cp load> | <immediate load> 4910 // mov r12, <on-stack replacement address>
4874 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm 4911 // mtlr r12
4875 // | movt ip, #imm> | movw ip, #imm 4912 // blrl
4876 // | ldr ip, [pp, ip]
4877 // ; or (for ARMv6):
4878 // ; <small cp load> | <extended cp load> | <immediate load>
4879 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
4880 // | orr ip, ip, #imm> | orr ip, ip, #imm
4881 // | orr ip, ip, #imm> | orr ip, ip, #imm
4882 // | orr ip, ip, #imm> | orr ip, ip, #imm
4883 // blx ip
4884 // <reset profiling counter> 4913 // <reset profiling counter>
4885 // ok-label 4914 // ok-label ----- pc_after points here
4886 patcher.masm()->nop(); 4915
4916 // Set the LT bit such that bge is a NOP
4917 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
4887 break; 4918 break;
4888 } 4919 }
4889 4920
4890 // Replace the call address. 4921 // Replace the stack check address in the mov sequence with the
4891 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, 4922 // entry address of the replacement code.
4892 replacement_code->entry()); 4923 Assembler::set_target_address_at(mov_address, unoptimized_code,
4924 replacement_code->entry());
4893 4925
4894 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( 4926 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4895 unoptimized_code, pc_immediate_load_address, replacement_code); 4927 unoptimized_code, mov_address, replacement_code);
4896 } 4928 }
4897 4929
4898 4930
4899 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( 4931 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4900 Isolate* isolate, 4932 Isolate* isolate, Code* unoptimized_code, Address pc) {
4901 Code* unoptimized_code, 4933 Address mov_address = Assembler::target_address_from_return_address(pc);
4902 Address pc) { 4934 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
4903 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize))); 4935 Address interrupt_address =
4936 Assembler::target_address_at(mov_address, unoptimized_code);
4904 4937
4905 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); 4938 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
4906 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; 4939 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
4907 Address interrupt_address = Assembler::target_address_at(
4908 pc_immediate_load_address, unoptimized_code);
4909
4910 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
4911 DCHECK(interrupt_address ==
4912 isolate->builtins()->InterruptCheck()->entry());
4913 return INTERRUPT; 4940 return INTERRUPT;
4914 } 4941 }
4915 4942
4916 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address))); 4943 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
4917 4944
4918 if (interrupt_address == 4945 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
4919 isolate->builtins()->OnStackReplacement()->entry()) {
4920 return ON_STACK_REPLACEMENT; 4946 return ON_STACK_REPLACEMENT;
4921 } 4947 }
4922 4948
4923 DCHECK(interrupt_address == 4949 DCHECK(interrupt_address ==
4924 isolate->builtins()->OsrAfterStackCheck()->entry()); 4950 isolate->builtins()->OsrAfterStackCheck()->entry());
4925 return OSR_AFTER_STACK_CHECK; 4951 return OSR_AFTER_STACK_CHECK;
4926 } 4952 }
4953 }
4954 } // namespace v8::internal
4927 4955
4928 4956 #endif // V8_TARGET_ARCH_PPC
4929 } } // namespace v8::internal
4930
4931 #endif // V8_TARGET_ARCH_ARM