Chromium Code Reviews

Side by Side Diff: src/ppc/full-codegen-ppc.cc

Issue 714093002: PowerPC specific sub-directories. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 1 month ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM 7 #if V8_TARGET_ARCH_PPC
8 8
9 #include "src/code-factory.h" 9 #include "src/code-factory.h"
10 #include "src/code-stubs.h" 10 #include "src/code-stubs.h"
11 #include "src/codegen.h" 11 #include "src/codegen.h"
12 #include "src/compiler.h" 12 #include "src/compiler.h"
13 #include "src/debug.h" 13 #include "src/debug.h"
14 #include "src/full-codegen.h" 14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h" 15 #include "src/ic/ic.h"
16 #include "src/isolate-inl.h" 16 #include "src/isolate-inl.h"
17 #include "src/parser.h" 17 #include "src/parser.h"
18 #include "src/scopes.h" 18 #include "src/scopes.h"
19 19
20 #include "src/arm/code-stubs-arm.h" 20 #include "src/ppc/code-stubs-ppc.h"
21 #include "src/arm/macro-assembler-arm.h" 21 #include "src/ppc/macro-assembler-ppc.h"
22 22
23 namespace v8 { 23 namespace v8 {
24 namespace internal { 24 namespace internal {
25 25
26 #define __ ACCESS_MASM(masm_) 26 #define __ ACCESS_MASM(masm_)
27 27
28
29 // A patch site is a location in the code which it is possible to patch. This 28 // A patch site is a location in the code which it is possible to patch. This
30 // class has a number of methods to emit the code which is patchable and the 29 // class has a number of methods to emit the code which is patchable and the
31 // method EmitPatchInfo to record a marker back to the patchable code. This 30 // method EmitPatchInfo to record a marker back to the patchable code. This
32 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit 31 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit
33 // immediate value is used) is the delta from the pc to the first instruction of 32 // immediate value is used) is the delta from the pc to the first instruction of
34 // the patchable code. 33 // the patchable code.
34 // See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
35 class JumpPatchSite BASE_EMBEDDED { 35 class JumpPatchSite BASE_EMBEDDED {
36 public: 36 public:
37 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { 37 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
38 #ifdef DEBUG 38 #ifdef DEBUG
39 info_emitted_ = false; 39 info_emitted_ = false;
40 #endif 40 #endif
41 } 41 }
42 42
43 ~JumpPatchSite() { 43 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
44 DCHECK(patch_site_.is_bound() == info_emitted_);
45 }
46 44
47 // When initially emitting this ensure that a jump is always generated to skip 45 // When initially emitting this ensure that a jump is always generated to skip
48 // the inlined smi code. 46 // the inlined smi code.
49 void EmitJumpIfNotSmi(Register reg, Label* target) { 47 void EmitJumpIfNotSmi(Register reg, Label* target) {
50 DCHECK(!patch_site_.is_bound() && !info_emitted_); 48 DCHECK(!patch_site_.is_bound() && !info_emitted_);
51 Assembler::BlockConstPoolScope block_const_pool(masm_); 49 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
52 __ bind(&patch_site_); 50 __ bind(&patch_site_);
53 __ cmp(reg, Operand(reg)); 51 __ cmp(reg, reg, cr0);
54 __ b(eq, target); // Always taken before patched. 52 __ beq(target, cr0); // Always taken before patched.
55 } 53 }
56 54
57 // When initially emitting this ensure that a jump is never generated to skip 55 // When initially emitting this ensure that a jump is never generated to skip
58 // the inlined smi code. 56 // the inlined smi code.
59 void EmitJumpIfSmi(Register reg, Label* target) { 57 void EmitJumpIfSmi(Register reg, Label* target) {
58 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
60 DCHECK(!patch_site_.is_bound() && !info_emitted_); 59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
61 Assembler::BlockConstPoolScope block_const_pool(masm_);
62 __ bind(&patch_site_); 60 __ bind(&patch_site_);
63 __ cmp(reg, Operand(reg)); 61 __ cmp(reg, reg, cr0);
64 __ b(ne, target); // Never taken before patched. 62 __ bne(target, cr0); // Never taken before patched.
65 } 63 }
66 64
67 void EmitPatchInfo() { 65 void EmitPatchInfo() {
68 // Block literal pool emission whilst recording patch site information.
69 Assembler::BlockConstPoolScope block_const_pool(masm_);
70 if (patch_site_.is_bound()) { 66 if (patch_site_.is_bound()) {
71 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); 67 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
72 Register reg; 68 Register reg;
 73 reg.set_code(delta_to_patch_site / kOff12Mask); 69 // I believe this is using reg as the high bits of the offset
74 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask); 70 reg.set_code(delta_to_patch_site / kOff16Mask);
71 __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
75 #ifdef DEBUG 72 #ifdef DEBUG
76 info_emitted_ = true; 73 info_emitted_ = true;
77 #endif 74 #endif
78 } else { 75 } else {
79 __ nop(); // Signals no inlined code. 76 __ nop(); // Signals no inlined code.
80 } 77 }
81 } 78 }
82 79
83 private: 80 private:
84 MacroAssembler* masm_; 81 MacroAssembler* masm_;
85 Label patch_site_; 82 Label patch_site_;
86 #ifdef DEBUG 83 #ifdef DEBUG
87 bool info_emitted_; 84 bool info_emitted_;
88 #endif 85 #endif
89 }; 86 };
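
For reference, the marker encoding described in the class comment round-trips like this; a minimal sketch assuming kOff16Mask is 0xffff, with helper names invented for illustration, not taken from the patch:

int EncodeRegCode(int delta) { return delta / 0xffff; }  // register field of cmpi
int EncodeImm16(int delta) { return delta % 0xffff; }    // 16-bit immediate of cmpi
int DecodeDelta(int reg_code, int imm16) {               // what PatchInlinedSmiCode recovers
  return reg_code * 0xffff + imm16;
}
// e.g. a delta of 40 instructions encodes as cmpi r0, #40 and decodes back to 40.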
90 87
91 88
92 // Generate code for a JS function. On entry to the function the receiver 89 // Generate code for a JS function. On entry to the function the receiver
93 // and arguments have been pushed on the stack left to right. The actual 90 // and arguments have been pushed on the stack left to right. The actual
94 // argument count matches the formal parameter count expected by the 91 // argument count matches the formal parameter count expected by the
95 // function. 92 // function.
96 // 93 //
97 // The live registers are: 94 // The live registers are:
98 // o r1: the JS function object being called (i.e., ourselves) 95 // o r4: the JS function object being called (i.e., ourselves)
99 // o cp: our context 96 // o cp: our context
100 // o pp: our caller's constant pool pointer (if FLAG_enable_ool_constant_pool) 97 // o fp: our caller's frame pointer (aka r31)
101 // o fp: our caller's frame pointer
102 // o sp: stack pointer 98 // o sp: stack pointer
103 // o lr: return address 99 // o lr: return address
100 // o ip: our own function entry (required by the prologue)
104 // 101 //
105 // The function builds a JS frame. Please see JavaScriptFrameConstants in 102 // The function builds a JS frame. Please see JavaScriptFrameConstants in
106 // frames-arm.h for its layout. 103 // frames-ppc.h for its layout.
107 void FullCodeGenerator::Generate() { 104 void FullCodeGenerator::Generate() {
108 CompilationInfo* info = info_; 105 CompilationInfo* info = info_;
109 handler_table_ = 106 handler_table_ =
110 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 107 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
111 108
112 profiling_counter_ = isolate()->factory()->NewCell( 109 profiling_counter_ = isolate()->factory()->NewCell(
113 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); 110 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
114 SetFunctionPosition(function()); 111 SetFunctionPosition(function());
115 Comment cmnt(masm_, "[ function compiled by full code generator"); 112 Comment cmnt(masm_, "[ function compiled by full code generator");
116 113
117 ProfileEntryHookStub::MaybeCallEntryHook(masm_); 114 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
118 115
119 #ifdef DEBUG 116 #ifdef DEBUG
120 if (strlen(FLAG_stop_at) > 0 && 117 if (strlen(FLAG_stop_at) > 0 &&
121 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { 118 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
122 __ stop("stop-at"); 119 __ stop("stop-at");
123 } 120 }
124 #endif 121 #endif
125 122
126 // Sloppy mode functions and builtins need to replace the receiver with the 123 // Sloppy mode functions and builtins need to replace the receiver with the
127 // global proxy when called as functions (without an explicit receiver 124 // global proxy when called as functions (without an explicit receiver
128 // object). 125 // object).
129 if (info->strict_mode() == SLOPPY && !info->is_native()) { 126 if (info->strict_mode() == SLOPPY && !info->is_native()) {
130 Label ok; 127 Label ok;
131 int receiver_offset = info->scope()->num_parameters() * kPointerSize; 128 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
132 __ ldr(r2, MemOperand(sp, receiver_offset)); 129 __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
133 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); 130 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
134 __ b(ne, &ok); 131 __ bne(&ok);
135 132
136 __ ldr(r2, GlobalObjectOperand()); 133 __ LoadP(r5, GlobalObjectOperand());
137 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset)); 134 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
138 135
139 __ str(r2, MemOperand(sp, receiver_offset)); 136 __ StoreP(r5, MemOperand(sp, receiver_offset), r0);
140 137
141 __ bind(&ok); 138 __ bind(&ok);
142 } 139 }
143 140
144 // Open a frame scope to indicate that there is a frame on the stack. The 141 // Open a frame scope to indicate that there is a frame on the stack. The
145 // MANUAL indicates that the scope shouldn't actually generate code to set up 142 // MANUAL indicates that the scope shouldn't actually generate code to set up
146 // the frame (that is done below). 143 // the frame (that is done below).
147 FrameScope frame_scope(masm_, StackFrame::MANUAL); 144 FrameScope frame_scope(masm_, StackFrame::MANUAL);
145 int prologue_offset = masm_->pc_offset();
148 146
149 info->set_prologue_offset(masm_->pc_offset()); 147 if (prologue_offset) {
 150 __ Prologue(info->IsCodePreAgingActive()); 148 // Prologue logic requires its starting address in ip and the
149 // corresponding offset from the function entry.
150 prologue_offset += Instruction::kInstrSize;
151 __ addi(ip, ip, Operand(prologue_offset));
152 }
153 info->set_prologue_offset(prologue_offset);
154 __ Prologue(info->IsCodePreAgingActive(), prologue_offset);
151 info->AddNoFrameRange(0, masm_->pc_offset()); 155 info->AddNoFrameRange(0, masm_->pc_offset());
152 156
153 { Comment cmnt(masm_, "[ Allocate locals"); 157 {
158 Comment cmnt(masm_, "[ Allocate locals");
154 int locals_count = info->scope()->num_stack_slots(); 159 int locals_count = info->scope()->num_stack_slots();
155 // Generators allocate locals, if any, in context slots. 160 // Generators allocate locals, if any, in context slots.
156 DCHECK(!info->function()->is_generator() || locals_count == 0); 161 DCHECK(!info->function()->is_generator() || locals_count == 0);
157 if (locals_count > 0) { 162 if (locals_count > 0) {
158 if (locals_count >= 128) { 163 if (locals_count >= 128) {
159 Label ok; 164 Label ok;
160 __ sub(r9, sp, Operand(locals_count * kPointerSize)); 165 __ Add(ip, sp, -(locals_count * kPointerSize), r0);
161 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex); 166 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
162 __ cmp(r9, Operand(r2)); 167 __ cmpl(ip, r5);
163 __ b(hs, &ok); 168 __ bc_short(ge, &ok);
164 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); 169 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
165 __ bind(&ok); 170 __ bind(&ok);
166 } 171 }
167 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex); 172 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
168 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; 173 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
169 if (locals_count >= kMaxPushes) { 174 if (locals_count >= kMaxPushes) {
170 int loop_iterations = locals_count / kMaxPushes; 175 int loop_iterations = locals_count / kMaxPushes;
171 __ mov(r2, Operand(loop_iterations)); 176 __ mov(r5, Operand(loop_iterations));
177 __ mtctr(r5);
172 Label loop_header; 178 Label loop_header;
173 __ bind(&loop_header); 179 __ bind(&loop_header);
174 // Do pushes. 180 // Do pushes.
175 for (int i = 0; i < kMaxPushes; i++) { 181 for (int i = 0; i < kMaxPushes; i++) {
176 __ push(r9); 182 __ push(ip);
177 } 183 }
178 // Continue loop if not done. 184 // Continue loop if not done.
179 __ sub(r2, r2, Operand(1), SetCC); 185 __ bdnz(&loop_header);
180 __ b(&loop_header, ne);
181 } 186 }
182 int remaining = locals_count % kMaxPushes; 187 int remaining = locals_count % kMaxPushes;
183 // Emit the remaining pushes. 188 // Emit the remaining pushes.
184 for (int i = 0; i < remaining; i++) { 189 for (int i = 0; i < remaining; i++) {
185 __ push(r9); 190 __ push(ip);
186 } 191 }
187 } 192 }
188 } 193 }
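
The push loop above drives the repeated pushes of undefined through the count register (mtctr/bdnz) instead of the ARM sub/branch pair; a minimal sketch of the schedule arithmetic, with the function name and example values invented for illustration:

// With kMaxPushes == 32 and locals_count == 70: the ctr loop runs twice
// (64 pushes) and the remaining 6 pushes are emitted straight-line.
void PushSchedule(int locals_count, int max_pushes, int* loop_iterations,
                  int* remaining) {
  *loop_iterations = locals_count / max_pushes;  // 70 / 32 == 2
  *remaining = locals_count % max_pushes;        // 70 % 32 == 6
}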
189 194
190 bool function_in_register = true; 195 bool function_in_register = true;
191 196
192 // Possibly allocate a local context. 197 // Possibly allocate a local context.
193 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; 198 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
194 if (heap_slots > 0) { 199 if (heap_slots > 0) {
195 // Argument to NewContext is the function, which is still in r1. 200 // Argument to NewContext is the function, which is still in r4.
196 Comment cmnt(masm_, "[ Allocate context"); 201 Comment cmnt(masm_, "[ Allocate context");
197 bool need_write_barrier = true; 202 bool need_write_barrier = true;
198 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { 203 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
199 __ push(r1); 204 __ push(r4);
200 __ Push(info->scope()->GetScopeInfo()); 205 __ Push(info->scope()->GetScopeInfo());
201 __ CallRuntime(Runtime::kNewGlobalContext, 2); 206 __ CallRuntime(Runtime::kNewGlobalContext, 2);
202 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { 207 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
203 FastNewContextStub stub(isolate(), heap_slots); 208 FastNewContextStub stub(isolate(), heap_slots);
204 __ CallStub(&stub); 209 __ CallStub(&stub);
205 // Result of FastNewContextStub is always in new space. 210 // Result of FastNewContextStub is always in new space.
206 need_write_barrier = false; 211 need_write_barrier = false;
207 } else { 212 } else {
208 __ push(r1); 213 __ push(r4);
209 __ CallRuntime(Runtime::kNewFunctionContext, 1); 214 __ CallRuntime(Runtime::kNewFunctionContext, 1);
210 } 215 }
211 function_in_register = false; 216 function_in_register = false;
212 // Context is returned in r0. It replaces the context passed to us. 217 // Context is returned in r3. It replaces the context passed to us.
213 // It's saved in the stack and kept live in cp. 218 // It's saved in the stack and kept live in cp.
214 __ mov(cp, r0); 219 __ mr(cp, r3);
215 __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset)); 220 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
216 // Copy any necessary parameters into the context. 221 // Copy any necessary parameters into the context.
217 int num_parameters = info->scope()->num_parameters(); 222 int num_parameters = info->scope()->num_parameters();
218 for (int i = 0; i < num_parameters; i++) { 223 for (int i = 0; i < num_parameters; i++) {
219 Variable* var = scope()->parameter(i); 224 Variable* var = scope()->parameter(i);
220 if (var->IsContextSlot()) { 225 if (var->IsContextSlot()) {
221 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 226 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
222 (num_parameters - 1 - i) * kPointerSize; 227 (num_parameters - 1 - i) * kPointerSize;
223 // Load parameter from stack. 228 // Load parameter from stack.
224 __ ldr(r0, MemOperand(fp, parameter_offset)); 229 __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
225 // Store it in the context. 230 // Store it in the context.
226 MemOperand target = ContextOperand(cp, var->index()); 231 MemOperand target = ContextOperand(cp, var->index());
227 __ str(r0, target); 232 __ StoreP(r3, target, r0);
228 233
229 // Update the write barrier. 234 // Update the write barrier.
230 if (need_write_barrier) { 235 if (need_write_barrier) {
231 __ RecordWriteContextSlot( 236 __ RecordWriteContextSlot(cp, target.offset(), r3, r6,
232 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs); 237 kLRHasBeenSaved, kDontSaveFPRegs);
233 } else if (FLAG_debug_code) { 238 } else if (FLAG_debug_code) {
234 Label done; 239 Label done;
235 __ JumpIfInNewSpace(cp, r0, &done); 240 __ JumpIfInNewSpace(cp, r3, &done);
236 __ Abort(kExpectedNewSpaceObject); 241 __ Abort(kExpectedNewSpaceObject);
237 __ bind(&done); 242 __ bind(&done);
238 } 243 }
239 } 244 }
240 } 245 }
241 } 246 }
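
A hedged summary of the three-way context allocation choice above; the enum and function names are invented for illustration:

enum class ContextAlloc { kRuntimeGlobal, kFastStub, kRuntimeFunction };
ContextAlloc ChooseContextAlloc(bool harmony_global_scope, int heap_slots,
                                int stub_max_slots) {
  if (harmony_global_scope) return ContextAlloc::kRuntimeGlobal;
  // FastNewContextStub allocates in new space, so no write barrier is needed.
  if (heap_slots <= stub_max_slots) return ContextAlloc::kFastStub;
  return ContextAlloc::kRuntimeFunction;  // runtime call; barrier may be needed
}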
242 247
243 Variable* arguments = scope()->arguments(); 248 Variable* arguments = scope()->arguments();
244 if (arguments != NULL) { 249 if (arguments != NULL) {
245 // Function uses arguments object. 250 // Function uses arguments object.
246 Comment cmnt(masm_, "[ Allocate arguments object"); 251 Comment cmnt(masm_, "[ Allocate arguments object");
247 if (!function_in_register) { 252 if (!function_in_register) {
248 // Load this again, if it's used by the local context below. 253 // Load this again, if it's used by the local context below.
249 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 254 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
250 } else { 255 } else {
251 __ mov(r3, r1); 256 __ mr(r6, r4);
252 } 257 }
253 // Receiver is just before the parameters on the caller's stack. 258 // Receiver is just before the parameters on the caller's stack.
254 int num_parameters = info->scope()->num_parameters(); 259 int num_parameters = info->scope()->num_parameters();
255 int offset = num_parameters * kPointerSize; 260 int offset = num_parameters * kPointerSize;
256 __ add(r2, fp, 261 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
257 Operand(StandardFrameConstants::kCallerSPOffset + offset)); 262 __ LoadSmiLiteral(r4, Smi::FromInt(num_parameters));
258 __ mov(r1, Operand(Smi::FromInt(num_parameters))); 263 __ Push(r6, r5, r4);
259 __ Push(r3, r2, r1);
260 264
261 // Arguments to ArgumentsAccessStub: 265 // Arguments to ArgumentsAccessStub:
262 // function, receiver address, parameter count. 266 // function, receiver address, parameter count.
 263 // The stub will rewrite receiver and parameter count if the previous 267 // The stub will rewrite receiver and parameter count if the previous
264 // stack frame was an arguments adapter frame. 268 // stack frame was an arguments adapter frame.
265 ArgumentsAccessStub::Type type; 269 ArgumentsAccessStub::Type type;
266 if (strict_mode() == STRICT) { 270 if (strict_mode() == STRICT) {
267 type = ArgumentsAccessStub::NEW_STRICT; 271 type = ArgumentsAccessStub::NEW_STRICT;
268 } else if (function()->has_duplicate_parameters()) { 272 } else if (function()->has_duplicate_parameters()) {
269 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; 273 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
270 } else { 274 } else {
271 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; 275 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
272 } 276 }
273 ArgumentsAccessStub stub(isolate(), type); 277 ArgumentsAccessStub stub(isolate(), type);
274 __ CallStub(&stub); 278 __ CallStub(&stub);
275 279
276 SetVar(arguments, r0, r1, r2); 280 SetVar(arguments, r3, r4, r5);
277 } 281 }
278 282
279 if (FLAG_trace) { 283 if (FLAG_trace) {
280 __ CallRuntime(Runtime::kTraceEnter, 0); 284 __ CallRuntime(Runtime::kTraceEnter, 0);
281 } 285 }
282 286
283 // Visit the declarations and body unless there is an illegal 287 // Visit the declarations and body unless there is an illegal
284 // redeclaration. 288 // redeclaration.
285 if (scope()->HasIllegalRedeclaration()) { 289 if (scope()->HasIllegalRedeclaration()) {
286 Comment cmnt(masm_, "[ Declarations"); 290 Comment cmnt(masm_, "[ Declarations");
287 scope()->VisitIllegalRedeclaration(this); 291 scope()->VisitIllegalRedeclaration(this);
288 292
289 } else { 293 } else {
290 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); 294 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
291 { Comment cmnt(masm_, "[ Declarations"); 295 {
296 Comment cmnt(masm_, "[ Declarations");
292 // For named function expressions, declare the function name as a 297 // For named function expressions, declare the function name as a
293 // constant. 298 // constant.
294 if (scope()->is_function_scope() && scope()->function() != NULL) { 299 if (scope()->is_function_scope() && scope()->function() != NULL) {
295 VariableDeclaration* function = scope()->function(); 300 VariableDeclaration* function = scope()->function();
296 DCHECK(function->proxy()->var()->mode() == CONST || 301 DCHECK(function->proxy()->var()->mode() == CONST ||
297 function->proxy()->var()->mode() == CONST_LEGACY); 302 function->proxy()->var()->mode() == CONST_LEGACY);
298 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED); 303 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
299 VisitVariableDeclaration(function); 304 VisitVariableDeclaration(function);
300 } 305 }
301 VisitDeclarations(scope()->declarations()); 306 VisitDeclarations(scope()->declarations());
302 } 307 }
303 308
304 { Comment cmnt(masm_, "[ Stack check"); 309 {
310 Comment cmnt(masm_, "[ Stack check");
305 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); 311 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
306 Label ok; 312 Label ok;
307 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 313 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
308 __ cmp(sp, Operand(ip)); 314 __ cmpl(sp, ip);
309 __ b(hs, &ok); 315 __ bc_short(ge, &ok);
310 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); 316 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
311 PredictableCodeSizeScope predictable(masm_,
312 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
313 __ Call(stack_check, RelocInfo::CODE_TARGET);
314 __ bind(&ok); 317 __ bind(&ok);
315 } 318 }
316 319
317 { Comment cmnt(masm_, "[ Body"); 320 {
321 Comment cmnt(masm_, "[ Body");
318 DCHECK(loop_depth() == 0); 322 DCHECK(loop_depth() == 0);
319 VisitStatements(function()->body()); 323 VisitStatements(function()->body());
320 DCHECK(loop_depth() == 0); 324 DCHECK(loop_depth() == 0);
321 } 325 }
322 } 326 }
323 327
324 // Always emit a 'return undefined' in case control fell off the end of 328 // Always emit a 'return undefined' in case control fell off the end of
325 // the body. 329 // the body.
326 { Comment cmnt(masm_, "[ return <undefined>;"); 330 {
327 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 331 Comment cmnt(masm_, "[ return <undefined>;");
332 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
328 } 333 }
329 EmitReturnSequence(); 334 EmitReturnSequence();
330
331 // Force emit the constant pool, so it doesn't get emitted in the middle
332 // of the back edge table.
333 masm()->CheckConstPool(true, false);
334 } 335 }
335 336
336 337
337 void FullCodeGenerator::ClearAccumulator() { 338 void FullCodeGenerator::ClearAccumulator() {
338 __ mov(r0, Operand(Smi::FromInt(0))); 339 __ LoadSmiLiteral(r3, Smi::FromInt(0));
339 } 340 }
340 341
341 342
342 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { 343 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
343 __ mov(r2, Operand(profiling_counter_)); 344 __ mov(r5, Operand(profiling_counter_));
344 __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset)); 345 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
345 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); 346 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
346 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset)); 347 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
347 } 348 }
348 349
349 350
350 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
351 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
352 #else
353 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
354 #endif
355
356
357 void FullCodeGenerator::EmitProfilingCounterReset() { 351 void FullCodeGenerator::EmitProfilingCounterReset() {
358 Assembler::BlockConstPoolScope block_const_pool(masm_);
359 PredictableCodeSizeScope predictable_code_size_scope(
360 masm_, kProfileCounterResetSequenceLength);
361 Label start;
362 __ bind(&start);
363 int reset_value = FLAG_interrupt_budget; 352 int reset_value = FLAG_interrupt_budget;
364 if (info_->is_debug()) { 353 if (info_->is_debug()) {
365 // Detect debug break requests as soon as possible. 354 // Detect debug break requests as soon as possible.
366 reset_value = FLAG_interrupt_budget >> 4; 355 reset_value = FLAG_interrupt_budget >> 4;
367 } 356 }
368 __ mov(r2, Operand(profiling_counter_)); 357 __ mov(r5, Operand(profiling_counter_));
369 // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5 358 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
370 // instructions (for ARMv6) depending upon whether it is an extended constant 359 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
371 // pool - insert nop to compensate.
372 int expected_instr_count =
373 (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
374 DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
375 while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
376 __ nop();
377 }
378 __ mov(r3, Operand(Smi::FromInt(reset_value)));
379 __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
380 } 360 }
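
Taken together, the two helpers above maintain the interrupt budget stored in profiling_counter_ (a Cell holding a Smi); a minimal host-side model, with the struct and member names invented for illustration:

struct ProfilingCounterModel {
  int value;
  void Decrement(int delta) { value -= delta; }   // EmitProfilingCounterDecrement
  void Reset(bool is_debug, int budget) {         // EmitProfilingCounterReset
    // In debug mode the budget is cut 16x so break requests are noticed sooner.
    value = is_debug ? budget >> 4 : budget;
  }
  bool InterruptDue() const { return value < 0; } // the cmpi/bge check below
};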
381 361
382 362
383 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, 363 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
384 Label* back_edge_target) { 364 Label* back_edge_target) {
385 Comment cmnt(masm_, "[ Back edge bookkeeping"); 365 Comment cmnt(masm_, "[ Back edge bookkeeping");
386 // Block literal pools whilst emitting back edge code.
387 Assembler::BlockConstPoolScope block_const_pool(masm_);
388 Label ok; 366 Label ok;
389 367
390 DCHECK(back_edge_target->is_bound()); 368 DCHECK(back_edge_target->is_bound());
391 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); 369 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
392 int weight = Min(kMaxBackEdgeWeight, 370 kCodeSizeMultiplier / 2;
393 Max(1, distance / kCodeSizeMultiplier)); 371 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
394 EmitProfilingCounterDecrement(weight); 372 EmitProfilingCounterDecrement(weight);
395 __ b(pl, &ok); 373 {
396 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); 374 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
375 // BackEdgeTable::PatchAt manipulates this sequence.
376 __ cmpi(r6, Operand::Zero());
377 __ bc_short(ge, &ok);
378 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
397 379
398 // Record a mapping of this PC offset to the OSR id. This is used to find 380 // Record a mapping of this PC offset to the OSR id. This is used to find
399 // the AST id from the unoptimized code in order to use it as a key into 381 // the AST id from the unoptimized code in order to use it as a key into
400 // the deoptimization input data found in the optimized code. 382 // the deoptimization input data found in the optimized code.
401 RecordBackEdge(stmt->OsrEntryId()); 383 RecordBackEdge(stmt->OsrEntryId());
402 384 }
403 EmitProfilingCounterReset(); 385 EmitProfilingCounterReset();
404 386
405 __ bind(&ok); 387 __ bind(&ok);
406 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 388 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
407 // Record a mapping of the OSR id to this PC. This is used if the OSR 389 // Record a mapping of the OSR id to this PC. This is used if the OSR
408 // entry becomes the target of a bailout. We don't expect it to be, but 390 // entry becomes the target of a bailout. We don't expect it to be, but
409 // we want it to work if it is. 391 // we want it to work if it is.
410 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 392 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
411 } 393 }
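
The back-edge weight above divides code distance by kCodeSizeMultiplier; the PPC version first adds kCodeSizeMultiplier / 2 so the division rounds to nearest rather than down. A minimal sketch, with the function name invented for illustration:

#include <algorithm>

int BackEdgeWeight(int distance, int code_size_multiplier, int max_weight) {
  distance += code_size_multiplier / 2;  // round-to-nearest bias (PPC change)
  return std::min(max_weight, std::max(1, distance / code_size_multiplier));
}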
412 394
413 395
414 void FullCodeGenerator::EmitReturnSequence() { 396 void FullCodeGenerator::EmitReturnSequence() {
415 Comment cmnt(masm_, "[ Return sequence"); 397 Comment cmnt(masm_, "[ Return sequence");
416 if (return_label_.is_bound()) { 398 if (return_label_.is_bound()) {
417 __ b(&return_label_); 399 __ b(&return_label_);
418 } else { 400 } else {
419 __ bind(&return_label_); 401 __ bind(&return_label_);
420 if (FLAG_trace) { 402 if (FLAG_trace) {
421 // Push the return value on the stack as the parameter. 403 // Push the return value on the stack as the parameter.
 422 // Runtime::TraceExit returns its parameter in r0. 404 // Runtime::TraceExit returns its parameter in r3.
423 __ push(r0); 405 __ push(r3);
424 __ CallRuntime(Runtime::kTraceExit, 1); 406 __ CallRuntime(Runtime::kTraceExit, 1);
425 } 407 }
426 // Pretend that the exit is a backwards jump to the entry. 408 // Pretend that the exit is a backwards jump to the entry.
427 int weight = 1; 409 int weight = 1;
428 if (info_->ShouldSelfOptimize()) { 410 if (info_->ShouldSelfOptimize()) {
429 weight = FLAG_interrupt_budget / FLAG_self_opt_count; 411 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
430 } else { 412 } else {
431 int distance = masm_->pc_offset(); 413 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
432 weight = Min(kMaxBackEdgeWeight, 414 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
433 Max(1, distance / kCodeSizeMultiplier));
434 } 415 }
435 EmitProfilingCounterDecrement(weight); 416 EmitProfilingCounterDecrement(weight);
436 Label ok; 417 Label ok;
437 __ b(pl, &ok); 418 __ cmpi(r6, Operand::Zero());
438 __ push(r0); 419 __ bge(&ok);
439 __ Call(isolate()->builtins()->InterruptCheck(), 420 __ push(r3);
440 RelocInfo::CODE_TARGET); 421 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
441 __ pop(r0); 422 __ pop(r3);
442 EmitProfilingCounterReset(); 423 EmitProfilingCounterReset();
443 __ bind(&ok); 424 __ bind(&ok);
444 425
445 #ifdef DEBUG 426 #ifdef DEBUG
446 // Add a label for checking the size of the code used for returning. 427 // Add a label for checking the size of the code used for returning.
447 Label check_exit_codesize; 428 Label check_exit_codesize;
448 __ bind(&check_exit_codesize); 429 __ bind(&check_exit_codesize);
449 #endif 430 #endif
450 // Make sure that the constant pool is not emitted inside of the return 431 // Make sure that the constant pool is not emitted inside of the return
451 // sequence. 432 // sequence.
452 { Assembler::BlockConstPoolScope block_const_pool(masm_); 433 {
434 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
453 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; 435 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
454 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 436 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
455 // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
456 PredictableCodeSizeScope predictable(masm_, -1);
457 __ RecordJSReturn(); 437 __ RecordJSReturn();
458 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT); 438 int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
459 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); 439 #if V8_TARGET_ARCH_PPC64
 460 __ add(sp, sp, Operand(sp_delta)); 440 // With 64-bit we may need nop() instructions to ensure we have
461 __ Jump(lr); 441 // enough space to SetDebugBreakAtReturn()
462 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); 442 if (is_int16(sp_delta)) {
443 #if !V8_OOL_CONSTANT_POOL
444 masm_->nop();
445 #endif
446 masm_->nop();
463 } 447 }
448 #endif
449 __ blr();
450 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
464 } 451 }
465 452
466 #ifdef DEBUG 453 #ifdef DEBUG
467 // Check that the size of the code used for returning is large enough 454 // Check that the size of the code used for returning is large enough
468 // for the debugger's requirements. 455 // for the debugger's requirements.
469 DCHECK(Assembler::kJSReturnSequenceInstructions <= 456 DCHECK(Assembler::kJSReturnSequenceInstructions <=
470 masm_->InstructionsGeneratedSince(&check_exit_codesize)); 457 masm_->InstructionsGeneratedSince(&check_exit_codesize));
471 #endif 458 #endif
472 } 459 }
473 } 460 }
(...skipping 19 matching lines...)
493 480
494 void FullCodeGenerator::TestContext::Plug(Variable* var) const { 481 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
495 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 482 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
496 // For simplicity we always test the accumulator register. 483 // For simplicity we always test the accumulator register.
497 codegen()->GetVar(result_register(), var); 484 codegen()->GetVar(result_register(), var);
498 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 485 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
499 codegen()->DoTest(this); 486 codegen()->DoTest(this);
500 } 487 }
501 488
502 489
503 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { 490 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
504 }
505 491
506 492
507 void FullCodeGenerator::AccumulatorValueContext::Plug( 493 void FullCodeGenerator::AccumulatorValueContext::Plug(
508 Heap::RootListIndex index) const { 494 Heap::RootListIndex index) const {
509 __ LoadRoot(result_register(), index); 495 __ LoadRoot(result_register(), index);
510 } 496 }
511 497
512 498
513 void FullCodeGenerator::StackValueContext::Plug( 499 void FullCodeGenerator::StackValueContext::Plug(
514 Heap::RootListIndex index) const { 500 Heap::RootListIndex index) const {
515 __ LoadRoot(result_register(), index); 501 __ LoadRoot(result_register(), index);
516 __ push(result_register()); 502 __ push(result_register());
517 } 503 }
518 504
519 505
520 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { 506 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
521 codegen()->PrepareForBailoutBeforeSplit(condition(), 507 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
522 true,
523 true_label_,
524 false_label_); 508 false_label_);
525 if (index == Heap::kUndefinedValueRootIndex || 509 if (index == Heap::kUndefinedValueRootIndex ||
526 index == Heap::kNullValueRootIndex || 510 index == Heap::kNullValueRootIndex ||
527 index == Heap::kFalseValueRootIndex) { 511 index == Heap::kFalseValueRootIndex) {
528 if (false_label_ != fall_through_) __ b(false_label_); 512 if (false_label_ != fall_through_) __ b(false_label_);
529 } else if (index == Heap::kTrueValueRootIndex) { 513 } else if (index == Heap::kTrueValueRootIndex) {
530 if (true_label_ != fall_through_) __ b(true_label_); 514 if (true_label_ != fall_through_) __ b(true_label_);
531 } else { 515 } else {
532 __ LoadRoot(result_register(), index); 516 __ LoadRoot(result_register(), index);
533 codegen()->DoTest(this); 517 codegen()->DoTest(this);
534 } 518 }
535 } 519 }
536 520
537 521
538 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const { 522 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
539 }
540 523
541 524
542 void FullCodeGenerator::AccumulatorValueContext::Plug( 525 void FullCodeGenerator::AccumulatorValueContext::Plug(
543 Handle<Object> lit) const { 526 Handle<Object> lit) const {
544 __ mov(result_register(), Operand(lit)); 527 __ mov(result_register(), Operand(lit));
545 } 528 }
546 529
547 530
548 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { 531 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
549 // Immediates cannot be pushed directly. 532 // Immediates cannot be pushed directly.
550 __ mov(result_register(), Operand(lit)); 533 __ mov(result_register(), Operand(lit));
551 __ push(result_register()); 534 __ push(result_register());
552 } 535 }
553 536
554 537
555 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { 538 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
556 codegen()->PrepareForBailoutBeforeSplit(condition(), 539 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
557 true,
558 true_label_,
559 false_label_); 540 false_label_);
560 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals. 541 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
561 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { 542 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
562 if (false_label_ != fall_through_) __ b(false_label_); 543 if (false_label_ != fall_through_) __ b(false_label_);
563 } else if (lit->IsTrue() || lit->IsJSObject()) { 544 } else if (lit->IsTrue() || lit->IsJSObject()) {
564 if (true_label_ != fall_through_) __ b(true_label_); 545 if (true_label_ != fall_through_) __ b(true_label_);
565 } else if (lit->IsString()) { 546 } else if (lit->IsString()) {
566 if (String::cast(*lit)->length() == 0) { 547 if (String::cast(*lit)->length() == 0) {
567 if (false_label_ != fall_through_) __ b(false_label_); 548 if (false_label_ != fall_through_) __ b(false_label_);
568 } else { 549 } else {
(...skipping 14 matching lines...)
583 564
584 565
585 void FullCodeGenerator::EffectContext::DropAndPlug(int count, 566 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
586 Register reg) const { 567 Register reg) const {
587 DCHECK(count > 0); 568 DCHECK(count > 0);
588 __ Drop(count); 569 __ Drop(count);
589 } 570 }
590 571
591 572
592 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( 573 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
593 int count, 574 int count, Register reg) const {
594 Register reg) const {
595 DCHECK(count > 0); 575 DCHECK(count > 0);
596 __ Drop(count); 576 __ Drop(count);
597 __ Move(result_register(), reg); 577 __ Move(result_register(), reg);
598 } 578 }
599 579
600 580
601 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, 581 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
602 Register reg) const { 582 Register reg) const {
603 DCHECK(count > 0); 583 DCHECK(count > 0);
604 if (count > 1) __ Drop(count - 1); 584 if (count > 1) __ Drop(count - 1);
605 __ str(reg, MemOperand(sp, 0)); 585 __ StoreP(reg, MemOperand(sp, 0));
606 } 586 }
607 587
608 588
609 void FullCodeGenerator::TestContext::DropAndPlug(int count, 589 void FullCodeGenerator::TestContext::DropAndPlug(int count,
610 Register reg) const { 590 Register reg) const {
611 DCHECK(count > 0); 591 DCHECK(count > 0);
612 // For simplicity we always test the accumulator register. 592 // For simplicity we always test the accumulator register.
613 __ Drop(count); 593 __ Drop(count);
614 __ Move(result_register(), reg); 594 __ Move(result_register(), reg);
615 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); 595 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
616 codegen()->DoTest(this); 596 codegen()->DoTest(this);
617 } 597 }
618 598
619 599
620 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 600 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
621 Label* materialize_false) const { 601 Label* materialize_false) const {
622 DCHECK(materialize_true == materialize_false); 602 DCHECK(materialize_true == materialize_false);
623 __ bind(materialize_true); 603 __ bind(materialize_true);
624 } 604 }
625 605
626 606
627 void FullCodeGenerator::AccumulatorValueContext::Plug( 607 void FullCodeGenerator::AccumulatorValueContext::Plug(
628 Label* materialize_true, 608 Label* materialize_true, Label* materialize_false) const {
629 Label* materialize_false) const {
630 Label done; 609 Label done;
631 __ bind(materialize_true); 610 __ bind(materialize_true);
632 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); 611 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
633 __ jmp(&done); 612 __ b(&done);
634 __ bind(materialize_false); 613 __ bind(materialize_false);
635 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); 614 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
636 __ bind(&done); 615 __ bind(&done);
637 } 616 }
638 617
639 618
640 void FullCodeGenerator::StackValueContext::Plug( 619 void FullCodeGenerator::StackValueContext::Plug(
641 Label* materialize_true, 620 Label* materialize_true, Label* materialize_false) const {
642 Label* materialize_false) const {
643 Label done; 621 Label done;
644 __ bind(materialize_true); 622 __ bind(materialize_true);
645 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 623 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
646 __ jmp(&done); 624 __ b(&done);
647 __ bind(materialize_false); 625 __ bind(materialize_false);
648 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 626 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
649 __ bind(&done); 627 __ bind(&done);
650 __ push(ip); 628 __ push(ip);
651 } 629 }
652 630
653 631
654 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 632 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
655 Label* materialize_false) const { 633 Label* materialize_false) const {
656 DCHECK(materialize_true == true_label_); 634 DCHECK(materialize_true == true_label_);
657 DCHECK(materialize_false == false_label_); 635 DCHECK(materialize_false == false_label_);
658 } 636 }
659 637
660 638
661 void FullCodeGenerator::EffectContext::Plug(bool flag) const { 639 void FullCodeGenerator::EffectContext::Plug(bool flag) const {}
662 }
663 640
664 641
665 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { 642 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
666 Heap::RootListIndex value_root_index = 643 Heap::RootListIndex value_root_index =
667 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 644 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
668 __ LoadRoot(result_register(), value_root_index); 645 __ LoadRoot(result_register(), value_root_index);
669 } 646 }
670 647
671 648
672 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { 649 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
673 Heap::RootListIndex value_root_index = 650 Heap::RootListIndex value_root_index =
674 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; 651 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
675 __ LoadRoot(ip, value_root_index); 652 __ LoadRoot(ip, value_root_index);
676 __ push(ip); 653 __ push(ip);
677 } 654 }
678 655
679 656
680 void FullCodeGenerator::TestContext::Plug(bool flag) const { 657 void FullCodeGenerator::TestContext::Plug(bool flag) const {
681 codegen()->PrepareForBailoutBeforeSplit(condition(), 658 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
682 true,
683 true_label_,
684 false_label_); 659 false_label_);
685 if (flag) { 660 if (flag) {
686 if (true_label_ != fall_through_) __ b(true_label_); 661 if (true_label_ != fall_through_) __ b(true_label_);
687 } else { 662 } else {
688 if (false_label_ != fall_through_) __ b(false_label_); 663 if (false_label_ != fall_through_) __ b(false_label_);
689 } 664 }
690 } 665 }
691 666
692 667
693 void FullCodeGenerator::DoTest(Expression* condition, 668 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
694 Label* if_true, 669 Label* if_false, Label* fall_through) {
695 Label* if_false,
696 Label* fall_through) {
697 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); 670 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
698 CallIC(ic, condition->test_id()); 671 CallIC(ic, condition->test_id());
699 __ tst(result_register(), result_register()); 672 __ cmpi(result_register(), Operand::Zero());
700 Split(ne, if_true, if_false, fall_through); 673 Split(ne, if_true, if_false, fall_through);
701 } 674 }
702 675
703 676
704 void FullCodeGenerator::Split(Condition cond, 677 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
705 Label* if_true, 678 Label* fall_through, CRegister cr) {
706 Label* if_false,
707 Label* fall_through) {
708 if (if_false == fall_through) { 679 if (if_false == fall_through) {
709 __ b(cond, if_true); 680 __ b(cond, if_true, cr);
710 } else if (if_true == fall_through) { 681 } else if (if_true == fall_through) {
711 __ b(NegateCondition(cond), if_false); 682 __ b(NegateCondition(cond), if_false, cr);
712 } else { 683 } else {
713 __ b(cond, if_true); 684 __ b(cond, if_true, cr);
714 __ b(if_false); 685 __ b(if_false);
715 } 686 }
716 } 687 }
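
To make Split's branch selection concrete, a minimal model of the three cases; the enum and names are invented, and labels are treated as opaque addresses:

enum class SplitEmit { kBranchTrue, kBranchNegatedFalse, kBranchBoth };
SplitEmit PlanSplit(const void* if_true, const void* if_false,
                    const void* fall_through) {
  if (if_false == fall_through) return SplitEmit::kBranchTrue;  // b<cond> if_true
  if (if_true == fall_through)
    return SplitEmit::kBranchNegatedFalse;  // b<!cond> if_false
  return SplitEmit::kBranchBoth;  // b<cond> if_true; b if_false
}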
717 688
718 689
719 MemOperand FullCodeGenerator::StackOperand(Variable* var) { 690 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
720 DCHECK(var->IsStackAllocated()); 691 DCHECK(var->IsStackAllocated());
721 // Offset is negative because higher indexes are at lower addresses. 692 // Offset is negative because higher indexes are at lower addresses.
722 int offset = -var->index() * kPointerSize; 693 int offset = -var->index() * kPointerSize;
723 // Adjust by a (parameter or local) base offset. 694 // Adjust by a (parameter or local) base offset.
(...skipping 14 matching lines...)
738 return ContextOperand(scratch, var->index()); 709 return ContextOperand(scratch, var->index());
739 } else { 710 } else {
740 return StackOperand(var); 711 return StackOperand(var);
741 } 712 }
742 } 713 }
743 714
744 715
745 void FullCodeGenerator::GetVar(Register dest, Variable* var) { 716 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
746 // Use destination as scratch. 717 // Use destination as scratch.
747 MemOperand location = VarOperand(var, dest); 718 MemOperand location = VarOperand(var, dest);
748 __ ldr(dest, location); 719 __ LoadP(dest, location, r0);
749 } 720 }
750 721
751 722
752 void FullCodeGenerator::SetVar(Variable* var, 723 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
753 Register src,
754 Register scratch0,
755 Register scratch1) { 724 Register scratch1) {
756 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); 725 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
757 DCHECK(!scratch0.is(src)); 726 DCHECK(!scratch0.is(src));
758 DCHECK(!scratch0.is(scratch1)); 727 DCHECK(!scratch0.is(scratch1));
759 DCHECK(!scratch1.is(src)); 728 DCHECK(!scratch1.is(src));
760 MemOperand location = VarOperand(var, scratch0); 729 MemOperand location = VarOperand(var, scratch0);
761 __ str(src, location); 730 __ StoreP(src, location, r0);
762 731
763 // Emit the write barrier code if the location is in the heap. 732 // Emit the write barrier code if the location is in the heap.
764 if (var->IsContextSlot()) { 733 if (var->IsContextSlot()) {
765 __ RecordWriteContextSlot(scratch0, 734 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
766 location.offset(), 735 kLRHasBeenSaved, kDontSaveFPRegs);
767 src,
768 scratch1,
769 kLRHasBeenSaved,
770 kDontSaveFPRegs);
771 } 736 }
772 } 737 }
773 738
774 739
775 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, 740 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
776 bool should_normalize, 741 bool should_normalize,
777 Label* if_true, 742 Label* if_true,
778 Label* if_false) { 743 Label* if_false) {
779 // Only prepare for bailouts before splits if we're in a test 744 // Only prepare for bailouts before splits if we're in a test
780 // context. Otherwise, we let the Visit function deal with the 745 // context. Otherwise, we let the Visit function deal with the
781 // preparation to avoid preparing with the same AST id twice. 746 // preparation to avoid preparing with the same AST id twice.
782 if (!context()->IsTest() || !info_->IsOptimizable()) return; 747 if (!context()->IsTest() || !info_->IsOptimizable()) return;
783 748
784 Label skip; 749 Label skip;
785 if (should_normalize) __ b(&skip); 750 if (should_normalize) __ b(&skip);
786 PrepareForBailout(expr, TOS_REG); 751 PrepareForBailout(expr, TOS_REG);
787 if (should_normalize) { 752 if (should_normalize) {
788 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 753 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
789 __ cmp(r0, ip); 754 __ cmp(r3, ip);
790 Split(eq, if_true, if_false, NULL); 755 Split(eq, if_true, if_false, NULL);
791 __ bind(&skip); 756 __ bind(&skip);
792 } 757 }
793 } 758 }
794 759
795 760
796 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { 761 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
797 // The variable in the declaration always resides in the current function 762 // The variable in the declaration always resides in the current function
798 // context. 763 // context.
799 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope())); 764 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
800 if (generate_debug_code_) { 765 if (generate_debug_code_) {
801 // Check that we're not inside a with or catch context. 766 // Check that we're not inside a with or catch context.
802 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset)); 767 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
803 __ CompareRoot(r1, Heap::kWithContextMapRootIndex); 768 __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
804 __ Check(ne, kDeclarationInWithContext); 769 __ Check(ne, kDeclarationInWithContext);
805 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex); 770 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
806 __ Check(ne, kDeclarationInCatchContext); 771 __ Check(ne, kDeclarationInCatchContext);
807 } 772 }
808 } 773 }
809 774
810 775
811 void FullCodeGenerator::VisitVariableDeclaration( 776 void FullCodeGenerator::VisitVariableDeclaration(
812 VariableDeclaration* declaration) { 777 VariableDeclaration* declaration) {
813 // If it was not possible to allocate the variable at compile time, we 778 // If it was not possible to allocate the variable at compile time, we
814 // need to "declare" it at runtime to make sure it actually exists in the 779 // need to "declare" it at runtime to make sure it actually exists in the
815 // local context. 780 // local context.
816 VariableProxy* proxy = declaration->proxy(); 781 VariableProxy* proxy = declaration->proxy();
817 VariableMode mode = declaration->mode(); 782 VariableMode mode = declaration->mode();
818 Variable* variable = proxy->var(); 783 Variable* variable = proxy->var();
819 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; 784 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
820 switch (variable->location()) { 785 switch (variable->location()) {
821 case Variable::UNALLOCATED: 786 case Variable::UNALLOCATED:
822 globals_->Add(variable->name(), zone()); 787 globals_->Add(variable->name(), zone());
823 globals_->Add(variable->binding_needs_init() 788 globals_->Add(variable->binding_needs_init()
824 ? isolate()->factory()->the_hole_value() 789 ? isolate()->factory()->the_hole_value()
825 : isolate()->factory()->undefined_value(), 790 : isolate()->factory()->undefined_value(),
826 zone()); 791 zone());
827 break; 792 break;
828 793
829 case Variable::PARAMETER: 794 case Variable::PARAMETER:
830 case Variable::LOCAL: 795 case Variable::LOCAL:
831 if (hole_init) { 796 if (hole_init) {
832 Comment cmnt(masm_, "[ VariableDeclaration"); 797 Comment cmnt(masm_, "[ VariableDeclaration");
833 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 798 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
834 __ str(ip, StackOperand(variable)); 799 __ StoreP(ip, StackOperand(variable));
835 } 800 }
836 break; 801 break;
837 802
838 case Variable::CONTEXT: 803 case Variable::CONTEXT:
839 if (hole_init) { 804 if (hole_init) {
840 Comment cmnt(masm_, "[ VariableDeclaration"); 805 Comment cmnt(masm_, "[ VariableDeclaration");
841 EmitDebugCheckDeclarationContext(variable); 806 EmitDebugCheckDeclarationContext(variable);
842 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 807 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
843 __ str(ip, ContextOperand(cp, variable->index())); 808 __ StoreP(ip, ContextOperand(cp, variable->index()), r0);
844 // No write barrier since the_hole_value is in old space. 809 // No write barrier since the_hole_value is in old space.
845 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 810 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
846 } 811 }
847 break; 812 break;
848 813
849 case Variable::LOOKUP: { 814 case Variable::LOOKUP: {
850 Comment cmnt(masm_, "[ VariableDeclaration"); 815 Comment cmnt(masm_, "[ VariableDeclaration");
851 __ mov(r2, Operand(variable->name())); 816 __ mov(r5, Operand(variable->name()));
852 // Declaration nodes are always introduced in one of four modes. 817 // Declaration nodes are always introduced in one of four modes.
853 DCHECK(IsDeclaredVariableMode(mode)); 818 DCHECK(IsDeclaredVariableMode(mode));
854 PropertyAttributes attr = 819 PropertyAttributes attr =
855 IsImmutableVariableMode(mode) ? READ_ONLY : NONE; 820 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
856 __ mov(r1, Operand(Smi::FromInt(attr))); 821 __ LoadSmiLiteral(r4, Smi::FromInt(attr));
857 // Push initial value, if any. 822 // Push initial value, if any.
858 // Note: For variables we must not push an initial value (such as 823 // Note: For variables we must not push an initial value (such as
859 // 'undefined') because we may have a (legal) redeclaration and we 824 // 'undefined') because we may have a (legal) redeclaration and we
860 // must not destroy the current value. 825 // must not destroy the current value.
861 if (hole_init) { 826 if (hole_init) {
862 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); 827 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
863 __ Push(cp, r2, r1, r0); 828 __ Push(cp, r5, r4, r3);
864 } else { 829 } else {
865 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value. 830 __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value.
866 __ Push(cp, r2, r1, r0); 831 __ Push(cp, r5, r4, r3);
867 } 832 }
868 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 833 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
869 break; 834 break;
870 } 835 }
871 } 836 }
872 } 837 }
873 838
874 839
875 void FullCodeGenerator::VisitFunctionDeclaration( 840 void FullCodeGenerator::VisitFunctionDeclaration(
876 FunctionDeclaration* declaration) { 841 FunctionDeclaration* declaration) {
877 VariableProxy* proxy = declaration->proxy(); 842 VariableProxy* proxy = declaration->proxy();
878 Variable* variable = proxy->var(); 843 Variable* variable = proxy->var();
879 switch (variable->location()) { 844 switch (variable->location()) {
880 case Variable::UNALLOCATED: { 845 case Variable::UNALLOCATED: {
881 globals_->Add(variable->name(), zone()); 846 globals_->Add(variable->name(), zone());
882 Handle<SharedFunctionInfo> function = 847 Handle<SharedFunctionInfo> function =
883 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_); 848 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
884 // Check for stack-overflow exception. 849 // Check for stack-overflow exception.
885 if (function.is_null()) return SetStackOverflow(); 850 if (function.is_null()) return SetStackOverflow();
886 globals_->Add(function, zone()); 851 globals_->Add(function, zone());
887 break; 852 break;
888 } 853 }
889 854
890 case Variable::PARAMETER: 855 case Variable::PARAMETER:
891 case Variable::LOCAL: { 856 case Variable::LOCAL: {
892 Comment cmnt(masm_, "[ FunctionDeclaration"); 857 Comment cmnt(masm_, "[ FunctionDeclaration");
893 VisitForAccumulatorValue(declaration->fun()); 858 VisitForAccumulatorValue(declaration->fun());
894 __ str(result_register(), StackOperand(variable)); 859 __ StoreP(result_register(), StackOperand(variable));
895 break; 860 break;
896 } 861 }
897 862
898 case Variable::CONTEXT: { 863 case Variable::CONTEXT: {
899 Comment cmnt(masm_, "[ FunctionDeclaration"); 864 Comment cmnt(masm_, "[ FunctionDeclaration");
900 EmitDebugCheckDeclarationContext(variable); 865 EmitDebugCheckDeclarationContext(variable);
901 VisitForAccumulatorValue(declaration->fun()); 866 VisitForAccumulatorValue(declaration->fun());
902 __ str(result_register(), ContextOperand(cp, variable->index())); 867 __ StoreP(result_register(), ContextOperand(cp, variable->index()), r0);
903 int offset = Context::SlotOffset(variable->index()); 868 int offset = Context::SlotOffset(variable->index());
904 // We know that we have written a function, which is not a smi. 869 // We know that we have written a function, which is not a smi.
905 __ RecordWriteContextSlot(cp, 870 __ RecordWriteContextSlot(cp, offset, result_register(), r5,
906 offset, 871 kLRHasBeenSaved, kDontSaveFPRegs,
907 result_register(), 872 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
908 r2,
909 kLRHasBeenSaved,
910 kDontSaveFPRegs,
911 EMIT_REMEMBERED_SET,
912 OMIT_SMI_CHECK);
913 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); 873 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
914 break; 874 break;
915 } 875 }
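The RecordWriteContextSlot call above is the generational write barrier: storing a possibly new-space closure into an old-space context slot must register that slot so the next scavenge can find the pointer, and OMIT_SMI_CHECK is sound because a function is always a heap object. A toy model of that barrier, with stand-in types rather than V8's heap:

  #include <cstdint>
  #include <unordered_set>

  struct Heap {
    std::unordered_set<void*> new_space;        // stand-in for a semispace
    std::unordered_set<void**> remembered_set;  // old->new slots to rescan
  };

  void RecordWrite(Heap* heap, void** slot, void* value, bool omit_smi_check) {
    if (!omit_smi_check && (reinterpret_cast<intptr_t>(value) & 1) == 0) {
      return;  // a smi is not a pointer; no barrier work needed
    }
    if (heap->new_space.count(value)) {
      heap->remembered_set.insert(slot);  // the EMIT_REMEMBERED_SET analogue
    }
  }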
916 876
917 case Variable::LOOKUP: { 877 case Variable::LOOKUP: {
918 Comment cmnt(masm_, "[ FunctionDeclaration"); 878 Comment cmnt(masm_, "[ FunctionDeclaration");
919 __ mov(r2, Operand(variable->name())); 879 __ mov(r5, Operand(variable->name()));
920 __ mov(r1, Operand(Smi::FromInt(NONE))); 880 __ LoadSmiLiteral(r4, Smi::FromInt(NONE));
921 __ Push(cp, r2, r1); 881 __ Push(cp, r5, r4);
922 // Push initial value for function declaration. 882 // Push initial value for function declaration.
923 VisitForStackValue(declaration->fun()); 883 VisitForStackValue(declaration->fun());
924 __ CallRuntime(Runtime::kDeclareLookupSlot, 4); 884 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
925 break; 885 break;
926 } 886 }
927 } 887 }
928 } 888 }
929 889
930 890
931 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { 891 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
932 Variable* variable = declaration->proxy()->var(); 892 Variable* variable = declaration->proxy()->var();
933 DCHECK(variable->location() == Variable::CONTEXT); 893 DCHECK(variable->location() == Variable::CONTEXT);
934 DCHECK(variable->interface()->IsFrozen()); 894 DCHECK(variable->interface()->IsFrozen());
935 895
936 Comment cmnt(masm_, "[ ModuleDeclaration"); 896 Comment cmnt(masm_, "[ ModuleDeclaration");
937 EmitDebugCheckDeclarationContext(variable); 897 EmitDebugCheckDeclarationContext(variable);
938 898
939 // Load instance object. 899 // Load instance object.
940 __ LoadContext(r1, scope_->ContextChainLength(scope_->GlobalScope())); 900 __ LoadContext(r4, scope_->ContextChainLength(scope_->GlobalScope()));
941 __ ldr(r1, ContextOperand(r1, variable->interface()->Index())); 901 __ LoadP(r4, ContextOperand(r4, variable->interface()->Index()));
942 __ ldr(r1, ContextOperand(r1, Context::EXTENSION_INDEX)); 902 __ LoadP(r4, ContextOperand(r4, Context::EXTENSION_INDEX));
943 903
944 // Assign it. 904 // Assign it.
945 __ str(r1, ContextOperand(cp, variable->index())); 905 __ StoreP(r4, ContextOperand(cp, variable->index()), r0);
946 // We know that we have written a module, which is not a smi. 906 // We know that we have written a module, which is not a smi.
947 __ RecordWriteContextSlot(cp, 907 __ RecordWriteContextSlot(cp, Context::SlotOffset(variable->index()), r4, r6,
948 Context::SlotOffset(variable->index()), 908 kLRHasBeenSaved, kDontSaveFPRegs,
949 r1, 909 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
950 r3,
951 kLRHasBeenSaved,
952 kDontSaveFPRegs,
953 EMIT_REMEMBERED_SET,
954 OMIT_SMI_CHECK);
955 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); 910 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
956 911
957 // Traverse into body. 912 // Traverse into body.
958 Visit(declaration->module()); 913 Visit(declaration->module());
959 } 914 }
960 915
961 916
962 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) { 917 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
963 VariableProxy* proxy = declaration->proxy(); 918 VariableProxy* proxy = declaration->proxy();
964 Variable* variable = proxy->var(); 919 Variable* variable = proxy->var();
(...skipping 18 matching lines...)
983 938
984 939
985 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { 940 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
986 // TODO(rossberg) 941 // TODO(rossberg)
987 } 942 }
988 943
989 944
990 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { 945 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
991 // Call the runtime to declare the globals. 946 // Call the runtime to declare the globals.
992 // The context is the first argument. 947 // The context is the first argument.
993 __ mov(r1, Operand(pairs)); 948 __ mov(r4, Operand(pairs));
994 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags()))); 949 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
995 __ Push(cp, r1, r0); 950 __ Push(cp, r4, r3);
996 __ CallRuntime(Runtime::kDeclareGlobals, 3); 951 __ CallRuntime(Runtime::kDeclareGlobals, 3);
997 // Return value is ignored. 952 // Return value is ignored.
998 } 953 }
999 954
1000 955
1001 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { 956 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
1002 // Call the runtime to declare the modules. 957 // Call the runtime to declare the modules.
1003 __ Push(descriptions); 958 __ Push(descriptions);
1004 __ CallRuntime(Runtime::kDeclareModules, 1); 959 __ CallRuntime(Runtime::kDeclareModules, 1);
1005 // Return value is ignored. 960 // Return value is ignored.
(...skipping 25 matching lines...)
1031 } 986 }
1032 987
1033 Comment cmnt(masm_, "[ Case comparison"); 988 Comment cmnt(masm_, "[ Case comparison");
1034 __ bind(&next_test); 989 __ bind(&next_test);
1035 next_test.Unuse(); 990 next_test.Unuse();
1036 991
1037 // Compile the label expression. 992 // Compile the label expression.
1038 VisitForAccumulatorValue(clause->label()); 993 VisitForAccumulatorValue(clause->label());
1039 994
1040 // Perform the comparison as if via '==='. 995 // Perform the comparison as if via '==='.
1041 __ ldr(r1, MemOperand(sp, 0)); // Switch value. 996 __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
1042 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 997 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1043 JumpPatchSite patch_site(masm_); 998 JumpPatchSite patch_site(masm_);
1044 if (inline_smi_code) { 999 if (inline_smi_code) {
1045 Label slow_case; 1000 Label slow_case;
1046 __ orr(r2, r1, r0); 1001 __ orx(r5, r4, r3);
1047 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 1002 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
1048 1003
1049 __ cmp(r1, r0); 1004 __ cmp(r4, r3);
1050 __ b(ne, &next_test); 1005 __ bne(&next_test);
1051 __ Drop(1); // Switch value is no longer needed. 1006 __ Drop(1); // Switch value is no longer needed.
1052 __ b(clause->body_target()); 1007 __ b(clause->body_target());
1053 __ bind(&slow_case); 1008 __ bind(&slow_case);
1054 } 1009 }
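The inline fast path above leans on V8's pointer tagging: a smi keeps its tag bit clear, so OR-ing the two operands and testing one bit proves both are smis, after which a plain register compare implements '==='. A minimal sketch of that trick, assuming a 1-bit smi tag (the actual shift differs between 32- and 64-bit builds); the names are illustrative, not V8's:

  #include <cassert>
  #include <cstdint>

  constexpr intptr_t kSmiTagMask = 1;  // low bit clear == smi

  intptr_t MakeSmi(intptr_t value) { return value << 1; }  // tag by shifting

  bool BothSmis(intptr_t a, intptr_t b) {
    // Mirrors `orx r5, r4, r3` + EmitJumpIfNotSmi: one test covers both.
    return ((a | b) & kSmiTagMask) == 0;
  }

  int main() {
    intptr_t x = MakeSmi(7), y = MakeSmi(7);
    assert(BothSmis(x, y) && x == y);  // strict equality as a raw compare
    return 0;
  }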
1055 1010
1056 // Record position before stub call for type feedback. 1011 // Record position before stub call for type feedback.
1057 SetSourcePosition(clause->position()); 1012 SetSourcePosition(clause->position());
1058 Handle<Code> ic = 1013 Handle<Code> ic =
1059 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); 1014 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1060 CallIC(ic, clause->CompareId()); 1015 CallIC(ic, clause->CompareId());
1061 patch_site.EmitPatchInfo(); 1016 patch_site.EmitPatchInfo();
1062 1017
1063 Label skip; 1018 Label skip;
1064 __ b(&skip); 1019 __ b(&skip);
1065 PrepareForBailout(clause, TOS_REG); 1020 PrepareForBailout(clause, TOS_REG);
1066 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1021 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1067 __ cmp(r0, ip); 1022 __ cmp(r3, ip);
1068 __ b(ne, &next_test); 1023 __ bne(&next_test);
1069 __ Drop(1); 1024 __ Drop(1);
1070 __ jmp(clause->body_target()); 1025 __ b(clause->body_target());
1071 __ bind(&skip); 1026 __ bind(&skip);
1072 1027
1073 __ cmp(r0, Operand::Zero()); 1028 __ cmpi(r3, Operand::Zero());
1074 __ b(ne, &next_test); 1029 __ bne(&next_test);
1075 __ Drop(1); // Switch value is no longer needed. 1030 __ Drop(1); // Switch value is no longer needed.
1076 __ b(clause->body_target()); 1031 __ b(clause->body_target());
1077 } 1032 }
1078 1033
1079 // Discard the test value and jump to the default if present, otherwise to 1034 // Discard the test value and jump to the default if present, otherwise to
1080 // the end of the statement. 1035 // the end of the statement.
1081 __ bind(&next_test); 1036 __ bind(&next_test);
1082 __ Drop(1); // Switch value is no longer needed. 1037 __ Drop(1); // Switch value is no longer needed.
1083 if (default_clause == NULL) { 1038 if (default_clause == NULL) {
1084 __ b(nested_statement.break_label()); 1039 __ b(nested_statement.break_label());
(...skipping 21 matching lines...)
1106 SetStatementPosition(stmt); 1061 SetStatementPosition(stmt);
1107 1062
1108 Label loop, exit; 1063 Label loop, exit;
1109 ForIn loop_statement(this, stmt); 1064 ForIn loop_statement(this, stmt);
1110 increment_loop_depth(); 1065 increment_loop_depth();
1111 1066
1112 // Get the object to enumerate over. If the object is null or undefined, skip 1067 // Get the object to enumerate over. If the object is null or undefined, skip
1113 // over the loop. See ECMA-262 version 5, section 12.6.4. 1068 // over the loop. See ECMA-262 version 5, section 12.6.4.
1114 VisitForAccumulatorValue(stmt->enumerable()); 1069 VisitForAccumulatorValue(stmt->enumerable());
1115 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1070 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1116 __ cmp(r0, ip); 1071 __ cmp(r3, ip);
1117 __ b(eq, &exit); 1072 __ beq(&exit);
1118 Register null_value = r5; 1073 Register null_value = r7;
1119 __ LoadRoot(null_value, Heap::kNullValueRootIndex); 1074 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1120 __ cmp(r0, null_value); 1075 __ cmp(r3, null_value);
1121 __ b(eq, &exit); 1076 __ beq(&exit);
1122 1077
1123 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); 1078 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1124 1079
1125 // Convert the object to a JS object. 1080 // Convert the object to a JS object.
1126 Label convert, done_convert; 1081 Label convert, done_convert;
1127 __ JumpIfSmi(r0, &convert); 1082 __ JumpIfSmi(r3, &convert);
1128 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 1083 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
1129 __ b(ge, &done_convert); 1084 __ bge(&done_convert);
1130 __ bind(&convert); 1085 __ bind(&convert);
1131 __ push(r0); 1086 __ push(r3);
1132 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 1087 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1133 __ bind(&done_convert); 1088 __ bind(&done_convert);
1134 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG); 1089 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1135 __ push(r0); 1090 __ push(r3);
1136 1091
1137 // Check for proxies. 1092 // Check for proxies.
1138 Label call_runtime; 1093 Label call_runtime;
1139 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1094 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1140 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE); 1095 __ CompareObjectType(r3, r4, r4, LAST_JS_PROXY_TYPE);
1141 __ b(le, &call_runtime); 1096 __ ble(&call_runtime);
1142 1097
1143 // Check cache validity in generated code. This is a fast case for 1098 // Check cache validity in generated code. This is a fast case for
1144 // the JSObject::IsSimpleEnum cache validity checks. If we cannot 1099 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1145 // guarantee cache validity, call the runtime system to check cache 1100 // guarantee cache validity, call the runtime system to check cache
1146 // validity or get the property names in a fixed array. 1101 // validity or get the property names in a fixed array.
1147 __ CheckEnumCache(null_value, &call_runtime); 1102 __ CheckEnumCache(null_value, &call_runtime);
1148 1103
1149 // The enum cache is valid. Load the map of the object being 1104 // The enum cache is valid. Load the map of the object being
1150 // iterated over and use the cache for the iteration. 1105 // iterated over and use the cache for the iteration.
1151 Label use_cache; 1106 Label use_cache;
1152 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 1107 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
1153 __ b(&use_cache); 1108 __ b(&use_cache);
1154 1109
1155 // Get the set of properties to enumerate. 1110 // Get the set of properties to enumerate.
1156 __ bind(&call_runtime); 1111 __ bind(&call_runtime);
1157 __ push(r0); // Duplicate the enumerable object on the stack. 1112 __ push(r3); // Duplicate the enumerable object on the stack.
1158 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); 1113 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1159 PrepareForBailoutForId(stmt->EnumId(), TOS_REG); 1114 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1160 1115
1161 // If we got a map from the runtime call, we can do a fast 1116 // If we got a map from the runtime call, we can do a fast
1162 // modification check. Otherwise, we got a fixed array, and we have 1117 // modification check. Otherwise, we got a fixed array, and we have
1163 // to do a slow check. 1118 // to do a slow check.
1164 Label fixed_array; 1119 Label fixed_array;
1165 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 1120 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
1166 __ LoadRoot(ip, Heap::kMetaMapRootIndex); 1121 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1167 __ cmp(r2, ip); 1122 __ cmp(r5, ip);
1168 __ b(ne, &fixed_array); 1123 __ bne(&fixed_array);
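The meta-map compare above is how the code tells the two possible runtime results apart: kGetPropertyNamesFast returns either a Map (enum cache usable) or a FixedArray of names, and a Map is the only object whose own map is the meta map. As a sketch with stand-in types:

  struct HeapObject { const void* map; };

  static const char kMetaMapSentinel = 0;          // stands in for the meta map
  const void* const kMetaMap = &kMetaMapSentinel;  // Heap::kMetaMapRootIndex

  // Mirrors: load the result's map, compare against the meta map,
  // fall through to &fixed_array on mismatch.
  bool IsMap(const HeapObject* runtime_result) {
    return runtime_result->map == kMetaMap;
  }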
1169 1124
1170 // We got a map in register r0. Get the enumeration cache from it. 1125 // We got a map in register r3. Get the enumeration cache from it.
1171 Label no_descriptors; 1126 Label no_descriptors;
1172 __ bind(&use_cache); 1127 __ bind(&use_cache);
1173 1128
1174 __ EnumLength(r1, r0); 1129 __ EnumLength(r4, r3);
1175 __ cmp(r1, Operand(Smi::FromInt(0))); 1130 __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
1176 __ b(eq, &no_descriptors); 1131 __ beq(&no_descriptors);
1177 1132
1178 __ LoadInstanceDescriptors(r0, r2); 1133 __ LoadInstanceDescriptors(r3, r5);
1179 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset)); 1134 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
1180 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset)); 1135 __ LoadP(r5,
1136 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
1181 1137
1182 // Set up the four remaining stack slots. 1138 // Set up the four remaining stack slots.
1183 __ push(r0); // Map. 1139 __ push(r3); // Map.
1184 __ mov(r0, Operand(Smi::FromInt(0))); 1140 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1185 // Push enumeration cache, enumeration cache length (as smi) and zero. 1141 // Push enumeration cache, enumeration cache length (as smi) and zero.
1186 __ Push(r2, r1, r0); 1142 __ Push(r5, r4, r3);
1187 __ jmp(&loop); 1143 __ b(&loop);
1188 1144
1189 __ bind(&no_descriptors); 1145 __ bind(&no_descriptors);
1190 __ Drop(1); 1146 __ Drop(1);
1191 __ jmp(&exit); 1147 __ b(&exit);
1192 1148
1193 // We got a fixed array in register r0. Iterate through that. 1149 // We got a fixed array in register r3. Iterate through that.
1194 Label non_proxy; 1150 Label non_proxy;
1195 __ bind(&fixed_array); 1151 __ bind(&fixed_array);
1196 1152
1197 __ Move(r1, FeedbackVector()); 1153 __ Move(r4, FeedbackVector());
1198 __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); 1154 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1199 int vector_index = FeedbackVector()->GetIndex(slot); 1155 int vector_index = FeedbackVector()->GetIndex(slot);
1200 __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index))); 1156 __ StoreP(
1157 r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(vector_index)), r0);
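The store above writes the megamorphic sentinel into this for-in site's feedback-vector slot, recording that the statement hit the generic fixed-array path so later optimization will not assume the fast enum-cache case. In miniature (the enum and names are illustrative):

  #include <vector>

  enum Feedback { kUninitialized, kMonomorphic, kMegamorphic };

  void MarkForInSlowPath(std::vector<Feedback>* vector, int slot_index) {
    (*vector)[slot_index] = kMegamorphic;  // the MegamorphicSentinel store
  }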
1201 1158
1202 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check 1159 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi indicates slow check
1203 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object 1160 __ LoadP(r5, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1204 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 1161 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1205 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); 1162 __ CompareObjectType(r5, r6, r6, LAST_JS_PROXY_TYPE);
1206 __ b(gt, &non_proxy); 1163 __ bgt(&non_proxy);
1207 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy 1164 __ LoadSmiLiteral(r4, Smi::FromInt(0)); // Zero indicates proxy
1208 __ bind(&non_proxy); 1165 __ bind(&non_proxy);
1209 __ Push(r1, r0); // Smi and array 1166 __ Push(r4, r3); // Smi and array
1210 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset)); 1167 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
1211 __ mov(r0, Operand(Smi::FromInt(0))); 1168 __ LoadSmiLiteral(r3, Smi::FromInt(0));
1212 __ Push(r1, r0); // Fixed array length (as smi) and initial index. 1169 __ Push(r4, r3); // Fixed array length (as smi) and initial index.
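At this point both the fast and slow setups have left the same five-slot frame that the loop body below indexes with MemOperand(sp, n * kPointerSize). Labels for those slots (the naming is mine, not V8's):

  enum ForInStackSlot {       // offsets from sp, in kPointerSize units
    kIndexSlot = 0,           // current index (smi), bumped each iteration
    kLengthSlot = 1,          // cache/array length (smi)
    kKeyArraySlot = 2,        // enum cache bridge or fixed array of names
    kMapOrSentinelSlot = 3,   // expected map, or smi 1 (slow) / smi 0 (proxy)
    kEnumerableSlot = 4       // the object being iterated over
  };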
1213 1170
1214 // Generate code for doing the condition check. 1171 // Generate code for doing the condition check.
1215 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); 1172 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1216 __ bind(&loop); 1173 __ bind(&loop);
1217 // Load the current count to r0, load the length to r1. 1174 // Load the current count to r3, load the length to r4.
1218 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize)); 1175 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1219 __ cmp(r0, r1); // Compare to the array length. 1176 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1220 __ b(hs, loop_statement.break_label()); 1177 __ cmpl(r3, r4); // Compare to the array length.
1178 __ bge(loop_statement.break_label());
1221 1179
1222 // Get the current entry of the array into register r3. 1180 // Get the current entry of the array into register r6.
1223 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); 1181 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1224 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 1182 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1225 __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0)); 1183 __ SmiToPtrArrayOffset(r6, r3);
1184 __ LoadPX(r6, MemOperand(r6, r5));
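The two instructions above turn a smi index into a byte offset and do one scaled load. A sketch, reusing the 1-bit tag from the earlier example (V8's actual smi shift differs between 32- and 64-bit builds):

  #include <cstdint>

  constexpr int kSmiTagSize = 1;
  constexpr int kPointerSizeLog2 = sizeof(void*) == 8 ? 3 : 2;

  // SmiToPtrArrayOffset: untag, then scale by the pointer size.
  intptr_t SmiToPtrArrayOffset(intptr_t smi_index) {
    return (smi_index >> kSmiTagSize) << kPointerSizeLog2;
  }

  // LoadPX analogue: one pointer-sized load at base + offset.
  void* LoadElement(char* elements_base, intptr_t smi_index) {
    return *reinterpret_cast<void**>(elements_base +
                                     SmiToPtrArrayOffset(smi_index));
  }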
1226 1185
1227 // Get the expected map from the stack or a smi in the 1186 // Get the expected map from the stack or a smi in the
1228 // permanent slow case into register r2. 1187 // permanent slow case into register r5.
1229 __ ldr(r2, MemOperand(sp, 3 * kPointerSize)); 1188 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1230 1189
1231 // Check if the expected map still matches that of the enumerable. 1190 // Check if the expected map still matches that of the enumerable.
1232 // If not, we may have to filter the key. 1191 // If not, we may have to filter the key.
1233 Label update_each; 1192 Label update_each;
1234 __ ldr(r1, MemOperand(sp, 4 * kPointerSize)); 1193 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1235 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset)); 1194 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1236 __ cmp(r4, Operand(r2)); 1195 __ cmp(r7, r5);
1237 __ b(eq, &update_each); 1196 __ beq(&update_each);
1238 1197
1239 // For proxies, no filtering is done. 1198 // For proxies, no filtering is done.
1240 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. 1199 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1241 __ cmp(r2, Operand(Smi::FromInt(0))); 1200 __ CmpSmiLiteral(r5, Smi::FromInt(0), r0);
1242 __ b(eq, &update_each); 1201 __ beq(&update_each);
1243 1202
1244 // Convert the entry to a string or (smi) 0 if it isn't a property 1203 // Convert the entry to a string or (smi) 0 if it isn't a property
1245 // any more. If the property has been removed while iterating, we 1204 // any more. If the property has been removed while iterating, we
1246 // just skip it. 1205 // just skip it.
1247 __ push(r1); // Enumerable. 1206 __ Push(r4, r6); // Enumerable and current entry.
1248 __ push(r3); // Current entry.
1249 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 1207 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1250 __ mov(r3, Operand(r0), SetCC); 1208 __ mr(r6, r3);
1251 __ b(eq, loop_statement.continue_label()); 1209 __ cmpi(r6, Operand::Zero());
1210 __ beq(loop_statement.continue_label());
1252 1211
1253 // Update the 'each' property or variable from the possibly filtered 1212 // Update the 'each' property or variable from the possibly filtered
1254 // entry in register r3. 1213 // entry in register r6.
1255 __ bind(&update_each); 1214 __ bind(&update_each);
1256 __ mov(result_register(), r3); 1215 __ mr(result_register(), r6);
1257 // Perform the assignment as if via '='. 1216 // Perform the assignment as if via '='.
1258 { EffectContext context(this); 1217 {
1218 EffectContext context(this);
1259 EmitAssignment(stmt->each()); 1219 EmitAssignment(stmt->each());
1260 } 1220 }
1261 1221
1262 // Generate code for the body of the loop. 1222 // Generate code for the body of the loop.
1263 Visit(stmt->body()); 1223 Visit(stmt->body());
1264 1224
1265 // Generate code for going to the next element by incrementing 1225 // Generate code for going to the next element by incrementing
1266 // the index (smi) stored on top of the stack. 1226 // the index (smi) stored on top of the stack.
1267 __ bind(loop_statement.continue_label()); 1227 __ bind(loop_statement.continue_label());
1268 __ pop(r0); 1228 __ pop(r3);
1269 __ add(r0, r0, Operand(Smi::FromInt(1))); 1229 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1270 __ push(r0); 1230 __ push(r3);
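The increment above works directly on the tagged value: with a zero low tag, tagged(a) + tagged(1) == tagged(a + 1), so no untag/retag pair is needed. Under the same 1-bit-tag assumption as the earlier sketches:

  #include <cassert>
  #include <cstdint>

  int main() {
    auto tag = [](intptr_t v) { return v << 1; };
    intptr_t index = tag(41);
    index += tag(1);           // AddSmiLiteral: add the tagged constant
    assert(index == tag(42));
    return 0;
  }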
1271 1231
1272 EmitBackEdgeBookkeeping(stmt, &loop); 1232 EmitBackEdgeBookkeeping(stmt, &loop);
1273 __ b(&loop); 1233 __ b(&loop);
1274 1234
1275 // Remove the pointers stored on the stack. 1235 // Remove the pointers stored on the stack.
1276 __ bind(loop_statement.break_label()); 1236 __ bind(loop_statement.break_label());
1277 __ Drop(5); 1237 __ Drop(5);
1278 1238
1279 // Exit and decrement the loop depth. 1239 // Exit and decrement the loop depth.
1280 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1240 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
(...skipping 13 matching lines...)
1294 VisitForEffect(stmt->assign_iterator()); 1254 VisitForEffect(stmt->assign_iterator());
1295 1255
1296 // Loop entry. 1256 // Loop entry.
1297 __ bind(loop_statement.continue_label()); 1257 __ bind(loop_statement.continue_label());
1298 1258
1299 // result = iterator.next() 1259 // result = iterator.next()
1300 VisitForEffect(stmt->next_result()); 1260 VisitForEffect(stmt->next_result());
1301 1261
1302 // if (result.done) break; 1262 // if (result.done) break;
1303 Label result_not_done; 1263 Label result_not_done;
1304 VisitForControl(stmt->result_done(), 1264 VisitForControl(stmt->result_done(), loop_statement.break_label(),
1305 loop_statement.break_label(), 1265 &result_not_done, &result_not_done);
1306 &result_not_done,
1307 &result_not_done);
1308 __ bind(&result_not_done); 1266 __ bind(&result_not_done);
1309 1267
1310 // each = result.value 1268 // each = result.value
1311 VisitForEffect(stmt->assign_each()); 1269 VisitForEffect(stmt->assign_each());
1312 1270
1313 // Generate code for the body of the loop. 1271 // Generate code for the body of the loop.
1314 Visit(stmt->body()); 1272 Visit(stmt->body());
1315 1273
1316 // Check stack before looping. 1274 // Check stack before looping.
1317 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS); 1275 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1318 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label()); 1276 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1319 __ jmp(loop_statement.continue_label()); 1277 __ b(loop_statement.continue_label());
1320 1278
1321 // Exit and decrement the loop depth. 1279 // Exit and decrement the loop depth.
1322 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); 1280 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1323 __ bind(loop_statement.break_label()); 1281 __ bind(loop_statement.break_label());
1324 decrement_loop_depth(); 1282 decrement_loop_depth();
1325 } 1283 }
1326 1284
1327 1285
1328 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, 1286 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1329 bool pretenure) { 1287 bool pretenure) {
1330 // Use the fast case closure allocation code that allocates in new 1288 // Use the fast case closure allocation code that allocates in new
1331 // space for nested functions that don't need literals cloning. If 1289 // space for nested functions that don't need literals cloning. If
1332 // we're running with the --always-opt or the --prepare-always-opt 1290 // we're running with the --always-opt or the --prepare-always-opt
1333 // flag, we need to use the runtime function so that the new function 1291 // flag, we need to use the runtime function so that the new function
1334 // we are creating here gets a chance to have its code optimized and 1292 // we are creating here gets a chance to have its code optimized and
1335 // doesn't just get a copy of the existing unoptimized code. 1293 // doesn't just get a copy of the existing unoptimized code.
1336 if (!FLAG_always_opt && 1294 if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
1337 !FLAG_prepare_always_opt && 1295 scope()->is_function_scope() && info->num_literals() == 0) {
1338 !pretenure &&
1339 scope()->is_function_scope() &&
1340 info->num_literals() == 0) {
1341 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind()); 1296 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1342 __ mov(r2, Operand(info)); 1297 __ mov(r5, Operand(info));
1343 __ CallStub(&stub); 1298 __ CallStub(&stub);
1344 } else { 1299 } else {
1345 __ mov(r0, Operand(info)); 1300 __ mov(r3, Operand(info));
1346 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex 1301 __ LoadRoot(
1347 : Heap::kFalseValueRootIndex); 1302 r4, pretenure ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1348 __ Push(cp, r0, r1); 1303 __ Push(cp, r3, r4);
1349 __ CallRuntime(Runtime::kNewClosure, 3); 1304 __ CallRuntime(Runtime::kNewClosure, 3);
1350 } 1305 }
1351 context()->Plug(r0); 1306 context()->Plug(r3);
1352 } 1307 }
1353 1308
1354 1309
1355 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { 1310 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1356 Comment cmnt(masm_, "[ VariableProxy"); 1311 Comment cmnt(masm_, "[ VariableProxy");
1357 EmitVariableLoad(expr); 1312 EmitVariableLoad(expr);
1358 } 1313 }
1359 1314
1360 1315
1361 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) { 1316 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1362 Comment cmnt(masm_, "[ SuperReference "); 1317 Comment cmnt(masm_, "[ SuperReference ");
1363 1318
1364 __ ldr(LoadDescriptor::ReceiverRegister(), 1319 __ LoadP(LoadDescriptor::ReceiverRegister(),
1365 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1320 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1366 1321
1367 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol()); 1322 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1368 __ Move(LoadDescriptor::NameRegister(), home_object_symbol); 1323 __ Move(LoadDescriptor::NameRegister(), home_object_symbol);
1369 1324
1370 if (FLAG_vector_ics) { 1325 if (FLAG_vector_ics) {
1371 __ mov(VectorLoadICDescriptor::SlotRegister(), 1326 __ mov(VectorLoadICDescriptor::SlotRegister(),
1372 Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot()))); 1327 Operand(SmiFromSlot(expr->HomeObjectFeedbackSlot())));
1373 CallLoadIC(NOT_CONTEXTUAL); 1328 CallLoadIC(NOT_CONTEXTUAL);
1374 } else { 1329 } else {
1375 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId()); 1330 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1376 } 1331 }
1377 1332
1378 __ cmp(r0, Operand(isolate()->factory()->undefined_value())); 1333 __ Cmpi(r3, Operand(isolate()->factory()->undefined_value()), r0);
1379 Label done; 1334 Label done;
1380 __ b(ne, &done); 1335 __ bne(&done);
1381 __ CallRuntime(Runtime::kThrowNonMethodError, 0); 1336 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1382 __ bind(&done); 1337 __ bind(&done);
1383 } 1338 }
1384 1339
1385 1340
1386 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy, 1341 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1387 TypeofState typeof_state, 1342 TypeofState typeof_state,
1388 Label* slow) { 1343 Label* slow) {
1389 Register current = cp; 1344 Register current = cp;
1390 Register next = r1; 1345 Register next = r4;
1391 Register temp = r2; 1346 Register temp = r5;
1392 1347
1393 Scope* s = scope(); 1348 Scope* s = scope();
1394 while (s != NULL) { 1349 while (s != NULL) {
1395 if (s->num_heap_slots() > 0) { 1350 if (s->num_heap_slots() > 0) {
1396 if (s->calls_sloppy_eval()) { 1351 if (s->calls_sloppy_eval()) {
1397 // Check that extension is NULL. 1352 // Check that extension is NULL.
1398 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX)); 1353 __ LoadP(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1399 __ tst(temp, temp); 1354 __ cmpi(temp, Operand::Zero());
1400 __ b(ne, slow); 1355 __ bne(slow);
1401 } 1356 }
1402 // Load next context in chain. 1357 // Load next context in chain.
1403 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX)); 1358 __ LoadP(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1404 // Walk the rest of the chain without clobbering cp. 1359 // Walk the rest of the chain without clobbering cp.
1405 current = next; 1360 current = next;
1406 } 1361 }
1407 // If no outer scope calls eval, we do not need to check more 1362 // If no outer scope calls eval, we do not need to check more
1408 // context extensions. 1363 // context extensions.
1409 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; 1364 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1410 s = s->outer_scope(); 1365 s = s->outer_scope();
1411 } 1366 }
1412 1367
1413 if (s->is_eval_scope()) { 1368 if (s->is_eval_scope()) {
1414 Label loop, fast; 1369 Label loop, fast;
1415 if (!current.is(next)) { 1370 if (!current.is(next)) {
1416 __ Move(next, current); 1371 __ Move(next, current);
1417 } 1372 }
1418 __ bind(&loop); 1373 __ bind(&loop);
1419 // Terminate at native context. 1374 // Terminate at native context.
1420 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); 1375 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1421 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); 1376 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1422 __ cmp(temp, ip); 1377 __ cmp(temp, ip);
1423 __ b(eq, &fast); 1378 __ beq(&fast);
1424 // Check that extension is NULL. 1379 // Check that extension is NULL.
1425 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); 1380 __ LoadP(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1426 __ tst(temp, temp); 1381 __ cmpi(temp, Operand::Zero());
1427 __ b(ne, slow); 1382 __ bne(slow);
1428 // Load next context in chain. 1383 // Load next context in chain.
1429 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); 1384 __ LoadP(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1430 __ b(&loop); 1385 __ b(&loop);
1431 __ bind(&fast); 1386 __ bind(&fast);
1432 } 1387 }
1433 1388
1434 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 1389 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1435 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name())); 1390 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1436 if (FLAG_vector_ics) { 1391 if (FLAG_vector_ics) {
1437 __ mov(VectorLoadICDescriptor::SlotRegister(), 1392 __ mov(VectorLoadICDescriptor::SlotRegister(),
1438 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); 1393 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1439 } 1394 }
1440 1395
1441 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) 1396 ContextualMode mode =
1442 ? NOT_CONTEXTUAL 1397 (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL : CONTEXTUAL;
1443 : CONTEXTUAL;
1444 CallLoadIC(mode); 1398 CallLoadIC(mode);
1445 } 1399 }
1446 1400
1447 1401
1448 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1402 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1449 Label* slow) { 1403 Label* slow) {
1450 DCHECK(var->IsContextSlot()); 1404 DCHECK(var->IsContextSlot());
1451 Register context = cp; 1405 Register context = cp;
1452 Register next = r3; 1406 Register next = r6;
1453 Register temp = r4; 1407 Register temp = r7;
1454 1408
1455 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1409 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1456 if (s->num_heap_slots() > 0) { 1410 if (s->num_heap_slots() > 0) {
1457 if (s->calls_sloppy_eval()) { 1411 if (s->calls_sloppy_eval()) {
1458 // Check that extension is NULL. 1412 // Check that extension is NULL.
1459 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1413 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1460 __ tst(temp, temp); 1414 __ cmpi(temp, Operand::Zero());
1461 __ b(ne, slow); 1415 __ bne(slow);
1462 } 1416 }
1463 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX)); 1417 __ LoadP(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1464 // Walk the rest of the chain without clobbering cp. 1418 // Walk the rest of the chain without clobbering cp.
1465 context = next; 1419 context = next;
1466 } 1420 }
1467 } 1421 }
1468 // Check that last extension is NULL. 1422 // Check that last extension is NULL.
1469 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX)); 1423 __ LoadP(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1470 __ tst(temp, temp); 1424 __ cmpi(temp, Operand::Zero());
1471 __ b(ne, slow); 1425 __ bne(slow);
1472 1426
1473 // This function is used only for loads, not stores, so it's safe to 1427 // This function is used only for loads, not stores, so it's safe to
1474 // return a cp-based operand (the write barrier cannot be allowed to 1428 // return a cp-based operand (the write barrier cannot be allowed to
1475 // destroy the cp register). 1429 // destroy the cp register).
1476 return ContextOperand(context, var->index()); 1430 return ContextOperand(context, var->index());
1477 } 1431 }
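The loop above walks the runtime context chain in lockstep with the static scope chain, bailing out to the slow path whenever a context carries an extension object (installed by sloppy-mode eval), since such an extension could shadow the slot. The same walk in plain C++ with stand-in types; note the real code only tests contexts whose scopes call sloppy eval:

  struct Context {
    Context* previous;  // Context::PREVIOUS_INDEX
    void* extension;    // Context::EXTENSION_INDEX; non-null forces slow path
  };

  // Returns the context that owns the slot, or nullptr to take the slow
  // path (the `bne slow` branches above).
  Context* WalkContextChain(Context* current, int hops) {
    for (int i = 0; i < hops; ++i) {
      if (current->extension != nullptr) return nullptr;
      current = current->previous;
    }
    return current->extension == nullptr ? current : nullptr;
  }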
1478 1432
1479 1433
1480 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, 1434 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1481 TypeofState typeof_state, 1435 TypeofState typeof_state,
1482 Label* slow, 1436 Label* slow, Label* done) {
1483 Label* done) {
1484 // Generate fast-case code for variables that might be shadowed by 1437 // Generate fast-case code for variables that might be shadowed by
1485 // eval-introduced variables. Eval is used a lot without 1438 // eval-introduced variables. Eval is used a lot without
1486 // introducing variables. In those cases, we do not want to 1439 // introducing variables. In those cases, we do not want to
1487 // perform a runtime call for all variables in the scope 1440 // perform a runtime call for all variables in the scope
1488 // containing the eval. 1441 // containing the eval.
1489 Variable* var = proxy->var(); 1442 Variable* var = proxy->var();
1490 if (var->mode() == DYNAMIC_GLOBAL) { 1443 if (var->mode() == DYNAMIC_GLOBAL) {
1491 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow); 1444 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1492 __ jmp(done); 1445 __ b(done);
1493 } else if (var->mode() == DYNAMIC_LOCAL) { 1446 } else if (var->mode() == DYNAMIC_LOCAL) {
1494 Variable* local = var->local_if_not_shadowed(); 1447 Variable* local = var->local_if_not_shadowed();
1495 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); 1448 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
1496 if (local->mode() == LET || local->mode() == CONST || 1449 if (local->mode() == LET || local->mode() == CONST ||
1497 local->mode() == CONST_LEGACY) { 1450 local->mode() == CONST_LEGACY) {
1498 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1451 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1452 __ bne(done);
1499 if (local->mode() == CONST_LEGACY) { 1453 if (local->mode() == CONST_LEGACY) {
1500 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1454 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1501 } else { // LET || CONST 1455 } else { // LET || CONST
1502 __ b(ne, done); 1456 __ mov(r3, Operand(var->name()));
1503 __ mov(r0, Operand(var->name())); 1457 __ push(r3);
1504 __ push(r0);
1505 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1458 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1506 } 1459 }
1507 } 1460 }
1508 __ jmp(done); 1461 __ b(done);
1509 } 1462 }
1510 } 1463 }
1511 1464
1512 1465
1513 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { 1466 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1514 // Record position before possible IC call. 1467 // Record position before possible IC call.
1515 SetSourcePosition(proxy->position()); 1468 SetSourcePosition(proxy->position());
1516 Variable* var = proxy->var(); 1469 Variable* var = proxy->var();
1517 1470
1518 // Three cases: global variables, lookup variables, and all other types of 1471 // Three cases: global variables, lookup variables, and all other types of
1519 // variables. 1472 // variables.
1520 switch (var->location()) { 1473 switch (var->location()) {
1521 case Variable::UNALLOCATED: { 1474 case Variable::UNALLOCATED: {
1522 Comment cmnt(masm_, "[ Global variable"); 1475 Comment cmnt(masm_, "[ Global variable");
1523 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 1476 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1524 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); 1477 __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1525 if (FLAG_vector_ics) { 1478 if (FLAG_vector_ics) {
1526 __ mov(VectorLoadICDescriptor::SlotRegister(), 1479 __ mov(VectorLoadICDescriptor::SlotRegister(),
1527 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); 1480 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1528 } 1481 }
1529 CallLoadIC(CONTEXTUAL); 1482 CallLoadIC(CONTEXTUAL);
1530 context()->Plug(r0); 1483 context()->Plug(r3);
1531 break; 1484 break;
1532 } 1485 }
1533 1486
1534 case Variable::PARAMETER: 1487 case Variable::PARAMETER:
1535 case Variable::LOCAL: 1488 case Variable::LOCAL:
1536 case Variable::CONTEXT: { 1489 case Variable::CONTEXT: {
1537 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" 1490 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1538 : "[ Stack variable"); 1491 : "[ Stack variable");
1539 if (var->binding_needs_init()) { 1492 if (var->binding_needs_init()) {
1540 // var->scope() may be NULL when the proxy is located in eval code and 1493 // var->scope() may be NULL when the proxy is located in eval code and
(...skipping 20 matching lines...)
1561 // function() { f(); let x = 1; function f() { x = 2; } } 1514 // function() { f(); let x = 1; function f() { x = 2; } }
1562 // 1515 //
1563 bool skip_init_check; 1516 bool skip_init_check;
1564 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { 1517 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1565 skip_init_check = false; 1518 skip_init_check = false;
1566 } else { 1519 } else {
1567 // Check that we always have valid source position. 1520 // Check that we always have valid source position.
1568 DCHECK(var->initializer_position() != RelocInfo::kNoPosition); 1521 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1569 DCHECK(proxy->position() != RelocInfo::kNoPosition); 1522 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1570 skip_init_check = var->mode() != CONST_LEGACY && 1523 skip_init_check = var->mode() != CONST_LEGACY &&
1571 var->initializer_position() < proxy->position(); 1524 var->initializer_position() < proxy->position();
1572 } 1525 }
1573 1526
1574 if (!skip_init_check) { 1527 if (!skip_init_check) {
1528 Label done;
1575 // Let and const need a read barrier. 1529 // Let and const need a read barrier.
1576 GetVar(r0, var); 1530 GetVar(r3, var);
1577 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); 1531 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1532 __ bne(&done);
1578 if (var->mode() == LET || var->mode() == CONST) { 1533 if (var->mode() == LET || var->mode() == CONST) {
1579 // Throw a reference error when using an uninitialized let/const 1534 // Throw a reference error when using an uninitialized let/const
1580 // binding in harmony mode. 1535 // binding in harmony mode.
1581 Label done; 1536 __ mov(r3, Operand(var->name()));
1582 __ b(ne, &done); 1537 __ push(r3);
1583 __ mov(r0, Operand(var->name()));
1584 __ push(r0);
1585 __ CallRuntime(Runtime::kThrowReferenceError, 1); 1538 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1586 __ bind(&done);
1587 } else { 1539 } else {
1588 // Uninitialized const bindings outside of harmony mode are unholed. 1540 // Uninitialized const bindings outside of harmony mode are unholed.
1589 DCHECK(var->mode() == CONST_LEGACY); 1541 DCHECK(var->mode() == CONST_LEGACY);
1590 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); 1542 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1591 } 1543 }
1592 context()->Plug(r0); 1544 __ bind(&done);
1545 context()->Plug(r3);
1593 break; 1546 break;
1594 } 1547 }
1595 } 1548 }
1596 context()->Plug(var); 1549 context()->Plug(var);
1597 break; 1550 break;
1598 } 1551 }
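The hole check above is the read barrier for lexical bindings: the slot is pre-initialized with the hole, reading it before initialization throws for let/const, while legacy const silently yields undefined. The same semantics with stand-in types:

  #include <stdexcept>
  #include <string>

  enum class Mode { LET, CONST, CONST_LEGACY };
  struct Value { bool is_the_hole; };

  Value ReadBinding(Value slot, Mode mode, const std::string& name) {
    if (!slot.is_the_hole) return slot;                   // bne &done
    if (mode == Mode::CONST_LEGACY) return Value{false};  // yields undefined
    throw std::runtime_error("ReferenceError: " + name);
  }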
1599 1552
1600 case Variable::LOOKUP: { 1553 case Variable::LOOKUP: {
1601 Comment cmnt(masm_, "[ Lookup variable"); 1554 Comment cmnt(masm_, "[ Lookup variable");
1602 Label done, slow; 1555 Label done, slow;
1603 // Generate code for loading from variables potentially shadowed 1556 // Generate code for loading from variables potentially shadowed
1604 // by eval-introduced variables. 1557 // by eval-introduced variables.
1605 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done); 1558 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1606 __ bind(&slow); 1559 __ bind(&slow);
1607 __ mov(r1, Operand(var->name())); 1560 __ mov(r4, Operand(var->name()));
1608 __ Push(cp, r1); // Context and name. 1561 __ Push(cp, r4); // Context and name.
1609 __ CallRuntime(Runtime::kLoadLookupSlot, 2); 1562 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1610 __ bind(&done); 1563 __ bind(&done);
1611 context()->Plug(r0); 1564 context()->Plug(r3);
1612 } 1565 }
1613 } 1566 }
1614 } 1567 }
1615 1568
1616 1569
1617 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { 1570 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1618 Comment cmnt(masm_, "[ RegExpLiteral"); 1571 Comment cmnt(masm_, "[ RegExpLiteral");
1619 Label materialized; 1572 Label materialized;
1620 // Registers will be used as follows: 1573 // Registers will be used as follows:
1621 // r5 = materialized value (RegExp literal) 1574 // r8 = materialized value (RegExp literal)
1622 // r4 = JS function, literals array 1575 // r7 = JS function, literals array
1623 // r3 = literal index 1576 // r6 = literal index
1624 // r2 = RegExp pattern 1577 // r5 = RegExp pattern
1625 // r1 = RegExp flags 1578 // r4 = RegExp flags
1626 // r0 = RegExp literal clone 1579 // r3 = RegExp literal clone
1627 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1580 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1628 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); 1581 __ LoadP(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1629 int literal_offset = 1582 int literal_offset =
1630 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; 1583 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1631 __ ldr(r5, FieldMemOperand(r4, literal_offset)); 1584 __ LoadP(r8, FieldMemOperand(r7, literal_offset), r0);
1632 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1585 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1633 __ cmp(r5, ip); 1586 __ cmp(r8, ip);
1634 __ b(ne, &materialized); 1587 __ bne(&materialized);
1635 1588
1636 // Create regexp literal using runtime function. 1589 // Create regexp literal using runtime function.
1637 // Result will be in r0. 1590 // Result will be in r3.
1638 __ mov(r3, Operand(Smi::FromInt(expr->literal_index()))); 1591 __ LoadSmiLiteral(r6, Smi::FromInt(expr->literal_index()));
1639 __ mov(r2, Operand(expr->pattern())); 1592 __ mov(r5, Operand(expr->pattern()));
1640 __ mov(r1, Operand(expr->flags())); 1593 __ mov(r4, Operand(expr->flags()));
1641 __ Push(r4, r3, r2, r1); 1594 __ Push(r7, r6, r5, r4);
1642 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1595 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1643 __ mov(r5, r0); 1596 __ mr(r8, r3);
1644 1597
1645 __ bind(&materialized); 1598 __ bind(&materialized);
1646 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 1599 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1647 Label allocated, runtime_allocate; 1600 Label allocated, runtime_allocate;
1648 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); 1601 __ Allocate(size, r3, r5, r6, &runtime_allocate, TAG_OBJECT);
1649 __ jmp(&allocated); 1602 __ b(&allocated);
1650 1603
1651 __ bind(&runtime_allocate); 1604 __ bind(&runtime_allocate);
1652 __ mov(r0, Operand(Smi::FromInt(size))); 1605 __ LoadSmiLiteral(r3, Smi::FromInt(size));
1653 __ Push(r5, r0); 1606 __ Push(r8, r3);
1654 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 1607 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1655 __ pop(r5); 1608 __ pop(r8);
1656 1609
1657 __ bind(&allocated); 1610 __ bind(&allocated);
1658 // After this, registers are used as follows: 1611 // After this, registers are used as follows:
1659 // r0: Newly allocated regexp. 1612 // r3: Newly allocated regexp.
1660 // r5: Materialized regexp. 1613 // r8: Materialized regexp.
1661 // r2: temp. 1614 // r5: temp.
1662 __ CopyFields(r0, r5, d0, size / kPointerSize); 1615 __ CopyFields(r3, r8, r5.bit(), size / kPointerSize);
1663 context()->Plug(r0); 1616 context()->Plug(r3);
1664 } 1617 }
1665 1618
1666 1619
1667 void FullCodeGenerator::EmitAccessor(Expression* expression) { 1620 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1668 if (expression == NULL) { 1621 if (expression == NULL) {
1669 __ LoadRoot(r1, Heap::kNullValueRootIndex); 1622 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1670 __ push(r1); 1623 __ push(r4);
1671 } else { 1624 } else {
1672 VisitForStackValue(expression); 1625 VisitForStackValue(expression);
1673 } 1626 }
1674 } 1627 }
1675 1628
1676 1629
1677 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { 1630 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1678 Comment cmnt(masm_, "[ ObjectLiteral"); 1631 Comment cmnt(masm_, "[ ObjectLiteral");
1679 1632
1680 expr->BuildConstantProperties(isolate()); 1633 expr->BuildConstantProperties(isolate());
1681 Handle<FixedArray> constant_properties = expr->constant_properties(); 1634 Handle<FixedArray> constant_properties = expr->constant_properties();
1682 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1635 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1683 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1636 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1684 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1637 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1685 __ mov(r1, Operand(constant_properties)); 1638 __ mov(r4, Operand(constant_properties));
1686 int flags = expr->fast_elements() 1639 int flags = expr->fast_elements() ? ObjectLiteral::kFastElements
1687 ? ObjectLiteral::kFastElements 1640 : ObjectLiteral::kNoFlags;
1688 : ObjectLiteral::kNoFlags; 1641 flags |= expr->has_function() ? ObjectLiteral::kHasFunction
1689 flags |= expr->has_function() 1642 : ObjectLiteral::kNoFlags;
1690 ? ObjectLiteral::kHasFunction 1643 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1691 : ObjectLiteral::kNoFlags;
1692 __ mov(r0, Operand(Smi::FromInt(flags)));
1693 int properties_count = constant_properties->length() / 2; 1644 int properties_count = constant_properties->length() / 2;
1694 if (expr->may_store_doubles() || expr->depth() > 1 || 1645 if (expr->may_store_doubles() || expr->depth() > 1 ||
1695 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements || 1646 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1696 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1647 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1697 __ Push(r3, r2, r1, r0); 1648 __ Push(r6, r5, r4, r3);
1698 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); 1649 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1699 } else { 1650 } else {
1700 FastCloneShallowObjectStub stub(isolate(), properties_count); 1651 FastCloneShallowObjectStub stub(isolate(), properties_count);
1701 __ CallStub(&stub); 1652 __ CallStub(&stub);
1702 } 1653 }
1703 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); 1654 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1704 1655
1705 // If result_saved is true the result is on top of the stack. If 1656 // If result_saved is true the result is on top of the stack. If
1706 // result_saved is false the result is in r0. 1657 // result_saved is false the result is in r3.
1707 bool result_saved = false; 1658 bool result_saved = false;
1708 1659
1709 // Mark all computed expressions that are bound to a key that 1660 // Mark all computed expressions that are bound to a key that
1710 // is shadowed by a later occurrence of the same key. For the 1661 // is shadowed by a later occurrence of the same key. For the
1711 // marked expressions, no store code is emitted. 1662 // marked expressions, no store code is emitted.
1712 expr->CalculateEmitStore(zone()); 1663 expr->CalculateEmitStore(zone());
1713 1664
1714 AccessorTable accessor_table(zone()); 1665 AccessorTable accessor_table(zone());
1715 for (int i = 0; i < expr->properties()->length(); i++) { 1666 for (int i = 0; i < expr->properties()->length(); i++) {
1716 ObjectLiteral::Property* property = expr->properties()->at(i); 1667 ObjectLiteral::Property* property = expr->properties()->at(i);
1717 if (property->IsCompileTimeValue()) continue; 1668 if (property->IsCompileTimeValue()) continue;
1718 1669
1719 Literal* key = property->key(); 1670 Literal* key = property->key();
1720 Expression* value = property->value(); 1671 Expression* value = property->value();
1721 if (!result_saved) { 1672 if (!result_saved) {
1722 __ push(r0); // Save result on stack 1673 __ push(r3); // Save result on stack
1723 result_saved = true; 1674 result_saved = true;
1724 } 1675 }
1725 switch (property->kind()) { 1676 switch (property->kind()) {
1726 case ObjectLiteral::Property::CONSTANT: 1677 case ObjectLiteral::Property::CONSTANT:
1727 UNREACHABLE(); 1678 UNREACHABLE();
1728 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1679 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1729 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); 1680 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1730 // Fall through. 1681 // Fall through.
1731 case ObjectLiteral::Property::COMPUTED: 1682 case ObjectLiteral::Property::COMPUTED:
1732 // It is safe to use [[Put]] here because the boilerplate already 1683 // It is safe to use [[Put]] here because the boilerplate already
1733 // contains computed properties with an uninitialized value. 1684 // contains computed properties with an uninitialized value.
1734 if (key->value()->IsInternalizedString()) { 1685 if (key->value()->IsInternalizedString()) {
1735 if (property->emit_store()) { 1686 if (property->emit_store()) {
1736 VisitForAccumulatorValue(value); 1687 VisitForAccumulatorValue(value);
1737 DCHECK(StoreDescriptor::ValueRegister().is(r0)); 1688 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1738 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); 1689 __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1739 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); 1690 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1740 CallStoreIC(key->LiteralFeedbackId()); 1691 CallStoreIC(key->LiteralFeedbackId());
1741 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1692 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1742 } else { 1693 } else {
1743 VisitForEffect(value); 1694 VisitForEffect(value);
1744 } 1695 }
1745 break; 1696 break;
1746 } 1697 }
1747 // Duplicate receiver on stack. 1698 // Duplicate receiver on stack.
1748 __ ldr(r0, MemOperand(sp)); 1699 __ LoadP(r3, MemOperand(sp));
1749 __ push(r0); 1700 __ push(r3);
1750 VisitForStackValue(key); 1701 VisitForStackValue(key);
1751 VisitForStackValue(value); 1702 VisitForStackValue(value);
1752 if (property->emit_store()) { 1703 if (property->emit_store()) {
1753 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes 1704 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
1754 __ push(r0); 1705 __ push(r3);
1755 __ CallRuntime(Runtime::kSetProperty, 4); 1706 __ CallRuntime(Runtime::kSetProperty, 4);
1756 } else { 1707 } else {
1757 __ Drop(3); 1708 __ Drop(3);
1758 } 1709 }
1759 break; 1710 break;
1760 case ObjectLiteral::Property::PROTOTYPE: 1711 case ObjectLiteral::Property::PROTOTYPE:
1761 // Duplicate receiver on stack. 1712 // Duplicate receiver on stack.
1762 __ ldr(r0, MemOperand(sp)); 1713 __ LoadP(r3, MemOperand(sp));
1763 __ push(r0); 1714 __ push(r3);
1764 VisitForStackValue(value); 1715 VisitForStackValue(value);
1765 if (property->emit_store()) { 1716 if (property->emit_store()) {
1766 __ CallRuntime(Runtime::kInternalSetPrototype, 2); 1717 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1767 } else { 1718 } else {
1768 __ Drop(2); 1719 __ Drop(2);
1769 } 1720 }
1770 break; 1721 break;
1771
1772 case ObjectLiteral::Property::GETTER: 1722 case ObjectLiteral::Property::GETTER:
1773 accessor_table.lookup(key)->second->getter = value; 1723 accessor_table.lookup(key)->second->getter = value;
1774 break; 1724 break;
1775 case ObjectLiteral::Property::SETTER: 1725 case ObjectLiteral::Property::SETTER:
1776 accessor_table.lookup(key)->second->setter = value; 1726 accessor_table.lookup(key)->second->setter = value;
1777 break; 1727 break;
1778 } 1728 }
1779 } 1729 }
1780 1730
1781 // Emit code to define accessors, using only a single call to the runtime for 1731 // Emit code to define accessors, using only a single call to the runtime for
1782 // each pair of corresponding getters and setters. 1732 // each pair of corresponding getters and setters.
1783 for (AccessorTable::Iterator it = accessor_table.begin(); 1733 for (AccessorTable::Iterator it = accessor_table.begin();
1784 it != accessor_table.end(); 1734 it != accessor_table.end(); ++it) {
1785 ++it) { 1735 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1786 __ ldr(r0, MemOperand(sp)); // Duplicate receiver. 1736 __ push(r3);
1787 __ push(r0);
1788 VisitForStackValue(it->first); 1737 VisitForStackValue(it->first);
1789 EmitAccessor(it->second->getter); 1738 EmitAccessor(it->second->getter);
1790 EmitAccessor(it->second->setter); 1739 EmitAccessor(it->second->setter);
1791 __ mov(r0, Operand(Smi::FromInt(NONE))); 1740 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1792 __ push(r0); 1741 __ push(r3);
1793 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); 1742 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1794 } 1743 }
1795 1744
1796 if (expr->has_function()) { 1745 if (expr->has_function()) {
1797 DCHECK(result_saved); 1746 DCHECK(result_saved);
1798 __ ldr(r0, MemOperand(sp)); 1747 __ LoadP(r3, MemOperand(sp));
1799 __ push(r0); 1748 __ push(r3);
1800 __ CallRuntime(Runtime::kToFastProperties, 1); 1749 __ CallRuntime(Runtime::kToFastProperties, 1);
1801 } 1750 }
1802 1751
1803 if (result_saved) { 1752 if (result_saved) {
1804 context()->PlugTOS(); 1753 context()->PlugTOS();
1805 } else { 1754 } else {
1806 context()->Plug(r0); 1755 context()->Plug(r3);
1807 } 1756 }
1808 } 1757 }
1809 1758
1810 1759
1811 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1760 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1812 Comment cmnt(masm_, "[ ArrayLiteral"); 1761 Comment cmnt(masm_, "[ ArrayLiteral");
1813 1762
1814 expr->BuildConstantElements(isolate()); 1763 expr->BuildConstantElements(isolate());
1815 int flags = expr->depth() == 1 1764 int flags = expr->depth() == 1 ? ArrayLiteral::kShallowElements
1816 ? ArrayLiteral::kShallowElements 1765 : ArrayLiteral::kNoFlags;
1817 : ArrayLiteral::kNoFlags;
1818 1766
1819 ZoneList<Expression*>* subexprs = expr->values(); 1767 ZoneList<Expression*>* subexprs = expr->values();
1820 int length = subexprs->length(); 1768 int length = subexprs->length();
1821 Handle<FixedArray> constant_elements = expr->constant_elements(); 1769 Handle<FixedArray> constant_elements = expr->constant_elements();
1822 DCHECK_EQ(2, constant_elements->length()); 1770 DCHECK_EQ(2, constant_elements->length());
1823 ElementsKind constant_elements_kind = 1771 ElementsKind constant_elements_kind =
1824 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1772 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1825 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); 1773 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
1826 Handle<FixedArrayBase> constant_elements_values( 1774 Handle<FixedArrayBase> constant_elements_values(
1827 FixedArrayBase::cast(constant_elements->get(1))); 1775 FixedArrayBase::cast(constant_elements->get(1)));
1828 1776
1829 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; 1777 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1830 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { 1778 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1831 // If the only customer of allocation sites is transitioning, then 1779 // If the only customer of allocation sites is transitioning, then
1832 // we can turn it off if we don't have anywhere else to transition to. 1780 // we can turn it off if we don't have anywhere else to transition to.
1833 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; 1781 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1834 } 1782 }
1835 1783
1836 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1784 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1837 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); 1785 __ LoadP(r6, FieldMemOperand(r6, JSFunction::kLiteralsOffset));
1838 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); 1786 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1839 __ mov(r1, Operand(constant_elements)); 1787 __ mov(r4, Operand(constant_elements));
1840 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) { 1788 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1841 __ mov(r0, Operand(Smi::FromInt(flags))); 1789 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1842 __ Push(r3, r2, r1, r0); 1790 __ Push(r6, r5, r4, r3);
1843 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); 1791 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1844 } else { 1792 } else {
1845 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); 1793 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1846 __ CallStub(&stub); 1794 __ CallStub(&stub);
1847 } 1795 }
1848 1796
1849 bool result_saved = false; // Is the result saved to the stack? 1797 bool result_saved = false; // Is the result saved to the stack?
1850 1798
1851 // Emit code to evaluate all the non-constant subexpressions and to store 1799 // Emit code to evaluate all the non-constant subexpressions and to store
1852 // them into the newly cloned array. 1800 // them into the newly cloned array.
1853 for (int i = 0; i < length; i++) { 1801 for (int i = 0; i < length; i++) {
1854 Expression* subexpr = subexprs->at(i); 1802 Expression* subexpr = subexprs->at(i);
1855 // If the subexpression is a literal or a simple materialized literal it 1803 // If the subexpression is a literal or a simple materialized literal it
1856 // is already set in the cloned array. 1804 // is already set in the cloned array.
1857 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 1805 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1858 1806
1859 if (!result_saved) { 1807 if (!result_saved) {
1860 __ push(r0); 1808 __ push(r3);
1861 __ Push(Smi::FromInt(expr->literal_index())); 1809 __ Push(Smi::FromInt(expr->literal_index()));
1862 result_saved = true; 1810 result_saved = true;
1863 } 1811 }
1864 VisitForAccumulatorValue(subexpr); 1812 VisitForAccumulatorValue(subexpr);
1865 1813
1866 if (IsFastObjectElementsKind(constant_elements_kind)) { 1814 if (IsFastObjectElementsKind(constant_elements_kind)) {
1867 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1815 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1868 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal. 1816 __ LoadP(r8, MemOperand(sp, kPointerSize)); // Copy of array literal.
1869 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset)); 1817 __ LoadP(r4, FieldMemOperand(r8, JSObject::kElementsOffset));
1870 __ str(result_register(), FieldMemOperand(r1, offset)); 1818 __ StoreP(result_register(), FieldMemOperand(r4, offset), r0);
1871 // Update the write barrier for the array store. 1819 // Update the write barrier for the array store.
1872 __ RecordWriteField(r1, offset, result_register(), r2, 1820 __ RecordWriteField(r4, offset, result_register(), r5, kLRHasBeenSaved,
1873 kLRHasBeenSaved, kDontSaveFPRegs, 1821 kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1874 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); 1822 INLINE_SMI_CHECK);
1875 } else { 1823 } else {
1876 __ mov(r3, Operand(Smi::FromInt(i))); 1824 __ LoadSmiLiteral(r6, Smi::FromInt(i));
1877 StoreArrayLiteralElementStub stub(isolate()); 1825 StoreArrayLiteralElementStub stub(isolate());
1878 __ CallStub(&stub); 1826 __ CallStub(&stub);
1879 } 1827 }
1880 1828
1881 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1829 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1882 } 1830 }
1883 1831
1884 if (result_saved) { 1832 if (result_saved) {
1885 __ pop(); // literal index 1833 __ pop(); // literal index
1886 context()->PlugTOS(); 1834 context()->PlugTOS();
1887 } else { 1835 } else {
1888 context()->Plug(r0); 1836 context()->Plug(r3);
1889 } 1837 }
1890 } 1838 }
1891 1839
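A minimal sketch of the fast/slow choice VisitArrayLiteral makes above (the helper name is illustrative, not part of V8): shallow, short literals are cloned via FastCloneShallowArrayStub, while deep or oversized ones take the Runtime::kCreateArrayLiteral path.

    // Sketch only: mirrors the condition at the CallRuntime/CallStub fork.
    bool UseRuntimeCreate(int depth, int length) {
      return depth > 1 || length > JSObject::kInitialMaxFastElementArray;
    }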
1892 1840
1893 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1841 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1894 DCHECK(expr->target()->IsValidReferenceExpression()); 1842 DCHECK(expr->target()->IsValidReferenceExpression());
1895 1843
1896 Comment cmnt(masm_, "[ Assignment"); 1844 Comment cmnt(masm_, "[ Assignment");
1897 1845
1898 Property* property = expr->target()->AsProperty(); 1846 Property* property = expr->target()->AsProperty();
1899 LhsKind assign_type = GetAssignType(property); 1847 LhsKind assign_type = GetAssignType(property);
1900 1848
1901 // Evaluate LHS expression. 1849 // Evaluate LHS expression.
1902 switch (assign_type) { 1850 switch (assign_type) {
1903 case VARIABLE: 1851 case VARIABLE:
1904 // Nothing to do here. 1852 // Nothing to do here.
1905 break; 1853 break;
1906 case NAMED_PROPERTY: 1854 case NAMED_PROPERTY:
1907 if (expr->is_compound()) { 1855 if (expr->is_compound()) {
1908 // We need the receiver both on the stack and in the register. 1856 // We need the receiver both on the stack and in the register.
1909 VisitForStackValue(property->obj()); 1857 VisitForStackValue(property->obj());
1910 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 1858 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1911 } else { 1859 } else {
1912 VisitForStackValue(property->obj()); 1860 VisitForStackValue(property->obj());
1913 } 1861 }
1914 break; 1862 break;
1915 case NAMED_SUPER_PROPERTY: 1863 case NAMED_SUPER_PROPERTY:
1916 VisitForStackValue(property->obj()->AsSuperReference()->this_var()); 1864 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1917 EmitLoadHomeObject(property->obj()->AsSuperReference()); 1865 EmitLoadHomeObject(property->obj()->AsSuperReference());
1918 __ Push(result_register()); 1866 __ Push(result_register());
1919 if (expr->is_compound()) { 1867 if (expr->is_compound()) {
1920 const Register scratch = r1; 1868 const Register scratch = r4;
1921 __ ldr(scratch, MemOperand(sp, kPointerSize)); 1869 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1922 __ Push(scratch); 1870 __ Push(scratch, result_register());
1923 __ Push(result_register());
1924 } 1871 }
1925 break; 1872 break;
1926 case KEYED_SUPER_PROPERTY: 1873 case KEYED_SUPER_PROPERTY: {
1874 const Register scratch = r4;
1927 VisitForStackValue(property->obj()->AsSuperReference()->this_var()); 1875 VisitForStackValue(property->obj()->AsSuperReference()->this_var());
1928 EmitLoadHomeObject(property->obj()->AsSuperReference()); 1876 EmitLoadHomeObject(property->obj()->AsSuperReference());
1929 __ Push(result_register()); 1877 __ Move(scratch, result_register());
1930 VisitForAccumulatorValue(property->key()); 1878 VisitForAccumulatorValue(property->key());
1931 __ Push(result_register()); 1879 __ Push(scratch, result_register());
1932 if (expr->is_compound()) { 1880 if (expr->is_compound()) {
1933 const Register scratch = r1; 1881 const Register scratch1 = r5;
1934 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize)); 1882 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1935 __ Push(scratch); 1883 __ Push(scratch1, scratch, result_register());
1936 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
1937 __ Push(scratch);
1938 __ Push(result_register());
1939 } 1884 }
1940 break; 1885 break;
1886 }
1941 case KEYED_PROPERTY: 1887 case KEYED_PROPERTY:
1942 if (expr->is_compound()) { 1888 if (expr->is_compound()) {
1943 VisitForStackValue(property->obj()); 1889 VisitForStackValue(property->obj());
1944 VisitForStackValue(property->key()); 1890 VisitForStackValue(property->key());
1945 __ ldr(LoadDescriptor::ReceiverRegister(), 1891 __ LoadP(LoadDescriptor::ReceiverRegister(),
1946 MemOperand(sp, 1 * kPointerSize)); 1892 MemOperand(sp, 1 * kPointerSize));
1947 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 1893 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1948 } else { 1894 } else {
1949 VisitForStackValue(property->obj()); 1895 VisitForStackValue(property->obj());
1950 VisitForStackValue(property->key()); 1896 VisitForStackValue(property->key());
1951 } 1897 }
1952 break; 1898 break;
1953 } 1899 }
1954 1900
1955 // For compound assignments we need another deoptimization point after the 1901 // For compound assignments we need another deoptimization point after the
1956 // variable/property load. 1902 // variable/property load.
1957 if (expr->is_compound()) { 1903 if (expr->is_compound()) {
1958 { AccumulatorValueContext context(this); 1904 {
1905 AccumulatorValueContext context(this);
1959 switch (assign_type) { 1906 switch (assign_type) {
1960 case VARIABLE: 1907 case VARIABLE:
1961 EmitVariableLoad(expr->target()->AsVariableProxy()); 1908 EmitVariableLoad(expr->target()->AsVariableProxy());
1962 PrepareForBailout(expr->target(), TOS_REG); 1909 PrepareForBailout(expr->target(), TOS_REG);
1963 break; 1910 break;
1964 case NAMED_PROPERTY: 1911 case NAMED_PROPERTY:
1965 EmitNamedPropertyLoad(property); 1912 EmitNamedPropertyLoad(property);
1966 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1913 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1967 break; 1914 break;
1968 case NAMED_SUPER_PROPERTY: 1915 case NAMED_SUPER_PROPERTY:
1969 EmitNamedSuperPropertyLoad(property); 1916 EmitNamedSuperPropertyLoad(property);
1970 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1917 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1971 break; 1918 break;
1972 case KEYED_SUPER_PROPERTY: 1919 case KEYED_SUPER_PROPERTY:
1973 EmitKeyedSuperPropertyLoad(property); 1920 EmitKeyedSuperPropertyLoad(property);
1974 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1921 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1975 break; 1922 break;
1976 case KEYED_PROPERTY: 1923 case KEYED_PROPERTY:
1977 EmitKeyedPropertyLoad(property); 1924 EmitKeyedPropertyLoad(property);
1978 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1925 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1979 break; 1926 break;
1980 } 1927 }
1981 } 1928 }
1982 1929
1983 Token::Value op = expr->binary_op(); 1930 Token::Value op = expr->binary_op();
1984 __ push(r0); // Left operand goes on the stack. 1931 __ push(r3); // Left operand goes on the stack.
1985 VisitForAccumulatorValue(expr->value()); 1932 VisitForAccumulatorValue(expr->value());
1986 1933
1987 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1934 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1988 ? OVERWRITE_RIGHT 1935 ? OVERWRITE_RIGHT
1989 : NO_OVERWRITE; 1936 : NO_OVERWRITE;
1990 SetSourcePosition(expr->position() + 1); 1937 SetSourcePosition(expr->position() + 1);
1991 AccumulatorValueContext context(this); 1938 AccumulatorValueContext context(this);
1992 if (ShouldInlineSmiCase(op)) { 1939 if (ShouldInlineSmiCase(op)) {
1993 EmitInlineSmiBinaryOp(expr->binary_operation(), 1940 EmitInlineSmiBinaryOp(expr->binary_operation(), op, mode, expr->target(),
1994 op,
1995 mode,
1996 expr->target(),
1997 expr->value()); 1941 expr->value());
1998 } else { 1942 } else {
1999 EmitBinaryOp(expr->binary_operation(), op, mode); 1943 EmitBinaryOp(expr->binary_operation(), op, mode);
2000 } 1944 }
2001 1945
2002 // Deoptimization point in case the binary operation may have side effects. 1946 // Deoptimization point in case the binary operation may have side effects.
2003 PrepareForBailout(expr->binary_operation(), TOS_REG); 1947 PrepareForBailout(expr->binary_operation(), TOS_REG);
2004 } else { 1948 } else {
2005 VisitForAccumulatorValue(expr->value()); 1949 VisitForAccumulatorValue(expr->value());
2006 } 1950 }
2007 1951
2008 // Record source position before possible IC call. 1952 // Record source position before possible IC call.
2009 SetSourcePosition(expr->position()); 1953 SetSourcePosition(expr->position());
2010 1954
2011 // Store the value. 1955 // Store the value.
2012 switch (assign_type) { 1956 switch (assign_type) {
2013 case VARIABLE: 1957 case VARIABLE:
2014 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1958 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2015 expr->op()); 1959 expr->op());
2016 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1960 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2017 context()->Plug(r0); 1961 context()->Plug(r3);
2018 break; 1962 break;
2019 case NAMED_PROPERTY: 1963 case NAMED_PROPERTY:
2020 EmitNamedPropertyAssignment(expr); 1964 EmitNamedPropertyAssignment(expr);
2021 break; 1965 break;
2022 case NAMED_SUPER_PROPERTY: 1966 case NAMED_SUPER_PROPERTY:
2023 EmitNamedSuperPropertyStore(property); 1967 EmitNamedSuperPropertyStore(property);
2024 context()->Plug(r0); 1968 context()->Plug(r3);
2025 break; 1969 break;
2026 case KEYED_SUPER_PROPERTY: 1970 case KEYED_SUPER_PROPERTY:
2027 EmitKeyedSuperPropertyStore(property); 1971 EmitKeyedSuperPropertyStore(property);
2028 context()->Plug(r0); 1972 context()->Plug(r3);
2029 break; 1973 break;
2030 case KEYED_PROPERTY: 1974 case KEYED_PROPERTY:
2031 EmitKeyedPropertyAssignment(expr); 1975 EmitKeyedPropertyAssignment(expr);
2032 break; 1976 break;
2033 } 1977 }
2034 } 1978 }
2035 1979
2036 1980
2037 void FullCodeGenerator::VisitYield(Yield* expr) { 1981 void FullCodeGenerator::VisitYield(Yield* expr) {
2038 Comment cmnt(masm_, "[ Yield"); 1982 Comment cmnt(masm_, "[ Yield");
2039 // Evaluate yielded value first; the initial iterator definition depends on 1983 // Evaluate yielded value first; the initial iterator definition depends on
2040 // this. It stays on the stack while we update the iterator. 1984 // this. It stays on the stack while we update the iterator.
2041 VisitForStackValue(expr->expression()); 1985 VisitForStackValue(expr->expression());
2042 1986
2043 switch (expr->yield_kind()) { 1987 switch (expr->yield_kind()) {
2044 case Yield::kSuspend: 1988 case Yield::kSuspend:
2045 // Pop value from top-of-stack slot; box result into result register. 1989 // Pop value from top-of-stack slot; box result into result register.
2046 EmitCreateIteratorResult(false); 1990 EmitCreateIteratorResult(false);
2047 __ push(result_register()); 1991 __ push(result_register());
2048 // Fall through. 1992 // Fall through.
2049 case Yield::kInitial: { 1993 case Yield::kInitial: {
2050 Label suspend, continuation, post_runtime, resume; 1994 Label suspend, continuation, post_runtime, resume;
2051 1995
2052 __ jmp(&suspend); 1996 __ b(&suspend);
2053 1997
2054 __ bind(&continuation); 1998 __ bind(&continuation);
2055 __ jmp(&resume); 1999 __ b(&resume);
2056 2000
2057 __ bind(&suspend); 2001 __ bind(&suspend);
2058 VisitForAccumulatorValue(expr->generator_object()); 2002 VisitForAccumulatorValue(expr->generator_object());
2059 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 2003 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2060 __ mov(r1, Operand(Smi::FromInt(continuation.pos()))); 2004 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
2061 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 2005 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2062 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 2006 r0);
2063 __ mov(r1, cp); 2007 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2064 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 2008 __ mr(r4, cp);
2009 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2065 kLRHasBeenSaved, kDontSaveFPRegs); 2010 kLRHasBeenSaved, kDontSaveFPRegs);
2066 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); 2011 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2067 __ cmp(sp, r1); 2012 __ cmp(sp, r4);
2068 __ b(eq, &post_runtime); 2013 __ beq(&post_runtime);
2069 __ push(r0); // generator object 2014 __ push(r3); // generator object
2070 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2015 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2071 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2016 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2072 __ bind(&post_runtime); 2017 __ bind(&post_runtime);
2073 __ pop(result_register()); 2018 __ pop(result_register());
2074 EmitReturnSequence(); 2019 EmitReturnSequence();
2075 2020
2076 __ bind(&resume); 2021 __ bind(&resume);
2077 context()->Plug(result_register()); 2022 context()->Plug(result_register());
2078 break; 2023 break;
2079 } 2024 }
2080 2025
2081 case Yield::kFinal: { 2026 case Yield::kFinal: {
2082 VisitForAccumulatorValue(expr->generator_object()); 2027 VisitForAccumulatorValue(expr->generator_object());
2083 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); 2028 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
2084 __ str(r1, FieldMemOperand(result_register(), 2029 __ StoreP(r4, FieldMemOperand(result_register(),
2085 JSGeneratorObject::kContinuationOffset)); 2030 JSGeneratorObject::kContinuationOffset),
2031 r0);
2086 // Pop value from top-of-stack slot, box result into result register. 2032 // Pop value from top-of-stack slot, box result into result register.
2087 EmitCreateIteratorResult(true); 2033 EmitCreateIteratorResult(true);
2088 EmitUnwindBeforeReturn(); 2034 EmitUnwindBeforeReturn();
2089 EmitReturnSequence(); 2035 EmitReturnSequence();
2090 break; 2036 break;
2091 } 2037 }
2092 2038
2093 case Yield::kDelegating: { 2039 case Yield::kDelegating: {
2094 VisitForStackValue(expr->generator_object()); 2040 VisitForStackValue(expr->generator_object());
2095 2041
2096 // Initial stack layout is as follows: 2042 // Initial stack layout is as follows:
2097 // [sp + 1 * kPointerSize] iter 2043 // [sp + 1 * kPointerSize] iter
2098 // [sp + 0 * kPointerSize] g 2044 // [sp + 0 * kPointerSize] g
2099 2045
2100 Label l_catch, l_try, l_suspend, l_continuation, l_resume; 2046 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2101 Label l_next, l_call, l_loop; 2047 Label l_next, l_call;
2102 Register load_receiver = LoadDescriptor::ReceiverRegister(); 2048 Register load_receiver = LoadDescriptor::ReceiverRegister();
2103 Register load_name = LoadDescriptor::NameRegister(); 2049 Register load_name = LoadDescriptor::NameRegister();
2104 2050
2105 // Initial send value is undefined. 2051 // Initial send value is undefined.
2106 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 2052 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2107 __ b(&l_next); 2053 __ b(&l_next);
2108 2054
2109 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } 2055 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2110 __ bind(&l_catch); 2056 __ bind(&l_catch);
2111 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); 2057 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2112 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw" 2058 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
2113 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2059 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2114 __ Push(load_name, r3, r0); // "throw", iter, except 2060 __ Push(load_name, r6, r3); // "throw", iter, except
2115 __ jmp(&l_call); 2061 __ b(&l_call);
2116 2062
2117 // try { received = %yield result } 2063 // try { received = %yield result }
2118 // Shuffle the received result above a try handler and yield it without 2064 // Shuffle the received result above a try handler and yield it without
2119 // re-boxing. 2065 // re-boxing.
2120 __ bind(&l_try); 2066 __ bind(&l_try);
2121 __ pop(r0); // result 2067 __ pop(r3); // result
2122 __ PushTryHandler(StackHandler::CATCH, expr->index()); 2068 __ PushTryHandler(StackHandler::CATCH, expr->index());
2123 const int handler_size = StackHandlerConstants::kSize; 2069 const int handler_size = StackHandlerConstants::kSize;
2124 __ push(r0); // result 2070 __ push(r3); // result
2125 __ jmp(&l_suspend); 2071 __ b(&l_suspend);
2126 __ bind(&l_continuation); 2072 __ bind(&l_continuation);
2127 __ jmp(&l_resume); 2073 __ b(&l_resume);
2128 __ bind(&l_suspend); 2074 __ bind(&l_suspend);
2129 const int generator_object_depth = kPointerSize + handler_size; 2075 const int generator_object_depth = kPointerSize + handler_size;
2130 __ ldr(r0, MemOperand(sp, generator_object_depth)); 2076 __ LoadP(r3, MemOperand(sp, generator_object_depth));
2131 __ push(r0); // g 2077 __ push(r3); // g
2132 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2078 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2133 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos()))); 2079 __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
2134 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); 2080 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
2135 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); 2081 r0);
2136 __ mov(r1, cp); 2082 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
2137 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, 2083 __ mr(r4, cp);
2084 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
2138 kLRHasBeenSaved, kDontSaveFPRegs); 2085 kLRHasBeenSaved, kDontSaveFPRegs);
2139 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2086 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2140 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2087 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2141 __ pop(r0); // result 2088 __ pop(r3); // result
2142 EmitReturnSequence(); 2089 EmitReturnSequence();
2143 __ bind(&l_resume); // received in r0 2090 __ bind(&l_resume); // received in r3
2144 __ PopTryHandler(); 2091 __ PopTryHandler();
2145 2092
2146 // receiver = iter; f = 'next'; arg = received; 2093 // receiver = iter; f = 'next'; arg = received;
2147 __ bind(&l_next); 2094 __ bind(&l_next);
2148 2095
2149 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next" 2096 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2150 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter 2097 __ LoadP(r6, MemOperand(sp, 1 * kPointerSize)); // iter
2151 __ Push(load_name, r3, r0); // "next", iter, received 2098 __ Push(load_name, r6, r3); // "next", iter, received
2152 2099
2153 // result = receiver[f](arg); 2100 // result = receiver[f](arg);
2154 __ bind(&l_call); 2101 __ bind(&l_call);
2155 __ ldr(load_receiver, MemOperand(sp, kPointerSize)); 2102 __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2156 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize)); 2103 __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2157 if (FLAG_vector_ics) { 2104 if (FLAG_vector_ics) {
2158 __ mov(VectorLoadICDescriptor::SlotRegister(), 2105 __ mov(VectorLoadICDescriptor::SlotRegister(),
2159 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot()))); 2106 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2160 } 2107 }
2161 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code(); 2108 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2162 CallIC(ic, TypeFeedbackId::None()); 2109 CallIC(ic, TypeFeedbackId::None());
2163 __ mov(r1, r0); 2110 __ mr(r4, r3);
2164 __ str(r1, MemOperand(sp, 2 * kPointerSize)); 2111 __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2165 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); 2112 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2166 __ CallStub(&stub); 2113 __ CallStub(&stub);
2167 2114
2168 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2115 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2169 __ Drop(1); // The function is still on the stack; drop it. 2116 __ Drop(1); // The function is still on the stack; drop it.
2170 2117
2171 // if (!result.done) goto l_try; 2118 // if (!result.done) goto l_try;
2172 __ bind(&l_loop); 2119 __ Move(load_receiver, r3);
2173 __ Move(load_receiver, r0);
2174 2120
2175 __ push(load_receiver); // save result 2121 __ push(load_receiver); // save result
2176 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" 2122 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2177 if (FLAG_vector_ics) { 2123 if (FLAG_vector_ics) {
2178 __ mov(VectorLoadICDescriptor::SlotRegister(), 2124 __ mov(VectorLoadICDescriptor::SlotRegister(),
2179 Operand(SmiFromSlot(expr->DoneFeedbackSlot()))); 2125 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2180 } 2126 }
2181 CallLoadIC(NOT_CONTEXTUAL); // r0=result.done 2127 CallLoadIC(NOT_CONTEXTUAL); // r3=result.done
2182 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); 2128 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2183 CallIC(bool_ic); 2129 CallIC(bool_ic);
2184 __ cmp(r0, Operand(0)); 2130 __ cmpi(r3, Operand::Zero());
2185 __ b(eq, &l_try); 2131 __ beq(&l_try);
2186 2132
2187 // result.value 2133 // result.value
2188 __ pop(load_receiver); // result 2134 __ pop(load_receiver); // result
2189 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" 2135 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2190 if (FLAG_vector_ics) { 2136 if (FLAG_vector_ics) {
2191 __ mov(VectorLoadICDescriptor::SlotRegister(), 2137 __ mov(VectorLoadICDescriptor::SlotRegister(),
2192 Operand(SmiFromSlot(expr->ValueFeedbackSlot()))); 2138 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2193 } 2139 }
2194 CallLoadIC(NOT_CONTEXTUAL); // r0=result.value 2140 CallLoadIC(NOT_CONTEXTUAL); // r3=result.value
2195 context()->DropAndPlug(2, r0); // drop iter and g 2141 context()->DropAndPlug(2, r3); // drop iter and g
2196 break; 2142 break;
2197 } 2143 }
2198 } 2144 }
2199 } 2145 }
2200 2146
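A rough C++ rendering of the control flow that the l_next / l_call / l_try labels above implement for a delegating yield (Iterator, Result, and SuspendAndYield are hypothetical helpers, not V8 API):

    // Sketch only: the yield* delegation loop, minus exception plumbing.
    struct Object;
    struct Result { Object* value; bool done; };
    struct Iterator {
      virtual Result Next(Object* arg) = 0;    // receiver["next"]
      virtual Result Throw(Object* exn) = 0;   // receiver["throw"]
    };
    Object* SuspendAndYield(Object* value);    // suspends the generator (hypothetical)

    Object* DelegateYield(Iterator* iter, Object* undefined_value) {
      Object* received = undefined_value;          // initial send value (l_next)
      for (;;) {
        Result result = iter->Next(received);      // result = receiver[f](arg)  (l_call)
        if (result.done) return result.value;      // the "result.value" exit
        received = SuspendAndYield(result.value);  // try { received = %yield result }
        // A throw resumed into the generator re-enters at l_catch and calls
        // iter->Throw(exception) instead of Next.
      }
    }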
2201 2147
2202 void FullCodeGenerator::EmitGeneratorResume(Expression *generator, 2148 void FullCodeGenerator::EmitGeneratorResume(
2203 Expression *value, 2149 Expression* generator, Expression* value,
2204 JSGeneratorObject::ResumeMode resume_mode) { 2150 JSGeneratorObject::ResumeMode resume_mode) {
2205 // The value stays in r0, and is ultimately read by the resumed generator, as 2151 // The value stays in r3, and is ultimately read by the resumed generator, as
2206 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it 2152 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2207 // is read to throw the value when the resumed generator is already closed. 2153 // is read to throw the value when the resumed generator is already closed.
2208 // r1 will hold the generator object until the activation has been resumed. 2154 // r4 will hold the generator object until the activation has been resumed.
2209 VisitForStackValue(generator); 2155 VisitForStackValue(generator);
2210 VisitForAccumulatorValue(value); 2156 VisitForAccumulatorValue(value);
2211 __ pop(r1); 2157 __ pop(r4);
2212 2158
2213 // Check generator state. 2159 // Check generator state.
2214 Label wrong_state, closed_state, done; 2160 Label wrong_state, closed_state, done;
2215 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2161 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2216 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); 2162 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2217 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); 2163 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2218 __ cmp(r3, Operand(Smi::FromInt(0))); 2164 __ CmpSmiLiteral(r6, Smi::FromInt(0), r0);
2219 __ b(eq, &closed_state); 2165 __ beq(&closed_state);
2220 __ b(lt, &wrong_state); 2166 __ blt(&wrong_state);
2221 2167
2222 // Load suspended function and context. 2168 // Load suspended function and context.
2223 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset)); 2169 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2224 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset)); 2170 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2225 2171
2226 // Load receiver and store as the first argument. 2172 // Load receiver and store as the first argument.
2227 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset)); 2173 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2228 __ push(r2); 2174 __ push(r5);
2229 2175
2230 // Push holes for the rest of the arguments to the generator function. 2176 // Push holes for the rest of the arguments to the generator function.
2231 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); 2177 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2232 __ ldr(r3, 2178 __ LoadWordArith(
2233 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); 2179 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2234 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); 2180 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2235 Label push_argument_holes, push_frame; 2181 Label argument_loop, push_frame;
2236 __ bind(&push_argument_holes); 2182 #if V8_TARGET_ARCH_PPC64
2237 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC); 2183 __ cmpi(r6, Operand::Zero());
2238 __ b(mi, &push_frame); 2184 __ beq(&push_frame);
2239 __ push(r2); 2185 #else
2240 __ jmp(&push_argument_holes); 2186 __ SmiUntag(r6, SetRC);
2187 __ beq(&push_frame, cr0);
2188 #endif
2189 __ mtctr(r6);
2190 __ bind(&argument_loop);
2191 __ push(r5);
2192 __ bdnz(&argument_loop);
2241 2193
2242 // Enter a new JavaScript frame, and initialize its slots as they were when 2194 // Enter a new JavaScript frame, and initialize its slots as they were when
2243 // the generator was suspended. 2195 // the generator was suspended.
2244 Label resume_frame; 2196 Label resume_frame;
2245 __ bind(&push_frame); 2197 __ bind(&push_frame);
2246 __ bl(&resume_frame); 2198 __ b(&resume_frame, SetLK);
2247 __ jmp(&done); 2199 __ b(&done);
2248 __ bind(&resume_frame); 2200 __ bind(&resume_frame);
2249 // lr = return address. 2201 // lr = return address.
2250 // fp = caller's frame pointer. 2202 // fp = caller's frame pointer.
2251 // pp = caller's constant pool (if FLAG_enable_ool_constant_pool),
2252 // cp = callee's context, 2203 // cp = callee's context,
2253 // r4 = callee's JS function. 2204 // r7 = callee's JS function.
2254 __ PushFixedFrame(r4); 2205 __ PushFixedFrame(r7);
2255 // Adjust FP to point to saved FP. 2206 // Adjust FP to point to saved FP.
2256 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); 2207 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2257 2208
2258 // Load the operand stack size. 2209 // Load the operand stack size.
2259 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset)); 2210 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2260 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset)); 2211 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2261 __ SmiUntag(r3); 2212 __ SmiUntag(r6, SetRC);
2262 2213
2263 // If we are sending a value and there is no operand stack, we can jump back 2214 // If we are sending a value and there is no operand stack, we can jump back
2264 // in directly. 2215 // in directly.
2216 Label call_resume;
2265 if (resume_mode == JSGeneratorObject::NEXT) { 2217 if (resume_mode == JSGeneratorObject::NEXT) {
2266 Label slow_resume; 2218 Label slow_resume;
2267 __ cmp(r3, Operand(0)); 2219 __ bne(&slow_resume, cr0);
2268 __ b(ne, &slow_resume); 2220 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2269 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); 2221 #if V8_OOL_CONSTANT_POOL
2270 2222 {
2271 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); 2223 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2272 if (FLAG_enable_ool_constant_pool) { 2224 // Load the new code object's constant pool pointer.
2273 // Load the new code object's constant pool pointer. 2225 __ LoadP(kConstantPoolRegister,
2274 __ ldr(pp, 2226 MemOperand(ip, Code::kConstantPoolOffset - Code::kHeaderSize));
2275 MemOperand(r3, Code::kConstantPoolOffset - Code::kHeaderSize)); 2227 #endif
2276 } 2228 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2277 2229 __ SmiUntag(r5);
2278 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2230 __ add(ip, ip, r5);
2279 __ SmiUntag(r2); 2231 __ LoadSmiLiteral(r5,
2280 __ add(r3, r3, r2); 2232 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2281 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); 2233 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2282 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); 2234 r0);
2283 __ Jump(r3); 2235 __ Jump(ip);
2236 __ bind(&slow_resume);
2237 #if V8_OOL_CONSTANT_POOL
2284 } 2238 }
2285 __ bind(&slow_resume); 2239 #endif
2240 } else {
2241 __ beq(&call_resume, cr0);
2286 } 2242 }
2287 2243
2288 // Otherwise, we push holes for the operand stack and call the runtime to fix 2244 // Otherwise, we push holes for the operand stack and call the runtime to fix
2289 // up the stack and the handlers. 2245 // up the stack and the handlers.
2290 Label push_operand_holes, call_resume; 2246 Label operand_loop;
2291 __ bind(&push_operand_holes); 2247 __ mtctr(r6);
2292 __ sub(r3, r3, Operand(1), SetCC); 2248 __ bind(&operand_loop);
2293 __ b(mi, &call_resume); 2249 __ push(r5);
2294 __ push(r2); 2250 __ bdnz(&operand_loop);
2295 __ b(&push_operand_holes); 2251
2296 __ bind(&call_resume); 2252 __ bind(&call_resume);
2297 DCHECK(!result_register().is(r1)); 2253 DCHECK(!result_register().is(r4));
2298 __ Push(r1, result_register()); 2254 __ Push(r4, result_register());
2299 __ Push(Smi::FromInt(resume_mode)); 2255 __ Push(Smi::FromInt(resume_mode));
2300 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); 2256 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2301 // Not reached: the runtime call returns elsewhere. 2257 // Not reached: the runtime call returns elsewhere.
2302 __ stop("not-reached"); 2258 __ stop("not-reached");
2303 2259
2304 // Reach here when generator is closed. 2260 // Reach here when generator is closed.
2305 __ bind(&closed_state); 2261 __ bind(&closed_state);
2306 if (resume_mode == JSGeneratorObject::NEXT) { 2262 if (resume_mode == JSGeneratorObject::NEXT) {
2307 // Return completed iterator result when generator is closed. 2263 // Return completed iterator result when generator is closed.
2308 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 2264 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2309 __ push(r2); 2265 __ push(r5);
2310 // Pop value from top-of-stack slot; box result into result register. 2266 // Pop value from top-of-stack slot; box result into result register.
2311 EmitCreateIteratorResult(true); 2267 EmitCreateIteratorResult(true);
2312 } else { 2268 } else {
2313 // Throw the provided value. 2269 // Throw the provided value.
2314 __ push(r0); 2270 __ push(r3);
2315 __ CallRuntime(Runtime::kThrow, 1); 2271 __ CallRuntime(Runtime::kThrow, 1);
2316 } 2272 }
2317 __ jmp(&done); 2273 __ b(&done);
2318 2274
2319 // Throw error if we attempt to operate on a running generator. 2275 // Throw error if we attempt to operate on a running generator.
2320 __ bind(&wrong_state); 2276 __ bind(&wrong_state);
2321 __ push(r1); 2277 __ push(r4);
2322 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1); 2278 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2323 2279
2324 __ bind(&done); 2280 __ bind(&done);
2325 context()->Plug(result_register()); 2281 context()->Plug(result_register());
2326 } 2282 }
2327 2283
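The fast resume path above reduces to "jump into the middle of the generator's code object". A sketch with illustrative parameters (the smi constants are V8's, the function is not):

    // Sketch only: target of the __ Jump(ip) in the NEXT fast path.
    Address ResumeTarget(Address code_entry, intptr_t tagged_continuation) {
      // SmiUntag: drop the tag bit (and, on 64-bit, the 31-bit shift).
      intptr_t offset = tagged_continuation >> (kSmiTagSize + kSmiShiftSize);
      return code_entry + offset;  // byte offset into the code object
    }

The mtctr/bdnz pairs replace ARM's subtract-and-branch loops: the hole count is moved into the PPC count register and bdnz pushes one hole per iteration. Note the zero-count guard branched around first, since bdnz with CTR == 0 would otherwise wrap.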
2328 2284
2329 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { 2285 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2330 Label gc_required; 2286 Label gc_required;
2331 Label allocated; 2287 Label allocated;
2332 2288
2333 const int instance_size = 5 * kPointerSize; 2289 const int instance_size = 5 * kPointerSize;
2334 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(), 2290 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2335 instance_size); 2291 instance_size);
2336 2292
2337 __ Allocate(instance_size, r0, r2, r3, &gc_required, TAG_OBJECT); 2293 __ Allocate(instance_size, r3, r5, r6, &gc_required, TAG_OBJECT);
2338 __ jmp(&allocated); 2294 __ b(&allocated);
2339 2295
2340 __ bind(&gc_required); 2296 __ bind(&gc_required);
2341 __ Push(Smi::FromInt(instance_size)); 2297 __ Push(Smi::FromInt(instance_size));
2342 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 2298 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2343 __ ldr(context_register(), 2299 __ LoadP(context_register(),
2344 MemOperand(fp, StandardFrameConstants::kContextOffset)); 2300 MemOperand(fp, StandardFrameConstants::kContextOffset));
2345 2301
2346 __ bind(&allocated); 2302 __ bind(&allocated);
2347 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 2303 __ LoadP(r4, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2348 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset)); 2304 __ LoadP(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset));
2349 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX)); 2305 __ LoadP(r4, ContextOperand(r4, Context::ITERATOR_RESULT_MAP_INDEX));
2350 __ pop(r2); 2306 __ pop(r5);
2351 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done))); 2307 __ mov(r6, Operand(isolate()->factory()->ToBoolean(done)));
2352 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array())); 2308 __ mov(r7, Operand(isolate()->factory()->empty_fixed_array()));
2353 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 2309 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2354 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 2310 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2355 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); 2311 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2356 __ str(r2, 2312 __ StoreP(r5,
2357 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset)); 2313 FieldMemOperand(r3, JSGeneratorObject::kResultValuePropertyOffset),
2358 __ str(r3, 2314 r0);
2359 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset)); 2315 __ StoreP(r6,
2316 FieldMemOperand(r3, JSGeneratorObject::kResultDonePropertyOffset),
2317 r0);
2360 2318
2361 // Only the value field needs a write barrier, as the other values are in the 2319 // Only the value field needs a write barrier, as the other values are in the
2362 // root set. 2320 // root set.
2363 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset, 2321 __ RecordWriteField(r3, JSGeneratorObject::kResultValuePropertyOffset, r5, r6,
2364 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 2322 kLRHasBeenSaved, kDontSaveFPRegs);
2365 } 2323 }
2366 2324
2367 2325
2368 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 2326 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2369 SetSourcePosition(prop->position()); 2327 SetSourcePosition(prop->position());
2370 Literal* key = prop->key()->AsLiteral(); 2328 Literal* key = prop->key()->AsLiteral();
2371 DCHECK(!prop->IsSuperAccess()); 2329 DCHECK(!prop->IsSuperAccess());
2372 2330
2373 __ mov(LoadDescriptor::NameRegister(), Operand(key->value())); 2331 __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2374 if (FLAG_vector_ics) { 2332 if (FLAG_vector_ics) {
(...skipping 39 matching lines...)
2414 } 2372 }
2415 2373
2416 2374
2417 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 2375 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2418 Token::Value op, 2376 Token::Value op,
2419 OverwriteMode mode, 2377 OverwriteMode mode,
2420 Expression* left_expr, 2378 Expression* left_expr,
2421 Expression* right_expr) { 2379 Expression* right_expr) {
2422 Label done, smi_case, stub_call; 2380 Label done, smi_case, stub_call;
2423 2381
2424 Register scratch1 = r2; 2382 Register scratch1 = r5;
2425 Register scratch2 = r3; 2383 Register scratch2 = r6;
2426 2384
2427 // Get the arguments. 2385 // Get the arguments.
2428 Register left = r1; 2386 Register left = r4;
2429 Register right = r0; 2387 Register right = r3;
2430 __ pop(left); 2388 __ pop(left);
2431 2389
2432 // Perform combined smi check on both operands. 2390 // Perform combined smi check on both operands.
2433 __ orr(scratch1, left, Operand(right)); 2391 __ orx(scratch1, left, right);
2434 STATIC_ASSERT(kSmiTag == 0); 2392 STATIC_ASSERT(kSmiTag == 0);
2435 JumpPatchSite patch_site(masm_); 2393 JumpPatchSite patch_site(masm_);
2436 patch_site.EmitJumpIfSmi(scratch1, &smi_case); 2394 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2437 2395
2438 __ bind(&stub_call); 2396 __ bind(&stub_call);
2439 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code(); 2397 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2440 CallIC(code, expr->BinaryOperationFeedbackId()); 2398 CallIC(code, expr->BinaryOperationFeedbackId());
2441 patch_site.EmitPatchInfo(); 2399 patch_site.EmitPatchInfo();
2442 __ jmp(&done); 2400 __ b(&done);
2443 2401
2444 __ bind(&smi_case); 2402 __ bind(&smi_case);
2445 // Smi case. This code works the same way as the smi-smi case in the type 2403 // Smi case. This code works the same way as the smi-smi case in the type
2446 // recording binary operation stub, see 2404 // recording binary operation stub.
2447 switch (op) { 2405 switch (op) {
2448 case Token::SAR: 2406 case Token::SAR:
2449 __ GetLeastBitsFromSmi(scratch1, right, 5); 2407 __ GetLeastBitsFromSmi(scratch1, right, 5);
2450 __ mov(right, Operand(left, ASR, scratch1)); 2408 __ ShiftRightArith(right, left, scratch1);
2451 __ bic(right, right, Operand(kSmiTagMask)); 2409 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2452 break; 2410 break;
2453 case Token::SHL: { 2411 case Token::SHL: {
2412 __ GetLeastBitsFromSmi(scratch2, right, 5);
2413 #if V8_TARGET_ARCH_PPC64
2414 __ ShiftLeft_(right, left, scratch2);
2415 #else
2454 __ SmiUntag(scratch1, left); 2416 __ SmiUntag(scratch1, left);
2455 __ GetLeastBitsFromSmi(scratch2, right, 5); 2417 __ ShiftLeft_(scratch1, scratch1, scratch2);
2456 __ mov(scratch1, Operand(scratch1, LSL, scratch2)); 2418 // Check that the *signed* result fits in a smi.
2457 __ TrySmiTag(right, scratch1, &stub_call); 2419 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2420 __ SmiTag(right, scratch1);
2421 #endif
2458 break; 2422 break;
2459 } 2423 }
2460 case Token::SHR: { 2424 case Token::SHR: {
2461 __ SmiUntag(scratch1, left); 2425 __ SmiUntag(scratch1, left);
2462 __ GetLeastBitsFromSmi(scratch2, right, 5); 2426 __ GetLeastBitsFromSmi(scratch2, right, 5);
2463 __ mov(scratch1, Operand(scratch1, LSR, scratch2)); 2427 __ srw(scratch1, scratch1, scratch2);
2464 __ tst(scratch1, Operand(0xc0000000)); 2428 // Unsigned shift is not allowed to produce a negative number.
2465 __ b(ne, &stub_call); 2429 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2466 __ SmiTag(right, scratch1); 2430 __ SmiTag(right, scratch1);
2467 break; 2431 break;
2468 } 2432 }
2469 case Token::ADD: 2433 case Token::ADD: {
2470 __ add(scratch1, left, Operand(right), SetCC); 2434 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2471 __ b(vs, &stub_call); 2435 __ bne(&stub_call, cr0);
2472 __ mov(right, scratch1); 2436 __ mr(right, scratch1);
2473 break; 2437 break;
2474 case Token::SUB: 2438 }
2475 __ sub(scratch1, left, Operand(right), SetCC); 2439 case Token::SUB: {
2476 __ b(vs, &stub_call); 2440 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2477 __ mov(right, scratch1); 2441 __ bne(&stub_call, cr0);
2442 __ mr(right, scratch1);
2478 break; 2443 break;
2444 }
2479 case Token::MUL: { 2445 case Token::MUL: {
2446 Label mul_zero;
2447 #if V8_TARGET_ARCH_PPC64
2448 // Remove tag from both operands.
2480 __ SmiUntag(ip, right); 2449 __ SmiUntag(ip, right);
2481 __ smull(scratch1, scratch2, left, ip); 2450 __ SmiUntag(r0, left);
2482 __ mov(ip, Operand(scratch1, ASR, 31)); 2451 __ Mul(scratch1, r0, ip);
2483 __ cmp(ip, Operand(scratch2)); 2452 // Check for overflowing the smi range - no overflow if higher 33 bits of
2484 __ b(ne, &stub_call); 2453 // the result are identical.
2485 __ cmp(scratch1, Operand::Zero()); 2454 __ TestIfInt32(scratch1, scratch2, ip);
2486 __ mov(right, Operand(scratch1), LeaveCC, ne); 2455 __ bne(&stub_call);
2487 __ b(ne, &done); 2456 #else
2488 __ add(scratch2, right, Operand(left), SetCC); 2457 __ SmiUntag(ip, right);
2489 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl); 2458 __ mullw(scratch1, left, ip);
2490 __ b(mi, &stub_call); 2459 __ mulhw(scratch2, left, ip);
2460 // Check for overflowing the smi range - no overflow if higher 33 bits of
2461 // the result are identical.
2462 __ TestIfInt32(scratch2, scratch1, ip);
2463 __ bne(&stub_call);
2464 #endif
2465 // Go slow on zero result to handle -0.
2466 __ cmpi(scratch1, Operand::Zero());
2467 __ beq(&mul_zero);
2468 #if V8_TARGET_ARCH_PPC64
2469 __ SmiTag(right, scratch1);
2470 #else
2471 __ mr(right, scratch1);
2472 #endif
2473 __ b(&done);
2474 // We need -0 if we were multiplying a negative number with 0 to get 0.
2475 // We know one of them was zero.
2476 __ bind(&mul_zero);
2477 __ add(scratch2, right, left);
2478 __ cmpi(scratch2, Operand::Zero());
2479 __ blt(&stub_call);
2480 __ LoadSmiLiteral(right, Smi::FromInt(0));
2491 break; 2481 break;
2492 } 2482 }
2493 case Token::BIT_OR: 2483 case Token::BIT_OR:
2494 __ orr(right, left, Operand(right)); 2484 __ orx(right, left, right);
2495 break; 2485 break;
2496 case Token::BIT_AND: 2486 case Token::BIT_AND:
2497 __ and_(right, left, Operand(right)); 2487 __ and_(right, left, right);
2498 break; 2488 break;
2499 case Token::BIT_XOR: 2489 case Token::BIT_XOR:
2500 __ eor(right, left, Operand(right)); 2490 __ xor_(right, left, right);
2501 break; 2491 break;
2502 default: 2492 default:
2503 UNREACHABLE(); 2493 UNREACHABLE();
2504 } 2494 }
2505 2495
2506 __ bind(&done); 2496 __ bind(&done);
2507 context()->Plug(r0); 2497 context()->Plug(r3);
2508 } 2498 }
2509 2499
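Two of the checks above, sketched as plain C++ (illustrative only, not part of the patch):

    #include <cstdint>

    // Combined smi check: smis carry a 0 low tag bit (kSmiTag == 0), so
    // OR-ing the operands leaves the tag bit 0 only if both are smis.
    bool BothSmis(intptr_t left, intptr_t right) {
      const intptr_t kSmiTagMask = 1;
      return ((left | right) & kSmiTagMask) == 0;
    }

    // MUL overflow on 32-bit targets: mullw/mulhw give the low and high
    // words of the 64-bit product, which fits in 32 bits iff the high word
    // equals the sign extension of the low word ("higher 33 bits identical").
    bool ProductFitsInt32(int32_t a, int32_t b) {
      int64_t p = static_cast<int64_t>(a) * b;
      int32_t lo = static_cast<int32_t>(p);
      int32_t hi = static_cast<int32_t>(static_cast<uint64_t>(p) >> 32);
      return hi == (lo >> 31);  // arithmetic shift replicates the sign bit
    }

The mul_zero path handles the one case these checks miss: a zero product where the other operand was negative requires -0, which no smi can encode, so left + right < 0 sends the operation to the stub.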
2510 2500
2511 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) { 2501 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2512 // Constructor is in r0. 2502 // Constructor is in r3.
2513 DCHECK(lit != NULL); 2503 DCHECK(lit != NULL);
2514 __ push(r0); 2504 __ push(r3);
2515 2505
2516 // No access check is needed here since the constructor is created by the 2506 // No access check is needed here since the constructor is created by the
2517 // class literal. 2507 // class literal.
2518 Register scratch = r1; 2508 Register scratch = r4;
2519 __ ldr(scratch, 2509 __ LoadP(scratch,
2520 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); 2510 FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2521 __ push(scratch); 2511 __ push(scratch);
2522 2512
2523 for (int i = 0; i < lit->properties()->length(); i++) { 2513 for (int i = 0; i < lit->properties()->length(); i++) {
2524 ObjectLiteral::Property* property = lit->properties()->at(i); 2514 ObjectLiteral::Property* property = lit->properties()->at(i);
2525 Literal* key = property->key()->AsLiteral(); 2515 Literal* key = property->key()->AsLiteral();
2526 Expression* value = property->value(); 2516 Expression* value = property->value();
2527 DCHECK(key != NULL); 2517 DCHECK(key != NULL);
2528 2518
2529 if (property->is_static()) { 2519 if (property->is_static()) {
2530 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor 2520 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
2531 } else { 2521 } else {
2532 __ ldr(scratch, MemOperand(sp, 0)); // prototype 2522 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
2533 } 2523 }
2534 __ push(scratch); 2524 __ push(scratch);
2535 VisitForStackValue(key); 2525 VisitForStackValue(key);
2536 VisitForStackValue(value); 2526 VisitForStackValue(value);
2537 2527
2538 switch (property->kind()) { 2528 switch (property->kind()) {
2539 case ObjectLiteral::Property::CONSTANT: 2529 case ObjectLiteral::Property::CONSTANT:
2540 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 2530 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2541 case ObjectLiteral::Property::COMPUTED: 2531 case ObjectLiteral::Property::COMPUTED:
2542 case ObjectLiteral::Property::PROTOTYPE: 2532 case ObjectLiteral::Property::PROTOTYPE:
(...skipping 14 matching lines...)
2557 } 2547 }
2558 2548
2559 // prototype 2549 // prototype
2560 __ CallRuntime(Runtime::kToFastProperties, 1); 2550 __ CallRuntime(Runtime::kToFastProperties, 1);
2561 2551
2562 // constructor 2552 // constructor
2563 __ CallRuntime(Runtime::kToFastProperties, 1); 2553 __ CallRuntime(Runtime::kToFastProperties, 1);
2564 } 2554 }
2565 2555
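The property loop above alternates its receiver by staticness; a sketch (the helper is illustrative):

    // Sketch only: static members define on the constructor (one slot
    // deeper on the stack), instance members on the prototype (TOS).
    Object* DefineTarget(ObjectLiteral::Property* property,
                         Object* constructor, Object* prototype) {
      return property->is_static() ? constructor : prototype;
    }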
2566 2556
2567 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 2557 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op,
2568 Token::Value op,
2569 OverwriteMode mode) { 2558 OverwriteMode mode) {
2570 __ pop(r1); 2559 __ pop(r4);
2571 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code(); 2560 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2572 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2561 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2573 CallIC(code, expr->BinaryOperationFeedbackId()); 2562 CallIC(code, expr->BinaryOperationFeedbackId());
2574 patch_site.EmitPatchInfo(); 2563 patch_site.EmitPatchInfo();
2575 context()->Plug(r0); 2564 context()->Plug(r3);
2576 } 2565 }
2577 2566
2578 2567
2579 void FullCodeGenerator::EmitAssignment(Expression* expr) { 2568 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2580 DCHECK(expr->IsValidReferenceExpression()); 2569 DCHECK(expr->IsValidReferenceExpression());
2581 2570
2582 Property* prop = expr->AsProperty(); 2571 Property* prop = expr->AsProperty();
2583 LhsKind assign_type = GetAssignType(prop); 2572 LhsKind assign_type = GetAssignType(prop);
2584 2573
2585 switch (assign_type) { 2574 switch (assign_type) {
2586 case VARIABLE: { 2575 case VARIABLE: {
2587 Variable* var = expr->AsVariableProxy()->var(); 2576 Variable* var = expr->AsVariableProxy()->var();
2588 EffectContext context(this); 2577 EffectContext context(this);
2589 EmitVariableAssignment(var, Token::ASSIGN); 2578 EmitVariableAssignment(var, Token::ASSIGN);
2590 break; 2579 break;
2591 } 2580 }
2592 case NAMED_PROPERTY: { 2581 case NAMED_PROPERTY: {
2593 __ push(r0); // Preserve value. 2582 __ push(r3); // Preserve value.
2594 VisitForAccumulatorValue(prop->obj()); 2583 VisitForAccumulatorValue(prop->obj());
2595 __ Move(StoreDescriptor::ReceiverRegister(), r0); 2584 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2596 __ pop(StoreDescriptor::ValueRegister()); // Restore value. 2585 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2597 __ mov(StoreDescriptor::NameRegister(), 2586 __ mov(StoreDescriptor::NameRegister(),
2598 Operand(prop->key()->AsLiteral()->value())); 2587 Operand(prop->key()->AsLiteral()->value()));
2599 CallStoreIC(); 2588 CallStoreIC();
2600 break; 2589 break;
2601 } 2590 }
2602 case NAMED_SUPER_PROPERTY: { 2591 case NAMED_SUPER_PROPERTY: {
2603 __ Push(r0); 2592 __ Push(r3);
2604 VisitForStackValue(prop->obj()->AsSuperReference()->this_var()); 2593 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2605 EmitLoadHomeObject(prop->obj()->AsSuperReference()); 2594 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2606 // stack: value, this; r0: home_object 2595 // stack: value, this; r3: home_object
2607 Register scratch = r2; 2596 Register scratch = r5;
2608 Register scratch2 = r3; 2597 Register scratch2 = r6;
2609 __ mov(scratch, result_register()); // home_object 2598 __ mr(scratch, result_register()); // home_object
2610 __ ldr(r0, MemOperand(sp, kPointerSize)); // value 2599 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2611 __ ldr(scratch2, MemOperand(sp, 0)); // this 2600 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2612 __ str(scratch2, MemOperand(sp, kPointerSize)); // this 2601 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2613 __ str(scratch, MemOperand(sp, 0)); // home_object 2602 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2614 // stack: this, home_object; r0: value 2603 // stack: this, home_object; r3: value
2615 EmitNamedSuperPropertyStore(prop); 2604 EmitNamedSuperPropertyStore(prop);
2616 break; 2605 break;
2617 } 2606 }
2618 case KEYED_SUPER_PROPERTY: { 2607 case KEYED_SUPER_PROPERTY: {
2619 __ Push(r0); 2608 __ Push(r3);
2620 VisitForStackValue(prop->obj()->AsSuperReference()->this_var()); 2609 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
2621 EmitLoadHomeObject(prop->obj()->AsSuperReference()); 2610 EmitLoadHomeObject(prop->obj()->AsSuperReference());
2622 __ Push(result_register()); 2611 __ Push(result_register());
2623 VisitForAccumulatorValue(prop->key()); 2612 VisitForAccumulatorValue(prop->key());
2624 Register scratch = r2; 2613 Register scratch = r5;
2625 Register scratch2 = r3; 2614 Register scratch2 = r6;
2626 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value 2615 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2627 // stack: value, this, home_object; r0: key, r3: value 2616 // stack: value, this, home_object; r3: key, r6: value
2628 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this 2617 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2629 __ str(scratch, MemOperand(sp, 2 * kPointerSize)); 2618 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2630 __ ldr(scratch, MemOperand(sp, 0)); // home_object 2619 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2631 __ str(scratch, MemOperand(sp, kPointerSize)); 2620 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2632 __ str(r0, MemOperand(sp, 0)); 2621 __ StoreP(r3, MemOperand(sp, 0));
2633 __ Move(r0, scratch2); 2622 __ Move(r3, scratch2);
2634 // stack: this, home_object, key; r0: value. 2623 // stack: this, home_object, key; r3: value.
2635 EmitKeyedSuperPropertyStore(prop); 2624 EmitKeyedSuperPropertyStore(prop);
2636 break; 2625 break;
2637 } 2626 }
2638 case KEYED_PROPERTY: { 2627 case KEYED_PROPERTY: {
2639 __ push(r0); // Preserve value. 2628 __ push(r3); // Preserve value.
2640 VisitForStackValue(prop->obj()); 2629 VisitForStackValue(prop->obj());
2641 VisitForAccumulatorValue(prop->key()); 2630 VisitForAccumulatorValue(prop->key());
2642 __ Move(StoreDescriptor::NameRegister(), r0); 2631 __ Move(StoreDescriptor::NameRegister(), r3);
2643 __ Pop(StoreDescriptor::ValueRegister(), 2632 __ Pop(StoreDescriptor::ValueRegister(),
2644 StoreDescriptor::ReceiverRegister()); 2633 StoreDescriptor::ReceiverRegister());
2645 Handle<Code> ic = 2634 Handle<Code> ic =
2646 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 2635 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2647 CallIC(ic); 2636 CallIC(ic);
2648 break; 2637 break;
2649 } 2638 }
2650 } 2639 }
2651 context()->Plug(r0); 2640 context()->Plug(r3);
2652 } 2641 }
2653 2642
2654 2643
2655 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 2644 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2656 Variable* var, MemOperand location) { 2645 Variable* var, MemOperand location) {
2657 __ str(result_register(), location); 2646 __ StoreP(result_register(), location, r0);
2658 if (var->IsContextSlot()) { 2647 if (var->IsContextSlot()) {
2659 // RecordWrite may destroy all its register arguments. 2648 // RecordWrite may destroy all its register arguments.
2660 __ mov(r3, result_register()); 2649 __ mr(r6, result_register());
2661 int offset = Context::SlotOffset(var->index()); 2650 int offset = Context::SlotOffset(var->index());
2662 __ RecordWriteContextSlot( 2651 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2663 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); 2652 kDontSaveFPRegs);
2664 } 2653 }
2665 } 2654 }
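
Reviewer note: worth calling out why only the context-slot path takes a barrier. A stack slot is rescanned precisely at GC time, but a context slot is a field of a heap object, so the store must be recorded for the generational and incremental-marking invariants. A minimal sketch of the emitted shape, with the PPC register names used above (the extra r0 operand to StoreP is an assembler scratch for out-of-range offsets):

    __ StoreP(result_register(), location, r0);  // the store itself
    if (var->IsContextSlot()) {
      __ mr(r6, result_register());              // RecordWrite* may clobber its
                                                 // register arguments, so copy
      __ RecordWriteContextSlot(r4, Context::SlotOffset(var->index()), r6, r5,
                                kLRHasBeenSaved, kDontSaveFPRegs);
    }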
2666 2655
2667 2656
2668 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { 2657 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2669 if (var->IsUnallocated()) { 2658 if (var->IsUnallocated()) {
2670 // Global var, const, or let. 2659 // Global var, const, or let.
2671 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); 2660 __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2672 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand()); 2661 __ LoadP(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2673 CallStoreIC(); 2662 CallStoreIC();
2674 2663
2675 } else if (op == Token::INIT_CONST_LEGACY) { 2664 } else if (op == Token::INIT_CONST_LEGACY) {
2676 // Const initializers need a write barrier. 2665 // Const initializers need a write barrier.
2677 DCHECK(!var->IsParameter()); // No const parameters. 2666 DCHECK(!var->IsParameter()); // No const parameters.
2678 if (var->IsLookupSlot()) { 2667 if (var->IsLookupSlot()) {
2679 __ push(r0); 2668 __ push(r3);
2680 __ mov(r0, Operand(var->name())); 2669 __ mov(r3, Operand(var->name()));
2681 __ Push(cp, r0); // Context and name. 2670 __ Push(cp, r3); // Context and name.
2682 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); 2671 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2683 } else { 2672 } else {
2684 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2673 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2685 Label skip; 2674 Label skip;
2686 MemOperand location = VarOperand(var, r1); 2675 MemOperand location = VarOperand(var, r4);
2687 __ ldr(r2, location); 2676 __ LoadP(r5, location);
2688 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2677 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2689 __ b(ne, &skip); 2678 __ bne(&skip);
2690 EmitStoreToStackLocalOrContextSlot(var, location); 2679 EmitStoreToStackLocalOrContextSlot(var, location);
2691 __ bind(&skip); 2680 __ bind(&skip);
2692 } 2681 }
2693 2682
2694 } else if (var->mode() == LET && op != Token::INIT_LET) { 2683 } else if (var->mode() == LET && op != Token::INIT_LET) {
2695 // Non-initializing assignment to let variable needs a write barrier. 2684 // Non-initializing assignment to let variable needs a write barrier.
2696 DCHECK(!var->IsLookupSlot()); 2685 DCHECK(!var->IsLookupSlot());
2697 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2686 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2698 Label assign; 2687 Label assign;
2699 MemOperand location = VarOperand(var, r1); 2688 MemOperand location = VarOperand(var, r4);
2700 __ ldr(r3, location); 2689 __ LoadP(r6, location);
2701 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); 2690 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2702 __ b(ne, &assign); 2691 __ bne(&assign);
2703 __ mov(r3, Operand(var->name())); 2692 __ mov(r6, Operand(var->name()));
2704 __ push(r3); 2693 __ push(r6);
2705 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2694 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2706 // Perform the assignment. 2695 // Perform the assignment.
2707 __ bind(&assign); 2696 __ bind(&assign);
2708 EmitStoreToStackLocalOrContextSlot(var, location); 2697 EmitStoreToStackLocalOrContextSlot(var, location);
2709 2698
2710 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { 2699 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2711 if (var->IsLookupSlot()) { 2700 if (var->IsLookupSlot()) {
2712 // Assignment to var. 2701 // Assignment to var.
2713 __ push(r0); // Value. 2702 __ push(r3); // Value.
2714 __ mov(r1, Operand(var->name())); 2703 __ mov(r4, Operand(var->name()));
2715 __ mov(r0, Operand(Smi::FromInt(strict_mode()))); 2704 __ mov(r3, Operand(Smi::FromInt(strict_mode())));
2716 __ Push(cp, r1, r0); // Context, name, strict mode. 2705 __ Push(cp, r4, r3); // Context, name, strict mode.
2717 __ CallRuntime(Runtime::kStoreLookupSlot, 4); 2706 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2718 } else { 2707 } else {
2719 // Assignment to var or initializing assignment to let/const in harmony 2708 // Assignment to var or initializing assignment to let/const in harmony
2720 // mode. 2709 // mode.
2721 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); 2710 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2722 MemOperand location = VarOperand(var, r1); 2711 MemOperand location = VarOperand(var, r4);
2723 if (generate_debug_code_ && op == Token::INIT_LET) { 2712 if (generate_debug_code_ && op == Token::INIT_LET) {
2724 // Check for an uninitialized let binding. 2713 // Check for an uninitialized let binding.
2725 __ ldr(r2, location); 2714 __ LoadP(r5, location);
2726 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); 2715 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2727 __ Check(eq, kLetBindingReInitialization); 2716 __ Check(eq, kLetBindingReInitialization);
2728 } 2717 }
2729 EmitStoreToStackLocalOrContextSlot(var, location); 2718 EmitStoreToStackLocalOrContextSlot(var, location);
2730 } 2719 }
2731 } 2720 }
2732 // Non-initializing assignments to consts are ignored. 2721 // Non-initializing assignments to consts are ignored.
2733 } 2722 }
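
Reviewer note: the hole checks above implement initialization semantics rather than anything PPC-specific. A standalone C++ model of what the emitted compares and branches amount to (hypothetical names; nullopt stands in for the the-hole sentinel):

    #include <optional>
    #include <stdexcept>

    struct Slot { std::optional<int> value; };  // nullopt plays Heap::kTheHoleValue

    // Token::INIT_CONST_LEGACY path: store only while the slot is still the
    // hole, silently skip otherwise (the CompareRoot / bne(&skip) pair above).
    void InitConstLegacy(Slot& s, int v) {
      if (!s.value) s.value = v;
    }

    // LET non-initializing path: a store to a still-hole binding throws
    // (the CompareRoot / bne(&assign) / kThrowReferenceError sequence above).
    void AssignLet(Slot& s, int v) {
      if (!s.value) throw std::runtime_error("ReferenceError");
      s.value = v;
    }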
2734 2723
2735 2724
2736 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2725 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2737 // Assignment to a property, using a named store IC. 2726 // Assignment to a property, using a named store IC.
2738 Property* prop = expr->target()->AsProperty(); 2727 Property* prop = expr->target()->AsProperty();
2739 DCHECK(prop != NULL); 2728 DCHECK(prop != NULL);
2740 DCHECK(prop->key()->IsLiteral()); 2729 DCHECK(prop->key()->IsLiteral());
2741 2730
2742 // Record source code position before IC call. 2731 // Record source code position before IC call.
2743 SetSourcePosition(expr->position()); 2732 SetSourcePosition(expr->position());
2744 __ mov(StoreDescriptor::NameRegister(), 2733 __ mov(StoreDescriptor::NameRegister(),
2745 Operand(prop->key()->AsLiteral()->value())); 2734 Operand(prop->key()->AsLiteral()->value()));
2746 __ pop(StoreDescriptor::ReceiverRegister()); 2735 __ pop(StoreDescriptor::ReceiverRegister());
2747 CallStoreIC(expr->AssignmentFeedbackId()); 2736 CallStoreIC(expr->AssignmentFeedbackId());
2748 2737
2749 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2738 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2750 context()->Plug(r0); 2739 context()->Plug(r3);
2751 } 2740 }
2752 2741
2753 2742
2754 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { 2743 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2755 // Assignment to named property of super. 2744 // Assignment to named property of super.
2756 // r0 : value 2745 // r3 : value
2757 // stack : receiver ('this'), home_object 2746 // stack : receiver ('this'), home_object
2758 DCHECK(prop != NULL); 2747 DCHECK(prop != NULL);
2759 Literal* key = prop->key()->AsLiteral(); 2748 Literal* key = prop->key()->AsLiteral();
2760 DCHECK(key != NULL); 2749 DCHECK(key != NULL);
2761 2750
2762 __ Push(key->value()); 2751 __ Push(key->value());
2763 __ Push(r0); 2752 __ Push(r3);
2764 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict 2753 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreToSuper_Strict
2765 : Runtime::kStoreToSuper_Sloppy), 2754 : Runtime::kStoreToSuper_Sloppy),
2766 4); 2755 4);
2767 } 2756 }
2768 2757
2769 2758
2770 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { 2759 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
 2771 // Assignment to keyed property of super. 2760 // Assignment to keyed property of super.
2772 // r0 : value 2761 // r3 : value
2773 // stack : receiver ('this'), home_object, key 2762 // stack : receiver ('this'), home_object, key
2774 DCHECK(prop != NULL); 2763 DCHECK(prop != NULL);
2775 2764
2776 __ Push(r0); 2765 __ Push(r3);
2777 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreKeyedToSuper_Strict 2766 __ CallRuntime((strict_mode() == STRICT ? Runtime::kStoreKeyedToSuper_Strict
2778 : Runtime::kStoreKeyedToSuper_Sloppy), 2767 : Runtime::kStoreKeyedToSuper_Sloppy),
2779 4); 2768 4);
2780 } 2769 }
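
Reviewer note: the argument count of 4 in both super-property stores counts stack operands, not pushes performed here. A sketch of the operand stack just before the CallRuntime above, assuming the layout set up by the assignment visitor (deepest entry first):

    // receiver ('this')   <- pushed before this helper runs
    // home_object
    // key                 <- a literal for the named variant
    // value               <- the single Push(r3) in this function
    // Runtime::kStoreToSuper_* / kStoreKeyedToSuper_* pop all four entries.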
2781 2770
2782 2771
2783 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2772 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2784 // Assignment to a property, using a keyed store IC. 2773 // Assignment to a property, using a keyed store IC.
2785 2774
2786 // Record source code position before IC call. 2775 // Record source code position before IC call.
2787 SetSourcePosition(expr->position()); 2776 SetSourcePosition(expr->position());
2788 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister()); 2777 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2789 DCHECK(StoreDescriptor::ValueRegister().is(r0)); 2778 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2790 2779
2791 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 2780 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2792 CallIC(ic, expr->AssignmentFeedbackId()); 2781 CallIC(ic, expr->AssignmentFeedbackId());
2793 2782
2794 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2783 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2795 context()->Plug(r0); 2784 context()->Plug(r3);
2796 } 2785 }
2797 2786
2798 2787
2799 void FullCodeGenerator::VisitProperty(Property* expr) { 2788 void FullCodeGenerator::VisitProperty(Property* expr) {
2800 Comment cmnt(masm_, "[ Property"); 2789 Comment cmnt(masm_, "[ Property");
2801 Expression* key = expr->key(); 2790 Expression* key = expr->key();
2802 2791
2803 if (key->IsPropertyName()) { 2792 if (key->IsPropertyName()) {
2804 if (!expr->IsSuperAccess()) { 2793 if (!expr->IsSuperAccess()) {
2805 VisitForAccumulatorValue(expr->obj()); 2794 VisitForAccumulatorValue(expr->obj());
2806 __ Move(LoadDescriptor::ReceiverRegister(), r0); 2795 __ Move(LoadDescriptor::ReceiverRegister(), r3);
2807 EmitNamedPropertyLoad(expr); 2796 EmitNamedPropertyLoad(expr);
2808 } else { 2797 } else {
2809 VisitForStackValue(expr->obj()->AsSuperReference()->this_var()); 2798 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2810 EmitLoadHomeObject(expr->obj()->AsSuperReference()); 2799 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2811 __ Push(result_register()); 2800 __ Push(result_register());
2812 EmitNamedSuperPropertyLoad(expr); 2801 EmitNamedSuperPropertyLoad(expr);
2813 } 2802 }
2814 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2803 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2815 context()->Plug(r0); 2804 context()->Plug(r3);
2816 } else { 2805 } else {
2817 if (!expr->IsSuperAccess()) { 2806 if (!expr->IsSuperAccess()) {
2818 VisitForStackValue(expr->obj()); 2807 VisitForStackValue(expr->obj());
2819 VisitForAccumulatorValue(expr->key()); 2808 VisitForAccumulatorValue(expr->key());
2820 __ Move(LoadDescriptor::NameRegister(), r0); 2809 __ Move(LoadDescriptor::NameRegister(), r3);
2821 __ pop(LoadDescriptor::ReceiverRegister()); 2810 __ pop(LoadDescriptor::ReceiverRegister());
2822 EmitKeyedPropertyLoad(expr); 2811 EmitKeyedPropertyLoad(expr);
2823 } else { 2812 } else {
2824 VisitForStackValue(expr->obj()->AsSuperReference()->this_var()); 2813 VisitForStackValue(expr->obj()->AsSuperReference()->this_var());
2825 EmitLoadHomeObject(expr->obj()->AsSuperReference()); 2814 EmitLoadHomeObject(expr->obj()->AsSuperReference());
2826 __ Push(result_register()); 2815 __ Push(result_register());
2827 VisitForStackValue(expr->key()); 2816 VisitForStackValue(expr->key());
2828 EmitKeyedSuperPropertyLoad(expr); 2817 EmitKeyedSuperPropertyLoad(expr);
2829 } 2818 }
2830 context()->Plug(r0); 2819 context()->Plug(r3);
2831 } 2820 }
2832 } 2821 }
2833 2822
2834 2823
2835 void FullCodeGenerator::CallIC(Handle<Code> code, 2824 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2836 TypeFeedbackId ast_id) {
2837 ic_total_count_++; 2825 ic_total_count_++;
2838 // All calls must have a predictable size in full-codegen code to ensure that 2826 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2839 // the debugger can patch them correctly.
2840 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al,
2841 NEVER_INLINE_TARGET_ADDRESS);
2842 } 2827 }
2843 2828
2844 2829
2845 // Code common for calls using the IC. 2830 // Code common for calls using the IC.
2846 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { 2831 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2847 Expression* callee = expr->expression(); 2832 Expression* callee = expr->expression();
2848 2833
2849 CallICState::CallType call_type = 2834 CallICState::CallType call_type =
2850 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD; 2835 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2851 2836
2852 // Get the target function. 2837 // Get the target function.
2853 if (call_type == CallICState::FUNCTION) { 2838 if (call_type == CallICState::FUNCTION) {
2854 { StackValueContext context(this); 2839 {
2840 StackValueContext context(this);
2855 EmitVariableLoad(callee->AsVariableProxy()); 2841 EmitVariableLoad(callee->AsVariableProxy());
2856 PrepareForBailout(callee, NO_REGISTERS); 2842 PrepareForBailout(callee, NO_REGISTERS);
2857 } 2843 }
2858 // Push undefined as receiver. This is patched in the method prologue if it 2844 // Push undefined as receiver. This is patched in the method prologue if it
2859 // is a sloppy mode method. 2845 // is a sloppy mode method.
2860 __ Push(isolate()->factory()->undefined_value()); 2846 __ Push(isolate()->factory()->undefined_value());
2861 } else { 2847 } else {
2862 // Load the function from the receiver. 2848 // Load the function from the receiver.
2863 DCHECK(callee->IsProperty()); 2849 DCHECK(callee->IsProperty());
2864 DCHECK(!callee->AsProperty()->IsSuperAccess()); 2850 DCHECK(!callee->AsProperty()->IsSuperAccess());
2865 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2851 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2866 EmitNamedPropertyLoad(callee->AsProperty()); 2852 EmitNamedPropertyLoad(callee->AsProperty());
2867 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2853 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2868 // Push the target function under the receiver. 2854 // Push the target function under the receiver.
2869 __ ldr(ip, MemOperand(sp, 0)); 2855 __ LoadP(ip, MemOperand(sp, 0));
2870 __ push(ip); 2856 __ push(ip);
2871 __ str(r0, MemOperand(sp, kPointerSize)); 2857 __ StoreP(r3, MemOperand(sp, kPointerSize));
2872 } 2858 }
2873 2859
2874 EmitCall(expr, call_type); 2860 EmitCall(expr, call_type);
2875 } 2861 }
2876 2862
2877 2863
2878 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { 2864 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2879 Expression* callee = expr->expression(); 2865 Expression* callee = expr->expression();
2880 DCHECK(callee->IsProperty()); 2866 DCHECK(callee->IsProperty());
2881 Property* prop = callee->AsProperty(); 2867 Property* prop = callee->AsProperty();
2882 DCHECK(prop->IsSuperAccess()); 2868 DCHECK(prop->IsSuperAccess());
2883 2869
2884 SetSourcePosition(prop->position()); 2870 SetSourcePosition(prop->position());
2885 Literal* key = prop->key()->AsLiteral(); 2871 Literal* key = prop->key()->AsLiteral();
2886 DCHECK(!key->value()->IsSmi()); 2872 DCHECK(!key->value()->IsSmi());
2887 // Load the function from the receiver. 2873 // Load the function from the receiver.
2888 const Register scratch = r1; 2874 const Register scratch = r4;
2889 SuperReference* super_ref = prop->obj()->AsSuperReference(); 2875 SuperReference* super_ref = prop->obj()->AsSuperReference();
2890 EmitLoadHomeObject(super_ref); 2876 EmitLoadHomeObject(super_ref);
2891 __ Push(r0); 2877 __ mr(scratch, r3);
2892 VisitForAccumulatorValue(super_ref->this_var()); 2878 VisitForAccumulatorValue(super_ref->this_var());
2893 __ Push(r0); 2879 __ Push(scratch, r3, r3, scratch);
2894 __ Push(r0);
2895 __ ldr(scratch, MemOperand(sp, kPointerSize * 2));
2896 __ Push(scratch);
2897 __ Push(key->value()); 2880 __ Push(key->value());
2898 2881
2899 // Stack here: 2882 // Stack here:
2900 // - home_object 2883 // - home_object
2901 // - this (receiver) 2884 // - this (receiver)
2902 // - this (receiver) <-- LoadFromSuper will pop here and below. 2885 // - this (receiver) <-- LoadFromSuper will pop here and below.
2903 // - home_object 2886 // - home_object
2904 // - key 2887 // - key
2905 __ CallRuntime(Runtime::kLoadFromSuper, 3); 2888 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2906 2889
2907 // Replace home_object with target function. 2890 // Replace home_object with target function.
2908 __ str(r0, MemOperand(sp, kPointerSize)); 2891 __ StoreP(r3, MemOperand(sp, kPointerSize));
2909 2892
2910 // Stack here: 2893 // Stack here:
2911 // - target function 2894 // - target function
2912 // - this (receiver) 2895 // - this (receiver)
2913 EmitCall(expr, CallICState::METHOD); 2896 EmitCall(expr, CallICState::METHOD);
2914 } 2897 }
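
Reviewer note: this is one of the few places the port restructures rather than transliterates. ARM pushes home_object, evaluates this, pushes it twice, then reloads home_object from sp + 2 * kPointerSize and pushes it again; the PPC side keeps home_object live in a scratch register across the visit and folds everything into one multi-register Push. A sketch of the equivalence (bottom-to-top stack order in the trailing comment):

    __ mr(scratch, r3);                               // scratch = home_object
    VisitForAccumulatorValue(super_ref->this_var());  // r3 = this
    __ Push(scratch, r3, r3, scratch);                // home_object, this, this, home_object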
2915 2898
2916 2899
2917 // Code common for calls using the IC. 2900 // Code common for calls using the IC.
2918 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2901 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2919 Expression* key) {
2920 // Load the key. 2902 // Load the key.
2921 VisitForAccumulatorValue(key); 2903 VisitForAccumulatorValue(key);
2922 2904
2923 Expression* callee = expr->expression(); 2905 Expression* callee = expr->expression();
2924 2906
2925 // Load the function from the receiver. 2907 // Load the function from the receiver.
2926 DCHECK(callee->IsProperty()); 2908 DCHECK(callee->IsProperty());
2927 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 2909 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2928 __ Move(LoadDescriptor::NameRegister(), r0); 2910 __ Move(LoadDescriptor::NameRegister(), r3);
2929 EmitKeyedPropertyLoad(callee->AsProperty()); 2911 EmitKeyedPropertyLoad(callee->AsProperty());
2930 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2912 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2931 2913
2932 // Push the target function under the receiver. 2914 // Push the target function under the receiver.
2933 __ ldr(ip, MemOperand(sp, 0)); 2915 __ LoadP(ip, MemOperand(sp, 0));
2934 __ push(ip); 2916 __ push(ip);
2935 __ str(r0, MemOperand(sp, kPointerSize)); 2917 __ StoreP(r3, MemOperand(sp, kPointerSize));
2936 2918
2937 EmitCall(expr, CallICState::METHOD); 2919 EmitCall(expr, CallICState::METHOD);
2938 } 2920 }
2939 2921
2940 2922
2941 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { 2923 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2942 Expression* callee = expr->expression(); 2924 Expression* callee = expr->expression();
2943 DCHECK(callee->IsProperty()); 2925 DCHECK(callee->IsProperty());
2944 Property* prop = callee->AsProperty(); 2926 Property* prop = callee->AsProperty();
2945 DCHECK(prop->IsSuperAccess()); 2927 DCHECK(prop->IsSuperAccess());
2946 2928
2947 SetSourcePosition(prop->position()); 2929 SetSourcePosition(prop->position());
2948 // Load the function from the receiver. 2930 // Load the function from the receiver.
2949 const Register scratch = r1; 2931 const Register scratch = r4;
2950 SuperReference* super_ref = prop->obj()->AsSuperReference(); 2932 SuperReference* super_ref = prop->obj()->AsSuperReference();
2951 EmitLoadHomeObject(super_ref); 2933 EmitLoadHomeObject(super_ref);
2952 __ Push(r0); 2934 __ Push(r3);
2953 VisitForAccumulatorValue(super_ref->this_var()); 2935 VisitForAccumulatorValue(super_ref->this_var());
2954 __ Push(r0); 2936 __ Push(r3);
2955 __ Push(r0); 2937 __ Push(r3);
2956 __ ldr(scratch, MemOperand(sp, kPointerSize * 2)); 2938 __ LoadP(scratch, MemOperand(sp, kPointerSize * 2));
2957 __ Push(scratch); 2939 __ Push(scratch);
2958 VisitForStackValue(prop->key()); 2940 VisitForStackValue(prop->key());
2959 2941
2960 // Stack here: 2942 // Stack here:
2961 // - home_object 2943 // - home_object
2962 // - this (receiver) 2944 // - this (receiver)
2963 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. 2945 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2964 // - home_object 2946 // - home_object
2965 // - key 2947 // - key
2966 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3); 2948 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 3);
2967 2949
2968 // Replace home_object with target function. 2950 // Replace home_object with target function.
2969 __ str(r0, MemOperand(sp, kPointerSize)); 2951 __ StoreP(r3, MemOperand(sp, kPointerSize));
2970 2952
2971 // Stack here: 2953 // Stack here:
2972 // - target function 2954 // - target function
2973 // - this (receiver) 2955 // - this (receiver)
2974 EmitCall(expr, CallICState::METHOD); 2956 EmitCall(expr, CallICState::METHOD);
2975 } 2957 }
2976 2958
2977 2959
2978 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) { 2960 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2979 // Load the arguments. 2961 // Load the arguments.
2980 ZoneList<Expression*>* args = expr->arguments(); 2962 ZoneList<Expression*>* args = expr->arguments();
2981 int arg_count = args->length(); 2963 int arg_count = args->length();
2982 { PreservePositionScope scope(masm()->positions_recorder()); 2964 {
2965 PreservePositionScope scope(masm()->positions_recorder());
2983 for (int i = 0; i < arg_count; i++) { 2966 for (int i = 0; i < arg_count; i++) {
2984 VisitForStackValue(args->at(i)); 2967 VisitForStackValue(args->at(i));
2985 } 2968 }
2986 } 2969 }
2987 2970
2988 // Record source position of the IC call. 2971 // Record source position of the IC call.
2989 SetSourcePosition(expr->position()); 2972 SetSourcePosition(expr->position());
2990 Handle<Code> ic = CallIC::initialize_stub( 2973 Handle<Code> ic = CallIC::initialize_stub(isolate(), arg_count, call_type);
2991 isolate(), arg_count, call_type); 2974 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackSlot()));
2992 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot()))); 2975 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2993 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2994 // Don't assign a type feedback id to the IC, since type feedback is provided 2976 // Don't assign a type feedback id to the IC, since type feedback is provided
2995 // by the vector above. 2977 // by the vector above.
2996 CallIC(ic); 2978 CallIC(ic);
2997 2979
2998 RecordJSReturnSite(expr); 2980 RecordJSReturnSite(expr);
2999 // Restore context register. 2981 // Restore context register.
3000 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2982 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3001 context()->DropAndPlug(1, r0); 2983 context()->DropAndPlug(1, r3);
3002 } 2984 }
3003 2985
3004 2986
3005 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { 2987 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3006 // r5: copy of the first argument or undefined if it doesn't exist. 2988 // r8: copy of the first argument or undefined if it doesn't exist.
3007 if (arg_count > 0) { 2989 if (arg_count > 0) {
3008 __ ldr(r5, MemOperand(sp, arg_count * kPointerSize)); 2990 __ LoadP(r8, MemOperand(sp, arg_count * kPointerSize), r0);
3009 } else { 2991 } else {
3010 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); 2992 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
3011 } 2993 }
3012 2994
 3013 // r4: the enclosing function. 2995 // r7: the enclosing function.
3014 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 2996 __ LoadP(r7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3015 2997
3016 // r3: the receiver of the enclosing function. 2998 // r6: the receiver of the enclosing function.
3017 int receiver_offset = 2 + info_->scope()->num_parameters(); 2999 int receiver_offset = 2 + info_->scope()->num_parameters();
3018 __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize)); 3000 __ LoadP(r6, MemOperand(fp, receiver_offset * kPointerSize), r0);
3019 3001
3020 // r2: strict mode. 3002 // r5: strict mode.
3021 __ mov(r2, Operand(Smi::FromInt(strict_mode()))); 3003 __ LoadSmiLiteral(r5, Smi::FromInt(strict_mode()));
3022 3004
 3023 // r1: the start position of the scope the call resides in. 3005 // r4: the start position of the scope the call resides in.
3024 __ mov(r1, Operand(Smi::FromInt(scope()->start_position()))); 3006 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
3025 3007
3026 // Do the runtime call. 3008 // Do the runtime call.
3027 __ Push(r5); 3009 __ Push(r8, r7, r6, r5, r4);
3028 __ Push(r4, r3, r2, r1);
3029 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6); 3010 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
3030 } 3011 }
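
Reviewer note: the five-register Push above plus the callee copy pushed by VisitCall make up the six arguments the runtime call declares. A sketch of what Runtime::kResolvePossiblyDirectEval sees, deepest first, assuming the register assignments commented above:

    // callee (copy)                           <- pushed in VisitCall before this helper
    // r8: first argument, or undefined
    // r7: the enclosing function
    // r6: the receiver of the enclosing function
    // r5: strict mode (Smi)
    // r4: scope start position (Smi)
    __ Push(r8, r7, r6, r5, r4);
    __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);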
3031 3012
3032 3013
3033 void FullCodeGenerator::EmitLoadSuperConstructor(SuperReference* super_ref) { 3014 void FullCodeGenerator::EmitLoadSuperConstructor(SuperReference* super_ref) {
3034 DCHECK(super_ref != NULL); 3015 DCHECK(super_ref != NULL);
3035 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 3016 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3036 __ Push(r0); 3017 __ Push(r3);
3037 __ CallRuntime(Runtime::kGetPrototype, 1); 3018 __ CallRuntime(Runtime::kGetPrototype, 1);
3038 } 3019 }
3039 3020
3040 3021
3041 void FullCodeGenerator::VisitCall(Call* expr) { 3022 void FullCodeGenerator::VisitCall(Call* expr) {
3042 #ifdef DEBUG 3023 #ifdef DEBUG
3043 // We want to verify that RecordJSReturnSite gets called on all paths 3024 // We want to verify that RecordJSReturnSite gets called on all paths
3044 // through this function. Avoid early returns. 3025 // through this function. Avoid early returns.
3045 expr->return_is_recorded_ = false; 3026 expr->return_is_recorded_ = false;
3046 #endif 3027 #endif
3047 3028
3048 Comment cmnt(masm_, "[ Call"); 3029 Comment cmnt(masm_, "[ Call");
3049 Expression* callee = expr->expression(); 3030 Expression* callee = expr->expression();
3050 Call::CallType call_type = expr->GetCallType(isolate()); 3031 Call::CallType call_type = expr->GetCallType(isolate());
3051 3032
3052 if (call_type == Call::POSSIBLY_EVAL_CALL) { 3033 if (call_type == Call::POSSIBLY_EVAL_CALL) {
 3053 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval 3034 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
3054 // to resolve the function we need to call and the receiver of the 3035 // to resolve the function we need to call and the receiver of the
3055 // call. Then we call the resolved function using the given 3036 // call. Then we call the resolved function using the given
3056 // arguments. 3037 // arguments.
3057 ZoneList<Expression*>* args = expr->arguments(); 3038 ZoneList<Expression*>* args = expr->arguments();
3058 int arg_count = args->length(); 3039 int arg_count = args->length();
3059 3040
3060 { PreservePositionScope pos_scope(masm()->positions_recorder()); 3041 {
3042 PreservePositionScope pos_scope(masm()->positions_recorder());
3061 VisitForStackValue(callee); 3043 VisitForStackValue(callee);
3062 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); 3044 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
3063 __ push(r2); // Reserved receiver slot. 3045 __ push(r5); // Reserved receiver slot.
3064 3046
3065 // Push the arguments. 3047 // Push the arguments.
3066 for (int i = 0; i < arg_count; i++) { 3048 for (int i = 0; i < arg_count; i++) {
3067 VisitForStackValue(args->at(i)); 3049 VisitForStackValue(args->at(i));
3068 } 3050 }
3069 3051
3070 // Push a copy of the function (found below the arguments) and 3052 // Push a copy of the function (found below the arguments) and
3071 // resolve eval. 3053 // resolve eval.
3072 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 3054 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3073 __ push(r1); 3055 __ push(r4);
3074 EmitResolvePossiblyDirectEval(arg_count); 3056 EmitResolvePossiblyDirectEval(arg_count);
3075 3057
3076 // The runtime call returns a pair of values in r0 (function) and 3058 // The runtime call returns a pair of values in r3 (function) and
3077 // r1 (receiver). Touch up the stack with the right values. 3059 // r4 (receiver). Touch up the stack with the right values.
3078 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); 3060 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3079 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); 3061 __ StoreP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3080 3062
3081 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS); 3063 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3082 } 3064 }
3083 3065
3084 // Record source position for debugger. 3066 // Record source position for debugger.
3085 SetSourcePosition(expr->position()); 3067 SetSourcePosition(expr->position());
3086 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); 3068 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3087 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 3069 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
3088 __ CallStub(&stub); 3070 __ CallStub(&stub);
3089 RecordJSReturnSite(expr); 3071 RecordJSReturnSite(expr);
3090 // Restore context register. 3072 // Restore context register.
3091 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3073 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3092 context()->DropAndPlug(1, r0); 3074 context()->DropAndPlug(1, r3);
3093 } else if (call_type == Call::GLOBAL_CALL) { 3075 } else if (call_type == Call::GLOBAL_CALL) {
3094 EmitCallWithLoadIC(expr); 3076 EmitCallWithLoadIC(expr);
3095 3077
3096 } else if (call_type == Call::LOOKUP_SLOT_CALL) { 3078 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3097 // Call to a lookup slot (dynamically introduced variable). 3079 // Call to a lookup slot (dynamically introduced variable).
3098 VariableProxy* proxy = callee->AsVariableProxy(); 3080 VariableProxy* proxy = callee->AsVariableProxy();
3099 Label slow, done; 3081 Label slow, done;
3100 3082
3101 { PreservePositionScope scope(masm()->positions_recorder()); 3083 {
3084 PreservePositionScope scope(masm()->positions_recorder());
3102 // Generate code for loading from variables potentially shadowed 3085 // Generate code for loading from variables potentially shadowed
3103 // by eval-introduced variables. 3086 // by eval-introduced variables.
3104 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done); 3087 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
3105 } 3088 }
3106 3089
3107 __ bind(&slow); 3090 __ bind(&slow);
3108 // Call the runtime to find the function to call (returned in r0) 3091 // Call the runtime to find the function to call (returned in r3)
 3109 // and the object holding it (returned in r1). 3092 // and the object holding it (returned in r4).
3110 DCHECK(!context_register().is(r2)); 3093 DCHECK(!context_register().is(r5));
3111 __ mov(r2, Operand(proxy->name())); 3094 __ mov(r5, Operand(proxy->name()));
3112 __ Push(context_register(), r2); 3095 __ Push(context_register(), r5);
3113 __ CallRuntime(Runtime::kLoadLookupSlot, 2); 3096 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3114 __ Push(r0, r1); // Function, receiver. 3097 __ Push(r3, r4); // Function, receiver.
3115 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS); 3098 PrepareForBailoutForId(expr->EvalOrLookupId(), NO_REGISTERS);
3116 3099
3117 // If fast case code has been generated, emit code to push the 3100 // If fast case code has been generated, emit code to push the
3118 // function and receiver and have the slow path jump around this 3101 // function and receiver and have the slow path jump around this
3119 // code. 3102 // code.
3120 if (done.is_linked()) { 3103 if (done.is_linked()) {
3121 Label call; 3104 Label call;
3122 __ b(&call); 3105 __ b(&call);
3123 __ bind(&done); 3106 __ bind(&done);
3124 // Push function. 3107 // Push function.
3125 __ push(r0); 3108 __ push(r3);
3126 // The receiver is implicitly the global receiver. Indicate this 3109 // The receiver is implicitly the global receiver. Indicate this
 3127 // by passing undefined to the call function stub. 3110 // by passing undefined to the call function stub.
3128 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 3111 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3129 __ push(r1); 3112 __ push(r4);
3130 __ bind(&call); 3113 __ bind(&call);
3131 } 3114 }
3132 3115
3133 // The receiver is either the global receiver or an object found 3116 // The receiver is either the global receiver or an object found
3134 // by LoadContextSlot. 3117 // by LoadContextSlot.
3135 EmitCall(expr); 3118 EmitCall(expr);
3136 } else if (call_type == Call::PROPERTY_CALL) { 3119 } else if (call_type == Call::PROPERTY_CALL) {
3137 Property* property = callee->AsProperty(); 3120 Property* property = callee->AsProperty();
3138 bool is_named_call = property->key()->IsPropertyName(); 3121 bool is_named_call = property->key()->IsPropertyName();
3139 if (property->IsSuperAccess()) { 3122 if (property->IsSuperAccess()) {
(...skipping 15 matching lines...)
3155 } 3138 }
3156 } else if (call_type == Call::SUPER_CALL) { 3139 } else if (call_type == Call::SUPER_CALL) {
3157 SuperReference* super_ref = callee->AsSuperReference(); 3140 SuperReference* super_ref = callee->AsSuperReference();
3158 EmitLoadSuperConstructor(super_ref); 3141 EmitLoadSuperConstructor(super_ref);
3159 __ Push(result_register()); 3142 __ Push(result_register());
3160 VisitForStackValue(super_ref->this_var()); 3143 VisitForStackValue(super_ref->this_var());
3161 EmitCall(expr, CallICState::METHOD); 3144 EmitCall(expr, CallICState::METHOD);
3162 } else { 3145 } else {
3163 DCHECK(call_type == Call::OTHER_CALL); 3146 DCHECK(call_type == Call::OTHER_CALL);
3164 // Call to an arbitrary expression not handled specially above. 3147 // Call to an arbitrary expression not handled specially above.
3165 { PreservePositionScope scope(masm()->positions_recorder()); 3148 {
3149 PreservePositionScope scope(masm()->positions_recorder());
3166 VisitForStackValue(callee); 3150 VisitForStackValue(callee);
3167 } 3151 }
3168 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); 3152 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
3169 __ push(r1); 3153 __ push(r4);
3170 // Emit function call. 3154 // Emit function call.
3171 EmitCall(expr); 3155 EmitCall(expr);
3172 } 3156 }
3173 3157
3174 #ifdef DEBUG 3158 #ifdef DEBUG
3175 // RecordJSReturnSite should have been called. 3159 // RecordJSReturnSite should have been called.
3176 DCHECK(expr->return_is_recorded_); 3160 DCHECK(expr->return_is_recorded_);
3177 #endif 3161 #endif
3178 } 3162 }
3179 3163
(...skipping 18 matching lines...)
3198 ZoneList<Expression*>* args = expr->arguments(); 3182 ZoneList<Expression*>* args = expr->arguments();
3199 int arg_count = args->length(); 3183 int arg_count = args->length();
3200 for (int i = 0; i < arg_count; i++) { 3184 for (int i = 0; i < arg_count; i++) {
3201 VisitForStackValue(args->at(i)); 3185 VisitForStackValue(args->at(i));
3202 } 3186 }
3203 3187
3204 // Call the construct call builtin that handles allocation and 3188 // Call the construct call builtin that handles allocation and
3205 // constructor invocation. 3189 // constructor invocation.
3206 SetSourcePosition(expr->position()); 3190 SetSourcePosition(expr->position());
3207 3191
3208 // Load function and argument count into r1 and r0. 3192 // Load function and argument count into r4 and r3.
3209 __ mov(r0, Operand(arg_count)); 3193 __ mov(r3, Operand(arg_count));
3210 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); 3194 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3211 3195
3212 // Record call targets in unoptimized code. 3196 // Record call targets in unoptimized code.
3213 if (FLAG_pretenuring_call_new) { 3197 if (FLAG_pretenuring_call_new) {
3214 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); 3198 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3215 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() == 3199 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3216 expr->CallNewFeedbackSlot().ToInt() + 1); 3200 expr->CallNewFeedbackSlot().ToInt() + 1);
3217 } 3201 }
3218 3202
3219 __ Move(r2, FeedbackVector()); 3203 __ Move(r5, FeedbackVector());
3220 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot()))); 3204 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
3221 3205
3222 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); 3206 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3223 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); 3207 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3224 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 3208 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3225 context()->Plug(r0); 3209 context()->Plug(r3);
3226 } 3210 }
3227 3211
3228 3212
3229 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 3213 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3230 ZoneList<Expression*>* args = expr->arguments(); 3214 ZoneList<Expression*>* args = expr->arguments();
3231 DCHECK(args->length() == 1); 3215 DCHECK(args->length() == 1);
3232 3216
3233 VisitForAccumulatorValue(args->at(0)); 3217 VisitForAccumulatorValue(args->at(0));
3234 3218
3235 Label materialize_true, materialize_false; 3219 Label materialize_true, materialize_false;
3236 Label* if_true = NULL; 3220 Label* if_true = NULL;
3237 Label* if_false = NULL; 3221 Label* if_false = NULL;
3238 Label* fall_through = NULL; 3222 Label* fall_through = NULL;
3239 context()->PrepareTest(&materialize_true, &materialize_false, 3223 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3240 &if_true, &if_false, &fall_through); 3224 &if_false, &fall_through);
3241 3225
3242 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3226 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3243 __ SmiTst(r0); 3227 __ TestIfSmi(r3, r0);
3244 Split(eq, if_true, if_false, fall_through); 3228 Split(eq, if_true, if_false, fall_through, cr0);
3245 3229
3246 context()->Plug(if_true, if_false); 3230 context()->Plug(if_true, if_false);
3247 } 3231 }
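
Reviewer note: TestIfSmi replaces ARM's SmiTst, but the underlying check is the same tag test, here anded into r0 and recorded in cr0 (hence the cr0 operand to Split). A standalone model, assuming the usual V8 tagging where a zero low bit marks a Smi (the shift is 1 on 32-bit targets and wider on 64-bit ones):

    #include <cstdint>

    constexpr intptr_t kSmiTagMask = 1;

    bool IsSmi(intptr_t tagged) { return (tagged & kSmiTagMask) == 0; }

    // 32-bit style tagging for illustration: value << 1 with a zero tag bit.
    intptr_t TagSmi(intptr_t v) { return v << 1; }
    intptr_t UntagSmi(intptr_t smi) { return smi >> 1; }  // arithmetic shift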
3248 3232
3249 3233
3250 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 3234 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3251 ZoneList<Expression*>* args = expr->arguments(); 3235 ZoneList<Expression*>* args = expr->arguments();
3252 DCHECK(args->length() == 1); 3236 DCHECK(args->length() == 1);
3253 3237
3254 VisitForAccumulatorValue(args->at(0)); 3238 VisitForAccumulatorValue(args->at(0));
3255 3239
3256 Label materialize_true, materialize_false; 3240 Label materialize_true, materialize_false;
3257 Label* if_true = NULL; 3241 Label* if_true = NULL;
3258 Label* if_false = NULL; 3242 Label* if_false = NULL;
3259 Label* fall_through = NULL; 3243 Label* fall_through = NULL;
3260 context()->PrepareTest(&materialize_true, &materialize_false, 3244 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3261 &if_true, &if_false, &fall_through); 3245 &if_false, &fall_through);
3262 3246
3263 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3247 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3264 __ NonNegativeSmiTst(r0); 3248 __ TestIfPositiveSmi(r3, r0);
3265 Split(eq, if_true, if_false, fall_through); 3249 Split(eq, if_true, if_false, fall_through, cr0);
3266 3250
3267 context()->Plug(if_true, if_false); 3251 context()->Plug(if_true, if_false);
3268 } 3252 }
3269 3253
3270 3254
3271 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 3255 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3272 ZoneList<Expression*>* args = expr->arguments(); 3256 ZoneList<Expression*>* args = expr->arguments();
3273 DCHECK(args->length() == 1); 3257 DCHECK(args->length() == 1);
3274 3258
3275 VisitForAccumulatorValue(args->at(0)); 3259 VisitForAccumulatorValue(args->at(0));
3276 3260
3277 Label materialize_true, materialize_false; 3261 Label materialize_true, materialize_false;
3278 Label* if_true = NULL; 3262 Label* if_true = NULL;
3279 Label* if_false = NULL; 3263 Label* if_false = NULL;
3280 Label* fall_through = NULL; 3264 Label* fall_through = NULL;
3281 context()->PrepareTest(&materialize_true, &materialize_false, 3265 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3282 &if_true, &if_false, &fall_through); 3266 &if_false, &fall_through);
3283 3267
3284 __ JumpIfSmi(r0, if_false); 3268 __ JumpIfSmi(r3, if_false);
3285 __ LoadRoot(ip, Heap::kNullValueRootIndex); 3269 __ LoadRoot(ip, Heap::kNullValueRootIndex);
3286 __ cmp(r0, ip); 3270 __ cmp(r3, ip);
3287 __ b(eq, if_true); 3271 __ beq(if_true);
3288 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); 3272 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
3289 // Undetectable objects behave like undefined when tested with typeof. 3273 // Undetectable objects behave like undefined when tested with typeof.
3290 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); 3274 __ lbz(r4, FieldMemOperand(r5, Map::kBitFieldOffset));
3291 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3275 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3292 __ b(ne, if_false); 3276 __ bne(if_false, cr0);
3293 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); 3277 __ lbz(r4, FieldMemOperand(r5, Map::kInstanceTypeOffset));
3294 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 3278 __ cmpi(r4, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3295 __ b(lt, if_false); 3279 __ blt(if_false);
3296 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); 3280 __ cmpi(r4, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
3297 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3281 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3298 Split(le, if_true, if_false, fall_through); 3282 Split(le, if_true, if_false, fall_through);
3299 3283
3300 context()->Plug(if_true, if_false); 3284 context()->Plug(if_true, if_false);
3301 } 3285 }
3302 3286
3303 3287
3304 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 3288 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3305 ZoneList<Expression*>* args = expr->arguments(); 3289 ZoneList<Expression*>* args = expr->arguments();
3306 DCHECK(args->length() == 1); 3290 DCHECK(args->length() == 1);
3307 3291
3308 VisitForAccumulatorValue(args->at(0)); 3292 VisitForAccumulatorValue(args->at(0));
3309 3293
3310 Label materialize_true, materialize_false; 3294 Label materialize_true, materialize_false;
3311 Label* if_true = NULL; 3295 Label* if_true = NULL;
3312 Label* if_false = NULL; 3296 Label* if_false = NULL;
3313 Label* fall_through = NULL; 3297 Label* fall_through = NULL;
3314 context()->PrepareTest(&materialize_true, &materialize_false, 3298 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3315 &if_true, &if_false, &fall_through); 3299 &if_false, &fall_through);
3316 3300
3317 __ JumpIfSmi(r0, if_false); 3301 __ JumpIfSmi(r3, if_false);
3318 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); 3302 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
3319 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3303 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3320 Split(ge, if_true, if_false, fall_through); 3304 Split(ge, if_true, if_false, fall_through);
3321 3305
3322 context()->Plug(if_true, if_false); 3306 context()->Plug(if_true, if_false);
3323 } 3307 }
3324 3308
3325 3309
3326 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 3310 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3327 ZoneList<Expression*>* args = expr->arguments(); 3311 ZoneList<Expression*>* args = expr->arguments();
3328 DCHECK(args->length() == 1); 3312 DCHECK(args->length() == 1);
3329 3313
3330 VisitForAccumulatorValue(args->at(0)); 3314 VisitForAccumulatorValue(args->at(0));
3331 3315
3332 Label materialize_true, materialize_false; 3316 Label materialize_true, materialize_false;
3333 Label* if_true = NULL; 3317 Label* if_true = NULL;
3334 Label* if_false = NULL; 3318 Label* if_false = NULL;
3335 Label* fall_through = NULL; 3319 Label* fall_through = NULL;
3336 context()->PrepareTest(&materialize_true, &materialize_false, 3320 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3337 &if_true, &if_false, &fall_through); 3321 &if_false, &fall_through);
3338 3322
3339 __ JumpIfSmi(r0, if_false); 3323 __ JumpIfSmi(r3, if_false);
3340 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3324 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3341 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); 3325 __ lbz(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
3342 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 3326 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3343 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3327 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3344 Split(ne, if_true, if_false, fall_through); 3328 Split(ne, if_true, if_false, fall_through, cr0);
3345 3329
3346 context()->Plug(if_true, if_false); 3330 context()->Plug(if_true, if_false);
3347 } 3331 }
3348 3332
3349 3333
3350 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 3334 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3351 CallRuntime* expr) { 3335 CallRuntime* expr) {
3352 ZoneList<Expression*>* args = expr->arguments(); 3336 ZoneList<Expression*>* args = expr->arguments();
3353 DCHECK(args->length() == 1); 3337 DCHECK(args->length() == 1);
3354 3338
3355 VisitForAccumulatorValue(args->at(0)); 3339 VisitForAccumulatorValue(args->at(0));
3356 3340
3357 Label materialize_true, materialize_false, skip_lookup; 3341 Label materialize_true, materialize_false, skip_lookup;
3358 Label* if_true = NULL; 3342 Label* if_true = NULL;
3359 Label* if_false = NULL; 3343 Label* if_false = NULL;
3360 Label* fall_through = NULL; 3344 Label* fall_through = NULL;
3361 context()->PrepareTest(&materialize_true, &materialize_false, 3345 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3362 &if_true, &if_false, &fall_through); 3346 &if_false, &fall_through);
3363 3347
3364 __ AssertNotSmi(r0); 3348 __ AssertNotSmi(r3);
3365 3349
3366 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); 3350 __ LoadP(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
3367 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); 3351 __ lbz(ip, FieldMemOperand(r4, Map::kBitField2Offset));
3368 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3352 __ andi(r0, ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3369 __ b(ne, &skip_lookup); 3353 __ bne(&skip_lookup, cr0);
3370 3354
3371 // Check for fast case object. Generate false result for slow case object. 3355 // Check for fast case object. Generate false result for slow case object.
3372 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); 3356 __ LoadP(r5, FieldMemOperand(r3, JSObject::kPropertiesOffset));
3373 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 3357 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3374 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 3358 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
3375 __ cmp(r2, ip); 3359 __ cmp(r5, ip);
3376 __ b(eq, if_false); 3360 __ beq(if_false);
3377 3361
3378 // Look for valueOf name in the descriptor array, and indicate false if 3362 // Look for valueOf name in the descriptor array, and indicate false if
3379 // found. Since we omit an enumeration index check, if it is added via a 3363 // found. Since we omit an enumeration index check, if it is added via a
3380 // transition that shares its descriptor array, this is a false positive. 3364 // transition that shares its descriptor array, this is a false positive.
3381 Label entry, loop, done; 3365 Label entry, loop, done;
3382 3366
3383 // Skip loop if no descriptors are valid. 3367 // Skip loop if no descriptors are valid.
3384 __ NumberOfOwnDescriptors(r3, r1); 3368 __ NumberOfOwnDescriptors(r6, r4);
3385 __ cmp(r3, Operand::Zero()); 3369 __ cmpi(r6, Operand::Zero());
3386 __ b(eq, &done); 3370 __ beq(&done);
3387 3371
3388 __ LoadInstanceDescriptors(r1, r4); 3372 __ LoadInstanceDescriptors(r4, r7);
3389 // r4: descriptor array. 3373 // r7: descriptor array.
3390 // r3: valid entries in the descriptor array. 3374 // r6: valid entries in the descriptor array.
3391 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); 3375 __ mov(ip, Operand(DescriptorArray::kDescriptorSize));
3392 __ mul(r3, r3, ip); 3376 __ Mul(r6, r6, ip);
3393 // Calculate location of the first key name. 3377 // Calculate location of the first key name.
3394 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); 3378 __ addi(r7, r7, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3395 // Calculate the end of the descriptor array. 3379 // Calculate the end of the descriptor array.
3396 __ mov(r2, r4); 3380 __ mr(r5, r7);
3397 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2)); 3381 __ ShiftLeftImm(ip, r6, Operand(kPointerSizeLog2));
3382 __ add(r5, r5, ip);
3398 3383
3399 // Loop through all the keys in the descriptor array. If one of these is the 3384 // Loop through all the keys in the descriptor array. If one of these is the
3400 // string "valueOf" the result is false. 3385 // string "valueOf" the result is false.
3401 // The use of ip to store the valueOf string assumes that it is not otherwise 3386 // The use of ip to store the valueOf string assumes that it is not otherwise
3402 // used in the loop below. 3387 // used in the loop below.
3403 __ mov(ip, Operand(isolate()->factory()->value_of_string())); 3388 __ mov(ip, Operand(isolate()->factory()->value_of_string()));
3404 __ jmp(&entry); 3389 __ b(&entry);
3405 __ bind(&loop); 3390 __ bind(&loop);
3406 __ ldr(r3, MemOperand(r4, 0)); 3391 __ LoadP(r6, MemOperand(r7, 0));
3407 __ cmp(r3, ip); 3392 __ cmp(r6, ip);
3408 __ b(eq, if_false); 3393 __ beq(if_false);
3409 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); 3394 __ addi(r7, r7, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3410 __ bind(&entry); 3395 __ bind(&entry);
3411 __ cmp(r4, Operand(r2)); 3396 __ cmp(r7, r5);
3412 __ b(ne, &loop); 3397 __ bne(&loop);
3413 3398
3414 __ bind(&done); 3399 __ bind(&done);
3415 3400
3416 // Set the bit in the map to indicate that there is no local valueOf field. 3401 // Set the bit in the map to indicate that there is no local valueOf field.
3417 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 3402 __ lbz(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3418 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3403 __ ori(r5, r5, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3419 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); 3404 __ stb(r5, FieldMemOperand(r4, Map::kBitField2Offset));
3420 3405
3421 __ bind(&skip_lookup); 3406 __ bind(&skip_lookup);
3422 3407
3423 // If a valueOf property is not found on the object check that its 3408 // If a valueOf property is not found on the object check that its
3424 // prototype is the un-modified String prototype. If not result is false. 3409 // prototype is the un-modified String prototype. If not result is false.
3425 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); 3410 __ LoadP(r5, FieldMemOperand(r4, Map::kPrototypeOffset));
3426 __ JumpIfSmi(r2, if_false); 3411 __ JumpIfSmi(r5, if_false);
3427 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); 3412 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset));
3428 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3413 __ LoadP(r6, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3429 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); 3414 __ LoadP(r6, FieldMemOperand(r6, GlobalObject::kNativeContextOffset));
3430 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); 3415 __ LoadP(r6,
3431 __ cmp(r2, r3); 3416 ContextOperand(r6, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3417 __ cmp(r5, r6);
3432 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3418 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3433 Split(eq, if_true, if_false, fall_through); 3419 Split(eq, if_true, if_false, fall_through);
3434 3420
3435 context()->Plug(if_true, if_false); 3421 context()->Plug(if_true, if_false);
3436 } 3422 }
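
Reviewer note: the loop above walks descriptor-array keys at a fixed stride, comparing each against the interned "valueOf" string by identity (ip holds the string; the cmp is a pointer compare). A hypothetical flat model of that walk, with an assumed stride of three slots per descriptor (key, details, value):

    constexpr int kDescriptorSize = 3;  // assumed layout: key, details, value

    bool HasValueOfKey(void* const* first_key, int descriptor_count,
                       const void* value_of_string) {
      void* const* end = first_key + descriptor_count * kDescriptorSize;
      for (void* const* p = first_key; p != end; p += kDescriptorSize) {
        if (*p == value_of_string) return true;  // identity compare, as emitted
      }
      return false;
    }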
3437 3423
3438 3424
3439 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 3425 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3440 ZoneList<Expression*>* args = expr->arguments(); 3426 ZoneList<Expression*>* args = expr->arguments();
3441 DCHECK(args->length() == 1); 3427 DCHECK(args->length() == 1);
3442 3428
3443 VisitForAccumulatorValue(args->at(0)); 3429 VisitForAccumulatorValue(args->at(0));
3444 3430
3445 Label materialize_true, materialize_false; 3431 Label materialize_true, materialize_false;
3446 Label* if_true = NULL; 3432 Label* if_true = NULL;
3447 Label* if_false = NULL; 3433 Label* if_false = NULL;
3448 Label* fall_through = NULL; 3434 Label* fall_through = NULL;
3449 context()->PrepareTest(&materialize_true, &materialize_false, 3435 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3450 &if_true, &if_false, &fall_through); 3436 &if_false, &fall_through);
3451 3437
3452 __ JumpIfSmi(r0, if_false); 3438 __ JumpIfSmi(r3, if_false);
3453 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); 3439 __ CompareObjectType(r3, r4, r5, JS_FUNCTION_TYPE);
3454 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3440 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3455 Split(eq, if_true, if_false, fall_through); 3441 Split(eq, if_true, if_false, fall_through);
3456 3442
3457 context()->Plug(if_true, if_false); 3443 context()->Plug(if_true, if_false);
3458 } 3444 }
3459 3445
3460 3446
3461 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 3447 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3462 ZoneList<Expression*>* args = expr->arguments(); 3448 ZoneList<Expression*>* args = expr->arguments();
3463 DCHECK(args->length() == 1); 3449 DCHECK(args->length() == 1);
3464 3450
3465 VisitForAccumulatorValue(args->at(0)); 3451 VisitForAccumulatorValue(args->at(0));
3466 3452
3467 Label materialize_true, materialize_false; 3453 Label materialize_true, materialize_false;
3468 Label* if_true = NULL; 3454 Label* if_true = NULL;
3469 Label* if_false = NULL; 3455 Label* if_false = NULL;
3470 Label* fall_through = NULL; 3456 Label* fall_through = NULL;
3471 context()->PrepareTest(&materialize_true, &materialize_false, 3457 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3472 &if_true, &if_false, &fall_through); 3458 &if_false, &fall_through);
3473 3459
3474 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); 3460 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3475 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 3461 #if V8_TARGET_ARCH_PPC64
3476 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); 3462 __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3477 __ cmp(r2, Operand(0x80000000)); 3463 __ li(r5, Operand(1));
3478 __ cmp(r1, Operand(0x00000000), eq); 3464 __ rotrdi(r5, r5, 1); // r5 = 0x80000000_00000000
3465 __ cmp(r4, r5);
3466 #else
3467 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3468 __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3469 Label skip;
3470 __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3471 __ cmp(r5, r0);
3472 __ bne(&skip);
3473 __ cmpi(r4, Operand::Zero());
3474 __ bind(&skip);
3475 #endif
3479 3476
3480 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3477 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3481 Split(eq, if_true, if_false, fall_through); 3478 Split(eq, if_true, if_false, fall_through);
3482 3479
3483 context()->Plug(if_true, if_false); 3480 context()->Plug(if_true, if_false);
3484 } 3481 }
3485 3482
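// -0.0 and +0.0 differ only in the IEEE-754 sign bit, so the PPC64 path
// compares the raw heap-number payload against 0x8000000000000000
// (materialized by li(1) followed by rotrdi(., 1)), and the 32-bit path
// checks the exponent word against 0x80000000 and the mantissa word against
// zero. A host-side sketch of the same predicate, assuming IEEE-754 doubles
// (not part of this patch):
//
//   #include <cstdint>
//   #include <cstring>
//   static bool IsMinusZero(double v) {
//     uint64_t bits;
//     std::memcpy(&bits, &v, sizeof bits);   // bit-exact view of the double
//     return bits == 0x8000000000000000ULL;  // sign bit set, all else zero
//   }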
3486 3483
3487 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 3484 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3488 ZoneList<Expression*>* args = expr->arguments(); 3485 ZoneList<Expression*>* args = expr->arguments();
3489 DCHECK(args->length() == 1); 3486 DCHECK(args->length() == 1);
3490 3487
3491 VisitForAccumulatorValue(args->at(0)); 3488 VisitForAccumulatorValue(args->at(0));
3492 3489
3493 Label materialize_true, materialize_false; 3490 Label materialize_true, materialize_false;
3494 Label* if_true = NULL; 3491 Label* if_true = NULL;
3495 Label* if_false = NULL; 3492 Label* if_false = NULL;
3496 Label* fall_through = NULL; 3493 Label* fall_through = NULL;
3497 context()->PrepareTest(&materialize_true, &materialize_false, 3494 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3498 &if_true, &if_false, &fall_through); 3495 &if_false, &fall_through);
3499 3496
3500 __ JumpIfSmi(r0, if_false); 3497 __ JumpIfSmi(r3, if_false);
3501 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); 3498 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3502 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3499 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3503 Split(eq, if_true, if_false, fall_through); 3500 Split(eq, if_true, if_false, fall_through);
3504 3501
3505 context()->Plug(if_true, if_false); 3502 context()->Plug(if_true, if_false);
3506 } 3503 }
3507 3504
3508 3505
3509 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 3506 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3510 ZoneList<Expression*>* args = expr->arguments(); 3507 ZoneList<Expression*>* args = expr->arguments();
3511 DCHECK(args->length() == 1); 3508 DCHECK(args->length() == 1);
3512 3509
3513 VisitForAccumulatorValue(args->at(0)); 3510 VisitForAccumulatorValue(args->at(0));
3514 3511
3515 Label materialize_true, materialize_false; 3512 Label materialize_true, materialize_false;
3516 Label* if_true = NULL; 3513 Label* if_true = NULL;
3517 Label* if_false = NULL; 3514 Label* if_false = NULL;
3518 Label* fall_through = NULL; 3515 Label* fall_through = NULL;
3519 context()->PrepareTest(&materialize_true, &materialize_false, 3516 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3520 &if_true, &if_false, &fall_through); 3517 &if_false, &fall_through);
3521 3518
3522 __ JumpIfSmi(r0, if_false); 3519 __ JumpIfSmi(r3, if_false);
3523 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); 3520 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3524 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3521 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3525 Split(eq, if_true, if_false, fall_through); 3522 Split(eq, if_true, if_false, fall_through);
3526 3523
3527 context()->Plug(if_true, if_false); 3524 context()->Plug(if_true, if_false);
3528 } 3525 }
3529 3526
3530 3527
3531 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { 3528 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3532 ZoneList<Expression*>* args = expr->arguments(); 3529 ZoneList<Expression*>* args = expr->arguments();
3533 DCHECK(args->length() == 1); 3530 DCHECK(args->length() == 1);
3534 3531
3535 VisitForAccumulatorValue(args->at(0)); 3532 VisitForAccumulatorValue(args->at(0));
3536 3533
3537 Label materialize_true, materialize_false; 3534 Label materialize_true, materialize_false;
3538 Label* if_true = NULL; 3535 Label* if_true = NULL;
3539 Label* if_false = NULL; 3536 Label* if_false = NULL;
3540 Label* fall_through = NULL; 3537 Label* fall_through = NULL;
3541 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, 3538 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3542 &if_false, &fall_through); 3539 &if_false, &fall_through);
3543 3540
3544 __ JumpIfSmi(r0, if_false); 3541 __ JumpIfSmi(r3, if_false);
3545 Register map = r1; 3542 Register map = r4;
3546 Register type_reg = r2; 3543 Register type_reg = r5;
3547 __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset)); 3544 __ LoadP(map, FieldMemOperand(r3, HeapObject::kMapOffset));
3548 __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset)); 3545 __ lbz(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3549 __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE)); 3546 __ subi(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3550 __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE)); 3547 __ cmpli(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
3551 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3548 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3552 Split(ls, if_true, if_false, fall_through); 3549 Split(le, if_true, if_false, fall_through);
3553 3550
3554 context()->Plug(if_true, if_false); 3551 context()->Plug(if_true, if_false);
3555 } 3552 }
3556 3553
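// The subi + cmpli/le pair above is the classic unsigned range check: after
// subtracting FIRST_JS_PROXY_TYPE, any type below the range wraps around to
// a large unsigned value, so a single unsigned compare covers both bounds
// (the ARM original expressed the same thing with cmp and the 'ls'
// condition). A sketch of the identity, assuming ordinary unsigned
// wrap-around arithmetic:
//
//   #include <cstdint>
//   static bool InRange(uint32_t type, uint32_t first, uint32_t last) {
//     return (type - first) <= (last - first);  // wraps below 'first'
//   }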
3557 3554
3558 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 3555 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3559 DCHECK(expr->arguments()->length() == 0); 3556 DCHECK(expr->arguments()->length() == 0);
3560 3557
3561 Label materialize_true, materialize_false; 3558 Label materialize_true, materialize_false;
3562 Label* if_true = NULL; 3559 Label* if_true = NULL;
3563 Label* if_false = NULL; 3560 Label* if_false = NULL;
3564 Label* fall_through = NULL; 3561 Label* fall_through = NULL;
3565 context()->PrepareTest(&materialize_true, &materialize_false, 3562 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3566 &if_true, &if_false, &fall_through); 3563 &if_false, &fall_through);
3567 3564
3568 // Get the frame pointer for the calling frame. 3565 // Get the frame pointer for the calling frame.
3569 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3566 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3570 3567
3571 // Skip the arguments adaptor frame if it exists. 3568 // Skip the arguments adaptor frame if it exists.
3572 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3569 Label check_frame_marker;
3573 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3570 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kContextOffset));
3574 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq); 3571 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3572 __ bne(&check_frame_marker);
3573 __ LoadP(r5, MemOperand(r5, StandardFrameConstants::kCallerFPOffset));
3575 3574
3576 // Check the marker in the calling frame. 3575 // Check the marker in the calling frame.
3577 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); 3576 __ bind(&check_frame_marker);
3578 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 3577 __ LoadP(r4, MemOperand(r5, StandardFrameConstants::kMarkerOffset));
3578 STATIC_ASSERT(StackFrame::CONSTRUCT < 0x4000);
3579 __ CmpSmiLiteral(r4, Smi::FromInt(StackFrame::CONSTRUCT), r0);
3579 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3580 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3580 Split(eq, if_true, if_false, fall_through); 3581 Split(eq, if_true, if_false, fall_through);
3581 3582
3582 context()->Plug(if_true, if_false); 3583 context()->Plug(if_true, if_false);
3583 } 3584 }
3584 3585
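// The sequence above walks one frame up the stack: load the caller's fp,
// skip it once more if its context slot holds the ARGUMENTS_ADAPTOR smi
// sentinel, then test the marker slot for CONSTRUCT. A schematic sketch
// with a hypothetical Frame layout (the real slots are at
// StandardFrameConstants::kCallerFPOffset, kContextOffset and
// kMarkerOffset):
//
//   #include <cstdint>
//   struct Frame {
//     Frame* caller;     // saved fp of the calling frame
//     intptr_t context;  // doubles as the adaptor-frame sentinel
//     intptr_t marker;   // smi-encoded frame type
//   };
//   static bool IsConstructCall(const Frame* fp, intptr_t adaptor_sentinel,
//                               intptr_t construct_marker) {
//     const Frame* caller = fp->caller;
//     if (caller->context == adaptor_sentinel) caller = caller->caller;
//     return caller->marker == construct_marker;
//   }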
3585 3586
3586 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 3587 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3587 ZoneList<Expression*>* args = expr->arguments(); 3588 ZoneList<Expression*>* args = expr->arguments();
3588 DCHECK(args->length() == 2); 3589 DCHECK(args->length() == 2);
3589 3590
3590 // Load the two objects into registers and perform the comparison. 3591 // Load the two objects into registers and perform the comparison.
3591 VisitForStackValue(args->at(0)); 3592 VisitForStackValue(args->at(0));
3592 VisitForAccumulatorValue(args->at(1)); 3593 VisitForAccumulatorValue(args->at(1));
3593 3594
3594 Label materialize_true, materialize_false; 3595 Label materialize_true, materialize_false;
3595 Label* if_true = NULL; 3596 Label* if_true = NULL;
3596 Label* if_false = NULL; 3597 Label* if_false = NULL;
3597 Label* fall_through = NULL; 3598 Label* fall_through = NULL;
3598 context()->PrepareTest(&materialize_true, &materialize_false, 3599 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3599 &if_true, &if_false, &fall_through); 3600 &if_false, &fall_through);
3600 3601
3601 __ pop(r1); 3602 __ pop(r4);
3602 __ cmp(r0, r1); 3603 __ cmp(r3, r4);
3603 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3604 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3604 Split(eq, if_true, if_false, fall_through); 3605 Split(eq, if_true, if_false, fall_through);
3605 3606
3606 context()->Plug(if_true, if_false); 3607 context()->Plug(if_true, if_false);
3607 } 3608 }
3608 3609
3609 3610
3610 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3611 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3611 ZoneList<Expression*>* args = expr->arguments(); 3612 ZoneList<Expression*>* args = expr->arguments();
3612 DCHECK(args->length() == 1); 3613 DCHECK(args->length() == 1);
3613 3614
3614 // ArgumentsAccessStub expects the key in r1 and the formal 3615 // ArgumentsAccessStub expects the key in r4 and the formal
3615 // parameter count in r0. 3616 // parameter count in r3.
3616 VisitForAccumulatorValue(args->at(0)); 3617 VisitForAccumulatorValue(args->at(0));
3617 __ mov(r1, r0); 3618 __ mr(r4, r3);
3618 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3619 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3619 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); 3620 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3620 __ CallStub(&stub); 3621 __ CallStub(&stub);
3621 context()->Plug(r0); 3622 context()->Plug(r3);
3622 } 3623 }
3623 3624
3624 3625
3625 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 3626 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3626 DCHECK(expr->arguments()->length() == 0); 3627 DCHECK(expr->arguments()->length() == 0);
3627 3628 Label exit;
3628 // Get the number of formal parameters. 3629 // Get the number of formal parameters.
3629 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3630 __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3630 3631
3631 // Check if the calling frame is an arguments adaptor frame. 3632 // Check if the calling frame is an arguments adaptor frame.
3632 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3633 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3633 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); 3634 __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
3634 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3635 __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3636 __ bne(&exit);
3635 3637
3636 // Arguments adaptor case: Read the arguments length from the 3638 // Arguments adaptor case: Read the arguments length from the
3637 // adaptor frame. 3639 // adaptor frame.
3638 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq); 3640 __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
3639 3641
3640 context()->Plug(r0); 3642 __ bind(&exit);
3643 context()->Plug(r3);
3641 } 3644 }
3642 3645
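// Same one-frame walk as in EmitIsConstructCall, but reading a value: the
// result defaults to the formal parameter count and is replaced by the
// adaptor frame's length slot when the caller went through an arguments
// adaptor. Sketch, reusing the hypothetical Frame above plus an assumed
// extra 'length' slot standing in for
// ArgumentsAdaptorFrameConstants::kLengthOffset:
//
//   struct AdaptorFrame : Frame { intptr_t length; };  // assumed layout
//   static intptr_t ArgumentsLength(const Frame* fp,
//                                   intptr_t adaptor_sentinel,
//                                   intptr_t formal_count) {
//     const Frame* caller = fp->caller;
//     if (caller->context != adaptor_sentinel) return formal_count;
//     return static_cast<const AdaptorFrame*>(caller)->length;  // adapted
//   }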
3643 3646
3644 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 3647 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3645 ZoneList<Expression*>* args = expr->arguments(); 3648 ZoneList<Expression*>* args = expr->arguments();
3646 DCHECK(args->length() == 1); 3649 DCHECK(args->length() == 1);
3647 Label done, null, function, non_function_constructor; 3650 Label done, null, function, non_function_constructor;
3648 3651
3649 VisitForAccumulatorValue(args->at(0)); 3652 VisitForAccumulatorValue(args->at(0));
3650 3653
3651 // If the object is a smi, we return null. 3654 // If the object is a smi, we return null.
3652 __ JumpIfSmi(r0, &null); 3655 __ JumpIfSmi(r3, &null);
3653 3656
3654 // Check that the object is a JS object but take special care of JS 3657 // Check that the object is a JS object but take special care of JS
3655 // functions to make sure they have 'Function' as their class. 3658 // functions to make sure they have 'Function' as their class.
3656 // Assume that there are only two callable types, and one of them is at 3659 // Assume that there are only two callable types, and one of them is at
3657 // either end of the type range for JS object types. Saves extra comparisons. 3660 // either end of the type range for JS object types. Saves extra comparisons.
3658 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 3661 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3659 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE); 3662 __ CompareObjectType(r3, r3, r4, FIRST_SPEC_OBJECT_TYPE);
3660 // Map is now in r0. 3663 // Map is now in r3.
3661 __ b(lt, &null); 3664 __ blt(&null);
3662 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == 3665 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3663 FIRST_SPEC_OBJECT_TYPE + 1); 3666 FIRST_SPEC_OBJECT_TYPE + 1);
3664 __ b(eq, &function); 3667 __ beq(&function);
3665 3668
3666 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); 3669 __ cmpi(r4, Operand(LAST_SPEC_OBJECT_TYPE));
3667 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == 3670 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_SPEC_OBJECT_TYPE - 1);
3668 LAST_SPEC_OBJECT_TYPE - 1); 3671 __ beq(&function);
3669 __ b(eq, &function);
3670 // Assume that there is no larger type. 3672 // Assume that there is no larger type.
3671 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); 3673 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3672 3674
3673 // Check if the constructor in the map is a JS function. 3675 // Check if the constructor in the map is a JS function.
3674 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); 3676 __ LoadP(r3, FieldMemOperand(r3, Map::kConstructorOffset));
3675 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 3677 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
3676 __ b(ne, &non_function_constructor); 3678 __ bne(&non_function_constructor);
3677 3679
3678 // r0 now contains the constructor function. Grab the 3680 // r3 now contains the constructor function. Grab the
3679 // instance class name from there. 3681 // instance class name from there.
3680 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); 3682 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3681 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); 3683 __ LoadP(r3,
3684 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3682 __ b(&done); 3685 __ b(&done);
3683 3686
3684 // Functions have class 'Function'. 3687 // Functions have class 'Function'.
3685 __ bind(&function); 3688 __ bind(&function);
3686 __ LoadRoot(r0, Heap::kFunction_stringRootIndex); 3689 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3687 __ jmp(&done); 3690 __ b(&done);
3688 3691
3689 // Objects with a non-function constructor have class 'Object'. 3692 // Objects with a non-function constructor have class 'Object'.
3690 __ bind(&non_function_constructor); 3693 __ bind(&non_function_constructor);
3691 __ LoadRoot(r0, Heap::kObject_stringRootIndex); 3694 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3692 __ jmp(&done); 3695 __ b(&done);
3693 3696
3694 // Non-JS objects have class null. 3697 // Non-JS objects have class null.
3695 __ bind(&null); 3698 __ bind(&null);
3696 __ LoadRoot(r0, Heap::kNullValueRootIndex); 3699 __ LoadRoot(r3, Heap::kNullValueRootIndex);
3697 3700
3698 // All done. 3701 // All done.
3699 __ bind(&done); 3702 __ bind(&done);
3700 3703
3701 context()->Plug(r0); 3704 context()->Plug(r3);
3702 } 3705 }
3703 3706
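// The STATIC_ASSERTs above pin the two callable instance types to the two
// ends of the spec-object type range, so the whole classification needs
// only a short compare-and-branch ladder. A sketch of the decision tree,
// with made-up enum values standing in for the real instance-type
// constants ("null" stands in for the null value returned for non-objects):
//
//   enum { kFirstSpec = 10, kLastSpec = 20 };  // made-up bounds
//   static const char* ClassOf(int instance_type, bool ctor_is_js_function) {
//     if (instance_type < kFirstSpec) return "null";  // not a JS object
//     if (instance_type == kFirstSpec || instance_type == kLastSpec)
//       return "Function";                     // the two callable types
//     if (!ctor_is_js_function) return "Object";
//     return "<constructor's instance class name>";
//   }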
3704 3707
3705 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3708 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3706 // Load the arguments on the stack and call the stub. 3709 // Load the arguments on the stack and call the stub.
3707 SubStringStub stub(isolate()); 3710 SubStringStub stub(isolate());
3708 ZoneList<Expression*>* args = expr->arguments(); 3711 ZoneList<Expression*>* args = expr->arguments();
3709 DCHECK(args->length() == 3); 3712 DCHECK(args->length() == 3);
3710 VisitForStackValue(args->at(0)); 3713 VisitForStackValue(args->at(0));
3711 VisitForStackValue(args->at(1)); 3714 VisitForStackValue(args->at(1));
3712 VisitForStackValue(args->at(2)); 3715 VisitForStackValue(args->at(2));
3713 __ CallStub(&stub); 3716 __ CallStub(&stub);
3714 context()->Plug(r0); 3717 context()->Plug(r3);
3715 } 3718 }
3716 3719
3717 3720
3718 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3721 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3719 // Load the arguments on the stack and call the stub. 3722 // Load the arguments on the stack and call the stub.
3720 RegExpExecStub stub(isolate()); 3723 RegExpExecStub stub(isolate());
3721 ZoneList<Expression*>* args = expr->arguments(); 3724 ZoneList<Expression*>* args = expr->arguments();
3722 DCHECK(args->length() == 4); 3725 DCHECK(args->length() == 4);
3723 VisitForStackValue(args->at(0)); 3726 VisitForStackValue(args->at(0));
3724 VisitForStackValue(args->at(1)); 3727 VisitForStackValue(args->at(1));
3725 VisitForStackValue(args->at(2)); 3728 VisitForStackValue(args->at(2));
3726 VisitForStackValue(args->at(3)); 3729 VisitForStackValue(args->at(3));
3727 __ CallStub(&stub); 3730 __ CallStub(&stub);
3728 context()->Plug(r0); 3731 context()->Plug(r3);
3729 } 3732 }
3730 3733
3731 3734
3732 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3735 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3733 ZoneList<Expression*>* args = expr->arguments(); 3736 ZoneList<Expression*>* args = expr->arguments();
3734 DCHECK(args->length() == 1); 3737 DCHECK(args->length() == 1);
3735 VisitForAccumulatorValue(args->at(0)); // Load the object. 3738 VisitForAccumulatorValue(args->at(0)); // Load the object.
3736 3739
3737 Label done; 3740 Label done;
3738 // If the object is a smi, return the object. 3741 // If the object is a smi, return the object.
3739 __ JumpIfSmi(r0, &done); 3742 __ JumpIfSmi(r3, &done);
3740 // If the object is not a value type, return the object. 3743 // If the object is not a value type, return the object.
3741 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); 3744 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3742 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq); 3745 __ bne(&done);
3746 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3743 3747
3744 __ bind(&done); 3748 __ bind(&done);
3745 context()->Plug(r0); 3749 context()->Plug(r3);
3746 } 3750 }
3747 3751
3748 3752
3749 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3753 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3750 ZoneList<Expression*>* args = expr->arguments(); 3754 ZoneList<Expression*>* args = expr->arguments();
3751 DCHECK(args->length() == 2); 3755 DCHECK(args->length() == 2);
3752 DCHECK_NE(NULL, args->at(1)->AsLiteral()); 3756 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3753 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3757 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3754 3758
3755 VisitForAccumulatorValue(args->at(0)); // Load the object. 3759 VisitForAccumulatorValue(args->at(0)); // Load the object.
3756 3760
3757 Label runtime, done, not_date_object; 3761 Label runtime, done, not_date_object;
3758 Register object = r0; 3762 Register object = r3;
3759 Register result = r0; 3763 Register result = r3;
3760 Register scratch0 = r9; 3764 Register scratch0 = r11;
3761 Register scratch1 = r1; 3765 Register scratch1 = r4;
3762 3766
3763 __ JumpIfSmi(object, &not_date_object); 3767 __ JumpIfSmi(object, &not_date_object);
3764 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE); 3768 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3765 __ b(ne, &not_date_object); 3769 __ bne(&not_date_object);
3766 3770
3767 if (index->value() == 0) { 3771 if (index->value() == 0) {
3768 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); 3772 __ LoadP(result, FieldMemOperand(object, JSDate::kValueOffset));
3769 __ jmp(&done); 3773 __ b(&done);
3770 } else { 3774 } else {
3771 if (index->value() < JSDate::kFirstUncachedField) { 3775 if (index->value() < JSDate::kFirstUncachedField) {
3772 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 3776 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3773 __ mov(scratch1, Operand(stamp)); 3777 __ mov(scratch1, Operand(stamp));
3774 __ ldr(scratch1, MemOperand(scratch1)); 3778 __ LoadP(scratch1, MemOperand(scratch1));
3775 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); 3779 __ LoadP(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3776 __ cmp(scratch1, scratch0); 3780 __ cmp(scratch1, scratch0);
3777 __ b(ne, &runtime); 3781 __ bne(&runtime);
3778 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + 3782 __ LoadP(result,
3779 kPointerSize * index->value())); 3783 FieldMemOperand(object, JSDate::kValueOffset +
3780 __ jmp(&done); 3784 kPointerSize * index->value()),
3785 scratch0);
3786 __ b(&done);
3781 } 3787 }
3782 __ bind(&runtime); 3788 __ bind(&runtime);
3783 __ PrepareCallCFunction(2, scratch1); 3789 __ PrepareCallCFunction(2, scratch1);
3784 __ mov(r1, Operand(index)); 3790 __ LoadSmiLiteral(r4, index);
3785 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 3791 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3786 __ jmp(&done); 3792 __ b(&done);
3787 } 3793 }
3788 3794
3789 __ bind(&not_date_object); 3795 __ bind(&not_date_object);
3790 __ CallRuntime(Runtime::kThrowNotDateError, 0); 3796 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3791 __ bind(&done); 3797 __ bind(&done);
3792 context()->Plug(r0); 3798 context()->Plug(r3);
3793 } 3799 }
3794 3800
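// Cached date fields are trusted only while the JSDate's stamp equals the
// isolate-wide date_cache_stamp; a mismatch (the global stamp is bumped
// when the date cache is reset, e.g. on a timezone change) falls through
// to the get_date_field C function. A sketch of the fast path, with a
// hypothetical object layout:
//
//   #include <cstdint>
//   struct DateLike {
//     intptr_t cache_stamp;  // stamp captured when fields were cached
//     intptr_t fields[10];   // [0] = time value, then cached year/month/...
//   };
//   static bool TryGetField(const DateLike* d, intptr_t global_stamp,
//                           int index, intptr_t* out) {
//     if (index != 0 && d->cache_stamp != global_stamp) return false;
//     *out = d->fields[index];  // field 0 (the time value) is never stale
//     return true;
//   }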
3795 3801
3796 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3802 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3797 ZoneList<Expression*>* args = expr->arguments(); 3803 ZoneList<Expression*>* args = expr->arguments();
3798 DCHECK_EQ(3, args->length()); 3804 DCHECK_EQ(3, args->length());
3799 3805
3800 Register string = r0; 3806 Register string = r3;
3801 Register index = r1; 3807 Register index = r4;
3802 Register value = r2; 3808 Register value = r5;
3803 3809
3804 VisitForStackValue(args->at(0)); // index 3810 VisitForStackValue(args->at(0)); // index
3805 VisitForStackValue(args->at(1)); // value 3811 VisitForStackValue(args->at(1)); // value
3806 VisitForAccumulatorValue(args->at(2)); // string 3812 VisitForAccumulatorValue(args->at(2)); // string
3807 __ Pop(index, value); 3813 __ Pop(index, value);
3808 3814
3809 if (FLAG_debug_code) { 3815 if (FLAG_debug_code) {
3810 __ SmiTst(value); 3816 __ TestIfSmi(value, r0);
3811 __ Check(eq, kNonSmiValue); 3817 __ Check(eq, kNonSmiValue, cr0);
3812 __ SmiTst(index); 3818 __ TestIfSmi(index, r0);
3813 __ Check(eq, kNonSmiIndex); 3819 __ Check(eq, kNonSmiIndex, cr0);
3814 __ SmiUntag(index, index); 3820 __ SmiUntag(index, index);
3815 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 3821 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3816 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); 3822 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3817 __ SmiTag(index, index); 3823 __ SmiTag(index, index);
3818 } 3824 }
3819 3825
3820 __ SmiUntag(value, value); 3826 __ SmiUntag(value);
3821 __ add(ip, 3827 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3822 string, 3828 __ SmiToByteArrayOffset(r0, index);
3823 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 3829 __ stbx(value, MemOperand(ip, r0));
3824 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize));
3825 context()->Plug(string); 3830 context()->Plug(string);
3826 } 3831 }
3827 3832
3828 3833
3829 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3834 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3830 ZoneList<Expression*>* args = expr->arguments(); 3835 ZoneList<Expression*>* args = expr->arguments();
3831 DCHECK_EQ(3, args->length()); 3836 DCHECK_EQ(3, args->length());
3832 3837
3833 Register string = r0; 3838 Register string = r3;
3834 Register index = r1; 3839 Register index = r4;
3835 Register value = r2; 3840 Register value = r5;
3836 3841
3837 VisitForStackValue(args->at(0)); // index 3842 VisitForStackValue(args->at(0)); // index
3838 VisitForStackValue(args->at(1)); // value 3843 VisitForStackValue(args->at(1)); // value
3839 VisitForAccumulatorValue(args->at(2)); // string 3844 VisitForAccumulatorValue(args->at(2)); // string
3840 __ Pop(index, value); 3845 __ Pop(index, value);
3841 3846
3842 if (FLAG_debug_code) { 3847 if (FLAG_debug_code) {
3843 __ SmiTst(value); 3848 __ TestIfSmi(value, r0);
3844 __ Check(eq, kNonSmiValue); 3849 __ Check(eq, kNonSmiValue, cr0);
3845 __ SmiTst(index); 3850 __ TestIfSmi(index, r0);
3846 __ Check(eq, kNonSmiIndex); 3851 __ Check(eq, kNonSmiIndex, cr0);
3847 __ SmiUntag(index, index); 3852 __ SmiUntag(index, index);
3848 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 3853 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3849 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); 3854 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3850 __ SmiTag(index, index); 3855 __ SmiTag(index, index);
3851 } 3856 }
3852 3857
3853 __ SmiUntag(value, value); 3858 __ SmiUntag(value);
3854 __ add(ip, 3859 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3855 string, 3860 __ SmiToShortArrayOffset(r0, index);
3856 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 3861 __ sthx(value, MemOperand(ip, r0));
3857 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3858 __ strh(value, MemOperand(ip, index));
3859 context()->Plug(string); 3862 context()->Plug(string);
3860 } 3863 }
3861 3864
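// In both SetChar helpers the index arrives smi-encoded, so the address
// computation untags it while forming the byte offset: the ARM original
// folded the untag into the addressing mode (LSR #kSmiTagSize; the
// two-byte variant uses the smi directly, since the tag shift and the
// element size cancel), while PPC materializes the offset in r0 via
// SmiToByteArrayOffset / SmiToShortArrayOffset and stores with stbx/sthx.
// A sketch of the arithmetic, assuming the usual smi encodings (31-bit
// payload << 1, or << 32 on 64-bit):
//
//   #include <cstdint>
//   static intptr_t OneByteOffset(intptr_t smi, int smi_shift) {
//     return smi >> smi_shift;        // untagged index, 1 byte per element
//   }
//   static intptr_t TwoByteOffset(intptr_t smi, int smi_shift) {
//     return smi >> (smi_shift - 1);  // untagged index * 2
//   }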
3862 3865
3863
3864 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3866 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3865 // Load the arguments on the stack and call the runtime function. 3867 // Load the arguments on the stack and call the runtime function.
3866 ZoneList<Expression*>* args = expr->arguments(); 3868 ZoneList<Expression*>* args = expr->arguments();
3867 DCHECK(args->length() == 2); 3869 DCHECK(args->length() == 2);
3868 VisitForStackValue(args->at(0)); 3870 VisitForStackValue(args->at(0));
3869 VisitForStackValue(args->at(1)); 3871 VisitForStackValue(args->at(1));
3870 MathPowStub stub(isolate(), MathPowStub::ON_STACK); 3872 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3871 __ CallStub(&stub); 3873 __ CallStub(&stub);
3872 context()->Plug(r0); 3874 context()->Plug(r3);
3873 } 3875 }
3874 3876
3875 3877
3876 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3878 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3877 ZoneList<Expression*>* args = expr->arguments(); 3879 ZoneList<Expression*>* args = expr->arguments();
3878 DCHECK(args->length() == 2); 3880 DCHECK(args->length() == 2);
3879 VisitForStackValue(args->at(0)); // Load the object. 3881 VisitForStackValue(args->at(0)); // Load the object.
3880 VisitForAccumulatorValue(args->at(1)); // Load the value. 3882 VisitForAccumulatorValue(args->at(1)); // Load the value.
3881 __ pop(r1); // r0 = value. r1 = object. 3883 __ pop(r4); // r3 = value. r4 = object.
3882 3884
3883 Label done; 3885 Label done;
3884 // If the object is a smi, return the value. 3886 // If the object is a smi, return the value.
3885 __ JumpIfSmi(r1, &done); 3887 __ JumpIfSmi(r4, &done);
3886 3888
3887 // If the object is not a value type, return the value. 3889 // If the object is not a value type, return the value.
3888 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); 3890 __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
3889 __ b(ne, &done); 3891 __ bne(&done);
3890 3892
3891 // Store the value. 3893 // Store the value.
3892 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); 3894 __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
3893 // Update the write barrier. Save the value as it will be 3895 // Update the write barrier. Save the value as it will be
3894 // overwritten by the write barrier code and is needed afterward. 3896 // overwritten by the write barrier code and is needed afterward.
3895 __ mov(r2, r0); 3897 __ mr(r5, r3);
3896 __ RecordWriteField( 3898 __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
3897 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); 3899 kDontSaveFPRegs);
3898 3900
3899 __ bind(&done); 3901 __ bind(&done);
3900 context()->Plug(r0); 3902 context()->Plug(r3);
3901 } 3903 }
3902 3904
3903 3905
3904 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3906 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3905 ZoneList<Expression*>* args = expr->arguments(); 3907 ZoneList<Expression*>* args = expr->arguments();
3906 DCHECK_EQ(args->length(), 1); 3908 DCHECK_EQ(args->length(), 1);
3907 // Load the argument into r0 and call the stub. 3909 // Load the argument into r3 and call the stub.
3908 VisitForAccumulatorValue(args->at(0)); 3910 VisitForAccumulatorValue(args->at(0));
3909 3911
3910 NumberToStringStub stub(isolate()); 3912 NumberToStringStub stub(isolate());
3911 __ CallStub(&stub); 3913 __ CallStub(&stub);
3912 context()->Plug(r0); 3914 context()->Plug(r3);
3913 } 3915 }
3914 3916
3915 3917
3916 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3918 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3917 ZoneList<Expression*>* args = expr->arguments(); 3919 ZoneList<Expression*>* args = expr->arguments();
3918 DCHECK(args->length() == 1); 3920 DCHECK(args->length() == 1);
3919 VisitForAccumulatorValue(args->at(0)); 3921 VisitForAccumulatorValue(args->at(0));
3920 3922
3921 Label done; 3923 Label done;
3922 StringCharFromCodeGenerator generator(r0, r1); 3924 StringCharFromCodeGenerator generator(r3, r4);
3923 generator.GenerateFast(masm_); 3925 generator.GenerateFast(masm_);
3924 __ jmp(&done); 3926 __ b(&done);
3925 3927
3926 NopRuntimeCallHelper call_helper; 3928 NopRuntimeCallHelper call_helper;
3927 generator.GenerateSlow(masm_, call_helper); 3929 generator.GenerateSlow(masm_, call_helper);
3928 3930
3929 __ bind(&done); 3931 __ bind(&done);
3930 context()->Plug(r1); 3932 context()->Plug(r4);
3931 } 3933 }
3932 3934
3933 3935
3934 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3936 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3935 ZoneList<Expression*>* args = expr->arguments(); 3937 ZoneList<Expression*>* args = expr->arguments();
3936 DCHECK(args->length() == 2); 3938 DCHECK(args->length() == 2);
3937 VisitForStackValue(args->at(0)); 3939 VisitForStackValue(args->at(0));
3938 VisitForAccumulatorValue(args->at(1)); 3940 VisitForAccumulatorValue(args->at(1));
3939 3941
3940 Register object = r1; 3942 Register object = r4;
3941 Register index = r0; 3943 Register index = r3;
3942 Register result = r3; 3944 Register result = r6;
3943 3945
3944 __ pop(object); 3946 __ pop(object);
3945 3947
3946 Label need_conversion; 3948 Label need_conversion;
3947 Label index_out_of_range; 3949 Label index_out_of_range;
3948 Label done; 3950 Label done;
3949 StringCharCodeAtGenerator generator(object, 3951 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
3950 index, 3952 &need_conversion, &index_out_of_range,
3951 result,
3952 &need_conversion,
3953 &need_conversion,
3954 &index_out_of_range,
3955 STRING_INDEX_IS_NUMBER); 3953 STRING_INDEX_IS_NUMBER);
3956 generator.GenerateFast(masm_); 3954 generator.GenerateFast(masm_);
3957 __ jmp(&done); 3955 __ b(&done);
3958 3956
3959 __ bind(&index_out_of_range); 3957 __ bind(&index_out_of_range);
3960 // When the index is out of range, the spec requires us to return 3958 // When the index is out of range, the spec requires us to return
3961 // NaN. 3959 // NaN.
3962 __ LoadRoot(result, Heap::kNanValueRootIndex); 3960 __ LoadRoot(result, Heap::kNanValueRootIndex);
3963 __ jmp(&done); 3961 __ b(&done);
3964 3962
3965 __ bind(&need_conversion); 3963 __ bind(&need_conversion);
3966 // Load the undefined value into the result register, which will 3964 // Load the undefined value into the result register, which will
3967 // trigger conversion. 3965 // trigger conversion.
3968 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 3966 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3969 __ jmp(&done); 3967 __ b(&done);
3970 3968
3971 NopRuntimeCallHelper call_helper; 3969 NopRuntimeCallHelper call_helper;
3972 generator.GenerateSlow(masm_, call_helper); 3970 generator.GenerateSlow(masm_, call_helper);
3973 3971
3974 __ bind(&done); 3972 __ bind(&done);
3975 context()->Plug(result); 3973 context()->Plug(result);
3976 } 3974 }
3977 3975
3978 3976
3979 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3977 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3980 ZoneList<Expression*>* args = expr->arguments(); 3978 ZoneList<Expression*>* args = expr->arguments();
3981 DCHECK(args->length() == 2); 3979 DCHECK(args->length() == 2);
3982 VisitForStackValue(args->at(0)); 3980 VisitForStackValue(args->at(0));
3983 VisitForAccumulatorValue(args->at(1)); 3981 VisitForAccumulatorValue(args->at(1));
3984 3982
3985 Register object = r1; 3983 Register object = r4;
3986 Register index = r0; 3984 Register index = r3;
3987 Register scratch = r3; 3985 Register scratch = r6;
3988 Register result = r0; 3986 Register result = r3;
3989 3987
3990 __ pop(object); 3988 __ pop(object);
3991 3989
3992 Label need_conversion; 3990 Label need_conversion;
3993 Label index_out_of_range; 3991 Label index_out_of_range;
3994 Label done; 3992 Label done;
3995 StringCharAtGenerator generator(object, 3993 StringCharAtGenerator generator(object, index, scratch, result,
3996 index, 3994 &need_conversion, &need_conversion,
3997 scratch, 3995 &index_out_of_range, STRING_INDEX_IS_NUMBER);
3998 result,
3999 &need_conversion,
4000 &need_conversion,
4001 &index_out_of_range,
4002 STRING_INDEX_IS_NUMBER);
4003 generator.GenerateFast(masm_); 3996 generator.GenerateFast(masm_);
4004 __ jmp(&done); 3997 __ b(&done);
4005 3998
4006 __ bind(&index_out_of_range); 3999 __ bind(&index_out_of_range);
4007 // When the index is out of range, the spec requires us to return 4000 // When the index is out of range, the spec requires us to return
4008 // the empty string. 4001 // the empty string.
4009 __ LoadRoot(result, Heap::kempty_stringRootIndex); 4002 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4010 __ jmp(&done); 4003 __ b(&done);
4011 4004
4012 __ bind(&need_conversion); 4005 __ bind(&need_conversion);
4013 // Move smi zero into the result register, which will trigger 4006 // Move smi zero into the result register, which will trigger
4014 // conversion. 4007 // conversion.
4015 __ mov(result, Operand(Smi::FromInt(0))); 4008 __ LoadSmiLiteral(result, Smi::FromInt(0));
4016 __ jmp(&done); 4009 __ b(&done);
4017 4010
4018 NopRuntimeCallHelper call_helper; 4011 NopRuntimeCallHelper call_helper;
4019 generator.GenerateSlow(masm_, call_helper); 4012 generator.GenerateSlow(masm_, call_helper);
4020 4013
4021 __ bind(&done); 4014 __ bind(&done);
4022 context()->Plug(result); 4015 context()->Plug(result);
4023 } 4016 }
4024 4017
4025 4018
4026 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 4019 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4027 ZoneList<Expression*>* args = expr->arguments(); 4020 ZoneList<Expression*>* args = expr->arguments();
4028 DCHECK_EQ(2, args->length()); 4021 DCHECK_EQ(2, args->length());
4029 VisitForStackValue(args->at(0)); 4022 VisitForStackValue(args->at(0));
4030 VisitForAccumulatorValue(args->at(1)); 4023 VisitForAccumulatorValue(args->at(1));
4031 4024
4032 __ pop(r1); 4025 __ pop(r4);
4033 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); 4026 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4034 __ CallStub(&stub); 4027 __ CallStub(&stub);
4035 context()->Plug(r0); 4028 context()->Plug(r3);
4036 } 4029 }
4037 4030
4038 4031
4039 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 4032 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
4040 ZoneList<Expression*>* args = expr->arguments(); 4033 ZoneList<Expression*>* args = expr->arguments();
4041 DCHECK_EQ(2, args->length()); 4034 DCHECK_EQ(2, args->length());
4042 VisitForStackValue(args->at(0)); 4035 VisitForStackValue(args->at(0));
4043 VisitForStackValue(args->at(1)); 4036 VisitForStackValue(args->at(1));
4044 4037
4045 StringCompareStub stub(isolate()); 4038 StringCompareStub stub(isolate());
4046 __ CallStub(&stub); 4039 __ CallStub(&stub);
4047 context()->Plug(r0); 4040 context()->Plug(r3);
4048 } 4041 }
4049 4042
4050 4043
4051 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 4044 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4052 ZoneList<Expression*>* args = expr->arguments(); 4045 ZoneList<Expression*>* args = expr->arguments();
4053 DCHECK(args->length() >= 2); 4046 DCHECK(args->length() >= 2);
4054 4047
4055 int arg_count = args->length() - 2; // 2 ~ receiver and function. 4048 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4056 for (int i = 0; i < arg_count + 1; i++) { 4049 for (int i = 0; i < arg_count + 1; i++) {
4057 VisitForStackValue(args->at(i)); 4050 VisitForStackValue(args->at(i));
4058 } 4051 }
4059 VisitForAccumulatorValue(args->last()); // Function. 4052 VisitForAccumulatorValue(args->last()); // Function.
4060 4053
4061 Label runtime, done; 4054 Label runtime, done;
4062 // Check for non-function argument (including proxy). 4055 // Check for non-function argument (including proxy).
4063 __ JumpIfSmi(r0, &runtime); 4056 __ JumpIfSmi(r3, &runtime);
4064 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); 4057 __ CompareObjectType(r3, r4, r4, JS_FUNCTION_TYPE);
4065 __ b(ne, &runtime); 4058 __ bne(&runtime);
4066 4059
4067 // InvokeFunction requires the function in r1. Move it in there. 4060 // InvokeFunction requires the function in r4. Move it in there.
4068 __ mov(r1, result_register()); 4061 __ mr(r4, result_register());
4069 ParameterCount count(arg_count); 4062 ParameterCount count(arg_count);
4070 __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper()); 4063 __ InvokeFunction(r4, count, CALL_FUNCTION, NullCallWrapper());
4071 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4064 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4072 __ jmp(&done); 4065 __ b(&done);
4073 4066
4074 __ bind(&runtime); 4067 __ bind(&runtime);
4075 __ push(r0); 4068 __ push(r3);
4076 __ CallRuntime(Runtime::kCall, args->length()); 4069 __ CallRuntime(Runtime::kCall, args->length());
4077 __ bind(&done); 4070 __ bind(&done);
4078 4071
4079 context()->Plug(r0); 4072 context()->Plug(r3);
4080 } 4073 }
4081 4074
4082 4075
4083 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { 4076 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4084 RegExpConstructResultStub stub(isolate()); 4077 RegExpConstructResultStub stub(isolate());
4085 ZoneList<Expression*>* args = expr->arguments(); 4078 ZoneList<Expression*>* args = expr->arguments();
4086 DCHECK(args->length() == 3); 4079 DCHECK(args->length() == 3);
4087 VisitForStackValue(args->at(0)); 4080 VisitForStackValue(args->at(0));
4088 VisitForStackValue(args->at(1)); 4081 VisitForStackValue(args->at(1));
4089 VisitForAccumulatorValue(args->at(2)); 4082 VisitForAccumulatorValue(args->at(2));
4090 __ pop(r1); 4083 __ Pop(r5, r4);
4091 __ pop(r2);
4092 __ CallStub(&stub); 4084 __ CallStub(&stub);
4093 context()->Plug(r0); 4085 context()->Plug(r3);
4094 } 4086 }
4095 4087
4096 4088
4097 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { 4089 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4098 ZoneList<Expression*>* args = expr->arguments(); 4090 ZoneList<Expression*>* args = expr->arguments();
4099 DCHECK_EQ(2, args->length()); 4091 DCHECK_EQ(2, args->length());
4100 DCHECK_NE(NULL, args->at(0)->AsLiteral()); 4092 DCHECK_NE(NULL, args->at(0)->AsLiteral());
4101 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); 4093 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4102 4094
4103 Handle<FixedArray> jsfunction_result_caches( 4095 Handle<FixedArray> jsfunction_result_caches(
4104 isolate()->native_context()->jsfunction_result_caches()); 4096 isolate()->native_context()->jsfunction_result_caches());
4105 if (jsfunction_result_caches->length() <= cache_id) { 4097 if (jsfunction_result_caches->length() <= cache_id) {
4106 __ Abort(kAttemptToUseUndefinedCache); 4098 __ Abort(kAttemptToUseUndefinedCache);
4107 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 4099 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4108 context()->Plug(r0); 4100 context()->Plug(r3);
4109 return; 4101 return;
4110 } 4102 }
4111 4103
4112 VisitForAccumulatorValue(args->at(1)); 4104 VisitForAccumulatorValue(args->at(1));
4113 4105
4114 Register key = r0; 4106 Register key = r3;
4115 Register cache = r1; 4107 Register cache = r4;
4116 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 4108 __ LoadP(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4117 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); 4109 __ LoadP(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4118 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 4110 __ LoadP(cache,
4119 __ ldr(cache, 4111 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4120 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 4112 __ LoadP(cache,
4121 4113 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)), r0);
4122 4114
4123 Label done, not_found; 4115 Label done, not_found;
4124 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); 4116 __ LoadP(r5, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4125 // r2 now holds finger offset as a smi. 4117 // r5 now holds finger offset as a smi.
4126 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 4118 __ addi(r6, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4127 // r3 now points to the start of fixed array elements. 4119 // r6 now points to the start of fixed array elements.
4128 __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex)); 4120 __ SmiToPtrArrayOffset(r5, r5);
4129 // Note side effect of PreIndex: r3 now points to the key of the pair. 4121 __ LoadPUX(r5, MemOperand(r6, r5));
4130 __ cmp(key, r2); 4122 // r6 now points to the key of the pair.
4131 __ b(ne, &not_found); 4123 __ cmp(key, r5);
4124 __ bne(&not_found);
4132 4125
4133 __ ldr(r0, MemOperand(r3, kPointerSize)); 4126 __ LoadP(r3, MemOperand(r6, kPointerSize));
4134 __ b(&done); 4127 __ b(&done);
4135 4128
4136 __ bind(&not_found); 4129 __ bind(&not_found);
4137 // Call runtime to perform the lookup. 4130 // Call runtime to perform the lookup.
4138 __ Push(cache, key); 4131 __ Push(cache, key);
4139 __ CallRuntime(Runtime::kGetFromCache, 2); 4132 __ CallRuntime(Runtime::kGetFromCache, 2);
4140 4133
4141 __ bind(&done); 4134 __ bind(&done);
4142 context()->Plug(r0); 4135 context()->Plug(r3);
4143 } 4136 }
4144 4137
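// The fast path probes only the cache's "finger", the smi offset of the
// most recently used key/value pair inside the backing FixedArray, and
// calls Runtime::kGetFromCache on any miss (the runtime side also moves
// the finger). A sketch of the probe, assuming a simplified flat pair
// array rather than the real JSFunctionResultCache layout:
//
//   #include <cstddef>
//   struct CacheLike {
//     size_t finger;          // index of the cached key within 'slots'
//     const void* slots[64];  // key at slots[i], value at slots[i + 1]
//   };
//   static const void* Probe(const CacheLike* c, const void* key) {
//     if (c->slots[c->finger] != key) return nullptr;  // take slow path
//     return c->slots[c->finger + 1];                  // cache hit
//   }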
4145 4138
4146 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 4139 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4147 ZoneList<Expression*>* args = expr->arguments(); 4140 ZoneList<Expression*>* args = expr->arguments();
4148 VisitForAccumulatorValue(args->at(0)); 4141 VisitForAccumulatorValue(args->at(0));
4149 4142
4150 Label materialize_true, materialize_false; 4143 Label materialize_true, materialize_false;
4151 Label* if_true = NULL; 4144 Label* if_true = NULL;
4152 Label* if_false = NULL; 4145 Label* if_false = NULL;
4153 Label* fall_through = NULL; 4146 Label* fall_through = NULL;
4154 context()->PrepareTest(&materialize_true, &materialize_false, 4147 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4155 &if_true, &if_false, &fall_through); 4148 &if_false, &fall_through);
4156 4149
4157 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 4150 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4158 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask)); 4151 // PPC - assume ip is free
4152 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
4153 __ and_(r0, r3, ip);
4154 __ cmpi(r0, Operand::Zero());
4159 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4155 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4160 Split(eq, if_true, if_false, fall_through); 4156 Split(eq, if_true, if_false, fall_through);
4161 4157
4162 context()->Plug(if_true, if_false); 4158 context()->Plug(if_true, if_false);
4163 } 4159 }
4164 4160
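// A string's hash field can cache an array index in its payload bits; the
// masked test above answers "is one cached?" with a single AND, where all
// mask bits clear means yes (hence Split(eq, ...)). Sketch, with the mask
// left symbolic:
//
//   #include <cstdint>
//   static bool HasCachedArrayIndex(uint32_t hash_field, uint32_t mask) {
//     return (hash_field & mask) == 0;  // flag bits clear => index cached
//   }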
4165 4161
4166 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 4162 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4167 ZoneList<Expression*>* args = expr->arguments(); 4163 ZoneList<Expression*>* args = expr->arguments();
4168 DCHECK(args->length() == 1); 4164 DCHECK(args->length() == 1);
4169 VisitForAccumulatorValue(args->at(0)); 4165 VisitForAccumulatorValue(args->at(0));
4170 4166
4171 __ AssertString(r0); 4167 __ AssertString(r3);
4172 4168
4173 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); 4169 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
4174 __ IndexFromHash(r0, r0); 4170 __ IndexFromHash(r3, r3);
4175 4171
4176 context()->Plug(r0); 4172 context()->Plug(r3);
4177 } 4173 }
4178 4174
4179 4175
4180 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) { 4176 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4181 Label bailout, done, one_char_separator, long_separator, non_trivial_array, 4177 Label bailout, done, one_char_separator, long_separator, non_trivial_array,
4182 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop, 4178 not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
4183 one_char_separator_loop_entry, long_separator_loop; 4179 one_char_separator_loop_entry, long_separator_loop;
4184 ZoneList<Expression*>* args = expr->arguments(); 4180 ZoneList<Expression*>* args = expr->arguments();
4185 DCHECK(args->length() == 2); 4181 DCHECK(args->length() == 2);
4186 VisitForStackValue(args->at(1)); 4182 VisitForStackValue(args->at(1));
4187 VisitForAccumulatorValue(args->at(0)); 4183 VisitForAccumulatorValue(args->at(0));
4188 4184
4189 // All aliases of the same register have disjoint lifetimes. 4185 // All aliases of the same register have disjoint lifetimes.
4190 Register array = r0; 4186 Register array = r3;
4191 Register elements = no_reg; // Will be r0. 4187 Register elements = no_reg; // Will be r3.
4192 Register result = no_reg; // Will be r0. 4188 Register result = no_reg; // Will be r3.
4193 Register separator = r1; 4189 Register separator = r4;
4194 Register array_length = r2; 4190 Register array_length = r5;
4195 Register result_pos = no_reg; // Will be r2 4191 Register result_pos = no_reg; // Will be r5
4196 Register string_length = r3; 4192 Register string_length = r6;
4197 Register string = r4; 4193 Register string = r7;
4198 Register element = r5; 4194 Register element = r8;
4199 Register elements_end = r6; 4195 Register elements_end = r9;
4200 Register scratch = r9; 4196 Register scratch1 = r10;
4197 Register scratch2 = r11;
4201 4198
4202 // Separator operand is on the stack. 4199 // Separator operand is on the stack.
4203 __ pop(separator); 4200 __ pop(separator);
4204 4201
4205 // Check that the array is a JSArray. 4202 // Check that the array is a JSArray.
4206 __ JumpIfSmi(array, &bailout); 4203 __ JumpIfSmi(array, &bailout);
4207 __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE); 4204 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
4208 __ b(ne, &bailout); 4205 __ bne(&bailout);
4209 4206
4210 // Check that the array has fast elements. 4207 // Check that the array has fast elements.
4211 __ CheckFastElements(scratch, array_length, &bailout); 4208 __ CheckFastElements(scratch1, scratch2, &bailout);
4212 4209
4213 // If the array has length zero, return the empty string. 4210 // If the array has length zero, return the empty string.
4214 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); 4211 __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4215 __ SmiUntag(array_length, SetCC); 4212 __ SmiUntag(array_length);
4216 __ b(ne, &non_trivial_array); 4213 __ cmpi(array_length, Operand::Zero());
4217 __ LoadRoot(r0, Heap::kempty_stringRootIndex); 4214 __ bne(&non_trivial_array);
4215 __ LoadRoot(r3, Heap::kempty_stringRootIndex);
4218 __ b(&done); 4216 __ b(&done);
4219 4217
4220 __ bind(&non_trivial_array); 4218 __ bind(&non_trivial_array);
4221 4219
4222 // Get the FixedArray containing array's elements. 4220 // Get the FixedArray containing array's elements.
4223 elements = array; 4221 elements = array;
4224 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset)); 4222 __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4225 array = no_reg; // End of array's live range. 4223 array = no_reg; // End of array's live range.
4226 4224
4227 // Check that all array elements are sequential one-byte strings, and 4225 // Check that all array elements are sequential one-byte strings, and
4228 // accumulate the sum of their lengths, as a smi-encoded value. 4226 // accumulate the sum of their lengths, as a smi-encoded value.
4229 __ mov(string_length, Operand::Zero()); 4227 __ li(string_length, Operand::Zero());
4230 __ add(element, 4228 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4231 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 4229 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4232 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 4230 __ add(elements_end, element, elements_end);
4233 // Loop condition: while (element < elements_end). 4231 // Loop condition: while (element < elements_end).
4234 // Live values in registers: 4232 // Live values in registers:
4235 // elements: Fixed array of strings. 4233 // elements: Fixed array of strings.
4236 // array_length: Length of the fixed array of strings (not smi) 4234 // array_length: Length of the fixed array of strings (not smi)
4237 // separator: Separator string 4235 // separator: Separator string
4238 // string_length: Accumulated sum of string lengths (smi). 4236 // string_length: Accumulated sum of string lengths (smi).
4239 // element: Current array element. 4237 // element: Current array element.
4240 // elements_end: Array end. 4238 // elements_end: Array end.
4241 if (generate_debug_code_) { 4239 if (generate_debug_code_) {
4242 __ cmp(array_length, Operand::Zero()); 4240 __ cmpi(array_length, Operand::Zero());
4243 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin); 4241 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4244 } 4242 }
4245 __ bind(&loop); 4243 __ bind(&loop);
4246 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4244 __ LoadP(string, MemOperand(element));
4245 __ addi(element, element, Operand(kPointerSize));
4247 __ JumpIfSmi(string, &bailout); 4246 __ JumpIfSmi(string, &bailout);
4248 __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset)); 4247 __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4249 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 4248 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4250 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout); 4249 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4251 __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); 4250 __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4252 __ add(string_length, string_length, Operand(scratch), SetCC); 4251
4253 __ b(vs, &bailout); 4252 __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
4253 r0);
4254 __ BranchOnOverflow(&bailout);
4255
4254 __ cmp(element, elements_end); 4256 __ cmp(element, elements_end);
4255 __ b(lt, &loop); 4257 __ blt(&loop);
4256 4258
4257 // If array_length is 1, return elements[0], a string. 4259 // If array_length is 1, return elements[0], a string.
4258 __ cmp(array_length, Operand(1)); 4260 __ cmpi(array_length, Operand(1));
4259 __ b(ne, &not_size_one_array); 4261 __ bne(&not_size_one_array);
4260 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); 4262 __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
4261 __ b(&done); 4263 __ b(&done);
4262 4264
4263 __ bind(&not_size_one_array); 4265 __ bind(&not_size_one_array);
4264 4266
4265 // Live values in registers: 4267 // Live values in registers:
4266 // separator: Separator string 4268 // separator: Separator string
4267 // array_length: Length of the array. 4269 // array_length: Length of the array.
4268 // string_length: Sum of string lengths (smi). 4270 // string_length: Sum of string lengths (smi).
4269 // elements: FixedArray of strings. 4271 // elements: FixedArray of strings.
4270 4272
4271 // Check that the separator is a flat one-byte string. 4273 // Check that the separator is a flat one-byte string.
4272 __ JumpIfSmi(separator, &bailout); 4274 __ JumpIfSmi(separator, &bailout);
4273 __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset)); 4275 __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4274 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); 4276 __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4275 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout); 4277 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4276 4278
4277 // Add (separator length times array_length) - separator length to the 4279 // Add (separator length times array_length) - separator length to the
4278 // string_length to get the length of the result string. array_length is not 4280 // string_length to get the length of the result string.
4279 // smi but the other values are, so the result is a smi 4281 __ LoadP(scratch1,
4280 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 4282 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4281 __ sub(string_length, string_length, Operand(scratch)); 4283 __ sub(string_length, string_length, scratch1);
4282 __ smull(scratch, ip, array_length, scratch); 4284 #if V8_TARGET_ARCH_PPC64
4285 __ SmiUntag(scratch1, scratch1);
4286 __ Mul(scratch2, array_length, scratch1);
4283 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are 4287 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4284 // zero. 4288 // zero.
4285 __ cmp(ip, Operand::Zero()); 4289 __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
4286 __ b(ne, &bailout); 4290 __ bne(&bailout, cr0);
4287 __ tst(scratch, Operand(0x80000000)); 4291 __ SmiTag(scratch2, scratch2);
4288 __ b(ne, &bailout); 4292 #else
4289 __ add(string_length, string_length, Operand(scratch), SetCC); 4293 // array_length is not a smi but the other values are, so the result is a smi.
4290 __ b(vs, &bailout); 4294 __ mullw(scratch2, array_length, scratch1);
4295 __ mulhw(ip, array_length, scratch1);
4296 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4297 // zero.
4298 __ cmpi(ip, Operand::Zero());
4299 __ bne(&bailout);
4300 __ cmpwi(scratch2, Operand::Zero());
4301 __ blt(&bailout);
4302 #endif
4303
4304 __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
4305 r0);
4306 __ BranchOnOverflow(&bailout);
4291 __ SmiUntag(string_length); 4307 __ SmiUntag(string_length);
4292 4308
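The length computation above folds the separators in as string_length + separator_length * (array_length - 1), evaluated as (string_length - separator_length) + separator_length * array_length so a single multiply suffices. A minimal C++ sketch of the 32-bit overflow test the mullw/mulhw pair performs (assuming 31-bit smi payloads with a low zero tag bit; the helper name is illustrative):

    #include <cstdint>

    // The product of a tagged smi and an untagged count is itself a valid
    // smi only if the upper 33 bits of the full 64-bit product are zero:
    // the high word (mulhw) must be 0 and the low word (mullw) non-negative.
    bool SmiTimesCountFitsInSmi(int32_t tagged_separator_length,
                                int32_t untagged_array_length) {
      int64_t product = static_cast<int64_t>(tagged_separator_length) *
                        untagged_array_length;
      int32_t low = static_cast<int32_t>(product);         // mullw result
      int32_t high = static_cast<int32_t>(product >> 32);  // mulhw result
      return high == 0 && low >= 0;
    }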
4293 // Get first element in the array to free up the elements register to be used 4309 // Get first element in the array to free up the elements register to be used
4294 // for the result. 4310 // for the result.
4295 __ add(element, 4311 __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4296 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4297 result = elements; // End of live range for elements. 4312 result = elements; // End of live range for elements.
4298 elements = no_reg; 4313 elements = no_reg;
4299 // Live values in registers: 4314 // Live values in registers:
4300 // element: First array element 4315 // element: First array element
4301 // separator: Separator string 4316 // separator: Separator string
4302 // string_length: Length of result string (not smi) 4317 // string_length: Length of result string (not smi)
4303 // array_length: Length of the array. 4318 // array_length: Length of the array.
4304 __ AllocateOneByteString(result, string_length, scratch, 4319 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4305 string, // used as scratch 4320 elements_end, &bailout);
4306 elements_end, // used as scratch
4307 &bailout);
4308 // Prepare for looping. Set up elements_end to end of the array. Set 4321 // Prepare for looping. Set up elements_end to end of the array. Set
4309 // result_pos to the position in the result at which to write the first 4322 // result_pos to the position in the result at which to write the first
4310 // character. 4323 // character.
4311 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); 4324 __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
4325 __ add(elements_end, element, elements_end);
4312 result_pos = array_length; // End of live range for array_length. 4326 result_pos = array_length; // End of live range for array_length.
4313 array_length = no_reg; 4327 array_length = no_reg;
4314 __ add(result_pos, 4328 __ addi(result_pos, result,
4315 result, 4329 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4316 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4317 4330
4318 // Check the length of the separator. 4331 // Check the length of the separator.
4319 __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 4332 __ LoadP(scratch1,
4320 __ cmp(scratch, Operand(Smi::FromInt(1))); 4333 FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4321 __ b(eq, &one_char_separator); 4334 __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
4322 __ b(gt, &long_separator); 4335 __ beq(&one_char_separator);
4336 __ bgt(&long_separator);
4323 4337
4324 // Empty separator case 4338 // Empty separator case
4325 __ bind(&empty_separator_loop); 4339 __ bind(&empty_separator_loop);
4326 // Live values in registers: 4340 // Live values in registers:
4327 // result_pos: the position to which we are currently copying characters. 4341 // result_pos: the position to which we are currently copying characters.
4328 // element: Current array element. 4342 // element: Current array element.
4329 // elements_end: Array end. 4343 // elements_end: Array end.
4330 4344
4331 // Copy next array element to the result. 4345 // Copy next array element to the result.
4332 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4346 __ LoadP(string, MemOperand(element));
4333 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4347 __ addi(element, element, Operand(kPointerSize));
4348 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4334 __ SmiUntag(string_length); 4349 __ SmiUntag(string_length);
4335 __ add(string, 4350 __ addi(string, string,
4336 string, 4351 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4337 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4352 __ CopyBytes(string, result_pos, string_length, scratch1);
4338 __ CopyBytes(string, result_pos, string_length, scratch);
4339 __ cmp(element, elements_end); 4353 __ cmp(element, elements_end);
4340 __ b(lt, &empty_separator_loop); // End while (element < elements_end). 4354 __ blt(&empty_separator_loop); // End while (element < elements_end).
4341 DCHECK(result.is(r0)); 4355 DCHECK(result.is(r3));
4342 __ b(&done); 4356 __ b(&done);
4343 4357
4344 // One-character separator case 4358 // One-character separator case
4345 __ bind(&one_char_separator); 4359 __ bind(&one_char_separator);
4346 // Replace separator with its one-byte character value. 4360 // Replace separator with its one-byte character value.
4347 __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 4361 __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4348 // Jump into the loop after the code that copies the separator, so the first 4362 // Jump into the loop after the code that copies the separator, so the first
4349 // element is not preceded by a separator. 4363 // element is not preceded by a separator.
4350 __ jmp(&one_char_separator_loop_entry); 4364 __ b(&one_char_separator_loop_entry);
4351 4365
4352 __ bind(&one_char_separator_loop); 4366 __ bind(&one_char_separator_loop);
4353 // Live values in registers: 4367 // Live values in registers:
4354 // result_pos: the position to which we are currently copying characters. 4368 // result_pos: the position to which we are currently copying characters.
4355 // element: Current array element. 4369 // element: Current array element.
4356 // elements_end: Array end. 4370 // elements_end: Array end.
4357 // separator: Single separator one-byte char (in lower byte). 4371 // separator: Single separator one-byte char (in lower byte).
4358 4372
4359 // Copy the separator character to the result. 4373 // Copy the separator character to the result.
4360 __ strb(separator, MemOperand(result_pos, 1, PostIndex)); 4374 __ stb(separator, MemOperand(result_pos));
4375 __ addi(result_pos, result_pos, Operand(1));
4361 4376
4362 // Copy next array element to the result. 4377 // Copy next array element to the result.
4363 __ bind(&one_char_separator_loop_entry); 4378 __ bind(&one_char_separator_loop_entry);
4364 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4379 __ LoadP(string, MemOperand(element));
4365 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4380 __ addi(element, element, Operand(kPointerSize));
4381 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4366 __ SmiUntag(string_length); 4382 __ SmiUntag(string_length);
4367 __ add(string, 4383 __ addi(string, string,
4368 string, 4384 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4369 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4385 __ CopyBytes(string, result_pos, string_length, scratch1);
4370 __ CopyBytes(string, result_pos, string_length, scratch); 4386 __ cmpl(element, elements_end);
4371 __ cmp(element, elements_end); 4387 __ blt(&one_char_separator_loop); // End while (element < elements_end).
4372 __ b(lt, &one_char_separator_loop); // End while (element < elements_end). 4388 DCHECK(result.is(r3));
4373 DCHECK(result.is(r0));
4374 __ b(&done); 4389 __ b(&done);
4375 4390
4376 // Long separator case (separator is more than one character). Entry is at the 4391 // Long separator case (separator is more than one character). Entry is at the
4377 // label long_separator below. 4392 // label long_separator below.
4378 __ bind(&long_separator_loop); 4393 __ bind(&long_separator_loop);
4379 // Live values in registers: 4394 // Live values in registers:
4380 // result_pos: the position to which we are currently copying characters. 4395 // result_pos: the position to which we are currently copying characters.
4381 // element: Current array element. 4396 // element: Current array element.
4382 // elements_end: Array end. 4397 // elements_end: Array end.
4383 // separator: Separator string. 4398 // separator: Separator string.
4384 4399
4385 // Copy the separator to the result. 4400 // Copy the separator to the result.
4386 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); 4401 __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
4387 __ SmiUntag(string_length); 4402 __ SmiUntag(string_length);
4388 __ add(string, 4403 __ addi(string, separator,
4389 separator, 4404 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4390 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4405 __ CopyBytes(string, result_pos, string_length, scratch1);
4391 __ CopyBytes(string, result_pos, string_length, scratch);
4392 4406
4393 __ bind(&long_separator); 4407 __ bind(&long_separator);
4394 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); 4408 __ LoadP(string, MemOperand(element));
4395 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); 4409 __ addi(element, element, Operand(kPointerSize));
4410 __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
4396 __ SmiUntag(string_length); 4411 __ SmiUntag(string_length);
4397 __ add(string, 4412 __ addi(string, string,
4398 string, 4413 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4399 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4414 __ CopyBytes(string, result_pos, string_length, scratch1);
4400 __ CopyBytes(string, result_pos, string_length, scratch); 4415 __ cmpl(element, elements_end);
4401 __ cmp(element, elements_end); 4416 __ blt(&long_separator_loop); // End while (element < elements_end).
4402 __ b(lt, &long_separator_loop); // End while (element < elements_end). 4417 DCHECK(result.is(r3));
4403 DCHECK(result.is(r0));
4404 __ b(&done); 4418 __ b(&done);
4405 4419
4406 __ bind(&bailout); 4420 __ bind(&bailout);
4407 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); 4421 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4408 __ bind(&done); 4422 __ bind(&done);
4409 context()->Plug(r0); 4423 context()->Plug(r3);
4410 } 4424 }
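Taken together, the three copy loops implement the fast path of Array.prototype.join for flat one-byte strings; anything else falls through to the bailout and returns undefined so the caller can take the slow path. A compact C++ sketch of the same control flow, under the simplifying assumption that the inputs are plain byte strings (std::string stands in for SeqOneByteString):

    #include <string>
    #include <vector>

    // Returns false where the generated code would jump to &bailout.
    bool FastOneByteArrayJoin(const std::vector<std::string>& elements,
                              const std::string& separator,
                              std::string* result) {
      if (elements.empty()) return false;          // asserted non-empty above
      if (elements.size() == 1) {                  // the not_size_one_array case
        *result = elements[0];
        return true;
      }
      size_t length = separator.size() * (elements.size() - 1);
      for (const std::string& s : elements) length += s.size();
      result->clear();
      result->reserve(length);                     // AllocateOneByteString
      for (size_t i = 0; i < elements.size(); i++) {
        if (i > 0) result->append(separator);      // empty/one-char/long cases
        result->append(elements[i]);               // CopyBytes
      }
      return true;
    }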
4411 4425
4412 4426
4413 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 4427 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4414 DCHECK(expr->arguments()->length() == 0); 4428 DCHECK(expr->arguments()->length() == 0);
4415 ExternalReference debug_is_active = 4429 ExternalReference debug_is_active =
4416 ExternalReference::debug_is_active_address(isolate()); 4430 ExternalReference::debug_is_active_address(isolate());
4417 __ mov(ip, Operand(debug_is_active)); 4431 __ mov(ip, Operand(debug_is_active));
4418 __ ldrb(r0, MemOperand(ip)); 4432 __ lbz(r3, MemOperand(ip));
4419 __ SmiTag(r0); 4433 __ SmiTag(r3);
4420 context()->Plug(r0); 4434 context()->Plug(r3);
4421 } 4435 }
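EmitDebugIsActive reads a single byte flag and returns it smi-tagged. For reference, a sketch of the two tagging schemes this port has to cover (an assumption spelled out here, not code from this file): 32-bit targets shift the payload into place above one low tag bit, while the 64-bit configuration keeps it in the upper word:

    #include <cstdint>

    int32_t SmiTag32(int32_t value) { return value << 1; }   // low tag bit is 0
    int64_t SmiTag64(int64_t value) { return value << 32; }  // payload in upper word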
4422 4436
4423 4437
4424 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 4438 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4425 if (expr->function() != NULL && 4439 if (expr->function() != NULL &&
4426 expr->function()->intrinsic_type == Runtime::INLINE) { 4440 expr->function()->intrinsic_type == Runtime::INLINE) {
4427 Comment cmnt(masm_, "[ InlineRuntimeCall"); 4441 Comment cmnt(masm_, "[ InlineRuntimeCall");
4428 EmitInlineRuntimeCall(expr); 4442 EmitInlineRuntimeCall(expr);
4429 return; 4443 return;
4430 } 4444 }
4431 4445
4432 Comment cmnt(masm_, "[ CallRuntime"); 4446 Comment cmnt(masm_, "[ CallRuntime");
4433 ZoneList<Expression*>* args = expr->arguments(); 4447 ZoneList<Expression*>* args = expr->arguments();
4434 int arg_count = args->length(); 4448 int arg_count = args->length();
4435 4449
4436 if (expr->is_jsruntime()) { 4450 if (expr->is_jsruntime()) {
4437 // Push the builtins object as the receiver. 4451 // Push the builtins object as the receiver.
4438 Register receiver = LoadDescriptor::ReceiverRegister(); 4452 Register receiver = LoadDescriptor::ReceiverRegister();
4439 __ ldr(receiver, GlobalObjectOperand()); 4453 __ LoadP(receiver, GlobalObjectOperand());
4440 __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset)); 4454 __ LoadP(receiver,
4455 FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4441 __ push(receiver); 4456 __ push(receiver);
4442 4457
4443 // Load the function from the receiver. 4458 // Load the function from the receiver.
4444 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name())); 4459 __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
4445 if (FLAG_vector_ics) { 4460 if (FLAG_vector_ics) {
4446 __ mov(VectorLoadICDescriptor::SlotRegister(), 4461 __ mov(VectorLoadICDescriptor::SlotRegister(),
4447 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot()))); 4462 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4448 CallLoadIC(NOT_CONTEXTUAL); 4463 CallLoadIC(NOT_CONTEXTUAL);
4449 } else { 4464 } else {
4450 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); 4465 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4451 } 4466 }
4452 4467
4453 // Push the target function under the receiver. 4468 // Push the target function under the receiver.
4454 __ ldr(ip, MemOperand(sp, 0)); 4469 __ LoadP(ip, MemOperand(sp, 0));
4455 __ push(ip); 4470 __ push(ip);
4456 __ str(r0, MemOperand(sp, kPointerSize)); 4471 __ StoreP(r3, MemOperand(sp, kPointerSize));
4457 4472
4458 // Push the arguments ("left-to-right"). 4473 // Push the arguments ("left-to-right").
4459 int arg_count = args->length(); 4474 int arg_count = args->length();
4460 for (int i = 0; i < arg_count; i++) { 4475 for (int i = 0; i < arg_count; i++) {
4461 VisitForStackValue(args->at(i)); 4476 VisitForStackValue(args->at(i));
4462 } 4477 }
4463 4478
4464 // Record source position of the IC call. 4479 // Record source position of the IC call.
4465 SetSourcePosition(expr->position()); 4480 SetSourcePosition(expr->position());
4466 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); 4481 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4467 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 4482 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
4468 __ CallStub(&stub); 4483 __ CallStub(&stub);
4469 4484
4470 // Restore context register. 4485 // Restore context register.
4471 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4486 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4472 4487
4473 context()->DropAndPlug(1, r0); 4488 context()->DropAndPlug(1, r3);
4474 } else { 4489 } else {
4475 // Push the arguments ("left-to-right"). 4490 // Push the arguments ("left-to-right").
4476 for (int i = 0; i < arg_count; i++) { 4491 for (int i = 0; i < arg_count; i++) {
4477 VisitForStackValue(args->at(i)); 4492 VisitForStackValue(args->at(i));
4478 } 4493 }
4479 4494
4480 // Call the C runtime function. 4495 // Call the C runtime function.
4481 __ CallRuntime(expr->function(), arg_count); 4496 __ CallRuntime(expr->function(), arg_count);
4482 context()->Plug(r0); 4497 context()->Plug(r3);
4483 } 4498 }
4484 } 4499 }
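In the is_jsruntime branch above, the receiver is pushed first, the load IC leaves the target function in r3, and the LoadP/push/StoreP triple slides that function underneath the receiver before the arguments go on. A toy model of the resulting operand stack (back of the vector plays the top of stack; the slot names are illustrative only):

    #include <string>
    #include <vector>

    std::vector<std::string> BuildJsRuntimeFrame(int arg_count) {
      std::vector<std::string> stack;
      stack.push_back("builtins receiver");          // pushed as the receiver
      std::string top = stack.back();
      stack.push_back(top);                          // push copy of sp[0]
      stack[stack.size() - 2] = "target function";   // StoreP r3, sp[kPointerSize]
      for (int i = 0; i < arg_count; i++)
        stack.push_back("arg " + std::to_string(i));
      // CallFunctionStub then reloads the function from
      // sp + (arg_count + 1) * kPointerSize,
      // i.e. stack[stack.size() - arg_count - 2] in this model.
      return stack;
    }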
4485 4500
4486 4501
4487 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 4502 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4488 switch (expr->op()) { 4503 switch (expr->op()) {
4489 case Token::DELETE: { 4504 case Token::DELETE: {
4490 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 4505 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4491 Property* property = expr->expression()->AsProperty(); 4506 Property* property = expr->expression()->AsProperty();
4492 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 4507 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4493 4508
4494 if (property != NULL) { 4509 if (property != NULL) {
4495 VisitForStackValue(property->obj()); 4510 VisitForStackValue(property->obj());
4496 VisitForStackValue(property->key()); 4511 VisitForStackValue(property->key());
4497 __ mov(r1, Operand(Smi::FromInt(strict_mode()))); 4512 __ LoadSmiLiteral(r4, Smi::FromInt(strict_mode()));
4498 __ push(r1); 4513 __ push(r4);
4499 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4514 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4500 context()->Plug(r0); 4515 context()->Plug(r3);
4501 } else if (proxy != NULL) { 4516 } else if (proxy != NULL) {
4502 Variable* var = proxy->var(); 4517 Variable* var = proxy->var();
4503 // Delete of an unqualified identifier is disallowed in strict mode 4518 // Delete of an unqualified identifier is disallowed in strict mode
4504 // but "delete this" is allowed. 4519 // but "delete this" is allowed.
4505 DCHECK(strict_mode() == SLOPPY || var->is_this()); 4520 DCHECK(strict_mode() == SLOPPY || var->is_this());
4506 if (var->IsUnallocated()) { 4521 if (var->IsUnallocated()) {
4507 __ ldr(r2, GlobalObjectOperand()); 4522 __ LoadP(r5, GlobalObjectOperand());
4508 __ mov(r1, Operand(var->name())); 4523 __ mov(r4, Operand(var->name()));
4509 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); 4524 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));
4510 __ Push(r2, r1, r0); 4525 __ Push(r5, r4, r3);
4511 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4526 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4512 context()->Plug(r0); 4527 context()->Plug(r3);
4513 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4528 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4514 // Result of deleting non-global, non-dynamic variables is false. 4529 // Result of deleting non-global, non-dynamic variables is false.
4515 // The subexpression does not have side effects. 4530 // The subexpression does not have side effects.
4516 context()->Plug(var->is_this()); 4531 context()->Plug(var->is_this());
4517 } else { 4532 } else {
4518 // Non-global variable. Call the runtime to try to delete from the 4533 // Non-global variable. Call the runtime to try to delete from the
4519 // context where the variable was introduced. 4534 // context where the variable was introduced.
4520 DCHECK(!context_register().is(r2)); 4535 DCHECK(!context_register().is(r5));
4521 __ mov(r2, Operand(var->name())); 4536 __ mov(r5, Operand(var->name()));
4522 __ Push(context_register(), r2); 4537 __ Push(context_register(), r5);
4523 __ CallRuntime(Runtime::kDeleteLookupSlot, 2); 4538 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4524 context()->Plug(r0); 4539 context()->Plug(r3);
4525 } 4540 }
4526 } else { 4541 } else {
4527 // Result of deleting non-property, non-variable reference is true. 4542 // Result of deleting non-property, non-variable reference is true.
4528 // The subexpression may have side effects. 4543 // The subexpression may have side effects.
4529 VisitForEffect(expr->expression()); 4544 VisitForEffect(expr->expression());
4530 context()->Plug(true); 4545 context()->Plug(true);
4531 } 4546 }
4532 break; 4547 break;
4533 } 4548 }
4534 4549
4535 case Token::VOID: { 4550 case Token::VOID: {
4536 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 4551 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4537 VisitForEffect(expr->expression()); 4552 VisitForEffect(expr->expression());
4538 context()->Plug(Heap::kUndefinedValueRootIndex); 4553 context()->Plug(Heap::kUndefinedValueRootIndex);
4539 break; 4554 break;
4540 } 4555 }
4541 4556
4542 case Token::NOT: { 4557 case Token::NOT: {
4543 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 4558 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4544 if (context()->IsEffect()) { 4559 if (context()->IsEffect()) {
4545 // Unary NOT has no side effects so it's only necessary to visit the 4560 // Unary NOT has no side effects so it's only necessary to visit the
4546 // subexpression. Match the optimizing compiler by not branching. 4561 // subexpression. Match the optimizing compiler by not branching.
4547 VisitForEffect(expr->expression()); 4562 VisitForEffect(expr->expression());
4548 } else if (context()->IsTest()) { 4563 } else if (context()->IsTest()) {
4549 const TestContext* test = TestContext::cast(context()); 4564 const TestContext* test = TestContext::cast(context());
4550 // The labels are swapped for the recursive call. 4565 // The labels are swapped for the recursive call.
4551 VisitForControl(expr->expression(), 4566 VisitForControl(expr->expression(), test->false_label(),
4552 test->false_label(), 4567 test->true_label(), test->fall_through());
4553 test->true_label(),
4554 test->fall_through());
4555 context()->Plug(test->true_label(), test->false_label()); 4568 context()->Plug(test->true_label(), test->false_label());
4556 } else { 4569 } else {
4557 // We handle value contexts explicitly rather than simply visiting 4570 // We handle value contexts explicitly rather than simply visiting
4558 // for control and plugging the control flow into the context, 4571 // for control and plugging the control flow into the context,
4559 // because we need to prepare a pair of extra administrative AST ids 4572 // because we need to prepare a pair of extra administrative AST ids
4560 // for the optimizing compiler. 4573 // for the optimizing compiler.
4561 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); 4574 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4562 Label materialize_true, materialize_false, done; 4575 Label materialize_true, materialize_false, done;
4563 VisitForControl(expr->expression(), 4576 VisitForControl(expr->expression(), &materialize_false,
4564 &materialize_false, 4577 &materialize_true, &materialize_true);
4565 &materialize_true,
4566 &materialize_true);
4567 __ bind(&materialize_true); 4578 __ bind(&materialize_true);
4568 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); 4579 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4569 __ LoadRoot(r0, Heap::kTrueValueRootIndex); 4580 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
4570 if (context()->IsStackValue()) __ push(r0); 4581 if (context()->IsStackValue()) __ push(r3);
4571 __ jmp(&done); 4582 __ b(&done);
4572 __ bind(&materialize_false); 4583 __ bind(&materialize_false);
4573 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); 4584 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4574 __ LoadRoot(r0, Heap::kFalseValueRootIndex); 4585 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
4575 if (context()->IsStackValue()) __ push(r0); 4586 if (context()->IsStackValue()) __ push(r3);
4576 __ bind(&done); 4587 __ bind(&done);
4577 } 4588 }
4578 break; 4589 break;
4579 } 4590 }
4580 4591
4581 case Token::TYPEOF: { 4592 case Token::TYPEOF: {
4582 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 4593 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4583 { StackValueContext context(this); 4594 {
4595 StackValueContext context(this);
4584 VisitForTypeofValue(expr->expression()); 4596 VisitForTypeofValue(expr->expression());
4585 } 4597 }
4586 __ CallRuntime(Runtime::kTypeof, 1); 4598 __ CallRuntime(Runtime::kTypeof, 1);
4587 context()->Plug(r0); 4599 context()->Plug(r3);
4588 break; 4600 break;
4589 } 4601 }
4590 4602
4591 default: 4603 default:
4592 UNREACHABLE(); 4604 UNREACHABLE();
4593 } 4605 }
4594 } 4606 }
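The NOT case above never materializes a boolean when it can avoid it: in a test context it simply re-visits the operand with the true and false labels exchanged. A tiny sketch of the idea with callables standing in for labels (the names are illustrative):

    #include <functional>

    bool EvalForControl(const std::function<bool()>& expr,
                        bool if_true, bool if_false) {
      return expr() ? if_true : if_false;
    }

    // "!e branches to T" is exactly "e branches to F": swap the labels.
    bool EvalNot(const std::function<bool()>& expr,
                 bool if_true, bool if_false) {
      return EvalForControl(expr, /*if_true=*/if_false, /*if_false=*/if_true);
    }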
4595 4607
4596 4608
4597 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4609 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4598 DCHECK(expr->expression()->IsValidReferenceExpression()); 4610 DCHECK(expr->expression()->IsValidReferenceExpression());
4599 4611
4600 Comment cmnt(masm_, "[ CountOperation"); 4612 Comment cmnt(masm_, "[ CountOperation");
4601 SetSourcePosition(expr->position()); 4613 SetSourcePosition(expr->position());
4602 4614
4603 Property* prop = expr->expression()->AsProperty(); 4615 Property* prop = expr->expression()->AsProperty();
4604 LhsKind assign_type = GetAssignType(prop); 4616 LhsKind assign_type = GetAssignType(prop);
4605 4617
4606 // Evaluate expression and get value. 4618 // Evaluate expression and get value.
4607 if (assign_type == VARIABLE) { 4619 if (assign_type == VARIABLE) {
4608 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL); 4620 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4609 AccumulatorValueContext context(this); 4621 AccumulatorValueContext context(this);
4610 EmitVariableLoad(expr->expression()->AsVariableProxy()); 4622 EmitVariableLoad(expr->expression()->AsVariableProxy());
4611 } else { 4623 } else {
4612 // Reserve space for result of postfix operation. 4624 // Reserve space for result of postfix operation.
4613 if (expr->is_postfix() && !context()->IsEffect()) { 4625 if (expr->is_postfix() && !context()->IsEffect()) {
4614 __ mov(ip, Operand(Smi::FromInt(0))); 4626 __ LoadSmiLiteral(ip, Smi::FromInt(0));
4615 __ push(ip); 4627 __ push(ip);
4616 } 4628 }
4617 switch (assign_type) { 4629 switch (assign_type) {
4618 case NAMED_PROPERTY: { 4630 case NAMED_PROPERTY: {
4619 // Put the object both on the stack and in the register. 4631 // Put the object both on the stack and in the register.
4620 VisitForStackValue(prop->obj()); 4632 VisitForStackValue(prop->obj());
4621 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); 4633 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4622 EmitNamedPropertyLoad(prop); 4634 EmitNamedPropertyLoad(prop);
4623 break; 4635 break;
4624 } 4636 }
4625 4637
4626 case NAMED_SUPER_PROPERTY: { 4638 case NAMED_SUPER_PROPERTY: {
4627 VisitForStackValue(prop->obj()->AsSuperReference()->this_var()); 4639 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4628 EmitLoadHomeObject(prop->obj()->AsSuperReference()); 4640 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4629 __ Push(result_register()); 4641 __ Push(result_register());
4630 const Register scratch = r1; 4642 const Register scratch = r4;
4631 __ ldr(scratch, MemOperand(sp, kPointerSize)); 4643 __ LoadP(scratch, MemOperand(sp, kPointerSize));
4632 __ Push(scratch); 4644 __ Push(scratch, result_register());
4633 __ Push(result_register());
4634 EmitNamedSuperPropertyLoad(prop); 4645 EmitNamedSuperPropertyLoad(prop);
4635 break; 4646 break;
4636 } 4647 }
4637 4648
4638 case KEYED_SUPER_PROPERTY: { 4649 case KEYED_SUPER_PROPERTY: {
4639 VisitForStackValue(prop->obj()->AsSuperReference()->this_var()); 4650 VisitForStackValue(prop->obj()->AsSuperReference()->this_var());
4640 EmitLoadHomeObject(prop->obj()->AsSuperReference()); 4651 EmitLoadHomeObject(prop->obj()->AsSuperReference());
4641 __ Push(result_register()); 4652 const Register scratch = r4;
4653 const Register scratch1 = r5;
4654 __ Move(scratch, result_register());
4642 VisitForAccumulatorValue(prop->key()); 4655 VisitForAccumulatorValue(prop->key());
4643 __ Push(result_register()); 4656 __ Push(scratch, result_register());
4644 const Register scratch = r1; 4657 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
4645 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize)); 4658 __ Push(scratch1, scratch, result_register());
4646 __ Push(scratch);
4647 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
4648 __ Push(scratch);
4649 __ Push(result_register());
4650 EmitKeyedSuperPropertyLoad(prop); 4659 EmitKeyedSuperPropertyLoad(prop);
4651 break; 4660 break;
4652 } 4661 }
4653 4662
4654 case KEYED_PROPERTY: { 4663 case KEYED_PROPERTY: {
4655 VisitForStackValue(prop->obj()); 4664 VisitForStackValue(prop->obj());
4656 VisitForStackValue(prop->key()); 4665 VisitForStackValue(prop->key());
4657 __ ldr(LoadDescriptor::ReceiverRegister(), 4666 __ LoadP(LoadDescriptor::ReceiverRegister(),
4658 MemOperand(sp, 1 * kPointerSize)); 4667 MemOperand(sp, 1 * kPointerSize));
4659 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); 4668 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4660 EmitKeyedPropertyLoad(prop); 4669 EmitKeyedPropertyLoad(prop);
4661 break; 4670 break;
4662 } 4671 }
4663 4672
4664 case VARIABLE: 4673 case VARIABLE:
4665 UNREACHABLE(); 4674 UNREACHABLE();
4666 } 4675 }
4667 } 4676 }
4668 4677
4669 // We need a second deoptimization point after loading the value 4678 // We need a second deoptimization point after loading the value
4670 // in case evaluating the property load may have a side effect. 4679 // in case evaluating the property load may have a side effect.
4671 if (assign_type == VARIABLE) { 4680 if (assign_type == VARIABLE) {
4672 PrepareForBailout(expr->expression(), TOS_REG); 4681 PrepareForBailout(expr->expression(), TOS_REG);
4673 } else { 4682 } else {
4674 PrepareForBailoutForId(prop->LoadId(), TOS_REG); 4683 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4675 } 4684 }
4676 4685
4677 // Inline smi case if we are in a loop. 4686 // Inline smi case if we are in a loop.
4678 Label stub_call, done; 4687 Label stub_call, done;
4679 JumpPatchSite patch_site(masm_); 4688 JumpPatchSite patch_site(masm_);
4680 4689
4681 int count_value = expr->op() == Token::INC ? 1 : -1; 4690 int count_value = expr->op() == Token::INC ? 1 : -1;
4682 if (ShouldInlineSmiCase(expr->op())) { 4691 if (ShouldInlineSmiCase(expr->op())) {
4683 Label slow; 4692 Label slow;
4684 patch_site.EmitJumpIfNotSmi(r0, &slow); 4693 patch_site.EmitJumpIfNotSmi(r3, &slow);
4685 4694
4686 // Save result for postfix expressions. 4695 // Save result for postfix expressions.
4687 if (expr->is_postfix()) { 4696 if (expr->is_postfix()) {
4688 if (!context()->IsEffect()) { 4697 if (!context()->IsEffect()) {
4689 // Save the result on the stack. If we have a named or keyed property 4698 // Save the result on the stack. If we have a named or keyed property
4690 // we store the result under the receiver that is currently on top 4699 // we store the result under the receiver that is currently on top
4691 // of the stack. 4700 // of the stack.
4692 switch (assign_type) { 4701 switch (assign_type) {
4693 case VARIABLE: 4702 case VARIABLE:
4694 __ push(r0); 4703 __ push(r3);
4695 break; 4704 break;
4696 case NAMED_PROPERTY: 4705 case NAMED_PROPERTY:
4697 __ str(r0, MemOperand(sp, kPointerSize)); 4706 __ StoreP(r3, MemOperand(sp, kPointerSize));
4698 break; 4707 break;
4699 case NAMED_SUPER_PROPERTY: 4708 case NAMED_SUPER_PROPERTY:
4700 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4709 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4701 break; 4710 break;
4702 case KEYED_PROPERTY: 4711 case KEYED_PROPERTY:
4703 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4712 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4704 break; 4713 break;
4705 case KEYED_SUPER_PROPERTY: 4714 case KEYED_SUPER_PROPERTY:
4706 __ str(r0, MemOperand(sp, 3 * kPointerSize)); 4715 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
4707 break; 4716 break;
4708 } 4717 }
4709 } 4718 }
4710 } 4719 }
4711 4720
4712 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); 4721 Register scratch1 = r4;
4713 __ b(vc, &done); 4722 Register scratch2 = r5;
4723 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
4724 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
4725 __ BranchOnNoOverflow(&done);
4714 // Call stub. Undo operation first. 4726 // Call stub. Undo operation first.
4715 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 4727 __ sub(r3, r3, scratch1);
4716 __ jmp(&stub_call); 4728 __ b(&stub_call);
4717 __ bind(&slow); 4729 __ bind(&slow);
4718 } 4730 }
4719 ToNumberStub convert_stub(isolate()); 4731 ToNumberStub convert_stub(isolate());
4720 __ CallStub(&convert_stub); 4732 __ CallStub(&convert_stub);
4721 4733
4722 // Save result for postfix expressions. 4734 // Save result for postfix expressions.
4723 if (expr->is_postfix()) { 4735 if (expr->is_postfix()) {
4724 if (!context()->IsEffect()) { 4736 if (!context()->IsEffect()) {
4725 // Save the result on the stack. If we have a named or keyed property 4737 // Save the result on the stack. If we have a named or keyed property
4726 // we store the result under the receiver that is currently on top 4738 // we store the result under the receiver that is currently on top
4727 // of the stack. 4739 // of the stack.
4728 switch (assign_type) { 4740 switch (assign_type) {
4729 case VARIABLE: 4741 case VARIABLE:
4730 __ push(r0); 4742 __ push(r3);
4731 break; 4743 break;
4732 case NAMED_PROPERTY: 4744 case NAMED_PROPERTY:
4733 __ str(r0, MemOperand(sp, kPointerSize)); 4745 __ StoreP(r3, MemOperand(sp, kPointerSize));
4734 break; 4746 break;
4735 case NAMED_SUPER_PROPERTY: 4747 case NAMED_SUPER_PROPERTY:
4736 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4748 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4737 break; 4749 break;
4738 case KEYED_PROPERTY: 4750 case KEYED_PROPERTY:
4739 __ str(r0, MemOperand(sp, 2 * kPointerSize)); 4751 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4740 break; 4752 break;
4741 case KEYED_SUPER_PROPERTY: 4753 case KEYED_SUPER_PROPERTY:
4742 __ str(r0, MemOperand(sp, 3 * kPointerSize)); 4754 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
4743 break; 4755 break;
4744 } 4756 }
4745 } 4757 }
4746 } 4758 }
4747 4759
4748
4749 __ bind(&stub_call); 4760 __ bind(&stub_call);
4750 __ mov(r1, r0); 4761 __ mr(r4, r3);
4751 __ mov(r0, Operand(Smi::FromInt(count_value))); 4762 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
4752 4763
4753 // Record position before stub call. 4764 // Record position before stub call.
4754 SetSourcePosition(expr->position()); 4765 SetSourcePosition(expr->position());
4755 4766
4756 Handle<Code> code = 4767 Handle<Code> code =
4757 CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code(); 4768 CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4758 CallIC(code, expr->CountBinOpFeedbackId()); 4769 CallIC(code, expr->CountBinOpFeedbackId());
4759 patch_site.EmitPatchInfo(); 4770 patch_site.EmitPatchInfo();
4760 __ bind(&done); 4771 __ bind(&done);
4761 4772
4762 // Store the value returned in r0. 4773 // Store the value returned in r3.
4763 switch (assign_type) { 4774 switch (assign_type) {
4764 case VARIABLE: 4775 case VARIABLE:
4765 if (expr->is_postfix()) { 4776 if (expr->is_postfix()) {
4766 { EffectContext context(this); 4777 {
4778 EffectContext context(this);
4767 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4779 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4768 Token::ASSIGN); 4780 Token::ASSIGN);
4769 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4781 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4770 context.Plug(r0); 4782 context.Plug(r3);
4771 } 4783 }
4772 // For all contexts except EffectContext we have the result on 4784 // For all contexts except EffectContext we have the result on
4773 // top of the stack. 4785 // top of the stack.
4774 if (!context()->IsEffect()) { 4786 if (!context()->IsEffect()) {
4775 context()->PlugTOS(); 4787 context()->PlugTOS();
4776 } 4788 }
4777 } else { 4789 } else {
4778 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4790 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4779 Token::ASSIGN); 4791 Token::ASSIGN);
4780 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4792 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4781 context()->Plug(r0); 4793 context()->Plug(r3);
4782 } 4794 }
4783 break; 4795 break;
4784 case NAMED_PROPERTY: { 4796 case NAMED_PROPERTY: {
4785 __ mov(StoreDescriptor::NameRegister(), 4797 __ mov(StoreDescriptor::NameRegister(),
4786 Operand(prop->key()->AsLiteral()->value())); 4798 Operand(prop->key()->AsLiteral()->value()));
4787 __ pop(StoreDescriptor::ReceiverRegister()); 4799 __ pop(StoreDescriptor::ReceiverRegister());
4788 CallStoreIC(expr->CountStoreFeedbackId()); 4800 CallStoreIC(expr->CountStoreFeedbackId());
4789 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4801 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4790 if (expr->is_postfix()) { 4802 if (expr->is_postfix()) {
4791 if (!context()->IsEffect()) { 4803 if (!context()->IsEffect()) {
4792 context()->PlugTOS(); 4804 context()->PlugTOS();
4793 } 4805 }
4794 } else { 4806 } else {
4795 context()->Plug(r0); 4807 context()->Plug(r3);
4796 } 4808 }
4797 break; 4809 break;
4798 } 4810 }
4799 case NAMED_SUPER_PROPERTY: { 4811 case NAMED_SUPER_PROPERTY: {
4800 EmitNamedSuperPropertyStore(prop); 4812 EmitNamedSuperPropertyStore(prop);
4801 if (expr->is_postfix()) { 4813 if (expr->is_postfix()) {
4802 if (!context()->IsEffect()) { 4814 if (!context()->IsEffect()) {
4803 context()->PlugTOS(); 4815 context()->PlugTOS();
4804 } 4816 }
4805 } else { 4817 } else {
4806 context()->Plug(r0); 4818 context()->Plug(r3);
4807 } 4819 }
4808 break; 4820 break;
4809 } 4821 }
4810 case KEYED_SUPER_PROPERTY: { 4822 case KEYED_SUPER_PROPERTY: {
4811 EmitKeyedSuperPropertyStore(prop); 4823 EmitKeyedSuperPropertyStore(prop);
4812 if (expr->is_postfix()) { 4824 if (expr->is_postfix()) {
4813 if (!context()->IsEffect()) { 4825 if (!context()->IsEffect()) {
4814 context()->PlugTOS(); 4826 context()->PlugTOS();
4815 } 4827 }
4816 } else { 4828 } else {
4817 context()->Plug(r0); 4829 context()->Plug(r3);
4818 } 4830 }
4819 break; 4831 break;
4820 } 4832 }
4821 case KEYED_PROPERTY: { 4833 case KEYED_PROPERTY: {
4822 __ Pop(StoreDescriptor::ReceiverRegister(), 4834 __ Pop(StoreDescriptor::ReceiverRegister(),
4823 StoreDescriptor::NameRegister()); 4835 StoreDescriptor::NameRegister());
4824 Handle<Code> ic = 4836 Handle<Code> ic =
4825 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code(); 4837 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4826 CallIC(ic, expr->CountStoreFeedbackId()); 4838 CallIC(ic, expr->CountStoreFeedbackId());
4827 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4839 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4828 if (expr->is_postfix()) { 4840 if (expr->is_postfix()) {
4829 if (!context()->IsEffect()) { 4841 if (!context()->IsEffect()) {
4830 context()->PlugTOS(); 4842 context()->PlugTOS();
4831 } 4843 }
4832 } else { 4844 } else {
4833 context()->Plug(r0); 4845 context()->Plug(r3);
4834 } 4846 }
4835 break; 4847 break;
4836 } 4848 }
4837 } 4849 }
4838 } 4850 }
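The inline fast path above adds the tagged ±1, branches to the BinaryOpIC stub on overflow (undoing the add first), and for postfix forms has already parked the original value on the stack. A sketch under the 32-bit smi assumption (one low tag bit), with the helper name invented for illustration:

    #include <cstdint>

    // Returns false where the generated code undoes the add and calls the
    // stub; *old_value is what a postfix expression yields.
    bool InlineSmiCount(int32_t* tagged, bool is_increment, int32_t* old_value) {
      *old_value = *tagged;                          // saved for postfix use
      int32_t count = (is_increment ? 1 : -1) << 1;  // Smi::FromInt(+/-1)
      int64_t sum = static_cast<int64_t>(*tagged) + count;
      if (sum != static_cast<int32_t>(sum)) return false;  // overflow
      *tagged = static_cast<int32_t>(sum);
      return true;
    }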
4839 4851
4840 4852
4841 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4853 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4842 DCHECK(!context()->IsEffect()); 4854 DCHECK(!context()->IsEffect());
4843 DCHECK(!context()->IsTest()); 4855 DCHECK(!context()->IsTest());
4844 VariableProxy* proxy = expr->AsVariableProxy(); 4856 VariableProxy* proxy = expr->AsVariableProxy();
4845 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4857 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4846 Comment cmnt(masm_, "[ Global variable"); 4858 Comment cmnt(masm_, "[ Global variable");
4847 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); 4859 __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4848 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name())); 4860 __ mov(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4849 if (FLAG_vector_ics) { 4861 if (FLAG_vector_ics) {
4850 __ mov(VectorLoadICDescriptor::SlotRegister(), 4862 __ mov(VectorLoadICDescriptor::SlotRegister(),
4851 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); 4863 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
4852 } 4864 }
4853 // Use a regular load, not a contextual load, to avoid a reference 4865 // Use a regular load, not a contextual load, to avoid a reference
4854 // error. 4866 // error.
4855 CallLoadIC(NOT_CONTEXTUAL); 4867 CallLoadIC(NOT_CONTEXTUAL);
4856 PrepareForBailout(expr, TOS_REG); 4868 PrepareForBailout(expr, TOS_REG);
4857 context()->Plug(r0); 4869 context()->Plug(r3);
4858 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4870 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4859 Comment cmnt(masm_, "[ Lookup slot"); 4871 Comment cmnt(masm_, "[ Lookup slot");
4860 Label done, slow; 4872 Label done, slow;
4861 4873
4862 // Generate code for loading from variables potentially shadowed 4874 // Generate code for loading from variables potentially shadowed
4863 // by eval-introduced variables. 4875 // by eval-introduced variables.
4864 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done); 4876 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4865 4877
4866 __ bind(&slow); 4878 __ bind(&slow);
4867 __ mov(r0, Operand(proxy->name())); 4879 __ mov(r3, Operand(proxy->name()));
4868 __ Push(cp, r0); 4880 __ Push(cp, r3);
4869 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2); 4881 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4870 PrepareForBailout(expr, TOS_REG); 4882 PrepareForBailout(expr, TOS_REG);
4871 __ bind(&done); 4883 __ bind(&done);
4872 4884
4873 context()->Plug(r0); 4885 context()->Plug(r3);
4874 } else { 4886 } else {
4875 // This expression cannot throw a reference error at the top level. 4887 // This expression cannot throw a reference error at the top level.
4876 VisitInDuplicateContext(expr); 4888 VisitInDuplicateContext(expr);
4877 } 4889 }
4878 } 4890 }
4879 4891
4880 4892
4881 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 4893 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4882 Expression* sub_expr, 4894 Expression* sub_expr,
4883 Handle<String> check) { 4895 Handle<String> check) {
4884 Label materialize_true, materialize_false; 4896 Label materialize_true, materialize_false;
4885 Label* if_true = NULL; 4897 Label* if_true = NULL;
4886 Label* if_false = NULL; 4898 Label* if_false = NULL;
4887 Label* fall_through = NULL; 4899 Label* fall_through = NULL;
4888 context()->PrepareTest(&materialize_true, &materialize_false, 4900 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4889 &if_true, &if_false, &fall_through); 4901 &if_false, &fall_through);
4890 4902
4891 { AccumulatorValueContext context(this); 4903 {
4904 AccumulatorValueContext context(this);
4892 VisitForTypeofValue(sub_expr); 4905 VisitForTypeofValue(sub_expr);
4893 } 4906 }
4894 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4907 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4895 4908
4896 Factory* factory = isolate()->factory(); 4909 Factory* factory = isolate()->factory();
4897 if (String::Equals(check, factory->number_string())) { 4910 if (String::Equals(check, factory->number_string())) {
4898 __ JumpIfSmi(r0, if_true); 4911 __ JumpIfSmi(r3, if_true);
4899 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4912 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4900 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 4913 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4901 __ cmp(r0, ip); 4914 __ cmp(r3, ip);
4902 Split(eq, if_true, if_false, fall_through); 4915 Split(eq, if_true, if_false, fall_through);
4903 } else if (String::Equals(check, factory->string_string())) { 4916 } else if (String::Equals(check, factory->string_string())) {
4904 __ JumpIfSmi(r0, if_false); 4917 __ JumpIfSmi(r3, if_false);
4905 // Check for undetectable objects => false. 4918 // Check for undetectable objects => false.
4906 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); 4919 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
4907 __ b(ge, if_false); 4920 __ bge(if_false);
4908 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4921 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4909 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4922 STATIC_ASSERT((1 << Map::kIsUndetectable) < 0x8000);
4910 Split(eq, if_true, if_false, fall_through); 4923 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4924 Split(eq, if_true, if_false, fall_through, cr0);
4911 } else if (String::Equals(check, factory->symbol_string())) { 4925 } else if (String::Equals(check, factory->symbol_string())) {
4912 __ JumpIfSmi(r0, if_false); 4926 __ JumpIfSmi(r3, if_false);
4913 __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE); 4927 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
4914 Split(eq, if_true, if_false, fall_through); 4928 Split(eq, if_true, if_false, fall_through);
4915 } else if (String::Equals(check, factory->boolean_string())) { 4929 } else if (String::Equals(check, factory->boolean_string())) {
4916 __ CompareRoot(r0, Heap::kTrueValueRootIndex); 4930 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
4917 __ b(eq, if_true); 4931 __ beq(if_true);
4918 __ CompareRoot(r0, Heap::kFalseValueRootIndex); 4932 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
4919 Split(eq, if_true, if_false, fall_through); 4933 Split(eq, if_true, if_false, fall_through);
4920 } else if (String::Equals(check, factory->undefined_string())) { 4934 } else if (String::Equals(check, factory->undefined_string())) {
4921 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); 4935 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
4922 __ b(eq, if_true); 4936 __ beq(if_true);
4923 __ JumpIfSmi(r0, if_false); 4937 __ JumpIfSmi(r3, if_false);
4924 // Check for undetectable objects => true. 4938 // Check for undetectable objects => true.
4925 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); 4939 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4926 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4940 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4927 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4941 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4928 Split(ne, if_true, if_false, fall_through); 4942 Split(ne, if_true, if_false, fall_through, cr0);
4929 4943
4930 } else if (String::Equals(check, factory->function_string())) { 4944 } else if (String::Equals(check, factory->function_string())) {
4931 __ JumpIfSmi(r0, if_false); 4945 __ JumpIfSmi(r3, if_false);
4932 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 4946 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4933 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE); 4947 __ CompareObjectType(r3, r3, r4, JS_FUNCTION_TYPE);
4934 __ b(eq, if_true); 4948 __ beq(if_true);
4935 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE)); 4949 __ cmpi(r4, Operand(JS_FUNCTION_PROXY_TYPE));
4936 Split(eq, if_true, if_false, fall_through); 4950 Split(eq, if_true, if_false, fall_through);
4937 } else if (String::Equals(check, factory->object_string())) { 4951 } else if (String::Equals(check, factory->object_string())) {
4938 __ JumpIfSmi(r0, if_false); 4952 __ JumpIfSmi(r3, if_false);
4939 __ CompareRoot(r0, Heap::kNullValueRootIndex); 4953 __ CompareRoot(r3, Heap::kNullValueRootIndex);
4940 __ b(eq, if_true); 4954 __ beq(if_true);
4941 // Check for JS objects => true. 4955 // Check for JS objects => true.
4942 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); 4956 __ CompareObjectType(r3, r3, r4, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4943 __ b(lt, if_false); 4957 __ blt(if_false);
4944 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4958 __ CompareInstanceType(r3, r4, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4945 __ b(gt, if_false); 4959 __ bgt(if_false);
4946 // Check for undetectable objects => false. 4960 // Check for undetectable objects => false.
4947 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); 4961 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4948 __ tst(r1, Operand(1 << Map::kIsUndetectable)); 4962 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4949 Split(eq, if_true, if_false, fall_through); 4963 Split(eq, if_true, if_false, fall_through, cr0);
4950 } else { 4964 } else {
4951 if (if_false != fall_through) __ jmp(if_false); 4965 if (if_false != fall_through) __ b(if_false);
4952 } 4966 }
4953 context()->Plug(if_true, if_false); 4967 context()->Plug(if_true, if_false);
4954 } 4968 }
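Several branches above share the same undetectable-object test: load the map's bit field and AND it against the kIsUndetectable mask, so typeof reports "undefined" rather than "object" or "string" for such objects. A sketch of the test (the bit position below is an assumption for illustration; the real value is defined on Map):

    #include <cstdint>

    constexpr int kIsUndetectableBit = 4;  // assumed position, illustrative only

    bool IsUndetectable(uint8_t map_bit_field) {
      // Mirrors the andi/cr0 pattern: a non-zero AND result means undetectable.
      return (map_bit_field & (1 << kIsUndetectableBit)) != 0;
    }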
4955 4969
4956 4970
4957 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 4971 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4958 Comment cmnt(masm_, "[ CompareOperation"); 4972 Comment cmnt(masm_, "[ CompareOperation");
4959 SetSourcePosition(expr->position()); 4973 SetSourcePosition(expr->position());
4960 4974
4961 // First we try a fast inlined version of the compare when one of 4975 // First we try a fast inlined version of the compare when one of
4962 // the operands is a literal. 4976 // the operands is a literal.
4963 if (TryLiteralCompare(expr)) return; 4977 if (TryLiteralCompare(expr)) return;
4964 4978
4965 // Always perform the comparison for its control flow. Pack the result 4979 // Always perform the comparison for its control flow. Pack the result
4966 // into the expression's context after the comparison is performed. 4980 // into the expression's context after the comparison is performed.
4967 Label materialize_true, materialize_false; 4981 Label materialize_true, materialize_false;
4968 Label* if_true = NULL; 4982 Label* if_true = NULL;
4969 Label* if_false = NULL; 4983 Label* if_false = NULL;
4970 Label* fall_through = NULL; 4984 Label* fall_through = NULL;
4971 context()->PrepareTest(&materialize_true, &materialize_false, 4985 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4972 &if_true, &if_false, &fall_through); 4986 &if_false, &fall_through);
4973 4987
4974 Token::Value op = expr->op(); 4988 Token::Value op = expr->op();
4975 VisitForStackValue(expr->left()); 4989 VisitForStackValue(expr->left());
4976 switch (op) { 4990 switch (op) {
4977 case Token::IN: 4991 case Token::IN:
4978 VisitForStackValue(expr->right()); 4992 VisitForStackValue(expr->right());
4979 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4993 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4980 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); 4994 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4981 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 4995 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4982 __ cmp(r0, ip); 4996 __ cmp(r3, ip);
4983 Split(eq, if_true, if_false, fall_through); 4997 Split(eq, if_true, if_false, fall_through);
4984 break; 4998 break;
4985 4999
4986 case Token::INSTANCEOF: { 5000 case Token::INSTANCEOF: {
4987 VisitForStackValue(expr->right()); 5001 VisitForStackValue(expr->right());
4988 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); 5002 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4989 __ CallStub(&stub); 5003 __ CallStub(&stub);
4990 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 5004 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4991 // The stub returns 0 for true. 5005 // The stub returns 0 for true.
4992 __ tst(r0, r0); 5006 __ cmpi(r3, Operand::Zero());
4993 Split(eq, if_true, if_false, fall_through); 5007 Split(eq, if_true, if_false, fall_through);
4994 break; 5008 break;
4995 } 5009 }
4996 5010
4997 default: { 5011 default: {
4998 VisitForAccumulatorValue(expr->right()); 5012 VisitForAccumulatorValue(expr->right());
4999 Condition cond = CompareIC::ComputeCondition(op); 5013 Condition cond = CompareIC::ComputeCondition(op);
5000 __ pop(r1); 5014 __ pop(r4);
5001 5015
5002 bool inline_smi_code = ShouldInlineSmiCase(op); 5016 bool inline_smi_code = ShouldInlineSmiCase(op);
5003 JumpPatchSite patch_site(masm_); 5017 JumpPatchSite patch_site(masm_);
5004 if (inline_smi_code) { 5018 if (inline_smi_code) {
5005 Label slow_case; 5019 Label slow_case;
5006 __ orr(r2, r0, Operand(r1)); 5020 __ orx(r5, r3, r4);
5007 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 5021 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
5008 __ cmp(r1, r0); 5022 __ cmp(r4, r3);
5009 Split(cond, if_true, if_false, NULL); 5023 Split(cond, if_true, if_false, NULL);
5010 __ bind(&slow_case); 5024 __ bind(&slow_case);
5011 } 5025 }
5012 5026
5013 // Record position and call the compare IC. 5027 // Record position and call the compare IC.
5014 SetSourcePosition(expr->position()); 5028 SetSourcePosition(expr->position());
5015 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); 5029 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5016 CallIC(ic, expr->CompareOperationFeedbackId()); 5030 CallIC(ic, expr->CompareOperationFeedbackId());
5017 patch_site.EmitPatchInfo(); 5031 patch_site.EmitPatchInfo();
5018 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 5032 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5019 __ cmp(r0, Operand::Zero()); 5033 __ cmpi(r3, Operand::Zero());
5020 Split(cond, if_true, if_false, fall_through); 5034 Split(cond, if_true, if_false, fall_through);
5021 } 5035 }
5022 } 5036 }
5023 5037
5024 // Convert the result of the comparison into one expected for this 5038 // Convert the result of the comparison into one expected for this
5025 // expression's context. 5039 // expression's context.
5026 context()->Plug(if_true, if_false); 5040 context()->Plug(if_true, if_false);
5027 } 5041 }
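The inline comparison above guards its fast path by OR-ing the two tagged words and testing the tag bit once: the OR of two words has a clear low bit only if both are smis. A one-line sketch of the orx/EmitJumpIfNotSmi pairing (32-bit tagging assumed):

    #include <cstdint>

    bool BothSmis(int32_t a, int32_t b) {
      return ((a | b) & 1) == 0;  // a set tag bit in either operand fails the test
    }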
5028 5042
5029 5043
5030 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, 5044 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5031 Expression* sub_expr, 5045 Expression* sub_expr,
5032 NilValue nil) { 5046 NilValue nil) {
5033 Label materialize_true, materialize_false; 5047 Label materialize_true, materialize_false;
5034 Label* if_true = NULL; 5048 Label* if_true = NULL;
5035 Label* if_false = NULL; 5049 Label* if_false = NULL;
5036 Label* fall_through = NULL; 5050 Label* fall_through = NULL;
5037 context()->PrepareTest(&materialize_true, &materialize_false, 5051 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
5038 &if_true, &if_false, &fall_through); 5052 &if_false, &fall_through);
5039 5053
5040 VisitForAccumulatorValue(sub_expr); 5054 VisitForAccumulatorValue(sub_expr);
5041 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 5055 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5042 if (expr->op() == Token::EQ_STRICT) { 5056 if (expr->op() == Token::EQ_STRICT) {
5043 Heap::RootListIndex nil_value = nil == kNullValue ? 5057 Heap::RootListIndex nil_value = nil == kNullValue
5044 Heap::kNullValueRootIndex : 5058 ? Heap::kNullValueRootIndex
5045 Heap::kUndefinedValueRootIndex; 5059 : Heap::kUndefinedValueRootIndex;
5046 __ LoadRoot(r1, nil_value); 5060 __ LoadRoot(r4, nil_value);
5047 __ cmp(r0, r1); 5061 __ cmp(r3, r4);
5048 Split(eq, if_true, if_false, fall_through); 5062 Split(eq, if_true, if_false, fall_through);
5049 } else { 5063 } else {
5050 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); 5064 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5051 CallIC(ic, expr->CompareOperationFeedbackId()); 5065 CallIC(ic, expr->CompareOperationFeedbackId());
5052 __ cmp(r0, Operand(0)); 5066 __ cmpi(r3, Operand::Zero());
5053 Split(ne, if_true, if_false, fall_through); 5067 Split(ne, if_true, if_false, fall_through);
5054 } 5068 }
5055 context()->Plug(if_true, if_false); 5069 context()->Plug(if_true, if_false);
5056 } 5070 }
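For Token::EQ_STRICT the nil comparison above reduces to pointer identity with a canonical singleton: LoadRoot materializes the one null (or undefined) object and a single cmp decides the result, with no type dispatch needed. A hedged sketch of that idea (plain C++; the singleton objects are stand-ins, not V8 types):

    #include <cassert>

    struct Object {};
    Object kNullSingleton, kUndefinedSingleton;  // stand-ins for heap roots

    // Mirrors "LoadRoot r4, nil_value; cmp r3, r4" for strict equality.
    bool StrictEqualsNil(const Object* x, bool compare_null) {
      const Object* nil =
          compare_null ? &kNullSingleton : &kUndefinedSingleton;
      return x == nil;
    }

    int main() {
      assert(StrictEqualsNil(&kNullSingleton, /*compare_null=*/true));
      assert(!StrictEqualsNil(&kUndefinedSingleton, /*compare_null=*/true));
    }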
5057 5071
5058 5072
5059 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 5073 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5060 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 5074 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5061 context()->Plug(r0); 5075 context()->Plug(r3);
5062 } 5076 }
5063 5077
5064 5078
5065 Register FullCodeGenerator::result_register() { 5079 Register FullCodeGenerator::result_register() { return r3; }
5066 return r0;
5067 }
5068 5080
5069 5081
5070 Register FullCodeGenerator::context_register() { 5082 Register FullCodeGenerator::context_register() { return cp; }
5071 return cp;
5072 }
5073 5083
5074 5084
5075 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 5085 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5076 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 5086 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
5077 __ str(value, MemOperand(fp, frame_offset)); 5087 __ StoreP(value, MemOperand(fp, frame_offset), r0);
5078 } 5088 }
5079 5089
5080 5090
5081 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 5091 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5082 __ ldr(dst, ContextOperand(cp, context_index)); 5092 __ LoadP(dst, ContextOperand(cp, context_index), r0);
5083 } 5093 }
5084 5094
5085 5095
5086 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { 5096 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5087 Scope* declaration_scope = scope()->DeclarationScope(); 5097 Scope* declaration_scope = scope()->DeclarationScope();
5088 if (declaration_scope->is_global_scope() || 5098 if (declaration_scope->is_global_scope() ||
5089 declaration_scope->is_module_scope()) { 5099 declaration_scope->is_module_scope()) {
5090 // Contexts nested in the native context have a canonical empty function 5100 // Contexts nested in the native context have a canonical empty function
5091 // as their closure, not the anonymous closure containing the global 5101 // as their closure, not the anonymous closure containing the global
5092 // code. Pass a smi sentinel and let the runtime look up the empty 5102 // code. Pass a smi sentinel and let the runtime look up the empty
5093 // function. 5103 // function.
5094 __ mov(ip, Operand(Smi::FromInt(0))); 5104 __ LoadSmiLiteral(ip, Smi::FromInt(0));
5095 } else if (declaration_scope->is_eval_scope()) { 5105 } else if (declaration_scope->is_eval_scope()) {
5096 // Contexts created by a call to eval have the same closure as the 5106 // Contexts created by a call to eval have the same closure as the
5097 // context calling eval, not the anonymous closure containing the eval 5107 // context calling eval, not the anonymous closure containing the eval
5098 // code. Fetch it from the context. 5108 // code. Fetch it from the context.
5099 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX)); 5109 __ LoadP(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
5100 } else { 5110 } else {
5101 DCHECK(declaration_scope->is_function_scope()); 5111 DCHECK(declaration_scope->is_function_scope());
5102 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 5112 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5103 } 5113 }
5104 __ push(ip); 5114 __ push(ip);
5105 } 5115 }
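The three branches above choose which closure value to push when a context is allocated: global and module scopes push a smi sentinel that the runtime replaces with the canonical empty function, eval scopes reuse the closure of the calling context, and function scopes take the function straight from the frame. A sketch of that policy as a switch (plain C++; the enum and strings are illustrative only):

    #include <cstdio>

    // Illustrative scope kinds; the real code queries Scope predicates.
    enum class ScopeKind { kGlobal, kModule, kEval, kFunction };

    const char* ClosureToPush(ScopeKind kind) {
      switch (kind) {
        case ScopeKind::kGlobal:
        case ScopeKind::kModule:
          return "smi sentinel; runtime supplies the empty function";
        case ScopeKind::kEval:
          return "closure of the calling context (Context::CLOSURE_INDEX)";
        case ScopeKind::kFunction:
          return "function loaded from the frame (kFunctionOffset)";
      }
      return "";
    }

    int main() { std::printf("%s\n", ClosureToPush(ScopeKind::kEval)); }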
5106 5116
5107 5117
5108 // ---------------------------------------------------------------------------- 5118 // ----------------------------------------------------------------------------
5109 // Non-local control flow support. 5119 // Non-local control flow support.
5110 5120
5111 void FullCodeGenerator::EnterFinallyBlock() { 5121 void FullCodeGenerator::EnterFinallyBlock() {
5112 DCHECK(!result_register().is(r1)); 5122 DCHECK(!result_register().is(r4));
5113 // Store result register while executing finally block. 5123 // Store result register while executing finally block.
5114 __ push(result_register()); 5124 __ push(result_register());
5115 // Cook return address in link register to stack (smi encoded Code* delta) 5125 // Cook return address in link register to stack (smi encoded Code* delta)
5116 __ sub(r1, lr, Operand(masm_->CodeObject())); 5126 __ mflr(r4);
5117 __ SmiTag(r1); 5127 __ mov(ip, Operand(masm_->CodeObject()));
5128 __ sub(r4, r4, ip);
5129 __ SmiTag(r4);
5118 5130
5119 // Store result register while executing finally block. 5131 // Store result register while executing finally block.
5120 __ push(r1); 5132 __ push(r4);
5121 5133
5122 // Store pending message while executing finally block. 5134 // Store pending message while executing finally block.
5123 ExternalReference pending_message_obj = 5135 ExternalReference pending_message_obj =
5124 ExternalReference::address_of_pending_message_obj(isolate()); 5136 ExternalReference::address_of_pending_message_obj(isolate());
5125 __ mov(ip, Operand(pending_message_obj)); 5137 __ mov(ip, Operand(pending_message_obj));
5126 __ ldr(r1, MemOperand(ip)); 5138 __ LoadP(r4, MemOperand(ip));
5127 __ push(r1); 5139 __ push(r4);
5128 5140
5129 ExternalReference has_pending_message = 5141 ExternalReference has_pending_message =
5130 ExternalReference::address_of_has_pending_message(isolate()); 5142 ExternalReference::address_of_has_pending_message(isolate());
5131 __ mov(ip, Operand(has_pending_message)); 5143 __ mov(ip, Operand(has_pending_message));
5132 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) 5144 __ lbz(r4, MemOperand(ip));
5133 __ ldrb(r1, MemOperand(ip)); 5145 __ SmiTag(r4);
5134 __ SmiTag(r1); 5146 __ push(r4);
5135 __ push(r1);
5136 5147
5137 ExternalReference pending_message_script = 5148 ExternalReference pending_message_script =
5138 ExternalReference::address_of_pending_message_script(isolate()); 5149 ExternalReference::address_of_pending_message_script(isolate());
5139 __ mov(ip, Operand(pending_message_script)); 5150 __ mov(ip, Operand(pending_message_script));
5140 __ ldr(r1, MemOperand(ip)); 5151 __ LoadP(r4, MemOperand(ip));
5141 __ push(r1); 5152 __ push(r4);
5142 } 5153 }
5143 5154
5144 5155
5145 void FullCodeGenerator::ExitFinallyBlock() { 5156 void FullCodeGenerator::ExitFinallyBlock() {
5146 DCHECK(!result_register().is(r1)); 5157 DCHECK(!result_register().is(r4));
5147 // Restore pending message from stack. 5158 // Restore pending message from stack.
5148 __ pop(r1); 5159 __ pop(r4);
5149 ExternalReference pending_message_script = 5160 ExternalReference pending_message_script =
5150 ExternalReference::address_of_pending_message_script(isolate()); 5161 ExternalReference::address_of_pending_message_script(isolate());
5151 __ mov(ip, Operand(pending_message_script)); 5162 __ mov(ip, Operand(pending_message_script));
5152 __ str(r1, MemOperand(ip)); 5163 __ StoreP(r4, MemOperand(ip));
5153 5164
5154 __ pop(r1); 5165 __ pop(r4);
5155 __ SmiUntag(r1); 5166 __ SmiUntag(r4);
5156 ExternalReference has_pending_message = 5167 ExternalReference has_pending_message =
5157 ExternalReference::address_of_has_pending_message(isolate()); 5168 ExternalReference::address_of_has_pending_message(isolate());
5158 __ mov(ip, Operand(has_pending_message)); 5169 __ mov(ip, Operand(has_pending_message));
5159 STATIC_ASSERT(sizeof(bool) == 1); // NOLINT(runtime/sizeof) 5170 __ stb(r4, MemOperand(ip));
5160 __ strb(r1, MemOperand(ip));
5161 5171
5162 __ pop(r1); 5172 __ pop(r4);
5163 ExternalReference pending_message_obj = 5173 ExternalReference pending_message_obj =
5164 ExternalReference::address_of_pending_message_obj(isolate()); 5174 ExternalReference::address_of_pending_message_obj(isolate());
5165 __ mov(ip, Operand(pending_message_obj)); 5175 __ mov(ip, Operand(pending_message_obj));
5166 __ str(r1, MemOperand(ip)); 5176 __ StoreP(r4, MemOperand(ip));
5167 5177
5168 // Restore result register from stack. 5178 // Restore result register from stack.
5169 __ pop(r1); 5179 __ pop(r4);
5170 5180
5171 // Uncook return address and return. 5181 // Uncook return address and return.
5172 __ pop(result_register()); 5182 __ pop(result_register());
5173 __ SmiUntag(r1); 5183 __ SmiUntag(r4);
5174 __ add(pc, r1, Operand(masm_->CodeObject())); 5184 __ mov(ip, Operand(masm_->CodeObject()));
5185 __ add(ip, ip, r4);
5186 __ mtctr(ip);
5187 __ bctr();
5175 } 5188 }
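EnterFinallyBlock "cooks" the return address by saving it as a smi-tagged delta from the start of the code object, and ExitFinallyBlock reverses the transformation; the saved slot therefore holds a valid smi rather than a raw code pointer, so it stays correct even if the code object moves. On PPC the uncooked address is rebuilt in ip and reached via mtctr/bctr, since the program counter cannot be written directly as on ARM. A minimal arithmetic sketch of the round trip (plain C++; 32-bit-style smi tagging assumed for simplicity):

    #include <cassert>
    #include <cstdint>

    constexpr intptr_t SmiTag(intptr_t x) { return x << 1; }
    constexpr intptr_t SmiUntag(intptr_t x) { return x >> 1; }

    // "Cook": store the return address as a code-relative smi delta.
    intptr_t Cook(intptr_t return_address, intptr_t code_start) {
      return SmiTag(return_address - code_start);
    }

    // "Uncook": rebuild an absolute address against the current code start.
    intptr_t Uncook(intptr_t cooked, intptr_t code_start) {
      return SmiUntag(cooked) + code_start;
    }

    int main() {
      intptr_t code = 0x10000, ret = 0x10234;
      intptr_t cooked = Cook(ret, code);
      assert(Uncook(cooked, code) == ret);      // round-trips exactly
      assert(Uncook(cooked, code + 0x4000) ==   // tracks a moved code object
             ret + 0x4000);
    }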
5176 5189
5177 5190
5178 #undef __ 5191 #undef __
5179 5192
5180 #define __ ACCESS_MASM(masm()) 5193 #define __ ACCESS_MASM(masm())
5181 5194
5182 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( 5195 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
5183 int* stack_depth, 5196 int* stack_depth, int* context_length) {
5184 int* context_length) {
5185 // The macros used here must preserve the result register. 5197 // The macros used here must preserve the result register.
5186 5198
5187 // Because the handler block contains the context of the finally 5199 // Because the handler block contains the context of the finally
5188 // code, we can restore it directly from there for the finally code 5200 // code, we can restore it directly from there for the finally code
5189 // rather than iteratively unwinding contexts via their previous 5201 // rather than iteratively unwinding contexts via their previous
5190 // links. 5202 // links.
5191 __ Drop(*stack_depth); // Down to the handler block. 5203 __ Drop(*stack_depth); // Down to the handler block.
5192 if (*context_length > 0) { 5204 if (*context_length > 0) {
5193 // Restore the context to its dedicated register and the stack. 5205 // Restore the context to its dedicated register and the stack.
5194 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); 5206 __ LoadP(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
5195 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 5207 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5196 } 5208 }
5197 __ PopTryHandler(); 5209 __ PopTryHandler();
5198 __ bl(finally_entry_); 5210 __ b(finally_entry_, SetLK);
5199 5211
5200 *stack_depth = 0; 5212 *stack_depth = 0;
5201 *context_length = 0; 5213 *context_length = 0;
5202 return previous_; 5214 return previous_;
5203 } 5215 }
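The comment above notes that the context saved in the handler block makes the restore O(1): one LoadP from the handler slot, instead of walking previous-context links for each level being exited. A toy contrast of the two strategies (plain C++; Context here is a stand-in struct, not the V8 type):

    #include <cassert>

    struct Context { Context* previous; };

    // What the emitted LoadP from StackHandlerConstants::kContextOffset does.
    Context* RestoreDirect(Context* saved_in_handler) {
      return saved_in_handler;
    }

    // The avoided alternative: unwind one context link per exited scope.
    Context* RestoreByUnwinding(Context* current, int depth) {
      while (depth-- > 0) current = current->previous;
      return current;
    }

    int main() {
      Context outer{nullptr}, inner{&outer};
      assert(RestoreByUnwinding(&inner, 1) == RestoreDirect(&outer));
    }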
5204 5216
5205
5206 #undef __ 5217 #undef __
5207 5218
5208 5219
5209 static Address GetInterruptImmediateLoadAddress(Address pc) { 5220 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
5210 Address load_address = pc - 2 * Assembler::kInstrSize;
5211 if (!FLAG_enable_ool_constant_pool) {
5212 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
5213 } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
5214 // This is an extended constant pool lookup.
5215 if (CpuFeatures::IsSupported(ARMv7)) {
5216 load_address -= 2 * Assembler::kInstrSize;
5217 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5218 DCHECK(Assembler::IsMovT(
5219 Memory::int32_at(load_address + Assembler::kInstrSize)));
5220 } else {
5221 load_address -= 4 * Assembler::kInstrSize;
5222 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5223 DCHECK(Assembler::IsOrrImmed(
5224 Memory::int32_at(load_address + Assembler::kInstrSize)));
5225 DCHECK(Assembler::IsOrrImmed(
5226 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5227 DCHECK(Assembler::IsOrrImmed(
5228 Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
5229 }
5230 } else if (CpuFeatures::IsSupported(ARMv7) &&
5231 Assembler::IsMovT(Memory::int32_at(load_address))) {
5232 // This is a movw / movt immediate load.
5233 load_address -= Assembler::kInstrSize;
5234 DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
5235 } else if (!CpuFeatures::IsSupported(ARMv7) &&
5236 Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
5237 // This is a mov / orr immediate load.
5238 load_address -= 3 * Assembler::kInstrSize;
5239 DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
5240 DCHECK(Assembler::IsOrrImmed(
5241 Memory::int32_at(load_address + Assembler::kInstrSize)));
5242 DCHECK(Assembler::IsOrrImmed(
5243 Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
5244 } else {
5245 // This is a small constant pool lookup.
5246 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
5247 }
5248 return load_address;
5249 }
5250
5251
5252 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5253 Address pc,
5254 BackEdgeState target_state, 5221 BackEdgeState target_state,
5255 Code* replacement_code) { 5222 Code* replacement_code) {
5256 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); 5223 Address mov_address = Assembler::target_address_from_return_address(pc);
5257 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; 5224 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5258 CodePatcher patcher(branch_address, 1); 5225 CodePatcher patcher(cmp_address, 1);
5226
5259 switch (target_state) { 5227 switch (target_state) {
5260 case INTERRUPT: 5228 case INTERRUPT: {
5261 {
5262 // <decrement profiling counter> 5229 // <decrement profiling counter>
5263 // bpl ok 5230 // cmpi r6, 0
5264 // ; load interrupt stub address into ip - either of (for ARMv7): 5231 // bge <ok> ;; not changed
5265 // ; <small cp load> | <extended cp load> | <immediate load> 5232 // mov r12, <interrupt stub address>
5266 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm 5233 // mtlr r12
5267 // | movt ip, #imm | movw ip, #imm 5234 // blrl
5268 // | ldr ip, [pp, ip]
5269 // ; or (for ARMv6):
5270 // ; <small cp load> | <extended cp load> | <immediate load>
5271 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5272 // | orr ip, ip, #imm> | orr ip, ip, #imm
5273 // | orr ip, ip, #imm> | orr ip, ip, #imm
5274 // | orr ip, ip, #imm> | orr ip, ip, #imm
5275 // blx ip
5276 // <reset profiling counter> 5235 // <reset profiling counter>
5277 // ok-label 5236 // ok-label
5278 5237 patcher.masm()->cmpi(r6, Operand::Zero());
5279 // Calculate branch offset to the ok-label - this is the difference
5280 // between the branch address and |pc| (which points at <blx ip>) plus
5281 // kProfileCounterResetSequence instructions
5282 int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
5283 kProfileCounterResetSequenceLength;
5284 patcher.masm()->b(branch_offset, pl);
5285 break; 5238 break;
5286 } 5239 }
5287 case ON_STACK_REPLACEMENT: 5240 case ON_STACK_REPLACEMENT:
5288 case OSR_AFTER_STACK_CHECK: 5241 case OSR_AFTER_STACK_CHECK:
5289 // <decrement profiling counter> 5242 // <decrement profiling counter>
5290 // mov r0, r0 (NOP) 5243 // crset
5291 // ; load on-stack replacement address into ip - either of (for ARMv7): 5244 // bge <ok> ;; not changed
5292 // ; <small cp load> | <extended cp load> | <immediate load> 5245 // mov r12, <on-stack replacement address>
5293 // ldr ip, [pc/pp, #imm] | movw ip, #imm | movw ip, #imm 5246 // mtlr r12
5294 // | movt ip, #imm> | movw ip, #imm 5247 // blrl
5295 // | ldr ip, [pp, ip]
5296 // ; or (for ARMv6):
5297 // ; <small cp load> | <extended cp load> | <immediate load>
5298 // ldr ip, [pc/pp, #imm] | mov ip, #imm | mov ip, #imm
5299 // | orr ip, ip, #imm> | orr ip, ip, #imm
5300 // | orr ip, ip, #imm> | orr ip, ip, #imm
5301 // | orr ip, ip, #imm> | orr ip, ip, #imm
5302 // blx ip
5303 // <reset profiling counter> 5248 // <reset profiling counter>
5304 // ok-label 5249 // ok-label ----- pc_after points here
5305 patcher.masm()->nop(); 5250
5251 // Set the LT bit such that bge is a NOP
5252 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
5306 break; 5253 break;
5307 } 5254 }
5308 5255
5309 // Replace the call address. 5256 // Replace the stack check address in the mov sequence with the
5310 Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code, 5257 // entry address of the replacement code.
5311 replacement_code->entry()); 5258 Assembler::set_target_address_at(mov_address, unoptimized_code,
5259 replacement_code->entry());
5312 5260
5313 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( 5261 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5314 unoptimized_code, pc_immediate_load_address, replacement_code); 5262 unoptimized_code, mov_address, replacement_code);
5315 } 5263 }
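The patch toggles exactly one instruction at cmp_address. In the INTERRUPT state a real cmpi leaves the following "bge ok" live, so the interrupt stub is skipped while the profiling counter is still non-negative; in the OSR states, crset forces the LT bit of the condition register field, the bge condition ("branch if LT clear") is never true, and the stub call is always taken. A tiny model of why crset neutralizes bge (plain C++, illustrative only):

    #include <cassert>

    // bge branches when the LT bit of the CR field (cr7 here) is clear.
    bool BgeTaken(bool lt_bit) { return !lt_bit; }

    int main() {
      assert(BgeTaken(false));  // real cmpi, counter >= 0: skip the stub
      assert(!BgeTaken(true));  // after crset: bge falls through, so the
                                // OSR stub call is always reached
    }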
5316 5264
5317 5265
5318 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( 5266 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5319 Isolate* isolate, 5267 Isolate* isolate, Code* unoptimized_code, Address pc) {
5320 Code* unoptimized_code, 5268 Address mov_address = Assembler::target_address_from_return_address(pc);
5321 Address pc) { 5269 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
5322 DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize))); 5270 Address interrupt_address =
5271 Assembler::target_address_at(mov_address, unoptimized_code);
5323 5272
5324 Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc); 5273 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
5325 Address branch_address = pc_immediate_load_address - Assembler::kInstrSize; 5274 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
5326 Address interrupt_address = Assembler::target_address_at(
5327 pc_immediate_load_address, unoptimized_code);
5328
5329 if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
5330 DCHECK(interrupt_address ==
5331 isolate->builtins()->InterruptCheck()->entry());
5332 return INTERRUPT; 5275 return INTERRUPT;
5333 } 5276 }
5334 5277
5335 DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address))); 5278 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
5336 5279
5337 if (interrupt_address == 5280 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
5338 isolate->builtins()->OnStackReplacement()->entry()) {
5339 return ON_STACK_REPLACEMENT; 5281 return ON_STACK_REPLACEMENT;
5340 } 5282 }
5341 5283
5342 DCHECK(interrupt_address == 5284 DCHECK(interrupt_address ==
5343 isolate->builtins()->OsrAfterStackCheck()->entry()); 5285 isolate->builtins()->OsrAfterStackCheck()->entry());
5344 return OSR_AFTER_STACK_CHECK; 5286 return OSR_AFTER_STACK_CHECK;
5345 } 5287 }
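GetBackEdgeState decodes the patch state from the same two facts PatchAt writes: which instruction sits at cmp_address (cmpi vs. crset) and which stub entry the mov sequence currently materializes. A compact model of that decode, mirroring the DCHECKs above (plain C++; the enums are illustrative, not V8 types):

    #include <cassert>

    enum class FirstInsn { kCmpi, kCrset };
    enum class Stub { kInterruptCheck, kOnStackReplacement,
                      kOsrAfterStackCheck };
    enum class State { INTERRUPT, ON_STACK_REPLACEMENT,
                       OSR_AFTER_STACK_CHECK };

    State Decode(FirstInsn insn, Stub stub) {
      if (insn == FirstInsn::kCmpi) {
        assert(stub == Stub::kInterruptCheck);  // mirrors the DCHECK above
        return State::INTERRUPT;
      }
      return stub == Stub::kOnStackReplacement
                 ? State::ON_STACK_REPLACEMENT
                 : State::OSR_AFTER_STACK_CHECK;
    }

    int main() {
      assert(Decode(FirstInsn::kCmpi, Stub::kInterruptCheck) ==
             State::INTERRUPT);
      assert(Decode(FirstInsn::kCrset, Stub::kOnStackReplacement) ==
             State::ON_STACK_REPLACEMENT);
    }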
5346 5288 }
5347 5289 } // namespace v8::internal
5348 } } // namespace v8::internal 5290 #endif // V8_TARGET_ARCH_PPC
5349
5350 #endif // V8_TARGET_ARCH_ARM