OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_S390 |
6 | 6 |
7 #include "src/full-codegen/full-codegen.h" | |
jochen (gone - plz use gerrit)
2016/03/04 16:09:55
why did you move the header up?
7 #include "src/ast/scopes.h" | 8 #include "src/ast/scopes.h" |
8 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
9 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 11 #include "src/codegen.h" |
11 #include "src/debug/debug.h" | 12 #include "src/debug/debug.h" |
12 #include "src/full-codegen/full-codegen.h" | |
13 #include "src/ic/ic.h" | 13 #include "src/ic/ic.h" |
14 #include "src/parsing/parser.h" | 14 #include "src/parsing/parser.h" |
15 | 15 |
16 #include "src/ppc/code-stubs-ppc.h" | 16 #include "src/s390/code-stubs-s390.h" |
17 #include "src/ppc/macro-assembler-ppc.h" | 17 #include "src/s390/macro-assembler-s390.h" |
18 | 18 |
19 namespace v8 { | 19 namespace v8 { |
20 namespace internal { | 20 namespace internal { |
21 | 21 |
22 #define __ ACCESS_MASM(masm()) | 22 #define __ ACCESS_MASM(masm()) |
23 | 23 |
24 // A patch site is a location in the code which it is possible to patch. This | 24 // A patch site is a location in the code which it is possible to patch. This |
25 // class has a number of methods to emit the code which is patchable and the | 25 // class has a number of methods to emit the code which is patchable and the |
26 // method EmitPatchInfo to record a marker back to the patchable code. This | 26 // method EmitPatchInfo to record a marker back to the patchable code. This |
27 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit | 27 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit |
28 // immediate value is used) is the delta from the pc to the first instruction of | 28 // immediate value is used) is the delta from the pc to the first instruction of |
29 // the patchable code. | 29 // the patchable code. |
30 // See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it | 30 // See PatchInlinedSmiCode in ic-s390.cc for the code that patches it |
31 class JumpPatchSite BASE_EMBEDDED { | 31 class JumpPatchSite BASE_EMBEDDED { |
32 public: | 32 public: |
33 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { | 33 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { |
34 #ifdef DEBUG | 34 #ifdef DEBUG |
35 info_emitted_ = false; | 35 info_emitted_ = false; |
36 #endif | 36 #endif |
37 } | 37 } |
38 | 38 |
39 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); } | 39 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); } |
40 | 40 |
41 // When initially emitting this ensure that a jump is always generated to skip | 41 // When initially emitting this ensure that a jump is always generated to skip |
42 // the inlined smi code. | 42 // the inlined smi code. |
43 void EmitJumpIfNotSmi(Register reg, Label* target) { | 43 void EmitJumpIfNotSmi(Register reg, Label* target) { |
44 DCHECK(!patch_site_.is_bound() && !info_emitted_); | 44 DCHECK(!patch_site_.is_bound() && !info_emitted_); |
45 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
46 __ bind(&patch_site_); | 45 __ bind(&patch_site_); |
47 __ cmp(reg, reg, cr0); | 46 __ CmpP(reg, reg); |
48 __ beq(target, cr0); // Always taken before patched. | 47 // Emit the Nop to make bigger place for patching on 31-bit |
48 // as the TestIfSmi sequence uses 4-byte TMLL | |
49 #ifndef V8_TARGET_ARCH_S390X | |
50 __ nop(); | |
51 #endif | |
52 __ beq(target); // Always taken before patched. | |
49 } | 53 } |
50 | 54 |
51 // When initially emitting this ensure that a jump is never generated to skip | 55 // When initially emitting this ensure that a jump is never generated to skip |
52 // the inlined smi code. | 56 // the inlined smi code. |
53 void EmitJumpIfSmi(Register reg, Label* target) { | 57 void EmitJumpIfSmi(Register reg, Label* target) { |
54 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
55 DCHECK(!patch_site_.is_bound() && !info_emitted_); | 58 DCHECK(!patch_site_.is_bound() && !info_emitted_); |
56 __ bind(&patch_site_); | 59 __ bind(&patch_site_); |
57 __ cmp(reg, reg, cr0); | 60 __ CmpP(reg, reg); |
58 __ bne(target, cr0); // Never taken before patched. | 61 // Emit the Nop to make bigger place for patching on 31-bit |
62 // as the TestIfSmi sequence uses 4-byte TMLL | |
63 #ifndef V8_TARGET_ARCH_S390X | |
64 __ nop(); | |
65 #endif | |
66 __ bne(target); // Never taken before patched. | |
59 } | 67 } |
60 | 68 |
61 void EmitPatchInfo() { | 69 void EmitPatchInfo() { |
62 if (patch_site_.is_bound()) { | 70 if (patch_site_.is_bound()) { |
63 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); | 71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); |
64 Register reg; | 72 DCHECK(is_int16(delta_to_patch_site)); |
65 // I believe this is using reg as the high bits of of the offset | 73 __ chi(r0, Operand(delta_to_patch_site)); |
66 reg.set_code(delta_to_patch_site / kOff16Mask); | |
67 __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask)); | |
68 #ifdef DEBUG | 74 #ifdef DEBUG |
69 info_emitted_ = true; | 75 info_emitted_ = true; |
70 #endif | 76 #endif |
71 } else { | 77 } else { |
72 __ nop(); // Signals no inlined code. | 78 __ nop(); |
79 __ nop(); | |
73 } | 80 } |
74 } | 81 } |
75 | 82 |
76 private: | 83 private: |
77 MacroAssembler* masm() { return masm_; } | 84 MacroAssembler* masm() { return masm_; } |
78 MacroAssembler* masm_; | 85 MacroAssembler* masm_; |
79 Label patch_site_; | 86 Label patch_site_; |
80 #ifdef DEBUG | 87 #ifdef DEBUG |
81 bool info_emitted_; | 88 bool info_emitted_; |
82 #endif | 89 #endif |
83 }; | 90 }; |
84 | 91 |
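For reference, a standalone sketch of the marker encoding described in the class comment above: the PPC side splits the pc delta across the register number and the 16-bit immediate of the cmpi (the "x * 0x0000ffff + yyy" formula), while the s390 side keeps the whole delta in the 16-bit immediate of "chi r0, #delta", which is what the new is_int16 DCHECK enforces. This assumes kOff16Mask == 0xffff; the names below are illustrative, not V8 API.

```cpp
// Standalone sketch of the patch-site marker encoding, assuming
// kOff16Mask == 0xffff (per the "x * 0x0000ffff + yyy" comment above).
// Struct and function names are illustrative, not V8 API.
#include <cassert>
#include <cstdio>

namespace {

constexpr int kOff16Mask = 0xffff;

// PPC-style marker: the delta is split between the register number (x)
// and the 16-bit immediate (yyy) of a "cmpi rx, #yyy" instruction.
struct PpcMarker {
  int reg_code;   // x
  int immediate;  // yyy
};

PpcMarker EncodePpcMarker(int delta_to_patch_site) {
  return {delta_to_patch_site / kOff16Mask, delta_to_patch_site % kOff16Mask};
}

int DecodePpcMarker(const PpcMarker& m) {
  return m.reg_code * kOff16Mask + m.immediate;
}

// S390-style marker: the whole delta goes into the signed 16-bit immediate
// of "chi r0, #delta", so it must fit is_int16().
bool FitsS390Marker(int delta_to_patch_site) {
  return delta_to_patch_site >= -32768 && delta_to_patch_site <= 32767;
}

}  // namespace

int main() {
  const int delta = 70000;  // wider than 16 bits: PPC splits it, s390 cannot
  const PpcMarker marker = EncodePpcMarker(delta);
  assert(DecodePpcMarker(marker) == delta);  // round-trips through x / yyy
  assert(!FitsS390Marker(delta));            // would trip the s390 DCHECK
  assert(FitsS390Marker(124));               // a typical small delta is fine
  std::printf("x=%d yyy=%d\n", marker.reg_code, marker.immediate);
  return 0;
}
```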
85 | |
86 // Generate code for a JS function. On entry to the function the receiver | 92 // Generate code for a JS function. On entry to the function the receiver |
87 // and arguments have been pushed on the stack left to right. The actual | 93 // and arguments have been pushed on the stack left to right. The actual |
88 // argument count matches the formal parameter count expected by the | 94 // argument count matches the formal parameter count expected by the |
89 // function. | 95 // function. |
90 // | 96 // |
91 // The live registers are: | 97 // The live registers are: |
92 // o r4: the JS function object being called (i.e., ourselves) | 98 // o r3: the JS function object being called (i.e., ourselves) |
93 // o r6: the new target value | 99 // o r5: the new target value |
94 // o cp: our context | 100 // o cp: our context |
95 // o fp: our caller's frame pointer (aka r31) | 101 // o fp: our caller's frame pointer |
96 // o sp: stack pointer | 102 // o sp: stack pointer |
97 // o lr: return address | 103 // o lr: return address |
98 // o ip: our own function entry (required by the prologue) | 104 // o ip: our own function entry (required by the prologue) |
99 // | 105 // |
100 // The function builds a JS frame. Please see JavaScriptFrameConstants in | 106 // The function builds a JS frame. Please see JavaScriptFrameConstants in |
101 // frames-ppc.h for its layout. | 107 // frames-s390.h for its layout. |
102 void FullCodeGenerator::Generate() { | 108 void FullCodeGenerator::Generate() { |
103 CompilationInfo* info = info_; | 109 CompilationInfo* info = info_; |
104 profiling_counter_ = isolate()->factory()->NewCell( | 110 profiling_counter_ = isolate()->factory()->NewCell( |
105 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); | 111 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); |
106 SetFunctionPosition(literal()); | 112 SetFunctionPosition(literal()); |
107 Comment cmnt(masm_, "[ function compiled by full code generator"); | 113 Comment cmnt(masm_, "[ function compiled by full code generator"); |
108 | 114 |
109 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 115 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
110 | 116 |
111 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) { | 117 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) { |
112 int receiver_offset = info->scope()->num_parameters() * kPointerSize; | 118 int receiver_offset = info->scope()->num_parameters() * kPointerSize; |
113 __ LoadP(r5, MemOperand(sp, receiver_offset), r0); | 119 __ LoadP(r4, MemOperand(sp, receiver_offset), r0); |
114 __ AssertNotSmi(r5); | 120 __ AssertNotSmi(r4); |
115 __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE); | 121 __ CompareObjectType(r4, r4, no_reg, FIRST_JS_RECEIVER_TYPE); |
116 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver); | 122 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver); |
117 } | 123 } |
118 | 124 |
119 // Open a frame scope to indicate that there is a frame on the stack. The | 125 // Open a frame scope to indicate that there is a frame on the stack. The |
120 // MANUAL indicates that the scope shouldn't actually generate code to set up | 126 // MANUAL indicates that the scope shouldn't actually generate code to set up |
121 // the frame (that is done below). | 127 // the frame (that is done below). |
122 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 128 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
123 int prologue_offset = masm_->pc_offset(); | 129 int prologue_offset = masm_->pc_offset(); |
124 | 130 |
125 if (prologue_offset) { | |
126 // Prologue logic requires it's starting address in ip and the | |
127 // corresponding offset from the function entry. | |
128 prologue_offset += Instruction::kInstrSize; | |
129 __ addi(ip, ip, Operand(prologue_offset)); | |
130 } | |
131 info->set_prologue_offset(prologue_offset); | 131 info->set_prologue_offset(prologue_offset); |
132 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset); | 132 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset); |
133 | 133 |
134 { | 134 { |
135 Comment cmnt(masm_, "[ Allocate locals"); | 135 Comment cmnt(masm_, "[ Allocate locals"); |
136 int locals_count = info->scope()->num_stack_slots(); | 136 int locals_count = info->scope()->num_stack_slots(); |
137 // Generators allocate locals, if any, in context slots. | 137 // Generators allocate locals, if any, in context slots. |
138 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0); | 138 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0); |
139 OperandStackDepthIncrement(locals_count); | 139 OperandStackDepthIncrement(locals_count); |
140 if (locals_count > 0) { | 140 if (locals_count > 0) { |
141 if (locals_count >= 128) { | 141 if (locals_count >= 128) { |
142 Label ok; | 142 Label ok; |
143 __ Add(ip, sp, -(locals_count * kPointerSize), r0); | 143 __ AddP(ip, sp, Operand(-(locals_count * kPointerSize))); |
144 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); | 144 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); |
145 __ cmpl(ip, r5); | 145 __ CmpLogicalP(ip, r5); |
146 __ bc_short(ge, &ok); | 146 __ bge(&ok, Label::kNear); |
147 __ CallRuntime(Runtime::kThrowStackOverflow); | 147 __ CallRuntime(Runtime::kThrowStackOverflow); |
148 __ bind(&ok); | 148 __ bind(&ok); |
149 } | 149 } |
150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
151 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; | 151 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; |
152 if (locals_count >= kMaxPushes) { | 152 if (locals_count >= kMaxPushes) { |
153 int loop_iterations = locals_count / kMaxPushes; | 153 int loop_iterations = locals_count / kMaxPushes; |
154 __ mov(r5, Operand(loop_iterations)); | 154 __ mov(r4, Operand(loop_iterations)); |
155 __ mtctr(r5); | |
156 Label loop_header; | 155 Label loop_header; |
157 __ bind(&loop_header); | 156 __ bind(&loop_header); |
158 // Do pushes. | 157 // Do pushes. |
158 // TODO(joransiu): Use MVC for better performance | |
159 __ lay(sp, MemOperand(sp, -kMaxPushes * kPointerSize)); | |
159 for (int i = 0; i < kMaxPushes; i++) { | 160 for (int i = 0; i < kMaxPushes; i++) { |
160 __ push(ip); | 161 __ StoreP(ip, MemOperand(sp, i * kPointerSize)); |
161 } | 162 } |
162 // Continue loop if not done. | 163 // Continue loop if not done. |
163 __ bdnz(&loop_header); | 164 __ BranchOnCount(r4, &loop_header); |
164 } | 165 } |
165 int remaining = locals_count % kMaxPushes; | 166 int remaining = locals_count % kMaxPushes; |
166 // Emit the remaining pushes. | 167 // Emit the remaining pushes. |
167 for (int i = 0; i < remaining; i++) { | 168 // TODO(joransiu): Use MVC for better performance |
168 __ push(ip); | 169 if (remaining > 0) { |
170 __ lay(sp, MemOperand(sp, -remaining * kPointerSize)); | |
171 for (int i = 0; i < remaining; i++) { | |
172 __ StoreP(ip, MemOperand(sp, i * kPointerSize)); | |
173 } | |
169 } | 174 } |
170 } | 175 } |
171 } | 176 } |
172 | 177 |
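A small sketch of the batching math in the "Allocate locals" block above: slots are initialized in groups of kMaxPushes (4 with --optimize-for-size, 32 otherwise), and the remainder is handled by a second straight-line run of stores. Assumes a 64-bit target (kPointerSize == 8); the helper below is illustrative only.

```cpp
// Sketch of the batching math used by the "Allocate locals" block above.
// Assumes a 64-bit target (kPointerSize == 8); names are illustrative.
#include <cstdio>

struct LocalsPlan {
  int loop_iterations;  // full batches of kMaxPushes stores
  int remaining;        // trailing stores emitted straight-line
  int stack_bytes;      // total stack space reserved for the locals
};

LocalsPlan PlanLocals(int locals_count, bool optimize_for_size) {
  const int kPointerSize = 8;
  const int kMaxPushes = optimize_for_size ? 4 : 32;
  return {locals_count / kMaxPushes, locals_count % kMaxPushes,
          locals_count * kPointerSize};
}

int main() {
  // 70 locals without --optimize-for-size: two 32-store loop iterations
  // plus 6 straight-line stores, 560 bytes of stack in total.
  const LocalsPlan plan = PlanLocals(70, /*optimize_for_size=*/false);
  std::printf("iterations=%d remaining=%d bytes=%d\n", plan.loop_iterations,
              plan.remaining, plan.stack_bytes);
  return 0;
}
```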
173 bool function_in_register_r4 = true; | 178 bool function_in_register_r3 = true; |
174 | 179 |
175 // Possibly allocate a local context. | 180 // Possibly allocate a local context. |
176 if (info->scope()->num_heap_slots() > 0) { | 181 if (info->scope()->num_heap_slots() > 0) { |
177 // Argument to NewContext is the function, which is still in r4. | 182 // Argument to NewContext is the function, which is still in r3. |
178 Comment cmnt(masm_, "[ Allocate context"); | 183 Comment cmnt(masm_, "[ Allocate context"); |
179 bool need_write_barrier = true; | 184 bool need_write_barrier = true; |
180 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 185 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
181 if (info->scope()->is_script_scope()) { | 186 if (info->scope()->is_script_scope()) { |
182 __ push(r4); | 187 __ push(r3); |
183 __ Push(info->scope()->GetScopeInfo(info->isolate())); | 188 __ Push(info->scope()->GetScopeInfo(info->isolate())); |
184 __ CallRuntime(Runtime::kNewScriptContext); | 189 __ CallRuntime(Runtime::kNewScriptContext); |
185 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG); | 190 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG); |
186 // The new target value is not used, clobbering is safe. | 191 // The new target value is not used, clobbering is safe. |
187 DCHECK_NULL(info->scope()->new_target_var()); | 192 DCHECK_NULL(info->scope()->new_target_var()); |
188 } else { | 193 } else { |
189 if (info->scope()->new_target_var() != nullptr) { | 194 if (info->scope()->new_target_var() != nullptr) { |
190 __ push(r6); // Preserve new target. | 195 __ push(r5); // Preserve new target. |
191 } | 196 } |
192 if (slots <= FastNewContextStub::kMaximumSlots) { | 197 if (slots <= FastNewContextStub::kMaximumSlots) { |
193 FastNewContextStub stub(isolate(), slots); | 198 FastNewContextStub stub(isolate(), slots); |
194 __ CallStub(&stub); | 199 __ CallStub(&stub); |
195 // Result of FastNewContextStub is always in new space. | 200 // Result of FastNewContextStub is always in new space. |
196 need_write_barrier = false; | 201 need_write_barrier = false; |
197 } else { | 202 } else { |
198 __ push(r4); | 203 __ push(r3); |
199 __ CallRuntime(Runtime::kNewFunctionContext); | 204 __ CallRuntime(Runtime::kNewFunctionContext); |
200 } | 205 } |
201 if (info->scope()->new_target_var() != nullptr) { | 206 if (info->scope()->new_target_var() != nullptr) { |
202 __ pop(r6); // Preserve new target. | 207 __ pop(r5); // Preserve new target. |
203 } | 208 } |
204 } | 209 } |
205 function_in_register_r4 = false; | 210 function_in_register_r3 = false; |
206 // Context is returned in r3. It replaces the context passed to us. | 211 // Context is returned in r2. It replaces the context passed to us. |
207 // It's saved in the stack and kept live in cp. | 212 // It's saved in the stack and kept live in cp. |
208 __ mr(cp, r3); | 213 __ LoadRR(cp, r2); |
209 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 214 __ StoreP(r2, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
210 // Copy any necessary parameters into the context. | 215 // Copy any necessary parameters into the context. |
211 int num_parameters = info->scope()->num_parameters(); | 216 int num_parameters = info->scope()->num_parameters(); |
212 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0; | 217 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0; |
213 for (int i = first_parameter; i < num_parameters; i++) { | 218 for (int i = first_parameter; i < num_parameters; i++) { |
214 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i); | 219 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i); |
215 if (var->IsContextSlot()) { | 220 if (var->IsContextSlot()) { |
216 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 221 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
217 (num_parameters - 1 - i) * kPointerSize; | 222 (num_parameters - 1 - i) * kPointerSize; |
218 // Load parameter from stack. | 223 // Load parameter from stack. |
219 __ LoadP(r3, MemOperand(fp, parameter_offset), r0); | 224 __ LoadP(r2, MemOperand(fp, parameter_offset), r0); |
220 // Store it in the context. | 225 // Store it in the context. |
221 MemOperand target = ContextMemOperand(cp, var->index()); | 226 MemOperand target = ContextMemOperand(cp, var->index()); |
222 __ StoreP(r3, target, r0); | 227 __ StoreP(r2, target); |
223 | 228 |
224 // Update the write barrier. | 229 // Update the write barrier. |
225 if (need_write_barrier) { | 230 if (need_write_barrier) { |
226 __ RecordWriteContextSlot(cp, target.offset(), r3, r5, | 231 __ RecordWriteContextSlot(cp, target.offset(), r2, r4, |
227 kLRHasBeenSaved, kDontSaveFPRegs); | 232 kLRHasBeenSaved, kDontSaveFPRegs); |
228 } else if (FLAG_debug_code) { | 233 } else if (FLAG_debug_code) { |
229 Label done; | 234 Label done; |
230 __ JumpIfInNewSpace(cp, r3, &done); | 235 __ JumpIfInNewSpace(cp, r2, &done); |
231 __ Abort(kExpectedNewSpaceObject); | 236 __ Abort(kExpectedNewSpaceObject); |
232 __ bind(&done); | 237 __ bind(&done); |
233 } | 238 } |
234 } | 239 } |
235 } | 240 } |
236 } | 241 } |
237 | 242 |
238 // Register holding this function and new target are both trashed in case we | 243 // Register holding this function and new target are both trashed in case we |
239 // bailout here. But since that can happen only when new target is not used | 244 // bailout here. But since that can happen only when new target is not used |
240 // and we allocate a context, the value of |function_in_register| is correct. | 245 // and we allocate a context, the value of |function_in_register| is correct. |
241 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS); | 246 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS); |
242 | 247 |
243 // Possibly set up a local binding to the this function which is used in | 248 // Possibly set up a local binding to the this function which is used in |
244 // derived constructors with super calls. | 249 // derived constructors with super calls. |
245 Variable* this_function_var = scope()->this_function_var(); | 250 Variable* this_function_var = scope()->this_function_var(); |
246 if (this_function_var != nullptr) { | 251 if (this_function_var != nullptr) { |
247 Comment cmnt(masm_, "[ This function"); | 252 Comment cmnt(masm_, "[ This function"); |
248 if (!function_in_register_r4) { | 253 if (!function_in_register_r3) { |
249 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 254 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
250 // The write barrier clobbers register again, keep it marked as such. | 255 // The write barrier clobbers register again, keep it marked as such. |
251 } | 256 } |
252 SetVar(this_function_var, r4, r3, r5); | 257 SetVar(this_function_var, r3, r2, r4); |
253 } | 258 } |
254 | 259 |
255 // Possibly set up a local binding to the new target value. | 260 // Possibly set up a local binding to the new target value. |
256 Variable* new_target_var = scope()->new_target_var(); | 261 Variable* new_target_var = scope()->new_target_var(); |
257 if (new_target_var != nullptr) { | 262 if (new_target_var != nullptr) { |
258 Comment cmnt(masm_, "[ new.target"); | 263 Comment cmnt(masm_, "[ new.target"); |
259 SetVar(new_target_var, r6, r3, r5); | 264 SetVar(new_target_var, r5, r2, r4); |
260 } | 265 } |
261 | 266 |
262 // Possibly allocate RestParameters | 267 // Possibly allocate RestParameters |
263 int rest_index; | 268 int rest_index; |
264 Variable* rest_param = scope()->rest_parameter(&rest_index); | 269 Variable* rest_param = scope()->rest_parameter(&rest_index); |
265 if (rest_param) { | 270 if (rest_param) { |
266 Comment cmnt(masm_, "[ Allocate rest parameter array"); | 271 Comment cmnt(masm_, "[ Allocate rest parameter array"); |
267 if (!function_in_register_r4) { | 272 |
268 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 273 if (!function_in_register_r3) { |
274 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
269 } | 275 } |
270 FastNewRestParameterStub stub(isolate()); | 276 FastNewRestParameterStub stub(isolate()); |
271 __ CallStub(&stub); | 277 __ CallStub(&stub); |
272 function_in_register_r4 = false; | 278 |
273 SetVar(rest_param, r3, r4, r5); | 279 function_in_register_r3 = false; |
280 SetVar(rest_param, r2, r3, r4); | |
274 } | 281 } |
275 | 282 |
276 Variable* arguments = scope()->arguments(); | 283 Variable* arguments = scope()->arguments(); |
277 if (arguments != NULL) { | 284 if (arguments != NULL) { |
278 // Function uses arguments object. | 285 // Function uses arguments object. |
279 Comment cmnt(masm_, "[ Allocate arguments object"); | 286 Comment cmnt(masm_, "[ Allocate arguments object"); |
280 if (!function_in_register_r4) { | 287 if (!function_in_register_r3) { |
281 // Load this again, if it's used by the local context below. | 288 // Load this again, if it's used by the local context below. |
282 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 289 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
283 } | 290 } |
284 if (is_strict(language_mode()) || !has_simple_parameters()) { | 291 if (is_strict(language_mode()) || !has_simple_parameters()) { |
285 FastNewStrictArgumentsStub stub(isolate()); | 292 FastNewStrictArgumentsStub stub(isolate()); |
286 __ CallStub(&stub); | 293 __ CallStub(&stub); |
287 } else if (literal()->has_duplicate_parameters()) { | 294 } else if (literal()->has_duplicate_parameters()) { |
288 __ Push(r4); | 295 __ Push(r3); |
289 __ CallRuntime(Runtime::kNewSloppyArguments_Generic); | 296 __ CallRuntime(Runtime::kNewSloppyArguments_Generic); |
290 } else { | 297 } else { |
291 FastNewSloppyArgumentsStub stub(isolate()); | 298 FastNewSloppyArgumentsStub stub(isolate()); |
292 __ CallStub(&stub); | 299 __ CallStub(&stub); |
293 } | 300 } |
294 | 301 |
295 SetVar(arguments, r3, r4, r5); | 302 SetVar(arguments, r2, r3, r4); |
296 } | 303 } |
297 | 304 |
298 if (FLAG_trace) { | 305 if (FLAG_trace) { |
299 __ CallRuntime(Runtime::kTraceEnter); | 306 __ CallRuntime(Runtime::kTraceEnter); |
300 } | 307 } |
301 | 308 |
302 // Visit the declarations and body unless there is an illegal | 309 // Visit the declarations and body unless there is an illegal |
303 // redeclaration. | 310 // redeclaration. |
304 if (scope()->HasIllegalRedeclaration()) { | 311 if (scope()->HasIllegalRedeclaration()) { |
305 EmitIllegalRedeclaration(); | 312 Comment cmnt(masm_, "[ Declarations"); |
313 VisitForEffect(scope()->GetIllegalRedeclaration()); | |
314 | |
306 } else { | 315 } else { |
307 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); | 316 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); |
308 { | 317 { |
309 Comment cmnt(masm_, "[ Declarations"); | 318 Comment cmnt(masm_, "[ Declarations"); |
310 VisitDeclarations(scope()->declarations()); | 319 VisitDeclarations(scope()->declarations()); |
311 } | 320 } |
312 | 321 |
313 // Assert that the declarations do not use ICs. Otherwise the debugger | 322 // Assert that the declarations do not use ICs. Otherwise the debugger |
314 // won't be able to redirect a PC at an IC to the correct IC in newly | 323 // won't be able to redirect a PC at an IC to the correct IC in newly |
315 // recompiled code. | 324 // recompiled code. |
316 DCHECK_EQ(0, ic_total_count_); | 325 DCHECK_EQ(0, ic_total_count_); |
317 | 326 |
318 { | 327 { |
319 Comment cmnt(masm_, "[ Stack check"); | 328 Comment cmnt(masm_, "[ Stack check"); |
320 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); | 329 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); |
321 Label ok; | 330 Label ok; |
322 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 331 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
323 __ cmpl(sp, ip); | 332 __ CmpLogicalP(sp, ip); |
324 __ bc_short(ge, &ok); | 333 __ bge(&ok, Label::kNear); |
325 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); | 334 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); |
326 __ bind(&ok); | 335 __ bind(&ok); |
327 } | 336 } |
328 | 337 |
329 { | 338 { |
330 Comment cmnt(masm_, "[ Body"); | 339 Comment cmnt(masm_, "[ Body"); |
331 DCHECK(loop_depth() == 0); | 340 DCHECK(loop_depth() == 0); |
332 VisitStatements(literal()->body()); | 341 VisitStatements(literal()->body()); |
333 DCHECK(loop_depth() == 0); | 342 DCHECK(loop_depth() == 0); |
334 } | 343 } |
335 } | 344 } |
336 | 345 |
337 // Always emit a 'return undefined' in case control fell off the end of | 346 // Always emit a 'return undefined' in case control fell off the end of |
338 // the body. | 347 // the body. |
339 { | 348 { |
340 Comment cmnt(masm_, "[ return <undefined>;"); | 349 Comment cmnt(masm_, "[ return <undefined>;"); |
341 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 350 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
342 } | 351 } |
343 EmitReturnSequence(); | 352 EmitReturnSequence(); |
353 } | |
344 | 354 |
345 if (HasStackOverflow()) { | 355 void FullCodeGenerator::ClearAccumulator() { |
346 masm_->AbortConstantPoolBuilding(); | 356 __ LoadSmiLiteral(r2, Smi::FromInt(0)); |
357 } | |
358 | |
359 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | |
360 __ mov(r4, Operand(profiling_counter_)); | |
361 intptr_t smi_delta = reinterpret_cast<intptr_t>(Smi::FromInt(delta)); | |
362 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT) && is_int8(-smi_delta)) { | |
363 __ AddP(FieldMemOperand(r4, Cell::kValueOffset), Operand(-smi_delta)); | |
364 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset)); | |
365 } else { | |
366 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset)); | |
367 __ SubSmiLiteral(r5, r5, Smi::FromInt(delta), r0); | |
368 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset)); | |
347 } | 369 } |
348 } | 370 } |
349 | 371 |
350 | |
351 void FullCodeGenerator::ClearAccumulator() { | |
352 __ LoadSmiLiteral(r3, Smi::FromInt(0)); | |
353 } | |
354 | |
355 | |
356 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | |
357 __ mov(r5, Operand(profiling_counter_)); | |
358 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset)); | |
359 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0); | |
360 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0); | |
361 } | |
362 | |
363 | |
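A sketch of the fast-path guard in the new EmitProfilingCounterDecrement: the counter cell holds a Smi, so the decrement can be folded into a single add-immediate-to-memory only when GENERAL_INSTR_EXT is available and the tagged delta fits a signed 8-bit immediate. Assuming the usual V8 Smi layout (value << 1 on 31-bit targets, value << 32 on 64-bit targets), that suggests the fast path mostly applies to 31-bit builds.

```cpp
// Sketch of the fast-path guard in EmitProfilingCounterDecrement above.
// Assumption: standard V8 Smi layout (value << 1 on 31-bit targets,
// value << 32 on 64-bit targets); helper names are illustrative.
#include <cstdint>
#include <cstdio>

int64_t SmiFromInt(int value, bool is_64bit) {
  return is_64bit ? (static_cast<int64_t>(value) << 32)
                  : (static_cast<int64_t>(value) << 1);
}

bool IsInt8(int64_t value) { return value >= -128 && value <= 127; }

int main() {
  // On 31-bit, a weight of 12 tags to 24 and -24 fits an 8-bit immediate,
  // so the single add-to-memory instruction can be used.
  std::printf("31-bit, delta 12, fast path: %d\n",
              IsInt8(-SmiFromInt(12, /*is_64bit=*/false)) ? 1 : 0);
  // On 64-bit the tagged value is 12 << 32, far outside int8, so the code
  // falls back to the explicit load / subtract / store sequence.
  std::printf("64-bit, delta 12, fast path: %d\n",
              IsInt8(-SmiFromInt(12, /*is_64bit=*/true)) ? 1 : 0);
  return 0;
}
```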
364 void FullCodeGenerator::EmitProfilingCounterReset() { | 372 void FullCodeGenerator::EmitProfilingCounterReset() { |
365 int reset_value = FLAG_interrupt_budget; | 373 int reset_value = FLAG_interrupt_budget; |
366 __ mov(r5, Operand(profiling_counter_)); | 374 __ mov(r4, Operand(profiling_counter_)); |
367 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value)); | 375 __ LoadSmiLiteral(r5, Smi::FromInt(reset_value)); |
368 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0); | 376 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset)); |
369 } | 377 } |
370 | 378 |
371 | |
372 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 379 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
373 Label* back_edge_target) { | 380 Label* back_edge_target) { |
374 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 381 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
375 Label ok; | 382 Label ok; |
376 | 383 |
377 DCHECK(back_edge_target->is_bound()); | 384 DCHECK(back_edge_target->is_bound()); |
378 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) + | 385 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) + |
379 kCodeSizeMultiplier / 2; | 386 kCodeSizeMultiplier / 2; |
380 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); | 387 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); |
381 EmitProfilingCounterDecrement(weight); | 388 EmitProfilingCounterDecrement(weight); |
382 { | 389 { |
383 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
384 Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_); | |
385 // BackEdgeTable::PatchAt manipulates this sequence. | 390 // BackEdgeTable::PatchAt manipulates this sequence. |
386 __ cmpi(r6, Operand::Zero()); | 391 __ bge(&ok, Label::kNear); |
387 __ bc_short(ge, &ok); | |
388 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | 392 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); |
389 | 393 |
390 // Record a mapping of this PC offset to the OSR id. This is used to find | 394 // Record a mapping of this PC offset to the OSR id. This is used to find |
391 // the AST id from the unoptimized code in order to use it as a key into | 395 // the AST id from the unoptimized code in order to use it as a key into |
392 // the deoptimization input data found in the optimized code. | 396 // the deoptimization input data found in the optimized code. |
393 RecordBackEdge(stmt->OsrEntryId()); | 397 RecordBackEdge(stmt->OsrEntryId()); |
394 } | 398 } |
395 EmitProfilingCounterReset(); | 399 EmitProfilingCounterReset(); |
396 | 400 |
397 __ bind(&ok); | 401 __ bind(&ok); |
398 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 402 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
399 // Record a mapping of the OSR id to this PC. This is used if the OSR | 403 // Record a mapping of the OSR id to this PC. This is used if the OSR |
400 // entry becomes the target of a bailout. We don't expect it to be, but | 404 // entry becomes the target of a bailout. We don't expect it to be, but |
401 // we want it to work if it is. | 405 // we want it to work if it is. |
402 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | 406 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
403 } | 407 } |
404 | 408 |
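For context, the weight fed into EmitProfilingCounterDecrement above is derived from the code distance since the back-edge target. A sketch of that computation follows; the two constants are placeholders for illustration (the real, architecture-specific values live in full-codegen.h and are not part of this hunk).

```cpp
// Sketch of the back-edge weight computation used above. The constants are
// placeholders; the real, architecture-specific values come from
// full-codegen.h and are not part of this hunk.
#include <algorithm>
#include <cstdio>

int BackEdgeWeight(int distance_in_bytes) {
  const int kCodeSizeMultiplier = 149;  // assumed for illustration
  const int kMaxBackEdgeWeight = 127;   // assumed for illustration
  // Adding half a multiplier rounds to nearest instead of truncating.
  const int distance = distance_in_bytes + kCodeSizeMultiplier / 2;
  return std::min(kMaxBackEdgeWeight,
                  std::max(1, distance / kCodeSizeMultiplier));
}

int main() {
  std::printf("tiny loop weight:  %d\n", BackEdgeWeight(8));      // -> 1
  std::printf("large loop weight: %d\n", BackEdgeWeight(40000));  // capped
  return 0;
}
```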
405 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence( | 409 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence( |
406 bool is_tail_call) { | 410 bool is_tail_call) { |
407 // Pretend that the exit is a backwards jump to the entry. | 411 // Pretend that the exit is a backwards jump to the entry. |
408 int weight = 1; | 412 int weight = 1; |
409 if (info_->ShouldSelfOptimize()) { | 413 if (info_->ShouldSelfOptimize()) { |
410 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 414 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
411 } else { | 415 } else { |
412 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; | 416 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; |
413 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); | 417 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); |
414 } | 418 } |
415 EmitProfilingCounterDecrement(weight); | 419 EmitProfilingCounterDecrement(weight); |
416 Label ok; | 420 Label ok; |
417 __ cmpi(r6, Operand::Zero()); | 421 __ CmpP(r5, Operand::Zero()); |
418 __ bge(&ok); | 422 __ bge(&ok); |
419 // Don't need to save result register if we are going to do a tail call. | 423 // Don't need to save result register if we are going to do a tail call. |
420 if (!is_tail_call) { | 424 if (!is_tail_call) { |
421 __ push(r3); | 425 __ push(r2); |
422 } | 426 } |
423 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | 427 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); |
424 if (!is_tail_call) { | 428 if (!is_tail_call) { |
425 __ pop(r3); | 429 __ pop(r2); |
426 } | 430 } |
427 EmitProfilingCounterReset(); | 431 EmitProfilingCounterReset(); |
428 __ bind(&ok); | 432 __ bind(&ok); |
429 } | 433 } |
430 | 434 |
431 void FullCodeGenerator::EmitReturnSequence() { | 435 void FullCodeGenerator::EmitReturnSequence() { |
432 Comment cmnt(masm_, "[ Return sequence"); | 436 Comment cmnt(masm_, "[ Return sequence"); |
433 if (return_label_.is_bound()) { | 437 if (return_label_.is_bound()) { |
434 __ b(&return_label_); | 438 __ b(&return_label_); |
435 } else { | 439 } else { |
436 __ bind(&return_label_); | 440 __ bind(&return_label_); |
437 if (FLAG_trace) { | 441 if (FLAG_trace) { |
438 // Push the return value on the stack as the parameter. | 442 // Push the return value on the stack as the parameter. |
439 // Runtime::TraceExit returns its parameter in r3 | 443 // Runtime::TraceExit returns its parameter in r2 |
440 __ push(r3); | 444 __ push(r2); |
441 __ CallRuntime(Runtime::kTraceExit); | 445 __ CallRuntime(Runtime::kTraceExit); |
442 } | 446 } |
443 EmitProfilingCounterHandlingForReturnSequence(false); | 447 EmitProfilingCounterHandlingForReturnSequence(false); |
444 | 448 |
445 // Make sure that the constant pool is not emitted inside of the return | 449 // Make sure that the constant pool is not emitted inside of the return |
446 // sequence. | 450 // sequence. |
447 { | 451 { |
448 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 452 // Here we use masm_-> instead of the __ macro to avoid the code coverage |
453 // tool from instrumenting as we rely on the code size here. | |
449 int32_t arg_count = info_->scope()->num_parameters() + 1; | 454 int32_t arg_count = info_->scope()->num_parameters() + 1; |
450 int32_t sp_delta = arg_count * kPointerSize; | 455 int32_t sp_delta = arg_count * kPointerSize; |
451 SetReturnPosition(literal()); | 456 SetReturnPosition(literal()); |
452 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta); | 457 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta); |
453 __ blr(); | 458 |
459 __ Ret(); | |
454 } | 460 } |
455 } | 461 } |
456 } | 462 } |
457 | 463 |
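A trivial sketch of the sp_delta applied by LeaveFrame in the return sequence above: the callee drops its formal parameters plus the receiver, hence num_parameters() + 1. Assumes a 64-bit target with kPointerSize == 8.

```cpp
// Sketch of the sp_delta computed in the return sequence above: the callee
// pops its formal parameters plus the receiver. Assumes kPointerSize == 8.
#include <cstdio>

int ReturnSpDelta(int num_parameters) {
  const int kPointerSize = 8;
  const int arg_count = num_parameters + 1;  // formals + receiver
  return arg_count * kPointerSize;
}

int main() {
  // A two-parameter function drops 24 bytes of arguments on return.
  std::printf("sp_delta = %d bytes\n", ReturnSpDelta(2));
  return 0;
}
```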
458 | |
459 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { | 464 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { |
460 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 465 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
461 codegen()->GetVar(result_register(), var); | 466 codegen()->GetVar(result_register(), var); |
462 codegen()->PushOperand(result_register()); | 467 codegen()->PushOperand(result_register()); |
463 } | 468 } |
464 | 469 |
465 | |
466 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {} | 470 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {} |
467 | 471 |
468 | |
469 void FullCodeGenerator::AccumulatorValueContext::Plug( | 472 void FullCodeGenerator::AccumulatorValueContext::Plug( |
470 Heap::RootListIndex index) const { | 473 Heap::RootListIndex index) const { |
471 __ LoadRoot(result_register(), index); | 474 __ LoadRoot(result_register(), index); |
472 } | 475 } |
473 | 476 |
474 | |
475 void FullCodeGenerator::StackValueContext::Plug( | 477 void FullCodeGenerator::StackValueContext::Plug( |
476 Heap::RootListIndex index) const { | 478 Heap::RootListIndex index) const { |
477 __ LoadRoot(result_register(), index); | 479 __ LoadRoot(result_register(), index); |
478 codegen()->PushOperand(result_register()); | 480 codegen()->PushOperand(result_register()); |
479 } | 481 } |
480 | 482 |
481 | |
482 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { | 483 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { |
483 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, | 484 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, |
484 false_label_); | 485 false_label_); |
485 if (index == Heap::kUndefinedValueRootIndex || | 486 if (index == Heap::kUndefinedValueRootIndex || |
486 index == Heap::kNullValueRootIndex || | 487 index == Heap::kNullValueRootIndex || |
487 index == Heap::kFalseValueRootIndex) { | 488 index == Heap::kFalseValueRootIndex) { |
488 if (false_label_ != fall_through_) __ b(false_label_); | 489 if (false_label_ != fall_through_) __ b(false_label_); |
489 } else if (index == Heap::kTrueValueRootIndex) { | 490 } else if (index == Heap::kTrueValueRootIndex) { |
490 if (true_label_ != fall_through_) __ b(true_label_); | 491 if (true_label_ != fall_through_) __ b(true_label_); |
491 } else { | 492 } else { |
492 __ LoadRoot(result_register(), index); | 493 __ LoadRoot(result_register(), index); |
493 codegen()->DoTest(this); | 494 codegen()->DoTest(this); |
494 } | 495 } |
495 } | 496 } |
496 | 497 |
497 | |
498 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {} | 498 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {} |
499 | 499 |
500 | |
501 void FullCodeGenerator::AccumulatorValueContext::Plug( | 500 void FullCodeGenerator::AccumulatorValueContext::Plug( |
502 Handle<Object> lit) const { | 501 Handle<Object> lit) const { |
503 __ mov(result_register(), Operand(lit)); | 502 __ mov(result_register(), Operand(lit)); |
504 } | 503 } |
505 | 504 |
506 | |
507 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { | 505 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { |
508 // Immediates cannot be pushed directly. | 506 // Immediates cannot be pushed directly. |
509 __ mov(result_register(), Operand(lit)); | 507 __ mov(result_register(), Operand(lit)); |
510 codegen()->PushOperand(result_register()); | 508 codegen()->PushOperand(result_register()); |
511 } | 509 } |
512 | 510 |
513 | |
514 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { | 511 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { |
515 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, | 512 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, |
516 false_label_); | 513 false_label_); |
517 DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable()); | 514 DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject()); |
518 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { | 515 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { |
519 if (false_label_ != fall_through_) __ b(false_label_); | 516 if (false_label_ != fall_through_) __ b(false_label_); |
520 } else if (lit->IsTrue() || lit->IsJSObject()) { | 517 } else if (lit->IsTrue() || lit->IsJSObject()) { |
521 if (true_label_ != fall_through_) __ b(true_label_); | 518 if (true_label_ != fall_through_) __ b(true_label_); |
522 } else if (lit->IsString()) { | 519 } else if (lit->IsString()) { |
523 if (String::cast(*lit)->length() == 0) { | 520 if (String::cast(*lit)->length() == 0) { |
524 if (false_label_ != fall_through_) __ b(false_label_); | 521 if (false_label_ != fall_through_) __ b(false_label_); |
525 } else { | 522 } else { |
526 if (true_label_ != fall_through_) __ b(true_label_); | 523 if (true_label_ != fall_through_) __ b(true_label_); |
527 } | 524 } |
528 } else if (lit->IsSmi()) { | 525 } else if (lit->IsSmi()) { |
529 if (Smi::cast(*lit)->value() == 0) { | 526 if (Smi::cast(*lit)->value() == 0) { |
530 if (false_label_ != fall_through_) __ b(false_label_); | 527 if (false_label_ != fall_through_) __ b(false_label_); |
531 } else { | 528 } else { |
532 if (true_label_ != fall_through_) __ b(true_label_); | 529 if (true_label_ != fall_through_) __ b(true_label_); |
533 } | 530 } |
534 } else { | 531 } else { |
535 // For simplicity we always test the accumulator register. | 532 // For simplicity we always test the accumulator register. |
536 __ mov(result_register(), Operand(lit)); | 533 __ mov(result_register(), Operand(lit)); |
537 codegen()->DoTest(this); | 534 codegen()->DoTest(this); |
538 } | 535 } |
539 } | 536 } |
540 | 537 |
541 | |
542 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, | 538 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, |
543 Register reg) const { | 539 Register reg) const { |
544 DCHECK(count > 0); | 540 DCHECK(count > 0); |
545 if (count > 1) codegen()->DropOperands(count - 1); | 541 if (count > 1) codegen()->DropOperands(count - 1); |
546 __ StoreP(reg, MemOperand(sp, 0)); | 542 __ StoreP(reg, MemOperand(sp, 0)); |
547 } | 543 } |
548 | 544 |
549 | |
550 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, | 545 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, |
551 Label* materialize_false) const { | 546 Label* materialize_false) const { |
552 DCHECK(materialize_true == materialize_false); | 547 DCHECK(materialize_true == materialize_false); |
553 __ bind(materialize_true); | 548 __ bind(materialize_true); |
554 } | 549 } |
555 | 550 |
556 | |
557 void FullCodeGenerator::AccumulatorValueContext::Plug( | 551 void FullCodeGenerator::AccumulatorValueContext::Plug( |
558 Label* materialize_true, Label* materialize_false) const { | 552 Label* materialize_true, Label* materialize_false) const { |
559 Label done; | 553 Label done; |
560 __ bind(materialize_true); | 554 __ bind(materialize_true); |
561 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); | 555 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); |
562 __ b(&done); | 556 __ b(&done, Label::kNear); |
563 __ bind(materialize_false); | 557 __ bind(materialize_false); |
564 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); | 558 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); |
565 __ bind(&done); | 559 __ bind(&done); |
566 } | 560 } |
567 | 561 |
568 | |
569 void FullCodeGenerator::StackValueContext::Plug( | 562 void FullCodeGenerator::StackValueContext::Plug( |
570 Label* materialize_true, Label* materialize_false) const { | 563 Label* materialize_true, Label* materialize_false) const { |
571 Label done; | 564 Label done; |
572 __ bind(materialize_true); | 565 __ bind(materialize_true); |
573 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 566 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
574 __ b(&done); | 567 __ b(&done, Label::kNear); |
575 __ bind(materialize_false); | 568 __ bind(materialize_false); |
576 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | 569 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
577 __ bind(&done); | 570 __ bind(&done); |
578 codegen()->PushOperand(ip); | 571 codegen()->PushOperand(ip); |
579 } | 572 } |
580 | 573 |
581 | |
582 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, | 574 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, |
583 Label* materialize_false) const { | 575 Label* materialize_false) const { |
584 DCHECK(materialize_true == true_label_); | 576 DCHECK(materialize_true == true_label_); |
585 DCHECK(materialize_false == false_label_); | 577 DCHECK(materialize_false == false_label_); |
586 } | 578 } |
587 | 579 |
588 | |
589 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { | 580 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { |
590 Heap::RootListIndex value_root_index = | 581 Heap::RootListIndex value_root_index = |
591 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; | 582 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; |
592 __ LoadRoot(result_register(), value_root_index); | 583 __ LoadRoot(result_register(), value_root_index); |
593 } | 584 } |
594 | 585 |
595 | |
596 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { | 586 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { |
597 Heap::RootListIndex value_root_index = | 587 Heap::RootListIndex value_root_index = |
598 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; | 588 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; |
599 __ LoadRoot(ip, value_root_index); | 589 __ LoadRoot(ip, value_root_index); |
600 codegen()->PushOperand(ip); | 590 codegen()->PushOperand(ip); |
601 } | 591 } |
602 | 592 |
603 | |
604 void FullCodeGenerator::TestContext::Plug(bool flag) const { | 593 void FullCodeGenerator::TestContext::Plug(bool flag) const { |
605 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, | 594 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, |
606 false_label_); | 595 false_label_); |
607 if (flag) { | 596 if (flag) { |
608 if (true_label_ != fall_through_) __ b(true_label_); | 597 if (true_label_ != fall_through_) __ b(true_label_); |
609 } else { | 598 } else { |
610 if (false_label_ != fall_through_) __ b(false_label_); | 599 if (false_label_ != fall_through_) __ b(false_label_); |
611 } | 600 } |
612 } | 601 } |
613 | 602 |
614 | |
615 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true, | 603 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true, |
616 Label* if_false, Label* fall_through) { | 604 Label* if_false, Label* fall_through) { |
617 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate()); | 605 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); |
618 CallIC(ic, condition->test_id()); | 606 CallIC(ic, condition->test_id()); |
619 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex); | 607 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex); |
620 Split(eq, if_true, if_false, fall_through); | 608 Split(eq, if_true, if_false, fall_through); |
621 } | 609 } |
622 | 610 |
623 | |
624 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false, | 611 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false, |
625 Label* fall_through, CRegister cr) { | 612 Label* fall_through) { |
626 if (if_false == fall_through) { | 613 if (if_false == fall_through) { |
627 __ b(cond, if_true, cr); | 614 __ b(cond, if_true); |
628 } else if (if_true == fall_through) { | 615 } else if (if_true == fall_through) { |
629 __ b(NegateCondition(cond), if_false, cr); | 616 __ b(NegateCondition(cond), if_false); |
630 } else { | 617 } else { |
631 __ b(cond, if_true, cr); | 618 __ b(cond, if_true); |
632 __ b(if_false); | 619 __ b(if_false); |
633 } | 620 } |
634 } | 621 } |
635 | 622 |
636 | |
637 MemOperand FullCodeGenerator::StackOperand(Variable* var) { | 623 MemOperand FullCodeGenerator::StackOperand(Variable* var) { |
638 DCHECK(var->IsStackAllocated()); | 624 DCHECK(var->IsStackAllocated()); |
639 // Offset is negative because higher indexes are at lower addresses. | 625 // Offset is negative because higher indexes are at lower addresses. |
640 int offset = -var->index() * kPointerSize; | 626 int offset = -var->index() * kPointerSize; |
641 // Adjust by a (parameter or local) base offset. | 627 // Adjust by a (parameter or local) base offset. |
642 if (var->IsParameter()) { | 628 if (var->IsParameter()) { |
643 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; | 629 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; |
644 } else { | 630 } else { |
645 offset += JavaScriptFrameConstants::kLocal0Offset; | 631 offset += JavaScriptFrameConstants::kLocal0Offset; |
646 } | 632 } |
647 return MemOperand(fp, offset); | 633 return MemOperand(fp, offset); |
648 } | 634 } |
649 | 635 |
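A sketch of the fp-relative slot math in StackOperand above: locals sit below the frame at negative offsets, while parameters (plus the receiver, hence the + 1) sit above the caller's SP area. The frame constants below are assumptions for illustration; the real values come from frames-s390.h and are not shown in this hunk.

```cpp
// Sketch of the fp-relative slot math in StackOperand above. The frame
// constants are assumptions for illustration; the real values come from
// frames-s390.h and are not shown in this hunk.
#include <cstdio>

int StackSlotOffset(int var_index, bool is_parameter, int num_parameters) {
  const int kPointerSize = 8;                   // assumed 64-bit target
  const int kLocal0Offset = -3 * kPointerSize;  // assumed frame constant
  // Higher indexes live at lower addresses, hence the negative scaling.
  int offset = -var_index * kPointerSize;
  if (is_parameter) {
    // Parameters (and the receiver, hence the + 1) sit above the frame.
    offset += (num_parameters + 1) * kPointerSize;
  } else {
    offset += kLocal0Offset;
  }
  return offset;
}

int main() {
  // A function with two parameters: both parameters plus its first local.
  std::printf("param 0 at fp%+d\n", StackSlotOffset(0, true, 2));
  std::printf("param 1 at fp%+d\n", StackSlotOffset(1, true, 2));
  std::printf("local 0 at fp%+d\n", StackSlotOffset(0, false, 2));
  return 0;
}
```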
650 | |
651 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { | 636 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { |
652 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); | 637 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); |
653 if (var->IsContextSlot()) { | 638 if (var->IsContextSlot()) { |
654 int context_chain_length = scope()->ContextChainLength(var->scope()); | 639 int context_chain_length = scope()->ContextChainLength(var->scope()); |
655 __ LoadContext(scratch, context_chain_length); | 640 __ LoadContext(scratch, context_chain_length); |
656 return ContextMemOperand(scratch, var->index()); | 641 return ContextMemOperand(scratch, var->index()); |
657 } else { | 642 } else { |
658 return StackOperand(var); | 643 return StackOperand(var); |
659 } | 644 } |
660 } | 645 } |
661 | 646 |
662 | |
663 void FullCodeGenerator::GetVar(Register dest, Variable* var) { | 647 void FullCodeGenerator::GetVar(Register dest, Variable* var) { |
664 // Use destination as scratch. | 648 // Use destination as scratch. |
665 MemOperand location = VarOperand(var, dest); | 649 MemOperand location = VarOperand(var, dest); |
666 __ LoadP(dest, location, r0); | 650 __ LoadP(dest, location, r0); |
667 } | 651 } |
668 | 652 |
669 | |
670 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0, | 653 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0, |
671 Register scratch1) { | 654 Register scratch1) { |
672 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); | 655 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); |
673 DCHECK(!scratch0.is(src)); | 656 DCHECK(!scratch0.is(src)); |
674 DCHECK(!scratch0.is(scratch1)); | 657 DCHECK(!scratch0.is(scratch1)); |
675 DCHECK(!scratch1.is(src)); | 658 DCHECK(!scratch1.is(src)); |
676 MemOperand location = VarOperand(var, scratch0); | 659 MemOperand location = VarOperand(var, scratch0); |
677 __ StoreP(src, location, r0); | 660 __ StoreP(src, location); |
678 | 661 |
679 // Emit the write barrier code if the location is in the heap. | 662 // Emit the write barrier code if the location is in the heap. |
680 if (var->IsContextSlot()) { | 663 if (var->IsContextSlot()) { |
681 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1, | 664 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1, |
682 kLRHasBeenSaved, kDontSaveFPRegs); | 665 kLRHasBeenSaved, kDontSaveFPRegs); |
683 } | 666 } |
684 } | 667 } |
685 | 668 |
686 | |
687 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, | 669 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, |
688 bool should_normalize, | 670 bool should_normalize, |
689 Label* if_true, | 671 Label* if_true, |
690 Label* if_false) { | 672 Label* if_false) { |
691 // Only prepare for bailouts before splits if we're in a test | 673 // Only prepare for bailouts before splits if we're in a test |
692 // context. Otherwise, we let the Visit function deal with the | 674 // context. Otherwise, we let the Visit function deal with the |
693 // preparation to avoid preparing with the same AST id twice. | 675 // preparation to avoid preparing with the same AST id twice. |
694 if (!context()->IsTest()) return; | 676 if (!context()->IsTest()) return; |
695 | 677 |
696 Label skip; | 678 Label skip; |
697 if (should_normalize) __ b(&skip); | 679 if (should_normalize) __ b(&skip); |
698 PrepareForBailout(expr, TOS_REG); | 680 PrepareForBailout(expr, TOS_REG); |
699 if (should_normalize) { | 681 if (should_normalize) { |
700 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 682 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
701 __ cmp(r3, ip); | |
702 Split(eq, if_true, if_false, NULL); | 683 Split(eq, if_true, if_false, NULL); |
703 __ bind(&skip); | 684 __ bind(&skip); |
704 } | 685 } |
705 } | 686 } |
706 | 687 |
707 | |
708 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { | 688 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { |
709 // The variable in the declaration always resides in the current function | 689 // The variable in the declaration always resides in the current function |
710 // context. | 690 // context. |
711 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope())); | 691 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope())); |
712 if (FLAG_debug_code) { | 692 if (FLAG_debug_code) { |
713 // Check that we're not inside a with or catch context. | 693 // Check that we're not inside a with or catch context. |
714 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset)); | 694 __ LoadP(r3, FieldMemOperand(cp, HeapObject::kMapOffset)); |
715 __ CompareRoot(r4, Heap::kWithContextMapRootIndex); | 695 __ CompareRoot(r3, Heap::kWithContextMapRootIndex); |
716 __ Check(ne, kDeclarationInWithContext); | 696 __ Check(ne, kDeclarationInWithContext); |
717 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex); | 697 __ CompareRoot(r3, Heap::kCatchContextMapRootIndex); |
718 __ Check(ne, kDeclarationInCatchContext); | 698 __ Check(ne, kDeclarationInCatchContext); |
719 } | 699 } |
720 } | 700 } |
721 | 701 |
722 | |
723 void FullCodeGenerator::VisitVariableDeclaration( | 702 void FullCodeGenerator::VisitVariableDeclaration( |
724 VariableDeclaration* declaration) { | 703 VariableDeclaration* declaration) { |
725 // If it was not possible to allocate the variable at compile time, we | 704 // If it was not possible to allocate the variable at compile time, we |
726 // need to "declare" it at runtime to make sure it actually exists in the | 705 // need to "declare" it at runtime to make sure it actually exists in the |
727 // local context. | 706 // local context. |
728 VariableProxy* proxy = declaration->proxy(); | 707 VariableProxy* proxy = declaration->proxy(); |
729 VariableMode mode = declaration->mode(); | 708 VariableMode mode = declaration->mode(); |
730 Variable* variable = proxy->var(); | 709 Variable* variable = proxy->var(); |
731 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; | 710 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; |
732 switch (variable->location()) { | 711 switch (variable->location()) { |
(...skipping 13 matching lines...) | |
746 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 725 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
747 __ StoreP(ip, StackOperand(variable)); | 726 __ StoreP(ip, StackOperand(variable)); |
748 } | 727 } |
749 break; | 728 break; |
750 | 729 |
751 case VariableLocation::CONTEXT: | 730 case VariableLocation::CONTEXT: |
752 if (hole_init) { | 731 if (hole_init) { |
753 Comment cmnt(masm_, "[ VariableDeclaration"); | 732 Comment cmnt(masm_, "[ VariableDeclaration"); |
754 EmitDebugCheckDeclarationContext(variable); | 733 EmitDebugCheckDeclarationContext(variable); |
755 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 734 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
756 __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0); | 735 __ StoreP(ip, ContextMemOperand(cp, variable->index())); |
757 // No write barrier since the_hole_value is in old space. | 736 // No write barrier since the_hole_value is in old space. |
758 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 737 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
759 } | 738 } |
760 break; | 739 break; |
761 | 740 |
762 case VariableLocation::LOOKUP: { | 741 case VariableLocation::LOOKUP: { |
763 Comment cmnt(masm_, "[ VariableDeclaration"); | 742 Comment cmnt(masm_, "[ VariableDeclaration"); |
764 __ mov(r5, Operand(variable->name())); | 743 __ mov(r4, Operand(variable->name())); |
765 // Declaration nodes are always introduced in one of four modes. | 744 // Declaration nodes are always introduced in one of four modes. |
766 DCHECK(IsDeclaredVariableMode(mode)); | 745 DCHECK(IsDeclaredVariableMode(mode)); |
767 // Push initial value, if any. | 746 // Push initial value, if any. |
768 // Note: For variables we must not push an initial value (such as | 747 // Note: For variables we must not push an initial value (such as |
769 // 'undefined') because we may have a (legal) redeclaration and we | 748 // 'undefined') because we may have a (legal) redeclaration and we |
770 // must not destroy the current value. | 749 // must not destroy the current value. |
771 if (hole_init) { | 750 if (hole_init) { |
772 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); | 751 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); |
773 } else { | 752 } else { |
774 __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value. | 753 __ LoadSmiLiteral(r2, Smi::FromInt(0)); // Indicates no initial value. |
775 } | 754 } |
776 __ Push(r5, r3); | 755 __ Push(r4, r2); |
777 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes())); | 756 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes())); |
778 __ CallRuntime(Runtime::kDeclareLookupSlot); | 757 __ CallRuntime(Runtime::kDeclareLookupSlot); |
779 break; | 758 break; |
780 } | 759 } |
781 } | 760 } |
782 } | 761 } |
783 | 762 |
784 | |
785 void FullCodeGenerator::VisitFunctionDeclaration( | 763 void FullCodeGenerator::VisitFunctionDeclaration( |
786 FunctionDeclaration* declaration) { | 764 FunctionDeclaration* declaration) { |
787 VariableProxy* proxy = declaration->proxy(); | 765 VariableProxy* proxy = declaration->proxy(); |
788 Variable* variable = proxy->var(); | 766 Variable* variable = proxy->var(); |
789 switch (variable->location()) { | 767 switch (variable->location()) { |
790 case VariableLocation::GLOBAL: | 768 case VariableLocation::GLOBAL: |
791 case VariableLocation::UNALLOCATED: { | 769 case VariableLocation::UNALLOCATED: { |
792 globals_->Add(variable->name(), zone()); | 770 globals_->Add(variable->name(), zone()); |
793 Handle<SharedFunctionInfo> function = | 771 Handle<SharedFunctionInfo> function = |
794 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_); | 772 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_); |
795 // Check for stack-overflow exception. | 773 // Check for stack-overflow exception. |
796 if (function.is_null()) return SetStackOverflow(); | 774 if (function.is_null()) return SetStackOverflow(); |
797 globals_->Add(function, zone()); | 775 globals_->Add(function, zone()); |
798 break; | 776 break; |
799 } | 777 } |
800 | 778 |
801 case VariableLocation::PARAMETER: | 779 case VariableLocation::PARAMETER: |
802 case VariableLocation::LOCAL: { | 780 case VariableLocation::LOCAL: { |
803 Comment cmnt(masm_, "[ FunctionDeclaration"); | 781 Comment cmnt(masm_, "[ FunctionDeclaration"); |
804 VisitForAccumulatorValue(declaration->fun()); | 782 VisitForAccumulatorValue(declaration->fun()); |
805 __ StoreP(result_register(), StackOperand(variable)); | 783 __ StoreP(result_register(), StackOperand(variable)); |
806 break; | 784 break; |
807 } | 785 } |
808 | 786 |
809 case VariableLocation::CONTEXT: { | 787 case VariableLocation::CONTEXT: { |
810 Comment cmnt(masm_, "[ FunctionDeclaration"); | 788 Comment cmnt(masm_, "[ FunctionDeclaration"); |
811 EmitDebugCheckDeclarationContext(variable); | 789 EmitDebugCheckDeclarationContext(variable); |
812 VisitForAccumulatorValue(declaration->fun()); | 790 VisitForAccumulatorValue(declaration->fun()); |
813 __ StoreP(result_register(), ContextMemOperand(cp, variable->index()), | 791 __ StoreP(result_register(), ContextMemOperand(cp, variable->index())); |
814 r0); | |
815 int offset = Context::SlotOffset(variable->index()); | 792 int offset = Context::SlotOffset(variable->index()); |
816 // We know that we have written a function, which is not a smi. | 793 // We know that we have written a function, which is not a smi. |
817 __ RecordWriteContextSlot(cp, offset, result_register(), r5, | 794 __ RecordWriteContextSlot(cp, offset, result_register(), r4, |
818 kLRHasBeenSaved, kDontSaveFPRegs, | 795 kLRHasBeenSaved, kDontSaveFPRegs, |
819 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 796 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
820 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 797 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
821 break; | 798 break; |
822 } | 799 } |
823 | 800 |
824 case VariableLocation::LOOKUP: { | 801 case VariableLocation::LOOKUP: { |
825 Comment cmnt(masm_, "[ FunctionDeclaration"); | 802 Comment cmnt(masm_, "[ FunctionDeclaration"); |
826 __ mov(r5, Operand(variable->name())); | 803 __ mov(r4, Operand(variable->name())); |
827 PushOperand(r5); | 804 PushOperand(r4); |
828 // Push initial value for function declaration. | 805 // Push initial value for function declaration. |
829 VisitForStackValue(declaration->fun()); | 806 VisitForStackValue(declaration->fun()); |
830 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes())); | 807 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes())); |
831 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot); | 808 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot); |
832 break; | 809 break; |
833 } | 810 } |
834 } | 811 } |
835 } | 812 } |
836 | 813 |
837 | |
838 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 814 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
839 // Call the runtime to declare the globals. | 815 // Call the runtime to declare the globals. |
840 __ mov(r4, Operand(pairs)); | 816 __ mov(r3, Operand(pairs)); |
841 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags())); | 817 __ LoadSmiLiteral(r2, Smi::FromInt(DeclareGlobalsFlags())); |
842 __ Push(r4, r3); | 818 __ Push(r3, r2); |
843 __ CallRuntime(Runtime::kDeclareGlobals); | 819 __ CallRuntime(Runtime::kDeclareGlobals); |
844 // Return value is ignored. | 820 // Return value is ignored. |
845 } | 821 } |
846 | 822 |
847 | |
848 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { | 823 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { |
849 // Call the runtime to declare the modules. | 824 // Call the runtime to declare the modules. |
850 __ Push(descriptions); | 825 __ Push(descriptions); |
851 __ CallRuntime(Runtime::kDeclareModules); | 826 __ CallRuntime(Runtime::kDeclareModules); |
852 // Return value is ignored. | 827 // Return value is ignored. |
853 } | 828 } |
854 | 829 |
855 | |
856 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { | 830 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { |
857 Comment cmnt(masm_, "[ SwitchStatement"); | 831 Comment cmnt(masm_, "[ SwitchStatement"); |
858 Breakable nested_statement(this, stmt); | 832 Breakable nested_statement(this, stmt); |
859 SetStatementPosition(stmt); | 833 SetStatementPosition(stmt); |
860 | 834 |
861 // Keep the switch value on the stack until a case matches. | 835 // Keep the switch value on the stack until a case matches. |
862 VisitForStackValue(stmt->tag()); | 836 VisitForStackValue(stmt->tag()); |
863 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 837 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
864 | 838 |
865 ZoneList<CaseClause*>* clauses = stmt->cases(); | 839 ZoneList<CaseClause*>* clauses = stmt->cases(); |
(...skipping 12 matching lines...) | |
878 } | 852 } |
879 | 853 |
880 Comment cmnt(masm_, "[ Case comparison"); | 854 Comment cmnt(masm_, "[ Case comparison"); |
881 __ bind(&next_test); | 855 __ bind(&next_test); |
882 next_test.Unuse(); | 856 next_test.Unuse(); |
883 | 857 |
884 // Compile the label expression. | 858 // Compile the label expression. |
885 VisitForAccumulatorValue(clause->label()); | 859 VisitForAccumulatorValue(clause->label()); |
886 | 860 |
887 // Perform the comparison as if via '==='. | 861 // Perform the comparison as if via '==='. |
888 __ LoadP(r4, MemOperand(sp, 0)); // Switch value. | 862 __ LoadP(r3, MemOperand(sp, 0)); // Switch value. |
889 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); | 863 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
890 JumpPatchSite patch_site(masm_); | 864 JumpPatchSite patch_site(masm_); |
891 if (inline_smi_code) { | 865 if (inline_smi_code) { |
892 Label slow_case; | 866 Label slow_case; |
893 __ orx(r5, r4, r3); | 867 __ LoadRR(r4, r2); |
894 patch_site.EmitJumpIfNotSmi(r5, &slow_case); | 868 __ OrP(r4, r3); |
869 patch_site.EmitJumpIfNotSmi(r4, &slow_case); | |
895 | 870 |
896 __ cmp(r4, r3); | 871 __ CmpP(r3, r2); |
897 __ bne(&next_test); | 872 __ bne(&next_test); |
898 __ Drop(1); // Switch value is no longer needed. | 873 __ Drop(1); // Switch value is no longer needed. |
899 __ b(clause->body_target()); | 874 __ b(clause->body_target()); |
900 __ bind(&slow_case); | 875 __ bind(&slow_case); |
901 } | 876 } |
902 | 877 |
903 // Record position before stub call for type feedback. | 878 // Record position before stub call for type feedback. |
904 SetExpressionPosition(clause); | 879 SetExpressionPosition(clause); |
905 Handle<Code> ic = | 880 Handle<Code> ic = |
906 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); | 881 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); |
907 CallIC(ic, clause->CompareId()); | 882 CallIC(ic, clause->CompareId()); |
908 patch_site.EmitPatchInfo(); | 883 patch_site.EmitPatchInfo(); |
909 | 884 |
910 Label skip; | 885 Label skip; |
911 __ b(&skip); | 886 __ b(&skip); |
912 PrepareForBailout(clause, TOS_REG); | 887 PrepareForBailout(clause, TOS_REG); |
913 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 888 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
914 __ cmp(r3, ip); | |
915 __ bne(&next_test); | 889 __ bne(&next_test); |
916 __ Drop(1); | 890 __ Drop(1); |
917 __ b(clause->body_target()); | 891 __ b(clause->body_target()); |
918 __ bind(&skip); | 892 __ bind(&skip); |
919 | 893 |
920 __ cmpi(r3, Operand::Zero()); | 894 __ CmpP(r2, Operand::Zero()); |
921 __ bne(&next_test); | 895 __ bne(&next_test); |
922 __ Drop(1); // Switch value is no longer needed. | 896 __ Drop(1); // Switch value is no longer needed. |
923 __ b(clause->body_target()); | 897 __ b(clause->body_target()); |
924 } | 898 } |
925 | 899 |
926 // Discard the test value and jump to the default if present, otherwise to | 900 // Discard the test value and jump to the default if present, otherwise to |
927 // the end of the statement. | 901 // the end of the statement. |
928 __ bind(&next_test); | 902 __ bind(&next_test); |
929 DropOperands(1); // Switch value is no longer needed. | 903 DropOperands(1); // Switch value is no longer needed. |
930 if (default_clause == NULL) { | 904 if (default_clause == NULL) { |
931 __ b(nested_statement.break_label()); | 905 __ b(nested_statement.break_label()); |
932 } else { | 906 } else { |
933 __ b(default_clause->body_target()); | 907 __ b(default_clause->body_target()); |
934 } | 908 } |
935 | 909 |
936 // Compile all the case bodies. | 910 // Compile all the case bodies. |
937 for (int i = 0; i < clauses->length(); i++) { | 911 for (int i = 0; i < clauses->length(); i++) { |
938 Comment cmnt(masm_, "[ Case body"); | 912 Comment cmnt(masm_, "[ Case body"); |
939 CaseClause* clause = clauses->at(i); | 913 CaseClause* clause = clauses->at(i); |
940 __ bind(clause->body_target()); | 914 __ bind(clause->body_target()); |
941 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); | 915 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); |
942 VisitStatements(clause->statements()); | 916 VisitStatements(clause->statements()); |
943 } | 917 } |
944 | 918 |
945 __ bind(nested_statement.break_label()); | 919 __ bind(nested_statement.break_label()); |
946 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); | 920 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); |
947 } | 921 } |
948 | 922 |
949 | |
950 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { | 923 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { |
951 Comment cmnt(masm_, "[ ForInStatement"); | 924 Comment cmnt(masm_, "[ ForInStatement"); |
952 SetStatementPosition(stmt, SKIP_BREAK); | 925 SetStatementPosition(stmt, SKIP_BREAK); |
953 | 926 |
954 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot(); | 927 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot(); |
955 | 928 |
956 // Get the object to enumerate over. | 929 // Get the object to enumerate over. |
957 SetExpressionAsStatementPosition(stmt->enumerable()); | 930 SetExpressionAsStatementPosition(stmt->enumerable()); |
958 VisitForAccumulatorValue(stmt->enumerable()); | 931 VisitForAccumulatorValue(stmt->enumerable()); |
959 OperandStackDepthIncrement(5); | 932 OperandStackDepthIncrement(5); |
960 | 933 |
961 Label loop, exit; | 934 Label loop, exit; |
962 Iteration loop_statement(this, stmt); | 935 Iteration loop_statement(this, stmt); |
963 increment_loop_depth(); | 936 increment_loop_depth(); |
964 | 937 |
965 // If the object is null or undefined, skip over the loop, otherwise convert | 938 // If the object is null or undefined, skip over the loop, otherwise convert |
966 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4. | 939 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4. |
967 Label convert, done_convert; | 940 Label convert, done_convert; |
968 __ JumpIfSmi(r3, &convert); | 941 __ JumpIfSmi(r2, &convert); |
969 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE); | 942 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE); |
970 __ bge(&done_convert); | 943 __ bge(&done_convert); |
971 __ CompareRoot(r3, Heap::kNullValueRootIndex); | 944 __ CompareRoot(r2, Heap::kNullValueRootIndex); |
972 __ beq(&exit); | 945 __ beq(&exit); |
973 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 946 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
974 __ beq(&exit); | 947 __ beq(&exit); |
975 __ bind(&convert); | 948 __ bind(&convert); |
976 ToObjectStub stub(isolate()); | 949 ToObjectStub stub(isolate()); |
977 __ CallStub(&stub); | 950 __ CallStub(&stub); |
978 __ bind(&done_convert); | 951 __ bind(&done_convert); |
979 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG); | 952 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG); |
980 __ push(r3); | 953 __ push(r2); |
981 | 954 |
982 // Check cache validity in generated code. This is a fast case for | 955 // Check cache validity in generated code. This is a fast case for |
983 // the JSObject::IsSimpleEnum cache validity checks. If we cannot | 956 // the JSObject::IsSimpleEnum cache validity checks. If we cannot |
984 // guarantee cache validity, call the runtime system to check cache | 957 // guarantee cache validity, call the runtime system to check cache |
985 // validity or get the property names in a fixed array. | 958 // validity or get the property names in a fixed array. |
986 // Note: Proxies never have an enum cache, so will always take the | 959 // Note: Proxies never have an enum cache, so will always take the |
987 // slow path. | 960 // slow path. |
988 Label call_runtime; | 961 Label call_runtime; |
989 __ CheckEnumCache(&call_runtime); | 962 __ CheckEnumCache(&call_runtime); |
990 | 963 |
991 // The enum cache is valid. Load the map of the object being | 964 // The enum cache is valid. Load the map of the object being |
992 // iterated over and use the cache for the iteration. | 965 // iterated over and use the cache for the iteration. |
993 Label use_cache; | 966 Label use_cache; |
994 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 967 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
995 __ b(&use_cache); | 968 __ b(&use_cache); |
996 | 969 |
997 // Get the set of properties to enumerate. | 970 // Get the set of properties to enumerate. |
998 __ bind(&call_runtime); | 971 __ bind(&call_runtime); |
999 __ push(r3); // Duplicate the enumerable object on the stack. | 972 __ push(r2); // Duplicate the enumerable object on the stack. |
1000 __ CallRuntime(Runtime::kForInEnumerate); | 973 __ CallRuntime(Runtime::kForInEnumerate); |
1001 PrepareForBailoutForId(stmt->EnumId(), TOS_REG); | 974 PrepareForBailoutForId(stmt->EnumId(), TOS_REG); |
1002 | 975 |
1003 // If we got a map from the runtime call, we can do a fast | 976 // If we got a map from the runtime call, we can do a fast |
1004 // modification check. Otherwise, we got a fixed array, and we have | 977 // modification check. Otherwise, we got a fixed array, and we have |
1005 // to do a slow check. | 978 // to do a slow check. |
1006 Label fixed_array; | 979 Label fixed_array; |
1007 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); | 980 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset)); |
1008 __ LoadRoot(ip, Heap::kMetaMapRootIndex); | 981 __ CompareRoot(r4, Heap::kMetaMapRootIndex); |
1009 __ cmp(r5, ip); | |
1010 __ bne(&fixed_array); | 982 __ bne(&fixed_array); |
1011 | 983 |
1012 // We got a map in register r3. Get the enumeration cache from it. | 984 // We got a map in register r2. Get the enumeration cache from it. |
1013 Label no_descriptors; | 985 Label no_descriptors; |
1014 __ bind(&use_cache); | 986 __ bind(&use_cache); |
1015 | 987 |
1016 __ EnumLength(r4, r3); | 988 __ EnumLength(r3, r2); |
1017 __ CmpSmiLiteral(r4, Smi::FromInt(0), r0); | 989 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0); |
1018 __ beq(&no_descriptors); | 990 __ beq(&no_descriptors, Label::kNear); |
1019 | 991 |
1020 __ LoadInstanceDescriptors(r3, r5); | 992 __ LoadInstanceDescriptors(r2, r4); |
1021 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset)); | 993 __ LoadP(r4, FieldMemOperand(r4, DescriptorArray::kEnumCacheOffset)); |
1022 __ LoadP(r5, | 994 __ LoadP(r4, |
1023 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 995 FieldMemOperand(r4, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
1024 | 996 |
1025 // Set up the four remaining stack slots. | 997 // Set up the four remaining stack slots. |
1026 __ push(r3); // Map. | 998 __ push(r2); // Map. |
1027 __ LoadSmiLiteral(r3, Smi::FromInt(0)); | 999 __ LoadSmiLiteral(r2, Smi::FromInt(0)); |
1028 // Push enumeration cache, enumeration cache length (as smi) and zero. | 1000 // Push enumeration cache, enumeration cache length (as smi) and zero. |
1029 __ Push(r5, r4, r3); | 1001 __ Push(r4, r3, r2); |
1030 __ b(&loop); | 1002 __ b(&loop); |
1031 | 1003 |
1032 __ bind(&no_descriptors); | 1004 __ bind(&no_descriptors); |
1033 __ Drop(1); | 1005 __ Drop(1); |
1034 __ b(&exit); | 1006 __ b(&exit); |
1035 | 1007 |
1036 // We got a fixed array in register r3. Iterate through that. | 1008 // We got a fixed array in register r2. Iterate through that. |
1037 __ bind(&fixed_array); | 1009 __ bind(&fixed_array); |
1038 | 1010 |
1039 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi(1) indicates slow check | 1011 int const vector_index = SmiFromSlot(slot)->value(); |
1040 __ Push(r4, r3); // Smi and array | 1012 __ EmitLoadTypeFeedbackVector(r3); |
1041 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset)); | 1013 __ mov(r4, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); |
1042 __ Push(r4); // Fixed array length (as smi). | 1014 __ StoreP( |
1015 r4, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0); | |
1016 __ LoadSmiLiteral(r3, Smi::FromInt(1)); // Smi(1) indicates slow check | |
1017 __ Push(r3, r2); // Smi and array | |
1018 __ LoadP(r3, FieldMemOperand(r2, FixedArray::kLengthOffset)); | |
1019 __ Push(r3); // Fixed array length (as smi). | |
1043 PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS); | 1020 PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS); |
1044 __ LoadSmiLiteral(r3, Smi::FromInt(0)); | 1021 __ LoadSmiLiteral(r2, Smi::FromInt(0)); |
1045 __ Push(r3); // Initial index. | 1022 __ Push(r2); // Initial index. |
1046 | 1023 |
1047 // Generate code for doing the condition check. | 1024 // Generate code for doing the condition check. |
1048 __ bind(&loop); | 1025 __ bind(&loop); |
1049 SetExpressionAsStatementPosition(stmt->each()); | 1026 SetExpressionAsStatementPosition(stmt->each()); |
1050 | 1027 |
1051 // Load the current count to r3, load the length to r4. | 1028 // Load the current count to r2, load the length to r3. |
1052 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize)); | 1029 __ LoadP(r2, MemOperand(sp, 0 * kPointerSize)); |
1053 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize)); | 1030 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize)); |
1054 __ cmpl(r3, r4); // Compare to the array length. | 1031 __ CmpLogicalP(r2, r3); // Compare to the array length. |
1055 __ bge(loop_statement.break_label()); | 1032 __ bge(loop_statement.break_label()); |
1056 | 1033 |
1057 // Get the current entry of the array into register r6. | 1034 // Get the current entry of the array into register r5. |
1058 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize)); | 1035 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize)); |
1059 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1036 __ AddP(r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
1060 __ SmiToPtrArrayOffset(r6, r3); | 1037 __ SmiToPtrArrayOffset(r5, r2); |
1061 __ LoadPX(r6, MemOperand(r6, r5)); | 1038 __ LoadP(r5, MemOperand(r5, r4)); |
1062 | 1039 |
1063 // Get the expected map from the stack or a smi in the | 1040 // Get the expected map from the stack or a smi in the |
1064 // permanent slow case into register r5. | 1041 // permanent slow case into register r4. |
1065 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize)); | 1042 __ LoadP(r4, MemOperand(sp, 3 * kPointerSize)); |
1066 | 1043 |
1067 // Check if the expected map still matches that of the enumerable. | 1044 // Check if the expected map still matches that of the enumerable. |
1068 // If not, we may have to filter the key. | 1045 // If not, we may have to filter the key. |
1069 Label update_each; | 1046 Label update_each; |
1070 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize)); | 1047 __ LoadP(r3, MemOperand(sp, 4 * kPointerSize)); |
1071 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); | 1048 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); |
1072 __ cmp(r7, r5); | 1049 __ CmpP(r6, r4); |
1073 __ beq(&update_each); | 1050 __ beq(&update_each); |
1074 | 1051 |
1075 // We need to filter the key, record slow-path here. | 1052 // We might get here from TurboFan or Crankshaft when something in the |
1076 int const vector_index = SmiFromSlot(slot)->value(); | 1053 // for-in loop body deopts and only now notice in fullcodegen, that we |
1077 __ EmitLoadTypeFeedbackVector(r3); | 1054 // can no longer use the enum cache, i.e. left fast mode. So better record |
1078 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); | 1055 // this information here, in case we later OSR back into this loop or |
1056 // reoptimize the whole function w/o rerunning the loop with the slow | |
1057 // mode object in fullcodegen (which would result in a deopt loop). | |
1058 __ EmitLoadTypeFeedbackVector(r2); | |
1059 __ mov(r4, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); | |
1079 __ StoreP( | 1060 __ StoreP( |
1080 r5, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0); | 1061 r4, FieldMemOperand(r2, FixedArray::OffsetOfElementAt(vector_index)), r0); |
1081 | 1062 |
1082 // Convert the entry to a string or (smi) 0 if it isn't a property | 1063 // Convert the entry to a string or (smi) 0 if it isn't a property |
1083 // any more. If the property has been removed while iterating, we | 1064 // any more. If the property has been removed while iterating, we |
1084 // just skip it. | 1065 // just skip it. |
1085 __ Push(r4, r6); // Enumerable and current entry. | 1066 __ Push(r3, r5); // Enumerable and current entry. |
1086 __ CallRuntime(Runtime::kForInFilter); | 1067 __ CallRuntime(Runtime::kForInFilter); |
1087 PrepareForBailoutForId(stmt->FilterId(), TOS_REG); | 1068 PrepareForBailoutForId(stmt->FilterId(), TOS_REG); |
1088 __ mr(r6, r3); | 1069 __ LoadRR(r5, r2); |
1089 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 1070 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
1090 __ cmp(r3, r0); | 1071 __ CmpP(r2, r0); |
1091 __ beq(loop_statement.continue_label()); | 1072 __ beq(loop_statement.continue_label()); |
1092 | 1073 |
1093 // Update the 'each' property or variable from the possibly filtered | 1074 // Update the 'each' property or variable from the possibly filtered |
1094 // entry in register r6. | 1075 // entry in register r5. |
1095 __ bind(&update_each); | 1076 __ bind(&update_each); |
1096 __ mr(result_register(), r6); | 1077 __ LoadRR(result_register(), r5); |
1097 // Perform the assignment as if via '='. | 1078 // Perform the assignment as if via '='. |
1098 { | 1079 { |
1099 EffectContext context(this); | 1080 EffectContext context(this); |
1100 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot()); | 1081 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot()); |
1101 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS); | 1082 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS); |
1102 } | 1083 } |
1103 | 1084 |
1104 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body(). | 1085 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body(). |
1105 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); | 1086 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); |
1106 // Generate code for the body of the loop. | 1087 // Generate code for the body of the loop. |
1107 Visit(stmt->body()); | 1088 Visit(stmt->body()); |
1108 | 1089 |
1109 // Generate code for going to the next element by incrementing | 1090 // Generate code for going to the next element by incrementing |
1110 // the index (smi) stored on top of the stack. | 1091 // the index (smi) stored on top of the stack. |
1111 __ bind(loop_statement.continue_label()); | 1092 __ bind(loop_statement.continue_label()); |
1112 __ pop(r3); | 1093 __ pop(r2); |
1113 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0); | 1094 __ AddSmiLiteral(r2, r2, Smi::FromInt(1), r0); |
1114 __ push(r3); | 1095 __ push(r2); |
1115 | 1096 |
1116 EmitBackEdgeBookkeeping(stmt, &loop); | 1097 EmitBackEdgeBookkeeping(stmt, &loop); |
1117 __ b(&loop); | 1098 __ b(&loop); |
1118 | 1099 |
1119 // Remove the pointers stored on the stack. | 1100 // Remove the pointers stored on the stack. |
1120 __ bind(loop_statement.break_label()); | 1101 __ bind(loop_statement.break_label()); |
1121 DropOperands(5); | 1102 DropOperands(5); |
1122 | 1103 |
1123 // Exit and decrement the loop depth. | 1104 // Exit and decrement the loop depth. |
1124 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); | 1105 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); |
1125 __ bind(&exit); | 1106 __ bind(&exit); |
1126 decrement_loop_depth(); | 1107 decrement_loop_depth(); |
1127 } | 1108 } |
1128 | 1109 |
1129 | |
1130 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset, | 1110 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset, |
1131 FeedbackVectorSlot slot) { | 1111 FeedbackVectorSlot slot) { |
1132 DCHECK(NeedsHomeObject(initializer)); | 1112 DCHECK(NeedsHomeObject(initializer)); |
1133 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); | 1113 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); |
1134 __ mov(StoreDescriptor::NameRegister(), | 1114 __ mov(StoreDescriptor::NameRegister(), |
1135 Operand(isolate()->factory()->home_object_symbol())); | 1115 Operand(isolate()->factory()->home_object_symbol())); |
1136 __ LoadP(StoreDescriptor::ValueRegister(), | 1116 __ LoadP(StoreDescriptor::ValueRegister(), |
1137 MemOperand(sp, offset * kPointerSize)); | 1117 MemOperand(sp, offset * kPointerSize)); |
1138 EmitLoadStoreICSlot(slot); | 1118 EmitLoadStoreICSlot(slot); |
1139 CallStoreIC(); | 1119 CallStoreIC(); |
1140 } | 1120 } |
1141 | 1121 |
1142 | |
1143 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer, | 1122 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer, |
1144 int offset, | 1123 int offset, |
1145 FeedbackVectorSlot slot) { | 1124 FeedbackVectorSlot slot) { |
1146 DCHECK(NeedsHomeObject(initializer)); | 1125 DCHECK(NeedsHomeObject(initializer)); |
1147 __ Move(StoreDescriptor::ReceiverRegister(), r3); | 1126 __ Move(StoreDescriptor::ReceiverRegister(), r2); |
1148 __ mov(StoreDescriptor::NameRegister(), | 1127 __ mov(StoreDescriptor::NameRegister(), |
1149 Operand(isolate()->factory()->home_object_symbol())); | 1128 Operand(isolate()->factory()->home_object_symbol())); |
1150 __ LoadP(StoreDescriptor::ValueRegister(), | 1129 __ LoadP(StoreDescriptor::ValueRegister(), |
1151 MemOperand(sp, offset * kPointerSize)); | 1130 MemOperand(sp, offset * kPointerSize)); |
1152 EmitLoadStoreICSlot(slot); | 1131 EmitLoadStoreICSlot(slot); |
1153 CallStoreIC(); | 1132 CallStoreIC(); |
1154 } | 1133 } |
1155 | 1134 |
1156 | |
1157 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy, | 1135 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy, |
1158 TypeofMode typeof_mode, | 1136 TypeofMode typeof_mode, |
1159 Label* slow) { | 1137 Label* slow) { |
1160 Register current = cp; | 1138 Register current = cp; |
1161 Register next = r4; | 1139 Register next = r3; |
1162 Register temp = r5; | 1140 Register temp = r4; |
1163 | 1141 |
1164 Scope* s = scope(); | 1142 Scope* s = scope(); |
1165 while (s != NULL) { | 1143 while (s != NULL) { |
1166 if (s->num_heap_slots() > 0) { | 1144 if (s->num_heap_slots() > 0) { |
1167 if (s->calls_sloppy_eval()) { | 1145 if (s->calls_sloppy_eval()) { |
1168 // Check that extension is "the hole". | 1146 // Check that extension is "the hole". |
1169 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX)); | 1147 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX)); |
1170 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); | 1148 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
1171 } | 1149 } |
1172 // Load next context in chain. | 1150 // Load next context in chain. |
1173 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX)); | 1151 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX)); |
1174 // Walk the rest of the chain without clobbering cp. | 1152 // Walk the rest of the chain without clobbering cp. |
1175 current = next; | 1153 current = next; |
1176 } | 1154 } |
1177 // If no outer scope calls eval, we do not need to check more | 1155 // If no outer scope calls eval, we do not need to check more |
1178 // context extensions. | 1156 // context extensions. |
1179 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; | 1157 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; |
1180 s = s->outer_scope(); | 1158 s = s->outer_scope(); |
1181 } | 1159 } |
1182 | 1160 |
1183 if (s->is_eval_scope()) { | 1161 if (s->is_eval_scope()) { |
1184 Label loop, fast; | 1162 Label loop, fast; |
1185 if (!current.is(next)) { | 1163 if (!current.is(next)) { |
1186 __ Move(next, current); | 1164 __ Move(next, current); |
1187 } | 1165 } |
1188 __ bind(&loop); | 1166 __ bind(&loop); |
1189 // Terminate at native context. | 1167 // Terminate at native context. |
1190 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset)); | 1168 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset)); |
1191 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); | 1169 __ CompareRoot(temp, Heap::kNativeContextMapRootIndex); |
1192 __ cmp(temp, ip); | 1170 __ beq(&fast, Label::kNear); |
1193 __ beq(&fast); | |
1194 // Check that extension is "the hole". | 1171 // Check that extension is "the hole". |
1195 __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX)); | 1172 __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX)); |
1196 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); | 1173 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
1197 // Load next context in chain. | 1174 // Load next context in chain. |
1198 __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX)); | 1175 __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX)); |
1199 __ b(&loop); | 1176 __ b(&loop); |
1200 __ bind(&fast); | 1177 __ bind(&fast); |
1201 } | 1178 } |
1202 | 1179 |
1203 // All extension objects were empty and it is safe to use a normal global | 1180 // All extension objects were empty and it is safe to use a normal global |
1204 // load machinery. | 1181 // load machinery. |
1205 EmitGlobalVariableLoad(proxy, typeof_mode); | 1182 EmitGlobalVariableLoad(proxy, typeof_mode); |
1206 } | 1183 } |
1207 | 1184 |
1208 | |
1209 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, | 1185 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
1210 Label* slow) { | 1186 Label* slow) { |
1211 DCHECK(var->IsContextSlot()); | 1187 DCHECK(var->IsContextSlot()); |
1212 Register context = cp; | 1188 Register context = cp; |
1213 Register next = r6; | 1189 Register next = r5; |
1214 Register temp = r7; | 1190 Register temp = r6; |
1215 | 1191 |
1216 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { | 1192 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
1217 if (s->num_heap_slots() > 0) { | 1193 if (s->num_heap_slots() > 0) { |
1218 if (s->calls_sloppy_eval()) { | 1194 if (s->calls_sloppy_eval()) { |
1219 // Check that extension is "the hole". | 1195 // Check that extension is "the hole". |
1220 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | 1196 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); |
1221 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); | 1197 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
1222 } | 1198 } |
1223 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX)); | 1199 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX)); |
1224 // Walk the rest of the chain without clobbering cp. | 1200 // Walk the rest of the chain without clobbering cp. |
1225 context = next; | 1201 context = next; |
1226 } | 1202 } |
1227 } | 1203 } |
1228 // Check that last extension is "the hole". | 1204 // Check that last extension is "the hole". |
1229 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | 1205 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); |
1230 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); | 1206 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
1231 | 1207 |
1232 // This function is used only for loads, not stores, so it's safe to | 1208 // This function is used only for loads, not stores, so it's safe to |
1233 // return a cp-based operand (the write barrier cannot be allowed to | 1209 // return a cp-based operand (the write barrier cannot be allowed to |
1234 // destroy the cp register). | 1210 // destroy the cp register). |
1235 return ContextMemOperand(context, var->index()); | 1211 return ContextMemOperand(context, var->index()); |
1236 } | 1212 } |
1237 | 1213 |
1238 | |
1239 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, | 1214 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, |
1240 TypeofMode typeof_mode, | 1215 TypeofMode typeof_mode, |
1241 Label* slow, Label* done) { | 1216 Label* slow, Label* done) { |
1242 // Generate fast-case code for variables that might be shadowed by | 1217 // Generate fast-case code for variables that might be shadowed by |
1243 // eval-introduced variables. Eval is used a lot without | 1218 // eval-introduced variables. Eval is used a lot without |
1244 // introducing variables. In those cases, we do not want to | 1219 // introducing variables. In those cases, we do not want to |
1245 // perform a runtime call for all variables in the scope | 1220 // perform a runtime call for all variables in the scope |
1246 // containing the eval. | 1221 // containing the eval. |
1247 Variable* var = proxy->var(); | 1222 Variable* var = proxy->var(); |
1248 if (var->mode() == DYNAMIC_GLOBAL) { | 1223 if (var->mode() == DYNAMIC_GLOBAL) { |
1249 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow); | 1224 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow); |
1250 __ b(done); | 1225 __ b(done); |
1251 } else if (var->mode() == DYNAMIC_LOCAL) { | 1226 } else if (var->mode() == DYNAMIC_LOCAL) { |
1252 Variable* local = var->local_if_not_shadowed(); | 1227 Variable* local = var->local_if_not_shadowed(); |
1253 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow)); | 1228 __ LoadP(r2, ContextSlotOperandCheckExtensions(local, slow)); |
1254 if (local->mode() == LET || local->mode() == CONST || | 1229 if (local->mode() == LET || local->mode() == CONST || |
1255 local->mode() == CONST_LEGACY) { | 1230 local->mode() == CONST_LEGACY) { |
1256 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | 1231 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); |
1257 __ bne(done); | 1232 __ bne(done); |
1258 if (local->mode() == CONST_LEGACY) { | 1233 if (local->mode() == CONST_LEGACY) { |
1259 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 1234 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
1260 } else { // LET || CONST | 1235 } else { // LET || CONST |
1261 __ mov(r3, Operand(var->name())); | 1236 __ mov(r2, Operand(var->name())); |
1262 __ push(r3); | 1237 __ push(r2); |
1263 __ CallRuntime(Runtime::kThrowReferenceError); | 1238 __ CallRuntime(Runtime::kThrowReferenceError); |
1264 } | 1239 } |
1265 } | 1240 } |
1266 __ b(done); | 1241 __ b(done); |
1267 } | 1242 } |
1268 } | 1243 } |
1269 | 1244 |
1270 | |
1271 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, | 1245 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, |
1272 TypeofMode typeof_mode) { | 1246 TypeofMode typeof_mode) { |
1273 Variable* var = proxy->var(); | 1247 Variable* var = proxy->var(); |
1274 DCHECK(var->IsUnallocatedOrGlobalSlot() || | 1248 DCHECK(var->IsUnallocatedOrGlobalSlot() || |
1275 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL)); | 1249 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL)); |
1276 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister()); | 1250 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister()); |
1277 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); | 1251 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); |
1278 __ mov(LoadDescriptor::SlotRegister(), | 1252 __ mov(LoadDescriptor::SlotRegister(), |
1279 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); | 1253 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); |
1280 CallLoadIC(typeof_mode); | 1254 CallLoadIC(typeof_mode); |
1281 } | 1255 } |
1282 | 1256 |
1283 | |
1284 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, | 1257 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, |
1285 TypeofMode typeof_mode) { | 1258 TypeofMode typeof_mode) { |
1286 // Record position before possible IC call. | 1259 // Record position before possible IC call. |
1287 SetExpressionPosition(proxy); | 1260 SetExpressionPosition(proxy); |
1288 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS); | 1261 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS); |
1289 Variable* var = proxy->var(); | 1262 Variable* var = proxy->var(); |
1290 | 1263 |
1291 // Three cases: global variables, lookup variables, and all other types of | 1264 // Three cases: global variables, lookup variables, and all other types of |
1292 // variables. | 1265 // variables. |
1293 switch (var->location()) { | 1266 switch (var->location()) { |
1294 case VariableLocation::GLOBAL: | 1267 case VariableLocation::GLOBAL: |
1295 case VariableLocation::UNALLOCATED: { | 1268 case VariableLocation::UNALLOCATED: { |
1296 Comment cmnt(masm_, "[ Global variable"); | 1269 Comment cmnt(masm_, "[ Global variable"); |
1297 EmitGlobalVariableLoad(proxy, typeof_mode); | 1270 EmitGlobalVariableLoad(proxy, typeof_mode); |
1298 context()->Plug(r3); | 1271 context()->Plug(r2); |
1299 break; | 1272 break; |
1300 } | 1273 } |
1301 | 1274 |
1302 case VariableLocation::PARAMETER: | 1275 case VariableLocation::PARAMETER: |
1303 case VariableLocation::LOCAL: | 1276 case VariableLocation::LOCAL: |
1304 case VariableLocation::CONTEXT: { | 1277 case VariableLocation::CONTEXT: { |
1305 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode); | 1278 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode); |
1306 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" | 1279 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" |
1307 : "[ Stack variable"); | 1280 : "[ Stack variable"); |
1308 if (NeedsHoleCheckForLoad(proxy)) { | 1281 if (NeedsHoleCheckForLoad(proxy)) { |
1309 Label done; | 1282 Label done; |
1310 // Let and const need a read barrier. | 1283 // Let and const need a read barrier. |
1311 GetVar(r3, var); | 1284 GetVar(r2, var); |
1312 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | 1285 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); |
1313 __ bne(&done); | 1286 __ bne(&done); |
1314 if (var->mode() == LET || var->mode() == CONST) { | 1287 if (var->mode() == LET || var->mode() == CONST) { |
1315 // Throw a reference error when using an uninitialized let/const | 1288 // Throw a reference error when using an uninitialized let/const |
1316 // binding in harmony mode. | 1289 // binding in harmony mode. |
1317 __ mov(r3, Operand(var->name())); | 1290 __ mov(r2, Operand(var->name())); |
1318 __ push(r3); | 1291 __ push(r2); |
1319 __ CallRuntime(Runtime::kThrowReferenceError); | 1292 __ CallRuntime(Runtime::kThrowReferenceError); |
1320 } else { | 1293 } else { |
1321 // Uninitialized legacy const bindings are unholed. | 1294 // Uninitialized legacy const bindings are unholed. |
1322 DCHECK(var->mode() == CONST_LEGACY); | 1295 DCHECK(var->mode() == CONST_LEGACY); |
1323 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 1296 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
1324 } | 1297 } |
1325 __ bind(&done); | 1298 __ bind(&done); |
1326 context()->Plug(r3); | 1299 context()->Plug(r2); |
1327 break; | 1300 break; |
1328 } | 1301 } |
1329 context()->Plug(var); | 1302 context()->Plug(var); |
1330 break; | 1303 break; |
1331 } | 1304 } |
1332 | 1305 |
1333 case VariableLocation::LOOKUP: { | 1306 case VariableLocation::LOOKUP: { |
1334 Comment cmnt(masm_, "[ Lookup variable"); | 1307 Comment cmnt(masm_, "[ Lookup variable"); |
1335 Label done, slow; | 1308 Label done, slow; |
1336 // Generate code for loading from variables potentially shadowed | 1309 // Generate code for loading from variables potentially shadowed |
1337 // by eval-introduced variables. | 1310 // by eval-introduced variables. |
1338 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done); | 1311 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done); |
1339 __ bind(&slow); | 1312 __ bind(&slow); |
1340 __ Push(var->name()); | 1313 __ Push(var->name()); |
1341 Runtime::FunctionId function_id = | 1314 Runtime::FunctionId function_id = |
1342 typeof_mode == NOT_INSIDE_TYPEOF | 1315 typeof_mode == NOT_INSIDE_TYPEOF |
1343 ? Runtime::kLoadLookupSlot | 1316 ? Runtime::kLoadLookupSlot |
1344 : Runtime::kLoadLookupSlotInsideTypeof; | 1317 : Runtime::kLoadLookupSlotInsideTypeof; |
1345 __ CallRuntime(function_id); | 1318 __ CallRuntime(function_id); |
1346 __ bind(&done); | 1319 __ bind(&done); |
1347 context()->Plug(r3); | 1320 context()->Plug(r2); |
1348 } | 1321 } |
1349 } | 1322 } |
1350 } | 1323 } |
1351 | 1324 |
1352 | |
1353 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | 1325 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { |
1354 Comment cmnt(masm_, "[ RegExpLiteral"); | 1326 Comment cmnt(masm_, "[ RegExpLiteral"); |
1355 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1327 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1356 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index())); | 1328 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index())); |
1357 __ mov(r4, Operand(expr->pattern())); | 1329 __ mov(r3, Operand(expr->pattern())); |
1358 __ LoadSmiLiteral(r3, Smi::FromInt(expr->flags())); | 1330 __ LoadSmiLiteral(r2, Smi::FromInt(expr->flags())); |
1359 FastCloneRegExpStub stub(isolate()); | 1331 FastCloneRegExpStub stub(isolate()); |
1360 __ CallStub(&stub); | 1332 __ CallStub(&stub); |
1361 context()->Plug(r3); | 1333 context()->Plug(r2); |
1362 } | 1334 } |
1363 | 1335 |
1364 | |
1365 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) { | 1336 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) { |
1366 Expression* expression = (property == NULL) ? NULL : property->value(); | 1337 Expression* expression = (property == NULL) ? NULL : property->value(); |
1367 if (expression == NULL) { | 1338 if (expression == NULL) { |
1368 __ LoadRoot(r4, Heap::kNullValueRootIndex); | 1339 __ LoadRoot(r3, Heap::kNullValueRootIndex); |
1369 PushOperand(r4); | 1340 PushOperand(r3); |
1370 } else { | 1341 } else { |
1371 VisitForStackValue(expression); | 1342 VisitForStackValue(expression); |
1372 if (NeedsHomeObject(expression)) { | 1343 if (NeedsHomeObject(expression)) { |
1373 DCHECK(property->kind() == ObjectLiteral::Property::GETTER || | 1344 DCHECK(property->kind() == ObjectLiteral::Property::GETTER || |
1374 property->kind() == ObjectLiteral::Property::SETTER); | 1345 property->kind() == ObjectLiteral::Property::SETTER); |
1375 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3; | 1346 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3; |
1376 EmitSetHomeObject(expression, offset, property->GetSlot()); | 1347 EmitSetHomeObject(expression, offset, property->GetSlot()); |
1377 } | 1348 } |
1378 } | 1349 } |
1379 } | 1350 } |
1380 | 1351 |
1381 | |
1382 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { | 1352 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { |
1383 Comment cmnt(masm_, "[ ObjectLiteral"); | 1353 Comment cmnt(masm_, "[ ObjectLiteral"); |
1384 | 1354 |
1385 Handle<FixedArray> constant_properties = expr->constant_properties(); | 1355 Handle<FixedArray> constant_properties = expr->constant_properties(); |
1386 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1356 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1387 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index())); | 1357 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index())); |
1388 __ mov(r4, Operand(constant_properties)); | 1358 __ mov(r3, Operand(constant_properties)); |
1389 int flags = expr->ComputeFlags(); | 1359 int flags = expr->ComputeFlags(); |
1390 __ LoadSmiLiteral(r3, Smi::FromInt(flags)); | 1360 __ LoadSmiLiteral(r2, Smi::FromInt(flags)); |
1391 if (MustCreateObjectLiteralWithRuntime(expr)) { | 1361 if (MustCreateObjectLiteralWithRuntime(expr)) { |
1392 __ Push(r6, r5, r4, r3); | 1362 __ Push(r5, r4, r3, r2); |
1393 __ CallRuntime(Runtime::kCreateObjectLiteral); | 1363 __ CallRuntime(Runtime::kCreateObjectLiteral); |
1394 } else { | 1364 } else { |
1395 FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); | 1365 FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); |
1396 __ CallStub(&stub); | 1366 __ CallStub(&stub); |
1397 } | 1367 } |
1398 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); | 1368 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); |
1399 | 1369 |
1400 // If result_saved is true the result is on top of the stack. If | 1370 // If result_saved is true the result is on top of the stack. If |
1401 // result_saved is false the result is in r3. | 1371 // result_saved is false the result is in r2. |
1402 bool result_saved = false; | 1372 bool result_saved = false; |
1403 | 1373 |
1404 AccessorTable accessor_table(zone()); | 1374 AccessorTable accessor_table(zone()); |
1405 int property_index = 0; | 1375 int property_index = 0; |
1406 for (; property_index < expr->properties()->length(); property_index++) { | 1376 for (; property_index < expr->properties()->length(); property_index++) { |
1407 ObjectLiteral::Property* property = expr->properties()->at(property_index); | 1377 ObjectLiteral::Property* property = expr->properties()->at(property_index); |
1408 if (property->is_computed_name()) break; | 1378 if (property->is_computed_name()) break; |
1409 if (property->IsCompileTimeValue()) continue; | 1379 if (property->IsCompileTimeValue()) continue; |
1410 | 1380 |
1411 Literal* key = property->key()->AsLiteral(); | 1381 Literal* key = property->key()->AsLiteral(); |
1412 Expression* value = property->value(); | 1382 Expression* value = property->value(); |
1413 if (!result_saved) { | 1383 if (!result_saved) { |
1414 PushOperand(r3); // Save result on stack | 1384 PushOperand(r2); // Save result on stack |
1415 result_saved = true; | 1385 result_saved = true; |
1416 } | 1386 } |
1417 switch (property->kind()) { | 1387 switch (property->kind()) { |
1418 case ObjectLiteral::Property::CONSTANT: | 1388 case ObjectLiteral::Property::CONSTANT: |
1419 UNREACHABLE(); | 1389 UNREACHABLE(); |
1420 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | 1390 case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
1421 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); | 1391 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); |
1422 // Fall through. | 1392 // Fall through. |
1423 case ObjectLiteral::Property::COMPUTED: | 1393 case ObjectLiteral::Property::COMPUTED: |
1424 // It is safe to use [[Put]] here because the boilerplate already | 1394 // It is safe to use [[Put]] here because the boilerplate already |
1425 // contains computed properties with an uninitialized value. | 1395 // contains computed properties with an uninitialized value. |
1426 if (key->value()->IsInternalizedString()) { | 1396 if (key->value()->IsInternalizedString()) { |
1427 if (property->emit_store()) { | 1397 if (property->emit_store()) { |
1428 VisitForAccumulatorValue(value); | 1398 VisitForAccumulatorValue(value); |
1429 DCHECK(StoreDescriptor::ValueRegister().is(r3)); | 1399 DCHECK(StoreDescriptor::ValueRegister().is(r2)); |
1430 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); | 1400 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); |
1431 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); | 1401 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); |
1432 EmitLoadStoreICSlot(property->GetSlot(0)); | 1402 EmitLoadStoreICSlot(property->GetSlot(0)); |
1433 CallStoreIC(); | 1403 CallStoreIC(); |
1434 PrepareForBailoutForId(key->id(), NO_REGISTERS); | 1404 PrepareForBailoutForId(key->id(), NO_REGISTERS); |
1435 | 1405 |
1436 if (NeedsHomeObject(value)) { | 1406 if (NeedsHomeObject(value)) { |
1437 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1)); | 1407 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1)); |
1438 } | 1408 } |
1439 } else { | 1409 } else { |
1440 VisitForEffect(value); | 1410 VisitForEffect(value); |
1441 } | 1411 } |
1442 break; | 1412 break; |
1443 } | 1413 } |
1444 // Duplicate receiver on stack. | 1414 // Duplicate receiver on stack. |
1445 __ LoadP(r3, MemOperand(sp)); | 1415 __ LoadP(r2, MemOperand(sp)); |
1446 PushOperand(r3); | 1416 PushOperand(r2); |
1447 VisitForStackValue(key); | 1417 VisitForStackValue(key); |
1448 VisitForStackValue(value); | 1418 VisitForStackValue(value); |
1449 if (property->emit_store()) { | 1419 if (property->emit_store()) { |
1450 if (NeedsHomeObject(value)) { | 1420 if (NeedsHomeObject(value)) { |
1451 EmitSetHomeObject(value, 2, property->GetSlot()); | 1421 EmitSetHomeObject(value, 2, property->GetSlot()); |
1452 } | 1422 } |
1453 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes | 1423 __ LoadSmiLiteral(r2, Smi::FromInt(SLOPPY)); // PropertyAttributes |
1454 PushOperand(r3); | 1424 PushOperand(r2); |
1455 CallRuntimeWithOperands(Runtime::kSetProperty); | 1425 CallRuntimeWithOperands(Runtime::kSetProperty); |
1456 } else { | 1426 } else { |
1457 DropOperands(3); | 1427 DropOperands(3); |
1458 } | 1428 } |
1459 break; | 1429 break; |
1460 case ObjectLiteral::Property::PROTOTYPE: | 1430 case ObjectLiteral::Property::PROTOTYPE: |
1461 // Duplicate receiver on stack. | 1431 // Duplicate receiver on stack. |
1462 __ LoadP(r3, MemOperand(sp)); | 1432 __ LoadP(r2, MemOperand(sp)); |
1463 PushOperand(r3); | 1433 PushOperand(r2); |
1464 VisitForStackValue(value); | 1434 VisitForStackValue(value); |
1465 DCHECK(property->emit_store()); | 1435 DCHECK(property->emit_store()); |
1466 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); | 1436 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); |
1467 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), | 1437 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), |
1468 NO_REGISTERS); | 1438 NO_REGISTERS); |
1469 break; | 1439 break; |
1470 case ObjectLiteral::Property::GETTER: | 1440 case ObjectLiteral::Property::GETTER: |
1471 if (property->emit_store()) { | 1441 if (property->emit_store()) { |
1472 accessor_table.lookup(key)->second->getter = property; | 1442 accessor_table.lookup(key)->second->getter = property; |
1473 } | 1443 } |
1474 break; | 1444 break; |
1475 case ObjectLiteral::Property::SETTER: | 1445 case ObjectLiteral::Property::SETTER: |
1476 if (property->emit_store()) { | 1446 if (property->emit_store()) { |
1477 accessor_table.lookup(key)->second->setter = property; | 1447 accessor_table.lookup(key)->second->setter = property; |
1478 } | 1448 } |
1479 break; | 1449 break; |
1480 } | 1450 } |
1481 } | 1451 } |
1482 | 1452 |
1483 // Emit code to define accessors, using only a single call to the runtime for | 1453 // Emit code to define accessors, using only a single call to the runtime for |
1484 // each pair of corresponding getters and setters. | 1454 // each pair of corresponding getters and setters. |
1485 for (AccessorTable::Iterator it = accessor_table.begin(); | 1455 for (AccessorTable::Iterator it = accessor_table.begin(); |
1486 it != accessor_table.end(); ++it) { | 1456 it != accessor_table.end(); ++it) { |
1487 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver. | 1457 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver. |
1488 PushOperand(r3); | 1458 PushOperand(r2); |
1489 VisitForStackValue(it->first); | 1459 VisitForStackValue(it->first); |
1490 EmitAccessor(it->second->getter); | 1460 EmitAccessor(it->second->getter); |
1491 EmitAccessor(it->second->setter); | 1461 EmitAccessor(it->second->setter); |
1492 __ LoadSmiLiteral(r3, Smi::FromInt(NONE)); | 1462 __ LoadSmiLiteral(r2, Smi::FromInt(NONE)); |
1493 PushOperand(r3); | 1463 PushOperand(r2); |
1494 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked); | 1464 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked); |
1495 } | 1465 } |
1496 | 1466 |
1497 // Object literals have two parts. The "static" part on the left contains no | 1467 // Object literals have two parts. The "static" part on the left contains no |
1498 // computed property names, and so we can compute its map ahead of time; see | 1468 // computed property names, and so we can compute its map ahead of time; see |
1499 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part | 1469 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part |
1500 // starts with the first computed property name, and continues with all | 1470 // starts with the first computed property name, and continues with all |
1501 // properties to its right. All the code from above initializes the static | 1471 // properties to its right. All the code from above initializes the static |
1502 // component of the object literal, and arranges for the map of the result to | 1472 // component of the object literal, and arranges for the map of the result to |
1503 // reflect the static order in which the keys appear. For the dynamic | 1473 // reflect the static order in which the keys appear. For the dynamic |
1504 // properties, we compile them into a series of "SetOwnProperty" runtime | 1474 // properties, we compile them into a series of "SetOwnProperty" runtime |
1505 // calls. This will preserve insertion order. | 1475 // calls. This will preserve insertion order. |
1506 for (; property_index < expr->properties()->length(); property_index++) { | 1476 for (; property_index < expr->properties()->length(); property_index++) { |
1507 ObjectLiteral::Property* property = expr->properties()->at(property_index); | 1477 ObjectLiteral::Property* property = expr->properties()->at(property_index); |
1508 | 1478 |
1509 Expression* value = property->value(); | 1479 Expression* value = property->value(); |
1510 if (!result_saved) { | 1480 if (!result_saved) { |
1511 PushOperand(r3); // Save result on the stack | 1481 PushOperand(r2); // Save result on the stack |
1512 result_saved = true; | 1482 result_saved = true; |
1513 } | 1483 } |
1514 | 1484 |
1515 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver. | 1485 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver. |
1516 PushOperand(r3); | 1486 PushOperand(r2); |
1517 | 1487 |
1518 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { | 1488 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { |
1519 DCHECK(!property->is_computed_name()); | 1489 DCHECK(!property->is_computed_name()); |
1520 VisitForStackValue(value); | 1490 VisitForStackValue(value); |
1521 DCHECK(property->emit_store()); | 1491 DCHECK(property->emit_store()); |
1522 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); | 1492 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); |
1523 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), | 1493 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), |
1524 NO_REGISTERS); | 1494 NO_REGISTERS); |
1525 } else { | 1495 } else { |
1526 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index)); | 1496 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index)); |
(...skipping 27 matching lines...) | |
1554 case ObjectLiteral::Property::SETTER: | 1524 case ObjectLiteral::Property::SETTER: |
1555 PushOperand(Smi::FromInt(NONE)); | 1525 PushOperand(Smi::FromInt(NONE)); |
1556 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); | 1526 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); |
1557 break; | 1527 break; |
1558 } | 1528 } |
1559 } | 1529 } |
1560 } | 1530 } |
1561 | 1531 |
1562 if (expr->has_function()) { | 1532 if (expr->has_function()) { |
1563 DCHECK(result_saved); | 1533 DCHECK(result_saved); |
1564 __ LoadP(r3, MemOperand(sp)); | 1534 __ LoadP(r2, MemOperand(sp)); |
1565 __ push(r3); | 1535 __ push(r2); |
1566 __ CallRuntime(Runtime::kToFastProperties); | 1536 __ CallRuntime(Runtime::kToFastProperties); |
1567 } | 1537 } |
1568 | 1538 |
1569 if (result_saved) { | 1539 if (result_saved) { |
1570 context()->PlugTOS(); | 1540 context()->PlugTOS(); |
1571 } else { | 1541 } else { |
1572 context()->Plug(r3); | 1542 context()->Plug(r2); |
1573 } | 1543 } |
1574 } | 1544 } |
1575 | 1545 |
1576 | |
1577 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { | 1546 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { |
1578 Comment cmnt(masm_, "[ ArrayLiteral"); | 1547 Comment cmnt(masm_, "[ ArrayLiteral"); |
1579 | 1548 |
1580 Handle<FixedArray> constant_elements = expr->constant_elements(); | 1549 Handle<FixedArray> constant_elements = expr->constant_elements(); |
1581 bool has_fast_elements = | 1550 bool has_fast_elements = |
1582 IsFastObjectElementsKind(expr->constant_elements_kind()); | 1551 IsFastObjectElementsKind(expr->constant_elements_kind()); |
1583 Handle<FixedArrayBase> constant_elements_values( | 1552 Handle<FixedArrayBase> constant_elements_values( |
1584 FixedArrayBase::cast(constant_elements->get(1))); | 1553 FixedArrayBase::cast(constant_elements->get(1))); |
1585 | 1554 |
1586 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; | 1555 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; |
1587 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { | 1556 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { |
1588 // If the only customer of allocation sites is transitioning, then | 1557 // If the only customer of allocation sites is transitioning, then |
1589 // we can turn it off if we don't have anywhere else to transition to. | 1558 // we can turn it off if we don't have anywhere else to transition to. |
1590 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | 1559 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; |
1591 } | 1560 } |
1592 | 1561 |
1593 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1562 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1594 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index())); | 1563 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index())); |
1595 __ mov(r4, Operand(constant_elements)); | 1564 __ mov(r3, Operand(constant_elements)); |
1596 if (MustCreateArrayLiteralWithRuntime(expr)) { | 1565 if (MustCreateArrayLiteralWithRuntime(expr)) { |
1597 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags())); | 1566 __ LoadSmiLiteral(r2, Smi::FromInt(expr->ComputeFlags())); |
1598 __ Push(r6, r5, r4, r3); | 1567 __ Push(r5, r4, r3, r2); |
1599 __ CallRuntime(Runtime::kCreateArrayLiteral); | 1568 __ CallRuntime(Runtime::kCreateArrayLiteral); |
1600 } else { | 1569 } else { |
1601 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); | 1570 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); |
1602 __ CallStub(&stub); | 1571 __ CallStub(&stub); |
1603 } | 1572 } |
1604 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); | 1573 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); |
1605 | 1574 |
1606 bool result_saved = false; // Is the result saved to the stack? | 1575 bool result_saved = false; // Is the result saved to the stack? |
1607 ZoneList<Expression*>* subexprs = expr->values(); | 1576 ZoneList<Expression*>* subexprs = expr->values(); |
1608 int length = subexprs->length(); | 1577 int length = subexprs->length(); |
1609 | 1578 |
1610 // Emit code to evaluate all the non-constant subexpressions and to store | 1579 // Emit code to evaluate all the non-constant subexpressions and to store |
1611 // them into the newly cloned array. | 1580 // them into the newly cloned array. |
1612 int array_index = 0; | 1581 int array_index = 0; |
1613 for (; array_index < length; array_index++) { | 1582 for (; array_index < length; array_index++) { |
1614 Expression* subexpr = subexprs->at(array_index); | 1583 Expression* subexpr = subexprs->at(array_index); |
1615 DCHECK(!subexpr->IsSpread()); | 1584 DCHECK(!subexpr->IsSpread()); |
1616 // If the subexpression is a literal or a simple materialized literal it | 1585 // If the subexpression is a literal or a simple materialized literal it |
1617 // is already set in the cloned array. | 1586 // is already set in the cloned array. |
1618 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; | 1587 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; |
1619 | 1588 |
1620 if (!result_saved) { | 1589 if (!result_saved) { |
1621 PushOperand(r3); | 1590 PushOperand(r2); |
1622 result_saved = true; | 1591 result_saved = true; |
1623 } | 1592 } |
1624 VisitForAccumulatorValue(subexpr); | 1593 VisitForAccumulatorValue(subexpr); |
1625 | 1594 |
1626 __ LoadSmiLiteral(StoreDescriptor::NameRegister(), | 1595 __ LoadSmiLiteral(StoreDescriptor::NameRegister(), |
1627 Smi::FromInt(array_index)); | 1596 Smi::FromInt(array_index)); |
1628 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | 1597 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
1629 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot()); | 1598 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot()); |
1630 Handle<Code> ic = | 1599 Handle<Code> ic = |
1631 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | 1600 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
1632 CallIC(ic); | 1601 CallIC(ic); |
1633 | 1602 |
1634 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); | 1603 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); |
1635 } | 1604 } |
1636 | 1605 |
1637 // In case the array literal contains spread expressions it has two parts. The | 1606 // In case the array literal contains spread expressions it has two parts. The |
1638 // first part is the "static" array, which has a literal index and is handled | 1607 // first part is the "static" array, which has a literal index and is handled |
1639 // above. The second part is the part after the first spread expression | 1608 // above. The second part is the part after the first spread expression |
1640 // (inclusive) and these elements get appended to the array. Note that the | 1609 // (inclusive) and these elements get appended to the array. Note that the |
1641 // number of elements an iterable produces is unknown ahead of time. | 1610 // number of elements an iterable produces is unknown ahead of time. |
1642 if (array_index < length && result_saved) { | 1611 if (array_index < length && result_saved) { |
1643 PopOperand(r3); | 1612 PopOperand(r2); |
1644 result_saved = false; | 1613 result_saved = false; |
1645 } | 1614 } |
1646 for (; array_index < length; array_index++) { | 1615 for (; array_index < length; array_index++) { |
1647 Expression* subexpr = subexprs->at(array_index); | 1616 Expression* subexpr = subexprs->at(array_index); |
1648 | 1617 |
1649 PushOperand(r3); | 1618 PushOperand(r2); |
1650 DCHECK(!subexpr->IsSpread()); | 1619 DCHECK(!subexpr->IsSpread()); |
1651 VisitForStackValue(subexpr); | 1620 VisitForStackValue(subexpr); |
1652 CallRuntimeWithOperands(Runtime::kAppendElement); | 1621 CallRuntimeWithOperands(Runtime::kAppendElement); |
1653 | 1622 |
1654 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); | 1623 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); |
1655 } | 1624 } |
1656 | 1625 |
1657 if (result_saved) { | 1626 if (result_saved) { |
1658 context()->PlugTOS(); | 1627 context()->PlugTOS(); |
1659 } else { | 1628 } else { |
1660 context()->Plug(r3); | 1629 context()->Plug(r2); |
1661 } | 1630 } |
1662 } | 1631 } |
1663 | 1632 |
1664 | |
1665 void FullCodeGenerator::VisitAssignment(Assignment* expr) { | 1633 void FullCodeGenerator::VisitAssignment(Assignment* expr) { |
1666 DCHECK(expr->target()->IsValidReferenceExpressionOrThis()); | 1634 DCHECK(expr->target()->IsValidReferenceExpressionOrThis()); |
1667 | 1635 |
1668 Comment cmnt(masm_, "[ Assignment"); | 1636 Comment cmnt(masm_, "[ Assignment"); |
1669 SetExpressionPosition(expr, INSERT_BREAK); | 1637 SetExpressionPosition(expr, INSERT_BREAK); |
1670 | 1638 |
1671 Property* property = expr->target()->AsProperty(); | 1639 Property* property = expr->target()->AsProperty(); |
1672 LhsKind assign_type = Property::GetAssignType(property); | 1640 LhsKind assign_type = Property::GetAssignType(property); |
1673 | 1641 |
1674 // Evaluate LHS expression. | 1642 // Evaluate LHS expression. |
(...skipping 10 matching lines...) | |
1685 VisitForStackValue(property->obj()); | 1653 VisitForStackValue(property->obj()); |
1686 } | 1654 } |
1687 break; | 1655 break; |
1688 case NAMED_SUPER_PROPERTY: | 1656 case NAMED_SUPER_PROPERTY: |
1689 VisitForStackValue( | 1657 VisitForStackValue( |
1690 property->obj()->AsSuperPropertyReference()->this_var()); | 1658 property->obj()->AsSuperPropertyReference()->this_var()); |
1691 VisitForAccumulatorValue( | 1659 VisitForAccumulatorValue( |
1692 property->obj()->AsSuperPropertyReference()->home_object()); | 1660 property->obj()->AsSuperPropertyReference()->home_object()); |
1693 PushOperand(result_register()); | 1661 PushOperand(result_register()); |
1694 if (expr->is_compound()) { | 1662 if (expr->is_compound()) { |
1695 const Register scratch = r4; | 1663 const Register scratch = r3; |
1696 __ LoadP(scratch, MemOperand(sp, kPointerSize)); | 1664 __ LoadP(scratch, MemOperand(sp, kPointerSize)); |
1697 PushOperands(scratch, result_register()); | 1665 PushOperands(scratch, result_register()); |
1698 } | 1666 } |
1699 break; | 1667 break; |
1700 case KEYED_SUPER_PROPERTY: { | 1668 case KEYED_SUPER_PROPERTY: { |
1701 const Register scratch = r4; | 1669 const Register scratch = r3; |
1702 VisitForStackValue( | 1670 VisitForStackValue( |
1703 property->obj()->AsSuperPropertyReference()->this_var()); | 1671 property->obj()->AsSuperPropertyReference()->this_var()); |
1704 VisitForAccumulatorValue( | 1672 VisitForAccumulatorValue( |
1705 property->obj()->AsSuperPropertyReference()->home_object()); | 1673 property->obj()->AsSuperPropertyReference()->home_object()); |
1706 __ mr(scratch, result_register()); | 1674 __ LoadRR(scratch, result_register()); |
1707 VisitForAccumulatorValue(property->key()); | 1675 VisitForAccumulatorValue(property->key()); |
1708 PushOperands(scratch, result_register()); | 1676 PushOperands(scratch, result_register()); |
1709 if (expr->is_compound()) { | 1677 if (expr->is_compound()) { |
1710 const Register scratch1 = r5; | 1678 const Register scratch1 = r4; |
1711 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); | 1679 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); |
1712 PushOperands(scratch1, scratch, result_register()); | 1680 PushOperands(scratch1, scratch, result_register()); |
1713 } | 1681 } |
1714 break; | 1682 break; |
1715 } | 1683 } |
1716 case KEYED_PROPERTY: | 1684 case KEYED_PROPERTY: |
1717 if (expr->is_compound()) { | 1685 if (expr->is_compound()) { |
1718 VisitForStackValue(property->obj()); | 1686 VisitForStackValue(property->obj()); |
1719 VisitForStackValue(property->key()); | 1687 VisitForStackValue(property->key()); |
1720 __ LoadP(LoadDescriptor::ReceiverRegister(), | 1688 __ LoadP(LoadDescriptor::ReceiverRegister(), |
(...skipping 29 matching lines...) | |
1750 PrepareForBailoutForId(property->LoadId(), TOS_REG); | 1718 PrepareForBailoutForId(property->LoadId(), TOS_REG); |
1751 break; | 1719 break; |
1752 case KEYED_PROPERTY: | 1720 case KEYED_PROPERTY: |
1753 EmitKeyedPropertyLoad(property); | 1721 EmitKeyedPropertyLoad(property); |
1754 PrepareForBailoutForId(property->LoadId(), TOS_REG); | 1722 PrepareForBailoutForId(property->LoadId(), TOS_REG); |
1755 break; | 1723 break; |
1756 } | 1724 } |
1757 } | 1725 } |
1758 | 1726 |
1759 Token::Value op = expr->binary_op(); | 1727 Token::Value op = expr->binary_op(); |
1760 PushOperand(r3); // Left operand goes on the stack. | 1728 PushOperand(r2); // Left operand goes on the stack. |
1761 VisitForAccumulatorValue(expr->value()); | 1729 VisitForAccumulatorValue(expr->value()); |
1762 | 1730 |
1763 AccumulatorValueContext context(this); | 1731 AccumulatorValueContext context(this); |
1764 if (ShouldInlineSmiCase(op)) { | 1732 if (ShouldInlineSmiCase(op)) { |
1765 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(), | 1733 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(), |
1766 expr->value()); | 1734 expr->value()); |
1767 } else { | 1735 } else { |
1768 EmitBinaryOp(expr->binary_operation(), op); | 1736 EmitBinaryOp(expr->binary_operation(), op); |
1769 } | 1737 } |
1770 | 1738 |
1771 // Deoptimization point in case the binary operation may have side effects. | 1739 // Deoptimization point in case the binary operation may have side effects. |
1772 PrepareForBailout(expr->binary_operation(), TOS_REG); | 1740 PrepareForBailout(expr->binary_operation(), TOS_REG); |
1773 } else { | 1741 } else { |
1774 VisitForAccumulatorValue(expr->value()); | 1742 VisitForAccumulatorValue(expr->value()); |
1775 } | 1743 } |
1776 | 1744 |
1777 SetExpressionPosition(expr); | 1745 SetExpressionPosition(expr); |
1778 | 1746 |
1779 // Store the value. | 1747 // Store the value. |
1780 switch (assign_type) { | 1748 switch (assign_type) { |
1781 case VARIABLE: | 1749 case VARIABLE: |
1782 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), | 1750 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), |
1783 expr->op(), expr->AssignmentSlot()); | 1751 expr->op(), expr->AssignmentSlot()); |
1784 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 1752 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
1785 context()->Plug(r3); | 1753 context()->Plug(r2); |
1786 break; | 1754 break; |
1787 case NAMED_PROPERTY: | 1755 case NAMED_PROPERTY: |
1788 EmitNamedPropertyAssignment(expr); | 1756 EmitNamedPropertyAssignment(expr); |
1789 break; | 1757 break; |
1790 case NAMED_SUPER_PROPERTY: | 1758 case NAMED_SUPER_PROPERTY: |
1791 EmitNamedSuperPropertyStore(property); | 1759 EmitNamedSuperPropertyStore(property); |
1792 context()->Plug(r3); | 1760 context()->Plug(r2); |
1793 break; | 1761 break; |
1794 case KEYED_SUPER_PROPERTY: | 1762 case KEYED_SUPER_PROPERTY: |
1795 EmitKeyedSuperPropertyStore(property); | 1763 EmitKeyedSuperPropertyStore(property); |
1796 context()->Plug(r3); | 1764 context()->Plug(r2); |
1797 break; | 1765 break; |
1798 case KEYED_PROPERTY: | 1766 case KEYED_PROPERTY: |
1799 EmitKeyedPropertyAssignment(expr); | 1767 EmitKeyedPropertyAssignment(expr); |
1800 break; | 1768 break; |
1801 } | 1769 } |
1802 } | 1770 } |
1803 | 1771 |
1804 | |
1805 void FullCodeGenerator::VisitYield(Yield* expr) { | 1772 void FullCodeGenerator::VisitYield(Yield* expr) { |
1806 Comment cmnt(masm_, "[ Yield"); | 1773 Comment cmnt(masm_, "[ Yield"); |
1807 SetExpressionPosition(expr); | 1774 SetExpressionPosition(expr); |
1808 | 1775 |
1809 // Evaluate yielded value first; the initial iterator definition depends on | 1776 // Evaluate yielded value first; the initial iterator definition depends on |
1810 // this. It stays on the stack while we update the iterator. | 1777 // this. It stays on the stack while we update the iterator. |
1811 VisitForStackValue(expr->expression()); | 1778 VisitForStackValue(expr->expression()); |
1812 | 1779 |
1813 switch (expr->yield_kind()) { | 1780 switch (expr->yield_kind()) { |
1814 case Yield::kSuspend: | 1781 case Yield::kSuspend: |
1815 // Pop value from top-of-stack slot; box result into result register. | 1782 // Pop value from top-of-stack slot; box result into result register. |
1816 EmitCreateIteratorResult(false); | 1783 EmitCreateIteratorResult(false); |
1817 PushOperand(result_register()); | 1784 PushOperand(result_register()); |
1818 // Fall through. | 1785 // Fall through. |
1819 case Yield::kInitial: { | 1786 case Yield::kInitial: { |
1820 Label suspend, continuation, post_runtime, resume; | 1787 Label suspend, continuation, post_runtime, resume; |
1821 | 1788 |
1822 __ b(&suspend); | 1789 __ b(&suspend, Label::kNear); |
1823 __ bind(&continuation); | 1790 __ bind(&continuation); |
1824 // When we arrive here, the stack top is the resume mode and | 1791 // When we arrive here, the stack top is the resume mode and |
1825 // result_register() holds the input value (the argument given to the | 1792 // result_register() holds the input value (the argument given to the |
1826 // respective resume operation). | 1793 // respective resume operation). |
1827 __ RecordGeneratorContinuation(); | 1794 __ RecordGeneratorContinuation(); |
1828 __ pop(r4); | 1795 __ pop(r3); |
1829 __ CmpSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::RETURN), r0); | 1796 __ CmpSmiLiteral(r3, Smi::FromInt(JSGeneratorObject::RETURN), r0); |
1830 __ bne(&resume); | 1797 __ bne(&resume); |
1831 __ push(result_register()); | 1798 __ push(result_register()); |
1832 EmitCreateIteratorResult(true); | 1799 EmitCreateIteratorResult(true); |
1833 EmitUnwindAndReturn(); | 1800 EmitUnwindAndReturn(); |
1834 | 1801 |
1835 __ bind(&suspend); | 1802 __ bind(&suspend); |
1836 OperandStackDepthIncrement(1); // Not popped on this path. | 1803 OperandStackDepthIncrement(1); // Not popped on this path. |
1837 VisitForAccumulatorValue(expr->generator_object()); | 1804 VisitForAccumulatorValue(expr->generator_object()); |
1838 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); | 1805 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); |
1839 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos())); | 1806 __ LoadSmiLiteral(r3, Smi::FromInt(continuation.pos())); |
1840 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset), | 1807 __ StoreP(r3, |
1841 r0); | 1808 FieldMemOperand(r2, JSGeneratorObject::kContinuationOffset)); |
1842 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0); | 1809 __ StoreP(cp, FieldMemOperand(r2, JSGeneratorObject::kContextOffset)); |
1843 __ mr(r4, cp); | 1810 __ LoadRR(r3, cp); |
1844 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5, | 1811 __ RecordWriteField(r2, JSGeneratorObject::kContextOffset, r3, r4, |
1845 kLRHasBeenSaved, kDontSaveFPRegs); | 1812 kLRHasBeenSaved, kDontSaveFPRegs); |
1846 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset)); | 1813 __ AddP(r3, fp, Operand(StandardFrameConstants::kExpressionsOffset)); |
1847 __ cmp(sp, r4); | 1814 __ CmpP(sp, r3); |
1848 __ beq(&post_runtime); | 1815 __ beq(&post_runtime); |
1849 __ push(r3); // generator object | 1816 __ push(r2); // generator object |
1850 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 1817 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
1851 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 1818 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
1852 __ bind(&post_runtime); | 1819 __ bind(&post_runtime); |
1853 PopOperand(result_register()); | 1820 PopOperand(result_register()); |
1854 EmitReturnSequence(); | 1821 EmitReturnSequence(); |
1855 | 1822 |
1856 __ bind(&resume); | 1823 __ bind(&resume); |
1857 context()->Plug(result_register()); | 1824 context()->Plug(result_register()); |
1858 break; | 1825 break; |
1859 } | 1826 } |
1860 | 1827 |
1861 case Yield::kFinal: { | 1828 case Yield::kFinal: { |
1862 // Pop value from top-of-stack slot, box result into result register. | 1829 // Pop value from top-of-stack slot, box result into result register. |
1863 EmitCreateIteratorResult(true); | 1830 EmitCreateIteratorResult(true); |
1864 EmitUnwindAndReturn(); | 1831 EmitUnwindAndReturn(); |
1865 break; | 1832 break; |
1866 } | 1833 } |
1867 | 1834 |
1868 case Yield::kDelegating: | 1835 case Yield::kDelegating: |
1869 UNREACHABLE(); | 1836 UNREACHABLE(); |
1870 } | 1837 } |
1871 } | 1838 } |
1872 | 1839 |
1873 | |
1874 void FullCodeGenerator::EmitGeneratorResume( | 1840 void FullCodeGenerator::EmitGeneratorResume( |
1875 Expression* generator, Expression* value, | 1841 Expression* generator, Expression* value, |
1876 JSGeneratorObject::ResumeMode resume_mode) { | 1842 JSGeneratorObject::ResumeMode resume_mode) { |
1877 // The value stays in r3, and is ultimately read by the resumed generator, as | 1843 // The value stays in r2, and is ultimately read by the resumed generator, as |
1878 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it | 1844 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it |
1879 // is read to throw the value when the resumed generator is already closed. | 1845 // is read to throw the value when the resumed generator is already closed. |
1880 // r4 will hold the generator object until the activation has been resumed. | 1846 // r3 will hold the generator object until the activation has been resumed. |
1881 VisitForStackValue(generator); | 1847 VisitForStackValue(generator); |
1882 VisitForAccumulatorValue(value); | 1848 VisitForAccumulatorValue(value); |
1883 PopOperand(r4); | 1849 PopOperand(r3); |
1884 | 1850 |
1885 // Store input value into generator object. | 1851 // Store input value into generator object. |
1886 __ StoreP(result_register(), | 1852 __ StoreP(result_register(), |
1887 FieldMemOperand(r4, JSGeneratorObject::kInputOffset), r0); | 1853 FieldMemOperand(r3, JSGeneratorObject::kInputOffset), r0); |
1888 __ mr(r5, result_register()); | 1854 __ LoadRR(r4, result_register()); |
1889 __ RecordWriteField(r4, JSGeneratorObject::kInputOffset, r5, r6, | 1855 __ RecordWriteField(r3, JSGeneratorObject::kInputOffset, r4, r5, |
1890 kLRHasBeenSaved, kDontSaveFPRegs); | 1856 kLRHasBeenSaved, kDontSaveFPRegs); |
1891 | 1857 |
1892 // Load suspended function and context. | 1858 // Load suspended function and context. |
1893 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset)); | 1859 __ LoadP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset)); |
1894 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset)); | 1860 __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset)); |
1895 | 1861 |
1896 // Load receiver and store as the first argument. | 1862 // Load receiver and store as the first argument. |
1897 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset)); | 1863 __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset)); |
1898 __ push(r5); | 1864 __ push(r4); |
1899 | 1865 |
1900 // Push holes for the rest of the arguments to the generator function. | 1866 // Push holes for the rest of the arguments to the generator function. |
1901 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset)); | 1867 __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset)); |
1902 __ LoadWordArith( | 1868 __ LoadW( |
1903 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset)); | 1869 r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset)); |
1904 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex); | 1870 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); |
1905 Label argument_loop, push_frame; | 1871 Label argument_loop, push_frame; |
1906 #if V8_TARGET_ARCH_PPC64 | 1872 #if V8_TARGET_ARCH_S390X |
1907 __ cmpi(r6, Operand::Zero()); | 1873 __ CmpP(r5, Operand::Zero()); |
1908 __ beq(&push_frame); | 1874 __ beq(&push_frame, Label::kNear); |
1909 #else | 1875 #else |
1910 __ SmiUntag(r6, SetRC); | 1876 __ SmiUntag(r5); |
1911 __ beq(&push_frame, cr0); | 1877 __ beq(&push_frame, Label::kNear); |
1912 #endif | 1878 #endif |
1913 __ mtctr(r6); | 1879 __ LoadRR(r0, r5); |
1914 __ bind(&argument_loop); | 1880 __ bind(&argument_loop); |
1915 __ push(r5); | 1881 __ push(r4); |
1916 __ bdnz(&argument_loop); | 1882 __ SubP(r0, Operand(1)); |
1883 __ bne(&argument_loop); | |
1917 | 1884 |
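Note on the loop rewrite above: PPC's mtctr/bdnz counted loop has no direct S390 equivalent here, so the port keeps the count in r0 and branches while it is non-zero. A minimal C++ sketch of the control flow both emitted sequences share (names and signature are illustrative only, not V8 API):

  #include <cstddef>

  // Pushes `count` copies of `hole` onto a downward-growing stack, mirroring
  // the emitted argument_loop: store, decrement the counter, branch until it
  // reaches zero. Assumes count > 0, as guaranteed by the beq(&push_frame)
  // check emitted before the loop.
  static void PushHoles(void** sp, std::size_t count, void* hole) {
    do {
      *--sp = hole;     // __ push(r4)
    } while (--count);  // __ SubP(r0, Operand(1)); __ bne(&argument_loop)
  }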
1918 // Enter a new JavaScript frame, and initialize its slots as they were when | 1885 // Enter a new JavaScript frame, and initialize its slots as they were when |
1919 // the generator was suspended. | 1886 // the generator was suspended. |
1920 Label resume_frame, done; | 1887 Label resume_frame, done; |
1921 __ bind(&push_frame); | 1888 __ bind(&push_frame); |
1922 __ b(&resume_frame, SetLK); | 1889 __ b(r14, &resume_frame); // brasl |
1923 __ b(&done); | 1890 __ b(&done); |
1924 __ bind(&resume_frame); | 1891 __ bind(&resume_frame); |
1925 // lr = return address. | 1892 // lr = return address. |
1926 // fp = caller's frame pointer. | 1893 // fp = caller's frame pointer. |
1927 // cp = callee's context, | 1894 // cp = callee's context, |
1928 // r7 = callee's JS function. | 1895 // r6 = callee's JS function. |
1929 __ PushFixedFrame(r7); | 1896 __ PushFixedFrame(r6); |
1930 // Adjust FP to point to saved FP. | 1897 // Adjust FP to point to saved FP. |
1931 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 1898 __ lay(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp)); |
1932 | 1899 |
1933 // Load the operand stack size. | 1900 // Load the operand stack size. |
1934 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset)); | 1901 __ LoadP(r5, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset)); |
1935 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset)); | 1902 __ LoadP(r5, FieldMemOperand(r5, FixedArray::kLengthOffset)); |
1936 __ SmiUntag(r6, SetRC); | 1903 __ SmiUntag(r5); |
1937 | 1904 |
1938 // If we are sending a value and there is no operand stack, we can jump back | 1905 // If we are sending a value and there is no operand stack, we can jump back |
1939 // in directly. | 1906 // in directly. |
1940 Label call_resume; | 1907 Label call_resume; |
1941 if (resume_mode == JSGeneratorObject::NEXT) { | 1908 if (resume_mode == JSGeneratorObject::NEXT) { |
1942 Label slow_resume; | 1909 Label slow_resume; |
1943 __ bne(&slow_resume, cr0); | 1910 __ bne(&slow_resume, Label::kNear); |
1944 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset)); | 1911 __ LoadP(ip, FieldMemOperand(r6, JSFunction::kCodeEntryOffset)); |
1945 { | 1912 __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset)); |
1946 ConstantPoolUnavailableScope constant_pool_unavailable(masm_); | 1913 __ SmiUntag(r4); |
1947 if (FLAG_enable_embedded_constant_pool) { | 1914 __ AddP(ip, ip, r4); |
1948 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip); | 1915 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); |
1949 } | 1916 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset)); |
1950 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset)); | 1917 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. |
1951 __ SmiUntag(r5); | 1918 __ Jump(ip); |
1952 __ add(ip, ip, r5); | 1919 __ bind(&slow_resume); |
1953 __ LoadSmiLiteral(r5, | |
1954 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); | |
1955 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset), | |
1956 r0); | |
1957 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. | |
1958 __ Jump(ip); | |
1959 __ bind(&slow_resume); | |
1960 } | |
1961 } else { | 1920 } else { |
1962 __ beq(&call_resume, cr0); | 1921 __ beq(&call_resume); |
1963 } | 1922 } |
1964 | 1923 |
1965 // Otherwise, we push holes for the operand stack and call the runtime to fix | 1924 // Otherwise, we push holes for the operand stack and call the runtime to fix |
1966 // up the stack and the handlers. | 1925 // up the stack and the handlers. |
1967 Label operand_loop; | 1926 Label operand_loop; |
1968 __ mtctr(r6); | 1927 __ LoadRR(r0, r5); |
1969 __ bind(&operand_loop); | 1928 __ bind(&operand_loop); |
1970 __ push(r5); | 1929 __ push(r4); |
1971 __ bdnz(&operand_loop); | 1930 __ SubP(r0, Operand(1)); |
1931 __ bne(&operand_loop); | |
1972 | 1932 |
1973 __ bind(&call_resume); | 1933 __ bind(&call_resume); |
1974 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. | 1934 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. |
1975 DCHECK(!result_register().is(r4)); | 1935 DCHECK(!result_register().is(r3)); |
1976 __ Push(r4, result_register()); | 1936 __ Push(r3, result_register()); |
1977 __ Push(Smi::FromInt(resume_mode)); | 1937 __ Push(Smi::FromInt(resume_mode)); |
1978 __ CallRuntime(Runtime::kResumeJSGeneratorObject); | 1938 __ CallRuntime(Runtime::kResumeJSGeneratorObject); |
1979 // Not reached: the runtime call returns elsewhere. | 1939 // Not reached: the runtime call returns elsewhere. |
1980 __ stop("not-reached"); | 1940 __ stop("not-reached"); |
1981 | 1941 |
1982 __ bind(&done); | 1942 __ bind(&done); |
1983 context()->Plug(result_register()); | 1943 context()->Plug(result_register()); |
1984 } | 1944 } |
1985 | 1945 |
1986 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) { | 1946 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) { |
(...skipping 15 matching lines...) | |
2002 | 1962 |
2003 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) { | 1963 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) { |
2004 OperandStackDepthDecrement(2); | 1964 OperandStackDepthDecrement(2); |
2005 __ Pop(reg1, reg2); | 1965 __ Pop(reg1, reg2); |
2006 } | 1966 } |
2007 | 1967 |
2008 void FullCodeGenerator::EmitOperandStackDepthCheck() { | 1968 void FullCodeGenerator::EmitOperandStackDepthCheck() { |
2009 if (FLAG_debug_code) { | 1969 if (FLAG_debug_code) { |
2010 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp + | 1970 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp + |
2011 operand_stack_depth_ * kPointerSize; | 1971 operand_stack_depth_ * kPointerSize; |
2012 __ sub(r3, fp, sp); | 1972 __ SubP(r2, fp, sp); |
2013 __ cmpi(r3, Operand(expected_diff)); | 1973 __ CmpP(r2, Operand(expected_diff)); |
2014 __ Assert(eq, kUnexpectedStackDepth); | 1974 __ Assert(eq, kUnexpectedStackDepth); |
2015 } | 1975 } |
2016 } | 1976 } |
2017 | 1977 |
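The depth check above rests on a simple frame-layout identity: everything between fp and sp is the fixed frame plus the operands full-codegen has pushed. A hedged C++ sketch of the arithmetic being asserted (the size parameters are placeholders for the real frame constants):

  #include <cassert>
  #include <cstdint>

  // fp - sp == kFixedFrameSizeFromFp + operand_stack_depth * kPointerSize
  static void CheckOperandStackDepth(uintptr_t fp, uintptr_t sp,
                                     int operand_stack_depth,
                                     int fixed_frame_size_from_fp,  // placeholder
                                     int pointer_size) {            // placeholder
    intptr_t expected =
        fixed_frame_size_from_fp + operand_stack_depth * pointer_size;
    assert(static_cast<intptr_t>(fp - sp) == expected);  // __ Assert(eq, ...)
  }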
2018 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { | 1978 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { |
2019 Label allocate, done_allocate; | 1979 Label allocate, done_allocate; |
2020 | 1980 |
2021 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate, TAG_OBJECT); | 1981 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &allocate, TAG_OBJECT); |
2022 __ b(&done_allocate); | 1982 __ b(&done_allocate); |
2023 | 1983 |
2024 __ bind(&allocate); | 1984 __ bind(&allocate); |
2025 __ Push(Smi::FromInt(JSIteratorResult::kSize)); | 1985 __ Push(Smi::FromInt(JSIteratorResult::kSize)); |
2026 __ CallRuntime(Runtime::kAllocateInNewSpace); | 1986 __ CallRuntime(Runtime::kAllocateInNewSpace); |
2027 | 1987 |
2028 __ bind(&done_allocate); | 1988 __ bind(&done_allocate); |
2029 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4); | 1989 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3); |
2030 PopOperand(r5); | 1990 PopOperand(r4); |
2031 __ LoadRoot(r6, | 1991 __ LoadRoot(r5, |
2032 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); | 1992 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); |
2033 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex); | 1993 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
2034 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0); | 1994 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0); |
2035 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); | 1995 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0); |
2036 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0); | 1996 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0); |
2037 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0); | 1997 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0); |
2038 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0); | 1998 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0); |
2039 } | 1999 } |
2040 | 2000 |
2041 | |
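For orientation, the stores in EmitCreateIteratorResult build the iterator result object field by field. A rough C++ mirror of that write order (a conceptual sketch only, not V8's real heap layout or offsets):

  // Hypothetical flat view of the writes performed above.
  struct IteratorResultWrites {
    void* map;         // native-context iterator-result map (loaded into r3)
    void* properties;  // empty fixed array root
    void* elements;    // empty fixed array root
    void* value;       // the operand popped into r4
    void* done;        // true/false root, chosen by the |done| parameter
  };

  static void FillIteratorResult(IteratorResultWrites* obj, void* map,
                                 void* empty_fixed_array, void* value,
                                 void* done_root) {
    obj->map = map;
    obj->properties = empty_fixed_array;
    obj->elements = empty_fixed_array;
    obj->value = value;
    obj->done = done_root;
  }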
2042 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | 2001 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, |
2043 Token::Value op, | 2002 Token::Value op, |
2044 Expression* left_expr, | 2003 Expression* left_expr, |
2045 Expression* right_expr) { | 2004 Expression* right_expr) { |
2046 Label done, smi_case, stub_call; | 2005 Label done, smi_case, stub_call; |
2047 | 2006 |
2048 Register scratch1 = r5; | 2007 Register scratch1 = r4; |
2049 Register scratch2 = r6; | 2008 Register scratch2 = r5; |
2050 | 2009 |
2051 // Get the arguments. | 2010 // Get the arguments. |
2052 Register left = r4; | 2011 Register left = r3; |
2053 Register right = r3; | 2012 Register right = r2; |
2054 PopOperand(left); | 2013 PopOperand(left); |
2055 | 2014 |
2056 // Perform combined smi check on both operands. | 2015 // Perform combined smi check on both operands. |
2057 __ orx(scratch1, left, right); | 2016 __ LoadRR(scratch1, right); |
2017 __ OrP(scratch1, left); | |
2058 STATIC_ASSERT(kSmiTag == 0); | 2018 STATIC_ASSERT(kSmiTag == 0); |
2059 JumpPatchSite patch_site(masm_); | 2019 JumpPatchSite patch_site(masm_); |
2060 patch_site.EmitJumpIfSmi(scratch1, &smi_case); | 2020 patch_site.EmitJumpIfSmi(scratch1, &smi_case); |
2061 | 2021 |
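The combined check above works because smis carry a zero tag bit, so the OR of two tagged words is itself smi-tagged only if both inputs were. A small sketch of the invariant, assuming the usual one-bit tag in the low bit (the constant name is made up for illustration):

  #include <cstdint>

  constexpr intptr_t kSmiTagMaskSketch = 1;  // low bit; placeholder name

  static bool BothAreSmis(intptr_t left, intptr_t right) {
    // __ LoadRR(scratch1, right); __ OrP(scratch1, left);
    // patch_site.EmitJumpIfSmi(scratch1, &smi_case);
    return ((left | right) & kSmiTagMaskSketch) == 0;
  }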
2062 __ bind(&stub_call); | 2022 __ bind(&stub_call); |
2063 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); | 2023 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); |
2064 CallIC(code, expr->BinaryOperationFeedbackId()); | 2024 CallIC(code, expr->BinaryOperationFeedbackId()); |
2065 patch_site.EmitPatchInfo(); | 2025 patch_site.EmitPatchInfo(); |
2066 __ b(&done); | 2026 __ b(&done); |
2067 | 2027 |
2068 __ bind(&smi_case); | 2028 __ bind(&smi_case); |
2069 // Smi case. This code works the same way as the smi-smi case in the type | 2029 // Smi case. This code works the same way as the smi-smi case in the type |
2070 // recording binary operation stub. | 2030 // recording binary operation stub. |
2071 switch (op) { | 2031 switch (op) { |
2072 case Token::SAR: | 2032 case Token::SAR: |
2073 __ GetLeastBitsFromSmi(scratch1, right, 5); | 2033 __ GetLeastBitsFromSmi(scratch1, right, 5); |
2074 __ ShiftRightArith(right, left, scratch1); | 2034 __ ShiftRightArithP(right, left, scratch1); |
2075 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize)); | 2035 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize)); |
2076 break; | 2036 break; |
2077 case Token::SHL: { | 2037 case Token::SHL: { |
2078 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2038 __ GetLeastBitsFromSmi(scratch2, right, 5); |
2079 #if V8_TARGET_ARCH_PPC64 | 2039 #if V8_TARGET_ARCH_S390X |
2080 __ ShiftLeft_(right, left, scratch2); | 2040 __ ShiftLeftP(right, left, scratch2); |
2081 #else | 2041 #else |
2082 __ SmiUntag(scratch1, left); | 2042 __ SmiUntag(scratch1, left); |
2083 __ ShiftLeft_(scratch1, scratch1, scratch2); | 2043 __ ShiftLeftP(scratch1, scratch1, scratch2); |
2084 // Check that the *signed* result fits in a smi | 2044 // Check that the *signed* result fits in a smi |
2085 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call); | 2045 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call); |
2086 __ SmiTag(right, scratch1); | 2046 __ SmiTag(right, scratch1); |
2087 #endif | 2047 #endif |
2088 break; | 2048 break; |
2089 } | 2049 } |
2090 case Token::SHR: { | 2050 case Token::SHR: { |
2091 __ SmiUntag(scratch1, left); | 2051 __ SmiUntag(scratch1, left); |
2092 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2052 __ GetLeastBitsFromSmi(scratch2, right, 5); |
2093 __ srw(scratch1, scratch1, scratch2); | 2053 __ srl(scratch1, scratch2); |
2094 // Unsigned shift is not allowed to produce a negative number. | 2054 // Unsigned shift is not allowed to produce a negative number. |
2095 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call); | 2055 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call); |
2096 __ SmiTag(right, scratch1); | 2056 __ SmiTag(right, scratch1); |
2097 break; | 2057 break; |
2098 } | 2058 } |
2099 case Token::ADD: { | 2059 case Token::ADD: { |
2100 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0); | 2060 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0); |
2101 __ BranchOnOverflow(&stub_call); | 2061 __ BranchOnOverflow(&stub_call); |
2102 __ mr(right, scratch1); | 2062 __ LoadRR(right, scratch1); |
2103 break; | 2063 break; |
2104 } | 2064 } |
2105 case Token::SUB: { | 2065 case Token::SUB: { |
2106 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0); | 2066 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0); |
2107 __ BranchOnOverflow(&stub_call); | 2067 __ BranchOnOverflow(&stub_call); |
2108 __ mr(right, scratch1); | 2068 __ LoadRR(right, scratch1); |
2109 break; | 2069 break; |
2110 } | 2070 } |
2111 case Token::MUL: { | 2071 case Token::MUL: { |
2112 Label mul_zero; | 2072 Label mul_zero; |
2113 #if V8_TARGET_ARCH_PPC64 | 2073 #if V8_TARGET_ARCH_S390X |
2114 // Remove tag from both operands. | 2074 // Remove tag from both operands. |
2115 __ SmiUntag(ip, right); | 2075 __ SmiUntag(ip, right); |
2116 __ SmiUntag(r0, left); | 2076 __ SmiUntag(scratch2, left); |
2117 __ Mul(scratch1, r0, ip); | 2077 __ mr_z(scratch1, ip); |
2118 // Check for overflowing the smi range - no overflow if higher 33 bits of | 2078 // Check for overflowing the smi range - no overflow if higher 33 bits of |
2119 // the result are identical. | 2079 // the result are identical. |
2120 __ TestIfInt32(scratch1, r0); | 2080 __ lr(ip, scratch2); // 32 bit load |
2081 __ sra(ip, Operand(31)); | |
2082 __ cr_z(ip, scratch1); // 32 bit compare | |
2121 __ bne(&stub_call); | 2083 __ bne(&stub_call); |
2122 #else | 2084 #else |
2123 __ SmiUntag(ip, right); | 2085 __ SmiUntag(ip, right); |
2124 __ mullw(scratch1, left, ip); | 2086 __ LoadRR(scratch2, left); // load into low order of reg pair |
2125 __ mulhw(scratch2, left, ip); | 2087 __ mr_z(scratch1, ip); // R4:R5 = R5 * ip |
2126 // Check for overflowing the smi range - no overflow if the higher 33 bits of | 2088 // Check for overflowing the smi range - no overflow if the higher 33 bits of |
2127 // the result are identical. | 2089 // the result are identical. |
2128 __ TestIfInt32(scratch2, scratch1, ip); | 2090 __ TestIfInt32(scratch1, scratch2, ip); |
2129 __ bne(&stub_call); | 2091 __ bne(&stub_call); |
2130 #endif | 2092 #endif |
2131 // Go slow on zero result to handle -0. | 2093 // Go slow on zero result to handle -0. |
2132 __ cmpi(scratch1, Operand::Zero()); | 2094 __ chi(scratch2, Operand::Zero()); |
2133 __ beq(&mul_zero); | 2095 __ beq(&mul_zero, Label::kNear); |
2134 #if V8_TARGET_ARCH_PPC64 | 2096 #if V8_TARGET_ARCH_S390X |
2135 __ SmiTag(right, scratch1); | 2097 __ SmiTag(right, scratch2); |
2136 #else | 2098 #else |
2137 __ mr(right, scratch1); | 2099 __ LoadRR(right, scratch2); |
2138 #endif | 2100 #endif |
2139 __ b(&done); | 2101 __ b(&done); |
2140 // We need -0 if we were multiplying a negative number with 0 to get 0. | 2102 // We need -0 if we were multiplying a negative number with 0 to get 0. |
2141 // We know one of them was zero. | 2103 // We know one of them was zero. |
2142 __ bind(&mul_zero); | 2104 __ bind(&mul_zero); |
2143 __ add(scratch2, right, left); | 2105 __ AddP(scratch2, right, left); |
2144 __ cmpi(scratch2, Operand::Zero()); | 2106 __ CmpP(scratch2, Operand::Zero()); |
2145 __ blt(&stub_call); | 2107 __ blt(&stub_call); |
2146 __ LoadSmiLiteral(right, Smi::FromInt(0)); | 2108 __ LoadSmiLiteral(right, Smi::FromInt(0)); |
2147 break; | 2109 break; |
2148 } | 2110 } |
2149 case Token::BIT_OR: | 2111 case Token::BIT_OR: |
2150 __ orx(right, left, right); | 2112 __ OrP(right, left); |
2151 break; | 2113 break; |
2152 case Token::BIT_AND: | 2114 case Token::BIT_AND: |
2153 __ and_(right, left, right); | 2115 __ AndP(right, left); |
2154 break; | 2116 break; |
2155 case Token::BIT_XOR: | 2117 case Token::BIT_XOR: |
2156 __ xor_(right, left, right); | 2118 __ XorP(right, left); |
2157 break; | 2119 break; |
2158 default: | 2120 default: |
2159 UNREACHABLE(); | 2121 UNREACHABLE(); |
2160 } | 2122 } |
2161 | 2123 |
2162 __ bind(&done); | 2124 __ bind(&done); |
2163 context()->Plug(r3); | 2125 context()->Plug(r2); |
2164 } | 2126 } |
2165 | 2127 |
2166 | |
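The smi fast paths above all share one shape: do the arithmetic on untagged payloads, then bail to the generic stub if the result no longer fits a smi. A hedged C++ sketch of the two less obvious checks, the multiply overflow test (the product fits exactly when its upper 33 bits are copies of the sign bit, which is what the sra/compare pair checks) and the -0 rule handled via mul_zero, plus the 32-bit shift-left fit check. These helpers are illustrative only, assuming a 31-bit payload with a one-bit tag; they are not V8 code:

  #include <cstdint>
  #include <optional>

  // Multiply two untagged smi payloads in 64-bit arithmetic. The product is a
  // valid payload only if sign-extending its low 32 bits reproduces it, and
  // only if it is not the -0 case (zero product with a negative operand),
  // which must go to the stub to produce a heap number.
  static std::optional<int32_t> SmiMulPayload(int32_t left, int32_t right) {
    int64_t product = static_cast<int64_t>(left) * static_cast<int64_t>(right);
    if (static_cast<int64_t>(static_cast<int32_t>(product)) != product) {
      return std::nullopt;  // overflow: __ bne(&stub_call)
    }
    if (product == 0 && (left < 0 || right < 0)) {
      return std::nullopt;  // would be -0: __ blt(&stub_call) after mul_zero
    }
    return static_cast<int32_t>(product);
  }

  // 32-bit shift-left fast path: the shifted payload must still fit in the
  // 31-bit signed smi range so that the subsequent SmiTag cannot overflow.
  static std::optional<int32_t> SmiShlPayload(int32_t payload, int shift) {
    int64_t result = static_cast<int64_t>(payload) << (shift & 31);
    if (result < -(INT64_C(1) << 30) || result >= (INT64_C(1) << 30)) {
      return std::nullopt;  // __ JumpIfNotSmiCandidate(...)
    }
    return static_cast<int32_t>(result);
  }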
2167 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) { | 2128 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) { |
2168 for (int i = 0; i < lit->properties()->length(); i++) { | 2129 for (int i = 0; i < lit->properties()->length(); i++) { |
2169 ObjectLiteral::Property* property = lit->properties()->at(i); | 2130 ObjectLiteral::Property* property = lit->properties()->at(i); |
2170 Expression* value = property->value(); | 2131 Expression* value = property->value(); |
2171 | 2132 |
2172 Register scratch = r4; | 2133 Register scratch = r3; |
2173 if (property->is_static()) { | 2134 if (property->is_static()) { |
2174 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor | 2135 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor |
2175 } else { | 2136 } else { |
2176 __ LoadP(scratch, MemOperand(sp, 0)); // prototype | 2137 __ LoadP(scratch, MemOperand(sp, 0)); // prototype |
2177 } | 2138 } |
2178 PushOperand(scratch); | 2139 PushOperand(scratch); |
2179 EmitPropertyKey(property, lit->GetIdForProperty(i)); | 2140 EmitPropertyKey(property, lit->GetIdForProperty(i)); |
2180 | 2141 |
2181 // The static prototype property is read-only. We handle the non-computed | 2142 // The static prototype property is read-only. We handle the non-computed |
2182 // property name case in the parser. Since this is the only case where we | 2143 // property name case in the parser. Since this is the only case where we |
2183 // need to check for an own read-only property, we special-case it here so | 2144 // need to check for an own read-only property, we special-case it here so |
2184 // that we do not need to do the check for every property. | 2145 // that we do not need to do the check for every property. |
2185 if (property->is_static() && property->is_computed_name()) { | 2146 if (property->is_static() && property->is_computed_name()) { |
2186 __ CallRuntime(Runtime::kThrowIfStaticPrototype); | 2147 __ CallRuntime(Runtime::kThrowIfStaticPrototype); |
2187 __ push(r3); | 2148 __ push(r2); |
2188 } | 2149 } |
2189 | 2150 |
2190 VisitForStackValue(value); | 2151 VisitForStackValue(value); |
2191 if (NeedsHomeObject(value)) { | 2152 if (NeedsHomeObject(value)) { |
2192 EmitSetHomeObject(value, 2, property->GetSlot()); | 2153 EmitSetHomeObject(value, 2, property->GetSlot()); |
2193 } | 2154 } |
2194 | 2155 |
2195 switch (property->kind()) { | 2156 switch (property->kind()) { |
2196 case ObjectLiteral::Property::CONSTANT: | 2157 case ObjectLiteral::Property::CONSTANT: |
2197 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | 2158 case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
(...skipping 14 matching lines...) | |
2212 PushOperand(Smi::FromInt(DONT_ENUM)); | 2173 PushOperand(Smi::FromInt(DONT_ENUM)); |
2213 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); | 2174 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); |
2214 break; | 2175 break; |
2215 | 2176 |
2216 default: | 2177 default: |
2217 UNREACHABLE(); | 2178 UNREACHABLE(); |
2218 } | 2179 } |
2219 } | 2180 } |
2220 } | 2181 } |
2221 | 2182 |
2222 | |
2223 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { | 2183 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { |
2224 PopOperand(r4); | 2184 PopOperand(r3); |
2225 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); | 2185 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); |
2226 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. | 2186 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. |
2227 CallIC(code, expr->BinaryOperationFeedbackId()); | 2187 CallIC(code, expr->BinaryOperationFeedbackId()); |
2228 patch_site.EmitPatchInfo(); | 2188 patch_site.EmitPatchInfo(); |
2229 context()->Plug(r3); | 2189 context()->Plug(r2); |
2230 } | 2190 } |
2231 | 2191 |
2232 | |
2233 void FullCodeGenerator::EmitAssignment(Expression* expr, | 2192 void FullCodeGenerator::EmitAssignment(Expression* expr, |
2234 FeedbackVectorSlot slot) { | 2193 FeedbackVectorSlot slot) { |
2235 DCHECK(expr->IsValidReferenceExpressionOrThis()); | 2194 DCHECK(expr->IsValidReferenceExpressionOrThis()); |
2236 | 2195 |
2237 Property* prop = expr->AsProperty(); | 2196 Property* prop = expr->AsProperty(); |
2238 LhsKind assign_type = Property::GetAssignType(prop); | 2197 LhsKind assign_type = Property::GetAssignType(prop); |
2239 | 2198 |
2240 switch (assign_type) { | 2199 switch (assign_type) { |
2241 case VARIABLE: { | 2200 case VARIABLE: { |
2242 Variable* var = expr->AsVariableProxy()->var(); | 2201 Variable* var = expr->AsVariableProxy()->var(); |
2243 EffectContext context(this); | 2202 EffectContext context(this); |
2244 EmitVariableAssignment(var, Token::ASSIGN, slot); | 2203 EmitVariableAssignment(var, Token::ASSIGN, slot); |
2245 break; | 2204 break; |
2246 } | 2205 } |
2247 case NAMED_PROPERTY: { | 2206 case NAMED_PROPERTY: { |
2248 PushOperand(r3); // Preserve value. | 2207 PushOperand(r2); // Preserve value. |
2249 VisitForAccumulatorValue(prop->obj()); | 2208 VisitForAccumulatorValue(prop->obj()); |
2250 __ Move(StoreDescriptor::ReceiverRegister(), r3); | 2209 __ Move(StoreDescriptor::ReceiverRegister(), r2); |
2251 PopOperand(StoreDescriptor::ValueRegister()); // Restore value. | 2210 PopOperand(StoreDescriptor::ValueRegister()); // Restore value. |
2252 __ mov(StoreDescriptor::NameRegister(), | 2211 __ mov(StoreDescriptor::NameRegister(), |
2253 Operand(prop->key()->AsLiteral()->value())); | 2212 Operand(prop->key()->AsLiteral()->value())); |
2254 EmitLoadStoreICSlot(slot); | 2213 EmitLoadStoreICSlot(slot); |
2255 CallStoreIC(); | 2214 CallStoreIC(); |
2256 break; | 2215 break; |
2257 } | 2216 } |
2258 case NAMED_SUPER_PROPERTY: { | 2217 case NAMED_SUPER_PROPERTY: { |
2259 PushOperand(r3); | 2218 PushOperand(r2); |
2260 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | 2219 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
2261 VisitForAccumulatorValue( | 2220 VisitForAccumulatorValue( |
2262 prop->obj()->AsSuperPropertyReference()->home_object()); | 2221 prop->obj()->AsSuperPropertyReference()->home_object()); |
2263 // stack: value, this; r3: home_object | 2222 // stack: value, this; r2: home_object |
2264 Register scratch = r5; | 2223 Register scratch = r4; |
2265 Register scratch2 = r6; | 2224 Register scratch2 = r5; |
2266 __ mr(scratch, result_register()); // home_object | 2225 __ LoadRR(scratch, result_register()); // home_object |
2267 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value | 2226 __ LoadP(r2, MemOperand(sp, kPointerSize)); // value |
2268 __ LoadP(scratch2, MemOperand(sp, 0)); // this | 2227 __ LoadP(scratch2, MemOperand(sp, 0)); // this |
2269 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this | 2228 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this |
2270 __ StoreP(scratch, MemOperand(sp, 0)); // home_object | 2229 __ StoreP(scratch, MemOperand(sp, 0)); // home_object |
2271 // stack: this, home_object; r3: value | 2230 // stack: this, home_object; r2: value |
2272 EmitNamedSuperPropertyStore(prop); | 2231 EmitNamedSuperPropertyStore(prop); |
2273 break; | 2232 break; |
2274 } | 2233 } |
2275 case KEYED_SUPER_PROPERTY: { | 2234 case KEYED_SUPER_PROPERTY: { |
2276 PushOperand(r3); | 2235 PushOperand(r2); |
2277 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | 2236 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
2278 VisitForStackValue( | 2237 VisitForStackValue( |
2279 prop->obj()->AsSuperPropertyReference()->home_object()); | 2238 prop->obj()->AsSuperPropertyReference()->home_object()); |
2280 VisitForAccumulatorValue(prop->key()); | 2239 VisitForAccumulatorValue(prop->key()); |
2281 Register scratch = r5; | 2240 Register scratch = r4; |
2282 Register scratch2 = r6; | 2241 Register scratch2 = r5; |
2283 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value | 2242 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value |
2284 // stack: value, this, home_object; r3: key, r6: value | 2243 // stack: value, this, home_object; r2: key, r5: value |
2285 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this | 2244 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this |
2286 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize)); | 2245 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize)); |
2287 __ LoadP(scratch, MemOperand(sp, 0)); // home_object | 2246 __ LoadP(scratch, MemOperand(sp, 0)); // home_object |
2288 __ StoreP(scratch, MemOperand(sp, kPointerSize)); | 2247 __ StoreP(scratch, MemOperand(sp, kPointerSize)); |
2289 __ StoreP(r3, MemOperand(sp, 0)); | 2248 __ StoreP(r2, MemOperand(sp, 0)); |
2290 __ Move(r3, scratch2); | 2249 __ Move(r2, scratch2); |
2291 // stack: this, home_object, key; r3: value. | 2250 // stack: this, home_object, key; r2: value. |
2292 EmitKeyedSuperPropertyStore(prop); | 2251 EmitKeyedSuperPropertyStore(prop); |
2293 break; | 2252 break; |
2294 } | 2253 } |
2295 case KEYED_PROPERTY: { | 2254 case KEYED_PROPERTY: { |
2296 PushOperand(r3); // Preserve value. | 2255 PushOperand(r2); // Preserve value. |
2297 VisitForStackValue(prop->obj()); | 2256 VisitForStackValue(prop->obj()); |
2298 VisitForAccumulatorValue(prop->key()); | 2257 VisitForAccumulatorValue(prop->key()); |
2299 __ Move(StoreDescriptor::NameRegister(), r3); | 2258 __ Move(StoreDescriptor::NameRegister(), r2); |
2300 PopOperands(StoreDescriptor::ValueRegister(), | 2259 PopOperands(StoreDescriptor::ValueRegister(), |
2301 StoreDescriptor::ReceiverRegister()); | 2260 StoreDescriptor::ReceiverRegister()); |
2302 EmitLoadStoreICSlot(slot); | 2261 EmitLoadStoreICSlot(slot); |
2303 Handle<Code> ic = | 2262 Handle<Code> ic = |
2304 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | 2263 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
2305 CallIC(ic); | 2264 CallIC(ic); |
2306 break; | 2265 break; |
2307 } | 2266 } |
2308 } | 2267 } |
2309 context()->Plug(r3); | 2268 context()->Plug(r2); |
2310 } | 2269 } |
2311 | 2270 |
2312 | |
2313 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( | 2271 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( |
2314 Variable* var, MemOperand location) { | 2272 Variable* var, MemOperand location) { |
2315 __ StoreP(result_register(), location, r0); | 2273 __ StoreP(result_register(), location); |
2316 if (var->IsContextSlot()) { | 2274 if (var->IsContextSlot()) { |
2317 // RecordWrite may destroy all its register arguments. | 2275 // RecordWrite may destroy all its register arguments. |
2318 __ mr(r6, result_register()); | 2276 __ LoadRR(r5, result_register()); |
2319 int offset = Context::SlotOffset(var->index()); | 2277 int offset = Context::SlotOffset(var->index()); |
2320 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved, | 2278 __ RecordWriteContextSlot(r3, offset, r5, r4, kLRHasBeenSaved, |
2321 kDontSaveFPRegs); | 2279 kDontSaveFPRegs); |
2322 } | 2280 } |
2323 } | 2281 } |
2324 | 2282 |
2325 | |
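The var->IsContextSlot() branch above exists because only heap-resident slots need a GC write barrier; stores into the stack frame are found by the stack walk anyway. A hedged sketch of that split (helper names are made up for illustration, and the barrier body is deliberately a stub):

  // Placeholder for the bookkeeping RecordWriteContextSlot performs so that
  // the collector learns about the new heap-to-heap pointer; a no-op here.
  static void RecordWriteSketch(void** slot, void* value) {
    (void)slot;
    (void)value;
  }

  static void StoreToSlot(void** slot, void* value, bool is_context_slot) {
    *slot = value;                     // __ StoreP(result_register(), location)
    if (is_context_slot) {
      RecordWriteSketch(slot, value);  // __ RecordWriteContextSlot(...)
    }
  }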
2326 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, | 2283 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, |
2327 FeedbackVectorSlot slot) { | 2284 FeedbackVectorSlot slot) { |
2328 if (var->IsUnallocated()) { | 2285 if (var->IsUnallocated()) { |
2329 // Global var, const, or let. | 2286 // Global var, const, or let. |
2330 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); | 2287 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); |
2331 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister()); | 2288 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister()); |
2332 EmitLoadStoreICSlot(slot); | 2289 EmitLoadStoreICSlot(slot); |
2333 CallStoreIC(); | 2290 CallStoreIC(); |
2334 | 2291 |
2335 } else if (var->mode() == LET && op != Token::INIT) { | 2292 } else if (var->mode() == LET && op != Token::INIT) { |
2336 // Non-initializing assignment to let variable needs a write barrier. | 2293 // Non-initializing assignment to let variable needs a write barrier. |
2337 DCHECK(!var->IsLookupSlot()); | 2294 DCHECK(!var->IsLookupSlot()); |
2338 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2295 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
2339 Label assign; | 2296 Label assign; |
2340 MemOperand location = VarOperand(var, r4); | 2297 MemOperand location = VarOperand(var, r3); |
2341 __ LoadP(r6, location); | 2298 __ LoadP(r5, location); |
2342 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex); | 2299 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
2343 __ bne(&assign); | 2300 __ bne(&assign); |
2344 __ mov(r6, Operand(var->name())); | 2301 __ mov(r5, Operand(var->name())); |
2345 __ push(r6); | 2302 __ push(r5); |
2346 __ CallRuntime(Runtime::kThrowReferenceError); | 2303 __ CallRuntime(Runtime::kThrowReferenceError); |
2347 // Perform the assignment. | 2304 // Perform the assignment. |
2348 __ bind(&assign); | 2305 __ bind(&assign); |
2349 EmitStoreToStackLocalOrContextSlot(var, location); | 2306 EmitStoreToStackLocalOrContextSlot(var, location); |
2350 | 2307 |
2351 } else if (var->mode() == CONST && op != Token::INIT) { | 2308 } else if (var->mode() == CONST && op != Token::INIT) { |
2352 // Assignment to const variable needs a write barrier. | 2309 // Assignment to const variable needs a write barrier. |
2353 DCHECK(!var->IsLookupSlot()); | 2310 DCHECK(!var->IsLookupSlot()); |
2354 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2311 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
2355 Label const_error; | 2312 Label const_error; |
2356 MemOperand location = VarOperand(var, r4); | 2313 MemOperand location = VarOperand(var, r3); |
2357 __ LoadP(r6, location); | 2314 __ LoadP(r5, location); |
2358 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex); | 2315 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
2359 __ bne(&const_error); | 2316 __ bne(&const_error, Label::kNear); |
2360 __ mov(r6, Operand(var->name())); | 2317 __ mov(r5, Operand(var->name())); |
2361 __ push(r6); | 2318 __ push(r5); |
2362 __ CallRuntime(Runtime::kThrowReferenceError); | 2319 __ CallRuntime(Runtime::kThrowReferenceError); |
2363 __ bind(&const_error); | 2320 __ bind(&const_error); |
2364 __ CallRuntime(Runtime::kThrowConstAssignError); | 2321 __ CallRuntime(Runtime::kThrowConstAssignError); |
2365 | 2322 |
2366 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) { | 2323 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) { |
2367 // Initializing assignment to const {this} needs a write barrier. | 2324 // Initializing assignment to const {this} needs a write barrier. |
2368 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2325 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
2369 Label uninitialized_this; | 2326 Label uninitialized_this; |
2370 MemOperand location = VarOperand(var, r4); | 2327 MemOperand location = VarOperand(var, r3); |
2371 __ LoadP(r6, location); | 2328 __ LoadP(r5, location); |
2372 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex); | 2329 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
2373 __ beq(&uninitialized_this); | 2330 __ beq(&uninitialized_this); |
2374 __ mov(r4, Operand(var->name())); | 2331 __ mov(r3, Operand(var->name())); |
2375 __ push(r4); | 2332 __ push(r3); |
2376 __ CallRuntime(Runtime::kThrowReferenceError); | 2333 __ CallRuntime(Runtime::kThrowReferenceError); |
2377 __ bind(&uninitialized_this); | 2334 __ bind(&uninitialized_this); |
2378 EmitStoreToStackLocalOrContextSlot(var, location); | 2335 EmitStoreToStackLocalOrContextSlot(var, location); |
2379 | 2336 |
2380 } else if (!var->is_const_mode() || | 2337 } else if (!var->is_const_mode() || |
2381 (var->mode() == CONST && op == Token::INIT)) { | 2338 (var->mode() == CONST && op == Token::INIT)) { |
2382 if (var->IsLookupSlot()) { | 2339 if (var->IsLookupSlot()) { |
2383 // Assignment to var. | 2340 // Assignment to var. |
2384 __ Push(var->name()); | 2341 __ Push(var->name()); |
2385 __ Push(r3); | 2342 __ Push(r2); |
2386 __ CallRuntime(is_strict(language_mode()) | 2343 __ CallRuntime(is_strict(language_mode()) |
2387 ? Runtime::kStoreLookupSlot_Strict | 2344 ? Runtime::kStoreLookupSlot_Strict |
2388 : Runtime::kStoreLookupSlot_Sloppy); | 2345 : Runtime::kStoreLookupSlot_Sloppy); |
2389 } else { | 2346 } else { |
2390 // Assignment to var or initializing assignment to let/const in harmony | 2347 // Assignment to var or initializing assignment to let/const in harmony |
2391 // mode. | 2348 // mode. |
2392 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); | 2349 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); |
2393 MemOperand location = VarOperand(var, r4); | 2350 MemOperand location = VarOperand(var, r3); |
2394 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) { | 2351 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) { |
2395 // Check for an uninitialized let binding. | 2352 // Check for an uninitialized let binding. |
2396 __ LoadP(r5, location); | 2353 __ LoadP(r4, location); |
2397 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); | 2354 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); |
2398 __ Check(eq, kLetBindingReInitialization); | 2355 __ Check(eq, kLetBindingReInitialization); |
2399 } | 2356 } |
2400 EmitStoreToStackLocalOrContextSlot(var, location); | 2357 EmitStoreToStackLocalOrContextSlot(var, location); |
2401 } | 2358 } |
2402 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) { | 2359 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) { |
2403 // Const initializers need a write barrier. | 2360 // Const initializers need a write barrier. |
2404 DCHECK(!var->IsParameter()); // No const parameters. | 2361 DCHECK(!var->IsParameter()); // No const parameters. |
2405 if (var->IsLookupSlot()) { | 2362 if (var->IsLookupSlot()) { |
2406 __ push(r3); | 2363 __ push(r2); |
2407 __ mov(r3, Operand(var->name())); | 2364 __ mov(r2, Operand(var->name())); |
2408 __ Push(cp, r3); // Context and name. | 2365 __ Push(cp, r2); // Context and name. |
2409 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot); | 2366 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot); |
2410 } else { | 2367 } else { |
2411 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2368 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
2412 Label skip; | 2369 Label skip; |
2413 MemOperand location = VarOperand(var, r4); | 2370 MemOperand location = VarOperand(var, r3); |
2414 __ LoadP(r5, location); | 2371 __ LoadP(r4, location); |
2415 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); | 2372 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); |
2416 __ bne(&skip); | 2373 __ bne(&skip); |
2417 EmitStoreToStackLocalOrContextSlot(var, location); | 2374 EmitStoreToStackLocalOrContextSlot(var, location); |
2418 __ bind(&skip); | 2375 __ bind(&skip); |
2419 } | 2376 } |
2420 | 2377 |
2421 } else { | 2378 } else { |
2422 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT); | 2379 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT); |
2423 if (is_strict(language_mode())) { | 2380 if (is_strict(language_mode())) { |
2424 __ CallRuntime(Runtime::kThrowConstAssignError); | 2381 __ CallRuntime(Runtime::kThrowConstAssignError); |
2425 } | 2382 } |
2426 // Silently ignore store in sloppy mode. | 2383 // Silently ignore store in sloppy mode. |
2427 } | 2384 } |
2428 } | 2385 } |
2429 | 2386 |
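Most of the changes in this hunk are a mechanical register renumbering from the PPC conventions on the left to the S390 conventions on the right. A minimal C++ table of the mapping as it shows up in these hunks may help when reading the rest of the diff; this is an editorial summary derived from the hunks themselves, not a table taken from the port.

// Register renaming observed in the hunks above (PPC -> S390). Notes are
// editorial; the r0 -> r1 substitution appears where r0 was used as a base or
// index register, which z/Architecture treats as "no register" in an address.
struct RegRename { const char* ppc; const char* s390; const char* note; };
constexpr RegRename kObservedRenames[] = {
    {"r3", "r2", "result / accumulator register"},
    {"r4", "r3", ""},
    {"r5", "r4", ""},
    {"r6", "r5", ""},
    {"r7", "r6", ""},
    {"r0", "r1", "where r0 served as a base, index, or temporary"},
    {"mr", "LoadRR", "register-to-register move mnemonic"},
};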
2430 | |
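The let, const, and const {this} branches above share one shape: load the slot, compare against the hole sentinel, throw a ReferenceError if the binding is still uninitialized, and only then store. Below is a minimal stand-alone sketch of that shape, with std::optional standing in for the hole and an exception standing in for Runtime::kThrowReferenceError; it is an illustration, not V8 code.

#include <optional>
#include <stdexcept>
#include <string>

// Models the LoadP / CompareRoot / bne / ThrowReferenceError sequence emitted
// for a non-initializing assignment to a let binding. std::nullopt plays the
// role of the-hole; the final store corresponds to
// EmitStoreToStackLocalOrContextSlot.
void AssignToLetSlot(std::optional<int>& slot, const std::string& name,
                     int new_value) {
  if (!slot.has_value()) {
    throw std::runtime_error(name + " is not initialized");  // still in the TDZ
  }
  slot = new_value;
}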
2431 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 2387 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
2432 // Assignment to a property, using a named store IC. | 2388 // Assignment to a property, using a named store IC. |
2433 Property* prop = expr->target()->AsProperty(); | 2389 Property* prop = expr->target()->AsProperty(); |
2434 DCHECK(prop != NULL); | 2390 DCHECK(prop != NULL); |
2435 DCHECK(prop->key()->IsLiteral()); | 2391 DCHECK(prop->key()->IsLiteral()); |
2436 | 2392 |
2437 __ mov(StoreDescriptor::NameRegister(), | 2393 __ mov(StoreDescriptor::NameRegister(), |
2438 Operand(prop->key()->AsLiteral()->value())); | 2394 Operand(prop->key()->AsLiteral()->value())); |
2439 PopOperand(StoreDescriptor::ReceiverRegister()); | 2395 PopOperand(StoreDescriptor::ReceiverRegister()); |
2440 EmitLoadStoreICSlot(expr->AssignmentSlot()); | 2396 EmitLoadStoreICSlot(expr->AssignmentSlot()); |
2441 CallStoreIC(); | 2397 CallStoreIC(); |
2442 | 2398 |
2443 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 2399 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
2444 context()->Plug(r3); | 2400 context()->Plug(r2); |
2445 } | 2401 } |
2446 | 2402 |
2447 | |
2448 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { | 2403 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { |
2449 // Assignment to named property of super. | 2404 // Assignment to named property of super. |
2450 // r3 : value | 2405 // r2 : value |
2451 // stack : receiver ('this'), home_object | 2406 // stack : receiver ('this'), home_object |
2452 DCHECK(prop != NULL); | 2407 DCHECK(prop != NULL); |
2453 Literal* key = prop->key()->AsLiteral(); | 2408 Literal* key = prop->key()->AsLiteral(); |
2454 DCHECK(key != NULL); | 2409 DCHECK(key != NULL); |
2455 | 2410 |
2456 PushOperand(key->value()); | 2411 PushOperand(key->value()); |
2457 PushOperand(r3); | 2412 PushOperand(r2); |
2458 CallRuntimeWithOperands((is_strict(language_mode()) | 2413 CallRuntimeWithOperands((is_strict(language_mode()) |
2459 ? Runtime::kStoreToSuper_Strict | 2414 ? Runtime::kStoreToSuper_Strict |
2460 : Runtime::kStoreToSuper_Sloppy)); | 2415 : Runtime::kStoreToSuper_Sloppy)); |
2461 } | 2416 } |
2462 | 2417 |
2463 | |
2464 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { | 2418 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { |
2465 // Assignment to named property of super. | 2419 // Assignment to keyed property of super. |
2466 // r3 : value | 2420 // r2 : value |
2467 // stack : receiver ('this'), home_object, key | 2421 // stack : receiver ('this'), home_object, key |
2468 DCHECK(prop != NULL); | 2422 DCHECK(prop != NULL); |
2469 | 2423 |
2470 PushOperand(r3); | 2424 PushOperand(r2); |
2471 CallRuntimeWithOperands((is_strict(language_mode()) | 2425 CallRuntimeWithOperands((is_strict(language_mode()) |
2472 ? Runtime::kStoreKeyedToSuper_Strict | 2426 ? Runtime::kStoreKeyedToSuper_Strict |
2473 : Runtime::kStoreKeyedToSuper_Sloppy)); | 2427 : Runtime::kStoreKeyedToSuper_Sloppy)); |
2474 } | 2428 } |
2475 | 2429 |
2476 | |
2477 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { | 2430 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { |
2478 // Assignment to a property, using a keyed store IC. | 2431 // Assignment to a property, using a keyed store IC. |
2479 PopOperands(StoreDescriptor::ReceiverRegister(), | 2432 PopOperands(StoreDescriptor::ReceiverRegister(), |
2480 StoreDescriptor::NameRegister()); | 2433 StoreDescriptor::NameRegister()); |
2481 DCHECK(StoreDescriptor::ValueRegister().is(r3)); | 2434 DCHECK(StoreDescriptor::ValueRegister().is(r2)); |
2482 | 2435 |
2483 Handle<Code> ic = | 2436 Handle<Code> ic = |
2484 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | 2437 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
2485 EmitLoadStoreICSlot(expr->AssignmentSlot()); | 2438 EmitLoadStoreICSlot(expr->AssignmentSlot()); |
2486 CallIC(ic); | 2439 CallIC(ic); |
2487 | 2440 |
2488 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 2441 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
2489 context()->Plug(r3); | 2442 context()->Plug(r2); |
2490 } | 2443 } |
2491 | 2444 |
2492 | |
2493 void FullCodeGenerator::VisitProperty(Property* expr) { | 2445 void FullCodeGenerator::VisitProperty(Property* expr) { |
2494 Comment cmnt(masm_, "[ Property"); | 2446 Comment cmnt(masm_, "[ Property"); |
2495 SetExpressionPosition(expr); | 2447 SetExpressionPosition(expr); |
2496 | 2448 |
2497 Expression* key = expr->key(); | 2449 Expression* key = expr->key(); |
2498 | 2450 |
2499 if (key->IsPropertyName()) { | 2451 if (key->IsPropertyName()) { |
2500 if (!expr->IsSuperAccess()) { | 2452 if (!expr->IsSuperAccess()) { |
2501 VisitForAccumulatorValue(expr->obj()); | 2453 VisitForAccumulatorValue(expr->obj()); |
2502 __ Move(LoadDescriptor::ReceiverRegister(), r3); | 2454 __ Move(LoadDescriptor::ReceiverRegister(), r2); |
2503 EmitNamedPropertyLoad(expr); | 2455 EmitNamedPropertyLoad(expr); |
2504 } else { | 2456 } else { |
2505 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); | 2457 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); |
2506 VisitForStackValue( | 2458 VisitForStackValue( |
2507 expr->obj()->AsSuperPropertyReference()->home_object()); | 2459 expr->obj()->AsSuperPropertyReference()->home_object()); |
2508 EmitNamedSuperPropertyLoad(expr); | 2460 EmitNamedSuperPropertyLoad(expr); |
2509 } | 2461 } |
2510 } else { | 2462 } else { |
2511 if (!expr->IsSuperAccess()) { | 2463 if (!expr->IsSuperAccess()) { |
2512 VisitForStackValue(expr->obj()); | 2464 VisitForStackValue(expr->obj()); |
2513 VisitForAccumulatorValue(expr->key()); | 2465 VisitForAccumulatorValue(expr->key()); |
2514 __ Move(LoadDescriptor::NameRegister(), r3); | 2466 __ Move(LoadDescriptor::NameRegister(), r2); |
2515 PopOperand(LoadDescriptor::ReceiverRegister()); | 2467 PopOperand(LoadDescriptor::ReceiverRegister()); |
2516 EmitKeyedPropertyLoad(expr); | 2468 EmitKeyedPropertyLoad(expr); |
2517 } else { | 2469 } else { |
2518 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); | 2470 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); |
2519 VisitForStackValue( | 2471 VisitForStackValue( |
2520 expr->obj()->AsSuperPropertyReference()->home_object()); | 2472 expr->obj()->AsSuperPropertyReference()->home_object()); |
2521 VisitForStackValue(expr->key()); | 2473 VisitForStackValue(expr->key()); |
2522 EmitKeyedSuperPropertyLoad(expr); | 2474 EmitKeyedSuperPropertyLoad(expr); |
2523 } | 2475 } |
2524 } | 2476 } |
2525 PrepareForBailoutForId(expr->LoadId(), TOS_REG); | 2477 PrepareForBailoutForId(expr->LoadId(), TOS_REG); |
2526 context()->Plug(r3); | 2478 context()->Plug(r2); |
2527 } | 2479 } |
2528 | 2480 |
2529 | |
2530 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) { | 2481 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) { |
2531 ic_total_count_++; | 2482 ic_total_count_++; |
2532 __ Call(code, RelocInfo::CODE_TARGET, ast_id); | 2483 __ Call(code, RelocInfo::CODE_TARGET, ast_id); |
2533 } | 2484 } |
2534 | 2485 |
2535 | |
2536 // Code common for calls using the IC. | 2486 // Code common for calls using the IC. |
2537 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { | 2487 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { |
2538 Expression* callee = expr->expression(); | 2488 Expression* callee = expr->expression(); |
2539 | 2489 |
2540 // Get the target function. | 2490 // Get the target function. |
2541 ConvertReceiverMode convert_mode; | 2491 ConvertReceiverMode convert_mode; |
2542 if (callee->IsVariableProxy()) { | 2492 if (callee->IsVariableProxy()) { |
2543 { | 2493 { |
2544 StackValueContext context(this); | 2494 StackValueContext context(this); |
2545 EmitVariableLoad(callee->AsVariableProxy()); | 2495 EmitVariableLoad(callee->AsVariableProxy()); |
2546 PrepareForBailout(callee, NO_REGISTERS); | 2496 PrepareForBailout(callee, NO_REGISTERS); |
2547 } | 2497 } |
2548 // Push undefined as receiver. This is patched in the method prologue if it | 2498 // Push undefined as receiver. This is patched in the method prologue if it |
2549 // is a sloppy mode method. | 2499 // is a sloppy mode method. |
2550 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 2500 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); |
2551 PushOperand(r0); | 2501 PushOperand(r1); |
2552 convert_mode = ConvertReceiverMode::kNullOrUndefined; | 2502 convert_mode = ConvertReceiverMode::kNullOrUndefined; |
2553 } else { | 2503 } else { |
2554 // Load the function from the receiver. | 2504 // Load the function from the receiver. |
2555 DCHECK(callee->IsProperty()); | 2505 DCHECK(callee->IsProperty()); |
2556 DCHECK(!callee->AsProperty()->IsSuperAccess()); | 2506 DCHECK(!callee->AsProperty()->IsSuperAccess()); |
2557 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | 2507 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
2558 EmitNamedPropertyLoad(callee->AsProperty()); | 2508 EmitNamedPropertyLoad(callee->AsProperty()); |
2559 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | 2509 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
2560 // Push the target function under the receiver. | 2510 // Push the target function under the receiver. |
2561 __ LoadP(r0, MemOperand(sp, 0)); | 2511 __ LoadP(r1, MemOperand(sp, 0)); |
2562 PushOperand(r0); | 2512 PushOperand(r1); |
2563 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 2513 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
2564 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; | 2514 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; |
2565 } | 2515 } |
2566 | 2516 |
2567 EmitCall(expr, convert_mode); | 2517 EmitCall(expr, convert_mode); |
2568 } | 2518 } |
2569 | 2519 |
2570 | |
2571 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { | 2520 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { |
2572 Expression* callee = expr->expression(); | 2521 Expression* callee = expr->expression(); |
2573 DCHECK(callee->IsProperty()); | 2522 DCHECK(callee->IsProperty()); |
2574 Property* prop = callee->AsProperty(); | 2523 Property* prop = callee->AsProperty(); |
2575 DCHECK(prop->IsSuperAccess()); | 2524 DCHECK(prop->IsSuperAccess()); |
2576 SetExpressionPosition(prop); | 2525 SetExpressionPosition(prop); |
2577 | 2526 |
2578 Literal* key = prop->key()->AsLiteral(); | 2527 Literal* key = prop->key()->AsLiteral(); |
2579 DCHECK(!key->value()->IsSmi()); | 2528 DCHECK(!key->value()->IsSmi()); |
2580 // Load the function from the receiver. | 2529 // Load the function from the receiver. |
2581 const Register scratch = r4; | 2530 const Register scratch = r3; |
2582 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); | 2531 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); |
2583 VisitForAccumulatorValue(super_ref->home_object()); | 2532 VisitForAccumulatorValue(super_ref->home_object()); |
2584 __ mr(scratch, r3); | 2533 __ LoadRR(scratch, r2); |
2585 VisitForAccumulatorValue(super_ref->this_var()); | 2534 VisitForAccumulatorValue(super_ref->this_var()); |
2586 PushOperands(scratch, r3, r3, scratch); | 2535 PushOperands(scratch, r2, r2, scratch); |
2587 PushOperand(key->value()); | 2536 PushOperand(key->value()); |
2588 | 2537 |
2589 // Stack here: | 2538 // Stack here: |
2590 // - home_object | 2539 // - home_object |
2591 // - this (receiver) | 2540 // - this (receiver) |
2592 // - this (receiver) <-- LoadFromSuper will pop here and below. | 2541 // - this (receiver) <-- LoadFromSuper will pop here and below. |
2593 // - home_object | 2542 // - home_object |
2594 // - key | 2543 // - key |
2595 CallRuntimeWithOperands(Runtime::kLoadFromSuper); | 2544 CallRuntimeWithOperands(Runtime::kLoadFromSuper); |
2596 | 2545 |
2597 // Replace home_object with target function. | 2546 // Replace home_object with target function. |
2598 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 2547 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
2599 | 2548 |
2600 // Stack here: | 2549 // Stack here: |
2601 // - target function | 2550 // - target function |
2602 // - this (receiver) | 2551 // - this (receiver) |
2603 EmitCall(expr); | 2552 EmitCall(expr); |
2604 } | 2553 } |
2605 | 2554 |
2606 | |
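The PushOperands / StoreP sequence above is easier to follow as an explicit operand-stack model. The small sketch below replays the same shuffle with string placeholders for the values the generated code pushes; it is an illustration, not V8 code.

#include <cassert>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> stack;  // back() is the top of the operand stack
  // PushOperands(scratch, r2, r2, scratch); PushOperand(key->value());
  stack = {"home_object", "this", "this", "home_object", "key"};
  // Runtime::kLoadFromSuper pops key, home_object and one receiver copy and
  // leaves the looked-up method in the accumulator (r2).
  stack.resize(stack.size() - 3);
  std::string accumulator = "target_function";
  // StoreP(r2, MemOperand(sp, kPointerSize)) overwrites the slot one below the
  // top of stack, i.e. the remaining home_object.
  stack[stack.size() - 2] = accumulator;
  assert((stack == std::vector<std::string>{"target_function", "this"}));
  return 0;
}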
2607 // Code common for calls using the IC. | 2555 // Code common for calls using the IC. |
2608 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) { | 2556 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) { |
2609 // Load the key. | 2557 // Load the key. |
2610 VisitForAccumulatorValue(key); | 2558 VisitForAccumulatorValue(key); |
2611 | 2559 |
2612 Expression* callee = expr->expression(); | 2560 Expression* callee = expr->expression(); |
2613 | 2561 |
2614 // Load the function from the receiver. | 2562 // Load the function from the receiver. |
2615 DCHECK(callee->IsProperty()); | 2563 DCHECK(callee->IsProperty()); |
2616 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | 2564 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
2617 __ Move(LoadDescriptor::NameRegister(), r3); | 2565 __ Move(LoadDescriptor::NameRegister(), r2); |
2618 EmitKeyedPropertyLoad(callee->AsProperty()); | 2566 EmitKeyedPropertyLoad(callee->AsProperty()); |
2619 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | 2567 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
2620 | 2568 |
2621 // Push the target function under the receiver. | 2569 // Push the target function under the receiver. |
2622 __ LoadP(ip, MemOperand(sp, 0)); | 2570 __ LoadP(ip, MemOperand(sp, 0)); |
2623 PushOperand(ip); | 2571 PushOperand(ip); |
2624 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 2572 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
2625 | 2573 |
2626 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); | 2574 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); |
2627 } | 2575 } |
2628 | 2576 |
2629 | |
2630 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { | 2577 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { |
2631 Expression* callee = expr->expression(); | 2578 Expression* callee = expr->expression(); |
2632 DCHECK(callee->IsProperty()); | 2579 DCHECK(callee->IsProperty()); |
2633 Property* prop = callee->AsProperty(); | 2580 Property* prop = callee->AsProperty(); |
2634 DCHECK(prop->IsSuperAccess()); | 2581 DCHECK(prop->IsSuperAccess()); |
2635 | 2582 |
2636 SetExpressionPosition(prop); | 2583 SetExpressionPosition(prop); |
2637 // Load the function from the receiver. | 2584 // Load the function from the receiver. |
2638 const Register scratch = r4; | 2585 const Register scratch = r3; |
2639 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); | 2586 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); |
2640 VisitForAccumulatorValue(super_ref->home_object()); | 2587 VisitForAccumulatorValue(super_ref->home_object()); |
2641 __ mr(scratch, r3); | 2588 __ LoadRR(scratch, r2); |
2642 VisitForAccumulatorValue(super_ref->this_var()); | 2589 VisitForAccumulatorValue(super_ref->this_var()); |
2643 PushOperands(scratch, r3, r3, scratch); | 2590 PushOperands(scratch, r2, r2, scratch); |
2644 VisitForStackValue(prop->key()); | 2591 VisitForStackValue(prop->key()); |
2645 | 2592 |
2646 // Stack here: | 2593 // Stack here: |
2647 // - home_object | 2594 // - home_object |
2648 // - this (receiver) | 2595 // - this (receiver) |
2649 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. | 2596 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. |
2650 // - home_object | 2597 // - home_object |
2651 // - key | 2598 // - key |
2652 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper); | 2599 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper); |
2653 | 2600 |
2654 // Replace home_object with target function. | 2601 // Replace home_object with target function. |
2655 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 2602 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
2656 | 2603 |
2657 // Stack here: | 2604 // Stack here: |
2658 // - target function | 2605 // - target function |
2659 // - this (receiver) | 2606 // - this (receiver) |
2660 EmitCall(expr); | 2607 EmitCall(expr); |
2661 } | 2608 } |
2662 | 2609 |
2663 | |
2664 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { | 2610 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { |
2665 // Load the arguments. | 2611 // Load the arguments. |
2666 ZoneList<Expression*>* args = expr->arguments(); | 2612 ZoneList<Expression*>* args = expr->arguments(); |
2667 int arg_count = args->length(); | 2613 int arg_count = args->length(); |
2668 for (int i = 0; i < arg_count; i++) { | 2614 for (int i = 0; i < arg_count; i++) { |
2669 VisitForStackValue(args->at(i)); | 2615 VisitForStackValue(args->at(i)); |
2670 } | 2616 } |
2671 | 2617 |
2672 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 2618 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
2673 SetCallPosition(expr); | 2619 SetCallPosition(expr); |
2674 if (expr->tail_call_mode() == TailCallMode::kAllow) { | 2620 if (expr->tail_call_mode() == TailCallMode::kAllow) { |
2675 if (FLAG_trace) { | 2621 if (FLAG_trace) { |
2676 __ CallRuntime(Runtime::kTraceTailCall); | 2622 __ CallRuntime(Runtime::kTraceTailCall); |
2677 } | 2623 } |
2678 // Update profiling counters before the tail call since we will | 2624 // Update profiling counters before the tail call since we will |
2679 // not return to this function. | 2625 // not return to this function. |
2680 EmitProfilingCounterHandlingForReturnSequence(true); | 2626 EmitProfilingCounterHandlingForReturnSequence(true); |
2681 } | 2627 } |
2682 Handle<Code> ic = | 2628 Handle<Code> ic = |
2683 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) | 2629 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) |
2684 .code(); | 2630 .code(); |
2685 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot())); | 2631 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallFeedbackICSlot())); |
2686 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2632 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
2687 // Don't assign a type feedback id to the IC, since type feedback is provided | 2633 // Don't assign a type feedback id to the IC, since type feedback is provided |
2688 // by the vector above. | 2634 // by the vector above. |
2689 CallIC(ic); | 2635 CallIC(ic); |
2690 OperandStackDepthDecrement(arg_count + 1); | 2636 OperandStackDepthDecrement(arg_count + 1); |
2691 | 2637 |
2692 RecordJSReturnSite(expr); | 2638 RecordJSReturnSite(expr); |
2693 // Restore context register. | 2639 // Restore context register. |
2694 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2640 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2695 context()->DropAndPlug(1, r3); | 2641 context()->DropAndPlug(1, r2); |
2696 } | 2642 } |
2697 | 2643 |
2698 | |
2699 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | 2644 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
2700 // r7: copy of the first argument or undefined if it doesn't exist. | 2645 // r6: copy of the first argument or undefined if it doesn't exist. |
2701 if (arg_count > 0) { | 2646 if (arg_count > 0) { |
2702 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0); | 2647 __ LoadP(r6, MemOperand(sp, arg_count * kPointerSize), r0); |
2703 } else { | 2648 } else { |
2704 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); | 2649 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); |
2705 } | 2650 } |
2706 | 2651 |
2707 // r6: the receiver of the enclosing function. | 2652 // r5: the enclosing function, loaded from the frame's function slot. |
2708 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 2653 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
2709 | 2654 |
2710 // r5: language mode. | 2655 // r4: language mode. |
2711 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode())); | 2656 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode())); |
2712 | 2657 |
2713 // r4: the start position of the scope the calls resides in. | 2658 // r3: the start position of the scope the call resides in. |
2714 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position())); | 2659 __ LoadSmiLiteral(r3, Smi::FromInt(scope()->start_position())); |
2715 | 2660 |
2716 // Do the runtime call. | 2661 // Do the runtime call. |
2717 __ Push(r7, r6, r5, r4); | 2662 __ Push(r6, r5, r4, r3); |
2718 __ CallRuntime(Runtime::kResolvePossiblyDirectEval); | 2663 __ CallRuntime(Runtime::kResolvePossiblyDirectEval); |
2719 } | 2664 } |
2720 | 2665 |
2721 | |
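EmitResolvePossiblyDirectEval bundles four values for the runtime call: the first argument to eval (or undefined), the closure loaded from the frame's function slot, the language mode, and the scope's start position. The sketch below restates that bundle as plain data in the push order used above; the struct and field names are illustrative, not the runtime's signature.

#include <string>

// The four values pushed above for Runtime::kResolvePossiblyDirectEval,
// in push order r6, r5, r4, r3 on this port. Names are editorial.
struct ResolveEvalArgs {
  std::string first_arg_or_undefined;  // r6: args[0], or undefined if absent
  std::string enclosing_function;      // r5: JavaScriptFrameConstants::kFunctionOffset
  int language_mode_smi;               // r4: Smi::FromInt(language_mode())
  int scope_start_position_smi;        // r3: Smi::FromInt(scope()->start_position())
};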
2722 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls. | 2666 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls. |
2723 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) { | 2667 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) { |
2724 VariableProxy* callee = expr->expression()->AsVariableProxy(); | 2668 VariableProxy* callee = expr->expression()->AsVariableProxy(); |
2725 if (callee->var()->IsLookupSlot()) { | 2669 if (callee->var()->IsLookupSlot()) { |
2726 Label slow, done; | 2670 Label slow, done; |
2727 SetExpressionPosition(callee); | 2671 SetExpressionPosition(callee); |
2728 // Generate code for loading from variables potentially shadowed by | 2672 // Generate code for loading from variables potentially shadowed by |
2729 // eval-introduced variables. | 2673 // eval-introduced variables. |
2730 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); | 2674 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); |
2731 | 2675 |
2732 __ bind(&slow); | 2676 __ bind(&slow); |
2733 // Call the runtime to find the function to call (returned in r3) and | 2677 // Call the runtime to find the function to call (returned in r2) and |
2734 // the object holding it (returned in r4). | 2678 // the object holding it (returned in r3). |
2735 __ Push(callee->name()); | 2679 __ Push(callee->name()); |
2736 __ CallRuntime(Runtime::kLoadLookupSlotForCall); | 2680 __ CallRuntime(Runtime::kLoadLookupSlotForCall); |
2737 PushOperands(r3, r4); // Function, receiver. | 2681 PushOperands(r2, r3); // Function, receiver. |
2738 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); | 2682 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); |
2739 | 2683 |
2740 // If fast case code has been generated, emit code to push the function | 2684 // If fast case code has been generated, emit code to push the function |
2741 // and receiver and have the slow path jump around this code. | 2685 // and receiver and have the slow path jump around this code. |
2742 if (done.is_linked()) { | 2686 if (done.is_linked()) { |
2743 Label call; | 2687 Label call; |
2744 __ b(&call); | 2688 __ b(&call); |
2745 __ bind(&done); | 2689 __ bind(&done); |
2746 // Push function. | 2690 // Push function. |
2747 __ push(r3); | 2691 __ push(r2); |
2748 // Pass undefined as the receiver, which is the WithBaseObject of a | 2692 // Pass undefined as the receiver, which is the WithBaseObject of a |
2749 // non-object environment record. If the callee is sloppy, it will patch | 2693 // non-object environment record. If the callee is sloppy, it will patch |
2750 // it up to be the global receiver. | 2694 // it up to be the global receiver. |
2751 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); | 2695 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); |
2752 __ push(r4); | 2696 __ push(r3); |
2753 __ bind(&call); | 2697 __ bind(&call); |
2754 } | 2698 } |
2755 } else { | 2699 } else { |
2756 VisitForStackValue(callee); | 2700 VisitForStackValue(callee); |
2757 // refEnv.WithBaseObject() | 2701 // refEnv.WithBaseObject() |
2758 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 2702 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
2759 PushOperand(r5); // Reserved receiver slot. | 2703 PushOperand(r4); // Reserved receiver slot. |
2760 } | 2704 } |
2761 } | 2705 } |
2762 | 2706 |
2763 | |
2764 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) { | 2707 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) { |
2765 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval | 2708 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval |
2766 // to resolve the function we need to call. Then we call the resolved | 2709 // to resolve the function we need to call. Then we call the resolved |
2767 // function using the given arguments. | 2710 // function using the given arguments. |
2768 ZoneList<Expression*>* args = expr->arguments(); | 2711 ZoneList<Expression*>* args = expr->arguments(); |
2769 int arg_count = args->length(); | 2712 int arg_count = args->length(); |
2770 | 2713 |
2771 PushCalleeAndWithBaseObject(expr); | 2714 PushCalleeAndWithBaseObject(expr); |
2772 | 2715 |
2773 // Push the arguments. | 2716 // Push the arguments. |
2774 for (int i = 0; i < arg_count; i++) { | 2717 for (int i = 0; i < arg_count; i++) { |
2775 VisitForStackValue(args->at(i)); | 2718 VisitForStackValue(args->at(i)); |
2776 } | 2719 } |
2777 | 2720 |
2778 // Push a copy of the function (found below the arguments) and | 2721 // Push a copy of the function (found below the arguments) and |
2779 // resolve eval. | 2722 // resolve eval. |
2780 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2723 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
2781 __ push(r4); | 2724 __ push(r3); |
2782 EmitResolvePossiblyDirectEval(arg_count); | 2725 EmitResolvePossiblyDirectEval(arg_count); |
2783 | 2726 |
2784 // Touch up the stack with the resolved function. | 2727 // Touch up the stack with the resolved function. |
2785 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2728 __ StoreP(r2, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
2786 | 2729 |
2787 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); | 2730 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); |
2788 | 2731 |
2789 // Record source position for debugger. | 2732 // Record source position for debugger. |
2790 SetCallPosition(expr); | 2733 SetCallPosition(expr); |
2791 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2734 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
2792 __ mov(r3, Operand(arg_count)); | 2735 __ mov(r2, Operand(arg_count)); |
2793 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny, | 2736 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
2794 expr->tail_call_mode()), | 2737 expr->tail_call_mode()), |
2795 RelocInfo::CODE_TARGET); | 2738 RelocInfo::CODE_TARGET); |
2796 OperandStackDepthDecrement(arg_count + 1); | 2739 OperandStackDepthDecrement(arg_count + 1); |
2797 RecordJSReturnSite(expr); | 2740 RecordJSReturnSite(expr); |
2798 // Restore context register. | 2741 // Restore context register. |
2799 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2742 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2800 context()->DropAndPlug(1, r3); | 2743 context()->DropAndPlug(1, r2); |
2801 } | 2744 } |
2802 | 2745 |
2803 | |
2804 void FullCodeGenerator::VisitCallNew(CallNew* expr) { | 2746 void FullCodeGenerator::VisitCallNew(CallNew* expr) { |
2805 Comment cmnt(masm_, "[ CallNew"); | 2747 Comment cmnt(masm_, "[ CallNew"); |
2806 // According to ECMA-262, section 11.2.2, page 44, the function | 2748 // According to ECMA-262, section 11.2.2, page 44, the function |
2807 // expression in new calls must be evaluated before the | 2749 // expression in new calls must be evaluated before the |
2808 // arguments. | 2750 // arguments. |
2809 | 2751 |
2810 // Push constructor on the stack. If it's not a function it's used as | 2752 // Push constructor on the stack. If it's not a function it's used as |
2811 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is | 2753 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is |
2812 // ignored. | 2754 // ignored. |
2813 DCHECK(!expr->expression()->IsSuperPropertyReference()); | 2755 DCHECK(!expr->expression()->IsSuperPropertyReference()); |
2814 VisitForStackValue(expr->expression()); | 2756 VisitForStackValue(expr->expression()); |
2815 | 2757 |
2816 // Push the arguments ("left-to-right") on the stack. | 2758 // Push the arguments ("left-to-right") on the stack. |
2817 ZoneList<Expression*>* args = expr->arguments(); | 2759 ZoneList<Expression*>* args = expr->arguments(); |
2818 int arg_count = args->length(); | 2760 int arg_count = args->length(); |
2819 for (int i = 0; i < arg_count; i++) { | 2761 for (int i = 0; i < arg_count; i++) { |
2820 VisitForStackValue(args->at(i)); | 2762 VisitForStackValue(args->at(i)); |
2821 } | 2763 } |
2822 | 2764 |
2823 // Call the construct call builtin that handles allocation and | 2765 // Call the construct call builtin that handles allocation and |
2824 // constructor invocation. | 2766 // constructor invocation. |
2825 SetConstructCallPosition(expr); | 2767 SetConstructCallPosition(expr); |
2826 | 2768 |
2827 // Load function and argument count into r4 and r3. | 2769 // Load function and argument count into r3 and r2. |
2828 __ mov(r3, Operand(arg_count)); | 2770 __ mov(r2, Operand(arg_count)); |
2829 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0); | 2771 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize), r0); |
2830 | 2772 |
2831 // Record call targets in unoptimized code. | 2773 // Record call targets in unoptimized code. |
2832 __ EmitLoadTypeFeedbackVector(r5); | 2774 __ EmitLoadTypeFeedbackVector(r4); |
2833 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot())); | 2775 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallNewFeedbackSlot())); |
2834 | 2776 |
2835 CallConstructStub stub(isolate()); | 2777 CallConstructStub stub(isolate()); |
2836 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); | 2778 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); |
2837 OperandStackDepthDecrement(arg_count + 1); | 2779 OperandStackDepthDecrement(arg_count + 1); |
2838 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | 2780 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); |
2839 // Restore context register. | 2781 // Restore context register. |
2840 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2782 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2841 context()->Plug(r3); | 2783 context()->Plug(r2); |
2842 } | 2784 } |
2843 | 2785 |
2844 | |
2845 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { | 2786 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { |
2846 SuperCallReference* super_call_ref = | 2787 SuperCallReference* super_call_ref = |
2847 expr->expression()->AsSuperCallReference(); | 2788 expr->expression()->AsSuperCallReference(); |
2848 DCHECK_NOT_NULL(super_call_ref); | 2789 DCHECK_NOT_NULL(super_call_ref); |
2849 | 2790 |
2850 // Push the super constructor target on the stack (may be null, | 2791 // Push the super constructor target on the stack (may be null, |
2851 // but the Construct builtin can deal with that properly). | 2792 // but the Construct builtin can deal with that properly). |
2852 VisitForAccumulatorValue(super_call_ref->this_function_var()); | 2793 VisitForAccumulatorValue(super_call_ref->this_function_var()); |
2853 __ AssertFunction(result_register()); | 2794 __ AssertFunction(result_register()); |
2854 __ LoadP(result_register(), | 2795 __ LoadP(result_register(), |
2855 FieldMemOperand(result_register(), HeapObject::kMapOffset)); | 2796 FieldMemOperand(result_register(), HeapObject::kMapOffset)); |
2856 __ LoadP(result_register(), | 2797 __ LoadP(result_register(), |
2857 FieldMemOperand(result_register(), Map::kPrototypeOffset)); | 2798 FieldMemOperand(result_register(), Map::kPrototypeOffset)); |
2858 PushOperand(result_register()); | 2799 PushOperand(result_register()); |
2859 | 2800 |
2860 // Push the arguments ("left-to-right") on the stack. | 2801 // Push the arguments ("left-to-right") on the stack. |
2861 ZoneList<Expression*>* args = expr->arguments(); | 2802 ZoneList<Expression*>* args = expr->arguments(); |
2862 int arg_count = args->length(); | 2803 int arg_count = args->length(); |
2863 for (int i = 0; i < arg_count; i++) { | 2804 for (int i = 0; i < arg_count; i++) { |
2864 VisitForStackValue(args->at(i)); | 2805 VisitForStackValue(args->at(i)); |
2865 } | 2806 } |
2866 | 2807 |
2867 // Call the construct call builtin that handles allocation and | 2808 // Call the construct call builtin that handles allocation and |
2868 // constructor invocation. | 2809 // constructor invocation. |
2869 SetConstructCallPosition(expr); | 2810 SetConstructCallPosition(expr); |
2870 | 2811 |
2871 // Load new target into r6. | 2812 // Load new target into r5. |
2872 VisitForAccumulatorValue(super_call_ref->new_target_var()); | 2813 VisitForAccumulatorValue(super_call_ref->new_target_var()); |
2873 __ mr(r6, result_register()); | 2814 __ LoadRR(r5, result_register()); |
2874 | 2815 |
2875 // Load function and argument count into r1 and r0. | 2816 // Load function and argument count into r3 and r2. |
2876 __ mov(r3, Operand(arg_count)); | 2817 __ mov(r2, Operand(arg_count)); |
2877 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize)); | 2818 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize)); |
2878 | 2819 |
2879 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 2820 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
2880 OperandStackDepthDecrement(arg_count + 1); | 2821 OperandStackDepthDecrement(arg_count + 1); |
2881 | 2822 |
2882 RecordJSReturnSite(expr); | 2823 RecordJSReturnSite(expr); |
2883 | 2824 |
2884 // Restore context register. | 2825 // Restore context register. |
2885 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2826 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2886 context()->Plug(r3); | 2827 context()->Plug(r2); |
2887 } | 2828 } |
2888 | 2829 |
2889 | |
2890 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { | 2830 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { |
2891 ZoneList<Expression*>* args = expr->arguments(); | 2831 ZoneList<Expression*>* args = expr->arguments(); |
2892 DCHECK(args->length() == 1); | 2832 DCHECK(args->length() == 1); |
2893 | 2833 |
2894 VisitForAccumulatorValue(args->at(0)); | 2834 VisitForAccumulatorValue(args->at(0)); |
2895 | 2835 |
2896 Label materialize_true, materialize_false; | 2836 Label materialize_true, materialize_false; |
2897 Label* if_true = NULL; | 2837 Label* if_true = NULL; |
2898 Label* if_false = NULL; | 2838 Label* if_false = NULL; |
2899 Label* fall_through = NULL; | 2839 Label* fall_through = NULL; |
2900 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2840 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
2901 &if_false, &fall_through); | 2841 &if_false, &fall_through); |
2902 | 2842 |
2903 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2843 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2904 __ TestIfSmi(r3, r0); | 2844 __ TestIfSmi(r2); |
2905 Split(eq, if_true, if_false, fall_through, cr0); | 2845 Split(eq, if_true, if_false, fall_through); |
2906 | 2846 |
2907 context()->Plug(if_true, if_false); | 2847 context()->Plug(if_true, if_false); |
2908 } | 2848 } |
2909 | 2849 |
2910 | |
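TestIfSmi followed by Split reduces %_IsSmi to a single bit test, since V8 tags small integers with a clear low bit. A one-line model of the check follows; the 1-bit tag is stated as an assumption rather than pulled from the V8 headers.

#include <cstdint>

// A tagged value is a smi iff its low (tag) bit is zero.
inline bool IsSmi(intptr_t tagged_value) {
  constexpr intptr_t kSmiTagMask = 1;  // assumed 1-bit smi tag
  return (tagged_value & kSmiTagMask) == 0;
}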
2911 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) { | 2850 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) { |
2912 ZoneList<Expression*>* args = expr->arguments(); | 2851 ZoneList<Expression*>* args = expr->arguments(); |
2913 DCHECK(args->length() == 1); | 2852 DCHECK(args->length() == 1); |
2914 | 2853 |
2915 VisitForAccumulatorValue(args->at(0)); | 2854 VisitForAccumulatorValue(args->at(0)); |
2916 | 2855 |
2917 Label materialize_true, materialize_false; | 2856 Label materialize_true, materialize_false; |
2918 Label* if_true = NULL; | 2857 Label* if_true = NULL; |
2919 Label* if_false = NULL; | 2858 Label* if_false = NULL; |
2920 Label* fall_through = NULL; | 2859 Label* fall_through = NULL; |
2921 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2860 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
2922 &if_false, &fall_through); | 2861 &if_false, &fall_through); |
2923 | 2862 |
2924 __ JumpIfSmi(r3, if_false); | 2863 __ JumpIfSmi(r2, if_false); |
2925 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE); | 2864 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE); |
2926 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2865 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2927 Split(ge, if_true, if_false, fall_through); | 2866 Split(ge, if_true, if_false, fall_through); |
2928 | 2867 |
2929 context()->Plug(if_true, if_false); | 2868 context()->Plug(if_true, if_false); |
2930 } | 2869 } |
2931 | 2870 |
2932 | |
2933 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { | 2871 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { |
2934 ZoneList<Expression*>* args = expr->arguments(); | 2872 ZoneList<Expression*>* args = expr->arguments(); |
2935 DCHECK(args->length() == 1); | 2873 DCHECK(args->length() == 1); |
2936 | 2874 |
2937 VisitForAccumulatorValue(args->at(0)); | 2875 VisitForAccumulatorValue(args->at(0)); |
2938 | 2876 |
2939 Label materialize_true, materialize_false; | 2877 Label materialize_true, materialize_false; |
2940 Label* if_true = NULL; | 2878 Label* if_true = NULL; |
2941 Label* if_false = NULL; | 2879 Label* if_false = NULL; |
2942 Label* fall_through = NULL; | 2880 Label* fall_through = NULL; |
2943 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2881 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
2944 &if_false, &fall_through); | 2882 &if_false, &fall_through); |
2945 | 2883 |
2946 __ JumpIfSmi(r3, if_false); | 2884 __ JumpIfSmi(r2, if_false); |
2947 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE); | 2885 __ CompareObjectType(r2, r3, r3, JS_ARRAY_TYPE); |
2948 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2886 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2949 Split(eq, if_true, if_false, fall_through); | 2887 Split(eq, if_true, if_false, fall_through); |
2950 | 2888 |
2951 context()->Plug(if_true, if_false); | 2889 context()->Plug(if_true, if_false); |
2952 } | 2890 } |
2953 | 2891 |
2954 | |
2955 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { | 2892 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { |
2956 ZoneList<Expression*>* args = expr->arguments(); | 2893 ZoneList<Expression*>* args = expr->arguments(); |
2957 DCHECK(args->length() == 1); | 2894 DCHECK(args->length() == 1); |
2958 | 2895 |
2959 VisitForAccumulatorValue(args->at(0)); | 2896 VisitForAccumulatorValue(args->at(0)); |
2960 | 2897 |
2961 Label materialize_true, materialize_false; | 2898 Label materialize_true, materialize_false; |
2962 Label* if_true = NULL; | 2899 Label* if_true = NULL; |
2963 Label* if_false = NULL; | 2900 Label* if_false = NULL; |
2964 Label* fall_through = NULL; | 2901 Label* fall_through = NULL; |
2965 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2902 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
2966 &if_false, &fall_through); | 2903 &if_false, &fall_through); |
2967 | 2904 |
2968 __ JumpIfSmi(r3, if_false); | 2905 __ JumpIfSmi(r2, if_false); |
2969 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE); | 2906 __ CompareObjectType(r2, r3, r3, JS_TYPED_ARRAY_TYPE); |
2970 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2907 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2971 Split(eq, if_true, if_false, fall_through); | 2908 Split(eq, if_true, if_false, fall_through); |
2972 | 2909 |
2973 context()->Plug(if_true, if_false); | 2910 context()->Plug(if_true, if_false); |
2974 } | 2911 } |
2975 | 2912 |
2976 | |
2977 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { | 2913 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { |
2978 ZoneList<Expression*>* args = expr->arguments(); | 2914 ZoneList<Expression*>* args = expr->arguments(); |
2979 DCHECK(args->length() == 1); | 2915 DCHECK(args->length() == 1); |
2980 | 2916 |
2981 VisitForAccumulatorValue(args->at(0)); | 2917 VisitForAccumulatorValue(args->at(0)); |
2982 | 2918 |
2983 Label materialize_true, materialize_false; | 2919 Label materialize_true, materialize_false; |
2984 Label* if_true = NULL; | 2920 Label* if_true = NULL; |
2985 Label* if_false = NULL; | 2921 Label* if_false = NULL; |
2986 Label* fall_through = NULL; | 2922 Label* fall_through = NULL; |
2987 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2923 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
2988 &if_false, &fall_through); | 2924 &if_false, &fall_through); |
2989 | 2925 |
2990 __ JumpIfSmi(r3, if_false); | 2926 __ JumpIfSmi(r2, if_false); |
2991 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE); | 2927 __ CompareObjectType(r2, r3, r3, JS_REGEXP_TYPE); |
2992 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2928 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2993 Split(eq, if_true, if_false, fall_through); | 2929 Split(eq, if_true, if_false, fall_through); |
2994 | 2930 |
2995 context()->Plug(if_true, if_false); | 2931 context()->Plug(if_true, if_false); |
2996 } | 2932 } |
2997 | 2933 |
2998 | |
2999 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { | 2934 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { |
3000 ZoneList<Expression*>* args = expr->arguments(); | 2935 ZoneList<Expression*>* args = expr->arguments(); |
3001 DCHECK(args->length() == 1); | 2936 DCHECK(args->length() == 1); |
3002 | 2937 |
3003 VisitForAccumulatorValue(args->at(0)); | 2938 VisitForAccumulatorValue(args->at(0)); |
3004 | 2939 |
3005 Label materialize_true, materialize_false; | 2940 Label materialize_true, materialize_false; |
3006 Label* if_true = NULL; | 2941 Label* if_true = NULL; |
3007 Label* if_false = NULL; | 2942 Label* if_false = NULL; |
3008 Label* fall_through = NULL; | 2943 Label* fall_through = NULL; |
3009 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2944 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
3010 &if_false, &fall_through); | 2945 &if_false, &fall_through); |
3011 | 2946 |
3012 __ JumpIfSmi(r3, if_false); | 2947 __ JumpIfSmi(r2, if_false); |
3013 __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE); | 2948 __ CompareObjectType(r2, r3, r3, JS_PROXY_TYPE); |
3014 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2949 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3015 Split(eq, if_true, if_false, fall_through); | 2950 Split(eq, if_true, if_false, fall_through); |
3016 | 2951 |
3017 context()->Plug(if_true, if_false); | 2952 context()->Plug(if_true, if_false); |
3018 } | 2953 } |
3019 | 2954 |
3020 | |
3021 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { | 2955 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { |
3022 ZoneList<Expression*>* args = expr->arguments(); | 2956 ZoneList<Expression*>* args = expr->arguments(); |
3023 DCHECK(args->length() == 1); | 2957 DCHECK(args->length() == 1); |
3024 Label done, null, function, non_function_constructor; | 2958 Label done, null, function, non_function_constructor; |
3025 | 2959 |
3026 VisitForAccumulatorValue(args->at(0)); | 2960 VisitForAccumulatorValue(args->at(0)); |
3027 | 2961 |
3028 // If the object is not a JSReceiver, we return null. | 2962 // If the object is not a JSReceiver, we return null. |
3029 __ JumpIfSmi(r3, &null); | 2963 __ JumpIfSmi(r2, &null); |
3030 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | 2964 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); |
3031 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE); | 2965 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE); |
3032 // Map is now in r3. | 2966 // Map is now in r2. |
3033 __ blt(&null); | 2967 __ blt(&null); |
3034 | 2968 |
3035 // Return 'Function' for JSFunction objects. | 2969 // Return 'Function' for JSFunction objects. |
3036 __ cmpi(r4, Operand(JS_FUNCTION_TYPE)); | 2970 __ CmpP(r3, Operand(JS_FUNCTION_TYPE)); |
3037 __ beq(&function); | 2971 __ beq(&function); |
3038 | 2972 |
3039 // Check if the constructor in the map is a JS function. | 2973 // Check if the constructor in the map is a JS function. |
3040 Register instance_type = r5; | 2974 Register instance_type = r4; |
3041 __ GetMapConstructor(r3, r3, r4, instance_type); | 2975 __ GetMapConstructor(r2, r2, r3, instance_type); |
3042 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE)); | 2976 __ CmpP(instance_type, Operand(JS_FUNCTION_TYPE)); |
3043 __ bne(&non_function_constructor); | 2977 __ bne(&non_function_constructor, Label::kNear); |
3044 | 2978 |
3045 // r3 now contains the constructor function. Grab the | 2979 // r2 now contains the constructor function. Grab the |
3046 // instance class name from there. | 2980 // instance class name from there. |
3047 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); | 2981 __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset)); |
3048 __ LoadP(r3, | 2982 __ LoadP(r2, |
3049 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset)); | 2983 FieldMemOperand(r2, SharedFunctionInfo::kInstanceClassNameOffset)); |
3050 __ b(&done); | 2984 __ b(&done, Label::kNear); |
3051 | 2985 |
3052 // Functions have class 'Function'. | 2986 // Functions have class 'Function'. |
3053 __ bind(&function); | 2987 __ bind(&function); |
3054 __ LoadRoot(r3, Heap::kFunction_stringRootIndex); | 2988 __ LoadRoot(r2, Heap::kFunction_stringRootIndex); |
3055 __ b(&done); | 2989 __ b(&done, Label::kNear); |
3056 | 2990 |
3057 // Objects with a non-function constructor have class 'Object'. | 2991 // Objects with a non-function constructor have class 'Object'. |
3058 __ bind(&non_function_constructor); | 2992 __ bind(&non_function_constructor); |
3059 __ LoadRoot(r3, Heap::kObject_stringRootIndex); | 2993 __ LoadRoot(r2, Heap::kObject_stringRootIndex); |
3060 __ b(&done); | 2994 __ b(&done, Label::kNear); |
3061 | 2995 |
3062 // Non-JS objects have class null. | 2996 // Non-JS objects have class null. |
3063 __ bind(&null); | 2997 __ bind(&null); |
3064 __ LoadRoot(r3, Heap::kNullValueRootIndex); | 2998 __ LoadRoot(r2, Heap::kNullValueRootIndex); |
3065 | 2999 |
3066 // All done. | 3000 // All done. |
3067 __ bind(&done); | 3001 __ bind(&done); |
3068 | 3002 |
3069 context()->Plug(r3); | 3003 context()->Plug(r2); |
3070 } | 3004 } |
3071 | 3005 |
3072 | |
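EmitClassOf's branches form a simple decision chain. The compact sketch below paraphrases the generated control flow in ordinary C++; it is not a V8 API.

#include <string>

enum class Kind { kSmi, kNonReceiver, kFunction, kOtherReceiver };

// Mirrors the branches above: null for non-receivers, "Function" for
// JSFunction objects, the constructor's instance class name when the map's
// constructor is a JS function, and "Object" otherwise.
std::string ClassOf(Kind kind, bool constructor_is_js_function,
                    const std::string& instance_class_name) {
  if (kind == Kind::kSmi || kind == Kind::kNonReceiver) return "null";
  if (kind == Kind::kFunction) return "Function";
  return constructor_is_js_function ? instance_class_name : "Object";
}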
3073 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { | 3006 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { |
3074 ZoneList<Expression*>* args = expr->arguments(); | 3007 ZoneList<Expression*>* args = expr->arguments(); |
3075 DCHECK(args->length() == 1); | 3008 DCHECK(args->length() == 1); |
3076 VisitForAccumulatorValue(args->at(0)); // Load the object. | 3009 VisitForAccumulatorValue(args->at(0)); // Load the object. |
3077 | 3010 |
3078 Label done; | 3011 Label done; |
3079 // If the object is a smi return the object. | 3012 // If the object is a smi return the object. |
3080 __ JumpIfSmi(r3, &done); | 3013 __ JumpIfSmi(r2, &done); |
3081 // If the object is not a value type, return the object. | 3014 // If the object is not a value type, return the object. |
3082 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE); | 3015 __ CompareObjectType(r2, r3, r3, JS_VALUE_TYPE); |
3083 __ bne(&done); | 3016 __ bne(&done, Label::kNear); |
3084 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset)); | 3017 __ LoadP(r2, FieldMemOperand(r2, JSValue::kValueOffset)); |
3085 | 3018 |
3086 __ bind(&done); | 3019 __ bind(&done); |
3087 context()->Plug(r3); | 3020 context()->Plug(r2); |
3088 } | 3021 } |
3089 | 3022 |
3090 | |
3091 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { | 3023 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { |
3092 ZoneList<Expression*>* args = expr->arguments(); | 3024 ZoneList<Expression*>* args = expr->arguments(); |
3093 DCHECK_EQ(3, args->length()); | 3025 DCHECK_EQ(3, args->length()); |
3094 | 3026 |
3095 Register string = r3; | 3027 Register string = r2; |
3096 Register index = r4; | 3028 Register index = r3; |
3097 Register value = r5; | 3029 Register value = r4; |
3098 | 3030 |
3099 VisitForStackValue(args->at(0)); // index | 3031 VisitForStackValue(args->at(0)); // index |
3100 VisitForStackValue(args->at(1)); // value | 3032 VisitForStackValue(args->at(1)); // value |
3101 VisitForAccumulatorValue(args->at(2)); // string | 3033 VisitForAccumulatorValue(args->at(2)); // string |
3102 PopOperands(index, value); | 3034 PopOperands(index, value); |
3103 | 3035 |
3104 if (FLAG_debug_code) { | 3036 if (FLAG_debug_code) { |
3105 __ TestIfSmi(value, r0); | 3037 __ TestIfSmi(value); |
3106 __ Check(eq, kNonSmiValue, cr0); | 3038 __ Check(eq, kNonSmiValue, cr0); |
3107 __ TestIfSmi(index, r0); | 3039 __ TestIfSmi(index); |
3108 __ Check(eq, kNonSmiIndex, cr0); | 3040 __ Check(eq, kNonSmiIndex, cr0); |
3109 __ SmiUntag(index, index); | 3041 __ SmiUntag(index); |
3110 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 3042 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
3111 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); | 3043 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); |
3112 __ SmiTag(index, index); | 3044 __ SmiTag(index); |
3113 } | 3045 } |
3114 | 3046 |
3115 __ SmiUntag(value); | 3047 __ SmiUntag(value); |
3116 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); | 3048 __ AddP(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); |
3117 __ SmiToByteArrayOffset(r0, index); | 3049 __ SmiToByteArrayOffset(r1, index); |
3118 __ stbx(value, MemOperand(ip, r0)); | 3050 __ StoreByte(value, MemOperand(ip, r1)); |
3119 context()->Plug(string); | 3051 context()->Plug(string); |
3120 } | 3052 } |
3121 | 3053 |
3122 | |
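The address arithmetic above (untag the smi index, add the SeqOneByteString header offset, store one byte) amounts to an in-place character write. A stand-alone model over a raw character buffer follows; it is an illustration, while the real code writes into the string object's payload past its header.

#include <cstddef>
#include <string>

// Models %_OneByteSeqStringSetChar as lowered above: store the low byte of
// value at index within the one-byte string's character payload.
void OneByteSeqStringSetChar(std::string& payload, std::size_t index, int value) {
  payload[index] = static_cast<char>(value & 0xFF);
}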
3123 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { | 3054 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { |
3124 ZoneList<Expression*>* args = expr->arguments(); | 3055 ZoneList<Expression*>* args = expr->arguments(); |
3125 DCHECK_EQ(3, args->length()); | 3056 DCHECK_EQ(3, args->length()); |
3126 | 3057 |
3127 Register string = r3; | 3058 Register string = r2; |
3128 Register index = r4; | 3059 Register index = r3; |
3129 Register value = r5; | 3060 Register value = r4; |
3130 | 3061 |
3131 VisitForStackValue(args->at(0)); // index | 3062 VisitForStackValue(args->at(0)); // index |
3132 VisitForStackValue(args->at(1)); // value | 3063 VisitForStackValue(args->at(1)); // value |
3133 VisitForAccumulatorValue(args->at(2)); // string | 3064 VisitForAccumulatorValue(args->at(2)); // string |
3134 PopOperands(index, value); | 3065 PopOperands(index, value); |
3135 | 3066 |
3136 if (FLAG_debug_code) { | 3067 if (FLAG_debug_code) { |
3137 __ TestIfSmi(value, r0); | 3068 __ TestIfSmi(value); |
3138 __ Check(eq, kNonSmiValue, cr0); | 3069 __ Check(eq, kNonSmiValue, cr0); |
3139 __ TestIfSmi(index, r0); | 3070 __ TestIfSmi(index); |
3140 __ Check(eq, kNonSmiIndex, cr0); | 3071 __ Check(eq, kNonSmiIndex, cr0); |
3141 __ SmiUntag(index, index); | 3072 __ SmiUntag(index, index); |
3142 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 3073 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
3143 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); | 3074 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); |
3144 __ SmiTag(index, index); | 3075 __ SmiTag(index, index); |
3145 } | 3076 } |
3146 | 3077 |
3147 __ SmiUntag(value); | 3078 __ SmiUntag(value); |
3148 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 3079 __ SmiToShortArrayOffset(r1, index); |
3149 __ SmiToShortArrayOffset(r0, index); | 3080 __ StoreHalfWord(value, MemOperand(r1, string, SeqTwoByteString::kHeaderSize - |
3150 __ sthx(value, MemOperand(ip, r0)); | 3081 kHeapObjectTag)); |
3151 context()->Plug(string); | 3082 context()->Plug(string); |
3152 } | 3083 } |
3153 | 3084 |
3154 | |
3155 void FullCodeGenerator::EmitToInteger(CallRuntime* expr) { | 3085 void FullCodeGenerator::EmitToInteger(CallRuntime* expr) { |
3156 ZoneList<Expression*>* args = expr->arguments(); | 3086 ZoneList<Expression*>* args = expr->arguments(); |
3157 DCHECK_EQ(1, args->length()); | 3087 DCHECK_EQ(1, args->length()); |
3158 | 3088 |
3159 // Load the argument into r3 and convert it. | 3089 // Load the argument into r2 and convert it. |
3160 VisitForAccumulatorValue(args->at(0)); | 3090 VisitForAccumulatorValue(args->at(0)); |
3161 | 3091 |
3162 // Convert the object to an integer. | 3092 // Convert the object to an integer. |
3163 Label done_convert; | 3093 Label done_convert; |
3164 __ JumpIfSmi(r3, &done_convert); | 3094 __ JumpIfSmi(r2, &done_convert); |
3165 __ Push(r3); | 3095 __ Push(r2); |
3166 __ CallRuntime(Runtime::kToInteger); | 3096 __ CallRuntime(Runtime::kToInteger); |
3167 __ bind(&done_convert); | 3097 __ bind(&done_convert); |
3168 context()->Plug(r3); | 3098 context()->Plug(r2); |
3169 } | 3099 } |
3170 | 3100 |
3171 | |
3172 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { | 3101 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { |
3173 ZoneList<Expression*>* args = expr->arguments(); | 3102 ZoneList<Expression*>* args = expr->arguments(); |
3174 DCHECK(args->length() == 1); | 3103 DCHECK(args->length() == 1); |
3175 VisitForAccumulatorValue(args->at(0)); | 3104 VisitForAccumulatorValue(args->at(0)); |
3176 | 3105 |
3177 Label done; | 3106 Label done; |
3178 StringCharFromCodeGenerator generator(r3, r4); | 3107 StringCharFromCodeGenerator generator(r2, r3); |
3179 generator.GenerateFast(masm_); | 3108 generator.GenerateFast(masm_); |
3180 __ b(&done); | 3109 __ b(&done); |
3181 | 3110 |
3182 NopRuntimeCallHelper call_helper; | 3111 NopRuntimeCallHelper call_helper; |
3183 generator.GenerateSlow(masm_, call_helper); | 3112 generator.GenerateSlow(masm_, call_helper); |
3184 | 3113 |
3185 __ bind(&done); | 3114 __ bind(&done); |
3186 context()->Plug(r4); | 3115 context()->Plug(r3); |
3187 } | 3116 } |
3188 | 3117 |
3189 | |
3190 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { | 3118 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { |
3191 ZoneList<Expression*>* args = expr->arguments(); | 3119 ZoneList<Expression*>* args = expr->arguments(); |
3192 DCHECK(args->length() == 2); | 3120 DCHECK(args->length() == 2); |
3193 VisitForStackValue(args->at(0)); | 3121 VisitForStackValue(args->at(0)); |
3194 VisitForAccumulatorValue(args->at(1)); | 3122 VisitForAccumulatorValue(args->at(1)); |
3195 | 3123 |
3196 Register object = r4; | 3124 Register object = r3; |
3197 Register index = r3; | 3125 Register index = r2; |
3198 Register result = r6; | 3126 Register result = r5; |
3199 | 3127 |
3200 PopOperand(object); | 3128 PopOperand(object); |
3201 | 3129 |
3202 Label need_conversion; | 3130 Label need_conversion; |
3203 Label index_out_of_range; | 3131 Label index_out_of_range; |
3204 Label done; | 3132 Label done; |
3205 StringCharCodeAtGenerator generator(object, index, result, &need_conversion, | 3133 StringCharCodeAtGenerator generator(object, index, result, &need_conversion, |
3206 &need_conversion, &index_out_of_range, | 3134 &need_conversion, &index_out_of_range, |
3207 STRING_INDEX_IS_NUMBER); | 3135 STRING_INDEX_IS_NUMBER); |
3208 generator.GenerateFast(masm_); | 3136 generator.GenerateFast(masm_); |
(...skipping 11 matching lines...) | |
3220 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 3148 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
3221 __ b(&done); | 3149 __ b(&done); |
3222 | 3150 |
3223 NopRuntimeCallHelper call_helper; | 3151 NopRuntimeCallHelper call_helper; |
3224 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); | 3152 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); |
3225 | 3153 |
3226 __ bind(&done); | 3154 __ bind(&done); |
3227 context()->Plug(result); | 3155 context()->Plug(result); |
3228 } | 3156 } |
3229 | 3157 |
3230 | |
3231 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { | 3158 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { |
3232 ZoneList<Expression*>* args = expr->arguments(); | 3159 ZoneList<Expression*>* args = expr->arguments(); |
3233 DCHECK(args->length() == 2); | 3160 DCHECK(args->length() == 2); |
3234 VisitForStackValue(args->at(0)); | 3161 VisitForStackValue(args->at(0)); |
3235 VisitForAccumulatorValue(args->at(1)); | 3162 VisitForAccumulatorValue(args->at(1)); |
3236 | 3163 |
3237 Register object = r4; | 3164 Register object = r3; |
3238 Register index = r3; | 3165 Register index = r2; |
3239 Register scratch = r6; | 3166 Register scratch = r5; |
3240 Register result = r3; | 3167 Register result = r2; |
3241 | 3168 |
3242 PopOperand(object); | 3169 PopOperand(object); |
3243 | 3170 |
3244 Label need_conversion; | 3171 Label need_conversion; |
3245 Label index_out_of_range; | 3172 Label index_out_of_range; |
3246 Label done; | 3173 Label done; |
3247 StringCharAtGenerator generator(object, index, scratch, result, | 3174 StringCharAtGenerator generator(object, index, scratch, result, |
3248 &need_conversion, &need_conversion, | 3175 &need_conversion, &need_conversion, |
3249 &index_out_of_range, STRING_INDEX_IS_NUMBER); | 3176 &index_out_of_range, STRING_INDEX_IS_NUMBER); |
3250 generator.GenerateFast(masm_); | 3177 generator.GenerateFast(masm_); |
(...skipping 11 matching lines...) | |
3262 __ LoadSmiLiteral(result, Smi::FromInt(0)); | 3189 __ LoadSmiLiteral(result, Smi::FromInt(0)); |
3263 __ b(&done); | 3190 __ b(&done); |
3264 | 3191 |
3265 NopRuntimeCallHelper call_helper; | 3192 NopRuntimeCallHelper call_helper; |
3266 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); | 3193 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); |
3267 | 3194 |
3268 __ bind(&done); | 3195 __ bind(&done); |
3269 context()->Plug(result); | 3196 context()->Plug(result); |
3270 } | 3197 } |
3271 | 3198 |
3272 | |
3273 void FullCodeGenerator::EmitCall(CallRuntime* expr) { | 3199 void FullCodeGenerator::EmitCall(CallRuntime* expr) { |
3274 ZoneList<Expression*>* args = expr->arguments(); | 3200 ZoneList<Expression*>* args = expr->arguments(); |
3275 DCHECK_LE(2, args->length()); | 3201 DCHECK_LE(2, args->length()); |
3276 // Push target, receiver and arguments onto the stack. | 3202 // Push target, receiver and arguments onto the stack. |
3277 for (Expression* const arg : *args) { | 3203 for (Expression* const arg : *args) { |
3278 VisitForStackValue(arg); | 3204 VisitForStackValue(arg); |
3279 } | 3205 } |
3280 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 3206 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
3281 // Move target to r4. | 3207 // Move target to r3. |
3282 int const argc = args->length() - 2; | 3208 int const argc = args->length() - 2; |
3283 __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize)); | 3209 __ LoadP(r3, MemOperand(sp, (argc + 1) * kPointerSize)); |
3284 // Call the target. | 3210 // Call the target. |
3285 __ mov(r3, Operand(argc)); | 3211 __ mov(r2, Operand(argc)); |
3286 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 3212 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
3287 OperandStackDepthDecrement(argc + 1); | 3213 OperandStackDepthDecrement(argc + 1); |
3288 // Restore context register. | 3214 // Restore context register. |
3289 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3215 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
3290 // Discard the function left on TOS. | 3216 // Discard the function left on TOS. |
3291 context()->DropAndPlug(1, r3); | 3217 context()->DropAndPlug(1, r2); |
3292 } | 3218 } |
3293 | 3219 |
3294 | |
3295 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { | 3220 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { |
3296 ZoneList<Expression*>* args = expr->arguments(); | 3221 ZoneList<Expression*>* args = expr->arguments(); |
3297 VisitForAccumulatorValue(args->at(0)); | 3222 VisitForAccumulatorValue(args->at(0)); |
3298 | 3223 |
3299 Label materialize_true, materialize_false; | 3224 Label materialize_true, materialize_false; |
3300 Label* if_true = NULL; | 3225 Label* if_true = NULL; |
3301 Label* if_false = NULL; | 3226 Label* if_false = NULL; |
3302 Label* fall_through = NULL; | 3227 Label* fall_through = NULL; |
3303 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 3228 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
3304 &if_false, &fall_through); | 3229 &if_false, &fall_through); |
3305 | 3230 |
3306 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset)); | 3231 __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset)); |
3307 // PPC - assume ip is free | 3232 __ AndP(r0, r2, Operand(String::kContainsCachedArrayIndexMask)); |
3308 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask)); | |
3309 __ and_(r0, r3, ip, SetRC); | |
3310 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3233 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3311 Split(eq, if_true, if_false, fall_through, cr0); | 3234 Split(eq, if_true, if_false, fall_through); |
3312 | 3235 |
3313 context()->Plug(if_true, if_false); | 3236 context()->Plug(if_true, if_false); |
3314 } | 3237 } |
3315 | 3238 |
3316 | |
3317 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { | 3239 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { |
3318 ZoneList<Expression*>* args = expr->arguments(); | 3240 ZoneList<Expression*>* args = expr->arguments(); |
3319 DCHECK(args->length() == 1); | 3241 DCHECK(args->length() == 1); |
3320 VisitForAccumulatorValue(args->at(0)); | 3242 VisitForAccumulatorValue(args->at(0)); |
3321 | 3243 |
3322 __ AssertString(r3); | 3244 __ AssertString(r2); |
3323 | 3245 |
3324 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset)); | 3246 __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset)); |
3325 __ IndexFromHash(r3, r3); | 3247 __ IndexFromHash(r2, r2); |
3326 | 3248 |
3327 context()->Plug(r3); | 3249 context()->Plug(r2); |
3328 } | 3250 } |
3329 | 3251 |
3330 | |
3331 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) { | 3252 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) { |
3332 ZoneList<Expression*>* args = expr->arguments(); | 3253 ZoneList<Expression*>* args = expr->arguments(); |
3333 DCHECK_EQ(1, args->length()); | 3254 DCHECK_EQ(1, args->length()); |
3334 VisitForAccumulatorValue(args->at(0)); | 3255 VisitForAccumulatorValue(args->at(0)); |
3335 __ AssertFunction(r3); | 3256 __ AssertFunction(r2); |
3336 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3257 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
3337 __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset)); | 3258 __ LoadP(r2, FieldMemOperand(r2, Map::kPrototypeOffset)); |
3338 context()->Plug(r3); | 3259 context()->Plug(r2); |
3339 } | 3260 } |
3340 | 3261 |
3341 | |
3342 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { | 3262 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { |
3343 DCHECK(expr->arguments()->length() == 0); | 3263 DCHECK(expr->arguments()->length() == 0); |
3344 ExternalReference debug_is_active = | 3264 ExternalReference debug_is_active = |
3345 ExternalReference::debug_is_active_address(isolate()); | 3265 ExternalReference::debug_is_active_address(isolate()); |
3346 __ mov(ip, Operand(debug_is_active)); | 3266 __ mov(ip, Operand(debug_is_active)); |
3347 __ lbz(r3, MemOperand(ip)); | 3267 __ LoadlB(r2, MemOperand(ip)); |
3348 __ SmiTag(r3); | 3268 __ SmiTag(r2); |
3349 context()->Plug(r3); | 3269 context()->Plug(r2); |
3350 } | 3270 } |
3351 | 3271 |
3352 | |
3353 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { | 3272 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { |
3354 ZoneList<Expression*>* args = expr->arguments(); | 3273 ZoneList<Expression*>* args = expr->arguments(); |
3355 DCHECK_EQ(2, args->length()); | 3274 DCHECK_EQ(2, args->length()); |
3356 VisitForStackValue(args->at(0)); | 3275 VisitForStackValue(args->at(0)); |
3357 VisitForStackValue(args->at(1)); | 3276 VisitForStackValue(args->at(1)); |
3358 | 3277 |
3359 Label runtime, done; | 3278 Label runtime, done; |
3360 | 3279 |
3361 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime, TAG_OBJECT); | 3280 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &runtime, TAG_OBJECT); |
3362 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4); | 3281 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3); |
3363 __ Pop(r5, r6); | 3282 __ Pop(r4, r5); |
3364 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex); | 3283 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
3365 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0); | 3284 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0); |
3366 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); | 3285 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0); |
3367 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0); | 3286 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0); |
3368 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0); | 3287 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0); |
3369 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0); | 3288 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0); |
3370 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); | 3289 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); |
3371 __ b(&done); | 3290 __ b(&done); |
3372 | 3291 |
3373 __ bind(&runtime); | 3292 __ bind(&runtime); |
3374 CallRuntimeWithOperands(Runtime::kCreateIterResultObject); | 3293 CallRuntimeWithOperands(Runtime::kCreateIterResultObject); |
3375 | 3294 |
3376 __ bind(&done); | 3295 __ bind(&done); |
3377 context()->Plug(r3); | 3296 context()->Plug(r2); |
3378 } | 3297 } |
3379 | 3298 |
3380 | |
3381 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { | 3299 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { |
3382 // Push undefined as the receiver. | 3300 // Push undefined as the receiver. |
3383 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 3301 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
3384 PushOperand(r3); | 3302 PushOperand(r2); |
3385 | 3303 |
3386 __ LoadNativeContextSlot(expr->context_index(), r3); | 3304 __ LoadNativeContextSlot(expr->context_index(), r2); |
3387 } | 3305 } |
3388 | 3306 |
3389 | |
3390 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { | 3307 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { |
3391 ZoneList<Expression*>* args = expr->arguments(); | 3308 ZoneList<Expression*>* args = expr->arguments(); |
3392 int arg_count = args->length(); | 3309 int arg_count = args->length(); |
3393 | 3310 |
3394 SetCallPosition(expr); | 3311 SetCallPosition(expr); |
3395 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 3312 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
3396 __ mov(r3, Operand(arg_count)); | 3313 __ mov(r2, Operand(arg_count)); |
3397 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined), | 3314 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined), |
3398 RelocInfo::CODE_TARGET); | 3315 RelocInfo::CODE_TARGET); |
3399 OperandStackDepthDecrement(arg_count + 1); | 3316 OperandStackDepthDecrement(arg_count + 1); |
3400 } | 3317 } |
3401 | 3318 |
3402 | |
3403 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { | 3319 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { |
3404 ZoneList<Expression*>* args = expr->arguments(); | 3320 ZoneList<Expression*>* args = expr->arguments(); |
3405 int arg_count = args->length(); | 3321 int arg_count = args->length(); |
3406 | 3322 |
3407 if (expr->is_jsruntime()) { | 3323 if (expr->is_jsruntime()) { |
3408 Comment cmnt(masm_, "[ CallRuntime"); | 3324 Comment cmnt(masm_, "[ CallRuntime"); |
3409 EmitLoadJSRuntimeFunction(expr); | 3325 EmitLoadJSRuntimeFunction(expr); |
3410 | 3326 |
3411 // Push the target function under the receiver. | 3327 // Push the target function under the receiver. |
3412 __ LoadP(ip, MemOperand(sp, 0)); | 3328 __ LoadP(ip, MemOperand(sp, 0)); |
3413 PushOperand(ip); | 3329 PushOperand(ip); |
3414 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 3330 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
3415 | 3331 |
3416 // Push the arguments ("left-to-right"). | 3332 // Push the arguments ("left-to-right"). |
3417 for (int i = 0; i < arg_count; i++) { | 3333 for (int i = 0; i < arg_count; i++) { |
3418 VisitForStackValue(args->at(i)); | 3334 VisitForStackValue(args->at(i)); |
3419 } | 3335 } |
3420 | 3336 |
3421 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 3337 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
3422 EmitCallJSRuntimeFunction(expr); | 3338 EmitCallJSRuntimeFunction(expr); |
3423 | 3339 |
3424 // Restore context register. | 3340 // Restore context register. |
3425 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3341 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
3426 | 3342 |
3427 context()->DropAndPlug(1, r3); | 3343 context()->DropAndPlug(1, r2); |
3428 | 3344 |
3429 } else { | 3345 } else { |
3430 const Runtime::Function* function = expr->function(); | 3346 const Runtime::Function* function = expr->function(); |
3431 switch (function->function_id) { | 3347 switch (function->function_id) { |
3432 #define CALL_INTRINSIC_GENERATOR(Name) \ | 3348 #define CALL_INTRINSIC_GENERATOR(Name) \ |
3433 case Runtime::kInline##Name: { \ | 3349 case Runtime::kInline##Name: { \ |
3434 Comment cmnt(masm_, "[ Inline" #Name); \ | 3350 Comment cmnt(masm_, "[ Inline" #Name); \ |
3435 return Emit##Name(expr); \ | 3351 return Emit##Name(expr); \ |
3436 } | 3352 } |
3437 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR) | 3353 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR) |
3438 #undef CALL_INTRINSIC_GENERATOR | 3354 #undef CALL_INTRINSIC_GENERATOR |
3439 default: { | 3355 default: { |
3440 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic"); | 3356 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic"); |
3441 // Push the arguments ("left-to-right"). | 3357 // Push the arguments ("left-to-right"). |
3442 for (int i = 0; i < arg_count; i++) { | 3358 for (int i = 0; i < arg_count; i++) { |
3443 VisitForStackValue(args->at(i)); | 3359 VisitForStackValue(args->at(i)); |
3444 } | 3360 } |
3445 | 3361 |
3446 // Call the C runtime function. | 3362 // Call the C runtime function. |
3447 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 3363 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
3448 __ CallRuntime(expr->function(), arg_count); | 3364 __ CallRuntime(expr->function(), arg_count); |
3449 OperandStackDepthDecrement(arg_count); | 3365 OperandStackDepthDecrement(arg_count); |
3450 context()->Plug(r3); | 3366 context()->Plug(r2); |
3451 } | 3367 } |
3452 } | 3368 } |
3453 } | 3369 } |
3454 } | 3370 } |
3455 | 3371 |
3456 | |
3457 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 3372 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
3458 switch (expr->op()) { | 3373 switch (expr->op()) { |
3459 case Token::DELETE: { | 3374 case Token::DELETE: { |
3460 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 3375 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
3461 Property* property = expr->expression()->AsProperty(); | 3376 Property* property = expr->expression()->AsProperty(); |
3462 VariableProxy* proxy = expr->expression()->AsVariableProxy(); | 3377 VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
3463 | 3378 |
3464 if (property != NULL) { | 3379 if (property != NULL) { |
3465 VisitForStackValue(property->obj()); | 3380 VisitForStackValue(property->obj()); |
3466 VisitForStackValue(property->key()); | 3381 VisitForStackValue(property->key()); |
3467 CallRuntimeWithOperands(is_strict(language_mode()) | 3382 CallRuntimeWithOperands(is_strict(language_mode()) |
3468 ? Runtime::kDeleteProperty_Strict | 3383 ? Runtime::kDeleteProperty_Strict |
3469 : Runtime::kDeleteProperty_Sloppy); | 3384 : Runtime::kDeleteProperty_Sloppy); |
3470 context()->Plug(r3); | 3385 context()->Plug(r2); |
3471 } else if (proxy != NULL) { | 3386 } else if (proxy != NULL) { |
3472 Variable* var = proxy->var(); | 3387 Variable* var = proxy->var(); |
3473 // Delete of an unqualified identifier is disallowed in strict mode but | 3388 // Delete of an unqualified identifier is disallowed in strict mode but |
3474 // "delete this" is allowed. | 3389 // "delete this" is allowed. |
3475 bool is_this = var->HasThisName(isolate()); | 3390 bool is_this = var->HasThisName(isolate()); |
3476 DCHECK(is_sloppy(language_mode()) || is_this); | 3391 DCHECK(is_sloppy(language_mode()) || is_this); |
3477 if (var->IsUnallocatedOrGlobalSlot()) { | 3392 if (var->IsUnallocatedOrGlobalSlot()) { |
3478 __ LoadGlobalObject(r5); | 3393 __ LoadGlobalObject(r4); |
3479 __ mov(r4, Operand(var->name())); | 3394 __ mov(r3, Operand(var->name())); |
3480 __ Push(r5, r4); | 3395 __ Push(r4, r3); |
3481 __ CallRuntime(Runtime::kDeleteProperty_Sloppy); | 3396 __ CallRuntime(Runtime::kDeleteProperty_Sloppy); |
3482 context()->Plug(r3); | 3397 context()->Plug(r2); |
3483 } else if (var->IsStackAllocated() || var->IsContextSlot()) { | 3398 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
3484 // Result of deleting non-global, non-dynamic variables is false. | 3399 // Result of deleting non-global, non-dynamic variables is false. |
3485 // The subexpression does not have side effects. | 3400 // The subexpression does not have side effects. |
3486 context()->Plug(is_this); | 3401 context()->Plug(is_this); |
3487 } else { | 3402 } else { |
3488 // Non-global variable. Call the runtime to try to delete from the | 3403 // Non-global variable. Call the runtime to try to delete from the |
3489 // context where the variable was introduced. | 3404 // context where the variable was introduced. |
3490 __ Push(var->name()); | 3405 __ Push(var->name()); |
3491 __ CallRuntime(Runtime::kDeleteLookupSlot); | 3406 __ CallRuntime(Runtime::kDeleteLookupSlot); |
3492 context()->Plug(r3); | 3407 context()->Plug(r2); |
3493 } | 3408 } |
3494 } else { | 3409 } else { |
3495 // Result of deleting non-property, non-variable reference is true. | 3410 // Result of deleting non-property, non-variable reference is true. |
3496 // The subexpression may have side effects. | 3411 // The subexpression may have side effects. |
3497 VisitForEffect(expr->expression()); | 3412 VisitForEffect(expr->expression()); |
3498 context()->Plug(true); | 3413 context()->Plug(true); |
3499 } | 3414 } |
3500 break; | 3415 break; |
3501 } | 3416 } |
3502 | 3417 |
(...skipping 21 matching lines...) | |
3524 // for control and plugging the control flow into the context, | 3439 // for control and plugging the control flow into the context, |
3525 // because we need to prepare a pair of extra administrative AST ids | 3440 // because we need to prepare a pair of extra administrative AST ids |
3526 // for the optimizing compiler. | 3441 // for the optimizing compiler. |
3527 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); | 3442 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); |
3528 Label materialize_true, materialize_false, done; | 3443 Label materialize_true, materialize_false, done; |
3529 VisitForControl(expr->expression(), &materialize_false, | 3444 VisitForControl(expr->expression(), &materialize_false, |
3530 &materialize_true, &materialize_true); | 3445 &materialize_true, &materialize_true); |
3531 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1); | 3446 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1); |
3532 __ bind(&materialize_true); | 3447 __ bind(&materialize_true); |
3533 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); | 3448 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); |
3534 __ LoadRoot(r3, Heap::kTrueValueRootIndex); | 3449 __ LoadRoot(r2, Heap::kTrueValueRootIndex); |
3535 if (context()->IsStackValue()) __ push(r3); | 3450 if (context()->IsStackValue()) __ push(r2); |
3536 __ b(&done); | 3451 __ b(&done); |
3537 __ bind(&materialize_false); | 3452 __ bind(&materialize_false); |
3538 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); | 3453 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); |
3539 __ LoadRoot(r3, Heap::kFalseValueRootIndex); | 3454 __ LoadRoot(r2, Heap::kFalseValueRootIndex); |
3540 if (context()->IsStackValue()) __ push(r3); | 3455 if (context()->IsStackValue()) __ push(r2); |
3541 __ bind(&done); | 3456 __ bind(&done); |
3542 } | 3457 } |
3543 break; | 3458 break; |
3544 } | 3459 } |
3545 | 3460 |
3546 case Token::TYPEOF: { | 3461 case Token::TYPEOF: { |
3547 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); | 3462 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); |
3548 { | 3463 { |
3549 AccumulatorValueContext context(this); | 3464 AccumulatorValueContext context(this); |
3550 VisitForTypeofValue(expr->expression()); | 3465 VisitForTypeofValue(expr->expression()); |
3551 } | 3466 } |
3552 __ mr(r6, r3); | 3467 __ LoadRR(r5, r2); |
3553 TypeofStub typeof_stub(isolate()); | 3468 TypeofStub typeof_stub(isolate()); |
3554 __ CallStub(&typeof_stub); | 3469 __ CallStub(&typeof_stub); |
3555 context()->Plug(r3); | 3470 context()->Plug(r2); |
3556 break; | 3471 break; |
3557 } | 3472 } |
3558 | 3473 |
3559 default: | 3474 default: |
3560 UNREACHABLE(); | 3475 UNREACHABLE(); |
3561 } | 3476 } |
3562 } | 3477 } |
3563 | 3478 |
3564 | |
3565 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { | 3479 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
3566 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis()); | 3480 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis()); |
3567 | 3481 |
3568 Comment cmnt(masm_, "[ CountOperation"); | 3482 Comment cmnt(masm_, "[ CountOperation"); |
3569 | 3483 |
3570 Property* prop = expr->expression()->AsProperty(); | 3484 Property* prop = expr->expression()->AsProperty(); |
3571 LhsKind assign_type = Property::GetAssignType(prop); | 3485 LhsKind assign_type = Property::GetAssignType(prop); |
3572 | 3486 |
3573 // Evaluate expression and get value. | 3487 // Evaluate expression and get value. |
3574 if (assign_type == VARIABLE) { | 3488 if (assign_type == VARIABLE) { |
(...skipping 13 matching lines...) | |
3588 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | 3502 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
3589 EmitNamedPropertyLoad(prop); | 3503 EmitNamedPropertyLoad(prop); |
3590 break; | 3504 break; |
3591 } | 3505 } |
3592 | 3506 |
3593 case NAMED_SUPER_PROPERTY: { | 3507 case NAMED_SUPER_PROPERTY: { |
3594 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | 3508 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
3595 VisitForAccumulatorValue( | 3509 VisitForAccumulatorValue( |
3596 prop->obj()->AsSuperPropertyReference()->home_object()); | 3510 prop->obj()->AsSuperPropertyReference()->home_object()); |
3597 PushOperand(result_register()); | 3511 PushOperand(result_register()); |
3598 const Register scratch = r4; | 3512 const Register scratch = r3; |
3599 __ LoadP(scratch, MemOperand(sp, kPointerSize)); | 3513 __ LoadP(scratch, MemOperand(sp, kPointerSize)); |
3600 PushOperands(scratch, result_register()); | 3514 PushOperands(scratch, result_register()); |
3601 EmitNamedSuperPropertyLoad(prop); | 3515 EmitNamedSuperPropertyLoad(prop); |
3602 break; | 3516 break; |
3603 } | 3517 } |
3604 | 3518 |
3605 case KEYED_SUPER_PROPERTY: { | 3519 case KEYED_SUPER_PROPERTY: { |
3606 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | 3520 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
3607 VisitForAccumulatorValue( | 3521 VisitForAccumulatorValue( |
3608 prop->obj()->AsSuperPropertyReference()->home_object()); | 3522 prop->obj()->AsSuperPropertyReference()->home_object()); |
3609 const Register scratch = r4; | 3523 const Register scratch = r3; |
3610 const Register scratch1 = r5; | 3524 const Register scratch1 = r4; |
3611 __ mr(scratch, result_register()); | 3525 __ LoadRR(scratch, result_register()); |
3612 VisitForAccumulatorValue(prop->key()); | 3526 VisitForAccumulatorValue(prop->key()); |
3613 PushOperands(scratch, result_register()); | 3527 PushOperands(scratch, result_register()); |
3614 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); | 3528 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); |
3615 PushOperands(scratch1, scratch, result_register()); | 3529 PushOperands(scratch1, scratch, result_register()); |
3616 EmitKeyedSuperPropertyLoad(prop); | 3530 EmitKeyedSuperPropertyLoad(prop); |
3617 break; | 3531 break; |
3618 } | 3532 } |
3619 | 3533 |
3620 case KEYED_PROPERTY: { | 3534 case KEYED_PROPERTY: { |
3621 VisitForStackValue(prop->obj()); | 3535 VisitForStackValue(prop->obj()); |
(...skipping 18 matching lines...) | |
3640 PrepareForBailoutForId(prop->LoadId(), TOS_REG); | 3554 PrepareForBailoutForId(prop->LoadId(), TOS_REG); |
3641 } | 3555 } |
3642 | 3556 |
3643 // Inline smi case if we are in a loop. | 3557 // Inline smi case if we are in a loop. |
3644 Label stub_call, done; | 3558 Label stub_call, done; |
3645 JumpPatchSite patch_site(masm_); | 3559 JumpPatchSite patch_site(masm_); |
3646 | 3560 |
3647 int count_value = expr->op() == Token::INC ? 1 : -1; | 3561 int count_value = expr->op() == Token::INC ? 1 : -1; |
3648 if (ShouldInlineSmiCase(expr->op())) { | 3562 if (ShouldInlineSmiCase(expr->op())) { |
3649 Label slow; | 3563 Label slow; |
3650 patch_site.EmitJumpIfNotSmi(r3, &slow); | 3564 patch_site.EmitJumpIfNotSmi(r2, &slow); |
3651 | 3565 |
3652 // Save result for postfix expressions. | 3566 // Save result for postfix expressions. |
3653 if (expr->is_postfix()) { | 3567 if (expr->is_postfix()) { |
3654 if (!context()->IsEffect()) { | 3568 if (!context()->IsEffect()) { |
3655 // Save the result on the stack. If we have a named or keyed property | 3569 // Save the result on the stack. If we have a named or keyed property |
3656 // we store the result under the receiver that is currently on top | 3570 // we store the result under the receiver that is currently on top |
3657 // of the stack. | 3571 // of the stack. |
3658 switch (assign_type) { | 3572 switch (assign_type) { |
3659 case VARIABLE: | 3573 case VARIABLE: |
3660 __ push(r3); | 3574 __ push(r2); |
3661 break; | 3575 break; |
3662 case NAMED_PROPERTY: | 3576 case NAMED_PROPERTY: |
3663 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 3577 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
3664 break; | 3578 break; |
3665 case NAMED_SUPER_PROPERTY: | 3579 case NAMED_SUPER_PROPERTY: |
3666 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize)); | 3580 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
3667 break; | 3581 break; |
3668 case KEYED_PROPERTY: | 3582 case KEYED_PROPERTY: |
3669 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize)); | 3583 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
3670 break; | 3584 break; |
3671 case KEYED_SUPER_PROPERTY: | 3585 case KEYED_SUPER_PROPERTY: |
3672 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize)); | 3586 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize)); |
3673 break; | 3587 break; |
3674 } | 3588 } |
3675 } | 3589 } |
3676 } | 3590 } |
3677 | 3591 |
3678 Register scratch1 = r4; | 3592 Register scratch1 = r3; |
3679 Register scratch2 = r5; | 3593 Register scratch2 = r4; |
3680 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value)); | 3594 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value)); |
3681 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0); | 3595 __ AddAndCheckForOverflow(r2, r2, scratch1, scratch2, r0); |
3682 __ BranchOnNoOverflow(&done); | 3596 __ BranchOnNoOverflow(&done); |
3683 // Call stub. Undo operation first. | 3597 // Call stub. Undo operation first. |
3684 __ sub(r3, r3, scratch1); | 3598 __ SubP(r2, r2, scratch1); |
3685 __ b(&stub_call); | 3599 __ b(&stub_call); |
3686 __ bind(&slow); | 3600 __ bind(&slow); |
3687 } | 3601 } |
3688 | 3602 |
3689 // Convert old value into a number. | 3603 // Convert old value into a number. |
3690 ToNumberStub convert_stub(isolate()); | 3604 ToNumberStub convert_stub(isolate()); |
3691 __ CallStub(&convert_stub); | 3605 __ CallStub(&convert_stub); |
3692 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); | 3606 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); |
3693 | 3607 |
3694 // Save result for postfix expressions. | 3608 // Save result for postfix expressions. |
3695 if (expr->is_postfix()) { | 3609 if (expr->is_postfix()) { |
3696 if (!context()->IsEffect()) { | 3610 if (!context()->IsEffect()) { |
3697 // Save the result on the stack. If we have a named or keyed property | 3611 // Save the result on the stack. If we have a named or keyed property |
3698 // we store the result under the receiver that is currently on top | 3612 // we store the result under the receiver that is currently on top |
3699 // of the stack. | 3613 // of the stack. |
3700 switch (assign_type) { | 3614 switch (assign_type) { |
3701 case VARIABLE: | 3615 case VARIABLE: |
3702 PushOperand(r3); | 3616 PushOperand(r2); |
3703 break; | 3617 break; |
3704 case NAMED_PROPERTY: | 3618 case NAMED_PROPERTY: |
3705 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 3619 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
3706 break; | 3620 break; |
3707 case NAMED_SUPER_PROPERTY: | 3621 case NAMED_SUPER_PROPERTY: |
3708 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize)); | 3622 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
3709 break; | 3623 break; |
3710 case KEYED_PROPERTY: | 3624 case KEYED_PROPERTY: |
3711 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize)); | 3625 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
3712 break; | 3626 break; |
3713 case KEYED_SUPER_PROPERTY: | 3627 case KEYED_SUPER_PROPERTY: |
3714 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize)); | 3628 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize)); |
3715 break; | 3629 break; |
3716 } | 3630 } |
3717 } | 3631 } |
3718 } | 3632 } |
3719 | 3633 |
3720 __ bind(&stub_call); | 3634 __ bind(&stub_call); |
3721 __ mr(r4, r3); | 3635 __ LoadRR(r3, r2); |
3722 __ LoadSmiLiteral(r3, Smi::FromInt(count_value)); | 3636 __ LoadSmiLiteral(r2, Smi::FromInt(count_value)); |
3723 | 3637 |
3724 SetExpressionPosition(expr); | 3638 SetExpressionPosition(expr); |
3725 | 3639 |
3726 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code(); | 3640 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code(); |
3727 CallIC(code, expr->CountBinOpFeedbackId()); | 3641 CallIC(code, expr->CountBinOpFeedbackId()); |
3728 patch_site.EmitPatchInfo(); | 3642 patch_site.EmitPatchInfo(); |
3729 __ bind(&done); | 3643 __ bind(&done); |
3730 | 3644 |
3731 // Store the value returned in r3. | 3645 // Store the value returned in r2. |
3732 switch (assign_type) { | 3646 switch (assign_type) { |
3733 case VARIABLE: | 3647 case VARIABLE: |
3734 if (expr->is_postfix()) { | 3648 if (expr->is_postfix()) { |
3735 { | 3649 { |
3736 EffectContext context(this); | 3650 EffectContext context(this); |
3737 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3651 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
3738 Token::ASSIGN, expr->CountSlot()); | 3652 Token::ASSIGN, expr->CountSlot()); |
3739 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 3653 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
3740 context.Plug(r3); | 3654 context.Plug(r2); |
3741 } | 3655 } |
3742 // For all contexts except EffectConstant we have the result on | 3656 // For all contexts except EffectConstant we have the result on |
3743 // top of the stack. | 3657 // top of the stack. |
3744 if (!context()->IsEffect()) { | 3658 if (!context()->IsEffect()) { |
3745 context()->PlugTOS(); | 3659 context()->PlugTOS(); |
3746 } | 3660 } |
3747 } else { | 3661 } else { |
3748 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3662 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
3749 Token::ASSIGN, expr->CountSlot()); | 3663 Token::ASSIGN, expr->CountSlot()); |
3750 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 3664 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
3751 context()->Plug(r3); | 3665 context()->Plug(r2); |
3752 } | 3666 } |
3753 break; | 3667 break; |
3754 case NAMED_PROPERTY: { | 3668 case NAMED_PROPERTY: { |
3755 __ mov(StoreDescriptor::NameRegister(), | 3669 __ mov(StoreDescriptor::NameRegister(), |
3756 Operand(prop->key()->AsLiteral()->value())); | 3670 Operand(prop->key()->AsLiteral()->value())); |
3757 PopOperand(StoreDescriptor::ReceiverRegister()); | 3671 PopOperand(StoreDescriptor::ReceiverRegister()); |
3758 EmitLoadStoreICSlot(expr->CountSlot()); | 3672 EmitLoadStoreICSlot(expr->CountSlot()); |
3759 CallStoreIC(); | 3673 CallStoreIC(); |
3760 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 3674 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
3761 if (expr->is_postfix()) { | 3675 if (expr->is_postfix()) { |
3762 if (!context()->IsEffect()) { | 3676 if (!context()->IsEffect()) { |
3763 context()->PlugTOS(); | 3677 context()->PlugTOS(); |
3764 } | 3678 } |
3765 } else { | 3679 } else { |
3766 context()->Plug(r3); | 3680 context()->Plug(r2); |
3767 } | 3681 } |
3768 break; | 3682 break; |
3769 } | 3683 } |
3770 case NAMED_SUPER_PROPERTY: { | 3684 case NAMED_SUPER_PROPERTY: { |
3771 EmitNamedSuperPropertyStore(prop); | 3685 EmitNamedSuperPropertyStore(prop); |
3772 if (expr->is_postfix()) { | 3686 if (expr->is_postfix()) { |
3773 if (!context()->IsEffect()) { | 3687 if (!context()->IsEffect()) { |
3774 context()->PlugTOS(); | 3688 context()->PlugTOS(); |
3775 } | 3689 } |
3776 } else { | 3690 } else { |
3777 context()->Plug(r3); | 3691 context()->Plug(r2); |
3778 } | 3692 } |
3779 break; | 3693 break; |
3780 } | 3694 } |
3781 case KEYED_SUPER_PROPERTY: { | 3695 case KEYED_SUPER_PROPERTY: { |
3782 EmitKeyedSuperPropertyStore(prop); | 3696 EmitKeyedSuperPropertyStore(prop); |
3783 if (expr->is_postfix()) { | 3697 if (expr->is_postfix()) { |
3784 if (!context()->IsEffect()) { | 3698 if (!context()->IsEffect()) { |
3785 context()->PlugTOS(); | 3699 context()->PlugTOS(); |
3786 } | 3700 } |
3787 } else { | 3701 } else { |
3788 context()->Plug(r3); | 3702 context()->Plug(r2); |
3789 } | 3703 } |
3790 break; | 3704 break; |
3791 } | 3705 } |
3792 case KEYED_PROPERTY: { | 3706 case KEYED_PROPERTY: { |
3793 PopOperands(StoreDescriptor::ReceiverRegister(), | 3707 PopOperands(StoreDescriptor::ReceiverRegister(), |
3794 StoreDescriptor::NameRegister()); | 3708 StoreDescriptor::NameRegister()); |
3795 Handle<Code> ic = | 3709 Handle<Code> ic = |
3796 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | 3710 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
3797 EmitLoadStoreICSlot(expr->CountSlot()); | 3711 EmitLoadStoreICSlot(expr->CountSlot()); |
3798 CallIC(ic); | 3712 CallIC(ic); |
3799 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 3713 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
3800 if (expr->is_postfix()) { | 3714 if (expr->is_postfix()) { |
3801 if (!context()->IsEffect()) { | 3715 if (!context()->IsEffect()) { |
3802 context()->PlugTOS(); | 3716 context()->PlugTOS(); |
3803 } | 3717 } |
3804 } else { | 3718 } else { |
3805 context()->Plug(r3); | 3719 context()->Plug(r2); |
3806 } | 3720 } |
3807 break; | 3721 break; |
3808 } | 3722 } |
3809 } | 3723 } |
3810 } | 3724 } |
3811 | 3725 |
3812 | |
3813 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, | 3726 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, |
3814 Expression* sub_expr, | 3727 Expression* sub_expr, |
3815 Handle<String> check) { | 3728 Handle<String> check) { |
3816 Label materialize_true, materialize_false; | 3729 Label materialize_true, materialize_false; |
3817 Label* if_true = NULL; | 3730 Label* if_true = NULL; |
3818 Label* if_false = NULL; | 3731 Label* if_false = NULL; |
3819 Label* fall_through = NULL; | 3732 Label* fall_through = NULL; |
3820 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 3733 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
3821 &if_false, &fall_through); | 3734 &if_false, &fall_through); |
3822 | 3735 |
3823 { | 3736 { |
3824 AccumulatorValueContext context(this); | 3737 AccumulatorValueContext context(this); |
3825 VisitForTypeofValue(sub_expr); | 3738 VisitForTypeofValue(sub_expr); |
3826 } | 3739 } |
3827 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3740 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3828 | 3741 |
3829 Factory* factory = isolate()->factory(); | 3742 Factory* factory = isolate()->factory(); |
3830 if (String::Equals(check, factory->number_string())) { | 3743 if (String::Equals(check, factory->number_string())) { |
3831 __ JumpIfSmi(r3, if_true); | 3744 __ JumpIfSmi(r2, if_true); |
3832 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3745 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
3833 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 3746 __ CompareRoot(r2, Heap::kHeapNumberMapRootIndex); |
3834 __ cmp(r3, ip); | |
3835 Split(eq, if_true, if_false, fall_through); | 3747 Split(eq, if_true, if_false, fall_through); |
3836 } else if (String::Equals(check, factory->string_string())) { | 3748 } else if (String::Equals(check, factory->string_string())) { |
3837 __ JumpIfSmi(r3, if_false); | 3749 __ JumpIfSmi(r2, if_false); |
3838 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE); | 3750 __ CompareObjectType(r2, r2, r3, FIRST_NONSTRING_TYPE); |
3839 Split(lt, if_true, if_false, fall_through); | 3751 Split(lt, if_true, if_false, fall_through); |
3840 } else if (String::Equals(check, factory->symbol_string())) { | 3752 } else if (String::Equals(check, factory->symbol_string())) { |
3841 __ JumpIfSmi(r3, if_false); | 3753 __ JumpIfSmi(r2, if_false); |
3842 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE); | 3754 __ CompareObjectType(r2, r2, r3, SYMBOL_TYPE); |
3843 Split(eq, if_true, if_false, fall_through); | 3755 Split(eq, if_true, if_false, fall_through); |
3844 } else if (String::Equals(check, factory->boolean_string())) { | 3756 } else if (String::Equals(check, factory->boolean_string())) { |
3845 __ CompareRoot(r3, Heap::kTrueValueRootIndex); | 3757 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
3846 __ beq(if_true); | 3758 __ beq(if_true); |
3847 __ CompareRoot(r3, Heap::kFalseValueRootIndex); | 3759 __ CompareRoot(r2, Heap::kFalseValueRootIndex); |
3848 Split(eq, if_true, if_false, fall_through); | 3760 Split(eq, if_true, if_false, fall_through); |
3849 } else if (String::Equals(check, factory->undefined_string())) { | 3761 } else if (String::Equals(check, factory->undefined_string())) { |
3850 __ CompareRoot(r3, Heap::kNullValueRootIndex); | 3762 __ CompareRoot(r2, Heap::kNullValueRootIndex); |
3851 __ beq(if_false); | 3763 __ beq(if_false); |
3852 __ JumpIfSmi(r3, if_false); | 3764 __ JumpIfSmi(r2, if_false); |
3853 // Check for undetectable objects => true. | 3765 // Check for undetectable objects => true. |
3854 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3766 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
3855 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3767 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset), |
3856 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable)); | 3768 Operand(1 << Map::kIsUndetectable)); |
3857 Split(ne, if_true, if_false, fall_through, cr0); | 3769 Split(ne, if_true, if_false, fall_through); |
3858 | 3770 |
3859 } else if (String::Equals(check, factory->function_string())) { | 3771 } else if (String::Equals(check, factory->function_string())) { |
3860 __ JumpIfSmi(r3, if_false); | 3772 __ JumpIfSmi(r2, if_false); |
3861 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3773 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
3862 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3774 __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset)); |
3863 __ andi(r4, r4, | 3775 __ AndP(r3, r3, |
3864 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); | 3776 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); |
3865 __ cmpi(r4, Operand(1 << Map::kIsCallable)); | 3777 __ CmpP(r3, Operand(1 << Map::kIsCallable)); |
3866 Split(eq, if_true, if_false, fall_through); | 3778 Split(eq, if_true, if_false, fall_through); |
3867 } else if (String::Equals(check, factory->object_string())) { | 3779 } else if (String::Equals(check, factory->object_string())) { |
3868 __ JumpIfSmi(r3, if_false); | 3780 __ JumpIfSmi(r2, if_false); |
3869 __ CompareRoot(r3, Heap::kNullValueRootIndex); | 3781 __ CompareRoot(r2, Heap::kNullValueRootIndex); |
3870 __ beq(if_true); | 3782 __ beq(if_true); |
3871 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | 3783 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); |
3872 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE); | 3784 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE); |
3873 __ blt(if_false); | 3785 __ blt(if_false); |
3874 // Check for callable or undetectable objects => false. | 3786 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset), |
3875 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3787 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); |
3876 __ andi(r0, r4, | 3788 Split(eq, if_true, if_false, fall_through); |
3877 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); | |
3878 Split(eq, if_true, if_false, fall_through, cr0); | |
3879 // clang-format off | 3789 // clang-format off |
3880 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ | 3790 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ |
3881 } else if (String::Equals(check, factory->type##_string())) { \ | 3791 } else if (String::Equals(check, factory->type##_string())) { \ |
3882 __ JumpIfSmi(r3, if_false); \ | 3792 __ JumpIfSmi(r2, if_false); \ |
3883 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); \ | 3793 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); \ |
3884 __ CompareRoot(r3, Heap::k##Type##MapRootIndex); \ | 3794 __ CompareRoot(r2, Heap::k##Type##MapRootIndex); \ |
3885 Split(eq, if_true, if_false, fall_through); | 3795 Split(eq, if_true, if_false, fall_through); |
3886 SIMD128_TYPES(SIMD128_TYPE) | 3796 SIMD128_TYPES(SIMD128_TYPE) |
3887 #undef SIMD128_TYPE | 3797 #undef SIMD128_TYPE |
3888 // clang-format on | 3798 // clang-format on |
3889 } else { | 3799 } else { |
3890 if (if_false != fall_through) __ b(if_false); | 3800 if (if_false != fall_through) __ b(if_false); |
3891 } | 3801 } |
3892 context()->Plug(if_true, if_false); | 3802 context()->Plug(if_true, if_false); |
3893 } | 3803 } |
3894 | 3804 |
3895 | |
3896 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { | 3805 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
3897 Comment cmnt(masm_, "[ CompareOperation"); | 3806 Comment cmnt(masm_, "[ CompareOperation"); |
3898 SetExpressionPosition(expr); | 3807 SetExpressionPosition(expr); |
3899 | 3808 |
3900 // First we try a fast inlined version of the compare when one of | 3809 // First we try a fast inlined version of the compare when one of |
3901 // the operands is a literal. | 3810 // the operands is a literal. |
3902 if (TryLiteralCompare(expr)) return; | 3811 if (TryLiteralCompare(expr)) return; |
3903 | 3812 |
3904 // Always perform the comparison for its control flow. Pack the result | 3813 // Always perform the comparison for its control flow. Pack the result |
3905 // into the expression's context after the comparison is performed. | 3814 // into the expression's context after the comparison is performed. |
3906 Label materialize_true, materialize_false; | 3815 Label materialize_true, materialize_false; |
3907 Label* if_true = NULL; | 3816 Label* if_true = NULL; |
3908 Label* if_false = NULL; | 3817 Label* if_false = NULL; |
3909 Label* fall_through = NULL; | 3818 Label* fall_through = NULL; |
3910 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 3819 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
3911 &if_false, &fall_through); | 3820 &if_false, &fall_through); |
3912 | 3821 |
3913 Token::Value op = expr->op(); | 3822 Token::Value op = expr->op(); |
3914 VisitForStackValue(expr->left()); | 3823 VisitForStackValue(expr->left()); |
3915 switch (op) { | 3824 switch (op) { |
3916 case Token::IN: | 3825 case Token::IN: |
3917 VisitForStackValue(expr->right()); | 3826 VisitForStackValue(expr->right()); |
3918 CallRuntimeWithOperands(Runtime::kHasProperty); | 3827 CallRuntimeWithOperands(Runtime::kHasProperty); |
3919 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); | 3828 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); |
3920 __ CompareRoot(r3, Heap::kTrueValueRootIndex); | 3829 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
3921 Split(eq, if_true, if_false, fall_through); | 3830 Split(eq, if_true, if_false, fall_through); |
3922 break; | 3831 break; |
3923 | 3832 |
3924 case Token::INSTANCEOF: { | 3833 case Token::INSTANCEOF: { |
3925 VisitForAccumulatorValue(expr->right()); | 3834 VisitForAccumulatorValue(expr->right()); |
3926 PopOperand(r4); | 3835 PopOperand(r3); |
3927 InstanceOfStub stub(isolate()); | 3836 InstanceOfStub stub(isolate()); |
3928 __ CallStub(&stub); | 3837 __ CallStub(&stub); |
3929 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); | 3838 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); |
3930 __ CompareRoot(r3, Heap::kTrueValueRootIndex); | 3839 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
3931 Split(eq, if_true, if_false, fall_through); | 3840 Split(eq, if_true, if_false, fall_through); |
3932 break; | 3841 break; |
3933 } | 3842 } |
3934 | 3843 |
3935 default: { | 3844 default: { |
3936 VisitForAccumulatorValue(expr->right()); | 3845 VisitForAccumulatorValue(expr->right()); |
3937 Condition cond = CompareIC::ComputeCondition(op); | 3846 Condition cond = CompareIC::ComputeCondition(op); |
3938 PopOperand(r4); | 3847 PopOperand(r3); |
3939 | 3848 |
3940 bool inline_smi_code = ShouldInlineSmiCase(op); | 3849 bool inline_smi_code = ShouldInlineSmiCase(op); |
3941 JumpPatchSite patch_site(masm_); | 3850 JumpPatchSite patch_site(masm_); |
3942 if (inline_smi_code) { | 3851 if (inline_smi_code) { |
3943 Label slow_case; | 3852 Label slow_case; |
3944 __ orx(r5, r3, r4); | 3853 __ LoadRR(r4, r3); |
3945 patch_site.EmitJumpIfNotSmi(r5, &slow_case); | 3854 __ OrP(r4, r2); |
3946 __ cmp(r4, r3); | 3855 patch_site.EmitJumpIfNotSmi(r4, &slow_case); |
| 3856 __ CmpP(r3, r2); |
3947 Split(cond, if_true, if_false, NULL); | 3857 Split(cond, if_true, if_false, NULL); |
3948 __ bind(&slow_case); | 3858 __ bind(&slow_case); |
3949 } | 3859 } |
3950 | 3860 |
3951 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); | 3861 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); |
3952 CallIC(ic, expr->CompareOperationFeedbackId()); | 3862 CallIC(ic, expr->CompareOperationFeedbackId()); |
3953 patch_site.EmitPatchInfo(); | 3863 patch_site.EmitPatchInfo(); |
3954 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3864 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3955 __ cmpi(r3, Operand::Zero()); | 3865 __ CmpP(r2, Operand::Zero()); |
3956 Split(cond, if_true, if_false, fall_through); | 3866 Split(cond, if_true, if_false, fall_through); |
3957 } | 3867 } |
3958 } | 3868 } |
3959 | 3869 |
3960 // Convert the result of the comparison into one expected for this | 3870 // Convert the result of the comparison into one expected for this |
3961 // expression's context. | 3871 // expression's context. |
3962 context()->Plug(if_true, if_false); | 3872 context()->Plug(if_true, if_false); |
3963 } | 3873 } |
3964 | 3874 |
3965 | |
3966 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, | 3875 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, |
3967 Expression* sub_expr, | 3876 Expression* sub_expr, |
3968 NilValue nil) { | 3877 NilValue nil) { |
3969 Label materialize_true, materialize_false; | 3878 Label materialize_true, materialize_false; |
3970 Label* if_true = NULL; | 3879 Label* if_true = NULL; |
3971 Label* if_false = NULL; | 3880 Label* if_false = NULL; |
3972 Label* fall_through = NULL; | 3881 Label* fall_through = NULL; |
3973 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 3882 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
3974 &if_false, &fall_through); | 3883 &if_false, &fall_through); |
3975 | 3884 |
3976 VisitForAccumulatorValue(sub_expr); | 3885 VisitForAccumulatorValue(sub_expr); |
3977 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3886 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3978 if (expr->op() == Token::EQ_STRICT) { | 3887 if (expr->op() == Token::EQ_STRICT) { |
3979 Heap::RootListIndex nil_value = nil == kNullValue | 3888 Heap::RootListIndex nil_value = nil == kNullValue |
3980 ? Heap::kNullValueRootIndex | 3889 ? Heap::kNullValueRootIndex |
3981 : Heap::kUndefinedValueRootIndex; | 3890 : Heap::kUndefinedValueRootIndex; |
3982 __ LoadRoot(r4, nil_value); | 3891 __ CompareRoot(r2, nil_value); |
3983 __ cmp(r3, r4); | |
3984 Split(eq, if_true, if_false, fall_through); | 3892 Split(eq, if_true, if_false, fall_through); |
3985 } else { | 3893 } else { |
3986 __ JumpIfSmi(r3, if_false); | 3894 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); |
3987 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3895 CallIC(ic, expr->CompareOperationFeedbackId()); |
3988 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3896 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
3989 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable)); | 3897 Split(eq, if_true, if_false, fall_through); |
3990 Split(ne, if_true, if_false, fall_through, cr0); | |
3991 } | 3898 } |
3992 context()->Plug(if_true, if_false); | 3899 context()->Plug(if_true, if_false); |
3993 } | 3900 } |
3994 | 3901 |
3995 | |
3996 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { | 3902 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { |
3997 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 3903 __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
3998 context()->Plug(r3); | 3904 context()->Plug(r2); |
3999 } | 3905 } |
4000 | 3906 |
4001 | 3907 Register FullCodeGenerator::result_register() { return r2; } |
4002 Register FullCodeGenerator::result_register() { return r3; } | |
4003 | |
4004 | 3908 |
4005 Register FullCodeGenerator::context_register() { return cp; } | 3909 Register FullCodeGenerator::context_register() { return cp; } |
4006 | 3910 |
4007 | |
4008 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 3911 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
4009 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset); | 3912 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset); |
4010 __ StoreP(value, MemOperand(fp, frame_offset), r0); | 3913 __ StoreP(value, MemOperand(fp, frame_offset)); |
4011 } | 3914 } |
4012 | 3915 |
4013 | |
4014 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 3916 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
4015 __ LoadP(dst, ContextMemOperand(cp, context_index), r0); | 3917 __ LoadP(dst, ContextMemOperand(cp, context_index), r0); |
4016 } | 3918 } |
4017 | 3919 |
4018 | |
4019 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { | 3920 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { |
4020 Scope* closure_scope = scope()->ClosureScope(); | 3921 Scope* closure_scope = scope()->ClosureScope(); |
4021 if (closure_scope->is_script_scope() || | 3922 if (closure_scope->is_script_scope() || closure_scope->is_module_scope()) { |
4022 closure_scope->is_module_scope()) { | |
4023 // Contexts nested in the native context have a canonical empty function | 3923 // Contexts nested in the native context have a canonical empty function |
4024 // as their closure, not the anonymous closure containing the global | 3924 // as their closure, not the anonymous closure containing the global |
4025 // code. | 3925 // code. |
4026 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip); | 3926 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip); |
4027 } else if (closure_scope->is_eval_scope()) { | 3927 } else if (closure_scope->is_eval_scope()) { |
4028 // Contexts created by a call to eval have the same closure as the | 3928 // Contexts created by a call to eval have the same closure as the |
4029 // context calling eval, not the anonymous closure containing the eval | 3929 // context calling eval, not the anonymous closure containing the eval |
4030 // code. Fetch it from the context. | 3930 // code. Fetch it from the context. |
4031 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX)); | 3931 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX)); |
4032 } else { | 3932 } else { |
4033 DCHECK(closure_scope->is_function_scope()); | 3933 DCHECK(closure_scope->is_function_scope()); |
4034 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 3934 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
4035 } | 3935 } |
4036 PushOperand(ip); | 3936 PushOperand(ip); |
4037 } | 3937 } |
4038 | 3938 |
4039 | |
4040 // ---------------------------------------------------------------------------- | 3939 // ---------------------------------------------------------------------------- |
4041 // Non-local control flow support. | 3940 // Non-local control flow support. |
4042 | 3941 |
4043 void FullCodeGenerator::EnterFinallyBlock() { | 3942 void FullCodeGenerator::EnterFinallyBlock() { |
4044 DCHECK(!result_register().is(r4)); | 3943 DCHECK(!result_register().is(r3)); |
4045 // Store pending message while executing finally block. | 3944 // Store pending message while executing finally block. |
4046 ExternalReference pending_message_obj = | 3945 ExternalReference pending_message_obj = |
4047 ExternalReference::address_of_pending_message_obj(isolate()); | 3946 ExternalReference::address_of_pending_message_obj(isolate()); |
4048 __ mov(ip, Operand(pending_message_obj)); | 3947 __ mov(ip, Operand(pending_message_obj)); |
4049 __ LoadP(r4, MemOperand(ip)); | 3948 __ LoadP(r3, MemOperand(ip)); |
4050 PushOperand(r4); | 3949 PushOperand(r3); |
4051 | 3950 |
4052 ClearPendingMessage(); | 3951 ClearPendingMessage(); |
4053 } | 3952 } |
4054 | 3953 |
4055 | |
4056 void FullCodeGenerator::ExitFinallyBlock() { | 3954 void FullCodeGenerator::ExitFinallyBlock() { |
4057 DCHECK(!result_register().is(r4)); | 3955 DCHECK(!result_register().is(r3)); |
4058 // Restore pending message from stack. | 3956 // Restore pending message from stack. |
4059 PopOperand(r4); | 3957 PopOperand(r3); |
4060 ExternalReference pending_message_obj = | 3958 ExternalReference pending_message_obj = |
4061 ExternalReference::address_of_pending_message_obj(isolate()); | 3959 ExternalReference::address_of_pending_message_obj(isolate()); |
4062 __ mov(ip, Operand(pending_message_obj)); | 3960 __ mov(ip, Operand(pending_message_obj)); |
4063 __ StoreP(r4, MemOperand(ip)); | 3961 __ StoreP(r3, MemOperand(ip)); |
4064 } | 3962 } |
4065 | 3963 |
4066 | |
4067 void FullCodeGenerator::ClearPendingMessage() { | 3964 void FullCodeGenerator::ClearPendingMessage() { |
4068 DCHECK(!result_register().is(r4)); | 3965 DCHECK(!result_register().is(r3)); |
4069 ExternalReference pending_message_obj = | 3966 ExternalReference pending_message_obj = |
4070 ExternalReference::address_of_pending_message_obj(isolate()); | 3967 ExternalReference::address_of_pending_message_obj(isolate()); |
4071 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); | 3968 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); |
4072 __ mov(ip, Operand(pending_message_obj)); | 3969 __ mov(ip, Operand(pending_message_obj)); |
4073 __ StoreP(r4, MemOperand(ip)); | 3970 __ StoreP(r3, MemOperand(ip)); |
4074 } | 3971 } |
4075 | 3972 |
3973 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) { | |
3974 DCHECK(!slot.IsInvalid()); | |
3975 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(), | |
3976 Operand(SmiFromSlot(slot))); | |
3977 } | |
4076 | 3978 |
4077 void FullCodeGenerator::DeferredCommands::EmitCommands() { | 3979 void FullCodeGenerator::DeferredCommands::EmitCommands() { |
4078 DCHECK(!result_register().is(r4)); | 3980 DCHECK(!result_register().is(r3)); |
4079 // Restore the accumulator (r3) and token (r4). | 3981 // Restore the accumulator (r2) and token (r3). |
4080 __ Pop(r4, result_register()); | 3982 __ Pop(r3, result_register()); |
4081 for (DeferredCommand cmd : commands_) { | 3983 for (DeferredCommand cmd : commands_) { |
4082 Label skip; | 3984 Label skip; |
4083 __ CmpSmiLiteral(r4, Smi::FromInt(cmd.token), r0); | 3985 __ CmpSmiLiteral(r3, Smi::FromInt(cmd.token), r0); |
4084 __ bne(&skip); | 3986 __ bne(&skip); |
4085 switch (cmd.command) { | 3987 switch (cmd.command) { |
4086 case kReturn: | 3988 case kReturn: |
4087 codegen_->EmitUnwindAndReturn(); | 3989 codegen_->EmitUnwindAndReturn(); |
4088 break; | 3990 break; |
4089 case kThrow: | 3991 case kThrow: |
4090 __ Push(result_register()); | 3992 __ Push(result_register()); |
4091 __ CallRuntime(Runtime::kReThrow); | 3993 __ CallRuntime(Runtime::kReThrow); |
4092 break; | 3994 break; |
4093 case kContinue: | 3995 case kContinue: |
4094 codegen_->EmitContinue(cmd.target); | 3996 codegen_->EmitContinue(cmd.target); |
4095 break; | 3997 break; |
4096 case kBreak: | 3998 case kBreak: |
4097 codegen_->EmitBreak(cmd.target); | 3999 codegen_->EmitBreak(cmd.target); |
4098 break; | 4000 break; |
4099 } | 4001 } |
4100 __ bind(&skip); | 4002 __ bind(&skip); |
4101 } | 4003 } |
4102 } | 4004 } |
4103 | 4005 |
4104 #undef __ | 4006 #undef __ |
4105 | 4007 |
4008 #if V8_TARGET_ARCH_S390X | |
4009 static const FourByteInstr kInterruptBranchInstruction = 0xA7A40011; | |
4010 static const FourByteInstr kOSRBranchInstruction = 0xA7040011; | |
4011 static const int16_t kBackEdgeBranchOffset = 0x11 * 2; | |
4012 #else | |
4013 static const FourByteInstr kInterruptBranchInstruction = 0xA7A4000D; | |
4014 static const FourByteInstr kOSRBranchInstruction = 0xA704000D; | |
4015 static const int16_t kBackEdgeBranchOffset = 0xD * 2; | |
4016 #endif | |
4106 | 4017 |
4107 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc, | 4018 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc, |
4108 BackEdgeState target_state, | 4019 BackEdgeState target_state, |
4109 Code* replacement_code) { | 4020 Code* replacement_code) { |
4110 Address mov_address = Assembler::target_address_from_return_address(pc); | 4021 Address call_address = Assembler::target_address_from_return_address(pc); |
4111 Address cmp_address = mov_address - 2 * Assembler::kInstrSize; | 4022 Address branch_address = call_address - 4; |
4112 Isolate* isolate = unoptimized_code->GetIsolate(); | 4023 Isolate* isolate = unoptimized_code->GetIsolate(); |
4113 CodePatcher patcher(isolate, cmp_address, 1); | 4024 CodePatcher patcher(isolate, branch_address, 4); |
4114 | 4025 |
4115 switch (target_state) { | 4026 switch (target_state) { |
4116 case INTERRUPT: { | 4027 case INTERRUPT: { |
4117 // <decrement profiling counter> | 4028 // <decrement profiling counter> |
4118 // cmpi r6, 0 | 4029 // bge <ok> ;; patched to GE BRC |
4119 // bge <ok> ;; not changed | 4030 // brasl r14, <interrupt stub address> |
4120 // mov r12, <interrupt stub address> | |
4121 // mtlr r12 | |
4122 // blrl | |
4123 // <reset profiling counter> | 4031 // <reset profiling counter> |
4124 // ok-label | 4032 // ok-label |
4125 patcher.masm()->cmpi(r6, Operand::Zero()); | 4033 patcher.masm()->brc(ge, Operand(kBackEdgeBranchOffset)); |
4126 break; | 4034 break; |
4127 } | 4035 } |
4128 case ON_STACK_REPLACEMENT: | 4036 case ON_STACK_REPLACEMENT: |
4129 case OSR_AFTER_STACK_CHECK: | 4037 case OSR_AFTER_STACK_CHECK: |
4130 // <decrement profiling counter> | 4038 // <decrement profiling counter> |
4131 // crset | 4039 // brc 0x0, <ok> ;; patched to NOP BRC |
4132 // bge <ok> ;; not changed | 4040 // brasl r14, <interrupt stub address> |
4133 // mov r12, <on-stack replacement address> | |
4134 // mtlr r12 | |
4135 // blrl | |
4136 // <reset profiling counter> | 4041 // <reset profiling counter> |
4137 // ok-label ----- pc_after points here | 4042 // ok-label ----- pc_after points here |
4138 | 4043 patcher.masm()->brc(CC_NOP, Operand(kBackEdgeBranchOffset)); |
4139 // Set the LT bit such that bge is a NOP | |
4140 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT)); | |
4141 break; | 4044 break; |
4142 } | 4045 } |
4143 | 4046 |
4144 // Replace the stack check address in the mov sequence with the | 4047 // Replace the stack check address in the mov sequence with the |
4145 // entry address of the replacement code. | 4048 // entry address of the replacement code. |
4146 Assembler::set_target_address_at(isolate, mov_address, unoptimized_code, | 4049 Assembler::set_target_address_at(isolate, call_address, unoptimized_code, |
4147 replacement_code->entry()); | 4050 replacement_code->entry()); |
4148 | 4051 |
4149 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 4052 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
4150 unoptimized_code, mov_address, replacement_code); | 4053 unoptimized_code, call_address, replacement_code); |
4151 } | 4054 } |
4152 | 4055 |
4153 | |
4154 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( | 4056 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( |
4155 Isolate* isolate, Code* unoptimized_code, Address pc) { | 4057 Isolate* isolate, Code* unoptimized_code, Address pc) { |
4156 Address mov_address = Assembler::target_address_from_return_address(pc); | 4058 Address call_address = Assembler::target_address_from_return_address(pc); |
4157 Address cmp_address = mov_address - 2 * Assembler::kInstrSize; | 4059 Address branch_address = call_address - 4; |
4158 Address interrupt_address = | 4060 Address interrupt_address = |
4159 Assembler::target_address_at(mov_address, unoptimized_code); | 4061 Assembler::target_address_at(call_address, unoptimized_code); |
4160 | 4062 |
4161 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) { | 4063 DCHECK(BRC == Instruction::S390OpcodeValue(branch_address)); |
4064 // For interrupt, we expect a branch greater than or equal | |
4065 // i.e. BRC 0xa, +XXXX (0xA7A4XXXX) | |
4066 FourByteInstr br_instr = Instruction::InstructionBits( | |
4067 reinterpret_cast<const byte*>(branch_address)); | |
4068 if (kInterruptBranchInstruction == br_instr) { | |
4162 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry()); | 4069 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry()); |
4163 return INTERRUPT; | 4070 return INTERRUPT; |
4164 } | 4071 } |
4165 | 4072 |
4166 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address))); | 4073 // Expect BRC to be patched to NOP branch. |
4074 // i.e. BRC 0x0, +XXXX (0xA704XXXX) | |
4075 USE(kOSRBranchInstruction); | |
4076 DCHECK(kOSRBranchInstruction == br_instr); | |
4167 | 4077 |
4168 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) { | 4078 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) { |
4169 return ON_STACK_REPLACEMENT; | 4079 return ON_STACK_REPLACEMENT; |
4170 } | 4080 } |
4171 | 4081 |
4172 DCHECK(interrupt_address == | 4082 DCHECK(interrupt_address == |
4173 isolate->builtins()->OsrAfterStackCheck()->entry()); | 4083 isolate->builtins()->OsrAfterStackCheck()->entry()); |
4174 return OSR_AFTER_STACK_CHECK; | 4084 return OSR_AFTER_STACK_CHECK; |
4175 } | 4085 } |
4086 | |
4176 } // namespace internal | 4087 } // namespace internal |
4177 } // namespace v8 | 4088 } // namespace v8 |
4178 #endif // V8_TARGET_ARCH_PPC | 4089 #endif // V8_TARGET_ARCH_S390 |