OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 // Note on Mips implementation: | 9 // Note on Mips implementation: |
10 // | 10 // |
11 // The result_register() for mips is the 'v0' register, which is defined | 11 // The result_register() for mips is the 'v0' register, which is defined |
12 // by the ABI to contain function return values. However, the first | 12 // by the ABI to contain function return values. However, the first |
13 // parameter to a function is defined to be 'a0'. So there are many | 13 // parameter to a function is defined to be 'a0'. So there are many |
14 // places where we have to move a previous result in v0 to a0 for the | 14 // places where we have to move a previous result in v0 to a0 for the |
15 // next call: mov(a0, v0). This is not needed on the other architectures. | 15 // next call: mov(a0, v0). This is not needed on the other architectures. |
16 | 16 |
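A minimal sketch of the calling pattern described above (a hypothetical fragment, not part of this change; Runtime::kSomeFunction is made up for illustration): the previous result arrives in v0 and has to be copied into a0 before it can serve as the first argument of the next call.

```cpp
// Hypothetical MacroAssembler fragment illustrating the v0 -> a0 shuffle.
__ CallRuntime(Runtime::kSomeFunction, 1);  // ABI: result is returned in v0.
__ mov(a0, v0);                             // ABI: first argument lives in a0.
// ... a0 can now be pushed or consumed directly by the next call ...
```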
17 #include "src/code-stubs.h" | 17 #include "src/code-stubs.h" |
18 #include "src/codegen.h" | 18 #include "src/codegen.h" |
19 #include "src/compiler.h" | 19 #include "src/compiler.h" |
20 #include "src/debug.h" | 20 #include "src/debug.h" |
21 #include "src/full-codegen.h" | 21 #include "src/full-codegen.h" |
22 #include "src/isolate-inl.h" | 22 #include "src/isolate-inl.h" |
23 #include "src/parser.h" | 23 #include "src/parser.h" |
24 #include "src/scopes.h" | 24 #include "src/scopes.h" |
25 #include "src/stub-cache.h" | 25 #include "src/stub-cache.h" |
26 | 26 |
27 #include "src/mips/code-stubs-mips.h" | 27 #include "src/mips64/code-stubs-mips64.h" |
28 #include "src/mips/macro-assembler-mips.h" | 28 #include "src/mips64/macro-assembler-mips64.h" |
29 | 29 |
30 namespace v8 { | 30 namespace v8 { |
31 namespace internal { | 31 namespace internal { |
32 | 32 |
33 #define __ ACCESS_MASM(masm_) | 33 #define __ ACCESS_MASM(masm_) |
34 | 34 |
35 | 35 |
36 // A patch site is a location in the code that can be patched. This | 36 // A patch site is a location in the code that can be patched. This |
37 // class has a number of methods to emit patchable code and the | 37 // class has a number of methods to emit patchable code and the |
38 // method EmitPatchInfo to record a marker back to the patchable code. This | 38 // method EmitPatchInfo to record a marker back to the patchable code. This |
(...skipping 90 matching lines...)
129 __ stop("stop-at"); | 129 __ stop("stop-at"); |
130 } | 130 } |
131 #endif | 131 #endif |
132 | 132 |
133 // Sloppy mode functions and builtins need to replace the receiver with the | 133 // Sloppy mode functions and builtins need to replace the receiver with the |
134 // global proxy when called as functions (without an explicit receiver | 134 // global proxy when called as functions (without an explicit receiver |
135 // object). | 135 // object). |
136 if (info->strict_mode() == SLOPPY && !info->is_native()) { | 136 if (info->strict_mode() == SLOPPY && !info->is_native()) { |
137 Label ok; | 137 Label ok; |
138 int receiver_offset = info->scope()->num_parameters() * kPointerSize; | 138 int receiver_offset = info->scope()->num_parameters() * kPointerSize; |
139 __ lw(at, MemOperand(sp, receiver_offset)); | 139 __ ld(at, MemOperand(sp, receiver_offset)); |
140 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 140 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
141 __ Branch(&ok, ne, a2, Operand(at)); | 141 __ Branch(&ok, ne, a2, Operand(at)); |
142 | 142 |
143 __ lw(a2, GlobalObjectOperand()); | 143 __ ld(a2, GlobalObjectOperand()); |
144 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset)); | 144 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset)); |
145 | 145 |
146 __ sw(a2, MemOperand(sp, receiver_offset)); | 146 __ sd(a2, MemOperand(sp, receiver_offset)); |
147 | |
148 __ bind(&ok); | 147 __ bind(&ok); |
149 } | 148 } |
150 | |
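For reference, a sketch of the stack layout the receiver check above assumes, inferred from the offset computation (the receiver sits just above the parameters pushed by the caller):

```cpp
// Incoming stack layout (sketch), offsets from sp in bytes:
//   sp + num_parameters * kPointerSize            : receiver  <- receiver_offset
//   sp + 0 .. (num_parameters - 1) * kPointerSize : parameters
// On mips64 kPointerSize is 8, which is why the loads switch from lw to ld.
```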
151 // Open a frame scope to indicate that there is a frame on the stack. The | 149 // Open a frame scope to indicate that there is a frame on the stack. The |
152 // MANUAL indicates that the scope shouldn't actually generate code to set up | 150 // MANUAL indicates that the scope shouldn't actually generate code to set up |
153 // the frame (that is done below). | 151 // the frame (that is done below). |
154 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 152 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
155 | |
156 info->set_prologue_offset(masm_->pc_offset()); | 153 info->set_prologue_offset(masm_->pc_offset()); |
157 __ Prologue(info->IsCodePreAgingActive()); | 154 __ Prologue(info->IsCodePreAgingActive()); |
158 info->AddNoFrameRange(0, masm_->pc_offset()); | 155 info->AddNoFrameRange(0, masm_->pc_offset()); |
159 | 156 |
160 { Comment cmnt(masm_, "[ Allocate locals"); | 157 { Comment cmnt(masm_, "[ Allocate locals"); |
161 int locals_count = info->scope()->num_stack_slots(); | 158 int locals_count = info->scope()->num_stack_slots(); |
162 // Generators allocate locals, if any, in context slots. | 159 // Generators allocate locals, if any, in context slots. |
163 ASSERT(!info->function()->is_generator() || locals_count == 0); | 160 ASSERT(!info->function()->is_generator() || locals_count == 0); |
164 if (locals_count > 0) { | 161 if (locals_count > 0) { |
165 if (locals_count >= 128) { | 162 if (locals_count >= 128) { |
166 Label ok; | 163 Label ok; |
167 __ Subu(t5, sp, Operand(locals_count * kPointerSize)); | 164 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize)); |
168 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); | 165 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); |
169 __ Branch(&ok, hs, t5, Operand(a2)); | 166 __ Branch(&ok, hs, t1, Operand(a2)); |
170 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); | 167 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION); |
171 __ bind(&ok); | 168 __ bind(&ok); |
172 } | 169 } |
173 __ LoadRoot(t5, Heap::kUndefinedValueRootIndex); | 170 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); |
174 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; | 171 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; |
175 if (locals_count >= kMaxPushes) { | 172 if (locals_count >= kMaxPushes) { |
176 int loop_iterations = locals_count / kMaxPushes; | 173 int loop_iterations = locals_count / kMaxPushes; |
177 __ li(a2, Operand(loop_iterations)); | 174 __ li(a2, Operand(loop_iterations)); |
178 Label loop_header; | 175 Label loop_header; |
179 __ bind(&loop_header); | 176 __ bind(&loop_header); |
180 // Do pushes. | 177 // Do pushes. |
181 __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize)); | 178 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize)); |
182 for (int i = 0; i < kMaxPushes; i++) { | 179 for (int i = 0; i < kMaxPushes; i++) { |
183 __ sw(t5, MemOperand(sp, i * kPointerSize)); | 180 __ sd(t1, MemOperand(sp, i * kPointerSize)); |
184 } | 181 } |
185 // Continue loop if not done. | 182 // Continue loop if not done. |
186 __ Subu(a2, a2, Operand(1)); | 183 __ Dsubu(a2, a2, Operand(1)); |
187 __ Branch(&loop_header, ne, a2, Operand(zero_reg)); | 184 __ Branch(&loop_header, ne, a2, Operand(zero_reg)); |
188 } | 185 } |
189 int remaining = locals_count % kMaxPushes; | 186 int remaining = locals_count % kMaxPushes; |
190 // Emit the remaining pushes. | 187 // Emit the remaining pushes. |
191 __ Subu(sp, sp, Operand(remaining * kPointerSize)); | 188 __ Dsubu(sp, sp, Operand(remaining * kPointerSize)); |
192 for (int i = 0; i < remaining; i++) { | 189 for (int i = 0; i < remaining; i++) { |
193 __ sw(t5, MemOperand(sp, i * kPointerSize)); | 190 __ sd(t1, MemOperand(sp, i * kPointerSize)); |
194 } | 191 } |
195 } | 192 } |
196 } | 193 } |
197 | 194 |
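The push loop above is a plain quotient/remainder decomposition of the store count; a standalone sketch of the arithmetic:

```cpp
// locals_count undefined-stores, emitted as blocks of kMaxPushes plus a tail:
int loop_iterations = locals_count / kMaxPushes;  // emitted as a runtime loop
int remaining = locals_count % kMaxPushes;        // emitted fully unrolled
// Invariant: loop_iterations * kMaxPushes + remaining == locals_count.
```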
198 bool function_in_register = true; | 195 bool function_in_register = true; |
199 | 196 |
200 // Possibly allocate a local context. | 197 // Possibly allocate a local context. |
201 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 198 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
202 if (heap_slots > 0) { | 199 if (heap_slots > 0) { |
203 Comment cmnt(masm_, "[ Allocate context"); | 200 Comment cmnt(masm_, "[ Allocate context"); |
204 // Argument to NewContext is the function, which is still in a1. | 201 // Argument to NewContext is the function, which is still in a1. |
205 bool need_write_barrier = true; | 202 bool need_write_barrier = true; |
206 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { | 203 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { |
207 __ push(a1); | 204 __ push(a1); |
208 __ Push(info->scope()->GetScopeInfo()); | 205 __ Push(info->scope()->GetScopeInfo()); |
209 __ CallRuntime(Runtime::kNewGlobalContext, 2); | 206 __ CallRuntime(Runtime::kNewGlobalContext, 2); |
210 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 207 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
211 FastNewContextStub stub(isolate(), heap_slots); | 208 FastNewContextStub stub(isolate(), heap_slots); |
212 __ CallStub(&stub); | 209 __ CallStub(&stub); |
213 // Result of FastNewContextStub is always in new space. | 210 // Result of FastNewContextStub is always in new space. |
214 need_write_barrier = false; | 211 need_write_barrier = false; |
215 } else { | 212 } else { |
216 __ push(a1); | 213 __ push(a1); |
217 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 214 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
218 } | 215 } |
219 function_in_register = false; | 216 function_in_register = false; |
220 // Context is returned in v0. It replaces the context passed to us. | 217 // Context is returned in v0. It replaces the context passed to us. |
221 // It's saved on the stack and kept live in cp. | 218 // It's saved on the stack and kept live in cp. |
222 __ mov(cp, v0); | 219 __ mov(cp, v0); |
223 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 220 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
224 // Copy any necessary parameters into the context. | 221 // Copy any necessary parameters into the context. |
225 int num_parameters = info->scope()->num_parameters(); | 222 int num_parameters = info->scope()->num_parameters(); |
226 for (int i = 0; i < num_parameters; i++) { | 223 for (int i = 0; i < num_parameters; i++) { |
227 Variable* var = scope()->parameter(i); | 224 Variable* var = scope()->parameter(i); |
228 if (var->IsContextSlot()) { | 225 if (var->IsContextSlot()) { |
229 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 226 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
230 (num_parameters - 1 - i) * kPointerSize; | 227 (num_parameters - 1 - i) * kPointerSize; |
231 // Load parameter from stack. | 228 // Load parameter from stack. |
232 __ lw(a0, MemOperand(fp, parameter_offset)); | 229 __ ld(a0, MemOperand(fp, parameter_offset)); |
233 // Store it in the context. | 230 // Store it in the context. |
234 MemOperand target = ContextOperand(cp, var->index()); | 231 MemOperand target = ContextOperand(cp, var->index()); |
235 __ sw(a0, target); | 232 __ sd(a0, target); |
236 | 233 |
237 // Update the write barrier. | 234 // Update the write barrier. |
238 if (need_write_barrier) { | 235 if (need_write_barrier) { |
239 __ RecordWriteContextSlot( | 236 __ RecordWriteContextSlot( |
240 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs); | 237 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs); |
241 } else if (FLAG_debug_code) { | 238 } else if (FLAG_debug_code) { |
242 Label done; | 239 Label done; |
243 __ JumpIfInNewSpace(cp, a0, &done); | 240 __ JumpIfInNewSpace(cp, a0, &done); |
244 __ Abort(kExpectedNewSpaceObject); | 241 __ Abort(kExpectedNewSpaceObject); |
245 __ bind(&done); | 242 __ bind(&done); |
246 } | 243 } |
247 } | 244 } |
248 } | 245 } |
249 } | 246 } |
250 | |
251 Variable* arguments = scope()->arguments(); | 247 Variable* arguments = scope()->arguments(); |
252 if (arguments != NULL) { | 248 if (arguments != NULL) { |
253 // Function uses arguments object. | 249 // Function uses arguments object. |
254 Comment cmnt(masm_, "[ Allocate arguments object"); | 250 Comment cmnt(masm_, "[ Allocate arguments object"); |
255 if (!function_in_register) { | 251 if (!function_in_register) { |
256 // Load this again, if it's used by the local context below. | 252 // Load this again, if it's used by the local context below. |
257 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 253 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
258 } else { | 254 } else { |
259 __ mov(a3, a1); | 255 __ mov(a3, a1); |
260 } | 256 } |
261 // Receiver is just before the parameters on the caller's stack. | 257 // Receiver is just before the parameters on the caller's stack. |
262 int num_parameters = info->scope()->num_parameters(); | 258 int num_parameters = info->scope()->num_parameters(); |
263 int offset = num_parameters * kPointerSize; | 259 int offset = num_parameters * kPointerSize; |
264 __ Addu(a2, fp, | 260 __ Daddu(a2, fp, |
265 Operand(StandardFrameConstants::kCallerSPOffset + offset)); | 261 Operand(StandardFrameConstants::kCallerSPOffset + offset)); |
266 __ li(a1, Operand(Smi::FromInt(num_parameters))); | 262 __ li(a1, Operand(Smi::FromInt(num_parameters))); |
267 __ Push(a3, a2, a1); | 263 __ Push(a3, a2, a1); |
268 | 264 |
269 // Arguments to ArgumentsAccessStub: | 265 // Arguments to ArgumentsAccessStub: |
270 // function, receiver address, parameter count. | 266 // function, receiver address, parameter count. |
271 // The stub will rewrite receiver and parameter count if the previous | 267 // The stub will rewrite receiver and parameter count if the previous |
272 // stack frame was an arguments adapter frame. | 268 // stack frame was an arguments adapter frame. |
273 ArgumentsAccessStub::Type type; | 269 ArgumentsAccessStub::Type type; |
274 if (strict_mode() == STRICT) { | 270 if (strict_mode() == STRICT) { |
275 type = ArgumentsAccessStub::NEW_STRICT; | 271 type = ArgumentsAccessStub::NEW_STRICT; |
276 } else if (function()->has_duplicate_parameters()) { | 272 } else if (function()->has_duplicate_parameters()) { |
277 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; | 273 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; |
278 } else { | 274 } else { |
279 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; | 275 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; |
280 } | 276 } |
281 ArgumentsAccessStub stub(isolate(), type); | 277 ArgumentsAccessStub stub(isolate(), type); |
282 __ CallStub(&stub); | 278 __ CallStub(&stub); |
283 | 279 |
284 SetVar(arguments, v0, a1, a2); | 280 SetVar(arguments, v0, a1, a2); |
285 } | 281 } |
286 | 282 |
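The stub selection above can be summarized as follows (a reading of the branch conditions, not new behavior):

```cpp
// ArgumentsAccessStub::Type selection, summarized:
//   strict mode                   -> NEW_STRICT       (unmapped arguments)
//   sloppy + duplicate parameters -> NEW_SLOPPY_SLOW  (no parameter map)
//   sloppy otherwise              -> NEW_SLOPPY_FAST  (mapped arguments)
```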
287 if (FLAG_trace) { | 283 if (FLAG_trace) { |
288 __ CallRuntime(Runtime::kTraceEnter, 0); | 284 __ CallRuntime(Runtime::kTraceEnter, 0); |
289 } | 285 } |
290 | |
291 // Visit the declarations and body unless there is an illegal | 286 // Visit the declarations and body unless there is an illegal |
292 // redeclaration. | 287 // redeclaration. |
293 if (scope()->HasIllegalRedeclaration()) { | 288 if (scope()->HasIllegalRedeclaration()) { |
294 Comment cmnt(masm_, "[ Declarations"); | 289 Comment cmnt(masm_, "[ Declarations"); |
295 scope()->VisitIllegalRedeclaration(this); | 290 scope()->VisitIllegalRedeclaration(this); |
296 | 291 |
297 } else { | 292 } else { |
298 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); | 293 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); |
299 { Comment cmnt(masm_, "[ Declarations"); | 294 { Comment cmnt(masm_, "[ Declarations"); |
300 // For named function expressions, declare the function name as a | 295 // For named function expressions, declare the function name as a |
301 // constant. | 296 // constant. |
302 if (scope()->is_function_scope() && scope()->function() != NULL) { | 297 if (scope()->is_function_scope() && scope()->function() != NULL) { |
303 VariableDeclaration* function = scope()->function(); | 298 VariableDeclaration* function = scope()->function(); |
304 ASSERT(function->proxy()->var()->mode() == CONST || | 299 ASSERT(function->proxy()->var()->mode() == CONST || |
305 function->proxy()->var()->mode() == CONST_LEGACY); | 300 function->proxy()->var()->mode() == CONST_LEGACY); |
306 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); | 301 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); |
307 VisitVariableDeclaration(function); | 302 VisitVariableDeclaration(function); |
308 } | 303 } |
309 VisitDeclarations(scope()->declarations()); | 304 VisitDeclarations(scope()->declarations()); |
310 } | 305 } |
311 | |
312 { Comment cmnt(masm_, "[ Stack check"); | 306 { Comment cmnt(masm_, "[ Stack check"); |
313 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); | 307 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); |
314 Label ok; | 308 Label ok; |
315 __ LoadRoot(at, Heap::kStackLimitRootIndex); | 309 __ LoadRoot(at, Heap::kStackLimitRootIndex); |
316 __ Branch(&ok, hs, sp, Operand(at)); | 310 __ Branch(&ok, hs, sp, Operand(at)); |
317 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); | 311 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); |
318 PredictableCodeSizeScope predictable(masm_, | 312 PredictableCodeSizeScope predictable(masm_, |
319 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); | 313 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); |
320 __ Call(stack_check, RelocInfo::CODE_TARGET); | 314 __ Call(stack_check, RelocInfo::CODE_TARGET); |
321 __ bind(&ok); | 315 __ bind(&ok); |
322 } | 316 } |
323 | 317 |
324 { Comment cmnt(masm_, "[ Body"); | 318 { Comment cmnt(masm_, "[ Body"); |
325 ASSERT(loop_depth() == 0); | 319 ASSERT(loop_depth() == 0); |
| 320 |
326 VisitStatements(function()->body()); | 321 VisitStatements(function()->body()); |
| 322 |
327 ASSERT(loop_depth() == 0); | 323 ASSERT(loop_depth() == 0); |
328 } | 324 } |
329 } | 325 } |
330 | 326 |
331 // Always emit a 'return undefined' in case control fell off the end of | 327 // Always emit a 'return undefined' in case control fell off the end of |
332 // the body. | 328 // the body. |
333 { Comment cmnt(masm_, "[ return <undefined>;"); | 329 { Comment cmnt(masm_, "[ return <undefined>;"); |
334 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); | 330 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
335 } | 331 } |
336 EmitReturnSequence(); | 332 EmitReturnSequence(); |
337 } | 333 } |
338 | 334 |
339 | 335 |
340 void FullCodeGenerator::ClearAccumulator() { | 336 void FullCodeGenerator::ClearAccumulator() { |
341 ASSERT(Smi::FromInt(0) == 0); | 337 ASSERT(Smi::FromInt(0) == 0); |
342 __ mov(v0, zero_reg); | 338 __ mov(v0, zero_reg); |
343 } | 339 } |
344 | 340 |
345 | 341 |
346 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 342 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
347 __ li(a2, Operand(profiling_counter_)); | 343 __ li(a2, Operand(profiling_counter_)); |
348 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 344 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
349 __ Subu(a3, a3, Operand(Smi::FromInt(delta))); | 345 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta))); |
350 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 346 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
351 } | 347 } |
352 | 348 |
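Note that the delta is subtracted from the tagged smi directly. A sketch of why this is valid, assuming V8's 64-bit smi encoding with the payload in the upper 32 bits:

```cpp
// Sketch: smi tagging is linear, so subtraction commutes with tagging.
int64_t SmiTag(int32_t value) {
  return static_cast<int64_t>(value) << 32;  // assumed kSmiShift == 32
}
// SmiTag(a) - SmiTag(b) == SmiTag(a - b), hence Dsubu needs no untag/retag.
```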
353 | 349 |
354 void FullCodeGenerator::EmitProfilingCounterReset() { | 350 void FullCodeGenerator::EmitProfilingCounterReset() { |
355 int reset_value = FLAG_interrupt_budget; | 351 int reset_value = FLAG_interrupt_budget; |
356 if (info_->is_debug()) { | 352 if (info_->is_debug()) { |
357 // Detect debug break requests as soon as possible. | 353 // Detect debug break requests as soon as possible. |
358 reset_value = FLAG_interrupt_budget >> 4; | 354 reset_value = FLAG_interrupt_budget >> 4; |
359 } | 355 } |
360 __ li(a2, Operand(profiling_counter_)); | 356 __ li(a2, Operand(profiling_counter_)); |
361 __ li(a3, Operand(Smi::FromInt(reset_value))); | 357 __ li(a3, Operand(Smi::FromInt(reset_value))); |
362 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset)); | 358 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset)); |
363 } | 359 } |
364 | 360 |
365 | 361 |
366 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 362 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
367 Label* back_edge_target) { | 363 Label* back_edge_target) { |
368 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need | 364 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need |
369 // to make sure it is constant. Branch may emit a skip-or-jump sequence | 365 // to make sure it is constant. Branch may emit a skip-or-jump sequence |
370 // instead of the normal Branch. It seems that the "skip" part of that | 366 // instead of the normal Branch. It seems that the "skip" part of that |
371 // sequence is about as long as this Branch would be so it is safe to ignore | 367 // sequence is about as long as this Branch would be so it is safe to ignore |
372 // that. | 368 // that. |
(...skipping 64 matching lines...)
437 // sequence. | 433 // sequence. |
438 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 434 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
439 // Here we use masm_-> instead of the __ macro to keep the code coverage | 435 // Here we use masm_-> instead of the __ macro to keep the code coverage |
440 // tool from instrumenting this code, as we rely on the code size here. | 436 // tool from instrumenting this code, as we rely on the code size here. |
441 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; | 437 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; |
442 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | 438 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
443 __ RecordJSReturn(); | 439 __ RecordJSReturn(); |
444 masm_->mov(sp, fp); | 440 masm_->mov(sp, fp); |
445 int no_frame_start = masm_->pc_offset(); | 441 int no_frame_start = masm_->pc_offset(); |
446 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit())); | 442 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit())); |
447 masm_->Addu(sp, sp, Operand(sp_delta)); | 443 masm_->Daddu(sp, sp, Operand(sp_delta)); |
448 masm_->Jump(ra); | 444 masm_->Jump(ra); |
449 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); | 445 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); |
450 } | 446 } |
451 | 447 |
452 #ifdef DEBUG | 448 #ifdef DEBUG |
453 // Check that the size of the code used for returning is large enough | 449 // Check that the size of the code used for returning is large enough |
454 // for the debugger's requirements. | 450 // for the debugger's requirements. |
455 ASSERT(Assembler::kJSReturnSequenceInstructions <= | 451 ASSERT(Assembler::kJSReturnSequenceInstructions <= |
456 masm_->InstructionsGeneratedSince(&check_exit_codesize)); | 452 masm_->InstructionsGeneratedSince(&check_exit_codesize)); |
457 #endif | 453 #endif |
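Taken together, the return sequence above has the following run-time effect (comment sketch):

```cpp
// Epilogue, step by step:
//   sp = fp;                                    // discard the frame body
//   pop fp, ra;                                 // MultiPop of {fp, ra}
//   sp += (num_parameters + 1) * kPointerSize;  // drop receiver + arguments
//   jump ra;                                    // return to the caller
```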
(...skipping 122 matching lines...)
580 ASSERT(count > 0); | 576 ASSERT(count > 0); |
581 __ Drop(count); | 577 __ Drop(count); |
582 __ Move(result_register(), reg); | 578 __ Move(result_register(), reg); |
583 } | 579 } |
584 | 580 |
585 | 581 |
586 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, | 582 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, |
587 Register reg) const { | 583 Register reg) const { |
588 ASSERT(count > 0); | 584 ASSERT(count > 0); |
589 if (count > 1) __ Drop(count - 1); | 585 if (count > 1) __ Drop(count - 1); |
590 __ sw(reg, MemOperand(sp, 0)); | 586 __ sd(reg, MemOperand(sp, 0)); |
591 } | 587 } |
592 | 588 |
593 | 589 |
594 void FullCodeGenerator::TestContext::DropAndPlug(int count, | 590 void FullCodeGenerator::TestContext::DropAndPlug(int count, |
595 Register reg) const { | 591 Register reg) const { |
596 ASSERT(count > 0); | 592 ASSERT(count > 0); |
597 // For simplicity we always test the accumulator register. | 593 // For simplicity we always test the accumulator register. |
598 __ Drop(count); | 594 __ Drop(count); |
599 __ Move(result_register(), reg); | 595 __ Move(result_register(), reg); |
600 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); | 596 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); |
(...skipping 127 matching lines...)
728 return ContextOperand(scratch, var->index()); | 724 return ContextOperand(scratch, var->index()); |
729 } else { | 725 } else { |
730 return StackOperand(var); | 726 return StackOperand(var); |
731 } | 727 } |
732 } | 728 } |
733 | 729 |
734 | 730 |
735 void FullCodeGenerator::GetVar(Register dest, Variable* var) { | 731 void FullCodeGenerator::GetVar(Register dest, Variable* var) { |
736 // Use destination as scratch. | 732 // Use destination as scratch. |
737 MemOperand location = VarOperand(var, dest); | 733 MemOperand location = VarOperand(var, dest); |
738 __ lw(dest, location); | 734 __ ld(dest, location); |
739 } | 735 } |
740 | 736 |
741 | 737 |
742 void FullCodeGenerator::SetVar(Variable* var, | 738 void FullCodeGenerator::SetVar(Variable* var, |
743 Register src, | 739 Register src, |
744 Register scratch0, | 740 Register scratch0, |
745 Register scratch1) { | 741 Register scratch1) { |
746 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | 742 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
747 ASSERT(!scratch0.is(src)); | 743 ASSERT(!scratch0.is(src)); |
748 ASSERT(!scratch0.is(scratch1)); | 744 ASSERT(!scratch0.is(scratch1)); |
749 ASSERT(!scratch1.is(src)); | 745 ASSERT(!scratch1.is(src)); |
750 MemOperand location = VarOperand(var, scratch0); | 746 MemOperand location = VarOperand(var, scratch0); |
751 __ sw(src, location); | 747 __ sd(src, location); |
752 // Emit the write barrier code if the location is in the heap. | 748 // Emit the write barrier code if the location is in the heap. |
753 if (var->IsContextSlot()) { | 749 if (var->IsContextSlot()) { |
754 __ RecordWriteContextSlot(scratch0, | 750 __ RecordWriteContextSlot(scratch0, |
755 location.offset(), | 751 location.offset(), |
756 src, | 752 src, |
757 scratch1, | 753 scratch1, |
758 kRAHasBeenSaved, | 754 kRAHasBeenSaved, |
759 kDontSaveFPRegs); | 755 kDontSaveFPRegs); |
760 } | 756 } |
761 } | 757 } |
762 | 758 |
763 | 759 |
764 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, | 760 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, |
765 bool should_normalize, | 761 bool should_normalize, |
766 Label* if_true, | 762 Label* if_true, |
767 Label* if_false) { | 763 Label* if_false) { |
768 // Only prepare for bailouts before splits if we're in a test | 764 // Only prepare for bailouts before splits if we're in a test |
769 // context. Otherwise, we let the Visit function deal with the | 765 // context. Otherwise, we let the Visit function deal with the |
770 // preparation to avoid preparing with the same AST id twice. | 766 // preparation to avoid preparing with the same AST id twice. |
771 if (!context()->IsTest() || !info_->IsOptimizable()) return; | 767 if (!context()->IsTest() || !info_->IsOptimizable()) return; |
772 | 768 |
773 Label skip; | 769 Label skip; |
774 if (should_normalize) __ Branch(&skip); | 770 if (should_normalize) __ Branch(&skip); |
775 PrepareForBailout(expr, TOS_REG); | 771 PrepareForBailout(expr, TOS_REG); |
776 if (should_normalize) { | 772 if (should_normalize) { |
777 __ LoadRoot(t0, Heap::kTrueValueRootIndex); | 773 __ LoadRoot(a4, Heap::kTrueValueRootIndex); |
778 Split(eq, a0, Operand(t0), if_true, if_false, NULL); | 774 Split(eq, a0, Operand(a4), if_true, if_false, NULL); |
779 __ bind(&skip); | 775 __ bind(&skip); |
780 } | 776 } |
781 } | 777 } |
782 | 778 |
783 | 779 |
784 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { | 780 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { |
785 // The variable in the declaration always resides in the current function | 781 // The variable in the declaration always resides in the current function |
786 // context. | 782 // context. |
787 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); | 783 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); |
788 if (generate_debug_code_) { | 784 if (generate_debug_code_) { |
789 // Check that we're not inside a with or catch context. | 785 // Check that we're not inside a with or catch context. |
790 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); | 786 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset)); |
791 __ LoadRoot(t0, Heap::kWithContextMapRootIndex); | 787 __ LoadRoot(a4, Heap::kWithContextMapRootIndex); |
792 __ Check(ne, kDeclarationInWithContext, | 788 __ Check(ne, kDeclarationInWithContext, |
793 a1, Operand(t0)); | 789 a1, Operand(a4)); |
794 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex); | 790 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex); |
795 __ Check(ne, kDeclarationInCatchContext, | 791 __ Check(ne, kDeclarationInCatchContext, |
796 a1, Operand(t0)); | 792 a1, Operand(a4)); |
797 } | 793 } |
798 } | 794 } |
799 | 795 |
800 | 796 |
801 void FullCodeGenerator::VisitVariableDeclaration( | 797 void FullCodeGenerator::VisitVariableDeclaration( |
802 VariableDeclaration* declaration) { | 798 VariableDeclaration* declaration) { |
803 // If it was not possible to allocate the variable at compile time, we | 799 // If it was not possible to allocate the variable at compile time, we |
804 // need to "declare" it at runtime to make sure it actually exists in the | 800 // need to "declare" it at runtime to make sure it actually exists in the |
805 // local context. | 801 // local context. |
806 VariableProxy* proxy = declaration->proxy(); | 802 VariableProxy* proxy = declaration->proxy(); |
807 VariableMode mode = declaration->mode(); | 803 VariableMode mode = declaration->mode(); |
808 Variable* variable = proxy->var(); | 804 Variable* variable = proxy->var(); |
809 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; | 805 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; |
810 switch (variable->location()) { | 806 switch (variable->location()) { |
811 case Variable::UNALLOCATED: | 807 case Variable::UNALLOCATED: |
812 globals_->Add(variable->name(), zone()); | 808 globals_->Add(variable->name(), zone()); |
813 globals_->Add(variable->binding_needs_init() | 809 globals_->Add(variable->binding_needs_init() |
814 ? isolate()->factory()->the_hole_value() | 810 ? isolate()->factory()->the_hole_value() |
815 : isolate()->factory()->undefined_value(), | 811 : isolate()->factory()->undefined_value(), |
816 zone()); | 812 zone()); |
817 break; | 813 break; |
818 | 814 |
819 case Variable::PARAMETER: | 815 case Variable::PARAMETER: |
820 case Variable::LOCAL: | 816 case Variable::LOCAL: |
821 if (hole_init) { | 817 if (hole_init) { |
822 Comment cmnt(masm_, "[ VariableDeclaration"); | 818 Comment cmnt(masm_, "[ VariableDeclaration"); |
823 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 819 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); |
824 __ sw(t0, StackOperand(variable)); | 820 __ sd(a4, StackOperand(variable)); |
825 } | 821 } |
826 break; | 822 break; |
827 | 823 |
828 case Variable::CONTEXT: | 824 case Variable::CONTEXT: |
829 if (hole_init) { | 825 if (hole_init) { |
830 Comment cmnt(masm_, "[ VariableDeclaration"); | 826 Comment cmnt(masm_, "[ VariableDeclaration"); |
831 EmitDebugCheckDeclarationContext(variable); | 827 EmitDebugCheckDeclarationContext(variable); |
832 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 828 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
833 __ sw(at, ContextOperand(cp, variable->index())); | 829 __ sd(at, ContextOperand(cp, variable->index())); |
834 // No write barrier since the_hole_value is in old space. | 830 // No write barrier since the_hole_value is in old space. |
835 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 831 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
836 } | 832 } |
837 break; | 833 break; |
838 | 834 |
839 case Variable::LOOKUP: { | 835 case Variable::LOOKUP: { |
840 Comment cmnt(masm_, "[ VariableDeclaration"); | 836 Comment cmnt(masm_, "[ VariableDeclaration"); |
841 __ li(a2, Operand(variable->name())); | 837 __ li(a2, Operand(variable->name())); |
842 // Declaration nodes are always introduced in one of four modes. | 838 // Declaration nodes are always introduced in one of four modes. |
843 ASSERT(IsDeclaredVariableMode(mode)); | 839 ASSERT(IsDeclaredVariableMode(mode)); |
(...skipping 31 matching lines...)
875 // Check for stack-overflow exception. | 871 // Check for stack-overflow exception. |
876 if (function.is_null()) return SetStackOverflow(); | 872 if (function.is_null()) return SetStackOverflow(); |
877 globals_->Add(function, zone()); | 873 globals_->Add(function, zone()); |
878 break; | 874 break; |
879 } | 875 } |
880 | 876 |
881 case Variable::PARAMETER: | 877 case Variable::PARAMETER: |
882 case Variable::LOCAL: { | 878 case Variable::LOCAL: { |
883 Comment cmnt(masm_, "[ FunctionDeclaration"); | 879 Comment cmnt(masm_, "[ FunctionDeclaration"); |
884 VisitForAccumulatorValue(declaration->fun()); | 880 VisitForAccumulatorValue(declaration->fun()); |
885 __ sw(result_register(), StackOperand(variable)); | 881 __ sd(result_register(), StackOperand(variable)); |
886 break; | 882 break; |
887 } | 883 } |
888 | 884 |
889 case Variable::CONTEXT: { | 885 case Variable::CONTEXT: { |
890 Comment cmnt(masm_, "[ FunctionDeclaration"); | 886 Comment cmnt(masm_, "[ FunctionDeclaration"); |
891 EmitDebugCheckDeclarationContext(variable); | 887 EmitDebugCheckDeclarationContext(variable); |
892 VisitForAccumulatorValue(declaration->fun()); | 888 VisitForAccumulatorValue(declaration->fun()); |
893 __ sw(result_register(), ContextOperand(cp, variable->index())); | 889 __ sd(result_register(), ContextOperand(cp, variable->index())); |
894 int offset = Context::SlotOffset(variable->index()); | 890 int offset = Context::SlotOffset(variable->index()); |
895 // We know that we have written a function, which is not a smi. | 891 // We know that we have written a function, which is not a smi. |
896 __ RecordWriteContextSlot(cp, | 892 __ RecordWriteContextSlot(cp, |
897 offset, | 893 offset, |
898 result_register(), | 894 result_register(), |
899 a2, | 895 a2, |
900 kRAHasBeenSaved, | 896 kRAHasBeenSaved, |
901 kDontSaveFPRegs, | 897 kDontSaveFPRegs, |
902 EMIT_REMEMBERED_SET, | 898 EMIT_REMEMBERED_SET, |
903 OMIT_SMI_CHECK); | 899 OMIT_SMI_CHECK); |
(...skipping 12 matching lines...)
916 break; | 912 break; |
917 } | 913 } |
918 } | 914 } |
919 } | 915 } |
920 | 916 |
921 | 917 |
922 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { | 918 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { |
923 Variable* variable = declaration->proxy()->var(); | 919 Variable* variable = declaration->proxy()->var(); |
924 ASSERT(variable->location() == Variable::CONTEXT); | 920 ASSERT(variable->location() == Variable::CONTEXT); |
925 ASSERT(variable->interface()->IsFrozen()); | 921 ASSERT(variable->interface()->IsFrozen()); |
926 | |
927 Comment cmnt(masm_, "[ ModuleDeclaration"); | 922 Comment cmnt(masm_, "[ ModuleDeclaration"); |
928 EmitDebugCheckDeclarationContext(variable); | 923 EmitDebugCheckDeclarationContext(variable); |
929 | 924 |
930 // Load instance object. | 925 // Load instance object. |
931 __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope())); | 926 __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope())); |
932 __ lw(a1, ContextOperand(a1, variable->interface()->Index())); | 927 __ ld(a1, ContextOperand(a1, variable->interface()->Index())); |
933 __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX)); | 928 __ ld(a1, ContextOperand(a1, Context::EXTENSION_INDEX)); |
934 | 929 |
935 // Assign it. | 930 // Assign it. |
936 __ sw(a1, ContextOperand(cp, variable->index())); | 931 __ sd(a1, ContextOperand(cp, variable->index())); |
937 // We know that we have written a module, which is not a smi. | 932 // We know that we have written a module, which is not a smi. |
938 __ RecordWriteContextSlot(cp, | 933 __ RecordWriteContextSlot(cp, |
939 Context::SlotOffset(variable->index()), | 934 Context::SlotOffset(variable->index()), |
940 a1, | 935 a1, |
941 a3, | 936 a3, |
942 kRAHasBeenSaved, | 937 kRAHasBeenSaved, |
943 kDontSaveFPRegs, | 938 kDontSaveFPRegs, |
944 EMIT_REMEMBERED_SET, | 939 EMIT_REMEMBERED_SET, |
945 OMIT_SMI_CHECK); | 940 OMIT_SMI_CHECK); |
946 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); | 941 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); |
(...skipping 76 matching lines...)
1023 | 1018 |
1024 Comment cmnt(masm_, "[ Case comparison"); | 1019 Comment cmnt(masm_, "[ Case comparison"); |
1025 __ bind(&next_test); | 1020 __ bind(&next_test); |
1026 next_test.Unuse(); | 1021 next_test.Unuse(); |
1027 | 1022 |
1028 // Compile the label expression. | 1023 // Compile the label expression. |
1029 VisitForAccumulatorValue(clause->label()); | 1024 VisitForAccumulatorValue(clause->label()); |
1030 __ mov(a0, result_register()); // CompareStub requires args in a0, a1. | 1025 __ mov(a0, result_register()); // CompareStub requires args in a0, a1. |
1031 | 1026 |
1032 // Perform the comparison as if via '==='. | 1027 // Perform the comparison as if via '==='. |
1033 __ lw(a1, MemOperand(sp, 0)); // Switch value. | 1028 __ ld(a1, MemOperand(sp, 0)); // Switch value. |
1034 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); | 1029 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
1035 JumpPatchSite patch_site(masm_); | 1030 JumpPatchSite patch_site(masm_); |
1036 if (inline_smi_code) { | 1031 if (inline_smi_code) { |
1037 Label slow_case; | 1032 Label slow_case; |
1038 __ or_(a2, a1, a0); | 1033 __ or_(a2, a1, a0); |
1039 patch_site.EmitJumpIfNotSmi(a2, &slow_case); | 1034 patch_site.EmitJumpIfNotSmi(a2, &slow_case); |
1040 | 1035 |
1041 __ Branch(&next_test, ne, a1, Operand(a0)); | 1036 __ Branch(&next_test, ne, a1, Operand(a0)); |
1042 __ Drop(1); // Switch value is no longer needed. | 1037 __ Drop(1); // Switch value is no longer needed. |
1043 __ Branch(clause->body_target()); | 1038 __ Branch(clause->body_target()); |
(...skipping 53 matching lines...)
1097 Label loop, exit; | 1092 Label loop, exit; |
1098 ForIn loop_statement(this, stmt); | 1093 ForIn loop_statement(this, stmt); |
1099 increment_loop_depth(); | 1094 increment_loop_depth(); |
1100 | 1095 |
1101 // Get the object to enumerate over. If the object is null or undefined, skip | 1096 // Get the object to enumerate over. If the object is null or undefined, skip |
1102 // over the loop. See ECMA-262 version 5, section 12.6.4. | 1097 // over the loop. See ECMA-262 version 5, section 12.6.4. |
1103 VisitForAccumulatorValue(stmt->enumerable()); | 1098 VisitForAccumulatorValue(stmt->enumerable()); |
1104 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below. | 1099 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below. |
1105 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 1100 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
1106 __ Branch(&exit, eq, a0, Operand(at)); | 1101 __ Branch(&exit, eq, a0, Operand(at)); |
1107 Register null_value = t1; | 1102 Register null_value = a5; |
1108 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 1103 __ LoadRoot(null_value, Heap::kNullValueRootIndex); |
1109 __ Branch(&exit, eq, a0, Operand(null_value)); | 1104 __ Branch(&exit, eq, a0, Operand(null_value)); |
1110 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); | 1105 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); |
1111 __ mov(a0, v0); | 1106 __ mov(a0, v0); |
1112 // Convert the object to a JS object. | 1107 // Convert the object to a JS object. |
1113 Label convert, done_convert; | 1108 Label convert, done_convert; |
1114 __ JumpIfSmi(a0, &convert); | 1109 __ JumpIfSmi(a0, &convert); |
1115 __ GetObjectType(a0, a1, a1); | 1110 __ GetObjectType(a0, a1, a1); |
1116 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); | 1111 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); |
1117 __ bind(&convert); | 1112 __ bind(&convert); |
(...skipping 11 matching lines...)
1129 | 1124 |
1130 // Check cache validity in generated code. This is a fast case for | 1125 // Check cache validity in generated code. This is a fast case for |
1131 // the JSObject::IsSimpleEnum cache validity checks. If we cannot | 1126 // the JSObject::IsSimpleEnum cache validity checks. If we cannot |
1132 // guarantee cache validity, call the runtime system to check cache | 1127 // guarantee cache validity, call the runtime system to check cache |
1133 // validity or get the property names in a fixed array. | 1128 // validity or get the property names in a fixed array. |
1134 __ CheckEnumCache(null_value, &call_runtime); | 1129 __ CheckEnumCache(null_value, &call_runtime); |
1135 | 1130 |
1136 // The enum cache is valid. Load the map of the object being | 1131 // The enum cache is valid. Load the map of the object being |
1137 // iterated over and use the cache for the iteration. | 1132 // iterated over and use the cache for the iteration. |
1138 Label use_cache; | 1133 Label use_cache; |
1139 __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset)); | 1134 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset)); |
1140 __ Branch(&use_cache); | 1135 __ Branch(&use_cache); |
1141 | 1136 |
1142 // Get the set of properties to enumerate. | 1137 // Get the set of properties to enumerate. |
1143 __ bind(&call_runtime); | 1138 __ bind(&call_runtime); |
1144 __ push(a0); // Duplicate the enumerable object on the stack. | 1139 __ push(a0); // Duplicate the enumerable object on the stack. |
1145 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); | 1140 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
1146 | 1141 |
1147 // If we got a map from the runtime call, we can do a fast | 1142 // If we got a map from the runtime call, we can do a fast |
1148 // modification check. Otherwise, we got a fixed array, and we have | 1143 // modification check. Otherwise, we got a fixed array, and we have |
1149 // to do a slow check. | 1144 // to do a slow check. |
1150 Label fixed_array; | 1145 Label fixed_array; |
1151 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); | 1146 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); |
1152 __ LoadRoot(at, Heap::kMetaMapRootIndex); | 1147 __ LoadRoot(at, Heap::kMetaMapRootIndex); |
1153 __ Branch(&fixed_array, ne, a2, Operand(at)); | 1148 __ Branch(&fixed_array, ne, a2, Operand(at)); |
1154 | 1149 |
1155 // We got a map in register v0. Get the enumeration cache from it. | 1150 // We got a map in register v0. Get the enumeration cache from it. |
1156 Label no_descriptors; | 1151 Label no_descriptors; |
1157 __ bind(&use_cache); | 1152 __ bind(&use_cache); |
1158 | 1153 |
1159 __ EnumLength(a1, v0); | 1154 __ EnumLength(a1, v0); |
1160 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0))); | 1155 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0))); |
1161 | 1156 |
1162 __ LoadInstanceDescriptors(v0, a2); | 1157 __ LoadInstanceDescriptors(v0, a2); |
1163 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset)); | 1158 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset)); |
1164 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1159 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
1165 | 1160 |
1166 // Set up the four remaining stack slots. | 1161 // Set up the four remaining stack slots. |
1167 __ li(a0, Operand(Smi::FromInt(0))); | 1162 __ li(a0, Operand(Smi::FromInt(0))); |
1168 // Push map, enumeration cache, enumeration cache length (as smi) and zero. | 1163 // Push map, enumeration cache, enumeration cache length (as smi) and zero. |
1169 __ Push(v0, a2, a1, a0); | 1164 __ Push(v0, a2, a1, a0); |
1170 __ jmp(&loop); | 1165 __ jmp(&loop); |
1171 | 1166 |
1172 __ bind(&no_descriptors); | 1167 __ bind(&no_descriptors); |
1173 __ Drop(1); | 1168 __ Drop(1); |
1174 __ jmp(&exit); | 1169 __ jmp(&exit); |
1175 | 1170 |
1176 // We got a fixed array in register v0. Iterate through that. | 1171 // We got a fixed array in register v0. Iterate through that. |
1177 Label non_proxy; | 1172 Label non_proxy; |
1178 __ bind(&fixed_array); | 1173 __ bind(&fixed_array); |
1179 | 1174 |
1180 __ li(a1, FeedbackVector()); | 1175 __ li(a1, FeedbackVector()); |
1181 __ li(a2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); | 1176 __ li(a2, Operand(TypeFeedbackInfo::MegamorphicSentinel(isolate()))); |
1182 __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot))); | 1177 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot))); |
1183 | 1178 |
1184 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check | 1179 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check |
1185 __ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object | 1180 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object |
1186 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 1181 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
1187 __ GetObjectType(a2, a3, a3); | 1182 __ GetObjectType(a2, a3, a3); |
1188 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE)); | 1183 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE)); |
1189 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy | 1184 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy |
1190 __ bind(&non_proxy); | 1185 __ bind(&non_proxy); |
1191 __ Push(a1, v0); // Smi and array | 1186 __ Push(a1, v0); // Smi and array |
1192 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); | 1187 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
1193 __ li(a0, Operand(Smi::FromInt(0))); | 1188 __ li(a0, Operand(Smi::FromInt(0))); |
1194 __ Push(a1, a0); // Fixed array length (as smi) and initial index. | 1189 __ Push(a1, a0); // Fixed array length (as smi) and initial index. |
1195 | 1190 |
1196 // Generate code for doing the condition check. | 1191 // Generate code for doing the condition check. |
1197 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); | 1192 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); |
1198 __ bind(&loop); | 1193 __ bind(&loop); |
1199 // Load the current count to a0, load the length to a1. | 1194 // Load the current count to a0, load the length to a1. |
1200 __ lw(a0, MemOperand(sp, 0 * kPointerSize)); | 1195 __ ld(a0, MemOperand(sp, 0 * kPointerSize)); |
1201 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); | 1196 __ ld(a1, MemOperand(sp, 1 * kPointerSize)); |
1202 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1)); | 1197 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1)); |
1203 | 1198 |
1204 // Get the current entry of the array into register a3. | 1199 // Get the current entry of the array into register a3. |
1205 __ lw(a2, MemOperand(sp, 2 * kPointerSize)); | 1200 __ ld(a2, MemOperand(sp, 2 * kPointerSize)); |
1206 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1201 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
1207 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); | 1202 __ SmiScale(a4, a0, kPointerSizeLog2); |
1208 __ addu(t0, a2, t0); // Array base + scaled (smi) index. | 1203 __ daddu(a4, a2, a4); // Array base + scaled (smi) index. |
1209 __ lw(a3, MemOperand(t0)); // Current entry. | 1204 __ ld(a3, MemOperand(a4)); // Current entry. |
1210 | 1205 |
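The switch to SmiScale is one of the characteristic mips64 adjustments in this port: on 32-bit, a smi index is scaled up with a left shift, while on 64-bit the payload already sits in the upper word and is scaled down with a right shift (a sketch, assuming a 32-bit smi shift and kPointerSizeLog2 == 3):

```cpp
// Index-to-byte-offset scaling, 32-bit vs 64-bit (sketch):
//   mips32: offset = smi << (kPointerSizeLog2 - kSmiTagSize);  // 1-bit tag
//   mips64: offset = smi >> (kSmiShift - kPointerSizeLog2);    // SmiScale
// Both yield untagged_index * kPointerSize for the element address.
```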
1211 // Get the expected map from the stack or a smi in the | 1206 // Get the expected map from the stack or a smi in the |
1212 // permanent slow case into register a2. | 1207 // permanent slow case into register a2. |
1213 __ lw(a2, MemOperand(sp, 3 * kPointerSize)); | 1208 __ ld(a2, MemOperand(sp, 3 * kPointerSize)); |
1214 | 1209 |
1215 // Check if the expected map still matches that of the enumerable. | 1210 // Check if the expected map still matches that of the enumerable. |
1216 // If not, we may have to filter the key. | 1211 // If not, we may have to filter the key. |
1217 Label update_each; | 1212 Label update_each; |
1218 __ lw(a1, MemOperand(sp, 4 * kPointerSize)); | 1213 __ ld(a1, MemOperand(sp, 4 * kPointerSize)); |
1219 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); | 1214 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset)); |
1220 __ Branch(&update_each, eq, t0, Operand(a2)); | 1215 __ Branch(&update_each, eq, a4, Operand(a2)); |
1221 | 1216 |
1222 // For proxies, no filtering is done. | 1217 // For proxies, no filtering is done. |
1223 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. | 1218 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. |
1224 ASSERT_EQ(Smi::FromInt(0), 0); | 1219 ASSERT_EQ(Smi::FromInt(0), 0); |
1225 __ Branch(&update_each, eq, a2, Operand(zero_reg)); | 1220 __ Branch(&update_each, eq, a2, Operand(zero_reg)); |
1226 | 1221 |
1227 // Convert the entry to a string or (smi) 0 if it isn't a property | 1222 // Convert the entry to a string or (smi) 0 if it isn't a property |
1228 // any more. If the property has been removed while iterating, we | 1223 // any more. If the property has been removed while iterating, we |
1229 // just skip it. | 1224 // just skip it. |
1230 __ Push(a1, a3); // Enumerable and current entry. | 1225 __ Push(a1, a3); // Enumerable and current entry. |
(...skipping 10 matching lines...)
1241 EmitAssignment(stmt->each()); | 1236 EmitAssignment(stmt->each()); |
1242 } | 1237 } |
1243 | 1238 |
1244 // Generate code for the body of the loop. | 1239 // Generate code for the body of the loop. |
1245 Visit(stmt->body()); | 1240 Visit(stmt->body()); |
1246 | 1241 |
1247 // Generate code for going to the next element by incrementing | 1242 // Generate code for going to the next element by incrementing |
1248 // the index (smi) stored on top of the stack. | 1243 // the index (smi) stored on top of the stack. |
1249 __ bind(loop_statement.continue_label()); | 1244 __ bind(loop_statement.continue_label()); |
1250 __ pop(a0); | 1245 __ pop(a0); |
1251 __ Addu(a0, a0, Operand(Smi::FromInt(1))); | 1246 __ Daddu(a0, a0, Operand(Smi::FromInt(1))); |
1252 __ push(a0); | 1247 __ push(a0); |
1253 | 1248 |
1254 EmitBackEdgeBookkeeping(stmt, &loop); | 1249 EmitBackEdgeBookkeeping(stmt, &loop); |
1255 __ Branch(&loop); | 1250 __ Branch(&loop); |
1256 | 1251 |
1257 // Remove the pointers stored on the stack. | 1252 // Remove the pointers stored on the stack. |
1258 __ bind(loop_statement.break_label()); | 1253 __ bind(loop_statement.break_label()); |
1259 __ Drop(5); | 1254 __ Drop(5); |
1260 | 1255 |
1261 // Exit and decrement the loop depth. | 1256 // Exit and decrement the loop depth. |
(...skipping 95 matching lines...)
1357 Label* slow) { | 1352 Label* slow) { |
1358 Register current = cp; | 1353 Register current = cp; |
1359 Register next = a1; | 1354 Register next = a1; |
1360 Register temp = a2; | 1355 Register temp = a2; |
1361 | 1356 |
1362 Scope* s = scope(); | 1357 Scope* s = scope(); |
1363 while (s != NULL) { | 1358 while (s != NULL) { |
1364 if (s->num_heap_slots() > 0) { | 1359 if (s->num_heap_slots() > 0) { |
1365 if (s->calls_sloppy_eval()) { | 1360 if (s->calls_sloppy_eval()) { |
1366 // Check that extension is NULL. | 1361 // Check that extension is NULL. |
1367 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX)); | 1362 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX)); |
1368 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1363 __ Branch(slow, ne, temp, Operand(zero_reg)); |
1369 } | 1364 } |
1370 // Load next context in chain. | 1365 // Load next context in chain. |
1371 __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX)); | 1366 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX)); |
1372 // Walk the rest of the chain without clobbering cp. | 1367 // Walk the rest of the chain without clobbering cp. |
1373 current = next; | 1368 current = next; |
1374 } | 1369 } |
1375 // If no outer scope calls eval, we do not need to check more | 1370 // If no outer scope calls eval, we do not need to check more |
1376 // context extensions. | 1371 // context extensions. |
1377 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; | 1372 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; |
1378 s = s->outer_scope(); | 1373 s = s->outer_scope(); |
1379 } | 1374 } |
1380 | 1375 |
1381 if (s->is_eval_scope()) { | 1376 if (s->is_eval_scope()) { |
1382 Label loop, fast; | 1377 Label loop, fast; |
1383 if (!current.is(next)) { | 1378 if (!current.is(next)) { |
1384 __ Move(next, current); | 1379 __ Move(next, current); |
1385 } | 1380 } |
1386 __ bind(&loop); | 1381 __ bind(&loop); |
1387 // Terminate at native context. | 1382 // Terminate at native context. |
1388 __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset)); | 1383 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset)); |
1389 __ LoadRoot(t0, Heap::kNativeContextMapRootIndex); | 1384 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex); |
1390 __ Branch(&fast, eq, temp, Operand(t0)); | 1385 __ Branch(&fast, eq, temp, Operand(a4)); |
1391 // Check that extension is NULL. | 1386 // Check that extension is NULL. |
1392 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX)); | 1387 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX)); |
1393 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1388 __ Branch(slow, ne, temp, Operand(zero_reg)); |
1394 // Load next context in chain. | 1389 // Load next context in chain. |
1395 __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX)); | 1390 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX)); |
1396 __ Branch(&loop); | 1391 __ Branch(&loop); |
1397 __ bind(&fast); | 1392 __ bind(&fast); |
1398 } | 1393 } |
1399 | 1394 |
1400 __ lw(LoadIC::ReceiverRegister(), GlobalObjectOperand()); | 1395 __ ld(LoadIC::ReceiverRegister(), GlobalObjectOperand()); |
1401 __ li(LoadIC::NameRegister(), Operand(var->name())); | 1396 __ li(LoadIC::NameRegister(), Operand(var->name())); |
1402 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) | 1397 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) |
1403 ? NOT_CONTEXTUAL | 1398 ? NOT_CONTEXTUAL |
1404 : CONTEXTUAL; | 1399 : CONTEXTUAL; |
1405 CallLoadIC(mode); | 1400 CallLoadIC(mode); |
1406 } | 1401 } |
1407 | 1402 |
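The loop in the function above runs at compile time: it emits one extension check per enclosing scope, plus a run-time walk for eval scopes. The emitted code behaves roughly like this (comment sketch):

```cpp
// Run-time behavior of the emitted checks (sketch):
//   for (Context* c = current_context; ; c = c->previous()) {
//     if (c->extension() != NULL) goto slow;    // eval may have added bindings
//     if (c has the native context map) break;  // reached the top: fast path
//   }
//   // Fast path then loads the global through the contextual LoadIC.
```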
1408 | 1403 |
1409 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, | 1404 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
1410 Label* slow) { | 1405 Label* slow) { |
1411 ASSERT(var->IsContextSlot()); | 1406 ASSERT(var->IsContextSlot()); |
1412 Register context = cp; | 1407 Register context = cp; |
1413 Register next = a3; | 1408 Register next = a3; |
1414 Register temp = t0; | 1409 Register temp = a4; |
1415 | 1410 |
1416 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { | 1411 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
1417 if (s->num_heap_slots() > 0) { | 1412 if (s->num_heap_slots() > 0) { |
1418 if (s->calls_sloppy_eval()) { | 1413 if (s->calls_sloppy_eval()) { |
1419 // Check that extension is NULL. | 1414 // Check that extension is NULL. |
1420 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | 1415 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
1421 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1416 __ Branch(slow, ne, temp, Operand(zero_reg)); |
1422 } | 1417 } |
1423 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX)); | 1418 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX)); |
1424 // Walk the rest of the chain without clobbering cp. | 1419 // Walk the rest of the chain without clobbering cp. |
1425 context = next; | 1420 context = next; |
1426 } | 1421 } |
1427 } | 1422 } |
1428 // Check that last extension is NULL. | 1423 // Check that last extension is NULL. |
1429 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | 1424 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
1430 __ Branch(slow, ne, temp, Operand(zero_reg)); | 1425 __ Branch(slow, ne, temp, Operand(zero_reg)); |
1431 | 1426 |
1432 // This function is used only for loads, not stores, so it's safe to | 1427 // This function is used only for loads, not stores, so it's safe to |
1433 // return a cp-based operand (the write barrier cannot be allowed to | 1428 // return a cp-based operand (the write barrier cannot be allowed to |
1434 // destroy the cp register). | 1429 // destroy the cp register). |
1435 return ContextOperand(context, var->index()); | 1430 return ContextOperand(context, var->index()); |
1436 } | 1431 } |
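
Both extension-check loops above (EmitLoadGlobalCheckExtensions and ContextSlotOperandCheckExtensions) compile to the same walk: follow the Context::PREVIOUS_INDEX links and bail to the slow path the moment any context on the way carries an extension object, since an eval-introduced binding could live there. A minimal standalone C++ sketch of that walk; the Context struct here is a hypothetical stand-in, not V8's real heap layout:

    struct Context {
      Context* previous;   // plays the role of Context::PREVIOUS_INDEX
      void* extension;     // Context::EXTENSION_INDEX; non-null => eval extension
    };

    // Returns the target context, or nullptr to signal "take the slow path",
    // mirroring the Branch(slow, ne, temp, Operand(zero_reg)) checks above.
    Context* WalkCheckingExtensions(Context* current, int hops) {
      for (int i = 0; i < hops; ++i) {
        if (current->extension != nullptr) return nullptr;
        current = current->previous;  // walk without clobbering the original
      }
      // The "last extension" check before returning the slot operand.
      return current->extension == nullptr ? current : nullptr;
    }
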
1437 | 1432 |
1438 | 1433 |
1439 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, | 1434 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, |
1440 TypeofState typeof_state, | 1435 TypeofState typeof_state, |
1441 Label* slow, | 1436 Label* slow, |
1442 Label* done) { | 1437 Label* done) { |
1443 // Generate fast-case code for variables that might be shadowed by | 1438 // Generate fast-case code for variables that might be shadowed by |
1444 // eval-introduced variables. Eval is used a lot without | 1439 // eval-introduced variables. Eval is used a lot without |
1445 // introducing variables. In those cases, we do not want to | 1440 // introducing variables. In those cases, we do not want to |
1446 // perform a runtime call for all variables in the scope | 1441 // perform a runtime call for all variables in the scope |
1447 // containing the eval. | 1442 // containing the eval. |
1448 if (var->mode() == DYNAMIC_GLOBAL) { | 1443 if (var->mode() == DYNAMIC_GLOBAL) { |
1449 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); | 1444 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); |
1450 __ Branch(done); | 1445 __ Branch(done); |
1451 } else if (var->mode() == DYNAMIC_LOCAL) { | 1446 } else if (var->mode() == DYNAMIC_LOCAL) { |
1452 Variable* local = var->local_if_not_shadowed(); | 1447 Variable* local = var->local_if_not_shadowed(); |
1453 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow)); | 1448 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow)); |
1454 if (local->mode() == LET || local->mode() == CONST || | 1449 if (local->mode() == LET || local->mode() == CONST || |
1455 local->mode() == CONST_LEGACY) { | 1450 local->mode() == CONST_LEGACY) { |
1456 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 1451 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
1457 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. | 1452 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq. |
1458 if (local->mode() == CONST_LEGACY) { | 1453 if (local->mode() == CONST_LEGACY) { |
1459 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); | 1454 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); |
1460 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole. | 1455 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole. |
1461 } else { // LET || CONST | 1456 } else { // LET || CONST |
1462 __ Branch(done, ne, at, Operand(zero_reg)); | 1457 __ Branch(done, ne, at, Operand(zero_reg)); |
1463 __ li(a0, Operand(var->name())); | 1458 __ li(a0, Operand(var->name())); |
1464 __ push(a0); | 1459 __ push(a0); |
1465 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1460 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
1466 } | 1461 } |
1467 } | 1462 } |
1468 __ Branch(done); | 1463 __ Branch(done); |
1469 } | 1464 } |
1470 } | 1465 } |
1471 | 1466 |
1472 | 1467 |
1473 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { | 1468 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { |
1474 // Record position before possible IC call. | 1469 // Record position before possible IC call. |
1475 SetSourcePosition(proxy->position()); | 1470 SetSourcePosition(proxy->position()); |
1476 Variable* var = proxy->var(); | 1471 Variable* var = proxy->var(); |
1477 | 1472 |
1478 // Three cases: global variables, lookup variables, and all other types of | 1473 // Three cases: global variables, lookup variables, and all other types of |
1479 // variables. | 1474 // variables. |
1480 switch (var->location()) { | 1475 switch (var->location()) { |
1481 case Variable::UNALLOCATED: { | 1476 case Variable::UNALLOCATED: { |
1482 Comment cmnt(masm_, "[ Global variable"); | 1477 Comment cmnt(masm_, "[ Global variable"); |
1483 __ lw(LoadIC::ReceiverRegister(), GlobalObjectOperand()); | 1478 // Use inline caching. Variable name is passed in the LoadIC name |
 | 1479 // register and the global object (receiver) in the receiver register. |
| 1480 __ ld(LoadIC::ReceiverRegister(), GlobalObjectOperand()); |
1484 __ li(LoadIC::NameRegister(), Operand(var->name())); | 1481 __ li(LoadIC::NameRegister(), Operand(var->name())); |
1485 CallLoadIC(CONTEXTUAL); | 1482 CallLoadIC(CONTEXTUAL); |
1486 context()->Plug(v0); | 1483 context()->Plug(v0); |
1487 break; | 1484 break; |
1488 } | 1485 } |
1489 | 1486 |
1490 case Variable::PARAMETER: | 1487 case Variable::PARAMETER: |
1491 case Variable::LOCAL: | 1488 case Variable::LOCAL: |
1492 case Variable::CONTEXT: { | 1489 case Variable::CONTEXT: { |
1493 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" | 1490 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" |
(...skipping 30 matching lines...)
1524 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); | 1521 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); |
1525 ASSERT(proxy->position() != RelocInfo::kNoPosition); | 1522 ASSERT(proxy->position() != RelocInfo::kNoPosition); |
1526 skip_init_check = var->mode() != CONST_LEGACY && | 1523 skip_init_check = var->mode() != CONST_LEGACY && |
1527 var->initializer_position() < proxy->position(); | 1524 var->initializer_position() < proxy->position(); |
1528 } | 1525 } |
1529 | 1526 |
1530 if (!skip_init_check) { | 1527 if (!skip_init_check) { |
1531 // Let and const need a read barrier. | 1528 // Let and const need a read barrier. |
1532 GetVar(v0, var); | 1529 GetVar(v0, var); |
1533 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 1530 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
1534 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. | 1531 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq. |
1535 if (var->mode() == LET || var->mode() == CONST) { | 1532 if (var->mode() == LET || var->mode() == CONST) { |
1536 // Throw a reference error when using an uninitialized let/const | 1533 // Throw a reference error when using an uninitialized let/const |
1537 // binding in harmony mode. | 1534 // binding in harmony mode. |
1538 Label done; | 1535 Label done; |
1539 __ Branch(&done, ne, at, Operand(zero_reg)); | 1536 __ Branch(&done, ne, at, Operand(zero_reg)); |
1540 __ li(a0, Operand(var->name())); | 1537 __ li(a0, Operand(var->name())); |
1541 __ push(a0); | 1538 __ push(a0); |
1542 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1539 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
1543 __ bind(&done); | 1540 __ bind(&done); |
1544 } else { | 1541 } else { |
(...skipping 24 matching lines...)
1569 context()->Plug(v0); | 1566 context()->Plug(v0); |
1570 } | 1567 } |
1571 } | 1568 } |
1572 } | 1569 } |
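
The skip_init_check logic above boils down to a hole check, i.e. a read barrier for let/const: load the slot, compare it against the the-hole sentinel, then either throw (let/const) or substitute undefined (legacy const). A hedged sketch of that decision in plain C++, with kTheHole/kUndefined as assumed sentinels rather than V8's real roots:

    #include <stdexcept>
    #include <string>

    enum class Mode { LET, CONST, CONST_LEGACY };
    using Value = const char*;                 // toy tagged value
    static const Value kTheHole = "the_hole";  // stand-in for the hole root
    static const Value kUndefined = "undefined";

    // Mirrors GetVar + LoadRoot + dsubu-as-compare + Movz/Branch above.
    Value LoadWithReadBarrier(Value slot, Mode mode, const std::string& name) {
      if (slot == kTheHole) {                         // "at == 0 on eq"
        if (mode == Mode::CONST_LEGACY) return kUndefined;  // the Movz
        throw std::runtime_error("ReferenceError: " + name);  // kThrowReferenceError
      }
      return slot;
    }
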
1573 | 1570 |
1574 | 1571 |
1575 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | 1572 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { |
1576 Comment cmnt(masm_, "[ RegExpLiteral"); | 1573 Comment cmnt(masm_, "[ RegExpLiteral"); |
1577 Label materialized; | 1574 Label materialized; |
1578 // Registers will be used as follows: | 1575 // Registers will be used as follows: |
1579 // t1 = materialized value (RegExp literal) | 1576 // a5 = materialized value (RegExp literal) |
1580 // t0 = JS function, literals array | 1577 // a4 = JS function, literals array |
1581 // a3 = literal index | 1578 // a3 = literal index |
1582 // a2 = RegExp pattern | 1579 // a2 = RegExp pattern |
1583 // a1 = RegExp flags | 1580 // a1 = RegExp flags |
1584 // a0 = RegExp literal clone | 1581 // a0 = RegExp literal clone |
1585 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1582 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1586 __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset)); | 1583 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset)); |
1587 int literal_offset = | 1584 int literal_offset = |
1588 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; | 1585 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; |
1589 __ lw(t1, FieldMemOperand(t0, literal_offset)); | 1586 __ ld(a5, FieldMemOperand(a4, literal_offset)); |
1590 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 1587 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
1591 __ Branch(&materialized, ne, t1, Operand(at)); | 1588 __ Branch(&materialized, ne, a5, Operand(at)); |
1592 | 1589 |
1593 // Create regexp literal using runtime function. | 1590 // Create regexp literal using runtime function. |
1594 // Result will be in v0. | 1591 // Result will be in v0. |
1595 __ li(a3, Operand(Smi::FromInt(expr->literal_index()))); | 1592 __ li(a3, Operand(Smi::FromInt(expr->literal_index()))); |
1596 __ li(a2, Operand(expr->pattern())); | 1593 __ li(a2, Operand(expr->pattern())); |
1597 __ li(a1, Operand(expr->flags())); | 1594 __ li(a1, Operand(expr->flags())); |
1598 __ Push(t0, a3, a2, a1); | 1595 __ Push(a4, a3, a2, a1); |
1599 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | 1596 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
1600 __ mov(t1, v0); | 1597 __ mov(a5, v0); |
1601 | 1598 |
1602 __ bind(&materialized); | 1599 __ bind(&materialized); |
1603 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 1600 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
1604 Label allocated, runtime_allocate; | 1601 Label allocated, runtime_allocate; |
1605 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); | 1602 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); |
1606 __ jmp(&allocated); | 1603 __ jmp(&allocated); |
1607 | 1604 |
1608 __ bind(&runtime_allocate); | 1605 __ bind(&runtime_allocate); |
1609 __ li(a0, Operand(Smi::FromInt(size))); | 1606 __ li(a0, Operand(Smi::FromInt(size))); |
1610 __ Push(t1, a0); | 1607 __ Push(a5, a0); |
1611 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | 1608 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
1612 __ pop(t1); | 1609 __ pop(a5); |
1613 | 1610 |
1614 __ bind(&allocated); | 1611 __ bind(&allocated); |
1615 | 1612 |
1616 // After this, registers are used as follows: | 1613 // After this, registers are used as follows: |
1617 // v0: Newly allocated regexp. | 1614 // v0: Newly allocated regexp. |
1618 // t1: Materialized regexp. | 1615 // a5: Materialized regexp. |
1619 // a2: temp. | 1616 // a2: temp. |
1620 __ CopyFields(v0, t1, a2.bit(), size / kPointerSize); | 1617 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize); |
1621 context()->Plug(v0); | 1618 context()->Plug(v0); |
1622 } | 1619 } |
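
The regexp-literal path above is a materialize-once cache: the literals-array slot starts out undefined, the first execution calls the runtime and stores the boilerplate, and every later execution only shallow-copies it. A self-contained sketch of the caching pattern; RegExpBoilerplate and GetRegExpLiteral are placeholders for illustration, not V8 API:

    #include <memory>
    #include <string>
    #include <vector>

    struct RegExpBoilerplate {                 // placeholder, not a V8 type
      std::string pattern, flags;
    };

    // literals[index] plays the role of the JSFunction's literals-array slot,
    // which starts out undefined (here: an empty shared_ptr).
    static std::vector<std::shared_ptr<RegExpBoilerplate>> literals(16);

    std::shared_ptr<RegExpBoilerplate> GetRegExpLiteral(
        int index, const std::string& pattern, const std::string& flags) {
      if (!literals[index]) {  // first hit: "materialize" via the runtime
        literals[index] = std::make_shared<RegExpBoilerplate>(
            RegExpBoilerplate{pattern, flags});
      }
      // Every hit returns a shallow copy, like the CopyFields clone above.
      return std::make_shared<RegExpBoilerplate>(*literals[index]);
    }
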
1623 | 1620 |
1624 | 1621 |
1625 void FullCodeGenerator::EmitAccessor(Expression* expression) { | 1622 void FullCodeGenerator::EmitAccessor(Expression* expression) { |
1626 if (expression == NULL) { | 1623 if (expression == NULL) { |
1627 __ LoadRoot(a1, Heap::kNullValueRootIndex); | 1624 __ LoadRoot(a1, Heap::kNullValueRootIndex); |
1628 __ push(a1); | 1625 __ push(a1); |
1629 } else { | 1626 } else { |
1630 VisitForStackValue(expression); | 1627 VisitForStackValue(expression); |
1631 } | 1628 } |
1632 } | 1629 } |
1633 | 1630 |
1634 | 1631 |
1635 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { | 1632 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { |
1636 Comment cmnt(masm_, "[ ObjectLiteral"); | 1633 Comment cmnt(masm_, "[ ObjectLiteral"); |
1637 | 1634 |
1638 expr->BuildConstantProperties(isolate()); | 1635 expr->BuildConstantProperties(isolate()); |
1639 Handle<FixedArray> constant_properties = expr->constant_properties(); | 1636 Handle<FixedArray> constant_properties = expr->constant_properties(); |
1640 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1637 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1641 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); | 1638 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); |
1642 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); | 1639 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); |
1643 __ li(a1, Operand(constant_properties)); | 1640 __ li(a1, Operand(constant_properties)); |
1644 int flags = expr->fast_elements() | 1641 int flags = expr->fast_elements() |
1645 ? ObjectLiteral::kFastElements | 1642 ? ObjectLiteral::kFastElements |
1646 : ObjectLiteral::kNoFlags; | 1643 : ObjectLiteral::kNoFlags; |
1647 flags |= expr->has_function() | 1644 flags |= expr->has_function() |
1648 ? ObjectLiteral::kHasFunction | 1645 ? ObjectLiteral::kHasFunction |
1649 : ObjectLiteral::kNoFlags; | 1646 : ObjectLiteral::kNoFlags; |
1650 __ li(a0, Operand(Smi::FromInt(flags))); | 1647 __ li(a0, Operand(Smi::FromInt(flags))); |
1651 int properties_count = constant_properties->length() / 2; | 1648 int properties_count = constant_properties->length() / 2; |
(...skipping 32 matching lines...)
1684 UNREACHABLE(); | 1681 UNREACHABLE(); |
1685 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | 1682 case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
1686 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); | 1683 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); |
1687 // Fall through. | 1684 // Fall through. |
1688 case ObjectLiteral::Property::COMPUTED: | 1685 case ObjectLiteral::Property::COMPUTED: |
1689 if (key->value()->IsInternalizedString()) { | 1686 if (key->value()->IsInternalizedString()) { |
1690 if (property->emit_store()) { | 1687 if (property->emit_store()) { |
1691 VisitForAccumulatorValue(value); | 1688 VisitForAccumulatorValue(value); |
1692 __ mov(a0, result_register()); | 1689 __ mov(a0, result_register()); |
1693 __ li(a2, Operand(key->value())); | 1690 __ li(a2, Operand(key->value())); |
1694 __ lw(a1, MemOperand(sp)); | 1691 __ ld(a1, MemOperand(sp)); |
1695 CallStoreIC(key->LiteralFeedbackId()); | 1692 CallStoreIC(key->LiteralFeedbackId()); |
1696 PrepareForBailoutForId(key->id(), NO_REGISTERS); | 1693 PrepareForBailoutForId(key->id(), NO_REGISTERS); |
1697 } else { | 1694 } else { |
1698 VisitForEffect(value); | 1695 VisitForEffect(value); |
1699 } | 1696 } |
1700 break; | 1697 break; |
1701 } | 1698 } |
1702 // Duplicate receiver on stack. | 1699 // Duplicate receiver on stack. |
1703 __ lw(a0, MemOperand(sp)); | 1700 __ ld(a0, MemOperand(sp)); |
1704 __ push(a0); | 1701 __ push(a0); |
1705 VisitForStackValue(key); | 1702 VisitForStackValue(key); |
1706 VisitForStackValue(value); | 1703 VisitForStackValue(value); |
1707 if (property->emit_store()) { | 1704 if (property->emit_store()) { |
1708 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes. | 1705 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes. |
1709 __ push(a0); | 1706 __ push(a0); |
1710 __ CallRuntime(Runtime::kSetProperty, 4); | 1707 __ CallRuntime(Runtime::kSetProperty, 4); |
1711 } else { | 1708 } else { |
1712 __ Drop(3); | 1709 __ Drop(3); |
1713 } | 1710 } |
1714 break; | 1711 break; |
1715 case ObjectLiteral::Property::PROTOTYPE: | 1712 case ObjectLiteral::Property::PROTOTYPE: |
1716 // Duplicate receiver on stack. | 1713 // Duplicate receiver on stack. |
1717 __ lw(a0, MemOperand(sp)); | 1714 __ ld(a0, MemOperand(sp)); |
1718 __ push(a0); | 1715 __ push(a0); |
1719 VisitForStackValue(value); | 1716 VisitForStackValue(value); |
1720 if (property->emit_store()) { | 1717 if (property->emit_store()) { |
1721 __ CallRuntime(Runtime::kSetPrototype, 2); | 1718 __ CallRuntime(Runtime::kSetPrototype, 2); |
1722 } else { | 1719 } else { |
1723 __ Drop(2); | 1720 __ Drop(2); |
1724 } | 1721 } |
1725 break; | 1722 break; |
1726 case ObjectLiteral::Property::GETTER: | 1723 case ObjectLiteral::Property::GETTER: |
1727 accessor_table.lookup(key)->second->getter = value; | 1724 accessor_table.lookup(key)->second->getter = value; |
1728 break; | 1725 break; |
1729 case ObjectLiteral::Property::SETTER: | 1726 case ObjectLiteral::Property::SETTER: |
1730 accessor_table.lookup(key)->second->setter = value; | 1727 accessor_table.lookup(key)->second->setter = value; |
1731 break; | 1728 break; |
1732 } | 1729 } |
1733 } | 1730 } |
1734 | 1731 |
1735 // Emit code to define accessors, using only a single call to the runtime for | 1732 // Emit code to define accessors, using only a single call to the runtime for |
1736 // each pair of corresponding getters and setters. | 1733 // each pair of corresponding getters and setters. |
1737 for (AccessorTable::Iterator it = accessor_table.begin(); | 1734 for (AccessorTable::Iterator it = accessor_table.begin(); |
1738 it != accessor_table.end(); | 1735 it != accessor_table.end(); |
1739 ++it) { | 1736 ++it) { |
1740 __ lw(a0, MemOperand(sp)); // Duplicate receiver. | 1737 __ ld(a0, MemOperand(sp)); // Duplicate receiver. |
1741 __ push(a0); | 1738 __ push(a0); |
1742 VisitForStackValue(it->first); | 1739 VisitForStackValue(it->first); |
1743 EmitAccessor(it->second->getter); | 1740 EmitAccessor(it->second->getter); |
1744 EmitAccessor(it->second->setter); | 1741 EmitAccessor(it->second->setter); |
1745 __ li(a0, Operand(Smi::FromInt(NONE))); | 1742 __ li(a0, Operand(Smi::FromInt(NONE))); |
1746 __ push(a0); | 1743 __ push(a0); |
1747 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); | 1744 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); |
1748 } | 1745 } |
1749 | 1746 |
1750 if (expr->has_function()) { | 1747 if (expr->has_function()) { |
1751 ASSERT(result_saved); | 1748 ASSERT(result_saved); |
1752 __ lw(a0, MemOperand(sp)); | 1749 __ ld(a0, MemOperand(sp)); |
1753 __ push(a0); | 1750 __ push(a0); |
1754 __ CallRuntime(Runtime::kToFastProperties, 1); | 1751 __ CallRuntime(Runtime::kToFastProperties, 1); |
1755 } | 1752 } |
1756 | 1753 |
1757 if (result_saved) { | 1754 if (result_saved) { |
1758 context()->PlugTOS(); | 1755 context()->PlugTOS(); |
1759 } else { | 1756 } else { |
1760 context()->Plug(v0); | 1757 context()->Plug(v0); |
1761 } | 1758 } |
1762 } | 1759 } |
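
The accessor_table pass above exists so that a key defining both a getter and a setter costs one runtime call instead of two: GETTER and SETTER entries are merged per key first, then defined together via kDefineAccessorPropertyUnchecked. A sketch of that pairing with std::map standing in for V8's AccessorTable; DefineAccessorProperty is a placeholder for the runtime call:

    #include <map>
    #include <optional>
    #include <string>

    struct AccessorPair {                      // getter/setter, either optional
      std::optional<std::string> getter, setter;
    };

    static void DefineAccessorProperty(const std::string& key,
                                       const std::string& getter,
                                       const std::string& setter) {
      // Placeholder for Runtime::kDefineAccessorPropertyUnchecked.
      (void)key; (void)getter; (void)setter;
    }

    void DefineAccessors(const std::map<std::string, AccessorPair>& table) {
      for (const auto& [key, pair] : table) {
        // One combined call per key, even when both halves are present.
        DefineAccessorProperty(key, pair.getter.value_or("null"),
                               pair.setter.value_or("null"));
      }
    }
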
(...skipping 20 matching lines...)
1783 FixedArrayBase::cast(constant_elements->get(1))); | 1780 FixedArrayBase::cast(constant_elements->get(1))); |
1784 | 1781 |
1785 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; | 1782 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; |
1786 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { | 1783 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { |
1787 // If the only customer of allocation sites is transitioning, then | 1784 // If the only customer of allocation sites is transitioning, then |
1788 // we can turn it off if we don't have anywhere else to transition to. | 1785 // we can turn it off if we don't have anywhere else to transition to. |
1789 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | 1786 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; |
1790 } | 1787 } |
1791 | 1788 |
1792 __ mov(a0, result_register()); | 1789 __ mov(a0, result_register()); |
1793 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1790 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
1794 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); | 1791 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); |
1795 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); | 1792 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); |
1796 __ li(a1, Operand(constant_elements)); | 1793 __ li(a1, Operand(constant_elements)); |
1797 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) { | 1794 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) { |
1798 __ li(a0, Operand(Smi::FromInt(flags))); | 1795 __ li(a0, Operand(Smi::FromInt(flags))); |
1799 __ Push(a3, a2, a1, a0); | 1796 __ Push(a3, a2, a1, a0); |
1800 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); | 1797 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); |
1801 } else { | 1798 } else { |
1802 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); | 1799 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); |
1803 __ CallStub(&stub); | 1800 __ CallStub(&stub); |
1804 } | 1801 } |
(...skipping 11 matching lines...)
1816 if (!result_saved) { | 1813 if (!result_saved) { |
1817 __ push(v0); // array literal | 1814 __ push(v0); // array literal |
1818 __ Push(Smi::FromInt(expr->literal_index())); | 1815 __ Push(Smi::FromInt(expr->literal_index())); |
1819 result_saved = true; | 1816 result_saved = true; |
1820 } | 1817 } |
1821 | 1818 |
1822 VisitForAccumulatorValue(subexpr); | 1819 VisitForAccumulatorValue(subexpr); |
1823 | 1820 |
1824 if (IsFastObjectElementsKind(constant_elements_kind)) { | 1821 if (IsFastObjectElementsKind(constant_elements_kind)) { |
1825 int offset = FixedArray::kHeaderSize + (i * kPointerSize); | 1822 int offset = FixedArray::kHeaderSize + (i * kPointerSize); |
1826 __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal. | 1823 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal. |
1827 __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset)); | 1824 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset)); |
1828 __ sw(result_register(), FieldMemOperand(a1, offset)); | 1825 __ sd(result_register(), FieldMemOperand(a1, offset)); |
1829 // Update the write barrier for the array store. | 1826 // Update the write barrier for the array store. |
1830 __ RecordWriteField(a1, offset, result_register(), a2, | 1827 __ RecordWriteField(a1, offset, result_register(), a2, |
1831 kRAHasBeenSaved, kDontSaveFPRegs, | 1828 kRAHasBeenSaved, kDontSaveFPRegs, |
1832 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); | 1829 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); |
1833 } else { | 1830 } else { |
1834 __ li(a3, Operand(Smi::FromInt(i))); | 1831 __ li(a3, Operand(Smi::FromInt(i))); |
1835 __ mov(a0, result_register()); | 1832 __ mov(a0, result_register()); |
1836 StoreArrayLiteralElementStub stub(isolate()); | 1833 StoreArrayLiteralElementStub stub(isolate()); |
1837 __ CallStub(&stub); | 1834 __ CallStub(&stub); |
1838 } | 1835 } |
(...skipping 27 matching lines...)
1866 | 1863 |
1867 // Evaluate LHS expression. | 1864 // Evaluate LHS expression. |
1868 switch (assign_type) { | 1865 switch (assign_type) { |
1869 case VARIABLE: | 1866 case VARIABLE: |
1870 // Nothing to do here. | 1867 // Nothing to do here. |
1871 break; | 1868 break; |
1872 case NAMED_PROPERTY: | 1869 case NAMED_PROPERTY: |
1873 if (expr->is_compound()) { | 1870 if (expr->is_compound()) { |
1874 // We need the receiver both on the stack and in the register. | 1871 // We need the receiver both on the stack and in the register. |
1875 VisitForStackValue(property->obj()); | 1872 VisitForStackValue(property->obj()); |
1876 __ lw(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); | 1873 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); |
1877 } else { | 1874 } else { |
1878 VisitForStackValue(property->obj()); | 1875 VisitForStackValue(property->obj()); |
1879 } | 1876 } |
1880 break; | 1877 break; |
1881 case KEYED_PROPERTY: | 1878 case KEYED_PROPERTY: |
1882 // We need the key and receiver both on the stack and in the LoadIC registers. | 1879 // We need the key and receiver both on the stack and in the LoadIC registers. |
1883 if (expr->is_compound()) { | 1880 if (expr->is_compound()) { |
1884 VisitForStackValue(property->obj()); | 1881 VisitForStackValue(property->obj()); |
1885 VisitForStackValue(property->key()); | 1882 VisitForStackValue(property->key()); |
1886 __ lw(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize)); | 1883 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize)); |
1887 __ lw(LoadIC::NameRegister(), MemOperand(sp, 0)); | 1884 __ ld(LoadIC::NameRegister(), MemOperand(sp, 0)); |
1888 } else { | 1885 } else { |
1889 VisitForStackValue(property->obj()); | 1886 VisitForStackValue(property->obj()); |
1890 VisitForStackValue(property->key()); | 1887 VisitForStackValue(property->key()); |
1891 } | 1888 } |
1892 break; | 1889 break; |
1893 } | 1890 } |
1894 | 1891 |
1895 // For compound assignments we need another deoptimization point after the | 1892 // For compound assignments we need another deoptimization point after the |
1896 // variable/property load. | 1893 // variable/property load. |
1897 if (expr->is_compound()) { | 1894 if (expr->is_compound()) { |
(...skipping 77 matching lines...)
1975 | 1972 |
1976 __ jmp(&suspend); | 1973 __ jmp(&suspend); |
1977 | 1974 |
1978 __ bind(&continuation); | 1975 __ bind(&continuation); |
1979 __ jmp(&resume); | 1976 __ jmp(&resume); |
1980 | 1977 |
1981 __ bind(&suspend); | 1978 __ bind(&suspend); |
1982 VisitForAccumulatorValue(expr->generator_object()); | 1979 VisitForAccumulatorValue(expr->generator_object()); |
1983 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); | 1980 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); |
1984 __ li(a1, Operand(Smi::FromInt(continuation.pos()))); | 1981 __ li(a1, Operand(Smi::FromInt(continuation.pos()))); |
1985 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset)); | 1982 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset)); |
1986 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset)); | 1983 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset)); |
1987 __ mov(a1, cp); | 1984 __ mov(a1, cp); |
1988 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2, | 1985 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2, |
1989 kRAHasBeenSaved, kDontSaveFPRegs); | 1986 kRAHasBeenSaved, kDontSaveFPRegs); |
1990 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); | 1987 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); |
1991 __ Branch(&post_runtime, eq, sp, Operand(a1)); | 1988 __ Branch(&post_runtime, eq, sp, Operand(a1)); |
1992 __ push(v0); // generator object | 1989 __ push(v0); // generator object |
1993 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 1990 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
1994 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 1991 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
1995 __ bind(&post_runtime); | 1992 __ bind(&post_runtime); |
1996 __ pop(result_register()); | 1993 __ pop(result_register()); |
1997 EmitReturnSequence(); | 1994 EmitReturnSequence(); |
1998 | 1995 |
1999 __ bind(&resume); | 1996 __ bind(&resume); |
2000 context()->Plug(result_register()); | 1997 context()->Plug(result_register()); |
2001 break; | 1998 break; |
2002 } | 1999 } |
2003 | 2000 |
2004 case Yield::FINAL: { | 2001 case Yield::FINAL: { |
2005 VisitForAccumulatorValue(expr->generator_object()); | 2002 VisitForAccumulatorValue(expr->generator_object()); |
2006 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); | 2003 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); |
2007 __ sw(a1, FieldMemOperand(result_register(), | 2004 __ sd(a1, FieldMemOperand(result_register(), |
2008 JSGeneratorObject::kContinuationOffset)); | 2005 JSGeneratorObject::kContinuationOffset)); |
2009 // Pop value from top-of-stack slot, box result into result register. | 2006 // Pop value from top-of-stack slot, box result into result register. |
2010 EmitCreateIteratorResult(true); | 2007 EmitCreateIteratorResult(true); |
2011 EmitUnwindBeforeReturn(); | 2008 EmitUnwindBeforeReturn(); |
2012 EmitReturnSequence(); | 2009 EmitReturnSequence(); |
2013 break; | 2010 break; |
2014 } | 2011 } |
2015 | 2012 |
2016 case Yield::DELEGATING: { | 2013 case Yield::DELEGATING: { |
2017 VisitForStackValue(expr->generator_object()); | 2014 VisitForStackValue(expr->generator_object()); |
2018 | 2015 |
2019 // Initial stack layout is as follows: | 2016 // Initial stack layout is as follows: |
2020 // [sp + 1 * kPointerSize] iter | 2017 // [sp + 1 * kPointerSize] iter |
2021 // [sp + 0 * kPointerSize] g | 2018 // [sp + 0 * kPointerSize] g |
2022 | 2019 |
2023 Label l_catch, l_try, l_suspend, l_continuation, l_resume; | 2020 Label l_catch, l_try, l_suspend, l_continuation, l_resume; |
2024 Label l_next, l_call; | 2021 Label l_next, l_call; |
2025 Register load_receiver = LoadIC::ReceiverRegister(); | 2022 Register load_receiver = LoadIC::ReceiverRegister(); |
2026 Register load_name = LoadIC::NameRegister(); | 2023 Register load_name = LoadIC::NameRegister(); |
2027 | |
2028 // Initial send value is undefined. | 2024 // Initial send value is undefined. |
2029 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); | 2025 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); |
2030 __ Branch(&l_next); | 2026 __ Branch(&l_next); |
2031 | 2027 |
2032 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } | 2028 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } |
2033 __ bind(&l_catch); | 2029 __ bind(&l_catch); |
2034 __ mov(a0, v0); | 2030 __ mov(a0, v0); |
2035 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); | 2031 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); |
2036 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw" | 2032 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw" |
2037 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter | 2033 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter |
2038 __ Push(load_name, a3, a0); // "throw", iter, except | 2034 __ Push(a2, a3, a0); // "throw", iter, except |
2039 __ jmp(&l_call); | 2035 __ jmp(&l_call); |
2040 | 2036 |
2041 // try { received = %yield result } | 2037 // try { received = %yield result } |
2042 // Shuffle the received result above a try handler and yield it without | 2038 // Shuffle the received result above a try handler and yield it without |
2043 // re-boxing. | 2039 // re-boxing. |
2044 __ bind(&l_try); | 2040 __ bind(&l_try); |
2045 __ pop(a0); // result | 2041 __ pop(a0); // result |
2046 __ PushTryHandler(StackHandler::CATCH, expr->index()); | 2042 __ PushTryHandler(StackHandler::CATCH, expr->index()); |
2047 const int handler_size = StackHandlerConstants::kSize; | 2043 const int handler_size = StackHandlerConstants::kSize; |
2048 __ push(a0); // result | 2044 __ push(a0); // result |
2049 __ jmp(&l_suspend); | 2045 __ jmp(&l_suspend); |
2050 __ bind(&l_continuation); | 2046 __ bind(&l_continuation); |
2051 __ mov(a0, v0); | 2047 __ mov(a0, v0); |
2052 __ jmp(&l_resume); | 2048 __ jmp(&l_resume); |
2053 __ bind(&l_suspend); | 2049 __ bind(&l_suspend); |
2054 const int generator_object_depth = kPointerSize + handler_size; | 2050 const int generator_object_depth = kPointerSize + handler_size; |
2055 __ lw(a0, MemOperand(sp, generator_object_depth)); | 2051 __ ld(a0, MemOperand(sp, generator_object_depth)); |
2056 __ push(a0); // g | 2052 __ push(a0); // g |
2057 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); | 2053 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); |
2058 __ li(a1, Operand(Smi::FromInt(l_continuation.pos()))); | 2054 __ li(a1, Operand(Smi::FromInt(l_continuation.pos()))); |
2059 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset)); | 2055 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset)); |
2060 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset)); | 2056 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset)); |
2061 __ mov(a1, cp); | 2057 __ mov(a1, cp); |
2062 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2, | 2058 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2, |
2063 kRAHasBeenSaved, kDontSaveFPRegs); | 2059 kRAHasBeenSaved, kDontSaveFPRegs); |
2064 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 2060 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
2065 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2061 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2066 __ pop(v0); // result | 2062 __ pop(v0); // result |
2067 EmitReturnSequence(); | 2063 EmitReturnSequence(); |
2068 __ mov(a0, v0); | 2064 __ mov(a0, v0); |
2069 __ bind(&l_resume); // received in a0 | 2065 __ bind(&l_resume); // received in a0 |
2070 __ PopTryHandler(); | 2066 __ PopTryHandler(); |
2071 | 2067 |
2072 // receiver = iter; f = 'next'; arg = received; | 2068 // receiver = iter; f = 'next'; arg = received; |
2073 __ bind(&l_next); | 2069 __ bind(&l_next); |
2074 | |
2075 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next" | 2070 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next" |
2076 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter | 2071 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter |
2077 __ Push(load_name, a3, a0); // "next", iter, received | 2072 __ Push(load_name, a3, a0); // "next", iter, received |
2078 | 2073 |
2079 // result = receiver[f](arg); | 2074 // result = receiver[f](arg); |
2080 __ bind(&l_call); | 2075 __ bind(&l_call); |
2081 __ lw(load_receiver, MemOperand(sp, kPointerSize)); | 2076 __ ld(load_receiver, MemOperand(sp, kPointerSize)); |
2082 __ lw(load_name, MemOperand(sp, 2 * kPointerSize)); | 2077 __ ld(load_name, MemOperand(sp, 2 * kPointerSize)); |
2083 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | 2078 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
2084 CallIC(ic, TypeFeedbackId::None()); | 2079 CallIC(ic, TypeFeedbackId::None()); |
2085 __ mov(a0, v0); | 2080 __ mov(a0, v0); |
2086 __ mov(a1, a0); | 2081 __ mov(a1, a0); |
2087 __ sw(a1, MemOperand(sp, 2 * kPointerSize)); | 2082 __ sd(a1, MemOperand(sp, 2 * kPointerSize)); |
2088 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); | 2083 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); |
2089 __ CallStub(&stub); | 2084 __ CallStub(&stub); |
2090 | 2085 |
2091 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2086 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2092 __ Drop(1); // The function is still on the stack; drop it. | 2087 __ Drop(1); // The function is still on the stack; drop it. |
2093 | 2088 |
2094 // if (!result.done) goto l_try; | 2089 // if (!result.done) goto l_try; |
2095 __ Move(load_receiver, v0); | 2090 __ Move(load_receiver, v0); |
2096 | 2091 |
2097 __ push(load_receiver); // save result | 2092 __ push(load_receiver); // save result |
2098 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" | 2093 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" |
2099 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done | 2094 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done |
2100 __ mov(a0, v0); | 2095 __ mov(a0, v0); |
2101 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); | 2096 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); |
2102 CallIC(bool_ic); | 2097 CallIC(bool_ic); |
2103 __ Branch(&l_try, eq, v0, Operand(zero_reg)); | 2098 __ Branch(&l_try, eq, v0, Operand(zero_reg)); |
2104 | 2099 |
2105 // result.value | 2100 // result.value |
2106 __ pop(load_receiver); // result | 2101 __ pop(load_receiver); // result |
2107 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" | 2102 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" |
(...skipping 11 matching lines...)
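
The comments in the DELEGATING case spell out the loop the emitted code implements for yield*: look up iter[f] (f is "next", or "throw" on abrupt resumption), call it with the received value, test result.done, and loop until done. A deliberately simplified synchronous C++ sketch of that loop; the yield of each intermediate result back to the outer caller is elided, and the Iterator protocol here is a toy, not V8's:

    #include <functional>

    struct IterResult { int value; bool done; };
    struct Iterator {
      std::function<IterResult(int)> next;  // stands in for receiver["next"]
    };

    // The l_next / l_call / l_try cycle, minus the suspend/resume plumbing.
    int DelegateYield(Iterator iter, int initial_send /* undefined above */) {
      IterResult result = iter.next(initial_send);
      while (!result.done) {
        // Real code yields `result` here and resumes with a sent value;
        // this sketch just feeds the value straight back in.
        result = iter.next(result.value);
      }
      return result.value;  // the final result.value load above
    }
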
2119 // The value stays in a0, and is ultimately read by the resumed generator, as | 2114 // The value stays in a0, and is ultimately read by the resumed generator, as |
2120 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it | 2115 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it |
2121 // is read to throw the value when the resumed generator is already closed. | 2116 // is read to throw the value when the resumed generator is already closed. |
2122 // a1 will hold the generator object until the activation has been resumed. | 2117 // a1 will hold the generator object until the activation has been resumed. |
2123 VisitForStackValue(generator); | 2118 VisitForStackValue(generator); |
2124 VisitForAccumulatorValue(value); | 2119 VisitForAccumulatorValue(value); |
2125 __ pop(a1); | 2120 __ pop(a1); |
2126 | 2121 |
2127 // Check generator state. | 2122 // Check generator state. |
2128 Label wrong_state, closed_state, done; | 2123 Label wrong_state, closed_state, done; |
2129 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); | 2124 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); |
2130 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); | 2125 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); |
2131 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); | 2126 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); |
2132 __ Branch(&closed_state, eq, a3, Operand(zero_reg)); | 2127 __ Branch(&closed_state, eq, a3, Operand(zero_reg)); |
2133 __ Branch(&wrong_state, lt, a3, Operand(zero_reg)); | 2128 __ Branch(&wrong_state, lt, a3, Operand(zero_reg)); |
2134 | 2129 |
2135 // Load suspended function and context. | 2130 // Load suspended function and context. |
2136 __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset)); | 2131 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset)); |
2137 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); | 2132 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); |
2138 | 2133 |
2139 // Load receiver and store as the first argument. | 2134 // Load receiver and store as the first argument. |
2140 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); | 2135 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); |
2141 __ push(a2); | 2136 __ push(a2); |
2142 | 2137 |
2143 // Push holes for the rest of the arguments to the generator function. | 2138 // Push holes for the rest of the arguments to the generator function. |
2144 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); | 2139 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); |
| 2140 // The argument count is stored as int32_t on 64-bit platforms. |
| 2141 // TODO(plind): Smi on 32-bit platforms. |
2145 __ lw(a3, | 2142 __ lw(a3, |
2146 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); | 2143 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); |
2147 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex); | 2144 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex); |
2148 Label push_argument_holes, push_frame; | 2145 Label push_argument_holes, push_frame; |
2149 __ bind(&push_argument_holes); | 2146 __ bind(&push_argument_holes); |
2150 __ Subu(a3, a3, Operand(Smi::FromInt(1))); | 2147 __ Dsubu(a3, a3, Operand(1)); |
2151 __ Branch(&push_frame, lt, a3, Operand(zero_reg)); | 2148 __ Branch(&push_frame, lt, a3, Operand(zero_reg)); |
2152 __ push(a2); | 2149 __ push(a2); |
2153 __ jmp(&push_argument_holes); | 2150 __ jmp(&push_argument_holes); |
2154 | 2151 |
2155 // Enter a new JavaScript frame, and initialize its slots as they were when | 2152 // Enter a new JavaScript frame, and initialize its slots as they were when |
2156 // the generator was suspended. | 2153 // the generator was suspended. |
2157 Label resume_frame; | 2154 Label resume_frame; |
2158 __ bind(&push_frame); | 2155 __ bind(&push_frame); |
2159 __ Call(&resume_frame); | 2156 __ Call(&resume_frame); |
2160 __ jmp(&done); | 2157 __ jmp(&done); |
2161 __ bind(&resume_frame); | 2158 __ bind(&resume_frame); |
2162 // ra = return address. | 2159 // ra = return address. |
2163 // fp = caller's frame pointer. | 2160 // fp = caller's frame pointer. |
2164 // cp = callee's context, | 2161 // cp = callee's context, |
2165 // t0 = callee's JS function. | 2162 // a4 = callee's JS function. |
2166 __ Push(ra, fp, cp, t0); | 2163 __ Push(ra, fp, cp, a4); |
2167 // Adjust FP to point to saved FP. | 2164 // Adjust FP to point to saved FP. |
2168 __ Addu(fp, sp, 2 * kPointerSize); | 2165 __ Daddu(fp, sp, 2 * kPointerSize); |
2169 | 2166 |
2170 // Load the operand stack size. | 2167 // Load the operand stack size. |
2171 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); | 2168 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); |
2172 __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset)); | 2169 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset)); |
2173 __ SmiUntag(a3); | 2170 __ SmiUntag(a3); |
2174 | 2171 |
2175 // If we are sending a value and there is no operand stack, we can jump back | 2172 // If we are sending a value and there is no operand stack, we can jump back |
2176 // in directly. | 2173 // in directly. |
2177 if (resume_mode == JSGeneratorObject::NEXT) { | 2174 if (resume_mode == JSGeneratorObject::NEXT) { |
2178 Label slow_resume; | 2175 Label slow_resume; |
2179 __ Branch(&slow_resume, ne, a3, Operand(zero_reg)); | 2176 __ Branch(&slow_resume, ne, a3, Operand(zero_reg)); |
2180 __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset)); | 2177 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset)); |
2181 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); | 2178 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); |
2182 __ SmiUntag(a2); | 2179 __ SmiUntag(a2); |
2183 __ Addu(a3, a3, Operand(a2)); | 2180 __ Daddu(a3, a3, Operand(a2)); |
2184 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); | 2181 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); |
2185 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); | 2182 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); |
2186 __ Jump(a3); | 2183 __ Jump(a3); |
2187 __ bind(&slow_resume); | 2184 __ bind(&slow_resume); |
2188 } | 2185 } |
2189 | 2186 |
2190 // Otherwise, we push holes for the operand stack and call the runtime to fix | 2187 // Otherwise, we push holes for the operand stack and call the runtime to fix |
2191 // up the stack and the handlers. | 2188 // up the stack and the handlers. |
2192 Label push_operand_holes, call_resume; | 2189 Label push_operand_holes, call_resume; |
2193 __ bind(&push_operand_holes); | 2190 __ bind(&push_operand_holes); |
2194 __ Subu(a3, a3, Operand(1)); | 2191 __ Dsubu(a3, a3, Operand(1)); |
2195 __ Branch(&call_resume, lt, a3, Operand(zero_reg)); | 2192 __ Branch(&call_resume, lt, a3, Operand(zero_reg)); |
2196 __ push(a2); | 2193 __ push(a2); |
2197 __ Branch(&push_operand_holes); | 2194 __ Branch(&push_operand_holes); |
2198 __ bind(&call_resume); | 2195 __ bind(&call_resume); |
2199 ASSERT(!result_register().is(a1)); | 2196 ASSERT(!result_register().is(a1)); |
2200 __ Push(a1, result_register()); | 2197 __ Push(a1, result_register()); |
2201 __ Push(Smi::FromInt(resume_mode)); | 2198 __ Push(Smi::FromInt(resume_mode)); |
2202 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); | 2199 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); |
2203 // Not reached: the runtime call returns elsewhere. | 2200 // Not reached: the runtime call returns elsewhere. |
2204 __ stop("not-reached"); | 2201 __ stop("not-reached"); |
(...skipping 28 matching lines...)
2233 Label allocated; | 2230 Label allocated; |
2234 | 2231 |
2235 Handle<Map> map(isolate()->native_context()->iterator_result_map()); | 2232 Handle<Map> map(isolate()->native_context()->iterator_result_map()); |
2236 | 2233 |
2237 __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT); | 2234 __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT); |
2238 __ jmp(&allocated); | 2235 __ jmp(&allocated); |
2239 | 2236 |
2240 __ bind(&gc_required); | 2237 __ bind(&gc_required); |
2241 __ Push(Smi::FromInt(map->instance_size())); | 2238 __ Push(Smi::FromInt(map->instance_size())); |
2242 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | 2239 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
2243 __ lw(context_register(), | 2240 __ ld(context_register(), |
2244 MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2241 MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2245 | 2242 |
2246 __ bind(&allocated); | 2243 __ bind(&allocated); |
2247 __ li(a1, Operand(map)); | 2244 __ li(a1, Operand(map)); |
2248 __ pop(a2); | 2245 __ pop(a2); |
2249 __ li(a3, Operand(isolate()->factory()->ToBoolean(done))); | 2246 __ li(a3, Operand(isolate()->factory()->ToBoolean(done))); |
2250 __ li(t0, Operand(isolate()->factory()->empty_fixed_array())); | 2247 __ li(a4, Operand(isolate()->factory()->empty_fixed_array())); |
2251 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); | 2248 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); |
2252 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 2249 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
2253 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | 2250 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
2254 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 2251 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset)); |
2255 __ sw(a2, | 2252 __ sd(a2, |
2256 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset)); | 2253 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset)); |
2257 __ sw(a3, | 2254 __ sd(a3, |
2258 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset)); | 2255 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset)); |
2259 | 2256 |
2260 // Only the value field needs a write barrier, as the other values are in the | 2257 // Only the value field needs a write barrier, as the other values are in the |
2261 // root set. | 2258 // root set. |
2262 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset, | 2259 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset, |
2263 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs); | 2260 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs); |
2264 } | 2261 } |
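
The five sd instructions above fill a fixed five-slot object, and only the value slot ever needs a write barrier because the map and the two empty fixed arrays live in the root set. The layout, as an ordinary struct with descriptive (non-V8) field names:

    struct IteratorResult {      // 5 * kPointerSize, per the ASSERT_EQ above
      const void* map;           // HeapObject::kMapOffset
      const void* properties;    // JSObject::kPropertiesOffset (empty array)
      const void* elements;      // JSObject::kElementsOffset  (empty array)
      void* value;               // kResultValuePropertyOffset -> barrier
      const void* done;          // kResultDonePropertyOffset (true/false root)
    };
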
2265 | 2262 |
2266 | 2263 |
2267 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { | 2264 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { |
(...skipping 40 matching lines...)
2308 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); | 2305 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId()); |
2309 patch_site.EmitPatchInfo(); | 2306 patch_site.EmitPatchInfo(); |
2310 __ jmp(&done); | 2307 __ jmp(&done); |
2311 | 2308 |
2312 __ bind(&smi_case); | 2309 __ bind(&smi_case); |
2313 // Smi case. This code works the same way as the smi-smi case in the type | 2310 // Smi case. This code works the same way as the smi-smi case in the type |
2314 // recording binary operation stub. | 2311 // recording binary operation stub. |
2315 switch (op) { | 2312 switch (op) { |
2316 case Token::SAR: | 2313 case Token::SAR: |
2317 __ GetLeastBitsFromSmi(scratch1, right, 5); | 2314 __ GetLeastBitsFromSmi(scratch1, right, 5); |
2318 __ srav(right, left, scratch1); | 2315 __ dsrav(right, left, scratch1); |
2319 __ And(v0, right, Operand(~kSmiTagMask)); | 2316 __ And(v0, right, Operand(0xffffffff00000000L)); |
2320 break; | 2317 break; |
2321 case Token::SHL: { | 2318 case Token::SHL: { |
2322 __ SmiUntag(scratch1, left); | 2319 __ SmiUntag(scratch1, left); |
2323 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2320 __ GetLeastBitsFromSmi(scratch2, right, 5); |
2324 __ sllv(scratch1, scratch1, scratch2); | 2321 __ dsllv(scratch1, scratch1, scratch2); |
2325 __ Addu(scratch2, scratch1, Operand(0x40000000)); | |
2326 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); | |
2327 __ SmiTag(v0, scratch1); | 2322 __ SmiTag(v0, scratch1); |
2328 break; | 2323 break; |
2329 } | 2324 } |
2330 case Token::SHR: { | 2325 case Token::SHR: { |
2331 __ SmiUntag(scratch1, left); | 2326 __ SmiUntag(scratch1, left); |
2332 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2327 __ GetLeastBitsFromSmi(scratch2, right, 5); |
2333 __ srlv(scratch1, scratch1, scratch2); | 2328 __ dsrlv(scratch1, scratch1, scratch2); |
2334 __ And(scratch2, scratch1, 0xc0000000); | 2329 __ And(scratch2, scratch1, 0x80000000); |
2335 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg)); | 2330 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg)); |
2336 __ SmiTag(v0, scratch1); | 2331 __ SmiTag(v0, scratch1); |
2337 break; | 2332 break; |
2338 } | 2333 } |
2339 case Token::ADD: | 2334 case Token::ADD: |
2340 __ AdduAndCheckForOverflow(v0, left, right, scratch1); | 2335 __ AdduAndCheckForOverflow(v0, left, right, scratch1); |
2341 __ BranchOnOverflow(&stub_call, scratch1); | 2336 __ BranchOnOverflow(&stub_call, scratch1); |
2342 break; | 2337 break; |
2343 case Token::SUB: | 2338 case Token::SUB: |
2344 __ SubuAndCheckForOverflow(v0, left, right, scratch1); | 2339 __ SubuAndCheckForOverflow(v0, left, right, scratch1); |
2345 __ BranchOnOverflow(&stub_call, scratch1); | 2340 __ BranchOnOverflow(&stub_call, scratch1); |
2346 break; | 2341 break; |
2347 case Token::MUL: { | 2342 case Token::MUL: { |
2348 __ SmiUntag(scratch1, right); | 2343 __ SmiUntag(scratch1, right); |
2349 __ Mult(left, scratch1); | 2344 __ Dmult(left, scratch1); |
2350 __ mflo(scratch1); | 2345 __ mflo(scratch1); |
2351 __ mfhi(scratch2); | 2346 __ mfhi(scratch2); |
2352 __ sra(scratch1, scratch1, 31); | 2347 __ dsra32(scratch1, scratch1, 31); |
2353 __ Branch(&stub_call, ne, scratch1, Operand(scratch2)); | 2348 __ Branch(&stub_call, ne, scratch1, Operand(scratch2)); |
2354 __ mflo(v0); | 2349 __ mflo(v0); |
2355 __ Branch(&done, ne, v0, Operand(zero_reg)); | 2350 __ Branch(&done, ne, v0, Operand(zero_reg)); |
2356 __ Addu(scratch2, right, left); | 2351 __ Daddu(scratch2, right, left); |
2357 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); | 2352 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); |
2358 ASSERT(Smi::FromInt(0) == 0); | 2353 ASSERT(Smi::FromInt(0) == 0); |
2359 __ mov(v0, zero_reg); | 2354 __ mov(v0, zero_reg); |
2360 break; | 2355 break; |
2361 } | 2356 } |
2362 case Token::BIT_OR: | 2357 case Token::BIT_OR: |
2363 __ Or(v0, left, Operand(right)); | 2358 __ Or(v0, left, Operand(right)); |
2364 break; | 2359 break; |
2365 case Token::BIT_AND: | 2360 case Token::BIT_AND: |
2366 __ And(v0, left, Operand(right)); | 2361 __ And(v0, left, Operand(right)); |
(...skipping 65 matching lines...)
2432 CallIC(ic); | 2427 CallIC(ic); |
2433 break; | 2428 break; |
2434 } | 2429 } |
2435 } | 2430 } |
2436 context()->Plug(v0); | 2431 context()->Plug(v0); |
2437 } | 2432 } |
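
Most of the 32-to-64-bit churn in the smi fast path above comes from MIPS64 keeping the 32-bit smi payload in the upper word of a 64-bit register: SAR masks with 0xffffffff00000000 to clear the low bits, shifts become their d-variants, and the MUL overflow check compares mfhi against dsra32(lo, 31), i.e. bit 63 of the low word. A standalone sketch of that representation and check; the upper-word layout is an assumption of the sketch, and GCC/Clang __int128 supplies the wide product:

    #include <cassert>
    #include <cstdint>

    // Smi tag: payload in the upper 32 bits, tag bits clear below.
    int64_t SmiTag(int32_t value) {
      return static_cast<int64_t>(static_cast<uint64_t>(int64_t{value}) << 32);
    }
    int32_t SmiUntag(int64_t smi) { return static_cast<int32_t>(smi >> 32); }

    // A 64x64 product fits in 64 bits iff the high word (mfhi) equals the
    // sign-extension of the low word (the dsra32-by-31, i.e. bit 63).
    bool MulOverflows(int64_t a, int64_t b) {
      __int128 p = static_cast<__int128>(a) * b;
      int64_t lo = static_cast<int64_t>(p);
      int64_t hi = static_cast<int64_t>(p >> 64);
      return hi != (lo >> 63);
    }

    int main() {
      assert(SmiUntag(SmiTag(-7)) == -7);
      assert(!MulOverflows(int64_t{1} << 20, int64_t{1} << 20));
      assert(MulOverflows(INT64_MAX, 2));
    }
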
2438 | 2433 |
2439 | 2434 |
2440 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( | 2435 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( |
2441 Variable* var, MemOperand location) { | 2436 Variable* var, MemOperand location) { |
2442 __ sw(result_register(), location); | 2437 __ sd(result_register(), location); |
2443 if (var->IsContextSlot()) { | 2438 if (var->IsContextSlot()) { |
2444 // RecordWrite may destroy all its register arguments. | 2439 // RecordWrite may destroy all its register arguments. |
2445 __ Move(a3, result_register()); | 2440 __ Move(a3, result_register()); |
2446 int offset = Context::SlotOffset(var->index()); | 2441 int offset = Context::SlotOffset(var->index()); |
2447 __ RecordWriteContextSlot( | 2442 __ RecordWriteContextSlot( |
2448 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); | 2443 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); |
2449 } | 2444 } |
2450 } | 2445 } |
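
EmitStoreToStackLocalOrContextSlot only emits RecordWriteContextSlot for context slots: stack slots are scanned precisely, while a pointer stored into a heap-allocated context may create an old-to-new edge the generational/incremental GC must hear about. A toy sketch of the store-plus-conditional-barrier shape; the vector-based remembered set is an illustration, not V8's data structure:

    #include <vector>

    struct HeapObject {};                                // toy heap value
    static std::vector<HeapObject**> g_remembered_set;   // toy remembered set

    // Mirrors sd(result_register(), location) plus the conditional
    // RecordWriteContextSlot above.
    void StoreWithBarrier(HeapObject** slot, HeapObject* value,
                          bool is_context_slot) {
      *slot = value;                           // the store itself
      if (is_context_slot) {
        g_remembered_set.push_back(slot);      // tell the GC about the edge
      }
    }
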
2451 | 2446 |
2452 | 2447 |
2453 void FullCodeGenerator::EmitCallStoreContextSlot( | 2448 void FullCodeGenerator::EmitCallStoreContextSlot( |
2454 Handle<String> name, StrictMode strict_mode) { | 2449 Handle<String> name, StrictMode strict_mode) { |
2455 __ li(a1, Operand(name)); | 2450 __ li(a1, Operand(name)); |
2456 __ li(a0, Operand(Smi::FromInt(strict_mode))); | 2451 __ li(a0, Operand(Smi::FromInt(strict_mode))); |
2457 __ Push(v0, cp, a1, a0); // Value, context, name, strict mode. | 2452 __ Push(v0, cp, a1, a0); // Value, context, name, strict mode. |
2458 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 2453 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
2459 } | 2454 } |
2460 | 2455 |
2461 | 2456 |
2462 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { | 2457 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) { |
2463 if (var->IsUnallocated()) { | 2458 if (var->IsUnallocated()) { |
2464 // Global var, const, or let. | 2459 // Global var, const, or let. |
2465 __ mov(a0, result_register()); | 2460 __ mov(a0, result_register()); |
2466 __ li(a2, Operand(var->name())); | 2461 __ li(a2, Operand(var->name())); |
2467 __ lw(a1, GlobalObjectOperand()); | 2462 __ ld(a1, GlobalObjectOperand()); |
2468 CallStoreIC(); | 2463 CallStoreIC(); |
2469 | |
2470 } else if (op == Token::INIT_CONST_LEGACY) { | 2464 } else if (op == Token::INIT_CONST_LEGACY) { |
2471 // Const initializers need a write barrier. | 2465 // Const initializers need a write barrier. |
2472 ASSERT(!var->IsParameter()); // No const parameters. | 2466 ASSERT(!var->IsParameter()); // No const parameters. |
2473 if (var->IsLookupSlot()) { | 2467 if (var->IsLookupSlot()) { |
2474 __ li(a0, Operand(var->name())); | 2468 __ li(a0, Operand(var->name())); |
2475 __ Push(v0, cp, a0); // Context and name. | 2469 __ Push(v0, cp, a0); // Context and name. |
2476 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | 2470 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
2477 } else { | 2471 } else { |
2478 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 2472 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
2479 Label skip; | 2473 Label skip; |
2480 MemOperand location = VarOperand(var, a1); | 2474 MemOperand location = VarOperand(var, a1); |
2481 __ lw(a2, location); | 2475 __ ld(a2, location); |
2482 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 2476 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
2483 __ Branch(&skip, ne, a2, Operand(at)); | 2477 __ Branch(&skip, ne, a2, Operand(at)); |
2484 EmitStoreToStackLocalOrContextSlot(var, location); | 2478 EmitStoreToStackLocalOrContextSlot(var, location); |
2485 __ bind(&skip); | 2479 __ bind(&skip); |
2486 } | 2480 } |
2487 | 2481 |
2488 } else if (var->mode() == LET && op != Token::INIT_LET) { | 2482 } else if (var->mode() == LET && op != Token::INIT_LET) { |
2489 // Non-initializing assignment to let variable needs a write barrier. | 2483 // Non-initializing assignment to let variable needs a write barrier. |
2490 if (var->IsLookupSlot()) { | 2484 if (var->IsLookupSlot()) { |
2491 EmitCallStoreContextSlot(var->name(), strict_mode()); | 2485 EmitCallStoreContextSlot(var->name(), strict_mode()); |
2492 } else { | 2486 } else { |
2493 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 2487 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
2494 Label assign; | 2488 Label assign; |
2495 MemOperand location = VarOperand(var, a1); | 2489 MemOperand location = VarOperand(var, a1); |
2496 __ lw(a3, location); | 2490 __ ld(a3, location); |
2497 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 2491 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); |
2498 __ Branch(&assign, ne, a3, Operand(t0)); | 2492 __ Branch(&assign, ne, a3, Operand(a4)); |
2499 __ li(a3, Operand(var->name())); | 2493 __ li(a3, Operand(var->name())); |
2500 __ push(a3); | 2494 __ push(a3); |
2501 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 2495 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
2502 // Perform the assignment. | 2496 // Perform the assignment. |
2503 __ bind(&assign); | 2497 __ bind(&assign); |
2504 EmitStoreToStackLocalOrContextSlot(var, location); | 2498 EmitStoreToStackLocalOrContextSlot(var, location); |
2505 } | 2499 } |
2506 | 2500 |
2507 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { | 2501 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { |
2508 // Assignment to var or initializing assignment to let/const | 2502 // Assignment to var or initializing assignment to let/const |
2509 // in harmony mode. | 2503 // in harmony mode. |
2510 if (var->IsLookupSlot()) { | 2504 if (var->IsLookupSlot()) { |
2511 EmitCallStoreContextSlot(var->name(), strict_mode()); | 2505 EmitCallStoreContextSlot(var->name(), strict_mode()); |
2512 } else { | 2506 } else { |
2513 ASSERT((var->IsStackAllocated() || var->IsContextSlot())); | 2507 ASSERT((var->IsStackAllocated() || var->IsContextSlot())); |
2514 MemOperand location = VarOperand(var, a1); | 2508 MemOperand location = VarOperand(var, a1); |
2515 if (generate_debug_code_ && op == Token::INIT_LET) { | 2509 if (generate_debug_code_ && op == Token::INIT_LET) { |
2516 // Check for an uninitialized let binding. | 2510 // Check for an uninitialized let binding. |
2517 __ lw(a2, location); | 2511 __ ld(a2, location); |
2518 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | 2512 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); |
2519 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0)); | 2513 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4)); |
2520 } | 2514 } |
2521 EmitStoreToStackLocalOrContextSlot(var, location); | 2515 EmitStoreToStackLocalOrContextSlot(var, location); |
2522 } | 2516 } |
2523 } | 2517 } |
2524 // Non-initializing assignments to consts are ignored. | 2518 // Non-initializing assignments to consts are ignored. |
2525 } | 2519 } |
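Reviewer note: the LET branch above is the temporal-dead-zone check: the slot is pre-filled with the the-hole sentinel, and a non-initializing assignment that still finds the hole must throw before storing. A minimal C++ sketch of that logic, with hypothetical Slot/sentinel names standing in for V8's heap types:

    #include <stdexcept>
    #include <string>

    // Toy stand-ins for a variable slot and the the-hole sentinel (hypothetical).
    struct Slot { const void* value; };
    static const char kTheHoleSentinel = 0;

    void AssignToLet(Slot& slot, const void* value, const std::string& name) {
      if (slot.value == &kTheHoleSentinel) {
        // Mirrors the Runtime::kThrowReferenceError call emitted above.
        throw std::runtime_error("ReferenceError: " + name + " is not initialized");
      }
      slot.value = value;  // mirrors EmitStoreToStackLocalOrContextSlot
    }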
2526 | 2520 |
2527 | 2521 |
2528 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 2522 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
2529 // Assignment to a property, using a named store IC. | 2523 // Assignment to a property, using a named store IC. |
(...skipping 78 matching lines...) |
2608 { StackValueContext context(this); | 2602 { StackValueContext context(this); |
2609 EmitVariableLoad(callee->AsVariableProxy()); | 2603 EmitVariableLoad(callee->AsVariableProxy()); |
2610 PrepareForBailout(callee, NO_REGISTERS); | 2604 PrepareForBailout(callee, NO_REGISTERS); |
2611 } | 2605 } |
2612 // Push undefined as receiver. This is patched in the method prologue if it | 2606 // Push undefined as receiver. This is patched in the method prologue if it |
2613 // is a sloppy mode method. | 2607 // is a sloppy mode method. |
2614 __ Push(isolate()->factory()->undefined_value()); | 2608 __ Push(isolate()->factory()->undefined_value()); |
2615 } else { | 2609 } else { |
2616 // Load the function from the receiver. | 2610 // Load the function from the receiver. |
2617 ASSERT(callee->IsProperty()); | 2611 ASSERT(callee->IsProperty()); |
2618 __ lw(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); | 2612 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); |
2619 EmitNamedPropertyLoad(callee->AsProperty()); | 2613 EmitNamedPropertyLoad(callee->AsProperty()); |
2620 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | 2614 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
2621 // Push the target function under the receiver. | 2615 // Push the target function under the receiver. |
2622 __ lw(at, MemOperand(sp, 0)); | 2616 __ ld(at, MemOperand(sp, 0)); |
2623 __ push(at); | 2617 __ push(at); |
2624 __ sw(v0, MemOperand(sp, kPointerSize)); | 2618 __ sd(v0, MemOperand(sp, kPointerSize)); |
2625 } | 2619 } |
2626 | 2620 |
2627 EmitCall(expr, call_type); | 2621 EmitCall(expr, call_type); |
2628 } | 2622 } |
2629 | 2623 |
2630 | 2624 |
2631 // Code common for calls using the IC. | 2625 // Code common for calls using the IC. |
2632 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, | 2626 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, |
2633 Expression* key) { | 2627 Expression* key) { |
2634 // Load the key. | 2628 // Load the key. |
2635 VisitForAccumulatorValue(key); | 2629 VisitForAccumulatorValue(key); |
2636 | 2630 |
2637 Expression* callee = expr->expression(); | 2631 Expression* callee = expr->expression(); |
2638 | 2632 |
2639 // Load the function from the receiver. | 2633 // Load the function from the receiver. |
2640 ASSERT(callee->IsProperty()); | 2634 ASSERT(callee->IsProperty()); |
2641 __ lw(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); | 2635 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); |
2642 __ Move(LoadIC::NameRegister(), v0); | 2636 __ Move(LoadIC::NameRegister(), v0); |
2643 EmitKeyedPropertyLoad(callee->AsProperty()); | 2637 EmitKeyedPropertyLoad(callee->AsProperty()); |
2644 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | 2638 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
2645 | 2639 |
2646 // Push the target function under the receiver. | 2640 // Push the target function under the receiver. |
2647 __ lw(at, MemOperand(sp, 0)); | 2641 __ ld(at, MemOperand(sp, 0)); |
2648 __ push(at); | 2642 __ push(at); |
2649 __ sw(v0, MemOperand(sp, kPointerSize)); | 2643 __ sd(v0, MemOperand(sp, kPointerSize)); |
2650 | 2644 |
2651 EmitCall(expr, CallIC::METHOD); | 2645 EmitCall(expr, CallIC::METHOD); |
2652 } | 2646 } |
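Reviewer note: the ld/push/sd triple above is the usual "insert under top of stack" idiom. A small sketch with a std::vector standing in for the JS stack (back() is the top; helper name is hypothetical):

    #include <vector>

    // After EmitKeyedPropertyLoad the callee is in v0 and the receiver is on
    // top of the stack; the shuffle leaves [ callee, receiver ], receiver on top.
    void PushFunctionUnderReceiver(std::vector<const void*>& stack,
                                   const void* callee) {
      stack.push_back(stack.back());     // ld(at, MemOperand(sp, 0)); push(at)
      stack[stack.size() - 2] = callee;  // sd(v0, MemOperand(sp, kPointerSize))
    }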
2653 | 2647 |
2654 | 2648 |
2655 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) { | 2649 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) { |
2656 // Load the arguments. | 2650 // Load the arguments. |
2657 ZoneList<Expression*>* args = expr->arguments(); | 2651 ZoneList<Expression*>* args = expr->arguments(); |
2658 int arg_count = args->length(); | 2652 int arg_count = args->length(); |
2659 { PreservePositionScope scope(masm()->positions_recorder()); | 2653 { PreservePositionScope scope(masm()->positions_recorder()); |
2660 for (int i = 0; i < arg_count; i++) { | 2654 for (int i = 0; i < arg_count; i++) { |
2661 VisitForStackValue(args->at(i)); | 2655 VisitForStackValue(args->at(i)); |
2662 } | 2656 } |
2663 } | 2657 } |
2664 | 2658 |
2665 // Record source position of the IC call. | 2659 // Record source position of the IC call. |
2666 SetSourcePosition(expr->position()); | 2660 SetSourcePosition(expr->position()); |
2667 Handle<Code> ic = CallIC::initialize_stub( | 2661 Handle<Code> ic = CallIC::initialize_stub( |
2668 isolate(), arg_count, call_type); | 2662 isolate(), arg_count, call_type); |
2669 __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); | 2663 __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); |
2670 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2664 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
2671 // Don't assign a type feedback id to the IC, since type feedback is provided | 2665 // Don't assign a type feedback id to the IC, since type feedback is provided |
2672 // by the vector above. | 2666 // by the vector above. |
2673 CallIC(ic); | 2667 CallIC(ic); |
2674 | |
2675 RecordJSReturnSite(expr); | 2668 RecordJSReturnSite(expr); |
2676 // Restore context register. | 2669 // Restore context register. |
2677 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2670 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2678 context()->DropAndPlug(1, v0); | 2671 context()->DropAndPlug(1, v0); |
2679 } | 2672 } |
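Reviewer note: the (arg_count + 1) * kPointerSize offset works because the receiver plus arg_count arguments sit above the callee. The same indexing against a vector stack (illustrative helper, not V8 code):

    #include <vector>

    // Stack layout, top == back(): [ ..., callee, receiver, arg1 ... argN ].
    const void* LoadCallee(const std::vector<const void*>& stack, int arg_count) {
      return stack[stack.size() - 1 - (arg_count + 1)];
    }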
2680 | 2673 |
2681 | 2674 |
2682 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | 2675 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
2683 // t2: copy of the first argument or undefined if it doesn't exist. | 2676 // a6: copy of the first argument or undefined if it doesn't exist. |
2684 if (arg_count > 0) { | 2677 if (arg_count > 0) { |
2685 __ lw(t2, MemOperand(sp, arg_count * kPointerSize)); | 2678 __ ld(a6, MemOperand(sp, arg_count * kPointerSize)); |
2686 } else { | 2679 } else { |
2687 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); | 2680 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex); |
2688 } | 2681 } |
2689 | 2682 |
2690 // t1: the receiver of the enclosing function. | 2683 // a5: the receiver of the enclosing function. |
2691 int receiver_offset = 2 + info_->scope()->num_parameters(); | 2684 int receiver_offset = 2 + info_->scope()->num_parameters(); |
2692 __ lw(t1, MemOperand(fp, receiver_offset * kPointerSize)); | 2685 __ ld(a5, MemOperand(fp, receiver_offset * kPointerSize)); |
2693 | 2686 |
2694 // t0: the strict mode. | 2687 // a4: the strict mode. |
2695 __ li(t0, Operand(Smi::FromInt(strict_mode()))); | 2688 __ li(a4, Operand(Smi::FromInt(strict_mode()))); |
2696 | 2689 |
2697 // a1: the start position of the scope the call resides in. | 2690 // a1: the start position of the scope the call resides in. |
2698 __ li(a1, Operand(Smi::FromInt(scope()->start_position()))); | 2691 __ li(a1, Operand(Smi::FromInt(scope()->start_position()))); |
2699 | 2692 |
2700 // Do the runtime call. | 2693 // Do the runtime call. |
2701 __ Push(t2, t1, t0, a1); | 2694 __ Push(a6, a5, a4, a1); |
2702 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); | 2695 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); |
2703 } | 2696 } |
2704 | 2697 |
2705 | 2698 |
2706 void FullCodeGenerator::VisitCall(Call* expr) { | 2699 void FullCodeGenerator::VisitCall(Call* expr) { |
2707 #ifdef DEBUG | 2700 #ifdef DEBUG |
2708 // We want to verify that RecordJSReturnSite gets called on all paths | 2701 // We want to verify that RecordJSReturnSite gets called on all paths |
2709 // through this function. Avoid early returns. | 2702 // through this function. Avoid early returns. |
2710 expr->return_is_recorded_ = false; | 2703 expr->return_is_recorded_ = false; |
2711 #endif | 2704 #endif |
(...skipping 15 matching lines...) |
2727 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 2720 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
2728 __ push(a2); // Reserved receiver slot. | 2721 __ push(a2); // Reserved receiver slot. |
2729 | 2722 |
2730 // Push the arguments. | 2723 // Push the arguments. |
2731 for (int i = 0; i < arg_count; i++) { | 2724 for (int i = 0; i < arg_count; i++) { |
2732 VisitForStackValue(args->at(i)); | 2725 VisitForStackValue(args->at(i)); |
2733 } | 2726 } |
2734 | 2727 |
2735 // Push a copy of the function (found below the arguments) and | 2728 // Push a copy of the function (found below the arguments) and |
2736 // resolve eval. | 2729 // resolve eval. |
2737 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2730 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
2738 __ push(a1); | 2731 __ push(a1); |
2739 EmitResolvePossiblyDirectEval(arg_count); | 2732 EmitResolvePossiblyDirectEval(arg_count); |
2740 | 2733 |
2741 // The runtime call returns a pair of values in v0 (function) and | 2734 // The runtime call returns a pair of values in v0 (function) and |
2742 // v1 (receiver). Touch up the stack with the right values. | 2735 // v1 (receiver). Touch up the stack with the right values. |
2743 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2736 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
2744 __ sw(v1, MemOperand(sp, arg_count * kPointerSize)); | 2737 __ sd(v1, MemOperand(sp, arg_count * kPointerSize)); |
2745 } | 2738 } |
2746 // Record source position for debugger. | 2739 // Record source position for debugger. |
2747 SetSourcePosition(expr->position()); | 2740 SetSourcePosition(expr->position()); |
2748 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); | 2741 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); |
2749 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 2742 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
2750 __ CallStub(&stub); | 2743 __ CallStub(&stub); |
2751 RecordJSReturnSite(expr); | 2744 RecordJSReturnSite(expr); |
2752 // Restore context register. | 2745 // Restore context register. |
2753 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2746 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2754 context()->DropAndPlug(1, v0); | 2747 context()->DropAndPlug(1, v0); |
2755 } else if (call_type == Call::GLOBAL_CALL) { | 2748 } else if (call_type == Call::GLOBAL_CALL) { |
2756 EmitCallWithLoadIC(expr); | 2749 EmitCallWithLoadIC(expr); |
2757 } else if (call_type == Call::LOOKUP_SLOT_CALL) { | 2750 } else if (call_type == Call::LOOKUP_SLOT_CALL) { |
2758 // Call to a lookup slot (dynamically introduced variable). | 2751 // Call to a lookup slot (dynamically introduced variable). |
2759 VariableProxy* proxy = callee->AsVariableProxy(); | 2752 VariableProxy* proxy = callee->AsVariableProxy(); |
2760 Label slow, done; | 2753 Label slow, done; |
2761 | 2754 |
2762 { PreservePositionScope scope(masm()->positions_recorder()); | 2755 { PreservePositionScope scope(masm()->positions_recorder()); |
2763 // Generate code for loading from variables potentially shadowed | 2756 // Generate code for loading from variables potentially shadowed |
(...skipping 68 matching lines...) |
2832 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is | 2825 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is |
2833 // ignored. | 2826 // ignored. |
2834 VisitForStackValue(expr->expression()); | 2827 VisitForStackValue(expr->expression()); |
2835 | 2828 |
2836 // Push the arguments ("left-to-right") on the stack. | 2829 // Push the arguments ("left-to-right") on the stack. |
2837 ZoneList<Expression*>* args = expr->arguments(); | 2830 ZoneList<Expression*>* args = expr->arguments(); |
2838 int arg_count = args->length(); | 2831 int arg_count = args->length(); |
2839 for (int i = 0; i < arg_count; i++) { | 2832 for (int i = 0; i < arg_count; i++) { |
2840 VisitForStackValue(args->at(i)); | 2833 VisitForStackValue(args->at(i)); |
2841 } | 2834 } |
2842 | |
2843 // Call the construct call builtin that handles allocation and | 2835 // Call the construct call builtin that handles allocation and |
2844 // constructor invocation. | 2836 // constructor invocation. |
2845 SetSourcePosition(expr->position()); | 2837 SetSourcePosition(expr->position()); |
2846 | 2838 |
2847 // Load function and argument count into a1 and a0. | 2839 // Load function and argument count into a1 and a0. |
2848 __ li(a0, Operand(arg_count)); | 2840 __ li(a0, Operand(arg_count)); |
2849 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); | 2841 __ ld(a1, MemOperand(sp, arg_count * kPointerSize)); |
2850 | 2842 |
2851 // Record call targets in unoptimized code. | 2843 // Record call targets in unoptimized code. |
2852 if (FLAG_pretenuring_call_new) { | 2844 if (FLAG_pretenuring_call_new) { |
2853 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); | 2845 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); |
2854 ASSERT(expr->AllocationSiteFeedbackSlot() == | 2846 ASSERT(expr->AllocationSiteFeedbackSlot() == |
2855 expr->CallNewFeedbackSlot() + 1); | 2847 expr->CallNewFeedbackSlot() + 1); |
2856 } | 2848 } |
2857 | 2849 |
2858 __ li(a2, FeedbackVector()); | 2850 __ li(a2, FeedbackVector()); |
2859 __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); | 2851 __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); |
(...skipping 12 matching lines...) |
2872 VisitForAccumulatorValue(args->at(0)); | 2864 VisitForAccumulatorValue(args->at(0)); |
2873 | 2865 |
2874 Label materialize_true, materialize_false; | 2866 Label materialize_true, materialize_false; |
2875 Label* if_true = NULL; | 2867 Label* if_true = NULL; |
2876 Label* if_false = NULL; | 2868 Label* if_false = NULL; |
2877 Label* fall_through = NULL; | 2869 Label* fall_through = NULL; |
2878 context()->PrepareTest(&materialize_true, &materialize_false, | 2870 context()->PrepareTest(&materialize_true, &materialize_false, |
2879 &if_true, &if_false, &fall_through); | 2871 &if_true, &if_false, &fall_through); |
2880 | 2872 |
2881 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2873 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2882 __ SmiTst(v0, t0); | 2874 __ SmiTst(v0, a4); |
2883 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through); | 2875 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through); |
2884 | 2876 |
2885 context()->Plug(if_true, if_false); | 2877 context()->Plug(if_true, if_false); |
2886 } | 2878 } |
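Reviewer note: SmiTst ANDs the value with the smi tag mask into a4; with kSmiTag == 0 the value is a smi exactly when that AND is zero. A one-function illustration (assumes V8's tag constants, kSmiTagSize == 1 and kSmiTag == 0):

    #include <cstdint>

    bool IsSmi(int64_t tagged) {
      const int64_t kSmiTagMask = 1;       // (1 << kSmiTagSize) - 1
      return (tagged & kSmiTagMask) == 0;  // what SmiTst + Split(eq) test
    }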
2887 | 2879 |
2888 | 2880 |
2889 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { | 2881 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { |
2890 ZoneList<Expression*>* args = expr->arguments(); | 2882 ZoneList<Expression*>* args = expr->arguments(); |
2891 ASSERT(args->length() == 1); | 2883 ASSERT(args->length() == 1); |
2892 | 2884 |
2893 VisitForAccumulatorValue(args->at(0)); | 2885 VisitForAccumulatorValue(args->at(0)); |
(...skipping 22 matching lines...) |
2916 Label materialize_true, materialize_false; | 2908 Label materialize_true, materialize_false; |
2917 Label* if_true = NULL; | 2909 Label* if_true = NULL; |
2918 Label* if_false = NULL; | 2910 Label* if_false = NULL; |
2919 Label* fall_through = NULL; | 2911 Label* fall_through = NULL; |
2920 context()->PrepareTest(&materialize_true, &materialize_false, | 2912 context()->PrepareTest(&materialize_true, &materialize_false, |
2921 &if_true, &if_false, &fall_through); | 2913 &if_true, &if_false, &fall_through); |
2922 | 2914 |
2923 __ JumpIfSmi(v0, if_false); | 2915 __ JumpIfSmi(v0, if_false); |
2924 __ LoadRoot(at, Heap::kNullValueRootIndex); | 2916 __ LoadRoot(at, Heap::kNullValueRootIndex); |
2925 __ Branch(if_true, eq, v0, Operand(at)); | 2917 __ Branch(if_true, eq, v0, Operand(at)); |
2926 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); | 2918 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); |
2927 // Undetectable objects behave like undefined when tested with typeof. | 2919 // Undetectable objects behave like undefined when tested with typeof. |
2928 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset)); | 2920 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset)); |
2929 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); | 2921 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); |
2930 __ Branch(if_false, ne, at, Operand(zero_reg)); | 2922 __ Branch(if_false, ne, at, Operand(zero_reg)); |
2931 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset)); | 2923 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset)); |
2932 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2924 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
2933 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2925 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2934 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE), | 2926 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE), |
2935 if_true, if_false, fall_through); | 2927 if_true, if_false, fall_through); |
2936 | 2928 |
(...skipping 31 matching lines...) |
2968 VisitForAccumulatorValue(args->at(0)); | 2960 VisitForAccumulatorValue(args->at(0)); |
2969 | 2961 |
2970 Label materialize_true, materialize_false; | 2962 Label materialize_true, materialize_false; |
2971 Label* if_true = NULL; | 2963 Label* if_true = NULL; |
2972 Label* if_false = NULL; | 2964 Label* if_false = NULL; |
2973 Label* fall_through = NULL; | 2965 Label* fall_through = NULL; |
2974 context()->PrepareTest(&materialize_true, &materialize_false, | 2966 context()->PrepareTest(&materialize_true, &materialize_false, |
2975 &if_true, &if_false, &fall_through); | 2967 &if_true, &if_false, &fall_through); |
2976 | 2968 |
2977 __ JumpIfSmi(v0, if_false); | 2969 __ JumpIfSmi(v0, if_false); |
2978 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 2970 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
2979 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); | 2971 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); |
2980 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2972 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
2981 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); | 2973 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); |
2982 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through); | 2974 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through); |
2983 | 2975 |
2984 context()->Plug(if_true, if_false); | 2976 context()->Plug(if_true, if_false); |
2985 } | 2977 } |
2986 | 2978 |
2987 | 2979 |
2988 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( | 2980 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( |
2989 CallRuntime* expr) { | 2981 CallRuntime* expr) { |
2990 ZoneList<Expression*>* args = expr->arguments(); | 2982 ZoneList<Expression*>* args = expr->arguments(); |
2991 ASSERT(args->length() == 1); | 2983 ASSERT(args->length() == 1); |
2992 | 2984 |
2993 VisitForAccumulatorValue(args->at(0)); | 2985 VisitForAccumulatorValue(args->at(0)); |
2994 | 2986 |
2995 Label materialize_true, materialize_false, skip_lookup; | 2987 Label materialize_true, materialize_false, skip_lookup; |
2996 Label* if_true = NULL; | 2988 Label* if_true = NULL; |
2997 Label* if_false = NULL; | 2989 Label* if_false = NULL; |
2998 Label* fall_through = NULL; | 2990 Label* fall_through = NULL; |
2999 context()->PrepareTest(&materialize_true, &materialize_false, | 2991 context()->PrepareTest(&materialize_true, &materialize_false, |
3000 &if_true, &if_false, &fall_through); | 2992 &if_true, &if_false, &fall_through); |
3001 | 2993 |
3002 __ AssertNotSmi(v0); | 2994 __ AssertNotSmi(v0); |
3003 | 2995 |
3004 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | 2996 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
3005 __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset)); | 2997 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset)); |
3006 __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf); | 2998 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf); |
3007 __ Branch(&skip_lookup, ne, t0, Operand(zero_reg)); | 2999 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg)); |
3008 | 3000 |
3009 // Check for fast case object. Generate false result for slow case object. | 3001 // Check for fast case object. Generate false result for slow case object. |
3010 __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | 3002 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
3011 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); | 3003 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); |
3012 __ LoadRoot(t0, Heap::kHashTableMapRootIndex); | 3004 __ LoadRoot(a4, Heap::kHashTableMapRootIndex); |
3013 __ Branch(if_false, eq, a2, Operand(t0)); | 3005 __ Branch(if_false, eq, a2, Operand(a4)); |
3014 | 3006 |
3015 // Look for valueOf name in the descriptor array, and indicate false if | 3007 // Look for valueOf name in the descriptor array, and indicate false if |
3016 // found. Since we omit an enumeration index check, if it is added via a | 3008 // found. Since we omit an enumeration index check, if it is added via a |
3017 // transition that shares its descriptor array, this is a false positive. | 3009 // transition that shares its descriptor array, this is a false positive. |
3018 Label entry, loop, done; | 3010 Label entry, loop, done; |
3019 | 3011 |
3020 // Skip loop if no descriptors are valid. | 3012 // Skip loop if no descriptors are valid. |
3021 __ NumberOfOwnDescriptors(a3, a1); | 3013 __ NumberOfOwnDescriptors(a3, a1); |
3022 __ Branch(&done, eq, a3, Operand(zero_reg)); | 3014 __ Branch(&done, eq, a3, Operand(zero_reg)); |
3023 | 3015 |
3024 __ LoadInstanceDescriptors(a1, t0); | 3016 __ LoadInstanceDescriptors(a1, a4); |
3025 // t0: descriptor array. | 3017 // a4: descriptor array. |
3026 // a3: valid entries in the descriptor array. | 3018 // a3: valid entries in the descriptor array. |
3027 STATIC_ASSERT(kSmiTag == 0); | 3019 STATIC_ASSERT(kSmiTag == 0); |
3028 STATIC_ASSERT(kSmiTagSize == 1); | 3020 STATIC_ASSERT(kSmiTagSize == 1); |
3029 STATIC_ASSERT(kPointerSize == 4); | 3021 // Not needed on MIPS64, where kPointerSize == 8. |
| 3022 // STATIC_ASSERT(kPointerSize == 4); |
3030 __ li(at, Operand(DescriptorArray::kDescriptorSize)); | 3023 __ li(at, Operand(DescriptorArray::kDescriptorSize)); |
3031 __ Mul(a3, a3, at); | 3024 __ Dmul(a3, a3, at); |
3032 // Calculate location of the first key name. | 3025 // Calculate location of the first key name. |
3033 __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); | 3026 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); |
3034 // Calculate the end of the descriptor array. | 3027 // Calculate the end of the descriptor array. |
3035 __ mov(a2, t0); | 3028 __ mov(a2, a4); |
3036 __ sll(t1, a3, kPointerSizeLog2); | 3029 __ dsll(a5, a3, kPointerSizeLog2); |
3037 __ Addu(a2, a2, t1); | 3030 __ Daddu(a2, a2, a5); |
3038 | 3031 |
3039 // Loop through all the keys in the descriptor array. If one of these is the | 3032 // Loop through all the keys in the descriptor array. If one of these is the |
3040 // string "valueOf" the result is false. | 3033 // string "valueOf" the result is false. |
3041 // The use of t2 to store the valueOf string assumes that it is not otherwise | 3034 // The use of a6 to store the valueOf string assumes that it is not otherwise |
3042 // used in the loop below. | 3035 // used in the loop below. |
3043 __ li(t2, Operand(isolate()->factory()->value_of_string())); | 3036 __ li(a6, Operand(isolate()->factory()->value_of_string())); |
3044 __ jmp(&entry); | 3037 __ jmp(&entry); |
3045 __ bind(&loop); | 3038 __ bind(&loop); |
3046 __ lw(a3, MemOperand(t0, 0)); | 3039 __ ld(a3, MemOperand(a4, 0)); |
3047 __ Branch(if_false, eq, a3, Operand(t2)); | 3040 __ Branch(if_false, eq, a3, Operand(a6)); |
3048 __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); | 3041 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); |
3049 __ bind(&entry); | 3042 __ bind(&entry); |
3050 __ Branch(&loop, ne, t0, Operand(a2)); | 3043 __ Branch(&loop, ne, a4, Operand(a2)); |
3051 | 3044 |
3052 __ bind(&done); | 3045 __ bind(&done); |
3053 | 3046 |
3054 // Set the bit in the map to indicate that there is no local valueOf field. | 3047 // Set the bit in the map to indicate that there is no local valueOf field. |
3055 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset)); | 3048 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset)); |
3056 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); | 3049 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); |
3057 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset)); | 3050 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset)); |
3058 | 3051 |
3059 __ bind(&skip_lookup); | 3052 __ bind(&skip_lookup); |
3060 | 3053 |
3061 // If a valueOf property is not found on the object, check that its | 3054 // If a valueOf property is not found on the object, check that its |
3062 // prototype is the unmodified String prototype. If not, the result is false. | 3055 // prototype is the unmodified String prototype. If not, the result is false. |
3063 __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset)); | 3056 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset)); |
3064 __ JumpIfSmi(a2, if_false); | 3057 __ JumpIfSmi(a2, if_false); |
3065 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); | 3058 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); |
3066 __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 3059 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
3067 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); | 3060 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); |
3068 __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); | 3061 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); |
3069 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3062 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3070 Split(eq, a2, Operand(a3), if_true, if_false, fall_through); | 3063 Split(eq, a2, Operand(a3), if_true, if_false, fall_through); |
3071 | 3064 |
3072 context()->Plug(if_true, if_false); | 3065 context()->Plug(if_true, if_false); |
3073 } | 3066 } |
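Reviewer note: the loop above strides through the descriptor array comparing every key against the interned "valueOf" string; a hit forces the slow path, since a user-defined valueOf changes default-value conversion. A minimal sketch under that assumption (hypothetical flat key array standing in for the descriptor array):

    #include <string>
    #include <vector>

    // Keys laid out flat, one every descriptor_size slots, as in the loop above.
    bool HasOwnValueOf(const std::vector<std::string>& keys, int num_descriptors,
                       int descriptor_size) {
      for (int i = 0; i < num_descriptors * descriptor_size; i += descriptor_size) {
        if (keys[i] == "valueOf") return true;  // Branch(if_false, eq, a3, a6)
      }
      return false;
    }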
3074 | 3067 |
3075 | 3068 |
3076 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { | 3069 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { |
3077 ZoneList<Expression*>* args = expr->arguments(); | 3070 ZoneList<Expression*>* args = expr->arguments(); |
3078 ASSERT(args->length() == 1); | 3071 ASSERT(args->length() == 1); |
(...skipping 24 matching lines...) |
3103 VisitForAccumulatorValue(args->at(0)); | 3096 VisitForAccumulatorValue(args->at(0)); |
3104 | 3097 |
3105 Label materialize_true, materialize_false; | 3098 Label materialize_true, materialize_false; |
3106 Label* if_true = NULL; | 3099 Label* if_true = NULL; |
3107 Label* if_false = NULL; | 3100 Label* if_false = NULL; |
3108 Label* fall_through = NULL; | 3101 Label* fall_through = NULL; |
3109 context()->PrepareTest(&materialize_true, &materialize_false, | 3102 context()->PrepareTest(&materialize_true, &materialize_false, |
3110 &if_true, &if_false, &fall_through); | 3103 &if_true, &if_false, &fall_through); |
3111 | 3104 |
3112 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); | 3105 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); |
3113 __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset)); | 3106 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset)); |
3114 __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); | 3107 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); |
3115 __ li(t0, 0x80000000); | 3108 __ li(a4, 0x80000000); |
3116 Label not_nan; | 3109 Label not_nan; |
3117 __ Branch(¬_nan, ne, a2, Operand(t0)); | 3110 __ Branch(¬_nan, ne, a2, Operand(a4)); |
3118 __ mov(t0, zero_reg); | 3111 __ mov(a4, zero_reg); |
3119 __ mov(a2, a1); | 3112 __ mov(a2, a1); |
3120 __ bind(¬_nan); | 3113 __ bind(¬_nan); |
3121 | 3114 |
3122 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3115 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3123 Split(eq, a2, Operand(t0), if_true, if_false, fall_through); | 3116 Split(eq, a2, Operand(a4), if_true, if_false, fall_through); |
3124 | 3117 |
3125 context()->Plug(if_true, if_false); | 3118 context()->Plug(if_true, if_false); |
3126 } | 3119 } |
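Reviewer note: the test above relies on -0.0 being the only IEEE-754 double whose upper word is 0x80000000 and whose lower word is zero; the not_nan fall-through re-points the comparison at the mantissa word once the exponent word has matched. A runnable illustration of that bit pattern (not V8 code):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    bool IsMinusZero(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);
      uint32_t hi = static_cast<uint32_t>(bits >> 32);  // exponent word (a2)
      uint32_t lo = static_cast<uint32_t>(bits);        // mantissa word (a1)
      return hi == 0x80000000u && lo == 0u;
    }

    int main() {
      assert(IsMinusZero(-0.0));
      assert(!IsMinusZero(0.0));
      assert(!IsMinusZero(-1.0));
    }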
3127 | 3120 |
3128 | 3121 |
3129 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { | 3122 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { |
3130 ZoneList<Expression*>* args = expr->arguments(); | 3123 ZoneList<Expression*>* args = expr->arguments(); |
3131 ASSERT(args->length() == 1); | 3124 ASSERT(args->length() == 1); |
3132 | 3125 |
3133 VisitForAccumulatorValue(args->at(0)); | 3126 VisitForAccumulatorValue(args->at(0)); |
(...skipping 41 matching lines...) |
3175 ASSERT(expr->arguments()->length() == 0); | 3168 ASSERT(expr->arguments()->length() == 0); |
3176 | 3169 |
3177 Label materialize_true, materialize_false; | 3170 Label materialize_true, materialize_false; |
3178 Label* if_true = NULL; | 3171 Label* if_true = NULL; |
3179 Label* if_false = NULL; | 3172 Label* if_false = NULL; |
3180 Label* fall_through = NULL; | 3173 Label* fall_through = NULL; |
3181 context()->PrepareTest(&materialize_true, &materialize_false, | 3174 context()->PrepareTest(&materialize_true, &materialize_false, |
3182 &if_true, &if_false, &fall_through); | 3175 &if_true, &if_false, &fall_through); |
3183 | 3176 |
3184 // Get the frame pointer for the calling frame. | 3177 // Get the frame pointer for the calling frame. |
3185 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 3178 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
3186 | 3179 |
3187 // Skip the arguments adaptor frame if it exists. | 3180 // Skip the arguments adaptor frame if it exists. |
3188 Label check_frame_marker; | 3181 Label check_frame_marker; |
3189 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset)); | 3182 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset)); |
3190 __ Branch(&check_frame_marker, ne, | 3183 __ Branch(&check_frame_marker, ne, |
3191 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 3184 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
3192 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); | 3185 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); |
3193 | 3186 |
3194 // Check the marker in the calling frame. | 3187 // Check the marker in the calling frame. |
3195 __ bind(&check_frame_marker); | 3188 __ bind(&check_frame_marker); |
3196 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset)); | 3189 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset)); |
3197 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3190 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3198 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)), | 3191 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)), |
3199 if_true, if_false, fall_through); | 3192 if_true, if_false, fall_through); |
3200 | 3193 |
3201 context()->Plug(if_true, if_false); | 3194 context()->Plug(if_true, if_false); |
3202 } | 3195 } |
3203 | 3196 |
3204 | 3197 |
3205 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { | 3198 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { |
3206 ZoneList<Expression*>* args = expr->arguments(); | 3199 ZoneList<Expression*>* args = expr->arguments(); |
(...skipping 33 matching lines...) |
3240 } | 3233 } |
3241 | 3234 |
3242 | 3235 |
3243 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { | 3236 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { |
3244 ASSERT(expr->arguments()->length() == 0); | 3237 ASSERT(expr->arguments()->length() == 0); |
3245 Label exit; | 3238 Label exit; |
3246 // Get the number of formal parameters. | 3239 // Get the number of formal parameters. |
3247 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); | 3240 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); |
3248 | 3241 |
3249 // Check if the calling frame is an arguments adaptor frame. | 3242 // Check if the calling frame is an arguments adaptor frame. |
3250 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 3243 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
3251 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); | 3244 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); |
3252 __ Branch(&exit, ne, a3, | 3245 __ Branch(&exit, ne, a3, |
3253 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 3246 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
3254 | 3247 |
3255 // Arguments adaptor case: Read the arguments length from the | 3248 // Arguments adaptor case: Read the arguments length from the |
3256 // adaptor frame. | 3249 // adaptor frame. |
3257 __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 3250 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
3258 | 3251 |
3259 __ bind(&exit); | 3252 __ bind(&exit); |
3260 context()->Plug(v0); | 3253 context()->Plug(v0); |
3261 } | 3254 } |
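Reviewer note: the fast path above returns the formal parameter count unless the caller frame turns out to be an arguments adaptor (inserted when the actual argument count differs from the formal count), in which case the recorded actual length wins. A toy model of that walk (hypothetical Frame struct, not V8's frame layout):

    struct Frame {
      const Frame* caller;
      bool is_arguments_adaptor;  // marker checked via kContextOffset above
      int length;                 // kLengthOffset in the adaptor frame
    };

    int ArgumentsLength(const Frame* fp, int formal_parameter_count) {
      const Frame* caller = fp->caller;
      return caller->is_arguments_adaptor ? caller->length
                                          : formal_parameter_count;
    }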
3262 | 3255 |
3263 | 3256 |
3264 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { | 3257 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { |
3265 ZoneList<Expression*>* args = expr->arguments(); | 3258 ZoneList<Expression*>* args = expr->arguments(); |
3266 ASSERT(args->length() == 1); | 3259 ASSERT(args->length() == 1); |
3267 Label done, null, function, non_function_constructor; | 3260 Label done, null, function, non_function_constructor; |
(...skipping 15 matching lines...) |
3283 FIRST_SPEC_OBJECT_TYPE + 1); | 3276 FIRST_SPEC_OBJECT_TYPE + 1); |
3284 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); | 3277 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); |
3285 | 3278 |
3286 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == | 3279 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == |
3287 LAST_SPEC_OBJECT_TYPE - 1); | 3280 LAST_SPEC_OBJECT_TYPE - 1); |
3288 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE)); | 3281 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE)); |
3289 // Assume that there is no larger type. | 3282 // Assume that there is no larger type. |
3290 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); | 3283 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); |
3291 | 3284 |
3292 // Check if the constructor in the map is a JS function. | 3285 // Check if the constructor in the map is a JS function. |
3293 __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset)); | 3286 __ ld(v0, FieldMemOperand(v0, Map::kConstructorOffset)); |
3294 __ GetObjectType(v0, a1, a1); | 3287 __ GetObjectType(v0, a1, a1); |
3295 __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE)); | 3288 __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE)); |
3296 | 3289 |
3297 // v0 now contains the constructor function. Grab the | 3290 // v0 now contains the constructor function. Grab the |
3298 // instance class name from there. | 3291 // instance class name from there. |
3299 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset)); | 3292 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset)); |
3300 __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset)); | 3293 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset)); |
3301 __ Branch(&done); | 3294 __ Branch(&done); |
3302 | 3295 |
3303 // Functions have class 'Function'. | 3296 // Functions have class 'Function'. |
3304 __ bind(&function); | 3297 __ bind(&function); |
3305 __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex); | 3298 __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex); |
3306 __ jmp(&done); | 3299 __ jmp(&done); |
3307 | 3300 |
3308 // Objects with a non-function constructor have class 'Object'. | 3301 // Objects with a non-function constructor have class 'Object'. |
3309 __ bind(&non_function_constructor); | 3302 __ bind(&non_function_constructor); |
3310 __ LoadRoot(v0, Heap::kObject_stringRootIndex); | 3303 __ LoadRoot(v0, Heap::kObject_stringRootIndex); |
(...skipping 43 matching lines...) |
3354 | 3347 |
3355 VisitForAccumulatorValue(args->at(0)); // Load the object. | 3348 VisitForAccumulatorValue(args->at(0)); // Load the object. |
3356 | 3349 |
3357 Label done; | 3350 Label done; |
3358 // If the object is a smi return the object. | 3351 // If the object is a smi return the object. |
3359 __ JumpIfSmi(v0, &done); | 3352 __ JumpIfSmi(v0, &done); |
3360 // If the object is not a value type, return the object. | 3353 // If the object is not a value type, return the object. |
3361 __ GetObjectType(v0, a1, a1); | 3354 __ GetObjectType(v0, a1, a1); |
3362 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE)); | 3355 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE)); |
3363 | 3356 |
3364 __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset)); | 3357 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset)); |
3365 | 3358 |
3366 __ bind(&done); | 3359 __ bind(&done); |
3367 context()->Plug(v0); | 3360 context()->Plug(v0); |
3368 } | 3361 } |
3369 | 3362 |
3370 | 3363 |
3371 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { | 3364 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { |
3372 ZoneList<Expression*>* args = expr->arguments(); | 3365 ZoneList<Expression*>* args = expr->arguments(); |
3373 ASSERT(args->length() == 2); | 3366 ASSERT(args->length() == 2); |
3374 ASSERT_NE(NULL, args->at(1)->AsLiteral()); | 3367 ASSERT_NE(NULL, args->at(1)->AsLiteral()); |
3375 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); | 3368 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); |
3376 | 3369 |
3377 VisitForAccumulatorValue(args->at(0)); // Load the object. | 3370 VisitForAccumulatorValue(args->at(0)); // Load the object. |
3378 | 3371 |
3379 Label runtime, done, not_date_object; | 3372 Label runtime, done, not_date_object; |
3380 Register object = v0; | 3373 Register object = v0; |
3381 Register result = v0; | 3374 Register result = v0; |
3382 Register scratch0 = t5; | 3375 Register scratch0 = t1; |
3383 Register scratch1 = a1; | 3376 Register scratch1 = a1; |
3384 | 3377 |
3385 __ JumpIfSmi(object, ¬_date_object); | 3378 __ JumpIfSmi(object, ¬_date_object); |
3386 __ GetObjectType(object, scratch1, scratch1); | 3379 __ GetObjectType(object, scratch1, scratch1); |
3387 __ Branch(¬_date_object, ne, scratch1, Operand(JS_DATE_TYPE)); | 3380 __ Branch(¬_date_object, ne, scratch1, Operand(JS_DATE_TYPE)); |
3388 | 3381 |
3389 if (index->value() == 0) { | 3382 if (index->value() == 0) { |
3390 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset)); | 3383 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset)); |
3391 __ jmp(&done); | 3384 __ jmp(&done); |
3392 } else { | 3385 } else { |
3393 if (index->value() < JSDate::kFirstUncachedField) { | 3386 if (index->value() < JSDate::kFirstUncachedField) { |
3394 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 3387 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
3395 __ li(scratch1, Operand(stamp)); | 3388 __ li(scratch1, Operand(stamp)); |
3396 __ lw(scratch1, MemOperand(scratch1)); | 3389 __ ld(scratch1, MemOperand(scratch1)); |
3397 __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); | 3390 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); |
3398 __ Branch(&runtime, ne, scratch1, Operand(scratch0)); | 3391 __ Branch(&runtime, ne, scratch1, Operand(scratch0)); |
3399 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset + | 3392 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset + |
3400 kPointerSize * index->value())); | 3393 kPointerSize * index->value())); |
3401 __ jmp(&done); | 3394 __ jmp(&done); |
3402 } | 3395 } |
3403 __ bind(&runtime); | 3396 __ bind(&runtime); |
3404 __ PrepareCallCFunction(2, scratch1); | 3397 __ PrepareCallCFunction(2, scratch1); |
3405 __ li(a1, Operand(index)); | 3398 __ li(a1, Operand(index)); |
3406 __ Move(a0, object); | 3399 __ Move(a0, object); |
3407 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 3400 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
3408 __ jmp(&done); | 3401 __ jmp(&done); |
3409 } | 3402 } |
(...skipping 18 matching lines...) |
3428 VisitForAccumulatorValue(args->at(0)); // string | 3421 VisitForAccumulatorValue(args->at(0)); // string |
3429 __ Pop(index, value); | 3422 __ Pop(index, value); |
3430 | 3423 |
3431 if (FLAG_debug_code) { | 3424 if (FLAG_debug_code) { |
3432 __ SmiTst(value, at); | 3425 __ SmiTst(value, at); |
3433 __ Check(eq, kNonSmiValue, at, Operand(zero_reg)); | 3426 __ Check(eq, kNonSmiValue, at, Operand(zero_reg)); |
3434 __ SmiTst(index, at); | 3427 __ SmiTst(index, at); |
3435 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg)); | 3428 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg)); |
3436 __ SmiUntag(index, index); | 3429 __ SmiUntag(index, index); |
3437 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 3430 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
3438 Register scratch = t5; | 3431 Register scratch = t1; |
3439 __ EmitSeqStringSetCharCheck( | 3432 __ EmitSeqStringSetCharCheck( |
3440 string, index, value, scratch, one_byte_seq_type); | 3433 string, index, value, scratch, one_byte_seq_type); |
3441 __ SmiTag(index, index); | 3434 __ SmiTag(index, index); |
3442 } | 3435 } |
3443 | 3436 |
3444 __ SmiUntag(value, value); | 3437 __ SmiUntag(value, value); |
3445 __ Addu(at, | 3438 __ Daddu(at, |
3446 string, | 3439 string, |
3447 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); | 3440 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); |
3448 __ SmiUntag(index); | 3441 __ SmiUntag(index); |
3449 __ Addu(at, at, index); | 3442 __ Daddu(at, at, index); |
3450 __ sb(value, MemOperand(at)); | 3443 __ sb(value, MemOperand(at)); |
3451 context()->Plug(string); | 3444 context()->Plug(string); |
3452 } | 3445 } |
3453 | 3446 |
3454 | 3447 |
3455 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { | 3448 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { |
3456 ZoneList<Expression*>* args = expr->arguments(); | 3449 ZoneList<Expression*>* args = expr->arguments(); |
3457 ASSERT_EQ(3, args->length()); | 3450 ASSERT_EQ(3, args->length()); |
3458 | 3451 |
3459 Register string = v0; | 3452 Register string = v0; |
3460 Register index = a1; | 3453 Register index = a1; |
3461 Register value = a2; | 3454 Register value = a2; |
3462 | 3455 |
3463 VisitForStackValue(args->at(1)); // index | 3456 VisitForStackValue(args->at(1)); // index |
3464 VisitForStackValue(args->at(2)); // value | 3457 VisitForStackValue(args->at(2)); // value |
3465 VisitForAccumulatorValue(args->at(0)); // string | 3458 VisitForAccumulatorValue(args->at(0)); // string |
3466 __ Pop(index, value); | 3459 __ Pop(index, value); |
3467 | 3460 |
3468 if (FLAG_debug_code) { | 3461 if (FLAG_debug_code) { |
3469 __ SmiTst(value, at); | 3462 __ SmiTst(value, at); |
3470 __ Check(eq, kNonSmiValue, at, Operand(zero_reg)); | 3463 __ Check(eq, kNonSmiValue, at, Operand(zero_reg)); |
3471 __ SmiTst(index, at); | 3464 __ SmiTst(index, at); |
3472 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg)); | 3465 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg)); |
3473 __ SmiUntag(index, index); | 3466 __ SmiUntag(index, index); |
3474 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 3467 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
3475 Register scratch = t5; | 3468 Register scratch = t1; |
3476 __ EmitSeqStringSetCharCheck( | 3469 __ EmitSeqStringSetCharCheck( |
3477 string, index, value, scratch, two_byte_seq_type); | 3470 string, index, value, scratch, two_byte_seq_type); |
3478 __ SmiTag(index, index); | 3471 __ SmiTag(index, index); |
3479 } | 3472 } |
3480 | 3473 |
3481 __ SmiUntag(value, value); | 3474 __ SmiUntag(value, value); |
3482 __ Addu(at, | 3475 __ Daddu(at, |
3483 string, | 3476 string, |
3484 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 3477 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
3485 __ Addu(at, at, index); | 3478 __ dsra(index, index, 32 - 1); |
| 3479 __ Daddu(at, at, index); |
3486 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); | 3480 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); |
3487 __ sh(value, MemOperand(at)); | 3481 __ sh(value, MemOperand(at)); |
3488 context()->Plug(string); | 3482 context()->Plug(string); |
3489 } | 3483 } |
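Reviewer note: the dsra by 32 - 1 is the interesting MIPS64 change here. Assuming V8's 64-bit smi layout (a 32-bit payload stored in the upper word, low 32 bits zero), one arithmetic right shift by 31 both untags the index and scales it by sizeof(uint16_t), the two-byte character size. Runnable check of that identity:

    #include <cassert>
    #include <cstdint>

    int64_t SmiTag(int32_t value) {            // payload lives in the upper word
      return static_cast<int64_t>(value) << 32;
    }

    int main() {
      for (int32_t i : {0, 1, 7, 40000}) {
        int64_t smi = SmiTag(i);
        assert((smi >> (32 - 1)) == static_cast<int64_t>(i) * 2);  // dsra trick
      }
    }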
3490 | 3484 |
3491 | 3485 |
3492 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { | 3486 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { |
3493 // Load the arguments on the stack and call the runtime function. | 3487 // Load the arguments on the stack and call the runtime function. |
3494 ZoneList<Expression*>* args = expr->arguments(); | 3488 ZoneList<Expression*>* args = expr->arguments(); |
3495 ASSERT(args->length() == 2); | 3489 ASSERT(args->length() == 2); |
(...skipping 15 matching lines...) |
3511 | 3505 |
3512 Label done; | 3506 Label done; |
3513 // If the object is a smi, return the value. | 3507 // If the object is a smi, return the value. |
3514 __ JumpIfSmi(a1, &done); | 3508 __ JumpIfSmi(a1, &done); |
3515 | 3509 |
3516 // If the object is not a value type, return the value. | 3510 // If the object is not a value type, return the value. |
3517 __ GetObjectType(a1, a2, a2); | 3511 __ GetObjectType(a1, a2, a2); |
3518 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE)); | 3512 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE)); |
3519 | 3513 |
3520 // Store the value. | 3514 // Store the value. |
3521 __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset)); | 3515 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset)); |
3522 // Update the write barrier. Save the value as it will be | 3516 // Update the write barrier. Save the value as it will be |
3523 // overwritten by the write barrier code and is needed afterward. | 3517 // overwritten by the write barrier code and is needed afterward. |
3524 __ mov(a2, v0); | 3518 __ mov(a2, v0); |
3525 __ RecordWriteField( | 3519 __ RecordWriteField( |
3526 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs); | 3520 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs); |
3527 | 3521 |
3528 __ bind(&done); | 3522 __ bind(&done); |
3529 context()->Plug(v0); | 3523 context()->Plug(v0); |
3530 } | 3524 } |
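Reviewer note: the mov(a2, v0) above exists because RecordWriteField clobbers its value register, while the stored value must survive in v0 as the expression result. A toy remembered-set model of what the barrier records (illustrative only, not V8's GC interface):

    #include <unordered_set>

    struct Heap { std::unordered_set<void**> remembered_slots; };

    void StoreWithWriteBarrier(Heap& heap, void** slot, void* value) {
      *slot = value;  // sd(v0, FieldMemOperand(a1, JSValue::kValueOffset))
      // Record the slot so the GC re-scans it later; this is the role
      // RecordWriteField plays for the emitted store above.
      heap.remembered_slots.insert(slot);
    }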
3531 | 3525 |
(...skipping 167 matching lines...) |
3699 Label runtime, done; | 3693 Label runtime, done; |
3700 // Check for non-function argument (including proxy). | 3694 // Check for non-function argument (including proxy). |
3701 __ JumpIfSmi(v0, &runtime); | 3695 __ JumpIfSmi(v0, &runtime); |
3702 __ GetObjectType(v0, a1, a1); | 3696 __ GetObjectType(v0, a1, a1); |
3703 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE)); | 3697 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE)); |
3704 | 3698 |
3705 // InvokeFunction requires the function in a1. Move it in there. | 3699 // InvokeFunction requires the function in a1. Move it in there. |
3706 __ mov(a1, result_register()); | 3700 __ mov(a1, result_register()); |
3707 ParameterCount count(arg_count); | 3701 ParameterCount count(arg_count); |
3708 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper()); | 3702 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper()); |
3709 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3703 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
3710 __ jmp(&done); | 3704 __ jmp(&done); |
3711 | 3705 |
3712 __ bind(&runtime); | 3706 __ bind(&runtime); |
3713 __ push(v0); | 3707 __ push(v0); |
3714 __ CallRuntime(Runtime::kCall, args->length()); | 3708 __ CallRuntime(Runtime::kCall, args->length()); |
3715 __ bind(&done); | 3709 __ bind(&done); |
3716 | 3710 |
3717 context()->Plug(v0); | 3711 context()->Plug(v0); |
3718 } | 3712 } |
3719 | 3713 |
(...skipping 26 matching lines...) |
3746 __ Abort(kAttemptToUseUndefinedCache); | 3740 __ Abort(kAttemptToUseUndefinedCache); |
3747 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); | 3741 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
3748 context()->Plug(v0); | 3742 context()->Plug(v0); |
3749 return; | 3743 return; |
3750 } | 3744 } |
3751 | 3745 |
3752 VisitForAccumulatorValue(args->at(1)); | 3746 VisitForAccumulatorValue(args->at(1)); |
3753 | 3747 |
3754 Register key = v0; | 3748 Register key = v0; |
3755 Register cache = a1; | 3749 Register cache = a1; |
3756 __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 3750 __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
3757 __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); | 3751 __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); |
3758 __ lw(cache, | 3752 __ ld(cache, |
3759 ContextOperand( | 3753 ContextOperand( |
3760 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); | 3754 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); |
3761 __ lw(cache, | 3755 __ ld(cache, |
3762 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); | 3756 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); |
3763 | 3757 |
3764 | 3758 |
3765 Label done, not_found; | 3759 Label done, not_found; |
3766 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 3760 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
3767 __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); | 3761 __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); |
3768 // a2 now holds finger offset as a smi. | 3762 // a2 now holds finger offset as a smi. |
3769 __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3763 __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
3770 // a3 now points to the start of fixed array elements. | 3764 // a3 now points to the start of fixed array elements. |
3771 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize); | 3765 __ SmiScale(at, a2, kPointerSizeLog2); |
3772 __ addu(a3, a3, at); | 3766 __ daddu(a3, a3, at); |
3773 // a3 now points to key of indexed element of cache. | 3767 // a3 now points to key of indexed element of cache. |
3774 __ lw(a2, MemOperand(a3)); | 3768 __ ld(a2, MemOperand(a3)); |
3775 __ Branch(¬_found, ne, key, Operand(a2)); | 3769 __ Branch(¬_found, ne, key, Operand(a2)); |
3776 | 3770 |
3777 __ lw(v0, MemOperand(a3, kPointerSize)); | 3771 __ ld(v0, MemOperand(a3, kPointerSize)); |
3778 __ Branch(&done); | 3772 __ Branch(&done); |
3779 | 3773 |
3780 __ bind(¬_found); | 3774 __ bind(¬_found); |
3781 // Call runtime to perform the lookup. | 3775 // Call runtime to perform the lookup. |
3782 __ Push(cache, key); | 3776 __ Push(cache, key); |
3783 __ CallRuntime(Runtime::kGetFromCache, 2); | 3777 __ CallRuntime(Runtime::kGetFromCache, 2); |
3784 | 3778 |
3785 __ bind(&done); | 3779 __ bind(&done); |
3786 context()->Plug(v0); | 3780 context()->Plug(v0); |
3787 } | 3781 } |
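Reviewer note: the fast path above checks only the cache entry at the "finger" (the most recently hit key), and SmiScale replaces the 32-bit sll because the finger is stored as a smi. A minimal sketch of the lookup over a flat [key, value] array (hypothetical layout):

    #include <cstddef>
    #include <vector>

    // entries = [k0, v0, k1, v1, ...]; finger indexes the key of the last hit.
    const void* GetFromCacheFastPath(const std::vector<const void*>& entries,
                                     std::size_t finger, const void* key) {
      if (entries[finger] == key) return entries[finger + 1];
      return nullptr;  // miss: fall back to Runtime::kGetFromCache
    }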
3788 | 3782 |
3789 | 3783 |
3790 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { | 3784 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { |
3791 ZoneList<Expression*>* args = expr->arguments(); | 3785 ZoneList<Expression*>* args = expr->arguments(); |
3792 VisitForAccumulatorValue(args->at(0)); | 3786 VisitForAccumulatorValue(args->at(0)); |
3793 | 3787 |
3794 Label materialize_true, materialize_false; | 3788 Label materialize_true, materialize_false; |
3795 Label* if_true = NULL; | 3789 Label* if_true = NULL; |
3796 Label* if_false = NULL; | 3790 Label* if_false = NULL; |
3797 Label* fall_through = NULL; | 3791 Label* fall_through = NULL; |
3798 context()->PrepareTest(&materialize_true, &materialize_false, | 3792 context()->PrepareTest(&materialize_true, &materialize_false, |
3799 &if_true, &if_false, &fall_through); | 3793 &if_true, &if_false, &fall_through); |
3800 | 3794 |
3801 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset)); | 3795 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset)); |
3802 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask)); | 3796 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask)); |
3803 | 3797 |
3804 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3798 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
3805 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through); | 3799 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through); |
3806 | 3800 |
3807 context()->Plug(if_true, if_false); | 3801 context()->Plug(if_true, if_false); |
3808 } | 3802 } |
3809 | 3803 |
3810 | 3804 |
3811 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { | 3805 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { |
3812 ZoneList<Expression*>* args = expr->arguments(); | 3806 ZoneList<Expression*>* args = expr->arguments(); |
3813 ASSERT(args->length() == 1); | 3807 ASSERT(args->length() == 1); |
3814 VisitForAccumulatorValue(args->at(0)); | 3808 VisitForAccumulatorValue(args->at(0)); |
3815 | 3809 |
3816 __ AssertString(v0); | 3810 __ AssertString(v0); |
3817 | 3811 |
3818 __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset)); | 3812 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset)); |
3819 __ IndexFromHash(v0, v0); | 3813 __ IndexFromHash(v0, v0); |
3820 | 3814 |
3821 context()->Plug(v0); | 3815 context()->Plug(v0); |
3822 } | 3816 } |
3823 | 3817 |
3824 | 3818 |
3825 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { | 3819 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { |
3826 Label bailout, done, one_char_separator, long_separator, | 3820 Label bailout, done, one_char_separator, long_separator, |
3827 non_trivial_array, not_size_one_array, loop, | 3821 non_trivial_array, not_size_one_array, loop, |
3828 empty_separator_loop, one_char_separator_loop, | 3822 empty_separator_loop, one_char_separator_loop, |
3829 one_char_separator_loop_entry, long_separator_loop; | 3823 one_char_separator_loop_entry, long_separator_loop; |
3830 ZoneList<Expression*>* args = expr->arguments(); | 3824 ZoneList<Expression*>* args = expr->arguments(); |
3831 ASSERT(args->length() == 2); | 3825 ASSERT(args->length() == 2); |
3832 VisitForStackValue(args->at(1)); | 3826 VisitForStackValue(args->at(1)); |
3833 VisitForAccumulatorValue(args->at(0)); | 3827 VisitForAccumulatorValue(args->at(0)); |
3834 | 3828 |
3835 // All aliases of the same register have disjoint lifetimes. | 3829 // All aliases of the same register have disjoint lifetimes. |
3836 Register array = v0; | 3830 Register array = v0; |
3837 Register elements = no_reg; // Will be v0. | 3831 Register elements = no_reg; // Will be v0. |
3838 Register result = no_reg; // Will be v0. | 3832 Register result = no_reg; // Will be v0. |
3839 Register separator = a1; | 3833 Register separator = a1; |
3840 Register array_length = a2; | 3834 Register array_length = a2; |
3841 Register result_pos = no_reg; // Will be a2. | 3835 Register result_pos = no_reg; // Will be a2. |
3842 Register string_length = a3; | 3836 Register string_length = a3; |
3843 Register string = t0; | 3837 Register string = a4; |
3844 Register element = t1; | 3838 Register element = a5; |
3845 Register elements_end = t2; | 3839 Register elements_end = a6; |
3846 Register scratch1 = t3; | 3840 Register scratch1 = a7; |
3847 Register scratch2 = t5; | 3841 Register scratch2 = t1; |
3848 Register scratch3 = t4; | 3842 Register scratch3 = t0; |
3849 | 3843 |
3850 // Separator operand is on the stack. | 3844 // Separator operand is on the stack. |
3851 __ pop(separator); | 3845 __ pop(separator); |
3852 | 3846 |
3853 // Check that the array is a JSArray. | 3847 // Check that the array is a JSArray. |
3854 __ JumpIfSmi(array, &bailout); | 3848 __ JumpIfSmi(array, &bailout); |
3855 __ GetObjectType(array, scratch1, scratch2); | 3849 __ GetObjectType(array, scratch1, scratch2); |
3856 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE)); | 3850 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE)); |
3857 | 3851 |
3858 // Check that the array has fast elements. | 3852 // Check that the array has fast elements. |
3859 __ CheckFastElements(scratch1, scratch2, &bailout); | 3853 __ CheckFastElements(scratch1, scratch2, &bailout); |
3860 | 3854 |
3861 // If the array has length zero, return the empty string. | 3855 // If the array has length zero, return the empty string. |
3862 __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); | 3856 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); |
3863 __ SmiUntag(array_length); | 3857 __ SmiUntag(array_length); |
3864 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg)); | 3858 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg)); |
3865 __ LoadRoot(v0, Heap::kempty_stringRootIndex); | 3859 __ LoadRoot(v0, Heap::kempty_stringRootIndex); |
3866 __ Branch(&done); | 3860 __ Branch(&done); |
3867 | 3861 |
3868 __ bind(&non_trivial_array); | 3862 __ bind(&non_trivial_array); |
3869 | 3863 |
3870 // Get the FixedArray containing array's elements. | 3864 // Get the FixedArray containing array's elements. |
3871 elements = array; | 3865 elements = array; |
3872 __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset)); | 3866 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset)); |
3873 array = no_reg; // End of array's live range. | 3867 array = no_reg; // End of array's live range. |
3874 | 3868 |
3875 // Check that all array elements are sequential ASCII strings, and | 3869 // Check that all array elements are sequential ASCII strings, and |
3876 // accumulate the sum of their lengths, as a smi-encoded value. | 3870 // accumulate the sum of their lengths, as a smi-encoded value. |
3877 __ mov(string_length, zero_reg); | 3871 __ mov(string_length, zero_reg); |
3878 __ Addu(element, | 3872 __ Daddu(element, |
3879 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3873 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
3880 __ sll(elements_end, array_length, kPointerSizeLog2); | 3874 __ dsll(elements_end, array_length, kPointerSizeLog2); |
3881 __ Addu(elements_end, element, elements_end); | 3875 __ Daddu(elements_end, element, elements_end); |
3882 // Loop condition: while (element < elements_end). | 3876 // Loop condition: while (element < elements_end). |
3883 // Live values in registers: | 3877 // Live values in registers: |
3884 // elements: Fixed array of strings. | 3878 // elements: Fixed array of strings. |
3885 // array_length: Length of the fixed array of strings (not smi) | 3879 // array_length: Length of the fixed array of strings (not smi) |
3886 // separator: Separator string | 3880 // separator: Separator string |
3887 // string_length: Accumulated sum of string lengths (smi). | 3881 // string_length: Accumulated sum of string lengths (smi). |
3888 // element: Current array element. | 3882 // element: Current array element. |
3889 // elements_end: Array end. | 3883 // elements_end: Array end. |
3890 if (generate_debug_code_) { | 3884 if (generate_debug_code_) { |
3891 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin, | 3885 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin, |
3892 array_length, Operand(zero_reg)); | 3886 array_length, Operand(zero_reg)); |
3893 } | 3887 } |
3894 __ bind(&loop); | 3888 __ bind(&loop); |
3895 __ lw(string, MemOperand(element)); | 3889 __ ld(string, MemOperand(element)); |
3896 __ Addu(element, element, kPointerSize); | 3890 __ Daddu(element, element, kPointerSize); |
3897 __ JumpIfSmi(string, &bailout); | 3891 __ JumpIfSmi(string, &bailout); |
3898 __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); | 3892 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); |
3899 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 3893 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
3900 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); | 3894 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); |
3901 __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); | 3895 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset)); |
3902 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3); | 3896 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3); |
3903 __ BranchOnOverflow(&bailout, scratch3); | 3897 __ BranchOnOverflow(&bailout, scratch3); |
3904 __ Branch(&loop, lt, element, Operand(elements_end)); | 3898 __ Branch(&loop, lt, element, Operand(elements_end)); |
3905 | 3899 |
3906 // If array_length is 1, return elements[0], a string. | 3900 // If array_length is 1, return elements[0], a string. |
3907 __ Branch(&not_size_one_array, ne, array_length, Operand(1)); | 3901 __ Branch(&not_size_one_array, ne, array_length, Operand(1)); |
3908 __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize)); | 3902 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize)); |
3909 __ Branch(&done); | 3903 __ Branch(&done); |
3910 | 3904 |
3911 __ bind(&not_size_one_array); | 3905 __ bind(&not_size_one_array); |
3912 | 3906 |
3913 // Live values in registers: | 3907 // Live values in registers: |
3914 // separator: Separator string | 3908 // separator: Separator string |
3915 // array_length: Length of the array. | 3909 // array_length: Length of the array. |
3916 // string_length: Sum of string lengths (smi). | 3910 // string_length: Sum of string lengths (smi). |
3917 // elements: FixedArray of strings. | 3911 // elements: FixedArray of strings. |
3918 | 3912 |
3919 // Check that the separator is a flat ASCII string. | 3913 // Check that the separator is a flat ASCII string. |
3920 __ JumpIfSmi(separator, &bailout); | 3914 __ JumpIfSmi(separator, &bailout); |
3921 __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset)); | 3915 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset)); |
3922 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 3916 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
3923 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); | 3917 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); |
3924 | 3918 |
3925 // Add (separator length times array_length) - separator length to the | 3919 // Add (separator length times array_length) - separator length to the |
3926 // string_length to get the length of the result string. array_length is not a | 3920 // string_length to get the length of the result string. array_length is not a |
3927 // smi but the other values are, so the result is a smi. | 3921 // smi but the other values are, so the result is a smi. |
3928 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); | 3922 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); |
3929 __ Subu(string_length, string_length, Operand(scratch1)); | 3923 __ Dsubu(string_length, string_length, Operand(scratch1)); |
3930 __ Mult(array_length, scratch1); | 3924 __ SmiUntag(scratch1); |
| 3925 __ Dmult(array_length, scratch1); |
3931 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are | 3926 // Check for smi overflow. No overflow if the upper 64 bits of the 128-bit |
3932 // zero. | 3927 // Dmult result (read via mfhi) are zero. |
3933 __ mfhi(scratch2); | 3928 __ mfhi(scratch2); |
3934 __ Branch(&bailout, ne, scratch2, Operand(zero_reg)); | 3929 __ Branch(&bailout, ne, scratch2, Operand(zero_reg)); |
3935 __ mflo(scratch2); | 3930 __ mflo(scratch2); |
3936 __ And(scratch3, scratch2, Operand(0x80000000)); | 3931 __ SmiUntag(string_length); |
3937 __ Branch(&bailout, ne, scratch3, Operand(zero_reg)); | |
3938 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3); | 3932 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3); |
3939 __ BranchOnOverflow(&bailout, scratch3); | 3933 __ BranchOnOverflow(&bailout, scratch3); |
3940 __ SmiUntag(string_length); | |
3941 | 3934 |
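The block above computes result_length = sum_of_lengths + separator_length * array_length - separator_length, bailing out if either the multiply or the add leaves smi range. The same arithmetic in plain C++, regrouped as separator_length * (array_length - 1); this sketch assumes a GCC/Clang toolchain for __builtin_add_overflow, which stands in for the mfhi check and AdduAndCheckForOverflow:

    #include <cstdint>
    #include <optional>

    std::optional<int32_t> JoinedLength(int32_t sum_of_lengths,
                                        int32_t separator_length,
                                        int32_t array_length) {
      int64_t product =
          static_cast<int64_t>(separator_length) * (array_length - 1);
      if (product != static_cast<int32_t>(product)) {
        return std::nullopt;  // product left 32-bit range: the mfhi bailout
      }
      int32_t total;
      if (__builtin_add_overflow(sum_of_lengths,
                                 static_cast<int32_t>(product), &total)) {
        return std::nullopt;  // AdduAndCheckForOverflow would take &bailout
      }
      return total;  // fits: safe to allocate the result string
    }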
3942 // Get first element in the array to free up the elements register to be used | 3935 // Get first element in the array to free up the elements register to be used |
3943 // for the result. | 3936 // for the result. |
3944 __ Addu(element, | 3937 __ Daddu(element, |
3945 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3938 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
3946 result = elements; // End of live range for elements. | 3939 result = elements; // End of live range for elements. |
3947 elements = no_reg; | 3940 elements = no_reg; |
3948 // Live values in registers: | 3941 // Live values in registers: |
3949 // element: First array element | 3942 // element: First array element |
3950 // separator: Separator string | 3943 // separator: Separator string |
3951 // string_length: Length of result string (not smi) | 3944 // string_length: Length of result string (not smi) |
3952 // array_length: Length of the array. | 3945 // array_length: Length of the array. |
3953 __ AllocateAsciiString(result, | 3946 __ AllocateAsciiString(result, |
3954 string_length, | 3947 string_length, |
3955 scratch1, | 3948 scratch1, |
3956 scratch2, | 3949 scratch2, |
3957 elements_end, | 3950 elements_end, |
3958 &bailout); | 3951 &bailout); |
3959 // Prepare for looping. Set up elements_end to the end of the array. Set | 3952 // Prepare for looping. Set up elements_end to the end of the array. Set |
3960 // result_pos to the position in the result where the first character | 3953 // result_pos to the position in the result where the first character |
3961 // is written. | 3954 // is written. |
3962 __ sll(elements_end, array_length, kPointerSizeLog2); | 3955 __ dsll(elements_end, array_length, kPointerSizeLog2); |
3963 __ Addu(elements_end, element, elements_end); | 3956 __ Daddu(elements_end, element, elements_end); |
3964 result_pos = array_length; // End of live range for array_length. | 3957 result_pos = array_length; // End of live range for array_length. |
3965 array_length = no_reg; | 3958 array_length = no_reg; |
3966 __ Addu(result_pos, | 3959 __ Daddu(result_pos, |
3967 result, | 3960 result, |
3968 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); | 3961 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); |
3969 | 3962 |
3970 // Check the length of the separator. | 3963 // Check the length of the separator. |
3971 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); | 3964 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); |
3972 __ li(at, Operand(Smi::FromInt(1))); | 3965 __ li(at, Operand(Smi::FromInt(1))); |
3973 __ Branch(&one_char_separator, eq, scratch1, Operand(at)); | 3966 __ Branch(&one_char_separator, eq, scratch1, Operand(at)); |
3974 __ Branch(&long_separator, gt, scratch1, Operand(at)); | 3967 __ Branch(&long_separator, gt, scratch1, Operand(at)); |
3975 | 3968 |
3976 // Empty separator case. | 3969 // Empty separator case. |
3977 __ bind(&empty_separator_loop); | 3970 __ bind(&empty_separator_loop); |
3978 // Live values in registers: | 3971 // Live values in registers: |
3979 // result_pos: the position to which we are currently copying characters. | 3972 // result_pos: the position to which we are currently copying characters. |
3980 // element: Current array element. | 3973 // element: Current array element. |
3981 // elements_end: Array end. | 3974 // elements_end: Array end. |
3982 | 3975 |
3983 // Copy next array element to the result. | 3976 // Copy next array element to the result. |
3984 __ lw(string, MemOperand(element)); | 3977 __ ld(string, MemOperand(element)); |
3985 __ Addu(element, element, kPointerSize); | 3978 __ Daddu(element, element, kPointerSize); |
3986 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); | 3979 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset)); |
3987 __ SmiUntag(string_length); | 3980 __ SmiUntag(string_length); |
3988 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); | 3981 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); |
3989 __ CopyBytes(string, result_pos, string_length, scratch1); | 3982 __ CopyBytes(string, result_pos, string_length, scratch1); |
3990 // End while (element < elements_end). | 3983 // End while (element < elements_end). |
3991 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end)); | 3984 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end)); |
3992 ASSERT(result.is(v0)); | 3985 ASSERT(result.is(v0)); |
3993 __ Branch(&done); | 3986 __ Branch(&done); |
3994 | 3987 |
3995 // One-character separator case. | 3988 // One-character separator case. |
3996 __ bind(&one_char_separator); | 3989 __ bind(&one_char_separator); |
3997 // Replace separator with its ASCII character value. | 3990 // Replace separator with its ASCII character value. |
3998 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); | 3991 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); |
3999 // Jump into the loop after the code that copies the separator, so the first | 3992 // Jump into the loop after the code that copies the separator, so the first |
4000 // element is not preceded by a separator. | 3993 // element is not preceded by a separator. |
4001 __ jmp(&one_char_separator_loop_entry); | 3994 __ jmp(&one_char_separator_loop_entry); |
4002 | 3995 |
4003 __ bind(&one_char_separator_loop); | 3996 __ bind(&one_char_separator_loop); |
4004 // Live values in registers: | 3997 // Live values in registers: |
4005 // result_pos: the position to which we are currently copying characters. | 3998 // result_pos: the position to which we are currently copying characters. |
4006 // element: Current array element. | 3999 // element: Current array element. |
4007 // elements_end: Array end. | 4000 // elements_end: Array end. |
4008 // separator: Single separator ASCII char (in lower byte). | 4001 // separator: Single separator ASCII char (in lower byte). |
4009 | 4002 |
4010 // Copy the separator character to the result. | 4003 // Copy the separator character to the result. |
4011 __ sb(separator, MemOperand(result_pos)); | 4004 __ sb(separator, MemOperand(result_pos)); |
4012 __ Addu(result_pos, result_pos, 1); | 4005 __ Daddu(result_pos, result_pos, 1); |
4013 | 4006 |
4014 // Copy next array element to the result. | 4007 // Copy next array element to the result. |
4015 __ bind(&one_char_separator_loop_entry); | 4008 __ bind(&one_char_separator_loop_entry); |
4016 __ lw(string, MemOperand(element)); | 4009 __ ld(string, MemOperand(element)); |
4017 __ Addu(element, element, kPointerSize); | 4010 __ Daddu(element, element, kPointerSize); |
4018 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); | 4011 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset)); |
4019 __ SmiUntag(string_length); | 4012 __ SmiUntag(string_length); |
4020 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); | 4013 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); |
4021 __ CopyBytes(string, result_pos, string_length, scratch1); | 4014 __ CopyBytes(string, result_pos, string_length, scratch1); |
4022 // End while (element < elements_end). | 4015 // End while (element < elements_end). |
4023 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end)); | 4016 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end)); |
4024 ASSERT(result.is(v0)); | 4017 ASSERT(result.is(v0)); |
4025 __ Branch(&done); | 4018 __ Branch(&done); |
4026 | 4019 |
4027 // Long separator case (separator is more than one character). Entry is at the | 4020 // Long separator case (separator is more than one character). Entry is at the |
4028 // label long_separator below. | 4021 // label long_separator below. |
4029 __ bind(&long_separator_loop); | 4022 __ bind(&long_separator_loop); |
4030 // Live values in registers: | 4023 // Live values in registers: |
4031 // result_pos: the position to which we are currently copying characters. | 4024 // result_pos: the position to which we are currently copying characters. |
4032 // element: Current array element. | 4025 // element: Current array element. |
4033 // elements_end: Array end. | 4026 // elements_end: Array end. |
4034 // separator: Separator string. | 4027 // separator: Separator string. |
4035 | 4028 |
4036 // Copy the separator to the result. | 4029 // Copy the separator to the result. |
4037 __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset)); | 4030 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset)); |
4038 __ SmiUntag(string_length); | 4031 __ SmiUntag(string_length); |
4039 __ Addu(string, | 4032 __ Daddu(string, |
4040 separator, | 4033 separator, |
4041 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); | 4034 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); |
4042 __ CopyBytes(string, result_pos, string_length, scratch1); | 4035 __ CopyBytes(string, result_pos, string_length, scratch1); |
4043 | 4036 |
4044 __ bind(&long_separator); | 4037 __ bind(&long_separator); |
4045 __ lw(string, MemOperand(element)); | 4038 __ ld(string, MemOperand(element)); |
4046 __ Addu(element, element, kPointerSize); | 4039 __ Daddu(element, element, kPointerSize); |
4047 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); | 4040 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset)); |
4048 __ SmiUntag(string_length); | 4041 __ SmiUntag(string_length); |
4049 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); | 4042 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); |
4050 __ CopyBytes(string, result_pos, string_length, scratch1); | 4043 __ CopyBytes(string, result_pos, string_length, scratch1); |
4051 // End while (element < elements_end). | 4044 // End while (element < elements_end). |
4052 __ Branch(&long_separator_loop, lt, element, Operand(elements_end)); | 4045 __ Branch(&long_separator_loop, lt, element, Operand(elements_end)); |
4053 ASSERT(result.is(v0)); | 4046 ASSERT(result.is(v0)); |
4054 __ Branch(&done); | 4047 __ Branch(&done); |
4055 | 4048 |
4056 __ bind(&bailout); | 4049 __ bind(&bailout); |
4057 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); | 4050 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
4058 __ bind(&done); | 4051 __ bind(&done); |
4059 context()->Plug(v0); | 4052 context()->Plug(v0); |
4060 } | 4053 } |
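Stepping back, EmitFastAsciiArrayJoin is the fast path for Array.prototype.join over flat one-byte strings: validate the array, sum the lengths, allocate the result once, then copy with one of three loops keyed on separator length. A compact C++ analogue of that control flow; this is a sketch, not the emitted semantics (it ignores smi tagging, GC, and the bailout-to-undefined protocol):

    #include <string>
    #include <vector>

    std::string FastJoin(const std::vector<std::string>& elements,
                         const std::string& separator) {
      if (elements.empty()) return std::string();    // empty_string root
      if (elements.size() == 1) return elements[0];  // not_size_one_array
      size_t total = separator.size() * (elements.size() - 1);
      for (const std::string& s : elements) total += s.size();
      std::string result;
      result.reserve(total);  // one allocation, like AllocateAsciiString
      // The generated loops get the same effect by jumping into the middle
      // of the loop (one_char_separator_loop_entry), so no separator
      // precedes the first element.
      result += elements[0];
      for (size_t i = 1; i < elements.size(); ++i) {
        result += separator;
        result += elements[i];
      }
      return result;
    }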
4061 | 4054 |
4062 | 4055 |
4063 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { | 4056 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { |
4064 ASSERT(expr->arguments()->length() == 0); | 4057 ASSERT(expr->arguments()->length() == 0); |
4065 ExternalReference debug_is_active = | 4058 ExternalReference debug_is_active = |
4066 ExternalReference::debug_is_active_address(isolate()); | 4059 ExternalReference::debug_is_active_address(isolate()); |
4067 __ li(at, Operand(debug_is_active)); | 4060 __ li(at, Operand(debug_is_active)); |
4068 __ lb(v0, MemOperand(at)); | 4061 __ lbu(v0, MemOperand(at)); |
4069 __ SmiTag(v0); | 4062 __ SmiTag(v0); |
4070 context()->Plug(v0); | 4063 context()->Plug(v0); |
4071 } | 4064 } |
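EmitDebugIsActive just loads the isolate's debug-is-active byte and returns it smi-tagged. As a sketch of what reaches JavaScript; the 32-bit shift assumes mips64-style smi tagging with the payload in the upper word:

    #include <cstdint>

    int64_t DebugIsActiveAsSmi(const uint8_t* debug_is_active_address) {
      // lbu + SmiTag: read the flag byte, then tag it as a smi.
      return static_cast<int64_t>(*debug_is_active_address) << 32;
    }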
4072 | 4065 |
4073 | 4066 |
4074 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { | 4067 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { |
4075 if (expr->function() != NULL && | 4068 if (expr->function() != NULL && |
4076 expr->function()->intrinsic_type == Runtime::INLINE) { | 4069 expr->function()->intrinsic_type == Runtime::INLINE) { |
4077 Comment cmnt(masm_, "[ InlineRuntimeCall"); | 4070 Comment cmnt(masm_, "[ InlineRuntimeCall"); |
4078 EmitInlineRuntimeCall(expr); | 4071 EmitInlineRuntimeCall(expr); |
4079 return; | 4072 return; |
4080 } | 4073 } |
4081 | 4074 |
4082 Comment cmnt(masm_, "[ CallRuntime"); | 4075 Comment cmnt(masm_, "[ CallRuntime"); |
4083 ZoneList<Expression*>* args = expr->arguments(); | 4076 ZoneList<Expression*>* args = expr->arguments(); |
4084 int arg_count = args->length(); | 4077 int arg_count = args->length(); |
4085 | 4078 |
4086 if (expr->is_jsruntime()) { | 4079 if (expr->is_jsruntime()) { |
4087 // Push the builtins object as the receiver. | 4080 // Push the builtins object as the receiver. |
4088 Register receiver = LoadIC::ReceiverRegister(); | 4081 Register receiver = LoadIC::ReceiverRegister(); |
4089 __ lw(receiver, GlobalObjectOperand()); | 4082 __ ld(receiver, GlobalObjectOperand()); |
4090 __ lw(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset)); | 4083 __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset)); |
4091 __ push(receiver); | 4084 __ push(receiver); |
4092 | 4085 |
4093 // Load the function from the receiver. | 4086 // Load the function from the receiver. |
4094 __ li(LoadIC::NameRegister(), Operand(expr->name())); | 4087 __ li(LoadIC::NameRegister(), Operand(expr->name())); |
4095 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); | 4088 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); |
4096 | 4089 |
4097 // Push the target function under the receiver. | 4090 // Push the target function under the receiver. |
4098 __ lw(at, MemOperand(sp, 0)); | 4091 __ ld(at, MemOperand(sp, 0)); |
4099 __ push(at); | 4092 __ push(at); |
4100 __ sw(v0, MemOperand(sp, kPointerSize)); | 4093 __ sd(v0, MemOperand(sp, kPointerSize)); |
4101 | 4094 |
4102 // Push the arguments ("left-to-right"). | 4095 // Push the arguments ("left-to-right"). |
4103 int arg_count = args->length(); | 4096 int arg_count = args->length(); |
4104 for (int i = 0; i < arg_count; i++) { | 4097 for (int i = 0; i < arg_count; i++) { |
4105 VisitForStackValue(args->at(i)); | 4098 VisitForStackValue(args->at(i)); |
4106 } | 4099 } |
4107 | 4100 |
4108 // Record source position of the IC call. | 4101 // Record source position of the IC call. |
4109 SetSourcePosition(expr->position()); | 4102 SetSourcePosition(expr->position()); |
4110 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); | 4103 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); |
4111 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | 4104 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
4112 __ CallStub(&stub); | 4105 __ CallStub(&stub); |
4113 | 4106 |
4114 // Restore context register. | 4107 // Restore context register. |
4115 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4108 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
4116 | 4109 |
4117 context()->DropAndPlug(1, v0); | 4110 context()->DropAndPlug(1, v0); |
4118 } else { | 4111 } else { |
4119 // Push the arguments ("left-to-right"). | 4112 // Push the arguments ("left-to-right"). |
4120 for (int i = 0; i < arg_count; i++) { | 4113 for (int i = 0; i < arg_count; i++) { |
4121 VisitForStackValue(args->at(i)); | 4114 VisitForStackValue(args->at(i)); |
4122 } | 4115 } |
4123 | 4116 |
4124 // Call the C runtime function. | 4117 // Call the C runtime function. |
4125 __ CallRuntime(expr->function(), arg_count); | 4118 __ CallRuntime(expr->function(), arg_count); |
(...skipping 15 matching lines...) |
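For the js-runtime branch above, the stack that CallFunctionStub sees is built receiver-first, with the IC-loaded target function slotted in underneath it before the arguments go on. Schematically, for a hypothetical two-argument call (offsets in pointer-sized slots):

    sp + 3 * kPointerSize : target function  (reloaded into a1 for the stub)
    sp + 2 * kPointerSize : builtins object  (the receiver)
    sp + 1 * kPointerSize : argument 0
    sp + 0 * kPointerSize : argument 1

This layout is why the function is fetched back with ld a1, MemOperand(sp, (arg_count + 1) * kPointerSize) just before the stub call.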
4141 __ li(a1, Operand(Smi::FromInt(strict_mode()))); | 4134 __ li(a1, Operand(Smi::FromInt(strict_mode()))); |
4142 __ push(a1); | 4135 __ push(a1); |
4143 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 4136 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
4144 context()->Plug(v0); | 4137 context()->Plug(v0); |
4145 } else if (proxy != NULL) { | 4138 } else if (proxy != NULL) { |
4146 Variable* var = proxy->var(); | 4139 Variable* var = proxy->var(); |
4147 // Delete of an unqualified identifier is disallowed in strict mode | 4140 // Delete of an unqualified identifier is disallowed in strict mode |
4148 // but "delete this" is allowed. | 4141 // but "delete this" is allowed. |
4149 ASSERT(strict_mode() == SLOPPY || var->is_this()); | 4142 ASSERT(strict_mode() == SLOPPY || var->is_this()); |
4150 if (var->IsUnallocated()) { | 4143 if (var->IsUnallocated()) { |
4151 __ lw(a2, GlobalObjectOperand()); | 4144 __ ld(a2, GlobalObjectOperand()); |
4152 __ li(a1, Operand(var->name())); | 4145 __ li(a1, Operand(var->name())); |
4153 __ li(a0, Operand(Smi::FromInt(SLOPPY))); | 4146 __ li(a0, Operand(Smi::FromInt(SLOPPY))); |
4154 __ Push(a2, a1, a0); | 4147 __ Push(a2, a1, a0); |
4155 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | 4148 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
4156 context()->Plug(v0); | 4149 context()->Plug(v0); |
4157 } else if (var->IsStackAllocated() || var->IsContextSlot()) { | 4150 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
4158 // Result of deleting non-global, non-dynamic variables is false. | 4151 // Result of deleting non-global, non-dynamic variables is false. |
4159 // The subexpression does not have side effects. | 4152 // The subexpression does not have side effects. |
4160 context()->Plug(var->is_this()); | 4153 context()->Plug(var->is_this()); |
4161 } else { | 4154 } else { |
(...skipping 101 matching lines...) |
4263 EmitVariableLoad(expr->expression()->AsVariableProxy()); | 4256 EmitVariableLoad(expr->expression()->AsVariableProxy()); |
4264 } else { | 4257 } else { |
4265 // Reserve space for result of postfix operation. | 4258 // Reserve space for result of postfix operation. |
4266 if (expr->is_postfix() && !context()->IsEffect()) { | 4259 if (expr->is_postfix() && !context()->IsEffect()) { |
4267 __ li(at, Operand(Smi::FromInt(0))); | 4260 __ li(at, Operand(Smi::FromInt(0))); |
4268 __ push(at); | 4261 __ push(at); |
4269 } | 4262 } |
4270 if (assign_type == NAMED_PROPERTY) { | 4263 if (assign_type == NAMED_PROPERTY) { |
4271 // Put the object both on the stack and in the register. | 4264 // Put the object both on the stack and in the register. |
4272 VisitForStackValue(prop->obj()); | 4265 VisitForStackValue(prop->obj()); |
4273 __ lw(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); | 4266 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 0)); |
4274 EmitNamedPropertyLoad(prop); | 4267 EmitNamedPropertyLoad(prop); |
4275 } else { | 4268 } else { |
4276 VisitForStackValue(prop->obj()); | 4269 VisitForStackValue(prop->obj()); |
4277 VisitForStackValue(prop->key()); | 4270 VisitForStackValue(prop->key()); |
4278 __ lw(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize)); | 4271 __ ld(LoadIC::ReceiverRegister(), MemOperand(sp, 1 * kPointerSize)); |
4279 __ lw(LoadIC::NameRegister(), MemOperand(sp, 0)); | 4272 __ ld(LoadIC::NameRegister(), MemOperand(sp, 0)); |
4280 EmitKeyedPropertyLoad(prop); | 4273 EmitKeyedPropertyLoad(prop); |
4281 } | 4274 } |
4282 } | 4275 } |
4283 | 4276 |
4284 // We need a second deoptimization point after loading the value | 4277 // We need a second deoptimization point after loading the value |
4285 // in case evaluating the property load may have a side effect. | 4278 // in case evaluating the property load may have a side effect. |
4286 if (assign_type == VARIABLE) { | 4279 if (assign_type == VARIABLE) { |
4287 PrepareForBailout(expr->expression(), TOS_REG); | 4280 PrepareForBailout(expr->expression(), TOS_REG); |
4288 } else { | 4281 } else { |
4289 PrepareForBailoutForId(prop->LoadId(), TOS_REG); | 4282 PrepareForBailoutForId(prop->LoadId(), TOS_REG); |
(...skipping 13 matching lines...) Expand all Loading... |
4303 if (expr->is_postfix()) { | 4296 if (expr->is_postfix()) { |
4304 if (!context()->IsEffect()) { | 4297 if (!context()->IsEffect()) { |
4305 // Save the result on the stack. If we have a named or keyed property | 4298 // Save the result on the stack. If we have a named or keyed property |
4306 // we store the result under the receiver that is currently on top | 4299 // we store the result under the receiver that is currently on top |
4307 // of the stack. | 4300 // of the stack. |
4308 switch (assign_type) { | 4301 switch (assign_type) { |
4309 case VARIABLE: | 4302 case VARIABLE: |
4310 __ push(v0); | 4303 __ push(v0); |
4311 break; | 4304 break; |
4312 case NAMED_PROPERTY: | 4305 case NAMED_PROPERTY: |
4313 __ sw(v0, MemOperand(sp, kPointerSize)); | 4306 __ sd(v0, MemOperand(sp, kPointerSize)); |
4314 break; | 4307 break; |
4315 case KEYED_PROPERTY: | 4308 case KEYED_PROPERTY: |
4316 __ sw(v0, MemOperand(sp, 2 * kPointerSize)); | 4309 __ sd(v0, MemOperand(sp, 2 * kPointerSize)); |
4317 break; | 4310 break; |
4318 } | 4311 } |
4319 } | 4312 } |
4320 } | 4313 } |
4321 | 4314 |
4322 Register scratch1 = a1; | 4315 Register scratch1 = a1; |
4323 Register scratch2 = t0; | 4316 Register scratch2 = a4; |
4324 __ li(scratch1, Operand(Smi::FromInt(count_value))); | 4317 __ li(scratch1, Operand(Smi::FromInt(count_value))); |
4325 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2); | 4318 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2); |
4326 __ BranchOnNoOverflow(&done, scratch2); | 4319 __ BranchOnNoOverflow(&done, scratch2); |
4327 // Call stub. Undo operation first. | 4320 // Call stub. Undo operation first. |
4328 __ Move(v0, a0); | 4321 __ Move(v0, a0); |
4329 __ jmp(&stub_call); | 4322 __ jmp(&stub_call); |
4330 __ bind(&slow); | 4323 __ bind(&slow); |
4331 } | 4324 } |
4332 ToNumberStub convert_stub(isolate()); | 4325 ToNumberStub convert_stub(isolate()); |
4333 __ CallStub(&convert_stub); | 4326 __ CallStub(&convert_stub); |
4334 | 4327 |
4335 // Save result for postfix expressions. | 4328 // Save result for postfix expressions. |
4336 if (expr->is_postfix()) { | 4329 if (expr->is_postfix()) { |
4337 if (!context()->IsEffect()) { | 4330 if (!context()->IsEffect()) { |
4338 // Save the result on the stack. If we have a named or keyed property | 4331 // Save the result on the stack. If we have a named or keyed property |
4339 // we store the result under the receiver that is currently on top | 4332 // we store the result under the receiver that is currently on top |
4340 // of the stack. | 4333 // of the stack. |
4341 switch (assign_type) { | 4334 switch (assign_type) { |
4342 case VARIABLE: | 4335 case VARIABLE: |
4343 __ push(v0); | 4336 __ push(v0); |
4344 break; | 4337 break; |
4345 case NAMED_PROPERTY: | 4338 case NAMED_PROPERTY: |
4346 __ sw(v0, MemOperand(sp, kPointerSize)); | 4339 __ sd(v0, MemOperand(sp, kPointerSize)); |
4347 break; | 4340 break; |
4348 case KEYED_PROPERTY: | 4341 case KEYED_PROPERTY: |
4349 __ sw(v0, MemOperand(sp, 2 * kPointerSize)); | 4342 __ sd(v0, MemOperand(sp, 2 * kPointerSize)); |
4350 break; | 4343 break; |
4351 } | 4344 } |
4352 } | 4345 } |
4353 } | 4346 } |
4354 | 4347 |
4355 __ bind(&stub_call); | 4348 __ bind(&stub_call); |
4356 __ mov(a1, v0); | 4349 __ mov(a1, v0); |
4357 __ li(a0, Operand(Smi::FromInt(count_value))); | 4350 __ li(a0, Operand(Smi::FromInt(count_value))); |
4358 | 4351 |
4359 // Record position before stub call. | 4352 // Record position before stub call. |
(...skipping 61 matching lines...) |
4421 } | 4414 } |
4422 } | 4415 } |
4423 | 4416 |
4424 | 4417 |
4425 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { | 4418 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
4426 ASSERT(!context()->IsEffect()); | 4419 ASSERT(!context()->IsEffect()); |
4427 ASSERT(!context()->IsTest()); | 4420 ASSERT(!context()->IsTest()); |
4428 VariableProxy* proxy = expr->AsVariableProxy(); | 4421 VariableProxy* proxy = expr->AsVariableProxy(); |
4429 if (proxy != NULL && proxy->var()->IsUnallocated()) { | 4422 if (proxy != NULL && proxy->var()->IsUnallocated()) { |
4430 Comment cmnt(masm_, "[ Global variable"); | 4423 Comment cmnt(masm_, "[ Global variable"); |
4431 __ lw(LoadIC::ReceiverRegister(), GlobalObjectOperand()); | 4424 __ ld(LoadIC::ReceiverRegister(), GlobalObjectOperand()); |
4432 __ li(LoadIC::NameRegister(), Operand(proxy->name())); | 4425 __ li(LoadIC::NameRegister(), Operand(proxy->name())); |
4433 // Use a regular load, not a contextual load, to avoid a reference | 4426 // Use a regular load, not a contextual load, to avoid a reference |
4434 // error. | 4427 // error. |
4435 CallLoadIC(NOT_CONTEXTUAL); | 4428 CallLoadIC(NOT_CONTEXTUAL); |
4436 PrepareForBailout(expr, TOS_REG); | 4429 PrepareForBailout(expr, TOS_REG); |
4437 context()->Plug(v0); | 4430 context()->Plug(v0); |
4438 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { | 4431 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { |
4439 Comment cmnt(masm_, "[ Lookup slot"); | 4432 Comment cmnt(masm_, "[ Lookup slot"); |
4440 Label done, slow; | 4433 Label done, slow; |
4441 | 4434 |
(...skipping 26 matching lines...) |
4468 &if_true, &if_false, &fall_through); | 4461 &if_true, &if_false, &fall_through); |
4469 | 4462 |
4470 { AccumulatorValueContext context(this); | 4463 { AccumulatorValueContext context(this); |
4471 VisitForTypeofValue(sub_expr); | 4464 VisitForTypeofValue(sub_expr); |
4472 } | 4465 } |
4473 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 4466 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
4474 | 4467 |
4475 Factory* factory = isolate()->factory(); | 4468 Factory* factory = isolate()->factory(); |
4476 if (String::Equals(check, factory->number_string())) { | 4469 if (String::Equals(check, factory->number_string())) { |
4477 __ JumpIfSmi(v0, if_true); | 4470 __ JumpIfSmi(v0, if_true); |
4478 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); | 4471 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); |
4479 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 4472 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
4480 Split(eq, v0, Operand(at), if_true, if_false, fall_through); | 4473 Split(eq, v0, Operand(at), if_true, if_false, fall_through); |
4481 } else if (String::Equals(check, factory->string_string())) { | 4474 } else if (String::Equals(check, factory->string_string())) { |
4482 __ JumpIfSmi(v0, if_false); | 4475 __ JumpIfSmi(v0, if_false); |
4483 // Check for undetectable objects => false. | 4476 // Check for undetectable objects => false. |
4484 __ GetObjectType(v0, v0, a1); | 4477 __ GetObjectType(v0, v0, a1); |
4485 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE)); | 4478 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE)); |
4486 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); | 4479 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); |
4487 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); | 4480 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
4488 Split(eq, a1, Operand(zero_reg), | 4481 Split(eq, a1, Operand(zero_reg), |
4489 if_true, if_false, fall_through); | 4482 if_true, if_false, fall_through); |
4490 } else if (String::Equals(check, factory->symbol_string())) { | 4483 } else if (String::Equals(check, factory->symbol_string())) { |
4491 __ JumpIfSmi(v0, if_false); | 4484 __ JumpIfSmi(v0, if_false); |
4492 __ GetObjectType(v0, v0, a1); | 4485 __ GetObjectType(v0, v0, a1); |
4493 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through); | 4486 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through); |
4494 } else if (String::Equals(check, factory->boolean_string())) { | 4487 } else if (String::Equals(check, factory->boolean_string())) { |
4495 __ LoadRoot(at, Heap::kTrueValueRootIndex); | 4488 __ LoadRoot(at, Heap::kTrueValueRootIndex); |
4496 __ Branch(if_true, eq, v0, Operand(at)); | 4489 __ Branch(if_true, eq, v0, Operand(at)); |
4497 __ LoadRoot(at, Heap::kFalseValueRootIndex); | 4490 __ LoadRoot(at, Heap::kFalseValueRootIndex); |
4498 Split(eq, v0, Operand(at), if_true, if_false, fall_through); | 4491 Split(eq, v0, Operand(at), if_true, if_false, fall_through); |
4499 } else if (FLAG_harmony_typeof && | 4492 } else if (FLAG_harmony_typeof && |
4500 String::Equals(check, factory->null_string())) { | 4493 String::Equals(check, factory->null_string())) { |
4501 __ LoadRoot(at, Heap::kNullValueRootIndex); | 4494 __ LoadRoot(at, Heap::kNullValueRootIndex); |
4502 Split(eq, v0, Operand(at), if_true, if_false, fall_through); | 4495 Split(eq, v0, Operand(at), if_true, if_false, fall_through); |
4503 } else if (String::Equals(check, factory->undefined_string())) { | 4496 } else if (String::Equals(check, factory->undefined_string())) { |
4504 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 4497 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
4505 __ Branch(if_true, eq, v0, Operand(at)); | 4498 __ Branch(if_true, eq, v0, Operand(at)); |
4506 __ JumpIfSmi(v0, if_false); | 4499 __ JumpIfSmi(v0, if_false); |
4507 // Check for undetectable objects => true. | 4500 // Check for undetectable objects => true. |
4508 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); | 4501 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); |
4509 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); | 4502 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); |
4510 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); | 4503 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
4511 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); | 4504 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); |
4512 } else if (String::Equals(check, factory->function_string())) { | 4505 } else if (String::Equals(check, factory->function_string())) { |
4513 __ JumpIfSmi(v0, if_false); | 4506 __ JumpIfSmi(v0, if_false); |
4514 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | 4507 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); |
4515 __ GetObjectType(v0, v0, a1); | 4508 __ GetObjectType(v0, v0, a1); |
4516 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE)); | 4509 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE)); |
4517 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE), | 4510 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE), |
4518 if_true, if_false, fall_through); | 4511 if_true, if_false, fall_through); |
(...skipping 36 matching lines...) |
4555 context()->PrepareTest(&materialize_true, &materialize_false, | 4548 context()->PrepareTest(&materialize_true, &materialize_false, |
4556 &if_true, &if_false, &fall_through); | 4549 &if_true, &if_false, &fall_through); |
4557 | 4550 |
4558 Token::Value op = expr->op(); | 4551 Token::Value op = expr->op(); |
4559 VisitForStackValue(expr->left()); | 4552 VisitForStackValue(expr->left()); |
4560 switch (op) { | 4553 switch (op) { |
4561 case Token::IN: | 4554 case Token::IN: |
4562 VisitForStackValue(expr->right()); | 4555 VisitForStackValue(expr->right()); |
4563 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); | 4556 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); |
4564 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); | 4557 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); |
4565 __ LoadRoot(t0, Heap::kTrueValueRootIndex); | 4558 __ LoadRoot(a4, Heap::kTrueValueRootIndex); |
4566 Split(eq, v0, Operand(t0), if_true, if_false, fall_through); | 4559 Split(eq, v0, Operand(a4), if_true, if_false, fall_through); |
4567 break; | 4560 break; |
4568 | 4561 |
4569 case Token::INSTANCEOF: { | 4562 case Token::INSTANCEOF: { |
4570 VisitForStackValue(expr->right()); | 4563 VisitForStackValue(expr->right()); |
4571 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); | 4564 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags); |
4572 __ CallStub(&stub); | 4565 __ CallStub(&stub); |
4573 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 4566 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
4574 // The stub returns 0 for true. | 4567 // The stub returns 0 for true. |
4575 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through); | 4568 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through); |
4576 break; | 4569 break; |
(...skipping 52 matching lines...) |
4629 } else { | 4622 } else { |
4630 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); | 4623 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); |
4631 CallIC(ic, expr->CompareOperationFeedbackId()); | 4624 CallIC(ic, expr->CompareOperationFeedbackId()); |
4632 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through); | 4625 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through); |
4633 } | 4626 } |
4634 context()->Plug(if_true, if_false); | 4627 context()->Plug(if_true, if_false); |
4635 } | 4628 } |
4636 | 4629 |
4637 | 4630 |
4638 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { | 4631 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { |
4639 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 4632 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
4640 context()->Plug(v0); | 4633 context()->Plug(v0); |
4641 } | 4634 } |
4642 | 4635 |
4643 | 4636 |
4644 Register FullCodeGenerator::result_register() { | 4637 Register FullCodeGenerator::result_register() { |
4645 return v0; | 4638 return v0; |
4646 } | 4639 } |
4647 | 4640 |
4648 | 4641 |
4649 Register FullCodeGenerator::context_register() { | 4642 Register FullCodeGenerator::context_register() { |
4650 return cp; | 4643 return cp; |
4651 } | 4644 } |
4652 | 4645 |
4653 | 4646 |
4654 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 4647 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
4655 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); | 4648 ASSERT(IsAligned(frame_offset, kPointerSize)); |
4656 __ sw(value, MemOperand(fp, frame_offset)); | 4649 __ sd(value, MemOperand(fp, frame_offset)); |
4657 } | 4652 } |
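The rewritten assert states the usual power-of-two alignment identity: an offset is a multiple of kPointerSize exactly when its low bits are clear. In plain C++ (kPointerSize = 8 assumes the 64-bit target):

    #include <cstdint>

    const uintptr_t kPointerSize = 8;  // 64-bit pointers on mips64

    bool IsPointerAligned(uintptr_t frame_offset) {
      // IsAligned(frame_offset, kPointerSize) for a power-of-two size.
      return (frame_offset & (kPointerSize - 1)) == 0;
    }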
4658 | 4653 |
4659 | 4654 |
4660 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 4655 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
4661 __ lw(dst, ContextOperand(cp, context_index)); | 4656 __ ld(dst, ContextOperand(cp, context_index)); |
4662 } | 4657 } |
4663 | 4658 |
4664 | 4659 |
4665 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { | 4660 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { |
4666 Scope* declaration_scope = scope()->DeclarationScope(); | 4661 Scope* declaration_scope = scope()->DeclarationScope(); |
4667 if (declaration_scope->is_global_scope() || | 4662 if (declaration_scope->is_global_scope() || |
4668 declaration_scope->is_module_scope()) { | 4663 declaration_scope->is_module_scope()) { |
4669 // Contexts nested in the native context have a canonical empty function | 4664 // Contexts nested in the native context have a canonical empty function |
4670 // as their closure, not the anonymous closure containing the global | 4665 // as their closure, not the anonymous closure containing the global |
4671 // code. Pass a smi sentinel and let the runtime look up the empty | 4666 // code. Pass a smi sentinel and let the runtime look up the empty |
4672 // function. | 4667 // function. |
4673 __ li(at, Operand(Smi::FromInt(0))); | 4668 __ li(at, Operand(Smi::FromInt(0))); |
4674 } else if (declaration_scope->is_eval_scope()) { | 4669 } else if (declaration_scope->is_eval_scope()) { |
4675 // Contexts created by a call to eval have the same closure as the | 4670 // Contexts created by a call to eval have the same closure as the |
4676 // context calling eval, not the anonymous closure containing the eval | 4671 // context calling eval, not the anonymous closure containing the eval |
4677 // code. Fetch it from the context. | 4672 // code. Fetch it from the context. |
4678 __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX)); | 4673 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX)); |
4679 } else { | 4674 } else { |
4680 ASSERT(declaration_scope->is_function_scope()); | 4675 ASSERT(declaration_scope->is_function_scope()); |
4681 __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 4676 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
4682 } | 4677 } |
4683 __ push(at); | 4678 __ push(at); |
4684 } | 4679 } |
4685 | 4680 |
4686 | 4681 |
4687 // ---------------------------------------------------------------------------- | 4682 // ---------------------------------------------------------------------------- |
4688 // Non-local control flow support. | 4683 // Non-local control flow support. |
4689 | 4684 |
4690 void FullCodeGenerator::EnterFinallyBlock() { | 4685 void FullCodeGenerator::EnterFinallyBlock() { |
4691 ASSERT(!result_register().is(a1)); | 4686 ASSERT(!result_register().is(a1)); |
4692 // Store result register while executing finally block. | 4687 // Store result register while executing finally block. |
4693 __ push(result_register()); | 4688 __ push(result_register()); |
4694 // Cook the return address in ra onto the stack (smi-encoded Code* delta). | 4689 // Cook the return address in ra onto the stack (smi-encoded Code* delta). |
4695 __ Subu(a1, ra, Operand(masm_->CodeObject())); | 4690 __ Dsubu(a1, ra, Operand(masm_->CodeObject())); |
4696 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); | 4691 __ SmiTag(a1); |
4697 STATIC_ASSERT(0 == kSmiTag); | |
4698 __ Addu(a1, a1, Operand(a1)); // Convert to smi. | |
4699 | 4692 |
4700 // Store the cooked return address while executing the finally block. | 4693 // Store the cooked return address while executing the finally block. |
4701 __ push(a1); | 4694 __ push(a1); |
4702 | 4695 |
4703 // Store pending message while executing finally block. | 4696 // Store pending message while executing finally block. |
4704 ExternalReference pending_message_obj = | 4697 ExternalReference pending_message_obj = |
4705 ExternalReference::address_of_pending_message_obj(isolate()); | 4698 ExternalReference::address_of_pending_message_obj(isolate()); |
4706 __ li(at, Operand(pending_message_obj)); | 4699 __ li(at, Operand(pending_message_obj)); |
4707 __ lw(a1, MemOperand(at)); | 4700 __ ld(a1, MemOperand(at)); |
4708 __ push(a1); | 4701 __ push(a1); |
4709 | 4702 |
4710 ExternalReference has_pending_message = | 4703 ExternalReference has_pending_message = |
4711 ExternalReference::address_of_has_pending_message(isolate()); | 4704 ExternalReference::address_of_has_pending_message(isolate()); |
4712 __ li(at, Operand(has_pending_message)); | 4705 __ li(at, Operand(has_pending_message)); |
4713 __ lw(a1, MemOperand(at)); | 4706 __ ld(a1, MemOperand(at)); |
4714 __ SmiTag(a1); | 4707 __ SmiTag(a1); |
4715 __ push(a1); | 4708 __ push(a1); |
4716 | 4709 |
4717 ExternalReference pending_message_script = | 4710 ExternalReference pending_message_script = |
4718 ExternalReference::address_of_pending_message_script(isolate()); | 4711 ExternalReference::address_of_pending_message_script(isolate()); |
4719 __ li(at, Operand(pending_message_script)); | 4712 __ li(at, Operand(pending_message_script)); |
4720 __ lw(a1, MemOperand(at)); | 4713 __ ld(a1, MemOperand(at)); |
4721 __ push(a1); | 4714 __ push(a1); |
4722 } | 4715 } |
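The "cooked" return address written by EnterFinallyBlock is a code-relative delta, smi-tagged so it stays a valid tagged value on the stack while the finally block runs (the Code object may move under GC); ExitFinallyBlock inverts the transform before jumping. A sketch of the round trip, assuming the 64-bit smi scheme where tagging is a 32-bit left shift:

    #include <cstdint>

    const int kSmiShift = 32;  // assumed mips64 tagging: payload in upper word

    uint64_t CookReturnAddress(uint64_t ra, uint64_t code_object_start) {
      uint64_t delta = ra - code_object_start;  // Dsubu(a1, ra, CodeObject())
      return delta << kSmiShift;                // SmiTag(a1)
    }

    uint64_t UncookReturnAddress(uint64_t cooked, uint64_t code_object_start) {
      uint64_t delta = cooked >> kSmiShift;     // SmiUntag(a1)
      return code_object_start + delta;         // Daddu(at, a1, CodeObject())
    }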
4723 | 4716 |
4724 | 4717 |
4725 void FullCodeGenerator::ExitFinallyBlock() { | 4718 void FullCodeGenerator::ExitFinallyBlock() { |
4726 ASSERT(!result_register().is(a1)); | 4719 ASSERT(!result_register().is(a1)); |
4727 // Restore pending message from stack. | 4720 // Restore pending message from stack. |
4728 __ pop(a1); | 4721 __ pop(a1); |
4729 ExternalReference pending_message_script = | 4722 ExternalReference pending_message_script = |
4730 ExternalReference::address_of_pending_message_script(isolate()); | 4723 ExternalReference::address_of_pending_message_script(isolate()); |
4731 __ li(at, Operand(pending_message_script)); | 4724 __ li(at, Operand(pending_message_script)); |
4732 __ sw(a1, MemOperand(at)); | 4725 __ sd(a1, MemOperand(at)); |
4733 | 4726 |
4734 __ pop(a1); | 4727 __ pop(a1); |
4735 __ SmiUntag(a1); | 4728 __ SmiUntag(a1); |
4736 ExternalReference has_pending_message = | 4729 ExternalReference has_pending_message = |
4737 ExternalReference::address_of_has_pending_message(isolate()); | 4730 ExternalReference::address_of_has_pending_message(isolate()); |
4738 __ li(at, Operand(has_pending_message)); | 4731 __ li(at, Operand(has_pending_message)); |
4739 __ sw(a1, MemOperand(at)); | 4732 __ sd(a1, MemOperand(at)); |
4740 | 4733 |
4741 __ pop(a1); | 4734 __ pop(a1); |
4742 ExternalReference pending_message_obj = | 4735 ExternalReference pending_message_obj = |
4743 ExternalReference::address_of_pending_message_obj(isolate()); | 4736 ExternalReference::address_of_pending_message_obj(isolate()); |
4744 __ li(at, Operand(pending_message_obj)); | 4737 __ li(at, Operand(pending_message_obj)); |
4745 __ sw(a1, MemOperand(at)); | 4738 __ sd(a1, MemOperand(at)); |
4746 | 4739 |
4747 // Restore result register from stack. | 4740 // Restore result register from stack. |
4748 __ pop(a1); | 4741 __ pop(a1); |
4749 | 4742 |
4750 // Uncook return address and return. | 4743 // Uncook return address and return. |
4751 __ pop(result_register()); | 4744 __ pop(result_register()); |
4752 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); | 4745 |
4753 __ sra(a1, a1, 1); // Un-smi-tag value. | 4746 __ SmiUntag(a1); |
4754 __ Addu(at, a1, Operand(masm_->CodeObject())); | 4747 __ Daddu(at, a1, Operand(masm_->CodeObject())); |
4755 __ Jump(at); | 4748 __ Jump(at); |
4756 } | 4749 } |
4757 | 4750 |
4758 | 4751 |
4759 #undef __ | 4752 #undef __ |
4760 | 4753 |
4761 #define __ ACCESS_MASM(masm()) | 4754 #define __ ACCESS_MASM(masm()) |
4762 | 4755 |
4763 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( | 4756 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( |
4764 int* stack_depth, | 4757 int* stack_depth, |
4765 int* context_length) { | 4758 int* context_length) { |
4766 // The macros used here must preserve the result register. | 4759 // The macros used here must preserve the result register. |
4767 | 4760 |
4768 // Because the handler block contains the context of the finally | 4761 // Because the handler block contains the context of the finally |
4769 // code, we can restore it directly from there for the finally code | 4762 // code, we can restore it directly from there for the finally code |
4770 // rather than iteratively unwinding contexts via their previous | 4763 // rather than iteratively unwinding contexts via their previous |
4771 // links. | 4764 // links. |
4772 __ Drop(*stack_depth); // Down to the handler block. | 4765 __ Drop(*stack_depth); // Down to the handler block. |
4773 if (*context_length > 0) { | 4766 if (*context_length > 0) { |
4774 // Restore the context to its dedicated register and the stack. | 4767 // Restore the context to its dedicated register and the stack. |
4775 __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); | 4768 __ ld(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); |
4776 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4769 __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
4777 } | 4770 } |
4778 __ PopTryHandler(); | 4771 __ PopTryHandler(); |
4779 __ Call(finally_entry_); | 4772 __ Call(finally_entry_); |
4780 | 4773 |
4781 *stack_depth = 0; | 4774 *stack_depth = 0; |
4782 *context_length = 0; | 4775 *context_length = 0; |
4783 return previous_; | 4776 return previous_; |
4784 } | 4777 } |
4785 | 4778 |
4786 | 4779 |
4787 #undef __ | 4780 #undef __ |
4788 | 4781 |
4789 | 4782 |
4790 void BackEdgeTable::PatchAt(Code* unoptimized_code, | 4783 void BackEdgeTable::PatchAt(Code* unoptimized_code, |
4791 Address pc, | 4784 Address pc, |
4792 BackEdgeState target_state, | 4785 BackEdgeState target_state, |
4793 Code* replacement_code) { | 4786 Code* replacement_code) { |
4794 static const int kInstrSize = Assembler::kInstrSize; | 4787 static const int kInstrSize = Assembler::kInstrSize; |
4795 Address branch_address = pc - 6 * kInstrSize; | 4788 Address branch_address = pc - 8 * kInstrSize; |
4796 CodePatcher patcher(branch_address, 1); | 4789 CodePatcher patcher(branch_address, 1); |
4797 | 4790 |
4798 switch (target_state) { | 4791 switch (target_state) { |
4799 case INTERRUPT: | 4792 case INTERRUPT: |
4800 // slt at, a3, zero_reg (in case of count based interrupts) | 4793 // slt at, a3, zero_reg (in case of count based interrupts) |
4801 // beq at, zero_reg, ok | 4794 // beq at, zero_reg, ok |
4802 // lui t9, <interrupt stub address> upper | 4795 // lui t9, <interrupt stub address> upper |
4803 // ori t9, <interrupt stub address> lower | 4796 // ori t9, <interrupt stub address> middle |
| 4797 // dsll t9, t9, 16 |
| 4798 // ori t9, <interrupt stub address> lower |
4804 // jalr t9 | 4799 // jalr t9 |
4805 // nop | 4800 // nop |
4806 // ok-label ----- pc_after points here | 4801 // ok-label ----- pc_after points here |
4807 patcher.masm()->slt(at, a3, zero_reg); | 4802 patcher.masm()->slt(at, a3, zero_reg); |
4808 break; | 4803 break; |
4809 case ON_STACK_REPLACEMENT: | 4804 case ON_STACK_REPLACEMENT: |
4810 case OSR_AFTER_STACK_CHECK: | 4805 case OSR_AFTER_STACK_CHECK: |
4811 // addiu at, zero_reg, 1 | 4806 // addiu at, zero_reg, 1 |
4812 // beq at, zero_reg, ok ;; Not changed | 4807 // beq at, zero_reg, ok ;; Not changed |
4813 // lui t9, <on-stack replacement address> upper | 4808 // lui t9, <on-stack replacement address> upper |
4814 // ori t9, <on-stack replacement address> lower | 4809 // ori t9, <on-stack replacement address> middle |
| 4810 // dsll t9, t9, 16 |
| 4811 // ori t9, <on-stack replacement address> lower |
4815 // jalr t9 ;; Not changed | 4812 // jalr t9 ;; Not changed |
4816 // nop ;; Not changed | 4813 // nop ;; Not changed |
4817 // ok-label ----- pc_after points here | 4814 // ok-label ----- pc_after points here |
4818 patcher.masm()->addiu(at, zero_reg, 1); | 4815 patcher.masm()->daddiu(at, zero_reg, 1); |
4819 break; | 4816 break; |
4820 } | 4817 } |
4821 Address pc_immediate_load_address = pc - 4 * kInstrSize; | 4818 Address pc_immediate_load_address = pc - 6 * kInstrSize; |
4822 // Replace the stack check address in the load-immediate (lui/ori pair) | 4819 // Replace the stack check address in the load-immediate (6-instr sequence) |
4823 // with the entry address of the replacement code. | 4820 // with the entry address of the replacement code. |
4824 Assembler::set_target_address_at(pc_immediate_load_address, | 4821 Assembler::set_target_address_at(pc_immediate_load_address, |
4825 replacement_code->entry()); | 4822 replacement_code->entry()); |
4826 | 4823 |
4827 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 4824 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
4828 unoptimized_code, pc_immediate_load_address, replacement_code); | 4825 unoptimized_code, pc_immediate_load_address, replacement_code); |
4829 } | 4826 } |
4830 | 4827 |
4831 | 4828 |
4832 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( | 4829 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( |
4833 Isolate* isolate, | 4830 Isolate* isolate, |
4834 Code* unoptimized_code, | 4831 Code* unoptimized_code, |
4835 Address pc) { | 4832 Address pc) { |
4836 static const int kInstrSize = Assembler::kInstrSize; | 4833 static const int kInstrSize = Assembler::kInstrSize; |
4837 Address branch_address = pc - 6 * kInstrSize; | 4834 Address branch_address = pc - 8 * kInstrSize; |
4838 Address pc_immediate_load_address = pc - 4 * kInstrSize; | 4835 Address pc_immediate_load_address = pc - 6 * kInstrSize; |
4839 | 4836 |
4840 ASSERT(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize))); | 4837 ASSERT(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize))); |
4841 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) { | 4838 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) { |
4842 ASSERT(reinterpret_cast<uint32_t>( | 4839 ASSERT(reinterpret_cast<uint64_t>( |
4843 Assembler::target_address_at(pc_immediate_load_address)) == | 4840 Assembler::target_address_at(pc_immediate_load_address)) == |
4844 reinterpret_cast<uint32_t>( | 4841 reinterpret_cast<uint64_t>( |
4845 isolate->builtins()->InterruptCheck()->entry())); | 4842 isolate->builtins()->InterruptCheck()->entry())); |
4846 return INTERRUPT; | 4843 return INTERRUPT; |
4847 } | 4844 } |
4848 | 4845 |
4849 ASSERT(Assembler::IsAddImmediate(Assembler::instr_at(branch_address))); | 4846 ASSERT(Assembler::IsAddImmediate(Assembler::instr_at(branch_address))); |
4850 | 4847 |
4851 if (reinterpret_cast<uint32_t>( | 4848 if (reinterpret_cast<uint64_t>( |
4852 Assembler::target_address_at(pc_immediate_load_address)) == | 4849 Assembler::target_address_at(pc_immediate_load_address)) == |
4853 reinterpret_cast<uint32_t>( | 4850 reinterpret_cast<uint64_t>( |
4854 isolate->builtins()->OnStackReplacement()->entry())) { | 4851 isolate->builtins()->OnStackReplacement()->entry())) { |
4855 return ON_STACK_REPLACEMENT; | 4852 return ON_STACK_REPLACEMENT; |
4856 } | 4853 } |
4857 | 4854 |
4858 ASSERT(reinterpret_cast<uint32_t>( | 4855 ASSERT(reinterpret_cast<uint64_t>( |
4859 Assembler::target_address_at(pc_immediate_load_address)) == | 4856 Assembler::target_address_at(pc_immediate_load_address)) == |
4860 reinterpret_cast<uint32_t>( | 4857 reinterpret_cast<uint64_t>( |
4861 isolate->builtins()->OsrAfterStackCheck()->entry())); | 4858 isolate->builtins()->OsrAfterStackCheck()->entry())); |
4862 return OSR_AFTER_STACK_CHECK; | 4859 return OSR_AFTER_STACK_CHECK; |
4863 } | 4860 } |
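PatchAt and GetBackEdgeState are inverses: the patcher flips one instruction ahead of the beq (slt at, a3, zero_reg makes the skip conditional on the interrupt budget, while daddiu at, zero_reg, 1 forces at != 0 so the call is always taken) and retargets the 6-instruction load-immediate, and the reader deduces the state back from those two facts. A condensed C++ sketch of the recognizer; the boolean and address parameters stand in for the IsAddImmediate predicate and the builtin entry points read off the isolate:

    #include <cstdint>

    enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

    BackEdgeState GetState(bool branch_is_add_immediate,  // daddiu at, zero, 1?
                           uint64_t load_target,
                           uint64_t osr_entry) {
      // Unpatched compare => still on the interrupt check.
      if (!branch_is_add_immediate) return INTERRUPT;
      return load_target == osr_entry ? ON_STACK_REPLACEMENT
                                      : OSR_AFTER_STACK_CHECK;
    }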
4864 | 4861 |
4865 | 4862 |
4866 } } // namespace v8::internal | 4863 } } // namespace v8::internal |
4867 | 4864 |
4868 #endif // V8_TARGET_ARCH_MIPS | 4865 #endif // V8_TARGET_ARCH_MIPS64 |