// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

// Note on MIPS implementation:
//
// The result_register() for MIPS is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/mips/code-stubs-mips.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code that can be patched. This class
// has methods to emit the patchable code and a method, EmitPatchInfo, to
// record a marker back to the patchable code. The marker is an
// andi zero_reg, rx, #yyyy instruction, where rx * 0x0000ffff + yyyy (the
// raw 16-bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (its destination is zero_reg)
// and will never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitted, this jump is always taken, skipping the inlined
  // smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitted, this jump is never taken, so execution falls
  // through into the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
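      // Encode the delta into the marker's operands: the register code holds
      // delta / kImm16Mask and the 16-bit immediate holds delta % kImm16Mask,
      // mirroring the decoding done when the site is patched.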
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ lw(at, MemOperand(sp, receiver_offset));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&ok, ne, a2, Operand(at));

    __ lw(a2, GlobalObjectOperand());
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ sw(a2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Subu(t5, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t5, Operand(a2));
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
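      // Fill the allocated locals with undefined, storing in unrolled batches
      // of kMaxPushes; the batch size trades code size against loop overhead.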
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sw(t5, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Subu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Subu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sw(t5, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(a1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, so keep it marked as
      // such.
    }
    SetVar(this_function_var, a1, a2, a3);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");

    // Get the frame pointer for the calling frame.
    __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

    // Skip the arguments adaptor frame if it exists.
    Label check_frame_marker;
    __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
    __ Branch(&check_frame_marker, ne, a1,
              Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
    __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));

    // Check the marker in the calling frame.
    __ bind(&check_frame_marker);
    __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));

    Label non_construct_frame, done;
    __ Branch(&non_construct_frame, ne, a1,
              Operand(Smi::FromInt(StackFrame::CONSTRUCT)));

    __ lw(v0,
          MemOperand(a2, ConstructFrameConstants::kOriginalConstructorOffset));
    __ Branch(&done);

    __ bind(&non_construct_frame);
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
    __ bind(&done);

    SetVar(new_target_var, v0, a2, a3);
  }

  // Possibly allocate RestParameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ Addu(a3, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a2, Operand(Smi::FromInt(num_parameters)));
    __ li(a1, Operand(Smi::FromInt(rest_index)));
    __ li(a0, Operand(Smi::FromInt(language_mode())));
    __ Push(a3, a2, a1, a0);

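    // Arguments to RestParamAccessStub (as pushed above): the address just
    // past the formal parameters on the caller's stack, the parameter count,
    // the rest parameter index, and the language mode, the last three as smis.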
    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite the receiver and parameter count if the previous
    // stack frame was an arguments adaptor frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(at, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(at));
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_,
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
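  // The profiling counter lives in a heap Cell holding a smi. Decrement it
  // in place; callers then test the new value (left in a3) for a negative
  // result to decide whether to call the InterruptCheck builtin.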
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we
  // need to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
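  // a3 holds the decremented counter. Skip the interrupt check while the
  // counter is still non-negative.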
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ Branch(&ok, ge, a3, Operand(zero_reg));
    __ push(v0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(v0);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Make sure that the trampoline pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
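      // arg_count includes the receiver, so this pops the receiver and all
      // formal parameters pushed by the caller.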
      SetReturnPosition(function());
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }
  }
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
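  // The ToBoolean stub leaves its result in v0; the split below treats any
  // nonzero value as true.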
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
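  // Emit the cheapest branch shape: when one of the targets is the
  // fall-through, a single conditional branch suffices; otherwise branch on
  // the condition and jump unconditionally to the false target.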
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        DCHECK(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ li(a1, FeedbackVector());
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
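  // The loop below operates on five stack slots:
  //   sp[0]: current index (smi), sp[1]: array length (smi),
  //   sp[2]: fixed array of keys, sp[3]: expected map (or smi flag),
  //   sp[4]: the enumerable object itself.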

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(a3, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ li(a2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ li(StoreDescriptor::NameRegister(),
          Operand(isolate()->factory()->home_object_symbol()));
    __ lw(StoreDescriptor::ValueRegister(),
          MemOperand(sp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

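  // Walk the context chain from the current scope outward. Any context
  // belonging to a scope that calls sloppy eval may have an extension object
  // holding eval-introduced bindings; if one is present, fall back to the
  // slow path.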
1293 Scope* s = scope(); | |
1294 while (s != NULL) { | |
1295 if (s->num_heap_slots() > 0) { | |
1296 if (s->calls_sloppy_eval()) { | |
1297 // Check that extension is NULL. | |
1298 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX)); | |
1299 __ Branch(slow, ne, temp, Operand(zero_reg)); | |
1300 } | |
1301 // Load next context in chain. | |
1302 __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX)); | |
1303 // Walk the rest of the chain without clobbering cp. | |
1304 current = next; | |
1305 } | |
1306 // If no outer scope calls eval, we do not need to check more | |
1307 // context extensions. | |
1308 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; | |
1309 s = s->outer_scope(); | |
1310 } | |
1311 | |
1312 if (s->is_eval_scope()) { | |
1313 Label loop, fast; | |
1314 if (!current.is(next)) { | |
1315 __ Move(next, current); | |
1316 } | |
1317 __ bind(&loop); | |
1318 // Terminate at native context. | |
1319 __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset)); | |
1320 __ LoadRoot(t0, Heap::kNativeContextMapRootIndex); | |
1321 __ Branch(&fast, eq, temp, Operand(t0)); | |
1322 // Check that extension is NULL. | |
1323 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX)); | |
1324 __ Branch(slow, ne, temp, Operand(zero_reg)); | |
1325 // Load next context in chain. | |
1326 __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX)); | |
1327 __ Branch(&loop); | |
1328 __ bind(&fast); | |
1329 } | |
1330 | |
1331 // All extension objects were empty and it is safe to use a normal global | |
1332 // load machinery. | |
1333 EmitGlobalVariableLoad(proxy, typeof_mode); | |
1334 } | |
1335 | |
1336 | |
1337 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, | |
1338 Label* slow) { | |
1339 DCHECK(var->IsContextSlot()); | |
1340 Register context = cp; | |
1341 Register next = a3; | |
1342 Register temp = t0; | |
1343 | |
1344 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { | |
1345 if (s->num_heap_slots() > 0) { | |
1346 if (s->calls_sloppy_eval()) { | |
1347 // Check that extension is NULL. | |
1348 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | |
1349 __ Branch(slow, ne, temp, Operand(zero_reg)); | |
1350 } | |
1351 __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX)); | |
1352 // Walk the rest of the chain without clobbering cp. | |
1353 context = next; | |
1354 } | |
1355 } | |
1356 // Check that last extension is NULL. | |
1357 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); | |
1358 __ Branch(slow, ne, temp, Operand(zero_reg)); | |
1359 | |
1360 // This function is used only for loads, not stores, so it's safe to | |
1361 // return an cp-based operand (the write barrier cannot be allowed to | |
1362 // destroy the cp register). | |
1363 return ContextOperand(context, var->index()); | |
1364 } | |
1365 | |
1366 | |
1367 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, | |
1368 TypeofMode typeof_mode, | |
1369 Label* slow, Label* done) { | |
1370 // Generate fast-case code for variables that might be shadowed by | |
1371 // eval-introduced variables. Eval is used a lot without | |
1372 // introducing variables. In those cases, we do not want to | |
1373 // perform a runtime call for all variables in the scope | |
1374 // containing the eval. | |
1375 Variable* var = proxy->var(); | |
1376 if (var->mode() == DYNAMIC_GLOBAL) { | |
1377 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow); | |
1378 __ Branch(done); | |
1379 } else if (var->mode() == DYNAMIC_LOCAL) { | |
1380 Variable* local = var->local_if_not_shadowed(); | |
1381 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow)); | |
1382 if (local->mode() == LET || local->mode() == CONST || | |
1383 local->mode() == CONST_LEGACY) { | |
1384 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
1385 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. | |
1386 if (local->mode() == CONST_LEGACY) { | |
1387 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); | |
1388 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole. | |
1389 } else { // LET || CONST | |
1390 __ Branch(done, ne, at, Operand(zero_reg)); | |
1391 __ li(a0, Operand(var->name())); | |
1392 __ push(a0); | |
1393 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1394 } | |
1395 } | |
1396 __ Branch(done); | |
1397 } | |
1398 } | |
1399 | |
1400 | |
1401 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, | |
1402 TypeofMode typeof_mode) { | |
1403 Variable* var = proxy->var(); | |
1404 DCHECK(var->IsUnallocatedOrGlobalSlot() || | |
1405 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL)); | |
1406 if (var->IsGlobalSlot()) { | |
1407 DCHECK(var->index() > 0); | |
1408 DCHECK(var->IsStaticGlobalObjectProperty()); | |
1409 // Each var occupies two slots in the context: for reads and writes. | |
1410 int slot_index = var->index(); | |
1411 int depth = scope()->ContextChainLength(var->scope()); | |
1412 __ li(LoadGlobalViaContextDescriptor::DepthRegister(), | |
1413 Operand(Smi::FromInt(depth))); | |
1414 __ li(LoadGlobalViaContextDescriptor::SlotRegister(), | |
1415 Operand(Smi::FromInt(slot_index))); | |
1416 __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name())); | |
1417 LoadGlobalViaContextStub stub(isolate(), depth); | |
1418 __ CallStub(&stub); | |
1419 | |
1420 } else { | |
1421 __ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); | |
1422 __ li(LoadDescriptor::NameRegister(), Operand(var->name())); | |
1423 __ li(LoadDescriptor::SlotRegister(), | |
1424 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); | |
1425 CallLoadIC(typeof_mode); | |
1426 } | |
1427 } | |
1428 | |
1429 | |
1430 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, | |
1431 TypeofMode typeof_mode) { | |
1432 // Record position before possible IC call. | |
1433 SetExpressionPosition(proxy); | |
1434 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS); | |
1435 Variable* var = proxy->var(); | |
1436 | |
1437 // Three cases: global variables, lookup variables, and all other types of | |
1438 // variables. | |
1439 switch (var->location()) { | |
1440 case VariableLocation::GLOBAL: | |
1441 case VariableLocation::UNALLOCATED: { | |
1442 Comment cmnt(masm_, "[ Global variable"); | |
1443 EmitGlobalVariableLoad(proxy, typeof_mode); | |
1444 context()->Plug(v0); | |
1445 break; | |
1446 } | |
1447 | |
1448 case VariableLocation::PARAMETER: | |
1449 case VariableLocation::LOCAL: | |
1450 case VariableLocation::CONTEXT: { | |
1451 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode); | |
1452 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" | |
1453 : "[ Stack variable"); | |
1454 if (var->binding_needs_init()) { | |
1455 // var->scope() may be NULL when the proxy is located in eval code and | |
1456 // refers to a potential outside binding. Currently those bindings are | |
1457 // always looked up dynamically, i.e. in that case | |
1458 //     var->location() == LOOKUP | |
1459 // always holds. | |
1460 DCHECK(var->scope() != NULL); | |
1461 | |
1462 // Check if the binding really needs an initialization check. The check | |
1463 // can be skipped in the following situation: we have a LET or CONST | |
1464 // binding in harmony mode, both the Variable and the VariableProxy have | |
1465 // the same declaration scope (i.e. they are both in global code, in the | |
1466 // same function or in the same eval code) and the VariableProxy is in | |
1467 // the source physically located after the initializer of the variable. | |
1468 // | |
1469 // We cannot skip any initialization checks for CONST in non-harmony | |
1470 // mode because const variables may be declared but never initialized: | |
1471 // if (false) { const x; }; var y = x; | |
1472 // | |
1473 // The condition on the declaration scopes is a conservative check for | |
1474 // nested functions that access a binding and are called before the | |
1475 // binding is initialized: | |
1476 // function() { f(); let x = 1; function f() { x = 2; } } | |
1477 // | |
1478 bool skip_init_check; | |
1479 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { | |
1480 skip_init_check = false; | |
1481 } else if (var->is_this()) { | |
1482 CHECK(info_->function() != nullptr && | |
1483 (info_->function()->kind() & kSubclassConstructor) != 0); | |
1484 // TODO(dslomov): implement 'this' hole check elimination. | |
1485 skip_init_check = false; | |
1486 } else { | |
1487 // Check that we always have valid source position. | |
1488 DCHECK(var->initializer_position() != RelocInfo::kNoPosition); | |
1489 DCHECK(proxy->position() != RelocInfo::kNoPosition); | |
1490 skip_init_check = var->mode() != CONST_LEGACY && | |
1491 var->initializer_position() < proxy->position(); | |
1492 } | |
1493 | |
1494 if (!skip_init_check) { | |
1495 // Let and const need a read barrier. | |
1496 GetVar(v0, var); | |
1497 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
1498 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. | |
1499 if (var->mode() == LET || var->mode() == CONST) { | |
1500 // Throw a reference error when using an uninitialized let/const | |
1501 // binding in harmony mode. | |
1502 Label done; | |
1503 __ Branch(&done, ne, at, Operand(zero_reg)); | |
1504 __ li(a0, Operand(var->name())); | |
1505 __ push(a0); | |
1506 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1507 __ bind(&done); | |
1508 } else { | |
1509 // Uninitialized const bindings outside of harmony mode are unholed. | |
1510 DCHECK(var->mode() == CONST_LEGACY); | |
1511 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); | |
1512 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole. | |
1513 } | |
1514 context()->Plug(v0); | |
1515 break; | |
1516 } | |
1517 } | |
1518 context()->Plug(var); | |
1519 break; | |
1520 } | |
1521 | |
1522 case VariableLocation::LOOKUP: { | |
1523 Comment cmnt(masm_, "[ Lookup variable"); | |
1524 Label done, slow; | |
1525 // Generate code for loading from variables potentially shadowed | |
1526 // by eval-introduced variables. | |
1527 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done); | |
1528 __ bind(&slow); | |
1529 __ li(a1, Operand(var->name())); | |
1530 __ Push(cp, a1); // Context and name. | |
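// Inside a typeof expression an unresolvable name must yield undefined | |
// rather than throw, hence the NoReferenceError runtime variant. | |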
1531 Runtime::FunctionId function_id = | |
1532 typeof_mode == NOT_INSIDE_TYPEOF | |
1533 ? Runtime::kLoadLookupSlot | |
1534 : Runtime::kLoadLookupSlotNoReferenceError; | |
1535 __ CallRuntime(function_id, 2); | |
1536 __ bind(&done); | |
1537 context()->Plug(v0); | |
1538 } | |
1539 } | |
1540 } | |
1541 | |
1542 | |
1543 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | |
1544 Comment cmnt(masm_, "[ RegExpLiteral"); | |
1545 Label materialized; | |
1546 // Registers will be used as follows: | |
1547 // t1 = materialized value (RegExp literal) | |
1548 // t0 = JS function, literals array | |
1549 // a3 = literal index | |
1550 // a2 = RegExp pattern | |
1551 // a1 = RegExp flags | |
1552 // a0 = RegExp literal clone | |
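// Illustrative example (not from the source): for the literal /ab+c/gi, | |
// expr->pattern() is "ab+c" and expr->flags() is "gi". | |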
1553 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
1554 __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset)); | |
1555 int literal_offset = | |
1556 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; | |
1557 __ lw(t1, FieldMemOperand(t0, literal_offset)); | |
1558 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
1559 __ Branch(&materialized, ne, t1, Operand(at)); | |
1560 | |
1561 // Create regexp literal using runtime function. | |
1562 // Result will be in v0. | |
1563 __ li(a3, Operand(Smi::FromInt(expr->literal_index()))); | |
1564 __ li(a2, Operand(expr->pattern())); | |
1565 __ li(a1, Operand(expr->flags())); | |
1566 __ Push(t0, a3, a2, a1); | |
1567 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | |
1568 __ mov(t1, v0); | |
1569 | |
1570 __ bind(&materialized); | |
1571 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | |
1572 Label allocated, runtime_allocate; | |
1573 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); | |
1574 __ jmp(&allocated); | |
1575 | |
1576 __ bind(&runtime_allocate); | |
1577 __ li(a0, Operand(Smi::FromInt(size))); | |
1578 __ Push(t1, a0); | |
1579 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | |
1580 __ pop(t1); | |
1581 | |
1582 __ bind(&allocated); | |
1583 | |
1584 // After this, registers are used as follows: | |
1585 // v0: Newly allocated regexp. | |
1586 // t1: Materialized regexp. | |
1587 // a2: temp. | |
1588 __ CopyFields(v0, t1, a2.bit(), size / kPointerSize); | |
1589 context()->Plug(v0); | |
1590 } | |
1591 | |
1592 | |
1593 void FullCodeGenerator::EmitAccessor(Expression* expression) { | |
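// A null expression means only one half of the accessor pair is defined | |
// (illustrative: '{ get x() {} }' has no setter); push null in its place | |
// so the runtime always receives a complete getter/setter pair. | |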
1594 if (expression == NULL) { | |
1595 __ LoadRoot(a1, Heap::kNullValueRootIndex); | |
1596 __ push(a1); | |
1597 } else { | |
1598 VisitForStackValue(expression); | |
1599 } | |
1600 } | |
1601 | |
1602 | |
1603 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { | |
1604 Comment cmnt(masm_, "[ ObjectLiteral"); | |
1605 | |
1606 Handle<FixedArray> constant_properties = expr->constant_properties(); | |
1607 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
1608 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); | |
1609 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); | |
1610 __ li(a1, Operand(constant_properties)); | |
1611 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags()))); | |
1612 if (MustCreateObjectLiteralWithRuntime(expr)) { | |
1613 __ Push(a3, a2, a1, a0); | |
1614 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); | |
1615 } else { | |
1616 FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); | |
1617 __ CallStub(&stub); | |
1618 } | |
1619 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); | |
1620 | |
1621 // If result_saved is true the result is on top of the stack. If | |
1622 // result_saved is false the result is in v0. | |
1623 bool result_saved = false; | |
1624 | |
1625 AccessorTable accessor_table(zone()); | |
1626 int property_index = 0; | |
1627 // store_slot_index points to the vector IC slot for the next store IC used. | |
1628 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots | |
1629 // and must be updated if the number of store ICs emitted here changes. | |
1630 int store_slot_index = 0; | |
1631 for (; property_index < expr->properties()->length(); property_index++) { | |
1632 ObjectLiteral::Property* property = expr->properties()->at(property_index); | |
1633 if (property->is_computed_name()) break; | |
1634 if (property->IsCompileTimeValue()) continue; | |
1635 | |
1636 Literal* key = property->key()->AsLiteral(); | |
1637 Expression* value = property->value(); | |
1638 if (!result_saved) { | |
1639 __ push(v0); // Save result on stack. | |
1640 result_saved = true; | |
1641 } | |
1642 switch (property->kind()) { | |
1643 case ObjectLiteral::Property::CONSTANT: | |
1644 UNREACHABLE(); | |
1645 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | |
1646 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); | |
1647 // Fall through. | |
1648 case ObjectLiteral::Property::COMPUTED: | |
1649 // It is safe to use [[Put]] here because the boilerplate already | |
1650 // contains computed properties with an uninitialized value. | |
1651 if (key->value()->IsInternalizedString()) { | |
1652 if (property->emit_store()) { | |
1653 VisitForAccumulatorValue(value); | |
1654 __ mov(StoreDescriptor::ValueRegister(), result_register()); | |
1655 DCHECK(StoreDescriptor::ValueRegister().is(a0)); | |
1656 __ li(StoreDescriptor::NameRegister(), Operand(key->value())); | |
1657 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); | |
1658 if (FLAG_vector_stores) { | |
1659 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++)); | |
1660 CallStoreIC(); | |
1661 } else { | |
1662 CallStoreIC(key->LiteralFeedbackId()); | |
1663 } | |
1664 PrepareForBailoutForId(key->id(), NO_REGISTERS); | |
1665 | |
1666 if (NeedsHomeObject(value)) { | |
1667 __ Move(StoreDescriptor::ReceiverRegister(), v0); | |
1668 __ li(StoreDescriptor::NameRegister(), | |
1669 Operand(isolate()->factory()->home_object_symbol())); | |
1670 __ lw(StoreDescriptor::ValueRegister(), MemOperand(sp)); | |
1671 if (FLAG_vector_stores) { | |
1672 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++)); | |
1673 } | |
1674 CallStoreIC(); | |
1675 } | |
1676 } else { | |
1677 VisitForEffect(value); | |
1678 } | |
1679 break; | |
1680 } | |
1681 // Duplicate receiver on stack. | |
1682 __ lw(a0, MemOperand(sp)); | |
1683 __ push(a0); | |
1684 VisitForStackValue(key); | |
1685 VisitForStackValue(value); | |
1686 if (property->emit_store()) { | |
1687 EmitSetHomeObjectIfNeeded( | |
1688 value, 2, expr->SlotForHomeObject(value, &store_slot_index)); | |
1689 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes. | |
1690 __ push(a0); | |
1691 __ CallRuntime(Runtime::kSetProperty, 4); | |
1692 } else { | |
1693 __ Drop(3); | |
1694 } | |
1695 break; | |
1696 case ObjectLiteral::Property::PROTOTYPE: | |
1697 // Duplicate receiver on stack. | |
1698 __ lw(a0, MemOperand(sp)); | |
1699 __ push(a0); | |
1700 VisitForStackValue(value); | |
1701 DCHECK(property->emit_store()); | |
1702 __ CallRuntime(Runtime::kInternalSetPrototype, 2); | |
1703 break; | |
1704 case ObjectLiteral::Property::GETTER: | |
1705 if (property->emit_store()) { | |
1706 accessor_table.lookup(key)->second->getter = value; | |
1707 } | |
1708 break; | |
1709 case ObjectLiteral::Property::SETTER: | |
1710 if (property->emit_store()) { | |
1711 accessor_table.lookup(key)->second->setter = value; | |
1712 } | |
1713 break; | |
1714 } | |
1715 } | |
1716 | |
1717 // Emit code to define accessors, using only a single call to the runtime for | |
1718 // each pair of corresponding getters and setters. | |
1719 for (AccessorTable::Iterator it = accessor_table.begin(); | |
1720 it != accessor_table.end(); | |
1721 ++it) { | |
1722 __ lw(a0, MemOperand(sp)); // Duplicate receiver. | |
1723 __ push(a0); | |
1724 VisitForStackValue(it->first); | |
1725 EmitAccessor(it->second->getter); | |
1726 EmitSetHomeObjectIfNeeded( | |
1727 it->second->getter, 2, | |
1728 expr->SlotForHomeObject(it->second->getter, &store_slot_index)); | |
1729 EmitAccessor(it->second->setter); | |
1730 EmitSetHomeObjectIfNeeded( | |
1731 it->second->setter, 3, | |
1732 expr->SlotForHomeObject(it->second->setter, &store_slot_index)); | |
1733 __ li(a0, Operand(Smi::FromInt(NONE))); | |
1734 __ push(a0); | |
1735 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); | |
1736 } | |
1737 | |
1738 // Object literals have two parts. The "static" part on the left contains no | |
1739 // computed property names, and so we can compute its map ahead of time; see | |
1740 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part | |
1741 // starts with the first computed property name, and continues with all | |
1742 // properties to its right. All the code from above initializes the static | |
1743 // component of the object literal, and arranges for the map of the result to | |
1744 // reflect the static order in which the keys appear. For the dynamic | |
1745 // properties, we compile them into a series of "SetOwnProperty" runtime | |
1746 // calls. This will preserve insertion order. | |
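// Illustrative example (not from the source): in | |
//   { a: 1, ["b" + i]: 2, c: 3 } | |
// 'a' belongs to the static part, while ["b" + i] and every property | |
// after it, including 'c', are defined by the runtime calls below. | |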
1747 for (; property_index < expr->properties()->length(); property_index++) { | |
1748 ObjectLiteral::Property* property = expr->properties()->at(property_index); | |
1749 | |
1750 Expression* value = property->value(); | |
1751 if (!result_saved) { | |
1752 __ push(v0); // Save result on the stack | |
1753 result_saved = true; | |
1754 } | |
1755 | |
1756 __ lw(a0, MemOperand(sp)); // Duplicate receiver. | |
1757 __ push(a0); | |
1758 | |
1759 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { | |
1760 DCHECK(!property->is_computed_name()); | |
1761 VisitForStackValue(value); | |
1762 DCHECK(property->emit_store()); | |
1763 __ CallRuntime(Runtime::kInternalSetPrototype, 2); | |
1764 } else { | |
1765 EmitPropertyKey(property, expr->GetIdForProperty(property_index)); | |
1766 VisitForStackValue(value); | |
1767 EmitSetHomeObjectIfNeeded( | |
1768 value, 2, expr->SlotForHomeObject(value, &store_slot_index)); | |
1769 | |
1770 switch (property->kind()) { | |
1771 case ObjectLiteral::Property::CONSTANT: | |
1772 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | |
1773 case ObjectLiteral::Property::COMPUTED: | |
1774 if (property->emit_store()) { | |
1775 __ li(a0, Operand(Smi::FromInt(NONE))); | |
1776 __ push(a0); | |
1777 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4); | |
1778 } else { | |
1779 __ Drop(3); | |
1780 } | |
1781 break; | |
1782 | |
1783 case ObjectLiteral::Property::PROTOTYPE: | |
1784 UNREACHABLE(); | |
1785 break; | |
1786 | |
1787 case ObjectLiteral::Property::GETTER: | |
1788 __ li(a0, Operand(Smi::FromInt(NONE))); | |
1789 __ push(a0); | |
1790 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4); | |
1791 break; | |
1792 | |
1793 case ObjectLiteral::Property::SETTER: | |
1794 __ li(a0, Operand(Smi::FromInt(NONE))); | |
1795 __ push(a0); | |
1796 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4); | |
1797 break; | |
1798 } | |
1799 } | |
1800 } | |
1801 | |
1802 if (expr->has_function()) { | |
1803 DCHECK(result_saved); | |
1804 __ lw(a0, MemOperand(sp)); | |
1805 __ push(a0); | |
1806 __ CallRuntime(Runtime::kToFastProperties, 1); | |
1807 } | |
1808 | |
1809 if (result_saved) { | |
1810 context()->PlugTOS(); | |
1811 } else { | |
1812 context()->Plug(v0); | |
1813 } | |
1814 | |
1815 // Verify that compilation exactly consumed the number of store ic slots that | |
1816 // the ObjectLiteral node had to offer. | |
1817 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count()); | |
1818 } | |
1819 | |
1820 | |
1821 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { | |
1822 Comment cmnt(masm_, "[ ArrayLiteral"); | |
1823 | |
1824 expr->BuildConstantElements(isolate()); | |
1825 | |
1826 Handle<FixedArray> constant_elements = expr->constant_elements(); | |
1827 bool has_fast_elements = | |
1828 IsFastObjectElementsKind(expr->constant_elements_kind()); | |
1829 | |
1830 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; | |
1831 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { | |
1832 // If the only customer of allocation sites is transitioning, then we | |
1833 // can turn tracking off when there is nowhere else to transition to. | |
1834 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | |
1835 } | |
1836 | |
1837 __ mov(a0, result_register()); | |
1838 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
1839 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); | |
1840 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); | |
1841 __ li(a1, Operand(constant_elements)); | |
1842 if (MustCreateArrayLiteralWithRuntime(expr)) { | |
1843 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags()))); | |
1844 __ Push(a3, a2, a1, a0); | |
1845 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); | |
1846 } else { | |
1847 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); | |
1848 __ CallStub(&stub); | |
1849 } | |
1850 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); | |
1851 | |
1852 bool result_saved = false; // Is the result saved to the stack? | |
1853 ZoneList<Expression*>* subexprs = expr->values(); | |
1854 int length = subexprs->length(); | |
1855 | |
1856 // Emit code to evaluate all the non-constant subexpressions and to store | |
1857 // them into the newly cloned array. | |
1858 int array_index = 0; | |
1859 for (; array_index < length; array_index++) { | |
1860 Expression* subexpr = subexprs->at(array_index); | |
1861 if (subexpr->IsSpread()) break; | |
1862 | |
1863 // If the subexpression is a literal or a simple materialized literal it | |
1864 // is already set in the cloned array. | |
1865 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; | |
1866 | |
1867 if (!result_saved) { | |
1868 __ push(v0); // array literal | |
1869 __ Push(Smi::FromInt(expr->literal_index())); | |
1870 result_saved = true; | |
1871 } | |
1872 | |
1873 VisitForAccumulatorValue(subexpr); | |
1874 | |
1875 if (has_fast_elements) { | |
1876 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize); | |
1877 __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal. | |
1878 __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset)); | |
1879 __ sw(result_register(), FieldMemOperand(a1, offset)); | |
1880 // Update the write barrier for the array store. | |
1881 __ RecordWriteField(a1, offset, result_register(), a2, | |
1882 kRAHasBeenSaved, kDontSaveFPRegs, | |
1883 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); | |
1884 } else { | |
1885 __ li(a3, Operand(Smi::FromInt(array_index))); | |
1886 __ mov(a0, result_register()); | |
1887 StoreArrayLiteralElementStub stub(isolate()); | |
1888 __ CallStub(&stub); | |
1889 } | |
1890 | |
1891 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); | |
1892 } | |
1893 | |
1894 // In case the array literal contains spread expressions it has two parts. The | |
1895 // first part is the "static" array which has a literal index and is handled | |
1896 // above. The second part is the part after the first spread expression | |
1897 // (inclusive) and these elements get appended to the array. Note that the | |
1898 // number of elements an iterable produces is unknown ahead of time. | |
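// Illustrative example (not from the source): in [1, 2, ...it, 3] the | |
// elements 1 and 2 are stored by the loop above, while ...it and 3 are | |
// appended one element at a time by the loop below. | |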
1899 if (array_index < length && result_saved) { | |
1900 __ Pop(); // literal index | |
1901 __ Pop(v0); | |
1902 result_saved = false; | |
1903 } | |
1904 for (; array_index < length; array_index++) { | |
1905 Expression* subexpr = subexprs->at(array_index); | |
1906 | |
1907 __ Push(v0); | |
1908 if (subexpr->IsSpread()) { | |
1909 VisitForStackValue(subexpr->AsSpread()->expression()); | |
1910 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION); | |
1911 } else { | |
1912 VisitForStackValue(subexpr); | |
1913 __ CallRuntime(Runtime::kAppendElement, 2); | |
1914 } | |
1915 | |
1916 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); | |
1917 } | |
1918 | |
1919 if (result_saved) { | |
1920 __ Pop(); // literal index | |
1921 context()->PlugTOS(); | |
1922 } else { | |
1923 context()->Plug(v0); | |
1924 } | |
1925 } | |
1926 | |
1927 | |
1928 void FullCodeGenerator::VisitAssignment(Assignment* expr) { | |
1929 DCHECK(expr->target()->IsValidReferenceExpressionOrThis()); | |
1930 | |
1931 Comment cmnt(masm_, "[ Assignment"); | |
1932 SetExpressionPosition(expr, INSERT_BREAK); | |
1933 | |
1934 Property* property = expr->target()->AsProperty(); | |
1935 LhsKind assign_type = Property::GetAssignType(property); | |
1936 | |
1937 // Evaluate LHS expression. | |
1938 switch (assign_type) { | |
1939 case VARIABLE: | |
1940 // Nothing to do here. | |
1941 break; | |
1942 case NAMED_PROPERTY: | |
1943 if (expr->is_compound()) { | |
1944 // We need the receiver both on the stack and in the register. | |
1945 VisitForStackValue(property->obj()); | |
1946 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | |
1947 } else { | |
1948 VisitForStackValue(property->obj()); | |
1949 } | |
1950 break; | |
1951 case NAMED_SUPER_PROPERTY: | |
1952 VisitForStackValue( | |
1953 property->obj()->AsSuperPropertyReference()->this_var()); | |
1954 VisitForAccumulatorValue( | |
1955 property->obj()->AsSuperPropertyReference()->home_object()); | |
1956 __ Push(result_register()); | |
1957 if (expr->is_compound()) { | |
1958 const Register scratch = a1; | |
1959 __ lw(scratch, MemOperand(sp, kPointerSize)); | |
1960 __ Push(scratch, result_register()); | |
1961 } | |
1962 break; | |
1963 case KEYED_SUPER_PROPERTY: { | |
1964 const Register scratch = a1; | |
1965 VisitForStackValue( | |
1966 property->obj()->AsSuperPropertyReference()->this_var()); | |
1967 VisitForAccumulatorValue( | |
1968 property->obj()->AsSuperPropertyReference()->home_object()); | |
1969 __ Move(scratch, result_register()); | |
1970 VisitForAccumulatorValue(property->key()); | |
1971 __ Push(scratch, result_register()); | |
1972 if (expr->is_compound()) { | |
1973 const Register scratch1 = t0; | |
1974 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize)); | |
1975 __ Push(scratch1, scratch, result_register()); | |
1976 } | |
1977 break; | |
1978 } | |
1979 case KEYED_PROPERTY: | |
1980 // We need the key and receiver on both the stack and in v0 and a1. | |
1981 if (expr->is_compound()) { | |
1982 VisitForStackValue(property->obj()); | |
1983 VisitForStackValue(property->key()); | |
1984 __ lw(LoadDescriptor::ReceiverRegister(), | |
1985 MemOperand(sp, 1 * kPointerSize)); | |
1986 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); | |
1987 } else { | |
1988 VisitForStackValue(property->obj()); | |
1989 VisitForStackValue(property->key()); | |
1990 } | |
1991 break; | |
1992 } | |
1993 | |
1994 // For compound assignments we need another deoptimization point after the | |
1995 // variable/property load. | |
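// Illustrative example (not from the source): in 'o.x += v' the load of | |
// 'o.x', the binary operation and the store each get their own bailout | |
// point. | |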
1996 if (expr->is_compound()) { | |
1997 { AccumulatorValueContext context(this); | |
1998 switch (assign_type) { | |
1999 case VARIABLE: | |
2000 EmitVariableLoad(expr->target()->AsVariableProxy()); | |
2001 PrepareForBailout(expr->target(), TOS_REG); | |
2002 break; | |
2003 case NAMED_PROPERTY: | |
2004 EmitNamedPropertyLoad(property); | |
2005 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
2006 break; | |
2007 case NAMED_SUPER_PROPERTY: | |
2008 EmitNamedSuperPropertyLoad(property); | |
2009 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
2010 break; | |
2011 case KEYED_SUPER_PROPERTY: | |
2012 EmitKeyedSuperPropertyLoad(property); | |
2013 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
2014 break; | |
2015 case KEYED_PROPERTY: | |
2016 EmitKeyedPropertyLoad(property); | |
2017 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
2018 break; | |
2019 } | |
2020 } | |
2021 | |
2022 Token::Value op = expr->binary_op(); | |
2023 __ push(v0); // Left operand goes on the stack. | |
2024 VisitForAccumulatorValue(expr->value()); | |
2025 | |
2026 AccumulatorValueContext context(this); | |
2027 if (ShouldInlineSmiCase(op)) { | |
2028 EmitInlineSmiBinaryOp(expr->binary_operation(), | |
2029 op, | |
2030 expr->target(), | |
2031 expr->value()); | |
2032 } else { | |
2033 EmitBinaryOp(expr->binary_operation(), op); | |
2034 } | |
2035 | |
2036 // Deoptimization point in case the binary operation may have side effects. | |
2037 PrepareForBailout(expr->binary_operation(), TOS_REG); | |
2038 } else { | |
2039 VisitForAccumulatorValue(expr->value()); | |
2040 } | |
2041 | |
2042 SetExpressionPosition(expr); | |
2043 | |
2044 // Store the value. | |
2045 switch (assign_type) { | |
2046 case VARIABLE: | |
2047 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), | |
2048 expr->op(), expr->AssignmentSlot()); | |
2049 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
2050 context()->Plug(v0); | |
2051 break; | |
2052 case NAMED_PROPERTY: | |
2053 EmitNamedPropertyAssignment(expr); | |
2054 break; | |
2055 case NAMED_SUPER_PROPERTY: | |
2056 EmitNamedSuperPropertyStore(property); | |
2057 context()->Plug(v0); | |
2058 break; | |
2059 case KEYED_SUPER_PROPERTY: | |
2060 EmitKeyedSuperPropertyStore(property); | |
2061 context()->Plug(v0); | |
2062 break; | |
2063 case KEYED_PROPERTY: | |
2064 EmitKeyedPropertyAssignment(expr); | |
2065 break; | |
2066 } | |
2067 } | |
2068 | |
2069 | |
2070 void FullCodeGenerator::VisitYield(Yield* expr) { | |
2071 Comment cmnt(masm_, "[ Yield"); | |
2072 SetExpressionPosition(expr); | |
2073 | |
2074 // Evaluate yielded value first; the initial iterator definition depends on | |
2075 // this. It stays on the stack while we update the iterator. | |
2076 VisitForStackValue(expr->expression()); | |
2077 | |
2078 switch (expr->yield_kind()) { | |
2079 case Yield::kSuspend: | |
2080 // Pop value from top-of-stack slot; box result into result register. | |
2081 EmitCreateIteratorResult(false); | |
2082 __ push(result_register()); | |
2083 // Fall through. | |
2084 case Yield::kInitial: { | |
2085 Label suspend, continuation, post_runtime, resume; | |
2086 | |
2087 __ jmp(&suspend); | |
2088 __ bind(&continuation); | |
2089 __ RecordGeneratorContinuation(); | |
2090 __ jmp(&resume); | |
2091 | |
2092 __ bind(&suspend); | |
2093 VisitForAccumulatorValue(expr->generator_object()); | |
2094 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); | |
2095 __ li(a1, Operand(Smi::FromInt(continuation.pos()))); | |
2096 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset)); | |
2097 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset)); | |
2098 __ mov(a1, cp); | |
2099 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2, | |
2100 kRAHasBeenSaved, kDontSaveFPRegs); | |
2101 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); | |
2102 __ Branch(&post_runtime, eq, sp, Operand(a1)); | |
2103 __ push(v0); // generator object | |
2104 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | |
2105 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
2106 __ bind(&post_runtime); | |
2107 __ pop(result_register()); | |
2108 EmitReturnSequence(); | |
2109 | |
2110 __ bind(&resume); | |
2111 context()->Plug(result_register()); | |
2112 break; | |
2113 } | |
2114 | |
2115 case Yield::kFinal: { | |
2116 VisitForAccumulatorValue(expr->generator_object()); | |
2117 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); | |
2118 __ sw(a1, FieldMemOperand(result_register(), | |
2119 JSGeneratorObject::kContinuationOffset)); | |
2120 // Pop value from top-of-stack slot, box result into result register. | |
2121 EmitCreateIteratorResult(true); | |
2122 EmitUnwindBeforeReturn(); | |
2123 EmitReturnSequence(); | |
2124 break; | |
2125 } | |
2126 | |
2127 case Yield::kDelegating: { | |
2128 VisitForStackValue(expr->generator_object()); | |
2129 | |
2130 // Initial stack layout is as follows: | |
2131 // [sp + 1 * kPointerSize] iter | |
2132 // [sp + 0 * kPointerSize] g | |
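// Delegating yield ('yield* iterable', illustrative) compiles to a loop | |
// that calls iter.next(received) -- or iter['throw'] after an exception | |
// -- until result.done is true; see l_next, l_call and l_try below. | |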
2133 | |
2134 Label l_catch, l_try, l_suspend, l_continuation, l_resume; | |
2135 Label l_next, l_call; | |
2136 Register load_receiver = LoadDescriptor::ReceiverRegister(); | |
2137 Register load_name = LoadDescriptor::NameRegister(); | |
2138 | |
2139 // Initial send value is undefined. | |
2140 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); | |
2141 __ Branch(&l_next); | |
2142 | |
2143 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } | |
2144 __ bind(&l_catch); | |
2145 __ mov(a0, v0); | |
2146 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw" | |
2147 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter | |
2148 __ Push(load_name, a3, a0); // "throw", iter, except | |
2149 __ jmp(&l_call); | |
2150 | |
2151 // try { received = %yield result } | |
2152 // Shuffle the received result above a try handler and yield it without | |
2153 // re-boxing. | |
2154 __ bind(&l_try); | |
2155 __ pop(a0); // result | |
2156 int handler_index = NewHandlerTableEntry(); | |
2157 EnterTryBlock(handler_index, &l_catch); | |
2158 const int try_block_size = TryCatch::kElementCount * kPointerSize; | |
2159 __ push(a0); // result | |
2160 | |
2161 __ jmp(&l_suspend); | |
2162 __ bind(&l_continuation); | |
2163 __ RecordGeneratorContinuation(); | |
2164 __ mov(a0, v0); | |
2165 __ jmp(&l_resume); | |
2166 | |
2167 __ bind(&l_suspend); | |
2168 const int generator_object_depth = kPointerSize + try_block_size; | |
2169 __ lw(a0, MemOperand(sp, generator_object_depth)); | |
2170 __ push(a0); // g | |
2171 __ Push(Smi::FromInt(handler_index)); // handler-index | |
2172 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); | |
2173 __ li(a1, Operand(Smi::FromInt(l_continuation.pos()))); | |
2174 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset)); | |
2175 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset)); | |
2176 __ mov(a1, cp); | |
2177 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2, | |
2178 kRAHasBeenSaved, kDontSaveFPRegs); | |
2179 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2); | |
2180 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
2181 __ pop(v0); // result | |
2182 EmitReturnSequence(); | |
2183 __ mov(a0, v0); | |
2184 __ bind(&l_resume); // received in a0 | |
2185 ExitTryBlock(handler_index); | |
2186 | |
2187 // receiver = iter; f = 'next'; arg = received; | |
2188 __ bind(&l_next); | |
2189 | |
2190 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next" | |
2191 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter | |
2192 __ Push(load_name, a3, a0); // "next", iter, received | |
2193 | |
2194 // result = receiver[f](arg); | |
2195 __ bind(&l_call); | |
2196 __ lw(load_receiver, MemOperand(sp, kPointerSize)); | |
2197 __ lw(load_name, MemOperand(sp, 2 * kPointerSize)); | |
2198 __ li(LoadDescriptor::SlotRegister(), | |
2199 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot()))); | |
2200 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code(); | |
2201 CallIC(ic, TypeFeedbackId::None()); | |
2202 __ mov(a0, v0); | |
2203 __ mov(a1, a0); | |
2204 __ sw(a1, MemOperand(sp, 2 * kPointerSize)); | |
2205 SetCallPosition(expr, 1); | |
2206 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); | |
2207 __ CallStub(&stub); | |
2208 | |
2209 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
2210 __ Drop(1); // The function is still on the stack; drop it. | |
2211 | |
2212 // if (!result.done) goto l_try; | |
2213 __ Move(load_receiver, v0); | |
2214 | |
2215 __ push(load_receiver); // save result | |
2216 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" | |
2217 __ li(LoadDescriptor::SlotRegister(), | |
2218 Operand(SmiFromSlot(expr->DoneFeedbackSlot()))); | |
2219 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.done | |
2220 __ mov(a0, v0); | |
2221 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); | |
2222 CallIC(bool_ic); | |
2223 __ Branch(&l_try, eq, v0, Operand(zero_reg)); | |
2224 | |
2225 // result.value | |
2226 __ pop(load_receiver); // result | |
2227 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" | |
2228 __ li(LoadDescriptor::SlotRegister(), | |
2229 Operand(SmiFromSlot(expr->ValueFeedbackSlot()))); | |
2230 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.value | |
2231 context()->DropAndPlug(2, v0); // drop iter and g | |
2232 break; | |
2233 } | |
2234 } | |
2235 } | |
2236 | |
2237 | |
2238 void FullCodeGenerator::EmitGeneratorResume(Expression *generator, | |
2239 Expression *value, | |
2240 JSGeneratorObject::ResumeMode resume_mode) { | |
2241 // The value stays in a0, and is ultimately read by the resumed generator, as | |
2242 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it | |
2243 // is read to throw the value when the resumed generator is already closed. | |
2244 // a1 will hold the generator object until the activation has been resumed. | |
2245 VisitForStackValue(generator); | |
2246 VisitForAccumulatorValue(value); | |
2247 __ pop(a1); | |
2248 | |
2249 // Load suspended function and context. | |
2250 __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset)); | |
2251 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); | |
2252 | |
2253 // Load receiver and store as the first argument. | |
2254 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); | |
2255 __ push(a2); | |
2256 | |
2257 // Push holes for the rest of the arguments to the generator function. | |
2258 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); | |
2259 __ lw(a3, | |
2260 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); | |
2261 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex); | |
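// a3 holds the formal parameter count as a smi, so it is counted down | |
// by Smi::FromInt(1) without untagging. | |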
2262 Label push_argument_holes, push_frame; | |
2263 __ bind(&push_argument_holes); | |
2264 __ Subu(a3, a3, Operand(Smi::FromInt(1))); | |
2265 __ Branch(&push_frame, lt, a3, Operand(zero_reg)); | |
2266 __ push(a2); | |
2267 __ jmp(&push_argument_holes); | |
2268 | |
2269 // Enter a new JavaScript frame, and initialize its slots as they were when | |
2270 // the generator was suspended. | |
2271 Label resume_frame, done; | |
2272 __ bind(&push_frame); | |
2273 __ Call(&resume_frame); | |
2274 __ jmp(&done); | |
2275 __ bind(&resume_frame); | |
2276 // ra = return address. | |
2277 // fp = caller's frame pointer. | |
2278 // cp = callee's context, | |
2279 // t0 = callee's JS function. | |
2280 __ Push(ra, fp, cp, t0); | |
2281 // Adjust FP to point to saved FP. | |
2282 __ Addu(fp, sp, 2 * kPointerSize); | |
2283 | |
2284 // Load the operand stack size. | |
2285 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); | |
2286 __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset)); | |
2287 __ SmiUntag(a3); | |
2288 | |
2289 // If we are sending a value and there is no operand stack, we can jump back | |
2290 // in directly. | |
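// The stored continuation is a byte offset into the generated code, so | |
// adding it to the code entry yields the resume address. | |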
2291 if (resume_mode == JSGeneratorObject::NEXT) { | |
2292 Label slow_resume; | |
2293 __ Branch(&slow_resume, ne, a3, Operand(zero_reg)); | |
2294 __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset)); | |
2295 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); | |
2296 __ SmiUntag(a2); | |
2297 __ Addu(a3, a3, Operand(a2)); | |
2298 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); | |
2299 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); | |
2300 __ Jump(a3); | |
2301 __ bind(&slow_resume); | |
2302 } | |
2303 | |
2304 // Otherwise, we push holes for the operand stack and call the runtime to fix | |
2305 // up the stack and the handlers. | |
2306 Label push_operand_holes, call_resume; | |
2307 __ bind(&push_operand_holes); | |
2308 __ Subu(a3, a3, Operand(1)); | |
2309 __ Branch(&call_resume, lt, a3, Operand(zero_reg)); | |
2310 __ push(a2); | |
2311 __ Branch(&push_operand_holes); | |
2312 __ bind(&call_resume); | |
2313 DCHECK(!result_register().is(a1)); | |
2314 __ Push(a1, result_register()); | |
2315 __ Push(Smi::FromInt(resume_mode)); | |
2316 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); | |
2317 // Not reached: the runtime call returns elsewhere. | |
2318 __ stop("not-reached"); | |
2319 | |
2320 __ bind(&done); | |
2321 context()->Plug(result_register()); | |
2322 } | |
2323 | |
2324 | |
2325 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { | |
2326 Label gc_required; | |
2327 Label allocated; | |
2328 | |
2329 const int instance_size = 5 * kPointerSize; | |
2330 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(), | |
2331 instance_size); | |
2332 | |
2333 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT); | |
2334 __ jmp(&allocated); | |
2335 | |
2336 __ bind(&gc_required); | |
2337 __ Push(Smi::FromInt(instance_size)); | |
2338 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | |
2339 __ lw(context_register(), | |
2340 MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
2341 | |
2342 __ bind(&allocated); | |
2343 __ lw(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
2344 __ lw(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset)); | |
2345 __ lw(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX)); | |
2346 __ pop(a2); | |
2347 __ li(a3, Operand(isolate()->factory()->ToBoolean(done))); | |
2348 __ li(t0, Operand(isolate()->factory()->empty_fixed_array())); | |
2349 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | |
2350 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | |
2351 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
2352 __ sw(a2, | |
2353 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset)); | |
2354 __ sw(a3, | |
2355 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset)); | |
2356 | |
2357 // Only the value field needs a write barrier, as the other values are in the | |
2358 // root set. | |
2359 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset, | |
2360 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs); | |
2361 } | |
2362 | |
2363 | |
2364 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { | |
2365 SetExpressionPosition(prop); | |
2366 Literal* key = prop->key()->AsLiteral(); | |
2367 DCHECK(!prop->IsSuperAccess()); | |
2368 | |
2369 __ li(LoadDescriptor::NameRegister(), Operand(key->value())); | |
2370 __ li(LoadDescriptor::SlotRegister(), | |
2371 Operand(SmiFromSlot(prop->PropertyFeedbackSlot()))); | |
2372 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode()); | |
2373 } | |
2374 | |
2375 | |
2376 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) { | |
2377 // Stack: receiver, home_object. | |
2378 SetExpressionPosition(prop); | |
2379 | |
2380 Literal* key = prop->key()->AsLiteral(); | |
2381 DCHECK(!key->value()->IsSmi()); | |
2382 DCHECK(prop->IsSuperAccess()); | |
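// The runtime call below takes four arguments: the receiver and | |
// home_object already on the stack, plus the name and language mode | |
// pushed here. | |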
2383 | |
2384 __ Push(key->value()); | |
2385 __ Push(Smi::FromInt(language_mode())); | |
2386 __ CallRuntime(Runtime::kLoadFromSuper, 4); | |
2387 } | |
2388 | |
2389 | |
2390 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { | |
2391 SetExpressionPosition(prop); | |
2392 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code(); | |
2393 __ li(LoadDescriptor::SlotRegister(), | |
2394 Operand(SmiFromSlot(prop->PropertyFeedbackSlot()))); | |
2395 CallIC(ic); | |
2396 } | |
2397 | |
2398 | |
2399 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) { | |
2400 // Stack: receiver, home_object, key. | |
2401 SetExpressionPosition(prop); | |
2402 __ Push(Smi::FromInt(language_mode())); | |
2403 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4); | |
2404 } | |
2405 | |
2406 | |
2407 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | |
2408 Token::Value op, | |
2409 Expression* left_expr, | |
2410 Expression* right_expr) { | |
2411 Label done, smi_case, stub_call; | |
2412 | |
2413 Register scratch1 = a2; | |
2414 Register scratch2 = a3; | |
2415 | |
2416 // Get the arguments. | |
2417 Register left = a1; | |
2418 Register right = a0; | |
2419 __ pop(left); | |
2420 __ mov(a0, result_register()); | |
2421 | |
2422 // Perform combined smi check on both operands. | |
2423 __ Or(scratch1, left, Operand(right)); | |
2424 STATIC_ASSERT(kSmiTag == 0); | |
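// With kSmiTag == 0 the low bit of a smi is clear, so OR-ing the two | |
// operands and testing the low bit of the result checks both at once. | |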
2425 JumpPatchSite patch_site(masm_); | |
2426 patch_site.EmitJumpIfSmi(scratch1, &smi_case); | |
2427 | |
2428 __ bind(&stub_call); | |
2429 Handle<Code> code = | |
2430 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code(); | |
2431 CallIC(code, expr->BinaryOperationFeedbackId()); | |
2432 patch_site.EmitPatchInfo(); | |
2433 __ jmp(&done); | |
2434 | |
2435 __ bind(&smi_case); | |
2436 // Smi case. This code works the same way as the smi-smi case in the | |
2437 // type-recording binary operation stub. | |
2438 switch (op) { | |
2439 case Token::SAR: | |
2440 __ GetLeastBitsFromSmi(scratch1, right, 5); | |
2441 __ srav(right, left, scratch1); | |
2442 __ And(v0, right, Operand(~kSmiTagMask)); | |
2443 break; | |
2444 case Token::SHL: { | |
2445 __ SmiUntag(scratch1, left); | |
2446 __ GetLeastBitsFromSmi(scratch2, right, 5); | |
2447 __ sllv(scratch1, scratch1, scratch2); | |
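// The result fits in a smi iff it lies in [-2^30, 2^30): adding | |
// 0x40000000 makes exactly the out-of-range values negative. | |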
2448 __ Addu(scratch2, scratch1, Operand(0x40000000)); | |
2449 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); | |
2450 __ SmiTag(v0, scratch1); | |
2451 break; | |
2452 } | |
2453 case Token::SHR: { | |
2454 __ SmiUntag(scratch1, left); | |
2455 __ GetLeastBitsFromSmi(scratch2, right, 5); | |
2456 __ srlv(scratch1, scratch1, scratch2); | |
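// A logical shift yields an unsigned value, which fits in a | |
// (non-negative) smi only if the top two bits are clear, hence the | |
// 0xc0000000 mask. | |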
2457 __ And(scratch2, scratch1, 0xc0000000); | |
2458 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg)); | |
2459 __ SmiTag(v0, scratch1); | |
2460 break; | |
2461 } | |
2462 case Token::ADD: | |
2463 __ AdduAndCheckForOverflow(v0, left, right, scratch1); | |
2464 __ BranchOnOverflow(&stub_call, scratch1); | |
2465 break; | |
2466 case Token::SUB: | |
2467 __ SubuAndCheckForOverflow(v0, left, right, scratch1); | |
2468 __ BranchOnOverflow(&stub_call, scratch1); | |
2469 break; | |
2470 case Token::MUL: { | |
2471 __ SmiUntag(scratch1, right); | |
2472 __ Mul(scratch2, v0, left, scratch1); | |
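// Mul leaves the low word of the product in v0 and the high word in | |
// scratch2; the product fits in 32 bits iff the high word equals the | |
// sign extension of the low word. A zero product may be -0 (one operand | |
// negative), which is not a smi, hence the operand sign check below. | |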
2473 __ sra(scratch1, v0, 31); | |
2474 __ Branch(&stub_call, ne, scratch1, Operand(scratch2)); | |
2475 __ Branch(&done, ne, v0, Operand(zero_reg)); | |
2476 __ Addu(scratch2, right, left); | |
2477 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); | |
2478 DCHECK(Smi::FromInt(0) == 0); | |
2479 __ mov(v0, zero_reg); | |
2480 break; | |
2481 } | |
2482 case Token::BIT_OR: | |
2483 __ Or(v0, left, Operand(right)); | |
2484 break; | |
2485 case Token::BIT_AND: | |
2486 __ And(v0, left, Operand(right)); | |
2487 break; | |
2488 case Token::BIT_XOR: | |
2489 __ Xor(v0, left, Operand(right)); | |
2490 break; | |
2491 default: | |
2492 UNREACHABLE(); | |
2493 } | |
2494 | |
2495 __ bind(&done); | |
2496 context()->Plug(v0); | |
2497 } | |
2498 | |
2499 | |
2500 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit, | |
2501 int* used_store_slots) { | |
2502 // Constructor is in v0. | |
2503 DCHECK(lit != NULL); | |
2504 __ push(v0); | |
2505 | |
2506 // No access check is needed here since the constructor is created by the | |
2507 // class literal. | |
2508 Register scratch = a1; | |
2509 __ lw(scratch, | |
2510 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset)); | |
2511 __ push(scratch); | |
2512 | |
2513 for (int i = 0; i < lit->properties()->length(); i++) { | |
2514 ObjectLiteral::Property* property = lit->properties()->at(i); | |
2515 Expression* value = property->value(); | |
2516 | |
2517 if (property->is_static()) { | |
2518 __ lw(scratch, MemOperand(sp, kPointerSize)); // constructor | |
2519 } else { | |
2520 __ lw(scratch, MemOperand(sp, 0)); // prototype | |
2521 } | |
2522 __ push(scratch); | |
2523 EmitPropertyKey(property, lit->GetIdForProperty(i)); | |
2524 | |
2525 // The static prototype property is read-only. We handle the non-computed | |
2526 // property name case in the parser. Since this is the only case where we | |
2527 // need to check for an own read-only property, we special-case it here so | |
2528 // that we do not need to do the check for every property. | |
2529 if (property->is_static() && property->is_computed_name()) { | |
2530 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1); | |
2531 __ push(v0); | |
2532 } | |
2533 | |
2534 VisitForStackValue(value); | |
2535 EmitSetHomeObjectIfNeeded(value, 2, | |
2536 lit->SlotForHomeObject(value, used_store_slots)); | |
2537 | |
2538 switch (property->kind()) { | |
2539 case ObjectLiteral::Property::CONSTANT: | |
2540 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | |
2541 case ObjectLiteral::Property::PROTOTYPE: | |
2542 UNREACHABLE(); | |
2543 case ObjectLiteral::Property::COMPUTED: | |
2544 __ CallRuntime(Runtime::kDefineClassMethod, 3); | |
2545 break; | |
2546 | |
2547 case ObjectLiteral::Property::GETTER: | |
2548 __ li(a0, Operand(Smi::FromInt(DONT_ENUM))); | |
2549 __ push(a0); | |
2550 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4); | |
2551 break; | |
2552 | |
2553 case ObjectLiteral::Property::SETTER: | |
2554 __ li(a0, Operand(Smi::FromInt(DONT_ENUM))); | |
2555 __ push(a0); | |
2556 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4); | |
2557 break; | |
2558 | |
2559 default: | |
2560 UNREACHABLE(); | |
2561 } | |
2562 } | |
2563 | |
2564 // prototype | |
2565 __ CallRuntime(Runtime::kToFastProperties, 1); | |
2566 | |
2567 // constructor | |
2568 __ CallRuntime(Runtime::kToFastProperties, 1); | |
2569 | |
2570 if (is_strong(language_mode())) { | |
2571 __ lw(scratch, | |
2572 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset)); | |
2573 __ Push(v0, scratch); | |
2574 // TODO(conradw): It would be more efficient to define the properties with | |
2575 // the right attributes the first time round. | |
2576 // Freeze the prototype. | |
2577 __ CallRuntime(Runtime::kObjectFreeze, 1); | |
2578 // Freeze the constructor. | |
2579 __ CallRuntime(Runtime::kObjectFreeze, 1); | |
2580 } | |
2581 } | |
2582 | |
2583 | |
2584 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { | |
2585 __ mov(a0, result_register()); | |
2586 __ pop(a1); | |
2587 Handle<Code> code = | |
2588 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code(); | |
2589 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. | |
2590 CallIC(code, expr->BinaryOperationFeedbackId()); | |
2591 patch_site.EmitPatchInfo(); | |
2592 context()->Plug(v0); | |
2593 } | |
2594 | |
2595 | |
2596 void FullCodeGenerator::EmitAssignment(Expression* expr, | |
2597 FeedbackVectorICSlot slot) { | |
2598 DCHECK(expr->IsValidReferenceExpressionOrThis()); | |
2599 | |
2600 Property* prop = expr->AsProperty(); | |
2601 LhsKind assign_type = Property::GetAssignType(prop); | |
2602 | |
2603 switch (assign_type) { | |
2604 case VARIABLE: { | |
2605 Variable* var = expr->AsVariableProxy()->var(); | |
2606 EffectContext context(this); | |
2607 EmitVariableAssignment(var, Token::ASSIGN, slot); | |
2608 break; | |
2609 } | |
2610 case NAMED_PROPERTY: { | |
2611 __ push(result_register()); // Preserve value. | |
2612 VisitForAccumulatorValue(prop->obj()); | |
2613 __ mov(StoreDescriptor::ReceiverRegister(), result_register()); | |
2614 __ pop(StoreDescriptor::ValueRegister()); // Restore value. | |
2615 __ li(StoreDescriptor::NameRegister(), | |
2616 Operand(prop->key()->AsLiteral()->value())); | |
2617 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot); | |
2618 CallStoreIC(); | |
2619 break; | |
2620 } | |
2621 case NAMED_SUPER_PROPERTY: { | |
2622 __ Push(v0); | |
2623 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | |
2624 VisitForAccumulatorValue( | |
2625 prop->obj()->AsSuperPropertyReference()->home_object()); | |
2626 // stack: value, this; v0: home_object | |
2627 Register scratch = a2; | |
2628 Register scratch2 = a3; | |
2629 __ mov(scratch, result_register()); // home_object | |
2630 __ lw(v0, MemOperand(sp, kPointerSize)); // value | |
2631 __ lw(scratch2, MemOperand(sp, 0)); // this | |
2632 __ sw(scratch2, MemOperand(sp, kPointerSize)); // this | |
2633 __ sw(scratch, MemOperand(sp, 0)); // home_object | |
2634 // stack: this, home_object; v0: value | |
2635 EmitNamedSuperPropertyStore(prop); | |
2636 break; | |
2637 } | |
2638 case KEYED_SUPER_PROPERTY: { | |
2639 __ Push(v0); | |
2640 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | |
2641 VisitForStackValue( | |
2642 prop->obj()->AsSuperPropertyReference()->home_object()); | |
2643 VisitForAccumulatorValue(prop->key()); | |
2644 Register scratch = a2; | |
2645 Register scratch2 = a3; | |
2646 __ lw(scratch2, MemOperand(sp, 2 * kPointerSize)); // value | |
2647 // stack: value, this, home_object; v0: key, a3: value | |
2648 __ lw(scratch, MemOperand(sp, kPointerSize)); // this | |
2649 __ sw(scratch, MemOperand(sp, 2 * kPointerSize)); | |
2650 __ lw(scratch, MemOperand(sp, 0)); // home_object | |
2651 __ sw(scratch, MemOperand(sp, kPointerSize)); | |
2652 __ sw(v0, MemOperand(sp, 0)); | |
2653 __ Move(v0, scratch2); | |
2654 // stack: this, home_object, key; v0: value. | |
2655 EmitKeyedSuperPropertyStore(prop); | |
2656 break; | |
2657 } | |
2658 case KEYED_PROPERTY: { | |
2659 __ push(result_register()); // Preserve value. | |
2660 VisitForStackValue(prop->obj()); | |
2661 VisitForAccumulatorValue(prop->key()); | |
2662 __ mov(StoreDescriptor::NameRegister(), result_register()); | |
2663 __ Pop(StoreDescriptor::ValueRegister(), | |
2664 StoreDescriptor::ReceiverRegister()); | |
2665 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot); | |
2666 Handle<Code> ic = | |
2667 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | |
2668 CallIC(ic); | |
2669 break; | |
2670 } | |
2671 } | |
2672 context()->Plug(v0); | |
2673 } | |
2674 | |
2675 | |
2676 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( | |
2677 Variable* var, MemOperand location) { | |
2678 __ sw(result_register(), location); | |
2679 if (var->IsContextSlot()) { | |
2680 // RecordWrite may destroy all its register arguments. | |
2681 __ Move(a3, result_register()); | |
2682 int offset = Context::SlotOffset(var->index()); | |
2683 __ RecordWriteContextSlot( | |
2684 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); | |
2685 } | |
2686 } | |
2687 | |
2688 | |
2689 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, | |
2690 FeedbackVectorICSlot slot) { | |
2691 if (var->IsUnallocated()) { | |
2692 // Global var, const, or let. | |
2693 __ mov(StoreDescriptor::ValueRegister(), result_register()); | |
2694 __ li(StoreDescriptor::NameRegister(), Operand(var->name())); | |
2695 __ lw(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand()); | |
2696 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot); | |
2697 CallStoreIC(); | |
2698 | |
2699 } else if (var->IsGlobalSlot()) { | |
2700 // Global var, const, or let. | |
2701 DCHECK(var->index() > 0); | |
2702 DCHECK(var->IsStaticGlobalObjectProperty()); | |
2703 // Each var occupies two slots in the context: for reads and writes. | |
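// The write slot is at var->index() + 1; the read slot at var->index() | |
// is used by EmitGlobalVariableLoad above. | |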
2704 int slot_index = var->index() + 1; | |
2705 int depth = scope()->ContextChainLength(var->scope()); | |
2706 __ li(StoreGlobalViaContextDescriptor::DepthRegister(), | |
2707 Operand(Smi::FromInt(depth))); | |
2708 __ li(StoreGlobalViaContextDescriptor::SlotRegister(), | |
2709 Operand(Smi::FromInt(slot_index))); | |
2710 __ li(StoreGlobalViaContextDescriptor::NameRegister(), | |
2711 Operand(var->name())); | |
2712 __ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register()); | |
2713 StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); | |
2714 __ CallStub(&stub); | |
2715 | |
2716 } else if (var->mode() == LET && op != Token::INIT_LET) { | |
2717 // Non-initializing assignment to let variable needs a write barrier. | |
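// The hole check below implements the temporal dead zone: assigning to | |
// a let binding before its initializer has run (illustrative: | |
// '{ x = 1; let x; }') throws a ReferenceError. | |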
2718 DCHECK(!var->IsLookupSlot()); | |
2719 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
2720 Label assign; | |
2721 MemOperand location = VarOperand(var, a1); | |
2722 __ lw(a3, location); | |
2723 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | |
2724 __ Branch(&assign, ne, a3, Operand(t0)); | |
2725 __ li(a3, Operand(var->name())); | |
2726 __ push(a3); | |
2727 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
2728 // Perform the assignment. | |
2729 __ bind(&assign); | |
2730 EmitStoreToStackLocalOrContextSlot(var, location); | |
2731 | |
2732 } else if (var->mode() == CONST && op != Token::INIT_CONST) { | |
2733 // Assignment to const variable needs a write barrier. | |
2734 DCHECK(!var->IsLookupSlot()); | |
2735 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
2736 Label const_error; | |
2737 MemOperand location = VarOperand(var, a1); | |
2738 __ lw(a3, location); | |
2739 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
2740 __ Branch(&const_error, ne, a3, Operand(at)); | |
2741 __ li(a3, Operand(var->name())); | |
2742 __ push(a3); | |
2743 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
2744 __ bind(&const_error); | |
2745 __ CallRuntime(Runtime::kThrowConstAssignError, 0); | |
2746 | |
2747 } else if (var->is_this() && op == Token::INIT_CONST) { | |
2748 // Initializing assignment to const {this} needs a write barrier. | |
2749 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
2750 Label uninitialized_this; | |
2751 MemOperand location = VarOperand(var, a1); | |
2752 __ lw(a3, location); | |
2753 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
2754 __ Branch(&uninitialized_this, eq, a3, Operand(at)); | |
2755 __ li(a0, Operand(var->name())); | |
2756 __ Push(a0); | |
2757 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
2758 __ bind(&uninitialized_this); | |
2759 EmitStoreToStackLocalOrContextSlot(var, location); | |
2760 | |
2761 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { | |
2762 if (var->IsLookupSlot()) { | |
2763 // Assignment to var. | |
2764 __ li(a1, Operand(var->name())); | |
2765 __ li(a0, Operand(Smi::FromInt(language_mode()))); | |
2766 __ Push(v0, cp, a1, a0); // Value, context, name, language mode. | |
2767 __ CallRuntime(Runtime::kStoreLookupSlot, 4); | |
2768 } else { | |
2769 // Assignment to var or initializing assignment to let/const in harmony | |
2770 // mode. | |
2771 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); | |
2772 MemOperand location = VarOperand(var, a1); | |
2773 if (generate_debug_code_ && op == Token::INIT_LET) { | |
2774 // Check for an uninitialized let binding. | |
2775 __ lw(a2, location); | |
2776 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); | |
2777 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0)); | |
2778 } | |
2779 EmitStoreToStackLocalOrContextSlot(var, location); | |
2780 } | |
2781 | |
2782 } else if (op == Token::INIT_CONST_LEGACY) { | |
2783 // Const initializers need a write barrier. | |
2784 DCHECK(!var->IsParameter()); // No const parameters. | |
2785 if (var->IsLookupSlot()) { | |
2786 __ li(a0, Operand(var->name())); | |
2787 __ Push(v0, cp, a0); // Context and name. | |
2788 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); | |
2789 } else { | |
2790 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
2791 Label skip; | |
2792 MemOperand location = VarOperand(var, a1); | |
2793 __ lw(a2, location); | |
2794 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | |
2795 __ Branch(&skip, ne, a2, Operand(at)); | |
2796 EmitStoreToStackLocalOrContextSlot(var, location); | |
2797 __ bind(&skip); | |
2798 } | |
2799 | |
2800 } else { | |
2801 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY); | |
2802 if (is_strict(language_mode())) { | |
2803 __ CallRuntime(Runtime::kThrowConstAssignError, 0); | |
2804 } | |
2805 // Silently ignore store in sloppy mode. | |
2806 } | |
2807 } | |
2808 | |
2809 | |
2810 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | |
2811 // Assignment to a property, using a named store IC. | |
2812 Property* prop = expr->target()->AsProperty(); | |
2813 DCHECK(prop != NULL); | |
2814 DCHECK(prop->key()->IsLiteral()); | |
2815 | |
2816 __ mov(StoreDescriptor::ValueRegister(), result_register()); | |
2817 __ li(StoreDescriptor::NameRegister(), | |
2818 Operand(prop->key()->AsLiteral()->value())); | |
2819 __ pop(StoreDescriptor::ReceiverRegister()); | |
2820 if (FLAG_vector_stores) { | |
2821 EmitLoadStoreICSlot(expr->AssignmentSlot()); | |
2822 CallStoreIC(); | |
2823 } else { | |
2824 CallStoreIC(expr->AssignmentFeedbackId()); | |
2825 } | |
2826 | |
2827 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
2828 context()->Plug(v0); | |
2829 } | |
2830 | |
2831 | |
2832 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { | |
2833 // Assignment to named property of super. | |
2834 // v0 : value | |
2835 // stack : receiver ('this'), home_object | |
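// E.g. `super.prop = v` inside a method lands here; the four runtime
// arguments are, in push order, the receiver, the home object, the key
// and the value.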
2836 DCHECK(prop != NULL); | |
2837 Literal* key = prop->key()->AsLiteral(); | |
2838 DCHECK(key != NULL); | |
2839 | |
2840 __ Push(key->value()); | |
2841 __ Push(v0); | |
2842 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict | |
2843 : Runtime::kStoreToSuper_Sloppy), | |
2844 4); | |
2845 } | |
2846 | |
2847 | |
2848 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { | |
2849 // Assignment to keyed property of super. | |
2850 // v0 : value | |
2851 // stack : receiver ('this'), home_object, key | |
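// E.g. `super[key] = v`; as in the named case above, the runtime sees
// (receiver, home_object, key, value), with only the value pushed here.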
2852 DCHECK(prop != NULL); | |
2853 | |
2854 __ Push(v0); | |
2855 __ CallRuntime( | |
2856 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict | |
2857 : Runtime::kStoreKeyedToSuper_Sloppy), | |
2858 4); | |
2859 } | |
2860 | |
2861 | |
2862 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { | |
2863 // Assignment to a property, using a keyed store IC. | |
2864 // Call keyed store IC. | |
2865 // The arguments are: | |
2866 // - a0 is the value, | |
2867 // - a1 is the key, | |
2868 // - a2 is the receiver. | |
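// E.g. for `o[k] = v` the IC receives v in a0, k in a1 and o in a2.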
2869 __ mov(StoreDescriptor::ValueRegister(), result_register()); | |
2870 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister()); | |
2871 DCHECK(StoreDescriptor::ValueRegister().is(a0)); | |
2872 | |
2873 Handle<Code> ic = | |
2874 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | |
2875 if (FLAG_vector_stores) { | |
2876 EmitLoadStoreICSlot(expr->AssignmentSlot()); | |
2877 CallIC(ic); | |
2878 } else { | |
2879 CallIC(ic, expr->AssignmentFeedbackId()); | |
2880 } | |
2881 | |
2882 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
2883 context()->Plug(v0); | |
2884 } | |
2885 | |
2886 | |
2887 void FullCodeGenerator::VisitProperty(Property* expr) { | |
2888 Comment cmnt(masm_, "[ Property"); | |
2889 SetExpressionPosition(expr); | |
2890 | |
2891 Expression* key = expr->key(); | |
2892 | |
2893 if (key->IsPropertyName()) { | |
2894 if (!expr->IsSuperAccess()) { | |
2895 VisitForAccumulatorValue(expr->obj()); | |
2896 __ Move(LoadDescriptor::ReceiverRegister(), v0); | |
2897 EmitNamedPropertyLoad(expr); | |
2898 } else { | |
2899 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); | |
2900 VisitForStackValue( | |
2901 expr->obj()->AsSuperPropertyReference()->home_object()); | |
2902 EmitNamedSuperPropertyLoad(expr); | |
2903 } | |
2904 } else { | |
2905 if (!expr->IsSuperAccess()) { | |
2906 VisitForStackValue(expr->obj()); | |
2907 VisitForAccumulatorValue(expr->key()); | |
2908 __ Move(LoadDescriptor::NameRegister(), v0); | |
2909 __ pop(LoadDescriptor::ReceiverRegister()); | |
2910 EmitKeyedPropertyLoad(expr); | |
2911 } else { | |
2912 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); | |
2913 VisitForStackValue( | |
2914 expr->obj()->AsSuperPropertyReference()->home_object()); | |
2915 VisitForStackValue(expr->key()); | |
2916 EmitKeyedSuperPropertyLoad(expr); | |
2917 } | |
2918 } | |
2919 PrepareForBailoutForId(expr->LoadId(), TOS_REG); | |
2920 context()->Plug(v0); | |
2921 } | |
2922 | |
2923 | |
2924 void FullCodeGenerator::CallIC(Handle<Code> code, | |
2925 TypeFeedbackId id) { | |
2926 ic_total_count_++; | |
2927 __ Call(code, RelocInfo::CODE_TARGET, id); | |
2928 } | |
2929 | |
2930 | |
2931 // Code common for calls using the IC. | |
2932 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { | |
2933 Expression* callee = expr->expression(); | |
2934 | |
2935 CallICState::CallType call_type = | |
2936 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD; | |
2937 | |
2938 // Get the target function. | |
2939 if (call_type == CallICState::FUNCTION) { | |
2940 { StackValueContext context(this); | |
2941 EmitVariableLoad(callee->AsVariableProxy()); | |
2942 PrepareForBailout(callee, NO_REGISTERS); | |
2943 } | |
2944 // Push undefined as receiver. This is patched in the method prologue if it | |
2945 // is a sloppy mode method. | |
2946 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
2947 __ push(at); | |
2948 } else { | |
2949 // Load the function from the receiver. | |
2950 DCHECK(callee->IsProperty()); | |
2951 DCHECK(!callee->AsProperty()->IsSuperAccess()); | |
2952 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | |
2953 EmitNamedPropertyLoad(callee->AsProperty()); | |
2954 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | |
2955 // Push the target function under the receiver. | |
2956 __ lw(at, MemOperand(sp, 0)); | |
2957 __ push(at); | |
2958 __ sw(v0, MemOperand(sp, kPointerSize)); | |
2959 } | |
2960 | |
2961 EmitCall(expr, call_type); | |
2962 } | |
2963 | |
2964 | |
2965 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { | |
2966 SetExpressionPosition(expr); | |
2967 Expression* callee = expr->expression(); | |
2968 DCHECK(callee->IsProperty()); | |
2969 Property* prop = callee->AsProperty(); | |
2970 DCHECK(prop->IsSuperAccess()); | |
2971 | |
2972 Literal* key = prop->key()->AsLiteral(); | |
2973 DCHECK(!key->value()->IsSmi()); | |
2974 // Load the function from the receiver. | |
2975 const Register scratch = a1; | |
2976 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); | |
2977 VisitForAccumulatorValue(super_ref->home_object()); | |
2978 __ mov(scratch, v0); | |
2979 VisitForAccumulatorValue(super_ref->this_var()); | |
2980 __ Push(scratch, v0, v0, scratch); | |
2981 __ Push(key->value()); | |
2982 __ Push(Smi::FromInt(language_mode())); | |
2983 | |
2984 // Stack here: | |
2985 // - home_object | |
2986 // - this (receiver) | |
2987 // - this (receiver) <-- LoadFromSuper will pop here and below. | |
2988 // - home_object | |
2989 // - key | |
2990 // - language_mode | |
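// The double push of (home_object, this) above is deliberate: the runtime
// call consumes the top four slots, while the bottom two survive and are
// turned into the [target function, receiver] pair that EmitCall expects
// (see the sw below).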
2991 __ CallRuntime(Runtime::kLoadFromSuper, 4); | |
2992 | |
2993 // Replace home_object with target function. | |
2994 __ sw(v0, MemOperand(sp, kPointerSize)); | |
2995 | |
2996 // Stack here: | |
2997 // - target function | |
2998 // - this (receiver) | |
2999 EmitCall(expr, CallICState::METHOD); | |
3000 } | |
3001 | |
3002 | |
3003 // Code common for calls using the IC. | |
3004 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, | |
3005 Expression* key) { | |
3006 // Load the key. | |
3007 VisitForAccumulatorValue(key); | |
3008 | |
3009 Expression* callee = expr->expression(); | |
3010 | |
3011 // Load the function from the receiver. | |
3012 DCHECK(callee->IsProperty()); | |
3013 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | |
3014 __ Move(LoadDescriptor::NameRegister(), v0); | |
3015 EmitKeyedPropertyLoad(callee->AsProperty()); | |
3016 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | |
3017 | |
3018 // Push the target function under the receiver. | |
3019 __ lw(at, MemOperand(sp, 0)); | |
3020 __ push(at); | |
3021 __ sw(v0, MemOperand(sp, kPointerSize)); | |
3022 | |
3023 EmitCall(expr, CallICState::METHOD); | |
3024 } | |
3025 | |
3026 | |
3027 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { | |
3028 Expression* callee = expr->expression(); | |
3029 DCHECK(callee->IsProperty()); | |
3030 Property* prop = callee->AsProperty(); | |
3031 DCHECK(prop->IsSuperAccess()); | |
3032 | |
3033 SetExpressionPosition(prop); | |
3034 // Load the function from the receiver. | |
3035 const Register scratch = a1; | |
3036 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); | |
3037 VisitForAccumulatorValue(super_ref->home_object()); | |
3038 __ Move(scratch, v0); | |
3039 VisitForAccumulatorValue(super_ref->this_var()); | |
3040 __ Push(scratch, v0, v0, scratch); | |
3041 VisitForStackValue(prop->key()); | |
3042 __ Push(Smi::FromInt(language_mode())); | |
3043 | |
3044 // Stack here: | |
3045 // - home_object | |
3046 // - this (receiver) | |
3047 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. | |
3048 // - home_object | |
3049 // - key | |
3050 // - language_mode | |
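// Same stack-duplication trick as in EmitSuperCallWithLoadIC above: the
// top four slots feed the runtime call, the bottom two become the call
// frame for EmitCall.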
3051 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4); | |
3052 | |
3053 // Replace home_object with target function. | |
3054 __ sw(v0, MemOperand(sp, kPointerSize)); | |
3055 | |
3056 // Stack here: | |
3057 // - target function | |
3058 // - this (receiver) | |
3059 EmitCall(expr, CallICState::METHOD); | |
3060 } | |
3061 | |
3062 | |
3063 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) { | |
3064 // Load the arguments. | |
3065 ZoneList<Expression*>* args = expr->arguments(); | |
3066 int arg_count = args->length(); | |
3067 for (int i = 0; i < arg_count; i++) { | |
3068 VisitForStackValue(args->at(i)); | |
3069 } | |
3070 | |
3071 // Record source position of the IC call. | |
3072 SetCallPosition(expr, arg_count); | |
3073 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code(); | |
3074 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot()))); | |
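// The stack is [callee, receiver, arg0 .. argN-1] with the last argument
// on top, so the callee sits arg_count + 1 slots below the stack pointer.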
3075 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | |
3076 // Don't assign a type feedback id to the IC, since type feedback is provided | |
3077 // by the vector above. | |
3078 CallIC(ic); | |
3079 | |
3080 RecordJSReturnSite(expr); | |
3081 // Restore context register. | |
3082 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
3083 context()->DropAndPlug(1, v0); | |
3084 } | |
3085 | |
3086 | |
3087 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | |
3088 // t3: copy of the first argument or undefined if it doesn't exist. | |
3089 if (arg_count > 0) { | |
3090 __ lw(t3, MemOperand(sp, arg_count * kPointerSize)); | |
3091 } else { | |
3092 __ LoadRoot(t3, Heap::kUndefinedValueRootIndex); | |
3093 } | |
3094 | |
3095 // t2: the enclosing function. | |
3096 __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
3097 | |
3098 // t1: the language mode. | |
3099 __ li(t1, Operand(Smi::FromInt(language_mode()))); | |
3100 | |
3101 // t0: the start position of the scope the call resides in. | |
3102 __ li(t0, Operand(Smi::FromInt(scope()->start_position()))); | |
3103 | |
3104 // Do the runtime call. | |
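// Together with the copy of the callee pushed by VisitCall just before
// this helper runs, this makes five runtime arguments.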
3105 __ Push(t3, t2, t1, t0); | |
3106 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); | |
3107 } | |
3108 | |
3109 | |
3110 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls. | |
3111 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) { | |
3112 VariableProxy* callee = expr->expression()->AsVariableProxy(); | |
3113 if (callee->var()->IsLookupSlot()) { | |
3114 Label slow, done; | |
3115 | |
3116 SetExpressionPosition(callee); | |
3117 // Generate code for loading from variables potentially shadowed by | |
3118 // eval-introduced variables. | |
3119 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); | |
3120 | |
3121 __ bind(&slow); | |
3122 // Call the runtime to find the function to call (returned in v0) | |
3123 // and the object holding it (returned in v1). | |
3124 DCHECK(!context_register().is(a2)); | |
3125 __ li(a2, Operand(callee->name())); | |
3126 __ Push(context_register(), a2); | |
3127 __ CallRuntime(Runtime::kLoadLookupSlot, 2); | |
3128 __ Push(v0, v1); // Function, receiver. | |
3129 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); | |
3130 | |
3131 // If fast case code has been generated, emit code to push the | |
3132 // function and receiver and have the slow path jump around this | |
3133 // code. | |
3134 if (done.is_linked()) { | |
3135 Label call; | |
3136 __ Branch(&call); | |
3137 __ bind(&done); | |
3138 // Push function. | |
3139 __ push(v0); | |
3140 // The receiver is implicitly the global receiver. Indicate this | |
3141 // by passing undefined to the call function stub. | |
3142 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | |
3143 __ push(a1); | |
3144 __ bind(&call); | |
3145 } | |
3146 } else { | |
3147 VisitForStackValue(callee); | |
3148 // refEnv.WithBaseObject() | |
3149 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
3150 __ push(a2); // Reserved receiver slot. | |
3151 } | |
3152 } | |
3153 | |
3154 | |
3155 void FullCodeGenerator::VisitCall(Call* expr) { | |
3156 #ifdef DEBUG | |
3157 // We want to verify that RecordJSReturnSite gets called on all paths | |
3158 // through this function. Avoid early returns. | |
3159 expr->return_is_recorded_ = false; | |
3160 #endif | |
3161 | |
3162 Comment cmnt(masm_, "[ Call"); | |
3163 Expression* callee = expr->expression(); | |
3164 Call::CallType call_type = expr->GetCallType(isolate()); | |
3165 | |
3166 if (call_type == Call::POSSIBLY_EVAL_CALL) { | |
3167 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval | |
3168 // to resolve the function we need to call. Then we call the resolved | |
3169 // function using the given arguments. | |
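// Roughly: a call spelled directly as `eval(...)` takes this path, while
// calling the same function through any other name or expression does not
// count as a possible direct eval.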
3170 ZoneList<Expression*>* args = expr->arguments(); | |
3171 int arg_count = args->length(); | |
3172 PushCalleeAndWithBaseObject(expr); | |
3173 | |
3174 // Push the arguments. | |
3175 for (int i = 0; i < arg_count; i++) { | |
3176 VisitForStackValue(args->at(i)); | |
3177 } | |
3178 | |
3179 // Push a copy of the function (found below the arguments) and | |
3180 // resolve eval. | |
3181 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | |
3182 __ push(a1); | |
3183 EmitResolvePossiblyDirectEval(arg_count); | |
3184 | |
3185 // Touch up the stack with the resolved function. | |
3186 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); | |
3187 | |
3188 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); | |
3189 // Record source position for debugger. | |
3190 SetCallPosition(expr, arg_count); | |
3191 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); | |
3192 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | |
3193 __ CallStub(&stub); | |
3194 RecordJSReturnSite(expr); | |
3195 // Restore context register. | |
3196 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
3197 context()->DropAndPlug(1, v0); | |
3198 } else if (call_type == Call::GLOBAL_CALL) { | |
3199 EmitCallWithLoadIC(expr); | |
3200 } else if (call_type == Call::LOOKUP_SLOT_CALL) { | |
3201 // Call to a lookup slot (dynamically introduced variable). | |
3202 PushCalleeAndWithBaseObject(expr); | |
3203 EmitCall(expr); | |
3204 } else if (call_type == Call::PROPERTY_CALL) { | |
3205 Property* property = callee->AsProperty(); | |
3206 bool is_named_call = property->key()->IsPropertyName(); | |
3207 if (property->IsSuperAccess()) { | |
3208 if (is_named_call) { | |
3209 EmitSuperCallWithLoadIC(expr); | |
3210 } else { | |
3211 EmitKeyedSuperCallWithLoadIC(expr); | |
3212 } | |
3213 } else { | |
3214 VisitForStackValue(property->obj()); | |
3215 if (is_named_call) { | |
3216 EmitCallWithLoadIC(expr); | |
3217 } else { | |
3218 EmitKeyedCallWithLoadIC(expr, property->key()); | |
3219 } | |
3220 } | |
3221 } else if (call_type == Call::SUPER_CALL) { | |
3222 EmitSuperConstructorCall(expr); | |
3223 } else { | |
3224 DCHECK(call_type == Call::OTHER_CALL); | |
3225 // Call to an arbitrary expression not handled specially above. | |
3226 VisitForStackValue(callee); | |
3227 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | |
3228 __ push(a1); | |
3229 // Emit function call. | |
3230 EmitCall(expr); | |
3231 } | |
3232 | |
3233 #ifdef DEBUG | |
3234 // RecordJSReturnSite should have been called. | |
3235 DCHECK(expr->return_is_recorded_); | |
3236 #endif | |
3237 } | |
3238 | |
3239 | |
3240 void FullCodeGenerator::VisitCallNew(CallNew* expr) { | |
3241 Comment cmnt(masm_, "[ CallNew"); | |
3242 // According to ECMA-262, section 11.2.2, page 44, the function | |
3243 // expression in new calls must be evaluated before the | |
3244 // arguments. | |
3245 | |
3246 // Push the constructor on the stack. If it's not a function it's used | |
3247 // as the receiver for CALL_NON_FUNCTION; otherwise the value on the | |
3248 // stack is ignored. | |
3249 DCHECK(!expr->expression()->IsSuperPropertyReference()); | |
3250 VisitForStackValue(expr->expression()); | |
3251 | |
3252 // Push the arguments ("left-to-right") on the stack. | |
3253 ZoneList<Expression*>* args = expr->arguments(); | |
3254 int arg_count = args->length(); | |
3255 for (int i = 0; i < arg_count; i++) { | |
3256 VisitForStackValue(args->at(i)); | |
3257 } | |
3258 | |
3259 // Call the construct call builtin that handles allocation and | |
3260 // constructor invocation. | |
3261 SetConstructCallPosition(expr); | |
3262 | |
3263 // Load function and argument count into a1 and a0. | |
3264 __ li(a0, Operand(arg_count)); | |
3265 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); | |
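// E.g. for `new F(x)` this loads 1 into a0 and F, which sits just below
// its single argument on the stack, into a1.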
3266 | |
3267 // Record call targets in unoptimized code. | |
3268 if (FLAG_pretenuring_call_new) { | |
3269 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); | |
3270 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() == | |
3271 expr->CallNewFeedbackSlot().ToInt() + 1); | |
3272 } | |
3273 | |
3274 __ li(a2, FeedbackVector()); | |
3275 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot()))); | |
3276 | |
3277 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); | |
3278 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | |
3279 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | |
3280 context()->Plug(v0); | |
3281 } | |
3282 | |
3283 | |
3284 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { | |
3285 SuperCallReference* super_call_ref = | |
3286 expr->expression()->AsSuperCallReference(); | |
3287 DCHECK_NOT_NULL(super_call_ref); | |
3288 | |
3289 EmitLoadSuperConstructor(super_call_ref); | |
3290 __ push(result_register()); | |
3291 | |
3292 // Push the arguments ("left-to-right") on the stack. | |
3293 ZoneList<Expression*>* args = expr->arguments(); | |
3294 int arg_count = args->length(); | |
3295 for (int i = 0; i < arg_count; i++) { | |
3296 VisitForStackValue(args->at(i)); | |
3297 } | |
3298 | |
3299 // Call the construct call builtin that handles allocation and | |
3300 // constructor invocation. | |
3301 SetConstructCallPosition(expr); | |
3302 | |
3303 // Load original constructor into t0. | |
3304 VisitForAccumulatorValue(super_call_ref->new_target_var()); | |
3305 __ mov(t0, result_register()); | |
3306 | |
3307 // Load function and argument count into a1 and a0. | |
3308 __ li(a0, Operand(arg_count)); | |
3309 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); | |
3310 | |
3311 // Record call targets in unoptimized code. | |
3312 if (FLAG_pretenuring_call_new) { | |
3313 UNREACHABLE(); | |
3314 /* TODO(dslomov): support pretenuring. | |
3315 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); | |
3316 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() == | |
3317 expr->CallNewFeedbackSlot().ToInt() + 1); | |
3318 */ | |
3319 } | |
3320 | |
3321 __ li(a2, FeedbackVector()); | |
3322 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot()))); | |
3323 | |
3324 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET); | |
3325 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | |
3326 | |
3327 RecordJSReturnSite(expr); | |
3328 | |
3329 context()->Plug(v0); | |
3330 } | |
3331 | |
3332 | |
3333 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { | |
3334 ZoneList<Expression*>* args = expr->arguments(); | |
3335 DCHECK(args->length() == 1); | |
3336 | |
3337 VisitForAccumulatorValue(args->at(0)); | |
3338 | |
3339 Label materialize_true, materialize_false; | |
3340 Label* if_true = NULL; | |
3341 Label* if_false = NULL; | |
3342 Label* fall_through = NULL; | |
3343 context()->PrepareTest(&materialize_true, &materialize_false, | |
3344 &if_true, &if_false, &fall_through); | |
3345 | |
3346 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3347 __ SmiTst(v0, t0); | |
3348 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through); | |
3349 | |
3350 context()->Plug(if_true, if_false); | |
3351 } | |
3352 | |
3353 | |
3354 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { | |
3355 ZoneList<Expression*>* args = expr->arguments(); | |
3356 DCHECK(args->length() == 1); | |
3357 | |
3358 VisitForAccumulatorValue(args->at(0)); | |
3359 | |
3360 Label materialize_true, materialize_false; | |
3361 Label* if_true = NULL; | |
3362 Label* if_false = NULL; | |
3363 Label* fall_through = NULL; | |
3364 context()->PrepareTest(&materialize_true, &materialize_false, | |
3365 &if_true, &if_false, &fall_through); | |
3366 | |
3367 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3368 __ NonNegativeSmiTst(v0, at); | |
3369 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through); | |
3370 | |
3371 context()->Plug(if_true, if_false); | |
3372 } | |
3373 | |
3374 | |
3375 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { | |
3376 ZoneList<Expression*>* args = expr->arguments(); | |
3377 DCHECK(args->length() == 1); | |
3378 | |
3379 VisitForAccumulatorValue(args->at(0)); | |
3380 | |
3381 Label materialize_true, materialize_false; | |
3382 Label* if_true = NULL; | |
3383 Label* if_false = NULL; | |
3384 Label* fall_through = NULL; | |
3385 context()->PrepareTest(&materialize_true, &materialize_false, | |
3386 &if_true, &if_false, &fall_through); | |
3387 | |
3388 __ JumpIfSmi(v0, if_false); | |
3389 __ LoadRoot(at, Heap::kNullValueRootIndex); | |
3390 __ Branch(if_true, eq, v0, Operand(at)); | |
3391 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); | |
3392 // Undetectable objects behave like undefined when tested with typeof. | |
3393 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset)); | |
3394 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); | |
3395 __ Branch(if_false, ne, at, Operand(zero_reg)); | |
3396 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset)); | |
3397 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | |
3398 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3399 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE), | |
3400 if_true, if_false, fall_through); | |
3401 | |
3402 context()->Plug(if_true, if_false); | |
3403 } | |
3404 | |
3405 | |
3406 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { | |
3407 ZoneList<Expression*>* args = expr->arguments(); | |
3408 DCHECK(args->length() == 1); | |
3409 | |
3410 VisitForAccumulatorValue(args->at(0)); | |
3411 | |
3412 Label materialize_true, materialize_false; | |
3413 Label* if_true = NULL; | |
3414 Label* if_false = NULL; | |
3415 Label* fall_through = NULL; | |
3416 context()->PrepareTest(&materialize_true, &materialize_false, | |
3417 &if_true, &if_false, &fall_through); | |
3418 | |
3419 __ JumpIfSmi(v0, if_false); | |
3420 __ GetObjectType(v0, a1, a1); | |
3421 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3422 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE), | |
3423 if_true, if_false, fall_through); | |
3424 | |
3425 context()->Plug(if_true, if_false); | |
3426 } | |
3427 | |
3428 | |
3429 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { | |
3430 ZoneList<Expression*>* args = expr->arguments(); | |
3431 DCHECK(args->length() == 1); | |
3432 | |
3433 VisitForAccumulatorValue(args->at(0)); | |
3434 | |
3435 Label materialize_true, materialize_false; | |
3436 Label* if_true = NULL; | |
3437 Label* if_false = NULL; | |
3438 Label* fall_through = NULL; | |
3439 context()->PrepareTest(&materialize_true, &materialize_false, | |
3440 &if_true, &if_false, &fall_through); | |
3441 | |
3442 __ JumpIfSmi(v0, if_false); | |
3443 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | |
3444 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); | |
3445 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3446 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); | |
3447 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through); | |
3448 | |
3449 context()->Plug(if_true, if_false); | |
3450 } | |
3451 | |
3452 | |
3453 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( | |
3454 CallRuntime* expr) { | |
3455 ZoneList<Expression*>* args = expr->arguments(); | |
3456 DCHECK(args->length() == 1); | |
3457 | |
3458 VisitForAccumulatorValue(args->at(0)); | |
3459 | |
3460 Label materialize_true, materialize_false, skip_lookup; | |
3461 Label* if_true = NULL; | |
3462 Label* if_false = NULL; | |
3463 Label* fall_through = NULL; | |
3464 context()->PrepareTest(&materialize_true, &materialize_false, | |
3465 &if_true, &if_false, &fall_through); | |
3466 | |
3467 __ AssertNotSmi(v0); | |
3468 | |
3469 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); | |
3470 __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset)); | |
3471 __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf); | |
3472 __ Branch(&skip_lookup, ne, t0, Operand(zero_reg)); | |
3473 | |
3474 // Check for fast case object. Generate false result for slow case object. | |
3475 __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | |
3476 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); | |
3477 __ LoadRoot(t0, Heap::kHashTableMapRootIndex); | |
3478 __ Branch(if_false, eq, a2, Operand(t0)); | |
3479 | |
3480 // Look for the valueOf name in the descriptor array and indicate false if | |
3481 // found. Since we omit an enumeration index check, a name added via a | |
3482 // transition that shares its descriptor array yields a false positive here. | |
3483 Label entry, loop, done; | |
3484 | |
3485 // Skip loop if no descriptors are valid. | |
3486 __ NumberOfOwnDescriptors(a3, a1); | |
3487 __ Branch(&done, eq, a3, Operand(zero_reg)); | |
3488 | |
3489 __ LoadInstanceDescriptors(a1, t0); | |
3490 // t0: descriptor array. | |
3491 // a3: valid entries in the descriptor array. | |
3492 STATIC_ASSERT(kSmiTag == 0); | |
3493 STATIC_ASSERT(kSmiTagSize == 1); | |
3494 STATIC_ASSERT(kPointerSize == 4); | |
3495 __ li(at, Operand(DescriptorArray::kDescriptorSize)); | |
3496 __ Mul(a3, a3, at); | |
3497 // Calculate location of the first key name. | |
3498 __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); | |
3499 // Calculate the end of the descriptor array. | |
3500 __ mov(a2, t0); | |
3501 __ sll(t1, a3, kPointerSizeLog2); | |
3502 __ Addu(a2, a2, t1); | |
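// a2 is the exclusive end of the key scan: the first key plus
// valid entries * kDescriptorSize * kPointerSize.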
3503 | |
3504 // Loop through all the keys in the descriptor array. If one of these is the | |
3505 // string "valueOf" the result is false. | |
3506 // The use of t2 to store the valueOf string assumes that it is not otherwise | |
3507 // used in the loop below. | |
3508 __ li(t2, Operand(isolate()->factory()->value_of_string())); | |
3509 __ jmp(&entry); | |
3510 __ bind(&loop); | |
3511 __ lw(a3, MemOperand(t0, 0)); | |
3512 __ Branch(if_false, eq, a3, Operand(t2)); | |
3513 __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); | |
3514 __ bind(&entry); | |
3515 __ Branch(&loop, ne, t0, Operand(a2)); | |
3516 | |
3517 __ bind(&done); | |
3518 | |
3519 // Set the bit in the map to indicate that there is no local valueOf field. | |
3520 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset)); | |
3521 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); | |
3522 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset)); | |
3523 | |
3524 __ bind(&skip_lookup); | |
3525 | |
3526 // If a valueOf property is not found on the object, check that its | |
3527 // prototype is the unmodified String prototype. If not, the result is false. | |
3528 __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset)); | |
3529 __ JumpIfSmi(a2, if_false); | |
3530 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); | |
3531 __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
3532 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); | |
3533 __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); | |
3534 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3535 Split(eq, a2, Operand(a3), if_true, if_false, fall_through); | |
3536 | |
3537 context()->Plug(if_true, if_false); | |
3538 } | |
3539 | |
3540 | |
3541 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { | |
3542 ZoneList<Expression*>* args = expr->arguments(); | |
3543 DCHECK(args->length() == 1); | |
3544 | |
3545 VisitForAccumulatorValue(args->at(0)); | |
3546 | |
3547 Label materialize_true, materialize_false; | |
3548 Label* if_true = NULL; | |
3549 Label* if_false = NULL; | |
3550 Label* fall_through = NULL; | |
3551 context()->PrepareTest(&materialize_true, &materialize_false, | |
3552 &if_true, &if_false, &fall_through); | |
3553 | |
3554 __ JumpIfSmi(v0, if_false); | |
3555 __ GetObjectType(v0, a1, a2); | |
3556 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3557 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE)); | |
3558 __ Branch(if_false); | |
3559 | |
3560 context()->Plug(if_true, if_false); | |
3561 } | |
3562 | |
3563 | |
3564 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { | |
3565 ZoneList<Expression*>* args = expr->arguments(); | |
3566 DCHECK(args->length() == 1); | |
3567 | |
3568 VisitForAccumulatorValue(args->at(0)); | |
3569 | |
3570 Label materialize_true, materialize_false; | |
3571 Label* if_true = NULL; | |
3572 Label* if_false = NULL; | |
3573 Label* fall_through = NULL; | |
3574 context()->PrepareTest(&materialize_true, &materialize_false, | |
3575 &if_true, &if_false, &fall_through); | |
3576 | |
3577 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); | |
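// -0.0 is the heap number whose exponent word is 0x80000000 (just the sign
// bit) and whose mantissa word is zero; the register shuffle below folds
// both word tests into the single final comparison.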
3578 __ lw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset)); | |
3579 __ lw(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset)); | |
3580 __ li(t0, 0x80000000); | |
3581 Label not_nan; | |
3582 __ Branch(¬_nan, ne, a2, Operand(t0)); | |
3583 __ mov(t0, zero_reg); | |
3584 __ mov(a2, a1); | |
3585 __ bind(¬_nan); | |
3586 | |
3587 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3588 Split(eq, a2, Operand(t0), if_true, if_false, fall_through); | |
3589 | |
3590 context()->Plug(if_true, if_false); | |
3591 } | |
3592 | |
3593 | |
3594 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { | |
3595 ZoneList<Expression*>* args = expr->arguments(); | |
3596 DCHECK(args->length() == 1); | |
3597 | |
3598 VisitForAccumulatorValue(args->at(0)); | |
3599 | |
3600 Label materialize_true, materialize_false; | |
3601 Label* if_true = NULL; | |
3602 Label* if_false = NULL; | |
3603 Label* fall_through = NULL; | |
3604 context()->PrepareTest(&materialize_true, &materialize_false, | |
3605 &if_true, &if_false, &fall_through); | |
3606 | |
3607 __ JumpIfSmi(v0, if_false); | |
3608 __ GetObjectType(v0, a1, a1); | |
3609 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3610 Split(eq, a1, Operand(JS_ARRAY_TYPE), | |
3611 if_true, if_false, fall_through); | |
3612 | |
3613 context()->Plug(if_true, if_false); | |
3614 } | |
3615 | |
3616 | |
3617 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { | |
3618 ZoneList<Expression*>* args = expr->arguments(); | |
3619 DCHECK(args->length() == 1); | |
3620 | |
3621 VisitForAccumulatorValue(args->at(0)); | |
3622 | |
3623 Label materialize_true, materialize_false; | |
3624 Label* if_true = NULL; | |
3625 Label* if_false = NULL; | |
3626 Label* fall_through = NULL; | |
3627 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | |
3628 &if_false, &fall_through); | |
3629 | |
3630 __ JumpIfSmi(v0, if_false); | |
3631 __ GetObjectType(v0, a1, a1); | |
3632 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3633 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through); | |
3634 | |
3635 context()->Plug(if_true, if_false); | |
3636 } | |
3637 | |
3638 | |
3639 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { | |
3640 ZoneList<Expression*>* args = expr->arguments(); | |
3641 DCHECK(args->length() == 1); | |
3642 | |
3643 VisitForAccumulatorValue(args->at(0)); | |
3644 | |
3645 Label materialize_true, materialize_false; | |
3646 Label* if_true = NULL; | |
3647 Label* if_false = NULL; | |
3648 Label* fall_through = NULL; | |
3649 context()->PrepareTest(&materialize_true, &materialize_false, | |
3650 &if_true, &if_false, &fall_through); | |
3651 | |
3652 __ JumpIfSmi(v0, if_false); | |
3653 __ GetObjectType(v0, a1, a1); | |
3654 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3655 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through); | |
3656 | |
3657 context()->Plug(if_true, if_false); | |
3658 } | |
3659 | |
3660 | |
3661 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { | |
3662 ZoneList<Expression*>* args = expr->arguments(); | |
3663 DCHECK(args->length() == 1); | |
3664 | |
3665 VisitForAccumulatorValue(args->at(0)); | |
3666 | |
3667 Label materialize_true, materialize_false; | |
3668 Label* if_true = NULL; | |
3669 Label* if_false = NULL; | |
3670 Label* fall_through = NULL; | |
3671 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | |
3672 &if_false, &fall_through); | |
3673 | |
3674 __ JumpIfSmi(v0, if_false); | |
3675 Register map = a1; | |
3676 Register type_reg = a2; | |
3677 __ GetObjectType(v0, map, type_reg); | |
3678 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE)); | |
3679 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3680 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE), | |
3681 if_true, if_false, fall_through); | |
3682 | |
3683 context()->Plug(if_true, if_false); | |
3684 } | |
3685 | |
3686 | |
3687 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { | |
3688 DCHECK(expr->arguments()->length() == 0); | |
3689 | |
3690 Label materialize_true, materialize_false; | |
3691 Label* if_true = NULL; | |
3692 Label* if_false = NULL; | |
3693 Label* fall_through = NULL; | |
3694 context()->PrepareTest(&materialize_true, &materialize_false, | |
3695 &if_true, &if_false, &fall_through); | |
3696 | |
3697 // Get the frame pointer for the calling frame. | |
3698 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
3699 | |
3700 // Skip the arguments adaptor frame if it exists. | |
3701 Label check_frame_marker; | |
3702 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset)); | |
3703 __ Branch(&check_frame_marker, ne, | |
3704 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
3705 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); | |
3706 | |
3707 // Check the marker in the calling frame. | |
3708 __ bind(&check_frame_marker); | |
3709 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset)); | |
3710 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3711 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)), | |
3712 if_true, if_false, fall_through); | |
3713 | |
3714 context()->Plug(if_true, if_false); | |
3715 } | |
3716 | |
3717 | |
3718 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { | |
3719 ZoneList<Expression*>* args = expr->arguments(); | |
3720 DCHECK(args->length() == 2); | |
3721 | |
3722 // Load the two objects into registers and perform the comparison. | |
3723 VisitForStackValue(args->at(0)); | |
3724 VisitForAccumulatorValue(args->at(1)); | |
3725 | |
3726 Label materialize_true, materialize_false; | |
3727 Label* if_true = NULL; | |
3728 Label* if_false = NULL; | |
3729 Label* fall_through = NULL; | |
3730 context()->PrepareTest(&materialize_true, &materialize_false, | |
3731 &if_true, &if_false, &fall_through); | |
3732 | |
3733 __ pop(a1); | |
3734 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3735 Split(eq, v0, Operand(a1), if_true, if_false, fall_through); | |
3736 | |
3737 context()->Plug(if_true, if_false); | |
3738 } | |
3739 | |
3740 | |
3741 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { | |
3742 ZoneList<Expression*>* args = expr->arguments(); | |
3743 DCHECK(args->length() == 1); | |
3744 | |
3745 // ArgumentsAccessStub expects the key in a1 and the formal | |
3746 // parameter count in a0. | |
3747 VisitForAccumulatorValue(args->at(0)); | |
3748 __ mov(a1, v0); | |
3749 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); | |
3750 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); | |
3751 __ CallStub(&stub); | |
3752 context()->Plug(v0); | |
3753 } | |
3754 | |
3755 | |
3756 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { | |
3757 DCHECK(expr->arguments()->length() == 0); | |
3758 Label exit; | |
3759 // Get the number of formal parameters. | |
3760 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); | |
3761 | |
3762 // Check if the calling frame is an arguments adaptor frame. | |
3763 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
3764 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); | |
3765 __ Branch(&exit, ne, a3, | |
3766 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
3767 | |
3768 // Arguments adaptor case: Read the arguments length from the | |
3769 // adaptor frame. | |
3770 __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
3771 | |
3772 __ bind(&exit); | |
3773 context()->Plug(v0); | |
3774 } | |
3775 | |
3776 | |
3777 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { | |
3778 ZoneList<Expression*>* args = expr->arguments(); | |
3779 DCHECK(args->length() == 1); | |
3780 Label done, null, function, non_function_constructor; | |
3781 | |
3782 VisitForAccumulatorValue(args->at(0)); | |
3783 | |
3784 // If the object is a smi, we return null. | |
3785 __ JumpIfSmi(v0, &null); | |
3786 | |
3787 // Check that the object is a JS object but take special care of JS | |
3788 // functions to make sure they have 'Function' as their class. | |
3789 // Assume that there are only two callable types, and one of them is at | |
3790 // either end of the type range for JS object types. Saves extra comparisons. | |
3791 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | |
3792 __ GetObjectType(v0, v0, a1); // Map is now in v0. | |
3793 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); | |
3794 | |
3795 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == | |
3796 FIRST_SPEC_OBJECT_TYPE + 1); | |
3797 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); | |
3798 | |
3799 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == | |
3800 LAST_SPEC_OBJECT_TYPE - 1); | |
3801 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE)); | |
3802 // Assume that there is no larger type. | |
3803 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); | |
3804 | |
3805 // Check if the constructor in the map is a JS function. | |
3806 Register instance_type = a2; | |
3807 __ GetMapConstructor(v0, v0, a1, instance_type); | |
3808 __ Branch(&non_function_constructor, ne, instance_type, | |
3809 Operand(JS_FUNCTION_TYPE)); | |
3810 | |
3811 // v0 now contains the constructor function. Grab the | |
3812 // instance class name from there. | |
3813 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset)); | |
3814 __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset)); | |
3815 __ Branch(&done); | |
3816 | |
3817 // Functions have class 'Function'. | |
3818 __ bind(&function); | |
3819 __ LoadRoot(v0, Heap::kFunction_stringRootIndex); | |
3820 __ jmp(&done); | |
3821 | |
3822 // Objects with a non-function constructor have class 'Object'. | |
3823 __ bind(&non_function_constructor); | |
3824 __ LoadRoot(v0, Heap::kObject_stringRootIndex); | |
3825 __ jmp(&done); | |
3826 | |
3827 // Non-JS objects have class null. | |
3828 __ bind(&null); | |
3829 __ LoadRoot(v0, Heap::kNullValueRootIndex); | |
3830 | |
3831 // All done. | |
3832 __ bind(&done); | |
3833 | |
3834 context()->Plug(v0); | |
3835 } | |
3836 | |
3837 | |
3838 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { | |
3839 ZoneList<Expression*>* args = expr->arguments(); | |
3840 DCHECK(args->length() == 1); | |
3841 | |
3842 VisitForAccumulatorValue(args->at(0)); // Load the object. | |
3843 | |
3844 Label done; | |
3845 // If the object is a smi return the object. | |
3846 __ JumpIfSmi(v0, &done); | |
3847 // If the object is not a value type, return the object. | |
3848 __ GetObjectType(v0, a1, a1); | |
3849 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE)); | |
3850 | |
3851 __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset)); | |
3852 | |
3853 __ bind(&done); | |
3854 context()->Plug(v0); | |
3855 } | |
3856 | |
3857 | |
3858 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) { | |
3859 ZoneList<Expression*>* args = expr->arguments(); | |
3860 DCHECK_EQ(1, args->length()); | |
3861 | |
3862 VisitForAccumulatorValue(args->at(0)); | |
3863 | |
3864 Label materialize_true, materialize_false; | |
3865 Label* if_true = nullptr; | |
3866 Label* if_false = nullptr; | |
3867 Label* fall_through = nullptr; | |
3868 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | |
3869 &if_false, &fall_through); | |
3870 | |
3871 __ JumpIfSmi(v0, if_false); | |
3872 __ GetObjectType(v0, a1, a1); | |
3873 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3874 Split(eq, a1, Operand(JS_DATE_TYPE), if_true, if_false, fall_through); | |
3875 | |
3876 context()->Plug(if_true, if_false); | |
3877 } | |
3878 | |
3879 | |
3880 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { | |
3881 ZoneList<Expression*>* args = expr->arguments(); | |
3882 DCHECK(args->length() == 2); | |
3883 DCHECK_NOT_NULL(args->at(1)->AsLiteral()); | |
3884 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); | |
3885 | |
3886 VisitForAccumulatorValue(args->at(0)); // Load the object. | |
3887 | |
3888 Register object = v0; | |
3889 Register result = v0; | |
3890 Register scratch0 = t5; | |
3891 Register scratch1 = a1; | |
3892 | |
3893 if (index->value() == 0) { | |
3894 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset)); | |
3895 } else { | |
3896 Label runtime, done; | |
3897 if (index->value() < JSDate::kFirstUncachedField) { | |
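// Cached date fields are valid only while the object's cache stamp matches
// the isolate's date cache stamp; on a mismatch, fall through to the C
// function below.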
3898 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | |
3899 __ li(scratch1, Operand(stamp)); | |
3900 __ lw(scratch1, MemOperand(scratch1)); | |
3901 __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); | |
3902 __ Branch(&runtime, ne, scratch1, Operand(scratch0)); | |
3903 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset + | |
3904 kPointerSize * index->value())); | |
3905 __ jmp(&done); | |
3906 } | |
3907 __ bind(&runtime); | |
3908 __ PrepareCallCFunction(2, scratch1); | |
3909 __ li(a1, Operand(index)); | |
3910 __ Move(a0, object); | |
3911 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | |
3912 __ bind(&done); | |
3913 } | |
3914 | |
3915 context()->Plug(result); | |
3916 } | |
3917 | |
3918 | |
3919 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { | |
3920 ZoneList<Expression*>* args = expr->arguments(); | |
3921 DCHECK_EQ(3, args->length()); | |
3922 | |
3923 Register string = v0; | |
3924 Register index = a1; | |
3925 Register value = a2; | |
3926 | |
3927 VisitForStackValue(args->at(0)); // index | |
3928 VisitForStackValue(args->at(1)); // value | |
3929 VisitForAccumulatorValue(args->at(2)); // string | |
3930 __ Pop(index, value); | |
3931 | |
3932 if (FLAG_debug_code) { | |
3933 __ SmiTst(value, at); | |
3934 __ Check(eq, kNonSmiValue, at, Operand(zero_reg)); | |
3935 __ SmiTst(index, at); | |
3936 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg)); | |
3937 __ SmiUntag(index, index); | |
3938 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | |
3939 Register scratch = t5; | |
3940 __ EmitSeqStringSetCharCheck( | |
3941 string, index, value, scratch, one_byte_seq_type); | |
3942 __ SmiTag(index, index); | |
3943 } | |
3944 | |
3945 __ SmiUntag(value, value); | |
3946 __ Addu(at, | |
3947 string, | |
3948 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); | |
3949 __ SmiUntag(index); | |
3950 __ Addu(at, at, index); | |
3951 __ sb(value, MemOperand(at)); | |
3952 context()->Plug(string); | |
3953 } | |
3954 | |
3955 | |
3956 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { | |
3957 ZoneList<Expression*>* args = expr->arguments(); | |
3958 DCHECK_EQ(3, args->length()); | |
3959 | |
3960 Register string = v0; | |
3961 Register index = a1; | |
3962 Register value = a2; | |
3963 | |
3964 VisitForStackValue(args->at(0)); // index | |
3965 VisitForStackValue(args->at(1)); // value | |
3966 VisitForAccumulatorValue(args->at(2)); // string | |
3967 __ Pop(index, value); | |
3968 | |
3969 if (FLAG_debug_code) { | |
3970 __ SmiTst(value, at); | |
3971 __ Check(eq, kNonSmiValue, at, Operand(zero_reg)); | |
3972 __ SmiTst(index, at); | |
3973 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg)); | |
3974 __ SmiUntag(index, index); | |
3975 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | |
3976 Register scratch = t5; | |
3977 __ EmitSeqStringSetCharCheck( | |
3978 string, index, value, scratch, two_byte_seq_type); | |
3979 __ SmiTag(index, index); | |
3980 } | |
3981 | |
3982 __ SmiUntag(value, value); | |
3983 __ Addu(at, | |
3984 string, | |
3985 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | |
3986 __ Addu(at, at, index); | |
3987 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); | |
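// No SmiUntag of the index here: with kSmiTagSize == 1 a smi already equals
// index * 2, which is exactly the byte offset of a two-byte character.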
3988 __ sh(value, MemOperand(at)); | |
3989 context()->Plug(string); | |
3990 } | |
3991 | |
3992 | |
3993 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { | |
3994 ZoneList<Expression*>* args = expr->arguments(); | |
3995 DCHECK(args->length() == 2); | |
3996 | |
3997 VisitForStackValue(args->at(0)); // Load the object. | |
3998 VisitForAccumulatorValue(args->at(1)); // Load the value. | |
3999 __ pop(a1); // v0 = value. a1 = object. | |
4000 | |
4001 Label done; | |
4002 // If the object is a smi, return the value. | |
4003 __ JumpIfSmi(a1, &done); | |
4004 | |
4005 // If the object is not a value type, return the value. | |
4006 __ GetObjectType(a1, a2, a2); | |
4007 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE)); | |
4008 | |
4009 // Store the value. | |
4010 __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset)); | |
4011 // Update the write barrier. Save the value as it will be | |
4012 // overwritten by the write barrier code and is needed afterward. | |
4013 __ mov(a2, v0); | |
4014 __ RecordWriteField( | |
4015 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs); | |
4016 | |
4017 __ bind(&done); | |
4018 context()->Plug(v0); | |
4019 } | |
4020 | |
4021 | |
4022 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { | |
4023 ZoneList<Expression*>* args = expr->arguments(); | |
4024 DCHECK_EQ(args->length(), 1); | |
4025 | |
4026 // Load the argument into a0 and call the stub. | |
4027 VisitForAccumulatorValue(args->at(0)); | |
4028 __ mov(a0, result_register()); | |
4029 | |
4030 NumberToStringStub stub(isolate()); | |
4031 __ CallStub(&stub); | |
4032 context()->Plug(v0); | |
4033 } | |
4034 | |
4035 | |
4036 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { | |
4037 ZoneList<Expression*>* args = expr->arguments(); | |
4038 DCHECK(args->length() == 1); | |
4039 | |
4040 VisitForAccumulatorValue(args->at(0)); | |
4041 | |
4042 Label done; | |
4043 StringCharFromCodeGenerator generator(v0, a1); | |
4044 generator.GenerateFast(masm_); | |
4045 __ jmp(&done); | |
4046 | |
4047 NopRuntimeCallHelper call_helper; | |
4048 generator.GenerateSlow(masm_, call_helper); | |
4049 | |
4050 __ bind(&done); | |
4051 context()->Plug(a1); | |
4052 } | |
4053 | |
4054 | |
4055 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { | |
4056 ZoneList<Expression*>* args = expr->arguments(); | |
4057 DCHECK(args->length() == 2); | |
4058 | |
4059 VisitForStackValue(args->at(0)); | |
4060 VisitForAccumulatorValue(args->at(1)); | |
4061 __ mov(a0, result_register()); | |
4062 | |
4063 Register object = a1; | |
4064 Register index = a0; | |
4065 Register result = v0; | |
4066 | |
4067 __ pop(object); | |
4068 | |
4069 Label need_conversion; | |
4070 Label index_out_of_range; | |
4071 Label done; | |
4072 StringCharCodeAtGenerator generator(object, | |
4073 index, | |
4074 result, | |
4075 &need_conversion, | |
4076 &need_conversion, | |
4077 &index_out_of_range, | |
4078 STRING_INDEX_IS_NUMBER); | |
4079 generator.GenerateFast(masm_); | |
4080 __ jmp(&done); | |
4081 | |
4082 __ bind(&index_out_of_range); | |
4083 // When the index is out of range, the spec requires us to return | |
4084 // NaN. | |
4085 __ LoadRoot(result, Heap::kNanValueRootIndex); | |
4086 __ jmp(&done); | |
4087 | |
4088 __ bind(&need_conversion); | |
4089 // Load the undefined value into the result register, which will | |
4090 // trigger conversion. | |
4091 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | |
4092 __ jmp(&done); | |
4093 | |
4094 NopRuntimeCallHelper call_helper; | |
4095 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); | |
4096 | |
4097 __ bind(&done); | |
4098 context()->Plug(result); | |
4099 } | |
4100 | |
4101 | |
4102 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { | |
4103 ZoneList<Expression*>* args = expr->arguments(); | |
4104 DCHECK(args->length() == 2); | |
4105 | |
4106 VisitForStackValue(args->at(0)); | |
4107 VisitForAccumulatorValue(args->at(1)); | |
4108 __ mov(a0, result_register()); | |
4109 | |
4110 Register object = a1; | |
4111 Register index = a0; | |
4112 Register scratch = a3; | |
4113 Register result = v0; | |
4114 | |
4115 __ pop(object); | |
4116 | |
4117 Label need_conversion; | |
4118 Label index_out_of_range; | |
4119 Label done; | |
4120 StringCharAtGenerator generator(object, | |
4121 index, | |
4122 scratch, | |
4123 result, | |
4124 &need_conversion, | |
4125 &need_conversion, | |
4126 &index_out_of_range, | |
4127 STRING_INDEX_IS_NUMBER); | |
4128 generator.GenerateFast(masm_); | |
4129 __ jmp(&done); | |
4130 | |
4131 __ bind(&index_out_of_range); | |
4132 // When the index is out of range, the spec requires us to return | |
4133 // the empty string. | |
4134 __ LoadRoot(result, Heap::kempty_stringRootIndex); | |
4135 __ jmp(&done); | |
4136 | |
4137 __ bind(&need_conversion); | |
4138 // Move smi zero into the result register, which will trigger | |
4139 // conversion. | |
4140 __ li(result, Operand(Smi::FromInt(0))); | |
4141 __ jmp(&done); | |
4142 | |
4143 NopRuntimeCallHelper call_helper; | |
4144 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); | |
4145 | |
4146 __ bind(&done); | |
4147 context()->Plug(result); | |
4148 } | |
4149 | |
4150 | |
4151 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { | |
4152 ZoneList<Expression*>* args = expr->arguments(); | |
4153 DCHECK_EQ(2, args->length()); | |
4154 VisitForStackValue(args->at(0)); | |
4155 VisitForAccumulatorValue(args->at(1)); | |
4156 | |
4157 __ pop(a1); | |
4158 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1. | |
4159 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED); | |
4160 __ CallStub(&stub); | |
4161 context()->Plug(v0); | |
4162 } | |
4163 | |
4164 | |
4165 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { | |
4166 ZoneList<Expression*>* args = expr->arguments(); | |
4167 DCHECK(args->length() >= 2); | |
4168 | |
4169 int arg_count = args->length() - 2; // 2 ~ receiver and function. | |
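// The operands are (receiver, arg0 .. argN-1, function): everything up to
// the function is pushed below, and the function itself is left in v0.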
4170 for (int i = 0; i < arg_count + 1; i++) { | |
4171 VisitForStackValue(args->at(i)); | |
4172 } | |
4173 VisitForAccumulatorValue(args->last()); // Function. | |
4174 | |
4175 Label runtime, done; | |
4176 // Check for non-function argument (including proxy). | |
4177 __ JumpIfSmi(v0, &runtime); | |
4178 __ GetObjectType(v0, a1, a1); | |
4179 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE)); | |
4180 | |
4181 // InvokeFunction requires the function in a1. Move it in there. | |
4182 __ mov(a1, result_register()); | |
4183 ParameterCount count(arg_count); | |
4184 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper()); | |
4185 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
4186 __ jmp(&done); | |
4187 | |
4188 __ bind(&runtime); | |
4189 __ push(v0); | |
4190 __ CallRuntime(Runtime::kCall, args->length()); | |
4191 __ bind(&done); | |
4192 | |
4193 context()->Plug(v0); | |
4194 } | |
4195 | |
4196 | |
4197 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { | |
4198 ZoneList<Expression*>* args = expr->arguments(); | |
4199 DCHECK(args->length() == 2); | |
4200 | |
4201 // new.target | |
4202 VisitForStackValue(args->at(0)); | |
4203 | |
4204 // .this_function | |
4205 VisitForStackValue(args->at(1)); | |
4206 __ CallRuntime(Runtime::kGetPrototype, 1); | |
4207 __ Push(result_register()); | |
4208 | |
4209 // Load original constructor into t0. | |
4210 __ lw(t0, MemOperand(sp, 1 * kPointerSize)); | |
4211 | |
4212 // Check if the calling frame is an arguments adaptor frame. | |
4213 Label adaptor_frame, args_set_up, runtime; | |
4214 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
4215 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); | |
4216 __ Branch(&adaptor_frame, eq, a3, | |
4217 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
4218 // The default constructor has no arguments, so no adaptor frame means no args. | |
4219 __ mov(a0, zero_reg); | |
4220 __ Branch(&args_set_up); | |
4221 | |
4222 // Copy arguments from adaptor frame. | |
4223 { | |
4224 __ bind(&adaptor_frame); | |
4225 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
4226 __ SmiUntag(a1, a1); | |
4227 | |
4228 __ mov(a0, a1); | |
4229 | |
4230 // Get arguments pointer in a2. | |
4231 __ sll(at, a1, kPointerSizeLog2); | |
4232 __ addu(a2, a2, at); | |
4233 __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset)); | |
4234 Label loop; | |
4235 __ bind(&loop); | |
4236 // Pre-decrement a2 by kPointerSize on each iteration; pre-decrementing | |
4237 // (rather than post-decrementing) skips the receiver slot. | |
4238 __ Addu(a2, a2, Operand(-kPointerSize)); | |
4239 __ lw(a3, MemOperand(a2)); | |
4240 __ Push(a3); | |
4241 __ Addu(a1, a1, Operand(-1)); | |
4242 __ Branch(&loop, ne, a1, Operand(zero_reg)); | |
4243 } | |
4244 | |
4245 __ bind(&args_set_up); | |
4246 __ sll(at, a0, kPointerSizeLog2); | |
4247 __ Addu(at, at, Operand(sp)); | |
4248 __ lw(a1, MemOperand(at, 0)); | |
4249 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | |
4250 | |
4251 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL); | |
4252 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | |
4253 | |
4254 __ Drop(1); | |
4255 | |
4256 context()->Plug(result_register()); | |
4257 } | |
4258 | |
4259 | |
4260 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { | |
4261 RegExpConstructResultStub stub(isolate()); | |
4262 ZoneList<Expression*>* args = expr->arguments(); | |
4263 DCHECK(args->length() == 3); | |
4264 VisitForStackValue(args->at(0)); | |
4265 VisitForStackValue(args->at(1)); | |
4266 VisitForAccumulatorValue(args->at(2)); | |
4267 __ mov(a0, result_register()); | |
4268 __ pop(a1); | |
4269 __ pop(a2); | |
4270 __ CallStub(&stub); | |
4271 context()->Plug(v0); | |
4272 } | |
4273 | |
4274 | |
4275 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { | |
4276 ZoneList<Expression*>* args = expr->arguments(); | |
4277 DCHECK_EQ(2, args->length()); | |
4278 | |
4279 DCHECK_NOT_NULL(args->at(0)->AsLiteral()); | |
4280 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); | |
4281 | |
4282 Handle<FixedArray> jsfunction_result_caches( | |
4283 isolate()->native_context()->jsfunction_result_caches()); | |
4284 if (jsfunction_result_caches->length() <= cache_id) { | |
4285 __ Abort(kAttemptToUseUndefinedCache); | |
4286 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); | |
4287 context()->Plug(v0); | |
4288 return; | |
4289 } | |
4290 | |
4291 VisitForAccumulatorValue(args->at(1)); | |
4292 | |
4293 Register key = v0; | |
4294 Register cache = a1; | |
4295 __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
4296 __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); | |
4297 __ lw(cache, | |
4298 ContextOperand( | |
4299 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); | |
4300 __ lw(cache, | |
4301 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); | |
4302 | |
4303 | |
4304 Label done, not_found; | |
4305 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | |
4306 __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); | |
4307 // a2 now holds finger offset as a smi. | |
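// Entries are (key, value) pairs, so the matching value lives one slot past
// the key (see the lw below); presumably the finger tracks the most
// recently hit key.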
4308 __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
4309 // a3 now points to the start of fixed array elements. | |
4310 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize); | |
4311 __ addu(a3, a3, at); | |
4312 // a3 now points to key of indexed element of cache. | |
4313 __ lw(a2, MemOperand(a3)); | |
4314 __ Branch(¬_found, ne, key, Operand(a2)); | |
4315 | |
4316 __ lw(v0, MemOperand(a3, kPointerSize)); | |
4317 __ Branch(&done); | |
4318 | |
4319 __ bind(¬_found); | |
4320 // Call runtime to perform the lookup. | |
4321 __ Push(cache, key); | |
4322 __ CallRuntime(Runtime::kGetFromCacheRT, 2); | |
4323 | |
4324 __ bind(&done); | |
4325 context()->Plug(v0); | |
4326 } | |
4327 | |
4328 | |
4329 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { | |
4330 ZoneList<Expression*>* args = expr->arguments(); | |
4331 VisitForAccumulatorValue(args->at(0)); | |
4332 | |
4333 Label materialize_true, materialize_false; | |
4334 Label* if_true = NULL; | |
4335 Label* if_false = NULL; | |
4336 Label* fall_through = NULL; | |
4337 context()->PrepareTest(&materialize_true, &materialize_false, | |
4338 &if_true, &if_false, &fall_through); | |
4339 | |
4340 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset)); | |
4341 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask)); | |
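  // The hash field packs flag bits alongside an optional cached array index;
  // a zero result after masking means such an index is present.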

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;    // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = t0;
  Register element = t1;
  Register elements_end = t2;
  Register scratch1 = t3;
  Register scratch2 = t5;
  Register scratch3 = t4;
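  // Overview: first validate that every element is a sequential one-byte
  // string while summing the lengths, then allocate the result string and
  // copy into it, using one of three copy loops specialized on the separator
  // length (empty, one character, longer).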

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
              Operand(zero_reg));
  }
  __ bind(&loop);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is not
  // smi but the other values are, so the result is a smi.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ Subu(string_length, string_length, Operand(scratch1));
  __ Mul(scratch3, scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ And(scratch3, scratch2, Operand(0x80000000));
  __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ SmiUntag(string_length);
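  // At this point string_length holds the untagged result length:
  //   sum(len[i]) - sep_len + sep_len * array_length
  //     == sum(len[i]) + sep_len * (array_length - 1),
  // i.e. one separator between each pair of adjacent elements. The multiply
  // above was checked for smi overflow via the product's high word and the
  // smi sign bit of the low word.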

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Addu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
                           elements_end, &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ sll(elements_end, array_length, kPointerSizeLog2);
  __ Addu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Addu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Addu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ lw(string, MemOperand(element));
  __ Addu(element, element, kPointerSize);
  __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lb(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push the builtins object as the receiver.
  Register receiver = LoadDescriptor::ReceiverRegister();
  __ lw(receiver, GlobalObjectOperand());
  __ lw(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
  __ push(receiver);

  // Load the function from the receiver.
  __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
  __ li(LoadDescriptor::SlotRegister(),
        Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
  CallLoadIC(NOT_INSIDE_TYPEOF);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr, arg_count);
  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
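  // Load the target function into a1. It sits below the receiver and the
  // arg_count arguments on the stack, where VisitCallRuntime placed it.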
  __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    __ lw(at, MemOperand(sp, 0));
    __ push(at);
    __ sw(v0, MemOperand(sp, kPointerSize));
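    // The receiver (builtins object) was duplicated on top and the loaded
    // function stored into the old receiver slot, leaving the stack as
    // [receiver, function, ...] from the top down.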

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, v0);

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(v0);
      }
    }
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ li(a1, Operand(Smi::FromInt(language_mode())));
        __ push(a1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ lw(a2, GlobalObjectOperand());
          __ li(a1, Operand(var->name()));
          __ li(a0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(a2, a1, a0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ li(a2, Operand(var->name()));
          __ Push(context_register(), a2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(a3, v0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      __ push(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        __ Push(result_register());
        const Register scratch = a1;
        __ lw(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        const Register scratch = a1;
        const Register scratch1 = t0;
        __ Move(scratch, result_register());
        VisitForAccumulatorValue(prop->key());
        __ Push(scratch, result_register());
        __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ lw(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value, because
  // evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
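  // Fast path: add Smi::FromInt(count_value) directly with an overflow check;
  // on overflow (or a non-smi input) we undo the operation and fall through
  // to the generic BinaryOpIC ADD stub below.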
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sw(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sw(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sw(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    Register scratch2 = t0;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sw(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sw(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sw(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
                                              strength(language_mode())).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallStoreIC();
      } else {
        CallStoreIC(expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallIC(ic);
      } else {
        CallIC(ic, expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
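  // Dispatch on the literal string: each case below inspects the value in v0
  // (smi check, map, instance type, or map bit field) just enough to decide
  // the typeof result without materializing the typeof string itself.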

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => false.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->float32x4_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(FLOAT32X4_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
    Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
          if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(t0, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
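        // Both operands are smis. Tagged smi values compare in the same order
        // as the integers they encode, so compare them directly.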
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
          isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ sw(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ lw(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_script_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook the return address in ra: convert it to a smi-encoded delta from the
  // code object's start before storing it to the stack.
  __ Subu(a1, ra, Operand(masm_->CodeObject()));
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  STATIC_ASSERT(0 == kSmiTag);
  __ Addu(a1, a1, Operand(a1));  // Convert to smi.
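  // Cooking keeps raw code addresses off the stack: the delta remains valid,
  // and looks like an ordinary smi to the GC, even if the code object moves.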

  // Store cooked return address while executing finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ lw(a1, MemOperand(at));
  __ push(a1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));

  // Restore cooked return address from stack.
  __ pop(a1);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook the return address and return.
  DCHECK_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ sra(a1, a1, 1);  // Un-smi-tag value.
  __ Addu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(a1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  __ li(at, Operand(pending_message_obj));
  __ sw(a1, MemOperand(at));
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
  DCHECK(FLAG_vector_stores && !slot.IsInvalid());
  __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
        Operand(SmiFromSlot(slot)));
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  CodePatcher patcher(branch_address, 1);
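  // The back edge site is six instructions long (see the layouts below); only
  // the first instruction is rewritten here, plus the lui/ori call target
  // further down.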

  switch (target_state) {
    case INTERRUPT:
      // slt   at, a3, zero_reg (in case of count based interrupts)
      // beq   at, zero_reg, ok
      // lui   t9, <interrupt stub address> upper
      // ori   t9, <interrupt stub address> lower
      // jalr  t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1
      // beq   at, zero_reg, ok  ;; Not changed
      // lui   t9, <on-stack replacement address> upper
      // ori   t9, <on-stack replacement address> lower
      // jalr  t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->addiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
  // Replace the stack check address in the load-immediate (lui/ori pair)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 6 * kInstrSize;
  Address pc_immediate_load_address = pc - 4 * kInstrSize;
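  // The first instruction of the site discriminates the state: an slt (i.e.
  // not an add-immediate) is the unpatched interrupt check, while an addiu
  // marks a patched site whose OSR flavor is identified by the call target
  // in the lui/ori pair.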

  DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint32_t>(
               Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint32_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint32_t>(
          Assembler::target_address_at(pc_immediate_load_address)) ==
      reinterpret_cast<uint32_t>(
          isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(reinterpret_cast<uint32_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint32_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS