Chromium Code Reviews
Side by Side Diff: src/mips64/full-codegen-mips64.cc

Issue 1248443003: Move Full-codegen into its own folder. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: "rename define", created 5 years, 5 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #if V8_TARGET_ARCH_MIPS64
8
9 // Note on Mips implementation:
10 //
11 // The result_register() for mips is the 'v0' register, which is defined
12 // by the ABI to contain function return values. However, the first
13 // parameter to a function is defined to be 'a0'. So there are many
14 // places where we have to move a previous result in v0 to a0 for the
15 // next call: mov(a0, v0). This is not needed on the other architectures.
16
17 #include "src/code-factory.h"
18 #include "src/code-stubs.h"
19 #include "src/codegen.h"
20 #include "src/compiler.h"
21 #include "src/debug.h"
22 #include "src/full-codegen.h"
23 #include "src/ic/ic.h"
24 #include "src/parser.h"
25 #include "src/scopes.h"
26
27 #include "src/mips64/code-stubs-mips64.h"
28 #include "src/mips64/macro-assembler-mips64.h"
29
30 namespace v8 {
31 namespace internal {
32
33 #define __ ACCESS_MASM(masm_)
34
35
36 // A patch site is a location in the code that can be patched. This
37 // class has a number of methods to emit the code which is patchable and the
38 // method EmitPatchInfo to record a marker back to the patchable code. This
39 // marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
40 // (raw 16 bit immediate value is used) is the delta from the pc to the first
41 // instruction of the patchable code.
42 // The marker instruction is effectively a NOP (dest is zero_reg) and will
43 // never be emitted by normal code.
44 class JumpPatchSite BASE_EMBEDDED {
45 public:
46 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
47 #ifdef DEBUG
48 info_emitted_ = false;
49 #endif
50 }
51
52 ~JumpPatchSite() {
53 DCHECK(patch_site_.is_bound() == info_emitted_);
54 }
55
56 // When initially emitting this code, ensure that a jump is always generated to skip
57 // the inlined smi code.
58 void EmitJumpIfNotSmi(Register reg, Label* target) {
59 DCHECK(!patch_site_.is_bound() && !info_emitted_);
60 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
61 __ bind(&patch_site_);
62 __ andi(at, reg, 0);
63 // Always taken before patched.
64 __ BranchShort(target, eq, at, Operand(zero_reg));
65 }
66
67 // When initially emitting this code, ensure that a jump is never generated to skip
68 // the inlined smi code.
69 void EmitJumpIfSmi(Register reg, Label* target) {
70 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
71 DCHECK(!patch_site_.is_bound() && !info_emitted_);
72 __ bind(&patch_site_);
73 __ andi(at, reg, 0);
74 // Never taken before patched.
75 __ BranchShort(target, ne, at, Operand(zero_reg));
76 }
77
78 void EmitPatchInfo() {
79 if (patch_site_.is_bound()) {
80 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
81 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
82 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
83 #ifdef DEBUG
84 info_emitted_ = true;
85 #endif
86 } else {
87 __ nop(); // Signals no inlined code.
88 }
89 }
90
91 private:
92 MacroAssembler* masm_;
93 Label patch_site_;
94 #ifdef DEBUG
95 bool info_emitted_;
96 #endif
97 };
98
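The andi marker described above encodes the distance back to the patchable code entirely in its operands: EmitPatchInfo splits the instruction count since the patch site into a register code (the quotient by kImm16Mask) and a 16-bit immediate (the remainder). A minimal standalone sketch of that arithmetic, assuming kImm16Mask is 0xffff; the names below are illustrative, not V8's:

    // Sketch of the delta <-> (register code, immediate) split performed by
    // EmitPatchInfo. kImm16Mask is assumed to be 0xffff, matching the raw
    // 16-bit immediate field of the andi instruction.
    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kImm16Mask = 0xffff;

    struct PatchInfo {
      uint32_t reg_code;  // encoded in the rx field of "andi zero_reg, rx, imm"
      uint32_t imm16;     // encoded in the 16-bit immediate field
    };

    PatchInfo EncodeDelta(uint32_t delta_to_patch_site) {
      return {delta_to_patch_site / kImm16Mask, delta_to_patch_site % kImm16Mask};
    }

    uint32_t DecodeDelta(const PatchInfo& info) {
      // Mirrors the class comment: delta = rx * 0x0000ffff + yyyy.
      return info.reg_code * kImm16Mask + info.imm16;
    }

    int main() {
      PatchInfo info = EncodeDelta(70000);  // 70000 = 1 * 0xffff + 4465
      assert(DecodeDelta(info) == 70000);
      return 0;
    }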
99
100 // Generate code for a JS function. On entry to the function the receiver
101 // and arguments have been pushed on the stack left to right. The actual
102 // argument count matches the formal parameter count expected by the
103 // function.
104 //
105 // The live registers are:
106 // o a1: the JS function object being called (i.e. ourselves)
107 // o cp: our context
108 // o fp: our caller's frame pointer
109 // o sp: stack pointer
110 // o ra: return address
111 //
112 // The function builds a JS frame. Please see JavaScriptFrameConstants in
113 // frames-mips.h for its layout.
114 void FullCodeGenerator::Generate() {
115 CompilationInfo* info = info_;
116 profiling_counter_ = isolate()->factory()->NewCell(
117 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
118 SetFunctionPosition(function());
119 Comment cmnt(masm_, "[ function compiled by full code generator");
120
121 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
122
123 #ifdef DEBUG
124 if (strlen(FLAG_stop_at) > 0 &&
125 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
126 __ stop("stop-at");
127 }
128 #endif
129
130 // Sloppy mode functions and builtins need to replace the receiver with the
131 // global proxy when called as functions (without an explicit receiver
132 // object).
133 if (is_sloppy(info->language_mode()) && !info->is_native() &&
134 info->MayUseThis() && info->scope()->has_this_declaration()) {
135 Label ok;
136 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
137 __ ld(at, MemOperand(sp, receiver_offset));
138 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
139 __ Branch(&ok, ne, a2, Operand(at));
140
141 __ ld(a2, GlobalObjectOperand());
142 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
143
144 __ sd(a2, MemOperand(sp, receiver_offset));
145 __ bind(&ok);
146 }
147 // Open a frame scope to indicate that there is a frame on the stack. The
148 // MANUAL indicates that the scope shouldn't actually generate code to set up
149 // the frame (that is done below).
150 FrameScope frame_scope(masm_, StackFrame::MANUAL);
151 info->set_prologue_offset(masm_->pc_offset());
152 __ Prologue(info->IsCodePreAgingActive());
153 info->AddNoFrameRange(0, masm_->pc_offset());
154
155 { Comment cmnt(masm_, "[ Allocate locals");
156 int locals_count = info->scope()->num_stack_slots();
157 // Generators allocate locals, if any, in context slots.
158 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
159 if (locals_count > 0) {
160 if (locals_count >= 128) {
161 Label ok;
162 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
163 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
164 __ Branch(&ok, hs, t1, Operand(a2));
165 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
166 __ bind(&ok);
167 }
168 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
169 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
170 if (locals_count >= kMaxPushes) {
171 int loop_iterations = locals_count / kMaxPushes;
172 __ li(a2, Operand(loop_iterations));
173 Label loop_header;
174 __ bind(&loop_header);
175 // Do pushes.
176 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
177 for (int i = 0; i < kMaxPushes; i++) {
178 __ sd(t1, MemOperand(sp, i * kPointerSize));
179 }
180 // Continue loop if not done.
181 __ Dsubu(a2, a2, Operand(1));
182 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
183 }
184 int remaining = locals_count % kMaxPushes;
185 // Emit the remaining pushes.
186 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
187 for (int i = 0; i < remaining; i++) {
188 __ sd(t1, MemOperand(sp, i * kPointerSize));
189 }
190 }
191 }
192
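The "Allocate locals" block above fills the newly reserved stack slots with undefined in batches: loop_iterations full batches of kMaxPushes stores, then the remainder stored one by one. A small sketch of that batching arithmetic; the locals_count value is hypothetical and kMaxPushes uses the non --optimize_for_size choice:

    // Batching used when initializing locals: full batches in a loop, then
    // the leftover slots individually. Values are illustrative only.
    #include <cstdio>

    int main() {
      const int kMaxPushes = 32;    // 4 when optimizing for size
      const int locals_count = 70;  // hypothetical function with 70 stack slots
      int loop_iterations = locals_count / kMaxPushes;  // 2 full batches of 32
      int remaining = locals_count % kMaxPushes;        // 6 slots stored singly
      std::printf("%d batches + %d single stores = %d slots\n", loop_iterations,
                  remaining, loop_iterations * kMaxPushes + remaining);
      return 0;
    }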
193 bool function_in_register = true;
194
195 // Possibly allocate a local context.
196 if (info->scope()->num_heap_slots() > 0) {
197 Comment cmnt(masm_, "[ Allocate context");
198 // Argument to NewContext is the function, which is still in a1.
199 bool need_write_barrier = true;
200 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
201 if (info->scope()->is_script_scope()) {
202 __ push(a1);
203 __ Push(info->scope()->GetScopeInfo(info->isolate()));
204 __ CallRuntime(Runtime::kNewScriptContext, 2);
205 } else if (slots <= FastNewContextStub::kMaximumSlots) {
206 FastNewContextStub stub(isolate(), slots);
207 __ CallStub(&stub);
208 // Result of FastNewContextStub is always in new space.
209 need_write_barrier = false;
210 } else {
211 __ push(a1);
212 __ CallRuntime(Runtime::kNewFunctionContext, 1);
213 }
214 function_in_register = false;
215 // Context is returned in v0. It replaces the context passed to us.
216 // It's saved on the stack and kept live in cp.
217 __ mov(cp, v0);
218 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
219 // Copy any necessary parameters into the context.
220 int num_parameters = info->scope()->num_parameters();
221 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
222 for (int i = first_parameter; i < num_parameters; i++) {
223 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
224 if (var->IsContextSlot()) {
225 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
226 (num_parameters - 1 - i) * kPointerSize;
227 // Load parameter from stack.
228 __ ld(a0, MemOperand(fp, parameter_offset));
229 // Store it in the context.
230 MemOperand target = ContextOperand(cp, var->index());
231 __ sd(a0, target);
232
233 // Update the write barrier.
234 if (need_write_barrier) {
235 __ RecordWriteContextSlot(
236 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
237 } else if (FLAG_debug_code) {
238 Label done;
239 __ JumpIfInNewSpace(cp, a0, &done);
240 __ Abort(kExpectedNewSpaceObject);
241 __ bind(&done);
242 }
243 }
244 }
245 }
246
247 // Possibly set up a local binding to the this function which is used in
248 // derived constructors with super calls.
249 Variable* this_function_var = scope()->this_function_var();
250 if (this_function_var != nullptr) {
251 Comment cmnt(masm_, "[ This function");
252 if (!function_in_register) {
253 __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
254 // The write barrier clobbers the register again, so keep it marked as such.
255 }
256 SetVar(this_function_var, a1, a2, a3);
257 }
258
259 Variable* new_target_var = scope()->new_target_var();
260 if (new_target_var != nullptr) {
261 Comment cmnt(masm_, "[ new.target");
262 // Get the frame pointer for the calling frame.
263 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
264
265 // Skip the arguments adaptor frame if it exists.
266 Label check_frame_marker;
267 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
268 __ Branch(&check_frame_marker, ne, a1,
269 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
270 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
271
272 // Check the marker in the calling frame.
273 __ bind(&check_frame_marker);
274 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
275
276 Label non_construct_frame, done;
277 __ Branch(&non_construct_frame, ne, a1,
278 Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
279
280 __ ld(v0,
281 MemOperand(a2, ConstructFrameConstants::kOriginalConstructorOffset));
282 __ Branch(&done);
283
284 __ bind(&non_construct_frame);
285 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
286 __ bind(&done);
287
288 SetVar(new_target_var, v0, a2, a3);
289 }
290
291 // Possibly allocate a rest parameter array.
292 int rest_index;
293 Variable* rest_param = scope()->rest_parameter(&rest_index);
294 if (rest_param) {
295 Comment cmnt(masm_, "[ Allocate rest parameter array");
296
297 int num_parameters = info->scope()->num_parameters();
298 int offset = num_parameters * kPointerSize;
299
300 __ Daddu(a3, fp,
301 Operand(StandardFrameConstants::kCallerSPOffset + offset));
302 __ li(a2, Operand(Smi::FromInt(num_parameters)));
303 __ li(a1, Operand(Smi::FromInt(rest_index)));
304 __ li(a0, Operand(Smi::FromInt(language_mode())));
305 __ Push(a3, a2, a1, a0);
306
307 RestParamAccessStub stub(isolate());
308 __ CallStub(&stub);
309
310 SetVar(rest_param, v0, a1, a2);
311 }
312
313 Variable* arguments = scope()->arguments();
314 if (arguments != NULL) {
315 // Function uses arguments object.
316 Comment cmnt(masm_, "[ Allocate arguments object");
317 if (!function_in_register) {
318 // The function is no longer live in a register; reload it from the frame.
319 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
320 } else {
321 __ mov(a3, a1);
322 }
323 // Receiver is just before the parameters on the caller's stack.
324 int num_parameters = info->scope()->num_parameters();
325 int offset = num_parameters * kPointerSize;
326 __ Daddu(a2, fp,
327 Operand(StandardFrameConstants::kCallerSPOffset + offset));
328 __ li(a1, Operand(Smi::FromInt(num_parameters)));
329 __ Push(a3, a2, a1);
330
331 // Arguments to ArgumentsAccessStub:
332 // function, receiver address, parameter count.
333 // The stub will rewrite the receiver and parameter count if the previous
334 // stack frame was an arguments adaptor frame.
335 ArgumentsAccessStub::Type type;
336 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
337 type = ArgumentsAccessStub::NEW_STRICT;
338 } else if (function()->has_duplicate_parameters()) {
339 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
340 } else {
341 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
342 }
343 ArgumentsAccessStub stub(isolate(), type);
344 __ CallStub(&stub);
345
346 SetVar(arguments, v0, a1, a2);
347 }
348
349 if (FLAG_trace) {
350 __ CallRuntime(Runtime::kTraceEnter, 0);
351 }
352 // Visit the declarations and body unless there is an illegal
353 // redeclaration.
354 if (scope()->HasIllegalRedeclaration()) {
355 Comment cmnt(masm_, "[ Declarations");
356 scope()->VisitIllegalRedeclaration(this);
357
358 } else {
359 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
360 { Comment cmnt(masm_, "[ Declarations");
361 VisitDeclarations(scope()->declarations());
362 }
363 { Comment cmnt(masm_, "[ Stack check");
364 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
365 Label ok;
366 __ LoadRoot(at, Heap::kStackLimitRootIndex);
367 __ Branch(&ok, hs, sp, Operand(at));
368 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
369 PredictableCodeSizeScope predictable(masm_,
370 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
371 __ Call(stack_check, RelocInfo::CODE_TARGET);
372 __ bind(&ok);
373 }
374
375 { Comment cmnt(masm_, "[ Body");
376 DCHECK(loop_depth() == 0);
377
378 VisitStatements(function()->body());
379
380 DCHECK(loop_depth() == 0);
381 }
382 }
383
384 // Always emit a 'return undefined' in case control fell off the end of
385 // the body.
386 { Comment cmnt(masm_, "[ return <undefined>;");
387 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
388 }
389 EmitReturnSequence();
390 }
391
392
393 void FullCodeGenerator::ClearAccumulator() {
394 DCHECK(Smi::FromInt(0) == 0);
395 __ mov(v0, zero_reg);
396 }
397
398
399 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
400 __ li(a2, Operand(profiling_counter_));
401 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
402 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
403 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
404 }
405
406
407 void FullCodeGenerator::EmitProfilingCounterReset() {
408 int reset_value = FLAG_interrupt_budget;
409 if (info_->is_debug()) {
410 // Detect debug break requests as soon as possible.
411 reset_value = FLAG_interrupt_budget >> 4;
412 }
413 __ li(a2, Operand(profiling_counter_));
414 __ li(a3, Operand(Smi::FromInt(reset_value)));
415 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
416 }
417
418
419 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
420 Label* back_edge_target) {
421 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt, so we
422 // need to make sure its size stays constant. Branch may emit a skip-or-jump
423 // sequence instead of a normal branch. The "skip" part of that sequence is
424 // about as long as the normal Branch would be, so it is safe to ignore the
425 // difference.
426 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
427 Comment cmnt(masm_, "[ Back edge bookkeeping");
428 Label ok;
429 DCHECK(back_edge_target->is_bound());
430 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
431 int weight = Min(kMaxBackEdgeWeight,
432 Max(1, distance / kCodeSizeMultiplier));
433 EmitProfilingCounterDecrement(weight);
434 __ slt(at, a3, zero_reg);
435 __ beq(at, zero_reg, &ok);
436 // Call will emit a li t9 first, so it is safe to use the delay slot.
437 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
438 // Record a mapping of this PC offset to the OSR id. This is used to find
439 // the AST id from the unoptimized code in order to use it as a key into
440 // the deoptimization input data found in the optimized code.
441 RecordBackEdge(stmt->OsrEntryId());
442 EmitProfilingCounterReset();
443
444 __ bind(&ok);
445 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
446 // Record a mapping of the OSR id to this PC. This is used if the OSR
447 // entry becomes the target of a bailout. We don't expect it to be, but
448 // we want it to work if it is.
449 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
450 }
451
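EmitBackEdgeBookkeeping decrements the interrupt budget by a weight proportional to the size of the loop body, clamped to [1, kMaxBackEdgeWeight], so larger loops reach the InterruptCheck call after fewer iterations. A sketch of the clamping; the constants are placeholders, not the real mips64 values:

    // Back-edge weight: grows with the size of the loop body, clamped to
    // [1, max_weight]. Constants below are illustrative placeholders.
    #include <algorithm>
    #include <cstdio>

    int BackEdgeWeight(int distance, int code_size_multiplier, int max_weight) {
      return std::min(max_weight, std::max(1, distance / code_size_multiplier));
    }

    int main() {
      std::printf("%d\n", BackEdgeWeight(40, 100, 127));     // clamped up to 1
      std::printf("%d\n", BackEdgeWeight(5000, 100, 127));   // 50
      std::printf("%d\n", BackEdgeWeight(50000, 100, 127));  // clamped to 127
      return 0;
    }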
452
453 void FullCodeGenerator::EmitReturnSequence() {
454 Comment cmnt(masm_, "[ Return sequence");
455 if (return_label_.is_bound()) {
456 __ Branch(&return_label_);
457 } else {
458 __ bind(&return_label_);
459 if (FLAG_trace) {
460 // Push the return value on the stack as the parameter.
461 // Runtime::TraceExit returns its parameter in v0.
462 __ push(v0);
463 __ CallRuntime(Runtime::kTraceExit, 1);
464 }
465 // Pretend that the exit is a backwards jump to the entry.
466 int weight = 1;
467 if (info_->ShouldSelfOptimize()) {
468 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
469 } else {
470 int distance = masm_->pc_offset();
471 weight = Min(kMaxBackEdgeWeight,
472 Max(1, distance / kCodeSizeMultiplier));
473 }
474 EmitProfilingCounterDecrement(weight);
475 Label ok;
476 __ Branch(&ok, ge, a3, Operand(zero_reg));
477 __ push(v0);
478 __ Call(isolate()->builtins()->InterruptCheck(),
479 RelocInfo::CODE_TARGET);
480 __ pop(v0);
481 EmitProfilingCounterReset();
482 __ bind(&ok);
483
484 // Make sure that the trampoline pool is not emitted inside of the return
485 // sequence.
486 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
487 // Here we use masm_-> instead of the __ macro to prevent the code coverage
488 // tool from instrumenting these calls, as we rely on the exact code size here.
489 int32_t arg_count = info_->scope()->num_parameters() + 1;
490 int32_t sp_delta = arg_count * kPointerSize;
491 SetReturnPosition(function());
492 masm_->mov(sp, fp);
493 int no_frame_start = masm_->pc_offset();
494 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
495 masm_->Daddu(sp, sp, Operand(sp_delta));
496 masm_->Jump(ra);
497 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
498 }
499 }
500 }
501
502
503 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
504 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
505 codegen()->GetVar(result_register(), var);
506 __ push(result_register());
507 }
508
509
510 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
511 }
512
513
514 void FullCodeGenerator::AccumulatorValueContext::Plug(
515 Heap::RootListIndex index) const {
516 __ LoadRoot(result_register(), index);
517 }
518
519
520 void FullCodeGenerator::StackValueContext::Plug(
521 Heap::RootListIndex index) const {
522 __ LoadRoot(result_register(), index);
523 __ push(result_register());
524 }
525
526
527 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
528 codegen()->PrepareForBailoutBeforeSplit(condition(),
529 true,
530 true_label_,
531 false_label_);
532 if (index == Heap::kUndefinedValueRootIndex ||
533 index == Heap::kNullValueRootIndex ||
534 index == Heap::kFalseValueRootIndex) {
535 if (false_label_ != fall_through_) __ Branch(false_label_);
536 } else if (index == Heap::kTrueValueRootIndex) {
537 if (true_label_ != fall_through_) __ Branch(true_label_);
538 } else {
539 __ LoadRoot(result_register(), index);
540 codegen()->DoTest(this);
541 }
542 }
543
544
545 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
546 }
547
548
549 void FullCodeGenerator::AccumulatorValueContext::Plug(
550 Handle<Object> lit) const {
551 __ li(result_register(), Operand(lit));
552 }
553
554
555 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
556 // Immediates cannot be pushed directly.
557 __ li(result_register(), Operand(lit));
558 __ push(result_register());
559 }
560
561
562 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
563 codegen()->PrepareForBailoutBeforeSplit(condition(),
564 true,
565 true_label_,
566 false_label_);
567 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
568 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
569 if (false_label_ != fall_through_) __ Branch(false_label_);
570 } else if (lit->IsTrue() || lit->IsJSObject()) {
571 if (true_label_ != fall_through_) __ Branch(true_label_);
572 } else if (lit->IsString()) {
573 if (String::cast(*lit)->length() == 0) {
574 if (false_label_ != fall_through_) __ Branch(false_label_);
575 } else {
576 if (true_label_ != fall_through_) __ Branch(true_label_);
577 }
578 } else if (lit->IsSmi()) {
579 if (Smi::cast(*lit)->value() == 0) {
580 if (false_label_ != fall_through_) __ Branch(false_label_);
581 } else {
582 if (true_label_ != fall_through_) __ Branch(true_label_);
583 }
584 } else {
585 // For simplicity we always test the accumulator register.
586 __ li(result_register(), Operand(lit));
587 codegen()->DoTest(this);
588 }
589 }
590
591
592 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
593 Register reg) const {
594 DCHECK(count > 0);
595 __ Drop(count);
596 }
597
598
599 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
600 int count,
601 Register reg) const {
602 DCHECK(count > 0);
603 __ Drop(count);
604 __ Move(result_register(), reg);
605 }
606
607
608 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
609 Register reg) const {
610 DCHECK(count > 0);
611 if (count > 1) __ Drop(count - 1);
612 __ sd(reg, MemOperand(sp, 0));
613 }
614
615
616 void FullCodeGenerator::TestContext::DropAndPlug(int count,
617 Register reg) const {
618 DCHECK(count > 0);
619 // For simplicity we always test the accumulator register.
620 __ Drop(count);
621 __ Move(result_register(), reg);
622 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
623 codegen()->DoTest(this);
624 }
625
626
627 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
628 Label* materialize_false) const {
629 DCHECK(materialize_true == materialize_false);
630 __ bind(materialize_true);
631 }
632
633
634 void FullCodeGenerator::AccumulatorValueContext::Plug(
635 Label* materialize_true,
636 Label* materialize_false) const {
637 Label done;
638 __ bind(materialize_true);
639 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
640 __ Branch(&done);
641 __ bind(materialize_false);
642 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
643 __ bind(&done);
644 }
645
646
647 void FullCodeGenerator::StackValueContext::Plug(
648 Label* materialize_true,
649 Label* materialize_false) const {
650 Label done;
651 __ bind(materialize_true);
652 __ LoadRoot(at, Heap::kTrueValueRootIndex);
653 // Push the value as the following branch can clobber at in long branch mode.
654 __ push(at);
655 __ Branch(&done);
656 __ bind(materialize_false);
657 __ LoadRoot(at, Heap::kFalseValueRootIndex);
658 __ push(at);
659 __ bind(&done);
660 }
661
662
663 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
664 Label* materialize_false) const {
665 DCHECK(materialize_true == true_label_);
666 DCHECK(materialize_false == false_label_);
667 }
668
669
670 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
671 Heap::RootListIndex value_root_index =
672 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
673 __ LoadRoot(result_register(), value_root_index);
674 }
675
676
677 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
678 Heap::RootListIndex value_root_index =
679 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
680 __ LoadRoot(at, value_root_index);
681 __ push(at);
682 }
683
684
685 void FullCodeGenerator::TestContext::Plug(bool flag) const {
686 codegen()->PrepareForBailoutBeforeSplit(condition(),
687 true,
688 true_label_,
689 false_label_);
690 if (flag) {
691 if (true_label_ != fall_through_) __ Branch(true_label_);
692 } else {
693 if (false_label_ != fall_through_) __ Branch(false_label_);
694 }
695 }
696
697
698 void FullCodeGenerator::DoTest(Expression* condition,
699 Label* if_true,
700 Label* if_false,
701 Label* fall_through) {
702 __ mov(a0, result_register());
703 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
704 CallIC(ic, condition->test_id());
705 __ mov(at, zero_reg);
706 Split(ne, v0, Operand(at), if_true, if_false, fall_through);
707 }
708
709
710 void FullCodeGenerator::Split(Condition cc,
711 Register lhs,
712 const Operand& rhs,
713 Label* if_true,
714 Label* if_false,
715 Label* fall_through) {
716 if (if_false == fall_through) {
717 __ Branch(if_true, cc, lhs, rhs);
718 } else if (if_true == fall_through) {
719 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
720 } else {
721 __ Branch(if_true, cc, lhs, rhs);
722 __ Branch(if_false);
723 }
724 }
725
726
727 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
728 DCHECK(var->IsStackAllocated());
729 // Offset is negative because higher indexes are at lower addresses.
730 int offset = -var->index() * kPointerSize;
731 // Adjust by a (parameter or local) base offset.
732 if (var->IsParameter()) {
733 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
734 } else {
735 offset += JavaScriptFrameConstants::kLocal0Offset;
736 }
737 return MemOperand(fp, offset);
738 }
739
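StackOperand maps a variable index to an fp-relative offset: a negative stride of kPointerSize per index, plus a base that lies above fp (past the receiver slot) for parameters and at kLocal0Offset below fp for locals. A standalone sketch of the same arithmetic; the frame constants are passed in as parameters because their concrete values depend on the frame layout and are not defined in this file:

    // Sketch of the offset computation in StackOperand above.
    int StackSlotOffset(int var_index, bool is_parameter, int num_parameters,
                        int pointer_size, int local0_offset) {
      // Higher indexes live at lower addresses, hence the negation.
      int offset = -var_index * pointer_size;
      if (is_parameter) {
        // Parameters sit above fp, past the receiver slot.
        offset += (num_parameters + 1) * pointer_size;
      } else {
        // Locals start at the (negative) local0 offset below fp.
        offset += local0_offset;
      }
      return offset;
    }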
740
741 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
742 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
743 if (var->IsContextSlot()) {
744 int context_chain_length = scope()->ContextChainLength(var->scope());
745 __ LoadContext(scratch, context_chain_length);
746 return ContextOperand(scratch, var->index());
747 } else {
748 return StackOperand(var);
749 }
750 }
751
752
753 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
754 // Use destination as scratch.
755 MemOperand location = VarOperand(var, dest);
756 __ ld(dest, location);
757 }
758
759
760 void FullCodeGenerator::SetVar(Variable* var,
761 Register src,
762 Register scratch0,
763 Register scratch1) {
764 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
765 DCHECK(!scratch0.is(src));
766 DCHECK(!scratch0.is(scratch1));
767 DCHECK(!scratch1.is(src));
768 MemOperand location = VarOperand(var, scratch0);
769 __ sd(src, location);
770 // Emit the write barrier code if the location is in the heap.
771 if (var->IsContextSlot()) {
772 __ RecordWriteContextSlot(scratch0,
773 location.offset(),
774 src,
775 scratch1,
776 kRAHasBeenSaved,
777 kDontSaveFPRegs);
778 }
779 }
780
781
782 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
783 bool should_normalize,
784 Label* if_true,
785 Label* if_false) {
786 // Only prepare for bailouts before splits if we're in a test
787 // context. Otherwise, we let the Visit function deal with the
788 // preparation to avoid preparing with the same AST id twice.
789 if (!context()->IsTest() || !info_->IsOptimizable()) return;
790
791 Label skip;
792 if (should_normalize) __ Branch(&skip);
793 PrepareForBailout(expr, TOS_REG);
794 if (should_normalize) {
795 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
796 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
797 __ bind(&skip);
798 }
799 }
800
801
802 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
803 // The variable in the declaration always resides in the current function
804 // context.
805 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
806 if (generate_debug_code_) {
807 // Check that we're not inside a with or catch context.
808 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
809 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
810 __ Check(ne, kDeclarationInWithContext,
811 a1, Operand(a4));
812 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
813 __ Check(ne, kDeclarationInCatchContext,
814 a1, Operand(a4));
815 }
816 }
817
818
819 void FullCodeGenerator::VisitVariableDeclaration(
820 VariableDeclaration* declaration) {
821 // If it was not possible to allocate the variable at compile time, we
822 // need to "declare" it at runtime to make sure it actually exists in the
823 // local context.
824 VariableProxy* proxy = declaration->proxy();
825 VariableMode mode = declaration->mode();
826 Variable* variable = proxy->var();
827 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
828 switch (variable->location()) {
829 case VariableLocation::GLOBAL:
830 case VariableLocation::UNALLOCATED:
831 globals_->Add(variable->name(), zone());
832 globals_->Add(variable->binding_needs_init()
833 ? isolate()->factory()->the_hole_value()
834 : isolate()->factory()->undefined_value(),
835 zone());
836 break;
837
838 case VariableLocation::PARAMETER:
839 case VariableLocation::LOCAL:
840 if (hole_init) {
841 Comment cmnt(masm_, "[ VariableDeclaration");
842 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
843 __ sd(a4, StackOperand(variable));
844 }
845 break;
846
847 case VariableLocation::CONTEXT:
848 if (hole_init) {
849 Comment cmnt(masm_, "[ VariableDeclaration");
850 EmitDebugCheckDeclarationContext(variable);
851 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
852 __ sd(at, ContextOperand(cp, variable->index()));
853 // No write barrier since the_hole_value is in old space.
854 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
855 }
856 break;
857
858 case VariableLocation::LOOKUP: {
859 Comment cmnt(masm_, "[ VariableDeclaration");
860 __ li(a2, Operand(variable->name()));
861 // Declaration nodes are always introduced in one of four modes.
862 DCHECK(IsDeclaredVariableMode(mode));
863 PropertyAttributes attr =
864 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
865 __ li(a1, Operand(Smi::FromInt(attr)));
866 // Push initial value, if any.
867 // Note: For variables we must not push an initial value (such as
868 // 'undefined') because we may have a (legal) redeclaration and we
869 // must not destroy the current value.
870 if (hole_init) {
871 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
872 __ Push(cp, a2, a1, a0);
873 } else {
874 DCHECK(Smi::FromInt(0) == 0);
875 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
876 __ Push(cp, a2, a1, a0);
877 }
878 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
879 break;
880 }
881 }
882 }
883
884
885 void FullCodeGenerator::VisitFunctionDeclaration(
886 FunctionDeclaration* declaration) {
887 VariableProxy* proxy = declaration->proxy();
888 Variable* variable = proxy->var();
889 switch (variable->location()) {
890 case VariableLocation::GLOBAL:
891 case VariableLocation::UNALLOCATED: {
892 globals_->Add(variable->name(), zone());
893 Handle<SharedFunctionInfo> function =
894 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
895 // Check for stack-overflow exception.
896 if (function.is_null()) return SetStackOverflow();
897 globals_->Add(function, zone());
898 break;
899 }
900
901 case VariableLocation::PARAMETER:
902 case VariableLocation::LOCAL: {
903 Comment cmnt(masm_, "[ FunctionDeclaration");
904 VisitForAccumulatorValue(declaration->fun());
905 __ sd(result_register(), StackOperand(variable));
906 break;
907 }
908
909 case VariableLocation::CONTEXT: {
910 Comment cmnt(masm_, "[ FunctionDeclaration");
911 EmitDebugCheckDeclarationContext(variable);
912 VisitForAccumulatorValue(declaration->fun());
913 __ sd(result_register(), ContextOperand(cp, variable->index()));
914 int offset = Context::SlotOffset(variable->index());
915 // We know that we have written a function, which is not a smi.
916 __ RecordWriteContextSlot(cp,
917 offset,
918 result_register(),
919 a2,
920 kRAHasBeenSaved,
921 kDontSaveFPRegs,
922 EMIT_REMEMBERED_SET,
923 OMIT_SMI_CHECK);
924 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
925 break;
926 }
927
928 case VariableLocation::LOOKUP: {
929 Comment cmnt(masm_, "[ FunctionDeclaration");
930 __ li(a2, Operand(variable->name()));
931 __ li(a1, Operand(Smi::FromInt(NONE)));
932 __ Push(cp, a2, a1);
933 // Push initial value for function declaration.
934 VisitForStackValue(declaration->fun());
935 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
936 break;
937 }
938 }
939 }
940
941
942 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
943 // Call the runtime to declare the globals.
944 // The context is the first argument.
945 __ li(a1, Operand(pairs));
946 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
947 __ Push(cp, a1, a0);
948 __ CallRuntime(Runtime::kDeclareGlobals, 3);
949 // Return value is ignored.
950 }
951
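DeclareGlobals consumes the globals_ list built by VisitVariableDeclaration and VisitFunctionDeclaration above: entries are appended in (name, initial value) pairs, where the value is the hole or undefined for plain variables and a SharedFunctionInfo for function declarations. A rough sketch of that pairing, with std::vector and std::string standing in for the real ZoneList and heap handles:

    // Stand-in model of the globals_ pair list handed to kDeclareGlobals.
    #include <string>
    #include <utility>
    #include <vector>

    using GlobalPairs = std::vector<std::pair<std::string, std::string>>;

    void AddVariable(GlobalPairs* globals, const std::string& name,
                     bool needs_hole_init) {
      globals->emplace_back(name, needs_hole_init ? "<the hole>" : "undefined");
    }

    void AddFunction(GlobalPairs* globals, const std::string& name,
                     const std::string& shared_function_info) {
      globals->emplace_back(name, shared_function_info);
    }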
952
953 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
954 // Call the runtime to declare the modules.
955 __ Push(descriptions);
956 __ CallRuntime(Runtime::kDeclareModules, 1);
957 // Return value is ignored.
958 }
959
960
961 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
962 Comment cmnt(masm_, "[ SwitchStatement");
963 Breakable nested_statement(this, stmt);
964 SetStatementPosition(stmt);
965
966 // Keep the switch value on the stack until a case matches.
967 VisitForStackValue(stmt->tag());
968 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
969
970 ZoneList<CaseClause*>* clauses = stmt->cases();
971 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
972
973 Label next_test; // Recycled for each test.
974 // Compile all the tests with branches to their bodies.
975 for (int i = 0; i < clauses->length(); i++) {
976 CaseClause* clause = clauses->at(i);
977 clause->body_target()->Unuse();
978
979 // The default clause is not a test, but remember it as the final fall-through.
980 if (clause->is_default()) {
981 default_clause = clause;
982 continue;
983 }
984
985 Comment cmnt(masm_, "[ Case comparison");
986 __ bind(&next_test);
987 next_test.Unuse();
988
989 // Compile the label expression.
990 VisitForAccumulatorValue(clause->label());
991 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
992
993 // Perform the comparison as if via '==='.
994 __ ld(a1, MemOperand(sp, 0)); // Switch value.
995 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
996 JumpPatchSite patch_site(masm_);
997 if (inline_smi_code) {
998 Label slow_case;
999 __ or_(a2, a1, a0);
1000 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1001
1002 __ Branch(&next_test, ne, a1, Operand(a0));
1003 __ Drop(1); // Switch value is no longer needed.
1004 __ Branch(clause->body_target());
1005
1006 __ bind(&slow_case);
1007 }
1008
1009 // Record position before stub call for type feedback.
1010 SetExpressionPosition(clause);
1011 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1012 strength(language_mode())).code();
1013 CallIC(ic, clause->CompareId());
1014 patch_site.EmitPatchInfo();
1015
1016 Label skip;
1017 __ Branch(&skip);
1018 PrepareForBailout(clause, TOS_REG);
1019 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1020 __ Branch(&next_test, ne, v0, Operand(at));
1021 __ Drop(1);
1022 __ Branch(clause->body_target());
1023 __ bind(&skip);
1024
1025 __ Branch(&next_test, ne, v0, Operand(zero_reg));
1026 __ Drop(1); // Switch value is no longer needed.
1027 __ Branch(clause->body_target());
1028 }
1029
1030 // Discard the test value and jump to the default if present, otherwise to
1031 // the end of the statement.
1032 __ bind(&next_test);
1033 __ Drop(1); // Switch value is no longer needed.
1034 if (default_clause == NULL) {
1035 __ Branch(nested_statement.break_label());
1036 } else {
1037 __ Branch(default_clause->body_target());
1038 }
1039
1040 // Compile all the case bodies.
1041 for (int i = 0; i < clauses->length(); i++) {
1042 Comment cmnt(masm_, "[ Case body");
1043 CaseClause* clause = clauses->at(i);
1044 __ bind(clause->body_target());
1045 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1046 VisitStatements(clause->statements());
1047 }
1048
1049 __ bind(nested_statement.break_label());
1050 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1051 }
1052
1053
1054 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1055 Comment cmnt(masm_, "[ ForInStatement");
1056 SetStatementPosition(stmt, SKIP_BREAK);
1057
1058 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1059
1060 Label loop, exit;
1061 ForIn loop_statement(this, stmt);
1062 increment_loop_depth();
1063
1064 // Get the object to enumerate over. If the object is null or undefined, skip
1065 // over the loop. See ECMA-262 version 5, section 12.6.4.
1066 SetExpressionAsStatementPosition(stmt->enumerable());
1067 VisitForAccumulatorValue(stmt->enumerable());
1068 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1069 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1070 __ Branch(&exit, eq, a0, Operand(at));
1071 Register null_value = a5;
1072 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1073 __ Branch(&exit, eq, a0, Operand(null_value));
1074 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1075 __ mov(a0, v0);
1076 // Convert the object to a JS object.
1077 Label convert, done_convert;
1078 __ JumpIfSmi(a0, &convert);
1079 __ GetObjectType(a0, a1, a1);
1080 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1081 __ bind(&convert);
1082 __ push(a0);
1083 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1084 __ mov(a0, v0);
1085 __ bind(&done_convert);
1086 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1087 __ push(a0);
1088
1089 // Check for proxies.
1090 Label call_runtime;
1091 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1092 __ GetObjectType(a0, a1, a1);
1093 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1094
1095 // Check cache validity in generated code. This is a fast case for
1096 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1097 // guarantee cache validity, call the runtime system to check cache
1098 // validity or get the property names in a fixed array.
1099 __ CheckEnumCache(null_value, &call_runtime);
1100
1101 // The enum cache is valid. Load the map of the object being
1102 // iterated over and use the cache for the iteration.
1103 Label use_cache;
1104 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1105 __ Branch(&use_cache);
1106
1107 // Get the set of properties to enumerate.
1108 __ bind(&call_runtime);
1109 __ push(a0); // Duplicate the enumerable object on the stack.
1110 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1111 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1112
1113 // If we got a map from the runtime call, we can do a fast
1114 // modification check. Otherwise, we got a fixed array, and we have
1115 // to do a slow check.
1116 Label fixed_array;
1117 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1118 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1119 __ Branch(&fixed_array, ne, a2, Operand(at));
1120
1121 // We got a map in register v0. Get the enumeration cache from it.
1122 Label no_descriptors;
1123 __ bind(&use_cache);
1124
1125 __ EnumLength(a1, v0);
1126 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1127
1128 __ LoadInstanceDescriptors(v0, a2);
1129 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1130 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1131
1132 // Set up the four remaining stack slots.
1133 __ li(a0, Operand(Smi::FromInt(0)));
1134 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1135 __ Push(v0, a2, a1, a0);
1136 __ jmp(&loop);
1137
1138 __ bind(&no_descriptors);
1139 __ Drop(1);
1140 __ jmp(&exit);
1141
1142 // We got a fixed array in register v0. Iterate through that.
1143 Label non_proxy;
1144 __ bind(&fixed_array);
1145
1146 __ li(a1, FeedbackVector());
1147 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1148 int vector_index = FeedbackVector()->GetIndex(slot);
1149 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
1150
1151 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1152 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1153 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1154 __ GetObjectType(a2, a3, a3);
1155 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1156 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1157 __ bind(&non_proxy);
1158 __ Push(a1, v0); // Smi and array
1159 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1160 __ li(a0, Operand(Smi::FromInt(0)));
1161 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1162
1163 // Generate code for doing the condition check.
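At this point the for-in setup has left five values on the stack; the loop below reads them at fixed offsets from sp and the break label drops all five. Summarizing the layout as read from the surrounding code:

    // For-in loop stack layout (top of stack first):
    //   MemOperand(sp, 0 * kPointerSize)  current index (smi)
    //   MemOperand(sp, 1 * kPointerSize)  array length (smi)
    //   MemOperand(sp, 2 * kPointerSize)  enum cache / fixed array of keys
    //   MemOperand(sp, 3 * kPointerSize)  expected map, or Smi 1 (slow) / 0 (proxy)
    //   MemOperand(sp, 4 * kPointerSize)  the enumerable object itself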
1164 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1165 __ bind(&loop);
1166 SetExpressionAsStatementPosition(stmt->each());
1167
1168 // Load the current count to a0, load the length to a1.
1169 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1170 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1171 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1172
1173 // Get the current entry of the array into register a3.
1174 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1175 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1176 __ SmiScale(a4, a0, kPointerSizeLog2);
1177 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1178 __ ld(a3, MemOperand(a4)); // Current entry.
1179
1180 // Get the expected map from the stack, or a smi in the permanent slow
1181 // case, into register a2.
1182 __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1183
1184 // Check if the expected map still matches that of the enumerable.
1185 // If not, we may have to filter the key.
1186 Label update_each;
1187 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1188 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1189 __ Branch(&update_each, eq, a4, Operand(a2));
1190
1191 // For proxies, no filtering is done.
1192 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1193 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
1194 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1195
1196 // Convert the entry to a string or (smi) 0 if it isn't a property
1197 // any more. If the property has been removed while iterating, we
1198 // just skip it.
1199 __ Push(a1, a3); // Enumerable and current entry.
1200 __ CallRuntime(Runtime::kForInFilter, 2);
1201 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1202 __ mov(a3, result_register());
1203 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1204 __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1205
1206 // Update the 'each' property or variable from the possibly filtered
1207 // entry in register a3.
1208 __ bind(&update_each);
1209 __ mov(result_register(), a3);
1210 // Perform the assignment as if via '='.
1211 { EffectContext context(this);
1212 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1213 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1214 }
1215
1216 // Generate code for the body of the loop.
1217 Visit(stmt->body());
1218
1219 // Generate code for going to the next element by incrementing
1220 // the index (smi) stored on top of the stack.
1221 __ bind(loop_statement.continue_label());
1222 __ pop(a0);
1223 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1224 __ push(a0);
1225
1226 EmitBackEdgeBookkeeping(stmt, &loop);
1227 __ Branch(&loop);
1228
1229 // Remove the pointers stored on the stack.
1230 __ bind(loop_statement.break_label());
1231 __ Drop(5);
1232
1233 // Exit and decrement the loop depth.
1234 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1235 __ bind(&exit);
1236 decrement_loop_depth();
1237 }
1238
1239
1240 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1241 bool pretenure) {
1242 // Use the fast case closure allocation code that allocates in new
1243 // space for nested functions that don't need literals cloning. If
1244 // we're running with the --always-opt or the --prepare-always-opt
1245 // flag, we need to use the runtime function so that the new function
1246 // we are creating here gets a chance to have its code optimized and
1247 // doesn't just get a copy of the existing unoptimized code.
1248 if (!FLAG_always_opt &&
1249 !FLAG_prepare_always_opt &&
1250 !pretenure &&
1251 scope()->is_function_scope() &&
1252 info->num_literals() == 0) {
1253 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1254 __ li(a2, Operand(info));
1255 __ CallStub(&stub);
1256 } else {
1257 __ li(a0, Operand(info));
1258 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1259 : Heap::kFalseValueRootIndex);
1260 __ Push(cp, a0, a1);
1261 __ CallRuntime(Runtime::kNewClosure, 3);
1262 }
1263 context()->Plug(v0);
1264 }
1265
1266
1267 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1268 int offset,
1269 FeedbackVectorICSlot slot) {
1270 if (NeedsHomeObject(initializer)) {
1271 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1272 __ li(StoreDescriptor::NameRegister(),
1273 Operand(isolate()->factory()->home_object_symbol()));
1274 __ ld(StoreDescriptor::ValueRegister(),
1275 MemOperand(sp, offset * kPointerSize));
1276 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1277 CallStoreIC();
1278 }
1279 }
1280
1281
1282 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1283 TypeofMode typeof_mode,
1284 Label* slow) {
1285 Register current = cp;
1286 Register next = a1;
1287 Register temp = a2;
1288
1289 Scope* s = scope();
1290 while (s != NULL) {
1291 if (s->num_heap_slots() > 0) {
1292 if (s->calls_sloppy_eval()) {
1293 // Check that extension is NULL.
1294 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1295 __ Branch(slow, ne, temp, Operand(zero_reg));
1296 }
1297 // Load next context in chain.
1298 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1299 // Walk the rest of the chain without clobbering cp.
1300 current = next;
1301 }
1302 // If no outer scope calls eval, we do not need to check more
1303 // context extensions.
1304 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1305 s = s->outer_scope();
1306 }
1307
1308 if (s->is_eval_scope()) {
1309 Label loop, fast;
1310 if (!current.is(next)) {
1311 __ Move(next, current);
1312 }
1313 __ bind(&loop);
1314 // Terminate at native context.
1315 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1316 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1317 __ Branch(&fast, eq, temp, Operand(a4));
1318 // Check that extension is NULL.
1319 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1320 __ Branch(slow, ne, temp, Operand(zero_reg));
1321 // Load next context in chain.
1322 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1323 __ Branch(&loop);
1324 __ bind(&fast);
1325 }
1326
1327 // All extension objects were empty and it is safe to use a normal global
1328 // load machinery.
1329 EmitGlobalVariableLoad(proxy, typeof_mode);
1330 }
1331
1332
1333 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1334 Label* slow) {
1335 DCHECK(var->IsContextSlot());
1336 Register context = cp;
1337 Register next = a3;
1338 Register temp = a4;
1339
1340 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1341 if (s->num_heap_slots() > 0) {
1342 if (s->calls_sloppy_eval()) {
1343 // Check that extension is NULL.
1344 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1345 __ Branch(slow, ne, temp, Operand(zero_reg));
1346 }
1347 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1348 // Walk the rest of the chain without clobbering cp.
1349 context = next;
1350 }
1351 }
1352 // Check that last extension is NULL.
1353 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1354 __ Branch(slow, ne, temp, Operand(zero_reg));
1355
1356 // This function is used only for loads, not stores, so it's safe to
1357 // return a cp-based operand (the write barrier cannot be allowed to
1358 // destroy the cp register).
1359 return ContextOperand(context, var->index());
1360 }
1361
1362
1363 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1364 TypeofMode typeof_mode,
1365 Label* slow, Label* done) {
1366 // Generate fast-case code for variables that might be shadowed by
1367 // eval-introduced variables. Eval is used a lot without
1368 // introducing variables. In those cases, we do not want to
1369 // perform a runtime call for all variables in the scope
1370 // containing the eval.
1371 Variable* var = proxy->var();
1372 if (var->mode() == DYNAMIC_GLOBAL) {
1373 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1374 __ Branch(done);
1375 } else if (var->mode() == DYNAMIC_LOCAL) {
1376 Variable* local = var->local_if_not_shadowed();
1377 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1378 if (local->mode() == LET || local->mode() == CONST ||
1379 local->mode() == CONST_LEGACY) {
1380 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1381 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1382 if (local->mode() == CONST_LEGACY) {
1383 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1384 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1385 } else { // LET || CONST
1386 __ Branch(done, ne, at, Operand(zero_reg));
1387 __ li(a0, Operand(var->name()));
1388 __ push(a0);
1389 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1390 }
1391 }
1392 __ Branch(done);
1393 }
1394 }
1395
1396
1397 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1398 TypeofMode typeof_mode) {
1399 Variable* var = proxy->var();
1400 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1401 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1402 if (var->IsGlobalSlot()) {
1403 DCHECK(var->index() > 0);
1404 DCHECK(var->IsStaticGlobalObjectProperty());
1405 // Each var occupies two slots in the context: for reads and writes.
1406 int slot_index = var->index();
1407 int depth = scope()->ContextChainLength(var->scope());
1408 __ li(LoadGlobalViaContextDescriptor::DepthRegister(),
1409 Operand(Smi::FromInt(depth)));
1410 __ li(LoadGlobalViaContextDescriptor::SlotRegister(),
1411 Operand(Smi::FromInt(slot_index)));
1412 __ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name()));
1413 LoadGlobalViaContextStub stub(isolate(), depth);
1414 __ CallStub(&stub);
1415
1416 } else {
1417 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1418 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1419 __ li(LoadDescriptor::SlotRegister(),
1420 Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1421 CallLoadIC(typeof_mode);
1422 }
1423 }
1424
1425
1426 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1427 TypeofMode typeof_mode) {
1428 // Record position before possible IC call.
1429 SetExpressionPosition(proxy);
1430 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1431 Variable* var = proxy->var();
1432
1433 // Three cases: global variables, lookup variables, and all other types of
1434 // variables.
1435 switch (var->location()) {
1436 case VariableLocation::GLOBAL:
1437 case VariableLocation::UNALLOCATED: {
1438 Comment cmnt(masm_, "[ Global variable");
1439 EmitGlobalVariableLoad(proxy, typeof_mode);
1440 context()->Plug(v0);
1441 break;
1442 }
1443
1444 case VariableLocation::PARAMETER:
1445 case VariableLocation::LOCAL:
1446 case VariableLocation::CONTEXT: {
1447 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1448 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1449 : "[ Stack variable");
1450 if (var->binding_needs_init()) {
1451 // var->scope() may be NULL when the proxy is located in eval code and
1452 // refers to a potential outside binding. Currently those bindings are
1453 // always looked up dynamically, i.e. in that case
1454 // var->location() == LOOKUP
1455 // always holds.
1456 DCHECK(var->scope() != NULL);
1457
1458 // Check if the binding really needs an initialization check. The check
1459 // can be skipped in the following situation: we have a LET or CONST
1460 // binding in harmony mode, both the Variable and the VariableProxy have
1461 // the same declaration scope (i.e. they are both in global code, in the
1462 // same function or in the same eval code) and the VariableProxy is in
1463 // the source physically located after the initializer of the variable.
1464 //
1465 // We cannot skip any initialization checks for CONST in non-harmony
1466 // mode because const variables may be declared but never initialized:
1467 // if (false) { const x; }; var y = x;
1468 //
1469 // The condition on the declaration scopes is a conservative check for
1470 // nested functions that access a binding and are called before the
1471 // binding is initialized:
1472 // function() { f(); let x = 1; function f() { x = 2; } }
1473 //
1474 bool skip_init_check;
1475 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1476 skip_init_check = false;
1477 } else if (var->is_this()) {
1478 CHECK(info_->function() != nullptr &&
1479 (info_->function()->kind() & kSubclassConstructor) != 0);
1480 // TODO(dslomov): implement 'this' hole check elimination.
1481 skip_init_check = false;
1482 } else {
1483 // Check that we always have valid source position.
1484 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1485 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1486 skip_init_check = var->mode() != CONST_LEGACY &&
1487 var->initializer_position() < proxy->position();
1488 }
1489
1490 if (!skip_init_check) {
1491 // Let and const need a read barrier.
1492 GetVar(v0, var);
1493 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1494 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1495 if (var->mode() == LET || var->mode() == CONST) {
1496 // Throw a reference error when using an uninitialized let/const
1497 // binding in harmony mode.
1498 Label done;
1499 __ Branch(&done, ne, at, Operand(zero_reg));
1500 __ li(a0, Operand(var->name()));
1501 __ push(a0);
1502 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1503 __ bind(&done);
1504 } else {
1505 // Uninitialized const bindings outside of harmony mode are unholed.
1506 DCHECK(var->mode() == CONST_LEGACY);
1507 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1508 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1509 }
1510 context()->Plug(v0);
1511 break;
1512 }
1513 }
1514 context()->Plug(var);
1515 break;
1516 }
1517
1518 case VariableLocation::LOOKUP: {
1519 Comment cmnt(masm_, "[ Lookup variable");
1520 Label done, slow;
1521 // Generate code for loading from variables potentially shadowed
1522 // by eval-introduced variables.
1523 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1524 __ bind(&slow);
1525 __ li(a1, Operand(var->name()));
1526 __ Push(cp, a1); // Context and name.
1527 Runtime::FunctionId function_id =
1528 typeof_mode == NOT_INSIDE_TYPEOF
1529 ? Runtime::kLoadLookupSlot
1530 : Runtime::kLoadLookupSlotNoReferenceError;
1531 __ CallRuntime(function_id, 2);
1532 __ bind(&done);
1533 context()->Plug(v0);
1534 }
1535 }
1536 }
1537
1538
1539 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1540 Comment cmnt(masm_, "[ RegExpLiteral");
1541 Label materialized;
1542 // Registers will be used as follows:
1543 // a5 = materialized value (RegExp literal)
1544 // a4 = JS function, literals array
1545 // a3 = literal index
1546 // a2 = RegExp pattern
1547 // a1 = RegExp flags
1548 // a0 = RegExp literal clone
1549 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1550 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1551 int literal_offset =
1552 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1553 __ ld(a5, FieldMemOperand(a4, literal_offset));
1554 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1555 __ Branch(&materialized, ne, a5, Operand(at));
1556
1557 // Create regexp literal using runtime function.
1558 // Result will be in v0.
1559 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1560 __ li(a2, Operand(expr->pattern()));
1561 __ li(a1, Operand(expr->flags()));
1562 __ Push(a4, a3, a2, a1);
1563 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1564 __ mov(a5, v0);
1565
1566 __ bind(&materialized);
1567 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1568 Label allocated, runtime_allocate;
1569 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1570 __ jmp(&allocated);
1571
1572 __ bind(&runtime_allocate);
1573 __ li(a0, Operand(Smi::FromInt(size)));
1574 __ Push(a5, a0);
1575 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1576 __ pop(a5);
1577
1578 __ bind(&allocated);
1579
1580 // After this, registers are used as follows:
1581 // v0: Newly allocated regexp.
1582 // a5: Materialized regexp.
1583 // a2: temp.
1584 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
1585 context()->Plug(v0);
1586 }
1587
1588
1589 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1590 if (expression == NULL) {
1591 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1592 __ push(a1);
1593 } else {
1594 VisitForStackValue(expression);
1595 }
1596 }
1597
1598
1599 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1600 Comment cmnt(masm_, "[ ObjectLiteral");
1601
1602 Handle<FixedArray> constant_properties = expr->constant_properties();
1603 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1604 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1605 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1606 __ li(a1, Operand(constant_properties));
1607 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1608 if (MustCreateObjectLiteralWithRuntime(expr)) {
1609 __ Push(a3, a2, a1, a0);
1610 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1611 } else {
1612 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1613 __ CallStub(&stub);
1614 }
1615 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1616
1617 // If result_saved is true the result is on top of the stack. If
1618 // result_saved is false the result is in v0.
1619 bool result_saved = false;
1620
1621 AccessorTable accessor_table(zone());
1622 int property_index = 0;
1623 // store_slot_index points to the vector IC slot for the next store IC used.
1624 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1625 // and must be updated if the number of store ICs emitted here changes.
1626 int store_slot_index = 0;
1627 for (; property_index < expr->properties()->length(); property_index++) {
1628 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1629 if (property->is_computed_name()) break;
1630 if (property->IsCompileTimeValue()) continue;
1631
1632 Literal* key = property->key()->AsLiteral();
1633 Expression* value = property->value();
1634 if (!result_saved) {
1635 __ push(v0); // Save result on stack.
1636 result_saved = true;
1637 }
1638 switch (property->kind()) {
1639 case ObjectLiteral::Property::CONSTANT:
1640 UNREACHABLE();
1641 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1642 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1643 // Fall through.
1644 case ObjectLiteral::Property::COMPUTED:
1645 // It is safe to use [[Put]] here because the boilerplate already
1646 // contains computed properties with an uninitialized value.
1647 if (key->value()->IsInternalizedString()) {
1648 if (property->emit_store()) {
1649 VisitForAccumulatorValue(value);
1650 __ mov(StoreDescriptor::ValueRegister(), result_register());
1651 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1652 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1653 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1654 if (FLAG_vector_stores) {
1655 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1656 CallStoreIC();
1657 } else {
1658 CallStoreIC(key->LiteralFeedbackId());
1659 }
1660 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1661
1662 if (NeedsHomeObject(value)) {
1663 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1664 __ li(StoreDescriptor::NameRegister(),
1665 Operand(isolate()->factory()->home_object_symbol()));
1666 __ ld(StoreDescriptor::ValueRegister(), MemOperand(sp));
1667 if (FLAG_vector_stores) {
1668 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1669 }
1670 CallStoreIC();
1671 }
1672 } else {
1673 VisitForEffect(value);
1674 }
1675 break;
1676 }
1677 // Duplicate receiver on stack.
1678 __ ld(a0, MemOperand(sp));
1679 __ push(a0);
1680 VisitForStackValue(key);
1681 VisitForStackValue(value);
1682 if (property->emit_store()) {
1683 EmitSetHomeObjectIfNeeded(
1684 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1685 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1686 __ push(a0);
1687 __ CallRuntime(Runtime::kSetProperty, 4);
1688 } else {
1689 __ Drop(3);
1690 }
1691 break;
1692 case ObjectLiteral::Property::PROTOTYPE:
1693 // Duplicate receiver on stack.
1694 __ ld(a0, MemOperand(sp));
1695 __ push(a0);
1696 VisitForStackValue(value);
1697 DCHECK(property->emit_store());
1698 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1699 break;
1700 case ObjectLiteral::Property::GETTER:
1701 if (property->emit_store()) {
1702 accessor_table.lookup(key)->second->getter = value;
1703 }
1704 break;
1705 case ObjectLiteral::Property::SETTER:
1706 if (property->emit_store()) {
1707 accessor_table.lookup(key)->second->setter = value;
1708 }
1709 break;
1710 }
1711 }
1712
1713 // Emit code to define accessors, using only a single call to the runtime for
1714 // each pair of corresponding getters and setters.
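// For example, for { get p() { return 1; }, set p(v) {} } the getter and
// setter are paired in the accessor table and defined with a single
// runtime call below.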
1715 for (AccessorTable::Iterator it = accessor_table.begin();
1716 it != accessor_table.end();
1717 ++it) {
1718 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1719 __ push(a0);
1720 VisitForStackValue(it->first);
1721 EmitAccessor(it->second->getter);
1722 EmitSetHomeObjectIfNeeded(
1723 it->second->getter, 2,
1724 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1725 EmitAccessor(it->second->setter);
1726 EmitSetHomeObjectIfNeeded(
1727 it->second->setter, 3,
1728 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1729 __ li(a0, Operand(Smi::FromInt(NONE)));
1730 __ push(a0);
1731 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1732 }
1733
1734 // Object literals have two parts. The "static" part on the left contains no
1735 // computed property names, and so we can compute its map ahead of time; see
1736 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1737 // starts with the first computed property name, and continues with all
1738 // properties to its right. All the code from above initializes the static
1739 // component of the object literal, and arranges for the map of the result to
1740 // reflect the static order in which the keys appear. For the dynamic
1741 // properties, we compile them into a series of "SetOwnProperty" runtime
1742 // calls. This will preserve insertion order.
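// For example, in { a: 1, [key]: 2, b: 3 } the property 'a' belongs to the
// static part handled above, while [key] and 'b' belong to the dynamic
// part and are defined by the runtime calls emitted below.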
1743 for (; property_index < expr->properties()->length(); property_index++) {
1744 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1745
1746 Expression* value = property->value();
1747 if (!result_saved) {
1748 __ push(v0); // Save result on the stack
1749 result_saved = true;
1750 }
1751
1752 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1753 __ push(a0);
1754
1755 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1756 DCHECK(!property->is_computed_name());
1757 VisitForStackValue(value);
1758 DCHECK(property->emit_store());
1759 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1760 } else {
1761 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1762 VisitForStackValue(value);
1763 EmitSetHomeObjectIfNeeded(
1764 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1765
1766 switch (property->kind()) {
1767 case ObjectLiteral::Property::CONSTANT:
1768 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1769 case ObjectLiteral::Property::COMPUTED:
1770 if (property->emit_store()) {
1771 __ li(a0, Operand(Smi::FromInt(NONE)));
1772 __ push(a0);
1773 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1774 } else {
1775 __ Drop(3);
1776 }
1777 break;
1778
1779 case ObjectLiteral::Property::PROTOTYPE:
1780 UNREACHABLE();
1781 break;
1782
1783 case ObjectLiteral::Property::GETTER:
1784 __ li(a0, Operand(Smi::FromInt(NONE)));
1785 __ push(a0);
1786 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1787 break;
1788
1789 case ObjectLiteral::Property::SETTER:
1790 __ li(a0, Operand(Smi::FromInt(NONE)));
1791 __ push(a0);
1792 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1793 break;
1794 }
1795 }
1796 }
1797
1798 if (expr->has_function()) {
1799 DCHECK(result_saved);
1800 __ ld(a0, MemOperand(sp));
1801 __ push(a0);
1802 __ CallRuntime(Runtime::kToFastProperties, 1);
1803 }
1804
1805 if (result_saved) {
1806 context()->PlugTOS();
1807 } else {
1808 context()->Plug(v0);
1809 }
1810
1811 // Verify that compilation exactly consumed the number of store ic slots that
1812 // the ObjectLiteral node had to offer.
1813 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1814 }
1815
1816
1817 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1818 Comment cmnt(masm_, "[ ArrayLiteral");
1819
1820 expr->BuildConstantElements(isolate());
1821
1822 Handle<FixedArray> constant_elements = expr->constant_elements();
1823 bool has_fast_elements =
1824 IsFastObjectElementsKind(expr->constant_elements_kind());
1825
1826 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1827 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1828 // If the only customer of allocation sites is transitioning, then
1829 // we can turn it off if we don't have anywhere else to transition to.
1830 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1831 }
1832
1833 __ mov(a0, result_register());
1834 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1835 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1836 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1837 __ li(a1, Operand(constant_elements));
1838 if (MustCreateArrayLiteralWithRuntime(expr)) {
1839 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1840 __ Push(a3, a2, a1, a0);
1841 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1842 } else {
1843 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1844 __ CallStub(&stub);
1845 }
1846 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1847
1848 bool result_saved = false; // Is the result saved to the stack?
1849 ZoneList<Expression*>* subexprs = expr->values();
1850 int length = subexprs->length();
1851
1852 // Emit code to evaluate all the non-constant subexpressions and to store
1853 // them into the newly cloned array.
1854 int array_index = 0;
1855 for (; array_index < length; array_index++) {
1856 Expression* subexpr = subexprs->at(array_index);
1857 if (subexpr->IsSpread()) break;
1858
1859 // If the subexpression is a literal or a simple materialized literal it
1860 // is already set in the cloned array.
1861 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1862
1863 if (!result_saved) {
1864 __ push(v0); // array literal
1865 __ Push(Smi::FromInt(expr->literal_index()));
1866 result_saved = true;
1867 }
1868
1869 VisitForAccumulatorValue(subexpr);
1870
1871 if (has_fast_elements) {
1872 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1873 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1874 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1875 __ sd(result_register(), FieldMemOperand(a1, offset));
1876 // Update the write barrier for the array store.
1877 __ RecordWriteField(a1, offset, result_register(), a2,
1878 kRAHasBeenSaved, kDontSaveFPRegs,
1879 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1880 } else {
1881 __ li(a3, Operand(Smi::FromInt(array_index)));
1882 __ mov(a0, result_register());
1883 StoreArrayLiteralElementStub stub(isolate());
1884 __ CallStub(&stub);
1885 }
1886
1887 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1888 }
1889
1890 // In case the array literal contains spread expressions it has two parts. The
1891 // first part is the "static" array which has a literal index and is handled
1892 // above. The second part starts with the first spread expression (inclusive);
1893 // these elements get appended to the array. Note that the number of elements
1894 // an iterable produces is unknown ahead of time.
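// For example, in [1, 2, ...iter, 3] the elements 1 and 2 are stored into
// the cloned array above, while ...iter and 3 are appended at runtime by
// the second loop below.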
1895 if (array_index < length && result_saved) {
1896 __ Pop(); // literal index
1897 __ Pop(v0);
1898 result_saved = false;
1899 }
1900 for (; array_index < length; array_index++) {
1901 Expression* subexpr = subexprs->at(array_index);
1902
1903 __ Push(v0);
1904 if (subexpr->IsSpread()) {
1905 VisitForStackValue(subexpr->AsSpread()->expression());
1906 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1907 } else {
1908 VisitForStackValue(subexpr);
1909 __ CallRuntime(Runtime::kAppendElement, 2);
1910 }
1911
1912 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1913 }
1914
1915 if (result_saved) {
1916 __ Pop(); // literal index
1917 context()->PlugTOS();
1918 } else {
1919 context()->Plug(v0);
1920 }
1921 }
1922
1923
1924 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1925 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1926
1927 Comment cmnt(masm_, "[ Assignment");
1928 SetExpressionPosition(expr, INSERT_BREAK);
1929
1930 Property* property = expr->target()->AsProperty();
1931 LhsKind assign_type = Property::GetAssignType(property);
1932
1933 // Evaluate LHS expression.
1934 switch (assign_type) {
1935 case VARIABLE:
1936 // Nothing to do here.
1937 break;
1938 case NAMED_PROPERTY:
1939 if (expr->is_compound()) {
1940 // We need the receiver both on the stack and in the register.
1941 VisitForStackValue(property->obj());
1942 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1943 } else {
1944 VisitForStackValue(property->obj());
1945 }
1946 break;
1947 case NAMED_SUPER_PROPERTY:
1948 VisitForStackValue(
1949 property->obj()->AsSuperPropertyReference()->this_var());
1950 VisitForAccumulatorValue(
1951 property->obj()->AsSuperPropertyReference()->home_object());
1952 __ Push(result_register());
1953 if (expr->is_compound()) {
1954 const Register scratch = a1;
1955 __ ld(scratch, MemOperand(sp, kPointerSize));
1956 __ Push(scratch, result_register());
1957 }
1958 break;
1959 case KEYED_SUPER_PROPERTY: {
1960 const Register scratch = a1;
1961 VisitForStackValue(
1962 property->obj()->AsSuperPropertyReference()->this_var());
1963 VisitForAccumulatorValue(
1964 property->obj()->AsSuperPropertyReference()->home_object());
1965 __ Move(scratch, result_register());
1966 VisitForAccumulatorValue(property->key());
1967 __ Push(scratch, result_register());
1968 if (expr->is_compound()) {
1969 const Register scratch1 = a4;
1970 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
1971 __ Push(scratch1, scratch, result_register());
1972 }
1973 break;
1974 }
1975 case KEYED_PROPERTY:
1976 // We need the key and receiver on both the stack and in v0 and a1.
1977 if (expr->is_compound()) {
1978 VisitForStackValue(property->obj());
1979 VisitForStackValue(property->key());
1980 __ ld(LoadDescriptor::ReceiverRegister(),
1981 MemOperand(sp, 1 * kPointerSize));
1982 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1983 } else {
1984 VisitForStackValue(property->obj());
1985 VisitForStackValue(property->key());
1986 }
1987 break;
1988 }
1989
1990 // For compound assignments we need another deoptimization point after the
1991 // variable/property load.
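// For example, o.p += 1 first loads o.p (the deoptimization point below),
// then evaluates the binary operation, and finally stores the result back.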
1992 if (expr->is_compound()) {
1993 { AccumulatorValueContext context(this);
1994 switch (assign_type) {
1995 case VARIABLE:
1996 EmitVariableLoad(expr->target()->AsVariableProxy());
1997 PrepareForBailout(expr->target(), TOS_REG);
1998 break;
1999 case NAMED_PROPERTY:
2000 EmitNamedPropertyLoad(property);
2001 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2002 break;
2003 case NAMED_SUPER_PROPERTY:
2004 EmitNamedSuperPropertyLoad(property);
2005 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2006 break;
2007 case KEYED_SUPER_PROPERTY:
2008 EmitKeyedSuperPropertyLoad(property);
2009 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2010 break;
2011 case KEYED_PROPERTY:
2012 EmitKeyedPropertyLoad(property);
2013 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2014 break;
2015 }
2016 }
2017
2018 Token::Value op = expr->binary_op();
2019 __ push(v0); // Left operand goes on the stack.
2020 VisitForAccumulatorValue(expr->value());
2021
2022 AccumulatorValueContext context(this);
2023 if (ShouldInlineSmiCase(op)) {
2024 EmitInlineSmiBinaryOp(expr->binary_operation(),
2025 op,
2026 expr->target(),
2027 expr->value());
2028 } else {
2029 EmitBinaryOp(expr->binary_operation(), op);
2030 }
2031
2032 // Deoptimization point in case the binary operation may have side effects.
2033 PrepareForBailout(expr->binary_operation(), TOS_REG);
2034 } else {
2035 VisitForAccumulatorValue(expr->value());
2036 }
2037
2038 SetExpressionPosition(expr);
2039
2040 // Store the value.
2041 switch (assign_type) {
2042 case VARIABLE:
2043 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2044 expr->op(), expr->AssignmentSlot());
2045 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2046 context()->Plug(v0);
2047 break;
2048 case NAMED_PROPERTY:
2049 EmitNamedPropertyAssignment(expr);
2050 break;
2051 case NAMED_SUPER_PROPERTY:
2052 EmitNamedSuperPropertyStore(property);
2053 context()->Plug(v0);
2054 break;
2055 case KEYED_SUPER_PROPERTY:
2056 EmitKeyedSuperPropertyStore(property);
2057 context()->Plug(v0);
2058 break;
2059 case KEYED_PROPERTY:
2060 EmitKeyedPropertyAssignment(expr);
2061 break;
2062 }
2063 }
2064
2065
2066 void FullCodeGenerator::VisitYield(Yield* expr) {
2067 Comment cmnt(masm_, "[ Yield");
2068 SetExpressionPosition(expr);
2069
2070 // Evaluate yielded value first; the initial iterator definition depends on
2071 // this. It stays on the stack while we update the iterator.
2072 VisitForStackValue(expr->expression());
2073
2074 switch (expr->yield_kind()) {
2075 case Yield::kSuspend:
2076 // Pop value from top-of-stack slot; box result into result register.
2077 EmitCreateIteratorResult(false);
2078 __ push(result_register());
2079 // Fall through.
2080 case Yield::kInitial: {
2081 Label suspend, continuation, post_runtime, resume;
2082
2083 __ jmp(&suspend);
2084 __ bind(&continuation);
2085 __ RecordGeneratorContinuation();
2086 __ jmp(&resume);
2087
2088 __ bind(&suspend);
2089 VisitForAccumulatorValue(expr->generator_object());
2090 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2091 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2092 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2093 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2094 __ mov(a1, cp);
2095 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2096 kRAHasBeenSaved, kDontSaveFPRegs);
2097 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2098 __ Branch(&post_runtime, eq, sp, Operand(a1));
2099 __ push(v0); // generator object
2100 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2101 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2102 __ bind(&post_runtime);
2103 __ pop(result_register());
2104 EmitReturnSequence();
2105
2106 __ bind(&resume);
2107 context()->Plug(result_register());
2108 break;
2109 }
2110
2111 case Yield::kFinal: {
2112 VisitForAccumulatorValue(expr->generator_object());
2113 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2114 __ sd(a1, FieldMemOperand(result_register(),
2115 JSGeneratorObject::kContinuationOffset));
2116 // Pop value from top-of-stack slot, box result into result register.
2117 EmitCreateIteratorResult(true);
2118 EmitUnwindBeforeReturn();
2119 EmitReturnSequence();
2120 break;
2121 }
2122
2123 case Yield::kDelegating: {
2124 VisitForStackValue(expr->generator_object());
2125
2126 // Initial stack layout is as follows:
2127 // [sp + 1 * kPointerSize] iter
2128 // [sp + 0 * kPointerSize] g
2129
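// The delegating yield below is roughly equivalent to the following
// JavaScript sketch (simplified; the exact protocol is spelled out label by
// label in the code):
//   var received = undefined;
//   while (true) {
//     var result = iter.next(received);   // iter['throw'](e) if an
//                                         // exception is thrown into the
//     if (result.done) break;             // generator while suspended
//     received = yield result.value;
//   }
//   // result.value is then the value of the whole yield* expression.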
2130 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2131 Label l_next, l_call;
2132 Register load_receiver = LoadDescriptor::ReceiverRegister();
2133 Register load_name = LoadDescriptor::NameRegister();
2134 // Initial send value is undefined.
2135 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2136 __ Branch(&l_next);
2137
2138 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2139 __ bind(&l_catch);
2140 __ mov(a0, v0);
2141 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2142 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2143 __ Push(a2, a3, a0); // "throw", iter, except
2144 __ jmp(&l_call);
2145
2146 // try { received = %yield result }
2147 // Shuffle the received result above a try handler and yield it without
2148 // re-boxing.
2149 __ bind(&l_try);
2150 __ pop(a0); // result
2151 int handler_index = NewHandlerTableEntry();
2152 EnterTryBlock(handler_index, &l_catch);
2153 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2154 __ push(a0); // result
2155
2156 __ jmp(&l_suspend);
2157 __ bind(&l_continuation);
2158 __ RecordGeneratorContinuation();
2159 __ mov(a0, v0);
2160 __ jmp(&l_resume);
2161
2162 __ bind(&l_suspend);
2163 const int generator_object_depth = kPointerSize + try_block_size;
2164 __ ld(a0, MemOperand(sp, generator_object_depth));
2165 __ push(a0); // g
2166 __ Push(Smi::FromInt(handler_index)); // handler-index
2167 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2168 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2169 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2170 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2171 __ mov(a1, cp);
2172 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2173 kRAHasBeenSaved, kDontSaveFPRegs);
2174 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2175 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2176 __ pop(v0); // result
2177 EmitReturnSequence();
2178 __ mov(a0, v0);
2179 __ bind(&l_resume); // received in a0
2180 ExitTryBlock(handler_index);
2181
2182 // receiver = iter; f = 'next'; arg = received;
2183 __ bind(&l_next);
2184 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2185 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2186 __ Push(load_name, a3, a0); // "next", iter, received
2187
2188 // result = receiver[f](arg);
2189 __ bind(&l_call);
2190 __ ld(load_receiver, MemOperand(sp, kPointerSize));
2191 __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2192 __ li(LoadDescriptor::SlotRegister(),
2193 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2194 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2195 CallIC(ic, TypeFeedbackId::None());
2196 __ mov(a0, v0);
2197 __ mov(a1, a0);
2198 __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2199 SetCallPosition(expr, 1);
2200 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2201 __ CallStub(&stub);
2202
2203 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2204 __ Drop(1); // The function is still on the stack; drop it.
2205
2206 // if (!result.done) goto l_try;
2207 __ Move(load_receiver, v0);
2208
2209 __ push(load_receiver); // save result
2210 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2211 __ li(LoadDescriptor::SlotRegister(),
2212 Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2213 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.done
2214 __ mov(a0, v0);
2215 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2216 CallIC(bool_ic);
2217 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2218
2219 // result.value
2220 __ pop(load_receiver); // result
2221 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2222 __ li(LoadDescriptor::SlotRegister(),
2223 Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2224 CallLoadIC(NOT_INSIDE_TYPEOF); // v0=result.value
2225 context()->DropAndPlug(2, v0); // drop iter and g
2226 break;
2227 }
2228 }
2229 }
2230
2231
2232 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2233 Expression *value,
2234 JSGeneratorObject::ResumeMode resume_mode) {
2235 // The value stays in a0, and is ultimately read by the resumed generator, as
2236 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2237 // is read to throw the value when the resumed generator is already closed.
2238 // a1 will hold the generator object until the activation has been resumed.
2239 VisitForStackValue(generator);
2240 VisitForAccumulatorValue(value);
2241 __ pop(a1);
2242
2243 // Load suspended function and context.
2244 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2245 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2246
2247 // Load receiver and store as the first argument.
2248 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2249 __ push(a2);
2250
2251 // Push holes for the rest of the arguments to the generator function.
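// The actual argument values are not reconstructed when resuming; one hole
// per formal parameter is pushed purely as a placeholder so the rebuilt
// frame has the expected layout.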
2252 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2253 // The argument count is stored as int32_t on 64-bit platforms.
2254 // TODO(plind): Smi on 32-bit platforms.
2255 __ lw(a3,
2256 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2257 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2258 Label push_argument_holes, push_frame;
2259 __ bind(&push_argument_holes);
2260 __ Dsubu(a3, a3, Operand(1));
2261 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2262 __ push(a2);
2263 __ jmp(&push_argument_holes);
2264
2265 // Enter a new JavaScript frame, and initialize its slots as they were when
2266 // the generator was suspended.
2267 Label resume_frame, done;
2268 __ bind(&push_frame);
2269 __ Call(&resume_frame);
2270 __ jmp(&done);
2271 __ bind(&resume_frame);
2272 // ra = return address.
2273 // fp = caller's frame pointer.
2274 // cp = callee's context,
2275 // a4 = callee's JS function.
2276 __ Push(ra, fp, cp, a4);
2277 // Adjust FP to point to saved FP.
2278 __ Daddu(fp, sp, 2 * kPointerSize);
2279
2280 // Load the operand stack size.
2281 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2282 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2283 __ SmiUntag(a3);
2284
2285 // If we are sending a value and there is no operand stack, we can jump back
2286 // in directly.
2287 if (resume_mode == JSGeneratorObject::NEXT) {
2288 Label slow_resume;
2289 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2290 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2291 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2292 __ SmiUntag(a2);
2293 __ Daddu(a3, a3, Operand(a2));
2294 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2295 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2296 __ Jump(a3);
2297 __ bind(&slow_resume);
2298 }
2299
2300 // Otherwise, we push holes for the operand stack and call the runtime to fix
2301 // up the stack and the handlers.
2302 Label push_operand_holes, call_resume;
2303 __ bind(&push_operand_holes);
2304 __ Dsubu(a3, a3, Operand(1));
2305 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2306 __ push(a2);
2307 __ Branch(&push_operand_holes);
2308 __ bind(&call_resume);
2309 DCHECK(!result_register().is(a1));
2310 __ Push(a1, result_register());
2311 __ Push(Smi::FromInt(resume_mode));
2312 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2313 // Not reached: the runtime call returns elsewhere.
2314 __ stop("not-reached");
2315
2316 __ bind(&done);
2317 context()->Plug(result_register());
2318 }
2319
2320
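// Builds the equivalent of the JS object { value: v, done: <done> }, where v
// is popped from the top of the stack, using the native context's iterator
// result map and falling back to a runtime allocation if new space is full.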
2321 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2322 Label gc_required;
2323 Label allocated;
2324
2325 const int instance_size = 5 * kPointerSize;
2326 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2327 instance_size);
2328
2329 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
2330 __ jmp(&allocated);
2331
2332 __ bind(&gc_required);
2333 __ Push(Smi::FromInt(instance_size));
2334 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2335 __ ld(context_register(),
2336 MemOperand(fp, StandardFrameConstants::kContextOffset));
2337
2338 __ bind(&allocated);
2339 __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2340 __ ld(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2341 __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2342 __ pop(a2);
2343 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2344 __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2345 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2346 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2347 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2348 __ sd(a2,
2349 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2350 __ sd(a3,
2351 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2352
2353 // Only the value field needs a write barrier, as the other values are in the
2354 // root set.
2355 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2356 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2357 }
2358
2359
2360 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2361 SetExpressionPosition(prop);
2362 Literal* key = prop->key()->AsLiteral();
2363 DCHECK(!prop->IsSuperAccess());
2364
2365 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2366 __ li(LoadDescriptor::SlotRegister(),
2367 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2368 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2369 }
2370
2371
2372 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2373 // Stack: receiver, home_object.
2374 SetExpressionPosition(prop);
2375
2376 Literal* key = prop->key()->AsLiteral();
2377 DCHECK(!key->value()->IsSmi());
2378 DCHECK(prop->IsSuperAccess());
2379
2380 __ Push(key->value());
2381 __ Push(Smi::FromInt(language_mode()));
2382 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2383 }
2384
2385
2386 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2387 // Call keyed load IC. It has register arguments receiver and key.
2388 SetExpressionPosition(prop);
2389
2390 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2391 __ li(LoadDescriptor::SlotRegister(),
2392 Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2393 CallIC(ic);
2394 }
2395
2396
2397 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2398 // Stack: receiver, home_object, key.
2399 SetExpressionPosition(prop);
2400 __ Push(Smi::FromInt(language_mode()));
2401 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2402 }
2403
2404
2405 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2406 Token::Value op,
2407 Expression* left_expr,
2408 Expression* right_expr) {
2409 Label done, smi_case, stub_call;
2410
2411 Register scratch1 = a2;
2412 Register scratch2 = a3;
2413
2414 // Get the arguments.
2415 Register left = a1;
2416 Register right = a0;
2417 __ pop(left);
2418 __ mov(a0, result_register());
2419
2420 // Perform combined smi check on both operands.
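// Smis have a zero tag bit, so the OR of both operands has a zero tag bit
// only if both operands are smis; a single check on scratch1 covers both.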
2421 __ Or(scratch1, left, Operand(right));
2422 STATIC_ASSERT(kSmiTag == 0);
2423 JumpPatchSite patch_site(masm_);
2424 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2425
2426 __ bind(&stub_call);
2427 Handle<Code> code =
2428 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2429 CallIC(code, expr->BinaryOperationFeedbackId());
2430 patch_site.EmitPatchInfo();
2431 __ jmp(&done);
2432
2433 __ bind(&smi_case);
2434 // Smi case. This code works the same way as the smi-smi case in the type
2435 // recording binary operation stub.
2436 switch (op) {
2437 case Token::SAR:
2438 __ GetLeastBitsFromSmi(scratch1, right, 5);
2439 __ dsrav(right, left, scratch1);
2440 __ And(v0, right, Operand(0xffffffff00000000L));
2441 break;
2442 case Token::SHL: {
2443 __ SmiUntag(scratch1, left);
2444 __ GetLeastBitsFromSmi(scratch2, right, 5);
2445 __ dsllv(scratch1, scratch1, scratch2);
2446 __ SmiTag(v0, scratch1);
2447 break;
2448 }
2449 case Token::SHR: {
2450 __ SmiUntag(scratch1, left);
2451 __ GetLeastBitsFromSmi(scratch2, right, 5);
2452 __ dsrlv(scratch1, scratch1, scratch2);
2453 __ And(scratch2, scratch1, 0x80000000);
2454 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2455 __ SmiTag(v0, scratch1);
2456 break;
2457 }
2458 case Token::ADD:
2459 __ DadduAndCheckForOverflow(v0, left, right, scratch1);
2460 __ BranchOnOverflow(&stub_call, scratch1);
2461 break;
2462 case Token::SUB:
2463 __ DsubuAndCheckForOverflow(v0, left, right, scratch1);
2464 __ BranchOnOverflow(&stub_call, scratch1);
2465 break;
2466 case Token::MUL: {
2467 __ Dmulh(v0, left, right);
2468 __ dsra32(scratch2, v0, 0);
2469 __ sra(scratch1, v0, 31);
2470 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2471 __ SmiTag(v0);
2472 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
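// The product is zero here. If either operand is negative the result must
// be -0, which cannot be represented as a smi, so fall back to the stub;
// left + right is negative exactly when the non-zero operand is negative.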
2473 __ Daddu(scratch2, right, left);
2474 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2475 DCHECK(Smi::FromInt(0) == 0);
2476 __ mov(v0, zero_reg);
2477 break;
2478 }
2479 case Token::BIT_OR:
2480 __ Or(v0, left, Operand(right));
2481 break;
2482 case Token::BIT_AND:
2483 __ And(v0, left, Operand(right));
2484 break;
2485 case Token::BIT_XOR:
2486 __ Xor(v0, left, Operand(right));
2487 break;
2488 default:
2489 UNREACHABLE();
2490 }
2491
2492 __ bind(&done);
2493 context()->Plug(v0);
2494 }
2495
2496
2497 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2498 int* used_store_slots) {
2499 // Constructor is in v0.
2500 DCHECK(lit != NULL);
2501 __ push(v0);
2502
2503 // No access check is needed here since the constructor is created by the
2504 // class literal.
2505 Register scratch = a1;
2506 __ ld(scratch,
2507 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2508 __ push(scratch);
2509
2510 for (int i = 0; i < lit->properties()->length(); i++) {
2511 ObjectLiteral::Property* property = lit->properties()->at(i);
2512 Expression* value = property->value();
2513
2514 if (property->is_static()) {
2515 __ ld(scratch, MemOperand(sp, kPointerSize)); // constructor
2516 } else {
2517 __ ld(scratch, MemOperand(sp, 0)); // prototype
2518 }
2519 __ push(scratch);
2520 EmitPropertyKey(property, lit->GetIdForProperty(i));
2521
2522 // The static prototype property is read-only. We handle the non-computed
2523 // property name case in the parser. Since this is the only case where we
2524 // need to check for an own read-only property, we special-case it here so
2525 // we do not need to do this check for every property.
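// For example, class C { static ["proto" + "type"]() {} } reaches this
// runtime check, while the literal form class C { static prototype() {} }
// is already rejected by the parser.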
2526 if (property->is_static() && property->is_computed_name()) {
2527 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2528 __ push(v0);
2529 }
2530
2531 VisitForStackValue(value);
2532 EmitSetHomeObjectIfNeeded(value, 2,
2533 lit->SlotForHomeObject(value, used_store_slots));
2534
2535 switch (property->kind()) {
2536 case ObjectLiteral::Property::CONSTANT:
2537 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2538 case ObjectLiteral::Property::PROTOTYPE:
2539 UNREACHABLE();
2540 case ObjectLiteral::Property::COMPUTED:
2541 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2542 break;
2543
2544 case ObjectLiteral::Property::GETTER:
2545 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2546 __ push(a0);
2547 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2548 break;
2549
2550 case ObjectLiteral::Property::SETTER:
2551 __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2552 __ push(a0);
2553 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2554 break;
2555
2556 default:
2557 UNREACHABLE();
2558 }
2559 }
2560
2561 // prototype
2562 __ CallRuntime(Runtime::kToFastProperties, 1);
2563
2564 // constructor
2565 __ CallRuntime(Runtime::kToFastProperties, 1);
2566
2567 if (is_strong(language_mode())) {
2568 __ ld(scratch,
2569 FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2570 __ Push(v0, scratch);
2571 // TODO(conradw): It would be more efficient to define the properties with
2572 // the right attributes the first time round.
2573 // Freeze the prototype.
2574 __ CallRuntime(Runtime::kObjectFreeze, 1);
2575 // Freeze the constructor.
2576 __ CallRuntime(Runtime::kObjectFreeze, 1);
2577 }
2578 }
2579
2580
2581 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2582 __ mov(a0, result_register());
2583 __ pop(a1);
2584 Handle<Code> code =
2585 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2586 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2587 CallIC(code, expr->BinaryOperationFeedbackId());
2588 patch_site.EmitPatchInfo();
2589 context()->Plug(v0);
2590 }
2591
2592
2593 void FullCodeGenerator::EmitAssignment(Expression* expr,
2594 FeedbackVectorICSlot slot) {
2595 DCHECK(expr->IsValidReferenceExpressionOrThis());
2596
2597 Property* prop = expr->AsProperty();
2598 LhsKind assign_type = Property::GetAssignType(prop);
2599
2600 switch (assign_type) {
2601 case VARIABLE: {
2602 Variable* var = expr->AsVariableProxy()->var();
2603 EffectContext context(this);
2604 EmitVariableAssignment(var, Token::ASSIGN, slot);
2605 break;
2606 }
2607 case NAMED_PROPERTY: {
2608 __ push(result_register()); // Preserve value.
2609 VisitForAccumulatorValue(prop->obj());
2610 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2611 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2612 __ li(StoreDescriptor::NameRegister(),
2613 Operand(prop->key()->AsLiteral()->value()));
2614 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2615 CallStoreIC();
2616 break;
2617 }
2618 case NAMED_SUPER_PROPERTY: {
2619 __ Push(v0);
2620 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2621 VisitForAccumulatorValue(
2622 prop->obj()->AsSuperPropertyReference()->home_object());
2623 // stack: value, this; v0: home_object
2624 Register scratch = a2;
2625 Register scratch2 = a3;
2626 __ mov(scratch, result_register()); // home_object
2627 __ ld(v0, MemOperand(sp, kPointerSize)); // value
2628 __ ld(scratch2, MemOperand(sp, 0)); // this
2629 __ sd(scratch2, MemOperand(sp, kPointerSize)); // this
2630 __ sd(scratch, MemOperand(sp, 0)); // home_object
2631 // stack: this, home_object; v0: value
2632 EmitNamedSuperPropertyStore(prop);
2633 break;
2634 }
2635 case KEYED_SUPER_PROPERTY: {
2636 __ Push(v0);
2637 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2638 VisitForStackValue(
2639 prop->obj()->AsSuperPropertyReference()->home_object());
2640 VisitForAccumulatorValue(prop->key());
2641 Register scratch = a2;
2642 Register scratch2 = a3;
2643 __ ld(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2644 // stack: value, this, home_object; v0: key, a3: value
2645 __ ld(scratch, MemOperand(sp, kPointerSize)); // this
2646 __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2647 __ ld(scratch, MemOperand(sp, 0)); // home_object
2648 __ sd(scratch, MemOperand(sp, kPointerSize));
2649 __ sd(v0, MemOperand(sp, 0));
2650 __ Move(v0, scratch2);
2651 // stack: this, home_object, key; v0: value.
2652 EmitKeyedSuperPropertyStore(prop);
2653 break;
2654 }
2655 case KEYED_PROPERTY: {
2656 __ push(result_register()); // Preserve value.
2657 VisitForStackValue(prop->obj());
2658 VisitForAccumulatorValue(prop->key());
2659 __ Move(StoreDescriptor::NameRegister(), result_register());
2660 __ Pop(StoreDescriptor::ValueRegister(),
2661 StoreDescriptor::ReceiverRegister());
2662 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2663 Handle<Code> ic =
2664 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2665 CallIC(ic);
2666 break;
2667 }
2668 }
2669 context()->Plug(v0);
2670 }
2671
2672
2673 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2674 Variable* var, MemOperand location) {
2675 __ sd(result_register(), location);
2676 if (var->IsContextSlot()) {
2677 // RecordWrite may destroy all its register arguments.
2678 __ Move(a3, result_register());
2679 int offset = Context::SlotOffset(var->index());
2680 __ RecordWriteContextSlot(
2681 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2682 }
2683 }
2684
2685
2686 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2687 FeedbackVectorICSlot slot) {
2688 if (var->IsUnallocated()) {
2689 // Global var, const, or let.
2690 __ mov(StoreDescriptor::ValueRegister(), result_register());
2691 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2692 __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2693 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2694 CallStoreIC();
2695
2696 } else if (var->IsGlobalSlot()) {
2697 // Global var, const, or let.
2698 DCHECK(var->index() > 0);
2699 DCHECK(var->IsStaticGlobalObjectProperty());
2700 // Each var occupies two slots in the context: one for reads and one for writes.
2701 int slot_index = var->index() + 1;
2702 int depth = scope()->ContextChainLength(var->scope());
2703 __ li(StoreGlobalViaContextDescriptor::DepthRegister(),
2704 Operand(Smi::FromInt(depth)));
2705 __ li(StoreGlobalViaContextDescriptor::SlotRegister(),
2706 Operand(Smi::FromInt(slot_index)));
2707 __ li(StoreGlobalViaContextDescriptor::NameRegister(),
2708 Operand(var->name()));
2709 __ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
2710 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2711 __ CallStub(&stub);
2712
2713 } else if (var->mode() == LET && op != Token::INIT_LET) {
2714 // Non-initializing assignment to let variable needs a write barrier.
2715 DCHECK(!var->IsLookupSlot());
2716 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2717 Label assign;
2718 MemOperand location = VarOperand(var, a1);
2719 __ ld(a3, location);
2720 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2721 __ Branch(&assign, ne, a3, Operand(a4));
2722 __ li(a3, Operand(var->name()));
2723 __ push(a3);
2724 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2725 // Perform the assignment.
2726 __ bind(&assign);
2727 EmitStoreToStackLocalOrContextSlot(var, location);
2728
2729 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2730 // Assignment to const variable needs a write barrier.
2731 DCHECK(!var->IsLookupSlot());
2732 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2733 Label const_error;
2734 MemOperand location = VarOperand(var, a1);
2735 __ ld(a3, location);
2736 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2737 __ Branch(&const_error, ne, a3, Operand(at));
2738 __ li(a3, Operand(var->name()));
2739 __ push(a3);
2740 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2741 __ bind(&const_error);
2742 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2743
2744 } else if (var->is_this() && op == Token::INIT_CONST) {
2745 // Initializing assignment to const {this} needs a write barrier.
2746 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2747 Label uninitialized_this;
2748 MemOperand location = VarOperand(var, a1);
2749 __ ld(a3, location);
2750 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2751 __ Branch(&uninitialized_this, eq, a3, Operand(at));
2752 __ li(a0, Operand(var->name()));
2753 __ Push(a0);
2754 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2755 __ bind(&uninitialized_this);
2756 EmitStoreToStackLocalOrContextSlot(var, location);
2757
2758 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2759 if (var->IsLookupSlot()) {
2760 // Assignment to var.
2761 __ li(a4, Operand(var->name()));
2762 __ li(a3, Operand(Smi::FromInt(language_mode())));
2763 // sp[0] : language mode.
2764 // sp[8] : name.
2765 // sp[16] : context.
2766 // sp[24] : value.
2767 __ Push(v0, cp, a4, a3);
2768 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2769 } else {
2770 // Assignment to var or initializing assignment to let/const in harmony
2771 // mode.
2772 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2773 MemOperand location = VarOperand(var, a1);
2774 if (generate_debug_code_ && op == Token::INIT_LET) {
2775 // Check for an uninitialized let binding.
2776 __ ld(a2, location);
2777 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2778 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2779 }
2780 EmitStoreToStackLocalOrContextSlot(var, location);
2781 }
2782
2783 } else if (op == Token::INIT_CONST_LEGACY) {
2784 // Const initializers need a write barrier.
2785 DCHECK(!var->IsParameter()); // No const parameters.
2786 if (var->IsLookupSlot()) {
2787 __ li(a0, Operand(var->name()));
2788 __ Push(v0, cp, a0); // Value, context, and name.
2789 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2790 } else {
2791 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2792 Label skip;
2793 MemOperand location = VarOperand(var, a1);
2794 __ ld(a2, location);
2795 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2796 __ Branch(&skip, ne, a2, Operand(at));
2797 EmitStoreToStackLocalOrContextSlot(var, location);
2798 __ bind(&skip);
2799 }
2800
2801 } else {
2802 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2803 if (is_strict(language_mode())) {
2804 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2805 }
2806 // Silently ignore store in sloppy mode.
2807 }
2808 }
2809
2810
2811 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2812 // Assignment to a property, using a named store IC.
2813 Property* prop = expr->target()->AsProperty();
2814 DCHECK(prop != NULL);
2815 DCHECK(prop->key()->IsLiteral());
2816
2817 __ mov(StoreDescriptor::ValueRegister(), result_register());
2818 __ li(StoreDescriptor::NameRegister(),
2819 Operand(prop->key()->AsLiteral()->value()));
2820 __ pop(StoreDescriptor::ReceiverRegister());
2821 if (FLAG_vector_stores) {
2822 EmitLoadStoreICSlot(expr->AssignmentSlot());
2823 CallStoreIC();
2824 } else {
2825 CallStoreIC(expr->AssignmentFeedbackId());
2826 }
2827
2828 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2829 context()->Plug(v0);
2830 }
2831
2832
2833 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2834 // Assignment to named property of super.
2835 // v0 : value
2836 // stack : receiver ('this'), home_object
2837 DCHECK(prop != NULL);
2838 Literal* key = prop->key()->AsLiteral();
2839 DCHECK(key != NULL);
2840
2841 __ Push(key->value());
2842 __ Push(v0);
2843 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2844 : Runtime::kStoreToSuper_Sloppy),
2845 4);
2846 }
2847
2848
2849 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2850 // Assignment to keyed property of super.
2851 // v0 : value
2852 // stack : receiver ('this'), home_object, key
2853 DCHECK(prop != NULL);
2854
2855 __ Push(v0);
2856 __ CallRuntime(
2857 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2858 : Runtime::kStoreKeyedToSuper_Sloppy),
2859 4);
2860 }
2861
2862
2863 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2864 // Assignment to a property, using a keyed store IC.
2865 // Call keyed store IC.
2866 // The arguments are:
2867 // - a0 is the value,
2868 // - a1 is the key,
2869 // - a2 is the receiver.
2870 __ mov(StoreDescriptor::ValueRegister(), result_register());
2871 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2872 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2873
2874 Handle<Code> ic =
2875 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2876 if (FLAG_vector_stores) {
2877 EmitLoadStoreICSlot(expr->AssignmentSlot());
2878 CallIC(ic);
2879 } else {
2880 CallIC(ic, expr->AssignmentFeedbackId());
2881 }
2882
2883 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2884 context()->Plug(v0);
2885 }
2886
2887
2888 void FullCodeGenerator::VisitProperty(Property* expr) {
2889 Comment cmnt(masm_, "[ Property");
2890 SetExpressionPosition(expr);
2891
2892 Expression* key = expr->key();
2893
2894 if (key->IsPropertyName()) {
2895 if (!expr->IsSuperAccess()) {
2896 VisitForAccumulatorValue(expr->obj());
2897 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2898 EmitNamedPropertyLoad(expr);
2899 } else {
2900 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2901 VisitForStackValue(
2902 expr->obj()->AsSuperPropertyReference()->home_object());
2903 EmitNamedSuperPropertyLoad(expr);
2904 }
2905 } else {
2906 if (!expr->IsSuperAccess()) {
2907 VisitForStackValue(expr->obj());
2908 VisitForAccumulatorValue(expr->key());
2909 __ Move(LoadDescriptor::NameRegister(), v0);
2910 __ pop(LoadDescriptor::ReceiverRegister());
2911 EmitKeyedPropertyLoad(expr);
2912 } else {
2913 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2914 VisitForStackValue(
2915 expr->obj()->AsSuperPropertyReference()->home_object());
2916 VisitForStackValue(expr->key());
2917 EmitKeyedSuperPropertyLoad(expr);
2918 }
2919 }
2920 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2921 context()->Plug(v0);
2922 }
2923
2924
2925 void FullCodeGenerator::CallIC(Handle<Code> code,
2926 TypeFeedbackId id) {
2927 ic_total_count_++;
2928 __ Call(code, RelocInfo::CODE_TARGET, id);
2929 }
2930
2931
2932 // Code common for calls using the IC.
2933 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2934 Expression* callee = expr->expression();
2935
2936 CallICState::CallType call_type =
2937 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2938
2939 // Get the target function.
2940 if (call_type == CallICState::FUNCTION) {
2941 { StackValueContext context(this);
2942 EmitVariableLoad(callee->AsVariableProxy());
2943 PrepareForBailout(callee, NO_REGISTERS);
2944 }
2945 // Push undefined as receiver. This is patched in the method prologue if it
2946 // is a sloppy mode method.
2947 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2948 __ push(at);
2949 } else {
2950 // Load the function from the receiver.
2951 DCHECK(callee->IsProperty());
2952 DCHECK(!callee->AsProperty()->IsSuperAccess());
2953 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2954 EmitNamedPropertyLoad(callee->AsProperty());
2955 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2956 // Push the target function under the receiver.
2957 __ ld(at, MemOperand(sp, 0));
2958 __ push(at);
2959 __ sd(v0, MemOperand(sp, kPointerSize));
2960 }
2961
2962 EmitCall(expr, call_type);
2963 }
2964
2965
2966 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2967 SetExpressionPosition(expr);
2968 Expression* callee = expr->expression();
2969 DCHECK(callee->IsProperty());
2970 Property* prop = callee->AsProperty();
2971 DCHECK(prop->IsSuperAccess());
2972
2973 Literal* key = prop->key()->AsLiteral();
2974 DCHECK(!key->value()->IsSmi());
2975 // Load the function from the receiver.
2976 const Register scratch = a1;
2977 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2978 VisitForAccumulatorValue(super_ref->home_object());
2979 __ mov(scratch, v0);
2980 VisitForAccumulatorValue(super_ref->this_var());
2981 __ Push(scratch, v0, v0, scratch);
2982 __ Push(key->value());
2983 __ Push(Smi::FromInt(language_mode()));
2984
2985 // Stack here:
2986 // - home_object
2987 // - this (receiver)
2988 // - this (receiver) <-- LoadFromSuper will pop here and below.
2989 // - home_object
2990 // - key
2991 // - language_mode
2992 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2993
2994 // Replace home_object with target function.
2995 __ sd(v0, MemOperand(sp, kPointerSize));
2996
2997 // Stack here:
2998 // - target function
2999 // - this (receiver)
3000 EmitCall(expr, CallICState::METHOD);
3001 }
3002
3003
3004 // Code common for calls using the IC.
3005 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
3006 Expression* key) {
3007 // Load the key.
3008 VisitForAccumulatorValue(key);
3009
3010 Expression* callee = expr->expression();
3011
3012 // Load the function from the receiver.
3013 DCHECK(callee->IsProperty());
3014 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3015 __ Move(LoadDescriptor::NameRegister(), v0);
3016 EmitKeyedPropertyLoad(callee->AsProperty());
3017 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
3018
3019 // Push the target function under the receiver.
3020 __ ld(at, MemOperand(sp, 0));
3021 __ push(at);
3022 __ sd(v0, MemOperand(sp, kPointerSize));
3023
3024 EmitCall(expr, CallICState::METHOD);
3025 }
3026
3027
3028 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
3029 Expression* callee = expr->expression();
3030 DCHECK(callee->IsProperty());
3031 Property* prop = callee->AsProperty();
3032 DCHECK(prop->IsSuperAccess());
3033
3034 SetExpressionPosition(prop);
3035 // Load the function from the receiver.
3036 const Register scratch = a1;
3037 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
3038 VisitForAccumulatorValue(super_ref->home_object());
3039 __ Move(scratch, v0);
3040 VisitForAccumulatorValue(super_ref->this_var());
3041 __ Push(scratch, v0, v0, scratch);
3042 VisitForStackValue(prop->key());
3043 __ Push(Smi::FromInt(language_mode()));
3044
3045 // Stack here:
3046 // - home_object
3047 // - this (receiver)
3048 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
3049 // - home_object
3050 // - key
3051 // - language_mode
3052 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
3053
3054 // Replace home_object with target function.
3055 __ sd(v0, MemOperand(sp, kPointerSize));
3056
3057 // Stack here:
3058 // - target function
3059 // - this (receiver)
3060 EmitCall(expr, CallICState::METHOD);
3061 }
3062
3063
3064 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
3065 // Load the arguments.
3066 ZoneList<Expression*>* args = expr->arguments();
3067 int arg_count = args->length();
3068 for (int i = 0; i < arg_count; i++) {
3069 VisitForStackValue(args->at(i));
3070 }
3071
3072 // Record source position of the IC call.
3073 SetCallPosition(expr, arg_count);
3074 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
3075 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
3076 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3077 // Don't assign a type feedback id to the IC, since type feedback is provided
3078 // by the vector above.
3079 CallIC(ic);
3080 RecordJSReturnSite(expr);
3081 // Restore context register.
3082 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3083 context()->DropAndPlug(1, v0);
3084 }
3085
3086
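// Prepares the extra arguments for the runtime call that resolves a
// possible direct eval: the first argument to the call (the source string,
// if present), the calling function, the language mode and the scope's
// start position. The callee itself is pushed by the caller.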
3087 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
3088 // a6: copy of the first argument or undefined if it doesn't exist.
3089 if (arg_count > 0) {
3090 __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
3091 } else {
3092 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
3093 }
3094
3095 // a5: the receiver of the enclosing function.
3096 __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3097
3098 // a4: the language mode.
3099 __ li(a4, Operand(Smi::FromInt(language_mode())));
3100
3101 // a1: the start position of the scope the call resides in.
3102 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
3103
3104 // Do the runtime call.
3105 __ Push(a6, a5, a4, a1);
3106 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3107 }
3108
3109
3110 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3111 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3112 VariableProxy* callee = expr->expression()->AsVariableProxy();
3113 if (callee->var()->IsLookupSlot()) {
3114 Label slow, done;
3115
3116 SetExpressionPosition(callee);
3117 // Generate code for loading from variables potentially shadowed by
3118 // eval-introduced variables.
3119 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3120
3121 __ bind(&slow);
3122 // Call the runtime to find the function to call (returned in v0)
3123 // and the object holding it (returned in v1).
3124 DCHECK(!context_register().is(a2));
3125 __ li(a2, Operand(callee->name()));
3126 __ Push(context_register(), a2);
3127 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3128 __ Push(v0, v1); // Function, receiver.
3129 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3130
3131 // If fast case code has been generated, emit code to push the
3132 // function and receiver and have the slow path jump around this
3133 // code.
3134 if (done.is_linked()) {
3135 Label call;
3136 __ Branch(&call);
3137 __ bind(&done);
3138 // Push function.
3139 __ push(v0);
3140       // The receiver is implicitly the global receiver. Indicate this
3141       // by passing undefined to the call function stub.
3142 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3143 __ push(a1);
3144 __ bind(&call);
3145 }
3146 } else {
3147 VisitForStackValue(callee);
3148 // refEnv.WithBaseObject()
3149 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
3150 __ push(a2); // Reserved receiver slot.
3151 }
3152 }
3153
3154
3155 void FullCodeGenerator::VisitCall(Call* expr) {
3156 #ifdef DEBUG
3157 // We want to verify that RecordJSReturnSite gets called on all paths
3158 // through this function. Avoid early returns.
3159 expr->return_is_recorded_ = false;
3160 #endif
3161
3162 Comment cmnt(masm_, "[ Call");
3163 Expression* callee = expr->expression();
3164 Call::CallType call_type = expr->GetCallType(isolate());
3165
3166 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3167     // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
3168 // to resolve the function we need to call. Then we call the resolved
3169 // function using the given arguments.
3170 ZoneList<Expression*>* args = expr->arguments();
3171 int arg_count = args->length();
3172 PushCalleeAndWithBaseObject(expr);
3173
3174 // Push the arguments.
3175 for (int i = 0; i < arg_count; i++) {
3176 VisitForStackValue(args->at(i));
3177 }
3178
3179 // Push a copy of the function (found below the arguments) and
3180 // resolve eval.
3181 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3182 __ push(a1);
3183 EmitResolvePossiblyDirectEval(arg_count);
3184
3185 // Touch up the stack with the resolved function.
3186 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
3187
3188 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3189 // Record source position for debugger.
3190 SetCallPosition(expr, arg_count);
3191 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3192 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
3193 __ CallStub(&stub);
3194 RecordJSReturnSite(expr);
3195 // Restore context register.
3196 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3197 context()->DropAndPlug(1, v0);
3198 } else if (call_type == Call::GLOBAL_CALL) {
3199 EmitCallWithLoadIC(expr);
3200 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3201 // Call to a lookup slot (dynamically introduced variable).
3202 PushCalleeAndWithBaseObject(expr);
3203 EmitCall(expr);
3204 } else if (call_type == Call::PROPERTY_CALL) {
3205 Property* property = callee->AsProperty();
3206 bool is_named_call = property->key()->IsPropertyName();
3207 if (property->IsSuperAccess()) {
3208 if (is_named_call) {
3209 EmitSuperCallWithLoadIC(expr);
3210 } else {
3211 EmitKeyedSuperCallWithLoadIC(expr);
3212 }
3213 } else {
3214 VisitForStackValue(property->obj());
3215 if (is_named_call) {
3216 EmitCallWithLoadIC(expr);
3217 } else {
3218 EmitKeyedCallWithLoadIC(expr, property->key());
3219 }
3220 }
3221 } else if (call_type == Call::SUPER_CALL) {
3222 EmitSuperConstructorCall(expr);
3223 } else {
3224 DCHECK(call_type == Call::OTHER_CALL);
3225 // Call to an arbitrary expression not handled specially above.
3226 VisitForStackValue(callee);
3227 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
3228 __ push(a1);
3229 // Emit function call.
3230 EmitCall(expr);
3231 }
3232
3233 #ifdef DEBUG
3234 // RecordJSReturnSite should have been called.
3235 DCHECK(expr->return_is_recorded_);
3236 #endif
3237 }
3238
3239
3240 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3241 Comment cmnt(masm_, "[ CallNew");
3242 // According to ECMA-262, section 11.2.2, page 44, the function
3243 // expression in new calls must be evaluated before the
3244 // arguments.
3245
3246 // Push constructor on the stack. If it's not a function it's used as
3247 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
3248 // ignored.
3249 DCHECK(!expr->expression()->IsSuperPropertyReference());
3250 VisitForStackValue(expr->expression());
3251
3252 // Push the arguments ("left-to-right") on the stack.
3253 ZoneList<Expression*>* args = expr->arguments();
3254 int arg_count = args->length();
3255 for (int i = 0; i < arg_count; i++) {
3256 VisitForStackValue(args->at(i));
3257 }
3258
3259 // Call the construct call builtin that handles allocation and
3260 // constructor invocation.
3261 SetConstructCallPosition(expr);
3262
3263 // Load function and argument count into a1 and a0.
3264 __ li(a0, Operand(arg_count));
3265 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3266
3267 // Record call targets in unoptimized code.
3268 if (FLAG_pretenuring_call_new) {
3269 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3270 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3271 expr->CallNewFeedbackSlot().ToInt() + 1);
3272 }
3273
3274 __ li(a2, FeedbackVector());
3275 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
3276
3277 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3278 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3279 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3280 context()->Plug(v0);
3281 }
3282
3283
3284 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3285 SuperCallReference* super_call_ref =
3286 expr->expression()->AsSuperCallReference();
3287 DCHECK_NOT_NULL(super_call_ref);
3288
3289 EmitLoadSuperConstructor(super_call_ref);
3290 __ push(result_register());
3291
3292 // Push the arguments ("left-to-right") on the stack.
3293 ZoneList<Expression*>* args = expr->arguments();
3294 int arg_count = args->length();
3295 for (int i = 0; i < arg_count; i++) {
3296 VisitForStackValue(args->at(i));
3297 }
3298
3299 // Call the construct call builtin that handles allocation and
3300 // constructor invocation.
3301 SetConstructCallPosition(expr);
3302
3303 // Load original constructor into a4.
3304 VisitForAccumulatorValue(super_call_ref->new_target_var());
3305 __ mov(a4, result_register());
3306
3307 // Load function and argument count into a1 and a0.
3308 __ li(a0, Operand(arg_count));
3309 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
3310
3311 // Record call targets in unoptimized code.
3312 if (FLAG_pretenuring_call_new) {
3313 UNREACHABLE();
3314 /* TODO(dslomov): support pretenuring.
3315 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3316 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3317 expr->CallNewFeedbackSlot().ToInt() + 1);
3318 */
3319 }
3320
3321 __ li(a2, FeedbackVector());
3322 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackSlot())));
3323
3324 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3325 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3326
3327 RecordJSReturnSite(expr);
3328
3329 context()->Plug(v0);
3330 }
3331
3332
3333 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3334 ZoneList<Expression*>* args = expr->arguments();
3335 DCHECK(args->length() == 1);
3336
3337 VisitForAccumulatorValue(args->at(0));
3338
3339 Label materialize_true, materialize_false;
3340 Label* if_true = NULL;
3341 Label* if_false = NULL;
3342 Label* fall_through = NULL;
3343 context()->PrepareTest(&materialize_true, &materialize_false,
3344 &if_true, &if_false, &fall_through);
3345
3346 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3347 __ SmiTst(v0, a4);
3348 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
3349
3350 context()->Plug(if_true, if_false);
3351 }
3352
3353
3354 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3355 ZoneList<Expression*>* args = expr->arguments();
3356 DCHECK(args->length() == 1);
3357
3358 VisitForAccumulatorValue(args->at(0));
3359
3360 Label materialize_true, materialize_false;
3361 Label* if_true = NULL;
3362 Label* if_false = NULL;
3363 Label* fall_through = NULL;
3364 context()->PrepareTest(&materialize_true, &materialize_false,
3365 &if_true, &if_false, &fall_through);
3366
3367 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3368 __ NonNegativeSmiTst(v0, at);
3369 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3370
3371 context()->Plug(if_true, if_false);
3372 }
3373
3374
3375 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3376 ZoneList<Expression*>* args = expr->arguments();
3377 DCHECK(args->length() == 1);
3378
3379 VisitForAccumulatorValue(args->at(0));
3380
3381 Label materialize_true, materialize_false;
3382 Label* if_true = NULL;
3383 Label* if_false = NULL;
3384 Label* fall_through = NULL;
3385 context()->PrepareTest(&materialize_true, &materialize_false,
3386 &if_true, &if_false, &fall_through);
3387
3388 __ JumpIfSmi(v0, if_false);
3389 __ LoadRoot(at, Heap::kNullValueRootIndex);
3390 __ Branch(if_true, eq, v0, Operand(at));
3391 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3392 // Undetectable objects behave like undefined when tested with typeof.
3393 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3394 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3395 __ Branch(if_false, ne, at, Operand(zero_reg));
3396 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3397 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3398 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3399 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3400 if_true, if_false, fall_through);
3401
3402 context()->Plug(if_true, if_false);
3403 }
3404
3405
3406 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3407 ZoneList<Expression*>* args = expr->arguments();
3408 DCHECK(args->length() == 1);
3409
3410 VisitForAccumulatorValue(args->at(0));
3411
3412 Label materialize_true, materialize_false;
3413 Label* if_true = NULL;
3414 Label* if_false = NULL;
3415 Label* fall_through = NULL;
3416 context()->PrepareTest(&materialize_true, &materialize_false,
3417 &if_true, &if_false, &fall_through);
3418
3419 __ JumpIfSmi(v0, if_false);
3420 __ GetObjectType(v0, a1, a1);
3421 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3422 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3423 if_true, if_false, fall_through);
3424
3425 context()->Plug(if_true, if_false);
3426 }
3427
3428
3429 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3430 ZoneList<Expression*>* args = expr->arguments();
3431 DCHECK(args->length() == 1);
3432
3433 VisitForAccumulatorValue(args->at(0));
3434
3435 Label materialize_true, materialize_false;
3436 Label* if_true = NULL;
3437 Label* if_false = NULL;
3438 Label* fall_through = NULL;
3439 context()->PrepareTest(&materialize_true, &materialize_false,
3440 &if_true, &if_false, &fall_through);
3441
3442 __ JumpIfSmi(v0, if_false);
3443 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3444 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3445 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3446 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3447 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3448
3449 context()->Plug(if_true, if_false);
3450 }
3451
3452
3453 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3454 CallRuntime* expr) {
3455 ZoneList<Expression*>* args = expr->arguments();
3456 DCHECK(args->length() == 1);
3457
3458 VisitForAccumulatorValue(args->at(0));
3459
3460 Label materialize_true, materialize_false, skip_lookup;
3461 Label* if_true = NULL;
3462 Label* if_false = NULL;
3463 Label* fall_through = NULL;
3464 context()->PrepareTest(&materialize_true, &materialize_false,
3465 &if_true, &if_false, &fall_through);
3466
3467 __ AssertNotSmi(v0);
3468
3469 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3470 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3471 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3472 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3473
3474 // Check for fast case object. Generate false result for slow case object.
3475 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3476 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3477 __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3478 __ Branch(if_false, eq, a2, Operand(a4));
3479
3480 // Look for valueOf name in the descriptor array, and indicate false if
3481 // found. Since we omit an enumeration index check, if it is added via a
3482 // transition that shares its descriptor array, this is a false positive.
3483 Label entry, loop, done;
3484
3485 // Skip loop if no descriptors are valid.
3486 __ NumberOfOwnDescriptors(a3, a1);
3487 __ Branch(&done, eq, a3, Operand(zero_reg));
3488
3489 __ LoadInstanceDescriptors(a1, a4);
3490 // a4: descriptor array.
3491 // a3: valid entries in the descriptor array.
3492 STATIC_ASSERT(kSmiTag == 0);
3493 STATIC_ASSERT(kSmiTagSize == 1);
3494   // Not needed on MIPS64, where kPointerSize is 8:
3495   // STATIC_ASSERT(kPointerSize == 4);
3496 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3497 __ Dmul(a3, a3, at);
3498 // Calculate location of the first key name.
3499 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3500 // Calculate the end of the descriptor array.
3501 __ mov(a2, a4);
3502 __ dsll(a5, a3, kPointerSizeLog2);
3503 __ Daddu(a2, a2, a5);
3504
3505 // Loop through all the keys in the descriptor array. If one of these is the
3506 // string "valueOf" the result is false.
3507 // The use of a6 to store the valueOf string assumes that it is not otherwise
3508 // used in the loop below.
3509 __ li(a6, Operand(isolate()->factory()->value_of_string()));
3510 __ jmp(&entry);
3511 __ bind(&loop);
3512 __ ld(a3, MemOperand(a4, 0));
3513 __ Branch(if_false, eq, a3, Operand(a6));
3514 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3515 __ bind(&entry);
3516 __ Branch(&loop, ne, a4, Operand(a2));
3517
3518 __ bind(&done);
3519
3520 // Set the bit in the map to indicate that there is no local valueOf field.
3521 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3522 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3523 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3524
3525 __ bind(&skip_lookup);
3526
3527   // If a valueOf property is not found on the object, check that its
3528   // prototype is the unmodified String prototype. If not, the result is false.
3529 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3530 __ JumpIfSmi(a2, if_false);
3531 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3532 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3533 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3534 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3535 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3536 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3537
3538 context()->Plug(if_true, if_false);
3539 }
3540
3541
3542 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3543 ZoneList<Expression*>* args = expr->arguments();
3544 DCHECK(args->length() == 1);
3545
3546 VisitForAccumulatorValue(args->at(0));
3547
3548 Label materialize_true, materialize_false;
3549 Label* if_true = NULL;
3550 Label* if_false = NULL;
3551 Label* fall_through = NULL;
3552 context()->PrepareTest(&materialize_true, &materialize_false,
3553 &if_true, &if_false, &fall_through);
3554
3555 __ JumpIfSmi(v0, if_false);
3556 __ GetObjectType(v0, a1, a2);
3557 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3558 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3559 __ Branch(if_false);
3560
3561 context()->Plug(if_true, if_false);
3562 }
3563
3564
3565 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3566 ZoneList<Expression*>* args = expr->arguments();
3567 DCHECK(args->length() == 1);
3568
3569 VisitForAccumulatorValue(args->at(0));
3570
3571 Label materialize_true, materialize_false;
3572 Label* if_true = NULL;
3573 Label* if_false = NULL;
3574 Label* fall_through = NULL;
3575 context()->PrepareTest(&materialize_true, &materialize_false,
3576 &if_true, &if_false, &fall_through);
3577
3578 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3579 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3580 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3581 __ li(a4, 0x80000000);
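  // -0.0 has the sign/exponent word equal to 0x80000000 and a zero mantissa
  // word, so the value is minus zero iff both comparisons below succeed.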
3582 Label not_nan;
3583 __ Branch(&not_nan, ne, a2, Operand(a4));
3584 __ mov(a4, zero_reg);
3585 __ mov(a2, a1);
3586 __ bind(&not_nan);
3587
3588 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3589 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3590
3591 context()->Plug(if_true, if_false);
3592 }
3593
3594
3595 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3596 ZoneList<Expression*>* args = expr->arguments();
3597 DCHECK(args->length() == 1);
3598
3599 VisitForAccumulatorValue(args->at(0));
3600
3601 Label materialize_true, materialize_false;
3602 Label* if_true = NULL;
3603 Label* if_false = NULL;
3604 Label* fall_through = NULL;
3605 context()->PrepareTest(&materialize_true, &materialize_false,
3606 &if_true, &if_false, &fall_through);
3607
3608 __ JumpIfSmi(v0, if_false);
3609 __ GetObjectType(v0, a1, a1);
3610 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3611 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3612 if_true, if_false, fall_through);
3613
3614 context()->Plug(if_true, if_false);
3615 }
3616
3617
3618 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3619 ZoneList<Expression*>* args = expr->arguments();
3620 DCHECK(args->length() == 1);
3621
3622 VisitForAccumulatorValue(args->at(0));
3623
3624 Label materialize_true, materialize_false;
3625 Label* if_true = NULL;
3626 Label* if_false = NULL;
3627 Label* fall_through = NULL;
3628 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3629 &if_false, &fall_through);
3630
3631 __ JumpIfSmi(v0, if_false);
3632 __ GetObjectType(v0, a1, a1);
3633 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3634 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
3635
3636 context()->Plug(if_true, if_false);
3637 }
3638
3639
3640 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3641 ZoneList<Expression*>* args = expr->arguments();
3642 DCHECK(args->length() == 1);
3643
3644 VisitForAccumulatorValue(args->at(0));
3645
3646 Label materialize_true, materialize_false;
3647 Label* if_true = NULL;
3648 Label* if_false = NULL;
3649 Label* fall_through = NULL;
3650 context()->PrepareTest(&materialize_true, &materialize_false,
3651 &if_true, &if_false, &fall_through);
3652
3653 __ JumpIfSmi(v0, if_false);
3654 __ GetObjectType(v0, a1, a1);
3655 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3656 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3657
3658 context()->Plug(if_true, if_false);
3659 }
3660
3661
3662 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3663 ZoneList<Expression*>* args = expr->arguments();
3664 DCHECK(args->length() == 1);
3665
3666 VisitForAccumulatorValue(args->at(0));
3667
3668 Label materialize_true, materialize_false;
3669 Label* if_true = NULL;
3670 Label* if_false = NULL;
3671 Label* fall_through = NULL;
3672 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3673 &if_false, &fall_through);
3674
3675 __ JumpIfSmi(v0, if_false);
3676 Register map = a1;
3677 Register type_reg = a2;
3678 __ GetObjectType(v0, map, type_reg);
3679 __ Subu(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3680 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3681 Split(ls, type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE),
3682 if_true, if_false, fall_through);
3683
3684 context()->Plug(if_true, if_false);
3685 }
3686
3687
3688 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3689 DCHECK(expr->arguments()->length() == 0);
3690
3691 Label materialize_true, materialize_false;
3692 Label* if_true = NULL;
3693 Label* if_false = NULL;
3694 Label* fall_through = NULL;
3695 context()->PrepareTest(&materialize_true, &materialize_false,
3696 &if_true, &if_false, &fall_through);
3697
3698 // Get the frame pointer for the calling frame.
3699 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3700
3701 // Skip the arguments adaptor frame if it exists.
3702 Label check_frame_marker;
3703 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3704 __ Branch(&check_frame_marker, ne,
3705 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3706 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3707
3708 // Check the marker in the calling frame.
3709 __ bind(&check_frame_marker);
3710 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3711 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3712 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3713 if_true, if_false, fall_through);
3714
3715 context()->Plug(if_true, if_false);
3716 }
3717
3718
3719 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3720 ZoneList<Expression*>* args = expr->arguments();
3721 DCHECK(args->length() == 2);
3722
3723 // Load the two objects into registers and perform the comparison.
3724 VisitForStackValue(args->at(0));
3725 VisitForAccumulatorValue(args->at(1));
3726
3727 Label materialize_true, materialize_false;
3728 Label* if_true = NULL;
3729 Label* if_false = NULL;
3730 Label* fall_through = NULL;
3731 context()->PrepareTest(&materialize_true, &materialize_false,
3732 &if_true, &if_false, &fall_through);
3733
3734 __ pop(a1);
3735 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3736 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3737
3738 context()->Plug(if_true, if_false);
3739 }
3740
3741
3742 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3743 ZoneList<Expression*>* args = expr->arguments();
3744 DCHECK(args->length() == 1);
3745
3746 // ArgumentsAccessStub expects the key in a1 and the formal
3747 // parameter count in a0.
3748 VisitForAccumulatorValue(args->at(0));
3749 __ mov(a1, v0);
3750 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3751 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3752 __ CallStub(&stub);
3753 context()->Plug(v0);
3754 }
3755
3756
3757 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3758 DCHECK(expr->arguments()->length() == 0);
3759 Label exit;
3760 // Get the number of formal parameters.
3761 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3762
3763 // Check if the calling frame is an arguments adaptor frame.
3764 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3765 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3766 __ Branch(&exit, ne, a3,
3767 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3768
3769 // Arguments adaptor case: Read the arguments length from the
3770 // adaptor frame.
3771 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3772
3773 __ bind(&exit);
3774 context()->Plug(v0);
3775 }
3776
3777
3778 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3779 ZoneList<Expression*>* args = expr->arguments();
3780 DCHECK(args->length() == 1);
3781 Label done, null, function, non_function_constructor;
3782
3783 VisitForAccumulatorValue(args->at(0));
3784
3785 // If the object is a smi, we return null.
3786 __ JumpIfSmi(v0, &null);
3787
3788 // Check that the object is a JS object but take special care of JS
3789 // functions to make sure they have 'Function' as their class.
3790 // Assume that there are only two callable types, and one of them is at
3791 // either end of the type range for JS object types. Saves extra comparisons.
3792 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3793 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3794 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3795
3796 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3797 FIRST_SPEC_OBJECT_TYPE + 1);
3798 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3799
3800 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3801 LAST_SPEC_OBJECT_TYPE - 1);
3802 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3803 // Assume that there is no larger type.
3804 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3805
3806 // Check if the constructor in the map is a JS function.
3807 Register instance_type = a2;
3808 __ GetMapConstructor(v0, v0, a1, instance_type);
3809 __ Branch(&non_function_constructor, ne, instance_type,
3810 Operand(JS_FUNCTION_TYPE));
3811
3812 // v0 now contains the constructor function. Grab the
3813 // instance class name from there.
3814 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3815 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3816 __ Branch(&done);
3817
3818 // Functions have class 'Function'.
3819 __ bind(&function);
3820 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3821 __ jmp(&done);
3822
3823 // Objects with a non-function constructor have class 'Object'.
3824 __ bind(&non_function_constructor);
3825 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3826 __ jmp(&done);
3827
3828 // Non-JS objects have class null.
3829 __ bind(&null);
3830 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3831
3832 // All done.
3833 __ bind(&done);
3834
3835 context()->Plug(v0);
3836 }
3837
3838
3839 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3840 ZoneList<Expression*>* args = expr->arguments();
3841 DCHECK(args->length() == 1);
3842
3843 VisitForAccumulatorValue(args->at(0)); // Load the object.
3844
3845 Label done;
3846   // If the object is a smi, return the object.
3847 __ JumpIfSmi(v0, &done);
3848 // If the object is not a value type, return the object.
3849 __ GetObjectType(v0, a1, a1);
3850 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3851
3852 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3853
3854 __ bind(&done);
3855 context()->Plug(v0);
3856 }
3857
3858
3859 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3860 ZoneList<Expression*>* args = expr->arguments();
3861 DCHECK_EQ(1, args->length());
3862
3863 VisitForAccumulatorValue(args->at(0));
3864
3865 Label materialize_true, materialize_false;
3866 Label* if_true = nullptr;
3867 Label* if_false = nullptr;
3868 Label* fall_through = nullptr;
3869 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3870 &if_false, &fall_through);
3871
3872 __ JumpIfSmi(v0, if_false);
3873 __ GetObjectType(v0, a1, a1);
3874 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3875 Split(eq, a1, Operand(JS_DATE_TYPE), if_true, if_false, fall_through);
3876
3877 context()->Plug(if_true, if_false);
3878 }
3879
3880
3881 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3882 ZoneList<Expression*>* args = expr->arguments();
3883 DCHECK(args->length() == 2);
3884 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3885 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3886
3887 VisitForAccumulatorValue(args->at(0)); // Load the object.
3888
3889 Register object = v0;
3890 Register result = v0;
3891 Register scratch0 = t1;
3892 Register scratch1 = a1;
3893
3894 if (index->value() == 0) {
3895 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3896 } else {
3897 Label runtime, done;
3898 if (index->value() < JSDate::kFirstUncachedField) {
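      // Cached fields are only valid while the cache stamp stored on the JSDate
      // object matches the isolate's current date cache stamp; otherwise fall
      // through to the runtime helper below.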
3899 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3900 __ li(scratch1, Operand(stamp));
3901 __ ld(scratch1, MemOperand(scratch1));
3902 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3903 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3904 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
3905 kPointerSize * index->value()));
3906 __ jmp(&done);
3907 }
3908 __ bind(&runtime);
3909 __ PrepareCallCFunction(2, scratch1);
3910 __ li(a1, Operand(index));
3911 __ Move(a0, object);
3912 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3913 __ bind(&done);
3914 }
3915
3916 context()->Plug(result);
3917 }
3918
3919
3920 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3921 ZoneList<Expression*>* args = expr->arguments();
3922 DCHECK_EQ(3, args->length());
3923
3924 Register string = v0;
3925 Register index = a1;
3926 Register value = a2;
3927
3928 VisitForStackValue(args->at(0)); // index
3929 VisitForStackValue(args->at(1)); // value
3930 VisitForAccumulatorValue(args->at(2)); // string
3931 __ Pop(index, value);
3932
3933 if (FLAG_debug_code) {
3934 __ SmiTst(value, at);
3935 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3936 __ SmiTst(index, at);
3937 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3938 __ SmiUntag(index, index);
3939 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3940 Register scratch = t1;
3941 __ EmitSeqStringSetCharCheck(
3942 string, index, value, scratch, one_byte_seq_type);
3943 __ SmiTag(index, index);
3944 }
3945
3946 __ SmiUntag(value, value);
3947 __ Daddu(at,
3948 string,
3949 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3950 __ SmiUntag(index);
3951 __ Daddu(at, at, index);
3952 __ sb(value, MemOperand(at));
3953 context()->Plug(string);
3954 }
3955
3956
3957 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3958 ZoneList<Expression*>* args = expr->arguments();
3959 DCHECK_EQ(3, args->length());
3960
3961 Register string = v0;
3962 Register index = a1;
3963 Register value = a2;
3964
3965 VisitForStackValue(args->at(0)); // index
3966 VisitForStackValue(args->at(1)); // value
3967 VisitForAccumulatorValue(args->at(2)); // string
3968 __ Pop(index, value);
3969
3970 if (FLAG_debug_code) {
3971 __ SmiTst(value, at);
3972 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3973 __ SmiTst(index, at);
3974 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3975 __ SmiUntag(index, index);
3976 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3977 Register scratch = t1;
3978 __ EmitSeqStringSetCharCheck(
3979 string, index, value, scratch, two_byte_seq_type);
3980 __ SmiTag(index, index);
3981 }
3982
3983 __ SmiUntag(value, value);
3984 __ Daddu(at,
3985 string,
3986 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
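  // On MIPS64 smi values live in the upper 32 bits, so shifting right by 31
  // both untags the index and scales it by the two-byte character size.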
3987 __ dsra(index, index, 32 - 1);
3988 __ Daddu(at, at, index);
3989 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3990 __ sh(value, MemOperand(at));
3991 context()->Plug(string);
3992 }
3993
3994
3995 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3996 ZoneList<Expression*>* args = expr->arguments();
3997 DCHECK(args->length() == 2);
3998
3999 VisitForStackValue(args->at(0)); // Load the object.
4000 VisitForAccumulatorValue(args->at(1)); // Load the value.
4001 __ pop(a1); // v0 = value. a1 = object.
4002
4003 Label done;
4004 // If the object is a smi, return the value.
4005 __ JumpIfSmi(a1, &done);
4006
4007 // If the object is not a value type, return the value.
4008 __ GetObjectType(a1, a2, a2);
4009 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
4010
4011 // Store the value.
4012 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
4013 // Update the write barrier. Save the value as it will be
4014 // overwritten by the write barrier code and is needed afterward.
4015 __ mov(a2, v0);
4016 __ RecordWriteField(
4017 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
4018
4019 __ bind(&done);
4020 context()->Plug(v0);
4021 }
4022
4023
4024 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
4025 ZoneList<Expression*>* args = expr->arguments();
4026 DCHECK_EQ(args->length(), 1);
4027
4028 // Load the argument into a0 and call the stub.
4029 VisitForAccumulatorValue(args->at(0));
4030 __ mov(a0, result_register());
4031
4032 NumberToStringStub stub(isolate());
4033 __ CallStub(&stub);
4034 context()->Plug(v0);
4035 }
4036
4037
4038 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
4039 ZoneList<Expression*>* args = expr->arguments();
4040 DCHECK(args->length() == 1);
4041
4042 VisitForAccumulatorValue(args->at(0));
4043
4044 Label done;
4045 StringCharFromCodeGenerator generator(v0, a1);
4046 generator.GenerateFast(masm_);
4047 __ jmp(&done);
4048
4049 NopRuntimeCallHelper call_helper;
4050 generator.GenerateSlow(masm_, call_helper);
4051
4052 __ bind(&done);
4053 context()->Plug(a1);
4054 }
4055
4056
4057 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
4058 ZoneList<Expression*>* args = expr->arguments();
4059 DCHECK(args->length() == 2);
4060
4061 VisitForStackValue(args->at(0));
4062 VisitForAccumulatorValue(args->at(1));
4063 __ mov(a0, result_register());
4064
4065 Register object = a1;
4066 Register index = a0;
4067 Register result = v0;
4068
4069 __ pop(object);
4070
4071 Label need_conversion;
4072 Label index_out_of_range;
4073 Label done;
4074 StringCharCodeAtGenerator generator(object,
4075 index,
4076 result,
4077 &need_conversion,
4078 &need_conversion,
4079 &index_out_of_range,
4080 STRING_INDEX_IS_NUMBER);
4081 generator.GenerateFast(masm_);
4082 __ jmp(&done);
4083
4084 __ bind(&index_out_of_range);
4085 // When the index is out of range, the spec requires us to return
4086 // NaN.
4087 __ LoadRoot(result, Heap::kNanValueRootIndex);
4088 __ jmp(&done);
4089
4090 __ bind(&need_conversion);
4091 // Load the undefined value into the result register, which will
4092 // trigger conversion.
4093 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4094 __ jmp(&done);
4095
4096 NopRuntimeCallHelper call_helper;
4097 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4098
4099 __ bind(&done);
4100 context()->Plug(result);
4101 }
4102
4103
4104 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
4105 ZoneList<Expression*>* args = expr->arguments();
4106 DCHECK(args->length() == 2);
4107
4108 VisitForStackValue(args->at(0));
4109 VisitForAccumulatorValue(args->at(1));
4110 __ mov(a0, result_register());
4111
4112 Register object = a1;
4113 Register index = a0;
4114 Register scratch = a3;
4115 Register result = v0;
4116
4117 __ pop(object);
4118
4119 Label need_conversion;
4120 Label index_out_of_range;
4121 Label done;
4122 StringCharAtGenerator generator(object,
4123 index,
4124 scratch,
4125 result,
4126 &need_conversion,
4127 &need_conversion,
4128 &index_out_of_range,
4129 STRING_INDEX_IS_NUMBER);
4130 generator.GenerateFast(masm_);
4131 __ jmp(&done);
4132
4133 __ bind(&index_out_of_range);
4134 // When the index is out of range, the spec requires us to return
4135 // the empty string.
4136 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4137 __ jmp(&done);
4138
4139 __ bind(&need_conversion);
4140 // Move smi zero into the result register, which will trigger
4141 // conversion.
4142 __ li(result, Operand(Smi::FromInt(0)));
4143 __ jmp(&done);
4144
4145 NopRuntimeCallHelper call_helper;
4146 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4147
4148 __ bind(&done);
4149 context()->Plug(result);
4150 }
4151
4152
4153 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4154 ZoneList<Expression*>* args = expr->arguments();
4155 DCHECK_EQ(2, args->length());
4156 VisitForStackValue(args->at(0));
4157 VisitForAccumulatorValue(args->at(1));
4158
4159 __ pop(a1);
4160 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
4161 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4162 __ CallStub(&stub);
4163 context()->Plug(v0);
4164 }
4165
4166
4167 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4168 ZoneList<Expression*>* args = expr->arguments();
4169 DCHECK(args->length() >= 2);
4170
4171 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4172 for (int i = 0; i < arg_count + 1; i++) {
4173 VisitForStackValue(args->at(i));
4174 }
4175 VisitForAccumulatorValue(args->last()); // Function.
4176
4177 Label runtime, done;
4178 // Check for non-function argument (including proxy).
4179 __ JumpIfSmi(v0, &runtime);
4180 __ GetObjectType(v0, a1, a1);
4181 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
4182
4183 // InvokeFunction requires the function in a1. Move it in there.
4184 __ mov(a1, result_register());
4185 ParameterCount count(arg_count);
4186 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
4187 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4188 __ jmp(&done);
4189
4190 __ bind(&runtime);
4191 __ push(v0);
4192 __ CallRuntime(Runtime::kCall, args->length());
4193 __ bind(&done);
4194
4195 context()->Plug(v0);
4196 }
4197
4198
4199 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4200 ZoneList<Expression*>* args = expr->arguments();
4201 DCHECK(args->length() == 2);
4202
4203 // new.target
4204 VisitForStackValue(args->at(0));
4205
4206 // .this_function
4207 VisitForStackValue(args->at(1));
4208 __ CallRuntime(Runtime::kGetPrototype, 1);
4209 __ Push(result_register());
4210
4211 // Load original constructor into a4.
4212 __ ld(a4, MemOperand(sp, 1 * kPointerSize));
4213
4214 // Check if the calling frame is an arguments adaptor frame.
4215 Label adaptor_frame, args_set_up, runtime;
4216 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4217 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
4218 __ Branch(&adaptor_frame, eq, a3,
4219 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4220   // The default constructor has no arguments, so no adaptor frame means no args.
4221 __ mov(a0, zero_reg);
4222 __ Branch(&args_set_up);
4223
4224 // Copy arguments from adaptor frame.
4225 {
4226 __ bind(&adaptor_frame);
4227 __ ld(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
4228 __ SmiUntag(a1, a1);
4229
4230 __ mov(a0, a1);
4231
4232 // Get arguments pointer in a2.
4233 __ dsll(at, a1, kPointerSizeLog2);
4234 __ Daddu(a2, a2, Operand(at));
4235 __ Daddu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset));
4236 Label loop;
4237 __ bind(&loop);
4238 // Pre-decrement a2 with kPointerSize on each iteration.
4239 // Pre-decrement in order to skip receiver.
4240 __ Daddu(a2, a2, Operand(-kPointerSize));
4241 __ ld(a3, MemOperand(a2));
4242 __ Push(a3);
4243 __ Daddu(a1, a1, Operand(-1));
4244 __ Branch(&loop, ne, a1, Operand(zero_reg));
4245 }
4246
4247 __ bind(&args_set_up);
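  // The super constructor (the prototype of .this_function, pushed above) sits
  // just below the copied arguments; load it into a1 as the function to call.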
4248 __ dsll(at, a0, kPointerSizeLog2);
4249 __ Daddu(at, at, Operand(sp));
4250 __ ld(a1, MemOperand(at, 0));
4251 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4252
4253 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4254 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4255
4256 __ Drop(1);
4257
4258 context()->Plug(result_register());
4259 }
4260
4261
4262 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4263 RegExpConstructResultStub stub(isolate());
4264 ZoneList<Expression*>* args = expr->arguments();
4265 DCHECK(args->length() == 3);
4266 VisitForStackValue(args->at(0));
4267 VisitForStackValue(args->at(1));
4268 VisitForAccumulatorValue(args->at(2));
4269 __ mov(a0, result_register());
4270 __ pop(a1);
4271 __ pop(a2);
4272 __ CallStub(&stub);
4273 context()->Plug(v0);
4274 }
4275
4276
4277 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4278 ZoneList<Expression*>* args = expr->arguments();
4279 DCHECK_EQ(2, args->length());
4280
4281 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4282 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4283
4284 Handle<FixedArray> jsfunction_result_caches(
4285 isolate()->native_context()->jsfunction_result_caches());
4286 if (jsfunction_result_caches->length() <= cache_id) {
4287 __ Abort(kAttemptToUseUndefinedCache);
4288 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4289 context()->Plug(v0);
4290 return;
4291 }
4292
4293 VisitForAccumulatorValue(args->at(1));
4294
4295 Register key = v0;
4296 Register cache = a1;
4297 __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
4298 __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
4299 __ ld(cache,
4300 ContextOperand(
4301 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4302 __ ld(cache,
4303 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4304
4305
4306 Label done, not_found;
4307 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4308 __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
4309 // a2 now holds finger offset as a smi.
4310 __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4311 // a3 now points to the start of fixed array elements.
4312 __ SmiScale(at, a2, kPointerSizeLog2);
4313 __ daddu(a3, a3, at);
4314 // a3 now points to key of indexed element of cache.
4315 __ ld(a2, MemOperand(a3));
4316 __ Branch(&not_found, ne, key, Operand(a2));
4317
4318 __ ld(v0, MemOperand(a3, kPointerSize));
4319 __ Branch(&done);
4320
4321 __ bind(&not_found);
4322 // Call runtime to perform the lookup.
4323 __ Push(cache, key);
4324 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4325
4326 __ bind(&done);
4327 context()->Plug(v0);
4328 }
4329
4330
4331 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4332 ZoneList<Expression*>* args = expr->arguments();
4333 VisitForAccumulatorValue(args->at(0));
4334
4335 Label materialize_true, materialize_false;
4336 Label* if_true = NULL;
4337 Label* if_false = NULL;
4338 Label* fall_through = NULL;
4339 context()->PrepareTest(&materialize_true, &materialize_false,
4340 &if_true, &if_false, &fall_through);
4341
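  // The hash field holds a cached array index iff none of the bits in
  // String::kContainsCachedArrayIndexMask are set.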
4342 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
4343 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
4344
4345 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4346 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
4347
4348 context()->Plug(if_true, if_false);
4349 }
4350
4351
4352 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4353 ZoneList<Expression*>* args = expr->arguments();
4354 DCHECK(args->length() == 1);
4355 VisitForAccumulatorValue(args->at(0));
4356
4357 __ AssertString(v0);
4358
4359 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
4360 __ IndexFromHash(v0, v0);
4361
4362 context()->Plug(v0);
4363 }
4364
4365
4366 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4367 Label bailout, done, one_char_separator, long_separator,
4368 non_trivial_array, not_size_one_array, loop,
4369 empty_separator_loop, one_char_separator_loop,
4370 one_char_separator_loop_entry, long_separator_loop;
4371 ZoneList<Expression*>* args = expr->arguments();
4372 DCHECK(args->length() == 2);
4373 VisitForStackValue(args->at(1));
4374 VisitForAccumulatorValue(args->at(0));
4375
4376 // All aliases of the same register have disjoint lifetimes.
4377 Register array = v0;
4378 Register elements = no_reg; // Will be v0.
4379 Register result = no_reg; // Will be v0.
4380 Register separator = a1;
4381 Register array_length = a2;
4382 Register result_pos = no_reg; // Will be a2.
4383 Register string_length = a3;
4384 Register string = a4;
4385 Register element = a5;
4386 Register elements_end = a6;
4387 Register scratch1 = a7;
4388 Register scratch2 = t1;
4389 Register scratch3 = t0;
4390
4391 // Separator operand is on the stack.
4392 __ pop(separator);
4393
4394 // Check that the array is a JSArray.
4395 __ JumpIfSmi(array, &bailout);
4396 __ GetObjectType(array, scratch1, scratch2);
4397 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
4398
4399 // Check that the array has fast elements.
4400 __ CheckFastElements(scratch1, scratch2, &bailout);
4401
4402 // If the array has length zero, return the empty string.
4403 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4404 __ SmiUntag(array_length);
4405 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4406 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4407 __ Branch(&done);
4408
4409 __ bind(&non_trivial_array);
4410
4411 // Get the FixedArray containing array's elements.
4412 elements = array;
4413 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4414 array = no_reg; // End of array's live range.
4415
4416 // Check that all array elements are sequential one-byte strings, and
4417 // accumulate the sum of their lengths, as a smi-encoded value.
4418 __ mov(string_length, zero_reg);
4419 __ Daddu(element,
4420 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4421 __ dsll(elements_end, array_length, kPointerSizeLog2);
4422 __ Daddu(elements_end, element, elements_end);
4423 // Loop condition: while (element < elements_end).
4424 // Live values in registers:
4425 // elements: Fixed array of strings.
4426 // array_length: Length of the fixed array of strings (not smi)
4427 // separator: Separator string
4428 // string_length: Accumulated sum of string lengths (smi).
4429 // element: Current array element.
4430 // elements_end: Array end.
4431 if (generate_debug_code_) {
4432 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
4433 Operand(zero_reg));
4434 }
4435 __ bind(&loop);
4436 __ ld(string, MemOperand(element));
4437 __ Daddu(element, element, kPointerSize);
4438 __ JumpIfSmi(string, &bailout);
4439 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4440 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4441 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4442 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4443 __ DadduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4444 __ BranchOnOverflow(&bailout, scratch3);
4445 __ Branch(&loop, lt, element, Operand(elements_end));
4446
4447 // If array_length is 1, return elements[0], a string.
4448 __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4449 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4450 __ Branch(&done);
4451
4452 __ bind(&not_size_one_array);
4453
4454 // Live values in registers:
4455 // separator: Separator string
4456 // array_length: Length of the array.
4457 // string_length: Sum of string lengths (smi).
4458 // elements: FixedArray of strings.
4459
4460 // Check that the separator is a flat one-byte string.
4461 __ JumpIfSmi(separator, &bailout);
4462 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4463 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4464 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4465
4466 // Add (separator length times array_length) - separator length to the
4467 // string_length to get the length of the result string. array_length is not
4468 // smi but the other values are, so the result is a smi.
4469 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4470 __ Dsubu(string_length, string_length, Operand(scratch1));
4471 __ SmiUntag(scratch1);
4472 __ Dmul(scratch2, array_length, scratch1);
4473 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4474 // zero.
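  // dsra32 with a shift amount of 0 arithmetically shifts right by 32, i.e. it
  // extracts the upper word of the 64-bit product into scratch1, which is
  // tested below.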
4475 __ dsra32(scratch1, scratch2, 0);
4476   __ Branch(&bailout, ne, scratch1, Operand(zero_reg));
4477 __ SmiUntag(string_length);
4478 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4479 __ BranchOnOverflow(&bailout, scratch3);
4480
4481 // Get first element in the array to free up the elements register to be used
4482 // for the result.
4483 __ Daddu(element,
4484 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4485 result = elements; // End of live range for elements.
4486 elements = no_reg;
4487 // Live values in registers:
4488 // element: First array element
4489 // separator: Separator string
4490 // string_length: Length of result string (not smi)
4491 // array_length: Length of the array.
4492 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4493 elements_end, &bailout);
4494 // Prepare for looping. Set up elements_end to end of the array. Set
4495 // result_pos to the position of the result where to write the first
4496 // character.
4497 __ dsll(elements_end, array_length, kPointerSizeLog2);
4498 __ Daddu(elements_end, element, elements_end);
4499 result_pos = array_length; // End of live range for array_length.
4500 array_length = no_reg;
4501 __ Daddu(result_pos,
4502 result,
4503 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4504
4505 // Check the length of the separator.
4506 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4507 __ li(at, Operand(Smi::FromInt(1)));
4508 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4509 __ Branch(&long_separator, gt, scratch1, Operand(at));
4510
4511 // Empty separator case.
4512 __ bind(&empty_separator_loop);
4513 // Live values in registers:
4514 // result_pos: the position to which we are currently copying characters.
4515 // element: Current array element.
4516 // elements_end: Array end.
4517
4518 // Copy next array element to the result.
4519 __ ld(string, MemOperand(element));
4520 __ Daddu(element, element, kPointerSize);
4521 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4522 __ SmiUntag(string_length);
4523 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4524 __ CopyBytes(string, result_pos, string_length, scratch1);
4525 // End while (element < elements_end).
4526 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4527 DCHECK(result.is(v0));
4528 __ Branch(&done);
4529
4530 // One-character separator case.
4531 __ bind(&one_char_separator);
4532 // Replace separator with its one-byte character value.
4533 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4534 // Jump into the loop after the code that copies the separator, so the first
4535 // element is not preceded by a separator.
4536 __ jmp(&one_char_separator_loop_entry);
4537
4538 __ bind(&one_char_separator_loop);
4539 // Live values in registers:
4540 // result_pos: the position to which we are currently copying characters.
4541 // element: Current array element.
4542 // elements_end: Array end.
4543 // separator: Single separator one-byte char (in lower byte).
4544
4545 // Copy the separator character to the result.
4546 __ sb(separator, MemOperand(result_pos));
4547 __ Daddu(result_pos, result_pos, 1);
4548
4549 // Copy next array element to the result.
4550 __ bind(&one_char_separator_loop_entry);
4551 __ ld(string, MemOperand(element));
4552 __ Daddu(element, element, kPointerSize);
4553 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4554 __ SmiUntag(string_length);
4555 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4556 __ CopyBytes(string, result_pos, string_length, scratch1);
4557 // End while (element < elements_end).
4558 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4559 DCHECK(result.is(v0));
4560 __ Branch(&done);
4561
4562 // Long separator case (separator is more than one character). Entry is at the
4563 // label long_separator below.
4564 __ bind(&long_separator_loop);
4565 // Live values in registers:
4566 // result_pos: the position to which we are currently copying characters.
4567 // element: Current array element.
4568 // elements_end: Array end.
4569 // separator: Separator string.
4570
4571 // Copy the separator to the result.
4572 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4573 __ SmiUntag(string_length);
4574 __ Daddu(string,
4575 separator,
4576 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4577 __ CopyBytes(string, result_pos, string_length, scratch1);
4578
4579 __ bind(&long_separator);
4580 __ ld(string, MemOperand(element));
4581 __ Daddu(element, element, kPointerSize);
4582 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4583 __ SmiUntag(string_length);
4584 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4585 __ CopyBytes(string, result_pos, string_length, scratch1);
4586 // End while (element < elements_end).
4587 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4588 DCHECK(result.is(v0));
4589 __ Branch(&done);
4590
4591 __ bind(&bailout);
4592 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4593 __ bind(&done);
4594 context()->Plug(v0);
4595 }
4596
4597
4598 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4599 DCHECK(expr->arguments()->length() == 0);
4600 ExternalReference debug_is_active =
4601 ExternalReference::debug_is_active_address(isolate());
4602 __ li(at, Operand(debug_is_active));
4603 __ lbu(v0, MemOperand(at));
4604 __ SmiTag(v0);
4605 context()->Plug(v0);
4606 }
4607
4608
4609 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4610 // Push the builtins object as the receiver.
4611 Register receiver = LoadDescriptor::ReceiverRegister();
4612 __ ld(receiver, GlobalObjectOperand());
4613 __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4614 __ push(receiver);
4615
4616 // Load the function from the receiver.
4617 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4618 __ li(LoadDescriptor::SlotRegister(),
4619 Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4620 CallLoadIC(NOT_INSIDE_TYPEOF);
4621 }
4622
4623
4624 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4625 ZoneList<Expression*>* args = expr->arguments();
4626 int arg_count = args->length();
4627
4628 SetCallPosition(expr, arg_count);
4629 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4630 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4631 __ CallStub(&stub);
4632 }
4633
4634
4635 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4636 ZoneList<Expression*>* args = expr->arguments();
4637 int arg_count = args->length();
4638
4639 if (expr->is_jsruntime()) {
4640 Comment cmnt(masm_, "[ CallRuntime");
4641 EmitLoadJSRuntimeFunction(expr);
4642
4643 // Push the target function under the receiver.
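    // That is, duplicate the receiver currently on top of the stack and then
    // overwrite the original receiver slot with the function loaded into v0.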
4644 __ ld(at, MemOperand(sp, 0));
4645 __ push(at);
4646 __ sd(v0, MemOperand(sp, kPointerSize));
4647
4648 // Push the arguments ("left-to-right").
4649 for (int i = 0; i < arg_count; i++) {
4650 VisitForStackValue(args->at(i));
4651 }
4652
4653 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4654 EmitCallJSRuntimeFunction(expr);
4655
4656 // Restore context register.
4657 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4658
4659 context()->DropAndPlug(1, v0);
4660 } else {
4661 const Runtime::Function* function = expr->function();
4662 switch (function->function_id) {
4663 #define CALL_INTRINSIC_GENERATOR(Name) \
4664 case Runtime::kInline##Name: { \
4665 Comment cmnt(masm_, "[ Inline" #Name); \
4666 return Emit##Name(expr); \
4667 }
4668 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4669 #undef CALL_INTRINSIC_GENERATOR
4670 default: {
4671 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4672 // Push the arguments ("left-to-right").
4673 for (int i = 0; i < arg_count; i++) {
4674 VisitForStackValue(args->at(i));
4675 }
4676
4677 // Call the C runtime function.
4678 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4679 __ CallRuntime(expr->function(), arg_count);
4680 context()->Plug(v0);
4681 }
4682 }
4683 }
4684 }
4685
4686
4687 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4688 switch (expr->op()) {
4689 case Token::DELETE: {
4690 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4691 Property* property = expr->expression()->AsProperty();
4692 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4693
4694 if (property != NULL) {
4695 VisitForStackValue(property->obj());
4696 VisitForStackValue(property->key());
4697 __ li(a1, Operand(Smi::FromInt(language_mode())));
4698 __ push(a1);
4699 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4700 context()->Plug(v0);
4701 } else if (proxy != NULL) {
4702 Variable* var = proxy->var();
4703 // Delete of an unqualified identifier is disallowed in strict mode but
4704 // "delete this" is allowed.
4705 bool is_this = var->HasThisName(isolate());
4706 DCHECK(is_sloppy(language_mode()) || is_this);
4707 if (var->IsUnallocatedOrGlobalSlot()) {
4708 __ ld(a2, GlobalObjectOperand());
4709 __ li(a1, Operand(var->name()));
4710 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4711 __ Push(a2, a1, a0);
4712 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4713 context()->Plug(v0);
4714 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4715 // Result of deleting non-global, non-dynamic variables is false.
4716 // The subexpression does not have side effects.
4717 context()->Plug(is_this);
4718 } else {
4719 // Non-global variable. Call the runtime to try to delete from the
4720 // context where the variable was introduced.
4721 DCHECK(!context_register().is(a2));
4722 __ li(a2, Operand(var->name()));
4723 __ Push(context_register(), a2);
4724 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4725 context()->Plug(v0);
4726 }
4727 } else {
4728 // Result of deleting non-property, non-variable reference is true.
4729 // The subexpression may have side effects.
4730 VisitForEffect(expr->expression());
4731 context()->Plug(true);
4732 }
4733 break;
4734 }
4735
4736 case Token::VOID: {
4737 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4738 VisitForEffect(expr->expression());
4739 context()->Plug(Heap::kUndefinedValueRootIndex);
4740 break;
4741 }
4742
4743 case Token::NOT: {
4744 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4745 if (context()->IsEffect()) {
4746 // Unary NOT has no side effects so it's only necessary to visit the
4747 // subexpression. Match the optimizing compiler by not branching.
4748 VisitForEffect(expr->expression());
4749 } else if (context()->IsTest()) {
4750 const TestContext* test = TestContext::cast(context());
4751 // The labels are swapped for the recursive call.
4752 VisitForControl(expr->expression(),
4753 test->false_label(),
4754 test->true_label(),
4755 test->fall_through());
4756 context()->Plug(test->true_label(), test->false_label());
4757 } else {
4758 // We handle value contexts explicitly rather than simply visiting
4759 // for control and plugging the control flow into the context,
4760 // because we need to prepare a pair of extra administrative AST ids
4761 // for the optimizing compiler.
4762 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4763 Label materialize_true, materialize_false, done;
4764 VisitForControl(expr->expression(),
4765 &materialize_false,
4766 &materialize_true,
4767 &materialize_true);
4768 __ bind(&materialize_true);
4769 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4770 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4771 if (context()->IsStackValue()) __ push(v0);
4772 __ jmp(&done);
4773 __ bind(&materialize_false);
4774 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4775 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4776 if (context()->IsStackValue()) __ push(v0);
4777 __ bind(&done);
4778 }
4779 break;
4780 }
4781
4782 case Token::TYPEOF: {
4783 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4784 {
4785 AccumulatorValueContext context(this);
4786 VisitForTypeofValue(expr->expression());
4787 }
4788 __ mov(a3, v0);
4789 TypeofStub typeof_stub(isolate());
4790 __ CallStub(&typeof_stub);
4791 context()->Plug(v0);
4792 break;
4793 }
4794
4795 default:
4796 UNREACHABLE();
4797 }
4798 }
4799
4800
4801 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4802 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4803
4804 Comment cmnt(masm_, "[ CountOperation");
4805
4806 Property* prop = expr->expression()->AsProperty();
4807 LhsKind assign_type = Property::GetAssignType(prop);
4808
4809 // Evaluate expression and get value.
4810 if (assign_type == VARIABLE) {
4811 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4812 AccumulatorValueContext context(this);
4813 EmitVariableLoad(expr->expression()->AsVariableProxy());
4814 } else {
4815 // Reserve space for result of postfix operation.
4816 if (expr->is_postfix() && !context()->IsEffect()) {
4817 __ li(at, Operand(Smi::FromInt(0)));
4818 __ push(at);
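      // This dummy smi is overwritten further down with the old value once the
      // property load has produced it, so the postfix expression can return it.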
4819 }
4820 switch (assign_type) {
4821 case NAMED_PROPERTY: {
4822 // Put the object both on the stack and in the register.
4823 VisitForStackValue(prop->obj());
4824 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4825 EmitNamedPropertyLoad(prop);
4826 break;
4827 }
4828
4829 case NAMED_SUPER_PROPERTY: {
4830 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4831 VisitForAccumulatorValue(
4832 prop->obj()->AsSuperPropertyReference()->home_object());
4833 __ Push(result_register());
4834 const Register scratch = a1;
4835 __ ld(scratch, MemOperand(sp, kPointerSize));
4836 __ Push(scratch, result_register());
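        // The receiver/home_object pair is now on the stack twice: the top
        // copy feeds the super property load below, the copy underneath is
        // kept for the subsequent store of the incremented value.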
4837 EmitNamedSuperPropertyLoad(prop);
4838 break;
4839 }
4840
4841 case KEYED_SUPER_PROPERTY: {
4842 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4843 VisitForAccumulatorValue(
4844 prop->obj()->AsSuperPropertyReference()->home_object());
4845 const Register scratch = a1;
4846 const Register scratch1 = a4;
4847 __ Move(scratch, result_register());
4848 VisitForAccumulatorValue(prop->key());
4849 __ Push(scratch, result_register());
4850 __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
4851 __ Push(scratch1, scratch, result_register());
4852 EmitKeyedSuperPropertyLoad(prop);
4853 break;
4854 }
4855
4856 case KEYED_PROPERTY: {
4857 VisitForStackValue(prop->obj());
4858 VisitForStackValue(prop->key());
4859 __ ld(LoadDescriptor::ReceiverRegister(),
4860 MemOperand(sp, 1 * kPointerSize));
4861 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4862 EmitKeyedPropertyLoad(prop);
4863 break;
4864 }
4865
4866 case VARIABLE:
4867 UNREACHABLE();
4868 }
4869 }
4870
4871 // We need a second deoptimization point after loading the value
4872 // in case evaluating the property load may have a side effect.
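  // For example, in "o.x++" the load of "o.x" may invoke an accessor.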
4873 if (assign_type == VARIABLE) {
4874 PrepareForBailout(expr->expression(), TOS_REG);
4875 } else {
4876 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4877 }
4878
4879 // Inline smi case if we are in a loop.
4880 Label stub_call, done;
4881 JumpPatchSite patch_site(masm_);
4882
4883 int count_value = expr->op() == Token::INC ? 1 : -1;
4884 __ mov(a0, v0);
4885 if (ShouldInlineSmiCase(expr->op())) {
4886 Label slow;
4887 patch_site.EmitJumpIfNotSmi(v0, &slow);
4888
4889 // Save result for postfix expressions.
4890 if (expr->is_postfix()) {
4891 if (!context()->IsEffect()) {
4892 // Save the result on the stack. If we have a named or keyed property
4893 // we store the result under the receiver that is currently on top
4894 // of the stack.
4895 switch (assign_type) {
4896 case VARIABLE:
4897 __ push(v0);
4898 break;
4899 case NAMED_PROPERTY:
4900 __ sd(v0, MemOperand(sp, kPointerSize));
4901 break;
4902 case NAMED_SUPER_PROPERTY:
4903 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4904 break;
4905 case KEYED_PROPERTY:
4906 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4907 break;
4908 case KEYED_SUPER_PROPERTY:
4909 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
4910 break;
4911 }
4912 }
4913 }
4914
4915 Register scratch1 = a1;
4916 Register scratch2 = a4;
4917 __ li(scratch1, Operand(Smi::FromInt(count_value)));
4918 __ DadduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4919 __ BranchOnNoOverflow(&done, scratch2);
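    // Sketch of the inline case, assuming the default mips64 smi layout (a
    // 32-bit payload in the upper word): adding the tagged count value
    // increments the payload directly, and a signed 64-bit overflow here
    // means the result left the smi range, so we fall back to the stub.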
4920 // Call stub. Undo operation first.
4921 __ Move(v0, a0);
4922 __ jmp(&stub_call);
4923 __ bind(&slow);
4924 }
4925 if (!is_strong(language_mode())) {
4926 ToNumberStub convert_stub(isolate());
4927 __ CallStub(&convert_stub);
4928 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4929 }
4930
4931 // Save result for postfix expressions.
4932 if (expr->is_postfix()) {
4933 if (!context()->IsEffect()) {
4934 // Save the result on the stack. If we have a named or keyed property
4935 // we store the result under the receiver that is currently on top
4936 // of the stack.
4937 switch (assign_type) {
4938 case VARIABLE:
4939 __ push(v0);
4940 break;
4941 case NAMED_PROPERTY:
4942 __ sd(v0, MemOperand(sp, kPointerSize));
4943 break;
4944 case NAMED_SUPER_PROPERTY:
4945 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4946 break;
4947 case KEYED_PROPERTY:
4948 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4949 break;
4950 case KEYED_SUPER_PROPERTY:
4951 __ sd(v0, MemOperand(sp, 3 * kPointerSize));
4952 break;
4953 }
4954 }
4955 }
4956
4957 __ bind(&stub_call);
4958 __ mov(a1, v0);
4959 __ li(a0, Operand(Smi::FromInt(count_value)));
4960
4961 SetExpressionPosition(expr);
4962
4963
4964 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4965 strength(language_mode())).code();
4966 CallIC(code, expr->CountBinOpFeedbackId());
4967 patch_site.EmitPatchInfo();
4968 __ bind(&done);
4969
4970 if (is_strong(language_mode())) {
4971 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4972 }
4973 // Store the value returned in v0.
4974 switch (assign_type) {
4975 case VARIABLE:
4976 if (expr->is_postfix()) {
4977 { EffectContext context(this);
4978 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4979 Token::ASSIGN, expr->CountSlot());
4980 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4981 context.Plug(v0);
4982 }
4983 // For all contexts except EffectContext we have the result on
4984 // top of the stack.
4985 if (!context()->IsEffect()) {
4986 context()->PlugTOS();
4987 }
4988 } else {
4989 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4990 Token::ASSIGN, expr->CountSlot());
4991 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4992 context()->Plug(v0);
4993 }
4994 break;
4995 case NAMED_PROPERTY: {
4996 __ mov(StoreDescriptor::ValueRegister(), result_register());
4997 __ li(StoreDescriptor::NameRegister(),
4998 Operand(prop->key()->AsLiteral()->value()));
4999 __ pop(StoreDescriptor::ReceiverRegister());
5000 if (FLAG_vector_stores) {
5001 EmitLoadStoreICSlot(expr->CountSlot());
5002 CallStoreIC();
5003 } else {
5004 CallStoreIC(expr->CountStoreFeedbackId());
5005 }
5006 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5007 if (expr->is_postfix()) {
5008 if (!context()->IsEffect()) {
5009 context()->PlugTOS();
5010 }
5011 } else {
5012 context()->Plug(v0);
5013 }
5014 break;
5015 }
5016 case NAMED_SUPER_PROPERTY: {
5017 EmitNamedSuperPropertyStore(prop);
5018 if (expr->is_postfix()) {
5019 if (!context()->IsEffect()) {
5020 context()->PlugTOS();
5021 }
5022 } else {
5023 context()->Plug(v0);
5024 }
5025 break;
5026 }
5027 case KEYED_SUPER_PROPERTY: {
5028 EmitKeyedSuperPropertyStore(prop);
5029 if (expr->is_postfix()) {
5030 if (!context()->IsEffect()) {
5031 context()->PlugTOS();
5032 }
5033 } else {
5034 context()->Plug(v0);
5035 }
5036 break;
5037 }
5038 case KEYED_PROPERTY: {
5039 __ mov(StoreDescriptor::ValueRegister(), result_register());
5040 __ Pop(StoreDescriptor::ReceiverRegister(),
5041 StoreDescriptor::NameRegister());
5042 Handle<Code> ic =
5043 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
5044 if (FLAG_vector_stores) {
5045 EmitLoadStoreICSlot(expr->CountSlot());
5046 CallIC(ic);
5047 } else {
5048 CallIC(ic, expr->CountStoreFeedbackId());
5049 }
5050 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
5051 if (expr->is_postfix()) {
5052 if (!context()->IsEffect()) {
5053 context()->PlugTOS();
5054 }
5055 } else {
5056 context()->Plug(v0);
5057 }
5058 break;
5059 }
5060 }
5061 }
5062
5063
5064 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
5065 Expression* sub_expr,
5066 Handle<String> check) {
5067 Label materialize_true, materialize_false;
5068 Label* if_true = NULL;
5069 Label* if_false = NULL;
5070 Label* fall_through = NULL;
5071 context()->PrepareTest(&materialize_true, &materialize_false,
5072 &if_true, &if_false, &fall_through);
5073
5074 { AccumulatorValueContext context(this);
5075 VisitForTypeofValue(sub_expr);
5076 }
5077 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5078
5079 Factory* factory = isolate()->factory();
5080 if (String::Equals(check, factory->number_string())) {
5081 __ JumpIfSmi(v0, if_true);
5082 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5083 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
5084 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5085 } else if (String::Equals(check, factory->string_string())) {
5086 __ JumpIfSmi(v0, if_false);
5087 // Check for undetectable objects => false.
5088 __ GetObjectType(v0, v0, a1);
5089 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
5090 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5091 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5092 Split(eq, a1, Operand(zero_reg),
5093 if_true, if_false, fall_through);
5094 } else if (String::Equals(check, factory->symbol_string())) {
5095 __ JumpIfSmi(v0, if_false);
5096 __ GetObjectType(v0, v0, a1);
5097 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
5098 } else if (String::Equals(check, factory->float32x4_string())) {
5099 __ JumpIfSmi(v0, if_false);
5100 __ GetObjectType(v0, v0, a1);
5101 Split(eq, a1, Operand(FLOAT32X4_TYPE), if_true, if_false, fall_through);
5102 } else if (String::Equals(check, factory->boolean_string())) {
5103 __ LoadRoot(at, Heap::kTrueValueRootIndex);
5104 __ Branch(if_true, eq, v0, Operand(at));
5105 __ LoadRoot(at, Heap::kFalseValueRootIndex);
5106 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
5107 } else if (String::Equals(check, factory->undefined_string())) {
5108 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5109 __ Branch(if_true, eq, v0, Operand(at));
5110 __ JumpIfSmi(v0, if_false);
5111 // Check for undetectable objects => true.
5112 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
5113 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5114 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5115 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
5116 } else if (String::Equals(check, factory->function_string())) {
5117 __ JumpIfSmi(v0, if_false);
5118 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5119 __ GetObjectType(v0, v0, a1);
5120 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
5121 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
5122 if_true, if_false, fall_through);
5123 } else if (String::Equals(check, factory->object_string())) {
5124 __ JumpIfSmi(v0, if_false);
5125 __ LoadRoot(at, Heap::kNullValueRootIndex);
5126 __ Branch(if_true, eq, v0, Operand(at));
5127 // Check for JS objects => true.
5128 __ GetObjectType(v0, v0, a1);
5129 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
5130 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
5131 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
5132 // Check for undetectable objects => false.
5133 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
5134 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
5135 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
5136 } else {
5137 if (if_false != fall_through) __ jmp(if_false);
5138 }
5139 context()->Plug(if_true, if_false);
5140 }
5141
5142
5143 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5144 Comment cmnt(masm_, "[ CompareOperation");
5145 SetExpressionPosition(expr);
5146
5147 // First we try a fast inlined version of the compare when one of
5148 // the operands is a literal.
5149 if (TryLiteralCompare(expr)) return;
5150
5151 // Always perform the comparison for its control flow. Pack the result
5152 // into the expression's context after the comparison is performed.
5153 Label materialize_true, materialize_false;
5154 Label* if_true = NULL;
5155 Label* if_false = NULL;
5156 Label* fall_through = NULL;
5157 context()->PrepareTest(&materialize_true, &materialize_false,
5158 &if_true, &if_false, &fall_through);
5159
5160 Token::Value op = expr->op();
5161 VisitForStackValue(expr->left());
5162 switch (op) {
5163 case Token::IN:
5164 VisitForStackValue(expr->right());
5165 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5166 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5167 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
5168 Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
5169 break;
5170
5171 case Token::INSTANCEOF: {
5172 VisitForStackValue(expr->right());
5173 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5174 __ CallStub(&stub);
5175 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5176 // The stub returns 0 for true.
5177 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
5178 break;
5179 }
5180
5181 default: {
5182 VisitForAccumulatorValue(expr->right());
5183 Condition cc = CompareIC::ComputeCondition(op);
5184 __ mov(a0, result_register());
5185 __ pop(a1);
5186
5187 bool inline_smi_code = ShouldInlineSmiCase(op);
5188 JumpPatchSite patch_site(masm_);
5189 if (inline_smi_code) {
5190 Label slow_case;
5191 __ Or(a2, a0, Operand(a1));
5192 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
5193 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
5194 __ bind(&slow_case);
5195 }
5196
5197 Handle<Code> ic = CodeFactory::CompareIC(
5198 isolate(), op, strength(language_mode())).code();
5199 CallIC(ic, expr->CompareOperationFeedbackId());
5200 patch_site.EmitPatchInfo();
5201 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5202 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
5203 }
5204 }
5205
5206 // Convert the result of the comparison into one expected for this
5207 // expression's context.
5208 context()->Plug(if_true, if_false);
5209 }
5210
5211
5212 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5213 Expression* sub_expr,
5214 NilValue nil) {
5215 Label materialize_true, materialize_false;
5216 Label* if_true = NULL;
5217 Label* if_false = NULL;
5218 Label* fall_through = NULL;
5219 context()->PrepareTest(&materialize_true, &materialize_false,
5220 &if_true, &if_false, &fall_through);
5221
5222 VisitForAccumulatorValue(sub_expr);
5223 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5224 __ mov(a0, result_register());
5225 if (expr->op() == Token::EQ_STRICT) {
5226 Heap::RootListIndex nil_value = nil == kNullValue ?
5227 Heap::kNullValueRootIndex :
5228 Heap::kUndefinedValueRootIndex;
5229 __ LoadRoot(a1, nil_value);
5230 Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
5231 } else {
5232 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5233 CallIC(ic, expr->CompareOperationFeedbackId());
5234 Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
5235 }
5236 context()->Plug(if_true, if_false);
5237 }
5238
5239
5240 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5241 __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5242 context()->Plug(v0);
5243 }
5244
5245
5246 Register FullCodeGenerator::result_register() {
5247 return v0;
5248 }
5249
5250
5251 Register FullCodeGenerator::context_register() {
5252 return cp;
5253 }
5254
5255
5256 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5257 // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5258 DCHECK(IsAligned(frame_offset, kPointerSize));
5259 // __ sw(value, MemOperand(fp, frame_offset));
5260 __ sd(value, MemOperand(fp, frame_offset));
5261 }
5262
5263
5264 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5265 __ ld(dst, ContextOperand(cp, context_index));
5266 }
5267
5268
5269 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5270 Scope* declaration_scope = scope()->DeclarationScope();
5271 if (declaration_scope->is_script_scope() ||
5272 declaration_scope->is_module_scope()) {
5273 // Contexts nested in the native context have a canonical empty function
5274 // as their closure, not the anonymous closure containing the global
5275 // code. Pass a smi sentinel and let the runtime look up the empty
5276 // function.
5277 __ li(at, Operand(Smi::FromInt(0)));
5278 } else if (declaration_scope->is_eval_scope()) {
5279 // Contexts created by a call to eval have the same closure as the
5280 // context calling eval, not the anonymous closure containing the eval
5281 // code. Fetch it from the context.
5282 __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
5283 } else {
5284 DCHECK(declaration_scope->is_function_scope());
5285 __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5286 }
5287 __ push(at);
5288 }
5289
5290
5291 // ----------------------------------------------------------------------------
5292 // Non-local control flow support.
5293
5294 void FullCodeGenerator::EnterFinallyBlock() {
5295 DCHECK(!result_register().is(a1));
5296 // Store result register while executing finally block.
5297 __ push(result_register());
5298 // Cook return address in link register to stack (smi encoded Code* delta).
5299 __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
5300 __ SmiTag(a1);
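  // The return address is kept as a smi-tagged offset from the code object
  // start so it stays valid if the GC moves the code object while the finally
  // block runs; ExitFinallyBlock() rebuilds the absolute address from it.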
5301
5302 // Store the cooked return address while executing finally block.
5303 __ push(a1);
5304
5305 // Store pending message while executing finally block.
5306 ExternalReference pending_message_obj =
5307 ExternalReference::address_of_pending_message_obj(isolate());
5308 __ li(at, Operand(pending_message_obj));
5309 __ ld(a1, MemOperand(at));
5310 __ push(a1);
5311
5312 ClearPendingMessage();
5313 }
5314
5315
5316 void FullCodeGenerator::ExitFinallyBlock() {
5317 DCHECK(!result_register().is(a1));
5318 // Restore pending message from stack.
5319 __ pop(a1);
5320 ExternalReference pending_message_obj =
5321 ExternalReference::address_of_pending_message_obj(isolate());
5322 __ li(at, Operand(pending_message_obj));
5323 __ sd(a1, MemOperand(at));
5324
5325 // Restore result register from stack.
5326 __ pop(a1);
5327
5328 // Uncook return address and return.
5329 __ pop(result_register());
5330
5331 __ SmiUntag(a1);
5332 __ Daddu(at, a1, Operand(masm_->CodeObject()));
5333 __ Jump(at);
5334 }
5335
5336
5337 void FullCodeGenerator::ClearPendingMessage() {
5338 DCHECK(!result_register().is(a1));
5339 ExternalReference pending_message_obj =
5340 ExternalReference::address_of_pending_message_obj(isolate());
5341 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
5342 __ li(at, Operand(pending_message_obj));
5343 __ sd(a1, MemOperand(at));
5344 }
5345
5346
5347 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5348 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5349 __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
5350 Operand(SmiFromSlot(slot)));
5351 }
5352
5353
5354 #undef __
5355
5356
5357 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5358 Address pc,
5359 BackEdgeState target_state,
5360 Code* replacement_code) {
5361 static const int kInstrSize = Assembler::kInstrSize;
5362 Address branch_address = pc - 8 * kInstrSize;
5363 CodePatcher patcher(branch_address, 1);
5364
5365 switch (target_state) {
5366 case INTERRUPT:
5367 // slt at, a3, zero_reg (in case of count based interrupts)
5368 // beq at, zero_reg, ok
5369 // lui t9, <interrupt stub address> upper
5370 // ori t9, <interrupt stub address> u-middle
5371 // dsll t9, t9, 16
5372 // ori t9, <interrupt stub address> lower
5373 // jalr t9
5374 // nop
5375 // ok-label ----- pc_after points here
5376 patcher.masm()->slt(at, a3, zero_reg);
5377 break;
5378 case ON_STACK_REPLACEMENT:
5379 case OSR_AFTER_STACK_CHECK:
5380 // addiu at, zero_reg, 1
5381 // beq at, zero_reg, ok ;; Not changed
5382 // lui t9, <on-stack replacement address> upper
5383 // ori t9, <on-stack replacement address> middle
5384 // dsll t9, t9, 16
5385 // ori t9, <on-stack replacement address> lower
5386 // jalr t9 ;; Not changed
5387 // nop ;; Not changed
5388 // ok-label ----- pc_after points here
5389 patcher.masm()->daddiu(at, zero_reg, 1);
5390 break;
5391 }
5392 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5393 // Replace the stack check address in the load-immediate (6-instr sequence)
5394 // with the entry address of the replacement code.
5395 Assembler::set_target_address_at(pc_immediate_load_address,
5396 replacement_code->entry());
5397
5398 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5399 unoptimized_code, pc_immediate_load_address, replacement_code);
5400 }
5401
5402
5403 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5404 Isolate* isolate,
5405 Code* unoptimized_code,
5406 Address pc) {
5407 static const int kInstrSize = Assembler::kInstrSize;
5408 Address branch_address = pc - 8 * kInstrSize;
5409 Address pc_immediate_load_address = pc - 6 * kInstrSize;
5410
5411 DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
5412 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
5413 DCHECK(reinterpret_cast<uint64_t>(
5414 Assembler::target_address_at(pc_immediate_load_address)) ==
5415 reinterpret_cast<uint64_t>(
5416 isolate->builtins()->InterruptCheck()->entry()));
5417 return INTERRUPT;
5418 }
5419
5420 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
5421
5422 if (reinterpret_cast<uint64_t>(
5423 Assembler::target_address_at(pc_immediate_load_address)) ==
5424 reinterpret_cast<uint64_t>(
5425 isolate->builtins()->OnStackReplacement()->entry())) {
5426 return ON_STACK_REPLACEMENT;
5427 }
5428
5429 DCHECK(reinterpret_cast<uint64_t>(
5430 Assembler::target_address_at(pc_immediate_load_address)) ==
5431 reinterpret_cast<uint64_t>(
5432 isolate->builtins()->OsrAfterStackCheck()->entry()));
5433 return OSR_AFTER_STACK_CHECK;
5434 }
5435
5436
5437 } // namespace internal
5438 } // namespace v8
5439
5440 #endif // V8_TARGET_ARCH_MIPS64