Chromium Code Reviews

Side by Side Diff: src/arm64/full-codegen-arm64.cc

Issue 1248443003: Move Full-codegen into its own folder. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rename define. Created 5 years, 5 months ago.
1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #if V8_TARGET_ARCH_ARM64
8
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
18
19 #include "src/arm64/code-stubs-arm64.h"
20 #include "src/arm64/macro-assembler-arm64.h"
21
22 namespace v8 {
23 namespace internal {
24
25 #define __ ACCESS_MASM(masm_)
26
27 class JumpPatchSite BASE_EMBEDDED {
28 public:
29 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
30 #ifdef DEBUG
31 info_emitted_ = false;
32 #endif
33 }
34
35 ~JumpPatchSite() {
36 if (patch_site_.is_bound()) {
37 DCHECK(info_emitted_);
38 } else {
39 DCHECK(reg_.IsNone());
40 }
41 }
42
43 void EmitJumpIfNotSmi(Register reg, Label* target) {
44 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
45 InstructionAccurateScope scope(masm_, 1);
46 DCHECK(!info_emitted_);
47 DCHECK(reg.Is64Bits());
48 DCHECK(!reg.Is(csp));
49 reg_ = reg;
50 __ bind(&patch_site_);
51 __ tbz(xzr, 0, target); // Always taken before patched.
52 }
53
54 void EmitJumpIfSmi(Register reg, Label* target) {
55 // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
56 InstructionAccurateScope scope(masm_, 1);
57 DCHECK(!info_emitted_);
58 DCHECK(reg.Is64Bits());
59 DCHECK(!reg.Is(csp));
60 reg_ = reg;
61 __ bind(&patch_site_);
62 __ tbnz(xzr, 0, target); // Never taken before patched.
63 }
64
65 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
66 UseScratchRegisterScope temps(masm_);
67 Register temp = temps.AcquireX();
68 __ Orr(temp, reg1, reg2);
69 EmitJumpIfNotSmi(temp, target);
70 }
71
72 void EmitPatchInfo() {
73 Assembler::BlockPoolsScope scope(masm_);
74 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
75 #ifdef DEBUG
76 info_emitted_ = true;
77 #endif
78 }
79
80 private:
81 MacroAssembler* masm_;
82 Label patch_site_;
83 Register reg_;
84 #ifdef DEBUG
85 bool info_emitted_;
86 #endif
87 };
88
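// Note on the patch sites above: `tbz xzr, #0, target` tests bit 0 of the
// zero register, which is always clear, so the branch is taken on every run
// until PatchInlinedSmiCode (ic-arm64.cc) rewrites it. Sketch of the intended
// effect (illustrative, not the patcher's actual code):
//
//   EmitJumpIfNotSmi emits   tbz  xzr, #0, target   // always taken
//   which is patched to      tbnz reg, #0, target   // taken iff the tag bit
//                                                   // is set (reg not a smi)
//
// and symmetrically tbnz xzr (never taken) becomes tbz reg for the smi case.
// EmitPatchInfo records reg_ and patch_site_ so the patcher can locate the
// instruction and substitute the right register.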
89
90 // Generate code for a JS function. On entry to the function the receiver
91 // and arguments have been pushed on the stack left to right. The actual
92 // argument count matches the formal parameter count expected by the
93 // function.
94 //
95 // The live registers are:
96 // - x1: the JS function object being called (i.e. ourselves).
97 // - cp: our context.
98 // - fp: our caller's frame pointer.
99 // - jssp: stack pointer.
100 // - lr: return address.
101 //
102 // The function builds a JS frame. See JavaScriptFrameConstants in
103 // frames-arm64.h for its layout.
104 void FullCodeGenerator::Generate() {
105 CompilationInfo* info = info_;
106 profiling_counter_ = isolate()->factory()->NewCell(
107 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
108 SetFunctionPosition(function());
109 Comment cmnt(masm_, "[ Function compiled by full code generator");
110
111 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
112
113 #ifdef DEBUG
114 if (strlen(FLAG_stop_at) > 0 &&
115 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
116 __ Debug("stop-at", __LINE__, BREAK);
117 }
118 #endif
119
120 // Sloppy mode functions and builtins need to replace the receiver with the
121 // global proxy when called as functions (without an explicit receiver
122 // object).
123 if (is_sloppy(info->language_mode()) && !info->is_native() &&
124 info->MayUseThis() && info->scope()->has_this_declaration()) {
125 Label ok;
126 int receiver_offset = info->scope()->num_parameters() * kXRegSize;
127 __ Peek(x10, receiver_offset);
128 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
129
130 __ Ldr(x10, GlobalObjectMemOperand());
131 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));
132 __ Poke(x10, receiver_offset);
133
134 __ Bind(&ok);
135 }
136
137
138 // Open a frame scope to indicate that there is a frame on the stack.
139 // The MANUAL indicates that the scope shouldn't actually generate code
140 // to set up the frame because we do it manually below.
141 FrameScope frame_scope(masm_, StackFrame::MANUAL);
142
143 // This call emits the following sequence in a way that can be patched for
144 // code ageing support:
145 // Push(lr, fp, cp, x1);
146 // Add(fp, jssp, 2 * kPointerSize);
147 info->set_prologue_offset(masm_->pc_offset());
148 __ Prologue(info->IsCodePreAgingActive());
149 info->AddNoFrameRange(0, masm_->pc_offset());
150
151 // Reserve space on the stack for locals.
152 { Comment cmnt(masm_, "[ Allocate locals");
153 int locals_count = info->scope()->num_stack_slots();
154 // Generators allocate locals, if any, in context slots.
155 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
156
157 if (locals_count > 0) {
158 if (locals_count >= 128) {
159 Label ok;
160 DCHECK(jssp.Is(__ StackPointer()));
161 __ Sub(x10, jssp, locals_count * kPointerSize);
162 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
163 __ B(hs, &ok);
164 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
165 __ Bind(&ok);
166 }
167 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
168 if (FLAG_optimize_for_size) {
169       __ PushMultipleTimes(x10, locals_count);
170 } else {
171 const int kMaxPushes = 32;
172 if (locals_count >= kMaxPushes) {
173 int loop_iterations = locals_count / kMaxPushes;
174 __ Mov(x3, loop_iterations);
175 Label loop_header;
176 __ Bind(&loop_header);
177 // Do pushes.
178         __ PushMultipleTimes(x10, kMaxPushes);
179 __ Subs(x3, x3, 1);
180 __ B(ne, &loop_header);
181 }
182 int remaining = locals_count % kMaxPushes;
183 // Emit the remaining pushes.
184       __ PushMultipleTimes(x10, remaining);
185 }
186 }
187 }
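// Shape of the code emitted above when locals_count >= kMaxPushes (sketch):
//
//     Mov  x3, locals_count / kMaxPushes
//   loop:                                   // runtime loop over full chunks
//     <push x10 kMaxPushes times>
//     Subs x3, x3, #1
//     B    ne, loop
//     <push x10 (locals_count % kMaxPushes) times>  // straight-line remainder
//
// This bounds the emitted code size by kMaxPushes regardless of locals_count.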
188
189 bool function_in_register_x1 = true;
190
191 if (info->scope()->num_heap_slots() > 0) {
192 // Argument to NewContext is the function, which is still in x1.
193 Comment cmnt(masm_, "[ Allocate context");
194 bool need_write_barrier = true;
195 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
196 if (info->scope()->is_script_scope()) {
197 __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
198 __ Push(x1, x10);
199 __ CallRuntime(Runtime::kNewScriptContext, 2);
200 } else if (slots <= FastNewContextStub::kMaximumSlots) {
201 FastNewContextStub stub(isolate(), slots);
202 __ CallStub(&stub);
203 // Result of FastNewContextStub is always in new space.
204 need_write_barrier = false;
205 } else {
206 __ Push(x1);
207 __ CallRuntime(Runtime::kNewFunctionContext, 1);
208 }
209 function_in_register_x1 = false;
210 // Context is returned in x0. It replaces the context passed to us.
211 // It's saved in the stack and kept live in cp.
212 __ Mov(cp, x0);
213 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
214 // Copy any necessary parameters into the context.
215 int num_parameters = info->scope()->num_parameters();
216 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
217 for (int i = first_parameter; i < num_parameters; i++) {
218 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
219 if (var->IsContextSlot()) {
220 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
221 (num_parameters - 1 - i) * kPointerSize;
222 // Load parameter from stack.
223 __ Ldr(x10, MemOperand(fp, parameter_offset));
224 // Store it in the context.
225 MemOperand target = ContextMemOperand(cp, var->index());
226 __ Str(x10, target);
227
228 // Update the write barrier.
229 if (need_write_barrier) {
230 __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
231 x11, kLRHasBeenSaved, kDontSaveFPRegs);
232 } else if (FLAG_debug_code) {
233 Label done;
234 __ JumpIfInNewSpace(cp, &done);
235 __ Abort(kExpectedNewSpaceObject);
236 __ bind(&done);
237 }
238 }
239 }
240 }
241
242 // Possibly set up a local binding to the this function which is used in
243 // derived constructors with super calls.
244 Variable* this_function_var = scope()->this_function_var();
245 if (this_function_var != nullptr) {
246 Comment cmnt(masm_, "[ This function");
247 if (!function_in_register_x1) {
248 __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
249       // The write barrier clobbers the register again; keep it marked as such.
250 }
251 SetVar(this_function_var, x1, x0, x2);
252 }
253
254 Variable* new_target_var = scope()->new_target_var();
255 if (new_target_var != nullptr) {
256 Comment cmnt(masm_, "[ new.target");
257 // Get the frame pointer for the calling frame.
258 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
259
260 Label check_frame_marker;
261 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
262 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
263 __ B(ne, &check_frame_marker);
264 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
265 __ Bind(&check_frame_marker);
266 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
267 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
268
269 Label non_construct_frame, done;
270
271 __ B(ne, &non_construct_frame);
272 __ Ldr(x0,
273 MemOperand(x2, ConstructFrameConstants::kOriginalConstructorOffset));
274 __ B(&done);
275
276 __ Bind(&non_construct_frame);
277 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
278
279 __ Bind(&done);
280
281 SetVar(new_target_var, x0, x2, x3);
282 }
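// In outline, the new.target computation above is (pseudo-code; field names
// follow the frame constants used in the emitted loads):
//
//   frame = caller_fp
//   if (frame->context == Smi(ARGUMENTS_ADAPTOR)) frame = frame->caller_fp
//   new.target = (frame->marker == Smi(CONSTRUCT))
//                    ? frame->original_constructor
//                    : undefined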
283
284   // Possibly allocate a rest parameter array.
285 int rest_index;
286 Variable* rest_param = scope()->rest_parameter(&rest_index);
287 if (rest_param) {
288 Comment cmnt(masm_, "[ Allocate rest parameter array");
289
290 int num_parameters = info->scope()->num_parameters();
291 int offset = num_parameters * kPointerSize;
292
293 __ Add(x3, fp, StandardFrameConstants::kCallerSPOffset + offset);
294 __ Mov(x2, Smi::FromInt(num_parameters));
295 __ Mov(x1, Smi::FromInt(rest_index));
296 __ Mov(x0, Smi::FromInt(language_mode()));
297 __ Push(x3, x2, x1, x0);
298
299 RestParamAccessStub stub(isolate());
300 __ CallStub(&stub);
301
302 SetVar(rest_param, x0, x1, x2);
303 }
304
305 Variable* arguments = scope()->arguments();
306 if (arguments != NULL) {
307 // Function uses arguments object.
308 Comment cmnt(masm_, "[ Allocate arguments object");
309 if (!function_in_register_x1) {
310 // Load this again, if it's used by the local context below.
311 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
312 } else {
313 __ Mov(x3, x1);
314 }
315 // Receiver is just before the parameters on the caller's stack.
316 int num_parameters = info->scope()->num_parameters();
317 int offset = num_parameters * kPointerSize;
318 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset);
319 __ Mov(x1, Smi::FromInt(num_parameters));
320 __ Push(x3, x2, x1);
321
322 // Arguments to ArgumentsAccessStub:
323 // function, receiver address, parameter count.
324 // The stub will rewrite receiver and parameter count if the previous
325 // stack frame was an arguments adapter frame.
326 ArgumentsAccessStub::Type type;
327 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
328 type = ArgumentsAccessStub::NEW_STRICT;
329 } else if (function()->has_duplicate_parameters()) {
330 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
331 } else {
332 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
333 }
334 ArgumentsAccessStub stub(isolate(), type);
335 __ CallStub(&stub);
336
337 SetVar(arguments, x0, x1, x2);
338 }
339
340 if (FLAG_trace) {
341 __ CallRuntime(Runtime::kTraceEnter, 0);
342 }
343
344 // Visit the declarations and body unless there is an illegal
345 // redeclaration.
346 if (scope()->HasIllegalRedeclaration()) {
347 Comment cmnt(masm_, "[ Declarations");
348 scope()->VisitIllegalRedeclaration(this);
349
350 } else {
351 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
352 { Comment cmnt(masm_, "[ Declarations");
353 VisitDeclarations(scope()->declarations());
354 }
355
356 {
357 Comment cmnt(masm_, "[ Stack check");
358 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
359 Label ok;
360 DCHECK(jssp.Is(__ StackPointer()));
361 __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
362 __ B(hs, &ok);
363 PredictableCodeSizeScope predictable(masm_,
364 Assembler::kCallSizeWithRelocation);
365 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
366 __ Bind(&ok);
367 }
368
369 {
370 Comment cmnt(masm_, "[ Body");
371 DCHECK(loop_depth() == 0);
372 VisitStatements(function()->body());
373 DCHECK(loop_depth() == 0);
374 }
375 }
376
377 // Always emit a 'return undefined' in case control fell off the end of
378 // the body.
379 { Comment cmnt(masm_, "[ return <undefined>;");
380 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
381 }
382 EmitReturnSequence();
383
384 // Force emission of the pools, so they don't get emitted in the middle
385 // of the back edge table.
386 masm()->CheckVeneerPool(true, false);
387 masm()->CheckConstPool(true, false);
388 }
389
390
391 void FullCodeGenerator::ClearAccumulator() {
392 __ Mov(x0, Smi::FromInt(0));
393 }
394
395
396 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
397 __ Mov(x2, Operand(profiling_counter_));
398 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
399 __ Subs(x3, x3, Smi::FromInt(delta));
400 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
401 }
402
403
404 void FullCodeGenerator::EmitProfilingCounterReset() {
405 int reset_value = FLAG_interrupt_budget;
406 if (info_->is_debug()) {
407 // Detect debug break requests as soon as possible.
408 reset_value = FLAG_interrupt_budget >> 4;
409 }
410 __ Mov(x2, Operand(profiling_counter_));
411 __ Mov(x3, Smi::FromInt(reset_value));
412 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
413 }
414
415
416 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
417 Label* back_edge_target) {
418 DCHECK(jssp.Is(__ StackPointer()));
419 Comment cmnt(masm_, "[ Back edge bookkeeping");
420 // Block literal pools whilst emitting back edge code.
421 Assembler::BlockPoolsScope block_const_pool(masm_);
422 Label ok;
423
424 DCHECK(back_edge_target->is_bound());
425 // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
426 // to reduce the absolute error due to the integer division. To do that,
427 // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
428 // the result).
429 int distance =
430 static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
431 kCodeSizeMultiplier / 2);
432 int weight = Min(kMaxBackEdgeWeight,
433 Max(1, distance / kCodeSizeMultiplier));
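  // Worked example of the rounding (the multiplier value is illustrative):
  // with kCodeSizeMultiplier == 100 and a raw distance of 1060 bytes,
  // floor(1060 / 100) == 10 but floor((1060 + 50) / 100) == 11,
  // i.e. 10.6 rounds up to 11 as intended.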
434 EmitProfilingCounterDecrement(weight);
435 __ B(pl, &ok);
436 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
437
438 // Record a mapping of this PC offset to the OSR id. This is used to find
439 // the AST id from the unoptimized code in order to use it as a key into
440 // the deoptimization input data found in the optimized code.
441 RecordBackEdge(stmt->OsrEntryId());
442
443 EmitProfilingCounterReset();
444
445 __ Bind(&ok);
446 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
447 // Record a mapping of the OSR id to this PC. This is used if the OSR
448 // entry becomes the target of a bailout. We don't expect it to be, but
449 // we want it to work if it is.
450 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
451 }
452
453
454 void FullCodeGenerator::EmitReturnSequence() {
455 Comment cmnt(masm_, "[ Return sequence");
456
457 if (return_label_.is_bound()) {
458 __ B(&return_label_);
459
460 } else {
461 __ Bind(&return_label_);
462 if (FLAG_trace) {
463 // Push the return value on the stack as the parameter.
464 // Runtime::TraceExit returns its parameter in x0.
465 __ Push(result_register());
466 __ CallRuntime(Runtime::kTraceExit, 1);
467 DCHECK(x0.Is(result_register()));
468 }
469 // Pretend that the exit is a backwards jump to the entry.
470 int weight = 1;
471 if (info_->ShouldSelfOptimize()) {
472 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
473 } else {
474 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
475 weight = Min(kMaxBackEdgeWeight,
476 Max(1, distance / kCodeSizeMultiplier));
477 }
478 EmitProfilingCounterDecrement(weight);
479 Label ok;
480 __ B(pl, &ok);
481 __ Push(x0);
482 __ Call(isolate()->builtins()->InterruptCheck(),
483 RelocInfo::CODE_TARGET);
484 __ Pop(x0);
485 EmitProfilingCounterReset();
486 __ Bind(&ok);
487
488 SetReturnPosition(function());
489 const Register& current_sp = __ StackPointer();
490     // Nothing ensures 16-byte alignment here.
491 DCHECK(!current_sp.Is(csp));
492 __ Mov(current_sp, fp);
493 int no_frame_start = masm_->pc_offset();
494 __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
495 // Drop the arguments and receiver and return.
496 // TODO(all): This implementation is overkill as it supports 2**31+1
497 // arguments, consider how to improve it without creating a security
498 // hole.
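    // The sequence emitted below is, schematically:
    //
    //   ldr  ip0, pc + 3 * kInstructionSize  // load the literal after Ret
    //   add  jssp, jssp, ip0                 // drop arguments and receiver
    //   ret
    //   .quad kXRegSize * (num_parameters + 1)  // the literal: bytes to drop
    //
    // The shift by kLoadLiteralScaleLog2 converts the byte offset of the
    // literal into the word offset that ldr_pcrel expects.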
499 __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
500 __ Add(current_sp, current_sp, ip0);
501 __ Ret();
502 int32_t arg_count = info_->scope()->num_parameters() + 1;
503 __ dc64(kXRegSize * arg_count);
504 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
505 }
506 }
507
508
509 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
510 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
511 codegen()->GetVar(result_register(), var);
512 __ Push(result_register());
513 }
514
515
516 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
517 // Root values have no side effects.
518 }
519
520
521 void FullCodeGenerator::AccumulatorValueContext::Plug(
522 Heap::RootListIndex index) const {
523 __ LoadRoot(result_register(), index);
524 }
525
526
527 void FullCodeGenerator::StackValueContext::Plug(
528 Heap::RootListIndex index) const {
529 __ LoadRoot(result_register(), index);
530 __ Push(result_register());
531 }
532
533
534 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
535 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
536 false_label_);
537 if (index == Heap::kUndefinedValueRootIndex ||
538 index == Heap::kNullValueRootIndex ||
539 index == Heap::kFalseValueRootIndex) {
540 if (false_label_ != fall_through_) __ B(false_label_);
541 } else if (index == Heap::kTrueValueRootIndex) {
542 if (true_label_ != fall_through_) __ B(true_label_);
543 } else {
544 __ LoadRoot(result_register(), index);
545 codegen()->DoTest(this);
546 }
547 }
548
549
550 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
551 }
552
553
554 void FullCodeGenerator::AccumulatorValueContext::Plug(
555 Handle<Object> lit) const {
556 __ Mov(result_register(), Operand(lit));
557 }
558
559
560 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
561 // Immediates cannot be pushed directly.
562 __ Mov(result_register(), Operand(lit));
563 __ Push(result_register());
564 }
565
566
567 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
568 codegen()->PrepareForBailoutBeforeSplit(condition(),
569 true,
570 true_label_,
571 false_label_);
572 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
573 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
574 if (false_label_ != fall_through_) __ B(false_label_);
575 } else if (lit->IsTrue() || lit->IsJSObject()) {
576 if (true_label_ != fall_through_) __ B(true_label_);
577 } else if (lit->IsString()) {
578 if (String::cast(*lit)->length() == 0) {
579 if (false_label_ != fall_through_) __ B(false_label_);
580 } else {
581 if (true_label_ != fall_through_) __ B(true_label_);
582 }
583 } else if (lit->IsSmi()) {
584 if (Smi::cast(*lit)->value() == 0) {
585 if (false_label_ != fall_through_) __ B(false_label_);
586 } else {
587 if (true_label_ != fall_through_) __ B(true_label_);
588 }
589 } else {
590 // For simplicity we always test the accumulator register.
591 __ Mov(result_register(), Operand(lit));
592 codegen()->DoTest(this);
593 }
594 }
595
596
597 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
598 Register reg) const {
599 DCHECK(count > 0);
600 __ Drop(count);
601 }
602
603
604 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
605 int count,
606 Register reg) const {
607 DCHECK(count > 0);
608 __ Drop(count);
609 __ Move(result_register(), reg);
610 }
611
612
613 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
614 Register reg) const {
615 DCHECK(count > 0);
616 if (count > 1) __ Drop(count - 1);
617 __ Poke(reg, 0);
618 }
619
620
621 void FullCodeGenerator::TestContext::DropAndPlug(int count,
622 Register reg) const {
623 DCHECK(count > 0);
624 // For simplicity we always test the accumulator register.
625 __ Drop(count);
626 __ Mov(result_register(), reg);
627 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
628 codegen()->DoTest(this);
629 }
630
631
632 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
633 Label* materialize_false) const {
634 DCHECK(materialize_true == materialize_false);
635 __ Bind(materialize_true);
636 }
637
638
639 void FullCodeGenerator::AccumulatorValueContext::Plug(
640 Label* materialize_true,
641 Label* materialize_false) const {
642 Label done;
643 __ Bind(materialize_true);
644 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
645 __ B(&done);
646 __ Bind(materialize_false);
647 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
648 __ Bind(&done);
649 }
650
651
652 void FullCodeGenerator::StackValueContext::Plug(
653 Label* materialize_true,
654 Label* materialize_false) const {
655 Label done;
656 __ Bind(materialize_true);
657 __ LoadRoot(x10, Heap::kTrueValueRootIndex);
658 __ B(&done);
659 __ Bind(materialize_false);
660 __ LoadRoot(x10, Heap::kFalseValueRootIndex);
661 __ Bind(&done);
662 __ Push(x10);
663 }
664
665
666 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
667 Label* materialize_false) const {
668 DCHECK(materialize_true == true_label_);
669 DCHECK(materialize_false == false_label_);
670 }
671
672
673 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
674 Heap::RootListIndex value_root_index =
675 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
676 __ LoadRoot(result_register(), value_root_index);
677 }
678
679
680 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
681 Heap::RootListIndex value_root_index =
682 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
683 __ LoadRoot(x10, value_root_index);
684 __ Push(x10);
685 }
686
687
688 void FullCodeGenerator::TestContext::Plug(bool flag) const {
689 codegen()->PrepareForBailoutBeforeSplit(condition(),
690 true,
691 true_label_,
692 false_label_);
693 if (flag) {
694 if (true_label_ != fall_through_) {
695 __ B(true_label_);
696 }
697 } else {
698 if (false_label_ != fall_through_) {
699 __ B(false_label_);
700 }
701 }
702 }
703
704
705 void FullCodeGenerator::DoTest(Expression* condition,
706 Label* if_true,
707 Label* if_false,
708 Label* fall_through) {
709 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
710 CallIC(ic, condition->test_id());
711 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through);
712 }
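// ToBooleanStub leaves zero/non-zero in the result register, so comparing
// against 0 with condition ne routes control to if_true exactly when the
// condition evaluated to true.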
713
714
715 // If (cond), branch to if_true.
716 // If (!cond), branch to if_false.
717 // fall_through is used as an optimization in cases where only one branch
718 // instruction is necessary.
719 void FullCodeGenerator::Split(Condition cond,
720 Label* if_true,
721 Label* if_false,
722 Label* fall_through) {
723 if (if_false == fall_through) {
724 __ B(cond, if_true);
725 } else if (if_true == fall_through) {
726 DCHECK(if_false != fall_through);
727 __ B(NegateCondition(cond), if_false);
728 } else {
729 __ B(cond, if_true);
730 __ B(if_false);
731 }
732 }
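// Example (made-up labels): when the false block immediately follows the
// emitted code, if_false == fall_through, and
//   Split(lt, &if_true, &fall, &fall);
// emits the single instruction `B lt, if_true`, falling through on ge.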
733
734
735 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
736 // Offset is negative because higher indexes are at lower addresses.
737 int offset = -var->index() * kXRegSize;
738 // Adjust by a (parameter or local) base offset.
739 if (var->IsParameter()) {
740 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
741 } else {
742 offset += JavaScriptFrameConstants::kLocal0Offset;
743 }
744 return MemOperand(fp, offset);
745 }
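// Worked example with kXRegSize == kPointerSize == 8 and two parameters:
//   parameter #0: -0 * 8 + (2 + 1) * 8  ->  MemOperand(fp, +24)
//   parameter #1: -1 * 8 + (2 + 1) * 8  ->  MemOperand(fp, +16)
//   local #0:      0 + kLocal0Offset    ->  a small negative offset from fp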
746
747
748 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
749 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
750 if (var->IsContextSlot()) {
751 int context_chain_length = scope()->ContextChainLength(var->scope());
752 __ LoadContext(scratch, context_chain_length);
753 return ContextMemOperand(scratch, var->index());
754 } else {
755 return StackOperand(var);
756 }
757 }
758
759
760 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
761 // Use destination as scratch.
762 MemOperand location = VarOperand(var, dest);
763 __ Ldr(dest, location);
764 }
765
766
767 void FullCodeGenerator::SetVar(Variable* var,
768 Register src,
769 Register scratch0,
770 Register scratch1) {
771 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
772 DCHECK(!AreAliased(src, scratch0, scratch1));
773 MemOperand location = VarOperand(var, scratch0);
774 __ Str(src, location);
775
776 // Emit the write barrier code if the location is in the heap.
777 if (var->IsContextSlot()) {
778 // scratch0 contains the correct context.
779 __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
780 src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
781 }
782 }
783
784
785 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
786 bool should_normalize,
787 Label* if_true,
788 Label* if_false) {
789 // Only prepare for bailouts before splits if we're in a test
790 // context. Otherwise, we let the Visit function deal with the
791 // preparation to avoid preparing with the same AST id twice.
792 if (!context()->IsTest() || !info_->IsOptimizable()) return;
793
794 // TODO(all): Investigate to see if there is something to work on here.
795 Label skip;
796 if (should_normalize) {
797 __ B(&skip);
798 }
799 PrepareForBailout(expr, TOS_REG);
800 if (should_normalize) {
801 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
802 Split(eq, if_true, if_false, NULL);
803 __ Bind(&skip);
804 }
805 }
806
807
808 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
809 // The variable in the declaration always resides in the current function
810 // context.
811 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
812 if (generate_debug_code_) {
813 // Check that we're not inside a with or catch context.
814 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
815 __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
816 __ Check(ne, kDeclarationInWithContext);
817 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
818 __ Check(ne, kDeclarationInCatchContext);
819 }
820 }
821
822
823 void FullCodeGenerator::VisitVariableDeclaration(
824 VariableDeclaration* declaration) {
825 // If it was not possible to allocate the variable at compile time, we
826 // need to "declare" it at runtime to make sure it actually exists in the
827 // local context.
828 VariableProxy* proxy = declaration->proxy();
829 VariableMode mode = declaration->mode();
830 Variable* variable = proxy->var();
831 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
832
833 switch (variable->location()) {
834 case VariableLocation::GLOBAL:
835 case VariableLocation::UNALLOCATED:
836 globals_->Add(variable->name(), zone());
837 globals_->Add(variable->binding_needs_init()
838 ? isolate()->factory()->the_hole_value()
839 : isolate()->factory()->undefined_value(),
840 zone());
841 break;
842
843 case VariableLocation::PARAMETER:
844 case VariableLocation::LOCAL:
845 if (hole_init) {
846 Comment cmnt(masm_, "[ VariableDeclaration");
847 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
848 __ Str(x10, StackOperand(variable));
849 }
850 break;
851
852 case VariableLocation::CONTEXT:
853 if (hole_init) {
854 Comment cmnt(masm_, "[ VariableDeclaration");
855 EmitDebugCheckDeclarationContext(variable);
856 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
857 __ Str(x10, ContextMemOperand(cp, variable->index()));
858 // No write barrier since the_hole_value is in old space.
859 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
860 }
861 break;
862
863 case VariableLocation::LOOKUP: {
864 Comment cmnt(masm_, "[ VariableDeclaration");
865 __ Mov(x2, Operand(variable->name()));
866 // Declaration nodes are always introduced in one of four modes.
867 DCHECK(IsDeclaredVariableMode(mode));
868 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY
869 : NONE;
870 __ Mov(x1, Smi::FromInt(attr));
871 // Push initial value, if any.
872 // Note: For variables we must not push an initial value (such as
873 // 'undefined') because we may have a (legal) redeclaration and we
874 // must not destroy the current value.
875 if (hole_init) {
876 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
877 __ Push(cp, x2, x1, x0);
878 } else {
879 // Pushing 0 (xzr) indicates no initial value.
880 __ Push(cp, x2, x1, xzr);
881 }
882 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
883 break;
884 }
885 }
886 }
887
888
889 void FullCodeGenerator::VisitFunctionDeclaration(
890 FunctionDeclaration* declaration) {
891 VariableProxy* proxy = declaration->proxy();
892 Variable* variable = proxy->var();
893 switch (variable->location()) {
894 case VariableLocation::GLOBAL:
895 case VariableLocation::UNALLOCATED: {
896 globals_->Add(variable->name(), zone());
897 Handle<SharedFunctionInfo> function =
898 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
899 // Check for stack overflow exception.
900 if (function.is_null()) return SetStackOverflow();
901 globals_->Add(function, zone());
902 break;
903 }
904
905 case VariableLocation::PARAMETER:
906 case VariableLocation::LOCAL: {
907 Comment cmnt(masm_, "[ Function Declaration");
908 VisitForAccumulatorValue(declaration->fun());
909 __ Str(result_register(), StackOperand(variable));
910 break;
911 }
912
913 case VariableLocation::CONTEXT: {
914 Comment cmnt(masm_, "[ Function Declaration");
915 EmitDebugCheckDeclarationContext(variable);
916 VisitForAccumulatorValue(declaration->fun());
917 __ Str(result_register(), ContextMemOperand(cp, variable->index()));
918 int offset = Context::SlotOffset(variable->index());
919 // We know that we have written a function, which is not a smi.
920 __ RecordWriteContextSlot(cp,
921 offset,
922 result_register(),
923 x2,
924 kLRHasBeenSaved,
925 kDontSaveFPRegs,
926 EMIT_REMEMBERED_SET,
927 OMIT_SMI_CHECK);
928 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
929 break;
930 }
931
932 case VariableLocation::LOOKUP: {
933 Comment cmnt(masm_, "[ Function Declaration");
934 __ Mov(x2, Operand(variable->name()));
935 __ Mov(x1, Smi::FromInt(NONE));
936 __ Push(cp, x2, x1);
937 // Push initial value for function declaration.
938 VisitForStackValue(declaration->fun());
939 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
940 break;
941 }
942 }
943 }
944
945
946 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
947 // Call the runtime to declare the globals.
948 __ Mov(x11, Operand(pairs));
949 Register flags = xzr;
950 if (Smi::FromInt(DeclareGlobalsFlags())) {
951 flags = x10;
952 __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
953 }
954 __ Push(cp, x11, flags);
955 __ CallRuntime(Runtime::kDeclareGlobals, 3);
956 // Return value is ignored.
957 }
958
959
960 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
961 // Call the runtime to declare the modules.
962 __ Push(descriptions);
963 __ CallRuntime(Runtime::kDeclareModules, 1);
964 // Return value is ignored.
965 }
966
967
968 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
969 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
970 Comment cmnt(masm_, "[ SwitchStatement");
971 Breakable nested_statement(this, stmt);
972 SetStatementPosition(stmt);
973
974 // Keep the switch value on the stack until a case matches.
975 VisitForStackValue(stmt->tag());
976 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
977
978 ZoneList<CaseClause*>* clauses = stmt->cases();
979 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
980
981 Label next_test; // Recycled for each test.
982 // Compile all the tests with branches to their bodies.
983 for (int i = 0; i < clauses->length(); i++) {
984 CaseClause* clause = clauses->at(i);
985 clause->body_target()->Unuse();
986
987 // The default is not a test, but remember it as final fall through.
988 if (clause->is_default()) {
989 default_clause = clause;
990 continue;
991 }
992
993 Comment cmnt(masm_, "[ Case comparison");
994 __ Bind(&next_test);
995 next_test.Unuse();
996
997 // Compile the label expression.
998 VisitForAccumulatorValue(clause->label());
999
1000 // Perform the comparison as if via '==='.
1001 __ Peek(x1, 0); // Switch value.
1002
1003 JumpPatchSite patch_site(masm_);
1004 if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
1005 Label slow_case;
1006 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
1007 __ Cmp(x1, x0);
1008 __ B(ne, &next_test);
1009 __ Drop(1); // Switch value is no longer needed.
1010 __ B(clause->body_target());
1011 __ Bind(&slow_case);
1012 }
1013
1014 // Record position before stub call for type feedback.
1015 SetExpressionPosition(clause);
1016 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
1017 strength(language_mode())).code();
1018 CallIC(ic, clause->CompareId());
1019 patch_site.EmitPatchInfo();
1020
1021 Label skip;
1022 __ B(&skip);
1023 PrepareForBailout(clause, TOS_REG);
1024 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
1025 __ Drop(1);
1026 __ B(clause->body_target());
1027 __ Bind(&skip);
1028
1029 __ Cbnz(x0, &next_test);
1030 __ Drop(1); // Switch value is no longer needed.
1031 __ B(clause->body_target());
1032 }
1033
1034 // Discard the test value and jump to the default if present, otherwise to
1035 // the end of the statement.
1036 __ Bind(&next_test);
1037 __ Drop(1); // Switch value is no longer needed.
1038 if (default_clause == NULL) {
1039 __ B(nested_statement.break_label());
1040 } else {
1041 __ B(default_clause->body_target());
1042 }
1043
1044 // Compile all the case bodies.
1045 for (int i = 0; i < clauses->length(); i++) {
1046 Comment cmnt(masm_, "[ Case body");
1047 CaseClause* clause = clauses->at(i);
1048 __ Bind(clause->body_target());
1049 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1050 VisitStatements(clause->statements());
1051 }
1052
1053 __ Bind(nested_statement.break_label());
1054 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1055 }
1056
1057
1058 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1059 ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
1060 Comment cmnt(masm_, "[ ForInStatement");
1061 SetStatementPosition(stmt, SKIP_BREAK);
1062
1063 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1064
1065 // TODO(all): This visitor probably needs better comments and a revisit.
1066
1067 Label loop, exit;
1068 ForIn loop_statement(this, stmt);
1069 increment_loop_depth();
1070
1071 // Get the object to enumerate over. If the object is null or undefined, skip
1072 // over the loop. See ECMA-262 version 5, section 12.6.4.
1073 SetExpressionAsStatementPosition(stmt->enumerable());
1074 VisitForAccumulatorValue(stmt->enumerable());
1075 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
1076 Register null_value = x15;
1077 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1078 __ Cmp(x0, null_value);
1079 __ B(eq, &exit);
1080
1081 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1082
1083 // Convert the object to a JS object.
1084 Label convert, done_convert;
1085 __ JumpIfSmi(x0, &convert);
1086 __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge);
1087 __ Bind(&convert);
1088 __ Push(x0);
1089 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1090 __ Bind(&done_convert);
1091 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1092 __ Push(x0);
1093
1094 // Check for proxies.
1095 Label call_runtime;
1096 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1097 __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le);
1098
1099 // Check cache validity in generated code. This is a fast case for
1100 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1101 // guarantee cache validity, call the runtime system to check cache
1102 // validity or get the property names in a fixed array.
1103 __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime);
1104
1105 // The enum cache is valid. Load the map of the object being
1106 // iterated over and use the cache for the iteration.
1107 Label use_cache;
1108 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
1109 __ B(&use_cache);
1110
1111 // Get the set of properties to enumerate.
1112 __ Bind(&call_runtime);
1113 __ Push(x0); // Duplicate the enumerable object on the stack.
1114 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1115 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1116
1117 // If we got a map from the runtime call, we can do a fast
1118 // modification check. Otherwise, we got a fixed array, and we have
1119 // to do a slow check.
1120 Label fixed_array, no_descriptors;
1121 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
1122 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);
1123
1124 // We got a map in register x0. Get the enumeration cache from it.
1125 __ Bind(&use_cache);
1126
1127 __ EnumLengthUntagged(x1, x0);
1128 __ Cbz(x1, &no_descriptors);
1129
1130 __ LoadInstanceDescriptors(x0, x2);
1131 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
1132 __ Ldr(x2,
1133 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1134
1135 // Set up the four remaining stack slots.
1136 __ SmiTag(x1);
1137 // Map, enumeration cache, enum cache length, zero (both last as smis).
1138 __ Push(x0, x2, x1, xzr);
1139 __ B(&loop);
1140
1141 __ Bind(&no_descriptors);
1142 __ Drop(1);
1143 __ B(&exit);
1144
1145 // We got a fixed array in register x0. Iterate through that.
1146 __ Bind(&fixed_array);
1147
1148 __ LoadObject(x1, FeedbackVector());
1149 __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1150 int vector_index = FeedbackVector()->GetIndex(slot);
1151 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(vector_index)));
1152
1153 __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check.
1154 __ Peek(x10, 0); // Get enumerated object.
1155 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1156 // TODO(all): similar check was done already. Can we avoid it here?
1157 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE);
1158 DCHECK(Smi::FromInt(0) == 0);
1159 __ CzeroX(x1, le); // Zero indicates proxy.
1160 __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
1161 // Smi and array, fixed array length (as smi) and initial index.
1162 __ Push(x1, x0, x2, xzr);
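  // Together with the enumerable pushed earlier, the loop state on the stack
  // is now, from deepest slot to top of stack:
  //   [ enumerable, map-or-smi(1|0), array, length (smi), index (smi) ]
  // which is what the Drop(5) at the break label below unwinds.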
1163
1164 // Generate code for doing the condition check.
1165 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1166 __ Bind(&loop);
1167 SetExpressionAsStatementPosition(stmt->each());
1168
1169 // Load the current count to x0, load the length to x1.
1170 __ PeekPair(x0, x1, 0);
1171 __ Cmp(x0, x1); // Compare to the array length.
1172 __ B(hs, loop_statement.break_label());
1173
1174   // Get the current entry of the array into register x3.
1175 __ Peek(x10, 2 * kXRegSize);
1176 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
1177 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));
1178
1179   // Get the expected map from the stack or a smi in the
1180   // permanent slow case into register x2.
1181 __ Peek(x2, 3 * kXRegSize);
1182
1183 // Check if the expected map still matches that of the enumerable.
1184 // If not, we may have to filter the key.
1185 Label update_each;
1186 __ Peek(x1, 4 * kXRegSize);
1187 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
1188 __ Cmp(x11, x2);
1189 __ B(eq, &update_each);
1190
1191 // For proxies, no filtering is done.
1192 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1193 STATIC_ASSERT(kSmiTag == 0);
1194 __ Cbz(x2, &update_each);
1195
1196 // Convert the entry to a string or (smi) 0 if it isn't a property
1197 // any more. If the property has been removed while iterating, we
1198 // just skip it.
1199 __ Push(x1, x3);
1200 __ CallRuntime(Runtime::kForInFilter, 2);
1201 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1202 __ Mov(x3, x0);
1203 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
1204 loop_statement.continue_label());
1205
1206 // Update the 'each' property or variable from the possibly filtered
1207 // entry in register x3.
1208 __ Bind(&update_each);
1209 __ Mov(result_register(), x3);
1210 // Perform the assignment as if via '='.
1211 { EffectContext context(this);
1212 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1213 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1214 }
1215
1216 // Generate code for the body of the loop.
1217 Visit(stmt->body());
1218
1219 // Generate code for going to the next element by incrementing
1220 // the index (smi) stored on top of the stack.
1221 __ Bind(loop_statement.continue_label());
1222 // TODO(all): We could use a callee saved register to avoid popping.
1223 __ Pop(x0);
1224 __ Add(x0, x0, Smi::FromInt(1));
1225 __ Push(x0);
1226
1227 EmitBackEdgeBookkeeping(stmt, &loop);
1228 __ B(&loop);
1229
1230 // Remove the pointers stored on the stack.
1231 __ Bind(loop_statement.break_label());
1232 __ Drop(5);
1233
1234 // Exit and decrement the loop depth.
1235 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1236 __ Bind(&exit);
1237 decrement_loop_depth();
1238 }
1239
1240
1241 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1242 bool pretenure) {
1243 // Use the fast case closure allocation code that allocates in new space for
1244 // nested functions that don't need literals cloning. If we're running with
1245 // the --always-opt or the --prepare-always-opt flag, we need to use the
1246 // runtime function so that the new function we are creating here gets a
1247 // chance to have its code optimized and doesn't just get a copy of the
1248 // existing unoptimized code.
1249 if (!FLAG_always_opt &&
1250 !FLAG_prepare_always_opt &&
1251 !pretenure &&
1252 scope()->is_function_scope() &&
1253 info->num_literals() == 0) {
1254 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1255 __ Mov(x2, Operand(info));
1256 __ CallStub(&stub);
1257 } else {
1258 __ Mov(x11, Operand(info));
1259 __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex
1260 : Heap::kFalseValueRootIndex);
1261 __ Push(cp, x11, x10);
1262 __ CallRuntime(Runtime::kNewClosure, 3);
1263 }
1264 context()->Plug(x0);
1265 }
1266
1267
1268 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1269 int offset,
1270 FeedbackVectorICSlot slot) {
1271 if (NeedsHomeObject(initializer)) {
1272 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1273 __ Mov(StoreDescriptor::NameRegister(),
1274 Operand(isolate()->factory()->home_object_symbol()));
1275 __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
1276 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1277 CallStoreIC();
1278 }
1279 }
1280
1281
1282 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1283 TypeofMode typeof_mode,
1284 Label* slow) {
1285 Register current = cp;
1286 Register next = x10;
1287 Register temp = x11;
1288
1289 Scope* s = scope();
1290 while (s != NULL) {
1291 if (s->num_heap_slots() > 0) {
1292 if (s->calls_sloppy_eval()) {
1293 // Check that extension is NULL.
1294 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1295 __ Cbnz(temp, slow);
1296 }
1297 // Load next context in chain.
1298 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1299 // Walk the rest of the chain without clobbering cp.
1300 current = next;
1301 }
1302 // If no outer scope calls eval, we do not need to check more
1303 // context extensions.
1304 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1305 s = s->outer_scope();
1306 }
1307
1308 if (s->is_eval_scope()) {
1309 Label loop, fast;
1310 __ Mov(next, current);
1311
1312 __ Bind(&loop);
1313 // Terminate at native context.
1314 __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1315 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
1316 // Check that extension is NULL.
1317 __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1318 __ Cbnz(temp, slow);
1319 // Load next context in chain.
1320 __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1321 __ B(&loop);
1322 __ Bind(&fast);
1323 }
1324
1325   // All extension objects were empty and it is safe to use the normal
1326   // global load machinery.
1327 EmitGlobalVariableLoad(proxy, typeof_mode);
1328 }
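// Shape of the emitted fast path (sketch):
//
//   for each enclosing scope with a heap-allocated context, innermost first:
//     if the scope calls sloppy eval and context->extension() != NULL: slow
//     context = context->previous()
//   if the walk ends in an eval scope, keep checking extension slots up the
//   chain until the native context is reached
//
// Only when every extension slot is NULL is the plain global load emitted.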
1329
1330
1331 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1332 Label* slow) {
1333 DCHECK(var->IsContextSlot());
1334 Register context = cp;
1335 Register next = x10;
1336 Register temp = x11;
1337
1338 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1339 if (s->num_heap_slots() > 0) {
1340 if (s->calls_sloppy_eval()) {
1341 // Check that extension is NULL.
1342 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1343 __ Cbnz(temp, slow);
1344 }
1345 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1346 // Walk the rest of the chain without clobbering cp.
1347 context = next;
1348 }
1349 }
1350 // Check that last extension is NULL.
1351 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1352 __ Cbnz(temp, slow);
1353
1354 // This function is used only for loads, not stores, so it's safe to
1355   // return a cp-based operand (the write barrier cannot be allowed to
1356 // destroy the cp register).
1357 return ContextMemOperand(context, var->index());
1358 }
1359
1360
1361 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1362 TypeofMode typeof_mode,
1363 Label* slow, Label* done) {
1364 // Generate fast-case code for variables that might be shadowed by
1365 // eval-introduced variables. Eval is used a lot without
1366 // introducing variables. In those cases, we do not want to
1367 // perform a runtime call for all variables in the scope
1368 // containing the eval.
1369 Variable* var = proxy->var();
1370 if (var->mode() == DYNAMIC_GLOBAL) {
1371 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1372 __ B(done);
1373 } else if (var->mode() == DYNAMIC_LOCAL) {
1374 Variable* local = var->local_if_not_shadowed();
1375 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1376 if (local->mode() == LET || local->mode() == CONST ||
1377 local->mode() == CONST_LEGACY) {
1378 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1379 if (local->mode() == CONST_LEGACY) {
1380 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1381 } else { // LET || CONST
1382 __ Mov(x0, Operand(var->name()));
1383 __ Push(x0);
1384 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1385 }
1386 }
1387 __ B(done);
1388 }
1389 }
1390
1391
1392 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1393 TypeofMode typeof_mode) {
1394 Variable* var = proxy->var();
1395 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1396 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1397 if (var->IsGlobalSlot()) {
1398 DCHECK(var->index() > 0);
1399 DCHECK(var->IsStaticGlobalObjectProperty());
1400     // Each var occupies two slots in the context: one for reads, one for writes.
1401 int slot_index = var->index();
1402 int depth = scope()->ContextChainLength(var->scope());
1403 __ Mov(LoadGlobalViaContextDescriptor::DepthRegister(),
1404 Operand(Smi::FromInt(depth)));
1405 __ Mov(LoadGlobalViaContextDescriptor::SlotRegister(),
1406 Operand(Smi::FromInt(slot_index)));
1407 __ Mov(LoadGlobalViaContextDescriptor::NameRegister(),
1408 Operand(var->name()));
1409 LoadGlobalViaContextStub stub(isolate(), depth);
1410 __ CallStub(&stub);
1411
1412 } else {
1413 __ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
1414 __ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1415 __ Mov(LoadDescriptor::SlotRegister(),
1416 SmiFromSlot(proxy->VariableFeedbackSlot()));
1417 CallLoadIC(typeof_mode);
1418 }
1419 }
1420
1421
1422 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1423 TypeofMode typeof_mode) {
1424 // Record position before possible IC call.
1425 SetExpressionPosition(proxy);
1426 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1427 Variable* var = proxy->var();
1428
1429 // Three cases: global variables, lookup variables, and all other types of
1430 // variables.
1431 switch (var->location()) {
1432 case VariableLocation::GLOBAL:
1433 case VariableLocation::UNALLOCATED: {
1434 Comment cmnt(masm_, "Global variable");
1435 EmitGlobalVariableLoad(proxy, typeof_mode);
1436 context()->Plug(x0);
1437 break;
1438 }
1439
1440 case VariableLocation::PARAMETER:
1441 case VariableLocation::LOCAL:
1442 case VariableLocation::CONTEXT: {
1443 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1444 Comment cmnt(masm_, var->IsContextSlot()
1445 ? "Context variable"
1446 : "Stack variable");
1447 if (var->binding_needs_init()) {
1448 // var->scope() may be NULL when the proxy is located in eval code and
1449 // refers to a potential outside binding. Currently those bindings are
1450         // always looked up dynamically, i.e. in that case
1451         //     var->location() == LOOKUP
1452         // always holds.
1453 DCHECK(var->scope() != NULL);
1454
1455 // Check if the binding really needs an initialization check. The check
1456 // can be skipped in the following situation: we have a LET or CONST
1457 // binding in harmony mode, both the Variable and the VariableProxy have
1458 // the same declaration scope (i.e. they are both in global code, in the
1459 // same function or in the same eval code) and the VariableProxy is in
1460 // the source physically located after the initializer of the variable.
1461 //
1462 // We cannot skip any initialization checks for CONST in non-harmony
1463 // mode because const variables may be declared but never initialized:
1464 // if (false) { const x; }; var y = x;
1465 //
1466 // The condition on the declaration scopes is a conservative check for
1467 // nested functions that access a binding and are called before the
1468 // binding is initialized:
1469 // function() { f(); let x = 1; function f() { x = 2; } }
1470 //
1471 bool skip_init_check;
1472 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1473 skip_init_check = false;
1474 } else if (var->is_this()) {
1475 CHECK(info_->function() != nullptr &&
1476 (info_->function()->kind() & kSubclassConstructor) != 0);
1477 // TODO(dslomov): implement 'this' hole check elimination.
1478 skip_init_check = false;
1479 } else {
1480 // Check that we always have valid source position.
1481 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1482 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1483 skip_init_check = var->mode() != CONST_LEGACY &&
1484 var->initializer_position() < proxy->position();
1485 }
1486
1487 if (!skip_init_check) {
1488 // Let and const need a read barrier.
1489 GetVar(x0, var);
1490 Label done;
1491 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1492 if (var->mode() == LET || var->mode() == CONST) {
1493 // Throw a reference error when using an uninitialized let/const
1494 // binding in harmony mode.
1495 __ Mov(x0, Operand(var->name()));
1496 __ Push(x0);
1497 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1498 __ Bind(&done);
1499 } else {
1500           // Uninitialized const bindings outside of harmony mode are unholed.
1501 DCHECK(var->mode() == CONST_LEGACY);
1502 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
1503 __ Bind(&done);
1504 }
1505 context()->Plug(x0);
1506 break;
1507 }
1508 }
1509 context()->Plug(var);
1510 break;
1511 }
1512
1513 case VariableLocation::LOOKUP: {
1514 Label done, slow;
1515 // Generate code for loading from variables potentially shadowed by
1516 // eval-introduced variables.
1517 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1518 __ Bind(&slow);
1519 Comment cmnt(masm_, "Lookup variable");
1520 __ Mov(x1, Operand(var->name()));
1521 __ Push(cp, x1); // Context and name.
1522 Runtime::FunctionId function_id =
1523 typeof_mode == NOT_INSIDE_TYPEOF
1524 ? Runtime::kLoadLookupSlot
1525 : Runtime::kLoadLookupSlotNoReferenceError;
1526 __ CallRuntime(function_id, 2);
1527 __ Bind(&done);
1528 context()->Plug(x0);
1529 break;
1530 }
1531 }
1532 }
1533
1534
1535 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1536 Comment cmnt(masm_, "[ RegExpLiteral");
1537 Label materialized;
1538 // Registers will be used as follows:
1539 // x5 = materialized value (RegExp literal)
1540 // x4 = JS function, literals array
1541 // x3 = literal index
1542 // x2 = RegExp pattern
1543 // x1 = RegExp flags
1544 // x0 = RegExp literal clone
1545 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1546 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
1547 int literal_offset =
1548 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1549 __ Ldr(x5, FieldMemOperand(x4, literal_offset));
1550 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized);
1551
1552 // Create regexp literal using runtime function.
1553 // Result will be in x0.
1554 __ Mov(x3, Smi::FromInt(expr->literal_index()));
1555 __ Mov(x2, Operand(expr->pattern()));
1556 __ Mov(x1, Operand(expr->flags()));
1557 __ Push(x4, x3, x2, x1);
1558 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1559 __ Mov(x5, x0);
1560
1561 __ Bind(&materialized);
1562 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1563 Label allocated, runtime_allocate;
1564 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT);
1565 __ B(&allocated);
1566
1567 __ Bind(&runtime_allocate);
1568 __ Mov(x10, Smi::FromInt(size));
1569 __ Push(x5, x10);
1570 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1571 __ Pop(x5);
1572
1573 __ Bind(&allocated);
1574 // After this, registers are used as follows:
1575 // x0: Newly allocated regexp.
1576 // x5: Materialized regexp.
1577 // x10, x11, x12: temps.
1578 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize);
1579 context()->Plug(x0);
1580 }
1581
1582
1583 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1584 if (expression == NULL) {
1585 __ LoadRoot(x10, Heap::kNullValueRootIndex);
1586 __ Push(x10);
1587 } else {
1588 VisitForStackValue(expression);
1589 }
1590 }
1591
1592
1593 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1594 Comment cmnt(masm_, "[ ObjectLiteral");
1595
1596 Handle<FixedArray> constant_properties = expr->constant_properties();
1597 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1598 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1599 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1600 __ Mov(x1, Operand(constant_properties));
1601 int flags = expr->ComputeFlags();
1602 __ Mov(x0, Smi::FromInt(flags));
1603 if (MustCreateObjectLiteralWithRuntime(expr)) {
1604 __ Push(x3, x2, x1, x0);
1605 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1606 } else {
1607 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1608 __ CallStub(&stub);
1609 }
1610 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1611
1612 // If result_saved is true, the result is on top of the stack. If
1613 // result_saved is false, the result is in x0.
1614 bool result_saved = false;
1615
1616 AccessorTable accessor_table(zone());
1617 int property_index = 0;
1618 // store_slot_index points to the vector IC slot for the next store IC used.
1619 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1620 // and must be updated if the number of store ICs emitted here changes.
1621 int store_slot_index = 0;
1622 for (; property_index < expr->properties()->length(); property_index++) {
1623 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1624 if (property->is_computed_name()) break;
1625 if (property->IsCompileTimeValue()) continue;
1626
1627 Literal* key = property->key()->AsLiteral();
1628 Expression* value = property->value();
1629 if (!result_saved) {
1630 __ Push(x0); // Save result on stack
1631 result_saved = true;
1632 }
1633 switch (property->kind()) {
1634 case ObjectLiteral::Property::CONSTANT:
1635 UNREACHABLE();
1636 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1637 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1638 // Fall through.
1639 case ObjectLiteral::Property::COMPUTED:
1640 // It is safe to use [[Put]] here because the boilerplate already
1641 // contains computed properties with an uninitialized value.
1642 if (key->value()->IsInternalizedString()) {
1643 if (property->emit_store()) {
1644 VisitForAccumulatorValue(value);
1645 DCHECK(StoreDescriptor::ValueRegister().is(x0));
1646 __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1647 __ Peek(StoreDescriptor::ReceiverRegister(), 0);
1648 if (FLAG_vector_stores) {
1649 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1650 CallStoreIC();
1651 } else {
1652 CallStoreIC(key->LiteralFeedbackId());
1653 }
1654 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1655
1656 if (NeedsHomeObject(value)) {
1657 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
1658 __ Mov(StoreDescriptor::NameRegister(),
1659 Operand(isolate()->factory()->home_object_symbol()));
1660 __ Peek(StoreDescriptor::ValueRegister(), 0);
1661 if (FLAG_vector_stores) {
1662 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1663 }
1664 CallStoreIC();
1665 }
1666 } else {
1667 VisitForEffect(value);
1668 }
1669 break;
1670 }
1671 __ Peek(x0, 0);
1672 __ Push(x0);
1673 VisitForStackValue(key);
1674 VisitForStackValue(value);
1675 if (property->emit_store()) {
1676 EmitSetHomeObjectIfNeeded(
1677 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1678 __ Mov(x0, Smi::FromInt(SLOPPY)); // Language mode
1679 __ Push(x0);
1680 __ CallRuntime(Runtime::kSetProperty, 4);
1681 } else {
1682 __ Drop(3);
1683 }
1684 break;
1685 case ObjectLiteral::Property::PROTOTYPE:
1686 DCHECK(property->emit_store());
1687 // Duplicate receiver on stack.
1688 __ Peek(x0, 0);
1689 __ Push(x0);
1690 VisitForStackValue(value);
1691 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1692 break;
1693 case ObjectLiteral::Property::GETTER:
1694 if (property->emit_store()) {
1695 accessor_table.lookup(key)->second->getter = value;
1696 }
1697 break;
1698 case ObjectLiteral::Property::SETTER:
1699 if (property->emit_store()) {
1700 accessor_table.lookup(key)->second->setter = value;
1701 }
1702 break;
1703 }
1704 }
1705
1706 // Emit code to define accessors, using only a single call to the runtime for
1707 // each pair of corresponding getters and setters.
1708 for (AccessorTable::Iterator it = accessor_table.begin();
1709 it != accessor_table.end();
1710 ++it) {
1711 __ Peek(x10, 0); // Duplicate receiver.
1712 __ Push(x10);
1713 VisitForStackValue(it->first);
1714 EmitAccessor(it->second->getter);
1715 EmitSetHomeObjectIfNeeded(
1716 it->second->getter, 2,
1717 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1718 EmitAccessor(it->second->setter);
1719 EmitSetHomeObjectIfNeeded(
1720 it->second->setter, 3,
1721 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1722 __ Mov(x10, Smi::FromInt(NONE));
1723 __ Push(x10);
1724 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1725 }
1726
1727 // Object literals have two parts. The "static" part on the left contains no
1728 // computed property names, and so we can compute its map ahead of time; see
1729 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1730 // starts with the first computed property name, and continues with all
1731 // properties to its right. All the code from above initializes the static
1732 // component of the object literal, and arranges for the map of the result to
1733 // reflect the static order in which the keys appear. For the dynamic
1734 // properties, we compile them into a series of "SetOwnProperty" runtime
1735 // calls. This will preserve insertion order.
1736 for (; property_index < expr->properties()->length(); property_index++) {
1737 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1738
1739 Expression* value = property->value();
1740 if (!result_saved) {
1741 __ Push(x0); // Save result on stack
1742 result_saved = true;
1743 }
1744
1745 __ Peek(x10, 0); // Duplicate receiver.
1746 __ Push(x10);
1747
1748 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1749 DCHECK(!property->is_computed_name());
1750 VisitForStackValue(value);
1751 DCHECK(property->emit_store());
1752 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1753 } else {
1754 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1755 VisitForStackValue(value);
1756 EmitSetHomeObjectIfNeeded(
1757 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1758
1759 switch (property->kind()) {
1760 case ObjectLiteral::Property::CONSTANT:
1761 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1762 case ObjectLiteral::Property::COMPUTED:
1763 if (property->emit_store()) {
1764 __ Mov(x0, Smi::FromInt(NONE));
1765 __ Push(x0);
1766 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1767 } else {
1768 __ Drop(3);
1769 }
1770 break;
1771
1772 case ObjectLiteral::Property::PROTOTYPE:
1773 UNREACHABLE();
1774 break;
1775
1776 case ObjectLiteral::Property::GETTER:
1777 __ Mov(x0, Smi::FromInt(NONE));
1778 __ Push(x0);
1779 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1780 break;
1781
1782 case ObjectLiteral::Property::SETTER:
1783 __ Mov(x0, Smi::FromInt(NONE));
1784 __ Push(x0);
1785 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1786 break;
1787 }
1788 }
1789 }
1790
1791 if (expr->has_function()) {
1792 DCHECK(result_saved);
1793 __ Peek(x0, 0);
1794 __ Push(x0);
1795 __ CallRuntime(Runtime::kToFastProperties, 1);
1796 }
1797
1798 if (result_saved) {
1799 context()->PlugTOS();
1800 } else {
1801 context()->Plug(x0);
1802 }
1803
1804 // Verify that compilation exactly consumed the number of store ic slots that
1805 // the ObjectLiteral node had to offer.
1806 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1807 }
1808
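To illustrate the static/dynamic split described in the comment above, a small TypeScript example (illustrative only, not part of this patch): properties before the first computed name are baked into the boilerplate map, while the computed name and everything after it are defined one at a time at runtime.

  const k = "c"; // hypothetical computed key
  const o = { a: 1, b: 2, [k]: 3, d: 4 };
  // 'a' and 'b' come from the precomputed boilerplate map; '[k]' and 'd' are
  // added via per-property runtime calls, preserving insertion order.
  console.assert(Object.keys(o).join(",") === "a,b,c,d");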
1809
1810 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1811 Comment cmnt(masm_, "[ ArrayLiteral");
1812
1813 expr->BuildConstantElements(isolate());
1814 Handle<FixedArray> constant_elements = expr->constant_elements();
1815 bool has_fast_elements =
1816 IsFastObjectElementsKind(expr->constant_elements_kind());
1817
1818 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1819 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1820 // If the only customer of allocation sites is transitioning, then we can
1821 // turn allocation site tracking off when there is nowhere else to transition to.
1822 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1823 }
1824
1825 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1826 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset));
1827 __ Mov(x2, Smi::FromInt(expr->literal_index()));
1828 __ Mov(x1, Operand(constant_elements));
1829 if (MustCreateArrayLiteralWithRuntime(expr)) {
1830 __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
1831 __ Push(x3, x2, x1, x0);
1832 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1833 } else {
1834 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1835 __ CallStub(&stub);
1836 }
1837 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1838
1839 bool result_saved = false; // Is the result saved to the stack?
1840 ZoneList<Expression*>* subexprs = expr->values();
1841 int length = subexprs->length();
1842
1843 // Emit code to evaluate all the non-constant subexpressions and to store
1844 // them into the newly cloned array.
1845 int array_index = 0;
1846 for (; array_index < length; array_index++) {
1847 Expression* subexpr = subexprs->at(array_index);
1848 if (subexpr->IsSpread()) break;
1849
1850 // If the subexpression is a literal or a simple materialized literal it
1851 // is already set in the cloned array.
1852 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1853
1854 if (!result_saved) {
1855 __ Mov(x1, Smi::FromInt(expr->literal_index()));
1856 __ Push(x0, x1);
1857 result_saved = true;
1858 }
1859 VisitForAccumulatorValue(subexpr);
1860
1861 if (has_fast_elements) {
1862 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1863 __ Peek(x6, kPointerSize); // Copy of array literal.
1864 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset));
1865 __ Str(result_register(), FieldMemOperand(x1, offset));
1866 // Update the write barrier for the array store.
1867 __ RecordWriteField(x1, offset, result_register(), x10,
1868 kLRHasBeenSaved, kDontSaveFPRegs,
1869 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1870 } else {
1871 __ Mov(x3, Smi::FromInt(array_index));
1872 StoreArrayLiteralElementStub stub(isolate());
1873 __ CallStub(&stub);
1874 }
1875
1876 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1877 }
1878
1879 // In case the array literal contains spread expressions it has two parts. The
1880 // first part is the "static" array with a literal index, and is handled
1881 // above. The second part starts at the first spread expression (inclusive),
1882 // and its elements get appended to the array. Note that the number of
1883 // elements an iterable produces is unknown ahead of time.
1884 if (array_index < length && result_saved) {
1885 __ Drop(1); // literal index
1886 __ Pop(x0);
1887 result_saved = false;
1888 }
1889 for (; array_index < length; array_index++) {
1890 Expression* subexpr = subexprs->at(array_index);
1891
1892 __ Push(x0);
1893 if (subexpr->IsSpread()) {
1894 VisitForStackValue(subexpr->AsSpread()->expression());
1895 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1896 } else {
1897 VisitForStackValue(subexpr);
1898 __ CallRuntime(Runtime::kAppendElement, 2);
1899 }
1900
1901 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1902 }
1903
1904 if (result_saved) {
1905 __ Drop(1); // literal index
1906 context()->PlugTOS();
1907 } else {
1908 context()->Plug(x0);
1909 }
1910 }
1911
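As a companion to the spread handling above, a TypeScript sketch (illustrative only, not part of this patch) of the two phases: the prefix before the first spread is cloned from the boilerplate, and everything from the spread on is appended element by element because an iterable's length is unknown ahead of time.

  function* rest(): Generator<number> { yield 3; yield 4; }
  const a = [1, 2, ...rest(), 5];
  // [1, 2] comes from the cloned boilerplate; 3, 4 and 5 are appended one at
  // a time as the iterable is consumed.
  console.assert(a.join(",") === "1,2,3,4,5");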
1912
1913 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1914 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1915
1916 Comment cmnt(masm_, "[ Assignment");
1917 SetExpressionPosition(expr, INSERT_BREAK);
1918
1919 Property* property = expr->target()->AsProperty();
1920 LhsKind assign_type = Property::GetAssignType(property);
1921
1922 // Evaluate LHS expression.
1923 switch (assign_type) {
1924 case VARIABLE:
1925 // Nothing to do here.
1926 break;
1927 case NAMED_PROPERTY:
1928 if (expr->is_compound()) {
1929 // We need the receiver both on the stack and in the register.
1930 VisitForStackValue(property->obj());
1931 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
1932 } else {
1933 VisitForStackValue(property->obj());
1934 }
1935 break;
1936 case NAMED_SUPER_PROPERTY:
1937 VisitForStackValue(
1938 property->obj()->AsSuperPropertyReference()->this_var());
1939 VisitForAccumulatorValue(
1940 property->obj()->AsSuperPropertyReference()->home_object());
1941 __ Push(result_register());
1942 if (expr->is_compound()) {
1943 const Register scratch = x10;
1944 __ Peek(scratch, kPointerSize);
1945 __ Push(scratch, result_register());
1946 }
1947 break;
1948 case KEYED_SUPER_PROPERTY:
1949 VisitForStackValue(
1950 property->obj()->AsSuperPropertyReference()->this_var());
1951 VisitForStackValue(
1952 property->obj()->AsSuperPropertyReference()->home_object());
1953 VisitForAccumulatorValue(property->key());
1954 __ Push(result_register());
1955 if (expr->is_compound()) {
1956 const Register scratch1 = x10;
1957 const Register scratch2 = x11;
1958 __ Peek(scratch1, 2 * kPointerSize);
1959 __ Peek(scratch2, kPointerSize);
1960 __ Push(scratch1, scratch2, result_register());
1961 }
1962 break;
1963 case KEYED_PROPERTY:
1964 if (expr->is_compound()) {
1965 VisitForStackValue(property->obj());
1966 VisitForStackValue(property->key());
1967 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
1968 __ Peek(LoadDescriptor::NameRegister(), 0);
1969 } else {
1970 VisitForStackValue(property->obj());
1971 VisitForStackValue(property->key());
1972 }
1973 break;
1974 }
1975
1976 // For compound assignments we need another deoptimization point after the
1977 // variable/property load.
1978 if (expr->is_compound()) {
1979 { AccumulatorValueContext context(this);
1980 switch (assign_type) {
1981 case VARIABLE:
1982 EmitVariableLoad(expr->target()->AsVariableProxy());
1983 PrepareForBailout(expr->target(), TOS_REG);
1984 break;
1985 case NAMED_PROPERTY:
1986 EmitNamedPropertyLoad(property);
1987 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1988 break;
1989 case NAMED_SUPER_PROPERTY:
1990 EmitNamedSuperPropertyLoad(property);
1991 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1992 break;
1993 case KEYED_SUPER_PROPERTY:
1994 EmitKeyedSuperPropertyLoad(property);
1995 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1996 break;
1997 case KEYED_PROPERTY:
1998 EmitKeyedPropertyLoad(property);
1999 PrepareForBailoutForId(property->LoadId(), TOS_REG);
2000 break;
2001 }
2002 }
2003
2004 Token::Value op = expr->binary_op();
2005 __ Push(x0); // Left operand goes on the stack.
2006 VisitForAccumulatorValue(expr->value());
2007
2008 AccumulatorValueContext context(this);
2009 if (ShouldInlineSmiCase(op)) {
2010 EmitInlineSmiBinaryOp(expr->binary_operation(),
2011 op,
2012 expr->target(),
2013 expr->value());
2014 } else {
2015 EmitBinaryOp(expr->binary_operation(), op);
2016 }
2017
2018 // Deoptimization point in case the binary operation may have side effects.
2019 PrepareForBailout(expr->binary_operation(), TOS_REG);
2020 } else {
2021 VisitForAccumulatorValue(expr->value());
2022 }
2023
2024 SetExpressionPosition(expr);
2025
2026 // Store the value.
2027 switch (assign_type) {
2028 case VARIABLE:
2029 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
2030 expr->op(), expr->AssignmentSlot());
2031 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2032 context()->Plug(x0);
2033 break;
2034 case NAMED_PROPERTY:
2035 EmitNamedPropertyAssignment(expr);
2036 break;
2037 case NAMED_SUPER_PROPERTY:
2038 EmitNamedSuperPropertyStore(property);
2039 context()->Plug(x0);
2040 break;
2041 case KEYED_SUPER_PROPERTY:
2042 EmitKeyedSuperPropertyStore(property);
2043 context()->Plug(x0);
2044 break;
2045 case KEYED_PROPERTY:
2046 EmitKeyedPropertyAssignment(expr);
2047 break;
2048 }
2049 }
2050
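For orientation, compound assignment is what takes the extra load and deoptimization points above; a TypeScript example (illustrative only, not part of this patch):

  const o = { p: 1 };
  // `o.p += 2` performs a property load, a binary operation, and a property
  // store; a bailout point is recorded after the load and after the binop.
  o.p += 2;
  console.assert(o.p === 3);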
2051
2052 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2053 SetExpressionPosition(prop);
2054 Literal* key = prop->key()->AsLiteral();
2055 DCHECK(!prop->IsSuperAccess());
2056
2057 __ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2058 __ Mov(LoadDescriptor::SlotRegister(),
2059 SmiFromSlot(prop->PropertyFeedbackSlot()));
2060 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2061 }
2062
2063
2064 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2065 // Stack: receiver, home_object.
2066 SetExpressionPosition(prop);
2067 Literal* key = prop->key()->AsLiteral();
2068 DCHECK(!key->value()->IsSmi());
2069 DCHECK(prop->IsSuperAccess());
2070
2071 __ Push(key->value());
2072 __ Push(Smi::FromInt(language_mode()));
2073 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2074 }
2075
2076
2077 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2078 SetExpressionPosition(prop);
2079 // Call keyed load IC. It has arguments key and receiver in x0 and x1.
2080 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2081 __ Mov(LoadDescriptor::SlotRegister(),
2082 SmiFromSlot(prop->PropertyFeedbackSlot()));
2083 CallIC(ic);
2084 }
2085
2086
2087 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2088 // Stack: receiver, home_object, key.
2089 SetExpressionPosition(prop);
2090 __ Push(Smi::FromInt(language_mode()));
2091 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2092 }
2093
2094
2095 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2096 Token::Value op,
2097 Expression* left_expr,
2098 Expression* right_expr) {
2099 Label done, both_smis, stub_call;
2100
2101 // Get the arguments.
2102 Register left = x1;
2103 Register right = x0;
2104 Register result = x0;
2105 __ Pop(left);
2106
2107 // Perform combined smi check on both operands.
2108 __ Orr(x10, left, right);
2109 JumpPatchSite patch_site(masm_);
2110 patch_site.EmitJumpIfSmi(x10, &both_smis);
2111
2112 __ Bind(&stub_call);
2113
2114 Handle<Code> code =
2115 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2116 {
2117 Assembler::BlockPoolsScope scope(masm_);
2118 CallIC(code, expr->BinaryOperationFeedbackId());
2119 patch_site.EmitPatchInfo();
2120 }
2121 __ B(&done);
2122
2123 __ Bind(&both_smis);
2124 // Smi case. This code works in the same way as the smi-smi case in the type
2125 // recording binary operation stub, see
2126 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2127 // TODO(all): That doesn't exist any more. Where are the comments?
2128 //
2129 // The set of operations that need to be supported here is controlled by
2130 // FullCodeGenerator::ShouldInlineSmiCase().
2131 switch (op) {
2132 case Token::SAR:
2133 __ Ubfx(right, right, kSmiShift, 5);
2134 __ Asr(result, left, right);
2135 __ Bic(result, result, kSmiShiftMask);
2136 break;
2137 case Token::SHL:
2138 __ Ubfx(right, right, kSmiShift, 5);
2139 __ Lsl(result, left, right);
2140 break;
2141 case Token::SHR:
2142 // If `left >>> right` >= 0x80000000, the result is not representable in a
2143 // signed 32-bit smi.
2144 __ Ubfx(right, right, kSmiShift, 5);
2145 __ Lsr(x10, left, right);
2146 __ Tbnz(x10, kXSignBit, &stub_call);
2147 __ Bic(result, x10, kSmiShiftMask);
2148 break;
2149 case Token::ADD:
2150 __ Adds(x10, left, right);
2151 __ B(vs, &stub_call);
2152 __ Mov(result, x10);
2153 break;
2154 case Token::SUB:
2155 __ Subs(x10, left, right);
2156 __ B(vs, &stub_call);
2157 __ Mov(result, x10);
2158 break;
2159 case Token::MUL: {
2160 Label not_minus_zero, done;
2161 STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
2162 STATIC_ASSERT(kSmiTag == 0);
2163 __ Smulh(x10, left, right);
2164 __ Cbnz(x10, &not_minus_zero);
2165 __ Eor(x11, left, right);
2166 __ Tbnz(x11, kXSignBit, &stub_call);
2167 __ Mov(result, x10);
2168 __ B(&done);
2169 __ Bind(&not_minus_zero);
2170 __ Cls(x11, x10);
2171 __ Cmp(x11, kXRegSizeInBits - kSmiShift);
2172 __ B(lt, &stub_call);
2173 __ SmiTag(result, x10);
2174 __ Bind(&done);
2175 break;
2176 }
2177 case Token::BIT_OR:
2178 __ Orr(result, left, right);
2179 break;
2180 case Token::BIT_AND:
2181 __ And(result, left, right);
2182 break;
2183 case Token::BIT_XOR:
2184 __ Eor(result, left, right);
2185 break;
2186 default:
2187 UNREACHABLE();
2188 }
2189
2190 __ Bind(&done);
2191 context()->Plug(x0);
2192 }
2193
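Two concrete cases that escape the inlined smi path above and fall back to the stub call, sketched in TypeScript (illustrative only, not part of this patch):

  // SHR: an unsigned shift can produce a value with the sign bit set, which
  // is not representable as a signed 32-bit smi.
  console.assert(-1 >>> 0 === 4294967295);
  // MUL: multiplying operands of opposite sign can yield -0, which a smi
  // cannot represent either.
  console.assert(Object.is(-1 * 0, -0));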
2194
2195 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2196 __ Pop(x1);
2197 Handle<Code> code =
2198 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2199 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
2200 {
2201 Assembler::BlockPoolsScope scope(masm_);
2202 CallIC(code, expr->BinaryOperationFeedbackId());
2203 patch_site.EmitPatchInfo();
2204 }
2205 context()->Plug(x0);
2206 }
2207
2208
2209 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2210 int* used_store_slots) {
2211 // Constructor is in x0.
2212 DCHECK(lit != NULL);
2213 __ push(x0);
2214
2215 // No access check is needed here since the constructor is created by the
2216 // class literal.
2217 Register scratch = x1;
2218 __ Ldr(scratch,
2219 FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
2220 __ Push(scratch);
2221
2222 for (int i = 0; i < lit->properties()->length(); i++) {
2223 ObjectLiteral::Property* property = lit->properties()->at(i);
2224 Expression* value = property->value();
2225
2226 if (property->is_static()) {
2227 __ Peek(scratch, kPointerSize); // constructor
2228 } else {
2229 __ Peek(scratch, 0); // prototype
2230 }
2231 __ Push(scratch);
2232 EmitPropertyKey(property, lit->GetIdForProperty(i));
2233
2234 // The static "prototype" property is read-only. The non-computed property
2235 // name case is handled in the parser. Since this is the only case where we
2236 // need to check for an own read-only property, we special-case it so we do
2237 // not need to perform the check for every property.
2238 if (property->is_static() && property->is_computed_name()) {
2239 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2240 __ Push(x0);
2241 }
2242
2243 VisitForStackValue(value);
2244 EmitSetHomeObjectIfNeeded(value, 2,
2245 lit->SlotForHomeObject(value, used_store_slots));
2246
2247 switch (property->kind()) {
2248 case ObjectLiteral::Property::CONSTANT:
2249 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2250 case ObjectLiteral::Property::PROTOTYPE:
2251 UNREACHABLE();
2252 case ObjectLiteral::Property::COMPUTED:
2253 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2254 break;
2255
2256 case ObjectLiteral::Property::GETTER:
2257 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2258 __ Push(x0);
2259 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2260 break;
2261
2262 case ObjectLiteral::Property::SETTER:
2263 __ Mov(x0, Smi::FromInt(DONT_ENUM));
2264 __ Push(x0);
2265 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2266 break;
2267
2268 default:
2269 UNREACHABLE();
2270 }
2271 }
2272
2273 // prototype
2274 __ CallRuntime(Runtime::kToFastProperties, 1);
2275
2276 // constructor
2277 __ CallRuntime(Runtime::kToFastProperties, 1);
2278
2279 if (is_strong(language_mode())) {
2280 __ Ldr(scratch,
2281 FieldMemOperand(x0, JSFunction::kPrototypeOrInitialMapOffset));
2282 __ push(x0);
2283 __ Push(scratch);
2284 // TODO(conradw): It would be more efficient to define the properties with
2285 // the right attributes the first time round.
2286 // Freeze the prototype.
2287 __ CallRuntime(Runtime::kObjectFreeze, 1);
2288 // Freeze the constructor.
2289 __ CallRuntime(Runtime::kObjectFreeze, 1);
2290 }
2291 }
2292
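The kThrowIfStaticPrototype check above exists because a static computed key may turn out to be the read-only "prototype" property, which only becomes known at runtime. A TypeScript sketch (illustrative only; the eval wrapper merely defers the error to runtime so the snippet compiles):

  function defineBadClass(): string {
    try {
      eval('class C { static ["proto" + "type"]() {} }');
      return "ok";
    } catch (e) {
      return (e as Error).constructor.name;
    }
  }
  console.assert(defineBadClass() === "TypeError");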
2293
2294 void FullCodeGenerator::EmitAssignment(Expression* expr,
2295 FeedbackVectorICSlot slot) {
2296 DCHECK(expr->IsValidReferenceExpressionOrThis());
2297
2298 Property* prop = expr->AsProperty();
2299 LhsKind assign_type = Property::GetAssignType(prop);
2300
2301 switch (assign_type) {
2302 case VARIABLE: {
2303 Variable* var = expr->AsVariableProxy()->var();
2304 EffectContext context(this);
2305 EmitVariableAssignment(var, Token::ASSIGN, slot);
2306 break;
2307 }
2308 case NAMED_PROPERTY: {
2309 __ Push(x0); // Preserve value.
2310 VisitForAccumulatorValue(prop->obj());
2311 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
2312 // this copy.
2313 __ Mov(StoreDescriptor::ReceiverRegister(), x0);
2314 __ Pop(StoreDescriptor::ValueRegister()); // Restore value.
2315 __ Mov(StoreDescriptor::NameRegister(),
2316 Operand(prop->key()->AsLiteral()->value()));
2317 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2318 CallStoreIC();
2319 break;
2320 }
2321 case NAMED_SUPER_PROPERTY: {
2322 __ Push(x0);
2323 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2324 VisitForAccumulatorValue(
2325 prop->obj()->AsSuperPropertyReference()->home_object());
2326 // stack: value, this; x0: home_object
2327 Register scratch = x10;
2328 Register scratch2 = x11;
2329 __ mov(scratch, result_register()); // home_object
2330 __ Peek(x0, kPointerSize); // value
2331 __ Peek(scratch2, 0); // this
2332 __ Poke(scratch2, kPointerSize); // this
2333 __ Poke(scratch, 0); // home_object
2334 // stack: this, home_object; x0: value
2335 EmitNamedSuperPropertyStore(prop);
2336 break;
2337 }
2338 case KEYED_SUPER_PROPERTY: {
2339 __ Push(x0);
2340 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2341 VisitForStackValue(
2342 prop->obj()->AsSuperPropertyReference()->home_object());
2343 VisitForAccumulatorValue(prop->key());
2344 Register scratch = x10;
2345 Register scratch2 = x11;
2346 __ Peek(scratch2, 2 * kPointerSize); // value
2347 // stack: value, this, home_object; x0: key, x11: value
2348 __ Peek(scratch, kPointerSize); // this
2349 __ Poke(scratch, 2 * kPointerSize);
2350 __ Peek(scratch, 0); // home_object
2351 __ Poke(scratch, kPointerSize);
2352 __ Poke(x0, 0);
2353 __ Move(x0, scratch2);
2354 // stack: this, home_object, key; x0: value.
2355 EmitKeyedSuperPropertyStore(prop);
2356 break;
2357 }
2358 case KEYED_PROPERTY: {
2359 __ Push(x0); // Preserve value.
2360 VisitForStackValue(prop->obj());
2361 VisitForAccumulatorValue(prop->key());
2362 __ Mov(StoreDescriptor::NameRegister(), x0);
2363 __ Pop(StoreDescriptor::ReceiverRegister(),
2364 StoreDescriptor::ValueRegister());
2365 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2366 Handle<Code> ic =
2367 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2368 CallIC(ic);
2369 break;
2370 }
2371 }
2372 context()->Plug(x0);
2373 }
2374
2375
2376 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2377 Variable* var, MemOperand location) {
2378 __ Str(result_register(), location);
2379 if (var->IsContextSlot()) {
2380 // RecordWrite may destroy all its register arguments.
2381 __ Mov(x10, result_register());
2382 int offset = Context::SlotOffset(var->index());
2383 __ RecordWriteContextSlot(
2384 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
2385 }
2386 }
2387
2388
2389 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2390 FeedbackVectorICSlot slot) {
2391 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
2392 if (var->IsUnallocated()) {
2393 // Global var, const, or let.
2394 __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2395 __ Ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
2396 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2397 CallStoreIC();
2398
2399 } else if (var->IsGlobalSlot()) {
2400 // Global var, const, or let.
2401 DCHECK(var->index() > 0);
2402 DCHECK(var->IsStaticGlobalObjectProperty());
2403 // Each var occupies two slots in the context: one for reads, one for writes.
2404 int slot_index = var->index() + 1;
2405 int depth = scope()->ContextChainLength(var->scope());
2406 __ Mov(StoreGlobalViaContextDescriptor::DepthRegister(),
2407 Operand(Smi::FromInt(depth)));
2408 __ Mov(StoreGlobalViaContextDescriptor::SlotRegister(),
2409 Operand(Smi::FromInt(slot_index)));
2410 __ Mov(StoreGlobalViaContextDescriptor::NameRegister(),
2411 Operand(var->name()));
2412 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
2413 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2414 __ CallStub(&stub);
2415
2416 } else if (var->mode() == LET && op != Token::INIT_LET) {
2417 // Non-initializing assignment to let variable needs a write barrier.
2418 DCHECK(!var->IsLookupSlot());
2419 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2420 Label assign;
2421 MemOperand location = VarOperand(var, x1);
2422 __ Ldr(x10, location);
2423 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
2424 __ Mov(x10, Operand(var->name()));
2425 __ Push(x10);
2426 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2427 // Perform the assignment.
2428 __ Bind(&assign);
2429 EmitStoreToStackLocalOrContextSlot(var, location);
2430
2431 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2432 // Assignment to const variable needs a write barrier.
2433 DCHECK(!var->IsLookupSlot());
2434 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2435 Label const_error;
2436 MemOperand location = VarOperand(var, x1);
2437 __ Ldr(x10, location);
2438 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
2439 __ Mov(x10, Operand(var->name()));
2440 __ Push(x10);
2441 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2442 __ Bind(&const_error);
2443 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2444
2445 } else if (var->is_this() && op == Token::INIT_CONST) {
2446 // Initializing assignment to const {this} needs a write barrier.
2447 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2448 Label uninitialized_this;
2449 MemOperand location = VarOperand(var, x1);
2450 __ Ldr(x10, location);
2451 __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
2452 __ Mov(x0, Operand(var->name()));
2453 __ Push(x0);
2454 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2455 __ bind(&uninitialized_this);
2456 EmitStoreToStackLocalOrContextSlot(var, location);
2457
2458 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2459 if (var->IsLookupSlot()) {
2460 // Assignment to var.
2461 __ Mov(x11, Operand(var->name()));
2462 __ Mov(x10, Smi::FromInt(language_mode()));
2463 // jssp[0] : mode.
2464 // jssp[8] : name.
2465 // jssp[16] : context.
2466 // jssp[24] : value.
2467 __ Push(x0, cp, x11, x10);
2468 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2469 } else {
2470 // Assignment to var or initializing assignment to let/const in harmony
2471 // mode.
2472 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2473 MemOperand location = VarOperand(var, x1);
2474 if (FLAG_debug_code && op == Token::INIT_LET) {
2475 __ Ldr(x10, location);
2476 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
2477 __ Check(eq, kLetBindingReInitialization);
2478 }
2479 EmitStoreToStackLocalOrContextSlot(var, location);
2480 }
2481
2482 } else if (op == Token::INIT_CONST_LEGACY) {
2483 // Const initializers need a write barrier.
2484 DCHECK(var->mode() == CONST_LEGACY);
2485 DCHECK(!var->IsParameter()); // No const parameters.
2486 if (var->IsLookupSlot()) {
2487 __ Mov(x1, Operand(var->name()));
2488 __ Push(x0, cp, x1);
2489 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2490 } else {
2491 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2492 Label skip;
2493 MemOperand location = VarOperand(var, x1);
2494 __ Ldr(x10, location);
2495 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip);
2496 EmitStoreToStackLocalOrContextSlot(var, location);
2497 __ Bind(&skip);
2498 }
2499
2500 } else {
2501 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2502 if (is_strict(language_mode())) {
2503 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2504 }
2505 // Silently ignore store in sloppy mode.
2506 }
2507 }
2508
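The hole checks above are what surface the observable TDZ and const-assignment errors. A TypeScript sketch (illustrative only; eval defers the check to runtime so the snippet compiles):

  function assignInTDZ(): string {
    try {
      eval("x = 1; let x;"); // the store hits the hole (uninitialized binding)
      return "ok";
    } catch (e) {
      return (e as Error).constructor.name;
    }
  }
  console.assert(assignInTDZ() === "ReferenceError");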
2509
2510 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2511 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
2512 // Assignment to a property, using a named store IC.
2513 Property* prop = expr->target()->AsProperty();
2514 DCHECK(prop != NULL);
2515 DCHECK(prop->key()->IsLiteral());
2516
2517 __ Mov(StoreDescriptor::NameRegister(),
2518 Operand(prop->key()->AsLiteral()->value()));
2519 __ Pop(StoreDescriptor::ReceiverRegister());
2520 if (FLAG_vector_stores) {
2521 EmitLoadStoreICSlot(expr->AssignmentSlot());
2522 CallStoreIC();
2523 } else {
2524 CallStoreIC(expr->AssignmentFeedbackId());
2525 }
2526
2527 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2528 context()->Plug(x0);
2529 }
2530
2531
2532 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2533 // Assignment to named property of super.
2534 // x0 : value
2535 // stack : receiver ('this'), home_object
2536 DCHECK(prop != NULL);
2537 Literal* key = prop->key()->AsLiteral();
2538 DCHECK(key != NULL);
2539
2540 __ Push(key->value());
2541 __ Push(x0);
2542 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2543 : Runtime::kStoreToSuper_Sloppy),
2544 4);
2545 }
2546
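A TypeScript sketch (illustrative only, not part of this patch) of the construct this helper stores for: the receiver is the current `this`, while the property lookup starts at the home object's prototype.

  class Base {
    set p(v: number) { (this as any)._p = v; }
  }
  class Derived extends Base {
    m() { super.p = 42; } // named super store: Base's setter, `this` receiver
  }
  const d = new Derived();
  d.m();
  console.assert((d as any)._p === 42);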
2547
2548 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2549 // Assignment to keyed property of super.
2550 // x0 : value
2551 // stack : receiver ('this'), home_object, key
2552 DCHECK(prop != NULL);
2553
2554 __ Push(x0);
2555 __ CallRuntime(
2556 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2557 : Runtime::kStoreKeyedToSuper_Sloppy),
2558 4);
2559 }
2560
2561
2562 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2563 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
2564 // Assignment to a property, using a keyed store IC.
2565
2566 // TODO(all): Could we pass this in registers rather than on the stack?
2567 __ Pop(StoreDescriptor::NameRegister(), StoreDescriptor::ReceiverRegister());
2568 DCHECK(StoreDescriptor::ValueRegister().is(x0));
2569
2570 Handle<Code> ic =
2571 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2572 if (FLAG_vector_stores) {
2573 EmitLoadStoreICSlot(expr->AssignmentSlot());
2574 CallIC(ic);
2575 } else {
2576 CallIC(ic, expr->AssignmentFeedbackId());
2577 }
2578
2579 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2580 context()->Plug(x0);
2581 }
2582
2583
2584 void FullCodeGenerator::VisitProperty(Property* expr) {
2585 Comment cmnt(masm_, "[ Property");
2586 SetExpressionPosition(expr);
2587 Expression* key = expr->key();
2588
2589 if (key->IsPropertyName()) {
2590 if (!expr->IsSuperAccess()) {
2591 VisitForAccumulatorValue(expr->obj());
2592 __ Move(LoadDescriptor::ReceiverRegister(), x0);
2593 EmitNamedPropertyLoad(expr);
2594 } else {
2595 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2596 VisitForStackValue(
2597 expr->obj()->AsSuperPropertyReference()->home_object());
2598 EmitNamedSuperPropertyLoad(expr);
2599 }
2600 } else {
2601 if (!expr->IsSuperAccess()) {
2602 VisitForStackValue(expr->obj());
2603 VisitForAccumulatorValue(expr->key());
2604 __ Move(LoadDescriptor::NameRegister(), x0);
2605 __ Pop(LoadDescriptor::ReceiverRegister());
2606 EmitKeyedPropertyLoad(expr);
2607 } else {
2608 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2609 VisitForStackValue(
2610 expr->obj()->AsSuperPropertyReference()->home_object());
2611 VisitForStackValue(expr->key());
2612 EmitKeyedSuperPropertyLoad(expr);
2613 }
2614 }
2615 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2616 context()->Plug(x0);
2617 }
2618
2619
2620 void FullCodeGenerator::CallIC(Handle<Code> code,
2621 TypeFeedbackId ast_id) {
2622 ic_total_count_++;
2623 // All calls must have a predictable size in full-codegen code to ensure that
2624 // the debugger can patch them correctly.
2625 __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2626 }
2627
2628
2629 // Code common for calls using the IC.
2630 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2631 Expression* callee = expr->expression();
2632
2633 CallICState::CallType call_type =
2634 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2635
2636 // Get the target function.
2637 if (call_type == CallICState::FUNCTION) {
2638 { StackValueContext context(this);
2639 EmitVariableLoad(callee->AsVariableProxy());
2640 PrepareForBailout(callee, NO_REGISTERS);
2641 }
2642 // Push undefined as receiver. This is patched in the method prologue if it
2643 // is a sloppy mode method.
2644 {
2645 UseScratchRegisterScope temps(masm_);
2646 Register temp = temps.AcquireX();
2647 __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
2648 __ Push(temp);
2649 }
2650 } else {
2651 // Load the function from the receiver.
2652 DCHECK(callee->IsProperty());
2653 DCHECK(!callee->AsProperty()->IsSuperAccess());
2654 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2655 EmitNamedPropertyLoad(callee->AsProperty());
2656 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2657 // Push the target function under the receiver.
2658 __ Pop(x10);
2659 __ Push(x0, x10);
2660 }
2661
2662 EmitCall(expr, call_type);
2663 }
2664
2665
2666 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2667 Expression* callee = expr->expression();
2668 DCHECK(callee->IsProperty());
2669 Property* prop = callee->AsProperty();
2670 DCHECK(prop->IsSuperAccess());
2671 SetExpressionPosition(prop);
2672
2673 Literal* key = prop->key()->AsLiteral();
2674 DCHECK(!key->value()->IsSmi());
2675
2676 // Load the function from the receiver.
2677 const Register scratch = x10;
2678 SuperPropertyReference* super_ref =
2679 callee->AsProperty()->obj()->AsSuperPropertyReference();
2680 VisitForStackValue(super_ref->home_object());
2681 VisitForAccumulatorValue(super_ref->this_var());
2682 __ Push(x0);
2683 __ Peek(scratch, kPointerSize);
2684 __ Push(x0, scratch);
2685 __ Push(key->value());
2686 __ Push(Smi::FromInt(language_mode()));
2687
2688 // Stack here:
2689 // - home_object
2690 // - this (receiver)
2691 // - this (receiver) <-- LoadFromSuper will pop here and below.
2692 // - home_object
2693 // - language_mode
2694 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2695
2696 // Replace home_object with target function.
2697 __ Poke(x0, kPointerSize);
2698
2699 // Stack here:
2700 // - target function
2701 // - this (receiver)
2702 EmitCall(expr, CallICState::METHOD);
2703 }
2704
2705
2706 // Code common for calls using the IC.
2707 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2708 Expression* key) {
2709 // Load the key.
2710 VisitForAccumulatorValue(key);
2711
2712 Expression* callee = expr->expression();
2713
2714 // Load the function from the receiver.
2715 DCHECK(callee->IsProperty());
2716 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
2717 __ Move(LoadDescriptor::NameRegister(), x0);
2718 EmitKeyedPropertyLoad(callee->AsProperty());
2719 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2720
2721 // Push the target function under the receiver.
2722 __ Pop(x10);
2723 __ Push(x0, x10);
2724
2725 EmitCall(expr, CallICState::METHOD);
2726 }
2727
2728
2729 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2730 Expression* callee = expr->expression();
2731 DCHECK(callee->IsProperty());
2732 Property* prop = callee->AsProperty();
2733 DCHECK(prop->IsSuperAccess());
2734 SetExpressionPosition(prop);
2735
2736 // Load the function from the receiver.
2737 const Register scratch = x10;
2738 SuperPropertyReference* super_ref =
2739 callee->AsProperty()->obj()->AsSuperPropertyReference();
2740 VisitForStackValue(super_ref->home_object());
2741 VisitForAccumulatorValue(super_ref->this_var());
2742 __ Push(x0);
2743 __ Peek(scratch, kPointerSize);
2744 __ Push(x0, scratch);
2745 VisitForStackValue(prop->key());
2746 __ Push(Smi::FromInt(language_mode()));
2747
2748 // Stack here:
2749 // - home_object
2750 // - this (receiver)
2751 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2752 // - home_object
2753 // - key
2754 // - language_mode
2755 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2756
2757 // Replace home_object with target function.
2758 __ Poke(x0, kPointerSize);
2759
2760 // Stack here:
2761 // - target function
2762 // - this (receiver)
2763 EmitCall(expr, CallICState::METHOD);
2764 }
2765
2766
2767 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2768 // Load the arguments.
2769 ZoneList<Expression*>* args = expr->arguments();
2770 int arg_count = args->length();
2771 for (int i = 0; i < arg_count; i++) {
2772 VisitForStackValue(args->at(i));
2773 }
2774
2775 SetCallPosition(expr, arg_count);
2776
2777 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2778 __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
2779 __ Peek(x1, (arg_count + 1) * kXRegSize);
2780 // Don't assign a type feedback id to the IC, since type feedback is provided
2781 // by the vector above.
2782 CallIC(ic);
2783
2784 RecordJSReturnSite(expr);
2785 // Restore context register.
2786 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2787 context()->DropAndPlug(1, x0);
2788 }
2789
2790
2791 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2792 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
2793 // Prepare to push a copy of the first argument or undefined if it doesn't
2794 // exist.
2795 if (arg_count > 0) {
2796 __ Peek(x9, arg_count * kXRegSize);
2797 } else {
2798 __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
2799 }
2800
2801 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2802
2803 // Prepare to push the language mode.
2804 __ Mov(x11, Smi::FromInt(language_mode()));
2805 // Prepare to push the start position of the scope the call resides in.
2806 __ Mov(x12, Smi::FromInt(scope()->start_position()));
2807
2808 // Push.
2809 __ Push(x9, x10, x11, x12);
2810
2811 // Do the runtime call.
2812 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2813 }
2814
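The resolution step above is what lets a direct eval see the caller's scope and language mode; in TypeScript terms (illustrative only, not part of this patch):

  function caller(): number {
    const local = 7;
    // A *direct* call to eval resolves identifiers in this function's scope;
    // an indirect call such as (0, eval)("local") would only see globals.
    return eval("local") as number;
  }
  console.assert(caller() === 7);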
2815
2816 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2817 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2818 VariableProxy* callee = expr->expression()->AsVariableProxy();
2819 if (callee->var()->IsLookupSlot()) {
2820 Label slow, done;
2821 SetExpressionPosition(callee);
2822 // Generate code for loading from variables potentially shadowed
2823 // by eval-introduced variables.
2824 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2825
2826 __ Bind(&slow);
2827 // Call the runtime to find the function to call (returned in x0)
2828 // and the object holding it (returned in x1).
2829 __ Mov(x10, Operand(callee->name()));
2830 __ Push(context_register(), x10);
2831 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2832 __ Push(x0, x1); // Receiver, function.
2833 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2834
2835 // If fast case code has been generated, emit code to push the
2836 // function and receiver and have the slow path jump around this
2837 // code.
2838 if (done.is_linked()) {
2839 Label call;
2840 __ B(&call);
2841 __ Bind(&done);
2842 // Push function.
2843 // The receiver is implicitly the global receiver. Indicate this
2844 // by passing undefined to the call function stub.
2845 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2846 __ Push(x0, x1);
2847 __ Bind(&call);
2848 }
2849 } else {
2850 VisitForStackValue(callee);
2851 // refEnv.WithBaseObject()
2852 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
2853 __ Push(x10); // Reserved receiver slot.
2854 }
2855 }
2856
2857
2858 void FullCodeGenerator::VisitCall(Call* expr) {
2859 #ifdef DEBUG
2860 // We want to verify that RecordJSReturnSite gets called on all paths
2861 // through this function. Avoid early returns.
2862 expr->return_is_recorded_ = false;
2863 #endif
2864
2865 Comment cmnt(masm_, "[ Call");
2866 Expression* callee = expr->expression();
2867 Call::CallType call_type = expr->GetCallType(isolate());
2868
2869 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2870 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2871 // to resolve the function we need to call. Then we call the resolved
2872 // function using the given arguments.
2873 ZoneList<Expression*>* args = expr->arguments();
2874 int arg_count = args->length();
2875
2876 PushCalleeAndWithBaseObject(expr);
2877
2878 // Push the arguments.
2879 for (int i = 0; i < arg_count; i++) {
2880 VisitForStackValue(args->at(i));
2881 }
2882
2883 // Push a copy of the function (found below the arguments) and
2884 // resolve eval.
2885 __ Peek(x10, (arg_count + 1) * kPointerSize);
2886 __ Push(x10);
2887 EmitResolvePossiblyDirectEval(arg_count);
2888
2889 // Touch up the stack with the resolved function.
2890 __ Poke(x0, (arg_count + 1) * kPointerSize);
2891
2892 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2893
2894 // Record source position for debugger.
2895 SetCallPosition(expr, arg_count);
2896
2897 // Call the evaluated function.
2898 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2899 __ Peek(x1, (arg_count + 1) * kXRegSize);
2900 __ CallStub(&stub);
2901 RecordJSReturnSite(expr);
2902 // Restore context register.
2903 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2904 context()->DropAndPlug(1, x0);
2905
2906 } else if (call_type == Call::GLOBAL_CALL) {
2907 EmitCallWithLoadIC(expr);
2908
2909 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2910 // Call to a lookup slot (dynamically introduced variable).
2911 PushCalleeAndWithBaseObject(expr);
2912 EmitCall(expr);
2913 } else if (call_type == Call::PROPERTY_CALL) {
2914 Property* property = callee->AsProperty();
2915 bool is_named_call = property->key()->IsPropertyName();
2916 if (property->IsSuperAccess()) {
2917 if (is_named_call) {
2918 EmitSuperCallWithLoadIC(expr);
2919 } else {
2920 EmitKeyedSuperCallWithLoadIC(expr);
2921 }
2922 } else {
2923 VisitForStackValue(property->obj());
2924 if (is_named_call) {
2925 EmitCallWithLoadIC(expr);
2926 } else {
2927 EmitKeyedCallWithLoadIC(expr, property->key());
2928 }
2929 }
2930 } else if (call_type == Call::SUPER_CALL) {
2931 EmitSuperConstructorCall(expr);
2932 } else {
2933 DCHECK(call_type == Call::OTHER_CALL);
2934 // Call to an arbitrary expression not handled specially above.
2935 VisitForStackValue(callee);
2936 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
2937 __ Push(x1);
2938 // Emit function call.
2939 EmitCall(expr);
2940 }
2941
2942 #ifdef DEBUG
2943 // RecordJSReturnSite should have been called.
2944 DCHECK(expr->return_is_recorded_);
2945 #endif
2946 }
2947
2948
2949 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2950 Comment cmnt(masm_, "[ CallNew");
2951 // According to ECMA-262, section 11.2.2, page 44, the function
2952 // expression in new calls must be evaluated before the
2953 // arguments.
2954
2955 // Push constructor on the stack. If it's not a function it's used as
2956 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2957 // ignored.
2958 DCHECK(!expr->expression()->IsSuperPropertyReference());
2959 VisitForStackValue(expr->expression());
2960
2961 // Push the arguments ("left-to-right") on the stack.
2962 ZoneList<Expression*>* args = expr->arguments();
2963 int arg_count = args->length();
2964 for (int i = 0; i < arg_count; i++) {
2965 VisitForStackValue(args->at(i));
2966 }
2967
2968 // Call the construct call builtin that handles allocation and
2969 // constructor invocation.
2970 SetConstructCallPosition(expr);
2971
2972 // Load function and argument count into x1 and x0.
2973 __ Mov(x0, arg_count);
2974 __ Peek(x1, arg_count * kXRegSize);
2975
2976 // Record call targets in unoptimized code.
2977 if (FLAG_pretenuring_call_new) {
2978 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2979 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
2980 expr->CallNewFeedbackSlot().ToInt() + 1);
2981 }
2982
2983 __ LoadObject(x2, FeedbackVector());
2984 __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));
2985
2986 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2987 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2988 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2989 context()->Plug(x0);
2990 }
2991
2992
2993 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2994 SuperCallReference* super_call_ref =
2995 expr->expression()->AsSuperCallReference();
2996 DCHECK_NOT_NULL(super_call_ref);
2997
2998 EmitLoadSuperConstructor(super_call_ref);
2999 __ push(result_register());
3000
3001 // Push the arguments ("left-to-right") on the stack.
3002 ZoneList<Expression*>* args = expr->arguments();
3003 int arg_count = args->length();
3004 for (int i = 0; i < arg_count; i++) {
3005 VisitForStackValue(args->at(i));
3006 }
3007
3008 // Call the construct call builtin that handles allocation and
3009 // constructor invocation.
3010 SetConstructCallPosition(expr);
3011
3012 // Load original constructor into x4.
3013 VisitForAccumulatorValue(super_call_ref->new_target_var());
3014 __ Mov(x4, result_register());
3015
3016 // Load function and argument count into x1 and x0.
3017 __ Mov(x0, arg_count);
3018 __ Peek(x1, arg_count * kXRegSize);
3019
3020 // Record call targets in unoptimized code.
3021 if (FLAG_pretenuring_call_new) {
3022 UNREACHABLE();
3023 /* TODO(dslomov): support pretenuring.
3024 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3025 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3026 expr->CallNewFeedbackSlot().ToInt() + 1);
3027 */
3028 }
3029
3030 __ LoadObject(x2, FeedbackVector());
3031 __ Mov(x3, SmiFromSlot(expr->CallFeedbackSlot()));
3032
3033 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3034 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3035
3036 RecordJSReturnSite(expr);
3037
3038 context()->Plug(x0);
3039 }
3040
3041
3042 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3043 ZoneList<Expression*>* args = expr->arguments();
3044 DCHECK(args->length() == 1);
3045
3046 VisitForAccumulatorValue(args->at(0));
3047
3048 Label materialize_true, materialize_false;
3049 Label* if_true = NULL;
3050 Label* if_false = NULL;
3051 Label* fall_through = NULL;
3052 context()->PrepareTest(&materialize_true, &materialize_false,
3053 &if_true, &if_false, &fall_through);
3054
3055 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3056 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);
3057
3058 context()->Plug(if_true, if_false);
3059 }
3060
3061
3062 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3063 ZoneList<Expression*>* args = expr->arguments();
3064 DCHECK(args->length() == 1);
3065
3066 VisitForAccumulatorValue(args->at(0));
3067
3068 Label materialize_true, materialize_false;
3069 Label* if_true = NULL;
3070 Label* if_false = NULL;
3071 Label* fall_through = NULL;
3072 context()->PrepareTest(&materialize_true, &materialize_false,
3073 &if_true, &if_false, &fall_through);
3074
3075 uint64_t sign_mask = V8_UINT64_C(1) << (kSmiShift + kSmiValueSize - 1);
3076
3077 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3078 __ TestAndSplit(x0, kSmiTagMask | sign_mask, if_true, if_false, fall_through);
3079
3080 context()->Plug(if_true, if_false);
3081 }
3082
3083
3084 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3085 ZoneList<Expression*>* args = expr->arguments();
3086 DCHECK(args->length() == 1);
3087
3088 VisitForAccumulatorValue(args->at(0));
3089
3090 Label materialize_true, materialize_false;
3091 Label* if_true = NULL;
3092 Label* if_false = NULL;
3093 Label* fall_through = NULL;
3094 context()->PrepareTest(&materialize_true, &materialize_false,
3095 &if_true, &if_false, &fall_through);
3096
3097 __ JumpIfSmi(x0, if_false);
3098 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
3099 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3100 // Undetectable objects behave like undefined when tested with typeof.
3101 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3102 __ Tbnz(x11, Map::kIsUndetectable, if_false);
3103 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset));
3104 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
3105 __ B(lt, if_false);
3106 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3107 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3108 Split(le, if_true, if_false, fall_through);
3109
3110 context()->Plug(if_true, if_false);
3111 }
3112
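A rough JS-level approximation of the test above (illustrative only, not part of this patch): smis and other primitives are rejected, null is accepted, and callables are excluded. Undetectable objects, which are also excluded, cannot be constructed from plain JS.

  const isNonCallableObject = (x: unknown): boolean =>
    x === null || typeof x === "object"; // explicit null test mirrors the root check
  console.assert(isNonCallableObject(null));
  console.assert(isNonCallableObject({}));
  console.assert(!isNonCallableObject(42));      // smi
  console.assert(!isNonCallableObject(() => 0)); // callable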
3113
3114 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3115 ZoneList<Expression*>* args = expr->arguments();
3116 DCHECK(args->length() == 1);
3117
3118 VisitForAccumulatorValue(args->at(0));
3119
3120 Label materialize_true, materialize_false;
3121 Label* if_true = NULL;
3122 Label* if_false = NULL;
3123 Label* fall_through = NULL;
3124 context()->PrepareTest(&materialize_true, &materialize_false,
3125 &if_true, &if_false, &fall_through);
3126
3127 __ JumpIfSmi(x0, if_false);
3128 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3129 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3130 Split(ge, if_true, if_false, fall_through);
3131
3132 context()->Plug(if_true, if_false);
3133 }
3134
3135
3136 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3137 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject");
3138 ZoneList<Expression*>* args = expr->arguments();
3139 DCHECK(args->length() == 1);
3140
3141 VisitForAccumulatorValue(args->at(0));
3142
3143 Label materialize_true, materialize_false;
3144 Label* if_true = NULL;
3145 Label* if_false = NULL;
3146 Label* fall_through = NULL;
3147 context()->PrepareTest(&materialize_true, &materialize_false,
3148 &if_true, &if_false, &fall_through);
3149
3150 __ JumpIfSmi(x0, if_false);
3151 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
3152 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset));
3153 __ Tst(x11, 1 << Map::kIsUndetectable);
3154 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3155 Split(ne, if_true, if_false, fall_through);
3156
3157 context()->Plug(if_true, if_false);
3158 }
3159
3160
3161 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3162 CallRuntime* expr) {
3163 ZoneList<Expression*>* args = expr->arguments();
3164 DCHECK(args->length() == 1);
3165 VisitForAccumulatorValue(args->at(0));
3166
3167 Label materialize_true, materialize_false, skip_lookup;
3168 Label* if_true = NULL;
3169 Label* if_false = NULL;
3170 Label* fall_through = NULL;
3171 context()->PrepareTest(&materialize_true, &materialize_false,
3172 &if_true, &if_false, &fall_through);
3173
3174 Register object = x0;
3175 __ AssertNotSmi(object);
3176
3177 Register map = x10;
3178 Register bitfield2 = x11;
3179 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
3180 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset));
3181 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup);
3182
3183 // Check for fast case object. Generate false result for slow case object.
3184 Register props = x12;
3185 Register props_map = x12;
3186 Register hash_table_map = x13;
3187 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset));
3188 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset));
3189 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex);
3190 __ Cmp(props_map, hash_table_map);
3191 __ B(eq, if_false);
3192
3193 // Look for the valueOf name in the descriptor array, and indicate false if
3194 // found. Since we omit an enumeration index check, a valueOf added via a
3195 // transition that shares its descriptor array yields a false positive.
3196 Label loop, done;
3197
3198 // Skip loop if no descriptors are valid.
3199 Register descriptors = x12;
3200 Register descriptors_length = x13;
3201 __ NumberOfOwnDescriptors(descriptors_length, map);
3202 __ Cbz(descriptors_length, &done);
3203
3204 __ LoadInstanceDescriptors(map, descriptors);
3205
3206 // Calculate the length of the descriptor array in pointer-size slots.
3207 Register descriptors_end = x14;
3208 __ Mov(x15, DescriptorArray::kDescriptorSize);
3209 __ Mul(descriptors_length, descriptors_length, x15);
3210 // Calculate location of the first key name.
3211 __ Add(descriptors, descriptors,
3212 DescriptorArray::kFirstOffset - kHeapObjectTag);
3213 // Calculate the end of the descriptor array.
3214 __ Add(descriptors_end, descriptors,
3215 Operand(descriptors_length, LSL, kPointerSizeLog2));
3216
3217 // Loop through all the keys in the descriptor array. If one of these is the
3218 // string "valueOf" the result is false.
3219 Register valueof_string = x1;
3220 int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize;
3221 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string()));
3222 __ Bind(&loop);
3223 __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex));
3224 __ Cmp(x15, valueof_string);
3225 __ B(eq, if_false);
3226 __ Cmp(descriptors, descriptors_end);
3227 __ B(ne, &loop);
3228
3229 __ Bind(&done);
3230
3231 // Set the bit in the map to indicate that there is no local valueOf field.
3232 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3233 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3234 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset));
3235
3236 __ Bind(&skip_lookup);
3237
3238 // If a valueOf property is not found on the object, check that its prototype
3239 // is the unmodified String prototype. If not, the result is false.
3240 Register prototype = x1;
3241 Register global_idx = x2;
3242 Register native_context = x2;
3243 Register string_proto = x3;
3244 Register proto_map = x4;
3245 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset));
3246 __ JumpIfSmi(prototype, if_false);
3247 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset));
3248 __ Ldr(global_idx, GlobalObjectMemOperand());
3249 __ Ldr(native_context,
3250 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset));
3251 __ Ldr(string_proto,
3252 ContextMemOperand(native_context,
3253 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3254 __ Cmp(proto_map, string_proto);
3255
3256 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3257 Split(eq, if_true, if_false, fall_through);
3258
3259 context()->Plug(if_true, if_false);
3260 }
3261
3262
3263 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3264 ZoneList<Expression*>* args = expr->arguments();
3265 DCHECK(args->length() == 1);
3266
3267 VisitForAccumulatorValue(args->at(0));
3268
3269 Label materialize_true, materialize_false;
3270 Label* if_true = NULL;
3271 Label* if_false = NULL;
3272 Label* fall_through = NULL;
3273 context()->PrepareTest(&materialize_true, &materialize_false,
3274 &if_true, &if_false, &fall_through);
3275
3276 __ JumpIfSmi(x0, if_false);
3277 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE);
3278 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3279 Split(eq, if_true, if_false, fall_through);
3280
3281 context()->Plug(if_true, if_false);
3282 }
3283
3284
3285 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3286 ZoneList<Expression*>* args = expr->arguments();
3287 DCHECK(args->length() == 1);
3288
3289 VisitForAccumulatorValue(args->at(0));
3290
3291 Label materialize_true, materialize_false;
3292 Label* if_true = NULL;
3293 Label* if_false = NULL;
3294 Label* fall_through = NULL;
3295 context()->PrepareTest(&materialize_true, &materialize_false,
3296 &if_true, &if_false, &fall_through);
3297
3298 // Only a HeapNumber can be -0.0, so return false if we have something else.
3299 __ JumpIfNotHeapNumber(x0, if_false, DO_SMI_CHECK);
3300
3301 // Test the bit pattern.
3302 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset));
3303 __ Cmp(x10, 1); // Set V on 0x8000000000000000.
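// Why this works (informal note): INT64_MIN (0x8000000000000000) is the only
// 64-bit value for which subtracting 1 overflows signed arithmetic, and that
// bit pattern is exactly the IEEE-754 encoding of -0.0, so V is set here if
// and only if the HeapNumber payload is -0.0.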
3304
3305 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3306 Split(vs, if_true, if_false, fall_through);
3307
3308 context()->Plug(if_true, if_false);
3309 }
3310
3311
3312 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3313 ZoneList<Expression*>* args = expr->arguments();
3314 DCHECK(args->length() == 1);
3315
3316 VisitForAccumulatorValue(args->at(0));
3317
3318 Label materialize_true, materialize_false;
3319 Label* if_true = NULL;
3320 Label* if_false = NULL;
3321 Label* fall_through = NULL;
3322 context()->PrepareTest(&materialize_true, &materialize_false,
3323 &if_true, &if_false, &fall_through);
3324
3325 __ JumpIfSmi(x0, if_false);
3326 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
3327 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3328 Split(eq, if_true, if_false, fall_through);
3329
3330 context()->Plug(if_true, if_false);
3331 }
3332
3333
3334 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3335 ZoneList<Expression*>* args = expr->arguments();
3336 DCHECK(args->length() == 1);
3337
3338 VisitForAccumulatorValue(args->at(0));
3339
3340 Label materialize_true, materialize_false;
3341 Label* if_true = NULL;
3342 Label* if_false = NULL;
3343 Label* fall_through = NULL;
3344 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3345 &if_false, &fall_through);
3346
3347 __ JumpIfSmi(x0, if_false);
3348 __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
3349 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3350 Split(eq, if_true, if_false, fall_through);
3351
3352 context()->Plug(if_true, if_false);
3353 }
3354
3355
3356 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3357 ZoneList<Expression*>* args = expr->arguments();
3358 DCHECK(args->length() == 1);
3359
3360 VisitForAccumulatorValue(args->at(0));
3361
3362 Label materialize_true, materialize_false;
3363 Label* if_true = NULL;
3364 Label* if_false = NULL;
3365 Label* fall_through = NULL;
3366 context()->PrepareTest(&materialize_true, &materialize_false,
3367 &if_true, &if_false, &fall_through);
3368
3369 __ JumpIfSmi(x0, if_false);
3370 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
3371 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3372 Split(eq, if_true, if_false, fall_through);
3373
3374 context()->Plug(if_true, if_false);
3375 }
3376
3377
3378 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3379 ZoneList<Expression*>* args = expr->arguments();
3380 DCHECK(args->length() == 1);
3381
3382 VisitForAccumulatorValue(args->at(0));
3383
3384 Label materialize_true, materialize_false;
3385 Label* if_true = NULL;
3386 Label* if_false = NULL;
3387 Label* fall_through = NULL;
3388 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3389 &if_false, &fall_through);
3390
3391 __ JumpIfSmi(x0, if_false);
3392 Register map = x10;
3393 Register type_reg = x11;
3394 __ Ldr(map, FieldMemOperand(x0, HeapObject::kMapOffset));
3395 __ Ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
3396 __ Sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE));
3397 __ Cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE));
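// Unsigned range-check idiom: after biasing by FIRST_JS_PROXY_TYPE, a single
// unsigned comparison ("ls" below) accepts exactly the instance types in
// [FIRST_JS_PROXY_TYPE, LAST_JS_PROXY_TYPE].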
3398 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3399 Split(ls, if_true, if_false, fall_through);
3400
3401 context()->Plug(if_true, if_false);
3402 }
3403
3404
3405 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3406 DCHECK(expr->arguments()->length() == 0);
3407
3408 Label materialize_true, materialize_false;
3409 Label* if_true = NULL;
3410 Label* if_false = NULL;
3411 Label* fall_through = NULL;
3412 context()->PrepareTest(&materialize_true, &materialize_false,
3413 &if_true, &if_false, &fall_through);
3414
3415 // Get the frame pointer for the calling frame.
3416 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3417
3418 // Skip the arguments adaptor frame if it exists.
3419 Label check_frame_marker;
3420 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset));
3421 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3422 __ B(ne, &check_frame_marker);
3423 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
3424
3425 // Check the marker in the calling frame.
3426 __ Bind(&check_frame_marker);
3427 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
3428 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT));
3429 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3430 Split(eq, if_true, if_false, fall_through);
3431
3432 context()->Plug(if_true, if_false);
3433 }
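// Informally, the function above backs the %_IsConstructCall() intrinsic used
// by the JS builtins, e.g. `if (!%_IsConstructCall()) ...`: it answers "was
// this frame entered via 'new'?" by checking the caller frame's marker,
// skipping over an arguments adaptor frame if one was interposed.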
3434
3435
3436 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3437 ZoneList<Expression*>* args = expr->arguments();
3438 DCHECK(args->length() == 2);
3439
3440 // Load the two objects into registers and perform the comparison.
3441 VisitForStackValue(args->at(0));
3442 VisitForAccumulatorValue(args->at(1));
3443
3444 Label materialize_true, materialize_false;
3445 Label* if_true = NULL;
3446 Label* if_false = NULL;
3447 Label* fall_through = NULL;
3448 context()->PrepareTest(&materialize_true, &materialize_false,
3449 &if_true, &if_false, &fall_through);
3450
3451 __ Pop(x1);
3452 __ Cmp(x0, x1);
3453 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3454 Split(eq, if_true, if_false, fall_through);
3455
3456 context()->Plug(if_true, if_false);
3457 }
3458
3459
3460 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3461 ZoneList<Expression*>* args = expr->arguments();
3462 DCHECK(args->length() == 1);
3463
3464 // ArgumentsAccessStub expects the key in x1.
3465 VisitForAccumulatorValue(args->at(0));
3466 __ Mov(x1, x0);
3467 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3468 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3469 __ CallStub(&stub);
3470 context()->Plug(x0);
3471 }
3472
3473
3474 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3475 DCHECK(expr->arguments()->length() == 0);
3476 Label exit;
3477 // Get the number of formal parameters.
3478 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
3479
3480 // Check if the calling frame is an arguments adaptor frame.
3481 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3482 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset));
3483 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3484 __ B(ne, &exit);
3485
3486 // Arguments adaptor case: Read the arguments length from the
3487 // adaptor frame.
3488 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset));
3489
3490 __ Bind(&exit);
3491 context()->Plug(x0);
3492 }
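// A sketch of the two cases above (illustrative example, not from the source):
// for `function f(a, b) {}` called as f(1, 2, 3), an arguments adaptor frame
// is interposed and the true count (3) must be read from it; called as
// f(1, 2), no adaptor frame exists and the formal parameter count is already
// correct.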
3493
3494
3495 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3496 ASM_LOCATION("FullCodeGenerator::EmitClassOf");
3497 ZoneList<Expression*>* args = expr->arguments();
3498 DCHECK(args->length() == 1);
3499 Label done, null, function, non_function_constructor;
3500
3501 VisitForAccumulatorValue(args->at(0));
3502
3503 // If the object is a smi, we return null.
3504 __ JumpIfSmi(x0, &null);
3505
3506 // Check that the object is a JS object but take special care of JS
3507 // functions to make sure they have 'Function' as their class.
3508 // Assume that there are only two callable types, and one of them is at
3509 // either end of the type range for JS object types. Saves extra comparisons.
3510 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3511 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE);
3512 // x10: object's map.
3513 // x11: object's type.
3514 __ B(lt, &null);
3515 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3516 FIRST_SPEC_OBJECT_TYPE + 1);
3517 __ B(eq, &function);
3518
3519 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE);
3520 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3521 LAST_SPEC_OBJECT_TYPE - 1);
3522 __ B(eq, &function);
3523 // Assume that there is no larger type.
3524 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3525
3526 // Check if the constructor in the map is a JS function.
3527 Register instance_type = x14;
3528 __ GetMapConstructor(x12, x10, x13, instance_type);
3529 __ Cmp(instance_type, JS_FUNCTION_TYPE);
3530 __ B(ne, &non_function_constructor);
3531
3532 // x12 now contains the constructor function. Grab the
3533 // instance class name from there.
3534 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
3535 __ Ldr(x0,
3536 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
3537 __ B(&done);
3538
3539 // Functions have class 'Function'.
3540 __ Bind(&function);
3541 __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
3542 __ B(&done);
3543
3544 // Objects with a non-function constructor have class 'Object'.
3545 __ Bind(&non_function_constructor);
3546 __ LoadRoot(x0, Heap::kObject_stringRootIndex);
3547 __ B(&done);
3548
3549 // Non-JS objects have class null.
3550 __ Bind(&null);
3551 __ LoadRoot(x0, Heap::kNullValueRootIndex);
3552
3553 // All done.
3554 __ Bind(&done);
3555
3556 context()->Plug(x0);
3557 }
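// Informal JS-level view of the result computed above: %_ClassOf([]) yields
// "Array" (the constructor's instance class name), %_ClassOf(function(){})
// yields "Function", an object whose map's constructor is not a function
// yields "Object", and smis and other non-JS-objects yield null.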
3558
3559
3560 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3561 ASM_LOCATION("FullCodeGenerator::EmitValueOf");
3562 ZoneList<Expression*>* args = expr->arguments();
3563 DCHECK(args->length() == 1);
3564 VisitForAccumulatorValue(args->at(0)); // Load the object.
3565
3566 Label done;
3567 // If the object is a smi return the object.
3568 __ JumpIfSmi(x0, &done);
3569 // If the object is not a value type, return the object.
3570 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
3571 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
3572
3573 __ Bind(&done);
3574 context()->Plug(x0);
3575 }
3576
3577
3578 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3579 ZoneList<Expression*>* args = expr->arguments();
3580 DCHECK_EQ(1, args->length());
3581
3582 VisitForAccumulatorValue(args->at(0));
3583
3584 Label materialize_true, materialize_false;
3585 Label* if_true = nullptr;
3586 Label* if_false = nullptr;
3587 Label* fall_through = nullptr;
3588 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3589 &if_false, &fall_through);
3590
3591 __ JumpIfSmi(x0, if_false);
3592 __ CompareObjectType(x0, x10, x11, JS_DATE_TYPE);
3593 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3594 Split(eq, if_true, if_false, fall_through);
3595
3596 context()->Plug(if_true, if_false);
3597 }
3598
3599
3600 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3601 ZoneList<Expression*>* args = expr->arguments();
3602 DCHECK(args->length() == 2);
3603 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3604 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3605
3606 VisitForAccumulatorValue(args->at(0)); // Load the object.
3607
3608 Register object = x0;
3609 Register result = x0;
3610 Register stamp_addr = x10;
3611 Register stamp_cache = x11;
3612
3613 if (index->value() == 0) {
3614 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3615 } else {
3616 Label runtime, done;
3617 if (index->value() < JSDate::kFirstUncachedField) {
3618 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3619 __ Mov(stamp_addr, stamp);
3620 __ Ldr(stamp_addr, MemOperand(stamp_addr));
3621 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset));
3622 __ Cmp(stamp_addr, stamp_cache);
3623 __ B(ne, &runtime);
3624 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3625 kPointerSize * index->value()));
3626 __ B(&done);
3627 }
3628
3629 __ Bind(&runtime);
3630 __ Mov(x1, index);
3631 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3632 __ Bind(&done);
3633 }
3634
3635 context()->Plug(result);
3636 }
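// Cache protocol used above, summarized: a JSDate memoizes derived fields
// (year, month, ...) next to its primitive time value, and the isolate-wide
// date cache stamp is bumped whenever time-zone/DST data may have changed.
// A matching stamp means the cached field is still valid; on a mismatch the
// C++ runtime recomputes the field.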
3637
3638
3639 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3640 ZoneList<Expression*>* args = expr->arguments();
3641 DCHECK_EQ(3, args->length());
3642
3643 Register string = x0;
3644 Register index = x1;
3645 Register value = x2;
3646 Register scratch = x10;
3647
3648 VisitForStackValue(args->at(0)); // index
3649 VisitForStackValue(args->at(1)); // value
3650 VisitForAccumulatorValue(args->at(2)); // string
3651 __ Pop(value, index);
3652
3653 if (FLAG_debug_code) {
3654 __ AssertSmi(value, kNonSmiValue);
3655 __ AssertSmi(index, kNonSmiIndex);
3656 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3657 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3658 one_byte_seq_type);
3659 }
3660
3661 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
3662 __ SmiUntag(value);
3663 __ SmiUntag(index);
3664 __ Strb(value, MemOperand(scratch, index));
3665 context()->Plug(string);
3666 }
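// Addressing note for this and the two-byte variant below: `string` is still
// a tagged pointer, so the character payload starts at
// kHeaderSize - kHeapObjectTag from it; the untagged index then selects a
// byte here, or a half-word (index << 1) in the two-byte case.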
3667
3668
3669 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3670 ZoneList<Expression*>* args = expr->arguments();
3671 DCHECK_EQ(3, args->length());
3672
3673 Register string = x0;
3674 Register index = x1;
3675 Register value = x2;
3676 Register scratch = x10;
3677
3678 VisitForStackValue(args->at(0)); // index
3679 VisitForStackValue(args->at(1)); // value
3680 VisitForAccumulatorValue(args->at(2)); // string
3681 __ Pop(value, index);
3682
3683 if (FLAG_debug_code) {
3684 __ AssertSmi(value, kNonSmiValue);
3685 __ AssertSmi(index, kNonSmiIndex);
3686 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3687 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch,
3688 two_byte_seq_type);
3689 }
3690
3691 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag);
3692 __ SmiUntag(value);
3693 __ SmiUntag(index);
3694 __ Strh(value, MemOperand(scratch, index, LSL, 1));
3695 context()->Plug(string);
3696 }
3697
3698
3699 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3700 ZoneList<Expression*>* args = expr->arguments();
3701 DCHECK(args->length() == 2);
3702 VisitForStackValue(args->at(0)); // Load the object.
3703 VisitForAccumulatorValue(args->at(1)); // Load the value.
3704 __ Pop(x1);
3705 // x0 = value.
3706 // x1 = object.
3707
3708 Label done;
3709 // If the object is a smi, return the value.
3710 __ JumpIfSmi(x1, &done);
3711
3712 // If the object is not a value type, return the value.
3713 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done);
3714
3715 // Store the value.
3716 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset));
3717 // Update the write barrier. Save the value as it will be
3718 // overwritten by the write barrier code and is needed afterward.
3719 __ Mov(x10, x0);
3720 __ RecordWriteField(
3721 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
3722
3723 __ Bind(&done);
3724 context()->Plug(x0);
3725 }
3726
3727
3728 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3729 ZoneList<Expression*>* args = expr->arguments();
3730 DCHECK_EQ(args->length(), 1);
3731
3732 // Load the argument into x0 and call the stub.
3733 VisitForAccumulatorValue(args->at(0));
3734
3735 NumberToStringStub stub(isolate());
3736 __ CallStub(&stub);
3737 context()->Plug(x0);
3738 }
3739
3740
3741 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3742 ZoneList<Expression*>* args = expr->arguments();
3743 DCHECK(args->length() == 1);
3744
3745 VisitForAccumulatorValue(args->at(0));
3746
3747 Label done;
3748 Register code = x0;
3749 Register result = x1;
3750
3751 StringCharFromCodeGenerator generator(code, result);
3752 generator.GenerateFast(masm_);
3753 __ B(&done);
3754
3755 NopRuntimeCallHelper call_helper;
3756 generator.GenerateSlow(masm_, call_helper);
3757
3758 __ Bind(&done);
3759 context()->Plug(result);
3760 }
3761
3762
3763 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3764 ZoneList<Expression*>* args = expr->arguments();
3765 DCHECK(args->length() == 2);
3766
3767 VisitForStackValue(args->at(0));
3768 VisitForAccumulatorValue(args->at(1));
3769
3770 Register object = x1;
3771 Register index = x0;
3772 Register result = x3;
3773
3774 __ Pop(object);
3775
3776 Label need_conversion;
3777 Label index_out_of_range;
3778 Label done;
3779 StringCharCodeAtGenerator generator(object,
3780 index,
3781 result,
3782 &need_conversion,
3783 &need_conversion,
3784 &index_out_of_range,
3785 STRING_INDEX_IS_NUMBER);
3786 generator.GenerateFast(masm_);
3787 __ B(&done);
3788
3789 __ Bind(&index_out_of_range);
3790 // When the index is out of range, the spec requires us to return NaN.
3791 __ LoadRoot(result, Heap::kNanValueRootIndex);
3792 __ B(&done);
3793
3794 __ Bind(&need_conversion);
3795 // Load the undefined value into the result register, which will
3796 // trigger conversion.
3797 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3798 __ B(&done);
3799
3800 NopRuntimeCallHelper call_helper;
3801 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3802
3803 __ Bind(&done);
3804 context()->Plug(result);
3805 }
3806
3807
3808 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3809 ZoneList<Expression*>* args = expr->arguments();
3810 DCHECK(args->length() == 2);
3811
3812 VisitForStackValue(args->at(0));
3813 VisitForAccumulatorValue(args->at(1));
3814
3815 Register object = x1;
3816 Register index = x0;
3817 Register result = x0;
3818
3819 __ Pop(object);
3820
3821 Label need_conversion;
3822 Label index_out_of_range;
3823 Label done;
3824 StringCharAtGenerator generator(object,
3825 index,
3826 x3,
3827 result,
3828 &need_conversion,
3829 &need_conversion,
3830 &index_out_of_range,
3831 STRING_INDEX_IS_NUMBER);
3832 generator.GenerateFast(masm_);
3833 __ B(&done);
3834
3835 __ Bind(&index_out_of_range);
3836 // When the index is out of range, the spec requires us to return
3837 // the empty string.
3838 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3839 __ B(&done);
3840
3841 __ Bind(&need_conversion);
3842 // Move smi zero into the result register, which will trigger conversion.
3843 __ Mov(result, Smi::FromInt(0));
3844 __ B(&done);
3845
3846 NopRuntimeCallHelper call_helper;
3847 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3848
3849 __ Bind(&done);
3850 context()->Plug(result);
3851 }
3852
3853
3854 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3855 ASM_LOCATION("FullCodeGenerator::EmitStringAdd");
3856 ZoneList<Expression*>* args = expr->arguments();
3857 DCHECK_EQ(2, args->length());
3858
3859 VisitForStackValue(args->at(0));
3860 VisitForAccumulatorValue(args->at(1));
3861
3862 __ Pop(x1);
3863 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3864 __ CallStub(&stub);
3865
3866 context()->Plug(x0);
3867 }
3868
3869
3870 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3871 ASM_LOCATION("FullCodeGenerator::EmitCallFunction");
3872 ZoneList<Expression*>* args = expr->arguments();
3873 DCHECK(args->length() >= 2);
3874
3875 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3876 for (int i = 0; i < arg_count + 1; i++) {
3877 VisitForStackValue(args->at(i));
3878 }
3879 VisitForAccumulatorValue(args->last()); // Function.
3880
3881 Label runtime, done;
3882 // Check for non-function argument (including proxy).
3883 __ JumpIfSmi(x0, &runtime);
3884 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime);
3885
3886 // InvokeFunction requires the function in x1. Move it in there.
3887 __ Mov(x1, x0);
3888 ParameterCount count(arg_count);
3889 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper());
3890 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3891 __ B(&done);
3892
3893 __ Bind(&runtime);
3894 __ Push(x0);
3895 __ CallRuntime(Runtime::kCall, args->length());
3896 __ Bind(&done);
3897
3898 context()->Plug(x0);
3899 }
3900
3901
3902 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
3903 ZoneList<Expression*>* args = expr->arguments();
3904 DCHECK(args->length() == 2);
3905
3906 // new.target
3907 VisitForStackValue(args->at(0));
3908
3909 // .this_function
3910 VisitForStackValue(args->at(1));
3911 __ CallRuntime(Runtime::kGetPrototype, 1);
3912 __ Push(result_register());
3913
3914 // Load original constructor into x4.
3915 __ Peek(x4, 1 * kPointerSize);
3916
3917 // Check if the calling frame is an arguments adaptor frame.
3918 Label adaptor_frame, args_set_up, runtime;
3919 __ Ldr(x11, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3920 __ Ldr(x12, MemOperand(x11, StandardFrameConstants::kContextOffset));
3921 __ Cmp(x12, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3922 __ B(eq, &adaptor_frame);
3923 // The default constructor has no arguments, so no adaptor frame means no args.
3924 __ Mov(x0, Operand(0));
3925 __ B(&args_set_up);
3926
3927 // Copy arguments from adaptor frame.
3928 {
3929 __ bind(&adaptor_frame);
3930 __ Ldr(x1, MemOperand(x11, ArgumentsAdaptorFrameConstants::kLengthOffset));
3931 __ SmiUntag(x1, x1);
3932
3933 __ Mov(x0, x1);
3934
3935 // Get arguments pointer in x11.
3936 __ Add(x11, x11, Operand(x1, LSL, kPointerSizeLog2));
3937 __ Add(x11, x11, StandardFrameConstants::kCallerSPOffset);
3938 Label loop;
3939 __ bind(&loop);
3940 // Pre-decrement x11 with kPointerSize on each iteration.
3941 // Pre-decrement in order to skip receiver.
3942 __ Ldr(x10, MemOperand(x11, -kPointerSize, PreIndex));
3943 __ Push(x10);
3944 __ Sub(x1, x1, Operand(1));
3945 __ Cbnz(x1, &loop);
3946 }
3947
3948 __ bind(&args_set_up);
3949 __ Peek(x1, Operand(x0, LSL, kPointerSizeLog2));
3950 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
3951
3952 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
3953 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3954
3955 __ Drop(1);
3956
3957 context()->Plug(result_register());
3958 }
3959
3960
3961 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3962 RegExpConstructResultStub stub(isolate());
3963 ZoneList<Expression*>* args = expr->arguments();
3964 DCHECK(args->length() == 3);
3965 VisitForStackValue(args->at(0));
3966 VisitForStackValue(args->at(1));
3967 VisitForAccumulatorValue(args->at(2));
3968 __ Pop(x1, x2);
3969 __ CallStub(&stub);
3970 context()->Plug(x0);
3971 }
3972
3973
3974 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3975 ZoneList<Expression*>* args = expr->arguments();
3976 DCHECK_EQ(2, args->length());
3977 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
3978 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3979
3980 Handle<FixedArray> jsfunction_result_caches(
3981 isolate()->native_context()->jsfunction_result_caches());
3982 if (jsfunction_result_caches->length() <= cache_id) {
3983 __ Abort(kAttemptToUseUndefinedCache);
3984 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
3985 context()->Plug(x0);
3986 return;
3987 }
3988
3989 VisitForAccumulatorValue(args->at(1));
3990
3991 Register key = x0;
3992 Register cache = x1;
3993 __ Ldr(cache, GlobalObjectMemOperand());
3994 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3995 __ Ldr(cache, ContextMemOperand(cache,
3996 Context::JSFUNCTION_RESULT_CACHES_INDEX));
3997 __ Ldr(cache,
3998 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3999
4000 Label done;
4001 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache,
4002 JSFunctionResultCache::kFingerOffset));
4003 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag);
4004 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2));
4005
4006 // Load the key and data from the cache.
4007 __ Ldp(x2, x3, MemOperand(x3));
4008
4009 __ Cmp(key, x2);
4010 __ CmovX(x0, x3, eq);
4011 __ B(eq, &done);
4012
4013 // Call runtime to perform the lookup.
4014 __ Push(cache, key);
4015 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4016
4017 __ Bind(&done);
4018 context()->Plug(x0);
4019 }
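// Layout exploited above: a JSFunctionResultCache is a FixedArray of
// (key, value) pairs plus a "finger" index pointing at the most recent hit,
// so the probe is a single Ldp of the pair under the finger; only a miss
// falls back to the runtime lookup.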
4020
4021
4022 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4023 ZoneList<Expression*>* args = expr->arguments();
4024 VisitForAccumulatorValue(args->at(0));
4025
4026 Label materialize_true, materialize_false;
4027 Label* if_true = NULL;
4028 Label* if_false = NULL;
4029 Label* fall_through = NULL;
4030 context()->PrepareTest(&materialize_true, &materialize_false,
4031 &if_true, &if_false, &fall_through);
4032
4033 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
4034 __ Tst(x10, String::kContainsCachedArrayIndexMask);
4035 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4036 Split(eq, if_true, if_false, fall_through);
4037
4038 context()->Plug(if_true, if_false);
4039 }
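// Background for the test above: a string's hash field can cache the numeric
// value of an array-index string, and kContainsCachedArrayIndexMask is zero
// in that field exactly when such a cached index is present, hence the
// Split(eq, ...).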
4040
4041
4042 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4043 ZoneList<Expression*>* args = expr->arguments();
4044 DCHECK(args->length() == 1);
4045 VisitForAccumulatorValue(args->at(0));
4046
4047 __ AssertString(x0);
4048
4049 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
4050 __ IndexFromHash(x10, x0);
4051
4052 context()->Plug(x0);
4053 }
4054
4055
4056 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4057 ASM_LOCATION("FullCodeGenerator::EmitFastOneByteArrayJoin");
4058
4059 ZoneList<Expression*>* args = expr->arguments();
4060 DCHECK(args->length() == 2);
4061 VisitForStackValue(args->at(1));
4062 VisitForAccumulatorValue(args->at(0));
4063
4064 Register array = x0;
4065 Register result = x0;
4066 Register elements = x1;
4067 Register element = x2;
4068 Register separator = x3;
4069 Register array_length = x4;
4070 Register result_pos = x5;
4071 Register map = x6;
4072 Register string_length = x10;
4073 Register elements_end = x11;
4074 Register string = x12;
4075 Register scratch1 = x13;
4076 Register scratch2 = x14;
4077 Register scratch3 = x7;
4078 Register separator_length = x15;
4079
4080 Label bailout, done, one_char_separator, long_separator,
4081 non_trivial_array, not_size_one_array, loop,
4082 empty_separator_loop, one_char_separator_loop,
4083 one_char_separator_loop_entry, long_separator_loop;
4084
4085 // The separator operand is on the stack.
4086 __ Pop(separator);
4087
4088 // Check that the array is a JSArray.
4089 __ JumpIfSmi(array, &bailout);
4090 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout);
4091
4092 // Check that the array has fast elements.
4093 __ CheckFastElements(map, scratch1, &bailout);
4094
4095 // If the array has length zero, return the empty string.
4096 // Load and untag the length of the array.
4097 // It is an unsigned value, so we can skip sign extension.
4098 // We assume little endianness.
4099 __ Ldrsw(array_length,
4100 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset));
4101 __ Cbnz(array_length, &non_trivial_array);
4102 __ LoadRoot(result, Heap::kempty_stringRootIndex);
4103 __ B(&done);
4104
4105 __ Bind(&non_trivial_array);
4106 // Get the FixedArray containing array's elements.
4107 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4108
4109 // Check that all array elements are sequential one-byte strings, and
4110 // accumulate the sum of their lengths.
4111 __ Mov(string_length, 0);
4112 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4113 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4114 // Loop condition: while (element < elements_end).
4115 // Live values in registers:
4116 // elements: Fixed array of strings.
4117 // array_length: Length of the fixed array of strings (not smi)
4118 // separator: Separator string
4119 // string_length: Accumulated sum of string lengths (not smi).
4120 // element: Current array element.
4121 // elements_end: Array end.
4122 if (FLAG_debug_code) {
4123 __ Cmp(array_length, 0);
4124 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4125 }
4126 __ Bind(&loop);
4127 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4128 __ JumpIfSmi(string, &bailout);
4129 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4130 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4131 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4132 __ Ldrsw(scratch1,
4133 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset));
4134 __ Adds(string_length, string_length, scratch1);
4135 __ B(vs, &bailout);
4136 __ Cmp(element, elements_end);
4137 __ B(lt, &loop);
4138
4139 // If array_length is 1, return elements[0], a string.
4140 __ Cmp(array_length, 1);
4141 __ B(ne, &not_size_one_array);
4142 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize));
4143 __ B(&done);
4144
4145 __ Bind(&not_size_one_array);
4146
4147 // Live values in registers:
4148 // separator: Separator string
4149 // array_length: Length of the array (not smi).
4150 // string_length: Sum of string lengths (not smi).
4151 // elements: FixedArray of strings.
4152
4153 // Check that the separator is a flat one-byte string.
4154 __ JumpIfSmi(separator, &bailout);
4155 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4156 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4157 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4158
4159 // Add (separator length times array_length) - separator length to the
4160 // string_length to get the length of the result string.
4161 // Load the separator length as untagged.
4162 // We assume little endianness, and that the length is positive.
4163 __ Ldrsw(separator_length,
4164 UntagSmiFieldMemOperand(separator,
4165 SeqOneByteString::kLengthOffset));
4166 __ Sub(string_length, string_length, separator_length);
4167 __ Umaddl(string_length, array_length.W(), separator_length.W(),
4168 string_length);
4169
4170 // Get first element in the array.
4171 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag);
4172 // Live values in registers:
4173 // element: First array element
4174 // separator: Separator string
4175 // string_length: Length of result string (not smi)
4176 // array_length: Length of the array (not smi).
4177 __ AllocateOneByteString(result, string_length, scratch1, scratch2, scratch3,
4178 &bailout);
4179
4180 // Prepare for looping. Set up elements_end to end of the array. Set
4181 // result_pos to the position of the result where to write the first
4182 // character.
4183 // TODO(all): useless unless AllocateOneByteString trashes the register.
4184 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
4185 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4186
4187 // Check the length of the separator.
4188 __ Cmp(separator_length, 1);
4189 __ B(eq, &one_char_separator);
4190 __ B(gt, &long_separator);
4191
4192 // Empty separator case
4193 __ Bind(&empty_separator_loop);
4194 // Live values in registers:
4195 // result_pos: the position to which we are currently copying characters.
4196 // element: Current array element.
4197 // elements_end: Array end.
4198
4199 // Copy next array element to the result.
4200 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4201 __ Ldrsw(string_length,
4202 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4203 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4204 __ CopyBytes(result_pos, string, string_length, scratch1);
4205 __ Cmp(element, elements_end);
4206 __ B(lt, &empty_separator_loop); // End while (element < elements_end).
4207 __ B(&done);
4208
4209 // One-character separator case
4210 __ Bind(&one_char_separator);
4211 // Replace separator with its one-byte character value.
4212 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4213 // Jump into the loop after the code that copies the separator, so the first
4214 // element is not preceded by a separator.
4215 __ B(&one_char_separator_loop_entry);
4216
4217 __ Bind(&one_char_separator_loop);
4218 // Live values in registers:
4219 // result_pos: the position to which we are currently copying characters.
4220 // element: Current array element.
4221 // elements_end: Array end.
4222 // separator: Single separator one-byte char (in lower byte).
4223
4224 // Copy the separator character to the result.
4225 __ Strb(separator, MemOperand(result_pos, 1, PostIndex));
4226
4227 // Copy next array element to the result.
4228 __ Bind(&one_char_separator_loop_entry);
4229 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4230 __ Ldrsw(string_length,
4231 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4232 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4233 __ CopyBytes(result_pos, string, string_length, scratch1);
4234 __ Cmp(element, elements_end);
4235 __ B(lt, &one_char_separator_loop); // End while (element < elements_end).
4236 __ B(&done);
4237
4238 // Long separator case (separator is more than one character). Entry is at the
4239 // label long_separator below.
4240 __ Bind(&long_separator_loop);
4241 // Live values in registers:
4242 // result_pos: the position to which we are currently copying characters.
4243 // element: Current array element.
4244 // elements_end: Array end.
4245 // separator: Separator string.
4246
4247 // Copy the separator to the result.
4248 // TODO(all): hoist next two instructions.
4249 __ Ldrsw(string_length,
4250 UntagSmiFieldMemOperand(separator, String::kLengthOffset));
4251 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4252 __ CopyBytes(result_pos, string, string_length, scratch1);
4253
4254 __ Bind(&long_separator);
4255 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex));
4256 __ Ldrsw(string_length,
4257 UntagSmiFieldMemOperand(string, String::kLengthOffset));
4258 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4259 __ CopyBytes(result_pos, string, string_length, scratch1);
4260 __ Cmp(element, elements_end);
4261 __ B(lt, &long_separator_loop); // End while (element < elements_end).
4262 __ B(&done);
4263
4264 __ Bind(&bailout);
4265 // Returning undefined will force slower code to handle it.
4266 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
4267 __ Bind(&done);
4268 context()->Plug(result);
4269 }
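// Big picture: the function above is a hand-rolled fast path for
// Array.prototype.join over arrays of flat one-byte strings. Anything
// unexpected (a smi element, a non-sequential or two-byte string, a length
// overflow) bails out by returning undefined, which forces the generic join.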
4270
4271
4272 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4273 DCHECK(expr->arguments()->length() == 0);
4274 ExternalReference debug_is_active =
4275 ExternalReference::debug_is_active_address(isolate());
4276 __ Mov(x10, debug_is_active);
4277 __ Ldrb(x0, MemOperand(x10));
4278 __ SmiTag(x0);
4279 context()->Plug(x0);
4280 }
4281
4282
4283 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4284 // Push the builtins object as the receiver.
4285 __ Ldr(x10, GlobalObjectMemOperand());
4286 __ Ldr(LoadDescriptor::ReceiverRegister(),
4287 FieldMemOperand(x10, GlobalObject::kBuiltinsOffset));
4288 __ Push(LoadDescriptor::ReceiverRegister());
4289
4290 // Load the function from the receiver.
4291 Handle<String> name = expr->name();
4292 __ Mov(LoadDescriptor::NameRegister(), Operand(name));
4293 __ Mov(LoadDescriptor::SlotRegister(),
4294 SmiFromSlot(expr->CallRuntimeFeedbackSlot()));
4295 CallLoadIC(NOT_INSIDE_TYPEOF);
4296 }
4297
4298
4299 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4300 ZoneList<Expression*>* args = expr->arguments();
4301 int arg_count = args->length();
4302
4303 SetCallPosition(expr, arg_count);
4304 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4305 __ Peek(x1, (arg_count + 1) * kPointerSize);
4306 __ CallStub(&stub);
4307 }
4308
4309
4310 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4311 ZoneList<Expression*>* args = expr->arguments();
4312 int arg_count = args->length();
4313
4314 if (expr->is_jsruntime()) {
4315 Comment cmnt(masm_, "[ CallRunTime");
4316 EmitLoadJSRuntimeFunction(expr);
4317
4318 // Push the target function under the receiver.
4319 __ Pop(x10);
4320 __ Push(x0, x10);
4321
4322 for (int i = 0; i < arg_count; i++) {
4323 VisitForStackValue(args->at(i));
4324 }
4325
4326 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4327 EmitCallJSRuntimeFunction(expr);
4328
4329 // Restore context register.
4330 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4331
4332 context()->DropAndPlug(1, x0);
4333
4334 } else {
4335 const Runtime::Function* function = expr->function();
4336 switch (function->function_id) {
4337 #define CALL_INTRINSIC_GENERATOR(Name) \
4338 case Runtime::kInline##Name: { \
4339 Comment cmnt(masm_, "[ Inline" #Name); \
4340 return Emit##Name(expr); \
4341 }
4342 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4343 #undef CALL_INTRINSIC_GENERATOR
4344 default: {
4345 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4346 // Push the arguments ("left-to-right").
4347 for (int i = 0; i < arg_count; i++) {
4348 VisitForStackValue(args->at(i));
4349 }
4350
4351 // Call the C runtime function.
4352 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4353 __ CallRuntime(expr->function(), arg_count);
4354 context()->Plug(x0);
4355 }
4356 }
4357 }
4358 }
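// Dispatch summary for VisitCallRuntime: a %foo(...) call in the natives
// either resolves to a JS builtin (the is_jsruntime() path), to one of the
// hand-inlined intrinsics from FOR_EACH_FULL_CODE_INTRINSIC (the Emit*
// functions above), or to a plain call into the C++ runtime.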
4359
4360
4361 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4362 switch (expr->op()) {
4363 case Token::DELETE: {
4364 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4365 Property* property = expr->expression()->AsProperty();
4366 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4367
4368 if (property != NULL) {
4369 VisitForStackValue(property->obj());
4370 VisitForStackValue(property->key());
4371 __ Mov(x10, Smi::FromInt(language_mode()));
4372 __ Push(x10);
4373 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4374 context()->Plug(x0);
4375 } else if (proxy != NULL) {
4376 Variable* var = proxy->var();
4377 // Delete of an unqualified identifier is disallowed in strict mode but
4378 // "delete this" is allowed.
4379 bool is_this = var->HasThisName(isolate());
4380 DCHECK(is_sloppy(language_mode()) || is_this);
4381 if (var->IsUnallocatedOrGlobalSlot()) {
4382 __ Ldr(x12, GlobalObjectMemOperand());
4383 __ Mov(x11, Operand(var->name()));
4384 __ Mov(x10, Smi::FromInt(SLOPPY));
4385 __ Push(x12, x11, x10);
4386 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4387 context()->Plug(x0);
4388 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4389 // Result of deleting non-global, non-dynamic variables is false.
4390 // The subexpression does not have side effects.
4391 context()->Plug(is_this);
4392 } else {
4393 // Non-global variable. Call the runtime to try to delete from the
4394 // context where the variable was introduced.
4395 __ Mov(x2, Operand(var->name()));
4396 __ Push(context_register(), x2);
4397 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4398 context()->Plug(x0);
4399 }
4400 } else {
4401 // Result of deleting non-property, non-variable reference is true.
4402 // The subexpression may have side effects.
4403 VisitForEffect(expr->expression());
4404 context()->Plug(true);
4405 }
4406 break;
4408 }
4409 case Token::VOID: {
4410 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4411 VisitForEffect(expr->expression());
4412 context()->Plug(Heap::kUndefinedValueRootIndex);
4413 break;
4414 }
4415 case Token::NOT: {
4416 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4417 if (context()->IsEffect()) {
4418 // Unary NOT has no side effects so it's only necessary to visit the
4419 // subexpression. Match the optimizing compiler by not branching.
4420 VisitForEffect(expr->expression());
4421 } else if (context()->IsTest()) {
4422 const TestContext* test = TestContext::cast(context());
4423 // The labels are swapped for the recursive call.
4424 VisitForControl(expr->expression(),
4425 test->false_label(),
4426 test->true_label(),
4427 test->fall_through());
4428 context()->Plug(test->true_label(), test->false_label());
4429 } else {
4430 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4431 // TODO(jbramley): This could be much more efficient using (for
4432 // example) the CSEL instruction.
4433 Label materialize_true, materialize_false, done;
4434 VisitForControl(expr->expression(),
4435 &materialize_false,
4436 &materialize_true,
4437 &materialize_true);
4438
4439 __ Bind(&materialize_true);
4440 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4441 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
4442 __ B(&done);
4443
4444 __ Bind(&materialize_false);
4445 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4446 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
4447 __ B(&done);
4448
4449 __ Bind(&done);
4450 if (context()->IsStackValue()) {
4451 __ Push(result_register());
4452 }
4453 }
4454 break;
4455 }
4456 case Token::TYPEOF: {
4457 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4458 {
4459 AccumulatorValueContext context(this);
4460 VisitForTypeofValue(expr->expression());
4461 }
4462 __ Mov(x3, x0);
4463 TypeofStub typeof_stub(isolate());
4464 __ CallStub(&typeof_stub);
4465 context()->Plug(x0);
4466 break;
4467 }
4468 default:
4469 UNREACHABLE();
4470 }
4471 }
4472
4473
4474 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4475 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4476
4477 Comment cmnt(masm_, "[ CountOperation");
4478
4479 Property* prop = expr->expression()->AsProperty();
4480 LhsKind assign_type = Property::GetAssignType(prop);
4481
4482 // Evaluate expression and get value.
4483 if (assign_type == VARIABLE) {
4484 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4485 AccumulatorValueContext context(this);
4486 EmitVariableLoad(expr->expression()->AsVariableProxy());
4487 } else {
4488 // Reserve space for result of postfix operation.
4489 if (expr->is_postfix() && !context()->IsEffect()) {
4490 __ Push(xzr);
4491 }
4492 switch (assign_type) {
4493 case NAMED_PROPERTY: {
4494 // Put the object both on the stack and in the register.
4495 VisitForStackValue(prop->obj());
4496 __ Peek(LoadDescriptor::ReceiverRegister(), 0);
4497 EmitNamedPropertyLoad(prop);
4498 break;
4499 }
4500
4501 case NAMED_SUPER_PROPERTY: {
4502 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4503 VisitForAccumulatorValue(
4504 prop->obj()->AsSuperPropertyReference()->home_object());
4505 __ Push(result_register());
4506 const Register scratch = x10;
4507 __ Peek(scratch, kPointerSize);
4508 __ Push(scratch, result_register());
4509 EmitNamedSuperPropertyLoad(prop);
4510 break;
4511 }
4512
4513 case KEYED_SUPER_PROPERTY: {
4514 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4515 VisitForStackValue(
4516 prop->obj()->AsSuperPropertyReference()->home_object());
4517 VisitForAccumulatorValue(prop->key());
4518 __ Push(result_register());
4519 const Register scratch1 = x10;
4520 const Register scratch2 = x11;
4521 __ Peek(scratch1, 2 * kPointerSize);
4522 __ Peek(scratch2, kPointerSize);
4523 __ Push(scratch1, scratch2, result_register());
4524 EmitKeyedSuperPropertyLoad(prop);
4525 break;
4526 }
4527
4528 case KEYED_PROPERTY: {
4529 VisitForStackValue(prop->obj());
4530 VisitForStackValue(prop->key());
4531 __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
4532 __ Peek(LoadDescriptor::NameRegister(), 0);
4533 EmitKeyedPropertyLoad(prop);
4534 break;
4535 }
4536
4537 case VARIABLE:
4538 UNREACHABLE();
4539 }
4540 }
4541
4542 // We need a second deoptimization point after loading the value
4543 // in case evaluating the property load may have a side effect.
4544 if (assign_type == VARIABLE) {
4545 PrepareForBailout(expr->expression(), TOS_REG);
4546 } else {
4547 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4548 }
4549
4550 // Inline smi case if we are in a loop.
4551 Label stub_call, done;
4552 JumpPatchSite patch_site(masm_);
4553
4554 int count_value = expr->op() == Token::INC ? 1 : -1;
4555 if (ShouldInlineSmiCase(expr->op())) {
4556 Label slow;
4557 patch_site.EmitJumpIfNotSmi(x0, &slow);
4558
4559 // Save result for postfix expressions.
4560 if (expr->is_postfix()) {
4561 if (!context()->IsEffect()) {
4562 // Save the result on the stack. If we have a named or keyed property we
4563 // store the result under the receiver that is currently on top of the
4564 // stack.
4565 switch (assign_type) {
4566 case VARIABLE:
4567 __ Push(x0);
4568 break;
4569 case NAMED_PROPERTY:
4570 __ Poke(x0, kPointerSize);
4571 break;
4572 case NAMED_SUPER_PROPERTY:
4573 __ Poke(x0, kPointerSize * 2);
4574 break;
4575 case KEYED_PROPERTY:
4576 __ Poke(x0, kPointerSize * 2);
4577 break;
4578 case KEYED_SUPER_PROPERTY:
4579 __ Poke(x0, kPointerSize * 3);
4580 break;
4581 }
4582 }
4583 }
4584
4585 __ Adds(x0, x0, Smi::FromInt(count_value));
4586 __ B(vc, &done);
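// Smi arithmetic note: on arm64 the smi value lives in the upper 32 bits of
// the tagged word, so a single Adds performs the tagged increment/decrement,
// and signed overflow (V) is the only way the result can leave smi range;
// the Sub below undoes the operation before falling back to the stub.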
4587 // Call stub. Undo operation first.
4588 __ Sub(x0, x0, Smi::FromInt(count_value));
4589 __ B(&stub_call);
4590 __ Bind(&slow);
4591 }
4592 if (!is_strong(language_mode())) {
4593 ToNumberStub convert_stub(isolate());
4594 __ CallStub(&convert_stub);
4595 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4596 }
4597
4598 // Save result for postfix expressions.
4599 if (expr->is_postfix()) {
4600 if (!context()->IsEffect()) {
4601 // Save the result on the stack. If we have a named or keyed property
4602 // we store the result under the receiver that is currently on top
4603 // of the stack.
4604 switch (assign_type) {
4605 case VARIABLE:
4606 __ Push(x0);
4607 break;
4608 case NAMED_PROPERTY:
4609 __ Poke(x0, kXRegSize);
4610 break;
4611 case NAMED_SUPER_PROPERTY:
4612 __ Poke(x0, 2 * kXRegSize);
4613 break;
4614 case KEYED_PROPERTY:
4615 __ Poke(x0, 2 * kXRegSize);
4616 break;
4617 case KEYED_SUPER_PROPERTY:
4618 __ Poke(x0, 3 * kXRegSize);
4619 break;
4620 }
4621 }
4622 }
4623
4624 __ Bind(&stub_call);
4625 __ Mov(x1, x0);
4626 __ Mov(x0, Smi::FromInt(count_value));
4627
4628 SetExpressionPosition(expr);
4629
4630 {
4631 Assembler::BlockPoolsScope scope(masm_);
4632 Handle<Code> code =
4633 CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4634 strength(language_mode())).code();
4635 CallIC(code, expr->CountBinOpFeedbackId());
4636 patch_site.EmitPatchInfo();
4637 }
4638 __ Bind(&done);
4639
4640 if (is_strong(language_mode())) {
4641 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4642 }
4643 // Store the value returned in x0.
4644 switch (assign_type) {
4645 case VARIABLE:
4646 if (expr->is_postfix()) {
4647 { EffectContext context(this);
4648 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4649 Token::ASSIGN, expr->CountSlot());
4650 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4651 context.Plug(x0);
4652 }
4653 // For all contexts except the effect context, we have the result on
4654 // top of the stack.
4655 if (!context()->IsEffect()) {
4656 context()->PlugTOS();
4657 }
4658 } else {
4659 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4660 Token::ASSIGN, expr->CountSlot());
4661 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4662 context()->Plug(x0);
4663 }
4664 break;
4665 case NAMED_PROPERTY: {
4666 __ Mov(StoreDescriptor::NameRegister(),
4667 Operand(prop->key()->AsLiteral()->value()));
4668 __ Pop(StoreDescriptor::ReceiverRegister());
4669 if (FLAG_vector_stores) {
4670 EmitLoadStoreICSlot(expr->CountSlot());
4671 CallStoreIC();
4672 } else {
4673 CallStoreIC(expr->CountStoreFeedbackId());
4674 }
4675 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4676 if (expr->is_postfix()) {
4677 if (!context()->IsEffect()) {
4678 context()->PlugTOS();
4679 }
4680 } else {
4681 context()->Plug(x0);
4682 }
4683 break;
4684 }
4685 case NAMED_SUPER_PROPERTY: {
4686 EmitNamedSuperPropertyStore(prop);
4687 if (expr->is_postfix()) {
4688 if (!context()->IsEffect()) {
4689 context()->PlugTOS();
4690 }
4691 } else {
4692 context()->Plug(x0);
4693 }
4694 break;
4695 }
4696 case KEYED_SUPER_PROPERTY: {
4697 EmitKeyedSuperPropertyStore(prop);
4698 if (expr->is_postfix()) {
4699 if (!context()->IsEffect()) {
4700 context()->PlugTOS();
4701 }
4702 } else {
4703 context()->Plug(x0);
4704 }
4705 break;
4706 }
4707 case KEYED_PROPERTY: {
4708 __ Pop(StoreDescriptor::NameRegister());
4709 __ Pop(StoreDescriptor::ReceiverRegister());
4710 Handle<Code> ic =
4711 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4712 if (FLAG_vector_stores) {
4713 EmitLoadStoreICSlot(expr->CountSlot());
4714 CallIC(ic);
4715 } else {
4716 CallIC(ic, expr->CountStoreFeedbackId());
4717 }
4718 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4719 if (expr->is_postfix()) {
4720 if (!context()->IsEffect()) {
4721 context()->PlugTOS();
4722 }
4723 } else {
4724 context()->Plug(x0);
4725 }
4726 break;
4727 }
4728 }
4729 }
4730
4731
4732 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4733 Expression* sub_expr,
4734 Handle<String> check) {
4735 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
4736 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
4737 Label materialize_true, materialize_false;
4738 Label* if_true = NULL;
4739 Label* if_false = NULL;
4740 Label* fall_through = NULL;
4741 context()->PrepareTest(&materialize_true, &materialize_false,
4742 &if_true, &if_false, &fall_through);
4743
4744 { AccumulatorValueContext context(this);
4745 VisitForTypeofValue(sub_expr);
4746 }
4747 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4748
4749 Factory* factory = isolate()->factory();
4750 if (String::Equals(check, factory->number_string())) {
4751 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
4752 __ JumpIfSmi(x0, if_true);
4753 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4754 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
4755 Split(eq, if_true, if_false, fall_through);
4756 } else if (String::Equals(check, factory->string_string())) {
4757 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
4758 __ JumpIfSmi(x0, if_false);
4759 // Check for undetectable objects => false.
4760 __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge);
4761 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4762 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false,
4763 fall_through);
4764 } else if (String::Equals(check, factory->symbol_string())) {
4765 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
4766 __ JumpIfSmi(x0, if_false);
4767 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
4768 Split(eq, if_true, if_false, fall_through);
4769 } else if (String::Equals(check, factory->float32x4_string())) {
4770 ASM_LOCATION(
4771 "FullCodeGenerator::EmitLiteralCompareTypeof float32x4_string");
4772 __ JumpIfSmi(x0, if_false);
4773 __ CompareObjectType(x0, x0, x1, FLOAT32X4_TYPE);
4774 Split(eq, if_true, if_false, fall_through);
4775 } else if (String::Equals(check, factory->boolean_string())) {
4776 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
4777 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
4778 __ CompareRoot(x0, Heap::kFalseValueRootIndex);
4779 Split(eq, if_true, if_false, fall_through);
4780 } else if (String::Equals(check, factory->undefined_string())) {
4781 ASM_LOCATION(
4782 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
4783 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true);
4784 __ JumpIfSmi(x0, if_false);
4785 // Check for undetectable objects => true.
4786 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
4787 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
4788 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
4789 fall_through);
4790 } else if (String::Equals(check, factory->function_string())) {
4791 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
4792 __ JumpIfSmi(x0, if_false);
4793 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4794 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true);
4795 __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false,
4796 fall_through);
4797
4798 } else if (String::Equals(check, factory->object_string())) {
4799 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
4800 __ JumpIfSmi(x0, if_false);
4801 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
4802 // Check for JS objects => true.
4803 Register map = x10;
4804 __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE,
4805 if_false, lt);
4806 __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4807 __ B(gt, if_false);
4808 // Check for undetectable objects => false.
4809 __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset));
4810
4811 __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false,
4812 fall_through);
4813
4814 } else {
4815 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
4816 if (if_false != fall_through) __ B(if_false);
4817 }
4818 context()->Plug(if_true, if_false);
4819 }
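// This specializes comparisons of the shape `typeof x == "literal"`, e.g.
// `typeof x === "undefined"`, so the typeof result string is never
// materialized; an unrecognized literal (the final else above) is statically
// false.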
4820
4821
4822 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4823 Comment cmnt(masm_, "[ CompareOperation");
4824 SetExpressionPosition(expr);
4825
4826 // Try to generate an optimized comparison with a literal value.
4827 // TODO(jbramley): This only checks common values like NaN or undefined.
4828 // Should it also handle ARM64 immediate operands?
4829 if (TryLiteralCompare(expr)) {
4830 return;
4831 }
4832
4833 // Assign labels according to context()->PrepareTest.
4834 Label materialize_true;
4835 Label materialize_false;
4836 Label* if_true = NULL;
4837 Label* if_false = NULL;
4838 Label* fall_through = NULL;
4839 context()->PrepareTest(&materialize_true, &materialize_false,
4840 &if_true, &if_false, &fall_through);
4841
4842 Token::Value op = expr->op();
4843 VisitForStackValue(expr->left());
4844 switch (op) {
4845 case Token::IN:
4846 VisitForStackValue(expr->right());
4847 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4848 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4849 __ CompareRoot(x0, Heap::kTrueValueRootIndex);
4850 Split(eq, if_true, if_false, fall_through);
4851 break;
4852
4853 case Token::INSTANCEOF: {
4854 VisitForStackValue(expr->right());
4855 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
4856 __ CallStub(&stub);
4857 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4858 // The stub returns 0 for true.
4859 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
4860 break;
4861 }
4862
4863 default: {
4864 VisitForAccumulatorValue(expr->right());
4865 Condition cond = CompareIC::ComputeCondition(op);
4866
4867 // Pop the stack value.
4868 __ Pop(x1);
4869
4870 JumpPatchSite patch_site(masm_);
4871 if (ShouldInlineSmiCase(op)) {
4872 Label slow_case;
4873 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
4874 __ Cmp(x1, x0);
4875 Split(cond, if_true, if_false, NULL);
4876 __ Bind(&slow_case);
4877 }
4878
4879 Handle<Code> ic = CodeFactory::CompareIC(
4880 isolate(), op, strength(language_mode())).code();
4881 CallIC(ic, expr->CompareOperationFeedbackId());
4882 patch_site.EmitPatchInfo();
4883 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4884 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
4885 }
4886 }
4887
4888 // Convert the result of the comparison into one expected for this
4889 // expression's context.
4890 context()->Plug(if_true, if_false);
4891 }
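// A note on the inlined smi fast path above (a sketch, not the exact patched
// encoding): smis have a clear tag bit 0, so a single Orr combines the tags
// of both operands:
//   orr temp, x1, x0    // bit 0 is set iff either operand is a heap object
//   <patch site>        // initially always branches to slow_case; the
//                       // CompareIC later patches it to test temp's bit 0
//   cmp x1, x0          // smis compare correctly while still tagged, because
//                       // smi tagging preserves signed ordering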
4892
4893
4894 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4895 Expression* sub_expr,
4896 NilValue nil) {
4897 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
4898 Label materialize_true, materialize_false;
4899 Label* if_true = NULL;
4900 Label* if_false = NULL;
4901 Label* fall_through = NULL;
4902 context()->PrepareTest(&materialize_true, &materialize_false,
4903 &if_true, &if_false, &fall_through);
4904
4905 VisitForAccumulatorValue(sub_expr);
4906 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4907
4908 if (expr->op() == Token::EQ_STRICT) {
4909 Heap::RootListIndex nil_value = nil == kNullValue ?
4910 Heap::kNullValueRootIndex :
4911 Heap::kUndefinedValueRootIndex;
4912 __ CompareRoot(x0, nil_value);
4913 Split(eq, if_true, if_false, fall_through);
4914 } else {
4915 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4916 CallIC(ic, expr->CompareOperationFeedbackId());
4917 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through);
4918 }
4919
4920 context()->Plug(if_true, if_false);
4921 }
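// Strict equality ('=== null' / '=== undefined') reduces to a single root
// comparison above. Sloppy '== null' goes through the CompareNilIC because,
// in JS, null and undefined (and undetectable objects) all compare equal to
// null.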
4922
4923
4924 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4925 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4926 context()->Plug(x0);
4927 }
4928
4929
4930 void FullCodeGenerator::VisitYield(Yield* expr) {
4931 Comment cmnt(masm_, "[ Yield");
4932 SetExpressionPosition(expr);
4933
4934 // Evaluate yielded value first; the initial iterator definition depends on
4935 // this. It stays on the stack while we update the iterator.
4936 VisitForStackValue(expr->expression());
4937
4938 // TODO(jbramley): Tidy this up once the merge is done, using named registers
4939 // and suchlike. The implementation changes a little on bleeding_edge, so I
4940 // don't want to spend too much time on it now.
4941
4942 switch (expr->yield_kind()) {
4943 case Yield::kSuspend:
4944 // Pop value from top-of-stack slot; box result into result register.
4945 EmitCreateIteratorResult(false);
4946 __ Push(result_register());
4947 // Fall through.
4948 case Yield::kInitial: {
4949 Label suspend, continuation, post_runtime, resume;
4950
4951 __ B(&suspend);
4952 // TODO(jbramley): This label is bound here because the following code
4953 // looks at its pos(). Is it possible to do something more efficient here,
4954 // perhaps using Adr?
4955 __ Bind(&continuation);
4956 __ RecordGeneratorContinuation();
4957 __ B(&resume);
4958
4959 __ Bind(&suspend);
4960 VisitForAccumulatorValue(expr->generator_object());
4961 DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
4962 __ Mov(x1, Smi::FromInt(continuation.pos()));
4963 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
4964 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
4965 __ Mov(x1, cp);
4966 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
4967 kLRHasBeenSaved, kDontSaveFPRegs);
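      // If the value to return is the only thing on the operand stack (the
      // stack pointer sits at the first expression slot), there is nothing
      // for the runtime to capture, and the suspend call below is skipped.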
4968 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
4969 __ Cmp(__ StackPointer(), x1);
4970 __ B(eq, &post_runtime);
4971 __ Push(x0); // generator object
4972 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
4973 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4974 __ Bind(&post_runtime);
4975 __ Pop(result_register());
4976 EmitReturnSequence();
4977
4978 __ Bind(&resume);
4979 context()->Plug(result_register());
4980 break;
4981 }
4982
4983 case Yield::kFinal: {
4984 VisitForAccumulatorValue(expr->generator_object());
4985 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
4986 __ Str(x1, FieldMemOperand(result_register(),
4987 JSGeneratorObject::kContinuationOffset));
4988       // Pop value from top-of-stack slot; box result into result register.
4989 EmitCreateIteratorResult(true);
4990 EmitUnwindBeforeReturn();
4991 EmitReturnSequence();
4992 break;
4993 }
4994
4995 case Yield::kDelegating: {
4996 VisitForStackValue(expr->generator_object());
4997
4998 // Initial stack layout is as follows:
4999 // [sp + 1 * kPointerSize] iter
5000 // [sp + 0 * kPointerSize] g
5001
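      // Taken together, the labels below implement the delegating-yield loop
      // sketched by the per-label comments: send undefined first, then
      // repeatedly call result = iter[f](arg) with f == "next" (or "throw"
      // after an abrupt resume), suspend with each result until result.done
      // is true, and finally produce result.value.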
5002 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
5003 Label l_next, l_call, l_loop;
5004 Register load_receiver = LoadDescriptor::ReceiverRegister();
5005 Register load_name = LoadDescriptor::NameRegister();
5006
5007 // Initial send value is undefined.
5008 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
5009 __ B(&l_next);
5010
5011 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
5012 __ Bind(&l_catch);
5013 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw"
5014 __ Peek(x3, 1 * kPointerSize); // iter
5015 __ Push(load_name, x3, x0); // "throw", iter, except
5016 __ B(&l_call);
5017
5018 // try { received = %yield result }
5019 // Shuffle the received result above a try handler and yield it without
5020 // re-boxing.
5021 __ Bind(&l_try);
5022 __ Pop(x0); // result
5023 int handler_index = NewHandlerTableEntry();
5024 EnterTryBlock(handler_index, &l_catch);
5025 const int try_block_size = TryCatch::kElementCount * kPointerSize;
5026 __ Push(x0); // result
5027
5028 __ B(&l_suspend);
5029 // TODO(jbramley): This label is bound here because the following code
5030 // looks at its pos(). Is it possible to do something more efficient here,
5031 // perhaps using Adr?
5032 __ Bind(&l_continuation);
5033 __ RecordGeneratorContinuation();
5034 __ B(&l_resume);
5035
5036 __ Bind(&l_suspend);
5037 const int generator_object_depth = kPointerSize + try_block_size;
5038 __ Peek(x0, generator_object_depth);
5039 __ Push(x0); // g
5040 __ Push(Smi::FromInt(handler_index)); // handler-index
5041 DCHECK((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos()));
5042 __ Mov(x1, Smi::FromInt(l_continuation.pos()));
5043 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
5044 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
5045 __ Mov(x1, cp);
5046 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
5047 kLRHasBeenSaved, kDontSaveFPRegs);
5048 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
5049 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5050 __ Pop(x0); // result
5051 EmitReturnSequence();
5052 __ Bind(&l_resume); // received in x0
5053 ExitTryBlock(handler_index);
5054
5055 // receiver = iter; f = 'next'; arg = received;
5056 __ Bind(&l_next);
5057
5058 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
5059 __ Peek(x3, 1 * kPointerSize); // iter
5060 __ Push(load_name, x3, x0); // "next", iter, received
5061
5062 // result = receiver[f](arg);
5063 __ Bind(&l_call);
5064 __ Peek(load_receiver, 1 * kPointerSize);
5065 __ Peek(load_name, 2 * kPointerSize);
5066 __ Mov(LoadDescriptor::SlotRegister(),
5067 SmiFromSlot(expr->KeyedLoadFeedbackSlot()));
5068 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
5069 CallIC(ic, TypeFeedbackId::None());
5070 __ Mov(x1, x0);
5071 __ Poke(x1, 2 * kPointerSize);
5072 SetCallPosition(expr, 1);
5073 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
5074 __ CallStub(&stub);
5075
5076 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
5077 __ Drop(1); // The function is still on the stack; drop it.
5078
5079 // if (!result.done) goto l_try;
5080 __ Bind(&l_loop);
5081 __ Move(load_receiver, x0);
5082
5083 __ Push(load_receiver); // save result
5084 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
5085 __ Mov(LoadDescriptor::SlotRegister(),
5086 SmiFromSlot(expr->DoneFeedbackSlot()));
5087 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.done
5088 // The ToBooleanStub argument (result.done) is in x0.
5089 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
5090 CallIC(bool_ic);
5091 __ Cbz(x0, &l_try);
5092
5093 // result.value
5094 __ Pop(load_receiver); // result
5095 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
5096 __ Mov(LoadDescriptor::SlotRegister(),
5097 SmiFromSlot(expr->ValueFeedbackSlot()));
5098 CallLoadIC(NOT_INSIDE_TYPEOF); // x0=result.value
5099 context()->DropAndPlug(2, x0); // drop iter and g
5100 break;
5101 }
5102 }
5103 }
5104
5105
5106 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
5107 Expression *value,
5108 JSGeneratorObject::ResumeMode resume_mode) {
5109 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume");
5110 Register generator_object = x1;
5111 Register the_hole = x2;
5112 Register operand_stack_size = w3;
5113 Register function = x4;
5114
5115   // The value stays in x0 and is ultimately read by the resumed generator, as
5116   // if CallRuntime(Runtime::kSuspendJSGeneratorObject) had returned it; if the
5117   // resumed generator is already closed, it is instead read as the value to
5118   // throw. x1 holds the generator object until the activation has been resumed.
5119 VisitForStackValue(generator);
5120 VisitForAccumulatorValue(value);
5121 __ Pop(generator_object);
5122
5123 // Load suspended function and context.
5124 __ Ldr(cp, FieldMemOperand(generator_object,
5125 JSGeneratorObject::kContextOffset));
5126 __ Ldr(function, FieldMemOperand(generator_object,
5127 JSGeneratorObject::kFunctionOffset));
5128
5129 // Load receiver and store as the first argument.
5130 __ Ldr(x10, FieldMemOperand(generator_object,
5131 JSGeneratorObject::kReceiverOffset));
5132 __ Push(x10);
5133
5134 // Push holes for the rest of the arguments to the generator function.
5135 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
5136
5137   // The number of arguments is stored as an int32_t, with -1 as a marker
5138   // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so handling it in a
5139   // 64-bit X register would require sign extension. Here we operate on the
5140   // 32-bit W register w10, so no extension is needed.
5141 __ Ldr(w10, FieldMemOperand(x10,
5142 SharedFunctionInfo::kFormalParameterCountOffset));
5143 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
5144 __ PushMultipleTimes(the_hole, w10);
5145
5146 // Enter a new JavaScript frame, and initialize its slots as they were when
5147 // the generator was suspended.
5148 Label resume_frame, done;
5149 __ Bl(&resume_frame);
5150 __ B(&done);
5151
5152 __ Bind(&resume_frame);
5153 __ Push(lr, // Return address.
5154 fp, // Caller's frame pointer.
5155 cp, // Callee's context.
5156 function); // Callee's JS Function.
5157 __ Add(fp, __ StackPointer(), kPointerSize * 2);
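  // The frame now matches a standard JS frame (sketch; offsets from fp):
  //   fp[1 * kPointerSize]  : saved lr (return address)
  //   fp[0]                 : caller's fp
  //   fp[-1 * kPointerSize] : context (cp)
  //   fp[-2 * kPointerSize] : JS function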
5158
5159 // Load and untag the operand stack size.
5160 __ Ldr(x10, FieldMemOperand(generator_object,
5161 JSGeneratorObject::kOperandStackOffset));
5162 __ Ldr(operand_stack_size,
5163 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
5164
5165 // If we are sending a value and there is no operand stack, we can jump back
5166 // in directly.
5167 if (resume_mode == JSGeneratorObject::NEXT) {
5168 Label slow_resume;
5169 __ Cbnz(operand_stack_size, &slow_resume);
5170 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
5171 __ Ldrsw(x11,
5172 UntagSmiFieldMemOperand(generator_object,
5173 JSGeneratorObject::kContinuationOffset));
5174 __ Add(x10, x10, x11);
5175 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
5176 __ Str(x12, FieldMemOperand(generator_object,
5177 JSGeneratorObject::kContinuationOffset));
5178 __ Br(x10);
5179
5180 __ Bind(&slow_resume);
5181 }
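  // On the fast path above, the resume target is code_entry plus the
  // continuation offset recorded at the suspend site. The continuation field
  // is set to kGeneratorExecuting before jumping back in, so resuming an
  // already-running generator can be detected.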
5182
5183 // Otherwise, we push holes for the operand stack and call the runtime to fix
5184 // up the stack and the handlers.
5185 __ PushMultipleTimes(the_hole, operand_stack_size);
5186
5187 __ Mov(x10, Smi::FromInt(resume_mode));
5188 __ Push(generator_object, result_register(), x10);
5189 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
5190 // Not reached: the runtime call returns elsewhere.
5191 __ Unreachable();
5192
5193 __ Bind(&done);
5194 context()->Plug(result_register());
5195 }
5196
5197
5198 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
5199 Label gc_required;
5200 Label allocated;
5201
5202 const int instance_size = 5 * kPointerSize;
5203 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
5204 instance_size);
5205
5206 // Allocate and populate an object with this form: { value: VAL, done: DONE }
5207
5208 Register result = x0;
5209 __ Allocate(instance_size, result, x10, x11, &gc_required, TAG_OBJECT);
5210 __ B(&allocated);
5211
5212 __ Bind(&gc_required);
5213 __ Push(Smi::FromInt(instance_size));
5214 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
5215 __ Ldr(context_register(),
5216 MemOperand(fp, StandardFrameConstants::kContextOffset));
5217
5218 __ Bind(&allocated);
5219 Register map_reg = x1;
5220 Register result_value = x2;
5221 Register boolean_done = x3;
5222 Register empty_fixed_array = x4;
5223 Register untagged_result = x5;
5224 __ Ldr(map_reg, GlobalObjectMemOperand());
5225 __ Ldr(map_reg, FieldMemOperand(map_reg, GlobalObject::kNativeContextOffset));
5226 __ Ldr(map_reg,
5227 ContextMemOperand(map_reg, Context::ITERATOR_RESULT_MAP_INDEX));
5228 __ Pop(result_value);
5229 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done)));
5230 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array()));
5231 STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
5232 JSObject::kElementsOffset);
5233 STATIC_ASSERT(JSGeneratorObject::kResultValuePropertyOffset + kPointerSize ==
5234 JSGeneratorObject::kResultDonePropertyOffset);
5235 __ ObjectUntag(untagged_result, result);
5236 __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
5237 __ Stp(empty_fixed_array, empty_fixed_array,
5238 MemOperand(untagged_result, JSObject::kPropertiesOffset));
5239 __ Stp(result_value, boolean_done,
5240 MemOperand(untagged_result,
5241 JSGeneratorObject::kResultValuePropertyOffset));
5242
5243 // Only the value field needs a write barrier, as the other values are in the
5244 // root set.
5245 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset,
5246 x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
5247 }
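// Layout of the object built above (fixed by the STATIC_ASSERTs; five words,
// matching instance_size):
//   [map]        iterator_result_map
//   [properties] empty_fixed_array
//   [elements]   empty_fixed_array
//   [value]      the popped result value
//   [done]       ToBoolean(done)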
5248
5249
5250 // TODO(all): I don't like this method.
5251 // In too many places, x0 is used directly where this accessor should be.
5252 // Moreover, this function is not suitable for every place where x0 should be
5253 // abstracted (eg. when used as an argument), yet some places assume that the
5254 // first argument register is x0 and use this function anyway.
5255 // Considering that most of the register allocation is hard-coded in the
5256 // FullCodeGen, that it is unlikely we will need to change it extensively, and
5257 // that abstracting the allocation through functions would not yield any
5258 // performance benefit, I think the existence of this function is debatable.
5259 Register FullCodeGenerator::result_register() {
5260 return x0;
5261 }
5262
5263
5264 Register FullCodeGenerator::context_register() {
5265 return cp;
5266 }
5267
5268
5269 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5270 DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
5271 __ Str(value, MemOperand(fp, frame_offset));
5272 }
5273
5274
5275 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5276 __ Ldr(dst, ContextMemOperand(cp, context_index));
5277 }
5278
5279
5280 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5281 Scope* declaration_scope = scope()->DeclarationScope();
5282 if (declaration_scope->is_script_scope() ||
5283 declaration_scope->is_module_scope()) {
5284 // Contexts nested in the native context have a canonical empty function
5285 // as their closure, not the anonymous closure containing the global
5286 // code. Pass a smi sentinel and let the runtime look up the empty
5287 // function.
5288 DCHECK(kSmiTag == 0);
5289 __ Push(xzr);
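    // xzr reads as zero, and with kSmiTag == 0 the value 0 is exactly
    // Smi::FromInt(0), so this pushes the smi sentinel without materializing
    // it in a register.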
5290 } else if (declaration_scope->is_eval_scope()) {
5291 // Contexts created by a call to eval have the same closure as the
5292 // context calling eval, not the anonymous closure containing the eval
5293 // code. Fetch it from the context.
5294 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
5295 __ Push(x10);
5296 } else {
5297 DCHECK(declaration_scope->is_function_scope());
5298 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
5299 __ Push(x10);
5300 }
5301 }
5302
5303
5304 void FullCodeGenerator::EnterFinallyBlock() {
5305 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
5306 DCHECK(!result_register().is(x10));
5307   // Preserve the result register while executing the finally block. Also push
5308   // the lr return address "cooked" as a smi-encoded delta from the code object.
5309 __ Sub(x10, lr, Operand(masm_->CodeObject()));
5310 __ SmiTag(x10);
5311 __ Push(result_register(), x10);
5312
5313 // Store pending message while executing finally block.
5314 ExternalReference pending_message_obj =
5315 ExternalReference::address_of_pending_message_obj(isolate());
5316 __ Mov(x10, pending_message_obj);
5317 __ Ldr(x10, MemOperand(x10));
5318 __ Push(x10);
5319
5320 ClearPendingMessage();
5321 }
5322
5323
5324 void FullCodeGenerator::ExitFinallyBlock() {
5325 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
5326 DCHECK(!result_register().is(x10));
5327
5328 // Restore pending message from stack.
5329 __ Pop(x10);
5330 ExternalReference pending_message_obj =
5331 ExternalReference::address_of_pending_message_obj(isolate());
5332 __ Mov(x13, pending_message_obj);
5333 __ Str(x10, MemOperand(x13));
5334
5335 // Restore result register and cooked return address from the stack.
5336 __ Pop(x10, result_register());
5337
5338 // Uncook the return address (see EnterFinallyBlock).
5339 __ SmiUntag(x10);
5340 __ Add(x11, x10, Operand(masm_->CodeObject()));
5341 __ Br(x11);
5342 }
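// Cooking, by example (illustrative): the slot written by EnterFinallyBlock
// holds SmiTag(lr - <code object pointer>). The delta remains valid even if
// a GC moves the code object, because the embedded reference produced by
// masm_->CodeObject() is relocated with it, so the Add above reconstructs a
// correct return address.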
5343
5344
5345 void FullCodeGenerator::ClearPendingMessage() {
5346 DCHECK(!result_register().is(x10));
5347 ExternalReference pending_message_obj =
5348 ExternalReference::address_of_pending_message_obj(isolate());
5349 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
5350 __ Mov(x13, pending_message_obj);
5351 __ Str(x10, MemOperand(x13));
5352 }
5353
5354
5355 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5356 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5357 __ Mov(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
5358 }
5359
5360
5361 #undef __
5362
5363
5364 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5365 Address pc,
5366 BackEdgeState target_state,
5367 Code* replacement_code) {
5368   // The instruction to patch (branch or nop) sits three instructions before pc.
5369 Address branch_address = pc - 3 * kInstructionSize;
5370 PatchingAssembler patcher(branch_address, 1);
5371
5372 DCHECK(Instruction::Cast(branch_address)
5373 ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
5374 (Instruction::Cast(branch_address)->IsCondBranchImm() &&
5375 Instruction::Cast(branch_address)->ImmPCOffset() ==
5376 6 * kInstructionSize));
5377
5378 switch (target_state) {
5379 case INTERRUPT:
5380 // <decrement profiling counter>
5381 // .. .. .. .. b.pl ok
5382 // .. .. .. .. ldr x16, pc+<interrupt stub address>
5383 // .. .. .. .. blr x16
5384 // ... more instructions.
5385 // ok-label
5386 // Jump offset is 6 instructions.
5387 patcher.b(6, pl);
5388 break;
5389 case ON_STACK_REPLACEMENT:
5390 case OSR_AFTER_STACK_CHECK:
5391 // <decrement profiling counter>
5392 // .. .. .. .. mov x0, x0 (NOP)
5393 // .. .. .. .. ldr x16, pc+<on-stack replacement address>
5394 // .. .. .. .. blr x16
5395 patcher.nop(Assembler::INTERRUPT_CODE_NOP);
5396 break;
5397 }
5398
5399 // Replace the call address.
5400 Instruction* load = Instruction::Cast(pc)->preceding(2);
5401 Address interrupt_address_pointer =
5402 reinterpret_cast<Address>(load) + load->ImmPCOffset();
5403   DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
5404           reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5405                                          ->builtins()
5406                                          ->OnStackReplacement()
5407                                          ->entry())) ||
5408          (Memory::uint64_at(interrupt_address_pointer) ==
5409           reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5410                                          ->builtins()
5411                                          ->InterruptCheck()
5412                                          ->entry())) ||
5413          (Memory::uint64_at(interrupt_address_pointer) ==
5414           reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate()
5415                                          ->builtins()
5416                                          ->OsrAfterStackCheck()
5417                                          ->entry())));
5423 Memory::uint64_at(interrupt_address_pointer) =
5424 reinterpret_cast<uint64_t>(replacement_code->entry());
5425
5426 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5427 unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
5428 }
5429
5430
5431 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5432 Isolate* isolate,
5433 Code* unoptimized_code,
5434 Address pc) {
5435 // TODO(jbramley): There should be some extra assertions here (as in the ARM
5436 // back-end), but this function is gone in bleeding_edge so it might not
5437 // matter anyway.
5438 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);
5439
5440 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
5441 Instruction* load = Instruction::Cast(pc)->preceding(2);
5442 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
5443 load->ImmPCOffset());
5444 if (entry == reinterpret_cast<uint64_t>(
5445 isolate->builtins()->OnStackReplacement()->entry())) {
5446 return ON_STACK_REPLACEMENT;
5447 } else if (entry == reinterpret_cast<uint64_t>(
5448 isolate->builtins()->OsrAfterStackCheck()->entry())) {
5449 return OSR_AFTER_STACK_CHECK;
5450 } else {
5451 UNREACHABLE();
5452 }
5453 }
5454
5455 return INTERRUPT;
5456 }
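// Summary of the states decoded above (sketch):
//   b.pl <ok>                                 -> INTERRUPT
//   nop, literal == OnStackReplacement entry  -> ON_STACK_REPLACEMENT
//   nop, literal == OsrAfterStackCheck entry  -> OSR_AFTER_STACK_CHECK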
5457
5458
5459 } // namespace internal
5460 } // namespace v8
5461
5462 #endif // V8_TARGET_ARCH_ARM64