Chromium Code Reviews

Side by Side Diff: src/ia32/full-codegen-ia32.cc

Issue 1248443003: Move Full-codegen into its own folder. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: rename define. Created 5 years, 5 months ago.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #if V8_TARGET_ARCH_IA32
8
9 #include "src/code-factory.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h"
12 #include "src/compiler.h"
13 #include "src/debug.h"
14 #include "src/full-codegen.h"
15 #include "src/ic/ic.h"
16 #include "src/parser.h"
17 #include "src/scopes.h"
18
19 namespace v8 {
20 namespace internal {
21
22 #define __ ACCESS_MASM(masm_)
23
24
25 class JumpPatchSite BASE_EMBEDDED {
26 public:
27 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
28 #ifdef DEBUG
29 info_emitted_ = false;
30 #endif
31 }
32
33 ~JumpPatchSite() {
34 DCHECK(patch_site_.is_bound() == info_emitted_);
35 }
36
37 void EmitJumpIfNotSmi(Register reg,
38 Label* target,
39 Label::Distance distance = Label::kFar) {
40 __ test(reg, Immediate(kSmiTagMask));
41 EmitJump(not_carry, target, distance); // Always taken before patched.
42 }
43
44 void EmitJumpIfSmi(Register reg,
45 Label* target,
46 Label::Distance distance = Label::kFar) {
47 __ test(reg, Immediate(kSmiTagMask));
48 EmitJump(carry, target, distance); // Never taken before patched.
49 }
50
51 void EmitPatchInfo() {
52 if (patch_site_.is_bound()) {
53 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
54 DCHECK(is_uint8(delta_to_patch_site));
55 __ test(eax, Immediate(delta_to_patch_site));
56 #ifdef DEBUG
57 info_emitted_ = true;
58 #endif
59 } else {
60 __ nop(); // Signals no inlined code.
61 }
62 }
63
64 private:
65 // jc will be patched with jz, jnc will become jnz.
66 void EmitJump(Condition cc, Label* target, Label::Distance distance) {
67 DCHECK(!patch_site_.is_bound() && !info_emitted_);
68 DCHECK(cc == carry || cc == not_carry);
69 __ bind(&patch_site_);
70 __ j(cc, target, distance);
71 }
72
73 MacroAssembler* masm_;
74 Label patch_site_;
75 #ifdef DEBUG
76 bool info_emitted_;
77 #endif
78 };
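How the patch site pays off: EmitPatchInfo() emits a test instruction whose 8-bit immediate is the code distance back to the bound jump, so the inline-cache patcher can later locate the jc/jnc at patch_site_ and flip its condition (carry <-> zero) once type feedback arrives. A rough sketch of the emitted shape (illustrative only, not the exact encoding):

    patch_site_:  jnc slow_case     ; from EmitJumpIfNotSmi; taken while unpatched
                  ...               ; inlined smi fast path, IC call
                  test eax, delta   ; delta = SizeOfCodeGeneratedSince(patch_site_), fits in uint8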
79
80
81 // Generate code for a JS function. On entry to the function the receiver
82 // and arguments have been pushed on the stack left to right, with the
83 // return address on top of them. The actual argument count matches the
84 // formal parameter count expected by the function.
85 //
86 // The live registers are:
87 // o edi: the JS function object being called (i.e. ourselves)
88 // o esi: our context
89 // o ebp: our caller's frame pointer
90 // o esp: stack pointer (pointing to return address)
91 //
92 // The function builds a JS frame. Please see JavaScriptFrameConstants in
93 // frames-ia32.h for its layout.
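For reference, the incoming stack for a function with n parameters, as implied by the offsets used below (receiver_offset = (n + 1) * kPointerSize relative to esp on entry, and kCallerSPOffset = ebp + 2 * kPointerSize once the frame is built):

    esp + 0                      : return address
    esp + 1 * kPointerSize       : parameter n-1 (pushed last)
    ...
    esp + n * kPointerSize       : parameter 0
    esp + (n + 1) * kPointerSize : receiver (pushed first)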
94 void FullCodeGenerator::Generate() {
95 CompilationInfo* info = info_;
96 profiling_counter_ = isolate()->factory()->NewCell(
97 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
98 SetFunctionPosition(function());
99 Comment cmnt(masm_, "[ function compiled by full code generator");
100
101 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
102
103 #ifdef DEBUG
104 if (strlen(FLAG_stop_at) > 0 &&
105 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
106 __ int3();
107 }
108 #endif
109
110 // Sloppy mode functions and builtins need to replace the receiver with the
111 // global proxy when called as functions (without an explicit receiver
112 // object).
113 if (is_sloppy(info->language_mode()) && !info->is_native() &&
114 info->MayUseThis()) {
115 Label ok;
116 // +1 for return address.
117 int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
118 __ mov(ecx, Operand(esp, receiver_offset));
119
120 __ cmp(ecx, isolate()->factory()->undefined_value());
121 __ j(not_equal, &ok, Label::kNear);
122
123 __ mov(ecx, GlobalObjectOperand());
124 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));
125
126 __ mov(Operand(esp, receiver_offset), ecx);
127
128 __ bind(&ok);
129 }
130
131 // Open a frame scope to indicate that there is a frame on the stack. The
132 // MANUAL indicates that the scope shouldn't actually generate code to set up
133 // the frame (that is done below).
134 FrameScope frame_scope(masm_, StackFrame::MANUAL);
135
136 info->set_prologue_offset(masm_->pc_offset());
137 __ Prologue(info->IsCodePreAgingActive());
138 info->AddNoFrameRange(0, masm_->pc_offset());
139
140 { Comment cmnt(masm_, "[ Allocate locals");
141 int locals_count = info->scope()->num_stack_slots();
142 // Generators allocate locals, if any, in context slots.
143 DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
144 if (locals_count == 1) {
145 __ push(Immediate(isolate()->factory()->undefined_value()));
146 } else if (locals_count > 1) {
147 if (locals_count >= 128) {
148 Label ok;
149 __ mov(ecx, esp);
150 __ sub(ecx, Immediate(locals_count * kPointerSize));
151 ExternalReference stack_limit =
152 ExternalReference::address_of_real_stack_limit(isolate());
153 __ cmp(ecx, Operand::StaticVariable(stack_limit));
154 __ j(above_equal, &ok, Label::kNear);
155 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
156 __ bind(&ok);
157 }
158 __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
159 const int kMaxPushes = 32;
160 if (locals_count >= kMaxPushes) {
161 int loop_iterations = locals_count / kMaxPushes;
162 __ mov(ecx, loop_iterations);
163 Label loop_header;
164 __ bind(&loop_header);
165 // Do pushes.
166 for (int i = 0; i < kMaxPushes; i++) {
167 __ push(eax);
168 }
169 __ dec(ecx);
170 __ j(not_zero, &loop_header, Label::kNear);
171 }
172 int remaining = locals_count % kMaxPushes;
173 // Emit the remaining pushes.
174 for (int i = 0; i < remaining; i++) {
175 __ push(eax);
176 }
177 }
178 }
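A quick worked example of the push strategy above (count illustrative): locals_count = 100 gives loop_iterations = 100 / 32 = 3, so the 32 unrolled pushes in the loop body run three times (96 slots), and remaining = 100 % 32 = 4 pushes are emitted straight-line. Counts below kMaxPushes skip the loop entirely, and counts of 128 or more additionally get the real-stack-limit check first.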
179
180 bool function_in_register = true;
181
182 // Possibly allocate a local context.
183 if (info->scope()->num_heap_slots() > 0) {
184 Comment cmnt(masm_, "[ Allocate context");
185 bool need_write_barrier = true;
186 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
187 // Argument to NewContext is the function, which is still in edi.
188 if (info->scope()->is_script_scope()) {
189 __ push(edi);
190 __ Push(info->scope()->GetScopeInfo(info->isolate()));
191 __ CallRuntime(Runtime::kNewScriptContext, 2);
192 } else if (slots <= FastNewContextStub::kMaximumSlots) {
193 FastNewContextStub stub(isolate(), slots);
194 __ CallStub(&stub);
195 // Result of FastNewContextStub is always in new space.
196 need_write_barrier = false;
197 } else {
198 __ push(edi);
199 __ CallRuntime(Runtime::kNewFunctionContext, 1);
200 }
201 function_in_register = false;
202 // Context is returned in eax. It replaces the context passed to us.
203 // It's saved in the stack and kept live in esi.
204 __ mov(esi, eax);
205 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);
206
207 // Copy parameters into context if necessary.
208 int num_parameters = info->scope()->num_parameters();
209 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
210 for (int i = first_parameter; i < num_parameters; i++) {
211 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
212 if (var->IsContextSlot()) {
213 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
214 (num_parameters - 1 - i) * kPointerSize;
215 // Load parameter from stack.
216 __ mov(eax, Operand(ebp, parameter_offset));
217 // Store it in the context.
218 int context_offset = Context::SlotOffset(var->index());
219 __ mov(Operand(esi, context_offset), eax);
220 // Update the write barrier. This clobbers eax and ebx.
221 if (need_write_barrier) {
222 __ RecordWriteContextSlot(esi,
223 context_offset,
224 eax,
225 ebx,
226 kDontSaveFPRegs);
227 } else if (FLAG_debug_code) {
228 Label done;
229 __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
230 __ Abort(kExpectedNewSpaceObject);
231 __ bind(&done);
232 }
233 }
234 }
235 }
236
237 // Possibly set up a local binding to the 'this' function, which is used in
238 // derived constructors with super calls.
239 Variable* this_function_var = scope()->this_function_var();
240 if (this_function_var != nullptr) {
241 Comment cmnt(masm_, "[ This function");
242 if (!function_in_register) {
243 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
244 // The write barrier clobbers the register again; keep it marked as such.
245 }
246 SetVar(this_function_var, edi, ebx, edx);
247 }
248
249 Variable* new_target_var = scope()->new_target_var();
250 if (new_target_var != nullptr) {
251 Comment cmnt(masm_, "[ new.target");
252 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
253 Label non_adaptor_frame;
254 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
255 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
256 __ j(not_equal, &non_adaptor_frame);
257 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
258
259 __ bind(&non_adaptor_frame);
260 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
261 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
262
263 Label non_construct_frame, done;
264 __ j(not_equal, &non_construct_frame);
265
266 // Construct frame
267 __ mov(eax,
268 Operand(eax, ConstructFrameConstants::kOriginalConstructorOffset));
269 __ jmp(&done);
270
271 // Non-construct frame
272 __ bind(&non_construct_frame);
273 __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
274
275 __ bind(&done);
276 SetVar(new_target_var, eax, ebx, edx);
277 }
278
279
280 // Possibly allocate RestParameters
281 int rest_index;
282 Variable* rest_param = scope()->rest_parameter(&rest_index);
283 if (rest_param) {
284 Comment cmnt(masm_, "[ Allocate rest parameter array");
285
286 int num_parameters = info->scope()->num_parameters();
287 int offset = num_parameters * kPointerSize;
288
289 __ lea(edx,
290 Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
291 __ push(edx);
292 __ push(Immediate(Smi::FromInt(num_parameters)));
293 __ push(Immediate(Smi::FromInt(rest_index)));
294 __ push(Immediate(Smi::FromInt(language_mode())));
295
296 RestParamAccessStub stub(isolate());
297 __ CallStub(&stub);
298
299 SetVar(rest_param, eax, ebx, edx);
300 }
301
302 Variable* arguments = scope()->arguments();
303 if (arguments != NULL) {
304 // Function uses arguments object.
305 Comment cmnt(masm_, "[ Allocate arguments object");
306 if (function_in_register) {
307 __ push(edi);
308 } else {
309 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
310 }
311 // Receiver is just before the parameters on the caller's stack.
312 int num_parameters = info->scope()->num_parameters();
313 int offset = num_parameters * kPointerSize;
314 __ lea(edx,
315 Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
316 __ push(edx);
317 __ push(Immediate(Smi::FromInt(num_parameters)));
318 // Arguments to ArgumentsAccessStub:
319 // function, receiver address, parameter count.
320 // The stub will rewrite receiver and parameter count if the previous
321 // stack frame was an arguments adapter frame.
322 ArgumentsAccessStub::Type type;
323 if (is_strict(language_mode()) || !is_simple_parameter_list()) {
324 type = ArgumentsAccessStub::NEW_STRICT;
325 } else if (function()->has_duplicate_parameters()) {
326 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
327 } else {
328 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
329 }
330
331 ArgumentsAccessStub stub(isolate(), type);
332 __ CallStub(&stub);
333
334 SetVar(arguments, eax, ebx, edx);
335 }
336
337 if (FLAG_trace) {
338 __ CallRuntime(Runtime::kTraceEnter, 0);
339 }
340
341 // Visit the declarations and body unless there is an illegal
342 // redeclaration.
343 if (scope()->HasIllegalRedeclaration()) {
344 Comment cmnt(masm_, "[ Declarations");
345 scope()->VisitIllegalRedeclaration(this);
346
347 } else {
348 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
349 { Comment cmnt(masm_, "[ Declarations");
350 VisitDeclarations(scope()->declarations());
351 }
352
353 { Comment cmnt(masm_, "[ Stack check");
354 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
355 Label ok;
356 ExternalReference stack_limit
357 = ExternalReference::address_of_stack_limit(isolate());
358 __ cmp(esp, Operand::StaticVariable(stack_limit));
359 __ j(above_equal, &ok, Label::kNear);
360 __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
361 __ bind(&ok);
362 }
363
364 { Comment cmnt(masm_, "[ Body");
365 DCHECK(loop_depth() == 0);
366 VisitStatements(function()->body());
367 DCHECK(loop_depth() == 0);
368 }
369 }
370
371 // Always emit a 'return undefined' in case control fell off the end of
372 // the body.
373 { Comment cmnt(masm_, "[ return <undefined>;");
374 __ mov(eax, isolate()->factory()->undefined_value());
375 EmitReturnSequence();
376 }
377 }
378
379
380 void FullCodeGenerator::ClearAccumulator() {
381 __ Move(eax, Immediate(Smi::FromInt(0)));
382 }
383
384
385 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
386 __ mov(ebx, Immediate(profiling_counter_));
387 __ sub(FieldOperand(ebx, Cell::kValueOffset),
388 Immediate(Smi::FromInt(delta)));
389 }
390
391
392 void FullCodeGenerator::EmitProfilingCounterReset() {
393 int reset_value = FLAG_interrupt_budget;
394 __ mov(ebx, Immediate(profiling_counter_));
395 __ mov(FieldOperand(ebx, Cell::kValueOffset),
396 Immediate(Smi::FromInt(reset_value)));
397 }
398
399
400 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
401 Label* back_edge_target) {
402 Comment cmnt(masm_, "[ Back edge bookkeeping");
403 Label ok;
404
405 DCHECK(back_edge_target->is_bound());
406 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
407 int weight = Min(kMaxBackEdgeWeight,
408 Max(1, distance / kCodeSizeMultiplier));
409 EmitProfilingCounterDecrement(weight);
410 __ j(positive, &ok, Label::kNear);
411 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
412
413 // Record a mapping of this PC offset to the OSR id. This is used to find
414 // the AST id from the unoptimized code in order to use it as a key into
415 // the deoptimization input data found in the optimized code.
416 RecordBackEdge(stmt->OsrEntryId());
417
418 EmitProfilingCounterReset();
419
420 __ bind(&ok);
421 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
422 // Record a mapping of the OSR id to this PC. This is used if the OSR
423 // entry becomes the target of a bailout. We don't expect it to be, but
424 // we want it to work if it is.
425 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
426 }
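The weight formula scales interrupt frequency with loop body size: weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)). With an illustrative distance of 1200 bytes and a hypothetical kCodeSizeMultiplier of 100, each back edge decrements the profiling counter by 12, so bigger loop bodies exhaust FLAG_interrupt_budget proportionally faster and reach the InterruptCheck (and a possible OSR attempt) sooner.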
427
428
429 void FullCodeGenerator::EmitReturnSequence() {
430 Comment cmnt(masm_, "[ Return sequence");
431 if (return_label_.is_bound()) {
432 __ jmp(&return_label_);
433 } else {
434 // Common return label
435 __ bind(&return_label_);
436 if (FLAG_trace) {
437 __ push(eax);
438 __ CallRuntime(Runtime::kTraceExit, 1);
439 }
440 // Pretend that the exit is a backwards jump to the entry.
441 int weight = 1;
442 if (info_->ShouldSelfOptimize()) {
443 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
444 } else {
445 int distance = masm_->pc_offset();
446 weight = Min(kMaxBackEdgeWeight,
447 Max(1, distance / kCodeSizeMultiplier));
448 }
449 EmitProfilingCounterDecrement(weight);
450 Label ok;
451 __ j(positive, &ok, Label::kNear);
452 __ push(eax);
453 __ call(isolate()->builtins()->InterruptCheck(),
454 RelocInfo::CODE_TARGET);
455 __ pop(eax);
456 EmitProfilingCounterReset();
457 __ bind(&ok);
458
459 SetReturnPosition(function());
460 int no_frame_start = masm_->pc_offset();
461 __ leave();
462
463 int arg_count = info_->scope()->num_parameters() + 1;
464 int arguments_bytes = arg_count * kPointerSize;
465 __ Ret(arguments_bytes, ecx);
466 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
467 }
468 }
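Note on the final Ret: arg_count counts the receiver along with the formal parameters, so a function with two parameters pops arguments_bytes = 3 * kPointerSize = 12 bytes. ecx is only a scratch register here; if I read the macro right, Ret(bytes, scratch) needs it to juggle the return address when the byte count does not fit ret's 16-bit immediate.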
469
470
471 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
472 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
473 MemOperand operand = codegen()->VarOperand(var, result_register());
474 // Memory operands can be pushed directly.
475 __ push(operand);
476 }
477
478
479 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
480 UNREACHABLE(); // Not used on IA32.
481 }
482
483
484 void FullCodeGenerator::AccumulatorValueContext::Plug(
485 Heap::RootListIndex index) const {
486 UNREACHABLE(); // Not used on IA32.
487 }
488
489
490 void FullCodeGenerator::StackValueContext::Plug(
491 Heap::RootListIndex index) const {
492 UNREACHABLE(); // Not used on IA32.
493 }
494
495
496 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
497 UNREACHABLE(); // Not used on IA32.
498 }
499
500
501 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
502 }
503
504
505 void FullCodeGenerator::AccumulatorValueContext::Plug(
506 Handle<Object> lit) const {
507 if (lit->IsSmi()) {
508 __ SafeMove(result_register(), Immediate(lit));
509 } else {
510 __ Move(result_register(), Immediate(lit));
511 }
512 }
513
514
515 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
516 if (lit->IsSmi()) {
517 __ SafePush(Immediate(lit));
518 } else {
519 __ push(Immediate(lit));
520 }
521 }
522
523
524 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
525 codegen()->PrepareForBailoutBeforeSplit(condition(),
526 true,
527 true_label_,
528 false_label_);
529 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
530 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
531 if (false_label_ != fall_through_) __ jmp(false_label_);
532 } else if (lit->IsTrue() || lit->IsJSObject()) {
533 if (true_label_ != fall_through_) __ jmp(true_label_);
534 } else if (lit->IsString()) {
535 if (String::cast(*lit)->length() == 0) {
536 if (false_label_ != fall_through_) __ jmp(false_label_);
537 } else {
538 if (true_label_ != fall_through_) __ jmp(true_label_);
539 }
540 } else if (lit->IsSmi()) {
541 if (Smi::cast(*lit)->value() == 0) {
542 if (false_label_ != fall_through_) __ jmp(false_label_);
543 } else {
544 if (true_label_ != fall_through_) __ jmp(true_label_);
545 }
546 } else {
547 // For simplicity we always test the accumulator register.
548 __ mov(result_register(), lit);
549 codegen()->DoTest(this);
550 }
551 }
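The literal-to-branch mapping implemented above, for quick reference:

    undefined, null, false  -> false_label_
    true, any JSObject      -> true_label_
    string                  -> false_label_ if empty, else true_label_
    smi                     -> false_label_ if zero, else true_label_
    anything else           -> materialize in the accumulator, fall back to DoTest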
552
553
554 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
555 Register reg) const {
556 DCHECK(count > 0);
557 __ Drop(count);
558 }
559
560
561 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
562 int count,
563 Register reg) const {
564 DCHECK(count > 0);
565 __ Drop(count);
566 __ Move(result_register(), reg);
567 }
568
569
570 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
571 Register reg) const {
572 DCHECK(count > 0);
573 if (count > 1) __ Drop(count - 1);
574 __ mov(Operand(esp, 0), reg);
575 }
576
577
578 void FullCodeGenerator::TestContext::DropAndPlug(int count,
579 Register reg) const {
580 DCHECK(count > 0);
581 // For simplicity we always test the accumulator register.
582 __ Drop(count);
583 __ Move(result_register(), reg);
584 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
585 codegen()->DoTest(this);
586 }
587
588
589 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
590 Label* materialize_false) const {
591 DCHECK(materialize_true == materialize_false);
592 __ bind(materialize_true);
593 }
594
595
596 void FullCodeGenerator::AccumulatorValueContext::Plug(
597 Label* materialize_true,
598 Label* materialize_false) const {
599 Label done;
600 __ bind(materialize_true);
601 __ mov(result_register(), isolate()->factory()->true_value());
602 __ jmp(&done, Label::kNear);
603 __ bind(materialize_false);
604 __ mov(result_register(), isolate()->factory()->false_value());
605 __ bind(&done);
606 }
607
608
609 void FullCodeGenerator::StackValueContext::Plug(
610 Label* materialize_true,
611 Label* materialize_false) const {
612 Label done;
613 __ bind(materialize_true);
614 __ push(Immediate(isolate()->factory()->true_value()));
615 __ jmp(&done, Label::kNear);
616 __ bind(materialize_false);
617 __ push(Immediate(isolate()->factory()->false_value()));
618 __ bind(&done);
619 }
620
621
622 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
623 Label* materialize_false) const {
624 DCHECK(materialize_true == true_label_);
625 DCHECK(materialize_false == false_label_);
626 }
627
628
629 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
630 Handle<Object> value = flag
631 ? isolate()->factory()->true_value()
632 : isolate()->factory()->false_value();
633 __ mov(result_register(), value);
634 }
635
636
637 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
638 Handle<Object> value = flag
639 ? isolate()->factory()->true_value()
640 : isolate()->factory()->false_value();
641 __ push(Immediate(value));
642 }
643
644
645 void FullCodeGenerator::TestContext::Plug(bool flag) const {
646 codegen()->PrepareForBailoutBeforeSplit(condition(),
647 true,
648 true_label_,
649 false_label_);
650 if (flag) {
651 if (true_label_ != fall_through_) __ jmp(true_label_);
652 } else {
653 if (false_label_ != fall_through_) __ jmp(false_label_);
654 }
655 }
656
657
658 void FullCodeGenerator::DoTest(Expression* condition,
659 Label* if_true,
660 Label* if_false,
661 Label* fall_through) {
662 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
663 CallIC(ic, condition->test_id());
664 __ test(result_register(), result_register());
665 // The stub returns nonzero for true.
666 Split(not_zero, if_true, if_false, fall_through);
667 }
668
669
670 void FullCodeGenerator::Split(Condition cc,
671 Label* if_true,
672 Label* if_false,
673 Label* fall_through) {
674 if (if_false == fall_through) {
675 __ j(cc, if_true);
676 } else if (if_true == fall_through) {
677 __ j(NegateCondition(cc), if_false);
678 } else {
679 __ j(cc, if_true);
680 __ jmp(if_false);
681 }
682 }
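Split's three cases, spelled out (cc is the condition under which the test is true):

    if_false == fall_through : j(cc, if_true)                   ; false case falls through
    if_true  == fall_through : j(NegateCondition(cc), if_false) ; true case falls through
    neither                  : j(cc, if_true); jmp(if_false)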
683
684
685 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
686 DCHECK(var->IsStackAllocated());
687 // Offset is negative because higher indexes are at lower addresses.
688 int offset = -var->index() * kPointerSize;
689 // Adjust by a (parameter or local) base offset.
690 if (var->IsParameter()) {
691 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
692 } else {
693 offset += JavaScriptFrameConstants::kLocal0Offset;
694 }
695 return Operand(ebp, offset);
696 }
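Worked example (indices illustrative): with num_parameters = 2, the parameter with index 0 gets offset = -0 + (2 + 1) * kPointerSize, i.e. Operand(ebp, 12), matching the frame picture above (ebp + 8 holds parameter 1, ebp + 12 parameter 0). A local with index k lands at ebp + kLocal0Offset - k * kPointerSize, below the fixed frame slots.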
697
698
699 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
700 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
701 if (var->IsContextSlot()) {
702 int context_chain_length = scope()->ContextChainLength(var->scope());
703 __ LoadContext(scratch, context_chain_length);
704 return ContextOperand(scratch, var->index());
705 } else {
706 return StackOperand(var);
707 }
708 }
709
710
711 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
712 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
713 MemOperand location = VarOperand(var, dest);
714 __ mov(dest, location);
715 }
716
717
718 void FullCodeGenerator::SetVar(Variable* var,
719 Register src,
720 Register scratch0,
721 Register scratch1) {
722 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
723 DCHECK(!scratch0.is(src));
724 DCHECK(!scratch0.is(scratch1));
725 DCHECK(!scratch1.is(src));
726 MemOperand location = VarOperand(var, scratch0);
727 __ mov(location, src);
728
729 // Emit the write barrier code if the location is in the heap.
730 if (var->IsContextSlot()) {
731 int offset = Context::SlotOffset(var->index());
732 DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
733 __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
734 }
735 }
736
737
738 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
739 bool should_normalize,
740 Label* if_true,
741 Label* if_false) {
742 // Only prepare for bailouts before splits if we're in a test
743 // context. Otherwise, we let the Visit function deal with the
744 // preparation to avoid preparing with the same AST id twice.
745 if (!context()->IsTest() || !info_->IsOptimizable()) return;
746
747 Label skip;
748 if (should_normalize) __ jmp(&skip, Label::kNear);
749 PrepareForBailout(expr, TOS_REG);
750 if (should_normalize) {
751 __ cmp(eax, isolate()->factory()->true_value());
752 Split(equal, if_true, if_false, NULL);
753 __ bind(&skip);
754 }
755 }
756
757
758 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
759 // The variable in the declaration always resides in the current context.
760 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
761 if (generate_debug_code_) {
762 // Check that we're not inside a with or catch context.
763 __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
764 __ cmp(ebx, isolate()->factory()->with_context_map());
765 __ Check(not_equal, kDeclarationInWithContext);
766 __ cmp(ebx, isolate()->factory()->catch_context_map());
767 __ Check(not_equal, kDeclarationInCatchContext);
768 }
769 }
770
771
772 void FullCodeGenerator::VisitVariableDeclaration(
773 VariableDeclaration* declaration) {
774 // If it was not possible to allocate the variable at compile time, we
775 // need to "declare" it at runtime to make sure it actually exists in the
776 // local context.
777 VariableProxy* proxy = declaration->proxy();
778 VariableMode mode = declaration->mode();
779 Variable* variable = proxy->var();
780 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
781 switch (variable->location()) {
782 case VariableLocation::GLOBAL:
783 case VariableLocation::UNALLOCATED:
784 globals_->Add(variable->name(), zone());
785 globals_->Add(variable->binding_needs_init()
786 ? isolate()->factory()->the_hole_value()
787 : isolate()->factory()->undefined_value(), zone());
788 break;
789
790 case VariableLocation::PARAMETER:
791 case VariableLocation::LOCAL:
792 if (hole_init) {
793 Comment cmnt(masm_, "[ VariableDeclaration");
794 __ mov(StackOperand(variable),
795 Immediate(isolate()->factory()->the_hole_value()));
796 }
797 break;
798
799 case VariableLocation::CONTEXT:
800 if (hole_init) {
801 Comment cmnt(masm_, "[ VariableDeclaration");
802 EmitDebugCheckDeclarationContext(variable);
803 __ mov(ContextOperand(esi, variable->index()),
804 Immediate(isolate()->factory()->the_hole_value()));
805 // No write barrier since the hole value is in old space.
806 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
807 }
808 break;
809
810 case VariableLocation::LOOKUP: {
811 Comment cmnt(masm_, "[ VariableDeclaration");
812 __ push(esi);
813 __ push(Immediate(variable->name()));
814 // VariableDeclaration nodes are always introduced in one of four modes.
815 DCHECK(IsDeclaredVariableMode(mode));
816 PropertyAttributes attr =
817 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
818 __ push(Immediate(Smi::FromInt(attr)));
819 // Push initial value, if any.
820 // Note: For variables we must not push an initial value (such as
821 // 'undefined') because we may have a (legal) redeclaration and we
822 // must not destroy the current value.
823 if (hole_init) {
824 __ push(Immediate(isolate()->factory()->the_hole_value()));
825 } else {
826 __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
827 }
828 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
829 break;
830 }
831 }
832 }
833
834
835 void FullCodeGenerator::VisitFunctionDeclaration(
836 FunctionDeclaration* declaration) {
837 VariableProxy* proxy = declaration->proxy();
838 Variable* variable = proxy->var();
839 switch (variable->location()) {
840 case VariableLocation::GLOBAL:
841 case VariableLocation::UNALLOCATED: {
842 globals_->Add(variable->name(), zone());
843 Handle<SharedFunctionInfo> function =
844 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
845 // Check for stack-overflow exception.
846 if (function.is_null()) return SetStackOverflow();
847 globals_->Add(function, zone());
848 break;
849 }
850
851 case VariableLocation::PARAMETER:
852 case VariableLocation::LOCAL: {
853 Comment cmnt(masm_, "[ FunctionDeclaration");
854 VisitForAccumulatorValue(declaration->fun());
855 __ mov(StackOperand(variable), result_register());
856 break;
857 }
858
859 case VariableLocation::CONTEXT: {
860 Comment cmnt(masm_, "[ FunctionDeclaration");
861 EmitDebugCheckDeclarationContext(variable);
862 VisitForAccumulatorValue(declaration->fun());
863 __ mov(ContextOperand(esi, variable->index()), result_register());
864 // We know that we have written a function, which is not a smi.
865 __ RecordWriteContextSlot(esi,
866 Context::SlotOffset(variable->index()),
867 result_register(),
868 ecx,
869 kDontSaveFPRegs,
870 EMIT_REMEMBERED_SET,
871 OMIT_SMI_CHECK);
872 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
873 break;
874 }
875
876 case VariableLocation::LOOKUP: {
877 Comment cmnt(masm_, "[ FunctionDeclaration");
878 __ push(esi);
879 __ push(Immediate(variable->name()));
880 __ push(Immediate(Smi::FromInt(NONE)));
881 VisitForStackValue(declaration->fun());
882 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
883 break;
884 }
885 }
886 }
887
888
889 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
890 // Call the runtime to declare the globals.
891 __ push(esi); // The context is the first argument.
892 __ Push(pairs);
893 __ Push(Smi::FromInt(DeclareGlobalsFlags()));
894 __ CallRuntime(Runtime::kDeclareGlobals, 3);
895 // Return value is ignored.
896 }
897
898
899 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
900 // Call the runtime to declare the modules.
901 __ Push(descriptions);
902 __ CallRuntime(Runtime::kDeclareModules, 1);
903 // Return value is ignored.
904 }
905
906
907 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
908 Comment cmnt(masm_, "[ SwitchStatement");
909 Breakable nested_statement(this, stmt);
910 SetStatementPosition(stmt);
911
912 // Keep the switch value on the stack until a case matches.
913 VisitForStackValue(stmt->tag());
914 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
915
916 ZoneList<CaseClause*>* clauses = stmt->cases();
917 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
918
919 Label next_test; // Recycled for each test.
920 // Compile all the tests with branches to their bodies.
921 for (int i = 0; i < clauses->length(); i++) {
922 CaseClause* clause = clauses->at(i);
923 clause->body_target()->Unuse();
924
925 // The default is not a test; remember it as the final fall-through.
926 if (clause->is_default()) {
927 default_clause = clause;
928 continue;
929 }
930
931 Comment cmnt(masm_, "[ Case comparison");
932 __ bind(&next_test);
933 next_test.Unuse();
934
935 // Compile the label expression.
936 VisitForAccumulatorValue(clause->label());
937
938 // Perform the comparison as if via '==='.
939 __ mov(edx, Operand(esp, 0)); // Switch value.
940 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
941 JumpPatchSite patch_site(masm_);
942 if (inline_smi_code) {
943 Label slow_case;
944 __ mov(ecx, edx);
945 __ or_(ecx, eax);
946 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
947
948 __ cmp(edx, eax);
949 __ j(not_equal, &next_test);
950 __ Drop(1); // Switch value is no longer needed.
951 __ jmp(clause->body_target());
952 __ bind(&slow_case);
953 }
954
955 SetExpressionPosition(clause);
956 Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
957 strength(language_mode())).code();
958 CallIC(ic, clause->CompareId());
959 patch_site.EmitPatchInfo();
960
961 Label skip;
962 __ jmp(&skip, Label::kNear);
963 PrepareForBailout(clause, TOS_REG);
964 __ cmp(eax, isolate()->factory()->true_value());
965 __ j(not_equal, &next_test);
966 __ Drop(1);
967 __ jmp(clause->body_target());
968 __ bind(&skip);
969
970 __ test(eax, eax);
971 __ j(not_equal, &next_test);
972 __ Drop(1); // Switch value is no longer needed.
973 __ jmp(clause->body_target());
974 }
975
976 // Discard the test value and jump to the default if present, otherwise to
977 // the end of the statement.
978 __ bind(&next_test);
979 __ Drop(1); // Switch value is no longer needed.
980 if (default_clause == NULL) {
981 __ jmp(nested_statement.break_label());
982 } else {
983 __ jmp(default_clause->body_target());
984 }
985
986 // Compile all the case bodies.
987 for (int i = 0; i < clauses->length(); i++) {
988 Comment cmnt(masm_, "[ Case body");
989 CaseClause* clause = clauses->at(i);
990 __ bind(clause->body_target());
991 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
992 VisitStatements(clause->statements());
993 }
994
995 __ bind(nested_statement.break_label());
996 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
997 }
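So a switch compiles into two passes over the clauses: first a chain of comparisons, each jumping to its body on a match and falling through to the next test otherwise, then the bodies in source order. Roughly (sketch, labels illustrative):

    test_0: compare tag with label_0 -> body_0
    test_1: compare tag with label_1 -> body_1
    ...
    drop tag; jmp default body (or break_label if there is none)
    body_0: ...   ; consecutive bodies give the usual switch fall-through
    body_1: ...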
998
999
1000 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1001 Comment cmnt(masm_, "[ ForInStatement");
1002 SetStatementPosition(stmt, SKIP_BREAK);
1003
1004 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1005
1006 Label loop, exit;
1007 ForIn loop_statement(this, stmt);
1008 increment_loop_depth();
1009
1010 // Get the object to enumerate over. If the object is null or undefined, skip
1011 // over the loop. See ECMA-262 version 5, section 12.6.4.
1012 SetExpressionAsStatementPosition(stmt->enumerable());
1013 VisitForAccumulatorValue(stmt->enumerable());
1014 __ cmp(eax, isolate()->factory()->undefined_value());
1015 __ j(equal, &exit);
1016 __ cmp(eax, isolate()->factory()->null_value());
1017 __ j(equal, &exit);
1018
1019 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1020
1021 // Convert the object to a JS object.
1022 Label convert, done_convert;
1023 __ JumpIfSmi(eax, &convert, Label::kNear);
1024 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1025 __ j(above_equal, &done_convert, Label::kNear);
1026 __ bind(&convert);
1027 __ push(eax);
1028 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1029 __ bind(&done_convert);
1030 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1031 __ push(eax);
1032
1033 // Check for proxies.
1034 Label call_runtime, use_cache, fixed_array;
1035 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1036 __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
1037 __ j(below_equal, &call_runtime);
1038
1039 // Check cache validity in generated code. This is a fast case for
1040 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1041 // guarantee cache validity, call the runtime system to check cache
1042 // validity or get the property names in a fixed array.
1043 __ CheckEnumCache(&call_runtime);
1044
1045 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
1046 __ jmp(&use_cache, Label::kNear);
1047
1048 // Get the set of properties to enumerate.
1049 __ bind(&call_runtime);
1050 __ push(eax);
1051 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1052 PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1053 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1054 isolate()->factory()->meta_map());
1055 __ j(not_equal, &fixed_array);
1056
1057
1058 // We got a map in register eax. Get the enumeration cache from it.
1059 Label no_descriptors;
1060 __ bind(&use_cache);
1061
1062 __ EnumLength(edx, eax);
1063 __ cmp(edx, Immediate(Smi::FromInt(0)));
1064 __ j(equal, &no_descriptors);
1065
1066 __ LoadInstanceDescriptors(eax, ecx);
1067 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1068 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1069
1070 // Set up the four remaining stack slots.
1071 __ push(eax); // Map.
1072 __ push(ecx); // Enumeration cache.
1073 __ push(edx); // Number of valid entries for the map in the enum cache.
1074 __ push(Immediate(Smi::FromInt(0))); // Initial index.
1075 __ jmp(&loop);
1076
1077 __ bind(&no_descriptors);
1078 __ add(esp, Immediate(kPointerSize));
1079 __ jmp(&exit);
1080
1081 // We got a fixed array in register eax. Iterate through that.
1082 Label non_proxy;
1083 __ bind(&fixed_array);
1084
1085 // No need for a write barrier, we are storing a Smi in the feedback vector.
1086 __ LoadHeapObject(ebx, FeedbackVector());
1087 int vector_index = FeedbackVector()->GetIndex(slot);
1088 __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
1089 Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1090
1091 __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
1092 __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
1093 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1094 __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1095 __ j(above, &non_proxy);
1096 __ Move(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
1097 __ bind(&non_proxy);
1098 __ push(ebx); // Smi
1099 __ push(eax); // Array
1100 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1101 __ push(eax); // Fixed array length (as smi).
1102 __ push(Immediate(Smi::FromInt(0))); // Initial index.
1103
1104 // Generate code for doing the condition check.
1105 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1106 __ bind(&loop);
1107 SetExpressionAsStatementPosition(stmt->each());
1108
1109 __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
1110 __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
1111 __ j(above_equal, loop_statement.break_label());
1112
1113 // Get the current entry of the array into register ebx.
1114 __ mov(ebx, Operand(esp, 2 * kPointerSize));
1115 __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1116
1117 // Get the expected map (or, in the permanent slow case, a smi)
1118 // from the stack into register edx.
1119 __ mov(edx, Operand(esp, 3 * kPointerSize));
1120
1121 // Check if the expected map still matches that of the enumerable.
1122 // If not, we may have to filter the key.
1123 Label update_each;
1124 __ mov(ecx, Operand(esp, 4 * kPointerSize));
1125 __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1126 __ j(equal, &update_each, Label::kNear);
1127
1128 // For proxies, no filtering is done.
1129 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1130 DCHECK(Smi::FromInt(0) == 0);
1131 __ test(edx, edx);
1132 __ j(zero, &update_each);
1133
1134 // Convert the entry to a string or null if it isn't a property
1135 // anymore. If the property has been removed while iterating, we
1136 // just skip it.
1137 __ push(ecx); // Enumerable.
1138 __ push(ebx); // Current entry.
1139 __ CallRuntime(Runtime::kForInFilter, 2);
1140 PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1141 __ cmp(eax, isolate()->factory()->undefined_value());
1142 __ j(equal, loop_statement.continue_label());
1143 __ mov(ebx, eax);
1144
1145 // Update the 'each' property or variable from the possibly filtered
1146 // entry in register ebx.
1147 __ bind(&update_each);
1148 __ mov(result_register(), ebx);
1149 // Perform the assignment as if via '='.
1150 { EffectContext context(this);
1151 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1152 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1153 }
1154
1155 // Generate code for the body of the loop.
1156 Visit(stmt->body());
1157
1158 // Generate code for going to the next element by incrementing the
1159 // index (smi) stored on top of the stack.
1160 __ bind(loop_statement.continue_label());
1161 __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1162
1163 EmitBackEdgeBookkeeping(stmt, &loop);
1164 __ jmp(&loop);
1165
1166 // Remove the pointers stored on the stack.
1167 __ bind(loop_statement.break_label());
1168 __ add(esp, Immediate(5 * kPointerSize));
1169
1170 // Exit and decrement the loop depth.
1171 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1172 __ bind(&exit);
1173 decrement_loop_depth();
1174 }
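For orientation, the five loop slots maintained above (from the pushes and the Operand(esp, i * kPointerSize) reads):

    esp + 0  : current index (smi)
    esp + 4  : length of the key array (smi)
    esp + 8  : key array (enum cache bridge or fixed array)
    esp + 12 : expected map, or a smi (0 = proxy, 1 = slow check)
    esp + 16 : the enumerable object itself

which is why the cleanup at break_label adds exactly 5 * kPointerSize.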
1175
1176
1177 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1178 bool pretenure) {
1179 // Use the fast case closure allocation code that allocates in new
1180 // space for nested functions that don't need literals cloning. If
1181 // we're running with the --always-opt or the --prepare-always-opt
1182 // flag, we need to use the runtime function so that the new function
1183 // we are creating here gets a chance to have its code optimized and
1184 // doesn't just get a copy of the existing unoptimized code.
1185 if (!FLAG_always_opt &&
1186 !FLAG_prepare_always_opt &&
1187 !pretenure &&
1188 scope()->is_function_scope() &&
1189 info->num_literals() == 0) {
1190 FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1191 __ mov(ebx, Immediate(info));
1192 __ CallStub(&stub);
1193 } else {
1194 __ push(esi);
1195 __ push(Immediate(info));
1196 __ push(Immediate(pretenure
1197 ? isolate()->factory()->true_value()
1198 : isolate()->factory()->false_value()));
1199 __ CallRuntime(Runtime::kNewClosure, 3);
1200 }
1201 context()->Plug(eax);
1202 }
1203
1204
1205 void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
1206 int offset,
1207 FeedbackVectorICSlot slot) {
1208 if (NeedsHomeObject(initializer)) {
1209 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1210 __ mov(StoreDescriptor::NameRegister(),
1211 Immediate(isolate()->factory()->home_object_symbol()));
1212 __ mov(StoreDescriptor::ValueRegister(),
1213 Operand(esp, offset * kPointerSize));
1214 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
1215 CallStoreIC();
1216 }
1217 }
1218
1219
1220 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1221 TypeofMode typeof_mode,
1222 Label* slow) {
1223 Register context = esi;
1224 Register temp = edx;
1225
1226 Scope* s = scope();
1227 while (s != NULL) {
1228 if (s->num_heap_slots() > 0) {
1229 if (s->calls_sloppy_eval()) {
1230 // Check that extension is NULL.
1231 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1232 Immediate(0));
1233 __ j(not_equal, slow);
1234 }
1235 // Load next context in chain.
1236 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1237 // Walk the rest of the chain without clobbering esi.
1238 context = temp;
1239 }
1240 // If no outer scope calls eval, we do not need to check more
1241 // context extensions. If we have reached an eval scope, we check
1242 // all extensions from this point.
1243 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1244 s = s->outer_scope();
1245 }
1246
1247 if (s != NULL && s->is_eval_scope()) {
1248 // Walk up the context chain. There is no frame effect, so it is
1249 // safe to use raw labels here.
1250 Label next, fast;
1251 if (!context.is(temp)) {
1252 __ mov(temp, context);
1253 }
1254 __ bind(&next);
1255 // Terminate at native context.
1256 __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1257 Immediate(isolate()->factory()->native_context_map()));
1258 __ j(equal, &fast, Label::kNear);
1259 // Check that extension is NULL.
1260 __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1261 __ j(not_equal, slow);
1262 // Load next context in chain.
1263 __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1264 __ jmp(&next);
1265 __ bind(&fast);
1266 }
1267
1268 // All extension objects were empty and it is safe to use a normal global
1269 // load machinery.
1270 EmitGlobalVariableLoad(proxy, typeof_mode);
1271 }
1272
1273
1274 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1275 Label* slow) {
1276 DCHECK(var->IsContextSlot());
1277 Register context = esi;
1278 Register temp = ebx;
1279
1280 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1281 if (s->num_heap_slots() > 0) {
1282 if (s->calls_sloppy_eval()) {
1283 // Check that extension is NULL.
1284 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1285 Immediate(0));
1286 __ j(not_equal, slow);
1287 }
1288 __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1289 // Walk the rest of the chain without clobbering esi.
1290 context = temp;
1291 }
1292 }
1293 // Check that last extension is NULL.
1294 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1295 __ j(not_equal, slow);
1296
1297 // This function is used only for loads, not stores, so it's safe to
1298 // return an esi-based operand (the write barrier cannot be allowed to
1299 // destroy the esi register).
1300 return ContextOperand(context, var->index());
1301 }
1302
1303
1304 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1305 TypeofMode typeof_mode,
1306 Label* slow, Label* done) {
1307 // Generate fast-case code for variables that might be shadowed by
1308 // eval-introduced variables. Eval is used a lot without
1309 // introducing variables. In those cases, we do not want to
1310 // perform a runtime call for all variables in the scope
1311 // containing the eval.
1312 Variable* var = proxy->var();
1313 if (var->mode() == DYNAMIC_GLOBAL) {
1314 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1315 __ jmp(done);
1316 } else if (var->mode() == DYNAMIC_LOCAL) {
1317 Variable* local = var->local_if_not_shadowed();
1318 __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1319 if (local->mode() == LET || local->mode() == CONST ||
1320 local->mode() == CONST_LEGACY) {
1321 __ cmp(eax, isolate()->factory()->the_hole_value());
1322 __ j(not_equal, done);
1323 if (local->mode() == CONST_LEGACY) {
1324 __ mov(eax, isolate()->factory()->undefined_value());
1325 } else { // LET || CONST
1326 __ push(Immediate(var->name()));
1327 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1328 }
1329 }
1330 __ jmp(done);
1331 }
1332 }
1333
1334
1335 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1336 TypeofMode typeof_mode) {
1337 Variable* var = proxy->var();
1338 DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1339 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1340 if (var->IsGlobalSlot()) {
1341 DCHECK(var->index() > 0);
1342 DCHECK(var->IsStaticGlobalObjectProperty());
1343 // Each var occupies two slots in the context: for reads and writes.
1344 int slot_index = var->index();
1345 int depth = scope()->ContextChainLength(var->scope());
1346 __ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
1347 Immediate(Smi::FromInt(depth)));
1348 __ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
1349 Immediate(Smi::FromInt(slot_index)));
1350 __ mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
1351 LoadGlobalViaContextStub stub(isolate(), depth);
1352 __ CallStub(&stub);
1353
1354 } else {
1355 __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1356 __ mov(LoadDescriptor::NameRegister(), var->name());
1357 __ mov(LoadDescriptor::SlotRegister(),
1358 Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
1359 CallLoadIC(typeof_mode);
1360 }
1361 }
1362
1363
1364 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1365 TypeofMode typeof_mode) {
1366 SetExpressionPosition(proxy);
1367 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1368 Variable* var = proxy->var();
1369
1370 // Three cases: global variables, lookup variables, and all other types of
1371 // variables.
1372 switch (var->location()) {
1373 case VariableLocation::GLOBAL:
1374 case VariableLocation::UNALLOCATED: {
1375 Comment cmnt(masm_, "[ Global variable");
1376 EmitGlobalVariableLoad(proxy, typeof_mode);
1377 context()->Plug(eax);
1378 break;
1379 }
1380
1381 case VariableLocation::PARAMETER:
1382 case VariableLocation::LOCAL:
1383 case VariableLocation::CONTEXT: {
1384 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1385 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1386 : "[ Stack variable");
1387 if (var->binding_needs_init()) {
1388 // var->scope() may be NULL when the proxy is located in eval code and
1389 // refers to a potential outside binding. Currently those bindings are
1390 // always looked up dynamically, i.e. in that case the invariant
1391 //     var->location() == LOOKUP
1392 // always holds.
1393 DCHECK(var->scope() != NULL);
1394
1395 // Check if the binding really needs an initialization check. The check
1396 // can be skipped in the following situation: we have a LET or CONST
1397 // binding in harmony mode, both the Variable and the VariableProxy have
1398 // the same declaration scope (i.e. they are both in global code, in the
1399 // same function or in the same eval code) and the VariableProxy is in
1400 // the source physically located after the initializer of the variable.
1401 //
1402 // We cannot skip any initialization checks for CONST in non-harmony
1403 // mode because const variables may be declared but never initialized:
1404 // if (false) { const x; }; var y = x;
1405 //
1406 // The condition on the declaration scopes is a conservative check for
1407 // nested functions that access a binding and are called before the
1408 // binding is initialized:
1409 // function() { f(); let x = 1; function f() { x = 2; } }
1410 //
1411 bool skip_init_check;
1412 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1413 skip_init_check = false;
1414 } else if (var->is_this()) {
1415 CHECK(info_->function() != nullptr &&
1416 (info_->function()->kind() & kSubclassConstructor) != 0);
1417 // TODO(dslomov): implement 'this' hole check elimination.
1418 skip_init_check = false;
1419 } else {
1420 // Check that we always have a valid source position.
1421 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1422 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1423 skip_init_check = var->mode() != CONST_LEGACY &&
1424 var->initializer_position() < proxy->position();
1425 }
1426
1427 if (!skip_init_check) {
1428 // Let and const need a read barrier.
1429 Label done;
1430 GetVar(eax, var);
1431 __ cmp(eax, isolate()->factory()->the_hole_value());
1432 __ j(not_equal, &done, Label::kNear);
1433 if (var->mode() == LET || var->mode() == CONST) {
1434 // Throw a reference error when using an uninitialized let/const
1435 // binding in harmony mode.
1436 __ push(Immediate(var->name()));
1437 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1438 } else {
1439 // Uninitialized const bindings outside of harmony mode are unholed.
1440 DCHECK(var->mode() == CONST_LEGACY);
1441 __ mov(eax, isolate()->factory()->undefined_value());
1442 }
1443 __ bind(&done);
1444 context()->Plug(eax);
1445 break;
1446 }
1447 }
1448 context()->Plug(var);
1449 break;
1450 }
1451
1452 case VariableLocation::LOOKUP: {
1453 Comment cmnt(masm_, "[ Lookup variable");
1454 Label done, slow;
1455 // Generate code for loading from variables potentially shadowed
1456 // by eval-introduced variables.
1457 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1458 __ bind(&slow);
1459 __ push(esi); // Context.
1460 __ push(Immediate(var->name()));
1461 Runtime::FunctionId function_id =
1462 typeof_mode == NOT_INSIDE_TYPEOF
1463 ? Runtime::kLoadLookupSlot
1464 : Runtime::kLoadLookupSlotNoReferenceError;
1465 __ CallRuntime(function_id, 2);
1466 __ bind(&done);
1467 context()->Plug(eax);
1468 break;
1469 }
1470 }
1471 }
1472
1473
1474 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1475 Comment cmnt(masm_, "[ RegExpLiteral");
1476 Label materialized;
1477 // Registers will be used as follows:
1478 // edi = JS function.
1479 // ecx = literals array.
1480 // ebx = regexp literal.
1481 // eax = regexp literal clone.
1482 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1483 __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
1484 int literal_offset =
1485 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1486 __ mov(ebx, FieldOperand(ecx, literal_offset));
1487 __ cmp(ebx, isolate()->factory()->undefined_value());
1488 __ j(not_equal, &materialized, Label::kNear);
1489
1490 // Create regexp literal using runtime function
1491 // Result will be in eax.
1492 __ push(ecx);
1493 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1494 __ push(Immediate(expr->pattern()));
1495 __ push(Immediate(expr->flags()));
1496 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1497 __ mov(ebx, eax);
1498
1499 __ bind(&materialized);
1500 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1501 Label allocated, runtime_allocate;
1502 __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
1503 __ jmp(&allocated);
1504
1505 __ bind(&runtime_allocate);
1506 __ push(ebx);
1507 __ push(Immediate(Smi::FromInt(size)));
1508 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1509 __ pop(ebx);
1510
1511 __ bind(&allocated);
1512 // Copy the content into the newly allocated memory.
1513 // (Unroll copy loop once for better throughput).
1514 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1515 __ mov(edx, FieldOperand(ebx, i));
1516 __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
1517 __ mov(FieldOperand(eax, i), edx);
1518 __ mov(FieldOperand(eax, i + kPointerSize), ecx);
1519 }
1520 if ((size % (2 * kPointerSize)) != 0) {
1521 __ mov(edx, FieldOperand(ebx, size - kPointerSize));
1522 __ mov(FieldOperand(eax, size - kPointerSize), edx);
1523 }
1524 context()->Plug(eax);
1525 }
1526
1527
1528 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1529 if (expression == NULL) {
1530 __ push(Immediate(isolate()->factory()->null_value()));
1531 } else {
1532 VisitForStackValue(expression);
1533 }
1534 }
1535
1536
1537 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1538 Comment cmnt(masm_, "[ ObjectLiteral");
1539
1540 Handle<FixedArray> constant_properties = expr->constant_properties();
1541 int flags = expr->ComputeFlags();
1542 // If any of the keys would store to the elements array, then we shouldn't
1543 // allow it.
1544 if (MustCreateObjectLiteralWithRuntime(expr)) {
1545 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1546 __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
1547 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1548 __ push(Immediate(constant_properties));
1549 __ push(Immediate(Smi::FromInt(flags)));
1550 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1551 } else {
1552 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1553 __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
1554 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1555 __ mov(ecx, Immediate(constant_properties));
1556 __ mov(edx, Immediate(Smi::FromInt(flags)));
1557 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1558 __ CallStub(&stub);
1559 }
1560 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1561
1562 // If result_saved is true the result is on top of the stack. If
1563 // result_saved is false the result is in eax.
1564 bool result_saved = false;
1565
1566 AccessorTable accessor_table(zone());
1567 int property_index = 0;
1568 // store_slot_index points to the vector IC slot for the next store IC used.
1569 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots
1570 // and must be updated if the number of store ICs emitted here changes.
1571 int store_slot_index = 0;
1572 for (; property_index < expr->properties()->length(); property_index++) {
1573 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1574 if (property->is_computed_name()) break;
1575 if (property->IsCompileTimeValue()) continue;
1576
1577 Literal* key = property->key()->AsLiteral();
1578 Expression* value = property->value();
1579 if (!result_saved) {
1580 __ push(eax); // Save result on the stack
1581 result_saved = true;
1582 }
1583 switch (property->kind()) {
1584 case ObjectLiteral::Property::CONSTANT:
1585 UNREACHABLE();
1586 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1587 DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1588 // Fall through.
1589 case ObjectLiteral::Property::COMPUTED:
1590 // It is safe to use [[Put]] here because the boilerplate already
1591 // contains computed properties with an uninitialized value.
1592 if (key->value()->IsInternalizedString()) {
1593 if (property->emit_store()) {
1594 VisitForAccumulatorValue(value);
1595 DCHECK(StoreDescriptor::ValueRegister().is(eax));
1596 __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
1597 __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1598 if (FLAG_vector_stores) {
1599 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1600 CallStoreIC();
1601 } else {
1602 CallStoreIC(key->LiteralFeedbackId());
1603 }
1604 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1605
1606 if (NeedsHomeObject(value)) {
1607 __ mov(StoreDescriptor::ReceiverRegister(), eax);
1608 __ mov(StoreDescriptor::NameRegister(),
1609 Immediate(isolate()->factory()->home_object_symbol()));
1610 __ mov(StoreDescriptor::ValueRegister(), Operand(esp, 0));
1611 if (FLAG_vector_stores) {
1612 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++));
1613 }
1614 CallStoreIC();
1615 }
1616 } else {
1617 VisitForEffect(value);
1618 }
1619 break;
1620 }
1621 __ push(Operand(esp, 0)); // Duplicate receiver.
1622 VisitForStackValue(key);
1623 VisitForStackValue(value);
1624 if (property->emit_store()) {
1625 EmitSetHomeObjectIfNeeded(
1626 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1627 __ push(Immediate(Smi::FromInt(SLOPPY))); // Language mode
1628 __ CallRuntime(Runtime::kSetProperty, 4);
1629 } else {
1630 __ Drop(3);
1631 }
1632 break;
1633 case ObjectLiteral::Property::PROTOTYPE:
1634 __ push(Operand(esp, 0)); // Duplicate receiver.
1635 VisitForStackValue(value);
1636 DCHECK(property->emit_store());
1637 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1638 break;
1639 case ObjectLiteral::Property::GETTER:
1640 if (property->emit_store()) {
1641 accessor_table.lookup(key)->second->getter = value;
1642 }
1643 break;
1644 case ObjectLiteral::Property::SETTER:
1645 if (property->emit_store()) {
1646 accessor_table.lookup(key)->second->setter = value;
1647 }
1648 break;
1649 }
1650 }
1651
1652 // Emit code to define accessors, using only a single call to the runtime for
1653 // each pair of corresponding getters and setters.
1654 for (AccessorTable::Iterator it = accessor_table.begin();
1655 it != accessor_table.end();
1656 ++it) {
1657 __ push(Operand(esp, 0)); // Duplicate receiver.
1658 VisitForStackValue(it->first);
1659 EmitAccessor(it->second->getter);
1660 EmitSetHomeObjectIfNeeded(
1661 it->second->getter, 2,
1662 expr->SlotForHomeObject(it->second->getter, &store_slot_index));
1663
1664 EmitAccessor(it->second->setter);
1665 EmitSetHomeObjectIfNeeded(
1666 it->second->setter, 3,
1667 expr->SlotForHomeObject(it->second->setter, &store_slot_index));
1668
1669 __ push(Immediate(Smi::FromInt(NONE)));
1670 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1671 }
1672
1673 // Object literals have two parts. The "static" part on the left contains no
1674 // computed property names, and so we can compute its map ahead of time; see
1675 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1676 // starts with the first computed property name, and continues with all
1677 // properties to its right. All the code from above initializes the static
1678 // component of the object literal, and arranges for the map of the result to
1679 // reflect the static order in which the keys appear. For the dynamic
1680 // properties, we compile them into a series of "SetOwnProperty" runtime
1681 // calls. This will preserve insertion order.
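// For example, in
//   var o = { a: 1, [k]: 2, b: 3 };
// only "a" belongs to the static component; "[k]" and every property after
// it, including "b", is defined through the runtime calls below.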
1682 for (; property_index < expr->properties()->length(); property_index++) {
1683 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1684
1685 Expression* value = property->value();
1686 if (!result_saved) {
1687 __ push(eax); // Save result on the stack
1688 result_saved = true;
1689 }
1690
1691 __ push(Operand(esp, 0)); // Duplicate receiver.
1692
1693 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1694 DCHECK(!property->is_computed_name());
1695 VisitForStackValue(value);
1696 DCHECK(property->emit_store());
1697 __ CallRuntime(Runtime::kInternalSetPrototype, 2);
1698 } else {
1699 EmitPropertyKey(property, expr->GetIdForProperty(property_index));
1700 VisitForStackValue(value);
1701 EmitSetHomeObjectIfNeeded(
1702 value, 2, expr->SlotForHomeObject(value, &store_slot_index));
1703
1704 switch (property->kind()) {
1705 case ObjectLiteral::Property::CONSTANT:
1706 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1707 case ObjectLiteral::Property::COMPUTED:
1708 if (property->emit_store()) {
1709 __ push(Immediate(Smi::FromInt(NONE)));
1710 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4);
1711 } else {
1712 __ Drop(3);
1713 }
1714 break;
1715
1716 case ObjectLiteral::Property::PROTOTYPE:
1717 UNREACHABLE();
1718 break;
1719
1720 case ObjectLiteral::Property::GETTER:
1721 __ push(Immediate(Smi::FromInt(NONE)));
1722 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
1723 break;
1724
1725 case ObjectLiteral::Property::SETTER:
1726 __ push(Immediate(Smi::FromInt(NONE)));
1727 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
1728 break;
1729 }
1730 }
1731 }
1732
1733 if (expr->has_function()) {
1734 DCHECK(result_saved);
1735 __ push(Operand(esp, 0));
1736 __ CallRuntime(Runtime::kToFastProperties, 1);
1737 }
1738
1739 if (result_saved) {
1740 context()->PlugTOS();
1741 } else {
1742 context()->Plug(eax);
1743 }
1744
1745 // Verify that compilation exactly consumed the number of store ic slots that
1746 // the ObjectLiteral node had to offer.
1747 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count());
1748 }
1749
1750
1751 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1752 Comment cmnt(masm_, "[ ArrayLiteral");
1753
1754 expr->BuildConstantElements(isolate());
1755 Handle<FixedArray> constant_elements = expr->constant_elements();
1756 bool has_constant_fast_elements =
1757 IsFastObjectElementsKind(expr->constant_elements_kind());
1758
1759 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1760 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1761 // With pretenuring disabled, allocation sites only serve elements-kind
1762 // transitions, and fast object elements have nowhere left to transition to.
1763 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1764 }
1765
1766 if (MustCreateArrayLiteralWithRuntime(expr)) {
1767 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1768 __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1769 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1770 __ push(Immediate(constant_elements));
1771 __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
1772 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1773 } else {
1774 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1775 __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1776 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1777 __ mov(ecx, Immediate(constant_elements));
1778 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1779 __ CallStub(&stub);
1780 }
1781 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1782
1783 bool result_saved = false; // Is the result saved to the stack?
1784 ZoneList<Expression*>* subexprs = expr->values();
1785 int length = subexprs->length();
1786
1787 // Emit code to evaluate all the non-constant subexpressions and to store
1788 // them into the newly cloned array.
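// E.g. for [1, x + 1, 2] only the middle element needs code here; the
// constants 1 and 2 are already part of the boilerplate.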
1789 int array_index = 0;
1790 for (; array_index < length; array_index++) {
1791 Expression* subexpr = subexprs->at(array_index);
1792 if (subexpr->IsSpread()) break;
1793
1794 // If the subexpression is a literal or a simple materialized literal it
1795 // is already set in the cloned array.
1796 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1797
1798 if (!result_saved) {
1799 __ push(eax); // array literal.
1800 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1801 result_saved = true;
1802 }
1803 VisitForAccumulatorValue(subexpr);
1804
1805 if (has_constant_fast_elements) {
1806 // Fast-case array literals with an ElementsKind of FAST_*_ELEMENTS
1807 // cannot transition, so they don't need to call the runtime stub.
1808 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize);
1809 __ mov(ebx, Operand(esp, kPointerSize)); // Copy of array literal.
1810 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1811 // Store the subexpression value in the array's elements.
1812 __ mov(FieldOperand(ebx, offset), result_register());
1813 // Update the write barrier for the array store.
1814 __ RecordWriteField(ebx, offset, result_register(), ecx,
1815 kDontSaveFPRegs,
1816 EMIT_REMEMBERED_SET,
1817 INLINE_SMI_CHECK);
1818 } else {
1819 // Store the subexpression value in the array's elements.
1820 __ mov(ecx, Immediate(Smi::FromInt(array_index)));
1821 StoreArrayLiteralElementStub stub(isolate());
1822 __ CallStub(&stub);
1823 }
1824
1825 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1826 }
1827
1828 // If the array literal contains spread expressions, it has two parts. The
1829 // first part is the "static" array, which has a literal index and is handled
1830 // above. The second part starts at the first spread expression (inclusive);
1831 // these elements get appended to the array. Note that the number of
1832 // elements an iterable produces is unknown ahead of time.
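// For example, for
//   var a = [1, 2, ...iter, 3];
// the elements 1 and 2 are stored by the loop above, while ...iter and the
// trailing 3 are appended one at a time below.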
1833 if (array_index < length && result_saved) {
1834 __ Drop(1); // literal index
1835 __ Pop(eax);
1836 result_saved = false;
1837 }
1838 for (; array_index < length; array_index++) {
1839 Expression* subexpr = subexprs->at(array_index);
1840
1841 __ Push(eax);
1842 if (subexpr->IsSpread()) {
1843 VisitForStackValue(subexpr->AsSpread()->expression());
1844 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION);
1845 } else {
1846 VisitForStackValue(subexpr);
1847 __ CallRuntime(Runtime::kAppendElement, 2);
1848 }
1849
1850 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1851 }
1852
1853 if (result_saved) {
1854 __ Drop(1); // literal index
1855 context()->PlugTOS();
1856 } else {
1857 context()->Plug(eax);
1858 }
1859 }
1860
1861
1862 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1863 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1864
1865 Comment cmnt(masm_, "[ Assignment");
1866 SetExpressionPosition(expr, INSERT_BREAK);
1867
1868 Property* property = expr->target()->AsProperty();
1869 LhsKind assign_type = Property::GetAssignType(property);
1870
1871 // Evaluate LHS expression.
1872 switch (assign_type) {
1873 case VARIABLE:
1874 // Nothing to do here.
1875 break;
1876 case NAMED_SUPER_PROPERTY:
1877 VisitForStackValue(
1878 property->obj()->AsSuperPropertyReference()->this_var());
1879 VisitForAccumulatorValue(
1880 property->obj()->AsSuperPropertyReference()->home_object());
1881 __ push(result_register());
1882 if (expr->is_compound()) {
1883 __ push(MemOperand(esp, kPointerSize));
1884 __ push(result_register());
1885 }
1886 break;
1887 case NAMED_PROPERTY:
1888 if (expr->is_compound()) {
1889 // We need the receiver both on the stack and in the register.
1890 VisitForStackValue(property->obj());
1891 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1892 } else {
1893 VisitForStackValue(property->obj());
1894 }
1895 break;
1896 case KEYED_SUPER_PROPERTY:
1897 VisitForStackValue(
1898 property->obj()->AsSuperPropertyReference()->this_var());
1899 VisitForStackValue(
1900 property->obj()->AsSuperPropertyReference()->home_object());
1901 VisitForAccumulatorValue(property->key());
1902 __ Push(result_register());
1903 if (expr->is_compound()) {
1904 __ push(MemOperand(esp, 2 * kPointerSize));
1905 __ push(MemOperand(esp, 2 * kPointerSize));
1906 __ push(result_register());
1907 }
1908 break;
1909 case KEYED_PROPERTY: {
1910 if (expr->is_compound()) {
1911 VisitForStackValue(property->obj());
1912 VisitForStackValue(property->key());
1913 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1914 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1915 } else {
1916 VisitForStackValue(property->obj());
1917 VisitForStackValue(property->key());
1918 }
1919 break;
1920 }
1921 }
1922
1923 // For compound assignments we need another deoptimization point after the
1924 // variable/property load.
1925 if (expr->is_compound()) {
1926 AccumulatorValueContext result_context(this);
1927 { AccumulatorValueContext left_operand_context(this);
1928 switch (assign_type) {
1929 case VARIABLE:
1930 EmitVariableLoad(expr->target()->AsVariableProxy());
1931 PrepareForBailout(expr->target(), TOS_REG);
1932 break;
1933 case NAMED_SUPER_PROPERTY:
1934 EmitNamedSuperPropertyLoad(property);
1935 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1936 break;
1937 case NAMED_PROPERTY:
1938 EmitNamedPropertyLoad(property);
1939 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1940 break;
1941 case KEYED_SUPER_PROPERTY:
1942 EmitKeyedSuperPropertyLoad(property);
1943 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1944 break;
1945 case KEYED_PROPERTY:
1946 EmitKeyedPropertyLoad(property);
1947 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1948 break;
1949 }
1950 }
1951
1952 Token::Value op = expr->binary_op();
1953 __ push(eax); // Left operand goes on the stack.
1954 VisitForAccumulatorValue(expr->value());
1955
1956 if (ShouldInlineSmiCase(op)) {
1957 EmitInlineSmiBinaryOp(expr->binary_operation(),
1958 op,
1959 expr->target(),
1960 expr->value());
1961 } else {
1962 EmitBinaryOp(expr->binary_operation(), op);
1963 }
1964
1965 // Deoptimization point in case the binary operation may have side effects.
1966 PrepareForBailout(expr->binary_operation(), TOS_REG);
1967 } else {
1968 VisitForAccumulatorValue(expr->value());
1969 }
1970
1971 SetExpressionPosition(expr);
1972
1973 // Store the value.
1974 switch (assign_type) {
1975 case VARIABLE:
1976 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1977 expr->op(), expr->AssignmentSlot());
1978 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1979 context()->Plug(eax);
1980 break;
1981 case NAMED_PROPERTY:
1982 EmitNamedPropertyAssignment(expr);
1983 break;
1984 case NAMED_SUPER_PROPERTY:
1985 EmitNamedSuperPropertyStore(property);
1986 context()->Plug(result_register());
1987 break;
1988 case KEYED_SUPER_PROPERTY:
1989 EmitKeyedSuperPropertyStore(property);
1990 context()->Plug(result_register());
1991 break;
1992 case KEYED_PROPERTY:
1993 EmitKeyedPropertyAssignment(expr);
1994 break;
1995 }
1996 }
1997
1998
1999 void FullCodeGenerator::VisitYield(Yield* expr) {
2000 Comment cmnt(masm_, "[ Yield");
2001 SetExpressionPosition(expr);
2002
2003 // Evaluate yielded value first; the initial iterator definition depends on
2004 // this. It stays on the stack while we update the iterator.
2005 VisitForStackValue(expr->expression());
2006
2007 switch (expr->yield_kind()) {
2008 case Yield::kSuspend:
2009 // Pop value from top-of-stack slot; box result into result register.
2010 EmitCreateIteratorResult(false);
2011 __ push(result_register());
2012 // Fall through.
2013 case Yield::kInitial: {
2014 Label suspend, continuation, post_runtime, resume;
2015
2016 __ jmp(&suspend);
2017 __ bind(&continuation);
2018 __ RecordGeneratorContinuation();
2019 __ jmp(&resume);
2020
2021 __ bind(&suspend);
2022 VisitForAccumulatorValue(expr->generator_object());
2023 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2024 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2025 Immediate(Smi::FromInt(continuation.pos())));
2026 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2027 __ mov(ecx, esi);
2028 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2029 kDontSaveFPRegs);
2030 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
2031 __ cmp(esp, ebx);
2032 __ j(equal, &post_runtime);
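// If the operand stack is empty (esp already points at the fixed part of
// the frame), there is nothing to save and the runtime call can be skipped.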
2033 __ push(eax); // generator object
2034 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2035 __ mov(context_register(),
2036 Operand(ebp, StandardFrameConstants::kContextOffset));
2037 __ bind(&post_runtime);
2038 __ pop(result_register());
2039 EmitReturnSequence();
2040
2041 __ bind(&resume);
2042 context()->Plug(result_register());
2043 break;
2044 }
2045
2046 case Yield::kFinal: {
2047 VisitForAccumulatorValue(expr->generator_object());
2048 __ mov(FieldOperand(result_register(),
2049 JSGeneratorObject::kContinuationOffset),
2050 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2051 // Pop value from top-of-stack slot, box result into result register.
2052 EmitCreateIteratorResult(true);
2053 EmitUnwindBeforeReturn();
2054 EmitReturnSequence();
2055 break;
2056 }
2057
2058 case Yield::kDelegating: {
2059 VisitForStackValue(expr->generator_object());
2060
2061 // Initial stack layout is as follows:
2062 // [sp + 1 * kPointerSize] iter
2063 // [sp + 0 * kPointerSize] g
2064
2065 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2066 Label l_next, l_call, l_loop;
2067 Register load_receiver = LoadDescriptor::ReceiverRegister();
2068 Register load_name = LoadDescriptor::NameRegister();
2069
2070 // Initial send value is undefined.
2071 __ mov(eax, isolate()->factory()->undefined_value());
2072 __ jmp(&l_next);
2073
2074 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2075 __ bind(&l_catch);
2076 __ mov(load_name, isolate()->factory()->throw_string()); // "throw"
2077 __ push(load_name); // "throw"
2078 __ push(Operand(esp, 2 * kPointerSize)); // iter
2079 __ push(eax); // exception
2080 __ jmp(&l_call);
2081
2082 // try { received = %yield result }
2083 // Shuffle the received result above a try handler and yield it without
2084 // re-boxing.
2085 __ bind(&l_try);
2086 __ pop(eax); // result
2087 int handler_index = NewHandlerTableEntry();
2088 EnterTryBlock(handler_index, &l_catch);
2089 const int try_block_size = TryCatch::kElementCount * kPointerSize;
2090 __ push(eax); // result
2091
2092 __ jmp(&l_suspend);
2093 __ bind(&l_continuation);
2094 __ RecordGeneratorContinuation();
2095 __ jmp(&l_resume);
2096
2097 __ bind(&l_suspend);
2098 const int generator_object_depth = kPointerSize + try_block_size;
2099 __ mov(eax, Operand(esp, generator_object_depth));
2100 __ push(eax); // g
2101 __ push(Immediate(Smi::FromInt(handler_index))); // handler-index
2102 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2103 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2104 Immediate(Smi::FromInt(l_continuation.pos())));
2105 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2106 __ mov(ecx, esi);
2107 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2108 kDontSaveFPRegs);
2109 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2110 __ mov(context_register(),
2111 Operand(ebp, StandardFrameConstants::kContextOffset));
2112 __ pop(eax); // result
2113 EmitReturnSequence();
2114 __ bind(&l_resume); // received in eax
2115 ExitTryBlock(handler_index);
2116
2117 // receiver = iter; f = iter.next; arg = received;
2118 __ bind(&l_next);
2119
2120 __ mov(load_name, isolate()->factory()->next_string());
2121 __ push(load_name); // "next"
2122 __ push(Operand(esp, 2 * kPointerSize)); // iter
2123 __ push(eax); // received
2124
2125 // result = receiver[f](arg);
2126 __ bind(&l_call);
2127 __ mov(load_receiver, Operand(esp, kPointerSize));
2128 __ mov(LoadDescriptor::SlotRegister(),
2129 Immediate(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2130 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2131 CallIC(ic, TypeFeedbackId::None());
2132 __ mov(edi, eax);
2133 __ mov(Operand(esp, 2 * kPointerSize), edi);
2134 SetCallPosition(expr, 1);
2135 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2136 __ CallStub(&stub);
2137
2138 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2139 __ Drop(1); // The function is still on the stack; drop it.
2140
2141 // if (!result.done) goto l_try;
2142 __ bind(&l_loop);
2143 __ push(eax); // save result
2144 __ Move(load_receiver, eax); // result
2145 __ mov(load_name,
2146 isolate()->factory()->done_string()); // "done"
2147 __ mov(LoadDescriptor::SlotRegister(),
2148 Immediate(SmiFromSlot(expr->DoneFeedbackSlot())));
2149 CallLoadIC(NOT_INSIDE_TYPEOF); // result.done in eax
2150 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2151 CallIC(bool_ic);
2152 __ test(eax, eax);
2153 __ j(zero, &l_try);
2154
2155 // result.value
2156 __ pop(load_receiver); // result
2157 __ mov(load_name,
2158 isolate()->factory()->value_string()); // "value"
2159 __ mov(LoadDescriptor::SlotRegister(),
2160 Immediate(SmiFromSlot(expr->ValueFeedbackSlot())));
2161 CallLoadIC(NOT_INSIDE_TYPEOF); // result.value in eax
2162 context()->DropAndPlug(2, eax); // drop iter and g
2163 break;
2164 }
2165 }
2166 }
2167
2168
2169 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2170 Expression *value,
2171 JSGeneratorObject::ResumeMode resume_mode) {
2172 // The value stays in eax and is ultimately read by the resumed generator,
2173 // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) had returned it; or
2174 // it is read in order to throw the value when the resumed generator is
2175 // already closed. ebx holds the generator object until resumption.
2176 VisitForStackValue(generator);
2177 VisitForAccumulatorValue(value);
2178 __ pop(ebx);
2179
2180 // Load suspended function and context.
2181 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2182 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
2183
2184 // Push receiver.
2185 __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2186
2187 // Push holes for arguments to generator function.
2188 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2189 __ mov(edx,
2190 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2191 __ mov(ecx, isolate()->factory()->the_hole_value());
2192 Label push_argument_holes, push_frame;
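// edx holds the formal parameter count as a smi; subtracting a smi 1 per
// iteration counts down without untagging, and the borrow (carry flag) on
// passing zero terminates the loop.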
2193 __ bind(&push_argument_holes);
2194 __ sub(edx, Immediate(Smi::FromInt(1)));
2195 __ j(carry, &push_frame);
2196 __ push(ecx);
2197 __ jmp(&push_argument_holes);
2198
2199 // Enter a new JavaScript frame, and initialize its slots as they were when
2200 // the generator was suspended.
2201 Label resume_frame, done;
2202 __ bind(&push_frame);
2203 __ call(&resume_frame);
2204 __ jmp(&done);
2205 __ bind(&resume_frame);
2206 __ push(ebp); // Caller's frame pointer.
2207 __ mov(ebp, esp);
2208 __ push(esi); // Callee's context.
2209 __ push(edi); // Callee's JS Function.
2210
2211 // Load the operand stack size.
2212 __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
2213 __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
2214 __ SmiUntag(edx);
2215
2216 // If we are sending a value and there is no operand stack, we can jump back
2217 // in directly.
2218 if (resume_mode == JSGeneratorObject::NEXT) {
2219 Label slow_resume;
2220 __ cmp(edx, Immediate(0));
2221 __ j(not_zero, &slow_resume);
2222 __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2223 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
2224 __ SmiUntag(ecx);
2225 __ add(edx, ecx);
2226 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2227 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2228 __ jmp(edx);
2229 __ bind(&slow_resume);
2230 }
2231
2232 // Otherwise, we push holes for the operand stack and call the runtime to fix
2233 // up the stack and the handlers.
2234 Label push_operand_holes, call_resume;
2235 __ bind(&push_operand_holes);
2236 __ sub(edx, Immediate(1));
2237 __ j(carry, &call_resume);
2238 __ push(ecx);
2239 __ jmp(&push_operand_holes);
2240 __ bind(&call_resume);
2241 __ push(ebx);
2242 __ push(result_register());
2243 __ Push(Smi::FromInt(resume_mode));
2244 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2245 // Not reached: the runtime call returns elsewhere.
2246 __ Abort(kGeneratorFailedToResume);
2247
2248 __ bind(&done);
2249 context()->Plug(result_register());
2250 }
2251
2252
2253 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2254 Label gc_required;
2255 Label allocated;
2256
2257 const int instance_size = 5 * kPointerSize;
2258 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2259 instance_size);
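// The iterator result is a JSObject of shape { value, done }: a map word,
// properties and elements pointers, plus the two in-object fields: five
// pointer-sized words in total.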
2260
2261 __ Allocate(instance_size, eax, ecx, edx, &gc_required, TAG_OBJECT);
2262 __ jmp(&allocated);
2263
2264 __ bind(&gc_required);
2265 __ Push(Smi::FromInt(instance_size));
2266 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2267 __ mov(context_register(),
2268 Operand(ebp, StandardFrameConstants::kContextOffset));
2269
2270 __ bind(&allocated);
2271 __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2272 __ mov(ebx, FieldOperand(ebx, GlobalObject::kNativeContextOffset));
2273 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
2274 __ pop(ecx);
2275 __ mov(edx, isolate()->factory()->ToBoolean(done));
2276 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2277 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2278 isolate()->factory()->empty_fixed_array());
2279 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2280 isolate()->factory()->empty_fixed_array());
2281 __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2282 __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
2283
2284 // Only the value field needs a write barrier, as the other values are in the
2285 // root set.
2286 __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
2287 ecx, edx, kDontSaveFPRegs);
2288 }
2289
2290
2291 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2292 SetExpressionPosition(prop);
2293 Literal* key = prop->key()->AsLiteral();
2294 DCHECK(!key->value()->IsSmi());
2295 DCHECK(!prop->IsSuperAccess());
2296
2297 __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2298 __ mov(LoadDescriptor::SlotRegister(),
2299 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2300 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2301 }
2302
2303
2304 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2305 // Stack: receiver, home_object.
2306 SetExpressionPosition(prop);
2307 Literal* key = prop->key()->AsLiteral();
2308 DCHECK(!key->value()->IsSmi());
2309 DCHECK(prop->IsSuperAccess());
2310
2311 __ push(Immediate(key->value()));
2312 __ push(Immediate(Smi::FromInt(language_mode())));
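// Together with the receiver and home_object already on the stack, the two
// pushes above form the four runtime arguments.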
2313 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2314 }
2315
2316
2317 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2318 SetExpressionPosition(prop);
2319 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2320 __ mov(LoadDescriptor::SlotRegister(),
2321 Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2322 CallIC(ic);
2323 }
2324
2325
2326 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2327 // Stack: receiver, home_object, key.
2328 SetExpressionPosition(prop);
2329 __ push(Immediate(Smi::FromInt(language_mode())));
2330 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2331 }
2332
2333
2334 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2335 Token::Value op,
2336 Expression* left,
2337 Expression* right) {
2338 // Do combined smi check of the operands. Left operand is on the
2339 // stack. Right operand is in eax.
2340 Label smi_case, done, stub_call;
2341 __ pop(edx);
2342 __ mov(ecx, eax);
2343 __ or_(eax, edx);
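// Or-ing the operands combines their tag bits: the result has a clear smi
// tag iff both operands are smis, so one test below checks both at once.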
2344 JumpPatchSite patch_site(masm_);
2345 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2346
2347 __ bind(&stub_call);
2348 __ mov(eax, ecx);
2349 Handle<Code> code =
2350 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2351 CallIC(code, expr->BinaryOperationFeedbackId());
2352 patch_site.EmitPatchInfo();
2353 __ jmp(&done, Label::kNear);
2354
2355 // Smi case.
2356 __ bind(&smi_case);
2357 __ mov(eax, edx); // Copy left operand in case of a stub call.
2358
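// Reminder: a smi stores the value shifted left by one (tag bit 0), e.g. 5
// is encoded as 10. ADD, SUB and the bitwise ops therefore work directly on
// tagged values (2a + 2b == 2(a + b)); MUL must untag one operand first,
// since 2a * 2b == 4ab.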
2359 switch (op) {
2360 case Token::SAR:
2361 __ SmiUntag(ecx);
2362 __ sar_cl(eax); // No overflow check necessary; right shifts can't overflow.
2363 __ and_(eax, Immediate(~kSmiTagMask)); // Clear bits shifted into the tag.
2364 break;
2365 case Token::SHL: {
2366 Label result_ok;
2367 __ SmiUntag(eax);
2368 __ SmiUntag(ecx);
2369 __ shl_cl(eax);
2370 // Check that the *signed* result fits in a smi.
2371 __ cmp(eax, 0xc0000000);
2372 __ j(positive, &result_ok);
2373 __ SmiTag(ecx);
2374 __ jmp(&stub_call);
2375 __ bind(&result_ok);
2376 __ SmiTag(eax);
2377 break;
2378 }
2379 case Token::SHR: {
2380 Label result_ok;
2381 __ SmiUntag(eax);
2382 __ SmiUntag(ecx);
2383 __ shr_cl(eax);
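// Check that the unsigned result fits in a smi: the top two bits must be
// clear so that tagging (a left shift by one) cannot overflow and the
// result stays non-negative.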
2384 __ test(eax, Immediate(0xc0000000));
2385 __ j(zero, &result_ok);
2386 __ SmiTag(ecx);
2387 __ jmp(&stub_call);
2388 __ bind(&result_ok);
2389 __ SmiTag(eax);
2390 break;
2391 }
2392 case Token::ADD:
2393 __ add(eax, ecx);
2394 __ j(overflow, &stub_call);
2395 break;
2396 case Token::SUB:
2397 __ sub(eax, ecx);
2398 __ j(overflow, &stub_call);
2399 break;
2400 case Token::MUL: {
2401 __ SmiUntag(eax);
2402 __ imul(eax, ecx);
2403 __ j(overflow, &stub_call);
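// A zero product needs extra care: in JS, -2 * 0 is -0, which a smi cannot
// represent, so if either operand was negative bail out to the stub.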
2404 __ test(eax, eax);
2405 __ j(not_zero, &done, Label::kNear);
2406 __ mov(ebx, edx);
2407 __ or_(ebx, ecx);
2408 __ j(negative, &stub_call);
2409 break;
2410 }
2411 case Token::BIT_OR:
2412 __ or_(eax, ecx);
2413 break;
2414 case Token::BIT_AND:
2415 __ and_(eax, ecx);
2416 break;
2417 case Token::BIT_XOR:
2418 __ xor_(eax, ecx);
2419 break;
2420 default:
2421 UNREACHABLE();
2422 }
2423
2424 __ bind(&done);
2425 context()->Plug(eax);
2426 }
2427
2428
2429 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit,
2430 int* used_store_slots) {
2431 // Constructor is in eax.
2432 DCHECK(lit != NULL);
2433 __ push(eax);
2434
2435 // No access check is needed here since the constructor is created by the
2436 // class literal.
2437 Register scratch = ebx;
2438 __ mov(scratch, FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
2439 __ Push(scratch);
2440
2441 for (int i = 0; i < lit->properties()->length(); i++) {
2442 ObjectLiteral::Property* property = lit->properties()->at(i);
2443 Expression* value = property->value();
2444
2445 if (property->is_static()) {
2446 __ push(Operand(esp, kPointerSize)); // constructor
2447 } else {
2448 __ push(Operand(esp, 0)); // prototype
2449 }
2450 EmitPropertyKey(property, lit->GetIdForProperty(i));
2451
2452 // The static prototype property is read-only. We handle the non-computed
2453 // property name case in the parser. Since this is the only case where we
2454 // need to check for an own read-only property, we special-case it here so
2455 // we do not need to do the check for every property.
2456 if (property->is_static() && property->is_computed_name()) {
2457 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1);
2458 __ push(eax);
2459 }
2460
2461 VisitForStackValue(value);
2462 EmitSetHomeObjectIfNeeded(value, 2,
2463 lit->SlotForHomeObject(value, used_store_slots));
2464
2465 switch (property->kind()) {
2466 case ObjectLiteral::Property::CONSTANT:
2467 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2468 case ObjectLiteral::Property::PROTOTYPE:
2469 UNREACHABLE();
2470 case ObjectLiteral::Property::COMPUTED:
2471 __ CallRuntime(Runtime::kDefineClassMethod, 3);
2472 break;
2473
2474 case ObjectLiteral::Property::GETTER:
2475 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2476 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4);
2477 break;
2478
2479 case ObjectLiteral::Property::SETTER:
2480 __ push(Immediate(Smi::FromInt(DONT_ENUM)));
2481 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4);
2482 break;
2483 }
2484 }
2485
2486 // prototype
2487 __ CallRuntime(Runtime::kToFastProperties, 1);
2488
2489 // constructor
2490 __ CallRuntime(Runtime::kToFastProperties, 1);
2491
2492 if (is_strong(language_mode())) {
2493 __ mov(scratch,
2494 FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
2495 __ push(eax);
2496 __ Push(scratch);
2497 // TODO(conradw): It would be more efficient to define the properties with
2498 // the right attributes the first time round.
2499 // Freeze the prototype.
2500 __ CallRuntime(Runtime::kObjectFreeze, 1);
2501 // Freeze the constructor.
2502 __ CallRuntime(Runtime::kObjectFreeze, 1);
2503 }
2504 }
2505
2506
2507 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2508 __ pop(edx);
2509 Handle<Code> code =
2510 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2511 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2512 CallIC(code, expr->BinaryOperationFeedbackId());
2513 patch_site.EmitPatchInfo();
2514 context()->Plug(eax);
2515 }
2516
2517
2518 void FullCodeGenerator::EmitAssignment(Expression* expr,
2519 FeedbackVectorICSlot slot) {
2520 DCHECK(expr->IsValidReferenceExpressionOrThis());
2521
2522 Property* prop = expr->AsProperty();
2523 LhsKind assign_type = Property::GetAssignType(prop);
2524
2525 switch (assign_type) {
2526 case VARIABLE: {
2527 Variable* var = expr->AsVariableProxy()->var();
2528 EffectContext context(this);
2529 EmitVariableAssignment(var, Token::ASSIGN, slot);
2530 break;
2531 }
2532 case NAMED_PROPERTY: {
2533 __ push(eax); // Preserve value.
2534 VisitForAccumulatorValue(prop->obj());
2535 __ Move(StoreDescriptor::ReceiverRegister(), eax);
2536 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2537 __ mov(StoreDescriptor::NameRegister(),
2538 prop->key()->AsLiteral()->value());
2539 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2540 CallStoreIC();
2541 break;
2542 }
2543 case NAMED_SUPER_PROPERTY: {
2544 __ push(eax);
2545 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2546 VisitForAccumulatorValue(
2547 prop->obj()->AsSuperPropertyReference()->home_object());
2548 // stack: value, this; eax: home_object
2549 Register scratch = ecx;
2550 Register scratch2 = edx;
2551 __ mov(scratch, result_register()); // home_object
2552 __ mov(eax, MemOperand(esp, kPointerSize)); // value
2553 __ mov(scratch2, MemOperand(esp, 0)); // this
2554 __ mov(MemOperand(esp, kPointerSize), scratch2); // this
2555 __ mov(MemOperand(esp, 0), scratch); // home_object
2556 // stack: this, home_object. eax: value
2557 EmitNamedSuperPropertyStore(prop);
2558 break;
2559 }
2560 case KEYED_SUPER_PROPERTY: {
2561 __ push(eax);
2562 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2563 VisitForStackValue(
2564 prop->obj()->AsSuperPropertyReference()->home_object());
2565 VisitForAccumulatorValue(prop->key());
2566 Register scratch = ecx;
2567 Register scratch2 = edx;
2568 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value
2569 // stack: value, this, home_object; eax: key, edx: value
2570 __ mov(scratch, MemOperand(esp, kPointerSize)); // this
2571 __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2572 __ mov(scratch, MemOperand(esp, 0)); // home_object
2573 __ mov(MemOperand(esp, kPointerSize), scratch);
2574 __ mov(MemOperand(esp, 0), eax);
2575 __ mov(eax, scratch2);
2576 // stack: this, home_object, key; eax: value.
2577 EmitKeyedSuperPropertyStore(prop);
2578 break;
2579 }
2580 case KEYED_PROPERTY: {
2581 __ push(eax); // Preserve value.
2582 VisitForStackValue(prop->obj());
2583 VisitForAccumulatorValue(prop->key());
2584 __ Move(StoreDescriptor::NameRegister(), eax);
2585 __ pop(StoreDescriptor::ReceiverRegister()); // Receiver.
2586 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2587 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2588 Handle<Code> ic =
2589 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2590 CallIC(ic);
2591 break;
2592 }
2593 }
2594 context()->Plug(eax);
2595 }
2596
2597
2598 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2599 Variable* var, MemOperand location) {
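// Stack slots need no write barrier; a context lives on the heap, so a
// store into one must be recorded for the GC.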
2600 __ mov(location, eax);
2601 if (var->IsContextSlot()) {
2602 __ mov(edx, eax);
2603 int offset = Context::SlotOffset(var->index());
2604 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2605 }
2606 }
2607
2608
2609 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2610 FeedbackVectorICSlot slot) {
2611 if (var->IsUnallocated()) {
2612 // Global var, const, or let.
2613 __ mov(StoreDescriptor::NameRegister(), var->name());
2614 __ mov(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2615 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
2616 CallStoreIC();
2617
2618 } else if (var->IsGlobalSlot()) {
2619 // Global var, const, or let.
2620 DCHECK(var->index() > 0);
2621 DCHECK(var->IsStaticGlobalObjectProperty());
2622 // Each var occupies two slots in the context: one for reads, one for writes.
2623 int slot_index = var->index() + 1;
2624 int depth = scope()->ContextChainLength(var->scope());
2625 __ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
2626 Immediate(Smi::FromInt(depth)));
2627 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
2628 Immediate(Smi::FromInt(slot_index)));
2629 __ mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
2630 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax));
2631 StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
2632 __ CallStub(&stub);
2633
2634 } else if (var->mode() == LET && op != Token::INIT_LET) {
2635 // Non-initializing assignment to let variable needs a write barrier.
2636 DCHECK(!var->IsLookupSlot());
2637 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2638 Label assign;
2639 MemOperand location = VarOperand(var, ecx);
2640 __ mov(edx, location);
2641 __ cmp(edx, isolate()->factory()->the_hole_value());
2642 __ j(not_equal, &assign, Label::kNear);
2643 __ push(Immediate(var->name()));
2644 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2645 __ bind(&assign);
2646 EmitStoreToStackLocalOrContextSlot(var, location);
2647
2648 } else if (var->mode() == CONST && op != Token::INIT_CONST) {
2649 // Assignment to const variable needs a write barrier.
2650 DCHECK(!var->IsLookupSlot());
2651 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2652 Label const_error;
2653 MemOperand location = VarOperand(var, ecx);
2654 __ mov(edx, location);
2655 __ cmp(edx, isolate()->factory()->the_hole_value());
2656 __ j(not_equal, &const_error, Label::kNear);
2657 __ push(Immediate(var->name()));
2658 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2659 __ bind(&const_error);
2660 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2661
2662 } else if (var->is_this() && op == Token::INIT_CONST) {
2663 // Initializing assignment to const {this} needs a write barrier.
2664 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2665 Label uninitialized_this;
2666 MemOperand location = VarOperand(var, ecx);
2667 __ mov(edx, location);
2668 __ cmp(edx, isolate()->factory()->the_hole_value());
2669 __ j(equal, &uninitialized_this);
2670 __ push(Immediate(var->name()));
2671 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2672 __ bind(&uninitialized_this);
2673 EmitStoreToStackLocalOrContextSlot(var, location);
2674
2675 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2676 if (var->IsLookupSlot()) {
2677 // Assignment to var.
2678 __ push(eax); // Value.
2679 __ push(esi); // Context.
2680 __ push(Immediate(var->name()));
2681 __ push(Immediate(Smi::FromInt(language_mode())));
2682 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2683 } else {
2684 // Assignment to var or initializing assignment to let/const in harmony
2685 // mode.
2686 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2687 MemOperand location = VarOperand(var, ecx);
2688 if (generate_debug_code_ && op == Token::INIT_LET) {
2689 // Check for an uninitialized let binding.
2690 __ mov(edx, location);
2691 __ cmp(edx, isolate()->factory()->the_hole_value());
2692 __ Check(equal, kLetBindingReInitialization);
2693 }
2694 EmitStoreToStackLocalOrContextSlot(var, location);
2695 }
2696
2697 } else if (op == Token::INIT_CONST_LEGACY) {
2698 // Const initializers need a write barrier.
2699 DCHECK(var->mode() == CONST_LEGACY);
2700 DCHECK(!var->IsParameter()); // No const parameters.
2701 if (var->IsLookupSlot()) {
2702 __ push(eax);
2703 __ push(esi);
2704 __ push(Immediate(var->name()));
2705 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2706 } else {
2707 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2708 Label skip;
2709 MemOperand location = VarOperand(var, ecx);
2710 __ mov(edx, location);
2711 __ cmp(edx, isolate()->factory()->the_hole_value());
2712 __ j(not_equal, &skip, Label::kNear);
2713 EmitStoreToStackLocalOrContextSlot(var, location);
2714 __ bind(&skip);
2715 }
2716
2717 } else {
2718 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY);
2719 if (is_strict(language_mode())) {
2720 __ CallRuntime(Runtime::kThrowConstAssignError, 0);
2721 }
2722 // Silently ignore store in sloppy mode.
2723 }
2724 }
2725
2726
2727 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2728 // Assignment to a property, using a named store IC.
2729 // eax : value
2730 // esp[0] : receiver
2731 Property* prop = expr->target()->AsProperty();
2732 DCHECK(prop != NULL);
2733 DCHECK(prop->key()->IsLiteral());
2734
2735 __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2736 __ pop(StoreDescriptor::ReceiverRegister());
2737 if (FLAG_vector_stores) {
2738 EmitLoadStoreICSlot(expr->AssignmentSlot());
2739 CallStoreIC();
2740 } else {
2741 CallStoreIC(expr->AssignmentFeedbackId());
2742 }
2743 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2744 context()->Plug(eax);
2745 }
2746
2747
2748 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2749 // Assignment to named property of super.
2750 // eax : value
2751 // stack : receiver ('this'), home_object
2752 DCHECK(prop != NULL);
2753 Literal* key = prop->key()->AsLiteral();
2754 DCHECK(key != NULL);
2755
2756 __ push(Immediate(key->value()));
2757 __ push(eax);
2758 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2759 : Runtime::kStoreToSuper_Sloppy),
2760 4);
2761 }
2762
2763
2764 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2765 // Assignment to keyed property of super.
2766 // eax : value
2767 // stack : receiver ('this'), home_object, key
2768
2769 __ push(eax);
2770 __ CallRuntime(
2771 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
2772 : Runtime::kStoreKeyedToSuper_Sloppy),
2773 4);
2774 }
2775
2776
2777 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2778 // Assignment to a property, using a keyed store IC.
2779 // eax : value
2780 // esp[0] : key
2781 // esp[kPointerSize] : receiver
2782
2783 __ pop(StoreDescriptor::NameRegister()); // Key.
2784 __ pop(StoreDescriptor::ReceiverRegister());
2785 DCHECK(StoreDescriptor::ValueRegister().is(eax));
2786 Handle<Code> ic =
2787 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2788 if (FLAG_vector_stores) {
2789 EmitLoadStoreICSlot(expr->AssignmentSlot());
2790 CallIC(ic);
2791 } else {
2792 CallIC(ic, expr->AssignmentFeedbackId());
2793 }
2794
2795 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2796 context()->Plug(eax);
2797 }
2798
2799
2800 void FullCodeGenerator::VisitProperty(Property* expr) {
2801 Comment cmnt(masm_, "[ Property");
2802 SetExpressionPosition(expr);
2803
2804 Expression* key = expr->key();
2805
2806 if (key->IsPropertyName()) {
2807 if (!expr->IsSuperAccess()) {
2808 VisitForAccumulatorValue(expr->obj());
2809 __ Move(LoadDescriptor::ReceiverRegister(), result_register());
2810 EmitNamedPropertyLoad(expr);
2811 } else {
2812 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2813 VisitForStackValue(
2814 expr->obj()->AsSuperPropertyReference()->home_object());
2815 EmitNamedSuperPropertyLoad(expr);
2816 }
2817 } else {
2818 if (!expr->IsSuperAccess()) {
2819 VisitForStackValue(expr->obj());
2820 VisitForAccumulatorValue(expr->key());
2821 __ pop(LoadDescriptor::ReceiverRegister()); // Object.
2822 __ Move(LoadDescriptor::NameRegister(), result_register()); // Key.
2823 EmitKeyedPropertyLoad(expr);
2824 } else {
2825 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2826 VisitForStackValue(
2827 expr->obj()->AsSuperPropertyReference()->home_object());
2828 VisitForStackValue(expr->key());
2829 EmitKeyedSuperPropertyLoad(expr);
2830 }
2831 }
2832 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2833 context()->Plug(eax);
2834 }
2835
2836
2837 void FullCodeGenerator::CallIC(Handle<Code> code,
2838 TypeFeedbackId ast_id) {
2839 ic_total_count_++;
2840 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2841 }
2842
2843
2844 // Code common for calls using the IC.
2845 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2846 Expression* callee = expr->expression();
2847
2848 CallICState::CallType call_type =
2849 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2850 // Get the target function.
2851 if (call_type == CallICState::FUNCTION) {
2852 { StackValueContext context(this);
2853 EmitVariableLoad(callee->AsVariableProxy());
2854 PrepareForBailout(callee, NO_REGISTERS);
2855 }
2856 // Push undefined as receiver. This is patched in the method prologue if it
2857 // is a sloppy mode method.
2858 __ push(Immediate(isolate()->factory()->undefined_value()));
2859 } else {
2860 // Load the function from the receiver.
2861 DCHECK(callee->IsProperty());
2862 DCHECK(!callee->AsProperty()->IsSuperAccess());
2863 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2864 EmitNamedPropertyLoad(callee->AsProperty());
2865 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2866 // Push the target function under the receiver.
2867 __ push(Operand(esp, 0));
2868 __ mov(Operand(esp, kPointerSize), eax);
2869 }
2870
2871 EmitCall(expr, call_type);
2872 }
2873
2874
2875 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2876 SetExpressionPosition(expr);
2877 Expression* callee = expr->expression();
2878 DCHECK(callee->IsProperty());
2879 Property* prop = callee->AsProperty();
2880 DCHECK(prop->IsSuperAccess());
2881
2882 Literal* key = prop->key()->AsLiteral();
2883 DCHECK(!key->value()->IsSmi());
2884 // Load the function from the receiver.
2885 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2886 VisitForStackValue(super_ref->home_object());
2887 VisitForAccumulatorValue(super_ref->this_var());
2888 __ push(eax);
2889 __ push(eax);
2890 __ push(Operand(esp, kPointerSize * 2));
2891 __ push(Immediate(key->value()));
2892 __ push(Immediate(Smi::FromInt(language_mode())));
2893 // Stack here:
2894 // - home_object
2895 // - this (receiver)
2896 // - this (receiver) <-- LoadFromSuper will pop here and below.
2897 // - home_object
2898 // - key
2899 // - language_mode
2900 __ CallRuntime(Runtime::kLoadFromSuper, 4);
2901
2902 // Replace home_object with target function.
2903 __ mov(Operand(esp, kPointerSize), eax);
2904
2905 // Stack here:
2906 // - target function
2907 // - this (receiver)
2908 EmitCall(expr, CallICState::METHOD);
2909 }
2910
2911
2912 // Code common for calls using the IC.
2913 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2914 Expression* key) {
2915 // Load the key.
2916 VisitForAccumulatorValue(key);
2917
2918 Expression* callee = expr->expression();
2919
2920 // Load the function from the receiver.
2921 DCHECK(callee->IsProperty());
2922 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2923 __ mov(LoadDescriptor::NameRegister(), eax);
2924 EmitKeyedPropertyLoad(callee->AsProperty());
2925 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2926
2927 // Push the target function under the receiver.
2928 __ push(Operand(esp, 0));
2929 __ mov(Operand(esp, kPointerSize), eax);
2930
2931 EmitCall(expr, CallICState::METHOD);
2932 }
2933
2934
2935 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2936 Expression* callee = expr->expression();
2937 DCHECK(callee->IsProperty());
2938 Property* prop = callee->AsProperty();
2939 DCHECK(prop->IsSuperAccess());
2940
2941 SetExpressionPosition(prop);
2942 // Load the function from the receiver.
2943 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2944 VisitForStackValue(super_ref->home_object());
2945 VisitForAccumulatorValue(super_ref->this_var());
2946 __ push(eax);
2947 __ push(eax);
2948 __ push(Operand(esp, kPointerSize * 2));
2949 VisitForStackValue(prop->key());
2950 __ push(Immediate(Smi::FromInt(language_mode())));
2951 // Stack here:
2952 // - home_object
2953 // - this (receiver)
2954 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2955 // - home_object
2956 // - key
2957 // - language_mode
2958 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4);
2959
2960 // Replace home_object with target function.
2961 __ mov(Operand(esp, kPointerSize), eax);
2962
2963 // Stack here:
2964 // - target function
2965 // - this (receiver)
2966 EmitCall(expr, CallICState::METHOD);
2967 }
2968
2969
2970 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2971 // Load the arguments.
2972 ZoneList<Expression*>* args = expr->arguments();
2973 int arg_count = args->length();
2974 for (int i = 0; i < arg_count; i++) {
2975 VisitForStackValue(args->at(i));
2976 }
2977
2978 SetCallPosition(expr, arg_count);
2979 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code();
2980 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
2981 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2982 // Don't assign a type feedback id to the IC, since type feedback is provided
2983 // by the vector above.
2984 CallIC(ic);
2985
2986 RecordJSReturnSite(expr);
2987
2988 // Restore context register.
2989 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2990
2991 context()->DropAndPlug(1, eax);
2992 }
2993
2994
2995 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2996 // Push copy of the first argument or undefined if it doesn't exist.
2997 if (arg_count > 0) {
2998 __ push(Operand(esp, arg_count * kPointerSize));
2999 } else {
3000 __ push(Immediate(isolate()->factory()->undefined_value()));
3001 }
3002
3003 // Push the enclosing function.
3004 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3005
3006 // Push the language mode.
3007 __ push(Immediate(Smi::FromInt(language_mode())));
3008
3009 // Push the start position of the scope the call resides in.
3010 __ push(Immediate(Smi::FromInt(scope()->start_position())));
3011
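// Together with the callee copy pushed by the caller, the four pushes above
// form the five runtime arguments.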
3012 // Do the runtime call.
3013 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
3014 }
3015
3016
3017 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
3018 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
3019 VariableProxy* callee = expr->expression()->AsVariableProxy();
3020 if (callee->var()->IsLookupSlot()) {
3021 Label slow, done;
3022 SetExpressionPosition(callee);
3023 // Generate code for loading from variables potentially shadowed by
3024 // eval-introduced variables.
3025 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
3026
3027 __ bind(&slow);
3028 // Call the runtime to find the function to call (returned in eax) and
3029 // the object holding it (returned in edx).
3030 __ push(context_register());
3031 __ push(Immediate(callee->name()));
3032 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
3033 __ push(eax); // Function.
3034 __ push(edx); // Receiver.
3035 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
3036
3037 // If fast case code has been generated, emit code to push the function
3038 // and receiver and have the slow path jump around this code.
3039 if (done.is_linked()) {
3040 Label call;
3041 __ jmp(&call, Label::kNear);
3042 __ bind(&done);
3043 // Push function.
3044 __ push(eax);
3045 // The receiver is implicitly the global receiver. Indicate this by
3046 // passing undefined to the call function stub.
3047 __ push(Immediate(isolate()->factory()->undefined_value()));
3048 __ bind(&call);
3049 }
3050 } else {
3051 VisitForStackValue(callee);
3052 // refEnv.WithBaseObject()
3053 __ push(Immediate(isolate()->factory()->undefined_value()));
3054 }
3055 }
3056
3057
3058 void FullCodeGenerator::VisitCall(Call* expr) {
3059 #ifdef DEBUG
3060 // We want to verify that RecordJSReturnSite gets called on all paths
3061 // through this function. Avoid early returns.
3062 expr->return_is_recorded_ = false;
3063 #endif
3064
3065 Comment cmnt(masm_, "[ Call");
3066 Expression* callee = expr->expression();
3067 Call::CallType call_type = expr->GetCallType(isolate());
3068
3069 if (call_type == Call::POSSIBLY_EVAL_CALL) {
3070 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
3071 // to resolve the function we need to call. Then we call the resolved
3072 // function using the given arguments.
3073 ZoneList<Expression*>* args = expr->arguments();
3074 int arg_count = args->length();
3075
3076 PushCalleeAndWithBaseObject(expr);
3077
3078 // Push the arguments.
3079 for (int i = 0; i < arg_count; i++) {
3080 VisitForStackValue(args->at(i));
3081 }
3082
3083 // Push a copy of the function (found below the arguments) and
3084 // resolve eval.
3085 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
3086 EmitResolvePossiblyDirectEval(arg_count);
3087
3088 // Touch up the stack with the resolved function.
3089 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
3090
3091 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
3092
3093 SetCallPosition(expr, arg_count);
3094 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
3095 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
3096 __ CallStub(&stub);
3097 RecordJSReturnSite(expr);
3098 // Restore context register.
3099 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3100 context()->DropAndPlug(1, eax);
3101
3102 } else if (call_type == Call::GLOBAL_CALL) {
3103 EmitCallWithLoadIC(expr);
3104 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
3105 // Call to a lookup slot (dynamically introduced variable).
3106 PushCalleeAndWithBaseObject(expr);
3107 EmitCall(expr);
3108 } else if (call_type == Call::PROPERTY_CALL) {
3109 Property* property = callee->AsProperty();
3110 bool is_named_call = property->key()->IsPropertyName();
3111 if (property->IsSuperAccess()) {
3112 if (is_named_call) {
3113 EmitSuperCallWithLoadIC(expr);
3114 } else {
3115 EmitKeyedSuperCallWithLoadIC(expr);
3116 }
3117 } else {
3118 VisitForStackValue(property->obj());
3119 if (is_named_call) {
3120 EmitCallWithLoadIC(expr);
3121 } else {
3122 EmitKeyedCallWithLoadIC(expr, property->key());
3123 }
3124 }
3125 } else if (call_type == Call::SUPER_CALL) {
3126 EmitSuperConstructorCall(expr);
3127 } else {
3128 DCHECK(call_type == Call::OTHER_CALL);
3129 // Call to an arbitrary expression not handled specially above.
3130 VisitForStackValue(callee);
3131 __ push(Immediate(isolate()->factory()->undefined_value()));
3132 // Emit function call.
3133 EmitCall(expr);
3134 }
3135
3136 #ifdef DEBUG
3137 // RecordJSReturnSite should have been called.
3138 DCHECK(expr->return_is_recorded_);
3139 #endif
3140 }
3141
3142
3143 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
3144 Comment cmnt(masm_, "[ CallNew");
3145 // According to ECMA-262, section 11.2.2, page 44, the function
3146 // expression in new calls must be evaluated before the
3147 // arguments.
3148
3149 // Push the constructor on the stack. If it's not a function, it's used as
3150 // the receiver for CALL_NON_FUNCTION; otherwise the value on the stack is
3151 // ignored.
3152 DCHECK(!expr->expression()->IsSuperPropertyReference());
3153 VisitForStackValue(expr->expression());
3154
3155 // Push the arguments ("left-to-right") on the stack.
3156 ZoneList<Expression*>* args = expr->arguments();
3157 int arg_count = args->length();
3158 for (int i = 0; i < arg_count; i++) {
3159 VisitForStackValue(args->at(i));
3160 }
3161
3162 // Call the construct call builtin that handles allocation and
3163 // constructor invocation.
3164 SetConstructCallPosition(expr);
3165
3166 // Load function and argument count into edi and eax.
3167 __ Move(eax, Immediate(arg_count));
3168 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3169
3170 // Record call targets in unoptimized code.
3171 if (FLAG_pretenuring_call_new) {
3172 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3173 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3174 expr->CallNewFeedbackSlot().ToInt() + 1);
3175 }
3176
3177 __ LoadHeapObject(ebx, FeedbackVector());
3178 __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));
3179
3180 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
3181 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3182 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3183 context()->Plug(eax);
3184 }
3185
3186
3187 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3188 SuperCallReference* super_call_ref =
3189 expr->expression()->AsSuperCallReference();
3190 DCHECK_NOT_NULL(super_call_ref);
3191
3192 EmitLoadSuperConstructor(super_call_ref);
3193 __ push(result_register());
3194
3195 // Push the arguments ("left-to-right") on the stack.
3196 ZoneList<Expression*>* args = expr->arguments();
3197 int arg_count = args->length();
3198 for (int i = 0; i < arg_count; i++) {
3199 VisitForStackValue(args->at(i));
3200 }
3201
3202 // Call the construct call builtin that handles allocation and
3203 // constructor invocation.
3204 SetConstructCallPosition(expr);
3205
3206 // Load original constructor into ecx.
3207 VisitForAccumulatorValue(super_call_ref->new_target_var());
3208 __ mov(ecx, result_register());
3209
3210 // Load function and argument count into edi and eax.
3211 __ Move(eax, Immediate(arg_count));
3212 __ mov(edi, Operand(esp, arg_count * kPointerSize));
3213
3214 // Record call targets in unoptimized code.
3215 if (FLAG_pretenuring_call_new) {
3216 UNREACHABLE();
3217 /* TODO(dslomov): support pretenuring.
3218 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
3219 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() ==
3220 expr->CallNewFeedbackSlot().ToInt() + 1);
3221 */
3222 }
3223
3224 __ LoadHeapObject(ebx, FeedbackVector());
3225 __ mov(edx, Immediate(SmiFromSlot(expr->CallFeedbackSlot())));
3226
3227 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET);
3228 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
3229
3230 RecordJSReturnSite(expr);
3231
3232 context()->Plug(eax);
3233 }
3234
3235
3236 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3237 ZoneList<Expression*>* args = expr->arguments();
3238 DCHECK(args->length() == 1);
3239
3240 VisitForAccumulatorValue(args->at(0));
3241
3242 Label materialize_true, materialize_false;
3243 Label* if_true = NULL;
3244 Label* if_false = NULL;
3245 Label* fall_through = NULL;
3246 context()->PrepareTest(&materialize_true, &materialize_false,
3247 &if_true, &if_false, &fall_through);
3248
3249 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
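     // Smis have a zero tag in the low bit (kSmiTag == 0, kSmiTagMask == 1),
     // so the test below sets the zero flag exactly when eax holds a smi.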
3250 __ test(eax, Immediate(kSmiTagMask));
3251 Split(zero, if_true, if_false, fall_through);
3252
3253 context()->Plug(if_true, if_false);
3254 }
3255
3256
3257 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
3258 ZoneList<Expression*>* args = expr->arguments();
3259 DCHECK(args->length() == 1);
3260
3261 VisitForAccumulatorValue(args->at(0));
3262
3263 Label materialize_true, materialize_false;
3264 Label* if_true = NULL;
3265 Label* if_false = NULL;
3266 Label* fall_through = NULL;
3267 context()->PrepareTest(&materialize_true, &materialize_false,
3268 &if_true, &if_false, &fall_through);
3269
3270 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
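     // A non-negative smi has both the tag bit and the sign bit clear, so one
     // test against (kSmiTagMask | 0x80000000) covers both conditions at once.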
3271 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
3272 Split(zero, if_true, if_false, fall_through);
3273
3274 context()->Plug(if_true, if_false);
3275 }
3276
3277
3278 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3279 ZoneList<Expression*>* args = expr->arguments();
3280 DCHECK(args->length() == 1);
3281
3282 VisitForAccumulatorValue(args->at(0));
3283
3284 Label materialize_true, materialize_false;
3285 Label* if_true = NULL;
3286 Label* if_false = NULL;
3287 Label* fall_through = NULL;
3288 context()->PrepareTest(&materialize_true, &materialize_false,
3289 &if_true, &if_false, &fall_through);
3290
3291 __ JumpIfSmi(eax, if_false);
3292 __ cmp(eax, isolate()->factory()->null_value());
3293 __ j(equal, if_true);
3294 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3295 // Undetectable objects behave like undefined when tested with typeof.
3296 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
3297 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
3298 __ j(not_zero, if_false);
3299 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3300 __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
3301 __ j(below, if_false);
3302 __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
3303 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3304 Split(below_equal, if_true, if_false, fall_through);
3305
3306 context()->Plug(if_true, if_false);
3307 }
3308
3309
3310 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3311 ZoneList<Expression*>* args = expr->arguments();
3312 DCHECK(args->length() == 1);
3313
3314 VisitForAccumulatorValue(args->at(0));
3315
3316 Label materialize_true, materialize_false;
3317 Label* if_true = NULL;
3318 Label* if_false = NULL;
3319 Label* fall_through = NULL;
3320 context()->PrepareTest(&materialize_true, &materialize_false,
3321 &if_true, &if_false, &fall_through);
3322
3323 __ JumpIfSmi(eax, if_false);
3324 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
3325 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3326 Split(above_equal, if_true, if_false, fall_through);
3327
3328 context()->Plug(if_true, if_false);
3329 }
3330
3331
3332 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3333 ZoneList<Expression*>* args = expr->arguments();
3334 DCHECK(args->length() == 1);
3335
3336 VisitForAccumulatorValue(args->at(0));
3337
3338 Label materialize_true, materialize_false;
3339 Label* if_true = NULL;
3340 Label* if_false = NULL;
3341 Label* fall_through = NULL;
3342 context()->PrepareTest(&materialize_true, &materialize_false,
3343 &if_true, &if_false, &fall_through);
3344
3345 __ JumpIfSmi(eax, if_false);
3346 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3347 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
3348 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
3349 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3350 Split(not_zero, if_true, if_false, fall_through);
3351
3352 context()->Plug(if_true, if_false);
3353 }
3354
3355
3356 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3357 CallRuntime* expr) {
3358 ZoneList<Expression*>* args = expr->arguments();
3359 DCHECK(args->length() == 1);
3360
3361 VisitForAccumulatorValue(args->at(0));
3362
3363 Label materialize_true, materialize_false, skip_lookup;
3364 Label* if_true = NULL;
3365 Label* if_false = NULL;
3366 Label* fall_through = NULL;
3367 context()->PrepareTest(&materialize_true, &materialize_false,
3368 &if_true, &if_false, &fall_through);
3369
3370 __ AssertNotSmi(eax);
3371
3372 // Check whether this map has already been checked to be safe for default
3373 // valueOf.
3374 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3375 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
3376 1 << Map::kStringWrapperSafeForDefaultValueOf);
3377 __ j(not_zero, &skip_lookup);
3378
3379 // Check for fast case object. Return false for slow case objects.
3380 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
3381 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3382 __ cmp(ecx, isolate()->factory()->hash_table_map());
3383 __ j(equal, if_false);
3384
3385 // Look for the valueOf string in the descriptor array; the result is false
3386 // if it is found. Since we omit an enumeration index check, a valueOf added
3387 // via a transition that shares this descriptor array is a false positive.
3388 Label entry, loop, done;
3389
3390 // Skip loop if no descriptors are valid.
3391 __ NumberOfOwnDescriptors(ecx, ebx);
3392 __ cmp(ecx, 0);
3393 __ j(equal, &done);
3394
3395 __ LoadInstanceDescriptors(ebx, ebx);
3396 // ebx: descriptor array.
3397 // ecx: valid entries in the descriptor array.
3398 // Calculate the end of the descriptor array.
3399 STATIC_ASSERT(kSmiTag == 0);
3400 STATIC_ASSERT(kSmiTagSize == 1);
3401 STATIC_ASSERT(kPointerSize == 4);
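     // ecx holds the raw descriptor count; each descriptor occupies
     // kDescriptorSize pointer-sized slots, so scaling by times_4 (kPointerSize)
     // below gives the byte offset of the end of the descriptor entries.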
3402 __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
3403 __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
3404 // Calculate location of the first key name.
3405 __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
3406 // Loop through all the keys in the descriptor array. If one of these is the
3407 // internalized string "valueOf", the result is false.
3408 __ jmp(&entry);
3409 __ bind(&loop);
3410 __ mov(edx, FieldOperand(ebx, 0));
3411 __ cmp(edx, isolate()->factory()->value_of_string());
3412 __ j(equal, if_false);
3413 __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
3414 __ bind(&entry);
3415 __ cmp(ebx, ecx);
3416 __ j(not_equal, &loop);
3417
3418 __ bind(&done);
3419
3420 // Reload the map, as register ebx was used as a temporary above.
3421 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3422
3423 // Set the bit in the map to indicate that there is no local valueOf field.
3424 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
3425 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3426
3427 __ bind(&skip_lookup);
3428
3429 // If a valueOf property is not found on the object, check that its
3430 // prototype is the unmodified String prototype. If not, the result is false.
3431 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3432 __ JumpIfSmi(ecx, if_false);
3433 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3434 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3435 __ mov(edx,
3436 FieldOperand(edx, GlobalObject::kNativeContextOffset));
3437 __ cmp(ecx,
3438 ContextOperand(edx,
3439 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3440 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3441 Split(equal, if_true, if_false, fall_through);
3442
3443 context()->Plug(if_true, if_false);
3444 }
3445
3446
3447 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3448 ZoneList<Expression*>* args = expr->arguments();
3449 DCHECK(args->length() == 1);
3450
3451 VisitForAccumulatorValue(args->at(0));
3452
3453 Label materialize_true, materialize_false;
3454 Label* if_true = NULL;
3455 Label* if_false = NULL;
3456 Label* fall_through = NULL;
3457 context()->PrepareTest(&materialize_true, &materialize_false,
3458 &if_true, &if_false, &fall_through);
3459
3460 __ JumpIfSmi(eax, if_false);
3461 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3462 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3463 Split(equal, if_true, if_false, fall_through);
3464
3465 context()->Plug(if_true, if_false);
3466 }
3467
3468
3469 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3470 ZoneList<Expression*>* args = expr->arguments();
3471 DCHECK(args->length() == 1);
3472
3473 VisitForAccumulatorValue(args->at(0));
3474
3475 Label materialize_true, materialize_false;
3476 Label* if_true = NULL;
3477 Label* if_false = NULL;
3478 Label* fall_through = NULL;
3479 context()->PrepareTest(&materialize_true, &materialize_false,
3480 &if_true, &if_false, &fall_through);
3481
3482 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3483 __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3484 // Check if the exponent half is 0x80000000. Comparing against 1 and
3485 // checking for overflow is the shortest possible encoding.
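     // -0.0 is encoded as 0x80000000 00000000: the exponent half equals kMinInt,
     // and subtracting 1 overflows only for kMinInt, so the overflow flag
     // identifies it; the mantissa half must then be zero.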
3486 __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3487 __ j(no_overflow, if_false);
3488 __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3489 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3490 Split(equal, if_true, if_false, fall_through);
3491
3492 context()->Plug(if_true, if_false);
3493 }
3494
3495
3496 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3497 ZoneList<Expression*>* args = expr->arguments();
3498 DCHECK(args->length() == 1);
3499
3500 VisitForAccumulatorValue(args->at(0));
3501
3502 Label materialize_true, materialize_false;
3503 Label* if_true = NULL;
3504 Label* if_false = NULL;
3505 Label* fall_through = NULL;
3506 context()->PrepareTest(&materialize_true, &materialize_false,
3507 &if_true, &if_false, &fall_through);
3508
3509 __ JumpIfSmi(eax, if_false);
3510 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3511 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3512 Split(equal, if_true, if_false, fall_through);
3513
3514 context()->Plug(if_true, if_false);
3515 }
3516
3517
3518 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3519 ZoneList<Expression*>* args = expr->arguments();
3520 DCHECK(args->length() == 1);
3521
3522 VisitForAccumulatorValue(args->at(0));
3523
3524 Label materialize_true, materialize_false;
3525 Label* if_true = NULL;
3526 Label* if_false = NULL;
3527 Label* fall_through = NULL;
3528 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3529 &if_false, &fall_through);
3530
3531 __ JumpIfSmi(eax, if_false);
3532 __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
3533 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3534 Split(equal, if_true, if_false, fall_through);
3535
3536 context()->Plug(if_true, if_false);
3537 }
3538
3539
3540 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3541 ZoneList<Expression*>* args = expr->arguments();
3542 DCHECK(args->length() == 1);
3543
3544 VisitForAccumulatorValue(args->at(0));
3545
3546 Label materialize_true, materialize_false;
3547 Label* if_true = NULL;
3548 Label* if_false = NULL;
3549 Label* fall_through = NULL;
3550 context()->PrepareTest(&materialize_true, &materialize_false,
3551 &if_true, &if_false, &fall_through);
3552
3553 __ JumpIfSmi(eax, if_false);
3554 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3555 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3556 Split(equal, if_true, if_false, fall_through);
3557
3558 context()->Plug(if_true, if_false);
3559 }
3560
3561
3562 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3563 ZoneList<Expression*>* args = expr->arguments();
3564 DCHECK(args->length() == 1);
3565
3566 VisitForAccumulatorValue(args->at(0));
3567
3568 Label materialize_true, materialize_false;
3569 Label* if_true = NULL;
3570 Label* if_false = NULL;
3571 Label* fall_through = NULL;
3572 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3573 &if_false, &fall_through);
3574
3575 __ JumpIfSmi(eax, if_false);
3576 Register map = ebx;
3577 __ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
3578 __ CmpInstanceType(map, FIRST_JS_PROXY_TYPE);
3579 __ j(less, if_false);
3580 __ CmpInstanceType(map, LAST_JS_PROXY_TYPE);
3581 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3582 Split(less_equal, if_true, if_false, fall_through);
3583
3584 context()->Plug(if_true, if_false);
3585 }
3586
3587
3588 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3589 DCHECK(expr->arguments()->length() == 0);
3590
3591 Label materialize_true, materialize_false;
3592 Label* if_true = NULL;
3593 Label* if_false = NULL;
3594 Label* fall_through = NULL;
3595 context()->PrepareTest(&materialize_true, &materialize_false,
3596 &if_true, &if_false, &fall_through);
3597
3598 // Get the frame pointer for the calling frame.
3599 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3600
3601 // Skip the arguments adaptor frame if it exists.
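     // An adaptor frame is inserted when the actual argument count differs
     // from the formal parameter count; in that case the frame to inspect is
     // one level further up.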
3602 Label check_frame_marker;
3603 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3604 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3605 __ j(not_equal, &check_frame_marker);
3606 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3607
3608 // Check the marker in the calling frame.
3609 __ bind(&check_frame_marker);
3610 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3611 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3612 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3613 Split(equal, if_true, if_false, fall_through);
3614
3615 context()->Plug(if_true, if_false);
3616 }
3617
3618
3619 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3620 ZoneList<Expression*>* args = expr->arguments();
3621 DCHECK(args->length() == 2);
3622
3623 // Load the two objects into registers and perform the comparison.
3624 VisitForStackValue(args->at(0));
3625 VisitForAccumulatorValue(args->at(1));
3626
3627 Label materialize_true, materialize_false;
3628 Label* if_true = NULL;
3629 Label* if_false = NULL;
3630 Label* fall_through = NULL;
3631 context()->PrepareTest(&materialize_true, &materialize_false,
3632 &if_true, &if_false, &fall_through);
3633
3634 __ pop(ebx);
3635 __ cmp(eax, ebx);
3636 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3637 Split(equal, if_true, if_false, fall_through);
3638
3639 context()->Plug(if_true, if_false);
3640 }
3641
3642
3643 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3644 ZoneList<Expression*>* args = expr->arguments();
3645 DCHECK(args->length() == 1);
3646
3647 // ArgumentsAccessStub expects the key in edx and the formal
3648 // parameter count in eax.
3649 VisitForAccumulatorValue(args->at(0));
3650 __ mov(edx, eax);
3651 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3652 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3653 __ CallStub(&stub);
3654 context()->Plug(eax);
3655 }
3656
3657
3658 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3659 DCHECK(expr->arguments()->length() == 0);
3660
3661 Label exit;
3662 // Get the number of formal parameters.
3663 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3664
3665 // Check if the calling frame is an arguments adaptor frame.
3666 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3667 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3668 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3669 __ j(not_equal, &exit);
3670
3671 // Arguments adaptor case: Read the arguments length from the
3672 // adaptor frame.
3673 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3674
3675 __ bind(&exit);
3676 __ AssertSmi(eax);
3677 context()->Plug(eax);
3678 }
3679
3680
3681 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3682 ZoneList<Expression*>* args = expr->arguments();
3683 DCHECK(args->length() == 1);
3684 Label done, null, function, non_function_constructor;
3685
3686 VisitForAccumulatorValue(args->at(0));
3687
3688 // If the object is a smi, we return null.
3689 __ JumpIfSmi(eax, &null);
3690
3691 // Check that the object is a JS object, but take special care with JS
3692 // functions to make sure they have 'Function' as their class.
3693 // Assume that there are only two callable types, one at each end of the
3694 // type range for JS object types. This saves extra comparisons.
3695 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3696 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3697 // Map is now in eax.
3698 __ j(below, &null);
3699 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3700 FIRST_SPEC_OBJECT_TYPE + 1);
3701 __ j(equal, &function);
3702
3703 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3704 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3705 LAST_SPEC_OBJECT_TYPE - 1);
3706 __ j(equal, &function);
3707 // Assume that there is no larger type.
3708 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3709
3710 // Check if the constructor in the map is a JS function.
3711 __ GetMapConstructor(eax, eax, ebx);
3712 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
3713 __ j(not_equal, &non_function_constructor);
3714
3715 // eax now contains the constructor function. Grab the
3716 // instance class name from there.
3717 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3718 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3719 __ jmp(&done);
3720
3721 // Functions have class 'Function'.
3722 __ bind(&function);
3723 __ mov(eax, isolate()->factory()->Function_string());
3724 __ jmp(&done);
3725
3726 // Objects with a non-function constructor have class 'Object'.
3727 __ bind(&non_function_constructor);
3728 __ mov(eax, isolate()->factory()->Object_string());
3729 __ jmp(&done);
3730
3731 // Non-JS objects have class null.
3732 __ bind(&null);
3733 __ mov(eax, isolate()->factory()->null_value());
3734
3735 // All done.
3736 __ bind(&done);
3737
3738 context()->Plug(eax);
3739 }
3740
3741
3742 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3743 ZoneList<Expression*>* args = expr->arguments();
3744 DCHECK(args->length() == 1);
3745
3746 VisitForAccumulatorValue(args->at(0)); // Load the object.
3747
3748 Label done;
3749 // If the object is a smi, return the object.
3750 __ JumpIfSmi(eax, &done, Label::kNear);
3751 // If the object is not a value type, return the object.
3752 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3753 __ j(not_equal, &done, Label::kNear);
3754 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3755
3756 __ bind(&done);
3757 context()->Plug(eax);
3758 }
3759
3760
3761 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3762 ZoneList<Expression*>* args = expr->arguments();
3763 DCHECK_EQ(1, args->length());
3764
3765 VisitForAccumulatorValue(args->at(0));
3766
3767 Label materialize_true, materialize_false;
3768 Label* if_true = nullptr;
3769 Label* if_false = nullptr;
3770 Label* fall_through = nullptr;
3771 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3772 &if_false, &fall_through);
3773
3774 __ JumpIfSmi(eax, if_false);
3775 __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
3776 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3777 Split(equal, if_true, if_false, fall_through);
3778
3779 context()->Plug(if_true, if_false);
3780 }
3781
3782
3783 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3784 ZoneList<Expression*>* args = expr->arguments();
3785 DCHECK(args->length() == 2);
3786 DCHECK_NOT_NULL(args->at(1)->AsLiteral());
3787 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3788
3789 VisitForAccumulatorValue(args->at(0)); // Load the object.
3790
3791 Register object = eax;
3792 Register result = eax;
3793 Register scratch = ecx;
3794
3795 if (index->value() == 0) {
3796 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3797 } else {
3798 Label runtime, done;
3799 if (index->value() < JSDate::kFirstUncachedField) {
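         // Cached date fields are valid only while the stamp stored in the
         // JSDate object matches the isolate's current date cache stamp;
         // otherwise fall through to the C function to recompute the field.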
3800 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3801 __ mov(scratch, Operand::StaticVariable(stamp));
3802 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3803 __ j(not_equal, &runtime, Label::kNear);
3804 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3805 kPointerSize * index->value()));
3806 __ jmp(&done, Label::kNear);
3807 }
3808 __ bind(&runtime);
3809 __ PrepareCallCFunction(2, scratch);
3810 __ mov(Operand(esp, 0), object);
3811 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3812 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3813 __ bind(&done);
3814 }
3815
3816 context()->Plug(result);
3817 }
3818
3819
3820 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3821 ZoneList<Expression*>* args = expr->arguments();
3822 DCHECK_EQ(3, args->length());
3823
3824 Register string = eax;
3825 Register index = ebx;
3826 Register value = ecx;
3827
3828 VisitForStackValue(args->at(0)); // index
3829 VisitForStackValue(args->at(1)); // value
3830 VisitForAccumulatorValue(args->at(2)); // string
3831
3832 __ pop(value);
3833 __ pop(index);
3834
3835 if (FLAG_debug_code) {
3836 __ test(value, Immediate(kSmiTagMask));
3837 __ Check(zero, kNonSmiValue);
3838 __ test(index, Immediate(kSmiTagMask));
3839 __ Check(zero, kNonSmiValue);
3840 }
3841
3842 __ SmiUntag(value);
3843 __ SmiUntag(index);
3844
3845 if (FLAG_debug_code) {
3846 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3847 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3848 }
3849
3850 __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3851 value);
3852 context()->Plug(string);
3853 }
3854
3855
3856 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3857 ZoneList<Expression*>* args = expr->arguments();
3858 DCHECK_EQ(3, args->length());
3859
3860 Register string = eax;
3861 Register index = ebx;
3862 Register value = ecx;
3863
3864 VisitForStackValue(args->at(0)); // index
3865 VisitForStackValue(args->at(1)); // value
3866 VisitForAccumulatorValue(args->at(2)); // string
3867 __ pop(value);
3868 __ pop(index);
3869
3870 if (FLAG_debug_code) {
3871 __ test(value, Immediate(kSmiTagMask));
3872 __ Check(zero, kNonSmiValue);
3873 __ test(index, Immediate(kSmiTagMask));
3874 __ Check(zero, kNonSmiValue);
3875 __ SmiUntag(index);
3876 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3877 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3878 __ SmiTag(index);
3879 }
3880
3881 __ SmiUntag(value);
3882 // No need to untag a smi for two-byte addressing.
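     // A smi is the value shifted left by one, which for a two-byte string
     // already equals index * sizeof(uc16), so the tagged index doubles as
     // the byte offset.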
3883 __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3884 value);
3885 context()->Plug(string);
3886 }
3887
3888
3889 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3890 ZoneList<Expression*>* args = expr->arguments();
3891 DCHECK(args->length() == 2);
3892
3893 VisitForStackValue(args->at(0)); // Load the object.
3894 VisitForAccumulatorValue(args->at(1)); // Load the value.
3895 __ pop(ebx); // eax = value. ebx = object.
3896
3897 Label done;
3898 // If the object is a smi, return the value.
3899 __ JumpIfSmi(ebx, &done, Label::kNear);
3900
3901 // If the object is not a value type, return the value.
3902 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3903 __ j(not_equal, &done, Label::kNear);
3904
3905 // Store the value.
3906 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3907
3908 // Update the write barrier. Save the value as it will be
3909 // overwritten by the write barrier code and is needed afterward.
3910 __ mov(edx, eax);
3911 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3912
3913 __ bind(&done);
3914 context()->Plug(eax);
3915 }
3916
3917
3918 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3919 ZoneList<Expression*>* args = expr->arguments();
3920 DCHECK_EQ(args->length(), 1);
3921
3922 // Load the argument into eax and call the stub.
3923 VisitForAccumulatorValue(args->at(0));
3924
3925 NumberToStringStub stub(isolate());
3926 __ CallStub(&stub);
3927 context()->Plug(eax);
3928 }
3929
3930
3931 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3932 ZoneList<Expression*>* args = expr->arguments();
3933 DCHECK(args->length() == 1);
3934
3935 VisitForAccumulatorValue(args->at(0));
3936
3937 Label done;
3938 StringCharFromCodeGenerator generator(eax, ebx);
3939 generator.GenerateFast(masm_);
3940 __ jmp(&done);
3941
3942 NopRuntimeCallHelper call_helper;
3943 generator.GenerateSlow(masm_, call_helper);
3944
3945 __ bind(&done);
3946 context()->Plug(ebx);
3947 }
3948
3949
3950 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3951 ZoneList<Expression*>* args = expr->arguments();
3952 DCHECK(args->length() == 2);
3953
3954 VisitForStackValue(args->at(0));
3955 VisitForAccumulatorValue(args->at(1));
3956
3957 Register object = ebx;
3958 Register index = eax;
3959 Register result = edx;
3960
3961 __ pop(object);
3962
3963 Label need_conversion;
3964 Label index_out_of_range;
3965 Label done;
3966 StringCharCodeAtGenerator generator(object,
3967 index,
3968 result,
3969 &need_conversion,
3970 &need_conversion,
3971 &index_out_of_range,
3972 STRING_INDEX_IS_NUMBER);
3973 generator.GenerateFast(masm_);
3974 __ jmp(&done);
3975
3976 __ bind(&index_out_of_range);
3977 // When the index is out of range, the spec requires us to return
3978 // NaN.
3979 __ Move(result, Immediate(isolate()->factory()->nan_value()));
3980 __ jmp(&done);
3981
3982 __ bind(&need_conversion);
3983 // Move the undefined value into the result register, which will
3984 // trigger conversion.
3985 __ Move(result, Immediate(isolate()->factory()->undefined_value()));
3986 __ jmp(&done);
3987
3988 NopRuntimeCallHelper call_helper;
3989 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3990
3991 __ bind(&done);
3992 context()->Plug(result);
3993 }
3994
3995
3996 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3997 ZoneList<Expression*>* args = expr->arguments();
3998 DCHECK(args->length() == 2);
3999
4000 VisitForStackValue(args->at(0));
4001 VisitForAccumulatorValue(args->at(1));
4002
4003 Register object = ebx;
4004 Register index = eax;
4005 Register scratch = edx;
4006 Register result = eax;
4007
4008 __ pop(object);
4009
4010 Label need_conversion;
4011 Label index_out_of_range;
4012 Label done;
4013 StringCharAtGenerator generator(object,
4014 index,
4015 scratch,
4016 result,
4017 &need_conversion,
4018 &need_conversion,
4019 &index_out_of_range,
4020 STRING_INDEX_IS_NUMBER);
4021 generator.GenerateFast(masm_);
4022 __ jmp(&done);
4023
4024 __ bind(&index_out_of_range);
4025 // When the index is out of range, the spec requires us to return
4026 // the empty string.
4027 __ Move(result, Immediate(isolate()->factory()->empty_string()));
4028 __ jmp(&done);
4029
4030 __ bind(&need_conversion);
4031 // Move smi zero into the result register, which will trigger
4032 // conversion.
4033 __ Move(result, Immediate(Smi::FromInt(0)));
4034 __ jmp(&done);
4035
4036 NopRuntimeCallHelper call_helper;
4037 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
4038
4039 __ bind(&done);
4040 context()->Plug(result);
4041 }
4042
4043
4044 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
4045 ZoneList<Expression*>* args = expr->arguments();
4046 DCHECK_EQ(2, args->length());
4047 VisitForStackValue(args->at(0));
4048 VisitForAccumulatorValue(args->at(1));
4049
4050 __ pop(edx);
4051 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
4052 __ CallStub(&stub);
4053 context()->Plug(eax);
4054 }
4055
4056
4057 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
4058 ZoneList<Expression*>* args = expr->arguments();
4059 DCHECK(args->length() >= 2);
4060
4061 int arg_count = args->length() - 2; // 2 ~ receiver and function.
4062 for (int i = 0; i < arg_count + 1; ++i) {
4063 VisitForStackValue(args->at(i));
4064 }
4065 VisitForAccumulatorValue(args->last()); // Function.
4066
4067 Label runtime, done;
4068 // Check for non-function argument (including proxy).
4069 __ JumpIfSmi(eax, &runtime);
4070 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
4071 __ j(not_equal, &runtime);
4072
4073 // InvokeFunction requires the function in edi. Move it there.
4074 __ mov(edi, result_register());
4075 ParameterCount count(arg_count);
4076 __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
4077 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4078 __ jmp(&done);
4079
4080 __ bind(&runtime);
4081 __ push(eax);
4082 __ CallRuntime(Runtime::kCall, args->length());
4083 __ bind(&done);
4084
4085 context()->Plug(eax);
4086 }
4087
4088
4089 void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
4090 ZoneList<Expression*>* args = expr->arguments();
4091 DCHECK(args->length() == 2);
4092
4093 // new.target
4094 VisitForStackValue(args->at(0));
4095
4096 // .this_function
4097 VisitForStackValue(args->at(1));
4098 __ CallRuntime(Runtime::kGetPrototype, 1);
4099 __ push(result_register());
4100
4101 // Load original constructor into ecx.
4102 __ mov(ecx, Operand(esp, 1 * kPointerSize));
4103
4104 // Check if the calling frame is an arguments adaptor frame.
4105 Label adaptor_frame, args_set_up, runtime;
4106 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4107 __ mov(ebx, Operand(edx, StandardFrameConstants::kContextOffset));
4108 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4109 __ j(equal, &adaptor_frame);
4110 // The default constructor has no arguments, so no adaptor frame means no args.
4111 __ mov(eax, Immediate(0));
4112 __ jmp(&args_set_up);
4113
4114 // Copy arguments from adaptor frame.
4115 {
4116 __ bind(&adaptor_frame);
4117 __ mov(ebx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4118 __ SmiUntag(ebx);
4119
4120 __ mov(eax, ebx);
4121 __ lea(edx, Operand(edx, ebx, times_pointer_size,
4122 StandardFrameConstants::kCallerSPOffset));
4123 Label loop;
4124 __ bind(&loop);
4125 __ push(Operand(edx, -1 * kPointerSize));
4126 __ sub(edx, Immediate(kPointerSize));
4127 __ dec(ebx);
4128 __ j(not_zero, &loop);
4129 }
4130
4131 __ bind(&args_set_up);
4132
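     // edi: the super constructor, located just below the arguments on the
     // stack; ebx: undefined in place of a feedback vector, so the stub
     // records no type feedback for this call.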
4133 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
4134 __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
4135 CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
4136 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
4137
4138 __ Drop(1);
4139
4140 context()->Plug(eax);
4141 }
4142
4143
4144 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
4145 // Load the arguments on the stack and call the stub.
4146 RegExpConstructResultStub stub(isolate());
4147 ZoneList<Expression*>* args = expr->arguments();
4148 DCHECK(args->length() == 3);
4149 VisitForStackValue(args->at(0));
4150 VisitForStackValue(args->at(1));
4151 VisitForAccumulatorValue(args->at(2));
4152 __ pop(ebx);
4153 __ pop(ecx);
4154 __ CallStub(&stub);
4155 context()->Plug(eax);
4156 }
4157
4158
4159 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
4160 ZoneList<Expression*>* args = expr->arguments();
4161 DCHECK_EQ(2, args->length());
4162
4163 DCHECK_NOT_NULL(args->at(0)->AsLiteral());
4164 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
4165
4166 Handle<FixedArray> jsfunction_result_caches(
4167 isolate()->native_context()->jsfunction_result_caches());
4168 if (jsfunction_result_caches->length() <= cache_id) {
4169 __ Abort(kAttemptToUseUndefinedCache);
4170 __ mov(eax, isolate()->factory()->undefined_value());
4171 context()->Plug(eax);
4172 return;
4173 }
4174
4175 VisitForAccumulatorValue(args->at(1));
4176
4177 Register key = eax;
4178 Register cache = ebx;
4179 Register tmp = ecx;
4180 __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
4181 __ mov(cache,
4182 FieldOperand(cache, GlobalObject::kNativeContextOffset));
4183 __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
4184 __ mov(cache,
4185 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
4186
4187 Label done, not_found;
4188 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4189 __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
4190 // tmp now holds finger offset as a smi.
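     // The cache stores (key, value) pairs, with the finger pointing at the
     // most recently hit key, so a hit below loads the element one past the
     // finger.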
4191 __ cmp(key, FixedArrayElementOperand(cache, tmp));
4192 __ j(not_equal, &not_found);
4193
4194 __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
4195 __ jmp(&done);
4196
4197 __ bind(&not_found);
4198 // Call runtime to perform the lookup.
4199 __ push(cache);
4200 __ push(key);
4201 __ CallRuntime(Runtime::kGetFromCacheRT, 2);
4202
4203 __ bind(&done);
4204 context()->Plug(eax);
4205 }
4206
4207
4208 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
4209 ZoneList<Expression*>* args = expr->arguments();
4210 DCHECK(args->length() == 1);
4211
4212 VisitForAccumulatorValue(args->at(0));
4213
4214 __ AssertString(eax);
4215
4216 Label materialize_true, materialize_false;
4217 Label* if_true = NULL;
4218 Label* if_false = NULL;
4219 Label* fall_through = NULL;
4220 context()->PrepareTest(&materialize_true, &materialize_false,
4221 &if_true, &if_false, &fall_through);
4222
4223 __ test(FieldOperand(eax, String::kHashFieldOffset),
4224 Immediate(String::kContainsCachedArrayIndexMask));
4225 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4226 Split(zero, if_true, if_false, fall_through);
4227
4228 context()->Plug(if_true, if_false);
4229 }
4230
4231
4232 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
4233 ZoneList<Expression*>* args = expr->arguments();
4234 DCHECK(args->length() == 1);
4235 VisitForAccumulatorValue(args->at(0));
4236
4237 __ AssertString(eax);
4238
4239 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
4240 __ IndexFromHash(eax, eax);
4241
4242 context()->Plug(eax);
4243 }
4244
4245
4246 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
4247 Label bailout, done, one_char_separator, long_separator,
4248 non_trivial_array, not_size_one_array, loop,
4249 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
4250
4251 ZoneList<Expression*>* args = expr->arguments();
4252 DCHECK(args->length() == 2);
4253 // We will leave the separator on the stack until the end of the function.
4254 VisitForStackValue(args->at(1));
4255 // Load the array into eax.
4256 VisitForAccumulatorValue(args->at(0));
4257 // All aliases of the same register have disjoint lifetimes.
4258 Register array = eax;
4259 Register elements = no_reg; // Will be eax.
4260
4261 Register index = edx;
4262
4263 Register string_length = ecx;
4264
4265 Register string = esi;
4266
4267 Register scratch = ebx;
4268
4269 Register array_length = edi;
4270 Register result_pos = no_reg; // Will be edi.
4271
4272 // Separator operand is already pushed.
4273 Operand separator_operand = Operand(esp, 2 * kPointerSize);
4274 Operand result_operand = Operand(esp, 1 * kPointerSize);
4275 Operand array_length_operand = Operand(esp, 0);
4276 __ sub(esp, Immediate(2 * kPointerSize));
4277 __ cld();
4278 // Check that the array is a JSArray.
4279 __ JumpIfSmi(array, &bailout);
4280 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
4281 __ j(not_equal, &bailout);
4282
4283 // Check that the array has fast elements.
4284 __ CheckFastElements(scratch, &bailout);
4285
4286 // If the array has length zero, return the empty string.
4287 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
4288 __ SmiUntag(array_length);
4289 __ j(not_zero, &non_trivial_array);
4290 __ mov(result_operand, isolate()->factory()->empty_string());
4291 __ jmp(&done);
4292
4293 // Save the array length.
4294 __ bind(&non_trivial_array);
4295 __ mov(array_length_operand, array_length);
4296
4297 // Save the FixedArray containing the array's elements.
4298 // End of the array's live range.
4299 elements = array;
4300 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
4301 array = no_reg;
4302
4303
4304 // Check that all array elements are sequential one-byte strings, and
4305 // accumulate the sum of their lengths, as a smi-encoded value.
4306 __ Move(index, Immediate(0));
4307 __ Move(string_length, Immediate(0));
4308 // Loop condition: while (index < length).
4309 // Live loop registers: index, array_length, string,
4310 // scratch, string_length, elements.
4311 if (generate_debug_code_) {
4312 __ cmp(index, array_length);
4313 __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
4314 }
4315 __ bind(&loop);
4316 __ mov(string, FieldOperand(elements,
4317 index,
4318 times_pointer_size,
4319 FixedArray::kHeaderSize));
4320 __ JumpIfSmi(string, &bailout);
4321 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4322 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4323 __ and_(scratch, Immediate(
4324 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4325 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4326 __ j(not_equal, &bailout);
4327 __ add(string_length,
4328 FieldOperand(string, SeqOneByteString::kLengthOffset));
4329 __ j(overflow, &bailout);
4330 __ add(index, Immediate(1));
4331 __ cmp(index, array_length);
4332 __ j(less, &loop);
4333
4334 // If array_length is 1, return elements[0], a string.
4335 __ cmp(array_length, 1);
4336 __ j(not_equal, &not_size_one_array);
4337 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
4338 __ mov(result_operand, scratch);
4339 __ jmp(&done);
4340
4341 __ bind(&not_size_one_array);
4342
4343 // End of array_length live range.
4344 result_pos = array_length;
4345 array_length = no_reg;
4346
4347 // Live registers:
4348 // string_length: Sum of string lengths, as a smi.
4349 // elements: FixedArray of strings.
4350
4351 // Check that the separator is a flat one-byte string.
4352 __ mov(string, separator_operand);
4353 __ JumpIfSmi(string, &bailout);
4354 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
4355 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
4356 __ and_(scratch, Immediate(
4357 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
4358 __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
4359 __ j(not_equal, &bailout);
4360
4361 // Add (separator length times array_length) - separator length
4362 // to string_length.
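     // Both lengths are smis (value << 1), so the add and sub below remain
     // correctly tagged, and multiplying the tagged separator length by the
     // untagged array length still yields a valid smi.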
4363 __ mov(scratch, separator_operand);
4364 __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
4365 __ sub(string_length, scratch); // May be negative, temporarily.
4366 __ imul(scratch, array_length_operand);
4367 __ j(overflow, &bailout);
4368 __ add(string_length, scratch);
4369 __ j(overflow, &bailout);
4370
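     // Shift right by kSmiTagSize (1) to untag the accumulated length, giving
     // the raw character count for the allocation below.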
4371 __ shr(string_length, 1);
4372 // Live registers and stack values:
4373 // string_length
4374 // elements
4375 __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
4376 &bailout);
4377 __ mov(result_operand, result_pos);
4378 __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
4379
4380
4381 __ mov(string, separator_operand);
4382 __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
4383 Immediate(Smi::FromInt(1)));
4384 __ j(equal, &one_char_separator);
4385 __ j(greater, &long_separator);
4386
4387
4388 // Empty separator case
4389 __ mov(index, Immediate(0));
4390 __ jmp(&loop_1_condition);
4391 // Loop condition: while (index < length).
4392 __ bind(&loop_1);
4393 // Each iteration of the loop concatenates one string to the result.
4394 // Live values in registers:
4395 // index: which element of the elements array we are adding to the result.
4396 // result_pos: the position to which we are currently copying characters.
4397 // elements: the FixedArray of strings we are joining.
4398
4399 // Get string = array[index].
4400 __ mov(string, FieldOperand(elements, index,
4401 times_pointer_size,
4402 FixedArray::kHeaderSize));
4403 __ mov(string_length,
4404 FieldOperand(string, String::kLengthOffset));
4405 __ shr(string_length, 1);
4406 __ lea(string,
4407 FieldOperand(string, SeqOneByteString::kHeaderSize));
4408 __ CopyBytes(string, result_pos, string_length, scratch);
4409 __ add(index, Immediate(1));
4410 __ bind(&loop_1_condition);
4411 __ cmp(index, array_length_operand);
4412 __ j(less, &loop_1); // End while (index < length).
4413 __ jmp(&done);
4414
4415
4416
4417 // One-character separator case
4418 __ bind(&one_char_separator);
4419 // Replace separator with its one-byte character value.
4420 __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
4421 __ mov_b(separator_operand, scratch);
4422
4423 __ Move(index, Immediate(0));
4424 // Jump into the loop after the code that copies the separator, so the first
4425 // element is not preceded by a separator.
4426 __ jmp(&loop_2_entry);
4427 // Loop condition: while (index < length).
4428 __ bind(&loop_2);
4429 // Each iteration of the loop concatenates one string to the result.
4430 // Live values in registers:
4431 // index: which element of the elements array we are adding to the result.
4432 // result_pos: the position to which we are currently copying characters.
4433
4434 // Copy the separator character to the result.
4435 __ mov_b(scratch, separator_operand);
4436 __ mov_b(Operand(result_pos, 0), scratch);
4437 __ inc(result_pos);
4438
4439 __ bind(&loop_2_entry);
4440 // Get string = array[index].
4441 __ mov(string, FieldOperand(elements, index,
4442 times_pointer_size,
4443 FixedArray::kHeaderSize));
4444 __ mov(string_length,
4445 FieldOperand(string, String::kLengthOffset));
4446 __ shr(string_length, 1);
4447 __ lea(string,
4448 FieldOperand(string, SeqOneByteString::kHeaderSize));
4449 __ CopyBytes(string, result_pos, string_length, scratch);
4450 __ add(index, Immediate(1));
4451
4452 __ cmp(index, array_length_operand);
4453 __ j(less, &loop_2); // End while (index < length).
4454 __ jmp(&done);
4455
4456
4457 // Long separator case (separator is more than one character).
4458 __ bind(&long_separator);
4459
4460 __ Move(index, Immediate(0));
4461 // Jump into the loop after the code that copies the separator, so the first
4462 // element is not preceded by a separator.
4463 __ jmp(&loop_3_entry);
4464 // Loop condition: while (index < length).
4465 __ bind(&loop_3);
4466 // Each iteration of the loop concatenates one string to the result.
4467 // Live values in registers:
4468 // index: which element of the elements array we are adding to the result.
4469 // result_pos: the position to which we are currently copying characters.
4470
4471 // Copy the separator to the result.
4472 __ mov(string, separator_operand);
4473 __ mov(string_length,
4474 FieldOperand(string, String::kLengthOffset));
4475 __ shr(string_length, 1);
4476 __ lea(string,
4477 FieldOperand(string, SeqOneByteString::kHeaderSize));
4478 __ CopyBytes(string, result_pos, string_length, scratch);
4479
4480 __ bind(&loop_3_entry);
4481 // Get string = array[index].
4482 __ mov(string, FieldOperand(elements, index,
4483 times_pointer_size,
4484 FixedArray::kHeaderSize));
4485 __ mov(string_length,
4486 FieldOperand(string, String::kLengthOffset));
4487 __ shr(string_length, 1);
4488 __ lea(string,
4489 FieldOperand(string, SeqOneByteString::kHeaderSize));
4490 __ CopyBytes(string, result_pos, string_length, scratch);
4491 __ add(index, Immediate(1));
4492
4493 __ cmp(index, array_length_operand);
4494 __ j(less, &loop_3); // End while (index < length).
4495 __ jmp(&done);
4496
4497
4498 __ bind(&bailout);
4499 __ mov(result_operand, isolate()->factory()->undefined_value());
4500 __ bind(&done);
4501 __ mov(eax, result_operand);
4502 // Drop the two temporaries and the separator from the stack; restore context.
4503 __ add(esp, Immediate(3 * kPointerSize));
4504
4505 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4506 context()->Plug(eax);
4507 }
4508
4509
4510 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4511 DCHECK(expr->arguments()->length() == 0);
4512 ExternalReference debug_is_active =
4513 ExternalReference::debug_is_active_address(isolate());
4514 __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
4515 __ SmiTag(eax);
4516 context()->Plug(eax);
4517 }
4518
4519
4520 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4521 // Push the builtins object as receiver.
4522 __ mov(eax, GlobalObjectOperand());
4523 __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
4524
4525 // Load the function from the receiver.
4526 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4527 __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()));
4528 __ mov(LoadDescriptor::SlotRegister(),
4529 Immediate(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
4530 CallLoadIC(NOT_INSIDE_TYPEOF);
4531 }
4532
4533
4534 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4535 ZoneList<Expression*>* args = expr->arguments();
4536 int arg_count = args->length();
4537
4538 SetCallPosition(expr, arg_count);
4539 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4540 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
4541 __ CallStub(&stub);
4542 }
4543
4544
4545 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4546 ZoneList<Expression*>* args = expr->arguments();
4547 int arg_count = args->length();
4548
4549 if (expr->is_jsruntime()) {
4550 Comment cmnt(masm_, "[ CallRuntime");
4551 EmitLoadJSRuntimeFunction(expr);
4552
4553 // Push the target function under the receiver.
4554 __ push(Operand(esp, 0));
4555 __ mov(Operand(esp, kPointerSize), eax);
4556
4557 // Push the arguments ("left-to-right").
4558 for (int i = 0; i < arg_count; i++) {
4559 VisitForStackValue(args->at(i));
4560 }
4561
4562 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4563 EmitCallJSRuntimeFunction(expr);
4564
4565 // Restore context register.
4566 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4567 context()->DropAndPlug(1, eax);
4568
4569 } else {
4570 const Runtime::Function* function = expr->function();
4571 switch (function->function_id) {
4572 #define CALL_INTRINSIC_GENERATOR(Name) \
4573 case Runtime::kInline##Name: { \
4574 Comment cmnt(masm_, "[ Inline" #Name); \
4575 return Emit##Name(expr); \
4576 }
4577 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4578 #undef CALL_INTRINSIC_GENERATOR
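         // Each full-codegen intrinsic above dispatches to its Emit##Name
         // generator; any other runtime function falls through to the generic
         // C runtime call below.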
4579 default: {
4580 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4581 // Push the arguments ("left-to-right").
4582 for (int i = 0; i < arg_count; i++) {
4583 VisitForStackValue(args->at(i));
4584 }
4585
4586 // Call the C runtime function.
4587 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4588 __ CallRuntime(expr->function(), arg_count);
4589 context()->Plug(eax);
4590 }
4591 }
4592 }
4593 }
4594
4595
4596 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4597 switch (expr->op()) {
4598 case Token::DELETE: {
4599 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4600 Property* property = expr->expression()->AsProperty();
4601 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4602
4603 if (property != NULL) {
4604 VisitForStackValue(property->obj());
4605 VisitForStackValue(property->key());
4606 __ push(Immediate(Smi::FromInt(language_mode())));
4607 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4608 context()->Plug(eax);
4609 } else if (proxy != NULL) {
4610 Variable* var = proxy->var();
4611        // Deleting an unqualified identifier is disallowed in strict mode, but
4612        // "delete this" is allowed.
4613 bool is_this = var->HasThisName(isolate());
4614 DCHECK(is_sloppy(language_mode()) || is_this);
4615 if (var->IsUnallocatedOrGlobalSlot()) {
4616 __ push(GlobalObjectOperand());
4617 __ push(Immediate(var->name()));
4618 __ push(Immediate(Smi::FromInt(SLOPPY)));
4619 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4620 context()->Plug(eax);
4621 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4622          // The result of deleting a non-global variable is false. 'this' is
4623          // not really a variable, though we implement it as one. The
4624          // subexpression does not have side effects.
4625 context()->Plug(is_this);
4626 } else {
4627 // Non-global variable. Call the runtime to try to delete from the
4628 // context where the variable was introduced.
4629 __ push(context_register());
4630 __ push(Immediate(var->name()));
4631 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4632 context()->Plug(eax);
4633 }
4634 } else {
4635        // The result of deleting a non-property, non-variable reference is true.
4636        // The subexpression may have side effects.
4637 VisitForEffect(expr->expression());
4638 context()->Plug(true);
4639 }
4640 break;
4641 }
4642
4643 case Token::VOID: {
4644 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4645 VisitForEffect(expr->expression());
4646 context()->Plug(isolate()->factory()->undefined_value());
4647 break;
4648 }
4649
4650 case Token::NOT: {
4651 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4652 if (context()->IsEffect()) {
4653        // Unary NOT has no side effects, so it's only necessary to visit the
4654        // subexpression. Match the optimizing compiler by not branching.
4655 VisitForEffect(expr->expression());
4656 } else if (context()->IsTest()) {
4657 const TestContext* test = TestContext::cast(context());
4658 // The labels are swapped for the recursive call.
4659 VisitForControl(expr->expression(),
4660 test->false_label(),
4661 test->true_label(),
4662 test->fall_through());
4663 context()->Plug(test->true_label(), test->false_label());
4664 } else {
4665 // We handle value contexts explicitly rather than simply visiting
4666 // for control and plugging the control flow into the context,
4667 // because we need to prepare a pair of extra administrative AST ids
4668 // for the optimizing compiler.
4669 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4670 Label materialize_true, materialize_false, done;
4671 VisitForControl(expr->expression(),
4672 &materialize_false,
4673 &materialize_true,
4674 &materialize_true);
4675 __ bind(&materialize_true);
4676 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4677 if (context()->IsAccumulatorValue()) {
4678 __ mov(eax, isolate()->factory()->true_value());
4679 } else {
4680 __ Push(isolate()->factory()->true_value());
4681 }
4682 __ jmp(&done, Label::kNear);
4683 __ bind(&materialize_false);
4684 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4685 if (context()->IsAccumulatorValue()) {
4686 __ mov(eax, isolate()->factory()->false_value());
4687 } else {
4688 __ Push(isolate()->factory()->false_value());
4689 }
4690 __ bind(&done);
4691 }
4692 break;
4693 }
4694
4695 case Token::TYPEOF: {
4696 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4697 {
4698 AccumulatorValueContext context(this);
4699 VisitForTypeofValue(expr->expression());
4700 }
4701 __ mov(ebx, eax);
4702 TypeofStub typeof_stub(isolate());
4703 __ CallStub(&typeof_stub);
4704 context()->Plug(eax);
4705 break;
4706 }
4707
4708 default:
4709 UNREACHABLE();
4710 }
4711 }
4712
4713
4714 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4715 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4716
4717 Comment cmnt(masm_, "[ CountOperation");
4718
4719 Property* prop = expr->expression()->AsProperty();
4720 LhsKind assign_type = Property::GetAssignType(prop);
4721
4722 // Evaluate expression and get value.
4723 if (assign_type == VARIABLE) {
4724 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4725 AccumulatorValueContext context(this);
4726 EmitVariableLoad(expr->expression()->AsVariableProxy());
4727 } else {
4728 // Reserve space for result of postfix operation.
4729 if (expr->is_postfix() && !context()->IsEffect()) {
4730 __ push(Immediate(Smi::FromInt(0)));
4731 }
4732 switch (assign_type) {
4733 case NAMED_PROPERTY: {
4734 // Put the object both on the stack and in the register.
4735 VisitForStackValue(prop->obj());
4736 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4737 EmitNamedPropertyLoad(prop);
4738 break;
4739 }
4740
4741 case NAMED_SUPER_PROPERTY: {
4742 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4743 VisitForAccumulatorValue(
4744 prop->obj()->AsSuperPropertyReference()->home_object());
4745 __ push(result_register());
4746 __ push(MemOperand(esp, kPointerSize));
4747 __ push(result_register());
4748 EmitNamedSuperPropertyLoad(prop);
4749 break;
4750 }
4751
4752 case KEYED_SUPER_PROPERTY: {
4753 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4754 VisitForStackValue(
4755 prop->obj()->AsSuperPropertyReference()->home_object());
4756 VisitForAccumulatorValue(prop->key());
4757 __ push(result_register());
4758 __ push(MemOperand(esp, 2 * kPointerSize));
4759 __ push(MemOperand(esp, 2 * kPointerSize));
4760 __ push(result_register());
4761 EmitKeyedSuperPropertyLoad(prop);
4762 break;
4763 }
4764
4765 case KEYED_PROPERTY: {
4766 VisitForStackValue(prop->obj());
4767 VisitForStackValue(prop->key());
4768 __ mov(LoadDescriptor::ReceiverRegister(),
4769 Operand(esp, kPointerSize)); // Object.
4770 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key.
4771 EmitKeyedPropertyLoad(prop);
4772 break;
4773 }
4774
4775 case VARIABLE:
4776 UNREACHABLE();
4777 }
4778 }
4779
4780 // We need a second deoptimization point after loading the value, since
4781 // evaluating the property load may have a side effect.
4782 if (assign_type == VARIABLE) {
4783 PrepareForBailout(expr->expression(), TOS_REG);
4784 } else {
4785 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4786 }
4787
4788 // Inline smi case if we are in a loop.
4789 Label done, stub_call;
4790 JumpPatchSite patch_site(masm_);
4791 if (ShouldInlineSmiCase(expr->op())) {
4792 Label slow;
4793 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4794
4795 // Save result for postfix expressions.
4796 if (expr->is_postfix()) {
4797 if (!context()->IsEffect()) {
4798        // Save the result on the stack. If we have a named or keyed property,
4799        // we store the result under the receiver that is currently on top
4800        // of the stack.
4801 switch (assign_type) {
4802 case VARIABLE:
4803 __ push(eax);
4804 break;
4805 case NAMED_PROPERTY:
4806 __ mov(Operand(esp, kPointerSize), eax);
4807 break;
4808 case NAMED_SUPER_PROPERTY:
4809 __ mov(Operand(esp, 2 * kPointerSize), eax);
4810 break;
4811 case KEYED_PROPERTY:
4812 __ mov(Operand(esp, 2 * kPointerSize), eax);
4813 break;
4814 case KEYED_SUPER_PROPERTY:
4815 __ mov(Operand(esp, 3 * kPointerSize), eax);
4816 break;
4817 }
4818 }
4819 }
4820
4821 if (expr->op() == Token::INC) {
4822 __ add(eax, Immediate(Smi::FromInt(1)));
4823 } else {
4824 __ sub(eax, Immediate(Smi::FromInt(1)));
4825 }
4826 __ j(no_overflow, &done, Label::kNear);
4827 // Call stub. Undo operation first.
4828 if (expr->op() == Token::INC) {
4829 __ sub(eax, Immediate(Smi::FromInt(1)));
4830 } else {
4831 __ add(eax, Immediate(Smi::FromInt(1)));
4832 }
4833 __ jmp(&stub_call, Label::kNear);
4834 __ bind(&slow);
4835 }
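The fast path above relies on ia32 add/sub writing the wrapped result even when they set the overflow flag, so applying the inverse operation restores the exact original operand before falling back to the BinaryOpIC stub. A hedged C++ analogue of that add-then-undo pattern, assuming ia32 smi tagging (where Smi::FromInt(1) is the raw word 2):

    #include <cstdint>

    // Sketch of add/j(no_overflow)/sub: on signed overflow the operand is
    // left untouched for the slow path, otherwise the sum is kept.
    bool smi_increment_fast(int32_t& tagged) {
      int32_t sum;
      if (__builtin_add_overflow(tagged, 2, &sum)) {  // 2 == Smi::FromInt(1)
        return false;  // caller takes the stub path with 'tagged' unchanged
      }
      tagged = sum;
      return true;
    }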
4836 if (!is_strong(language_mode())) {
4837 ToNumberStub convert_stub(isolate());
4838 __ CallStub(&convert_stub);
4839 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4840 }
4841
4842 // Save result for postfix expressions.
4843 if (expr->is_postfix()) {
4844 if (!context()->IsEffect()) {
4845 // Save the result on the stack. If we have a named or keyed property,
4846 // we store the result under the receiver that is currently on top
4847 // of the stack.
4848 switch (assign_type) {
4849 case VARIABLE:
4850 __ push(eax);
4851 break;
4852 case NAMED_PROPERTY:
4853 __ mov(Operand(esp, kPointerSize), eax);
4854 break;
4855 case NAMED_SUPER_PROPERTY:
4856 __ mov(Operand(esp, 2 * kPointerSize), eax);
4857 break;
4858 case KEYED_PROPERTY:
4859 __ mov(Operand(esp, 2 * kPointerSize), eax);
4860 break;
4861 case KEYED_SUPER_PROPERTY:
4862 __ mov(Operand(esp, 3 * kPointerSize), eax);
4863 break;
4864 }
4865 }
4866 }
4867
4868 SetExpressionPosition(expr);
4869
4870 // Call stub for +1/-1.
4871 __ bind(&stub_call);
4872 __ mov(edx, eax);
4873 __ mov(eax, Immediate(Smi::FromInt(1)));
4874 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4875 strength(language_mode())).code();
4876 CallIC(code, expr->CountBinOpFeedbackId());
4877 patch_site.EmitPatchInfo();
4878 __ bind(&done);
4879
4880 if (is_strong(language_mode())) {
4881 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4882 }
4883 // Store the value returned in eax.
4884 switch (assign_type) {
4885 case VARIABLE:
4886 if (expr->is_postfix()) {
4887 // Perform the assignment as if via '='.
4888 { EffectContext context(this);
4889 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4890 Token::ASSIGN, expr->CountSlot());
4891 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4892 context.Plug(eax);
4893 }
4894 // For all contexts except EffectContext we have the result on
4895 // top of the stack.
4896 if (!context()->IsEffect()) {
4897 context()->PlugTOS();
4898 }
4899 } else {
4900 // Perform the assignment as if via '='.
4901 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4902 Token::ASSIGN, expr->CountSlot());
4903 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4904 context()->Plug(eax);
4905 }
4906 break;
4907 case NAMED_PROPERTY: {
4908 __ mov(StoreDescriptor::NameRegister(),
4909 prop->key()->AsLiteral()->value());
4910 __ pop(StoreDescriptor::ReceiverRegister());
4911 if (FLAG_vector_stores) {
4912 EmitLoadStoreICSlot(expr->CountSlot());
4913 CallStoreIC();
4914 } else {
4915 CallStoreIC(expr->CountStoreFeedbackId());
4916 }
4917 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4918 if (expr->is_postfix()) {
4919 if (!context()->IsEffect()) {
4920 context()->PlugTOS();
4921 }
4922 } else {
4923 context()->Plug(eax);
4924 }
4925 break;
4926 }
4927 case NAMED_SUPER_PROPERTY: {
4928 EmitNamedSuperPropertyStore(prop);
4929 if (expr->is_postfix()) {
4930 if (!context()->IsEffect()) {
4931 context()->PlugTOS();
4932 }
4933 } else {
4934 context()->Plug(eax);
4935 }
4936 break;
4937 }
4938 case KEYED_SUPER_PROPERTY: {
4939 EmitKeyedSuperPropertyStore(prop);
4940 if (expr->is_postfix()) {
4941 if (!context()->IsEffect()) {
4942 context()->PlugTOS();
4943 }
4944 } else {
4945 context()->Plug(eax);
4946 }
4947 break;
4948 }
4949 case KEYED_PROPERTY: {
4950 __ pop(StoreDescriptor::NameRegister());
4951 __ pop(StoreDescriptor::ReceiverRegister());
4952 Handle<Code> ic =
4953 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4954 if (FLAG_vector_stores) {
4955 EmitLoadStoreICSlot(expr->CountSlot());
4956 CallIC(ic);
4957 } else {
4958 CallIC(ic, expr->CountStoreFeedbackId());
4959 }
4960 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4961 if (expr->is_postfix()) {
4962 // Result is on the stack.
4963 if (!context()->IsEffect()) {
4964 context()->PlugTOS();
4965 }
4966 } else {
4967 context()->Plug(eax);
4968 }
4969 break;
4970 }
4971 }
4972 }
4973
4974
4975 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4976 Expression* sub_expr,
4977 Handle<String> check) {
4978 Label materialize_true, materialize_false;
4979 Label* if_true = NULL;
4980 Label* if_false = NULL;
4981 Label* fall_through = NULL;
4982 context()->PrepareTest(&materialize_true, &materialize_false,
4983 &if_true, &if_false, &fall_through);
4984
4985 { AccumulatorValueContext context(this);
4986 VisitForTypeofValue(sub_expr);
4987 }
4988 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4989
4990 Factory* factory = isolate()->factory();
4991 if (String::Equals(check, factory->number_string())) {
4992 __ JumpIfSmi(eax, if_true);
4993 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4994 isolate()->factory()->heap_number_map());
4995 Split(equal, if_true, if_false, fall_through);
4996 } else if (String::Equals(check, factory->string_string())) {
4997 __ JumpIfSmi(eax, if_false);
4998 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4999 __ j(above_equal, if_false);
5000 // Check for undetectable objects => false.
5001 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
5002 1 << Map::kIsUndetectable);
5003 Split(zero, if_true, if_false, fall_through);
5004 } else if (String::Equals(check, factory->symbol_string())) {
5005 __ JumpIfSmi(eax, if_false);
5006 __ CmpObjectType(eax, SYMBOL_TYPE, edx);
5007 Split(equal, if_true, if_false, fall_through);
5008 } else if (String::Equals(check, factory->float32x4_string())) {
5009 __ JumpIfSmi(eax, if_false);
5010 __ CmpObjectType(eax, FLOAT32X4_TYPE, edx);
5011 Split(equal, if_true, if_false, fall_through);
5012 } else if (String::Equals(check, factory->boolean_string())) {
5013 __ cmp(eax, isolate()->factory()->true_value());
5014 __ j(equal, if_true);
5015 __ cmp(eax, isolate()->factory()->false_value());
5016 Split(equal, if_true, if_false, fall_through);
5017 } else if (String::Equals(check, factory->undefined_string())) {
5018 __ cmp(eax, isolate()->factory()->undefined_value());
5019 __ j(equal, if_true);
5020 __ JumpIfSmi(eax, if_false);
5021 // Check for undetectable objects => true.
5022 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
5023 __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
5024 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
5025 Split(not_zero, if_true, if_false, fall_through);
5026 } else if (String::Equals(check, factory->function_string())) {
5027 __ JumpIfSmi(eax, if_false);
5028 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5029 __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
5030 __ j(equal, if_true);
5031 __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
5032 Split(equal, if_true, if_false, fall_through);
5033 } else if (String::Equals(check, factory->object_string())) {
5034 __ JumpIfSmi(eax, if_false);
5035 __ cmp(eax, isolate()->factory()->null_value());
5036 __ j(equal, if_true);
5037 __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
5038 __ j(below, if_false);
5039 __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
5040 __ j(above, if_false);
5041 // Check for undetectable objects => false.
5042 __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
5043 1 << Map::kIsUndetectable);
5044 Split(zero, if_true, if_false, fall_through);
5045 } else {
5046 if (if_false != fall_through) __ jmp(if_false);
5047 }
5048 context()->Plug(if_true, if_false);
5049 }
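The repeated Map::kIsUndetectable tests are what make host objects such as `document.all` answer `"undefined"` to typeof while failing the `"string"` and `"object"` checks. A simplified C++ sketch of two of the branches (the struct is an illustrative stand-in for the map and instance-type reads, not V8's object layout):

    struct TaggedValue {
      bool is_smi;
      bool is_heap_number;
      bool is_string;
      bool is_undetectable;  // Map::kIsUndetectable in the real code
    };

    bool typeof_is_number(const TaggedValue& v) {
      return v.is_smi || v.is_heap_number;
    }

    bool typeof_is_string(const TaggedValue& v) {
      // Undetectable objects report "undefined", never "string".
      return !v.is_smi && v.is_string && !v.is_undetectable;
    }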
5050
5051
5052 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5053 Comment cmnt(masm_, "[ CompareOperation");
5054 SetExpressionPosition(expr);
5055
5056 // First we try a fast inlined version of the compare when one of
5057 // the operands is a literal.
5058 if (TryLiteralCompare(expr)) return;
5059
5060 // Always perform the comparison for its control flow. Pack the result
5061 // into the expression's context after the comparison is performed.
5062 Label materialize_true, materialize_false;
5063 Label* if_true = NULL;
5064 Label* if_false = NULL;
5065 Label* fall_through = NULL;
5066 context()->PrepareTest(&materialize_true, &materialize_false,
5067 &if_true, &if_false, &fall_through);
5068
5069 Token::Value op = expr->op();
5070 VisitForStackValue(expr->left());
5071 switch (op) {
5072 case Token::IN:
5073 VisitForStackValue(expr->right());
5074 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
5075 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
5076 __ cmp(eax, isolate()->factory()->true_value());
5077 Split(equal, if_true, if_false, fall_through);
5078 break;
5079
5080 case Token::INSTANCEOF: {
5081 VisitForStackValue(expr->right());
5082 InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
5083 __ CallStub(&stub);
5084 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5085 __ test(eax, eax);
5086 // The stub returns 0 for true.
5087 Split(zero, if_true, if_false, fall_through);
5088 break;
5089 }
5090
5091 default: {
5092 VisitForAccumulatorValue(expr->right());
5093 Condition cc = CompareIC::ComputeCondition(op);
5094 __ pop(edx);
5095
5096 bool inline_smi_code = ShouldInlineSmiCase(op);
5097 JumpPatchSite patch_site(masm_);
5098 if (inline_smi_code) {
5099 Label slow_case;
5100 __ mov(ecx, edx);
5101 __ or_(ecx, eax);
5102 patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
5103 __ cmp(edx, eax);
5104 Split(cc, if_true, if_false, NULL);
5105 __ bind(&slow_case);
5106 }
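OR-ing the operands first lets a single tag test cover both: smis carry a 0 in the tag bit and heap pointers a 1, so the OR has the bit set exactly when at least one operand is not a smi. As a one-line C++ sketch (kSmiTagMask is 1 on ia32):

    #include <cstdint>

    // (a | b) & 1 == 0 iff both words are smi-tagged.
    bool both_smis(uintptr_t a, uintptr_t b) { return ((a | b) & 1) == 0; }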
5107
5108 Handle<Code> ic = CodeFactory::CompareIC(
5109 isolate(), op, strength(language_mode())).code();
5110 CallIC(ic, expr->CompareOperationFeedbackId());
5111 patch_site.EmitPatchInfo();
5112
5113 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5114 __ test(eax, eax);
5115 Split(cc, if_true, if_false, fall_through);
5116 }
5117 }
5118
5119 // Convert the result of the comparison into one expected for this
5120 // expression's context.
5121 context()->Plug(if_true, if_false);
5122 }
5123
5124
5125 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
5126 Expression* sub_expr,
5127 NilValue nil) {
5128 Label materialize_true, materialize_false;
5129 Label* if_true = NULL;
5130 Label* if_false = NULL;
5131 Label* fall_through = NULL;
5132 context()->PrepareTest(&materialize_true, &materialize_false,
5133 &if_true, &if_false, &fall_through);
5134
5135 VisitForAccumulatorValue(sub_expr);
5136 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
5137
5138 Handle<Object> nil_value = nil == kNullValue
5139 ? isolate()->factory()->null_value()
5140 : isolate()->factory()->undefined_value();
5141 if (expr->op() == Token::EQ_STRICT) {
5142 __ cmp(eax, nil_value);
5143 Split(equal, if_true, if_false, fall_through);
5144 } else {
5145 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
5146 CallIC(ic, expr->CompareOperationFeedbackId());
5147 __ test(eax, eax);
5148 Split(not_zero, if_true, if_false, fall_through);
5149 }
5150 context()->Plug(if_true, if_false);
5151 }
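Strict comparison against null or undefined reduces to a single pointer compare because each oddball is a unique singleton on the heap; only the sloppy form needs the CompareNilIC, since `==` also equates the two nil values with each other and with undetectable objects. A sketch of the fast path under that singleton assumption (the types here are illustrative):

    struct Oddball {};
    static const Oddball kNullValue, kUndefinedValue;

    bool strict_equals_nil(const Oddball* value, bool against_null) {
      return value == (against_null ? &kNullValue : &kUndefinedValue);
    }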
5152
5153
5154 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
5155 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5156 context()->Plug(eax);
5157 }
5158
5159
5160 Register FullCodeGenerator::result_register() {
5161 return eax;
5162 }
5163
5164
5165 Register FullCodeGenerator::context_register() {
5166 return esi;
5167 }
5168
5169
5170 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
5171 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
5172 __ mov(Operand(ebp, frame_offset), value);
5173 }
5174
5175
5176 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
5177 __ mov(dst, ContextOperand(esi, context_index));
5178 }
5179
5180
5181 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
5182 Scope* declaration_scope = scope()->DeclarationScope();
5183 if (declaration_scope->is_script_scope() ||
5184 declaration_scope->is_module_scope()) {
5185 // Contexts nested in the native context have a canonical empty function
5186 // as their closure, not the anonymous closure containing the global
5187 // code. Pass a smi sentinel and let the runtime look up the empty
5188 // function.
5189 __ push(Immediate(Smi::FromInt(0)));
5190 } else if (declaration_scope->is_eval_scope()) {
5191 // Contexts nested inside eval code have the same closure as the context
5192 // calling eval, not the anonymous closure containing the eval code.
5193 // Fetch it from the context.
5194 __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
5195 } else {
5196 DCHECK(declaration_scope->is_function_scope());
5197 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
5198 }
5199 }
5200
5201
5202 // ----------------------------------------------------------------------------
5203 // Non-local control flow support.
5204
5205 void FullCodeGenerator::EnterFinallyBlock() {
5206 // Cook the return address on top of the stack (smi-encoded Code* delta).
5207 DCHECK(!result_register().is(edx));
5208 __ pop(edx);
5209 __ sub(edx, Immediate(masm_->CodeObject()));
5210 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
5211 STATIC_ASSERT(kSmiTag == 0);
5212 __ SmiTag(edx);
5213 __ push(edx);
5214
5215 // Store result register while executing finally block.
5216 __ push(result_register());
5217
5218 // Store pending message while executing finally block.
5219 ExternalReference pending_message_obj =
5220 ExternalReference::address_of_pending_message_obj(isolate());
5221 __ mov(edx, Operand::StaticVariable(pending_message_obj));
5222 __ push(edx);
5223
5224 ClearPendingMessage();
5225 }
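Cooking replaces the raw return address with a smi-tagged offset relative to the code object, so the word on the stack stays a valid tagged value while the finally block runs and the GC remains free to move the code; ExitFinallyBlock applies the inverse. A C++ sketch, assuming ia32 smi tagging (shift by 1, tag bit 0) and using `code_base` as a stand-in for `masm_->CodeObject()`:

    #include <cstdint>

    uintptr_t cook(uintptr_t return_address, uintptr_t code_base) {
      return (return_address - code_base) << 1;  // SmiTag(delta)
    }

    uintptr_t uncook(uintptr_t cooked, uintptr_t code_base) {
      return (cooked >> 1) + code_base;  // SmiUntag, then rebase
    }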
5226
5227
5228 void FullCodeGenerator::ExitFinallyBlock() {
5229 DCHECK(!result_register().is(edx));
5230 // Restore pending message from stack.
5231 __ pop(edx);
5232 ExternalReference pending_message_obj =
5233 ExternalReference::address_of_pending_message_obj(isolate());
5234 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5235
5236 // Restore result register from stack.
5237 __ pop(result_register());
5238
5239 // Uncook return address.
5240 __ pop(edx);
5241 __ SmiUntag(edx);
5242 __ add(edx, Immediate(masm_->CodeObject()));
5243 __ jmp(edx);
5244 }
5245
5246
5247 void FullCodeGenerator::ClearPendingMessage() {
5248 DCHECK(!result_register().is(edx));
5249 ExternalReference pending_message_obj =
5250 ExternalReference::address_of_pending_message_obj(isolate());
5251 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
5252 __ mov(Operand::StaticVariable(pending_message_obj), edx);
5253 }
5254
5255
5256 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
5257 DCHECK(FLAG_vector_stores && !slot.IsInvalid());
5258 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
5259 Immediate(SmiFromSlot(slot)));
5260 }
5261
5262
5263 #undef __
5264
5265
5266 static const byte kJnsInstruction = 0x79;
5267 static const byte kJnsOffset = 0x11;
5268 static const byte kNopByteOne = 0x66;
5269 static const byte kNopByteTwo = 0x90;
5270 #ifdef DEBUG
5271 static const byte kCallInstruction = 0xe8;
5272 #endif
5273
5274
5275 void BackEdgeTable::PatchAt(Code* unoptimized_code,
5276 Address pc,
5277 BackEdgeState target_state,
5278 Code* replacement_code) {
5279 Address call_target_address = pc - kIntSize;
5280 Address jns_instr_address = call_target_address - 3;
5281 Address jns_offset_address = call_target_address - 2;
5282
5283 switch (target_state) {
5284 case INTERRUPT:
5285 // sub <profiling_counter>, <delta> ;; Not changed
5286 // jns ok
5287 // call <interrupt stub>
5288 // ok:
5289 *jns_instr_address = kJnsInstruction;
5290 *jns_offset_address = kJnsOffset;
5291 break;
5292 case ON_STACK_REPLACEMENT:
5293 case OSR_AFTER_STACK_CHECK:
5294 // sub <profiling_counter>, <delta> ;; Not changed
5295 // nop
5296 // nop
5297 // call <on-stack replacement>
5298 // ok:
5299 *jns_instr_address = kNopByteOne;
5300 *jns_offset_address = kNopByteTwo;
5301 break;
5302 }
5303
5304 Assembler::set_target_address_at(call_target_address,
5305 unoptimized_code,
5306 replacement_code->entry());
5307 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
5308 unoptimized_code, call_target_address, replacement_code);
5309 }
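Only two bytes ever change at a back edge: the `jns` (0x79 0x11) that skips the interrupt call is swapped for a two-byte nop (0x66 0x90) so the call, retargeted to the OSR builtin, always executes; the swap is reversed to return to the interrupt state. A hedged C++ sketch of that rewrite ('site' points at the jns, i.e. three bytes before the call's 32-bit target field):

    #include <cstdint>

    void patch_back_edge(uint8_t* site, bool to_interrupt_state) {
      if (to_interrupt_state) {
        site[0] = 0x79;  // jns rel8
        site[1] = 0x11;  // displacement skipping the interrupt call
      } else {
        site[0] = 0x66;  // operand-size prefix + nop == two-byte nop
        site[1] = 0x90;
      }
    }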
5310
5311
5312 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
5313 Isolate* isolate,
5314 Code* unoptimized_code,
5315 Address pc) {
5316 Address call_target_address = pc - kIntSize;
5317 Address jns_instr_address = call_target_address - 3;
5318 DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
5319
5320 if (*jns_instr_address == kJnsInstruction) {
5321 DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
5322 DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
5323 Assembler::target_address_at(call_target_address,
5324 unoptimized_code));
5325 return INTERRUPT;
5326 }
5327
5328 DCHECK_EQ(kNopByteOne, *jns_instr_address);
5329 DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
5330
5331 if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
5332 isolate->builtins()->OnStackReplacement()->entry()) {
5333 return ON_STACK_REPLACEMENT;
5334 }
5335
5336 DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
5337 Assembler::target_address_at(call_target_address,
5338 unoptimized_code));
5339 return OSR_AFTER_STACK_CHECK;
5340 }
5341
5342
5343 } // namespace internal
5344 } // namespace v8
5345
5346 #endif // V8_TARGET_ARCH_IA32