1 // Copyright 2013 the V8 project authors. All rights reserved. | |
2 // Redistribution and use in source and binary forms, with or without | |
3 // modification, are permitted provided that the following conditions are | |
4 // met: | |
5 // | |
6 // * Redistributions of source code must retain the above copyright | |
7 // notice, this list of conditions and the following disclaimer. | |
8 // * Redistributions in binary form must reproduce the above | |
9 // copyright notice, this list of conditions and the following | |
10 // disclaimer in the documentation and/or other materials provided | |
11 // with the distribution. | |
12 // * Neither the name of Google Inc. nor the names of its | |
13 // contributors may be used to endorse or promote products derived | |
14 // from this software without specific prior written permission. | |
15 // | |
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | |
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | |
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
27 | |
28 #include "v8.h" | |
29 | |
30 #if V8_TARGET_ARCH_A64 | |
31 | |
32 #include "code-stubs.h" | |
33 #include "codegen.h" | |
34 #include "compiler.h" | |
35 #include "debug.h" | |
36 #include "full-codegen.h" | |
37 #include "isolate-inl.h" | |
38 #include "parser.h" | |
39 #include "scopes.h" | |
40 #include "stub-cache.h" | |
41 | |
42 #include "a64/code-stubs-a64.h" | |
43 #include "a64/macro-assembler-a64.h" | |
44 | |
45 namespace v8 { | |
46 namespace internal { | |
47 | |
48 #define __ ACCESS_MASM(masm_) | |
49 | |
50 class JumpPatchSite BASE_EMBEDDED { | |
51 public: | |
52 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) { | |
53 #ifdef DEBUG | |
54 info_emitted_ = false; | |
55 #endif | |
56 } | |
57 | |
58 ~JumpPatchSite() { | |
59 if (patch_site_.is_bound()) { | |
60 ASSERT(info_emitted_); | |
61 } else { | |
62 ASSERT(reg_.IsNone()); | |
63 } | |
64 } | |
65 | |
66 void EmitJumpIfNotSmi(Register reg, Label* target) { | |
67 // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc. | |
68 InstructionAccurateScope scope(masm_, 1); | |
69 ASSERT(!info_emitted_); | |
70 ASSERT(reg.Is64Bits()); | |
71 ASSERT(!reg.Is(csp)); | |
72 reg_ = reg; | |
73 __ bind(&patch_site_); | |
74 __ tbz(xzr, 0, target); // Always taken before patched. | |
75 } | |
76 | |
77 void EmitJumpIfSmi(Register reg, Label* target) { | |
78 // This code will be patched by PatchInlinedSmiCode, in ic-a64.cc. | |
79 InstructionAccurateScope scope(masm_, 1); | |
80 ASSERT(!info_emitted_); | |
81 ASSERT(reg.Is64Bits()); | |
82 ASSERT(!reg.Is(csp)); | |
83 reg_ = reg; | |
84 __ bind(&patch_site_); | |
85 __ tbnz(xzr, 0, target); // Never taken before patched. | |
86 } | |
87 | |
88 void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) { | |
89 UseScratchRegisterScope temps(masm_); | |
90 Register temp = temps.AcquireX(); | |
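// kSmiTag is 0, so ORing the two values sets the low (tag) bit iff at least
// one of them is not a smi; testing the OR covers both registers at once.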
91 __ Orr(temp, reg1, reg2); | |
92 EmitJumpIfNotSmi(temp, target); | |
93 } | |
94 | |
95 void EmitPatchInfo() { | |
96 Assembler::BlockPoolsScope scope(masm_); | |
97 InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_); | |
98 #ifdef DEBUG | |
99 info_emitted_ = true; | |
100 #endif | |
101 } | |
102 | |
103 private: | |
104 MacroAssembler* masm_; | |
105 Label patch_site_; | |
106 Register reg_; | |
107 #ifdef DEBUG | |
108 bool info_emitted_; | |
109 #endif | |
110 }; | |
111 | |
112 | |
113 // Generate code for a JS function. On entry to the function the receiver | |
114 // and arguments have been pushed on the stack left to right. The actual | |
115 // argument count matches the formal parameter count expected by the | |
116 // function. | |
117 // | |
118 // The live registers are: | |
119 // - x1: the JS function object being called (i.e. ourselves). | |
120 // - cp: our context. | |
121 // - fp: our caller's frame pointer. | |
122 // - jssp: stack pointer. | |
123 // - lr: return address. | |
124 // | |
125 // The function builds a JS frame. See JavaScriptFrameConstants in | |
126 // frames-a64.h for its layout. | |
127 void FullCodeGenerator::Generate() { | |
128 CompilationInfo* info = info_; | |
129 handler_table_ = | |
130 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); | |
131 | |
132 InitializeFeedbackVector(); | |
133 | |
134 profiling_counter_ = isolate()->factory()->NewCell( | |
135 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); | |
136 SetFunctionPosition(function()); | |
137 Comment cmnt(masm_, "[ Function compiled by full code generator"); | |
138 | |
139 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | |
140 | |
141 #ifdef DEBUG | |
142 if (strlen(FLAG_stop_at) > 0 && | |
143 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { | |
144 __ Debug("stop-at", __LINE__, BREAK); | |
145 } | |
146 #endif | |
147 | |
148 // Sloppy mode functions and builtins need to replace the receiver with the | |
149 // global proxy when called as functions (without an explicit receiver | |
150 // object). | |
151 if (info->strict_mode() == SLOPPY && !info->is_native()) { | |
152 Label ok; | |
153 int receiver_offset = info->scope()->num_parameters() * kXRegSize; | |
154 __ Peek(x10, receiver_offset); | |
155 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok); | |
156 | |
157 __ Ldr(x10, GlobalObjectMemOperand()); | |
158 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset)); | |
159 __ Poke(x10, receiver_offset); | |
160 | |
161 __ Bind(&ok); | |
162 } | |
163 | |
164 | |
165 // Open a frame scope to indicate that there is a frame on the stack. | |
166 // The MANUAL indicates that the scope shouldn't actually generate code | |
167 // to set up the frame because we do it manually below. | |
168 FrameScope frame_scope(masm_, StackFrame::MANUAL); | |
169 | |
170 // This call emits the following sequence in a way that can be patched for | |
171 // code ageing support: | |
172 // Push(lr, fp, cp, x1); | |
173 // Add(fp, jssp, 2 * kPointerSize); | |
174 info->set_prologue_offset(masm_->pc_offset()); | |
175 __ Prologue(BUILD_FUNCTION_FRAME); | |
176 info->AddNoFrameRange(0, masm_->pc_offset()); | |
177 | |
178 // Reserve space on the stack for locals. | |
179 { Comment cmnt(masm_, "[ Allocate locals"); | |
180 int locals_count = info->scope()->num_stack_slots(); | |
181 // Generators allocate locals, if any, in context slots. | |
182 ASSERT(!info->function()->is_generator() || locals_count == 0); | |
183 | |
184 if (locals_count > 0) { | |
185 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | |
186 __ PushMultipleTimes(x10, locals_count); | |
187 } | |
188 } | |
189 | |
190 bool function_in_register_x1 = true; | |
191 | |
192 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | |
193 if (heap_slots > 0) { | |
194 // Argument to NewContext is the function, which is still in x1. | |
195 Comment cmnt(masm_, "[ Allocate context"); | |
196 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) { | |
197 __ Mov(x10, Operand(info->scope()->GetScopeInfo())); | |
198 __ Push(x1, x10); | |
199 __ CallRuntime(Runtime::kNewGlobalContext, 2); | |
200 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) { | |
201 FastNewContextStub stub(heap_slots); | |
202 __ CallStub(&stub); | |
203 } else { | |
204 __ Push(x1); | |
205 __ CallRuntime(Runtime::kNewFunctionContext, 1); | |
206 } | |
207 function_in_register_x1 = false; | |
208 // Context is returned in x0. It replaces the context passed to us. | |
209 // It's saved on the stack and kept live in cp. | |
210 __ Mov(cp, x0); | |
211 __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
212 // Copy any necessary parameters into the context. | |
213 int num_parameters = info->scope()->num_parameters(); | |
214 for (int i = 0; i < num_parameters; i++) { | |
215 Variable* var = scope()->parameter(i); | |
216 if (var->IsContextSlot()) { | |
217 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | |
218 (num_parameters - 1 - i) * kPointerSize; | |
219 // Load parameter from stack. | |
220 __ Ldr(x10, MemOperand(fp, parameter_offset)); | |
221 // Store it in the context. | |
222 MemOperand target = ContextMemOperand(cp, var->index()); | |
223 __ Str(x10, target); | |
224 | |
225 // Update the write barrier. | |
226 __ RecordWriteContextSlot( | |
227 cp, target.offset(), x10, x11, kLRHasBeenSaved, kDontSaveFPRegs); | |
228 } | |
229 } | |
230 } | |
231 | |
232 Variable* arguments = scope()->arguments(); | |
233 if (arguments != NULL) { | |
234 // Function uses arguments object. | |
235 Comment cmnt(masm_, "[ Allocate arguments object"); | |
236 if (!function_in_register_x1) { | |
237 // Load this again, if it's used by the local context below. | |
238 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
239 } else { | |
240 __ Mov(x3, x1); | |
241 } | |
242 // Receiver is just before the parameters on the caller's stack. | |
243 int num_parameters = info->scope()->num_parameters(); | |
244 int offset = num_parameters * kPointerSize; | |
245 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset + offset); | |
246 __ Mov(x1, Smi::FromInt(num_parameters)); | |
247 __ Push(x3, x2, x1); | |
248 | |
249 // Arguments to ArgumentsAccessStub: | |
250 // function, receiver address, parameter count. | |
251 // The stub will rewrite receiver and parameter count if the previous | |
252 // stack frame was an arguments adapter frame. | |
253 ArgumentsAccessStub::Type type; | |
254 if (strict_mode() == STRICT) { | |
255 type = ArgumentsAccessStub::NEW_STRICT; | |
256 } else if (function()->has_duplicate_parameters()) { | |
257 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW; | |
258 } else { | |
259 type = ArgumentsAccessStub::NEW_SLOPPY_FAST; | |
260 } | |
261 ArgumentsAccessStub stub(type); | |
262 __ CallStub(&stub); | |
263 | |
264 SetVar(arguments, x0, x1, x2); | |
265 } | |
266 | |
267 if (FLAG_trace) { | |
268 __ CallRuntime(Runtime::kTraceEnter, 0); | |
269 } | |
270 | |
271 | |
272 // Visit the declarations and body unless there is an illegal | |
273 // redeclaration. | |
274 if (scope()->HasIllegalRedeclaration()) { | |
275 Comment cmnt(masm_, "[ Declarations"); | |
276 scope()->VisitIllegalRedeclaration(this); | |
277 | |
278 } else { | |
279 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); | |
280 { Comment cmnt(masm_, "[ Declarations"); | |
281 if (scope()->is_function_scope() && scope()->function() != NULL) { | |
282 VariableDeclaration* function = scope()->function(); | |
283 ASSERT(function->proxy()->var()->mode() == CONST || | |
284 function->proxy()->var()->mode() == CONST_LEGACY); | |
285 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); | |
286 VisitVariableDeclaration(function); | |
287 } | |
288 VisitDeclarations(scope()->declarations()); | |
289 } | |
290 } | |
291 | |
292 { Comment cmnt(masm_, "[ Stack check"); | |
293 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); | |
294 Label ok; | |
295 ASSERT(jssp.Is(__ StackPointer())); | |
296 __ CompareRoot(jssp, Heap::kStackLimitRootIndex); | |
297 __ B(hs, &ok); | |
298 PredictableCodeSizeScope predictable(masm_, | |
299 Assembler::kCallSizeWithRelocation); | |
300 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); | |
301 __ Bind(&ok); | |
302 } | |
303 | |
304 { Comment cmnt(masm_, "[ Body"); | |
305 ASSERT(loop_depth() == 0); | |
306 VisitStatements(function()->body()); | |
307 ASSERT(loop_depth() == 0); | |
308 } | |
309 | |
310 // Always emit a 'return undefined' in case control fell off the end of | |
311 // the body. | |
312 { Comment cmnt(masm_, "[ return <undefined>;"); | |
313 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); | |
314 } | |
315 EmitReturnSequence(); | |
316 | |
317 // Force emission of the pools, so they don't get emitted in the middle | |
318 // of the back edge table. | |
319 masm()->CheckVeneerPool(true, false); | |
320 masm()->CheckConstPool(true, false); | |
321 } | |
322 | |
323 | |
324 void FullCodeGenerator::ClearAccumulator() { | |
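// Load a Smi zero so the accumulator (x0) always holds a valid tagged value.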
325 __ Mov(x0, Smi::FromInt(0)); | |
326 } | |
327 | |
328 | |
329 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | |
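// Load the profiling counter cell, subtract delta from its Smi value and
// store it back. Subs leaves the flags set, so callers can branch on the
// counter going negative (e.g. B(pl, ...)).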
330 __ Mov(x2, Operand(profiling_counter_)); | |
331 __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset)); | |
332 __ Subs(x3, x3, Smi::FromInt(delta)); | |
333 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset)); | |
334 } | |
335 | |
336 | |
337 void FullCodeGenerator::EmitProfilingCounterReset() { | |
338 int reset_value = FLAG_interrupt_budget; | |
339 if (isolate()->IsDebuggerActive()) { | |
340 // Detect debug break requests as soon as possible. | |
341 reset_value = FLAG_interrupt_budget >> 4; | |
342 } | |
343 __ Mov(x2, Operand(profiling_counter_)); | |
344 __ Mov(x3, Smi::FromInt(reset_value)); | |
345 __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset)); | |
346 } | |
347 | |
348 | |
349 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | |
350 Label* back_edge_target) { | |
351 ASSERT(jssp.Is(__ StackPointer())); | |
352 Comment cmnt(masm_, "[ Back edge bookkeeping"); | |
353 // Block literal pools whilst emitting back edge code. | |
354 Assembler::BlockPoolsScope block_const_pool(masm_); | |
355 Label ok; | |
356 | |
357 ASSERT(back_edge_target->is_bound()); | |
358 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | |
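// Weight the counter decrement by the code size of the loop body, clamped to
// kMaxBackEdgeWeight, so larger loops consume more of the interrupt budget
// per back edge.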
359 int weight = Min(kMaxBackEdgeWeight, | |
360 Max(1, distance / kCodeSizeMultiplier)); | |
361 EmitProfilingCounterDecrement(weight); | |
362 __ B(pl, &ok); | |
363 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | |
364 | |
365 // Record a mapping of this PC offset to the OSR id. This is used to find | |
366 // the AST id from the unoptimized code in order to use it as a key into | |
367 // the deoptimization input data found in the optimized code. | |
368 RecordBackEdge(stmt->OsrEntryId()); | |
369 | |
370 EmitProfilingCounterReset(); | |
371 | |
372 __ Bind(&ok); | |
373 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | |
374 // Record a mapping of the OSR id to this PC. This is used if the OSR | |
375 // entry becomes the target of a bailout. We don't expect it to be, but | |
376 // we want it to work if it is. | |
377 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | |
378 } | |
379 | |
380 | |
381 void FullCodeGenerator::EmitReturnSequence() { | |
382 Comment cmnt(masm_, "[ Return sequence"); | |
383 | |
384 if (return_label_.is_bound()) { | |
385 __ B(&return_label_); | |
386 | |
387 } else { | |
388 __ Bind(&return_label_); | |
389 if (FLAG_trace) { | |
390 // Push the return value on the stack as the parameter. | |
391 // Runtime::TraceExit returns its parameter in x0. | |
392 __ Push(result_register()); | |
393 __ CallRuntime(Runtime::kTraceExit, 1); | |
394 ASSERT(x0.Is(result_register())); | |
395 } | |
396 // Pretend that the exit is a backwards jump to the entry. | |
397 int weight = 1; | |
398 if (info_->ShouldSelfOptimize()) { | |
399 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | |
400 } else { | |
401 int distance = masm_->pc_offset(); | |
402 weight = Min(kMaxBackEdgeWeight, | |
403 Max(1, distance / kCodeSizeMultiplier)); | |
404 } | |
405 EmitProfilingCounterDecrement(weight); | |
406 Label ok; | |
407 __ B(pl, &ok); | |
408 __ Push(x0); | |
409 __ Call(isolate()->builtins()->InterruptCheck(), | |
410 RelocInfo::CODE_TARGET); | |
411 __ Pop(x0); | |
412 EmitProfilingCounterReset(); | |
413 __ Bind(&ok); | |
414 | |
415 // Make sure that the constant pool is not emitted inside of the return | |
416 // sequence. This sequence can get patched when the debugger is used. See | |
417 // debug-a64.cc:BreakLocationIterator::SetDebugBreakAtReturn(). | |
418 { | |
419 InstructionAccurateScope scope(masm_, | |
420 Assembler::kJSRetSequenceInstructions); | |
421 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | |
422 __ RecordJSReturn(); | |
423 // This code is generated using Assembler methods rather than Macro | |
424 // Assembler methods because it will be patched later on, and so the size | |
425 // of the generated code must be consistent. | |
426 const Register& current_sp = __ StackPointer(); | |
427 // Nothing ensures 16-byte alignment here. | |
428 ASSERT(!current_sp.Is(csp)); | |
429 __ mov(current_sp, fp); | |
430 int no_frame_start = masm_->pc_offset(); | |
431 __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex)); | |
432 // Drop the arguments and receiver and return. | |
433 // TODO(all): This implementation is overkill as it supports 2**31+1 | |
434 // arguments, consider how to improve it without creating a security | |
435 // hole. | |
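// ip0 is loaded from the 64-bit literal emitted three instructions below
// (the dc64), which holds the number of bytes to drop for the arguments and
// the receiver.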
436 __ LoadLiteral(ip0, 3 * kInstructionSize); | |
437 __ add(current_sp, current_sp, ip0); | |
438 __ ret(); | |
439 __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1)); | |
440 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); | |
441 } | |
442 } | |
443 } | |
444 | |
445 | |
446 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { | |
447 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | |
448 } | |
449 | |
450 | |
451 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { | |
452 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | |
453 codegen()->GetVar(result_register(), var); | |
454 } | |
455 | |
456 | |
457 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { | |
458 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | |
459 codegen()->GetVar(result_register(), var); | |
460 __ Push(result_register()); | |
461 } | |
462 | |
463 | |
464 void FullCodeGenerator::TestContext::Plug(Variable* var) const { | |
465 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | |
466 // For simplicity we always test the accumulator register. | |
467 codegen()->GetVar(result_register(), var); | |
468 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); | |
469 codegen()->DoTest(this); | |
470 } | |
471 | |
472 | |
473 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { | |
474 // Root values have no side effects. | |
475 } | |
476 | |
477 | |
478 void FullCodeGenerator::AccumulatorValueContext::Plug( | |
479 Heap::RootListIndex index) const { | |
480 __ LoadRoot(result_register(), index); | |
481 } | |
482 | |
483 | |
484 void FullCodeGenerator::StackValueContext::Plug( | |
485 Heap::RootListIndex index) const { | |
486 __ LoadRoot(result_register(), index); | |
487 __ Push(result_register()); | |
488 } | |
489 | |
490 | |
491 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { | |
492 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, | |
493 false_label_); | |
494 if (index == Heap::kUndefinedValueRootIndex || | |
495 index == Heap::kNullValueRootIndex || | |
496 index == Heap::kFalseValueRootIndex) { | |
497 if (false_label_ != fall_through_) __ B(false_label_); | |
498 } else if (index == Heap::kTrueValueRootIndex) { | |
499 if (true_label_ != fall_through_) __ B(true_label_); | |
500 } else { | |
501 __ LoadRoot(result_register(), index); | |
502 codegen()->DoTest(this); | |
503 } | |
504 } | |
505 | |
506 | |
507 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const { | |
508 } | |
509 | |
510 | |
511 void FullCodeGenerator::AccumulatorValueContext::Plug( | |
512 Handle<Object> lit) const { | |
513 __ Mov(result_register(), Operand(lit)); | |
514 } | |
515 | |
516 | |
517 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { | |
518 // Immediates cannot be pushed directly. | |
519 __ Mov(result_register(), Operand(lit)); | |
520 __ Push(result_register()); | |
521 } | |
522 | |
523 | |
524 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { | |
525 codegen()->PrepareForBailoutBeforeSplit(condition(), | |
526 true, | |
527 true_label_, | |
528 false_label_); | |
529 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals. | |
530 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { | |
531 if (false_label_ != fall_through_) __ B(false_label_); | |
532 } else if (lit->IsTrue() || lit->IsJSObject()) { | |
533 if (true_label_ != fall_through_) __ B(true_label_); | |
534 } else if (lit->IsString()) { | |
535 if (String::cast(*lit)->length() == 0) { | |
536 if (false_label_ != fall_through_) __ B(false_label_); | |
537 } else { | |
538 if (true_label_ != fall_through_) __ B(true_label_); | |
539 } | |
540 } else if (lit->IsSmi()) { | |
541 if (Smi::cast(*lit)->value() == 0) { | |
542 if (false_label_ != fall_through_) __ B(false_label_); | |
543 } else { | |
544 if (true_label_ != fall_through_) __ B(true_label_); | |
545 } | |
546 } else { | |
547 // For simplicity we always test the accumulator register. | |
548 __ Mov(result_register(), Operand(lit)); | |
549 codegen()->DoTest(this); | |
550 } | |
551 } | |
552 | |
553 | |
554 void FullCodeGenerator::EffectContext::DropAndPlug(int count, | |
555 Register reg) const { | |
556 ASSERT(count > 0); | |
557 __ Drop(count); | |
558 } | |
559 | |
560 | |
561 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( | |
562 int count, | |
563 Register reg) const { | |
564 ASSERT(count > 0); | |
565 __ Drop(count); | |
566 __ Move(result_register(), reg); | |
567 } | |
568 | |
569 | |
570 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, | |
571 Register reg) const { | |
572 ASSERT(count > 0); | |
573 if (count > 1) __ Drop(count - 1); | |
574 __ Poke(reg, 0); | |
575 } | |
576 | |
577 | |
578 void FullCodeGenerator::TestContext::DropAndPlug(int count, | |
579 Register reg) const { | |
580 ASSERT(count > 0); | |
581 // For simplicity we always test the accumulator register. | |
582 __ Drop(count); | |
583 __ Mov(result_register(), reg); | |
584 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL); | |
585 codegen()->DoTest(this); | |
586 } | |
587 | |
588 | |
589 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, | |
590 Label* materialize_false) const { | |
591 ASSERT(materialize_true == materialize_false); | |
592 __ Bind(materialize_true); | |
593 } | |
594 | |
595 | |
596 void FullCodeGenerator::AccumulatorValueContext::Plug( | |
597 Label* materialize_true, | |
598 Label* materialize_false) const { | |
599 Label done; | |
600 __ Bind(materialize_true); | |
601 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); | |
602 __ B(&done); | |
603 __ Bind(materialize_false); | |
604 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); | |
605 __ Bind(&done); | |
606 } | |
607 | |
608 | |
609 void FullCodeGenerator::StackValueContext::Plug( | |
610 Label* materialize_true, | |
611 Label* materialize_false) const { | |
612 Label done; | |
613 __ Bind(materialize_true); | |
614 __ LoadRoot(x10, Heap::kTrueValueRootIndex); | |
615 __ B(&done); | |
616 __ Bind(materialize_false); | |
617 __ LoadRoot(x10, Heap::kFalseValueRootIndex); | |
618 __ Bind(&done); | |
619 __ Push(x10); | |
620 } | |
621 | |
622 | |
623 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, | |
624 Label* materialize_false) const { | |
625 ASSERT(materialize_true == true_label_); | |
626 ASSERT(materialize_false == false_label_); | |
627 } | |
628 | |
629 | |
630 void FullCodeGenerator::EffectContext::Plug(bool flag) const { | |
631 } | |
632 | |
633 | |
634 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { | |
635 Heap::RootListIndex value_root_index = | |
636 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; | |
637 __ LoadRoot(result_register(), value_root_index); | |
638 } | |
639 | |
640 | |
641 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { | |
642 Heap::RootListIndex value_root_index = | |
643 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; | |
644 __ LoadRoot(x10, value_root_index); | |
645 __ Push(x10); | |
646 } | |
647 | |
648 | |
649 void FullCodeGenerator::TestContext::Plug(bool flag) const { | |
650 codegen()->PrepareForBailoutBeforeSplit(condition(), | |
651 true, | |
652 true_label_, | |
653 false_label_); | |
654 if (flag) { | |
655 if (true_label_ != fall_through_) { | |
656 __ B(true_label_); | |
657 } | |
658 } else { | |
659 if (false_label_ != fall_through_) { | |
660 __ B(false_label_); | |
661 } | |
662 } | |
663 } | |
664 | |
665 | |
666 void FullCodeGenerator::DoTest(Expression* condition, | |
667 Label* if_true, | |
668 Label* if_false, | |
669 Label* fall_through) { | |
670 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); | |
671 CallIC(ic, condition->test_id()); | |
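// The ToBoolean IC leaves zero in the result register for false and a
// non-zero value for true, so comparing against zero implements the split.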
672 __ CompareAndSplit(result_register(), 0, ne, if_true, if_false, fall_through); | |
673 } | |
674 | |
675 | |
676 // If (cond), branch to if_true. | |
677 // If (!cond), branch to if_false. | |
678 // fall_through is used as an optimization in cases where only one branch | |
679 // instruction is necessary. | |
680 void FullCodeGenerator::Split(Condition cond, | |
681 Label* if_true, | |
682 Label* if_false, | |
683 Label* fall_through) { | |
684 if (if_false == fall_through) { | |
685 __ B(cond, if_true); | |
686 } else if (if_true == fall_through) { | |
687 ASSERT(if_false != fall_through); | |
688 __ B(InvertCondition(cond), if_false); | |
689 } else { | |
690 __ B(cond, if_true); | |
691 __ B(if_false); | |
692 } | |
693 } | |
694 | |
695 | |
696 MemOperand FullCodeGenerator::StackOperand(Variable* var) { | |
697 // Offset is negative because higher indexes are at lower addresses. | |
698 int offset = -var->index() * kXRegSize; | |
699 // Adjust by a (parameter or local) base offset. | |
700 if (var->IsParameter()) { | |
701 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; | |
702 } else { | |
703 offset += JavaScriptFrameConstants::kLocal0Offset; | |
704 } | |
705 return MemOperand(fp, offset); | |
706 } | |
707 | |
708 | |
709 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { | |
710 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | |
711 if (var->IsContextSlot()) { | |
712 int context_chain_length = scope()->ContextChainLength(var->scope()); | |
713 __ LoadContext(scratch, context_chain_length); | |
714 return ContextMemOperand(scratch, var->index()); | |
715 } else { | |
716 return StackOperand(var); | |
717 } | |
718 } | |
719 | |
720 | |
721 void FullCodeGenerator::GetVar(Register dest, Variable* var) { | |
722 // Use destination as scratch. | |
723 MemOperand location = VarOperand(var, dest); | |
724 __ Ldr(dest, location); | |
725 } | |
726 | |
727 | |
728 void FullCodeGenerator::SetVar(Variable* var, | |
729 Register src, | |
730 Register scratch0, | |
731 Register scratch1) { | |
732 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | |
733 ASSERT(!AreAliased(src, scratch0, scratch1)); | |
734 MemOperand location = VarOperand(var, scratch0); | |
735 __ Str(src, location); | |
736 | |
737 // Emit the write barrier code if the location is in the heap. | |
738 if (var->IsContextSlot()) { | |
739 // scratch0 contains the correct context. | |
740 __ RecordWriteContextSlot(scratch0, | |
741 location.offset(), | |
742 src, | |
743 scratch1, | |
744 kLRHasBeenSaved, | |
745 kDontSaveFPRegs); | |
746 } | |
747 } | |
748 | |
749 | |
750 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, | |
751 bool should_normalize, | |
752 Label* if_true, | |
753 Label* if_false) { | |
754 // Only prepare for bailouts before splits if we're in a test | |
755 // context. Otherwise, we let the Visit function deal with the | |
756 // preparation to avoid preparing with the same AST id twice. | |
757 if (!context()->IsTest() || !info_->IsOptimizable()) return; | |
758 | |
759 // TODO(all): Investigate to see if there is something to work on here. | |
760 Label skip; | |
761 if (should_normalize) { | |
762 __ B(&skip); | |
763 } | |
764 PrepareForBailout(expr, TOS_REG); | |
765 if (should_normalize) { | |
766 __ CompareRoot(x0, Heap::kTrueValueRootIndex); | |
767 Split(eq, if_true, if_false, NULL); | |
768 __ Bind(&skip); | |
769 } | |
770 } | |
771 | |
772 | |
773 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { | |
774 // The variable in the declaration always resides in the current function | |
775 // context. | |
776 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); | |
777 if (generate_debug_code_) { | |
778 // Check that we're not inside a with or catch context. | |
779 __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset)); | |
780 __ CompareRoot(x1, Heap::kWithContextMapRootIndex); | |
781 __ Check(ne, kDeclarationInWithContext); | |
782 __ CompareRoot(x1, Heap::kCatchContextMapRootIndex); | |
783 __ Check(ne, kDeclarationInCatchContext); | |
784 } | |
785 } | |
786 | |
787 | |
788 void FullCodeGenerator::VisitVariableDeclaration( | |
789 VariableDeclaration* declaration) { | |
790 // If it was not possible to allocate the variable at compile time, we | |
791 // need to "declare" it at runtime to make sure it actually exists in the | |
792 // local context. | |
793 VariableProxy* proxy = declaration->proxy(); | |
794 VariableMode mode = declaration->mode(); | |
795 Variable* variable = proxy->var(); | |
796 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; | |
797 | |
798 switch (variable->location()) { | |
799 case Variable::UNALLOCATED: | |
800 globals_->Add(variable->name(), zone()); | |
801 globals_->Add(variable->binding_needs_init() | |
802 ? isolate()->factory()->the_hole_value() | |
803 : isolate()->factory()->undefined_value(), | |
804 zone()); | |
805 break; | |
806 | |
807 case Variable::PARAMETER: | |
808 case Variable::LOCAL: | |
809 if (hole_init) { | |
810 Comment cmnt(masm_, "[ VariableDeclaration"); | |
811 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex); | |
812 __ Str(x10, StackOperand(variable)); | |
813 } | |
814 break; | |
815 | |
816 case Variable::CONTEXT: | |
817 if (hole_init) { | |
818 Comment cmnt(masm_, "[ VariableDeclaration"); | |
819 EmitDebugCheckDeclarationContext(variable); | |
820 __ LoadRoot(x10, Heap::kTheHoleValueRootIndex); | |
821 __ Str(x10, ContextMemOperand(cp, variable->index())); | |
822 // No write barrier since the_hole_value is in old space. | |
823 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | |
824 } | |
825 break; | |
826 | |
827 case Variable::LOOKUP: { | |
828 Comment cmnt(masm_, "[ VariableDeclaration"); | |
829 __ Mov(x2, Operand(variable->name())); | |
830 // Declaration nodes are always introduced in one of four modes. | |
831 ASSERT(IsDeclaredVariableMode(mode)); | |
832 PropertyAttributes attr = IsImmutableVariableMode(mode) ? READ_ONLY | |
833 : NONE; | |
834 __ Mov(x1, Smi::FromInt(attr)); | |
835 // Push initial value, if any. | |
836 // Note: For variables we must not push an initial value (such as | |
837 // 'undefined') because we may have a (legal) redeclaration and we | |
838 // must not destroy the current value. | |
839 if (hole_init) { | |
840 __ LoadRoot(x0, Heap::kTheHoleValueRootIndex); | |
841 __ Push(cp, x2, x1, x0); | |
842 } else { | |
843 // Pushing 0 (xzr) indicates no initial value. | |
844 __ Push(cp, x2, x1, xzr); | |
845 } | |
846 __ CallRuntime(Runtime::kDeclareContextSlot, 4); | |
847 break; | |
848 } | |
849 } | |
850 } | |
851 | |
852 | |
853 void FullCodeGenerator::VisitFunctionDeclaration( | |
854 FunctionDeclaration* declaration) { | |
855 VariableProxy* proxy = declaration->proxy(); | |
856 Variable* variable = proxy->var(); | |
857 switch (variable->location()) { | |
858 case Variable::UNALLOCATED: { | |
859 globals_->Add(variable->name(), zone()); | |
860 Handle<SharedFunctionInfo> function = | |
861 Compiler::BuildFunctionInfo(declaration->fun(), script()); | |
862 // Check for stack overflow exception. | |
863 if (function.is_null()) return SetStackOverflow(); | |
864 globals_->Add(function, zone()); | |
865 break; | |
866 } | |
867 | |
868 case Variable::PARAMETER: | |
869 case Variable::LOCAL: { | |
870 Comment cmnt(masm_, "[ Function Declaration"); | |
871 VisitForAccumulatorValue(declaration->fun()); | |
872 __ Str(result_register(), StackOperand(variable)); | |
873 break; | |
874 } | |
875 | |
876 case Variable::CONTEXT: { | |
877 Comment cmnt(masm_, "[ Function Declaration"); | |
878 EmitDebugCheckDeclarationContext(variable); | |
879 VisitForAccumulatorValue(declaration->fun()); | |
880 __ Str(result_register(), ContextMemOperand(cp, variable->index())); | |
881 int offset = Context::SlotOffset(variable->index()); | |
882 // We know that we have written a function, which is not a smi. | |
883 __ RecordWriteContextSlot(cp, | |
884 offset, | |
885 result_register(), | |
886 x2, | |
887 kLRHasBeenSaved, | |
888 kDontSaveFPRegs, | |
889 EMIT_REMEMBERED_SET, | |
890 OMIT_SMI_CHECK); | |
891 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | |
892 break; | |
893 } | |
894 | |
895 case Variable::LOOKUP: { | |
896 Comment cmnt(masm_, "[ Function Declaration"); | |
897 __ Mov(x2, Operand(variable->name())); | |
898 __ Mov(x1, Smi::FromInt(NONE)); | |
899 __ Push(cp, x2, x1); | |
900 // Push initial value for function declaration. | |
901 VisitForStackValue(declaration->fun()); | |
902 __ CallRuntime(Runtime::kDeclareContextSlot, 4); | |
903 break; | |
904 } | |
905 } | |
906 } | |
907 | |
908 | |
909 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) { | |
910 Variable* variable = declaration->proxy()->var(); | |
911 ASSERT(variable->location() == Variable::CONTEXT); | |
912 ASSERT(variable->interface()->IsFrozen()); | |
913 | |
914 Comment cmnt(masm_, "[ ModuleDeclaration"); | |
915 EmitDebugCheckDeclarationContext(variable); | |
916 | |
917 // Load instance object. | |
918 __ LoadContext(x1, scope_->ContextChainLength(scope_->GlobalScope())); | |
919 __ Ldr(x1, ContextMemOperand(x1, variable->interface()->Index())); | |
920 __ Ldr(x1, ContextMemOperand(x1, Context::EXTENSION_INDEX)); | |
921 | |
922 // Assign it. | |
923 __ Str(x1, ContextMemOperand(cp, variable->index())); | |
924 // We know that we have written a module, which is not a smi. | |
925 __ RecordWriteContextSlot(cp, | |
926 Context::SlotOffset(variable->index()), | |
927 x1, | |
928 x3, | |
929 kLRHasBeenSaved, | |
930 kDontSaveFPRegs, | |
931 EMIT_REMEMBERED_SET, | |
932 OMIT_SMI_CHECK); | |
933 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS); | |
934 | |
935 // Traverse the module body. | |
936 Visit(declaration->module()); | |
937 } | |
938 | |
939 | |
940 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) { | |
941 VariableProxy* proxy = declaration->proxy(); | |
942 Variable* variable = proxy->var(); | |
943 switch (variable->location()) { | |
944 case Variable::UNALLOCATED: | |
945 // TODO(rossberg) | |
946 break; | |
947 | |
948 case Variable::CONTEXT: { | |
949 Comment cmnt(masm_, "[ ImportDeclaration"); | |
950 EmitDebugCheckDeclarationContext(variable); | |
951 // TODO(rossberg) | |
952 break; | |
953 } | |
954 | |
955 case Variable::PARAMETER: | |
956 case Variable::LOCAL: | |
957 case Variable::LOOKUP: | |
958 UNREACHABLE(); | |
959 } | |
960 } | |
961 | |
962 | |
963 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) { | |
964 // TODO(rossberg) | |
965 } | |
966 | |
967 | |
968 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | |
969 // Call the runtime to declare the globals. | |
970 __ Mov(x11, Operand(pairs)); | |
971 Register flags = xzr; | |
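// When the flags are zero, push xzr directly rather than materialising a
// Smi zero in a register.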
972 if (Smi::FromInt(DeclareGlobalsFlags())) { | |
973 flags = x10; | |
974 __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags())); | |
975 } | |
976 __ Push(cp, x11, flags); | |
977 __ CallRuntime(Runtime::kDeclareGlobals, 3); | |
978 // Return value is ignored. | |
979 } | |
980 | |
981 | |
982 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { | |
983 // Call the runtime to declare the modules. | |
984 __ Push(descriptions); | |
985 __ CallRuntime(Runtime::kDeclareModules, 1); | |
986 // Return value is ignored. | |
987 } | |
988 | |
989 | |
990 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { | |
991 ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement"); | |
992 Comment cmnt(masm_, "[ SwitchStatement"); | |
993 Breakable nested_statement(this, stmt); | |
994 SetStatementPosition(stmt); | |
995 | |
996 // Keep the switch value on the stack until a case matches. | |
997 VisitForStackValue(stmt->tag()); | |
998 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | |
999 | |
1000 ZoneList<CaseClause*>* clauses = stmt->cases(); | |
1001 CaseClause* default_clause = NULL; // Can occur anywhere in the list. | |
1002 | |
1003 Label next_test; // Recycled for each test. | |
1004 // Compile all the tests with branches to their bodies. | |
1005 for (int i = 0; i < clauses->length(); i++) { | |
1006 CaseClause* clause = clauses->at(i); | |
1007 clause->body_target()->Unuse(); | |
1008 | |
1009 // The default is not a test, but remember it as final fall through. | |
1010 if (clause->is_default()) { | |
1011 default_clause = clause; | |
1012 continue; | |
1013 } | |
1014 | |
1015 Comment cmnt(masm_, "[ Case comparison"); | |
1016 __ Bind(&next_test); | |
1017 next_test.Unuse(); | |
1018 | |
1019 // Compile the label expression. | |
1020 VisitForAccumulatorValue(clause->label()); | |
1021 | |
1022 // Perform the comparison as if via '==='. | |
1023 __ Peek(x1, 0); // Switch value. | |
1024 | |
1025 JumpPatchSite patch_site(masm_); | |
1026 if (ShouldInlineSmiCase(Token::EQ_STRICT)) { | |
1027 Label slow_case; | |
1028 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case); | |
1029 __ Cmp(x1, x0); | |
1030 __ B(ne, &next_test); | |
1031 __ Drop(1); // Switch value is no longer needed. | |
1032 __ B(clause->body_target()); | |
1033 __ Bind(&slow_case); | |
1034 } | |
1035 | |
1036 // Record position before stub call for type feedback. | |
1037 SetSourcePosition(clause->position()); | |
1038 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT); | |
1039 CallIC(ic, clause->CompareId()); | |
1040 patch_site.EmitPatchInfo(); | |
1041 | |
1042 Label skip; | |
1043 __ B(&skip); | |
1044 PrepareForBailout(clause, TOS_REG); | |
1045 __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test); | |
1046 __ Drop(1); | |
1047 __ B(clause->body_target()); | |
1048 __ Bind(&skip); | |
1049 | |
1050 __ Cbnz(x0, &next_test); | |
1051 __ Drop(1); // Switch value is no longer needed. | |
1052 __ B(clause->body_target()); | |
1053 } | |
1054 | |
1055 // Discard the test value and jump to the default if present, otherwise to | |
1056 // the end of the statement. | |
1057 __ Bind(&next_test); | |
1058 __ Drop(1); // Switch value is no longer needed. | |
1059 if (default_clause == NULL) { | |
1060 __ B(nested_statement.break_label()); | |
1061 } else { | |
1062 __ B(default_clause->body_target()); | |
1063 } | |
1064 | |
1065 // Compile all the case bodies. | |
1066 for (int i = 0; i < clauses->length(); i++) { | |
1067 Comment cmnt(masm_, "[ Case body"); | |
1068 CaseClause* clause = clauses->at(i); | |
1069 __ Bind(clause->body_target()); | |
1070 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); | |
1071 VisitStatements(clause->statements()); | |
1072 } | |
1073 | |
1074 __ Bind(nested_statement.break_label()); | |
1075 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); | |
1076 } | |
1077 | |
1078 | |
1079 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { | |
1080 ASM_LOCATION("FullCodeGenerator::VisitForInStatement"); | |
1081 Comment cmnt(masm_, "[ ForInStatement"); | |
1082 int slot = stmt->ForInFeedbackSlot(); | |
1083 // TODO(all): This visitor probably needs better comments and a revisit. | |
1084 SetStatementPosition(stmt); | |
1085 | |
1086 Label loop, exit; | |
1087 ForIn loop_statement(this, stmt); | |
1088 increment_loop_depth(); | |
1089 | |
1090 // Get the object to enumerate over. If the object is null or undefined, skip | |
1091 // over the loop. See ECMA-262 version 5, section 12.6.4. | |
1092 VisitForAccumulatorValue(stmt->enumerable()); | |
1093 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit); | |
1094 Register null_value = x15; | |
1095 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | |
1096 __ Cmp(x0, null_value); | |
1097 __ B(eq, &exit); | |
1098 | |
1099 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); | |
1100 | |
1101 // Convert the object to a JS object. | |
1102 Label convert, done_convert; | |
1103 __ JumpIfSmi(x0, &convert); | |
1104 __ JumpIfObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE, &done_convert, ge); | |
1105 __ Bind(&convert); | |
1106 __ Push(x0); | |
1107 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | |
1108 __ Bind(&done_convert); | |
1109 __ Push(x0); | |
1110 | |
1111 // Check for proxies. | |
1112 Label call_runtime; | |
1113 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | |
1114 __ JumpIfObjectType(x0, x10, x11, LAST_JS_PROXY_TYPE, &call_runtime, le); | |
1115 | |
1116 // Check cache validity in generated code. This is a fast case for | |
1117 // the JSObject::IsSimpleEnum cache validity checks. If we cannot | |
1118 // guarantee cache validity, call the runtime system to check cache | |
1119 // validity or get the property names in a fixed array. | |
1120 __ CheckEnumCache(x0, null_value, x10, x11, x12, x13, &call_runtime); | |
1121 | |
1122 // The enum cache is valid. Load the map of the object being | |
1123 // iterated over and use the cache for the iteration. | |
1124 Label use_cache; | |
1125 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); | |
1126 __ B(&use_cache); | |
1127 | |
1128 // Get the set of properties to enumerate. | |
1129 __ Bind(&call_runtime); | |
1130 __ Push(x0); // Duplicate the enumerable object on the stack. | |
1131 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); | |
1132 | |
1133 // If we got a map from the runtime call, we can do a fast | |
1134 // modification check. Otherwise, we got a fixed array, and we have | |
1135 // to do a slow check. | |
1136 Label fixed_array, no_descriptors; | |
1137 __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset)); | |
1138 __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array); | |
1139 | |
1140 // We got a map in register x0. Get the enumeration cache from it. | |
1141 __ Bind(&use_cache); | |
1142 | |
1143 __ EnumLengthUntagged(x1, x0); | |
1144 __ Cbz(x1, &no_descriptors); | |
1145 | |
1146 __ LoadInstanceDescriptors(x0, x2); | |
1147 __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset)); | |
1148 __ Ldr(x2, | |
1149 FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset)); | |
1150 | |
1151 // Set up the four remaining stack slots. | |
1152 __ Push(x0); // Map. | |
1153 __ Mov(x0, Smi::FromInt(0)); | |
1154 // Push enumeration cache, enumeration cache length (as smi) and zero. | |
1155 __ SmiTag(x1); | |
1156 __ Push(x2, x1, x0); | |
1157 __ B(&loop); | |
1158 | |
1159 __ Bind(&no_descriptors); | |
1160 __ Drop(1); | |
1161 __ B(&exit); | |
1162 | |
1163 // We got a fixed array in register x0. Iterate through that. | |
1164 __ Bind(&fixed_array); | |
1165 | |
1166 Handle<Object> feedback = Handle<Object>( | |
1167 Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker), | |
1168 isolate()); | |
1169 StoreFeedbackVectorSlot(slot, feedback); | |
1170 __ LoadObject(x1, FeedbackVector()); | |
1171 __ Mov(x10, Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)); | |
1172 __ Str(x10, FieldMemOperand(x1, FixedArray::OffsetOfElementAt(slot))); | |
1173 | |
1174 __ Mov(x1, Smi::FromInt(1)); // Smi indicates slow check. | |
1175 __ Peek(x10, 0); // Get enumerated object. | |
1176 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | |
1177 // TODO(all): similar check was done already. Can we avoid it here? | |
1178 __ CompareObjectType(x10, x11, x12, LAST_JS_PROXY_TYPE); | |
1179 ASSERT(Smi::FromInt(0) == 0); | |
1180 __ CzeroX(x1, le); // Zero indicates proxy. | |
1181 __ Push(x1, x0); // Smi and array | |
1182 __ Ldr(x1, FieldMemOperand(x0, FixedArray::kLengthOffset)); | |
1183 __ Push(x1, xzr); // Fixed array length (as smi) and initial index. | |
1184 | |
1185 // Generate code for doing the condition check. | |
1186 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); | |
1187 __ Bind(&loop); | |
1188 // Load the current count to x0, load the length to x1. | |
1189 __ PeekPair(x0, x1, 0); | |
1190 __ Cmp(x0, x1); // Compare to the array length. | |
1191 __ B(hs, loop_statement.break_label()); | |
1192 | |
1193 // Get the current entry of the array into register x3. | |
1194 __ Peek(x10, 2 * kXRegSize); | |
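// The current index is a smi; untag it and scale by the pointer size to form
// the byte offset of the element.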
1195 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2)); | |
1196 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag)); | |
1197 | |
1198 // Get the expected map from the stack or a smi in the | |
1199 // permanent slow case into register x2. | |
1200 __ Peek(x2, 3 * kXRegSize); | |
1201 | |
1202 // Check if the expected map still matches that of the enumerable. | |
1203 // If not, we may have to filter the key. | |
1204 Label update_each; | |
1205 __ Peek(x1, 4 * kXRegSize); | |
1206 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset)); | |
1207 __ Cmp(x11, x2); | |
1208 __ B(eq, &update_each); | |
1209 | |
1210 // For proxies, no filtering is done. | |
1211 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. | |
1212 STATIC_ASSERT(kSmiTag == 0); | |
1213 __ Cbz(x2, &update_each); | |
1214 | |
1215 // Convert the entry to a string or (smi) 0 if it isn't a property | |
1216 // any more. If the property has been removed while iterating, we | |
1217 // just skip it. | |
1218 __ Push(x1, x3); | |
1219 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); | |
1220 __ Mov(x3, x0); | |
1221 __ Cbz(x0, loop_statement.continue_label()); | |
1222 | |
1223 // Update the 'each' property or variable from the possibly filtered | |
1224 // entry in register x3. | |
1225 __ Bind(&update_each); | |
1226 __ Mov(result_register(), x3); | |
1227 // Perform the assignment as if via '='. | |
1228 { EffectContext context(this); | |
1229 EmitAssignment(stmt->each()); | |
1230 } | |
1231 | |
1232 // Generate code for the body of the loop. | |
1233 Visit(stmt->body()); | |
1234 | |
1235 // Generate code for going to the next element by incrementing | |
1236 // the index (smi) stored on top of the stack. | |
1237 __ Bind(loop_statement.continue_label()); | |
1238 // TODO(all): We could use a callee saved register to avoid popping. | |
1239 __ Pop(x0); | |
1240 __ Add(x0, x0, Smi::FromInt(1)); | |
1241 __ Push(x0); | |
1242 | |
1243 EmitBackEdgeBookkeeping(stmt, &loop); | |
1244 __ B(&loop); | |
1245 | |
1246 // Remove the pointers stored on the stack. | |
1247 __ Bind(loop_statement.break_label()); | |
1248 __ Drop(5); | |
1249 | |
1250 // Exit and decrement the loop depth. | |
1251 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); | |
1252 __ Bind(&exit); | |
1253 decrement_loop_depth(); | |
1254 } | |
1255 | |
1256 | |
1257 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) { | |
1258 Comment cmnt(masm_, "[ ForOfStatement"); | |
1259 SetStatementPosition(stmt); | |
1260 | |
1261 Iteration loop_statement(this, stmt); | |
1262 increment_loop_depth(); | |
1263 | |
1264 // var iterator = iterable[@@iterator]() | |
1265 VisitForAccumulatorValue(stmt->assign_iterator()); | |
1266 | |
1267 // As with for-in, skip the loop if the iterator is null or undefined. | |
1268 Register iterator = x0; | |
1269 __ JumpIfRoot(iterator, Heap::kUndefinedValueRootIndex, | |
1270 loop_statement.break_label()); | |
1271 __ JumpIfRoot(iterator, Heap::kNullValueRootIndex, | |
1272 loop_statement.break_label()); | |
1273 | |
1274 // Convert the iterator to a JS object. | |
1275 Label convert, done_convert; | |
1276 __ JumpIfSmi(iterator, &convert); | |
1277 __ CompareObjectType(iterator, x1, x1, FIRST_SPEC_OBJECT_TYPE); | |
1278 __ B(ge, &done_convert); | |
1279 __ Bind(&convert); | |
1280 __ Push(iterator); | |
1281 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | |
1282 __ Bind(&done_convert); | |
1283 __ Push(iterator); | |
1284 | |
1285 // Loop entry. | |
1286 __ Bind(loop_statement.continue_label()); | |
1287 | |
1288 // result = iterator.next() | |
1289 VisitForEffect(stmt->next_result()); | |
1290 | |
1291 // if (result.done) break; | |
1292 Label result_not_done; | |
1293 VisitForControl(stmt->result_done(), | |
1294 loop_statement.break_label(), | |
1295 &result_not_done, | |
1296 &result_not_done); | |
1297 __ Bind(&result_not_done); | |
1298 | |
1299 // each = result.value | |
1300 VisitForEffect(stmt->assign_each()); | |
1301 | |
1302 // Generate code for the body of the loop. | |
1303 Visit(stmt->body()); | |
1304 | |
1305 // Check stack before looping. | |
1306 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS); | |
1307 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label()); | |
1308 __ B(loop_statement.continue_label()); | |
1309 | |
1310 // Exit and decrement the loop depth. | |
1311 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); | |
1312 __ Bind(loop_statement.break_label()); | |
1313 decrement_loop_depth(); | |
1314 } | |
1315 | |
1316 | |
1317 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, | |
1318 bool pretenure) { | |
1319 // Use the fast case closure allocation code that allocates in new space for | |
1320 // nested functions that don't need literals cloning. If we're running with | |
1321 // the --always-opt or the --prepare-always-opt flag, we need to use the | |
1322 // runtime function so that the new function we are creating here gets a | |
1323 // chance to have its code optimized and doesn't just get a copy of the | |
1324 // existing unoptimized code. | |
1325 if (!FLAG_always_opt && | |
1326 !FLAG_prepare_always_opt && | |
1327 !pretenure && | |
1328 scope()->is_function_scope() && | |
1329 info->num_literals() == 0) { | |
1330 FastNewClosureStub stub(info->strict_mode(), info->is_generator()); | |
1331 __ Mov(x2, Operand(info)); | |
1332 __ CallStub(&stub); | |
1333 } else { | |
1334 __ Mov(x11, Operand(info)); | |
1335 __ LoadRoot(x10, pretenure ? Heap::kTrueValueRootIndex | |
1336 : Heap::kFalseValueRootIndex); | |
1337 __ Push(cp, x11, x10); | |
1338 __ CallRuntime(Runtime::kNewClosure, 3); | |
1339 } | |
1340 context()->Plug(x0); | |
1341 } | |
1342 | |
1343 | |
1344 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { | |
1345 Comment cmnt(masm_, "[ VariableProxy"); | |
1346 EmitVariableLoad(expr); | |
1347 } | |
1348 | |
1349 | |
1350 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var, | |
1351 TypeofState typeof_state, | |
1352 Label* slow) { | |
1353 Register current = cp; | |
1354 Register next = x10; | |
1355 Register temp = x11; | |
1356 | |
1357 Scope* s = scope(); | |
1358 while (s != NULL) { | |
1359 if (s->num_heap_slots() > 0) { | |
1360 if (s->calls_sloppy_eval()) { | |
1361 // Check that extension is NULL. | |
1362 __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX)); | |
1363 __ Cbnz(temp, slow); | |
1364 } | |
1365 // Load next context in chain. | |
1366 __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX)); | |
1367 // Walk the rest of the chain without clobbering cp. | |
1368 current = next; | |
1369 } | |
1370 // If no outer scope calls eval, we do not need to check more | |
1371 // context extensions. | |
1372 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; | |
1373 s = s->outer_scope(); | |
1374 } | |
1375 | |
1376 if (s->is_eval_scope()) { | |
1377 Label loop, fast; | |
1378 __ Mov(next, current); | |
1379 | |
1380 __ Bind(&loop); | |
1381 // Terminate at native context. | |
1382 __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); | |
1383 __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast); | |
1384 // Check that extension is NULL. | |
1385 __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX)); | |
1386 __ Cbnz(temp, slow); | |
1387 // Load next context in chain. | |
1388 __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX)); | |
1389 __ B(&loop); | |
1390 __ Bind(&fast); | |
1391 } | |
1392 | |
1393 __ Ldr(x0, GlobalObjectMemOperand()); | |
1394 __ Mov(x2, Operand(var->name())); | |
1395 ContextualMode mode = (typeof_state == INSIDE_TYPEOF) ? NOT_CONTEXTUAL | |
1396 : CONTEXTUAL; | |
1397 CallLoadIC(mode); | |
1398 } | |
1399 | |
1400 | |
1401 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, | |
1402 Label* slow) { | |
1403 ASSERT(var->IsContextSlot()); | |
1404 Register context = cp; | |
1405 Register next = x10; | |
1406 Register temp = x11; | |
1407 | |
1408 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { | |
1409 if (s->num_heap_slots() > 0) { | |
1410 if (s->calls_sloppy_eval()) { | |
1411 // Check that extension is NULL. | |
1412 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | |
1413 __ Cbnz(temp, slow); | |
1414 } | |
1415 __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX)); | |
1416 // Walk the rest of the chain without clobbering cp. | |
1417 context = next; | |
1418 } | |
1419 } | |
1420 // Check that last extension is NULL. | |
1421 __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | |
1422 __ Cbnz(temp, slow); | |
1423 | |
1424 // This function is used only for loads, not stores, so it's safe to | |
1425 // return a cp-based operand (the write barrier cannot be allowed to | |
1426 // destroy the cp register). | |
1427 return ContextMemOperand(context, var->index()); | |
1428 } | |
1429 | |
1430 | |
1431 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var, | |
1432 TypeofState typeof_state, | |
1433 Label* slow, | |
1434 Label* done) { | |
1435 // Generate fast-case code for variables that might be shadowed by | |
1436 // eval-introduced variables. Eval is used a lot without | |
1437 // introducing variables. In those cases, we do not want to | |
1438 // perform a runtime call for all variables in the scope | |
1439 // containing the eval. | |
1440 if (var->mode() == DYNAMIC_GLOBAL) { | |
1441 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); | |
1442 __ B(done); | |
1443 } else if (var->mode() == DYNAMIC_LOCAL) { | |
1444 Variable* local = var->local_if_not_shadowed(); | |
1445 __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow)); | |
1446 if (local->mode() == LET || local->mode() == CONST || | |
1447 local->mode() == CONST_LEGACY) { | |
1448 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done); | |
1449 if (local->mode() == CONST_LEGACY) { | |
1450 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); | |
1451 } else { // LET || CONST | |
1452 __ Mov(x0, Operand(var->name())); | |
1453 __ Push(x0); | |
1454 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1455 } | |
1456 } | |
1457 __ B(done); | |
1458 } | |
1459 } | |
1460 | |
1461 | |
1462 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { | |
1463 // Record position before possible IC call. | |
1464 SetSourcePosition(proxy->position()); | |
1465 Variable* var = proxy->var(); | |
1466 | |
1467 // Three cases: global variables, lookup variables, and all other types of | |
1468 // variables. | |
1469 switch (var->location()) { | |
1470 case Variable::UNALLOCATED: { | |
1471 Comment cmnt(masm_, "Global variable"); | |
1472 // Use inline caching. Variable name is passed in x2 and the global | |
1473 // object (receiver) in x0. | |
1474 __ Ldr(x0, GlobalObjectMemOperand()); | |
1475 __ Mov(x2, Operand(var->name())); | |
1476 CallLoadIC(CONTEXTUAL); | |
1477 context()->Plug(x0); | |
1478 break; | |
1479 } | |
1480 | |
1481 case Variable::PARAMETER: | |
1482 case Variable::LOCAL: | |
1483 case Variable::CONTEXT: { | |
1484 Comment cmnt(masm_, var->IsContextSlot() | |
1485 ? "Context variable" | |
1486 : "Stack variable"); | |
1487 if (var->binding_needs_init()) { | |
1488 // var->scope() may be NULL when the proxy is located in eval code and | |
1489 // refers to a potential outside binding. Currently those bindings are | |
1490 // always looked up dynamically, i.e. in that case | |
1491 //     var->location() == LOOKUP | |
1492 // always holds. | |
1493 ASSERT(var->scope() != NULL); | |
1494 | |
1495 // Check if the binding really needs an initialization check. The check | |
1496 // can be skipped in the following situation: we have a LET or CONST | |
1497 // binding in harmony mode, both the Variable and the VariableProxy have | |
1498 // the same declaration scope (i.e. they are both in global code, in the | |
1499 // same function or in the same eval code) and the VariableProxy is in | |
1500 // the source physically located after the initializer of the variable. | |
1501 // | |
1502 // We cannot skip any initialization checks for CONST in non-harmony | |
1503 // mode because const variables may be declared but never initialized: | |
1504 // if (false) { const x; }; var y = x; | |
1505 // | |
1506 // The condition on the declaration scopes is a conservative check for | |
1507 // nested functions that access a binding and are called before the | |
1508 // binding is initialized: | |
1509 // function() { f(); let x = 1; function f() { x = 2; } } | |
1510 // | |
1511 bool skip_init_check; | |
1512 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { | |
1513 skip_init_check = false; | |
1514 } else { | |
1515 // Check that we always have valid source position. | |
1516 ASSERT(var->initializer_position() != RelocInfo::kNoPosition); | |
1517 ASSERT(proxy->position() != RelocInfo::kNoPosition); | |
1518 skip_init_check = var->mode() != CONST_LEGACY && | |
1519 var->initializer_position() < proxy->position(); | |
1520 } | |
1521 | |
1522 if (!skip_init_check) { | |
1523 // Let and const need a read barrier. | |
1524 GetVar(x0, var); | |
1525 Label done; | |
1526 __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done); | |
1527 if (var->mode() == LET || var->mode() == CONST) { | |
1528 // Throw a reference error when using an uninitialized let/const | |
1529 // binding in harmony mode. | |
1530 __ Mov(x0, Operand(var->name())); | |
1531 __ Push(x0); | |
1532 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
1533 __ Bind(&done); | |
1534 } else { | |
1535 // Uninitialized const bindings outside of harmony mode resolve to undefined. | |
1536 ASSERT(var->mode() == CONST_LEGACY); | |
1537 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); | |
1538 __ Bind(&done); | |
1539 } | |
1540 context()->Plug(x0); | |
1541 break; | |
1542 } | |
1543 } | |
1544 context()->Plug(var); | |
1545 break; | |
1546 } | |
1547 | |
1548 case Variable::LOOKUP: { | |
1549 Label done, slow; | |
1550 // Generate code for loading from variables potentially shadowed by | |
1551 // eval-introduced variables. | |
1552 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done); | |
1553 __ Bind(&slow); | |
1554 Comment cmnt(masm_, "Lookup variable"); | |
1555 __ Mov(x1, Operand(var->name())); | |
1556 __ Push(cp, x1); // Context and name. | |
1557 __ CallRuntime(Runtime::kLoadContextSlot, 2); | |
1558 __ Bind(&done); | |
1559 context()->Plug(x0); | |
1560 break; | |
1561 } | |
1562 } | |
1563 } | |
1564 | |
1565 | |
1566 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | |
1567 Comment cmnt(masm_, "[ RegExpLiteral"); | |
1568 Label materialized; | |
1569 // Registers will be used as follows: | |
1570 // x5 = materialized value (RegExp literal) | |
1571 // x4 = JS function, literals array | |
1572 // x3 = literal index | |
1573 // x2 = RegExp pattern | |
1574 // x1 = RegExp flags | |
1575 // x0 = RegExp literal clone | |
1576 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
1577 __ Ldr(x4, FieldMemOperand(x10, JSFunction::kLiteralsOffset)); | |
1578 int literal_offset = | |
1579 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; | |
1580 __ Ldr(x5, FieldMemOperand(x4, literal_offset)); | |
1581 __ JumpIfNotRoot(x5, Heap::kUndefinedValueRootIndex, &materialized); | |
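// If the literals array slot still holds undefined, the RegExp has not been | |
// materialized for this closure yet, so fall through and create it via the | |
// runtime call below. | |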
1582 | |
1583 // Create regexp literal using runtime function. | |
1584 // Result will be in x0. | |
1585 __ Mov(x3, Smi::FromInt(expr->literal_index())); | |
1586 __ Mov(x2, Operand(expr->pattern())); | |
1587 __ Mov(x1, Operand(expr->flags())); | |
1588 __ Push(x4, x3, x2, x1); | |
1589 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | |
1590 __ Mov(x5, x0); | |
1591 | |
1592 __ Bind(&materialized); | |
1593 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | |
1594 Label allocated, runtime_allocate; | |
1595 __ Allocate(size, x0, x2, x3, &runtime_allocate, TAG_OBJECT); | |
1596 __ B(&allocated); | |
1597 | |
1598 __ Bind(&runtime_allocate); | |
1599 __ Mov(x10, Smi::FromInt(size)); | |
1600 __ Push(x5, x10); | |
1601 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | |
1602 __ Pop(x5); | |
1603 | |
1604 __ Bind(&allocated); | |
1605 // After this, registers are used as follows: | |
1606 // x0: Newly allocated regexp. | |
1607 // x5: Materialized regexp. | |
1608 // x10, x11, x12: temps. | |
1609 __ CopyFields(x0, x5, CPURegList(x10, x11, x12), size / kPointerSize); | |
1610 context()->Plug(x0); | |
1611 } | |
1612 | |
1613 | |
1614 void FullCodeGenerator::EmitAccessor(Expression* expression) { | |
1615 if (expression == NULL) { | |
1616 __ LoadRoot(x10, Heap::kNullValueRootIndex); | |
1617 __ Push(x10); | |
1618 } else { | |
1619 VisitForStackValue(expression); | |
1620 } | |
1621 } | |
1622 | |
1623 | |
1624 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { | |
1625 Comment cmnt(masm_, "[ ObjectLiteral"); | |
1626 | |
1627 expr->BuildConstantProperties(isolate()); | |
1628 Handle<FixedArray> constant_properties = expr->constant_properties(); | |
1629 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
1630 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset)); | |
1631 __ Mov(x2, Smi::FromInt(expr->literal_index())); | |
1632 __ Mov(x1, Operand(constant_properties)); | |
1633 int flags = expr->fast_elements() | |
1634 ? ObjectLiteral::kFastElements | |
1635 : ObjectLiteral::kNoFlags; | |
1636 flags |= expr->has_function() | |
1637 ? ObjectLiteral::kHasFunction | |
1638 : ObjectLiteral::kNoFlags; | |
1639 __ Mov(x0, Smi::FromInt(flags)); | |
1640 int properties_count = constant_properties->length() / 2; | |
1641 const int max_cloned_properties = | |
1642 FastCloneShallowObjectStub::kMaximumClonedProperties; | |
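// The fast clone stub only handles shallow literals with fast elements and at | |
// most kMaximumClonedProperties properties; everything else (doubles, nested | |
// literals, or when the serializer is enabled) goes through the runtime. | |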
1643 if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() || | |
1644 flags != ObjectLiteral::kFastElements || | |
1645 properties_count > max_cloned_properties) { | |
1646 __ Push(x3, x2, x1, x0); | |
1647 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); | |
1648 } else { | |
1649 FastCloneShallowObjectStub stub(properties_count); | |
1650 __ CallStub(&stub); | |
1651 } | |
1652 | |
1653 // If result_saved is true the result is on top of the stack. If | |
1654 // result_saved is false the result is in x0. | |
1655 bool result_saved = false; | |
1656 | |
1657 // Mark all computed expressions that are bound to a key that | |
1658 // is shadowed by a later occurrence of the same key. For the | |
1659 // marked expressions, no store code is emitted. | |
1660 expr->CalculateEmitStore(zone()); | |
1661 | |
1662 AccessorTable accessor_table(zone()); | |
1663 for (int i = 0; i < expr->properties()->length(); i++) { | |
1664 ObjectLiteral::Property* property = expr->properties()->at(i); | |
1665 if (property->IsCompileTimeValue()) continue; | |
1666 | |
1667 Literal* key = property->key(); | |
1668 Expression* value = property->value(); | |
1669 if (!result_saved) { | |
1670 __ Push(x0); // Save result on stack | |
1671 result_saved = true; | |
1672 } | |
1673 switch (property->kind()) { | |
1674 case ObjectLiteral::Property::CONSTANT: | |
1675 UNREACHABLE(); | |
1676 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | |
1677 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); | |
1678 // Fall through. | |
1679 case ObjectLiteral::Property::COMPUTED: | |
1680 if (key->value()->IsInternalizedString()) { | |
1681 if (property->emit_store()) { | |
1682 VisitForAccumulatorValue(value); | |
1683 __ Mov(x2, Operand(key->value())); | |
1684 __ Peek(x1, 0); | |
1685 CallStoreIC(key->LiteralFeedbackId()); | |
1686 PrepareForBailoutForId(key->id(), NO_REGISTERS); | |
1687 } else { | |
1688 VisitForEffect(value); | |
1689 } | |
1690 break; | |
1691 } | |
1692 if (property->emit_store()) { | |
1693 // Duplicate receiver on stack. | |
1694 __ Peek(x0, 0); | |
1695 __ Push(x0); | |
1696 VisitForStackValue(key); | |
1697 VisitForStackValue(value); | |
1698 __ Mov(x0, Smi::FromInt(NONE)); // PropertyAttributes | |
1699 __ Push(x0); | |
1700 __ CallRuntime(Runtime::kSetProperty, 4); | |
1701 } else { | |
1702 VisitForEffect(key); | |
1703 VisitForEffect(value); | |
1704 } | |
1705 break; | |
1706 case ObjectLiteral::Property::PROTOTYPE: | |
1707 if (property->emit_store()) { | |
1708 // Duplicate receiver on stack. | |
1709 __ Peek(x0, 0); | |
1710 __ Push(x0); | |
1711 VisitForStackValue(value); | |
1712 __ CallRuntime(Runtime::kSetPrototype, 2); | |
1713 } else { | |
1714 VisitForEffect(value); | |
1715 } | |
1716 break; | |
1717 case ObjectLiteral::Property::GETTER: | |
1718 accessor_table.lookup(key)->second->getter = value; | |
1719 break; | |
1720 case ObjectLiteral::Property::SETTER: | |
1721 accessor_table.lookup(key)->second->setter = value; | |
1722 break; | |
1723 } | |
1724 } | |
1725 | |
1726 // Emit code to define accessors, using only a single call to the runtime for | |
1727 // each pair of corresponding getters and setters. | |
1728 for (AccessorTable::Iterator it = accessor_table.begin(); | |
1729 it != accessor_table.end(); | |
1730 ++it) { | |
1731 __ Peek(x10, 0); // Duplicate receiver. | |
1732 __ Push(x10); | |
1733 VisitForStackValue(it->first); | |
1734 EmitAccessor(it->second->getter); | |
1735 EmitAccessor(it->second->setter); | |
1736 __ Mov(x10, Smi::FromInt(NONE)); | |
1737 __ Push(x10); | |
1738 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5); | |
1739 } | |
1740 | |
1741 if (expr->has_function()) { | |
1742 ASSERT(result_saved); | |
1743 __ Peek(x0, 0); | |
1744 __ Push(x0); | |
1745 __ CallRuntime(Runtime::kToFastProperties, 1); | |
1746 } | |
1747 | |
1748 if (result_saved) { | |
1749 context()->PlugTOS(); | |
1750 } else { | |
1751 context()->Plug(x0); | |
1752 } | |
1753 } | |
1754 | |
1755 | |
1756 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { | |
1757 Comment cmnt(masm_, "[ ArrayLiteral"); | |
1758 | |
1759 expr->BuildConstantElements(isolate()); | |
1760 int flags = (expr->depth() == 1) ? ArrayLiteral::kShallowElements | |
1761 : ArrayLiteral::kNoFlags; | |
1762 | |
1763 ZoneList<Expression*>* subexprs = expr->values(); | |
1764 int length = subexprs->length(); | |
1765 Handle<FixedArray> constant_elements = expr->constant_elements(); | |
1766 ASSERT_EQ(2, constant_elements->length()); | |
1767 ElementsKind constant_elements_kind = | |
1768 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); | |
1769 bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind); | |
1770 Handle<FixedArrayBase> constant_elements_values( | |
1771 FixedArrayBase::cast(constant_elements->get(1))); | |
1772 | |
1773 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; | |
1774 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { | |
1775 // If the only customer of allocation sites is transitioning, then | |
1776 // we can turn it off if we don't have anywhere else to transition to. | |
1777 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | |
1778 } | |
1779 | |
1780 __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
1781 __ Ldr(x3, FieldMemOperand(x3, JSFunction::kLiteralsOffset)); | |
1782 __ Mov(x2, Smi::FromInt(expr->literal_index())); | |
1783 __ Mov(x1, Operand(constant_elements)); | |
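// Three paths: literals with copy-on-write elements use the COW clone stub, | |
// deep or oversized literals go through the runtime, and the rest use the | |
// generic shallow clone stub. | |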
1784 if (has_fast_elements && constant_elements_values->map() == | |
1785 isolate()->heap()->fixed_cow_array_map()) { | |
1786 FastCloneShallowArrayStub stub( | |
1787 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, | |
1788 allocation_site_mode, | |
1789 length); | |
1790 __ CallStub(&stub); | |
1791 __ IncrementCounter( | |
1792 isolate()->counters()->cow_arrays_created_stub(), 1, x10, x11); | |
1793 } else if ((expr->depth() > 1) || Serializer::enabled() || | |
1794 length > FastCloneShallowArrayStub::kMaximumClonedLength) { | |
1795 __ Mov(x0, Smi::FromInt(flags)); | |
1796 __ Push(x3, x2, x1, x0); | |
1797 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); | |
1798 } else { | |
1799 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || | |
1800 FLAG_smi_only_arrays); | |
1801 FastCloneShallowArrayStub::Mode mode = | |
1802 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; | |
1803 | |
1804 if (has_fast_elements) { | |
1805 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; | |
1806 } | |
1807 | |
1808 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); | |
1809 __ CallStub(&stub); | |
1810 } | |
1811 | |
1812 bool result_saved = false; // Is the result saved to the stack? | |
1813 | |
1814 // Emit code to evaluate all the non-constant subexpressions and to store | |
1815 // them into the newly cloned array. | |
1816 for (int i = 0; i < length; i++) { | |
1817 Expression* subexpr = subexprs->at(i); | |
1818 // If the subexpression is a literal or a simple materialized literal it | |
1819 // is already set in the cloned array. | |
1820 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; | |
1821 | |
1822 if (!result_saved) { | |
1823 __ Push(x0); | |
1824 __ Push(Smi::FromInt(expr->literal_index())); | |
1825 result_saved = true; | |
1826 } | |
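// The cloned array and its literal index stay on the stack; the fast path | |
// below peeks at the array, and StoreArrayLiteralElementStub expects both on | |
// the stack in the slow path. | |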
1827 VisitForAccumulatorValue(subexpr); | |
1828 | |
1829 if (IsFastObjectElementsKind(constant_elements_kind)) { | |
1830 int offset = FixedArray::kHeaderSize + (i * kPointerSize); | |
1831 __ Peek(x6, kPointerSize); // Copy of array literal. | |
1832 __ Ldr(x1, FieldMemOperand(x6, JSObject::kElementsOffset)); | |
1833 __ Str(result_register(), FieldMemOperand(x1, offset)); | |
1834 // Update the write barrier for the array store. | |
1835 __ RecordWriteField(x1, offset, result_register(), x10, | |
1836 kLRHasBeenSaved, kDontSaveFPRegs, | |
1837 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); | |
1838 } else { | |
1839 __ Mov(x3, Smi::FromInt(i)); | |
1840 StoreArrayLiteralElementStub stub; | |
1841 __ CallStub(&stub); | |
1842 } | |
1843 | |
1844 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); | |
1845 } | |
1846 | |
1847 if (result_saved) { | |
1848 __ Drop(1); // literal index | |
1849 context()->PlugTOS(); | |
1850 } else { | |
1851 context()->Plug(x0); | |
1852 } | |
1853 } | |
1854 | |
1855 | |
1856 void FullCodeGenerator::VisitAssignment(Assignment* expr) { | |
1857 ASSERT(expr->target()->IsValidLeftHandSide()); | |
1858 | |
1859 Comment cmnt(masm_, "[ Assignment"); | |
1860 | |
1861 // Left-hand side can only be a property, a global or a (parameter or local) | |
1862 // slot. | |
1863 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; | |
1864 LhsKind assign_type = VARIABLE; | |
1865 Property* property = expr->target()->AsProperty(); | |
1866 if (property != NULL) { | |
1867 assign_type = (property->key()->IsPropertyName()) | |
1868 ? NAMED_PROPERTY | |
1869 : KEYED_PROPERTY; | |
1870 } | |
1871 | |
1872 // Evaluate LHS expression. | |
1873 switch (assign_type) { | |
1874 case VARIABLE: | |
1875 // Nothing to do here. | |
1876 break; | |
1877 case NAMED_PROPERTY: | |
1878 if (expr->is_compound()) { | |
1879 // We need the receiver both on the stack and in the accumulator. | |
1880 VisitForAccumulatorValue(property->obj()); | |
1881 __ Push(result_register()); | |
1882 } else { | |
1883 VisitForStackValue(property->obj()); | |
1884 } | |
1885 break; | |
1886 case KEYED_PROPERTY: | |
1887 if (expr->is_compound()) { | |
1888 VisitForStackValue(property->obj()); | |
1889 VisitForAccumulatorValue(property->key()); | |
1890 __ Peek(x1, 0); | |
1891 __ Push(x0); | |
1892 } else { | |
1893 VisitForStackValue(property->obj()); | |
1894 VisitForStackValue(property->key()); | |
1895 } | |
1896 break; | |
1897 } | |
1898 | |
1899 // For compound assignments we need another deoptimization point after the | |
1900 // variable/property load. | |
1901 if (expr->is_compound()) { | |
1902 { AccumulatorValueContext context(this); | |
1903 switch (assign_type) { | |
1904 case VARIABLE: | |
1905 EmitVariableLoad(expr->target()->AsVariableProxy()); | |
1906 PrepareForBailout(expr->target(), TOS_REG); | |
1907 break; | |
1908 case NAMED_PROPERTY: | |
1909 EmitNamedPropertyLoad(property); | |
1910 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
1911 break; | |
1912 case KEYED_PROPERTY: | |
1913 EmitKeyedPropertyLoad(property); | |
1914 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
1915 break; | |
1916 } | |
1917 } | |
1918 | |
1919 Token::Value op = expr->binary_op(); | |
1920 __ Push(x0); // Left operand goes on the stack. | |
1921 VisitForAccumulatorValue(expr->value()); | |
1922 | |
1923 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() | |
1924 ? OVERWRITE_RIGHT | |
1925 : NO_OVERWRITE; | |
1926 SetSourcePosition(expr->position() + 1); | |
1927 AccumulatorValueContext context(this); | |
1928 if (ShouldInlineSmiCase(op)) { | |
1929 EmitInlineSmiBinaryOp(expr->binary_operation(), | |
1930 op, | |
1931 mode, | |
1932 expr->target(), | |
1933 expr->value()); | |
1934 } else { | |
1935 EmitBinaryOp(expr->binary_operation(), op, mode); | |
1936 } | |
1937 | |
1938 // Deoptimization point in case the binary operation may have side effects. | |
1939 PrepareForBailout(expr->binary_operation(), TOS_REG); | |
1940 } else { | |
1941 VisitForAccumulatorValue(expr->value()); | |
1942 } | |
1943 | |
1944 // Record source position before possible IC call. | |
1945 SetSourcePosition(expr->position()); | |
1946 | |
1947 // Store the value. | |
1948 switch (assign_type) { | |
1949 case VARIABLE: | |
1950 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), | |
1951 expr->op()); | |
1952 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
1953 context()->Plug(x0); | |
1954 break; | |
1955 case NAMED_PROPERTY: | |
1956 EmitNamedPropertyAssignment(expr); | |
1957 break; | |
1958 case KEYED_PROPERTY: | |
1959 EmitKeyedPropertyAssignment(expr); | |
1960 break; | |
1961 } | |
1962 } | |
1963 | |
1964 | |
1965 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { | |
1966 SetSourcePosition(prop->position()); | |
1967 Literal* key = prop->key()->AsLiteral(); | |
1968 __ Mov(x2, Operand(key->value())); | |
1969 // Call the load IC, which takes the receiver in x0 and the property name in x2. | |
1970 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId()); | |
1971 } | |
1972 | |
1973 | |
1974 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { | |
1975 SetSourcePosition(prop->position()); | |
1976 // Call keyed load IC. It takes the key in x0 and the receiver in x1. | |
1977 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | |
1978 CallIC(ic, prop->PropertyFeedbackId()); | |
1979 } | |
1980 | |
1981 | |
1982 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | |
1983 Token::Value op, | |
1984 OverwriteMode mode, | |
1985 Expression* left_expr, | |
1986 Expression* right_expr) { | |
1987 Label done, both_smis, stub_call; | |
1988 | |
1989 // Get the arguments. | |
1990 Register left = x1; | |
1991 Register right = x0; | |
1992 Register result = x0; | |
1993 __ Pop(left); | |
1994 | |
1995 // Perform combined smi check on both operands. | |
1996 __ Orr(x10, left, right); | |
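// The smi tag is zero, so the OR of both operands has a clear tag bit only | |
// when both operands are smis; a single test then covers both. | |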
1997 JumpPatchSite patch_site(masm_); | |
1998 patch_site.EmitJumpIfSmi(x10, &both_smis); | |
1999 | |
2000 __ Bind(&stub_call); | |
2001 BinaryOpICStub stub(op, mode); | |
2002 { | |
2003 Assembler::BlockPoolsScope scope(masm_); | |
2004 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); | |
2005 patch_site.EmitPatchInfo(); | |
2006 } | |
2007 __ B(&done); | |
2008 | |
2009 __ Bind(&both_smis); | |
2010 // Smi case. This code works in the same way as the smi-smi case in the type | |
2011 // recording binary operation stub, see | |
2012 // BinaryOpStub::GenerateSmiSmiOperation for comments. | |
2013 // TODO(all): That doesn't exist any more. Where are the comments? | |
2014 // | |
2015 // The set of operations that needs to be supported here is controlled by | |
2016 // FullCodeGenerator::ShouldInlineSmiCase(). | |
2017 switch (op) { | |
2018 case Token::SAR: | |
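// Ubfx extracts the low five bits of the shift count from the tagged right | |
// operand; Bic then re-clears the low (tag) bits of the shifted result. | |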
2019 __ Ubfx(right, right, kSmiShift, 5); | |
2020 __ Asr(result, left, right); | |
2021 __ Bic(result, result, kSmiShiftMask); | |
2022 break; | |
2023 case Token::SHL: | |
2024 __ Ubfx(right, right, kSmiShift, 5); | |
2025 __ Lsl(result, left, right); | |
2026 break; | |
2027 case Token::SHR: { | |
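// A zero shift of a negative value would yield an unsigned result that does | |
// not fit in a smi, so that case is deferred to the stub. | |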
2028 Label right_not_zero; | |
2029 __ Cbnz(right, &right_not_zero); | |
2030 __ Tbnz(left, kXSignBit, &stub_call); | |
2031 __ Bind(&right_not_zero); | |
2032 __ Ubfx(right, right, kSmiShift, 5); | |
2033 __ Lsr(result, left, right); | |
2034 __ Bic(result, result, kSmiShiftMask); | |
2035 break; | |
2036 } | |
2037 case Token::ADD: | |
2038 __ Adds(x10, left, right); | |
2039 __ B(vs, &stub_call); | |
2040 __ Mov(result, x10); | |
2041 break; | |
2042 case Token::SUB: | |
2043 __ Subs(x10, left, right); | |
2044 __ B(vs, &stub_call); | |
2045 __ Mov(result, x10); | |
2046 break; | |
2047 case Token::MUL: { | |
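// Smi payloads live in the upper 32 bits, so Smulh of two tagged values | |
// yields exactly the untagged 64-bit product. A zero product needs the | |
// minus-zero check below, and Cls verifies that the product fits in a smi | |
// before re-tagging. | |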
2048 Label not_minus_zero, done; | |
2049 __ Smulh(x10, left, right); | |
2050 __ Cbnz(x10, ¬_minus_zero); | |
2051 __ Eor(x11, left, right); | |
2052 __ Tbnz(x11, kXSignBit, &stub_call); | |
2053 STATIC_ASSERT(kSmiTag == 0); | |
2054 __ Mov(result, x10); | |
2055 __ B(&done); | |
2056 __ Bind(¬_minus_zero); | |
2057 __ Cls(x11, x10); | |
2058 __ Cmp(x11, kXRegSizeInBits - kSmiShift); | |
2059 __ B(lt, &stub_call); | |
2060 __ SmiTag(result, x10); | |
2061 __ Bind(&done); | |
2062 break; | |
2063 } | |
2064 case Token::BIT_OR: | |
2065 __ Orr(result, left, right); | |
2066 break; | |
2067 case Token::BIT_AND: | |
2068 __ And(result, left, right); | |
2069 break; | |
2070 case Token::BIT_XOR: | |
2071 __ Eor(result, left, right); | |
2072 break; | |
2073 default: | |
2074 UNREACHABLE(); | |
2075 } | |
2076 | |
2077 __ Bind(&done); | |
2078 context()->Plug(x0); | |
2079 } | |
2080 | |
2081 | |
2082 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, | |
2083 Token::Value op, | |
2084 OverwriteMode mode) { | |
2085 __ Pop(x1); | |
2086 BinaryOpICStub stub(op, mode); | |
2087 JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code. | |
2088 { | |
2089 Assembler::BlockPoolsScope scope(masm_); | |
2090 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId()); | |
2091 patch_site.EmitPatchInfo(); | |
2092 } | |
2093 context()->Plug(x0); | |
2094 } | |
2095 | |
2096 | |
2097 void FullCodeGenerator::EmitAssignment(Expression* expr) { | |
2098 ASSERT(expr->IsValidLeftHandSide()); | |
2099 | |
2100 // Left-hand side can only be a property, a global or a (parameter or local) | |
2101 // slot. | |
2102 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; | |
2103 LhsKind assign_type = VARIABLE; | |
2104 Property* prop = expr->AsProperty(); | |
2105 if (prop != NULL) { | |
2106 assign_type = (prop->key()->IsPropertyName()) | |
2107 ? NAMED_PROPERTY | |
2108 : KEYED_PROPERTY; | |
2109 } | |
2110 | |
2111 switch (assign_type) { | |
2112 case VARIABLE: { | |
2113 Variable* var = expr->AsVariableProxy()->var(); | |
2114 EffectContext context(this); | |
2115 EmitVariableAssignment(var, Token::ASSIGN); | |
2116 break; | |
2117 } | |
2118 case NAMED_PROPERTY: { | |
2119 __ Push(x0); // Preserve value. | |
2120 VisitForAccumulatorValue(prop->obj()); | |
2121 // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid | |
2122 // this copy. | |
2123 __ Mov(x1, x0); | |
2124 __ Pop(x0); // Restore value. | |
2125 __ Mov(x2, Operand(prop->key()->AsLiteral()->value())); | |
2126 CallStoreIC(); | |
2127 break; | |
2128 } | |
2129 case KEYED_PROPERTY: { | |
2130 __ Push(x0); // Preserve value. | |
2131 VisitForStackValue(prop->obj()); | |
2132 VisitForAccumulatorValue(prop->key()); | |
2133 __ Mov(x1, x0); | |
2134 __ Pop(x2, x0); | |
2135 Handle<Code> ic = strict_mode() == SLOPPY | |
2136 ? isolate()->builtins()->KeyedStoreIC_Initialize() | |
2137 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); | |
2138 CallIC(ic); | |
2139 break; | |
2140 } | |
2141 } | |
2142 context()->Plug(x0); | |
2143 } | |
2144 | |
2145 | |
2146 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( | |
2147 Variable* var, MemOperand location) { | |
2148 __ Str(result_register(), location); | |
2149 if (var->IsContextSlot()) { | |
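// `location` was produced by VarOperand(var, x1), which leaves the holding | |
// context object in x1, so x1 is the object the write barrier records. | |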
2150 // RecordWrite may destroy all its register arguments. | |
2151 __ Mov(x10, result_register()); | |
2152 int offset = Context::SlotOffset(var->index()); | |
2153 __ RecordWriteContextSlot( | |
2154 x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs); | |
2155 } | |
2156 } | |
2157 | |
2158 | |
2159 void FullCodeGenerator::EmitCallStoreContextSlot( | |
2160 Handle<String> name, StrictMode strict_mode) { | |
2161 __ Mov(x11, Operand(name)); | |
2162 __ Mov(x10, Smi::FromInt(strict_mode)); | |
2163 // jssp[0] : mode. | |
2164 // jssp[8] : name. | |
2165 // jssp[16] : context. | |
2166 // jssp[24] : value. | |
2167 __ Push(x0, cp, x11, x10); | |
2168 __ CallRuntime(Runtime::kStoreContextSlot, 4); | |
2169 } | |
2170 | |
2171 | |
2172 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | |
2173 Token::Value op) { | |
2174 ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment"); | |
2175 if (var->IsUnallocated()) { | |
2176 // Global var, const, or let. | |
2177 __ Mov(x2, Operand(var->name())); | |
2178 __ Ldr(x1, GlobalObjectMemOperand()); | |
2179 CallStoreIC(); | |
2180 | |
2181 } else if (op == Token::INIT_CONST_LEGACY) { | |
2182 // Const initializers need a write barrier. | |
2183 ASSERT(!var->IsParameter()); // No const parameters. | |
2184 if (var->IsLookupSlot()) { | |
2185 __ Push(x0); | |
2186 __ Mov(x0, Operand(var->name())); | |
2187 __ Push(cp, x0); // Context and name. | |
2188 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | |
2189 } else { | |
2190 ASSERT(var->IsStackLocal() || var->IsContextSlot()); | |
2191 Label skip; | |
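// Only store if the slot still holds the hole, i.e. the const has not been | |
// initialized yet; re-initialization of a legacy const is silently skipped. | |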
2192 MemOperand location = VarOperand(var, x1); | |
2193 __ Ldr(x10, location); | |
2194 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &skip); | |
2195 EmitStoreToStackLocalOrContextSlot(var, location); | |
2196 __ Bind(&skip); | |
2197 } | |
2198 | |
2199 } else if (var->mode() == LET && op != Token::INIT_LET) { | |
2200 // Non-initializing assignment to let variable needs a write barrier. | |
2201 if (var->IsLookupSlot()) { | |
2202 EmitCallStoreContextSlot(var->name(), strict_mode()); | |
2203 } else { | |
2204 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | |
2205 Label assign; | |
2206 MemOperand location = VarOperand(var, x1); | |
2207 __ Ldr(x10, location); | |
2208 __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign); | |
2209 __ Mov(x10, Operand(var->name())); | |
2210 __ Push(x10); | |
2211 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
2212 // Perform the assignment. | |
2213 __ Bind(&assign); | |
2214 EmitStoreToStackLocalOrContextSlot(var, location); | |
2215 } | |
2216 | |
2217 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { | |
2218 // Assignment to var or initializing assignment to let/const | |
2219 // in harmony mode. | |
2220 if (var->IsLookupSlot()) { | |
2221 EmitCallStoreContextSlot(var->name(), strict_mode()); | |
2222 } else { | |
2223 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | |
2224 MemOperand location = VarOperand(var, x1); | |
2225 if (FLAG_debug_code && op == Token::INIT_LET) { | |
2226 __ Ldr(x10, location); | |
2227 __ CompareRoot(x10, Heap::kTheHoleValueRootIndex); | |
2228 __ Check(eq, kLetBindingReInitialization); | |
2229 } | |
2230 EmitStoreToStackLocalOrContextSlot(var, location); | |
2231 } | |
2232 } | |
2233 // Non-initializing assignments to consts are ignored. | |
2234 } | |
2235 | |
2236 | |
2237 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | |
2238 ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment"); | |
2239 // Assignment to a property, using a named store IC. | |
2240 Property* prop = expr->target()->AsProperty(); | |
2241 ASSERT(prop != NULL); | |
2242 ASSERT(prop->key()->AsLiteral() != NULL); | |
2243 | |
2244 // Record source code position before IC call. | |
2245 SetSourcePosition(expr->position()); | |
2246 __ Mov(x2, Operand(prop->key()->AsLiteral()->value())); | |
2247 __ Pop(x1); | |
2248 | |
2249 CallStoreIC(expr->AssignmentFeedbackId()); | |
2250 | |
2251 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
2252 context()->Plug(x0); | |
2253 } | |
2254 | |
2255 | |
2256 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { | |
2257 ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment"); | |
2258 // Assignment to a property, using a keyed store IC. | |
2259 | |
2260 // Record source code position before IC call. | |
2261 SetSourcePosition(expr->position()); | |
2262 // TODO(all): Could we pass this in registers rather than on the stack? | |
2263 __ Pop(x1, x2); // Key and object holding the property. | |
2264 | |
2265 Handle<Code> ic = strict_mode() == SLOPPY | |
2266 ? isolate()->builtins()->KeyedStoreIC_Initialize() | |
2267 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); | |
2268 CallIC(ic, expr->AssignmentFeedbackId()); | |
2269 | |
2270 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
2271 context()->Plug(x0); | |
2272 } | |
2273 | |
2274 | |
2275 void FullCodeGenerator::VisitProperty(Property* expr) { | |
2276 Comment cmnt(masm_, "[ Property"); | |
2277 Expression* key = expr->key(); | |
2278 | |
2279 if (key->IsPropertyName()) { | |
2280 VisitForAccumulatorValue(expr->obj()); | |
2281 EmitNamedPropertyLoad(expr); | |
2282 PrepareForBailoutForId(expr->LoadId(), TOS_REG); | |
2283 context()->Plug(x0); | |
2284 } else { | |
2285 VisitForStackValue(expr->obj()); | |
2286 VisitForAccumulatorValue(expr->key()); | |
2287 __ Pop(x1); | |
2288 EmitKeyedPropertyLoad(expr); | |
2289 context()->Plug(x0); | |
2290 } | |
2291 } | |
2292 | |
2293 | |
2294 void FullCodeGenerator::CallIC(Handle<Code> code, | |
2295 TypeFeedbackId ast_id) { | |
2296 ic_total_count_++; | |
2297 // All calls must have a predictable size in full-codegen code to ensure that | |
2298 // the debugger can patch them correctly. | |
2299 __ Call(code, RelocInfo::CODE_TARGET, ast_id); | |
2300 } | |
2301 | |
2302 | |
2303 // Code common for calls using the IC. | |
2304 void FullCodeGenerator::EmitCallWithIC(Call* expr) { | |
2305 ASM_LOCATION("EmitCallWithIC"); | |
2306 | |
2307 Expression* callee = expr->expression(); | |
2308 ZoneList<Expression*>* args = expr->arguments(); | |
2309 int arg_count = args->length(); | |
2310 | |
2311 CallFunctionFlags flags; | |
2312 // Get the target function. | |
2313 if (callee->IsVariableProxy()) { | |
2314 { StackValueContext context(this); | |
2315 EmitVariableLoad(callee->AsVariableProxy()); | |
2316 PrepareForBailout(callee, NO_REGISTERS); | |
2317 } | |
2318 // Push undefined as receiver. This is patched in the method prologue if it | |
2319 // is a sloppy mode method. | |
2320 __ Push(isolate()->factory()->undefined_value()); | |
2321 flags = NO_CALL_FUNCTION_FLAGS; | |
2322 } else { | |
2323 // Load the function from the receiver. | |
2324 ASSERT(callee->IsProperty()); | |
2325 __ Peek(x0, 0); | |
2326 EmitNamedPropertyLoad(callee->AsProperty()); | |
2327 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | |
2328 // Push the target function under the receiver. | |
2329 __ Pop(x10); | |
2330 __ Push(x0, x10); | |
2331 flags = CALL_AS_METHOD; | |
2332 } | |
2333 | |
2334 // Load the arguments. | |
2335 { PreservePositionScope scope(masm()->positions_recorder()); | |
2336 for (int i = 0; i < arg_count; i++) { | |
2337 VisitForStackValue(args->at(i)); | |
2338 } | |
2339 } | |
2340 | |
2341 // Record source position for debugger. | |
2342 SetSourcePosition(expr->position()); | |
2343 CallFunctionStub stub(arg_count, flags); | |
2344 __ Peek(x1, (arg_count + 1) * kPointerSize); | |
2345 __ CallStub(&stub); | |
2346 | |
2347 RecordJSReturnSite(expr); | |
2348 | |
2349 // Restore context register. | |
2350 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
2351 | |
2352 context()->DropAndPlug(1, x0); | |
2353 } | |
2354 | |
2355 | |
2356 // Code common for calls using the IC. | |
2357 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, | |
2358 Expression* key) { | |
2359 // Load the key. | |
2360 VisitForAccumulatorValue(key); | |
2361 | |
2362 Expression* callee = expr->expression(); | |
2363 ZoneList<Expression*>* args = expr->arguments(); | |
2364 int arg_count = args->length(); | |
2365 | |
2366 // Load the function from the receiver. | |
2367 ASSERT(callee->IsProperty()); | |
2368 __ Peek(x1, 0); | |
2369 EmitKeyedPropertyLoad(callee->AsProperty()); | |
2370 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | |
2371 | |
2372 // Push the target function under the receiver. | |
2373 __ Pop(x10); | |
2374 __ Push(x0, x10); | |
2375 | |
2376 { PreservePositionScope scope(masm()->positions_recorder()); | |
2377 for (int i = 0; i < arg_count; i++) { | |
2378 VisitForStackValue(args->at(i)); | |
2379 } | |
2380 } | |
2381 | |
2382 // Record source position for debugger. | |
2383 SetSourcePosition(expr->position()); | |
2384 CallFunctionStub stub(arg_count, CALL_AS_METHOD); | |
2385 __ Peek(x1, (arg_count + 1) * kPointerSize); | |
2386 __ CallStub(&stub); | |
2387 | |
2388 RecordJSReturnSite(expr); | |
2389 // Restore context register. | |
2390 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
2391 | |
2392 context()->DropAndPlug(1, x0); | |
2393 } | |
2394 | |
2395 | |
2396 void FullCodeGenerator::EmitCallWithStub(Call* expr) { | |
2397 // Code common for calls using the call stub. | |
2398 ZoneList<Expression*>* args = expr->arguments(); | |
2399 int arg_count = args->length(); | |
2400 { PreservePositionScope scope(masm()->positions_recorder()); | |
2401 for (int i = 0; i < arg_count; i++) { | |
2402 VisitForStackValue(args->at(i)); | |
2403 } | |
2404 } | |
2405 // Record source position for debugger. | |
2406 SetSourcePosition(expr->position()); | |
2407 | |
2408 Handle<Object> uninitialized = | |
2409 TypeFeedbackInfo::UninitializedSentinel(isolate()); | |
2410 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized); | |
2411 __ LoadObject(x2, FeedbackVector()); | |
2412 __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot())); | |
2413 | |
2414 // Record call targets in unoptimized code. | |
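// The stub reads the feedback vector from x2 and the slot index (a smi) from | |
// x3, both loaded above. | |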
2415 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); | |
2416 __ Peek(x1, (arg_count + 1) * kXRegSize); | |
2417 __ CallStub(&stub); | |
2418 RecordJSReturnSite(expr); | |
2419 // Restore context register. | |
2420 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
2421 context()->DropAndPlug(1, x0); | |
2422 } | |
2423 | |
2424 | |
2425 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | |
2426 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval"); | |
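// The caller has already pushed a copy of the function being resolved; this | |
// helper pushes the remaining four arguments for the runtime call. | |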
2427 // Prepare to push a copy of the first argument or undefined if it doesn't | |
2428 // exist. | |
2429 if (arg_count > 0) { | |
2430 __ Peek(x10, arg_count * kXRegSize); | |
2431 } else { | |
2432 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | |
2433 } | |
2434 | |
2435 // Prepare to push the receiver of the enclosing function. | |
2436 int receiver_offset = 2 + info_->scope()->num_parameters(); | |
2437 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize)); | |
2438 | |
2439 // Push. | |
2440 __ Push(x10, x11); | |
2441 | |
2442 // Prepare to push the language mode. | |
2443 __ Mov(x10, Smi::FromInt(strict_mode())); | |
2444 // Prepare to push the start position of the scope the call resides in. | |
2445 __ Mov(x11, Smi::FromInt(scope()->start_position())); | |
2446 | |
2447 // Push. | |
2448 __ Push(x10, x11); | |
2449 | |
2450 // Do the runtime call. | |
2451 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); | |
2452 } | |
2453 | |
2454 | |
2455 void FullCodeGenerator::VisitCall(Call* expr) { | |
2456 #ifdef DEBUG | |
2457 // We want to verify that RecordJSReturnSite gets called on all paths | |
2458 // through this function. Avoid early returns. | |
2459 expr->return_is_recorded_ = false; | |
2460 #endif | |
2461 | |
2462 Comment cmnt(masm_, "[ Call"); | |
2463 Expression* callee = expr->expression(); | |
2464 Call::CallType call_type = expr->GetCallType(isolate()); | |
2465 | |
2466 if (call_type == Call::POSSIBLY_EVAL_CALL) { | |
2467 // In a call to eval, we first call %ResolvePossiblyDirectEval to | |
2468 // resolve the function we need to call and the receiver of the | |
2469 // call. Then we call the resolved function using the given | |
2470 // arguments. | |
2471 ZoneList<Expression*>* args = expr->arguments(); | |
2472 int arg_count = args->length(); | |
2473 | |
2474 { | |
2475 PreservePositionScope pos_scope(masm()->positions_recorder()); | |
2476 VisitForStackValue(callee); | |
2477 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | |
2478 __ Push(x10); // Reserved receiver slot. | |
2479 | |
2480 // Push the arguments. | |
2481 for (int i = 0; i < arg_count; i++) { | |
2482 VisitForStackValue(args->at(i)); | |
2483 } | |
2484 | |
2485 // Push a copy of the function (found below the arguments) and | |
2486 // resolve eval. | |
2487 __ Peek(x10, (arg_count + 1) * kPointerSize); | |
2488 __ Push(x10); | |
2489 EmitResolvePossiblyDirectEval(arg_count); | |
2490 | |
2491 // The runtime call returns a pair of values in x0 (function) and | |
2492 // x1 (receiver). Touch up the stack with the right values. | |
2493 __ PokePair(x1, x0, arg_count * kPointerSize); | |
2494 } | |
2495 | |
2496 // Record source position for debugger. | |
2497 SetSourcePosition(expr->position()); | |
2498 | |
2499 // Call the evaluated function. | |
2500 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); | |
2501 __ Peek(x1, (arg_count + 1) * kXRegSize); | |
2502 __ CallStub(&stub); | |
2503 RecordJSReturnSite(expr); | |
2504 // Restore context register. | |
2505 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
2506 context()->DropAndPlug(1, x0); | |
2507 | |
2508 } else if (call_type == Call::GLOBAL_CALL) { | |
2509 EmitCallWithIC(expr); | |
2510 | |
2511 } else if (call_type == Call::LOOKUP_SLOT_CALL) { | |
2512 // Call to a lookup slot (dynamically introduced variable). | |
2513 VariableProxy* proxy = callee->AsVariableProxy(); | |
2514 Label slow, done; | |
2515 | |
2516 { PreservePositionScope scope(masm()->positions_recorder()); | |
2517 // Generate code for loading from variables potentially shadowed | |
2518 // by eval-introduced variables. | |
2519 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); | |
2520 } | |
2521 | |
2522 __ Bind(&slow); | |
2523 // Call the runtime to find the function to call (returned in x0) | |
2524 // and the object holding it (returned in x1). | |
2525 __ Push(context_register()); | |
2526 __ Mov(x10, Operand(proxy->name())); | |
2527 __ Push(x10); | |
2528 __ CallRuntime(Runtime::kLoadContextSlot, 2); | |
2529 __ Push(x0, x1); // Receiver, function. | |
2530 | |
2531 // If fast case code has been generated, emit code to push the | |
2532 // function and receiver and have the slow path jump around this | |
2533 // code. | |
2534 if (done.is_linked()) { | |
2535 Label call; | |
2536 __ B(&call); | |
2537 __ Bind(&done); | |
2538 // Push function. | |
2539 __ Push(x0); | |
2540 // The receiver is implicitly the global receiver. Indicate this | |
2541 // by passing undefined to the call function stub. | |
2542 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex); | |
2543 __ Push(x1); | |
2544 __ Bind(&call); | |
2545 } | |
2546 | |
2547 // The receiver is either the global receiver or an object found | |
2548 // by LoadContextSlot. | |
2549 EmitCallWithStub(expr); | |
2550 } else if (call_type == Call::PROPERTY_CALL) { | |
2551 Property* property = callee->AsProperty(); | |
2552 { PreservePositionScope scope(masm()->positions_recorder()); | |
2553 VisitForStackValue(property->obj()); | |
2554 } | |
2555 if (property->key()->IsPropertyName()) { | |
2556 EmitCallWithIC(expr); | |
2557 } else { | |
2558 EmitKeyedCallWithIC(expr, property->key()); | |
2559 } | |
2560 | |
2561 } else { | |
2562 ASSERT(call_type == Call::OTHER_CALL); | |
2563 // Call to an arbitrary expression not handled specially above. | |
2564 { PreservePositionScope scope(masm()->positions_recorder()); | |
2565 VisitForStackValue(callee); | |
2566 } | |
2567 __ LoadRoot(x1, Heap::kUndefinedValueRootIndex); | |
2568 __ Push(x1); | |
2569 // Emit function call. | |
2570 EmitCallWithStub(expr); | |
2571 } | |
2572 | |
2573 #ifdef DEBUG | |
2574 // RecordJSReturnSite should have been called. | |
2575 ASSERT(expr->return_is_recorded_); | |
2576 #endif | |
2577 } | |
2578 | |
2579 | |
2580 void FullCodeGenerator::VisitCallNew(CallNew* expr) { | |
2581 Comment cmnt(masm_, "[ CallNew"); | |
2582 // According to ECMA-262, section 11.2.2, page 44, the function | |
2583 // expression in new calls must be evaluated before the | |
2584 // arguments. | |
2585 | |
2586 // Push the constructor on the stack. If it's not a function, it's used as | |
2587 // the receiver for CALL_NON_FUNCTION; otherwise the value on the stack is | |
2588 // ignored. | |
2589 VisitForStackValue(expr->expression()); | |
2590 | |
2591 // Push the arguments ("left-to-right") on the stack. | |
2592 ZoneList<Expression*>* args = expr->arguments(); | |
2593 int arg_count = args->length(); | |
2594 for (int i = 0; i < arg_count; i++) { | |
2595 VisitForStackValue(args->at(i)); | |
2596 } | |
2597 | |
2598 // Call the construct call builtin that handles allocation and | |
2599 // constructor invocation. | |
2600 SetSourcePosition(expr->position()); | |
2601 | |
2602 // Load function and argument count into x1 and x0. | |
2603 __ Mov(x0, arg_count); | |
2604 __ Peek(x1, arg_count * kXRegSize); | |
2605 | |
2606 // Record call targets in unoptimized code. | |
2607 Handle<Object> uninitialized = | |
2608 TypeFeedbackInfo::UninitializedSentinel(isolate()); | |
2609 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized); | |
2610 if (FLAG_pretenuring_call_new) { | |
2611 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(), | |
2612 isolate()->factory()->NewAllocationSite()); | |
2613 ASSERT(expr->AllocationSiteFeedbackSlot() == | |
2614 expr->CallNewFeedbackSlot() + 1); | |
2615 } | |
2616 | |
2617 __ LoadObject(x2, FeedbackVector()); | |
2618 __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot())); | |
2619 | |
2620 CallConstructStub stub(RECORD_CALL_TARGET); | |
2621 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); | |
2622 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | |
2623 context()->Plug(x0); | |
2624 } | |
2625 | |
2626 | |
2627 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { | |
2628 ZoneList<Expression*>* args = expr->arguments(); | |
2629 ASSERT(args->length() == 1); | |
2630 | |
2631 VisitForAccumulatorValue(args->at(0)); | |
2632 | |
2633 Label materialize_true, materialize_false; | |
2634 Label* if_true = NULL; | |
2635 Label* if_false = NULL; | |
2636 Label* fall_through = NULL; | |
2637 context()->PrepareTest(&materialize_true, &materialize_false, | |
2638 &if_true, &if_false, &fall_through); | |
2639 | |
2640 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2641 __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through); | |
2642 | |
2643 context()->Plug(if_true, if_false); | |
2644 } | |
2645 | |
2646 | |
2647 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { | |
2648 ZoneList<Expression*>* args = expr->arguments(); | |
2649 ASSERT(args->length() == 1); | |
2650 | |
2651 VisitForAccumulatorValue(args->at(0)); | |
2652 | |
2653 Label materialize_true, materialize_false; | |
2654 Label* if_true = NULL; | |
2655 Label* if_false = NULL; | |
2656 Label* fall_through = NULL; | |
2657 context()->PrepareTest(&materialize_true, &materialize_false, | |
2658 &if_true, &if_false, &fall_through); | |
2659 | |
2660 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
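// The mask tests both the smi tag bit and the sign bit of the 32-bit payload | |
// (held in the upper word); only a non-negative smi leaves all masked bits | |
// clear. | |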
2661 __ TestAndSplit(x0, kSmiTagMask | (0x80000000UL << kSmiShift), if_true, | |
2662 if_false, fall_through); | |
2663 | |
2664 context()->Plug(if_true, if_false); | |
2665 } | |
2666 | |
2667 | |
2668 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { | |
2669 ZoneList<Expression*>* args = expr->arguments(); | |
2670 ASSERT(args->length() == 1); | |
2671 | |
2672 VisitForAccumulatorValue(args->at(0)); | |
2673 | |
2674 Label materialize_true, materialize_false; | |
2675 Label* if_true = NULL; | |
2676 Label* if_false = NULL; | |
2677 Label* fall_through = NULL; | |
2678 context()->PrepareTest(&materialize_true, &materialize_false, | |
2679 &if_true, &if_false, &fall_through); | |
2680 | |
2681 __ JumpIfSmi(x0, if_false); | |
2682 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true); | |
2683 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset)); | |
2684 // Undetectable objects behave like undefined when tested with typeof. | |
2685 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset)); | |
2686 __ Tbnz(x11, Map::kIsUndetectable, if_false); | |
2687 __ Ldrb(x12, FieldMemOperand(x10, Map::kInstanceTypeOffset)); | |
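// The instance type must lie in the non-callable spec object range for the | |
// value to count as an object here. | |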
2688 __ Cmp(x12, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); | |
2689 __ B(lt, if_false); | |
2690 __ Cmp(x12, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); | |
2691 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2692 Split(le, if_true, if_false, fall_through); | |
2693 | |
2694 context()->Plug(if_true, if_false); | |
2695 } | |
2696 | |
2697 | |
2698 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { | |
2699 ZoneList<Expression*>* args = expr->arguments(); | |
2700 ASSERT(args->length() == 1); | |
2701 | |
2702 VisitForAccumulatorValue(args->at(0)); | |
2703 | |
2704 Label materialize_true, materialize_false; | |
2705 Label* if_true = NULL; | |
2706 Label* if_false = NULL; | |
2707 Label* fall_through = NULL; | |
2708 context()->PrepareTest(&materialize_true, &materialize_false, | |
2709 &if_true, &if_false, &fall_through); | |
2710 | |
2711 __ JumpIfSmi(x0, if_false); | |
2712 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE); | |
2713 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2714 Split(ge, if_true, if_false, fall_through); | |
2715 | |
2716 context()->Plug(if_true, if_false); | |
2717 } | |
2718 | |
2719 | |
2720 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { | |
2721 ASM_LOCATION("FullCodeGenerator::EmitIsUndetectableObject"); | |
2722 ZoneList<Expression*>* args = expr->arguments(); | |
2723 ASSERT(args->length() == 1); | |
2724 | |
2725 VisitForAccumulatorValue(args->at(0)); | |
2726 | |
2727 Label materialize_true, materialize_false; | |
2728 Label* if_true = NULL; | |
2729 Label* if_false = NULL; | |
2730 Label* fall_through = NULL; | |
2731 context()->PrepareTest(&materialize_true, &materialize_false, | |
2732 &if_true, &if_false, &fall_through); | |
2733 | |
2734 __ JumpIfSmi(x0, if_false); | |
2735 __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset)); | |
2736 __ Ldrb(x11, FieldMemOperand(x10, Map::kBitFieldOffset)); | |
2737 __ Tst(x11, 1 << Map::kIsUndetectable); | |
2738 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2739 Split(ne, if_true, if_false, fall_through); | |
2740 | |
2741 context()->Plug(if_true, if_false); | |
2742 } | |
2743 | |
2744 | |
2745 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( | |
2746 CallRuntime* expr) { | |
2747 ZoneList<Expression*>* args = expr->arguments(); | |
2748 ASSERT(args->length() == 1); | |
2749 VisitForAccumulatorValue(args->at(0)); | |
2750 | |
2751 Label materialize_true, materialize_false, skip_lookup; | |
2752 Label* if_true = NULL; | |
2753 Label* if_false = NULL; | |
2754 Label* fall_through = NULL; | |
2755 context()->PrepareTest(&materialize_true, &materialize_false, | |
2756 &if_true, &if_false, &fall_through); | |
2757 | |
2758 Register object = x0; | |
2759 __ AssertNotSmi(object); | |
2760 | |
2761 Register map = x10; | |
2762 Register bitfield2 = x11; | |
2763 __ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); | |
2764 __ Ldrb(bitfield2, FieldMemOperand(map, Map::kBitField2Offset)); | |
2765 __ Tbnz(bitfield2, Map::kStringWrapperSafeForDefaultValueOf, &skip_lookup); | |
2766 | |
2767 // Check for fast case object. Generate false result for slow case object. | |
2768 Register props = x12; | |
2769 Register props_map = x12; | |
2770 Register hash_table_map = x13; | |
2771 __ Ldr(props, FieldMemOperand(object, JSObject::kPropertiesOffset)); | |
2772 __ Ldr(props_map, FieldMemOperand(props, HeapObject::kMapOffset)); | |
2773 __ LoadRoot(hash_table_map, Heap::kHashTableMapRootIndex); | |
2774 __ Cmp(props_map, hash_table_map); | |
2775 __ B(eq, if_false); | |
2776 | |
2777 // Look for the valueOf name in the descriptor array and indicate false if it | |
2778 // is found. Since we omit an enumeration index check, a valueOf property | |
2779 // added via a transition that shares this descriptor array is a false positive. | |
2780 Label loop, done; | |
2781 | |
2782 // Skip loop if no descriptors are valid. | |
2783 Register descriptors = x12; | |
2784 Register descriptors_length = x13; | |
2785 __ NumberOfOwnDescriptors(descriptors_length, map); | |
2786 __ Cbz(descriptors_length, &done); | |
2787 | |
2788 __ LoadInstanceDescriptors(map, descriptors); | |
2789 | |
2790 // Compute the extent (in slots) covered by the valid descriptors. | |
2791 Register descriptors_end = x14; | |
2792 __ Mov(x15, DescriptorArray::kDescriptorSize); | |
2793 __ Mul(descriptors_length, descriptors_length, x15); | |
2794 // Calculate location of the first key name. | |
2795 __ Add(descriptors, descriptors, | |
2796 DescriptorArray::kFirstOffset - kHeapObjectTag); | |
2797 // Calculate the end of the descriptor array. | |
2798 __ Add(descriptors_end, descriptors, | |
2799 Operand(descriptors_length, LSL, kPointerSizeLog2)); | |
2800 | |
2801 // Loop through all the keys in the descriptor array. If one of these is the | |
2802 // string "valueOf" the result is false. | |
2803 Register valueof_string = x1; | |
2804 int descriptor_size = DescriptorArray::kDescriptorSize * kPointerSize; | |
2805 __ Mov(valueof_string, Operand(isolate()->factory()->value_of_string())); | |
2806 __ Bind(&loop); | |
2807 __ Ldr(x15, MemOperand(descriptors, descriptor_size, PostIndex)); | |
2808 __ Cmp(x15, valueof_string); | |
2809 __ B(eq, if_false); | |
2810 __ Cmp(descriptors, descriptors_end); | |
2811 __ B(ne, &loop); | |
2812 | |
2813 __ Bind(&done); | |
2814 | |
2815 // Set the bit in the map to indicate that there is no local valueOf field. | |
2816 __ Ldrb(x2, FieldMemOperand(map, Map::kBitField2Offset)); | |
2817 __ Orr(x2, x2, 1 << Map::kStringWrapperSafeForDefaultValueOf); | |
2818 __ Strb(x2, FieldMemOperand(map, Map::kBitField2Offset)); | |
2819 | |
2820 __ Bind(&skip_lookup); | |
2821 | |
2822 // If a valueOf property is not found on the object, check that its prototype | |
2823 // is the unmodified String prototype. If it is not, the result is false. | |
2824 Register prototype = x1; | |
2825 Register global_idx = x2; | |
2826 Register native_context = x2; | |
2827 Register string_proto = x3; | |
2828 Register proto_map = x4; | |
2829 __ Ldr(prototype, FieldMemOperand(map, Map::kPrototypeOffset)); | |
2830 __ JumpIfSmi(prototype, if_false); | |
2831 __ Ldr(proto_map, FieldMemOperand(prototype, HeapObject::kMapOffset)); | |
2832 __ Ldr(global_idx, GlobalObjectMemOperand()); | |
2833 __ Ldr(native_context, | |
2834 FieldMemOperand(global_idx, GlobalObject::kNativeContextOffset)); | |
2835 __ Ldr(string_proto, | |
2836 ContextMemOperand(native_context, | |
2837 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); | |
2838 __ Cmp(proto_map, string_proto); | |
2839 | |
2840 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2841 Split(eq, if_true, if_false, fall_through); | |
2842 | |
2843 context()->Plug(if_true, if_false); | |
2844 } | |
2845 | |
2846 | |
2847 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { | |
2848 ZoneList<Expression*>* args = expr->arguments(); | |
2849 ASSERT(args->length() == 1); | |
2850 | |
2851 VisitForAccumulatorValue(args->at(0)); | |
2852 | |
2853 Label materialize_true, materialize_false; | |
2854 Label* if_true = NULL; | |
2855 Label* if_false = NULL; | |
2856 Label* fall_through = NULL; | |
2857 context()->PrepareTest(&materialize_true, &materialize_false, | |
2858 &if_true, &if_false, &fall_through); | |
2859 | |
2860 __ JumpIfSmi(x0, if_false); | |
2861 __ CompareObjectType(x0, x10, x11, JS_FUNCTION_TYPE); | |
2862 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2863 Split(eq, if_true, if_false, fall_through); | |
2864 | |
2865 context()->Plug(if_true, if_false); | |
2866 } | |
2867 | |
2868 | |
2869 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { | |
2870 ZoneList<Expression*>* args = expr->arguments(); | |
2871 ASSERT(args->length() == 1); | |
2872 | |
2873 VisitForAccumulatorValue(args->at(0)); | |
2874 | |
2875 Label materialize_true, materialize_false; | |
2876 Label* if_true = NULL; | |
2877 Label* if_false = NULL; | |
2878 Label* fall_through = NULL; | |
2879 context()->PrepareTest(&materialize_true, &materialize_false, | |
2880 &if_true, &if_false, &fall_through); | |
2881 | |
2882 // Only a HeapNumber can be -0.0, so return false if we have something else. | |
2883 __ CheckMap(x0, x1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); | |
2884 | |
2885 // Test the bit pattern. | |
2886 __ Ldr(x10, FieldMemOperand(x0, HeapNumber::kValueOffset)); | |
2887 __ Cmp(x10, 1); // Set V on 0x8000000000000000. | |
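// Only -0.0 has the bit pattern 0x8000000000000000 (INT64_MIN); subtracting 1 | |
// overflows exactly for that value, so the split below branches on V. | |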
2888 | |
2889 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2890 Split(vs, if_true, if_false, fall_through); | |
2891 | |
2892 context()->Plug(if_true, if_false); | |
2893 } | |
2894 | |
2895 | |
2896 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { | |
2897 ZoneList<Expression*>* args = expr->arguments(); | |
2898 ASSERT(args->length() == 1); | |
2899 | |
2900 VisitForAccumulatorValue(args->at(0)); | |
2901 | |
2902 Label materialize_true, materialize_false; | |
2903 Label* if_true = NULL; | |
2904 Label* if_false = NULL; | |
2905 Label* fall_through = NULL; | |
2906 context()->PrepareTest(&materialize_true, &materialize_false, | |
2907 &if_true, &if_false, &fall_through); | |
2908 | |
2909 __ JumpIfSmi(x0, if_false); | |
2910 __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE); | |
2911 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2912 Split(eq, if_true, if_false, fall_through); | |
2913 | |
2914 context()->Plug(if_true, if_false); | |
2915 } | |
2916 | |
2917 | |
2918 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { | |
2919 ZoneList<Expression*>* args = expr->arguments(); | |
2920 ASSERT(args->length() == 1); | |
2921 | |
2922 VisitForAccumulatorValue(args->at(0)); | |
2923 | |
2924 Label materialize_true, materialize_false; | |
2925 Label* if_true = NULL; | |
2926 Label* if_false = NULL; | |
2927 Label* fall_through = NULL; | |
2928 context()->PrepareTest(&materialize_true, &materialize_false, | |
2929 &if_true, &if_false, &fall_through); | |
2930 | |
2931 __ JumpIfSmi(x0, if_false); | |
2932 __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE); | |
2933 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2934 Split(eq, if_true, if_false, fall_through); | |
2935 | |
2936 context()->Plug(if_true, if_false); | |
2937 } | |
2938 | |
2939 | |
2940 | |
2941 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { | |
2942 ASSERT(expr->arguments()->length() == 0); | |
2943 | |
2944 Label materialize_true, materialize_false; | |
2945 Label* if_true = NULL; | |
2946 Label* if_false = NULL; | |
2947 Label* fall_through = NULL; | |
2948 context()->PrepareTest(&materialize_true, &materialize_false, | |
2949 &if_true, &if_false, &fall_through); | |
2950 | |
2951 // Get the frame pointer for the calling frame. | |
2952 __ Ldr(x2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
2953 | |
2954 // Skip the arguments adaptor frame if it exists. | |
2955 Label check_frame_marker; | |
2956 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kContextOffset)); | |
2957 __ Cmp(x1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | |
2958 __ B(ne, &check_frame_marker); | |
2959 __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset)); | |
2960 | |
2961 // Check the marker in the calling frame. | |
2962 __ Bind(&check_frame_marker); | |
2963 __ Ldr(x1, MemOperand(x2, StandardFrameConstants::kMarkerOffset)); | |
2964 __ Cmp(x1, Smi::FromInt(StackFrame::CONSTRUCT)); | |
2965 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2966 Split(eq, if_true, if_false, fall_through); | |
2967 | |
2968 context()->Plug(if_true, if_false); | |
2969 } | |
2970 | |
2971 | |
2972 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { | |
2973 ZoneList<Expression*>* args = expr->arguments(); | |
2974 ASSERT(args->length() == 2); | |
2975 | |
2976 // Load the two objects into registers and perform the comparison. | |
2977 VisitForStackValue(args->at(0)); | |
2978 VisitForAccumulatorValue(args->at(1)); | |
2979 | |
2980 Label materialize_true, materialize_false; | |
2981 Label* if_true = NULL; | |
2982 Label* if_false = NULL; | |
2983 Label* fall_through = NULL; | |
2984 context()->PrepareTest(&materialize_true, &materialize_false, | |
2985 &if_true, &if_false, &fall_through); | |
2986 | |
2987 __ Pop(x1); | |
2988 __ Cmp(x0, x1); | |
2989 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
2990 Split(eq, if_true, if_false, fall_through); | |
2991 | |
2992 context()->Plug(if_true, if_false); | |
2993 } | |
2994 | |
2995 | |
2996 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { | |
2997 ZoneList<Expression*>* args = expr->arguments(); | |
2998 ASSERT(args->length() == 1); | |
2999 | |
3000 // ArgumentsAccessStub expects the key in x1. | |
3001 VisitForAccumulatorValue(args->at(0)); | |
3002 __ Mov(x1, x0); | |
3003 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters())); | |
3004 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); | |
3005 __ CallStub(&stub); | |
3006 context()->Plug(x0); | |
3007 } | |
3008 | |
3009 | |
3010 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { | |
3011 ASSERT(expr->arguments()->length() == 0); | |
3012 Label exit; | |
3013 // Get the number of formal parameters. | |
3014 __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters())); | |
3015 | |
3016 // Check if the calling frame is an arguments adaptor frame. | |
3017 __ Ldr(x12, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
3018 __ Ldr(x13, MemOperand(x12, StandardFrameConstants::kContextOffset)); | |
3019 __ Cmp(x13, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | |
3020 __ B(ne, &exit); | |
3021 | |
3022 // Arguments adaptor case: Read the arguments length from the | |
3023 // adaptor frame. | |
3024 __ Ldr(x0, MemOperand(x12, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
3025 | |
3026 __ Bind(&exit); | |
3027 context()->Plug(x0); | |
3028 } | |
3029 | |
3030 | |
3031 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { | |
3032 ASM_LOCATION("FullCodeGenerator::EmitClassOf"); | |
3033 ZoneList<Expression*>* args = expr->arguments(); | |
3034 ASSERT(args->length() == 1); | |
3035 Label done, null, function, non_function_constructor; | |
3036 | |
3037 VisitForAccumulatorValue(args->at(0)); | |
3038 | |
3039 // If the object is a smi, we return null. | |
3040 __ JumpIfSmi(x0, &null); | |
3041 | |
3042 // Check that the object is a JS object but take special care of JS | |
3043 // functions to make sure they have 'Function' as their class. | |
3044 // Assume that there are only two callable types, and one of them is at | |
3045 // either end of the type range for JS object types. Saves extra comparisons. | |
3046 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | |
3047 __ CompareObjectType(x0, x10, x11, FIRST_SPEC_OBJECT_TYPE); | |
3048 // x10: object's map. | |
3049 // x11: object's type. | |
3050 __ B(lt, &null); | |
3051 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == | |
3052 FIRST_SPEC_OBJECT_TYPE + 1); | |
3053 __ B(eq, &function); | |
3054 | |
3055 __ Cmp(x11, LAST_SPEC_OBJECT_TYPE); | |
3056 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == | |
3057 LAST_SPEC_OBJECT_TYPE - 1); | |
3058 __ B(eq, &function); | |
3059 // Assume that there is no larger type. | |
3060 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); | |
3061 | |
3062 // Check if the constructor in the map is a JS function. | |
3063 __ Ldr(x12, FieldMemOperand(x10, Map::kConstructorOffset)); | |
3064 __ JumpIfNotObjectType(x12, x13, x14, JS_FUNCTION_TYPE, | |
3065 &non_function_constructor); | |
3066 | |
3067 // x12 now contains the constructor function. Grab the | |
3068 // instance class name from there. | |
3069 __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset)); | |
3070 __ Ldr(x0, | |
3071 FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset)); | |
3072 __ B(&done); | |
3073 | |
3074 // Functions have class 'Function'. | |
3075 __ Bind(&function); | |
3076 __ LoadRoot(x0, Heap::kfunction_class_stringRootIndex); | |
3077 __ B(&done); | |
3078 | |
3079 // Objects with a non-function constructor have class 'Object'. | |
3080 __ Bind(&non_function_constructor); | |
3081 __ LoadRoot(x0, Heap::kObject_stringRootIndex); | |
3082 __ B(&done); | |
3083 | |
3084 // Non-JS objects have class null. | |
3085 __ Bind(&null); | |
3086 __ LoadRoot(x0, Heap::kNullValueRootIndex); | |
3087 | |
3088 // All done. | |
3089 __ Bind(&done); | |
3090 | |
3091 context()->Plug(x0); | |
3092 } | |
3093 | |
3094 | |
3095 void FullCodeGenerator::EmitLog(CallRuntime* expr) { | |
3096 // Conditionally generate a log call. | |
3097 // Args: | |
3098 // 0 (literal string): The type of logging (corresponds to the flags). | |
3099 // This is used to determine whether or not to generate the log call. | |
3100 // 1 (string): Format string. Access the string at argument index 2 | |
3101 // with '%2s' (see Logger::LogRuntime for all the formats). | |
3102 // 2 (array): Arguments to the format string. | |
3103 ZoneList<Expression*>* args = expr->arguments(); | |
3104 ASSERT_EQ(args->length(), 3); | |
3105 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) { | |
3106 VisitForStackValue(args->at(1)); | |
3107 VisitForStackValue(args->at(2)); | |
3108 __ CallRuntime(Runtime::kLog, 2); | |
3109 } | |
3110 | |
3111 // Finally, we're expected to leave a value on the top of the stack. | |
3112 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); | |
3113 context()->Plug(x0); | |
3114 } | |
3115 | |
3116 | |
3117 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { | |
3118 // Load the arguments on the stack and call the stub. | |
3119 SubStringStub stub; | |
3120 ZoneList<Expression*>* args = expr->arguments(); | |
3121 ASSERT(args->length() == 3); | |
3122 VisitForStackValue(args->at(0)); | |
3123 VisitForStackValue(args->at(1)); | |
3124 VisitForStackValue(args->at(2)); | |
3125 __ CallStub(&stub); | |
3126 context()->Plug(x0); | |
3127 } | |
3128 | |
3129 | |
3130 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { | |
3131 // Load the arguments on the stack and call the stub. | |
3132 RegExpExecStub stub; | |
3133 ZoneList<Expression*>* args = expr->arguments(); | |
3134 ASSERT(args->length() == 4); | |
3135 VisitForStackValue(args->at(0)); | |
3136 VisitForStackValue(args->at(1)); | |
3137 VisitForStackValue(args->at(2)); | |
3138 VisitForStackValue(args->at(3)); | |
3139 __ CallStub(&stub); | |
3140 context()->Plug(x0); | |
3141 } | |
3142 | |
3143 | |
3144 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { | |
3145 ASM_LOCATION("FullCodeGenerator::EmitValueOf"); | |
3146 ZoneList<Expression*>* args = expr->arguments(); | |
3147 ASSERT(args->length() == 1); | |
3148 VisitForAccumulatorValue(args->at(0)); // Load the object. | |
3149 | |
3150 Label done; | |
3151 // If the object is a smi return the object. | |
3152 __ JumpIfSmi(x0, &done); | |
3153 // If the object is not a value type, return the object. | |
3154 __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done); | |
3155 __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset)); | |
3156 | |
3157 __ Bind(&done); | |
3158 context()->Plug(x0); | |
3159 } | |
3160 | |
3161 | |
3162 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { | |
3163 ZoneList<Expression*>* args = expr->arguments(); | |
3164 ASSERT(args->length() == 2); | |
3165 ASSERT_NE(NULL, args->at(1)->AsLiteral()); | |
3166 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); | |
3167 | |
3168 VisitForAccumulatorValue(args->at(0)); // Load the object. | |
3169 | |
3170 Label runtime, done, not_date_object; | |
3171 Register object = x0; | |
3172 Register result = x0; | |
3173 Register stamp_addr = x10; | |
3174 Register stamp_cache = x11; | |
3175 | |
3176 __ JumpIfSmi(object, ¬_date_object); | |
3177 __ JumpIfNotObjectType(object, x10, x10, JS_DATE_TYPE, ¬_date_object); | |
3178 | |
3179 if (index->value() == 0) { | |
3180 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); | |
3181 __ B(&done); | |
3182 } else { | |
3183 if (index->value() < JSDate::kFirstUncachedField) { | |
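      // Fields below kFirstUncachedField are cached directly on the JSDate
      // object. The cached value is only valid while the object's cache stamp
      // matches the isolate's current date cache stamp; if the stamps differ,
      // branch to the runtime path to recompute the field.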
3184 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | |
3185 __ Mov(x10, stamp); | |
3186 __ Ldr(stamp_addr, MemOperand(x10)); | |
3187 __ Ldr(stamp_cache, FieldMemOperand(object, JSDate::kCacheStampOffset)); | |
3188 __ Cmp(stamp_addr, stamp_cache); | |
3189 __ B(ne, &runtime); | |
3190 __ Ldr(result, FieldMemOperand(object, JSDate::kValueOffset + | |
3191 kPointerSize * index->value())); | |
3192 __ B(&done); | |
3193 } | |
3194 | |
3195 __ Bind(&runtime); | |
3196 __ Mov(x1, index); | |
3197 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | |
3198 __ B(&done); | |
3199 } | |
3200 | |
3201 __ Bind(¬_date_object); | |
3202 __ CallRuntime(Runtime::kThrowNotDateError, 0); | |
3203 __ Bind(&done); | |
3204 context()->Plug(x0); | |
3205 } | |
3206 | |
3207 | |
3208 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { | |
3209 ZoneList<Expression*>* args = expr->arguments(); | |
3210 ASSERT_EQ(3, args->length()); | |
3211 | |
3212 Register string = x0; | |
3213 Register index = x1; | |
3214 Register value = x2; | |
3215 Register scratch = x10; | |
3216 | |
3217 VisitForStackValue(args->at(1)); // index | |
3218 VisitForStackValue(args->at(2)); // value | |
3219 VisitForAccumulatorValue(args->at(0)); // string | |
3220 __ Pop(value, index); | |
3221 | |
3222 if (FLAG_debug_code) { | |
3223 __ AssertSmi(value, kNonSmiValue); | |
3224 __ AssertSmi(index, kNonSmiIndex); | |
3225 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | |
3226 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch, | |
3227 one_byte_seq_type); | |
3228 } | |
3229 | |
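  // Compute the address of the first character: strip the heap object tag and
  // skip the sequential string header. The untagged index is then a direct
  // byte offset for the one-byte store below.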
3230 __ Add(scratch, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); | |
3231 __ SmiUntag(value); | |
3232 __ SmiUntag(index); | |
3233 __ Strb(value, MemOperand(scratch, index)); | |
3234 context()->Plug(string); | |
3235 } | |
3236 | |
3237 | |
3238 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { | |
3239 ZoneList<Expression*>* args = expr->arguments(); | |
3240 ASSERT_EQ(3, args->length()); | |
3241 | |
3242 Register string = x0; | |
3243 Register index = x1; | |
3244 Register value = x2; | |
3245 Register scratch = x10; | |
3246 | |
3247 VisitForStackValue(args->at(1)); // index | |
3248 VisitForStackValue(args->at(2)); // value | |
3249 VisitForAccumulatorValue(args->at(0)); // string | |
3250 __ Pop(value, index); | |
3251 | |
3252 if (FLAG_debug_code) { | |
3253 __ AssertSmi(value, kNonSmiValue); | |
3254 __ AssertSmi(index, kNonSmiIndex); | |
3255 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | |
3256 __ EmitSeqStringSetCharCheck(string, index, kIndexIsSmi, scratch, | |
3257 two_byte_seq_type); | |
3258 } | |
3259 | |
3260 __ Add(scratch, string, SeqTwoByteString::kHeaderSize - kHeapObjectTag); | |
3261 __ SmiUntag(value); | |
3262 __ SmiUntag(index); | |
3263 __ Strh(value, MemOperand(scratch, index, LSL, 1)); | |
3264 context()->Plug(string); | |
3265 } | |
3266 | |
3267 | |
3268 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { | |
3269 // Load the arguments on the stack and call the MathPow stub. | |
3270 ZoneList<Expression*>* args = expr->arguments(); | |
3271 ASSERT(args->length() == 2); | |
3272 VisitForStackValue(args->at(0)); | |
3273 VisitForStackValue(args->at(1)); | |
3274 MathPowStub stub(MathPowStub::ON_STACK); | |
3275 __ CallStub(&stub); | |
3276 context()->Plug(x0); | |
3277 } | |
3278 | |
3279 | |
3280 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { | |
3281 ZoneList<Expression*>* args = expr->arguments(); | |
3282 ASSERT(args->length() == 2); | |
3283 VisitForStackValue(args->at(0)); // Load the object. | |
3284 VisitForAccumulatorValue(args->at(1)); // Load the value. | |
3285 __ Pop(x1); | |
3286 // x0 = value. | |
3287 // x1 = object. | |
3288 | |
3289 Label done; | |
3290 // If the object is a smi, return the value. | |
3291 __ JumpIfSmi(x1, &done); | |
3292 | |
3293 // If the object is not a value type, return the value. | |
3294 __ JumpIfNotObjectType(x1, x10, x11, JS_VALUE_TYPE, &done); | |
3295 | |
3296 // Store the value. | |
3297 __ Str(x0, FieldMemOperand(x1, JSValue::kValueOffset)); | |
3298 // Update the write barrier. Save the value as it will be | |
3299 // overwritten by the write barrier code and is needed afterward. | |
3300 __ Mov(x10, x0); | |
3301 __ RecordWriteField( | |
3302 x1, JSValue::kValueOffset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs); | |
3303 | |
3304 __ Bind(&done); | |
3305 context()->Plug(x0); | |
3306 } | |
3307 | |
3308 | |
3309 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { | |
3310 ZoneList<Expression*>* args = expr->arguments(); | |
3311 ASSERT_EQ(args->length(), 1); | |
3312 | |
3313 // Load the argument into x0 and call the stub. | |
3314 VisitForAccumulatorValue(args->at(0)); | |
3315 | |
3316 NumberToStringStub stub; | |
3317 __ CallStub(&stub); | |
3318 context()->Plug(x0); | |
3319 } | |
3320 | |
3321 | |
3322 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { | |
3323 ZoneList<Expression*>* args = expr->arguments(); | |
3324 ASSERT(args->length() == 1); | |
3325 | |
3326 VisitForAccumulatorValue(args->at(0)); | |
3327 | |
3328 Label done; | |
3329 Register code = x0; | |
3330 Register result = x1; | |
3331 | |
3332 StringCharFromCodeGenerator generator(code, result); | |
3333 generator.GenerateFast(masm_); | |
3334 __ B(&done); | |
3335 | |
3336 NopRuntimeCallHelper call_helper; | |
3337 generator.GenerateSlow(masm_, call_helper); | |
3338 | |
3339 __ Bind(&done); | |
3340 context()->Plug(result); | |
3341 } | |
3342 | |
3343 | |
3344 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { | |
3345 ZoneList<Expression*>* args = expr->arguments(); | |
3346 ASSERT(args->length() == 2); | |
3347 | |
3348 VisitForStackValue(args->at(0)); | |
3349 VisitForAccumulatorValue(args->at(1)); | |
3350 | |
3351 Register object = x1; | |
3352 Register index = x0; | |
3353 Register result = x3; | |
3354 | |
3355 __ Pop(object); | |
3356 | |
3357 Label need_conversion; | |
3358 Label index_out_of_range; | |
3359 Label done; | |
3360 StringCharCodeAtGenerator generator(object, | |
3361 index, | |
3362 result, | |
3363 &need_conversion, | |
3364 &need_conversion, | |
3365 &index_out_of_range, | |
3366 STRING_INDEX_IS_NUMBER); | |
3367 generator.GenerateFast(masm_); | |
3368 __ B(&done); | |
3369 | |
3370 __ Bind(&index_out_of_range); | |
3371 // When the index is out of range, the spec requires us to return NaN. | |
3372 __ LoadRoot(result, Heap::kNanValueRootIndex); | |
3373 __ B(&done); | |
3374 | |
3375 __ Bind(&need_conversion); | |
3376 // Load the undefined value into the result register, which will | |
3377 // trigger conversion. | |
3378 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | |
3379 __ B(&done); | |
3380 | |
3381 NopRuntimeCallHelper call_helper; | |
3382 generator.GenerateSlow(masm_, call_helper); | |
3383 | |
3384 __ Bind(&done); | |
3385 context()->Plug(result); | |
3386 } | |
3387 | |
3388 | |
3389 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { | |
3390 ZoneList<Expression*>* args = expr->arguments(); | |
3391 ASSERT(args->length() == 2); | |
3392 | |
3393 VisitForStackValue(args->at(0)); | |
3394 VisitForAccumulatorValue(args->at(1)); | |
3395 | |
3396 Register object = x1; | |
3397 Register index = x0; | |
3398 Register result = x0; | |
3399 | |
3400 __ Pop(object); | |
3401 | |
3402 Label need_conversion; | |
3403 Label index_out_of_range; | |
3404 Label done; | |
3405 StringCharAtGenerator generator(object, | |
3406 index, | |
3407 x3, | |
3408 result, | |
3409 &need_conversion, | |
3410 &need_conversion, | |
3411 &index_out_of_range, | |
3412 STRING_INDEX_IS_NUMBER); | |
3413 generator.GenerateFast(masm_); | |
3414 __ B(&done); | |
3415 | |
3416 __ Bind(&index_out_of_range); | |
3417 // When the index is out of range, the spec requires us to return | |
3418 // the empty string. | |
3419 __ LoadRoot(result, Heap::kempty_stringRootIndex); | |
3420 __ B(&done); | |
3421 | |
3422 __ Bind(&need_conversion); | |
3423 // Move smi zero into the result register, which will trigger conversion. | |
3424 __ Mov(result, Smi::FromInt(0)); | |
3425 __ B(&done); | |
3426 | |
3427 NopRuntimeCallHelper call_helper; | |
3428 generator.GenerateSlow(masm_, call_helper); | |
3429 | |
3430 __ Bind(&done); | |
3431 context()->Plug(result); | |
3432 } | |
3433 | |
3434 | |
3435 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { | |
3436 ASM_LOCATION("FullCodeGenerator::EmitStringAdd"); | |
3437 ZoneList<Expression*>* args = expr->arguments(); | |
3438 ASSERT_EQ(2, args->length()); | |
3439 | |
3440 VisitForStackValue(args->at(0)); | |
3441 VisitForAccumulatorValue(args->at(1)); | |
3442 | |
3443 __ Pop(x1); | |
3444 StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED); | |
3445 __ CallStub(&stub); | |
3446 | |
3447 context()->Plug(x0); | |
3448 } | |
3449 | |
3450 | |
3451 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { | |
3452 ZoneList<Expression*>* args = expr->arguments(); | |
3453 ASSERT_EQ(2, args->length()); | |
3454 VisitForStackValue(args->at(0)); | |
3455 VisitForStackValue(args->at(1)); | |
3456 | |
3457 StringCompareStub stub; | |
3458 __ CallStub(&stub); | |
3459 context()->Plug(x0); | |
3460 } | |
3461 | |
3462 | |
3463 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { | |
3464 // Load the argument on the stack and call the runtime function. | |
3465 ZoneList<Expression*>* args = expr->arguments(); | |
3466 ASSERT(args->length() == 1); | |
3467 VisitForStackValue(args->at(0)); | |
3468 __ CallRuntime(Runtime::kMath_log, 1); | |
3469 context()->Plug(x0); | |
3470 } | |
3471 | |
3472 | |
3473 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { | |
3474 // Load the argument on the stack and call the runtime function. | |
3475 ZoneList<Expression*>* args = expr->arguments(); | |
3476 ASSERT(args->length() == 1); | |
3477 VisitForStackValue(args->at(0)); | |
3478 __ CallRuntime(Runtime::kMath_sqrt, 1); | |
3479 context()->Plug(x0); | |
3480 } | |
3481 | |
3482 | |
3483 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { | |
3484 ASM_LOCATION("FullCodeGenerator::EmitCallFunction"); | |
3485 ZoneList<Expression*>* args = expr->arguments(); | |
3486 ASSERT(args->length() >= 2); | |
3487 | |
3488 int arg_count = args->length() - 2; // 2 ~ receiver and function. | |
3489 for (int i = 0; i < arg_count + 1; i++) { | |
3490 VisitForStackValue(args->at(i)); | |
3491 } | |
3492 VisitForAccumulatorValue(args->last()); // Function. | |
3493 | |
3494 Label runtime, done; | |
3495 // Check for non-function argument (including proxy). | |
3496 __ JumpIfSmi(x0, &runtime); | |
3497 __ JumpIfNotObjectType(x0, x1, x1, JS_FUNCTION_TYPE, &runtime); | |
3498 | |
3499 // InvokeFunction requires the function in x1. Move it in there. | |
3500 __ Mov(x1, x0); | |
3501 ParameterCount count(arg_count); | |
3502 __ InvokeFunction(x1, count, CALL_FUNCTION, NullCallWrapper()); | |
3503 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
3504 __ B(&done); | |
3505 | |
3506 __ Bind(&runtime); | |
3507 __ Push(x0); | |
3508 __ CallRuntime(Runtime::kCall, args->length()); | |
3509 __ Bind(&done); | |
3510 | |
3511 context()->Plug(x0); | |
3512 } | |
3513 | |
3514 | |
3515 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) { | |
3516 RegExpConstructResultStub stub; | |
3517 ZoneList<Expression*>* args = expr->arguments(); | |
3518 ASSERT(args->length() == 3); | |
3519 VisitForStackValue(args->at(0)); | |
3520 VisitForStackValue(args->at(1)); | |
3521 VisitForAccumulatorValue(args->at(2)); | |
3522 __ Pop(x1, x2); | |
3523 __ CallStub(&stub); | |
3524 context()->Plug(x0); | |
3525 } | |
3526 | |
3527 | |
3528 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { | |
3529 ZoneList<Expression*>* args = expr->arguments(); | |
3530 ASSERT_EQ(2, args->length()); | |
3531 ASSERT_NE(NULL, args->at(0)->AsLiteral()); | |
3532 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value(); | |
3533 | |
3534 Handle<FixedArray> jsfunction_result_caches( | |
3535 isolate()->native_context()->jsfunction_result_caches()); | |
3536 if (jsfunction_result_caches->length() <= cache_id) { | |
3537 __ Abort(kAttemptToUseUndefinedCache); | |
3538 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); | |
3539 context()->Plug(x0); | |
3540 return; | |
3541 } | |
3542 | |
3543 VisitForAccumulatorValue(args->at(1)); | |
3544 | |
3545 Register key = x0; | |
3546 Register cache = x1; | |
3547 __ Ldr(cache, GlobalObjectMemOperand()); | |
3548 __ Ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset)); | |
3549 __ Ldr(cache, ContextMemOperand(cache, | |
3550 Context::JSFUNCTION_RESULT_CACHES_INDEX)); | |
3551 __ Ldr(cache, | |
3552 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); | |
3553 | |
3554 Label done; | |
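  // The cache keeps a 'finger' index pointing at the most recently used entry.
  // Entries are (key, value) pairs in consecutive FixedArray slots, so the Ldp
  // below fetches both the cached key and its value in one go.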
3555 __ Ldrsw(x2, UntagSmiFieldMemOperand(cache, | |
3556 JSFunctionResultCache::kFingerOffset)); | |
3557 __ Add(x3, cache, FixedArray::kHeaderSize - kHeapObjectTag); | |
3558 __ Add(x3, x3, Operand(x2, LSL, kPointerSizeLog2)); | |
3559 | |
3560 // Load the key and data from the cache. | |
3561 __ Ldp(x2, x3, MemOperand(x3)); | |
3562 | |
3563 __ Cmp(key, x2); | |
3564 __ CmovX(x0, x3, eq); | |
3565 __ B(eq, &done); | |
3566 | |
3567 // Call runtime to perform the lookup. | |
3568 __ Push(cache, key); | |
3569 __ CallRuntime(Runtime::kGetFromCache, 2); | |
3570 | |
3571 __ Bind(&done); | |
3572 context()->Plug(x0); | |
3573 } | |
3574 | |
3575 | |
3576 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { | |
3577 ZoneList<Expression*>* args = expr->arguments(); | |
3578 VisitForAccumulatorValue(args->at(0)); | |
3579 | |
3580 Label materialize_true, materialize_false; | |
3581 Label* if_true = NULL; | |
3582 Label* if_false = NULL; | |
3583 Label* fall_through = NULL; | |
3584 context()->PrepareTest(&materialize_true, &materialize_false, | |
3585 &if_true, &if_false, &fall_through); | |
3586 | |
3587 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset)); | |
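  // A string has a cached array index iff all the bits covered by
  // kContainsCachedArrayIndexMask are clear, so a zero Tst result (eq) below
  // selects if_true.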
3588 __ Tst(x10, String::kContainsCachedArrayIndexMask); | |
3589 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
3590 Split(eq, if_true, if_false, fall_through); | |
3591 | |
3592 context()->Plug(if_true, if_false); | |
3593 } | |
3594 | |
3595 | |
3596 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { | |
3597 ZoneList<Expression*>* args = expr->arguments(); | |
3598 ASSERT(args->length() == 1); | |
3599 VisitForAccumulatorValue(args->at(0)); | |
3600 | |
3601 __ AssertString(x0); | |
3602 | |
3603 __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset)); | |
3604 __ IndexFromHash(x10, x0); | |
3605 | |
3606 context()->Plug(x0); | |
3607 } | |
3608 | |
3609 | |
3610 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { | |
3611 ASM_LOCATION("FullCodeGenerator::EmitFastAsciiArrayJoin"); | |
3612 | |
3613 ZoneList<Expression*>* args = expr->arguments(); | |
3614 ASSERT(args->length() == 2); | |
3615 VisitForStackValue(args->at(1)); | |
3616 VisitForAccumulatorValue(args->at(0)); | |
3617 | |
3618 Register array = x0; | |
3619 Register result = x0; | |
3620 Register elements = x1; | |
3621 Register element = x2; | |
3622 Register separator = x3; | |
3623 Register array_length = x4; | |
3624 Register result_pos = x5; | |
3625 Register map = x6; | |
3626 Register string_length = x10; | |
3627 Register elements_end = x11; | |
3628 Register string = x12; | |
3629 Register scratch1 = x13; | |
3630 Register scratch2 = x14; | |
3631 Register scratch3 = x7; | |
3632 Register separator_length = x15; | |
3633 | |
3634 Label bailout, done, one_char_separator, long_separator, | |
3635 non_trivial_array, not_size_one_array, loop, | |
3636 empty_separator_loop, one_char_separator_loop, | |
3637 one_char_separator_loop_entry, long_separator_loop; | |
3638 | |
3639 // The separator operand is on the stack. | |
3640 __ Pop(separator); | |
3641 | |
3642 // Check that the array is a JSArray. | |
3643 __ JumpIfSmi(array, &bailout); | |
3644 __ JumpIfNotObjectType(array, map, scratch1, JS_ARRAY_TYPE, &bailout); | |
3645 | |
3646 // Check that the array has fast elements. | |
3647 __ CheckFastElements(map, scratch1, &bailout); | |
3648 | |
3649 // If the array has length zero, return the empty string. | |
3650 // Load and untag the length of the array. | |
3651 // It is an unsigned value, so we can skip sign extension. | |
3652 // We assume little endianness. | |
3653 __ Ldrsw(array_length, | |
3654 UntagSmiFieldMemOperand(array, JSArray::kLengthOffset)); | |
3655 __ Cbnz(array_length, &non_trivial_array); | |
3656 __ LoadRoot(result, Heap::kempty_stringRootIndex); | |
3657 __ B(&done); | |
3658 | |
3659 __ Bind(&non_trivial_array); | |
3660 // Get the FixedArray containing array's elements. | |
3661 __ Ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset)); | |
3662 | |
3663 // Check that all array elements are sequential ASCII strings, and | |
3664 // accumulate the sum of their lengths. | |
3665 __ Mov(string_length, 0); | |
3666 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag); | |
3667 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); | |
3668 // Loop condition: while (element < elements_end). | |
3669 // Live values in registers: | |
3670 // elements: Fixed array of strings. | |
3671 // array_length: Length of the fixed array of strings (not smi) | |
3672 // separator: Separator string | |
3673 // string_length: Accumulated sum of string lengths (not smi). | |
3674 // element: Current array element. | |
3675 // elements_end: Array end. | |
3676 if (FLAG_debug_code) { | |
3677 __ Cmp(array_length, 0); | |
3678 __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin); | |
3679 } | |
3680 __ Bind(&loop); | |
3681 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex)); | |
3682 __ JumpIfSmi(string, &bailout); | |
3683 __ Ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); | |
3684 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | |
3685 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); | |
3686 __ Ldrsw(scratch1, | |
3687 UntagSmiFieldMemOperand(string, SeqOneByteString::kLengthOffset)); | |
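  // Accumulate the total length. Adds sets the flags, so a signed overflow of
  // the running sum (vs) sends us to the bailout path.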
3688 __ Adds(string_length, string_length, scratch1); | |
3689 __ B(vs, &bailout); | |
3690 __ Cmp(element, elements_end); | |
3691 __ B(lt, &loop); | |
3692 | |
3693 // If array_length is 1, return elements[0], a string. | |
3694 __ Cmp(array_length, 1); | |
3695 __ B(ne, ¬_size_one_array); | |
3696 __ Ldr(result, FieldMemOperand(elements, FixedArray::kHeaderSize)); | |
3697 __ B(&done); | |
3698 | |
3699 __ Bind(¬_size_one_array); | |
3700 | |
3701 // Live values in registers: | |
3702 // separator: Separator string | |
3703 // array_length: Length of the array (not smi). | |
3704 // string_length: Sum of string lengths (not smi). | |
3705 // elements: FixedArray of strings. | |
3706 | |
3707 // Check that the separator is a flat ASCII string. | |
3708 __ JumpIfSmi(separator, &bailout); | |
3709 __ Ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset)); | |
3710 __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | |
3711 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); | |
3712 | |
3713 // Add (separator length times array_length) - separator length to the | |
3714 // string_length to get the length of the result string. | |
3715 // Load the separator length as untagged. | |
3716 // We assume little endianness, and that the length is positive. | |
3717 __ Ldrsw(separator_length, | |
3718 UntagSmiFieldMemOperand(separator, | |
3719 SeqOneByteString::kLengthOffset)); | |
3720 __ Sub(string_length, string_length, separator_length); | |
3721 __ Umaddl(string_length, array_length.W(), separator_length.W(), | |
3722 string_length); | |
3723 | |
3724 // Get first element in the array. | |
3725 __ Add(element, elements, FixedArray::kHeaderSize - kHeapObjectTag); | |
3726 // Live values in registers: | |
3727 // element: First array element | |
3728 // separator: Separator string | |
3729 // string_length: Length of result string (not smi) | |
3730 // array_length: Length of the array (not smi). | |
3731 __ AllocateAsciiString(result, string_length, scratch1, scratch2, scratch3, | |
3732 &bailout); | |
3733 | |
3734 // Prepare for looping. Set up elements_end to end of the array. Set | |
3735 // result_pos to the position of the result where to write the first | |
3736 // character. | |
3737 // TODO(all): useless unless AllocateAsciiString trashes the register. | |
3738 __ Add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); | |
3739 __ Add(result_pos, result, SeqOneByteString::kHeaderSize - kHeapObjectTag); | |
3740 | |
3741 // Check the length of the separator. | |
3742 __ Cmp(separator_length, 1); | |
3743 __ B(eq, &one_char_separator); | |
3744 __ B(gt, &long_separator); | |
3745 | |
3746 // Empty separator case | |
3747 __ Bind(&empty_separator_loop); | |
3748 // Live values in registers: | |
3749 // result_pos: the position to which we are currently copying characters. | |
3750 // element: Current array element. | |
3751 // elements_end: Array end. | |
3752 | |
3753 // Copy next array element to the result. | |
3754 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex)); | |
3755 __ Ldrsw(string_length, | |
3756 UntagSmiFieldMemOperand(string, String::kLengthOffset)); | |
3757 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); | |
3758 __ CopyBytes(result_pos, string, string_length, scratch1); | |
3759 __ Cmp(element, elements_end); | |
3760 __ B(lt, &empty_separator_loop); // End while (element < elements_end). | |
3761 __ B(&done); | |
3762 | |
3763 // One-character separator case | |
3764 __ Bind(&one_char_separator); | |
3765 // Replace separator with its ASCII character value. | |
3766 __ Ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); | |
3767 // Jump into the loop after the code that copies the separator, so the first | |
3768 // element is not preceded by a separator | |
3769 __ B(&one_char_separator_loop_entry); | |
3770 | |
3771 __ Bind(&one_char_separator_loop); | |
3772 // Live values in registers: | |
3773 // result_pos: the position to which we are currently copying characters. | |
3774 // element: Current array element. | |
3775 // elements_end: Array end. | |
3776 // separator: Single separator ASCII char (in lower byte). | |
3777 | |
3778 // Copy the separator character to the result. | |
3779 __ Strb(separator, MemOperand(result_pos, 1, PostIndex)); | |
3780 | |
3781 // Copy next array element to the result. | |
3782 __ Bind(&one_char_separator_loop_entry); | |
3783 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex)); | |
3784 __ Ldrsw(string_length, | |
3785 UntagSmiFieldMemOperand(string, String::kLengthOffset)); | |
3786 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); | |
3787 __ CopyBytes(result_pos, string, string_length, scratch1); | |
3788 __ Cmp(element, elements_end); | |
3789 __ B(lt, &one_char_separator_loop); // End while (element < elements_end). | |
3790 __ B(&done); | |
3791 | |
3792 // Long separator case (separator is more than one character). Entry is at the | |
3793 // label long_separator below. | |
3794 __ Bind(&long_separator_loop); | |
3795 // Live values in registers: | |
3796 // result_pos: the position to which we are currently copying characters. | |
3797 // element: Current array element. | |
3798 // elements_end: Array end. | |
3799 // separator: Separator string. | |
3800 | |
3801 // Copy the separator to the result. | |
3802 // TODO(all): hoist next two instructions. | |
3803 __ Ldrsw(string_length, | |
3804 UntagSmiFieldMemOperand(separator, String::kLengthOffset)); | |
3805 __ Add(string, separator, SeqOneByteString::kHeaderSize - kHeapObjectTag); | |
3806 __ CopyBytes(result_pos, string, string_length, scratch1); | |
3807 | |
3808 __ Bind(&long_separator); | |
3809 __ Ldr(string, MemOperand(element, kPointerSize, PostIndex)); | |
3810 __ Ldrsw(string_length, | |
3811 UntagSmiFieldMemOperand(string, String::kLengthOffset)); | |
3812 __ Add(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); | |
3813 __ CopyBytes(result_pos, string, string_length, scratch1); | |
3814 __ Cmp(element, elements_end); | |
3815 __ B(lt, &long_separator_loop); // End while (element < elements_end). | |
3816 __ B(&done); | |
3817 | |
3818 __ Bind(&bailout); | |
3819 // Returning undefined will force slower code to handle it. | |
3820 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | |
3821 __ Bind(&done); | |
3822 context()->Plug(result); | |
3823 } | |
3824 | |
3825 | |
3826 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { | |
3827 Handle<String> name = expr->name(); | |
3828 if (name->length() > 0 && name->Get(0) == '_') { | |
3829 Comment cmnt(masm_, "[ InlineRuntimeCall"); | |
3830 EmitInlineRuntimeCall(expr); | |
3831 return; | |
3832 } | |
3833 | |
  Comment cmnt(masm_, "[ CallRuntime");
3835 ZoneList<Expression*>* args = expr->arguments(); | |
3836 int arg_count = args->length(); | |
3837 | |
3838 if (expr->is_jsruntime()) { | |
3839 // Push the builtins object as the receiver. | |
3840 __ Ldr(x10, GlobalObjectMemOperand()); | |
3841 __ Ldr(x0, FieldMemOperand(x10, GlobalObject::kBuiltinsOffset)); | |
3842 __ Push(x0); | |
3843 | |
3844 // Load the function from the receiver. | |
3845 __ Mov(x2, Operand(name)); | |
3846 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId()); | |
3847 | |
3848 // Push the target function under the receiver. | |
3849 __ Pop(x10); | |
3850 __ Push(x0, x10); | |
3851 | |
3853 for (int i = 0; i < arg_count; i++) { | |
3854 VisitForStackValue(args->at(i)); | |
3855 } | |
3856 | |
3857 // Record source position of the IC call. | |
3858 SetSourcePosition(expr->position()); | |
3859 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); | |
3860 __ Peek(x1, (arg_count + 1) * kPointerSize); | |
3861 __ CallStub(&stub); | |
3862 | |
3863 // Restore context register. | |
3864 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
3865 | |
3866 context()->DropAndPlug(1, x0); | |
3867 } else { | |
3868 // Push the arguments ("left-to-right"). | |
3869 for (int i = 0; i < arg_count; i++) { | |
3870 VisitForStackValue(args->at(i)); | |
3871 } | |
3872 | |
3873 // Call the C runtime function. | |
3874 __ CallRuntime(expr->function(), arg_count); | |
3875 context()->Plug(x0); | |
3876 } | |
3877 } | |
3878 | |
3879 | |
3880 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | |
3881 switch (expr->op()) { | |
3882 case Token::DELETE: { | |
3883 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | |
3884 Property* property = expr->expression()->AsProperty(); | |
3885 VariableProxy* proxy = expr->expression()->AsVariableProxy(); | |
3886 | |
3887 if (property != NULL) { | |
3888 VisitForStackValue(property->obj()); | |
3889 VisitForStackValue(property->key()); | |
3890 __ Mov(x10, Smi::FromInt(strict_mode())); | |
3891 __ Push(x10); | |
3892 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | |
3893 context()->Plug(x0); | |
3894 } else if (proxy != NULL) { | |
3895 Variable* var = proxy->var(); | |
3896 // Delete of an unqualified identifier is disallowed in strict mode | |
3897 // but "delete this" is allowed. | |
3898 ASSERT(strict_mode() == SLOPPY || var->is_this()); | |
3899 if (var->IsUnallocated()) { | |
3900 __ Ldr(x12, GlobalObjectMemOperand()); | |
3901 __ Mov(x11, Operand(var->name())); | |
3902 __ Mov(x10, Smi::FromInt(SLOPPY)); | |
3903 __ Push(x12, x11, x10); | |
3904 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); | |
3905 context()->Plug(x0); | |
3906 } else if (var->IsStackAllocated() || var->IsContextSlot()) { | |
3907 // Result of deleting non-global, non-dynamic variables is false. | |
3908 // The subexpression does not have side effects. | |
3909 context()->Plug(var->is_this()); | |
3910 } else { | |
3911 // Non-global variable. Call the runtime to try to delete from the | |
3912 // context where the variable was introduced. | |
3913 __ Mov(x2, Operand(var->name())); | |
3914 __ Push(context_register(), x2); | |
3915 __ CallRuntime(Runtime::kDeleteContextSlot, 2); | |
3916 context()->Plug(x0); | |
3917 } | |
3918 } else { | |
3919 // Result of deleting non-property, non-variable reference is true. | |
3920 // The subexpression may have side effects. | |
3921 VisitForEffect(expr->expression()); | |
3922 context()->Plug(true); | |
3923 } | |
      break;
3926 } | |
3927 case Token::VOID: { | |
3928 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); | |
3929 VisitForEffect(expr->expression()); | |
3930 context()->Plug(Heap::kUndefinedValueRootIndex); | |
3931 break; | |
3932 } | |
3933 case Token::NOT: { | |
3934 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); | |
3935 if (context()->IsEffect()) { | |
3936 // Unary NOT has no side effects so it's only necessary to visit the | |
3937 // subexpression. Match the optimizing compiler by not branching. | |
3938 VisitForEffect(expr->expression()); | |
3939 } else if (context()->IsTest()) { | |
3940 const TestContext* test = TestContext::cast(context()); | |
3941 // The labels are swapped for the recursive call. | |
3942 VisitForControl(expr->expression(), | |
3943 test->false_label(), | |
3944 test->true_label(), | |
3945 test->fall_through()); | |
3946 context()->Plug(test->true_label(), test->false_label()); | |
3947 } else { | |
3948 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue()); | |
3949 // TODO(jbramley): This could be much more efficient using (for | |
3950 // example) the CSEL instruction. | |
3951 Label materialize_true, materialize_false, done; | |
3952 VisitForControl(expr->expression(), | |
3953 &materialize_false, | |
3954 &materialize_true, | |
3955 &materialize_true); | |
3956 | |
3957 __ Bind(&materialize_true); | |
3958 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); | |
3959 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); | |
3960 __ B(&done); | |
3961 | |
3962 __ Bind(&materialize_false); | |
3963 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); | |
3964 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); | |
3965 __ B(&done); | |
3966 | |
3967 __ Bind(&done); | |
3968 if (context()->IsStackValue()) { | |
3969 __ Push(result_register()); | |
3970 } | |
3971 } | |
3972 break; | |
3973 } | |
3974 case Token::TYPEOF: { | |
3975 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); | |
3976 { | |
3977 StackValueContext context(this); | |
3978 VisitForTypeofValue(expr->expression()); | |
3979 } | |
3980 __ CallRuntime(Runtime::kTypeof, 1); | |
3981 context()->Plug(x0); | |
3982 break; | |
3983 } | |
3984 default: | |
3985 UNREACHABLE(); | |
3986 } | |
3987 } | |
3988 | |
3989 | |
3990 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { | |
3991 ASSERT(expr->expression()->IsValidLeftHandSide()); | |
3992 | |
3993 Comment cmnt(masm_, "[ CountOperation"); | |
3994 SetSourcePosition(expr->position()); | |
3995 | |
3996 // Expression can only be a property, a global or a (parameter or local) | |
3997 // slot. | |
3998 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; | |
3999 LhsKind assign_type = VARIABLE; | |
4000 Property* prop = expr->expression()->AsProperty(); | |
4001 // In case of a property we use the uninitialized expression context | |
4002 // of the key to detect a named property. | |
4003 if (prop != NULL) { | |
4004 assign_type = | |
4005 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY; | |
4006 } | |
4007 | |
4008 // Evaluate expression and get value. | |
4009 if (assign_type == VARIABLE) { | |
4010 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); | |
4011 AccumulatorValueContext context(this); | |
4012 EmitVariableLoad(expr->expression()->AsVariableProxy()); | |
4013 } else { | |
4014 // Reserve space for result of postfix operation. | |
4015 if (expr->is_postfix() && !context()->IsEffect()) { | |
4016 __ Push(xzr); | |
4017 } | |
4018 if (assign_type == NAMED_PROPERTY) { | |
4019 // Put the object both on the stack and in the accumulator. | |
4020 VisitForAccumulatorValue(prop->obj()); | |
4021 __ Push(x0); | |
4022 EmitNamedPropertyLoad(prop); | |
4023 } else { | |
4024 // KEYED_PROPERTY | |
4025 VisitForStackValue(prop->obj()); | |
4026 VisitForAccumulatorValue(prop->key()); | |
4027 __ Peek(x1, 0); | |
4028 __ Push(x0); | |
4029 EmitKeyedPropertyLoad(prop); | |
4030 } | |
4031 } | |
4032 | |
4033 // We need a second deoptimization point after loading the value | |
  // in case evaluating the property load may have a side effect.
4035 if (assign_type == VARIABLE) { | |
4036 PrepareForBailout(expr->expression(), TOS_REG); | |
4037 } else { | |
4038 PrepareForBailoutForId(prop->LoadId(), TOS_REG); | |
4039 } | |
4040 | |
4041 // Inline smi case if we are in a loop. | |
4042 Label stub_call, done; | |
4043 JumpPatchSite patch_site(masm_); | |
4044 | |
4045 int count_value = expr->op() == Token::INC ? 1 : -1; | |
4046 if (ShouldInlineSmiCase(expr->op())) { | |
4047 Label slow; | |
4048 patch_site.EmitJumpIfNotSmi(x0, &slow); | |
4049 | |
4050 // Save result for postfix expressions. | |
4051 if (expr->is_postfix()) { | |
4052 if (!context()->IsEffect()) { | |
4053 // Save the result on the stack. If we have a named or keyed property we | |
4054 // store the result under the receiver that is currently on top of the | |
4055 // stack. | |
4056 switch (assign_type) { | |
4057 case VARIABLE: | |
4058 __ Push(x0); | |
4059 break; | |
4060 case NAMED_PROPERTY: | |
4061 __ Poke(x0, kPointerSize); | |
4062 break; | |
4063 case KEYED_PROPERTY: | |
4064 __ Poke(x0, kPointerSize * 2); | |
4065 break; | |
4066 } | |
4067 } | |
4068 } | |
4069 | |
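    // Tagged smi addition is plain machine addition (the tag bits are zero),
    // so Adds both updates x0 and sets the flags. A signed overflow (vs) means
    // the result no longer fits in a smi; in that case undo the addition and
    // fall back to the BinaryOpIC stub.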
4070 __ Adds(x0, x0, Smi::FromInt(count_value)); | |
4071 __ B(vc, &done); | |
4072 // Call stub. Undo operation first. | |
4073 __ Sub(x0, x0, Smi::FromInt(count_value)); | |
4074 __ B(&stub_call); | |
4075 __ Bind(&slow); | |
4076 } | |
4077 ToNumberStub convert_stub; | |
4078 __ CallStub(&convert_stub); | |
4079 | |
4080 // Save result for postfix expressions. | |
4081 if (expr->is_postfix()) { | |
4082 if (!context()->IsEffect()) { | |
4083 // Save the result on the stack. If we have a named or keyed property | |
4084 // we store the result under the receiver that is currently on top | |
4085 // of the stack. | |
4086 switch (assign_type) { | |
4087 case VARIABLE: | |
4088 __ Push(x0); | |
4089 break; | |
4090 case NAMED_PROPERTY: | |
4091 __ Poke(x0, kXRegSize); | |
4092 break; | |
4093 case KEYED_PROPERTY: | |
4094 __ Poke(x0, 2 * kXRegSize); | |
4095 break; | |
4096 } | |
4097 } | |
4098 } | |
4099 | |
4100 __ Bind(&stub_call); | |
4101 __ Mov(x1, x0); | |
4102 __ Mov(x0, Smi::FromInt(count_value)); | |
4103 | |
4104 // Record position before stub call. | |
4105 SetSourcePosition(expr->position()); | |
4106 | |
4107 { | |
4108 Assembler::BlockPoolsScope scope(masm_); | |
4109 BinaryOpICStub stub(Token::ADD, NO_OVERWRITE); | |
4110 CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId()); | |
4111 patch_site.EmitPatchInfo(); | |
4112 } | |
4113 __ Bind(&done); | |
4114 | |
4115 // Store the value returned in x0. | |
4116 switch (assign_type) { | |
4117 case VARIABLE: | |
4118 if (expr->is_postfix()) { | |
4119 { EffectContext context(this); | |
4120 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | |
4121 Token::ASSIGN); | |
4122 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
4123 context.Plug(x0); | |
4124 } | |
        // For all contexts except EffectContext we have the result on
        // top of the stack.
4127 if (!context()->IsEffect()) { | |
4128 context()->PlugTOS(); | |
4129 } | |
4130 } else { | |
4131 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | |
4132 Token::ASSIGN); | |
4133 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
4134 context()->Plug(x0); | |
4135 } | |
4136 break; | |
4137 case NAMED_PROPERTY: { | |
4138 __ Mov(x2, Operand(prop->key()->AsLiteral()->value())); | |
4139 __ Pop(x1); | |
4140 CallStoreIC(expr->CountStoreFeedbackId()); | |
4141 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
4142 if (expr->is_postfix()) { | |
4143 if (!context()->IsEffect()) { | |
4144 context()->PlugTOS(); | |
4145 } | |
4146 } else { | |
4147 context()->Plug(x0); | |
4148 } | |
4149 break; | |
4150 } | |
4151 case KEYED_PROPERTY: { | |
4152 __ Pop(x1); // Key. | |
4153 __ Pop(x2); // Receiver. | |
4154 Handle<Code> ic = strict_mode() == SLOPPY | |
4155 ? isolate()->builtins()->KeyedStoreIC_Initialize() | |
4156 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); | |
4157 CallIC(ic, expr->CountStoreFeedbackId()); | |
4158 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
4159 if (expr->is_postfix()) { | |
4160 if (!context()->IsEffect()) { | |
4161 context()->PlugTOS(); | |
4162 } | |
4163 } else { | |
4164 context()->Plug(x0); | |
4165 } | |
4166 break; | |
4167 } | |
4168 } | |
4169 } | |
4170 | |
4171 | |
4172 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { | |
4173 ASSERT(!context()->IsEffect()); | |
4174 ASSERT(!context()->IsTest()); | |
4175 VariableProxy* proxy = expr->AsVariableProxy(); | |
4176 if (proxy != NULL && proxy->var()->IsUnallocated()) { | |
4177 Comment cmnt(masm_, "Global variable"); | |
4178 __ Ldr(x0, GlobalObjectMemOperand()); | |
4179 __ Mov(x2, Operand(proxy->name())); | |
4180 // Use a regular load, not a contextual load, to avoid a reference | |
4181 // error. | |
4182 CallLoadIC(NOT_CONTEXTUAL); | |
4183 PrepareForBailout(expr, TOS_REG); | |
4184 context()->Plug(x0); | |
4185 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { | |
4186 Label done, slow; | |
4187 | |
4188 // Generate code for loading from variables potentially shadowed | |
4189 // by eval-introduced variables. | |
4190 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); | |
4191 | |
4192 __ Bind(&slow); | |
4193 __ Mov(x0, Operand(proxy->name())); | |
4194 __ Push(cp, x0); | |
4195 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); | |
4196 PrepareForBailout(expr, TOS_REG); | |
4197 __ Bind(&done); | |
4198 | |
4199 context()->Plug(x0); | |
4200 } else { | |
4201 // This expression cannot throw a reference error at the top level. | |
4202 VisitInDuplicateContext(expr); | |
4203 } | |
4204 } | |
4205 | |
4206 | |
4207 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, | |
4208 Expression* sub_expr, | |
4209 Handle<String> check) { | |
4210 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof"); | |
4211 Comment cmnt(masm_, "[ EmitLiteralCompareTypeof"); | |
4212 Label materialize_true, materialize_false; | |
4213 Label* if_true = NULL; | |
4214 Label* if_false = NULL; | |
4215 Label* fall_through = NULL; | |
4216 context()->PrepareTest(&materialize_true, &materialize_false, | |
4217 &if_true, &if_false, &fall_through); | |
4218 | |
4219 { AccumulatorValueContext context(this); | |
4220 VisitForTypeofValue(sub_expr); | |
4221 } | |
4222 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
4223 | |
4224 if (check->Equals(isolate()->heap()->number_string())) { | |
4225 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string"); | |
4226 __ JumpIfSmi(x0, if_true); | |
4227 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); | |
4228 __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex); | |
4229 Split(eq, if_true, if_false, fall_through); | |
4230 } else if (check->Equals(isolate()->heap()->string_string())) { | |
4231 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string"); | |
4232 __ JumpIfSmi(x0, if_false); | |
4233 // Check for undetectable objects => false. | |
4234 __ JumpIfObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE, if_false, ge); | |
4235 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset)); | |
4236 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_true, if_false, | |
4237 fall_through); | |
4238 } else if (check->Equals(isolate()->heap()->symbol_string())) { | |
4239 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string"); | |
4240 __ JumpIfSmi(x0, if_false); | |
4241 __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE); | |
4242 Split(eq, if_true, if_false, fall_through); | |
4243 } else if (check->Equals(isolate()->heap()->boolean_string())) { | |
4244 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string"); | |
4245 __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true); | |
4246 __ CompareRoot(x0, Heap::kFalseValueRootIndex); | |
4247 Split(eq, if_true, if_false, fall_through); | |
4248 } else if (FLAG_harmony_typeof && | |
4249 check->Equals(isolate()->heap()->null_string())) { | |
4250 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof null_string"); | |
4251 __ CompareRoot(x0, Heap::kNullValueRootIndex); | |
4252 Split(eq, if_true, if_false, fall_through); | |
4253 } else if (check->Equals(isolate()->heap()->undefined_string())) { | |
4254 ASM_LOCATION( | |
4255 "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string"); | |
4256 __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, if_true); | |
4257 __ JumpIfSmi(x0, if_false); | |
4258 // Check for undetectable objects => true. | |
4259 __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset)); | |
4260 __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset)); | |
4261 __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true, | |
4262 fall_through); | |
4263 } else if (check->Equals(isolate()->heap()->function_string())) { | |
4264 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string"); | |
4265 __ JumpIfSmi(x0, if_false); | |
4266 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | |
4267 __ JumpIfObjectType(x0, x10, x11, JS_FUNCTION_TYPE, if_true); | |
4268 __ CompareAndSplit(x11, JS_FUNCTION_PROXY_TYPE, eq, if_true, if_false, | |
4269 fall_through); | |
4270 | |
4271 } else if (check->Equals(isolate()->heap()->object_string())) { | |
4272 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string"); | |
4273 __ JumpIfSmi(x0, if_false); | |
4274 if (!FLAG_harmony_typeof) { | |
4275 __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true); | |
4276 } | |
4277 // Check for JS objects => true. | |
4278 Register map = x10; | |
4279 __ JumpIfObjectType(x0, map, x11, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, | |
4280 if_false, lt); | |
4281 __ CompareInstanceType(map, x11, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); | |
4282 __ B(gt, if_false); | |
4283 // Check for undetectable objects => false. | |
4284 __ Ldrb(x10, FieldMemOperand(map, Map::kBitFieldOffset)); | |
4285 | |
4286 __ TestAndSplit(x10, 1 << Map::kIsUndetectable, if_true, if_false, | |
4287 fall_through); | |
4288 | |
4289 } else { | |
4290 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other"); | |
4291 if (if_false != fall_through) __ B(if_false); | |
4292 } | |
4293 context()->Plug(if_true, if_false); | |
4294 } | |
4295 | |
4296 | |
4297 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { | |
4298 Comment cmnt(masm_, "[ CompareOperation"); | |
4299 SetSourcePosition(expr->position()); | |
4300 | |
4301 // Try to generate an optimized comparison with a literal value. | |
4302 // TODO(jbramley): This only checks common values like NaN or undefined. | |
4303 // Should it also handle A64 immediate operands? | |
4304 if (TryLiteralCompare(expr)) { | |
4305 return; | |
4306 } | |
4307 | |
4308 // Assign labels according to context()->PrepareTest. | |
4309 Label materialize_true; | |
4310 Label materialize_false; | |
4311 Label* if_true = NULL; | |
4312 Label* if_false = NULL; | |
4313 Label* fall_through = NULL; | |
4314 context()->PrepareTest(&materialize_true, &materialize_false, | |
4315 &if_true, &if_false, &fall_through); | |
4316 | |
4317 Token::Value op = expr->op(); | |
4318 VisitForStackValue(expr->left()); | |
4319 switch (op) { | |
4320 case Token::IN: | |
4321 VisitForStackValue(expr->right()); | |
4322 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); | |
4323 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); | |
4324 __ CompareRoot(x0, Heap::kTrueValueRootIndex); | |
4325 Split(eq, if_true, if_false, fall_through); | |
4326 break; | |
4327 | |
4328 case Token::INSTANCEOF: { | |
4329 VisitForStackValue(expr->right()); | |
4330 InstanceofStub stub(InstanceofStub::kNoFlags); | |
4331 __ CallStub(&stub); | |
4332 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
4333 // The stub returns 0 for true. | |
4334 __ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through); | |
4335 break; | |
4336 } | |
4337 | |
4338 default: { | |
4339 VisitForAccumulatorValue(expr->right()); | |
4340 Condition cond = CompareIC::ComputeCondition(op); | |
4341 | |
4342 // Pop the stack value. | |
4343 __ Pop(x1); | |
4344 | |
4345 JumpPatchSite patch_site(masm_); | |
4346 if (ShouldInlineSmiCase(op)) { | |
4347 Label slow_case; | |
4348 patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case); | |
4349 __ Cmp(x1, x0); | |
4350 Split(cond, if_true, if_false, NULL); | |
4351 __ Bind(&slow_case); | |
4352 } | |
4353 | |
4354 // Record position and call the compare IC. | |
4355 SetSourcePosition(expr->position()); | |
4356 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); | |
4357 CallIC(ic, expr->CompareOperationFeedbackId()); | |
4358 patch_site.EmitPatchInfo(); | |
4359 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
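      // The compare IC leaves a value in x0 whose comparison against zero,
      // using 'cond', yields the result of the original comparison.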
4360 __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through); | |
4361 } | |
4362 } | |
4363 | |
4364 // Convert the result of the comparison into one expected for this | |
4365 // expression's context. | |
4366 context()->Plug(if_true, if_false); | |
4367 } | |
4368 | |
4369 | |
4370 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, | |
4371 Expression* sub_expr, | |
4372 NilValue nil) { | |
4373 ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil"); | |
4374 Label materialize_true, materialize_false; | |
4375 Label* if_true = NULL; | |
4376 Label* if_false = NULL; | |
4377 Label* fall_through = NULL; | |
4378 context()->PrepareTest(&materialize_true, &materialize_false, | |
4379 &if_true, &if_false, &fall_through); | |
4380 | |
4381 VisitForAccumulatorValue(sub_expr); | |
4382 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
4383 | |
4384 if (expr->op() == Token::EQ_STRICT) { | |
4385 Heap::RootListIndex nil_value = nil == kNullValue ? | |
4386 Heap::kNullValueRootIndex : | |
4387 Heap::kUndefinedValueRootIndex; | |
4388 __ CompareRoot(x0, nil_value); | |
4389 Split(eq, if_true, if_false, fall_through); | |
4390 } else { | |
4391 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); | |
4392 CallIC(ic, expr->CompareOperationFeedbackId()); | |
4393 __ CompareAndSplit(x0, 0, ne, if_true, if_false, fall_through); | |
4394 } | |
4395 | |
4396 context()->Plug(if_true, if_false); | |
4397 } | |
4398 | |
4399 | |
4400 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { | |
4401 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
4402 context()->Plug(x0); | |
4403 } | |
4404 | |
4405 | |
4406 void FullCodeGenerator::VisitYield(Yield* expr) { | |
4407 Comment cmnt(masm_, "[ Yield"); | |
4408 // Evaluate yielded value first; the initial iterator definition depends on | |
4409 // this. It stays on the stack while we update the iterator. | |
4410 VisitForStackValue(expr->expression()); | |
4411 | |
4412 // TODO(jbramley): Tidy this up once the merge is done, using named registers | |
4413   // and suchlike. The implementation changes a little on bleeding_edge, so I | |
4414 // don't want to spend too much time on it now. | |
4415 | |
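  // Overview of the yield kinds handled below (an informal sketch; the exact
  // desugaring is determined by the parser, not by this back-end):
  //   Yield::INITIAL    - the implicit first suspend of a freshly created
  //                       generator; the value is not boxed.
  //   Yield::SUSPEND    - an explicit 'yield expr'; the value is boxed into an
  //                       iterator result and then shares INITIAL's code.
  //   Yield::FINAL      - the implicit yield of the return value; it closes
  //                       the generator.
  //   Yield::DELEGATING - 'yield* iterable', expanded into the loop below.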
4416 switch (expr->yield_kind()) { | |
4417 case Yield::SUSPEND: | |
4418 // Pop value from top-of-stack slot; box result into result register. | |
4419 EmitCreateIteratorResult(false); | |
4420 __ Push(result_register()); | |
4421 // Fall through. | |
4422 case Yield::INITIAL: { | |
4423 Label suspend, continuation, post_runtime, resume; | |
4424 | |
4425 __ B(&suspend); | |
4426 | |
4427 // TODO(jbramley): This label is bound here because the following code | |
4428 // looks at its pos(). Is it possible to do something more efficient here, | |
4429 // perhaps using Adr? | |
4430 __ Bind(&continuation); | |
4431 __ B(&resume); | |
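      // The code offset of 'continuation' is stored in the generator object
      // below; EmitGeneratorResume later adds that offset to the code entry
      // point and branches there to resume execution from this point.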
4432 | |
4433 __ Bind(&suspend); | |
4434 VisitForAccumulatorValue(expr->generator_object()); | |
4435 ASSERT((continuation.pos() > 0) && Smi::IsValid(continuation.pos())); | |
4436 __ Mov(x1, Smi::FromInt(continuation.pos())); | |
4437 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset)); | |
4438 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset)); | |
4439 __ Mov(x1, cp); | |
4440 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2, | |
4441 kLRHasBeenSaved, kDontSaveFPRegs); | |
4442 __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset); | |
4443 __ Cmp(__ StackPointer(), x1); | |
4444 __ B(eq, &post_runtime); | |
4445 __ Push(x0); // generator object | |
4446 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | |
4447 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
4448 __ Bind(&post_runtime); | |
4449 __ Pop(result_register()); | |
4450 EmitReturnSequence(); | |
4451 | |
4452 __ Bind(&resume); | |
4453 context()->Plug(result_register()); | |
4454 break; | |
4455 } | |
4456 | |
4457 case Yield::FINAL: { | |
4458 VisitForAccumulatorValue(expr->generator_object()); | |
4459 __ Mov(x1, Smi::FromInt(JSGeneratorObject::kGeneratorClosed)); | |
4460 __ Str(x1, FieldMemOperand(result_register(), | |
4461 JSGeneratorObject::kContinuationOffset)); | |
4462 // Pop value from top-of-stack slot, box result into result register. | |
4463 EmitCreateIteratorResult(true); | |
4464 EmitUnwindBeforeReturn(); | |
4465 EmitReturnSequence(); | |
4466 break; | |
4467 } | |
4468 | |
4469 case Yield::DELEGATING: { | |
4470 VisitForStackValue(expr->generator_object()); | |
4471 | |
4472 // Initial stack layout is as follows: | |
4473 // [sp + 1 * kPointerSize] iter | |
4474 // [sp + 0 * kPointerSize] g | |
4475 | |
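      // Taken together, the labels below implement roughly the following
      // JS-level loop for 'yield* iterable' (an informal sketch mirroring the
      // inline comments further down):
      //   let received = undefined;
      //   while (true) {
      //     let result = iter[f](received);  // l_call; f is 'next' or 'throw'.
      //     if (result.done) break;          // l_loop.
      //     received = yield result;         // l_try; re-yield without boxing.
      //   }
      //   result.value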
4476 Label l_catch, l_try, l_suspend, l_continuation, l_resume; | |
4477 Label l_next, l_call, l_loop; | |
4478 // Initial send value is undefined. | |
4479 __ LoadRoot(x0, Heap::kUndefinedValueRootIndex); | |
4480 __ B(&l_next); | |
4481 | |
4482 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } | |
4483 __ Bind(&l_catch); | |
4484 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); | |
4485 __ LoadRoot(x2, Heap::kthrow_stringRootIndex); // "throw" | |
4486 __ Peek(x3, 1 * kPointerSize); // iter | |
4487 __ Push(x2, x3, x0); // "throw", iter, except | |
4488 __ B(&l_call); | |
4489 | |
4490 // try { received = %yield result } | |
4491 // Shuffle the received result above a try handler and yield it without | |
4492 // re-boxing. | |
4493 __ Bind(&l_try); | |
4494 __ Pop(x0); // result | |
4495 __ PushTryHandler(StackHandler::CATCH, expr->index()); | |
4496 const int handler_size = StackHandlerConstants::kSize; | |
4497 __ Push(x0); // result | |
4498 __ B(&l_suspend); | |
4499 | |
4500 // TODO(jbramley): This label is bound here because the following code | |
4501 // looks at its pos(). Is it possible to do something more efficient here, | |
4502 // perhaps using Adr? | |
4503 __ Bind(&l_continuation); | |
4504 __ B(&l_resume); | |
4505 | |
4506 __ Bind(&l_suspend); | |
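      // At this point the stack holds, from the top: the boxed result, the
      // try handler, then g and iter; the generator object therefore sits one
      // pointer plus one handler frame below the stack pointer.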
4507 const int generator_object_depth = kPointerSize + handler_size; | |
4508 __ Peek(x0, generator_object_depth); | |
4509 __ Push(x0); // g | |
4510 ASSERT((l_continuation.pos() > 0) && Smi::IsValid(l_continuation.pos())); | |
4511 __ Mov(x1, Smi::FromInt(l_continuation.pos())); | |
4512 __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset)); | |
4513 __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset)); | |
4514 __ Mov(x1, cp); | |
4515 __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2, | |
4516 kLRHasBeenSaved, kDontSaveFPRegs); | |
4517 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | |
4518 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
4519 __ Pop(x0); // result | |
4520 EmitReturnSequence(); | |
4521 __ Bind(&l_resume); // received in x0 | |
4522 __ PopTryHandler(); | |
4523 | |
4524 // receiver = iter; f = 'next'; arg = received; | |
4525 __ Bind(&l_next); | |
4526 __ LoadRoot(x2, Heap::knext_stringRootIndex); // "next" | |
4527 __ Peek(x3, 1 * kPointerSize); // iter | |
4528 __ Push(x2, x3, x0); // "next", iter, received | |
4529 | |
4530 // result = receiver[f](arg); | |
4531 __ Bind(&l_call); | |
4532 __ Peek(x1, 1 * kPointerSize); | |
4533 __ Peek(x0, 2 * kPointerSize); | |
4534 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); | |
4535 CallIC(ic, TypeFeedbackId::None()); | |
4536 __ Mov(x1, x0); | |
4537 __ Poke(x1, 2 * kPointerSize); | |
4538 CallFunctionStub stub(1, CALL_AS_METHOD); | |
4539 __ CallStub(&stub); | |
4540 | |
4541 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
4542 __ Drop(1); // The function is still on the stack; drop it. | |
4543 | |
4544 // if (!result.done) goto l_try; | |
4545 __ Bind(&l_loop); | |
4546 __ Push(x0); // save result | |
4547 __ LoadRoot(x2, Heap::kdone_stringRootIndex); // "done" | |
4548 CallLoadIC(NOT_CONTEXTUAL); // result.done in x0 | |
4549 // The ToBooleanStub argument (result.done) is in x0. | |
4550 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); | |
4551 CallIC(bool_ic); | |
4552 __ Cbz(x0, &l_try); | |
4553 | |
4554 // result.value | |
4555 __ Pop(x0); // result | |
4556 __ LoadRoot(x2, Heap::kvalue_stringRootIndex); // "value" | |
4557 CallLoadIC(NOT_CONTEXTUAL); // result.value in x0 | |
4558 context()->DropAndPlug(2, x0); // drop iter and g | |
4559 break; | |
4560 } | |
4561 } | |
4562 } | |
4563 | |
4564 | |
4565 void FullCodeGenerator::EmitGeneratorResume(Expression* generator, | |
4566     Expression* value, | |
4567 JSGeneratorObject::ResumeMode resume_mode) { | |
4568 ASM_LOCATION("FullCodeGenerator::EmitGeneratorResume"); | |
4569 Register value_reg = x0; | |
4570 Register generator_object = x1; | |
4571 Register the_hole = x2; | |
4572 Register operand_stack_size = w3; | |
4573 Register function = x4; | |
4574 | |
4575 // The value stays in x0, and is ultimately read by the resumed generator, as | |
4576 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it | |
4577   // is read to throw the value when the resumed generator is already closed. x1 | |
4578 // will hold the generator object until the activation has been resumed. | |
4579 VisitForStackValue(generator); | |
4580 VisitForAccumulatorValue(value); | |
4581 __ Pop(generator_object); | |
4582 | |
4583 // Check generator state. | |
4584 Label wrong_state, closed_state, done; | |
4585 __ Ldr(x10, FieldMemOperand(generator_object, | |
4586 JSGeneratorObject::kContinuationOffset)); | |
4587 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0); | |
4588 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0); | |
4589 __ CompareAndBranch(x10, Smi::FromInt(0), eq, &closed_state); | |
4590 __ CompareAndBranch(x10, Smi::FromInt(0), lt, &wrong_state); | |
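  // A suspended generator holds a positive (smi) continuation offset, so any
  // state that is neither closed (== 0) nor executing (< 0) falls through.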
4591 | |
4592 // Load suspended function and context. | |
4593 __ Ldr(cp, FieldMemOperand(generator_object, | |
4594 JSGeneratorObject::kContextOffset)); | |
4595 __ Ldr(function, FieldMemOperand(generator_object, | |
4596 JSGeneratorObject::kFunctionOffset)); | |
4597 | |
4598 // Load receiver and store as the first argument. | |
4599 __ Ldr(x10, FieldMemOperand(generator_object, | |
4600 JSGeneratorObject::kReceiverOffset)); | |
4601 __ Push(x10); | |
4602 | |
4603 // Push holes for the rest of the arguments to the generator function. | |
4604 __ Ldr(x10, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); | |
4605 | |
4606 // The number of arguments is stored as an int32_t, and -1 is a marker | |
4607 // (SharedFunctionInfo::kDontAdaptArgumentsSentinel), so we need sign | |
4608 // extension to correctly handle it. However, in this case, we operate on | |
4609 // 32-bit W registers, so extension isn't required. | |
4610 __ Ldr(w10, FieldMemOperand(x10, | |
4611 SharedFunctionInfo::kFormalParameterCountOffset)); | |
4612 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex); | |
4613 __ PushMultipleTimes(the_hole, w10); | |
4614 | |
4615 // Enter a new JavaScript frame, and initialize its slots as they were when | |
4616 // the generator was suspended. | |
4617 Label resume_frame; | |
4618 __ Bl(&resume_frame); | |
4619 __ B(&done); | |
4620 | |
4621 __ Bind(&resume_frame); | |
4622 __ Push(lr, // Return address. | |
4623 fp, // Caller's frame pointer. | |
4624 cp, // Callee's context. | |
4625 function); // Callee's JS Function. | |
4626 __ Add(fp, __ StackPointer(), kPointerSize * 2); | |
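  // The four pushes plus this Add rebuild a standard JavaScript frame (a
  // sketch; the authoritative offsets live in StandardFrameConstants):
  //   fp + 8  : lr, pointing back at the B(&done) above
  //   fp + 0  : caller's frame pointer
  //   fp - 8  : cp, the callee's context
  //   fp - 16 : the callee's JSFunction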
4627 | |
4628 // Load and untag the operand stack size. | |
4629 __ Ldr(x10, FieldMemOperand(generator_object, | |
4630 JSGeneratorObject::kOperandStackOffset)); | |
4631 __ Ldr(operand_stack_size, | |
4632 UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset)); | |
4633 | |
4634 // If we are sending a value and there is no operand stack, we can jump back | |
4635 // in directly. | |
4636 if (resume_mode == JSGeneratorObject::NEXT) { | |
4637 Label slow_resume; | |
4638 __ Cbnz(operand_stack_size, &slow_resume); | |
4639 __ Ldr(x10, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); | |
4640 __ Ldrsw(x11, | |
4641 UntagSmiFieldMemOperand(generator_object, | |
4642 JSGeneratorObject::kContinuationOffset)); | |
4643 __ Add(x10, x10, x11); | |
4644 __ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); | |
4645 __ Str(x12, FieldMemOperand(generator_object, | |
4646 JSGeneratorObject::kContinuationOffset)); | |
4647 __ Br(x10); | |
4648 | |
4649 __ Bind(&slow_resume); | |
4650 } | |
4651 | |
4652 // Otherwise, we push holes for the operand stack and call the runtime to fix | |
4653 // up the stack and the handlers. | |
4654 __ PushMultipleTimes(the_hole, operand_stack_size); | |
4655 | |
4656 __ Mov(x10, Smi::FromInt(resume_mode)); | |
4657 __ Push(generator_object, result_register(), x10); | |
4658 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); | |
4659 // Not reached: the runtime call returns elsewhere. | |
4660 __ Unreachable(); | |
4661 | |
4662 // Reach here when generator is closed. | |
4663 __ Bind(&closed_state); | |
4664 if (resume_mode == JSGeneratorObject::NEXT) { | |
4665 // Return completed iterator result when generator is closed. | |
4666 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | |
4667 __ Push(x10); | |
4668 // Pop value from top-of-stack slot; box result into result register. | |
4669 EmitCreateIteratorResult(true); | |
4670 } else { | |
4671 // Throw the provided value. | |
4672 __ Push(value_reg); | |
4673 __ CallRuntime(Runtime::kThrow, 1); | |
4674 } | |
4675 __ B(&done); | |
4676 | |
4677 // Throw error if we attempt to operate on a running generator. | |
4678 __ Bind(&wrong_state); | |
4679 __ Push(generator_object); | |
4680 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1); | |
4681 | |
4682 __ Bind(&done); | |
4683 context()->Plug(result_register()); | |
4684 } | |
4685 | |
4686 | |
4687 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { | |
4688 Label gc_required; | |
4689 Label allocated; | |
4690 | |
4691 Handle<Map> map(isolate()->native_context()->generator_result_map()); | |
4692 | |
4693 // Allocate and populate an object with this form: { value: VAL, done: DONE } | |
4694 | |
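  // The generator result map describes a fixed five-word object: map,
  // properties and elements pointers, then the in-object 'value' and 'done'
  // fields that the stores below fill in (see the instance size ASSERT).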
4695 Register result = x0; | |
4696 __ Allocate(map->instance_size(), result, x10, x11, &gc_required, TAG_OBJECT); | |
4697 __ B(&allocated); | |
4698 | |
4699 __ Bind(&gc_required); | |
4700 __ Push(Smi::FromInt(map->instance_size())); | |
4701 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | |
4702 __ Ldr(context_register(), | |
4703 MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
4704 | |
4705 __ Bind(&allocated); | |
4706 Register map_reg = x1; | |
4707 Register result_value = x2; | |
4708 Register boolean_done = x3; | |
4709 Register empty_fixed_array = x4; | |
4710 __ Mov(map_reg, Operand(map)); | |
4711 __ Pop(result_value); | |
4712 __ Mov(boolean_done, Operand(isolate()->factory()->ToBoolean(done))); | |
4713 __ Mov(empty_fixed_array, Operand(isolate()->factory()->empty_fixed_array())); | |
4714 ASSERT_EQ(map->instance_size(), 5 * kPointerSize); | |
4715 // TODO(jbramley): Use Stp if possible. | |
4716 __ Str(map_reg, FieldMemOperand(result, HeapObject::kMapOffset)); | |
4717 __ Str(empty_fixed_array, | |
4718 FieldMemOperand(result, JSObject::kPropertiesOffset)); | |
4719 __ Str(empty_fixed_array, FieldMemOperand(result, JSObject::kElementsOffset)); | |
4720 __ Str(result_value, | |
4721 FieldMemOperand(result, | |
4722 JSGeneratorObject::kResultValuePropertyOffset)); | |
4723 __ Str(boolean_done, | |
4724 FieldMemOperand(result, | |
4725 JSGeneratorObject::kResultDonePropertyOffset)); | |
4726 | |
4727 // Only the value field needs a write barrier, as the other values are in the | |
4728 // root set. | |
4729 __ RecordWriteField(result, JSGeneratorObject::kResultValuePropertyOffset, | |
4730 x10, x11, kLRHasBeenSaved, kDontSaveFPRegs); | |
4731 } | |
4732 | |
4733 | |
4734 // TODO(all): I don't like this method. | |
4735 // It seems to me that in too many places x0 is used in place of this. | |
4736 // Also, this function is not suitable for all places where x0 should be | |
4737 // abstracted (e.g. when used as an argument). But some places assume that the | |
4738 // first argument register is x0, and use this function instead. | |
4739 // Considering that most of the register allocation is hard-coded in the | |
4740 // FullCodeGen, that it is unlikely we will need to change it extensively, and | |
4741 // that abstracting the allocation through functions would not yield any | |
4742 // performance benefit, I think the existence of this function is debatable. | |
4743 Register FullCodeGenerator::result_register() { | |
4744 return x0; | |
4745 } | |
4746 | |
4747 | |
4748 Register FullCodeGenerator::context_register() { | |
4749 return cp; | |
4750 } | |
4751 | |
4752 | |
4753 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | |
4754 ASSERT(POINTER_SIZE_ALIGN(frame_offset) == frame_offset); | |
4755 __ Str(value, MemOperand(fp, frame_offset)); | |
4756 } | |
4757 | |
4758 | |
4759 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | |
4760 __ Ldr(dst, ContextMemOperand(cp, context_index)); | |
4761 } | |
4762 | |
4763 | |
4764 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { | |
4765 Scope* declaration_scope = scope()->DeclarationScope(); | |
4766 if (declaration_scope->is_global_scope() || | |
4767 declaration_scope->is_module_scope()) { | |
4768 // Contexts nested in the native context have a canonical empty function | |
4769 // as their closure, not the anonymous closure containing the global | |
4770 // code. Pass a smi sentinel and let the runtime look up the empty | |
4771 // function. | |
4772 ASSERT(kSmiTag == 0); | |
4773 __ Push(xzr); | |
4774 } else if (declaration_scope->is_eval_scope()) { | |
4775 // Contexts created by a call to eval have the same closure as the | |
4776 // context calling eval, not the anonymous closure containing the eval | |
4777 // code. Fetch it from the context. | |
4778 __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX)); | |
4779 __ Push(x10); | |
4780 } else { | |
4781 ASSERT(declaration_scope->is_function_scope()); | |
4782 __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
4783 __ Push(x10); | |
4784 } | |
4785 } | |
4786 | |
4787 | |
4788 void FullCodeGenerator::EnterFinallyBlock() { | |
4789 ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock"); | |
4790 ASSERT(!result_register().is(x10)); | |
4791 // Preserve the result register while executing finally block. | |
4792 // Also cook the return address in lr to the stack (smi encoded Code* delta). | |
4793 __ Sub(x10, lr, Operand(masm_->CodeObject())); | |
4794 __ SmiTag(x10); | |
4795 __ Push(result_register(), x10); | |
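  // Storing the return address as a smi-tagged offset from the code object,
  // rather than as a raw pointer, keeps it valid if the GC moves the code
  // while the finally block runs; ExitFinallyBlock reverses this.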
4796 | |
4797 // Store pending message while executing finally block. | |
4798 ExternalReference pending_message_obj = | |
4799 ExternalReference::address_of_pending_message_obj(isolate()); | |
4800 __ Mov(x10, pending_message_obj); | |
4801 __ Ldr(x10, MemOperand(x10)); | |
4802 | |
4803 ExternalReference has_pending_message = | |
4804 ExternalReference::address_of_has_pending_message(isolate()); | |
4805 __ Mov(x11, has_pending_message); | |
4806 __ Ldr(x11, MemOperand(x11)); | |
4807 __ SmiTag(x11); | |
4808 | |
4809 __ Push(x10, x11); | |
4810 | |
4811 ExternalReference pending_message_script = | |
4812 ExternalReference::address_of_pending_message_script(isolate()); | |
4813 __ Mov(x10, pending_message_script); | |
4814 __ Ldr(x10, MemOperand(x10)); | |
4815 __ Push(x10); | |
4816 } | |
4817 | |
4818 | |
4819 void FullCodeGenerator::ExitFinallyBlock() { | |
4820 ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock"); | |
4821 ASSERT(!result_register().is(x10)); | |
4822 | |
4823 // Restore pending message from stack. | |
4824 __ Pop(x10, x11, x12); | |
4825 ExternalReference pending_message_script = | |
4826 ExternalReference::address_of_pending_message_script(isolate()); | |
4827 __ Mov(x13, pending_message_script); | |
4828 __ Str(x10, MemOperand(x13)); | |
4829 | |
4830 __ SmiUntag(x11); | |
4831 ExternalReference has_pending_message = | |
4832 ExternalReference::address_of_has_pending_message(isolate()); | |
4833 __ Mov(x13, has_pending_message); | |
4834 __ Str(x11, MemOperand(x13)); | |
4835 | |
4836 ExternalReference pending_message_obj = | |
4837 ExternalReference::address_of_pending_message_obj(isolate()); | |
4838 __ Mov(x13, pending_message_obj); | |
4839 __ Str(x12, MemOperand(x13)); | |
4840 | |
4841 // Restore result register and cooked return address from the stack. | |
4842 __ Pop(x10, result_register()); | |
4843 | |
4844 // Uncook the return address (see EnterFinallyBlock). | |
4845 __ SmiUntag(x10); | |
4846 __ Add(x11, x10, Operand(masm_->CodeObject())); | |
4847 __ Br(x11); | |
4848 } | |
4849 | |
4850 | |
4851 #undef __ | |
4852 | |
4853 | |
4854 void BackEdgeTable::PatchAt(Code* unoptimized_code, | |
4855 Address pc, | |
4856 BackEdgeState target_state, | |
4857 Code* replacement_code) { | |
4858   // Locate the patch site: the conditional branch (or nop) of the back edge. | |
4859 Address branch_address = pc - 3 * kInstructionSize; | |
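  // 'pc' is the address just past the 'blr', so the patchable conditional
  // branch (or nop) is three instructions earlier, followed by the
  // 'ldr x16, <address>' and 'blr x16' shown in the comments below.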
4860 PatchingAssembler patcher(branch_address, 1); | |
4861 | |
4862 ASSERT(Instruction::Cast(branch_address) | |
4863 ->IsNop(Assembler::INTERRUPT_CODE_NOP) || | |
4864 (Instruction::Cast(branch_address)->IsCondBranchImm() && | |
4865 Instruction::Cast(branch_address)->ImmPCOffset() == | |
4866 6 * kInstructionSize)); | |
4867 | |
4868 switch (target_state) { | |
4869 case INTERRUPT: | |
4870 // <decrement profiling counter> | |
4871 // .. .. .. .. b.pl ok | |
4872 // .. .. .. .. ldr x16, pc+<interrupt stub address> | |
4873 // .. .. .. .. blr x16 | |
4874 // ... more instructions. | |
4875 // ok-label | |
4876 // Jump offset is 6 instructions. | |
4877 patcher.b(6, pl); | |
4878 break; | |
4879 case ON_STACK_REPLACEMENT: | |
4880 case OSR_AFTER_STACK_CHECK: | |
4881 // <decrement profiling counter> | |
4882 // .. .. .. .. mov x0, x0 (NOP) | |
4883 // .. .. .. .. ldr x16, pc+<on-stack replacement address> | |
4884 // .. .. .. .. blr x16 | |
4885 patcher.nop(Assembler::INTERRUPT_CODE_NOP); | |
4886 break; | |
4887 } | |
4888 | |
4889 // Replace the call address. | |
4890 Instruction* load = Instruction::Cast(pc)->preceding(2); | |
4891 Address interrupt_address_pointer = | |
4892 reinterpret_cast<Address>(load) + load->ImmPCOffset(); | |
4893 ASSERT((Memory::uint64_at(interrupt_address_pointer) == | |
4894 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate() | |
4895 ->builtins() | |
4896 ->OnStackReplacement() | |
4897 ->entry())) || | |
4898 (Memory::uint64_at(interrupt_address_pointer) == | |
4899 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate() | |
4900 ->builtins() | |
4901 ->InterruptCheck() | |
4902 ->entry())) || | |
4903 (Memory::uint64_at(interrupt_address_pointer) == | |
4904 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate() | |
4905 ->builtins() | |
4906 ->OsrAfterStackCheck() | |
4907 ->entry())) || | |
4908 (Memory::uint64_at(interrupt_address_pointer) == | |
4909 reinterpret_cast<uint64_t>(unoptimized_code->GetIsolate() | |
4910 ->builtins() | |
4911 ->OnStackReplacement() | |
4912 ->entry()))); | |
4913 Memory::uint64_at(interrupt_address_pointer) = | |
4914 reinterpret_cast<uint64_t>(replacement_code->entry()); | |
4915 | |
4916 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | |
4917 unoptimized_code, reinterpret_cast<Address>(load), replacement_code); | |
4918 } | |
4919 | |
4920 | |
4921 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( | |
4922 Isolate* isolate, | |
4923 Code* unoptimized_code, | |
4924 Address pc) { | |
4925 // TODO(jbramley): There should be some extra assertions here (as in the ARM | |
4926 // back-end), but this function is gone in bleeding_edge so it might not | |
4927 // matter anyway. | |
4928 Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3); | |
4929 | |
4930 if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) { | |
4931 Instruction* load = Instruction::Cast(pc)->preceding(2); | |
4932 uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) + | |
4933 load->ImmPCOffset()); | |
4934 if (entry == reinterpret_cast<uint64_t>( | |
4935 isolate->builtins()->OnStackReplacement()->entry())) { | |
4936 return ON_STACK_REPLACEMENT; | |
4937 } else if (entry == reinterpret_cast<uint64_t>( | |
4938 isolate->builtins()->OsrAfterStackCheck()->entry())) { | |
4939 return OSR_AFTER_STACK_CHECK; | |
4940 } else { | |
4941 UNREACHABLE(); | |
4942 } | |
4943 } | |
4944 | |
4945 return INTERRUPT; | |
4946 } | |
4947 | |
4948 | |
4949 #define __ ACCESS_MASM(masm()) | |
4950 | |
4951 | |
4952 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( | |
4953 int* stack_depth, | |
4954 int* context_length) { | |
4955 ASM_LOCATION("FullCodeGenerator::TryFinally::Exit"); | |
4956 // The macros used here must preserve the result register. | |
4957 | |
4958 // Because the handler block contains the context of the finally | |
4959 // code, we can restore it directly from there for the finally code | |
4960 // rather than iteratively unwinding contexts via their previous | |
4961 // links. | |
4962 __ Drop(*stack_depth); // Down to the handler block. | |
4963 if (*context_length > 0) { | |
4964 // Restore the context to its dedicated register and the stack. | |
4965 __ Peek(cp, StackHandlerConstants::kContextOffset); | |
4966 __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
4967 } | |
4968 __ PopTryHandler(); | |
4969 __ Bl(finally_entry_); | |
4970 | |
4971 *stack_depth = 0; | |
4972 *context_length = 0; | |
4973 return previous_; | |
4974 } | |
4975 | |
4976 | |
4977 #undef __ | |
4978 | |
4979 | |
4980 } } // namespace v8::internal | |
4981 | |
4982 #endif // V8_TARGET_ARCH_A64 | |