// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parser.h"
#include "src/scopes.h"

#include "src/arm/code-stubs-arm.h"
#include "src/arm/macro-assembler-arm.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (the raw
// 12-bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(eq, target);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    __ bind(&patch_site_);
    __ cmp(reg, Operand(reg));
    __ b(ne, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    // Block literal pool emission whilst recording patch site information.
    Assembler::BlockConstPoolScope block_const_pool(masm_);
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg;
      reg.set_code(delta_to_patch_site / kOff12Mask);
      __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
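
// Worked example of the marker encoding described above, assuming
// kOff12Mask == 0xfff (the raw 12-bit immediate mask on ARM) and a
// hypothetical delta of 5000 instructions back to the patch site:
//
//   x   = 5000 / 0xfff = 1    // encoded as the register code, i.e. rx == r1
//   yyy = 5000 % 0xfff = 905  // encoded as the raw 12-bit immediate
//
// EmitPatchInfo therefore emits `cmp r1, #905`, and the patcher recovers the
// delta as x * 0xfff + yyy = 1 * 4095 + 905 = 5000.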


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (i.e., ourselves)
//   o cp: our context
//   o pp: our caller's constant pool pointer (if enabled)
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (is_sloppy(info->language_mode()) && !info->is_native() &&
      info->MayUseThis() && info->scope()->has_this_declaration()) {
    Label ok;
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ldr(r2, MemOperand(sp, receiver_offset));
    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(ne, &ok);

    __ ldr(r2, GlobalObjectOperand());
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ str(r2, MemOperand(sp, receiver_offset));

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->function()->kind()) || locals_count == 0);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ sub(r9, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
        __ cmp(r9, Operand(r2));
        __ b(hs, &ok);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(r9);
        }
        // Continue loop if not done.
        __ sub(r2, r2, Operand(1), SetCC);
        __ b(&loop_header, ne);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(r9);
      }
    }
  }
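
  // For illustration: with locals_count == 100 and kMaxPushes == 32, the loop
  // above runs 100 / 32 = 3 times (96 pushes of undefined) and the unrolled
  // tail emits the remaining 100 % 32 = 4 pushes.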

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext, 2);
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(r1);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in r0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, r0);
    __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ldr(r0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ str(r0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, keep it marked as such.
    }
    SetVar(this_function_var, r1, r0, r2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");

    __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
    __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
    __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq);
    __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
    __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
    Label non_construct_frame, done;

    __ b(ne, &non_construct_frame);
    __ ldr(r0,
           MemOperand(r2, ConstructFrameConstants::kOriginalConstructorOffset));
    __ b(&done);

    __ bind(&non_construct_frame);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ bind(&done);

    SetVar(new_target_var, r0, r2, r3);
  }

  // Possibly allocate rest parameters.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ add(r3, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r2, Operand(Smi::FromInt(num_parameters)));
    __ mov(r1, Operand(Smi::FromInt(rest_index)));
    __ mov(r0, Operand(Smi::FromInt(language_mode())));
    __ Push(r3, r2, r1, r0);

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, r0, r1, r2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(r3, r1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(r1, Operand(Smi::FromInt(num_parameters)));
    __ Push(r3, r2, r1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite the receiver and parameter count if the previous
    // stack frame was an arguments adaptor frame.
    ArgumentsAccessStub::Type type;
    if (is_strict(language_mode()) || !is_simple_parameter_list()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, r0, r1, r2);
  }


  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(ip, Heap::kStackLimitRootIndex);
      __ cmp(sp, Operand(ip));
      __ b(hs, &ok);
      Handle<Code> stack_check = isolate()->builtins()->StackCheck();
      PredictableCodeSizeScope predictable(masm_);
      predictable.ExpectSize(
          masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
      __ Call(stack_check, RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the constant pool, so it doesn't get emitted in the
  // middle of the back edge table.
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ mov(r0, Operand(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r2, Operand(profiling_counter_));
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
  __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}


#ifdef CAN_USE_ARMV7_INSTRUCTIONS
static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
#else
static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
#endif


void FullCodeGenerator::EmitProfilingCounterReset() {
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  PredictableCodeSizeScope predictable_code_size_scope(
      masm_, kProfileCounterResetSequenceLength);
  Label start;
  __ bind(&start);
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ mov(r2, Operand(profiling_counter_));
  // The mov instruction above can be either 1 to 3 (for ARMv7) or 1 to 5
  // instructions (for ARMv6) depending upon whether it is an extended constant
  // pool - insert nops to compensate.
  int expected_instr_count =
      (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
  DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
  while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
    __ nop();
  }
  __ mov(r3, Operand(Smi::FromInt(reset_value)));
  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
}
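
// A note on the nop padding above, derived from the constants: the budgeted
// sequence is 5 instructions on ARMv7 (7 on ARMv6), and the trailing mov/str
// pair always takes 2 of them, so the load of the profiling counter handle
// plus nops must fill exactly 3 slots (5 on ARMv6). If that mov needed only
// one instruction, two nops follow it.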


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockConstPoolScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ b(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
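
// Worked example of the weight computation above (kCodeSizeMultiplier's
// actual value lives in full-codegen.h; 149 is an assumed figure here): a
// back edge spanning 1440 bytes of code gives
//   weight = Min(kMaxBackEdgeWeight, Max(1, 1440 / 149)) = 9,
// so loops with larger bodies drain the profiling counter faster and reach
// the interrupt check in fewer iterations.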


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ b(pl, &ok);
    __ push(r0);
    __ Call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(r0);
    EmitProfilingCounterReset();
    __ bind(&ok);

    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(function());
      // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
      PredictableCodeSizeScope predictable(masm_, -1);
      int no_frame_start = __ LeaveFrame(StackFrame::JAVA_SCRIPT);
      { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
        __ add(sp, sp, Operand(sp_delta));
        __ Jump(lr);
        info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
      }
    }
  }
}
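
// For illustration: a function with two declared parameters returns through
// arg_count = 2 + 1 = 3 stack slots (the receiver plus both parameters), so
// with kPointerSize == 4 on ARM the sequence above adds sp_delta = 12 to sp
// before jumping through lr.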


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ str(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ jmp(&done);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  __ push(ip);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ tst(result_register(), result_register());
  Split(ne, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}
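
// A compact summary of the three cases above, using eq as a sample condition:
//
//   if_false == fall_through:  b(eq, if_true)               // fall into false
//   if_true  == fall_through:  b(ne, if_false)              // fall into true
//   neither:                   b(eq, if_true); b(if_false)
//
// so a conditional branch is always emitted, and an unconditional branch only
// when neither label is the fall-through.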


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
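
// Worked example, assuming kPointerSize == 4 and a function with three
// parameters: a parameter with var->index() == 0 maps to
//   offset = -0 * 4 + (3 + 1) * 4 = +16 from fp (above the frame),
// while a stack local with var->index() == 1 maps to
//   offset = -1 * 4 + JavaScriptFrameConstants::kLocal0Offset,
// one slot below local 0.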


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kLRHasBeenSaved,
                              kDontSaveFPRegs);
  }
}
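
// Typical use, as in Generate() above:
//
//   SetVar(new_target_var, r0, r2, r3);
//
// stores r0 into the variable's slot; when that slot lives in a heap-allocated
// context, r2 and r3 serve as the scratch registers for the write barrier.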


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ str(ip, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ mov(r1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, r2, r1, r0);
      } else {
        __ mov(r0, Operand(Smi::FromInt(0)));  // Indicates no initial value.
        __ Push(cp, r2, r1, r0);
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ str(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                r2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r2, Operand(variable->name()));
      __ mov(r1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, r2, r1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, r1, r0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ ldr(r1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ orr(r2, r1, r0);
      patch_site.EmitJumpIfNotSmi(r2, &slow_case);

      __ cmp(r1, r0);
      __ b(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(r0, ip);
    __ b(ne, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ cmp(r0, Operand::Zero());
    __ b(ne, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
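
// Rough shape of the code the visitor above produces for
// `switch (v) { case a: ...; case b: ...; default: ... }` (labels are named
// for clarity only; the generator recycles a single next_test label):
//
//   push v
//   a: compare against v .... bne next_test_1; drop; b body_a
//   next_test_1:
//   b: compare against v .... bne next_test_2; drop; b body_b
//   next_test_2: drop; b body_default
//   body_a: ...   body_b: ...   body_default: ...
//   break_label: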


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, &exit);
  Register null_value = r5;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmp(r0, null_value);
  __ b(eq, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(r0, &convert);
  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
  __ b(ge, &done_convert);
  __ bind(&convert);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
  __ b(le, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kMetaMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &fixed_array);

  // We got a map in register r0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r1, r0);
  __ cmp(r1, Operand(Smi::FromInt(0)));
  __ b(eq, &no_descriptors);

  __ LoadInstanceDescriptors(r0, r2);
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
  __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r0);  // Map.
  __ mov(r0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r2, r1, r0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register r0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  __ Move(r1, FeedbackVector());
  __ mov(r2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  int vector_index = FeedbackVector()->GetIndex(slot);
  __ str(r2, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(vector_index)));

  __ mov(r1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
  __ ldr(r2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
  __ b(gt, &non_proxy);
  __ mov(r1, Operand(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(r1, r0);  // Smi and array.
  __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
  __ mov(r0, Operand(Smi::FromInt(0)));
  __ Push(r1, r0);  // Fixed array length (as smi) and initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r0, load the length to r1.
  __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
  __ cmp(r0, r1);  // Compare to the array length.
  __ b(hs, loop_statement.break_label());

  // Get the current entry of the array into register r3.
  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r3, MemOperand::PointerAddressFromSmiKey(r2, r0));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r2.
  __ ldr(r2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r4, Operand(r2));
  __ b(eq, &update_each);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  __ cmp(r2, Operand(Smi::FromInt(0)));
  __ b(eq, &update_each);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(r1);  // Enumerable.
  __ push(r3);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter, 2);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ mov(r3, Operand(r0));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(eq, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r3.
  __ bind(&update_each);
  __ mov(result_register(), r3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r0);
  __ add(r0, r0, Operand(Smi::FromInt(1)));
  __ push(r0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
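
// Stack layout maintained inside the loop above (sp-relative, illustrative,
// assuming kPointerSize == 4):
//
//   sp[0]  : current index (smi)
//   sp[4]  : array length (smi)
//   sp[8]  : fixed array of keys (or enum cache)
//   sp[12] : expected map, or Smi(1)/Smi(0) for the slow and proxy cases
//   sp[16] : the enumerable object itself
//
// which is why the break label drops exactly five slots.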


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(r2, Operand(info));
    __ CallStub(&stub);
  } else {
    __ mov(r0, Operand(info));
    __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, r0, r1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(r0);
}


void FullCodeGenerator::EmitSetHomeObjectIfNeeded(Expression* initializer,
                                                  int offset,
                                                  FeedbackVectorICSlot slot) {
  if (NeedsHomeObject(initializer)) {
    __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
    __ mov(StoreDescriptor::NameRegister(),
           Operand(isolate()->factory()->home_object_symbol()));
    __ ldr(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
    if (FLAG_vector_stores) EmitLoadStoreICSlot(slot);
    CallStoreIC();
  }
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r1;
  Register temp = r2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      // Load next context in chain.
      __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    __ cmp(temp, ip);
    __ b(eq, &fast);
    // Check that extension is NULL.
    __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ tst(temp, temp);
    __ b(ne, slow);
    // Load next context in chain.
    __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty, so it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r3;
  Register temp = r4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(temp, temp);
        __ b(ne, slow);
      }
      __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(temp, temp);
  __ b(ne, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
| 1373 return ContextOperand(context, var->index()); | |
| 1374 } | |
| 1375 | |
| 1376 | |
| 1377 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, | |
| 1378 TypeofMode typeof_mode, | |
| 1379 Label* slow, Label* done) { | |
| 1380 // Generate fast-case code for variables that might be shadowed by | |
| 1381 // eval-introduced variables. Eval is used a lot without | |
| 1382 // introducing variables. In those cases, we do not want to | |
| 1383 // perform a runtime call for all variables in the scope | |
| 1384 // containing the eval. | |
| 1385 Variable* var = proxy->var(); | |
| 1386 if (var->mode() == DYNAMIC_GLOBAL) { | |
| 1387 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow); | |
| 1388 __ jmp(done); | |
| 1389 } else if (var->mode() == DYNAMIC_LOCAL) { | |
| 1390 Variable* local = var->local_if_not_shadowed(); | |
| 1391 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); | |
| 1392 if (local->mode() == LET || local->mode() == CONST || | |
| 1393 local->mode() == CONST_LEGACY) { | |
| 1394 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); | |
| 1395 if (local->mode() == CONST_LEGACY) { | |
| 1396 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
| 1397 } else { // LET || CONST | |
| 1398 __ b(ne, done); | |
| 1399 __ mov(r0, Operand(var->name())); | |
| 1400 __ push(r0); | |
| 1401 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
| 1402 } | |
| 1403 } | |
| 1404 __ jmp(done); | |
| 1405 } | |
| 1406 } | |
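| // Rough examples of the two fast cases above (illustrative only): | |
| //   DYNAMIC_GLOBAL: function f(src) { eval(src); return x; } | |
| //     // x resolves to the global unless the eval introduced one. | |
| //   DYNAMIC_LOCAL:  function f(src) { var x = 1; | |
| //                     return function () { eval(src); return x; }; } | |
| //     // x resolves to f's context slot unless the eval shadowed it. | |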
| 1407 | |
| 1408 | |
| 1409 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, | |
| 1410 TypeofMode typeof_mode) { | |
| 1411 Variable* var = proxy->var(); | |
| 1412 DCHECK(var->IsUnallocatedOrGlobalSlot() || | |
| 1413 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL)); | |
| 1414 if (var->IsGlobalSlot()) { | |
| 1415 DCHECK(var->index() > 0); | |
| 1416 DCHECK(var->IsStaticGlobalObjectProperty()); | |
| 1417 // Each var occupies two slots in the context: var->index() for reads, var->index() + 1 for writes. | |
| 1418 int slot_index = var->index(); | |
| 1419 int depth = scope()->ContextChainLength(var->scope()); | |
| 1420 __ mov(LoadGlobalViaContextDescriptor::DepthRegister(), | |
| 1421 Operand(Smi::FromInt(depth))); | |
| 1422 __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), | |
| 1423 Operand(Smi::FromInt(slot_index))); | |
| 1424 __ mov(LoadGlobalViaContextDescriptor::NameRegister(), | |
| 1425 Operand(var->name())); | |
| 1426 LoadGlobalViaContextStub stub(isolate(), depth); | |
| 1427 __ CallStub(&stub); | |
| 1428 | |
| 1429 } else { | |
| 1430 __ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); | |
| 1431 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); | |
| 1432 __ mov(LoadDescriptor::SlotRegister(), | |
| 1433 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); | |
| 1434 CallLoadIC(typeof_mode); | |
| 1435 } | |
| 1436 } | |
| 1437 | |
| 1438 | |
| 1439 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, | |
| 1440 TypeofMode typeof_mode) { | |
| 1441 // Record position before possible IC call. | |
| 1442 SetExpressionPosition(proxy); | |
| 1443 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS); | |
| 1444 Variable* var = proxy->var(); | |
| 1445 | |
| 1446 // Three cases: global variables, lookup variables, and all other types of | |
| 1447 // variables. | |
| 1448 switch (var->location()) { | |
| 1449 case VariableLocation::GLOBAL: | |
| 1450 case VariableLocation::UNALLOCATED: { | |
| 1451 Comment cmnt(masm_, "[ Global variable"); | |
| 1452 EmitGlobalVariableLoad(proxy, typeof_mode); | |
| 1453 context()->Plug(r0); | |
| 1454 break; | |
| 1455 } | |
| 1456 | |
| 1457 case VariableLocation::PARAMETER: | |
| 1458 case VariableLocation::LOCAL: | |
| 1459 case VariableLocation::CONTEXT: { | |
| 1460 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode); | |
| 1461 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" | |
| 1462 : "[ Stack variable"); | |
| 1463 if (var->binding_needs_init()) { | |
| 1464 // var->scope() may be NULL when the proxy is located in eval code and | |
| 1465 // refers to a potential outside binding. Currently those bindings are | |
| 1466 // always looked up dynamically, i.e. in that case | |
| 1467 //     var->location() == LOOKUP | |
| 1468 // always holds. | |
| 1469 DCHECK(var->scope() != NULL); | |
| 1470 | |
| 1471 // Check if the binding really needs an initialization check. The check | |
| 1472 // can be skipped in the following situation: we have a LET or CONST | |
| 1473 // binding in harmony mode, both the Variable and the VariableProxy have | |
| 1474 // the same declaration scope (i.e. they are both in global code, in the | |
| 1475 // same function or in the same eval code) and the VariableProxy is in | |
| 1476 // the source physically located after the initializer of the variable. | |
| 1477 // | |
| 1478 // We cannot skip any initialization checks for CONST in non-harmony | |
| 1479 // mode because const variables may be declared but never initialized: | |
| 1480 // if (false) { const x; }; var y = x; | |
| 1481 // | |
| 1482 // The condition on the declaration scopes is a conservative check for | |
| 1483 // nested functions that access a binding and are called before the | |
| 1484 // binding is initialized: | |
| 1485 // function() { f(); let x = 1; function f() { x = 2; } } | |
| 1486 // | |
| 1487 bool skip_init_check; | |
| 1488 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) { | |
| 1489 skip_init_check = false; | |
| 1490 } else if (var->is_this()) { | |
| 1491 CHECK(info_->function() != nullptr && | |
| 1492 (info_->function()->kind() & kSubclassConstructor) != 0); | |
| 1493 // TODO(dslomov): implement 'this' hole check elimination. | |
| 1494 skip_init_check = false; | |
| 1495 } else { | |
| 1496 // Check that we always have a valid source position. | |
| 1497 DCHECK(var->initializer_position() != RelocInfo::kNoPosition); | |
| 1498 DCHECK(proxy->position() != RelocInfo::kNoPosition); | |
| 1499 skip_init_check = var->mode() != CONST_LEGACY && | |
| 1500 var->initializer_position() < proxy->position(); | |
| 1501 } | |
| 1502 | |
| 1503 if (!skip_init_check) { | |
| 1504 // Let and const need a read barrier. | |
| 1505 GetVar(r0, var); | |
| 1506 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); | |
| 1507 if (var->mode() == LET || var->mode() == CONST) { | |
| 1508 // Throw a reference error when using an uninitialized let/const | |
| 1509 // binding in harmony mode. | |
| 1510 Label done; | |
| 1511 __ b(ne, &done); | |
| 1512 __ mov(r0, Operand(var->name())); | |
| 1513 __ push(r0); | |
| 1514 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
| 1515 __ bind(&done); | |
| 1516 } else { | |
| 1517 // Uninitialized const bindings outside of harmony mode are unholed. | |
| 1518 DCHECK(var->mode() == CONST_LEGACY); | |
| 1519 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
| 1520 } | |
| 1521 context()->Plug(r0); | |
| 1522 break; | |
| 1523 } | |
| 1524 } | |
| 1525 context()->Plug(var); | |
| 1526 break; | |
| 1527 } | |
| 1528 | |
| 1529 case VariableLocation::LOOKUP: { | |
| 1530 Comment cmnt(masm_, "[ Lookup variable"); | |
| 1531 Label done, slow; | |
| 1532 // Generate code for loading from variables potentially shadowed | |
| 1533 // by eval-introduced variables. | |
| 1534 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done); | |
| 1535 __ bind(&slow); | |
| 1536 __ mov(r1, Operand(var->name())); | |
| 1537 __ Push(cp, r1); // Context and name. | |
| 1538 Runtime::FunctionId function_id = | |
| 1539 typeof_mode == NOT_INSIDE_TYPEOF | |
| 1540 ? Runtime::kLoadLookupSlot | |
| 1541 : Runtime::kLoadLookupSlotNoReferenceError; | |
| 1542 __ CallRuntime(function_id, 2); | |
| 1543 __ bind(&done); | |
| 1544 context()->Plug(r0); | |
| 1545 } | |
| 1546 } | |
| 1547 } | |
| 1548 | |
| 1549 | |
| 1550 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | |
| 1551 Comment cmnt(masm_, "[ RegExpLiteral"); | |
| 1552 Label materialized; | |
| 1553 // Registers will be used as follows: | |
| 1554 // r5 = materialized value (RegExp literal) | |
| 1555 // r4 = JS function, literals array | |
| 1556 // r3 = literal index | |
| 1557 // r2 = RegExp pattern | |
| 1558 // r1 = RegExp flags | |
| 1559 // r0 = RegExp literal clone | |
| 1560 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 1561 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); | |
| 1562 int literal_offset = | |
| 1563 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; | |
| 1564 __ ldr(r5, FieldMemOperand(r4, literal_offset)); | |
| 1565 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | |
| 1566 __ cmp(r5, ip); | |
| 1567 __ b(ne, &materialized); | |
| 1568 | |
| 1569 // Create regexp literal using runtime function. | |
| 1570 // Result will be in r0. | |
| 1571 __ mov(r3, Operand(Smi::FromInt(expr->literal_index()))); | |
| 1572 __ mov(r2, Operand(expr->pattern())); | |
| 1573 __ mov(r1, Operand(expr->flags())); | |
| 1574 __ Push(r4, r3, r2, r1); | |
| 1575 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | |
| 1576 __ mov(r5, r0); | |
| 1577 | |
| 1578 __ bind(&materialized); | |
| 1579 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | |
| 1580 Label allocated, runtime_allocate; | |
| 1581 __ Allocate(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); | |
| 1582 __ jmp(&allocated); | |
| 1583 | |
| 1584 __ bind(&runtime_allocate); | |
| 1585 __ mov(r0, Operand(Smi::FromInt(size))); | |
| 1586 __ Push(r5, r0); | |
| 1587 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | |
| 1588 __ pop(r5); | |
| 1589 | |
| 1590 __ bind(&allocated); | |
| 1591 // After this, registers are used as follows: | |
| 1592 // r0: Newly allocated regexp. | |
| 1593 // r5: Materialized regexp. | |
| 1594 // r2: temp. | |
| 1595 __ CopyFields(r0, r5, d0, size / kPointerSize); | |
| 1596 context()->Plug(r0); | |
| 1597 } | |
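| // Illustrative behaviour of the code above (a sketch): the boilerplate | |
| // regexp is materialized once per literal slot and then cloned on every | |
| // evaluation, so e.g. | |
| //   function f() { return /ab+c/g; } | |
| // yields a fresh JSRegExp per call, all clones of one boilerplate. | |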
| 1598 | |
| 1599 | |
| 1600 void FullCodeGenerator::EmitAccessor(Expression* expression) { | |
| 1601 if (expression == NULL) { | |
| 1602 __ LoadRoot(r1, Heap::kNullValueRootIndex); | |
| 1603 __ push(r1); | |
| 1604 } else { | |
| 1605 VisitForStackValue(expression); | |
| 1606 } | |
| 1607 } | |
| 1608 | |
| 1609 | |
| 1610 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { | |
| 1611 Comment cmnt(masm_, "[ ObjectLiteral"); | |
| 1612 | |
| 1613 Handle<FixedArray> constant_properties = expr->constant_properties(); | |
| 1614 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 1615 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); | |
| 1616 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); | |
| 1617 __ mov(r1, Operand(constant_properties)); | |
| 1618 int flags = expr->ComputeFlags(); | |
| 1619 __ mov(r0, Operand(Smi::FromInt(flags))); | |
| 1620 if (MustCreateObjectLiteralWithRuntime(expr)) { | |
| 1621 __ Push(r3, r2, r1, r0); | |
| 1622 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); | |
| 1623 } else { | |
| 1624 FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); | |
| 1625 __ CallStub(&stub); | |
| 1626 } | |
| 1627 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); | |
| 1628 | |
| 1629 // If result_saved is true the result is on top of the stack. If | |
| 1630 // result_saved is false the result is in r0. | |
| 1631 bool result_saved = false; | |
| 1632 | |
| 1633 AccessorTable accessor_table(zone()); | |
| 1634 int property_index = 0; | |
| 1635 // store_slot_index points to the vector IC slot for the next store IC used. | |
| 1636 // ObjectLiteral::ComputeFeedbackRequirements controls the allocation of slots | |
| 1637 // and must be updated if the number of store ICs emitted here changes. | |
| 1638 int store_slot_index = 0; | |
| 1639 for (; property_index < expr->properties()->length(); property_index++) { | |
| 1640 ObjectLiteral::Property* property = expr->properties()->at(property_index); | |
| 1641 if (property->is_computed_name()) break; | |
| 1642 if (property->IsCompileTimeValue()) continue; | |
| 1643 | |
| 1644 Literal* key = property->key()->AsLiteral(); | |
| 1645 Expression* value = property->value(); | |
| 1646 if (!result_saved) { | |
| 1647 __ push(r0); // Save result on stack | |
| 1648 result_saved = true; | |
| 1649 } | |
| 1650 switch (property->kind()) { | |
| 1651 case ObjectLiteral::Property::CONSTANT: | |
| 1652 UNREACHABLE(); | |
| 1653 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | |
| 1654 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); | |
| 1655 // Fall through. | |
| 1656 case ObjectLiteral::Property::COMPUTED: | |
| 1657 // It is safe to use [[Put]] here because the boilerplate already | |
| 1658 // contains computed properties with an uninitialized value. | |
| 1659 if (key->value()->IsInternalizedString()) { | |
| 1660 if (property->emit_store()) { | |
| 1661 VisitForAccumulatorValue(value); | |
| 1662 DCHECK(StoreDescriptor::ValueRegister().is(r0)); | |
| 1663 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); | |
| 1664 __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); | |
| 1665 if (FLAG_vector_stores) { | |
| 1666 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++)); | |
| 1667 CallStoreIC(); | |
| 1668 } else { | |
| 1669 CallStoreIC(key->LiteralFeedbackId()); | |
| 1670 } | |
| 1671 PrepareForBailoutForId(key->id(), NO_REGISTERS); | |
| 1672 | |
| 1673 if (NeedsHomeObject(value)) { | |
| 1674 __ Move(StoreDescriptor::ReceiverRegister(), r0); | |
| 1675 __ mov(StoreDescriptor::NameRegister(), | |
| 1676 Operand(isolate()->factory()->home_object_symbol())); | |
| 1677 __ ldr(StoreDescriptor::ValueRegister(), MemOperand(sp)); | |
| 1678 if (FLAG_vector_stores) { | |
| 1679 EmitLoadStoreICSlot(expr->GetNthSlot(store_slot_index++)); | |
| 1680 } | |
| 1681 CallStoreIC(); | |
| 1682 } | |
| 1683 } else { | |
| 1684 VisitForEffect(value); | |
| 1685 } | |
| 1686 break; | |
| 1687 } | |
| 1688 // Duplicate receiver on stack. | |
| 1689 __ ldr(r0, MemOperand(sp)); | |
| 1690 __ push(r0); | |
| 1691 VisitForStackValue(key); | |
| 1692 VisitForStackValue(value); | |
| 1693 if (property->emit_store()) { | |
| 1694 EmitSetHomeObjectIfNeeded( | |
| 1695 value, 2, expr->SlotForHomeObject(value, &store_slot_index)); | |
| 1696 __ mov(r0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes | |
| 1697 __ push(r0); | |
| 1698 __ CallRuntime(Runtime::kSetProperty, 4); | |
| 1699 } else { | |
| 1700 __ Drop(3); | |
| 1701 } | |
| 1702 break; | |
| 1703 case ObjectLiteral::Property::PROTOTYPE: | |
| 1704 // Duplicate receiver on stack. | |
| 1705 __ ldr(r0, MemOperand(sp)); | |
| 1706 __ push(r0); | |
| 1707 VisitForStackValue(value); | |
| 1708 DCHECK(property->emit_store()); | |
| 1709 __ CallRuntime(Runtime::kInternalSetPrototype, 2); | |
| 1710 break; | |
| 1711 | |
| 1712 case ObjectLiteral::Property::GETTER: | |
| 1713 if (property->emit_store()) { | |
| 1714 accessor_table.lookup(key)->second->getter = value; | |
| 1715 } | |
| 1716 break; | |
| 1717 case ObjectLiteral::Property::SETTER: | |
| 1718 if (property->emit_store()) { | |
| 1719 accessor_table.lookup(key)->second->setter = value; | |
| 1720 } | |
| 1721 break; | |
| 1722 } | |
| 1723 } | |
| 1724 | |
| 1725 // Emit code to define accessors, using only a single call to the runtime for | |
| 1726 // each pair of corresponding getters and setters. | |
| 1727 for (AccessorTable::Iterator it = accessor_table.begin(); | |
| 1728 it != accessor_table.end(); | |
| 1729 ++it) { | |
| 1730 __ ldr(r0, MemOperand(sp)); // Duplicate receiver. | |
| 1731 __ push(r0); | |
| 1732 VisitForStackValue(it->first); | |
| 1733 EmitAccessor(it->second->getter); | |
| 1734 EmitSetHomeObjectIfNeeded( | |
| 1735 it->second->getter, 2, | |
| 1736 expr->SlotForHomeObject(it->second->getter, &store_slot_index)); | |
| 1737 EmitAccessor(it->second->setter); | |
| 1738 EmitSetHomeObjectIfNeeded( | |
| 1739 it->second->setter, 3, | |
| 1740 expr->SlotForHomeObject(it->second->setter, &store_slot_index)); | |
| 1741 __ mov(r0, Operand(Smi::FromInt(NONE))); | |
| 1742 __ push(r0); | |
| 1743 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5); | |
| 1744 } | |
| 1745 | |
| 1746 // Object literals have two parts. The "static" part on the left contains no | |
| 1747 // computed property names, and so we can compute its map ahead of time; see | |
| 1748 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part | |
| 1749 // starts with the first computed property name, and continues with all | |
| 1750 // properties to its right. All the code from above initializes the static | |
| 1751 // component of the object literal, and arranges for the map of the result to | |
| 1752 // reflect the static order in which the keys appear. For the dynamic | |
| 1753 // properties, we compile them into a series of "SetOwnProperty" runtime | |
| 1754 // calls. This will preserve insertion order. | |
| 1755 for (; property_index < expr->properties()->length(); property_index++) { | |
| 1756 ObjectLiteral::Property* property = expr->properties()->at(property_index); | |
| 1757 | |
| 1758 Expression* value = property->value(); | |
| 1759 if (!result_saved) { | |
| 1760 __ push(r0); // Save result on the stack | |
| 1761 result_saved = true; | |
| 1762 } | |
| 1763 | |
| 1764 __ ldr(r0, MemOperand(sp)); // Duplicate receiver. | |
| 1765 __ push(r0); | |
| 1766 | |
| 1767 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { | |
| 1768 DCHECK(!property->is_computed_name()); | |
| 1769 VisitForStackValue(value); | |
| 1770 DCHECK(property->emit_store()); | |
| 1771 __ CallRuntime(Runtime::kInternalSetPrototype, 2); | |
| 1772 } else { | |
| 1773 EmitPropertyKey(property, expr->GetIdForProperty(property_index)); | |
| 1774 VisitForStackValue(value); | |
| 1775 EmitSetHomeObjectIfNeeded( | |
| 1776 value, 2, expr->SlotForHomeObject(value, &store_slot_index)); | |
| 1777 | |
| 1778 switch (property->kind()) { | |
| 1779 case ObjectLiteral::Property::CONSTANT: | |
| 1780 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | |
| 1781 case ObjectLiteral::Property::COMPUTED: | |
| 1782 if (property->emit_store()) { | |
| 1783 __ mov(r0, Operand(Smi::FromInt(NONE))); | |
| 1784 __ push(r0); | |
| 1785 __ CallRuntime(Runtime::kDefineDataPropertyUnchecked, 4); | |
| 1786 } else { | |
| 1787 __ Drop(3); | |
| 1788 } | |
| 1789 break; | |
| 1790 | |
| 1791 case ObjectLiteral::Property::PROTOTYPE: | |
| 1792 UNREACHABLE(); | |
| 1793 break; | |
| 1794 | |
| 1795 case ObjectLiteral::Property::GETTER: | |
| 1796 __ mov(r0, Operand(Smi::FromInt(NONE))); | |
| 1797 __ push(r0); | |
| 1798 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4); | |
| 1799 break; | |
| 1800 | |
| 1801 case ObjectLiteral::Property::SETTER: | |
| 1802 __ mov(r0, Operand(Smi::FromInt(NONE))); | |
| 1803 __ push(r0); | |
| 1804 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4); | |
| 1805 break; | |
| 1806 } | |
| 1807 } | |
| 1808 } | |
| 1809 | |
| 1810 if (expr->has_function()) { | |
| 1811 DCHECK(result_saved); | |
| 1812 __ ldr(r0, MemOperand(sp)); | |
| 1813 __ push(r0); | |
| 1814 __ CallRuntime(Runtime::kToFastProperties, 1); | |
| 1815 } | |
| 1816 | |
| 1817 if (result_saved) { | |
| 1818 context()->PlugTOS(); | |
| 1819 } else { | |
| 1820 context()->Plug(r0); | |
| 1821 } | |
| 1822 | |
| 1823 // Verify that compilation exactly consumed the number of store ic slots that | |
| 1824 // the ObjectLiteral node had to offer. | |
| 1825 DCHECK(!FLAG_vector_stores || store_slot_index == expr->slot_count()); | |
| 1826 } | |
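| // Example split for the two-part scheme above (a sketch): | |
| //   { a: 1, get b() {}, [k]: 2, c: 3 } | |
| // 'a' and the 'b' accessor form the static part, laid out via the | |
| // boilerplate map; '[k]' and every property after it, including 'c', go | |
| // through the define-property runtime calls to preserve insertion order. | |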
| 1827 | |
| 1828 | |
| 1829 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { | |
| 1830 Comment cmnt(masm_, "[ ArrayLiteral"); | |
| 1831 | |
| 1832 expr->BuildConstantElements(isolate()); | |
| 1833 | |
| 1834 Handle<FixedArray> constant_elements = expr->constant_elements(); | |
| 1835 bool has_fast_elements = | |
| 1836 IsFastObjectElementsKind(expr->constant_elements_kind()); | |
| 1837 Handle<FixedArrayBase> constant_elements_values( | |
| 1838 FixedArrayBase::cast(constant_elements->get(1))); | |
| 1839 | |
| 1840 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; | |
| 1841 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { | |
| 1842 // If the only customer of allocation sites is transitioning, then | |
| 1843 // we can turn it off if we don't have anywhere else to transition to. | |
| 1844 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | |
| 1845 } | |
| 1846 | |
| 1847 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 1848 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); | |
| 1849 __ mov(r2, Operand(Smi::FromInt(expr->literal_index()))); | |
| 1850 __ mov(r1, Operand(constant_elements)); | |
| 1851 if (MustCreateArrayLiteralWithRuntime(expr)) { | |
| 1852 __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags()))); | |
| 1853 __ Push(r3, r2, r1, r0); | |
| 1854 __ CallRuntime(Runtime::kCreateArrayLiteral, 4); | |
| 1855 } else { | |
| 1856 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); | |
| 1857 __ CallStub(&stub); | |
| 1858 } | |
| 1859 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); | |
| 1860 | |
| 1861 bool result_saved = false; // Is the result saved to the stack? | |
| 1862 ZoneList<Expression*>* subexprs = expr->values(); | |
| 1863 int length = subexprs->length(); | |
| 1864 | |
| 1865 // Emit code to evaluate all the non-constant subexpressions and to store | |
| 1866 // them into the newly cloned array. | |
| 1867 int array_index = 0; | |
| 1868 for (; array_index < length; array_index++) { | |
| 1869 Expression* subexpr = subexprs->at(array_index); | |
| 1870 if (subexpr->IsSpread()) break; | |
| 1871 | |
| 1872 // If the subexpression is a literal or a simple materialized literal it | |
| 1873 // is already set in the cloned array. | |
| 1874 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; | |
| 1875 | |
| 1876 if (!result_saved) { | |
| 1877 __ push(r0); | |
| 1878 __ Push(Smi::FromInt(expr->literal_index())); | |
| 1879 result_saved = true; | |
| 1880 } | |
| 1881 VisitForAccumulatorValue(subexpr); | |
| 1882 | |
| 1883 if (has_fast_elements) { | |
| 1884 int offset = FixedArray::kHeaderSize + (array_index * kPointerSize); | |
| 1885 __ ldr(r6, MemOperand(sp, kPointerSize)); // Copy of array literal. | |
| 1886 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset)); | |
| 1887 __ str(result_register(), FieldMemOperand(r1, offset)); | |
| 1888 // Update the write barrier for the array store. | |
| 1889 __ RecordWriteField(r1, offset, result_register(), r2, | |
| 1890 kLRHasBeenSaved, kDontSaveFPRegs, | |
| 1891 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); | |
| 1892 } else { | |
| 1893 __ mov(r3, Operand(Smi::FromInt(array_index))); | |
| 1894 StoreArrayLiteralElementStub stub(isolate()); | |
| 1895 __ CallStub(&stub); | |
| 1896 } | |
| 1897 | |
| 1898 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); | |
| 1899 } | |
| 1900 | |
| 1901 // In case the array literal contains spread expressions it has two parts. The | |
| 1902 // first part is the "static" array which has a literal index and is handled | |
| 1903 // above. The second part is the part after the first spread expression | |
| 1904 // (inclusive), and these elements get appended to the array. Note that the | |
| 1905 // number of elements an iterable produces is unknown ahead of time. | |
| 1906 if (array_index < length && result_saved) { | |
| 1907 __ pop(); // literal index | |
| 1908 __ Pop(r0); | |
| 1909 result_saved = false; | |
| 1910 } | |
| 1911 for (; array_index < length; array_index++) { | |
| 1912 Expression* subexpr = subexprs->at(array_index); | |
| 1913 | |
| 1914 __ Push(r0); | |
| 1915 if (subexpr->IsSpread()) { | |
| 1916 VisitForStackValue(subexpr->AsSpread()->expression()); | |
| 1917 __ InvokeBuiltin(Builtins::CONCAT_ITERABLE_TO_ARRAY, CALL_FUNCTION); | |
| 1918 } else { | |
| 1919 VisitForStackValue(subexpr); | |
| 1920 __ CallRuntime(Runtime::kAppendElement, 2); | |
| 1921 } | |
| 1922 | |
| 1923 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); | |
| 1924 } | |
| 1925 | |
| 1926 if (result_saved) { | |
| 1927 __ pop(); // literal index | |
| 1928 context()->PlugTOS(); | |
| 1929 } else { | |
| 1930 context()->Plug(r0); | |
| 1931 } | |
| 1932 } | |
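| // Example for the two-part handling above (a sketch): | |
| //   [0, 1, ...iter, 2] | |
| // 0 and 1 are stored straight into the cloned backing store; from ...iter | |
| // onwards, elements are appended one at a time (spreads via | |
| // CONCAT_ITERABLE_TO_ARRAY), since an iterable's length is unknown | |
| // ahead of time. | |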
| 1933 | |
| 1934 | |
| 1935 void FullCodeGenerator::VisitAssignment(Assignment* expr) { | |
| 1936 DCHECK(expr->target()->IsValidReferenceExpressionOrThis()); | |
| 1937 | |
| 1938 Comment cmnt(masm_, "[ Assignment"); | |
| 1939 SetExpressionPosition(expr, INSERT_BREAK); | |
| 1940 | |
| 1941 Property* property = expr->target()->AsProperty(); | |
| 1942 LhsKind assign_type = Property::GetAssignType(property); | |
| 1943 | |
| 1944 // Evaluate LHS expression. | |
| 1945 switch (assign_type) { | |
| 1946 case VARIABLE: | |
| 1947 // Nothing to do here. | |
| 1948 break; | |
| 1949 case NAMED_PROPERTY: | |
| 1950 if (expr->is_compound()) { | |
| 1951 // We need the receiver both on the stack and in the register. | |
| 1952 VisitForStackValue(property->obj()); | |
| 1953 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | |
| 1954 } else { | |
| 1955 VisitForStackValue(property->obj()); | |
| 1956 } | |
| 1957 break; | |
| 1958 case NAMED_SUPER_PROPERTY: | |
| 1959 VisitForStackValue( | |
| 1960 property->obj()->AsSuperPropertyReference()->this_var()); | |
| 1961 VisitForAccumulatorValue( | |
| 1962 property->obj()->AsSuperPropertyReference()->home_object()); | |
| 1963 __ Push(result_register()); | |
| 1964 if (expr->is_compound()) { | |
| 1965 const Register scratch = r1; | |
| 1966 __ ldr(scratch, MemOperand(sp, kPointerSize)); | |
| 1967 __ Push(scratch); | |
| 1968 __ Push(result_register()); | |
| 1969 } | |
| 1970 break; | |
| 1971 case KEYED_SUPER_PROPERTY: | |
| 1972 VisitForStackValue( | |
| 1973 property->obj()->AsSuperPropertyReference()->this_var()); | |
| 1974 VisitForStackValue( | |
| 1975 property->obj()->AsSuperPropertyReference()->home_object()); | |
| 1976 VisitForAccumulatorValue(property->key()); | |
| 1977 __ Push(result_register()); | |
| 1978 if (expr->is_compound()) { | |
| 1979 const Register scratch = r1; | |
| 1980 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize)); | |
| 1981 __ Push(scratch); | |
| 1982 __ ldr(scratch, MemOperand(sp, 2 * kPointerSize)); | |
| 1983 __ Push(scratch); | |
| 1984 __ Push(result_register()); | |
| 1985 } | |
| 1986 break; | |
| 1987 case KEYED_PROPERTY: | |
| 1988 if (expr->is_compound()) { | |
| 1989 VisitForStackValue(property->obj()); | |
| 1990 VisitForStackValue(property->key()); | |
| 1991 __ ldr(LoadDescriptor::ReceiverRegister(), | |
| 1992 MemOperand(sp, 1 * kPointerSize)); | |
| 1993 __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); | |
| 1994 } else { | |
| 1995 VisitForStackValue(property->obj()); | |
| 1996 VisitForStackValue(property->key()); | |
| 1997 } | |
| 1998 break; | |
| 1999 } | |
| 2000 | |
| 2001 // For compound assignments we need another deoptimization point after the | |
| 2002 // variable/property load. | |
| 2003 if (expr->is_compound()) { | |
| 2004 { AccumulatorValueContext context(this); | |
| 2005 switch (assign_type) { | |
| 2006 case VARIABLE: | |
| 2007 EmitVariableLoad(expr->target()->AsVariableProxy()); | |
| 2008 PrepareForBailout(expr->target(), TOS_REG); | |
| 2009 break; | |
| 2010 case NAMED_PROPERTY: | |
| 2011 EmitNamedPropertyLoad(property); | |
| 2012 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
| 2013 break; | |
| 2014 case NAMED_SUPER_PROPERTY: | |
| 2015 EmitNamedSuperPropertyLoad(property); | |
| 2016 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
| 2017 break; | |
| 2018 case KEYED_SUPER_PROPERTY: | |
| 2019 EmitKeyedSuperPropertyLoad(property); | |
| 2020 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
| 2021 break; | |
| 2022 case KEYED_PROPERTY: | |
| 2023 EmitKeyedPropertyLoad(property); | |
| 2024 PrepareForBailoutForId(property->LoadId(), TOS_REG); | |
| 2025 break; | |
| 2026 } | |
| 2027 } | |
| 2028 | |
| 2029 Token::Value op = expr->binary_op(); | |
| 2030 __ push(r0); // Left operand goes on the stack. | |
| 2031 VisitForAccumulatorValue(expr->value()); | |
| 2032 | |
| 2033 AccumulatorValueContext context(this); | |
| 2034 if (ShouldInlineSmiCase(op)) { | |
| 2035 EmitInlineSmiBinaryOp(expr->binary_operation(), | |
| 2036 op, | |
| 2037 expr->target(), | |
| 2038 expr->value()); | |
| 2039 } else { | |
| 2040 EmitBinaryOp(expr->binary_operation(), op); | |
| 2041 } | |
| 2042 | |
| 2043 // Deoptimization point in case the binary operation may have side effects. | |
| 2044 PrepareForBailout(expr->binary_operation(), TOS_REG); | |
| 2045 } else { | |
| 2046 VisitForAccumulatorValue(expr->value()); | |
| 2047 } | |
| 2048 | |
| 2049 SetExpressionPosition(expr); | |
| 2050 | |
| 2051 // Store the value. | |
| 2052 switch (assign_type) { | |
| 2053 case VARIABLE: | |
| 2054 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), | |
| 2055 expr->op(), expr->AssignmentSlot()); | |
| 2056 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
| 2057 context()->Plug(r0); | |
| 2058 break; | |
| 2059 case NAMED_PROPERTY: | |
| 2060 EmitNamedPropertyAssignment(expr); | |
| 2061 break; | |
| 2062 case NAMED_SUPER_PROPERTY: | |
| 2063 EmitNamedSuperPropertyStore(property); | |
| 2064 context()->Plug(r0); | |
| 2065 break; | |
| 2066 case KEYED_SUPER_PROPERTY: | |
| 2067 EmitKeyedSuperPropertyStore(property); | |
| 2068 context()->Plug(r0); | |
| 2069 break; | |
| 2070 case KEYED_PROPERTY: | |
| 2071 EmitKeyedPropertyAssignment(expr); | |
| 2072 break; | |
| 2073 } | |
| 2074 } | |
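| // For a compound assignment the path above amounts to, e.g. (a sketch): | |
| //   o.x += y  ~>  push o; load o.x and push it; evaluate y; | |
| //                 BinaryOpIC(ADD); store the result back into o.x. | |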
| 2075 | |
| 2076 | |
| 2077 void FullCodeGenerator::VisitYield(Yield* expr) { | |
| 2078 Comment cmnt(masm_, "[ Yield"); | |
| 2079 SetExpressionPosition(expr); | |
| 2080 | |
| 2081 // Evaluate yielded value first; the initial iterator definition depends on | |
| 2082 // this. It stays on the stack while we update the iterator. | |
| 2083 VisitForStackValue(expr->expression()); | |
| 2084 | |
| 2085 switch (expr->yield_kind()) { | |
| 2086 case Yield::kSuspend: | |
| 2087 // Pop value from top-of-stack slot; box result into result register. | |
| 2088 EmitCreateIteratorResult(false); | |
| 2089 __ push(result_register()); | |
| 2090 // Fall through. | |
| 2091 case Yield::kInitial: { | |
| 2092 Label suspend, continuation, post_runtime, resume; | |
| 2093 | |
| 2094 __ jmp(&suspend); | |
| 2095 __ bind(&continuation); | |
| 2096 __ RecordGeneratorContinuation(); | |
| 2097 __ jmp(&resume); | |
| 2098 | |
| 2099 __ bind(&suspend); | |
| 2100 VisitForAccumulatorValue(expr->generator_object()); | |
| 2101 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); | |
| 2102 __ mov(r1, Operand(Smi::FromInt(continuation.pos()))); | |
| 2103 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); | |
| 2104 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); | |
| 2105 __ mov(r1, cp); | |
| 2106 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, | |
| 2107 kLRHasBeenSaved, kDontSaveFPRegs); | |
| 2108 __ add(r1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); | |
| 2109 __ cmp(sp, r1); | |
| 2110 __ b(eq, &post_runtime); | |
| 2111 __ push(r0); // generator object | |
| 2112 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | |
| 2113 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 2114 __ bind(&post_runtime); | |
| 2115 __ pop(result_register()); | |
| 2116 EmitReturnSequence(); | |
| 2117 | |
| 2118 __ bind(&resume); | |
| 2119 context()->Plug(result_register()); | |
| 2120 break; | |
| 2121 } | |
| 2122 | |
| 2123 case Yield::kFinal: { | |
| 2124 VisitForAccumulatorValue(expr->generator_object()); | |
| 2125 __ mov(r1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); | |
| 2126 __ str(r1, FieldMemOperand(result_register(), | |
| 2127 JSGeneratorObject::kContinuationOffset)); | |
| 2128 // Pop value from top-of-stack slot, box result into result register. | |
| 2129 EmitCreateIteratorResult(true); | |
| 2130 EmitUnwindBeforeReturn(); | |
| 2131 EmitReturnSequence(); | |
| 2132 break; | |
| 2133 } | |
| 2134 | |
| 2135 case Yield::kDelegating: { | |
| 2136 VisitForStackValue(expr->generator_object()); | |
| 2137 | |
| 2138 // Initial stack layout is as follows: | |
| 2139 // [sp + 1 * kPointerSize] iter | |
| 2140 // [sp + 0 * kPointerSize] g | |
| 2141 | |
| 2142 Label l_catch, l_try, l_suspend, l_continuation, l_resume; | |
| 2143 Label l_next, l_call, l_loop; | |
| 2144 Register load_receiver = LoadDescriptor::ReceiverRegister(); | |
| 2145 Register load_name = LoadDescriptor::NameRegister(); | |
| 2146 | |
| 2147 // Initial send value is undefined. | |
| 2148 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | |
| 2149 __ b(&l_next); | |
| 2150 | |
| 2151 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } | |
| 2152 __ bind(&l_catch); | |
| 2153 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw" | |
| 2154 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter | |
| 2155 __ Push(load_name, r3, r0); // "throw", iter, except | |
| 2156 __ jmp(&l_call); | |
| 2157 | |
| 2158 // try { received = %yield result } | |
| 2159 // Shuffle the received result above a try handler and yield it without | |
| 2160 // re-boxing. | |
| 2161 __ bind(&l_try); | |
| 2162 __ pop(r0); // result | |
| 2163 int handler_index = NewHandlerTableEntry(); | |
| 2164 EnterTryBlock(handler_index, &l_catch); | |
| 2165 const int try_block_size = TryCatch::kElementCount * kPointerSize; | |
| 2166 __ push(r0); // result | |
| 2167 | |
| 2168 __ jmp(&l_suspend); | |
| 2169 __ bind(&l_continuation); | |
| 2170 __ RecordGeneratorContinuation(); | |
| 2171 __ jmp(&l_resume); | |
| 2172 | |
| 2173 __ bind(&l_suspend); | |
| 2174 const int generator_object_depth = kPointerSize + try_block_size; | |
| 2175 __ ldr(r0, MemOperand(sp, generator_object_depth)); | |
| 2176 __ push(r0); // g | |
| 2177 __ Push(Smi::FromInt(handler_index)); // handler-index | |
| 2178 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); | |
| 2179 __ mov(r1, Operand(Smi::FromInt(l_continuation.pos()))); | |
| 2180 __ str(r1, FieldMemOperand(r0, JSGeneratorObject::kContinuationOffset)); | |
| 2181 __ str(cp, FieldMemOperand(r0, JSGeneratorObject::kContextOffset)); | |
| 2182 __ mov(r1, cp); | |
| 2183 __ RecordWriteField(r0, JSGeneratorObject::kContextOffset, r1, r2, | |
| 2184 kLRHasBeenSaved, kDontSaveFPRegs); | |
| 2185 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2); | |
| 2186 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 2187 __ pop(r0); // result | |
| 2188 EmitReturnSequence(); | |
| 2189 __ bind(&l_resume); // received in r0 | |
| 2190 ExitTryBlock(handler_index); | |
| 2191 | |
| 2192 // receiver = iter; f = 'next'; arg = received; | |
| 2193 __ bind(&l_next); | |
| 2194 | |
| 2195 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next" | |
| 2196 __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter | |
| 2197 __ Push(load_name, r3, r0); // "next", iter, received | |
| 2198 | |
| 2199 // result = receiver[f](arg); | |
| 2200 __ bind(&l_call); | |
| 2201 __ ldr(load_receiver, MemOperand(sp, kPointerSize)); | |
| 2202 __ ldr(load_name, MemOperand(sp, 2 * kPointerSize)); | |
| 2203 __ mov(LoadDescriptor::SlotRegister(), | |
| 2204 Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot()))); | |
| 2205 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code(); | |
| 2206 CallIC(ic, TypeFeedbackId::None()); | |
| 2207 __ mov(r1, r0); | |
| 2208 __ str(r1, MemOperand(sp, 2 * kPointerSize)); | |
| 2209 SetCallPosition(expr, 1); | |
| 2210 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD); | |
| 2211 __ CallStub(&stub); | |
| 2212 | |
| 2213 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 2214 __ Drop(1); // The function is still on the stack; drop it. | |
| 2215 | |
| 2216 // if (!result.done) goto l_try; | |
| 2217 __ bind(&l_loop); | |
| 2218 __ Move(load_receiver, r0); | |
| 2219 | |
| 2220 __ push(load_receiver); // save result | |
| 2221 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" | |
| 2222 __ mov(LoadDescriptor::SlotRegister(), | |
| 2223 Operand(SmiFromSlot(expr->DoneFeedbackSlot()))); | |
| 2224 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.done | |
| 2225 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); | |
| 2226 CallIC(bool_ic); | |
| 2227 __ cmp(r0, Operand(0)); | |
| 2228 __ b(eq, &l_try); | |
| 2229 | |
| 2230 // result.value | |
| 2231 __ pop(load_receiver); // result | |
| 2232 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" | |
| 2233 __ mov(LoadDescriptor::SlotRegister(), | |
| 2234 Operand(SmiFromSlot(expr->ValueFeedbackSlot()))); | |
| 2235 CallLoadIC(NOT_INSIDE_TYPEOF); // r0=result.value | |
| 2236 context()->DropAndPlug(2, r0); // drop iter and g | |
| 2237 break; | |
| 2238 } | |
| 2239 } | |
| 2240 } | |
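| // Pieced together, the delegating labels above behave roughly like this | |
| // sketch, where suspend() stands in for handing the unboxed result object | |
| // to our caller: | |
| //   let received = undefined; | |
| //   let result = iter.next(received); | |
| //   while (!result.done) { | |
| //     try { received = suspend(result); }               // l_try/l_suspend | |
| //     catch (e) { result = iter["throw"](e); continue; }  // l_catch | |
| //     result = iter.next(received);                     // l_next/l_call | |
| //   } | |
| //   // the value of the whole delegating yield is result.value | |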
| 2241 | |
| 2242 | |
| 2243 void FullCodeGenerator::EmitGeneratorResume(Expression *generator, | |
| 2244 Expression *value, | |
| 2245 JSGeneratorObject::ResumeMode resume_mode) { | |
| 2246 // The value stays in r0, and is ultimately read by the resumed generator, as | |
| 2247 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it, or it is | |
| 2248 // read to throw the value when the resumed generator is already closed. | |
| 2249 // r1 will hold the generator object until the activation has been resumed. | |
| 2250 VisitForStackValue(generator); | |
| 2251 VisitForAccumulatorValue(value); | |
| 2252 __ pop(r1); | |
| 2253 | |
| 2254 // Load suspended function and context. | |
| 2255 __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset)); | |
| 2256 __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset)); | |
| 2257 | |
| 2258 // Load receiver and store as the first argument. | |
| 2259 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset)); | |
| 2260 __ push(r2); | |
| 2261 | |
| 2262 // Push holes for the rest of the arguments to the generator function. | |
| 2263 __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | |
| 2264 __ ldr(r3, | |
| 2265 FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); | |
| 2266 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); | |
| 2267 Label push_argument_holes, push_frame; | |
| 2268 __ bind(&push_argument_holes); | |
| 2269 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC); | |
| 2270 __ b(mi, &push_frame); | |
| 2271 __ push(r2); | |
| 2272 __ jmp(&push_argument_holes); | |
| 2273 | |
| 2274 // Enter a new JavaScript frame, and initialize its slots as they were when | |
| 2275 // the generator was suspended. | |
| 2276 Label resume_frame, done; | |
| 2277 __ bind(&push_frame); | |
| 2278 __ bl(&resume_frame); | |
| 2279 __ jmp(&done); | |
| 2280 __ bind(&resume_frame); | |
| 2281 // lr = return address. | |
| 2282 // fp = caller's frame pointer. | |
| 2283 // pp = caller's constant pool (if FLAG_enable_embedded_constant_pool), | |
| 2284 // cp = callee's context, | |
| 2285 // r4 = callee's JS function. | |
| 2286 __ PushFixedFrame(r4); | |
| 2287 // Adjust FP to point to saved FP. | |
| 2288 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | |
| 2289 | |
| 2290 // Load the operand stack size. | |
| 2291 __ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset)); | |
| 2292 __ ldr(r3, FieldMemOperand(r3, FixedArray::kLengthOffset)); | |
| 2293 __ SmiUntag(r3); | |
| 2294 | |
| 2295 // If we are sending a value and there is no operand stack, we can jump back | |
| 2296 // in directly. | |
| 2297 if (resume_mode == JSGeneratorObject::NEXT) { | |
| 2298 Label slow_resume; | |
| 2299 __ cmp(r3, Operand(0)); | |
| 2300 __ b(ne, &slow_resume); | |
| 2301 __ ldr(r3, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); | |
| 2302 | |
| 2303 { ConstantPoolUnavailableScope constant_pool_unavailable(masm_); | |
| 2304 if (FLAG_enable_embedded_constant_pool) { | |
| 2305 // Load the new code object's constant pool pointer. | |
| 2306 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3); | |
| 2307 } | |
| 2308 | |
| 2309 __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); | |
| 2310 __ SmiUntag(r2); | |
| 2311 __ add(r3, r3, r2); | |
| 2312 __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); | |
| 2313 __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset)); | |
| 2314 __ Jump(r3); | |
| 2315 } | |
| 2316 __ bind(&slow_resume); | |
| 2317 } | |
| 2318 | |
| 2319 // Otherwise, we push holes for the operand stack and call the runtime to fix | |
| 2320 // up the stack and the handlers. | |
| 2321 Label push_operand_holes, call_resume; | |
| 2322 __ bind(&push_operand_holes); | |
| 2323 __ sub(r3, r3, Operand(1), SetCC); | |
| 2324 __ b(mi, &call_resume); | |
| 2325 __ push(r2); | |
| 2326 __ b(&push_operand_holes); | |
| 2327 __ bind(&call_resume); | |
| 2328 DCHECK(!result_register().is(r1)); | |
| 2329 __ Push(r1, result_register()); | |
| 2330 __ Push(Smi::FromInt(resume_mode)); | |
| 2331 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3); | |
| 2332 // Not reached: the runtime call returns elsewhere. | |
| 2333 __ stop("not-reached"); | |
| 2334 | |
| 2335 __ bind(&done); | |
| 2336 context()->Plug(result_register()); | |
| 2337 } | |
| 2338 | |
| 2339 | |
| 2340 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { | |
| 2341 Label gc_required; | |
| 2342 Label allocated; | |
| 2343 | |
| 2344 const int instance_size = 5 * kPointerSize; | |
| 2345 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(), | |
| 2346 instance_size); | |
| 2347 | |
| 2348 __ Allocate(instance_size, r0, r2, r3, &gc_required, TAG_OBJECT); | |
| 2349 __ jmp(&allocated); | |
| 2350 | |
| 2351 __ bind(&gc_required); | |
| 2352 __ Push(Smi::FromInt(instance_size)); | |
| 2353 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | |
| 2354 __ ldr(context_register(), | |
| 2355 MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 2356 | |
| 2357 __ bind(&allocated); | |
| 2358 __ ldr(r1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
| 2359 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset)); | |
| 2360 __ ldr(r1, ContextOperand(r1, Context::ITERATOR_RESULT_MAP_INDEX)); | |
| 2361 __ pop(r2); | |
| 2362 __ mov(r3, Operand(isolate()->factory()->ToBoolean(done))); | |
| 2363 __ mov(r4, Operand(isolate()->factory()->empty_fixed_array())); | |
| 2364 __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 2365 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | |
| 2366 __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset)); | |
| 2367 __ str(r2, | |
| 2368 FieldMemOperand(r0, JSGeneratorObject::kResultValuePropertyOffset)); | |
| 2369 __ str(r3, | |
| 2370 FieldMemOperand(r0, JSGeneratorObject::kResultDonePropertyOffset)); | |
| 2371 | |
| 2372 // Only the value field needs a write barrier, as the other values are in the | |
| 2373 // root set. | |
| 2374 __ RecordWriteField(r0, JSGeneratorObject::kResultValuePropertyOffset, | |
| 2375 r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); | |
| 2376 } | |
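| // The object assembled above is the standard iterator result, roughly | |
| //   { value: <popped value>, done: <done> } | |
| // with map, properties and elements stored by hand rather than through a | |
| // constructor, which is why only the value field needs a write barrier. | |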
| 2377 | |
| 2378 | |
| 2379 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { | |
| 2380 SetExpressionPosition(prop); | |
| 2381 Literal* key = prop->key()->AsLiteral(); | |
| 2382 DCHECK(!prop->IsSuperAccess()); | |
| 2383 | |
| 2384 __ mov(LoadDescriptor::NameRegister(), Operand(key->value())); | |
| 2385 __ mov(LoadDescriptor::SlotRegister(), | |
| 2386 Operand(SmiFromSlot(prop->PropertyFeedbackSlot()))); | |
| 2387 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode()); | |
| 2388 } | |
| 2389 | |
| 2390 | |
| 2391 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) { | |
| 2392 // Stack: receiver, home_object. | |
| 2393 SetExpressionPosition(prop); | |
| 2394 Literal* key = prop->key()->AsLiteral(); | |
| 2395 DCHECK(!key->value()->IsSmi()); | |
| 2396 DCHECK(prop->IsSuperAccess()); | |
| 2397 | |
| 2398 __ Push(key->value()); | |
| 2399 __ Push(Smi::FromInt(language_mode())); | |
| 2400 __ CallRuntime(Runtime::kLoadFromSuper, 4); | |
| 2401 } | |
| 2402 | |
| 2403 | |
| 2404 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { | |
| 2405 SetExpressionPosition(prop); | |
| 2406 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code(); | |
| 2407 __ mov(LoadDescriptor::SlotRegister(), | |
| 2408 Operand(SmiFromSlot(prop->PropertyFeedbackSlot()))); | |
| 2409 CallIC(ic); | |
| 2410 } | |
| 2411 | |
| 2412 | |
| 2413 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) { | |
| 2414 // Stack: receiver, home_object, key. | |
| 2415 SetExpressionPosition(prop); | |
| 2416 __ Push(Smi::FromInt(language_mode())); | |
| 2417 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4); | |
| 2418 } | |
| 2419 | |
| 2420 | |
| 2421 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | |
| 2422 Token::Value op, | |
| 2423 Expression* left_expr, | |
| 2424 Expression* right_expr) { | |
| 2425 Label done, smi_case, stub_call; | |
| 2426 | |
| 2427 Register scratch1 = r2; | |
| 2428 Register scratch2 = r3; | |
| 2429 | |
| 2430 // Get the arguments. | |
| 2431 Register left = r1; | |
| 2432 Register right = r0; | |
| 2433 __ pop(left); | |
| 2434 | |
| 2435 // Perform combined smi check on both operands. | |
| 2436 __ orr(scratch1, left, Operand(right)); | |
| 2437 STATIC_ASSERT(kSmiTag == 0); | |
| 2438 JumpPatchSite patch_site(masm_); | |
| 2439 patch_site.EmitJumpIfSmi(scratch1, &smi_case); | |
| 2440 | |
| 2441 __ bind(&stub_call); | |
| 2442 Handle<Code> code = | |
| 2443 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code(); | |
| 2444 CallIC(code, expr->BinaryOperationFeedbackId()); | |
| 2445 patch_site.EmitPatchInfo(); | |
| 2446 __ jmp(&done); | |
| 2447 | |
| 2448 __ bind(&smi_case); | |
| 2449 // Smi case. This code works the same way as the smi-smi case in the type | |
| 2450 // recording binary operation stub. | |
| 2451 switch (op) { | |
| 2452 case Token::SAR: | |
| 2453 __ GetLeastBitsFromSmi(scratch1, right, 5); | |
| 2454 __ mov(right, Operand(left, ASR, scratch1)); | |
| 2455 __ bic(right, right, Operand(kSmiTagMask)); | |
| 2456 break; | |
| 2457 case Token::SHL: { | |
| 2458 __ SmiUntag(scratch1, left); | |
| 2459 __ GetLeastBitsFromSmi(scratch2, right, 5); | |
| 2460 __ mov(scratch1, Operand(scratch1, LSL, scratch2)); | |
| 2461 __ TrySmiTag(right, scratch1, &stub_call); | |
| 2462 break; | |
| 2463 } | |
| 2464 case Token::SHR: { | |
| 2465 __ SmiUntag(scratch1, left); | |
| 2466 __ GetLeastBitsFromSmi(scratch2, right, 5); | |
| 2467 __ mov(scratch1, Operand(scratch1, LSR, scratch2)); | |
| 2468 __ tst(scratch1, Operand(0xc0000000)); | |
| 2469 __ b(ne, &stub_call); | |
| 2470 __ SmiTag(right, scratch1); | |
| 2471 break; | |
| 2472 } | |
| 2473 case Token::ADD: | |
| 2474 __ add(scratch1, left, Operand(right), SetCC); | |
| 2475 __ b(vs, &stub_call); | |
| 2476 __ mov(right, scratch1); | |
| 2477 break; | |
| 2478 case Token::SUB: | |
| 2479 __ sub(scratch1, left, Operand(right), SetCC); | |
| 2480 __ b(vs, &stub_call); | |
| 2481 __ mov(right, scratch1); | |
| 2482 break; | |
| 2483 case Token::MUL: { | |
| 2484 __ SmiUntag(ip, right); | |
| 2485 __ smull(scratch1, scratch2, left, ip); | |
| 2486 __ mov(ip, Operand(scratch1, ASR, 31)); | |
| 2487 __ cmp(ip, Operand(scratch2)); | |
| 2488 __ b(ne, &stub_call); | |
| 2489 __ cmp(scratch1, Operand::Zero()); | |
| 2490 __ mov(right, Operand(scratch1), LeaveCC, ne); | |
| 2491 __ b(ne, &done); | |
| 2492 __ add(scratch2, right, Operand(left), SetCC); | |
| 2493 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl); | |
| 2494 __ b(mi, &stub_call); | |
| 2495 break; | |
| 2496 } | |
| 2497 case Token::BIT_OR: | |
| 2498 __ orr(right, left, Operand(right)); | |
| 2499 break; | |
| 2500 case Token::BIT_AND: | |
| 2501 __ and_(right, left, Operand(right)); | |
| 2502 break; | |
| 2503 case Token::BIT_XOR: | |
| 2504 __ eor(right, left, Operand(right)); | |
| 2505 break; | |
| 2506 default: | |
| 2507 UNREACHABLE(); | |
| 2508 } | |
| 2509 | |
| 2510 __ bind(&done); | |
| 2511 context()->Plug(r0); | |
| 2512 } | |
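| // Worked example for the smi fast path above: a 32-bit smi is the value | |
| // shifted left once (tag bit 0). For ADD, 5 + 7 operates on 10 + 14 = 24, | |
| // which is 12 tagged, so the sum is already a valid smi. MUL untags one | |
| // operand first, since (5 << 1) * (7 << 1) would apply the shift twice. | |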
| 2513 | |
| 2514 | |
| 2515 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit, | |
| 2516 int* used_store_slots) { | |
| 2517 // Constructor is in r0. | |
| 2518 DCHECK(lit != NULL); | |
| 2519 __ push(r0); | |
| 2520 | |
| 2521 // No access check is needed here since the constructor is created by the | |
| 2522 // class literal. | |
| 2523 Register scratch = r1; | |
| 2524 __ ldr(scratch, | |
| 2525 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); | |
| 2526 __ push(scratch); | |
| 2527 | |
| 2528 for (int i = 0; i < lit->properties()->length(); i++) { | |
| 2529 ObjectLiteral::Property* property = lit->properties()->at(i); | |
| 2530 Expression* value = property->value(); | |
| 2531 | |
| 2532 if (property->is_static()) { | |
| 2533 __ ldr(scratch, MemOperand(sp, kPointerSize)); // constructor | |
| 2534 } else { | |
| 2535 __ ldr(scratch, MemOperand(sp, 0)); // prototype | |
| 2536 } | |
| 2537 __ push(scratch); | |
| 2538 EmitPropertyKey(property, lit->GetIdForProperty(i)); | |
| 2539 | |
| 2540 // The static prototype property is read-only. We handle the non-computed | |
| 2541 // property name case in the parser. Since this is the only case where we | |
| 2542 // need to check for an own read-only property, we special-case it here | |
| 2543 // instead of performing the check for every property. | |
| 2544 if (property->is_static() && property->is_computed_name()) { | |
| 2545 __ CallRuntime(Runtime::kThrowIfStaticPrototype, 1); | |
| 2546 __ push(r0); | |
| 2547 } | |
| 2548 | |
| 2549 VisitForStackValue(value); | |
| 2550 EmitSetHomeObjectIfNeeded(value, 2, | |
| 2551 lit->SlotForHomeObject(value, used_store_slots)); | |
| 2552 | |
| 2553 switch (property->kind()) { | |
| 2554 case ObjectLiteral::Property::CONSTANT: | |
| 2555 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | |
| 2556 case ObjectLiteral::Property::PROTOTYPE: | |
| 2557 UNREACHABLE(); | |
| 2558 case ObjectLiteral::Property::COMPUTED: | |
| 2559 __ CallRuntime(Runtime::kDefineClassMethod, 3); | |
| 2560 break; | |
| 2561 | |
| 2562 case ObjectLiteral::Property::GETTER: | |
| 2563 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM))); | |
| 2564 __ push(r0); | |
| 2565 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked, 4); | |
| 2566 break; | |
| 2567 | |
| 2568 case ObjectLiteral::Property::SETTER: | |
| 2569 __ mov(r0, Operand(Smi::FromInt(DONT_ENUM))); | |
| 2570 __ push(r0); | |
| 2571 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked, 4); | |
| 2572 break; | |
| 2573 | |
| 2574 default: | |
| 2575 UNREACHABLE(); | |
| 2576 } | |
| 2577 } | |
| 2578 | |
| 2579 // prototype | |
| 2580 __ CallRuntime(Runtime::kToFastProperties, 1); | |
| 2581 | |
| 2582 // constructor | |
| 2583 __ CallRuntime(Runtime::kToFastProperties, 1); | |
| 2584 | |
| 2585 if (is_strong(language_mode())) { | |
| 2586 __ ldr(scratch, | |
| 2587 FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset)); | |
| 2588 __ push(r0); | |
| 2589 __ push(scratch); | |
| 2590 // TODO(conradw): It would be more efficient to define the properties with | |
| 2591 // the right attributes the first time round. | |
| 2592 // Freeze the prototype. | |
| 2593 __ CallRuntime(Runtime::kObjectFreeze, 1); | |
| 2594 // Freeze the constructor. | |
| 2595 __ CallRuntime(Runtime::kObjectFreeze, 1); | |
| 2596 } | |
| 2597 } | |
| 2598 | |
| 2599 | |
| 2600 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { | |
| 2601 __ pop(r1); | |
| 2602 Handle<Code> code = | |
| 2603 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code(); | |
| 2604 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. | |
| 2605 CallIC(code, expr->BinaryOperationFeedbackId()); | |
| 2606 patch_site.EmitPatchInfo(); | |
| 2607 context()->Plug(r0); | |
| 2608 } | |
| 2609 | |
| 2610 | |
| 2611 void FullCodeGenerator::EmitAssignment(Expression* expr, | |
| 2612 FeedbackVectorICSlot slot) { | |
| 2613 DCHECK(expr->IsValidReferenceExpressionOrThis()); | |
| 2614 | |
| 2615 Property* prop = expr->AsProperty(); | |
| 2616 LhsKind assign_type = Property::GetAssignType(prop); | |
| 2617 | |
| 2618 switch (assign_type) { | |
| 2619 case VARIABLE: { | |
| 2620 Variable* var = expr->AsVariableProxy()->var(); | |
| 2621 EffectContext context(this); | |
| 2622 EmitVariableAssignment(var, Token::ASSIGN, slot); | |
| 2623 break; | |
| 2624 } | |
| 2625 case NAMED_PROPERTY: { | |
| 2626 __ push(r0); // Preserve value. | |
| 2627 VisitForAccumulatorValue(prop->obj()); | |
| 2628 __ Move(StoreDescriptor::ReceiverRegister(), r0); | |
| 2629 __ pop(StoreDescriptor::ValueRegister()); // Restore value. | |
| 2630 __ mov(StoreDescriptor::NameRegister(), | |
| 2631 Operand(prop->key()->AsLiteral()->value())); | |
| 2632 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot); | |
| 2633 CallStoreIC(); | |
| 2634 break; | |
| 2635 } | |
| 2636 case NAMED_SUPER_PROPERTY: { | |
| 2637 __ Push(r0); | |
| 2638 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | |
| 2639 VisitForAccumulatorValue( | |
| 2640 prop->obj()->AsSuperPropertyReference()->home_object()); | |
| 2641 // stack: value, this; r0: home_object | |
| 2642 Register scratch = r2; | |
| 2643 Register scratch2 = r3; | |
| 2644 __ mov(scratch, result_register()); // home_object | |
| 2645 __ ldr(r0, MemOperand(sp, kPointerSize)); // value | |
| 2646 __ ldr(scratch2, MemOperand(sp, 0)); // this | |
| 2647 __ str(scratch2, MemOperand(sp, kPointerSize)); // this | |
| 2648 __ str(scratch, MemOperand(sp, 0)); // home_object | |
| 2649 // stack: this, home_object; r0: value | |
| 2650 EmitNamedSuperPropertyStore(prop); | |
| 2651 break; | |
| 2652 } | |
| 2653 case KEYED_SUPER_PROPERTY: { | |
| 2654 __ Push(r0); | |
| 2655 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | |
| 2656 VisitForStackValue( | |
| 2657 prop->obj()->AsSuperPropertyReference()->home_object()); | |
| 2658 VisitForAccumulatorValue(prop->key()); | |
| 2659 Register scratch = r2; | |
| 2660 Register scratch2 = r3; | |
| 2661 __ ldr(scratch2, MemOperand(sp, 2 * kPointerSize)); // value | |
| 2662 // stack: value, this, home_object; r0: key, r3: value | |
| 2663 __ ldr(scratch, MemOperand(sp, kPointerSize)); // this | |
| 2664 __ str(scratch, MemOperand(sp, 2 * kPointerSize)); | |
| 2665 __ ldr(scratch, MemOperand(sp, 0)); // home_object | |
| 2666 __ str(scratch, MemOperand(sp, kPointerSize)); | |
| 2667 __ str(r0, MemOperand(sp, 0)); | |
| 2668 __ Move(r0, scratch2); | |
| 2669 // stack: this, home_object, key; r0: value. | |
| 2670 EmitKeyedSuperPropertyStore(prop); | |
| 2671 break; | |
| 2672 } | |
| 2673 case KEYED_PROPERTY: { | |
| 2674 __ push(r0); // Preserve value. | |
| 2675 VisitForStackValue(prop->obj()); | |
| 2676 VisitForAccumulatorValue(prop->key()); | |
| 2677 __ Move(StoreDescriptor::NameRegister(), r0); | |
| 2678 __ Pop(StoreDescriptor::ValueRegister(), | |
| 2679 StoreDescriptor::ReceiverRegister()); | |
| 2680 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot); | |
| 2681 Handle<Code> ic = | |
| 2682 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | |
| 2683 CallIC(ic); | |
| 2684 break; | |
| 2685 } | |
| 2686 } | |
| 2687 context()->Plug(r0); | |
| 2688 } | |
| 2689 | |
| 2690 | |
| 2691 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( | |
| 2692 Variable* var, MemOperand location) { | |
| 2693 __ str(result_register(), location); | |
| 2694 if (var->IsContextSlot()) { | |
| 2695 // RecordWrite may destroy all its register arguments. | |
| 2696 __ mov(r3, result_register()); | |
| 2697 int offset = Context::SlotOffset(var->index()); | |
| 2698 __ RecordWriteContextSlot( | |
| 2699 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs); | |
| 2700 } | |
| 2701 } | |
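| // A sketch of why the context store needs a barrier: contexts are heap | |
| // objects, so in e.g. | |
| //   function mk() { let o = {}; return () => o; } | |
| // a possibly new-space object is stored into a context that may already | |
| // live in old space; RecordWriteContextSlot records that reference. | |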
| 2702 | |
| 2703 | |
| 2704 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, | |
| 2705 FeedbackVectorICSlot slot) { | |
| 2706 if (var->IsUnallocated()) { | |
| 2707 // Global var, const, or let. | |
| 2708 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); | |
| 2709 __ ldr(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand()); | |
| 2710 if (FLAG_vector_stores) EmitLoadStoreICSlot(slot); | |
| 2711 CallStoreIC(); | |
| 2712 | |
| 2713 } else if (var->IsGlobalSlot()) { | |
| 2714 // Global var, const, or let. | |
| 2715 DCHECK(var->index() > 0); | |
| 2716 DCHECK(var->IsStaticGlobalObjectProperty()); | |
| 2717 // Each var occupies two slots in the context: var->index() for reads, var->index() + 1 for writes. | |
| 2718 int slot_index = var->index() + 1; | |
| 2719 int depth = scope()->ContextChainLength(var->scope()); | |
| 2720 __ mov(StoreGlobalViaContextDescriptor::DepthRegister(), | |
| 2721 Operand(Smi::FromInt(depth))); | |
| 2722 __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), | |
| 2723 Operand(Smi::FromInt(slot_index))); | |
| 2724 __ mov(StoreGlobalViaContextDescriptor::NameRegister(), | |
| 2725 Operand(var->name())); | |
| 2726 DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0)); | |
| 2727 StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); | |
| 2728 __ CallStub(&stub); | |
| 2729 | |
| 2730 } else if (var->mode() == LET && op != Token::INIT_LET) { | |
| 2731 // Non-initializing assignment to let variable needs a write barrier. | |
| 2732 DCHECK(!var->IsLookupSlot()); | |
| 2733 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
| 2734 Label assign; | |
| 2735 MemOperand location = VarOperand(var, r1); | |
| 2736 __ ldr(r3, location); | |
| 2737 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | |
| 2738 __ b(ne, &assign); | |
| 2739 __ mov(r3, Operand(var->name())); | |
| 2740 __ push(r3); | |
| 2741 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
| 2742 // Perform the assignment. | |
| 2743 __ bind(&assign); | |
| 2744 EmitStoreToStackLocalOrContextSlot(var, location); | |
| 2745 | |
| 2746 } else if (var->mode() == CONST && op != Token::INIT_CONST) { | |
| 2747 // Non-initializing assignment to a const variable always throws. | |
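| // e.g. (illustrative): const c = 1; c = 2; — a ReferenceError if c is | |
| // still uninitialized (TDZ), otherwise a const assignment error. | |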
| 2748 DCHECK(!var->IsLookupSlot()); | |
| 2749 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
| 2750 Label const_error; | |
| 2751 MemOperand location = VarOperand(var, r1); | |
| 2752 __ ldr(r3, location); | |
| 2753 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | |
| 2754 __ b(ne, &const_error); | |
| 2755 __ mov(r3, Operand(var->name())); | |
| 2756 __ push(r3); | |
| 2757 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
| 2758 __ bind(&const_error); | |
| 2759 __ CallRuntime(Runtime::kThrowConstAssignError, 0); | |
| 2760 | |
| 2761 } else if (var->is_this() && op == Token::INIT_CONST) { | |
| 2762 // Initializing assignment to const {this} needs a write barrier. | |
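| // e.g. (illustrative): in a derived-class constructor, super() initializes | |
| // 'this'; finding it already initialized here throws a ReferenceError. | |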
| 2763 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
| 2764 Label uninitialized_this; | |
| 2765 MemOperand location = VarOperand(var, r1); | |
| 2766 __ ldr(r3, location); | |
| 2767 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | |
| 2768 __ b(eq, &uninitialized_this); | |
| 2769 __ mov(r0, Operand(var->name())); | |
| 2770 __ Push(r0); | |
| 2771 __ CallRuntime(Runtime::kThrowReferenceError, 1); | |
| 2772 __ bind(&uninitialized_this); | |
| 2773 EmitStoreToStackLocalOrContextSlot(var, location); | |
| 2774 | |
| 2775 } else if (!var->is_const_mode() || op == Token::INIT_CONST) { | |
| 2776 if (var->IsLookupSlot()) { | |
| 2777 // Assignment to var. | |
| 2778 __ push(r0); // Value. | |
| 2779 __ mov(r1, Operand(var->name())); | |
| 2780 __ mov(r0, Operand(Smi::FromInt(language_mode()))); | |
| 2781 __ Push(cp, r1, r0); // Context, name, language mode. | |
| 2782 __ CallRuntime(Runtime::kStoreLookupSlot, 4); | |
| 2783 } else { | |
| 2784 // Assignment to var or initializing assignment to let/const in harmony | |
| 2785 // mode. | |
| 2786 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); | |
| 2787 MemOperand location = VarOperand(var, r1); | |
| 2788 if (generate_debug_code_ && op == Token::INIT_LET) { | |
| 2789 // Check for an uninitialized let binding. | |
| 2790 __ ldr(r2, location); | |
| 2791 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); | |
| 2792 __ Check(eq, kLetBindingReInitialization); | |
| 2793 } | |
| 2794 EmitStoreToStackLocalOrContextSlot(var, location); | |
| 2795 } | |
| 2796 | |
| 2797 } else if (op == Token::INIT_CONST_LEGACY) { | |
| 2798 // Const initializers need a write barrier. | |
| 2799 DCHECK(var->mode() == CONST_LEGACY); | |
| 2800 DCHECK(!var->IsParameter()); // No const parameters. | |
| 2801 if (var->IsLookupSlot()) { | |
| 2802 __ push(r0); | |
| 2803 __ mov(r0, Operand(var->name())); | |
| 2804 __ Push(cp, r0); // Context and name. | |
| 2805 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3); | |
| 2806 } else { | |
| 2807 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | |
| 2808 Label skip; | |
| 2809 MemOperand location = VarOperand(var, r1); | |
| 2810 __ ldr(r2, location); | |
| 2811 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); | |
| 2812 __ b(ne, &skip); | |
| 2813 EmitStoreToStackLocalOrContextSlot(var, location); | |
| 2814 __ bind(&skip); | |
| 2815 } | |
| 2816 | |
| 2817 } else { | |
| 2818 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT_CONST_LEGACY); | |
| 2819 if (is_strict(language_mode())) { | |
| 2820 __ CallRuntime(Runtime::kThrowConstAssignError, 0); | |
| 2821 } | |
| 2822 // Silently ignore store in sloppy mode. | |
| 2823 } | |
| 2824 } | |
| 2825 | |
| 2826 | |
| 2827 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | |
| 2828 // Assignment to a property, using a named store IC. | |
| 2829 Property* prop = expr->target()->AsProperty(); | |
| 2830 DCHECK(prop != NULL); | |
| 2831 DCHECK(prop->key()->IsLiteral()); | |
| 2832 | |
| 2833 __ mov(StoreDescriptor::NameRegister(), | |
| 2834 Operand(prop->key()->AsLiteral()->value())); | |
| 2835 __ pop(StoreDescriptor::ReceiverRegister()); | |
| 2836 if (FLAG_vector_stores) { | |
| 2837 EmitLoadStoreICSlot(expr->AssignmentSlot()); | |
| 2838 CallStoreIC(); | |
| 2839 } else { | |
| 2840 CallStoreIC(expr->AssignmentFeedbackId()); | |
| 2841 } | |
| 2842 | |
| 2843 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
| 2844 context()->Plug(r0); | |
| 2845 } | |
| 2846 | |
| 2847 | |
| 2848 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { | |
| 2849 // Assignment to named property of super. | |
| 2850 // r0 : value | |
| 2851 // stack : receiver ('this'), home_object | |
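| // e.g. (illustrative): super.prop = value; inside a method. | |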
| 2852 DCHECK(prop != NULL); | |
| 2853 Literal* key = prop->key()->AsLiteral(); | |
| 2854 DCHECK(key != NULL); | |
| 2855 | |
| 2856 __ Push(key->value()); | |
| 2857 __ Push(r0); | |
| 2858 __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict | |
| 2859 : Runtime::kStoreToSuper_Sloppy), | |
| 2860 4); | |
| 2861 } | |
| 2862 | |
| 2863 | |
| 2864 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { | |
| 2865 // Assignment to keyed property of super. | |
| 2866 // r0 : value | |
| 2867 // stack : receiver ('this'), home_object, key | |
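| // e.g. (illustrative): super[key] = value; inside a method. | |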
| 2868 DCHECK(prop != NULL); | |
| 2869 | |
| 2870 __ Push(r0); | |
| 2871 __ CallRuntime( | |
| 2872 (is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict | |
| 2873 : Runtime::kStoreKeyedToSuper_Sloppy), | |
| 2874 4); | |
| 2875 } | |
| 2876 | |
| 2877 | |
| 2878 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { | |
| 2879 // Assignment to a property, using a keyed store IC. | |
| 2880 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister()); | |
| 2881 DCHECK(StoreDescriptor::ValueRegister().is(r0)); | |
| 2882 | |
| 2883 Handle<Code> ic = | |
| 2884 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | |
| 2885 if (FLAG_vector_stores) { | |
| 2886 EmitLoadStoreICSlot(expr->AssignmentSlot()); | |
| 2887 CallIC(ic); | |
| 2888 } else { | |
| 2889 CallIC(ic, expr->AssignmentFeedbackId()); | |
| 2890 } | |
| 2891 | |
| 2892 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | |
| 2893 context()->Plug(r0); | |
| 2894 } | |
| 2895 | |
| 2896 | |
| 2897 void FullCodeGenerator::VisitProperty(Property* expr) { | |
| 2898 Comment cmnt(masm_, "[ Property"); | |
| 2899 SetExpressionPosition(expr); | |
| 2900 | |
| 2901 Expression* key = expr->key(); | |
| 2902 | |
| 2903 if (key->IsPropertyName()) { | |
| 2904 if (!expr->IsSuperAccess()) { | |
| 2905 VisitForAccumulatorValue(expr->obj()); | |
| 2906 __ Move(LoadDescriptor::ReceiverRegister(), r0); | |
| 2907 EmitNamedPropertyLoad(expr); | |
| 2908 } else { | |
| 2909 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); | |
| 2910 VisitForStackValue( | |
| 2911 expr->obj()->AsSuperPropertyReference()->home_object()); | |
| 2912 EmitNamedSuperPropertyLoad(expr); | |
| 2913 } | |
| 2914 } else { | |
| 2915 if (!expr->IsSuperAccess()) { | |
| 2916 VisitForStackValue(expr->obj()); | |
| 2917 VisitForAccumulatorValue(expr->key()); | |
| 2918 __ Move(LoadDescriptor::NameRegister(), r0); | |
| 2919 __ pop(LoadDescriptor::ReceiverRegister()); | |
| 2920 EmitKeyedPropertyLoad(expr); | |
| 2921 } else { | |
| 2922 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); | |
| 2923 VisitForStackValue( | |
| 2924 expr->obj()->AsSuperPropertyReference()->home_object()); | |
| 2925 VisitForStackValue(expr->key()); | |
| 2926 EmitKeyedSuperPropertyLoad(expr); | |
| 2927 } | |
| 2928 } | |
| 2929 PrepareForBailoutForId(expr->LoadId(), TOS_REG); | |
| 2930 context()->Plug(r0); | |
| 2931 } | |
| 2932 | |
| 2933 | |
| 2934 void FullCodeGenerator::CallIC(Handle<Code> code, | |
| 2935 TypeFeedbackId ast_id) { | |
| 2936 ic_total_count_++; | |
| 2937 // All calls must have a predictable size in full-codegen code to ensure that | |
| 2938 // the debugger can patch them correctly. | |
| 2939 __ Call(code, RelocInfo::CODE_TARGET, ast_id, al, | |
| 2940 NEVER_INLINE_TARGET_ADDRESS); | |
| 2941 } | |
| 2942 | |
| 2943 | |
| 2944 // Code common for calls using the IC. | |
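| // e.g. (illustrative): f(x) takes the FUNCTION path (callee is a variable); | |
| // o.f(x) takes the METHOD path (callee is a named property of the receiver). | |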
| 2945 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { | |
| 2946 Expression* callee = expr->expression(); | |
| 2947 | |
| 2948 CallICState::CallType call_type = | |
| 2949 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD; | |
| 2950 | |
| 2951 // Get the target function. | |
| 2952 if (call_type == CallICState::FUNCTION) { | |
| 2953 { StackValueContext context(this); | |
| 2954 EmitVariableLoad(callee->AsVariableProxy()); | |
| 2955 PrepareForBailout(callee, NO_REGISTERS); | |
| 2956 } | |
| 2957 // Push undefined as receiver. This is patched in the method prologue if it | |
| 2958 // is a sloppy mode method. | |
| 2959 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | |
| 2960 __ push(ip); | |
| 2961 } else { | |
| 2962 // Load the function from the receiver. | |
| 2963 DCHECK(callee->IsProperty()); | |
| 2964 DCHECK(!callee->AsProperty()->IsSuperAccess()); | |
| 2965 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | |
| 2966 EmitNamedPropertyLoad(callee->AsProperty()); | |
| 2967 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | |
| 2968 // Push the target function under the receiver. | |
| 2969 __ ldr(ip, MemOperand(sp, 0)); | |
| 2970 __ push(ip); | |
| 2971 __ str(r0, MemOperand(sp, kPointerSize)); | |
| 2972 } | |
| 2973 | |
| 2974 EmitCall(expr, call_type); | |
| 2975 } | |
| 2976 | |
| 2977 | |
| 2978 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { | |
| 2979 Expression* callee = expr->expression(); | |
| 2980 DCHECK(callee->IsProperty()); | |
| 2981 Property* prop = callee->AsProperty(); | |
| 2982 DCHECK(prop->IsSuperAccess()); | |
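| // e.g. (illustrative): super.f(x) — f is looked up on the superclass but | |
| // invoked with the current 'this' as receiver. | |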
| 2983 SetExpressionPosition(prop); | |
| 2984 | |
| 2985 Literal* key = prop->key()->AsLiteral(); | |
| 2986 DCHECK(!key->value()->IsSmi()); | |
| 2987 // Load the function from the receiver. | |
| 2988 const Register scratch = r1; | |
| 2989 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); | |
| 2990 VisitForStackValue(super_ref->home_object()); | |
| 2991 VisitForAccumulatorValue(super_ref->this_var()); | |
| 2992 __ Push(r0); | |
| 2993 __ Push(r0); | |
| 2994 __ ldr(scratch, MemOperand(sp, kPointerSize * 2)); | |
| 2995 __ Push(scratch); | |
| 2996 __ Push(key->value()); | |
| 2997 __ Push(Smi::FromInt(language_mode())); | |
| 2998 | |
| 2999 // Stack here: | |
| 3000 // - home_object | |
| 3001 // - this (receiver) | |
| 3002 // - this (receiver) <-- LoadFromSuper will pop here and below. | |
| 3003 // - home_object | |
| 3004 // - key | |
| 3005 // - language_mode | |
| 3006 __ CallRuntime(Runtime::kLoadFromSuper, 4); | |
| 3007 | |
| 3008 // Replace home_object with target function. | |
| 3009 __ str(r0, MemOperand(sp, kPointerSize)); | |
| 3010 | |
| 3011 // Stack here: | |
| 3012 // - target function | |
| 3013 // - this (receiver) | |
| 3014 EmitCall(expr, CallICState::METHOD); | |
| 3015 } | |
| 3016 | |
| 3017 | |
| 3018 // Code common for calls using the IC. | |
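| // e.g. (illustrative): o[k](x) — the callee is loaded with a keyed IC while | |
| // the receiver o stays on the stack. | |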
| 3019 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, | |
| 3020 Expression* key) { | |
| 3021 // Load the key. | |
| 3022 VisitForAccumulatorValue(key); | |
| 3023 | |
| 3024 Expression* callee = expr->expression(); | |
| 3025 | |
| 3026 // Load the function from the receiver. | |
| 3027 DCHECK(callee->IsProperty()); | |
| 3028 __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | |
| 3029 __ Move(LoadDescriptor::NameRegister(), r0); | |
| 3030 EmitKeyedPropertyLoad(callee->AsProperty()); | |
| 3031 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | |
| 3032 | |
| 3033 // Push the target function under the receiver. | |
| 3034 __ ldr(ip, MemOperand(sp, 0)); | |
| 3035 __ push(ip); | |
| 3036 __ str(r0, MemOperand(sp, kPointerSize)); | |
| 3037 | |
| 3038 EmitCall(expr, CallICState::METHOD); | |
| 3039 } | |
| 3040 | |
| 3041 | |
| 3042 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { | |
| 3043 Expression* callee = expr->expression(); | |
| 3044 DCHECK(callee->IsProperty()); | |
| 3045 Property* prop = callee->AsProperty(); | |
| 3046 DCHECK(prop->IsSuperAccess()); | |
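| // e.g. (illustrative): super[k](x). | |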
| 3047 | |
| 3048 SetExpressionPosition(prop); | |
| 3049 // Load the function from the receiver. | |
| 3050 const Register scratch = r1; | |
| 3051 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); | |
| 3052 VisitForStackValue(super_ref->home_object()); | |
| 3053 VisitForAccumulatorValue(super_ref->this_var()); | |
| 3054 __ Push(r0); | |
| 3055 __ Push(r0); | |
| 3056 __ ldr(scratch, MemOperand(sp, kPointerSize * 2)); | |
| 3057 __ Push(scratch); | |
| 3058 VisitForStackValue(prop->key()); | |
| 3059 __ Push(Smi::FromInt(language_mode())); | |
| 3060 | |
| 3061 // Stack here: | |
| 3062 // - home_object | |
| 3063 // - this (receiver) | |
| 3064 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. | |
| 3065 // - home_object | |
| 3066 // - key | |
| 3067 // - language_mode | |
| 3068 __ CallRuntime(Runtime::kLoadKeyedFromSuper, 4); | |
| 3069 | |
| 3070 // Replace home_object with target function. | |
| 3071 __ str(r0, MemOperand(sp, kPointerSize)); | |
| 3072 | |
| 3073 // Stack here: | |
| 3074 // - target function | |
| 3075 // - this (receiver) | |
| 3076 EmitCall(expr, CallICState::METHOD); | |
| 3077 } | |
| 3078 | |
| 3079 | |
| 3080 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) { | |
| 3081 // Load the arguments. | |
| 3082 ZoneList<Expression*>* args = expr->arguments(); | |
| 3083 int arg_count = args->length(); | |
| 3084 for (int i = 0; i < arg_count; i++) { | |
| 3085 VisitForStackValue(args->at(i)); | |
| 3086 } | |
| 3087 | |
| 3088 SetCallPosition(expr, arg_count); | |
| 3089 Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, call_type).code(); | |
| 3090 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackICSlot()))); | |
| 3091 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | |
| 3092 // Don't assign a type feedback id to the IC, since type feedback is provided | |
| 3093 // by the vector above. | |
| 3094 CallIC(ic); | |
| 3095 | |
| 3096 RecordJSReturnSite(expr); | |
| 3097 // Restore context register. | |
| 3098 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3099 context()->DropAndPlug(1, r0); | |
| 3100 } | |
| 3101 | |
| 3102 | |
| 3103 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | |
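| // e.g. (illustrative): for eval("x + 1") this resolves, at runtime, whether | |
| // the call really is a direct eval and which function to invoke. | |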
| 3104 // r4: copy of the first argument or undefined if it doesn't exist. | |
| 3105 if (arg_count > 0) { | |
| 3106 __ ldr(r4, MemOperand(sp, arg_count * kPointerSize)); | |
| 3107 } else { | |
| 3108 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); | |
| 3109 } | |
| 3110 | |
| 3111 // r3: the function of the enclosing frame. | |
| 3112 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | |
| 3113 | |
| 3114 // r2: language mode. | |
| 3115 __ mov(r2, Operand(Smi::FromInt(language_mode()))); | |
| 3116 | |
| 3117 // r1: the start position of the scope the call resides in. | |
| 3118 __ mov(r1, Operand(Smi::FromInt(scope()->start_position()))); | |
| 3119 | |
| 3120 // Do the runtime call. | |
| 3121 __ Push(r4, r3, r2, r1); | |
| 3122 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5); | |
| 3123 } | |
| 3124 | |
| 3125 | |
| 3126 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls. | |
| 3127 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) { | |
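| // e.g. (illustrative): a callee bound in a lookup slot, such as a function | |
| // introduced dynamically by a direct eval in the enclosing scope; the | |
| // runtime then finds both the function and the object holding it. | |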
| 3128 VariableProxy* callee = expr->expression()->AsVariableProxy(); | |
| 3129 if (callee->var()->IsLookupSlot()) { | |
| 3130 Label slow, done; | |
| 3131 SetExpressionPosition(callee); | |
| 3132 // Generate code for loading from variables potentially shadowed | |
| 3133 // by eval-introduced variables. | |
| 3134 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); | |
| 3135 | |
| 3136 __ bind(&slow); | |
| 3137 // Call the runtime to find the function to call (returned in r0) | |
| 3138 // and the object holding it (returned in r1). | |
| 3139 DCHECK(!context_register().is(r2)); | |
| 3140 __ mov(r2, Operand(callee->name())); | |
| 3141 __ Push(context_register(), r2); | |
| 3142 __ CallRuntime(Runtime::kLoadLookupSlot, 2); | |
| 3143 __ Push(r0, r1); // Function, receiver. | |
| 3144 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); | |
| 3145 | |
| 3146 // If fast case code has been generated, emit code to push the | |
| 3147 // function and receiver and have the slow path jump around this | |
| 3148 // code. | |
| 3149 if (done.is_linked()) { | |
| 3150 Label call; | |
| 3151 __ b(&call); | |
| 3152 __ bind(&done); | |
| 3153 // Push function. | |
| 3154 __ push(r0); | |
| 3155 // The receiver is implicitly the global receiver. Indicate this | |
| 3156 // by passing undefined to the call function stub. | |
| 3157 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); | |
| 3158 __ push(r1); | |
| 3159 __ bind(&call); | |
| 3160 } | |
| 3161 } else { | |
| 3162 VisitForStackValue(callee); | |
| 3163 // refEnv.WithBaseObject() | |
| 3164 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | |
| 3165 __ push(r2); // Reserved receiver slot. | |
| 3166 } | |
| 3167 } | |
| 3168 | |
| 3169 | |
| 3170 void FullCodeGenerator::VisitCall(Call* expr) { | |
| 3171 #ifdef DEBUG | |
| 3172 // We want to verify that RecordJSReturnSite gets called on all paths | |
| 3173 // through this function. Avoid early returns. | |
| 3174 expr->return_is_recorded_ = false; | |
| 3175 #endif | |
| 3176 | |
| 3177 Comment cmnt(masm_, "[ Call"); | |
| 3178 Expression* callee = expr->expression(); | |
| 3179 Call::CallType call_type = expr->GetCallType(isolate()); | |
| 3180 | |
| 3181 if (call_type == Call::POSSIBLY_EVAL_CALL) { | |
| 3182 // In a call to eval, we first call | |
| 3183 // Runtime::kResolvePossiblyDirectEval to resolve the function we need | |
| 3184 // to call. Then we call the resolved function using the given arguments. | |
| 3185 ZoneList<Expression*>* args = expr->arguments(); | |
| 3186 int arg_count = args->length(); | |
| 3187 | |
| 3188 PushCalleeAndWithBaseObject(expr); | |
| 3189 | |
| 3190 // Push the arguments. | |
| 3191 for (int i = 0; i < arg_count; i++) { | |
| 3192 VisitForStackValue(args->at(i)); | |
| 3193 } | |
| 3194 | |
| 3195 // Push a copy of the function (found below the arguments) and | |
| 3196 // resolve eval. | |
| 3197 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | |
| 3198 __ push(r1); | |
| 3199 EmitResolvePossiblyDirectEval(arg_count); | |
| 3200 | |
| 3201 // Touch up the stack with the resolved function. | |
| 3202 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize)); | |
| 3203 | |
| 3204 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); | |
| 3205 | |
| 3206 // Record source position for debugger. | |
| 3207 SetCallPosition(expr, arg_count); | |
| 3208 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS); | |
| 3209 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); | |
| 3210 __ CallStub(&stub); | |
| 3211 RecordJSReturnSite(expr); | |
| 3212 // Restore context register. | |
| 3213 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | |
| 3214 context()->DropAndPlug(1, r0); | |
| 3215 } else if (call_type == Call::GLOBAL_CALL) { | |
| 3216 EmitCallWithLoadIC(expr); | |
| 3217 | |
| 3218 } else if (call_type == Call::LOOKUP_SLOT_CALL) { | |
| 3219 // Call to a lookup slot (dynamically introduced variable). | |
| 3220 PushCalleeAndWithBaseObject(expr); | |
| 3221 EmitCall(expr); | |
| 3222 } else if (call_type == Call::PROPERTY_CALL) { | |
| 3223 Property* property = callee->AsProperty(); | |
| 3224 bool is_named_call = property->key()->IsPropertyName(); | |
| 3225 if (property->IsSuperAccess()) { | |
| 3226 if (is_named_call) { | |
| 3227 EmitSuperCallWithLoadIC(expr); | |
| 3228 } else { | |
| 3229 EmitKeyedSuperCallWithLoadIC(expr); | |
| 3230 } | |
| 3231 } else { | |
| 3232 VisitForStackValue(property->obj()); | |
| 3233 if (is_named_call) { | |
| 3234 EmitCallWithLoadIC(expr); | |
| 3235 } else { | |
| 3236 EmitKeyedCallWithLoadIC(expr, property->key()); | |
| 3237 } | |
| 3238 } | |
| 3239 } else if (call_type == Call::SUPER_CALL) { | |
| 3240 EmitSuperConstructorCall(expr); | |
| 3241 } else { | |
| 3242 DCHECK(call_type == Call::OTHER_CALL); | |
| 3243 // Call to an arbitrary expression not handled specially above. | |
| 3244 VisitForStackValue(callee); | |
| 3245 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); | |
| 3246 __ push(r1); | |
| 3247 // Emit function call. | |
| 3248 EmitCall(expr); | |
| 3249 } | |
| 3250 | |
| 3251 #ifdef DEBUG | |
| 3252 // RecordJSReturnSite should have been called. | |
| 3253 DCHECK(expr->return_is_recorded_); | |
| 3254 #endif | |
| 3255 } | |
| 3256 | |
| 3257 | |
| 3258 void FullCodeGenerator::VisitCallNew(CallNew* expr) { | |
| 3259 Comment cmnt(masm_, "[ CallNew"); | |
| 3260 // According to ECMA-262, section 11.2.2, page 44, the function | |
| 3261 // expression in new calls must be evaluated before the | |
| 3262 // arguments. | |
| 3263 | |
| 3264 // Push the constructor on the stack. If it's not a function it's used | |
| 3265 // as the receiver for CALL_NON_FUNCTION; otherwise the value on the | |
| 3266 // stack is ignored. | |
| 3267 DCHECK(!expr->expression()->IsSuperPropertyReference()); | |
| 3268 VisitForStackValue(expr->expression()); | |
| 3269 | |
| 3270 // Push the arguments ("left-to-right") on the stack. | |
| 3271 ZoneList<Expression*>* args = expr->arguments(); | |
| 3272 int arg_count = args->length(); | |
| 3273 for (int i = 0; i < arg_count; i++) { | |
| 3274 VisitForStackValue(args->at(i)); | |
| 3275 } | |
| 3276 | |
| 3277 // Call the construct call builtin that handles allocation and | |
| 3278 // constructor invocation. | |
| 3279 SetConstructCallPosition(expr); | |
| 3280 | |
| 3281 // Load function and argument count into r1 and r0. | |
| 3282 __ mov(r0, Operand(arg_count)); | |
| 3283 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); | |
| 3284 | |
| 3285 // Record call targets in unoptimized code. | |
| 3286 if (FLAG_pretenuring_call_new) { | |
| 3287 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); | |
| 3288 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() == | |
| 3289 expr->CallNewFeedbackSlot().ToInt() + 1); | |
| 3290 } | |
| 3291 | |
| 3292 __ Move(r2, FeedbackVector()); | |
| 3293 __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot()))); | |
| 3294 | |
| 3295 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET); | |
| 3296 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | |
| 3297 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | |
| 3298 context()->Plug(r0); | |
| 3299 } | |
| 3300 | |
| 3301 | |
| 3302 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { | |
| 3303 SuperCallReference* super_call_ref = | |
| 3304 expr->expression()->AsSuperCallReference(); | |
| 3305 DCHECK_NOT_NULL(super_call_ref); | |
| 3306 | |
| 3307 EmitLoadSuperConstructor(super_call_ref); | |
| 3308 __ push(result_register()); | |
| 3309 | |
| 3310 // Push the arguments ("left-to-right") on the stack. | |
| 3311 ZoneList<Expression*>* args = expr->arguments(); | |
| 3312 int arg_count = args->length(); | |
| 3313 for (int i = 0; i < arg_count; i++) { | |
| 3314 VisitForStackValue(args->at(i)); | |
| 3315 } | |
| 3316 | |
| 3317 // Call the construct call builtin that handles allocation and | |
| 3318 // constructor invocation. | |
| 3319 SetConstructCallPosition(expr); | |
| 3320 | |
| 3321 // Load original constructor into r4. | |
| 3322 VisitForAccumulatorValue(super_call_ref->new_target_var()); | |
| 3323 __ mov(r4, result_register()); | |
| 3324 | |
| 3325 // Load function and argument count into r1 and r0. | |
| 3326 __ mov(r0, Operand(arg_count)); | |
| 3327 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); | |
| 3328 | |
| 3329 // Record call targets in unoptimized code. | |
| 3330 if (FLAG_pretenuring_call_new) { | |
| 3331 UNREACHABLE(); | |
| 3332 /* TODO(dslomov): support pretenuring. | |
| 3333 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot()); | |
| 3334 DCHECK(expr->AllocationSiteFeedbackSlot().ToInt() == | |
| 3335 expr->CallNewFeedbackSlot().ToInt() + 1); | |
| 3336 */ | |
| 3337 } | |
| 3338 | |
| 3339 __ Move(r2, FeedbackVector()); | |
| 3340 __ mov(r3, Operand(SmiFromSlot(expr->CallFeedbackSlot()))); | |
| 3341 | |
| 3342 CallConstructStub stub(isolate(), SUPER_CALL_RECORD_TARGET); | |
| 3343 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); | |
| 3344 | |
| 3345 RecordJSReturnSite(expr); | |
| 3346 | |
| 3347 context()->Plug(r0); | |
| 3348 } | |
| 3349 | |
| 3350 | |
| 3351 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { | |
| 3352 ZoneList<Expression*>* args = expr->arguments(); | |
| 3353 DCHECK(args->length() == 1); | |
| 3354 | |
| 3355 VisitForAccumulatorValue(args->at(0)); | |
| 3356 | |
| 3357 Label materialize_true, materialize_false; | |
| 3358 Label* if_true = NULL; | |
| 3359 Label* if_false = NULL; | |
| 3360 Label* fall_through = NULL; | |
| 3361 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3362 &if_true, &if_false, &fall_through); | |
| 3363 | |
| 3364 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3365 __ SmiTst(r0); | |
| 3366 Split(eq, if_true, if_false, fall_through); | |
| 3367 | |
| 3368 context()->Plug(if_true, if_false); | |
| 3369 } | |
| 3370 | |
| 3371 | |
| 3372 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { | |
| 3373 ZoneList<Expression*>* args = expr->arguments(); | |
| 3374 DCHECK(args->length() == 1); | |
| 3375 | |
| 3376 VisitForAccumulatorValue(args->at(0)); | |
| 3377 | |
| 3378 Label materialize_true, materialize_false; | |
| 3379 Label* if_true = NULL; | |
| 3380 Label* if_false = NULL; | |
| 3381 Label* fall_through = NULL; | |
| 3382 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3383 &if_true, &if_false, &fall_through); | |
| 3384 | |
| 3385 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3386 __ NonNegativeSmiTst(r0); | |
| 3387 Split(eq, if_true, if_false, fall_through); | |
| 3388 | |
| 3389 context()->Plug(if_true, if_false); | |
| 3390 } | |
| 3391 | |
| 3392 | |
| 3393 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { | |
| 3394 ZoneList<Expression*>* args = expr->arguments(); | |
| 3395 DCHECK(args->length() == 1); | |
| 3396 | |
| 3397 VisitForAccumulatorValue(args->at(0)); | |
| 3398 | |
| 3399 Label materialize_true, materialize_false; | |
| 3400 Label* if_true = NULL; | |
| 3401 Label* if_false = NULL; | |
| 3402 Label* fall_through = NULL; | |
| 3403 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3404 &if_true, &if_false, &fall_through); | |
| 3405 | |
| 3406 __ JumpIfSmi(r0, if_false); | |
| 3407 __ LoadRoot(ip, Heap::kNullValueRootIndex); | |
| 3408 __ cmp(r0, ip); | |
| 3409 __ b(eq, if_true); | |
| 3410 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 3411 // Undetectable objects behave like undefined when tested with typeof. | |
| 3412 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); | |
| 3413 __ tst(r1, Operand(1 << Map::kIsUndetectable)); | |
| 3414 __ b(ne, if_false); | |
| 3415 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); | |
| 3416 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | |
| 3417 __ b(lt, if_false); | |
| 3418 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); | |
| 3419 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3420 Split(le, if_true, if_false, fall_through); | |
| 3421 | |
| 3422 context()->Plug(if_true, if_false); | |
| 3423 } | |
| 3424 | |
| 3425 | |
| 3426 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { | |
| 3427 ZoneList<Expression*>* args = expr->arguments(); | |
| 3428 DCHECK(args->length() == 1); | |
| 3429 | |
| 3430 VisitForAccumulatorValue(args->at(0)); | |
| 3431 | |
| 3432 Label materialize_true, materialize_false; | |
| 3433 Label* if_true = NULL; | |
| 3434 Label* if_false = NULL; | |
| 3435 Label* fall_through = NULL; | |
| 3436 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3437 &if_true, &if_false, &fall_through); | |
| 3438 | |
| 3439 __ JumpIfSmi(r0, if_false); | |
| 3440 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); | |
| 3441 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3442 Split(ge, if_true, if_false, fall_through); | |
| 3443 | |
| 3444 context()->Plug(if_true, if_false); | |
| 3445 } | |
| 3446 | |
| 3447 | |
| 3448 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { | |
| 3449 ZoneList<Expression*>* args = expr->arguments(); | |
| 3450 DCHECK(args->length() == 1); | |
| 3451 | |
| 3452 VisitForAccumulatorValue(args->at(0)); | |
| 3453 | |
| 3454 Label materialize_true, materialize_false; | |
| 3455 Label* if_true = NULL; | |
| 3456 Label* if_false = NULL; | |
| 3457 Label* fall_through = NULL; | |
| 3458 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3459 &if_true, &if_false, &fall_through); | |
| 3460 | |
| 3461 __ JumpIfSmi(r0, if_false); | |
| 3462 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 3463 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); | |
| 3464 __ tst(r1, Operand(1 << Map::kIsUndetectable)); | |
| 3465 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3466 Split(ne, if_true, if_false, fall_through); | |
| 3467 | |
| 3468 context()->Plug(if_true, if_false); | |
| 3469 } | |
| 3470 | |
| 3471 | |
| 3472 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( | |
| 3473 CallRuntime* expr) { | |
| 3474 ZoneList<Expression*>* args = expr->arguments(); | |
| 3475 DCHECK(args->length() == 1); | |
| 3476 | |
| 3477 VisitForAccumulatorValue(args->at(0)); | |
| 3478 | |
| 3479 Label materialize_true, materialize_false, skip_lookup; | |
| 3480 Label* if_true = NULL; | |
| 3481 Label* if_false = NULL; | |
| 3482 Label* fall_through = NULL; | |
| 3483 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3484 &if_true, &if_false, &fall_through); | |
| 3485 | |
| 3486 __ AssertNotSmi(r0); | |
| 3487 | |
| 3488 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 3489 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); | |
| 3490 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); | |
| 3491 __ b(ne, &skip_lookup); | |
| 3492 | |
| 3493 // Check for fast case object. Generate false result for slow case object. | |
| 3494 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | |
| 3495 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); | |
| 3496 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); | |
| 3497 __ cmp(r2, ip); | |
| 3498 __ b(eq, if_false); | |
| 3499 | |
| 3500 // Look for valueOf name in the descriptor array, and indicate false if | |
| 3501 // found. Since we omit an enumeration index check, if it is added via a | |
| 3502 // transition that shares its descriptor array, this is a false positive. | |
| 3503 Label entry, loop, done; | |
| 3504 | |
| 3505 // Skip loop if no descriptors are valid. | |
| 3506 __ NumberOfOwnDescriptors(r3, r1); | |
| 3507 __ cmp(r3, Operand::Zero()); | |
| 3508 __ b(eq, &done); | |
| 3509 | |
| 3510 __ LoadInstanceDescriptors(r1, r4); | |
| 3511 // r4: descriptor array. | |
| 3512 // r3: valid entries in the descriptor array. | |
| 3513 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); | |
| 3514 __ mul(r3, r3, ip); | |
| 3515 // Calculate location of the first key name. | |
| 3516 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); | |
| 3517 // Calculate the end of the descriptor array. | |
| 3518 __ mov(r2, r4); | |
| 3519 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2)); | |
| 3520 | |
| 3521 // Loop through all the keys in the descriptor array. If one of these is the | |
| 3522 // string "valueOf" the result is false. | |
| 3523 // The use of ip to store the valueOf string assumes that it is not otherwise | |
| 3524 // used in the loop below. | |
| 3525 __ mov(ip, Operand(isolate()->factory()->value_of_string())); | |
| 3526 __ jmp(&entry); | |
| 3527 __ bind(&loop); | |
| 3528 __ ldr(r3, MemOperand(r4, 0)); | |
| 3529 __ cmp(r3, ip); | |
| 3530 __ b(eq, if_false); | |
| 3531 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); | |
| 3532 __ bind(&entry); | |
| 3533 __ cmp(r4, Operand(r2)); | |
| 3534 __ b(ne, &loop); | |
| 3535 | |
| 3536 __ bind(&done); | |
| 3537 | |
| 3538 // Set the bit in the map to indicate that there is no local valueOf field. | |
| 3539 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); | |
| 3540 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); | |
| 3541 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset)); | |
| 3542 | |
| 3543 __ bind(&skip_lookup); | |
| 3544 | |
| 3545 // If a valueOf property is not found on the object, check that its | |
| 3546 // prototype is the unmodified String prototype. If not, the result is false. | |
| 3547 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); | |
| 3548 __ JumpIfSmi(r2, if_false); | |
| 3549 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); | |
| 3550 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
| 3551 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); | |
| 3552 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); | |
| 3553 __ cmp(r2, r3); | |
| 3554 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3555 Split(eq, if_true, if_false, fall_through); | |
| 3556 | |
| 3557 context()->Plug(if_true, if_false); | |
| 3558 } | |
| 3559 | |
| 3560 | |
| 3561 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { | |
| 3562 ZoneList<Expression*>* args = expr->arguments(); | |
| 3563 DCHECK(args->length() == 1); | |
| 3564 | |
| 3565 VisitForAccumulatorValue(args->at(0)); | |
| 3566 | |
| 3567 Label materialize_true, materialize_false; | |
| 3568 Label* if_true = NULL; | |
| 3569 Label* if_false = NULL; | |
| 3570 Label* fall_through = NULL; | |
| 3571 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3572 &if_true, &if_false, &fall_through); | |
| 3573 | |
| 3574 __ JumpIfSmi(r0, if_false); | |
| 3575 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); | |
| 3576 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3577 Split(eq, if_true, if_false, fall_through); | |
| 3578 | |
| 3579 context()->Plug(if_true, if_false); | |
| 3580 } | |
| 3581 | |
| 3582 | |
| 3583 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { | |
| 3584 ZoneList<Expression*>* args = expr->arguments(); | |
| 3585 DCHECK(args->length() == 1); | |
| 3586 | |
| 3587 VisitForAccumulatorValue(args->at(0)); | |
| 3588 | |
| 3589 Label materialize_true, materialize_false; | |
| 3590 Label* if_true = NULL; | |
| 3591 Label* if_false = NULL; | |
| 3592 Label* fall_through = NULL; | |
| 3593 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3594 &if_true, &if_false, &fall_through); | |
| 3595 | |
| 3596 __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK); | |
| 3597 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | |
| 3598 __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); | |
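| // IEEE 754 encodes -0.0 with only the sign bit set: high (exponent) word | |
| // 0x80000000, mantissa word 0x00000000. The second cmp below executes | |
| // only if the first set the eq condition. | |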
| 3599 __ cmp(r2, Operand(0x80000000)); | |
| 3600 __ cmp(r1, Operand(0x00000000), eq); | |
| 3601 | |
| 3602 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3603 Split(eq, if_true, if_false, fall_through); | |
| 3604 | |
| 3605 context()->Plug(if_true, if_false); | |
| 3606 } | |
| 3607 | |
| 3608 | |
| 3609 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { | |
| 3610 ZoneList<Expression*>* args = expr->arguments(); | |
| 3611 DCHECK(args->length() == 1); | |
| 3612 | |
| 3613 VisitForAccumulatorValue(args->at(0)); | |
| 3614 | |
| 3615 Label materialize_true, materialize_false; | |
| 3616 Label* if_true = NULL; | |
| 3617 Label* if_false = NULL; | |
| 3618 Label* fall_through = NULL; | |
| 3619 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3620 &if_true, &if_false, &fall_through); | |
| 3621 | |
| 3622 __ JumpIfSmi(r0, if_false); | |
| 3623 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); | |
| 3624 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3625 Split(eq, if_true, if_false, fall_through); | |
| 3626 | |
| 3627 context()->Plug(if_true, if_false); | |
| 3628 } | |
| 3629 | |
| 3630 | |
| 3631 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { | |
| 3632 ZoneList<Expression*>* args = expr->arguments(); | |
| 3633 DCHECK(args->length() == 1); | |
| 3634 | |
| 3635 VisitForAccumulatorValue(args->at(0)); | |
| 3636 | |
| 3637 Label materialize_true, materialize_false; | |
| 3638 Label* if_true = NULL; | |
| 3639 Label* if_false = NULL; | |
| 3640 Label* fall_through = NULL; | |
| 3641 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | |
| 3642 &if_false, &fall_through); | |
| 3643 | |
| 3644 __ JumpIfSmi(r0, if_false); | |
| 3645 __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE); | |
| 3646 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3647 Split(eq, if_true, if_false, fall_through); | |
| 3648 | |
| 3649 context()->Plug(if_true, if_false); | |
| 3650 } | |
| 3651 | |
| 3652 | |
| 3653 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { | |
| 3654 ZoneList<Expression*>* args = expr->arguments(); | |
| 3655 DCHECK(args->length() == 1); | |
| 3656 | |
| 3657 VisitForAccumulatorValue(args->at(0)); | |
| 3658 | |
| 3659 Label materialize_true, materialize_false; | |
| 3660 Label* if_true = NULL; | |
| 3661 Label* if_false = NULL; | |
| 3662 Label* fall_through = NULL; | |
| 3663 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3664 &if_true, &if_false, &fall_through); | |
| 3665 | |
| 3666 __ JumpIfSmi(r0, if_false); | |
| 3667 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); | |
| 3668 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3669 Split(eq, if_true, if_false, fall_through); | |
| 3670 | |
| 3671 context()->Plug(if_true, if_false); | |
| 3672 } | |
| 3673 | |
| 3674 | |
| 3675 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { | |
| 3676 ZoneList<Expression*>* args = expr->arguments(); | |
| 3677 DCHECK(args->length() == 1); | |
| 3678 | |
| 3679 VisitForAccumulatorValue(args->at(0)); | |
| 3680 | |
| 3681 Label materialize_true, materialize_false; | |
| 3682 Label* if_true = NULL; | |
| 3683 Label* if_false = NULL; | |
| 3684 Label* fall_through = NULL; | |
| 3685 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | |
| 3686 &if_false, &fall_through); | |
| 3687 | |
| 3688 __ JumpIfSmi(r0, if_false); | |
| 3689 Register map = r1; | |
| 3690 Register type_reg = r2; | |
| 3691 __ ldr(map, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 3692 __ ldrb(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset)); | |
| 3693 __ sub(type_reg, type_reg, Operand(FIRST_JS_PROXY_TYPE)); | |
| 3694 __ cmp(type_reg, Operand(LAST_JS_PROXY_TYPE - FIRST_JS_PROXY_TYPE)); | |
| 3695 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3696 Split(ls, if_true, if_false, fall_through); | |
| 3697 | |
| 3698 context()->Plug(if_true, if_false); | |
| 3699 } | |
| 3700 | |
| 3701 | |
| 3702 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { | |
| 3703 DCHECK(expr->arguments()->length() == 0); | |
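| // e.g. (illustrative): true while evaluating new F(), false for plain F(). | |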
| 3704 | |
| 3705 Label materialize_true, materialize_false; | |
| 3706 Label* if_true = NULL; | |
| 3707 Label* if_false = NULL; | |
| 3708 Label* fall_through = NULL; | |
| 3709 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3710 &if_true, &if_false, &fall_through); | |
| 3711 | |
| 3712 // Get the frame pointer for the calling frame. | |
| 3713 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
| 3714 | |
| 3715 // Skip the arguments adaptor frame if it exists. | |
| 3716 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); | |
| 3717 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
| 3718 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset), eq); | |
| 3719 | |
| 3720 // Check the marker in the calling frame. | |
| 3721 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); | |
| 3722 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); | |
| 3723 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3724 Split(eq, if_true, if_false, fall_through); | |
| 3725 | |
| 3726 context()->Plug(if_true, if_false); | |
| 3727 } | |
| 3728 | |
| 3729 | |
| 3730 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { | |
| 3731 ZoneList<Expression*>* args = expr->arguments(); | |
| 3732 DCHECK(args->length() == 2); | |
| 3733 | |
| 3734 // Load the two objects into registers and perform the comparison. | |
| 3735 VisitForStackValue(args->at(0)); | |
| 3736 VisitForAccumulatorValue(args->at(1)); | |
| 3737 | |
| 3738 Label materialize_true, materialize_false; | |
| 3739 Label* if_true = NULL; | |
| 3740 Label* if_false = NULL; | |
| 3741 Label* fall_through = NULL; | |
| 3742 context()->PrepareTest(&materialize_true, &materialize_false, | |
| 3743 &if_true, &if_false, &fall_through); | |
| 3744 | |
| 3745 __ pop(r1); | |
| 3746 __ cmp(r0, r1); | |
| 3747 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3748 Split(eq, if_true, if_false, fall_through); | |
| 3749 | |
| 3750 context()->Plug(if_true, if_false); | |
| 3751 } | |
| 3752 | |
| 3753 | |
| 3754 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { | |
| 3755 ZoneList<Expression*>* args = expr->arguments(); | |
| 3756 DCHECK(args->length() == 1); | |
| 3757 | |
| 3758 // ArgumentsAccessStub expects the key in r1 and the formal | |
| 3759 // parameter count in r0. | |
| 3760 VisitForAccumulatorValue(args->at(0)); | |
| 3761 __ mov(r1, r0); | |
| 3762 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); | |
| 3763 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); | |
| 3764 __ CallStub(&stub); | |
| 3765 context()->Plug(r0); | |
| 3766 } | |
| 3767 | |
| 3768 | |
| 3769 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { | |
| 3770 DCHECK(expr->arguments()->length() == 0); | |
| 3771 | |
| 3772 // Get the number of formal parameters. | |
| 3773 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); | |
| 3774 | |
| 3775 // Check if the calling frame is an arguments adaptor frame. | |
| 3776 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
| 3777 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); | |
| 3778 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
| 3779 | |
| 3780 // Arguments adaptor case: Read the arguments length from the | |
| 3781 // adaptor frame. | |
| 3782 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset), eq); | |
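| // If the caller is not an adaptor frame, the ldr above is skipped (it is | |
| // conditional on eq) and r0 keeps the formal parameter count. | |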
| 3783 | |
| 3784 context()->Plug(r0); | |
| 3785 } | |
| 3786 | |
| 3787 | |
| 3788 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { | |
| 3789 ZoneList<Expression*>* args = expr->arguments(); | |
| 3790 DCHECK(args->length() == 1); | |
| 3791 Label done, null, function, non_function_constructor; | |
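| // e.g. (illustrative): %_ClassOf([]) is "Array", %_ClassOf(f) is "Function" | |
| // for a function f; smis and non-JS objects yield null. | |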
| 3792 | |
| 3793 VisitForAccumulatorValue(args->at(0)); | |
| 3794 | |
| 3795 // If the object is a smi, we return null. | |
| 3796 __ JumpIfSmi(r0, &null); | |
| 3797 | |
| 3798 // Check that the object is a JS object but take special care of JS | |
| 3799 // functions to make sure they have 'Function' as their class. | |
| 3800 // Assume that there are only two callable types, and one of them is at | |
| 3801 // either end of the type range for JS object types. Saves extra comparisons. | |
| 3802 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | |
| 3803 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE); | |
| 3804 // Map is now in r0. | |
| 3805 __ b(lt, &null); | |
| 3806 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == | |
| 3807 FIRST_SPEC_OBJECT_TYPE + 1); | |
| 3808 __ b(eq, &function); | |
| 3809 | |
| 3810 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); | |
| 3811 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == | |
| 3812 LAST_SPEC_OBJECT_TYPE - 1); | |
| 3813 __ b(eq, &function); | |
| 3814 // Assume that there is no larger type. | |
| 3815 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); | |
| 3816 | |
| 3817 // Check if the constructor in the map is a JS function. | |
| 3818 Register instance_type = r2; | |
| 3819 __ GetMapConstructor(r0, r0, r1, instance_type); | |
| 3820 __ cmp(instance_type, Operand(JS_FUNCTION_TYPE)); | |
| 3821 __ b(ne, &non_function_constructor); | |
| 3822 | |
| 3823 // r0 now contains the constructor function. Grab the | |
| 3824 // instance class name from there. | |
| 3825 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); | |
| 3826 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); | |
| 3827 __ b(&done); | |
| 3828 | |
| 3829 // Functions have class 'Function'. | |
| 3830 __ bind(&function); | |
| 3831 __ LoadRoot(r0, Heap::kFunction_stringRootIndex); | |
| 3832 __ jmp(&done); | |
| 3833 | |
| 3834 // Objects with a non-function constructor have class 'Object'. | |
| 3835 __ bind(&non_function_constructor); | |
| 3836 __ LoadRoot(r0, Heap::kObject_stringRootIndex); | |
| 3837 __ jmp(&done); | |
| 3838 | |
| 3839 // Non-JS objects have class null. | |
| 3840 __ bind(&null); | |
| 3841 __ LoadRoot(r0, Heap::kNullValueRootIndex); | |
| 3842 | |
| 3843 // All done. | |
| 3844 __ bind(&done); | |
| 3845 | |
| 3846 context()->Plug(r0); | |
| 3847 } | |
| 3848 | |
| 3849 | |
| 3850 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { | |
| 3851 ZoneList<Expression*>* args = expr->arguments(); | |
| 3852 DCHECK(args->length() == 1); | |
| 3853 VisitForAccumulatorValue(args->at(0)); // Load the object. | |
| 3854 | |
| 3855 Label done; | |
| 3856 // If the object is a smi return the object. | |
| 3857 __ JumpIfSmi(r0, &done); | |
| 3858 // If the object is not a value type, return the object. | |
| 3859 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); | |
| 3860 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset), eq); | |
| 3861 | |
| 3862 __ bind(&done); | |
| 3863 context()->Plug(r0); | |
| 3864 } | |
| 3865 | |
| 3866 | |
| 3867 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) { | |
| 3868 ZoneList<Expression*>* args = expr->arguments(); | |
| 3869 DCHECK_EQ(1, args->length()); | |
| 3870 | |
| 3871 VisitForAccumulatorValue(args->at(0)); | |
| 3872 | |
| 3873 Label materialize_true, materialize_false; | |
| 3874 Label* if_true = nullptr; | |
| 3875 Label* if_false = nullptr; | |
| 3876 Label* fall_through = nullptr; | |
| 3877 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | |
| 3878 &if_false, &fall_through); | |
| 3879 | |
| 3880 __ JumpIfSmi(r0, if_false); | |
| 3881 __ CompareObjectType(r0, r1, r1, JS_DATE_TYPE); | |
| 3882 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | |
| 3883 Split(eq, if_true, if_false, fall_through); | |
| 3884 | |
| 3885 context()->Plug(if_true, if_false); | |
| 3886 } | |
| 3887 | |
| 3888 | |
| 3889 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { | |
| 3890 ZoneList<Expression*>* args = expr->arguments(); | |
| 3891 DCHECK(args->length() == 2); | |
| 3892 DCHECK_NOT_NULL(args->at(1)->AsLiteral()); | |
| 3893 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); | |
| 3894 | |
| 3895 VisitForAccumulatorValue(args->at(0)); // Load the object. | |
| 3896 | |
| 3897 Register object = r0; | |
| 3898 Register result = r0; | |
| 3899 Register scratch0 = r9; | |
| 3900 Register scratch1 = r1; | |
| 3901 | |
| 3902 if (index->value() == 0) { | |
| 3903 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); | |
| 3904 } else { | |
| 3905 Label runtime, done; | |
| 3906 if (index->value() < JSDate::kFirstUncachedField) { | |
| 3907 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | |
| 3908 __ mov(scratch1, Operand(stamp)); | |
| 3909 __ ldr(scratch1, MemOperand(scratch1)); | |
| 3910 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); | |
| 3911 __ cmp(scratch1, scratch0); | |
| 3912 __ b(ne, &runtime); | |
| 3913 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + | |
| 3914 kPointerSize * index->value())); | |
| 3915 __ jmp(&done); | |
| 3916 } | |
| 3917 __ bind(&runtime); | |
| 3918 __ PrepareCallCFunction(2, scratch1); | |
| 3919 __ mov(r1, Operand(index)); | |
| 3920 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | |
| 3921 __ bind(&done); | |
| 3922 } | |
| 3923 | |
| 3924 context()->Plug(result); | |
| 3925 } | |
| 3926 | |
| 3927 | |
| 3928 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { | |
| 3929 ZoneList<Expression*>* args = expr->arguments(); | |
| 3930 DCHECK_EQ(3, args->length()); | |
| 3931 | |
| 3932 Register string = r0; | |
| 3933 Register index = r1; | |
| 3934 Register value = r2; | |
| 3935 | |
| 3936 VisitForStackValue(args->at(0)); // index | |
| 3937 VisitForStackValue(args->at(1)); // value | |
| 3938 VisitForAccumulatorValue(args->at(2)); // string | |
| 3939 __ Pop(index, value); | |
| 3940 | |
| 3941 if (FLAG_debug_code) { | |
| 3942 __ SmiTst(value); | |
| 3943 __ Check(eq, kNonSmiValue); | |
| 3944 __ SmiTst(index); | |
| 3945 __ Check(eq, kNonSmiIndex); | |
| 3946 __ SmiUntag(index, index); | |
| 3947 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | |
| 3948 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); | |
| 3949 __ SmiTag(index, index); | |
| 3950 } | |
| 3951 | |
| 3952 __ SmiUntag(value, value); | |
| 3953 __ add(ip, | |
| 3954 string, | |
| 3955 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); | |
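| // The index is still a smi; the LSR by kSmiTagSize in the address | |
| // computation below untags it while forming the byte offset. | |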
| 3956 __ strb(value, MemOperand(ip, index, LSR, kSmiTagSize)); | |
| 3957 context()->Plug(string); | |
| 3958 } | |
| 3959 | |
| 3960 | |
| 3961 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { | |
| 3962 ZoneList<Expression*>* args = expr->arguments(); | |
| 3963 DCHECK_EQ(3, args->length()); | |
| 3964 | |
| 3965 Register string = r0; | |
| 3966 Register index = r1; | |
| 3967 Register value = r2; | |
| 3968 | |
| 3969 VisitForStackValue(args->at(0)); // index | |
| 3970 VisitForStackValue(args->at(1)); // value | |
| 3971 VisitForAccumulatorValue(args->at(2)); // string | |
| 3972 __ Pop(index, value); | |
| 3973 | |
| 3974 if (FLAG_debug_code) { | |
| 3975 __ SmiTst(value); | |
| 3976 __ Check(eq, kNonSmiValue); | |
| 3977 __ SmiTst(index); | |
| 3978 __ Check(eq, kNonSmiIndex); | |
| 3979 __ SmiUntag(index, index); | |
| 3980 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | |
| 3981 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); | |
| 3982 __ SmiTag(index, index); | |
| 3983 } | |
| 3984 | |
| 3985 __ SmiUntag(value, value); | |
| 3986 __ add(ip, | |
| 3987 string, | |
| 3988 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | |
| 3989 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); | |
| 3990 __ strh(value, MemOperand(ip, index)); | |
| 3991 context()->Plug(string); | |
| 3992 } | |
| 3993 | |
| 3994 | |
| 3995 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { | |
| 3996 ZoneList<Expression*>* args = expr->arguments(); | |
| 3997 DCHECK(args->length() == 2); | |
| 3998 VisitForStackValue(args->at(0)); // Load the object. | |
| 3999 VisitForAccumulatorValue(args->at(1)); // Load the value. | |
| 4000 __ pop(r1); // r0 = value. r1 = object. | |
| 4001 | |
| 4002 Label done; | |
| 4003 // If the object is a smi, return the value. | |
| 4004 __ JumpIfSmi(r1, &done); | |
| 4005 | |
| 4006 // If the object is not a value type, return the value. | |
| 4007 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); | |
| 4008 __ b(ne, &done); | |
| 4009 | |
| 4010 // Store the value. | |
| 4011 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); | |
| 4012 // Update the write barrier. Save the value as it will be | |
| 4013 // overwritten by the write barrier code and is needed afterward. | |
| 4014 __ mov(r2, r0); | |
| 4015 __ RecordWriteField( | |
| 4016 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); | |
| 4017 | |
| 4018 __ bind(&done); | |
| 4019 context()->Plug(r0); | |
| 4020 } | |
| 4021 | |
| 4022 | |
| 4023 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { | |
| 4024 ZoneList<Expression*>* args = expr->arguments(); | |
| 4025 DCHECK_EQ(args->length(), 1); | |
| 4026 // Load the argument into r0 and call the stub. | |
| 4027 VisitForAccumulatorValue(args->at(0)); | |
| 4028 | |
| 4029 NumberToStringStub stub(isolate()); | |
| 4030 __ CallStub(&stub); | |
| 4031 context()->Plug(r0); | |
| 4032 } | |
| 4033 | |
| 4034 | |
| 4035 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { | |
| 4036 ZoneList<Expression*>* args = expr->arguments(); | |
| 4037 DCHECK(args->length() == 1); | |
| 4038 VisitForAccumulatorValue(args->at(0)); | |
| 4039 | |
| 4040 Label done; | |
| 4041 StringCharFromCodeGenerator generator(r0, r1); | |
| 4042 generator.GenerateFast(masm_); | |
| 4043 __ jmp(&done); | |
| 4044 | |
| 4045 NopRuntimeCallHelper call_helper; | |
| 4046 generator.GenerateSlow(masm_, call_helper); | |
| 4047 | |
| 4048 __ bind(&done); | |
| 4049 context()->Plug(r1); | |
| 4050 } | |
| 4051 | |
| 4052 | |
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register result = r3;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ mov(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


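// ---------------------------------------------------------------------------
// Illustrative standalone sketch (not V8 API): the three-label protocol used
// by the two generators above. The fast path can exit with a result, with
// "index out of range" (spec: charAt yields the empty string, charCodeAt
// yields NaN), or with "index needs conversion", where a sentinel value makes
// the slow path convert the index and retry. Hypothetical names throughout.
#include <cmath>
#include <cstddef>
#include <string>

enum class CharAtStatus { kOk, kOutOfRange, kNeedConversion };

static std::string CharAtSketch(const std::string& s, double index,
                                CharAtStatus* status) {
  if (index != std::floor(index)) {  // Also true for NaN.
    // Not a usable integer index yet: the need_conversion label above models
    // exactly this -- plant a sentinel and let the slow path convert + retry.
    *status = CharAtStatus::kNeedConversion;
    return std::string();
  }
  if (index < 0 || index >= static_cast<double>(s.size())) {
    *status = CharAtStatus::kOutOfRange;
    return std::string();  // Spec result for an out-of-range charAt.
  }
  *status = CharAtStatus::kOk;
  return std::string(1, s[static_cast<size_t>(index)]);
}
// ---------------------------------------------------------------------------

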
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(r1);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; i++) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for a non-function argument (including proxies).
  __ JumpIfSmi(r0, &runtime);
  __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
  __ b(ne, &runtime);

  // InvokeFunction requires the function in r1. Move it in there.
  __ mov(r1, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(r1, count, CALL_FUNCTION, NullCallWrapper());
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(r0);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(r0);
}


void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // new.target
  VisitForStackValue(args->at(0));

  // .this_function
  VisitForStackValue(args->at(1));
  __ CallRuntime(Runtime::kGetPrototype, 1);
  __ Push(result_register());

  // Load original constructor into r4.
  __ ldr(r4, MemOperand(sp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, args_set_up, runtime;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(eq, &adaptor_frame);
  // The default constructor has no arguments, so no adaptor frame means
  // no arguments.
  __ mov(r0, Operand::Zero());
  __ b(&args_set_up);

  // Copy arguments from adaptor frame.
  {
    __ bind(&adaptor_frame);
    __ ldr(r1, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(r1, r1);
    __ mov(r0, r1);

    // Get arguments pointer in r2.
    __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
    __ add(r2, r2, Operand(StandardFrameConstants::kCallerSPOffset));
    Label loop;
    __ bind(&loop);
    // Pre-decrement r2 by kPointerSize on each iteration; the first
    // pre-decrement skips the receiver.
    __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
    __ Push(r3);
    __ sub(r1, r1, Operand(1));
    __ cmp(r1, Operand::Zero());
    __ b(ne, &loop);
  }

  __ bind(&args_set_up);
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

  CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

  __ Drop(1);

  context()->Plug(result_register());
}


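// ---------------------------------------------------------------------------
// Illustrative standalone sketch (not V8 code) of the copy loop above,
// assuming the adaptor frame stores the receiver one slot above the
// arguments: the cursor starts on the receiver slot and each pre-decrement
// step lands on the next argument, so the receiver itself is never copied.
#include <cstddef>
#include <cstdint>
#include <vector>

static std::vector<intptr_t> CopyAdaptorArgsSketch(
    const intptr_t* receiver_slot, size_t count) {
  std::vector<intptr_t> pushed;
  const intptr_t* cursor = receiver_slot;  // Points at the receiver slot.
  for (size_t i = 0; i < count; ++i) {
    --cursor;                   // Pre-decrement first: skips the receiver.
    pushed.push_back(*cursor);  // Arguments come out in push order.
  }
  return pushed;
}
// ---------------------------------------------------------------------------

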
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ pop(r1);
  __ pop(r2);
  __ CallStub(&stub);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  DCHECK_NOT_NULL(args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    context()->Plug(r0);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = r0;
  Register cache = r1;
  __ ldr(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
  __ ldr(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
  __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(cache,
         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
  // r2 now holds finger offset as a smi.
  __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // r3 now points to the start of fixed array elements.
  __ ldr(r2, MemOperand::PointerAddressFromSmiKey(r3, r2, PreIndex));
  // Note side effect of PreIndex: r3 now points to the key of the pair.
  __ cmp(key, r2);
  __ b(ne, &not_found);

  __ ldr(r0, MemOperand(r3, kPointerSize));
  __ b(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ Push(cache, key);
  __ CallRuntime(Runtime::kGetFromCacheRT, 2);

  __ bind(&done);
  context()->Plug(r0);
}


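// ---------------------------------------------------------------------------
// Illustrative standalone sketch (not V8 API): a "finger" result cache like
// JSFunctionResultCache. Entries are key/value pairs in a flat array and a
// finger remembers the last hit; the inlined fast path above compares only
// the key under the finger and otherwise falls back to the runtime lookup.
#include <cstddef>
#include <utility>
#include <vector>

struct FingerCacheSketch {
  std::vector<std::pair<int, int> > entries;  // Flattened key/value pairs.
  size_t finger = 0;                          // Index of the last hit.

  // Fast path: returns true and fills *value only if the finger's key
  // matches; anything else is a miss.
  bool FastGet(int key, int* value) const {
    if (!entries.empty() && entries[finger].first == key) {
      *value = entries[finger].second;
      return true;
    }
    return false;  // Miss: the slow path searches, inserts, moves the finger.
  }
};
// ---------------------------------------------------------------------------

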
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(r0);

  __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
  __ IndexFromHash(r0, r0);

  context()->Plug(r0);
}


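// ---------------------------------------------------------------------------
// Illustrative standalone sketch with made-up bit positions (the real masks
// live in String's hash-field layout): a string's hash field can cache a
// parsed array index, so one mask test answers "is an index cached?" (clear
// bits => cached, matching the tst/eq split above) and a shift extracts it.
#include <cstdint>

static const uint32_t kContainsIndexMaskSketch = 0x3;  // Hypothetical bits.
static const int kIndexShiftSketch = 2;                // Hypothetical shift.

static bool HasCachedIndexSketch(uint32_t hash_field) {
  return (hash_field & kContainsIndexMaskSketch) == 0;
}

static uint32_t IndexFromHashSketch(uint32_t hash_field) {
  return hash_field >> kIndexShiftSketch;  // Drop the flag bits.
}
// ---------------------------------------------------------------------------

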
void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator, non_trivial_array,
      not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = r0;
  Register elements = no_reg;  // Will be r0.
  Register result = no_reg;  // Will be r0.
  Register separator = r1;
  Register array_length = r2;
  Register result_pos = no_reg;  // Will be r2.
  Register string_length = r3;
  Register string = r4;
  Register element = r5;
  Register elements_end = r6;
  Register scratch = r9;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
  __ b(ne, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, array_length, &bailout);

  // If the array has length zero, return the empty string.
  __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length, SetCC);
  __ b(ne, &non_trivial_array);
  __ LoadRoot(r0, Heap::kempty_stringRootIndex);
  __ b(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, Operand::Zero());
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi).
  //   separator: Separator string.
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ cmp(array_length, Operand::Zero());
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ JumpIfSmi(string, &bailout);
  __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);
  __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ cmp(element, elements_end);
  __ b(lt, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, Operand(1));
  __ b(ne, &not_size_one_array);
  __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ b(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string.
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch, scratch, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is
  // not a smi but the other values are, so the result is a smi.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ sub(string_length, string_length, Operand(scratch));
  __ smull(scratch, ip, array_length, scratch);
  // Check for smi overflow. No overflow if the higher 33 bits of the 64-bit
  // result are zero.
  __ cmp(ip, Operand::Zero());
  __ b(ne, &bailout);
  __ tst(scratch, Operand(0x80000000));
  __ b(ne, &bailout);
  __ add(string_length, string_length, Operand(scratch), SetCC);
  __ b(vs, &bailout);
  __ SmiUntag(string_length);

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ add(element,
         elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element.
  //   separator: Separator string.
  //   string_length: Length of result string (not smi).
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch,
                           string,        // used as scratch
                           elements_end,  // used as scratch
                           &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ add(result_pos,
         result,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ cmp(scratch, Operand(Smi::FromInt(1)));
  __ b(eq, &one_char_separator);
  __ b(gt, &long_separator);

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &empty_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ ldrb(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ strb(separator, MemOperand(result_pos, 1, PostIndex));

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &one_char_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         separator,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&long_separator);
  __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
  __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ add(string,
         string,
         Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ cmp(element, elements_end);
  __ b(lt, &long_separator_loop);  // End while (element < elements_end).
  DCHECK(result.is(r0));
  __ b(&done);

  __ bind(&bailout);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(r0);
}


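// ---------------------------------------------------------------------------
// Illustrative standalone sketch (not V8 code) of the join strategy above:
// pass 1 validates the elements and sums their lengths, bailing out on
// overflow (the 'b vs, &bailout' exits); then one exactly-sized allocation
// and a single copy pass. The generated code additionally specializes the
// copy loop for empty, one-character, and longer separators.
#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

static bool FastJoinSketch(const std::vector<std::string>& parts,
                           const std::string& sep, std::string* out) {
  if (parts.empty()) {
    out->clear();  // Zero-length array: the result is the empty string.
    return true;
  }
  // Pass 1: sum the lengths, bailing out on overflow.
  size_t total = 0;
  for (const std::string& part : parts) {
    if (part.size() > SIZE_MAX - total) return false;  // Bail out.
    total += part.size();
  }
  size_t sep_count = parts.size() - 1;  // One separator between each pair.
  if (!sep.empty() && sep_count > (SIZE_MAX - total) / sep.size())
    return false;  // Multiplication would overflow (the smull/ip check).
  total += sep_count * sep.size();
  // One allocation sized exactly, then a single copy pass.
  out->assign(total, '\0');
  char* pos = total > 0 ? &(*out)[0] : NULL;
  for (size_t i = 0; i < parts.size(); ++i) {
    if (i > 0 && !sep.empty()) {
      std::memcpy(pos, sep.data(), sep.size());
      pos += sep.size();
    }
    if (!parts[i].empty()) {
      std::memcpy(pos, parts[i].data(), parts[i].size());
      pos += parts[i].size();
    }
  }
  return true;
}
// ---------------------------------------------------------------------------

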
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ mov(ip, Operand(debug_is_active));
  __ ldrb(r0, MemOperand(ip));
  __ SmiTag(r0);
  context()->Plug(r0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push the builtins object as the receiver.
  Register receiver = LoadDescriptor::ReceiverRegister();
  __ ldr(receiver, GlobalObjectOperand());
  __ ldr(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
  __ push(receiver);

  // Load the function from the receiver.
  __ mov(LoadDescriptor::NameRegister(), Operand(expr->name()));
  __ mov(LoadDescriptor::SlotRegister(),
         Operand(SmiFromSlot(expr->CallRuntimeFeedbackSlot())));
  CallLoadIC(NOT_INSIDE_TYPEOF);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr, arg_count);
  CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    __ ldr(ip, MemOperand(sp, 0));
    __ push(ip);
    __ str(r0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, r0);

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(r0);
      }
    }
  }
}


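// ---------------------------------------------------------------------------
// Illustrative standalone sketch (hypothetical names): the
// FOR_EACH_FULL_CODE_INTRINSIC / CALL_INTRINSIC_GENERATOR pairing above is an
// X-macro -- a single list macro expanded once per intrinsic to stamp out the
// switch cases that route each inline intrinsic call to its Emit* generator.
#include <cstdio>

#define FOR_EACH_SKETCH_INTRINSIC(V) V(IsSmi) V(StringAdd) V(DebugIsActive)

enum SketchIntrinsicId {
#define DECLARE_SKETCH_ID(Name) kSketch##Name,
  FOR_EACH_SKETCH_INTRINSIC(DECLARE_SKETCH_ID)
#undef DECLARE_SKETCH_ID
};

static void DispatchSketch(SketchIntrinsicId id) {
  switch (id) {
#define SKETCH_CASE(Name)           \
  case kSketch##Name:               \
    std::printf("Emit" #Name "\n"); \
    break;
    FOR_EACH_SKETCH_INTRINSIC(SKETCH_CASE)
#undef SKETCH_CASE
  }
}
// ---------------------------------------------------------------------------

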
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(r1, Operand(Smi::FromInt(language_mode())));
        __ push(r1);
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(r0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ ldr(r2, GlobalObjectOperand());
          __ mov(r1, Operand(var->name()));
          __ mov(r0, Operand(Smi::FromInt(SLOPPY)));
          __ Push(r2, r1, r0);
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(r0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(r2));
          __ mov(r2, Operand(var->name()));
          __ Push(context_register(), r2);
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(r0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(r0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(r0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(r3, r0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        __ Push(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch);
        __ Push(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        __ Push(result_register());
        const Register scratch = r1;
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch);
        __ ldr(scratch, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch);
        __ Push(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ldr(LoadDescriptor::ReceiverRegister(),
               MemOperand(sp, 1 * kPointerSize));
        __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(r0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ str(r0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vc, &done);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(r0);
          break;
        case NAMED_PROPERTY:
          __ str(r0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ str(r0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ str(r0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(r1, r0);
  __ mov(r0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
                                              strength(language_mode())).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(r0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(r0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallStoreIC();
      } else {
        CallStoreIC(expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      if (FLAG_vector_stores) {
        EmitLoadStoreICSlot(expr->CountSlot());
        CallIC(ic);
      } else {
        CallIC(ic, expr->CountStoreFeedbackId());
      }
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(r0);
      }
      break;
    }
  }
}


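// ---------------------------------------------------------------------------
// Illustrative standalone sketch (not V8 code) of the inline smi increment
// above. A smi stores a 31-bit integer shifted left by one, so INC/DEC is
// plain machine arithmetic on the tagged word; signed overflow (ARM's V flag,
// checked by 'b vc, &done') means the value left smi range, and the add is
// undone before falling back to the generic BinaryOpIC stub.
// __builtin_add_overflow is a GCC/Clang builtin.
#include <cstdint>

static bool SmiIncrementSketch(int32_t tagged, int delta, int32_t* result) {
  int32_t tagged_delta = delta * 2;  // Smi::FromInt(+1 or -1).
  int32_t sum;
  if (__builtin_add_overflow(tagged, tagged_delta, &sum)) {
    return false;  // Overflow: caller undoes the add and calls the stub.
  }
  *result = sum;  // Still a valid smi; the low tag bit is unchanged.
  return true;
}
// ---------------------------------------------------------------------------

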
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(r0, if_true);
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(r0, ip);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => false.
    __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
    __ b(ge, if_false);
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->float32x4_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareObjectType(r0, r0, r1, FLOAT32X4_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(r0, Heap::kTrueValueRootIndex);
    __ b(eq, if_true);
    __ CompareRoot(r0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(eq, if_true);
    __ JumpIfSmi(r0, if_false);
    // Check for undetectable objects => true.
    __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(ne, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(r0, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
    __ b(eq, if_true);
    __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(r0, if_false);
    __ CompareRoot(r0, Heap::kNullValueRootIndex);
    __ b(eq, if_true);
    // Check for JS objects => true.
    __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(lt, if_false);
    __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ b(gt, if_false);
    // Check for undetectable objects => false.
    __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
    __ tst(r1, Operand(1 << Map::kIsUndetectable));
    Split(eq, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


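// ---------------------------------------------------------------------------
// Illustrative standalone sketch (hypothetical tags): each branch above
// specializes one literal the parser saw in a "typeof x == '...'" comparison,
// reducing the check to tag and map tests; literals the compiler does not
// recognize are unconditionally false.
#include <string>

enum class TagSketch { kSmi, kHeapNumber, kString, kUndefined, kOther };

static bool TypeofEqualsSketch(TagSketch tag, const std::string& check) {
  if (check == "number")
    return tag == TagSketch::kSmi || tag == TagSketch::kHeapNumber;
  if (check == "string") return tag == TagSketch::kString;
  if (check == "undefined") return tag == TagSketch::kUndefined;
  return false;  // Unknown literal: jump straight to if_false.
}
// ---------------------------------------------------------------------------

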
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      __ tst(r0, r0);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cond = CompareIC::ComputeCondition(op);
      __ pop(r1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ orr(r2, r0, Operand(r1));
        patch_site.EmitJumpIfNotSmi(r2, &slow_case);
        __ cmp(r1, r0);
        Split(cond, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
          isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ cmp(r0, Operand::Zero());
      Split(cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


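// ---------------------------------------------------------------------------
// Illustrative standalone sketch (not V8 code): the 'orr r2, r0, r1' trick in
// the default case above. Smis keep their low tag bit clear, so OR-ing both
// words and testing that single bit asks "are both operands smis?" in one
// instruction before taking the inlined comparison path.
#include <cstdint>

static bool BothSmisSketch(int32_t a, int32_t b) {
  const int32_t kSmiTagMaskSketch = 1;        // Low bit set => heap object.
  return ((a | b) & kSmiTagMaskSketch) == 0;  // Clear in both => both smis.
}
// ---------------------------------------------------------------------------

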
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(r1, nil_value);
    __ cmp(r0, r1);
    Split(eq, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ cmp(r0, Operand(0));
    Split(ne, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(r0);
}


Register FullCodeGenerator::result_register() {
  return r0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, ContextOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_script_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ mov(ip, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(ip);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Store the result register while executing the finally block.
  __ push(result_register());
  // Cook the return address in the link register to a stack slot
  // (smi-encoded Code* delta).
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  __ SmiTag(r1);

  // Store the cooked return address while executing the finally block.
  __ push(r1);

  // Store the pending message while executing the finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ ldr(r1, MemOperand(ip));
  __ push(r1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(r1));
  // Restore pending message from stack.
  __ pop(r1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));

  // Pop the cooked return address from the stack.
  __ pop(r1);

  // Restore the result register, uncook the return address and return.
  __ pop(result_register());
  __ SmiUntag(r1);
  __ add(pc, r1, Operand(masm_->CodeObject()));
}


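// ---------------------------------------------------------------------------
// Illustrative standalone sketch (not V8 code): "cooking" the return address
// as done in the two functions above. A raw code address on the stack would
// break if the GC moved the code object, so the finally prologue stores the
// smi-tagged offset from the code object's start, and the epilogue rebases
// that delta on the code object's (possibly new) start address.
#include <cstdint>

static intptr_t CookSketch(intptr_t return_address, intptr_t code_start) {
  return (return_address - code_start) << 1;  // Smi-tag the delta.
}

static intptr_t UncookSketch(intptr_t cooked, intptr_t code_start) {
  return (cooked >> 1) + code_start;  // Untag, then rebase on code start.
}
// ---------------------------------------------------------------------------

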
void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(r1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
  __ mov(ip, Operand(pending_message_obj));
  __ str(r1, MemOperand(ip));
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorICSlot slot) {
  DCHECK(FLAG_vector_stores && !slot.IsInvalid());
  __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
         Operand(SmiFromSlot(slot)));
}


#undef __


static Address GetInterruptImmediateLoadAddress(Address pc) {
  Address load_address = pc - 2 * Assembler::kInstrSize;
  if (!FLAG_enable_embedded_constant_pool) {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
  } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
    // This is an extended constant pool lookup.
    if (CpuFeatures::IsSupported(ARMv7)) {
      load_address -= 2 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsMovT(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
    } else {
      load_address -= 4 * Assembler::kInstrSize;
      DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
      DCHECK(Assembler::IsOrrImmed(
          Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
    }
  } else if (CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsMovT(Memory::int32_at(load_address))) {
    // This is a movw / movt immediate load.
    load_address -= Assembler::kInstrSize;
    DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
  } else if (!CpuFeatures::IsSupported(ARMv7) &&
             Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
    // This is a mov / orr immediate load.
    load_address -= 3 * Assembler::kInstrSize;
    DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + Assembler::kInstrSize)));
    DCHECK(Assembler::IsOrrImmed(
        Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
  } else {
    // This is a small constant pool lookup.
    DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
  }
  return load_address;
}


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  CodePatcher patcher(branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      //  <decrement profiling counter>
      //   bpl ok
      //   ; load interrupt stub address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |  movw ip, #imm      |  movw ip, #imm
      //                          |  movt ip, #imm      |  movw ip, #imm
      //                          |  ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |  mov ip, #imm       |  mov ip, #imm
      //                          |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //                          |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //                          |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label

      // Calculate the branch offset to the ok-label - this is the difference
      // between the branch address and |pc| (which points at <blx ip>) plus
      // kProfileCounterResetSequenceLength.
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //  <decrement profiling counter>
      //   mov r0, r0 (NOP)
      //   ; load on-stack replacement address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |  movw ip, #imm      |  movw ip, #imm
      //                          |  movt ip, #imm      |  movw ip, #imm
      //                          |  ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |  mov ip, #imm       |  mov ip, #imm
      //                          |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //                          |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //                          |  orr ip, ip, #imm   |  orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(pc_immediate_load_address, unoptimized_code,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  if (interrupt_address ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(interrupt_address ==
         isolate->builtins()->OsrAfterStackCheck()->entry());
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM