OLD | NEW |
(Empty) | |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are |
| 4 // met: |
| 5 // |
| 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. |
| 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 |
| 28 #include "v8.h" |
| 29 |
| 30 #include "bootstrapper.h" |
| 31 #include "codegen-inl.h" |
| 32 #include "debug.h" |
| 33 #include "parser.h" |
| 34 #include "register-allocator-inl.h" |
| 35 #include "runtime.h" |
| 36 #include "scopes.h" |
| 37 |
| 38 |
| 39 namespace v8 { namespace internal { |
| 40 |
| 41 #define __ ACCESS_MASM(masm_) |
| 42 |
| 43 |
| 44 // ------------------------------------------------------------------------- |
| 45 // CodeGenState implementation. |
| 46 |
| 47 CodeGenState::CodeGenState(CodeGenerator* owner) |
| 48 : owner_(owner), |
| 49 typeof_state_(NOT_INSIDE_TYPEOF), |
| 50 true_target_(NULL), |
| 51 false_target_(NULL), |
| 52 previous_(NULL) { |
| 53 owner_->set_state(this); |
| 54 } |
| 55 |
| 56 |
| 57 CodeGenState::CodeGenState(CodeGenerator* owner, |
| 58 TypeofState typeof_state, |
| 59 JumpTarget* true_target, |
| 60 JumpTarget* false_target) |
| 61 : owner_(owner), |
| 62 typeof_state_(typeof_state), |
| 63 true_target_(true_target), |
| 64 false_target_(false_target), |
| 65 previous_(owner->state()) { |
| 66 owner_->set_state(this); |
| 67 } |
| 68 |
| 69 |
| 70 CodeGenState::~CodeGenState() { |
| 71 ASSERT(owner_->state() == this); |
| 72 owner_->set_state(previous_); |
| 73 } |
| 74 |
| 75 |
| 76 // ------------------------------------------------------------------------- |
| 77 // CodeGenerator implementation |
| 78 |
| 79 CodeGenerator::CodeGenerator(int buffer_size, Handle<Script> script, |
| 80 bool is_eval) |
| 81 : is_eval_(is_eval), |
| 82 script_(script), |
| 83 deferred_(8), |
| 84 masm_(new MacroAssembler(NULL, buffer_size)), |
| 85 scope_(NULL), |
| 86 frame_(NULL), |
| 87 allocator_(NULL), |
| 88 cc_reg_(al), |
| 89 state_(NULL), |
| 90 function_return_is_shadowed_(false), |
| 91 in_spilled_code_(false) { |
| 92 } |
| 93 |
| 94 |
| 95 // Calling conventions: |
| 96 // fp: caller's frame pointer |
| 97 // sp: stack pointer |
| 98 // r1: called JS function |
| 99 // cp: callee's context |
| 100 |
| 101 void CodeGenerator::GenCode(FunctionLiteral* fun) { |
| 102 ZoneList<Statement*>* body = fun->body(); |
| 103 |
| 104 // Initialize state. |
| 105 ASSERT(scope_ == NULL); |
| 106 scope_ = fun->scope(); |
| 107 ASSERT(allocator_ == NULL); |
| 108 RegisterAllocator register_allocator(this); |
| 109 allocator_ = &register_allocator; |
| 110 ASSERT(frame_ == NULL); |
| 111 frame_ = new VirtualFrame(this); |
| 112 cc_reg_ = al; |
| 113 set_in_spilled_code(false); |
| 114 { |
| 115 CodeGenState state(this); |
| 116 |
| 117 // Entry: |
| 118 // Stack: receiver, arguments |
| 119 // lr: return address |
| 120 // fp: caller's frame pointer |
| 121 // sp: stack pointer |
| 122 // r1: called JS function |
| 123 // cp: callee's context |
| 124 allocator_->Initialize(); |
| 125 frame_->Enter(); |
| 126 // tos: code slot |
| 127 #ifdef DEBUG |
| 128 if (strlen(FLAG_stop_at) > 0 && |
| 129 fun->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
| 130 frame_->SpillAll(); |
| 131 __ stop("stop-at"); |
| 132 } |
| 133 #endif |
| 134 |
| 135 // Allocate space for locals and initialize them. |
| 136 frame_->AllocateStackSlots(scope_->num_stack_slots()); |
| 137 // Initialize the function return target after the locals are set |
| 138 // up, because it needs the expected frame height from the frame. |
| 139 function_return_.Initialize(this, JumpTarget::BIDIRECTIONAL); |
| 140 function_return_is_shadowed_ = false; |
| 141 |
| 142 VirtualFrame::SpilledScope spilled_scope(this); |
| 143 if (scope_->num_heap_slots() > 0) { |
| 144 // Allocate local context. |
| 145 // Get outer context and create a new context based on it. |
| 146 __ ldr(r0, frame_->Function()); |
| 147 frame_->EmitPush(r0); |
| 148 frame_->CallRuntime(Runtime::kNewContext, 1); // r0 holds the result |
| 149 |
| 150 #ifdef DEBUG |
| 151 JumpTarget verified_true(this); |
| 152 __ cmp(r0, Operand(cp)); |
| 153 verified_true.Branch(eq); |
| 154 __ stop("NewContext: r0 is expected to be the same as cp"); |
| 155 verified_true.Bind(); |
| 156 #endif |
| 157 // Update context local. |
| 158 __ str(cp, frame_->Context()); |
| 159 } |
| 160 |
| 161 // TODO(1241774): Improve this code: |
| 162 // 1) only needed if we have a context |
| 163 // 2) no need to recompute context ptr every single time |
| 164 // 3) don't copy parameter operand code from SlotOperand! |
| 165 { |
| 166 Comment cmnt2(masm_, "[ copy context parameters into .context"); |
| 167 |
| 168 // Note that iteration order is relevant here! If we have the same |
| 169 // parameter twice (e.g., function (x, y, x)), and that parameter |
| 170 // needs to be copied into the context, it must be the last argument |
| 171 // passed to the parameter that needs to be copied. This is a rare |
| 172 // case so we don't check for it, instead we rely on the copying |
| 173 // order: such a parameter is copied repeatedly into the same |
| 174 // context location and thus the last value is what is seen inside |
| 175 // the function. |
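| // For example, given function (x, y, x) invoked as f(1, 2, 3), the |
| // context slot for 'x' is written twice and ends up holding 3, which |
| // matches the language semantics for duplicate parameter names. |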
| 176 for (int i = 0; i < scope_->num_parameters(); i++) { |
| 177 Variable* par = scope_->parameter(i); |
| 178 Slot* slot = par->slot(); |
| 179 if (slot != NULL && slot->type() == Slot::CONTEXT) { |
| 180 ASSERT(!scope_->is_global_scope()); // no parameters in global scope |
| 181 __ ldr(r1, frame_->ParameterAt(i)); |
| 182 // Loads r2 with context; used below in RecordWrite. |
| 183 __ str(r1, SlotOperand(slot, r2)); |
| 184 // Load the offset into r3. |
| 185 int slot_offset = |
| 186 FixedArray::kHeaderSize + slot->index() * kPointerSize; |
| 187 __ mov(r3, Operand(slot_offset)); |
| 188 __ RecordWrite(r2, r3, r1); |
| 189 } |
| 190 } |
| 191 } |
| 192 |
| 193 // Store the arguments object. This must happen after context |
| 194 // initialization because the arguments object may be stored in the |
| 195 // context. |
| 196 if (scope_->arguments() != NULL) { |
| 197 ASSERT(scope_->arguments_shadow() != NULL); |
| 198 Comment cmnt(masm_, "[ allocate arguments object"); |
| 199 { Reference shadow_ref(this, scope_->arguments_shadow()); |
| 200 { Reference arguments_ref(this, scope_->arguments()); |
| 201 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT); |
| 202 __ ldr(r2, frame_->Function()); |
| 203 // The receiver is below the arguments, the return address, |
| 204 // and the frame pointer on the stack. |
| 205 const int kReceiverDisplacement = 2 + scope_->num_parameters(); |
| 206 __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize)); |
| 207 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); |
| 208 frame_->Adjust(3); |
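| // The stm below pushes r0 (the number of parameters as a smi), r1 (the |
| // address of the receiver) and r2 (the function) in one store-multiple; |
| // the Adjust(3) above keeps the virtual frame height in sync with these |
| // three pushes. |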
| 209 __ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit()); |
| 210 frame_->CallStub(&stub, 3); |
| 211 frame_->EmitPush(r0); |
| 212 arguments_ref.SetValue(NOT_CONST_INIT); |
| 213 } |
| 214 shadow_ref.SetValue(NOT_CONST_INIT); |
| 215 } |
| 216 frame_->Drop(); // Value is no longer needed. |
| 217 } |
| 218 |
| 219 // Generate code to 'execute' declarations and initialize functions |
| 220 // (source elements). In case of an illegal redeclaration we need to |
| 221 // handle that instead of processing the declarations. |
| 222 if (scope_->HasIllegalRedeclaration()) { |
| 223 Comment cmnt(masm_, "[ illegal redeclarations"); |
| 224 scope_->VisitIllegalRedeclaration(this); |
| 225 } else { |
| 226 Comment cmnt(masm_, "[ declarations"); |
| 227 ProcessDeclarations(scope_->declarations()); |
| 228 // Bail out if a stack-overflow exception occurred when processing |
| 229 // declarations. |
| 230 if (HasStackOverflow()) return; |
| 231 } |
| 232 |
| 233 if (FLAG_trace) { |
| 234 frame_->CallRuntime(Runtime::kTraceEnter, 0); |
| 235 // Ignore the return value. |
| 236 } |
| 237 CheckStack(); |
| 238 |
| 239 // Compile the body of the function in a vanilla state. Don't |
| 240 // bother compiling all the code if the scope has an illegal |
| 241 // redeclaration. |
| 242 if (!scope_->HasIllegalRedeclaration()) { |
| 243 Comment cmnt(masm_, "[ function body"); |
| 244 #ifdef DEBUG |
| 245 bool is_builtin = Bootstrapper::IsActive(); |
| 246 bool should_trace = |
| 247 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; |
| 248 if (should_trace) { |
| 249 frame_->CallRuntime(Runtime::kDebugTrace, 0); |
| 250 // Ignore the return value. |
| 251 } |
| 252 #endif |
| 253 VisitStatementsAndSpill(body); |
| 254 } |
| 255 } |
| 256 |
| 257 // Generate the return sequence if necessary. |
| 258 if (frame_ != NULL || function_return_.is_linked()) { |
| 259 // exit |
| 260 // r0: result |
| 261 // sp: stack pointer |
| 262 // fp: frame pointer |
| 263 // pp: parameter pointer |
| 264 // cp: callee's context |
| 265 __ mov(r0, Operand(Factory::undefined_value())); |
| 266 |
| 267 function_return_.Bind(); |
| 268 if (FLAG_trace) { |
| 269 // Push the return value on the stack as the parameter. |
| 270 // Runtime::TraceExit returns the parameter as it is. |
| 271 frame_->EmitPush(r0); |
| 272 frame_->CallRuntime(Runtime::kTraceExit, 1); |
| 273 } |
| 274 |
| 275 // Tear down the frame which will restore the caller's frame pointer and |
| 276 // the link register. |
| 277 frame_->Exit(); |
| 278 |
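| // Drop the parameters and the receiver from the stack, then return by |
| // moving lr into pc. |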
| 279 __ add(sp, sp, Operand((scope_->num_parameters() + 1) * kPointerSize)); |
| 280 __ mov(pc, lr); |
| 281 } |
| 282 |
| 283 // Code generation state must be reset. |
| 284 ASSERT(!has_cc()); |
| 285 ASSERT(state_ == NULL); |
| 286 ASSERT(!function_return_is_shadowed_); |
| 287 function_return_.Unuse(); |
| 288 DeleteFrame(); |
| 289 |
| 290 // Process any deferred code using the register allocator. |
| 291 if (HasStackOverflow()) { |
| 292 ClearDeferred(); |
| 293 } else { |
| 294 ProcessDeferred(); |
| 295 } |
| 296 |
| 297 allocator_ = NULL; |
| 298 scope_ = NULL; |
| 299 } |
| 300 |
| 301 |
| 302 MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { |
| 303 // Currently, this assertion will fail if we try to assign to |
| 304 // a constant variable that is constant because it is read-only |
| 305 // (such as the variable referring to a named function expression). |
| 306 // We need to implement assignments to read-only variables. |
| 307 // Ideally, we should do this during AST generation (by converting |
| 308 // such assignments into expression statements); however, in general |
| 309 // we may not be able to make the decision until past AST generation, |
| 310 // that is, when the entire program is known. |
| 311 ASSERT(slot != NULL); |
| 312 int index = slot->index(); |
| 313 switch (slot->type()) { |
| 314 case Slot::PARAMETER: |
| 315 return frame_->ParameterAt(index); |
| 316 |
| 317 case Slot::LOCAL: |
| 318 return frame_->LocalAt(index); |
| 319 |
| 320 case Slot::CONTEXT: { |
| 321 // Follow the context chain if necessary. |
| 322 ASSERT(!tmp.is(cp)); // do not overwrite context register |
| 323 Register context = cp; |
| 324 int chain_length = scope()->ContextChainLength(slot->var()->scope()); |
| 325 for (int i = 0; i < chain_length; i++) { |
| 326 // Load the closure. |
| 327 // (All contexts, even 'with' contexts, have a closure, |
| 328 // and it is the same for all contexts inside a function. |
| 329 // There is no need to go to the function context first.) |
| 330 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX)); |
| 331 // Load the function context (which is the incoming, outer context). |
| 332 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
| 333 context = tmp; |
| 334 } |
| 335 // We may have a 'with' context now. Get the function context. |
| 336 // (In fact this mov may never be needed, since the scope analysis |
| 337 // may not permit a direct context access in this case and thus we are |
| 338 // always at a function context. However it is safe to dereference be- |
| 339 // cause the function context of a function context is itself. Before |
| 340 // deleting this mov we should try to create a counter-example first, |
| 341 // though...) |
| 342 __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX)); |
| 343 return ContextOperand(tmp, index); |
| 344 } |
| 345 |
| 346 default: |
| 347 UNREACHABLE(); |
| 348 return MemOperand(r0, 0); |
| 349 } |
| 350 } |
| 351 |
| 352 |
| 353 MemOperand CodeGenerator::ContextSlotOperandCheckExtensions( |
| 354 Slot* slot, |
| 355 Register tmp, |
| 356 Register tmp2, |
| 357 JumpTarget* slow) { |
| 358 ASSERT(slot->type() == Slot::CONTEXT); |
| 359 Register context = cp; |
| 360 |
| 361 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { |
| 362 if (s->num_heap_slots() > 0) { |
| 363 if (s->calls_eval()) { |
| 364 // Check that extension is NULL. |
| 365 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 366 __ tst(tmp2, tmp2); |
| 367 slow->Branch(ne); |
| 368 } |
| 369 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX)); |
| 370 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
| 371 context = tmp; |
| 372 } |
| 373 } |
| 374 // Check that last extension is NULL. |
| 375 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 376 __ tst(tmp2, tmp2); |
| 377 slow->Branch(ne); |
| 378 __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX)); |
| 379 return ContextOperand(tmp, slot->index()); |
| 380 } |
| 381 |
| 382 |
| 383 void CodeGenerator::LoadConditionAndSpill(Expression* expression, |
| 384 TypeofState typeof_state, |
| 385 JumpTarget* true_target, |
| 386 JumpTarget* false_target, |
| 387 bool force_control) { |
| 388 ASSERT(in_spilled_code()); |
| 389 set_in_spilled_code(false); |
| 390 LoadCondition(expression, typeof_state, true_target, false_target, |
| 391 force_control); |
| 392 if (frame_ != NULL) { |
| 393 frame_->SpillAll(); |
| 394 } |
| 395 set_in_spilled_code(true); |
| 396 } |
| 397 |
| 398 |
| 399 // Loads a value on TOS. If it is a boolean value, the result may have been |
| 400 // (partially) translated into branches, or it may have set the condition |
| 401 // code register. If force_cc is set, the value is forced to set the |
| 402 // condition code register and no value is pushed. If the condition code |
| 403 // register was set, has_cc() is true and cc_reg_ contains the condition to |
| 404 // test for 'true'. |
| 405 void CodeGenerator::LoadCondition(Expression* x, |
| 406 TypeofState typeof_state, |
| 407 JumpTarget* true_target, |
| 408 JumpTarget* false_target, |
| 409 bool force_cc) { |
| 410 ASSERT(!in_spilled_code()); |
| 411 ASSERT(!has_cc()); |
| 412 int original_height = frame_->height(); |
| 413 |
| 414 { CodeGenState new_state(this, typeof_state, true_target, false_target); |
| 415 Visit(x); |
| 416 |
| 417 // If we hit a stack overflow, we may not have actually visited |
| 418 // the expression. In that case, we ensure that we have a |
| 419 // valid-looking frame state because we will continue to generate |
| 420 // code as we unwind the C++ stack. |
| 421 // |
| 422 // It's possible to have both a stack overflow and a valid frame |
| 423 // state (eg, a subexpression overflowed, visiting it returned |
| 424 // with a dummied frame state, and visiting this expression |
| 425 // returned with a normal-looking state). |
| 426 if (HasStackOverflow() && |
| 427 has_valid_frame() && |
| 428 !has_cc() && |
| 429 frame_->height() == original_height) { |
| 430 true_target->Jump(); |
| 431 } |
| 432 } |
| 433 if (force_cc && frame_ != NULL && !has_cc()) { |
| 434 // Convert the TOS value to a boolean in the condition code register. |
| 435 ToBoolean(true_target, false_target); |
| 436 } |
| 437 ASSERT(!force_cc || !has_valid_frame() || has_cc()); |
| 438 ASSERT(!has_valid_frame() || |
| 439 (has_cc() && frame_->height() == original_height) || |
| 440 (!has_cc() && frame_->height() == original_height + 1)); |
| 441 } |
| 442 |
| 443 |
| 444 void CodeGenerator::LoadAndSpill(Expression* expression, |
| 445 TypeofState typeof_state) { |
| 446 ASSERT(in_spilled_code()); |
| 447 set_in_spilled_code(false); |
| 448 Load(expression, typeof_state); |
| 449 frame_->SpillAll(); |
| 450 set_in_spilled_code(true); |
| 451 } |
| 452 |
| 453 |
| 454 void CodeGenerator::Load(Expression* x, TypeofState typeof_state) { |
| 455 #ifdef DEBUG |
| 456 int original_height = frame_->height(); |
| 457 #endif |
| 458 ASSERT(!in_spilled_code()); |
| 459 JumpTarget true_target(this); |
| 460 JumpTarget false_target(this); |
| 461 LoadCondition(x, typeof_state, &true_target, &false_target, false); |
| 462 |
| 463 if (has_cc()) { |
| 464 // Convert cc_reg_ into a boolean value. |
| 465 JumpTarget loaded(this); |
| 466 JumpTarget materialize_true(this); |
| 467 materialize_true.Branch(cc_reg_); |
| 468 __ mov(r0, Operand(Factory::false_value())); |
| 469 frame_->EmitPush(r0); |
| 470 loaded.Jump(); |
| 471 materialize_true.Bind(); |
| 472 __ mov(r0, Operand(Factory::true_value())); |
| 473 frame_->EmitPush(r0); |
| 474 loaded.Bind(); |
| 475 cc_reg_ = al; |
| 476 } |
| 477 |
| 478 if (true_target.is_linked() || false_target.is_linked()) { |
| 479 // We have at least one condition value that has been "translated" |
| 480 // into a branch, thus it needs to be loaded explicitly. |
| 481 JumpTarget loaded(this); |
| 482 if (frame_ != NULL) { |
| 483 loaded.Jump(); // Don't lose the current TOS. |
| 484 } |
| 485 bool both = true_target.is_linked() && false_target.is_linked(); |
| 486 // Load "true" if necessary. |
| 487 if (true_target.is_linked()) { |
| 488 true_target.Bind(); |
| 489 __ mov(r0, Operand(Factory::true_value())); |
| 490 frame_->EmitPush(r0); |
| 491 } |
| 492 // If both "true" and "false" need to be loaded jump across the code for |
| 493 // "false". |
| 494 if (both) { |
| 495 loaded.Jump(); |
| 496 } |
| 497 // Load "false" if necessary. |
| 498 if (false_target.is_linked()) { |
| 499 false_target.Bind(); |
| 500 __ mov(r0, Operand(Factory::false_value())); |
| 501 frame_->EmitPush(r0); |
| 502 } |
| 503 // A value is loaded on all paths reaching this point. |
| 504 loaded.Bind(); |
| 505 } |
| 506 ASSERT(has_valid_frame()); |
| 507 ASSERT(!has_cc()); |
| 508 ASSERT(frame_->height() == original_height + 1); |
| 509 } |
| 510 |
| 511 |
| 512 void CodeGenerator::LoadGlobal() { |
| 513 VirtualFrame::SpilledScope spilled_scope(this); |
| 514 __ ldr(r0, GlobalObject()); |
| 515 frame_->EmitPush(r0); |
| 516 } |
| 517 |
| 518 |
| 519 void CodeGenerator::LoadGlobalReceiver(Register scratch) { |
| 520 VirtualFrame::SpilledScope spilled_scope(this); |
| 521 __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX)); |
| 522 __ ldr(scratch, |
| 523 FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset)); |
| 524 frame_->EmitPush(scratch); |
| 525 } |
| 526 |
| 527 |
| 528 // TODO(1241834): Get rid of this function in favor of just using Load, now |
| 529 // that we have the INSIDE_TYPEOF typeof state. => Need to handle global |
| 530 // variables w/o reference errors elsewhere. |
| 531 void CodeGenerator::LoadTypeofExpression(Expression* x) { |
| 532 VirtualFrame::SpilledScope spilled_scope(this); |
| 533 Variable* variable = x->AsVariableProxy()->AsVariable(); |
| 534 if (variable != NULL && !variable->is_this() && variable->is_global()) { |
| 535 // NOTE: This is somewhat nasty. We force the compiler to load |
| 536 // the variable as if through '<global>.<variable>' to make sure we |
| 537 // do not get reference errors. |
| 538 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX); |
| 539 Literal key(variable->name()); |
| 540 // TODO(1241834): Fetch the position from the variable instead of using |
| 541 // no position. |
| 542 Property property(&global, &key, RelocInfo::kNoPosition); |
| 543 LoadAndSpill(&property); |
| 544 } else { |
| 545 LoadAndSpill(x, INSIDE_TYPEOF); |
| 546 } |
| 547 } |
| 548 |
| 549 |
| 550 Reference::Reference(CodeGenerator* cgen, Expression* expression) |
| 551 : cgen_(cgen), expression_(expression), type_(ILLEGAL) { |
| 552 cgen->LoadReference(this); |
| 553 } |
| 554 |
| 555 |
| 556 Reference::~Reference() { |
| 557 cgen_->UnloadReference(this); |
| 558 } |
| 559 |
| 560 |
| 561 void CodeGenerator::LoadReference(Reference* ref) { |
| 562 VirtualFrame::SpilledScope spilled_scope(this); |
| 563 Comment cmnt(masm_, "[ LoadReference"); |
| 564 Expression* e = ref->expression(); |
| 565 Property* property = e->AsProperty(); |
| 566 Variable* var = e->AsVariableProxy()->AsVariable(); |
| 567 |
| 568 if (property != NULL) { |
| 569 // The expression is either a property or a variable proxy that rewrites |
| 570 // to a property. |
| 571 LoadAndSpill(property->obj()); |
| 572 // We use a named reference if the key is a literal symbol, unless it is |
| 573 // a string that can be legally parsed as an integer. This is because |
| 574 // otherwise we will not get into the slow case code that handles [] on |
| 575 // String objects. |
| 576 Literal* literal = property->key()->AsLiteral(); |
| 577 uint32_t dummy; |
| 578 if (literal != NULL && |
| 579 literal->handle()->IsSymbol() && |
| 580 !String::cast(*(literal->handle()))->AsArrayIndex(&dummy)) { |
| 581 ref->set_type(Reference::NAMED); |
| 582 } else { |
| 583 LoadAndSpill(property->key()); |
| 584 ref->set_type(Reference::KEYED); |
| 585 } |
| 586 } else if (var != NULL) { |
| 587 // The expression is a variable proxy that does not rewrite to a |
| 588 // property. Global variables are treated as named property references. |
| 589 if (var->is_global()) { |
| 590 LoadGlobal(); |
| 591 ref->set_type(Reference::NAMED); |
| 592 } else { |
| 593 ASSERT(var->slot() != NULL); |
| 594 ref->set_type(Reference::SLOT); |
| 595 } |
| 596 } else { |
| 597 // Anything else is a runtime error. |
| 598 LoadAndSpill(e); |
| 599 frame_->CallRuntime(Runtime::kThrowReferenceError, 1); |
| 600 } |
| 601 } |
| 602 |
| 603 |
| 604 void CodeGenerator::UnloadReference(Reference* ref) { |
| 605 VirtualFrame::SpilledScope spilled_scope(this); |
| 606 // Pop a reference from the stack while preserving TOS. |
| 607 Comment cmnt(masm_, "[ UnloadReference"); |
| 608 int size = ref->size(); |
| 609 if (size > 0) { |
| 610 frame_->EmitPop(r0); |
| 611 frame_->Drop(size); |
| 612 frame_->EmitPush(r0); |
| 613 } |
| 614 } |
| 615 |
| 616 |
| 617 // ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given |
| 618 // register to a boolean in the condition code register. The code |
| 619 // may jump to 'false_target' in case the register converts to 'false'. |
| 620 void CodeGenerator::ToBoolean(JumpTarget* true_target, |
| 621 JumpTarget* false_target) { |
| 622 VirtualFrame::SpilledScope spilled_scope(this); |
| 623 // Note: The generated code snippet does not change stack variables. |
| 624 // Only the condition code should be set. |
| 625 frame_->EmitPop(r0); |
| 626 |
| 627 // Fast case checks |
| 628 |
| 629 // Check if the value is 'false'. |
| 630 __ cmp(r0, Operand(Factory::false_value())); |
| 631 false_target->Branch(eq); |
| 632 |
| 633 // Check if the value is 'true'. |
| 634 __ cmp(r0, Operand(Factory::true_value())); |
| 635 true_target->Branch(eq); |
| 636 |
| 637 // Check if the value is 'undefined'. |
| 638 __ cmp(r0, Operand(Factory::undefined_value())); |
| 639 false_target->Branch(eq); |
| 640 |
| 641 // Check if the value is a smi. |
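| // The smi zero is false; any other smi (tag bit clear, since kSmiTag is |
| // 0) is true. Non-smis fall through to the runtime call below. |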
| 642 __ cmp(r0, Operand(Smi::FromInt(0))); |
| 643 false_target->Branch(eq); |
| 644 __ tst(r0, Operand(kSmiTagMask)); |
| 645 true_target->Branch(eq); |
| 646 |
| 647 // Slow case: call the runtime. |
| 648 frame_->EmitPush(r0); |
| 649 frame_->CallRuntime(Runtime::kToBool, 1); |
| 650 // Convert the result (r0) to a condition code. |
| 651 __ cmp(r0, Operand(Factory::false_value())); |
| 652 |
| 653 cc_reg_ = ne; |
| 654 } |
| 655 |
| 656 |
| 657 class GenericBinaryOpStub : public CodeStub { |
| 658 public: |
| 659 GenericBinaryOpStub(Token::Value op, |
| 660 OverwriteMode mode) |
| 661 : op_(op), mode_(mode) { } |
| 662 |
| 663 private: |
| 664 Token::Value op_; |
| 665 OverwriteMode mode_; |
| 666 |
| 667 // Minor key encoding in 16 bits. |
| 668 class ModeBits: public BitField<OverwriteMode, 0, 2> {}; |
| 669 class OpBits: public BitField<Token::Value, 2, 14> {}; |
| 670 |
| 671 Major MajorKey() { return GenericBinaryOp; } |
| 672 int MinorKey() { |
| 673 // Encode the parameters in a unique 16 bit value. |
| 674 return OpBits::encode(op_) |
| 675 | ModeBits::encode(mode_); |
| 676 } |
| 677 |
| 678 void Generate(MacroAssembler* masm); |
| 679 |
| 680 const char* GetName() { |
| 681 switch (op_) { |
| 682 case Token::ADD: return "GenericBinaryOpStub_ADD"; |
| 683 case Token::SUB: return "GenericBinaryOpStub_SUB"; |
| 684 case Token::MUL: return "GenericBinaryOpStub_MUL"; |
| 685 case Token::DIV: return "GenericBinaryOpStub_DIV"; |
| 686 case Token::BIT_OR: return "GenericBinaryOpStub_BIT_OR"; |
| 687 case Token::BIT_AND: return "GenericBinaryOpStub_BIT_AND"; |
| 688 case Token::BIT_XOR: return "GenericBinaryOpStub_BIT_XOR"; |
| 689 case Token::SAR: return "GenericBinaryOpStub_SAR"; |
| 690 case Token::SHL: return "GenericBinaryOpStub_SHL"; |
| 691 case Token::SHR: return "GenericBinaryOpStub_SHR"; |
| 692 default: return "GenericBinaryOpStub"; |
| 693 } |
| 694 } |
| 695 |
| 696 #ifdef DEBUG |
| 697 void Print() { PrintF("GenericBinaryOpStub (%s)\n", Token::String(op_)); } |
| 698 #endif |
| 699 }; |
| 700 |
| 701 |
| 702 void CodeGenerator::GenericBinaryOperation(Token::Value op, |
| 703 OverwriteMode overwrite_mode) { |
| 704 VirtualFrame::SpilledScope spilled_scope(this); |
| 705 // sp[0] : y |
| 706 // sp[1] : x |
| 707 // result : r0 |
| 708 |
| 709 // Stub is entered with a call: 'return address' is in lr. |
| 710 switch (op) { |
| 711 case Token::ADD: // fall through. |
| 712 case Token::SUB: // fall through. |
| 713 case Token::MUL: |
| 714 case Token::BIT_OR: |
| 715 case Token::BIT_AND: |
| 716 case Token::BIT_XOR: |
| 717 case Token::SHL: |
| 718 case Token::SHR: |
| 719 case Token::SAR: { |
| 720 frame_->EmitPop(r0); // r0 : y |
| 721 frame_->EmitPop(r1); // r1 : x |
| 722 GenericBinaryOpStub stub(op, overwrite_mode); |
| 723 frame_->CallStub(&stub, 0); |
| 724 break; |
| 725 } |
| 726 |
| 727 case Token::DIV: { |
| 728 Result arg_count = allocator_->Allocate(r0); |
| 729 ASSERT(arg_count.is_valid()); |
| 730 __ mov(arg_count.reg(), Operand(1)); |
| 731 frame_->InvokeBuiltin(Builtins::DIV, CALL_JS, &arg_count, 2); |
| 732 break; |
| 733 } |
| 734 |
| 735 case Token::MOD: { |
| 736 Result arg_count = allocator_->Allocate(r0); |
| 737 ASSERT(arg_count.is_valid()); |
| 738 __ mov(arg_count.reg(), Operand(1)); |
| 739 frame_->InvokeBuiltin(Builtins::MOD, CALL_JS, &arg_count, 2); |
| 740 break; |
| 741 } |
| 742 |
| 743 case Token::COMMA: |
| 744 frame_->EmitPop(r0); |
| 745 // simply discard left value |
| 746 frame_->Drop(); |
| 747 break; |
| 748 |
| 749 default: |
| 750 // Other cases should have been handled before this point. |
| 751 UNREACHABLE(); |
| 752 break; |
| 753 } |
| 754 } |
| 755 |
| 756 |
| 757 class DeferredInlineSmiOperation: public DeferredCode { |
| 758 public: |
| 759 DeferredInlineSmiOperation(CodeGenerator* generator, |
| 760 Token::Value op, |
| 761 int value, |
| 762 bool reversed, |
| 763 OverwriteMode overwrite_mode) |
| 764 : DeferredCode(generator), |
| 765 op_(op), |
| 766 value_(value), |
| 767 reversed_(reversed), |
| 768 overwrite_mode_(overwrite_mode) { |
| 769 set_comment("[ DeferredInlinedSmiOperation"); |
| 770 } |
| 771 |
| 772 virtual void Generate(); |
| 773 |
| 774 private: |
| 775 Token::Value op_; |
| 776 int value_; |
| 777 bool reversed_; |
| 778 OverwriteMode overwrite_mode_; |
| 779 }; |
| 780 |
| 781 |
| 782 void DeferredInlineSmiOperation::Generate() { |
| 783 enter()->Bind(); |
| 784 VirtualFrame::SpilledScope spilled_scope(generator()); |
| 785 |
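| // Reconstruct the operands expected by the generic stub: left in r1, |
| // right in r0. For ADD and SUB the optimistic inline operation done in |
| // SmiOperation is undone first. |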
| 786 switch (op_) { |
| 787 case Token::ADD: { |
| 788 if (reversed_) { |
| 789 // revert optimistic add |
| 790 __ sub(r0, r0, Operand(Smi::FromInt(value_))); |
| 791 __ mov(r1, Operand(Smi::FromInt(value_))); |
| 792 } else { |
| 793 // revert optimistic add |
| 794 __ sub(r1, r0, Operand(Smi::FromInt(value_))); |
| 795 __ mov(r0, Operand(Smi::FromInt(value_))); |
| 796 } |
| 797 break; |
| 798 } |
| 799 |
| 800 case Token::SUB: { |
| 801 if (reversed_) { |
| 802 // revert optimistic sub |
| 803 __ rsb(r0, r0, Operand(Smi::FromInt(value_))); |
| 804 __ mov(r1, Operand(Smi::FromInt(value_))); |
| 805 } else { |
| 806 __ add(r1, r0, Operand(Smi::FromInt(value_))); |
| 807 __ mov(r0, Operand(Smi::FromInt(value_))); |
| 808 } |
| 809 break; |
| 810 } |
| 811 |
| 812 case Token::BIT_OR: |
| 813 case Token::BIT_XOR: |
| 814 case Token::BIT_AND: { |
| 815 if (reversed_) { |
| 816 __ mov(r1, Operand(Smi::FromInt(value_))); |
| 817 } else { |
| 818 __ mov(r1, Operand(r0)); |
| 819 __ mov(r0, Operand(Smi::FromInt(value_))); |
| 820 } |
| 821 break; |
| 822 } |
| 823 |
| 824 case Token::SHL: |
| 825 case Token::SHR: |
| 826 case Token::SAR: { |
| 827 if (!reversed_) { |
| 828 __ mov(r1, Operand(r0)); |
| 829 __ mov(r0, Operand(Smi::FromInt(value_))); |
| 830 } else { |
| 831 UNREACHABLE(); // should have been handled in SmiOperation |
| 832 } |
| 833 break; |
| 834 } |
| 835 |
| 836 default: |
| 837 // other cases should have been handled before this point. |
| 838 UNREACHABLE(); |
| 839 break; |
| 840 } |
| 841 |
| 842 GenericBinaryOpStub igostub(op_, overwrite_mode_); |
| 843 Result arg0 = generator()->allocator()->Allocate(r1); |
| 844 ASSERT(arg0.is_valid()); |
| 845 Result arg1 = generator()->allocator()->Allocate(r0); |
| 846 ASSERT(arg1.is_valid()); |
| 847 generator()->frame()->CallStub(&igostub, &arg0, &arg1); |
| 848 exit_.Jump(); |
| 849 } |
| 850 |
| 851 |
| 852 void CodeGenerator::SmiOperation(Token::Value op, |
| 853 Handle<Object> value, |
| 854 bool reversed, |
| 855 OverwriteMode mode) { |
| 856 VirtualFrame::SpilledScope spilled_scope(this); |
| 857 // NOTE: This is an attempt to inline (a bit) more of the code for |
| 858 // some possible smi operations (like + and -) when (at least) one |
| 859 // of the operands is a literal smi. With this optimization, the |
| 860 // performance of the system is increased by ~15%, and the generated |
| 861 // code size is increased by ~1% (measured on a combination of |
| 862 // different benchmarks). |
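| // For example, for 'x + 1' the addition is emitted inline with smi and |
| // overflow checks; a DeferredInlineSmiOperation falls back to the |
| // generic stub when a check fails. |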
| 863 |
| 864 // sp[0] : operand |
| 865 |
| 866 int int_value = Smi::cast(*value)->value(); |
| 867 |
| 868 JumpTarget exit(this); |
| 869 frame_->EmitPop(r0); |
| 870 |
| 871 switch (op) { |
| 872 case Token::ADD: { |
| 873 DeferredCode* deferred = |
| 874 new DeferredInlineSmiOperation(this, op, int_value, reversed, mode); |
| 875 |
| 876 __ add(r0, r0, Operand(value), SetCC); |
| 877 deferred->enter()->Branch(vs); |
| 878 __ tst(r0, Operand(kSmiTagMask)); |
| 879 deferred->enter()->Branch(ne); |
| 880 deferred->BindExit(); |
| 881 break; |
| 882 } |
| 883 |
| 884 case Token::SUB: { |
| 885 DeferredCode* deferred = |
| 886 new DeferredInlineSmiOperation(this, op, int_value, reversed, mode); |
| 887 |
| 888 if (!reversed) { |
| 889 __ sub(r0, r0, Operand(value), SetCC); |
| 890 } else { |
| 891 __ rsb(r0, r0, Operand(value), SetCC); |
| 892 } |
| 893 deferred->enter()->Branch(vs); |
| 894 __ tst(r0, Operand(kSmiTagMask)); |
| 895 deferred->enter()->Branch(ne); |
| 896 deferred->BindExit(); |
| 897 break; |
| 898 } |
| 899 |
| 900 case Token::BIT_OR: |
| 901 case Token::BIT_XOR: |
| 902 case Token::BIT_AND: { |
| 903 DeferredCode* deferred = |
| 904 new DeferredInlineSmiOperation(this, op, int_value, reversed, mode); |
| 905 __ tst(r0, Operand(kSmiTagMask)); |
| 906 deferred->enter()->Branch(ne); |
| 907 switch (op) { |
| 908 case Token::BIT_OR: __ orr(r0, r0, Operand(value)); break; |
| 909 case Token::BIT_XOR: __ eor(r0, r0, Operand(value)); break; |
| 910 case Token::BIT_AND: __ and_(r0, r0, Operand(value)); break; |
| 911 default: UNREACHABLE(); |
| 912 } |
| 913 deferred->BindExit(); |
| 914 break; |
| 915 } |
| 916 |
| 917 case Token::SHL: |
| 918 case Token::SHR: |
| 919 case Token::SAR: { |
| 920 if (reversed) { |
| 921 __ mov(ip, Operand(value)); |
| 922 frame_->EmitPush(ip); |
| 923 frame_->EmitPush(r0); |
| 924 GenericBinaryOperation(op, mode); |
| 925 |
| 926 } else { |
| 927 int shift_value = int_value & 0x1f; // least significant 5 bits |
| 928 DeferredCode* deferred = |
| 929 new DeferredInlineSmiOperation(this, op, shift_value, false, mode); |
| 930 __ tst(r0, Operand(kSmiTagMask)); |
| 931 deferred->enter()->Branch(ne); |
| 932 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // remove tags |
| 933 switch (op) { |
| 934 case Token::SHL: { |
| 935 __ mov(r2, Operand(r2, LSL, shift_value)); |
| 936 // check that the *unsigned* result fits in a smi |
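| // (adding 0x40000000 sets the N flag exactly when the shifted value |
| // lies outside the smi range [-2^30, 2^30)) |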
| 937 __ add(r3, r2, Operand(0x40000000), SetCC); |
| 938 deferred->enter()->Branch(mi); |
| 939 break; |
| 940 } |
| 941 case Token::SHR: { |
| 942 // LSR by immediate 0 means shifting 32 bits. |
| 943 if (shift_value != 0) { |
| 944 __ mov(r2, Operand(r2, LSR, shift_value)); |
| 945 } |
| 946 // check that the *unsigned* result fits in a smi |
| 947 // neither of the two high-order bits can be set: |
| 948 // - 0x80000000: high bit would be lost when smi tagging |
| 949 // - 0x40000000: this number would convert to negative when |
| 950 // smi tagging. These two cases can only happen with shifts |
| 951 // by 0 or 1 when handed a valid smi. |
| 952 __ and_(r3, r2, Operand(0xc0000000), SetCC); |
| 953 deferred->enter()->Branch(ne); |
| 954 break; |
| 955 } |
| 956 case Token::SAR: { |
| 957 if (shift_value != 0) { |
| 958 // ASR by immediate 0 means shifting 32 bits. |
| 959 __ mov(r2, Operand(r2, ASR, shift_value)); |
| 960 } |
| 961 break; |
| 962 } |
| 963 default: UNREACHABLE(); |
| 964 } |
| 965 __ mov(r0, Operand(r2, LSL, kSmiTagSize)); |
| 966 deferred->BindExit(); |
| 967 } |
| 968 break; |
| 969 } |
| 970 |
| 971 default: |
| 972 if (!reversed) { |
| 973 frame_->EmitPush(r0); |
| 974 __ mov(r0, Operand(value)); |
| 975 frame_->EmitPush(r0); |
| 976 } else { |
| 977 __ mov(ip, Operand(value)); |
| 978 frame_->EmitPush(ip); |
| 979 frame_->EmitPush(r0); |
| 980 } |
| 981 GenericBinaryOperation(op, mode); |
| 982 break; |
| 983 } |
| 984 |
| 985 exit.Bind(); |
| 986 } |
| 987 |
| 988 |
| 989 void CodeGenerator::Comparison(Condition cc, bool strict) { |
| 990 VirtualFrame::SpilledScope spilled_scope(this); |
| 991 // sp[0] : y |
| 992 // sp[1] : x |
| 993 // result : cc register |
| 994 |
| 995 // Strict only makes sense for equality comparisons. |
| 996 ASSERT(!strict || cc == eq); |
| 997 |
| 998 JumpTarget exit(this); |
| 999 JumpTarget smi(this); |
| 1000 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order. |
| 1001 if (cc == gt || cc == le) { |
| 1002 cc = ReverseCondition(cc); |
| 1003 frame_->EmitPop(r1); |
| 1004 frame_->EmitPop(r0); |
| 1005 } else { |
| 1006 frame_->EmitPop(r0); |
| 1007 frame_->EmitPop(r1); |
| 1008 } |
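| // Both operands are smis exactly when the OR of their tag bits is clear |
| // (kSmiTag is 0). |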
| 1009 __ orr(r2, r0, Operand(r1)); |
| 1010 __ tst(r2, Operand(kSmiTagMask)); |
| 1011 smi.Branch(eq); |
| 1012 |
| 1013 // Perform non-smi comparison by runtime call. |
| 1014 frame_->EmitPush(r1); |
| 1015 |
| 1016 // Figure out which native to call and setup the arguments. |
| 1017 Builtins::JavaScript native; |
| 1018 int arg_count = 1; |
| 1019 if (cc == eq) { |
| 1020 native = strict ? Builtins::STRICT_EQUALS : Builtins::EQUALS; |
| 1021 } else { |
| 1022 native = Builtins::COMPARE; |
| 1023 int ncr; // NaN compare result |
| 1024 if (cc == lt || cc == le) { |
| 1025 ncr = GREATER; |
| 1026 } else { |
| 1027 ASSERT(cc == gt || cc == ge); // remaining cases |
| 1028 ncr = LESS; |
| 1029 } |
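| // When either operand is NaN the COMPARE builtin returns ncr, which is |
| // chosen so that the final condition test fails and the comparison |
| // yields false. |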
| 1030 frame_->EmitPush(r0); |
| 1031 arg_count++; |
| 1032 __ mov(r0, Operand(Smi::FromInt(ncr))); |
| 1033 } |
| 1034 |
| 1035 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
| 1036 // tagged as a small integer. |
| 1037 frame_->EmitPush(r0); |
| 1038 Result arg_count_register = allocator_->Allocate(r0); |
| 1039 ASSERT(arg_count_register.is_valid()); |
| 1040 __ mov(arg_count_register.reg(), Operand(arg_count)); |
| 1041 Result result = frame_->InvokeBuiltin(native, |
| 1042 CALL_JS, |
| 1043 &arg_count_register, |
| 1044 arg_count + 1); |
| 1045 __ cmp(result.reg(), Operand(0)); |
| 1046 result.Unuse(); |
| 1047 exit.Jump(); |
| 1048 |
| 1049 // test smi equality by pointer comparison. |
| 1050 smi.Bind(); |
| 1051 __ cmp(r1, Operand(r0)); |
| 1052 |
| 1053 exit.Bind(); |
| 1054 cc_reg_ = cc; |
| 1055 } |
| 1056 |
| 1057 |
| 1058 class CallFunctionStub: public CodeStub { |
| 1059 public: |
| 1060 explicit CallFunctionStub(int argc) : argc_(argc) {} |
| 1061 |
| 1062 void Generate(MacroAssembler* masm); |
| 1063 |
| 1064 private: |
| 1065 int argc_; |
| 1066 |
| 1067 #if defined(DEBUG) |
| 1068 void Print() { PrintF("CallFunctionStub (argc %d)\n", argc_); } |
| 1069 #endif // defined(DEBUG) |
| 1070 |
| 1071 Major MajorKey() { return CallFunction; } |
| 1072 int MinorKey() { return argc_; } |
| 1073 }; |
| 1074 |
| 1075 |
| 1076 // Call the function on the stack with the given arguments. |
| 1077 void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args, |
| 1078 int position) { |
| 1079 VirtualFrame::SpilledScope spilled_scope(this); |
| 1080 // Push the arguments ("left-to-right") on the stack. |
| 1081 int arg_count = args->length(); |
| 1082 for (int i = 0; i < arg_count; i++) { |
| 1083 LoadAndSpill(args->at(i)); |
| 1084 } |
| 1085 |
| 1086 // Record the position for debugging purposes. |
| 1087 CodeForSourcePosition(position); |
| 1088 |
| 1089 // Use the shared code stub to call the function. |
| 1090 CallFunctionStub call_function(arg_count); |
| 1091 frame_->CallStub(&call_function, arg_count + 1); |
| 1092 |
| 1093 // Restore context and pop function from the stack. |
| 1094 __ ldr(cp, frame_->Context()); |
| 1095 frame_->Drop(); // discard the TOS |
| 1096 } |
| 1097 |
| 1098 |
| 1099 void CodeGenerator::Branch(bool if_true, JumpTarget* target) { |
| 1100 VirtualFrame::SpilledScope spilled_scope(this); |
| 1101 ASSERT(has_cc()); |
| 1102 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_); |
| 1103 target->Branch(cc); |
| 1104 cc_reg_ = al; |
| 1105 } |
| 1106 |
| 1107 |
| 1108 void CodeGenerator::CheckStack() { |
| 1109 VirtualFrame::SpilledScope spilled_scope(this); |
| 1110 if (FLAG_check_stack) { |
| 1111 Comment cmnt(masm_, "[ check stack"); |
| 1112 StackCheckStub stub; |
| 1113 frame_->CallStub(&stub, 0); |
| 1114 } |
| 1115 } |
| 1116 |
| 1117 |
| 1118 void CodeGenerator::VisitAndSpill(Statement* statement) { |
| 1119 ASSERT(in_spilled_code()); |
| 1120 set_in_spilled_code(false); |
| 1121 Visit(statement); |
| 1122 if (frame_ != NULL) { |
| 1123 frame_->SpillAll(); |
| 1124 } |
| 1125 set_in_spilled_code(true); |
| 1126 } |
| 1127 |
| 1128 |
| 1129 void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) { |
| 1130 ASSERT(in_spilled_code()); |
| 1131 set_in_spilled_code(false); |
| 1132 VisitStatements(statements); |
| 1133 if (frame_ != NULL) { |
| 1134 frame_->SpillAll(); |
| 1135 } |
| 1136 set_in_spilled_code(true); |
| 1137 } |
| 1138 |
| 1139 |
| 1140 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { |
| 1141 #ifdef DEBUG |
| 1142 int original_height = frame_->height(); |
| 1143 #endif |
| 1144 VirtualFrame::SpilledScope spilled_scope(this); |
| 1145 for (int i = 0; frame_ != NULL && i < statements->length(); i++) { |
| 1146 VisitAndSpill(statements->at(i)); |
| 1147 } |
| 1148 ASSERT(!has_valid_frame() || frame_->height() == original_height); |
| 1149 } |
| 1150 |
| 1151 |
| 1152 void CodeGenerator::VisitBlock(Block* node) { |
| 1153 #ifdef DEBUG |
| 1154 int original_height = frame_->height(); |
| 1155 #endif |
| 1156 VirtualFrame::SpilledScope spilled_scope(this); |
| 1157 Comment cmnt(masm_, "[ Block"); |
| 1158 CodeForStatementPosition(node); |
| 1159 node->break_target()->Initialize(this); |
| 1160 VisitStatementsAndSpill(node->statements()); |
| 1161 if (node->break_target()->is_linked()) { |
| 1162 node->break_target()->Bind(); |
| 1163 } |
| 1164 node->break_target()->Unuse(); |
| 1165 ASSERT(!has_valid_frame() || frame_->height() == original_height); |
| 1166 } |
| 1167 |
| 1168 |
| 1169 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
| 1170 VirtualFrame::SpilledScope spilled_scope(this); |
| 1171 __ mov(r0, Operand(pairs)); |
| 1172 frame_->EmitPush(r0); |
| 1173 frame_->EmitPush(cp); |
| 1174 __ mov(r0, Operand(Smi::FromInt(is_eval() ? 1 : 0))); |
| 1175 frame_->EmitPush(r0); |
| 1176 frame_->CallRuntime(Runtime::kDeclareGlobals, 3); |
| 1177 // The result is discarded. |
| 1178 } |
| 1179 |
| 1180 |
| 1181 void CodeGenerator::VisitDeclaration(Declaration* node) { |
| 1182 #ifdef DEBUG |
| 1183 int original_height = frame_->height(); |
| 1184 #endif |
| 1185 VirtualFrame::SpilledScope spilled_scope(this); |
| 1186 Comment cmnt(masm_, "[ Declaration"); |
| 1187 CodeForStatementPosition(node); |
| 1188 Variable* var = node->proxy()->var(); |
| 1189 ASSERT(var != NULL); // must have been resolved |
| 1190 Slot* slot = var->slot(); |
| 1191 |
| 1192 // If it was not possible to allocate the variable at compile time, |
| 1193 // we need to "declare" it at runtime to make sure it actually |
| 1194 // exists in the local context. |
| 1195 if (slot != NULL && slot->type() == Slot::LOOKUP) { |
| 1196 // Variables with a "LOOKUP" slot were introduced as non-locals |
| 1197 // during variable resolution and must have mode DYNAMIC. |
| 1198 ASSERT(var->is_dynamic()); |
| 1199 // For now, just do a runtime call. |
| 1200 frame_->EmitPush(cp); |
| 1201 __ mov(r0, Operand(var->name())); |
| 1202 frame_->EmitPush(r0); |
| 1203 // Declaration nodes are always declared in only two modes. |
| 1204 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); |
| 1205 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; |
| 1206 __ mov(r0, Operand(Smi::FromInt(attr))); |
| 1207 frame_->EmitPush(r0); |
| 1208 // Push initial value, if any. |
| 1209 // Note: For variables we must not push an initial value (such as |
| 1210 // 'undefined') because we may have a (legal) redeclaration and we |
| 1211 // must not destroy the current value. |
| 1212 if (node->mode() == Variable::CONST) { |
| 1213 __ mov(r0, Operand(Factory::the_hole_value())); |
| 1214 frame_->EmitPush(r0); |
| 1215 } else if (node->fun() != NULL) { |
| 1216 LoadAndSpill(node->fun()); |
| 1217 } else { |
| 1218 __ mov(r0, Operand(0)); // no initial value! |
| 1219 frame_->EmitPush(r0); |
| 1220 } |
| 1221 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); |
| 1222 // Ignore the return value (declarations are statements). |
| 1223 ASSERT(frame_->height() == original_height); |
| 1224 return; |
| 1225 } |
| 1226 |
| 1227 ASSERT(!var->is_global()); |
| 1228 |
| 1229 // If we have a function or a constant, we need to initialize the variable. |
| 1230 Expression* val = NULL; |
| 1231 if (node->mode() == Variable::CONST) { |
| 1232 val = new Literal(Factory::the_hole_value()); |
| 1233 } else { |
| 1234 val = node->fun(); // NULL if we don't have a function |
| 1235 } |
| 1236 |
| 1237 if (val != NULL) { |
| 1238 { |
| 1239 // Set initial value. |
| 1240 Reference target(this, node->proxy()); |
| 1241 LoadAndSpill(val); |
| 1242 target.SetValue(NOT_CONST_INIT); |
| 1243 // The reference is removed from the stack (preserving TOS) when |
| 1244 // it goes out of scope. |
| 1245 } |
| 1246 // Get rid of the assigned value (declarations are statements). |
| 1247 frame_->Drop(); |
| 1248 } |
| 1249 ASSERT(frame_->height() == original_height); |
| 1250 } |
| 1251 |
| 1252 |
| 1253 void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) { |
| 1254 #ifdef DEBUG |
| 1255 int original_height = frame_->height(); |
| 1256 #endif |
| 1257 VirtualFrame::SpilledScope spilled_scope(this); |
| 1258 Comment cmnt(masm_, "[ ExpressionStatement"); |
| 1259 CodeForStatementPosition(node); |
| 1260 Expression* expression = node->expression(); |
| 1261 expression->MarkAsStatement(); |
| 1262 LoadAndSpill(expression); |
| 1263 frame_->Drop(); |
| 1264 ASSERT(frame_->height() == original_height); |
| 1265 } |
| 1266 |
| 1267 |
| 1268 void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) { |
| 1269 #ifdef DEBUG |
| 1270 int original_height = frame_->height(); |
| 1271 #endif |
| 1272 VirtualFrame::SpilledScope spilled_scope(this); |
| 1273 Comment cmnt(masm_, "// EmptyStatement"); |
| 1274 CodeForStatementPosition(node); |
| 1275 // nothing to do |
| 1276 ASSERT(frame_->height() == original_height); |
| 1277 } |
| 1278 |
| 1279 |
| 1280 void CodeGenerator::VisitIfStatement(IfStatement* node) { |
| 1281 #ifdef DEBUG |
| 1282 int original_height = frame_->height(); |
| 1283 #endif |
| 1284 VirtualFrame::SpilledScope spilled_scope(this); |
| 1285 Comment cmnt(masm_, "[ IfStatement"); |
| 1286 // Generate different code depending on which parts of the if statement |
| 1287 // are present or not. |
| 1288 bool has_then_stm = node->HasThenStatement(); |
| 1289 bool has_else_stm = node->HasElseStatement(); |
| 1290 |
| 1291 CodeForStatementPosition(node); |
| 1292 |
| 1293 JumpTarget exit(this); |
| 1294 if (has_then_stm && has_else_stm) { |
| 1295 Comment cmnt(masm_, "[ IfThenElse"); |
| 1296 JumpTarget then(this); |
| 1297 JumpTarget else_(this); |
| 1298 // if (cond) |
| 1299 LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF, |
| 1300 &then, &else_, true); |
| 1301 if (frame_ != NULL) { |
| 1302 Branch(false, &else_); |
| 1303 } |
| 1304 // then |
| 1305 if (frame_ != NULL || then.is_linked()) { |
| 1306 then.Bind(); |
| 1307 VisitAndSpill(node->then_statement()); |
| 1308 } |
| 1309 if (frame_ != NULL) { |
| 1310 exit.Jump(); |
| 1311 } |
| 1312 // else |
| 1313 if (else_.is_linked()) { |
| 1314 else_.Bind(); |
| 1315 VisitAndSpill(node->else_statement()); |
| 1316 } |
| 1317 |
| 1318 } else if (has_then_stm) { |
| 1319 Comment cmnt(masm_, "[ IfThen"); |
| 1320 ASSERT(!has_else_stm); |
| 1321 JumpTarget then(this); |
| 1322 // if (cond) |
| 1323 LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF, |
| 1324 &then, &exit, true); |
| 1325 if (frame_ != NULL) { |
| 1326 Branch(false, &exit); |
| 1327 } |
| 1328 // then |
| 1329 if (frame_ != NULL || then.is_linked()) { |
| 1330 then.Bind(); |
| 1331 VisitAndSpill(node->then_statement()); |
| 1332 } |
| 1333 |
| 1334 } else if (has_else_stm) { |
| 1335 Comment cmnt(masm_, "[ IfElse"); |
| 1336 ASSERT(!has_then_stm); |
| 1337 JumpTarget else_(this); |
| 1338 // if (!cond) |
| 1339 LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF, |
| 1340 &exit, &else_, true); |
| 1341 if (frame_ != NULL) { |
| 1342 Branch(true, &exit); |
| 1343 } |
| 1344 // else |
| 1345 if (frame_ != NULL || else_.is_linked()) { |
| 1346 else_.Bind(); |
| 1347 VisitAndSpill(node->else_statement()); |
| 1348 } |
| 1349 |
| 1350 } else { |
| 1351 Comment cmnt(masm_, "[ If"); |
| 1352 ASSERT(!has_then_stm && !has_else_stm); |
| 1353 // if (cond) |
| 1354 LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF, |
| 1355 &exit, &exit, false); |
| 1356 if (frame_ != NULL) { |
| 1357 if (has_cc()) { |
| 1358 cc_reg_ = al; |
| 1359 } else { |
| 1360 frame_->Drop(); |
| 1361 } |
| 1362 } |
| 1363 } |
| 1364 |
| 1365 // end |
| 1366 if (exit.is_linked()) { |
| 1367 exit.Bind(); |
| 1368 } |
| 1369 ASSERT(!has_valid_frame() || frame_->height() == original_height); |
| 1370 } |
| 1371 |
| 1372 |
| 1373 void CodeGenerator::VisitContinueStatement(ContinueStatement* node) { |
| 1374 VirtualFrame::SpilledScope spilled_scope(this); |
| 1375 Comment cmnt(masm_, "[ ContinueStatement"); |
| 1376 CodeForStatementPosition(node); |
| 1377 node->target()->continue_target()->Jump(); |
| 1378 } |
| 1379 |
| 1380 |
| 1381 void CodeGenerator::VisitBreakStatement(BreakStatement* node) { |
| 1382 VirtualFrame::SpilledScope spilled_scope(this); |
| 1383 Comment cmnt(masm_, "[ BreakStatement"); |
| 1384 CodeForStatementPosition(node); |
| 1385 node->target()->break_target()->Jump(); |
| 1386 } |
| 1387 |
| 1388 |
| 1389 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { |
| 1390 VirtualFrame::SpilledScope spilled_scope(this); |
| 1391 Comment cmnt(masm_, "[ ReturnStatement"); |
| 1392 |
| 1393 if (function_return_is_shadowed_) { |
| 1394 CodeForStatementPosition(node); |
| 1395 LoadAndSpill(node->expression()); |
| 1396 frame_->EmitPop(r0); |
| 1397 function_return_.Jump(); |
| 1398 } else { |
| 1399 // Load the returned value. |
| 1400 CodeForStatementPosition(node); |
| 1401 LoadAndSpill(node->expression()); |
| 1402 |
| 1403 // Pop the result from the frame and prepare the frame for |
| 1404 // returning thus making it easier to merge. |
| 1405 frame_->EmitPop(r0); |
| 1406 frame_->PrepareForReturn(); |
| 1407 |
| 1408 function_return_.Jump(); |
| 1409 } |
| 1410 } |
| 1411 |
| 1412 |
| 1413 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) { |
| 1414 #ifdef DEBUG |
| 1415 int original_height = frame_->height(); |
| 1416 #endif |
| 1417 VirtualFrame::SpilledScope spilled_scope(this); |
| 1418 Comment cmnt(masm_, "[ WithEnterStatement"); |
| 1419 CodeForStatementPosition(node); |
| 1420 LoadAndSpill(node->expression()); |
| 1421 if (node->is_catch_block()) { |
| 1422 frame_->CallRuntime(Runtime::kPushCatchContext, 1); |
| 1423 } else { |
| 1424 frame_->CallRuntime(Runtime::kPushContext, 1); |
| 1425 } |
| 1426 #ifdef DEBUG |
| 1427 JumpTarget verified_true(this); |
| 1428 __ cmp(r0, Operand(cp)); |
| 1429 verified_true.Branch(eq); |
| 1430 __ stop("PushContext: r0 is expected to be the same as cp"); |
| 1431 verified_true.Bind(); |
| 1432 #endif |
| 1433 // Update context local. |
| 1434 __ str(cp, frame_->Context()); |
| 1435 ASSERT(frame_->height() == original_height); |
| 1436 } |
| 1437 |
| 1438 |
| 1439 void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) { |
| 1440 #ifdef DEBUG |
| 1441 int original_height = frame_->height(); |
| 1442 #endif |
| 1443 VirtualFrame::SpilledScope spilled_scope(this); |
| 1444 Comment cmnt(masm_, "[ WithExitStatement"); |
| 1445 CodeForStatementPosition(node); |
| 1446 // Pop context. |
| 1447 __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX)); |
| 1448 // Update context local. |
| 1449 __ str(cp, frame_->Context()); |
| 1450 ASSERT(frame_->height() == original_height); |
| 1451 } |
| 1452 |
| 1453 |
| 1454 int CodeGenerator::FastCaseSwitchMaxOverheadFactor() { |
| 1455 return kFastSwitchMaxOverheadFactor; |
| 1456 } |
| 1457 |
| 1458 int CodeGenerator::FastCaseSwitchMinCaseCount() { |
| 1459 return kFastSwitchMinCaseCount; |
| 1460 } |
| 1461 |
| 1462 |
| 1463 void CodeGenerator::GenerateFastCaseSwitchJumpTable( |
| 1464 SwitchStatement* node, |
| 1465 int min_index, |
| 1466 int range, |
| 1467 Label* default_label, |
| 1468 Vector<Label*> case_targets, |
| 1469 Vector<Label> case_labels) { |
| 1470 VirtualFrame::SpilledScope spilled_scope(this); |
| 1471 JumpTarget setup_default(this); |
| 1472 JumpTarget is_smi(this); |
| 1473 |
| 1474 // A non-null default label pointer indicates a default case among |
| 1475 // the case labels. Otherwise we use the break target as a |
| 1476 // "default" for failure to hit the jump table. |
| 1477 JumpTarget* default_target = |
| 1478 (default_label == NULL) ? node->break_target() : &setup_default; |
| 1479 |
| 1480 ASSERT(kSmiTag == 0 && kSmiTagSize <= 2); |
| 1481 frame_->EmitPop(r0); |
| 1482 |
| 1483 // Test for a Smi value in a HeapNumber. |
| 1484 __ tst(r0, Operand(kSmiTagMask)); |
| 1485 is_smi.Branch(eq); |
| 1486 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 1487 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |
| 1488 __ cmp(r1, Operand(HEAP_NUMBER_TYPE)); |
| 1489 default_target->Branch(ne); |
| 1490 frame_->EmitPush(r0); |
| 1491 frame_->CallRuntime(Runtime::kNumberToSmi, 1); |
| 1492 is_smi.Bind(); |
| 1493 |
| 1494 if (min_index != 0) { |
| 1495 // Small positive numbers can be immediate operands. |
| 1496 if (min_index < 0) { |
| 1497 // If min_index is Smi::kMinValue, -min_index is not a Smi. |
| 1498 if (Smi::IsValid(-min_index)) { |
| 1499 __ add(r0, r0, Operand(Smi::FromInt(-min_index))); |
| 1500 } else { |
| 1501 __ add(r0, r0, Operand(Smi::FromInt(-min_index - 1))); |
| 1502 __ add(r0, r0, Operand(Smi::FromInt(1))); |
| 1503 } |
| 1504 } else { |
| 1505 __ sub(r0, r0, Operand(Smi::FromInt(min_index))); |
| 1506 } |
| 1507 } |
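| // A set sign bit (index below the table range) or a set smi tag bit |
| // (not a smi) routes control to the default target. |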
| 1508 __ tst(r0, Operand(0x80000000 | kSmiTagMask)); |
| 1509 default_target->Branch(ne); |
| 1510 __ cmp(r0, Operand(Smi::FromInt(range))); |
| 1511 default_target->Branch(ge); |
| 1512 VirtualFrame* start_frame = new VirtualFrame(frame_); |
| 1513 __ SmiJumpTable(r0, case_targets); |
| 1514 |
| 1515 GenerateFastCaseSwitchCases(node, case_labels, start_frame); |
| 1516 |
| 1517 // If there was a default case among the case labels, we need to |
| 1518 // emit code to jump to it from the default target used for failure |
| 1519 // to hit the jump table. |
| 1520 if (default_label != NULL) { |
| 1521 if (has_valid_frame()) { |
| 1522 node->break_target()->Jump(); |
| 1523 } |
| 1524 setup_default.Bind(); |
| 1525 frame_->MergeTo(start_frame); |
| 1526 __ b(default_label); |
| 1527 DeleteFrame(); |
| 1528 } |
| 1529 if (node->break_target()->is_linked()) { |
| 1530 node->break_target()->Bind(); |
| 1531 } |
| 1532 |
| 1533 delete start_frame; |
| 1534 } |
| 1535 |
| 1536 |
| 1537 void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) { |
| 1538 #ifdef DEBUG |
| 1539 int original_height = frame_->height(); |
| 1540 #endif |
| 1541 VirtualFrame::SpilledScope spilled_scope(this); |
| 1542 Comment cmnt(masm_, "[ SwitchStatement"); |
| 1543 CodeForStatementPosition(node); |
| 1544 node->break_target()->Initialize(this); |
| 1545 |
| 1546 LoadAndSpill(node->tag()); |
| 1547 if (TryGenerateFastCaseSwitchStatement(node)) { |
| 1548 ASSERT(!has_valid_frame() || frame_->height() == original_height); |
| 1549 return; |
| 1550 } |
| 1551 |
| 1552 JumpTarget next_test(this); |
| 1553 JumpTarget fall_through(this); |
| 1554 JumpTarget default_entry(this); |
| 1555 JumpTarget default_exit(this, JumpTarget::BIDIRECTIONAL); |
| 1556 ZoneList<CaseClause*>* cases = node->cases(); |
| 1557 int length = cases->length(); |
| 1558 CaseClause* default_clause = NULL; |
| 1559 |
| 1560 for (int i = 0; i < length; i++) { |
| 1561 CaseClause* clause = cases->at(i); |
| 1562 if (clause->is_default()) { |
| 1563 // Remember the default clause and compile it at the end. |
| 1564 default_clause = clause; |
| 1565 continue; |
| 1566 } |
| 1567 |
| 1568 Comment cmnt(masm_, "[ Case clause"); |
| 1569 // Compile the test. |
| 1570 next_test.Bind(); |
| 1571 next_test.Unuse(); |
| 1572 // Duplicate TOS. |
| 1573 __ ldr(r0, frame_->Top()); |
| 1574 frame_->EmitPush(r0); |
| 1575 LoadAndSpill(clause->label()); |
| 1576 Comparison(eq, true); |
| 1577 Branch(false, &next_test); |
| 1578 |
| 1579 // Before entering the body from the test, remove the switch value from |
| 1580 // the stack. |
| 1581 frame_->Drop(); |
| 1582 |
| 1583 // Label the body so that fall through is enabled. |
| 1584 if (i > 0 && cases->at(i - 1)->is_default()) { |
| 1585 default_exit.Bind(); |
| 1586 } else { |
| 1587 fall_through.Bind(); |
| 1588 fall_through.Unuse(); |
| 1589 } |
| 1590 VisitStatementsAndSpill(clause->statements()); |
| 1591 |
| 1592 // If control flow can fall through from the body, jump to the next body |
| 1593 // or the end of the statement. |
| 1594 if (frame_ != NULL) { |
| 1595 if (i < length - 1 && cases->at(i + 1)->is_default()) { |
| 1596 default_entry.Jump(); |
| 1597 } else { |
| 1598 fall_through.Jump(); |
| 1599 } |
| 1600 } |
| 1601 } |
| 1602 |
| 1603 // The final "test" removes the switch value. |
| 1604 next_test.Bind(); |
| 1605 frame_->Drop(); |
| 1606 |
| 1607 // If there is a default clause, compile it. |
| 1608 if (default_clause != NULL) { |
| 1609 Comment cmnt(masm_, "[ Default clause"); |
| 1610 default_entry.Bind(); |
| 1611 VisitStatementsAndSpill(default_clause->statements()); |
| 1612 // If control flow can fall out of the default and there is a case after |
| 1613 // it, jump to that case's body.
| 1614 if (frame_ != NULL && default_exit.is_bound()) { |
| 1615 default_exit.Jump(); |
| 1616 } |
| 1617 } |
| 1618 |
| 1619 if (fall_through.is_linked()) { |
| 1620 fall_through.Bind(); |
| 1621 } |
| 1622 |
| 1623 if (node->break_target()->is_linked()) { |
| 1624 node->break_target()->Bind(); |
| 1625 } |
| 1626 node->break_target()->Unuse(); |
| 1627 ASSERT(!has_valid_frame() || frame_->height() == original_height); |
| 1628 } |
| 1629 |
| 1630 |
| 1631 void CodeGenerator::VisitLoopStatement(LoopStatement* node) { |
| 1632 #ifdef DEBUG |
| 1633 int original_height = frame_->height(); |
| 1634 #endif |
| 1635 VirtualFrame::SpilledScope spilled_scope(this); |
| 1636 Comment cmnt(masm_, "[ LoopStatement"); |
| 1637 CodeForStatementPosition(node); |
| 1638 node->break_target()->Initialize(this); |
| 1639 |
| 1640 // Simple condition analysis. ALWAYS_TRUE and ALWAYS_FALSE represent a |
| 1641 // known result for the test expression, with no side effects. |
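| // For example, 'for (;;)' and 'while (true)' yield ALWAYS_TRUE, while
| // 'do { ... } while (false)' yields ALWAYS_FALSE.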
| 1642 enum { ALWAYS_TRUE, ALWAYS_FALSE, DONT_KNOW } info = DONT_KNOW; |
| 1643 if (node->cond() == NULL) { |
| 1644 ASSERT(node->type() == LoopStatement::FOR_LOOP); |
| 1645 info = ALWAYS_TRUE; |
| 1646 } else { |
| 1647 Literal* lit = node->cond()->AsLiteral(); |
| 1648 if (lit != NULL) { |
| 1649 if (lit->IsTrue()) { |
| 1650 info = ALWAYS_TRUE; |
| 1651 } else if (lit->IsFalse()) { |
| 1652 info = ALWAYS_FALSE; |
| 1653 } |
| 1654 } |
| 1655 } |
| 1656 |
| 1657 switch (node->type()) { |
| 1658 case LoopStatement::DO_LOOP: { |
| 1659 JumpTarget body(this, JumpTarget::BIDIRECTIONAL); |
| 1660 |
| 1661 // Label the top of the loop for the backward CFG edge. If the test |
| 1662 // is always true we can use the continue target, and if the test is |
| 1663 // always false there is no need. |
| 1664 if (info == ALWAYS_TRUE) { |
| 1665 node->continue_target()->Initialize(this, JumpTarget::BIDIRECTIONAL); |
| 1666 node->continue_target()->Bind(); |
| 1667 } else if (info == ALWAYS_FALSE) { |
| 1668 node->continue_target()->Initialize(this); |
| 1669 } else { |
| 1670 ASSERT(info == DONT_KNOW); |
| 1671 node->continue_target()->Initialize(this); |
| 1672 body.Bind(); |
| 1673 } |
| 1674 |
| 1675 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 1676 VisitAndSpill(node->body()); |
| 1677 |
| 1678 // Compile the test. |
| 1679 if (info == ALWAYS_TRUE) { |
| 1680 if (has_valid_frame()) { |
| 1681 // If control can fall off the end of the body, jump back to the |
| 1682 // top. |
| 1683 node->continue_target()->Jump(); |
| 1684 } |
| 1685 } else if (info == ALWAYS_FALSE) { |
| 1686 // If we have a continue in the body, we only have to bind its jump |
| 1687 // target. |
| 1688 if (node->continue_target()->is_linked()) { |
| 1689 node->continue_target()->Bind(); |
| 1690 } |
| 1691 } else { |
| 1692 ASSERT(info == DONT_KNOW); |
| 1693 // We have to compile the test expression if it can be reached by |
| 1694 // control flow falling out of the body or via continue. |
| 1695 if (node->continue_target()->is_linked()) { |
| 1696 node->continue_target()->Bind(); |
| 1697 } |
| 1698 if (has_valid_frame()) { |
| 1699 LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF, |
| 1700 &body, node->break_target(), true); |
| 1701 if (has_valid_frame()) { |
| 1702 // An invalid frame here indicates that control did not
| 1703 // fall out of the test expression. |
| 1704 Branch(true, &body); |
| 1705 } |
| 1706 } |
| 1707 } |
| 1708 break; |
| 1709 } |
| 1710 |
| 1711 case LoopStatement::WHILE_LOOP: { |
| 1712 // If the test is never true and has no side effects there is no need |
| 1713 // to compile the test or body. |
| 1714 if (info == ALWAYS_FALSE) break; |
| 1715 |
| 1716 // Label the top of the loop with the continue target for the backward |
| 1717 // CFG edge. |
| 1718 node->continue_target()->Initialize(this, JumpTarget::BIDIRECTIONAL); |
| 1719 node->continue_target()->Bind(); |
| 1720 |
| 1721 if (info == DONT_KNOW) { |
| 1722 JumpTarget body(this); |
| 1723 LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF, |
| 1724 &body, node->break_target(), true); |
| 1725 if (has_valid_frame()) { |
| 1726 // A NULL frame indicates that control did not fall out of the |
| 1727 // test expression. |
| 1728 Branch(false, node->break_target()); |
| 1729 } |
| 1730 if (has_valid_frame() || body.is_linked()) { |
| 1731 body.Bind(); |
| 1732 } |
| 1733 } |
| 1734 |
| 1735 if (has_valid_frame()) { |
| 1736 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 1737 VisitAndSpill(node->body()); |
| 1738 |
| 1739 // If control flow can fall out of the body, jump back to the top. |
| 1740 if (has_valid_frame()) { |
| 1741 node->continue_target()->Jump(); |
| 1742 } |
| 1743 } |
| 1744 break; |
| 1745 } |
| 1746 |
| 1747 case LoopStatement::FOR_LOOP: { |
| 1748 JumpTarget loop(this, JumpTarget::BIDIRECTIONAL); |
| 1749 |
| 1750 if (node->init() != NULL) { |
| 1751 VisitAndSpill(node->init()); |
| 1752 } |
| 1753 |
| 1754 // There is no need to compile the test or body. |
| 1755 if (info == ALWAYS_FALSE) break; |
| 1756 |
| 1757 // If there is no update statement, label the top of the loop with the |
| 1758 // continue target, otherwise with the loop target. |
| 1759 if (node->next() == NULL) { |
| 1760 node->continue_target()->Initialize(this, JumpTarget::BIDIRECTIONAL); |
| 1761 node->continue_target()->Bind(); |
| 1762 } else { |
| 1763 node->continue_target()->Initialize(this); |
| 1764 loop.Bind(); |
| 1765 } |
| 1766 |
| 1767 // If the test is always true, there is no need to compile it. |
| 1768 if (info == DONT_KNOW) { |
| 1769 JumpTarget body(this); |
| 1770 LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF, |
| 1771 &body, node->break_target(), true); |
| 1772 if (has_valid_frame()) { |
| 1773 Branch(false, node->break_target()); |
| 1774 } |
| 1775 if (has_valid_frame() || body.is_linked()) { |
| 1776 body.Bind(); |
| 1777 } |
| 1778 } |
| 1779 |
| 1780 if (has_valid_frame()) { |
| 1781 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 1782 VisitAndSpill(node->body()); |
| 1783 |
| 1784 if (node->next() == NULL) { |
| 1785 // If there is no update statement and control flow can fall out |
| 1786 // of the loop, jump directly to the continue label. |
| 1787 if (has_valid_frame()) { |
| 1788 node->continue_target()->Jump(); |
| 1789 } |
| 1790 } else { |
| 1791 // If there is an update statement and control flow can reach it |
| 1792 // via falling out of the body of the loop or continuing, we |
| 1793 // compile the update statement. |
| 1794 if (node->continue_target()->is_linked()) { |
| 1795 node->continue_target()->Bind(); |
| 1796 } |
| 1797 if (has_valid_frame()) { |
| 1798 // Record the source position of the statement: although this code
| 1799 // comes after the code for the body, it belongs to the loop
| 1800 // statement and not to the body.
| 1801 CodeForStatementPosition(node); |
| 1802 VisitAndSpill(node->next()); |
| 1803 loop.Jump(); |
| 1804 } |
| 1805 } |
| 1806 } |
| 1807 break; |
| 1808 } |
| 1809 } |
| 1810 |
| 1811 if (node->break_target()->is_linked()) { |
| 1812 node->break_target()->Bind(); |
| 1813 } |
| 1814 node->continue_target()->Unuse(); |
| 1815 node->break_target()->Unuse(); |
| 1816 ASSERT(!has_valid_frame() || frame_->height() == original_height); |
| 1817 } |
| 1818 |
| 1819 |
| 1820 void CodeGenerator::VisitForInStatement(ForInStatement* node) { |
| 1821 #ifdef DEBUG |
| 1822 int original_height = frame_->height(); |
| 1823 #endif |
| 1824 ASSERT(!in_spilled_code()); |
| 1825 VirtualFrame::SpilledScope spilled_scope(this); |
| 1826 Comment cmnt(masm_, "[ ForInStatement"); |
| 1827 CodeForStatementPosition(node); |
| 1828 |
| 1829 JumpTarget primitive(this); |
| 1830 JumpTarget jsobject(this); |
| 1831 JumpTarget fixed_array(this); |
| 1832 JumpTarget entry(this, JumpTarget::BIDIRECTIONAL); |
| 1833 JumpTarget end_del_check(this); |
| 1834 JumpTarget exit(this); |
| 1835 |
| 1836 // Get the object to enumerate over (converted to JSObject). |
| 1837 LoadAndSpill(node->enumerable()); |
| 1838 |
| 1839 // Both SpiderMonkey and kjs ignore null and undefined in contrast |
| 1840 // to the specification; section 12.6.4 mandates a call to ToObject.
| 1841 frame_->EmitPop(r0); |
| 1842 __ cmp(r0, Operand(Factory::undefined_value())); |
| 1843 exit.Branch(eq); |
| 1844 __ cmp(r0, Operand(Factory::null_value())); |
| 1845 exit.Branch(eq); |
| 1846 |
| 1847 // Stack layout in body: |
| 1848 // [iteration counter (Smi)] |
| 1849 // [length of array] |
| 1850 // [FixedArray] |
| 1851 // [Map or 0] |
| 1852 // [Object] |
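| // The map (or 0) is kept on the stack so each iteration can cheaply check
| // whether the enumerable's map is still the same; if it is not, the key is
| // filtered through the FILTER_KEY builtin before being used.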
| 1853 |
| 1854 // Check if enumerable is already a JSObject |
| 1855 __ tst(r0, Operand(kSmiTagMask)); |
| 1856 primitive.Branch(eq); |
| 1857 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 1858 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |
| 1859 __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE)); |
| 1860 jsobject.Branch(hs); |
| 1861 |
| 1862 primitive.Bind(); |
| 1863 frame_->EmitPush(r0); |
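| // The builtin call expects its argument count in r0; allocating r0 through
| // the register allocator keeps the virtual frame's bookkeeping consistent.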
| 1864 Result arg_count = allocator_->Allocate(r0); |
| 1865 ASSERT(arg_count.is_valid()); |
| 1866 __ mov(arg_count.reg(), Operand(0)); |
| 1867 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, &arg_count, 1); |
| 1868 |
| 1869 jsobject.Bind(); |
| 1870 // Get the set of properties (as a FixedArray or Map). |
| 1871 frame_->EmitPush(r0); // duplicate the object being enumerated |
| 1872 frame_->EmitPush(r0); |
| 1873 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
| 1874 |
| 1875 // If we got a Map, we can do a fast modification check. |
| 1876 // Otherwise, we got a FixedArray, and we have to do a slow check. |
| 1877 __ mov(r2, Operand(r0)); |
| 1878 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 1879 __ cmp(r1, Operand(Factory::meta_map())); |
| 1880 fixed_array.Branch(ne); |
| 1881 |
| 1882 // Get enum cache |
| 1883 __ mov(r1, Operand(r0)); |
| 1884 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset)); |
| 1885 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset)); |
| 1886 __ ldr(r2, |
| 1887 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 1888 |
| 1889 frame_->EmitPush(r0); // map |
| 1890 frame_->EmitPush(r2); // enum cache bridge cache |
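| // The cache length is stored as an untagged integer, so shift it by
| // kSmiTagSize to push it as a smi matching the smi loop index below.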
| 1891 __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset)); |
| 1892 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
| 1893 frame_->EmitPush(r0); |
| 1894 __ mov(r0, Operand(Smi::FromInt(0))); |
| 1895 frame_->EmitPush(r0); |
| 1896 entry.Jump(); |
| 1897 |
| 1898 fixed_array.Bind(); |
| 1899 __ mov(r1, Operand(Smi::FromInt(0))); |
| 1900 frame_->EmitPush(r1); // insert 0 in place of Map |
| 1901 frame_->EmitPush(r0); |
| 1902 |
| 1903 // Push the length of the array and the initial index onto the stack. |
| 1904 __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset)); |
| 1905 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
| 1906 frame_->EmitPush(r0); |
| 1907 __ mov(r0, Operand(Smi::FromInt(0))); // init index |
| 1908 frame_->EmitPush(r0); |
| 1909 |
| 1910 // Condition. |
| 1911 entry.Bind(); |
| 1912 // sp[0] : index |
| 1913 // sp[1] : array/enum cache length |
| 1914 // sp[2] : array or enum cache |
| 1915 // sp[3] : 0 or map |
| 1916 // sp[4] : enumerable |
| 1917 // Grab the current frame's height for the break and continue |
| 1918 // targets only after all the state is pushed on the frame. |
| 1919 node->break_target()->Initialize(this); |
| 1920 node->continue_target()->Initialize(this); |
| 1921 |
| 1922 __ ldr(r0, frame_->ElementAt(0)); // load the current count |
| 1923 __ ldr(r1, frame_->ElementAt(1)); // load the length |
| 1924 __ cmp(r0, Operand(r1)); // compare to the array length |
| 1925 node->break_target()->Branch(hs); |
| 1926 |
| 1927 __ ldr(r0, frame_->ElementAt(0)); |
| 1928 |
| 1929 // Get the i'th entry of the array. |
| 1930 __ ldr(r2, frame_->ElementAt(2)); |
| 1931 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 1932 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 1933 |
| 1934 // Get Map or 0. |
| 1935 __ ldr(r2, frame_->ElementAt(3)); |
| 1936 // Check if this (still) matches the map of the enumerable. |
| 1937 // If not, we have to filter the key. |
| 1938 __ ldr(r1, frame_->ElementAt(4)); |
| 1939 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 1940 __ cmp(r1, Operand(r2)); |
| 1941 end_del_check.Branch(eq); |
| 1942 |
| 1943 // Convert the entry to a string (or null if it isn't a property anymore). |
| 1944 __ ldr(r0, frame_->ElementAt(4)); // push enumerable |
| 1945 frame_->EmitPush(r0); |
| 1946 frame_->EmitPush(r3); // push entry |
| 1947 Result arg_count_register = allocator_->Allocate(r0); |
| 1948 ASSERT(arg_count_register.is_valid()); |
| 1949 __ mov(arg_count_register.reg(), Operand(1)); |
| 1950 Result result = frame_->InvokeBuiltin(Builtins::FILTER_KEY, |
| 1951 CALL_JS, |
| 1952 &arg_count_register, |
| 1953 2); |
| 1954 __ mov(r3, Operand(result.reg())); |
| 1955 result.Unuse(); |
| 1956 |
| 1957 // If the property has been removed while iterating, we just skip it. |
| 1958 __ cmp(r3, Operand(Factory::null_value())); |
| 1959 node->continue_target()->Branch(eq); |
| 1960 |
| 1961 end_del_check.Bind(); |
| 1962 // Store the entry in the 'each' expression and take another spin in the |
| 1963 // loop. r3: i'th entry of the enum cache (or string thereof)
| 1964 frame_->EmitPush(r3); // push entry |
| 1965 { Reference each(this, node->each()); |
| 1966 if (!each.is_illegal()) { |
| 1967 if (each.size() > 0) { |
| 1968 __ ldr(r0, frame_->ElementAt(each.size())); |
| 1969 frame_->EmitPush(r0); |
| 1970 } |
| 1971 // If the reference was to a slot we rely on the convenient property |
| 1972 // that it doesn't matter whether a value (eg, r3 pushed above) is |
| 1973 // right on top of or right underneath a zero-sized reference. |
| 1974 each.SetValue(NOT_CONST_INIT); |
| 1975 if (each.size() > 0) { |
| 1976 // It's safe to pop the value lying on top of the reference before |
| 1977 // unloading the reference itself (which preserves the top of stack, |
| 1978 // ie, now the topmost value of the non-zero sized reference), since |
| 1979 // we will discard the top of stack after unloading the reference |
| 1980 // anyway. |
| 1981 frame_->EmitPop(r0); |
| 1982 } |
| 1983 } |
| 1984 } |
| 1985 // Discard the i'th entry pushed above or else the remainder of the |
| 1986 // reference, whichever is currently on top of the stack. |
| 1987 frame_->Drop(); |
| 1988 |
| 1989 // Body. |
| 1990 CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 1991 VisitAndSpill(node->body()); |
| 1992 |
| 1993 // Next. Reestablish a spilled frame in case we are coming here via |
| 1994 // a continue in the body. |
| 1995 node->continue_target()->Bind(); |
| 1996 frame_->SpillAll(); |
| 1997 frame_->EmitPop(r0); |
| 1998 __ add(r0, r0, Operand(Smi::FromInt(1))); |
| 1999 frame_->EmitPush(r0); |
| 2000 entry.Jump(); |
| 2001 |
| 2002 // Cleanup. No need to spill because VirtualFrame::Drop is safe for |
| 2003 // any frame. |
| 2004 node->break_target()->Bind(); |
| 2005 frame_->Drop(5); |
| 2006 |
| 2007 // Exit. |
| 2008 exit.Bind(); |
| 2009 node->continue_target()->Unuse(); |
| 2010 node->break_target()->Unuse(); |
| 2011 ASSERT(frame_->height() == original_height); |
| 2012 } |
| 2013 |
| 2014 |
| 2015 void CodeGenerator::VisitTryCatch(TryCatch* node) { |
| 2016 #ifdef DEBUG |
| 2017 int original_height = frame_->height(); |
| 2018 #endif |
| 2019 VirtualFrame::SpilledScope spilled_scope(this); |
| 2020 Comment cmnt(masm_, "[ TryCatch"); |
| 2021 CodeForStatementPosition(node); |
| 2022 |
| 2023 JumpTarget try_block(this); |
| 2024 JumpTarget exit(this); |
| 2025 |
| 2026 try_block.Call(); |
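| // The Call branches to the try code bound further down. The instructions
| // that follow here are only reached when the unwinder delivers a thrown
| // exception (left in r0) to the handler installed by PushTryHandler in
| // the try block.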
| 2027 // --- Catch block --- |
| 2028 frame_->EmitPush(r0); |
| 2029 |
| 2030 // Store the caught exception in the catch variable. |
| 2031 { Reference ref(this, node->catch_var()); |
| 2032 ASSERT(ref.is_slot()); |
| 2033 // Here we make use of the convenient property that it doesn't matter |
| 2034 // whether a value is immediately on top of or underneath a zero-sized |
| 2035 // reference. |
| 2036 ref.SetValue(NOT_CONST_INIT); |
| 2037 } |
| 2038 |
| 2039 // Remove the exception from the stack. |
| 2040 frame_->Drop(); |
| 2041 |
| 2042 VisitStatementsAndSpill(node->catch_block()->statements()); |
| 2043 if (frame_ != NULL) { |
| 2044 exit.Jump(); |
| 2045 } |
| 2046 |
| 2047 |
| 2048 // --- Try block --- |
| 2049 try_block.Bind(); |
| 2050 |
| 2051 frame_->PushTryHandler(TRY_CATCH_HANDLER); |
| 2052 int handler_height = frame_->height(); |
| 2053 |
| 2054 // Shadow the labels for all escapes from the try block, including |
| 2055 // returns. During shadowing, the original target is hidden behind its
| 2056 // ShadowTarget and operations on the original actually affect the
| 2057 // shadowing target.
| 2058 // |
| 2059 // We should probably try to unify the escaping labels and the return |
| 2060 // label. |
| 2061 int nof_escapes = node->escaping_targets()->length(); |
| 2062 List<ShadowTarget*> shadows(1 + nof_escapes); |
| 2063 |
| 2064 // Add the shadow target for the function return. |
| 2065 static const int kReturnShadowIndex = 0; |
| 2066 shadows.Add(new ShadowTarget(&function_return_)); |
| 2067 bool function_return_was_shadowed = function_return_is_shadowed_; |
| 2068 function_return_is_shadowed_ = true; |
| 2069 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_); |
| 2070 |
| 2071 // Add the remaining shadow targets. |
| 2072 for (int i = 0; i < nof_escapes; i++) { |
| 2073 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i))); |
| 2074 } |
| 2075 |
| 2076 // Generate code for the statements in the try block. |
| 2077 VisitStatementsAndSpill(node->try_block()->statements()); |
| 2078 |
| 2079 // Stop the introduced shadowing and count the number of required unlinks. |
| 2080 // After shadowing stops, the original targets are unshadowed and the
| 2081 // ShadowTargets represent the formerly shadowing targets.
| 2082 bool has_unlinks = false; |
| 2083 for (int i = 0; i < shadows.length(); i++) { |
| 2084 shadows[i]->StopShadowing(); |
| 2085 has_unlinks = has_unlinks || shadows[i]->is_linked(); |
| 2086 } |
| 2087 function_return_is_shadowed_ = function_return_was_shadowed; |
| 2088 |
| 2089 // Get an external reference to the handler address. |
| 2090 ExternalReference handler_address(Top::k_handler_address); |
| 2091 |
| 2092 // The next handler address is at kNextIndex in the stack. |
| 2093 const int kNextIndex = StackHandlerConstants::kNextOffset / kPointerSize; |
| 2094 // If we can fall off the end of the try block, unlink from try chain. |
| 2095 if (has_valid_frame()) { |
| 2096 __ ldr(r1, frame_->ElementAt(kNextIndex)); |
| 2097 __ mov(r3, Operand(handler_address)); |
| 2098 __ str(r1, MemOperand(r3)); |
| 2099 frame_->Drop(StackHandlerConstants::kSize / kPointerSize); |
| 2100 if (has_unlinks) { |
| 2101 exit.Jump(); |
| 2102 } |
| 2103 } |
| 2104 |
| 2105 // Generate unlink code for the (formerly) shadowing labels that have been |
| 2106 // jumped to. Deallocate each shadow target. |
| 2107 for (int i = 0; i < shadows.length(); i++) { |
| 2108 if (shadows[i]->is_linked()) { |
| 2109 // Unlink from the try chain.
| 2110 shadows[i]->Bind(); |
| 2111 // Because we can be jumping here (to spilled code) from unspilled |
| 2112 // code, we need to reestablish a spilled frame at this block. |
| 2113 frame_->SpillAll(); |
| 2114 |
| 2115 // Reload sp from the top handler, because some statements that we |
| 2116 // break from (eg, for...in) may have left stuff on the stack. |
| 2117 __ mov(r3, Operand(handler_address)); |
| 2118 __ ldr(sp, MemOperand(r3)); |
| 2119 // The stack pointer was restored to just below the code slot |
| 2120 // (the topmost slot) in the handler. |
| 2121 frame_->Forget(frame_->height() - handler_height + 1); |
| 2122 |
| 2123 // kNextIndex is off by one because the code slot has already |
| 2124 // been dropped. |
| 2125 __ ldr(r1, frame_->ElementAt(kNextIndex - 1)); |
| 2126 __ str(r1, MemOperand(r3)); |
| 2127 // The code slot has already been dropped from the handler. |
| 2128 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 2129 |
| 2130 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) { |
| 2131 frame_->PrepareForReturn(); |
| 2132 } |
| 2133 shadows[i]->other_target()->Jump(); |
| 2134 } |
| 2135 delete shadows[i]; |
| 2136 } |
| 2137 |
| 2138 exit.Bind(); |
| 2139 ASSERT(!has_valid_frame() || frame_->height() == original_height); |
| 2140 } |
| 2141 |
| 2142 |
| 2143 void CodeGenerator::VisitTryFinally(TryFinally* node) { |
| 2144 #ifdef DEBUG |
| 2145 int original_height = frame_->height(); |
| 2146 #endif |
| 2147 VirtualFrame::SpilledScope spilled_scope(this); |
| 2148 Comment cmnt(masm_, "[ TryFinally"); |
| 2149 CodeForStatementPosition(node); |
| 2150 |
| 2151 // State: Used to keep track of the reason for entering the finally
| 2152 // block. Should probably be extended to hold information for |
| 2153 // break/continue from within the try block. |
| 2154 enum { FALLING, THROWING, JUMPING }; |
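| // The state is pushed as a smi: FALLING and THROWING are used directly,
| // while a jump through the i'th shadowing target is encoded as JUMPING + i
| // so the dispatch code after the finally block needs only one compare per
| // target.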
| 2155 |
| 2156 JumpTarget try_block(this); |
| 2157 JumpTarget finally_block(this); |
| 2158 |
| 2159 try_block.Call(); |
| 2160 |
| 2161 frame_->EmitPush(r0); // save exception object on the stack |
| 2162 // In case of thrown exceptions, this is where we continue. |
| 2163 __ mov(r2, Operand(Smi::FromInt(THROWING))); |
| 2164 finally_block.Jump(); |
| 2165 |
| 2166 // --- Try block --- |
| 2167 try_block.Bind(); |
| 2168 |
| 2169 frame_->PushTryHandler(TRY_FINALLY_HANDLER); |
| 2170 int handler_height = frame_->height(); |
| 2171 |
| 2172 // Shadow the labels for all escapes from the try block, including |
| 2173 // returns. Shadowing hides the original target behind its ShadowTarget
| 2174 // and operations on the original actually affect the shadowing target.
| 2175 // |
| 2176 // We should probably try to unify the escaping labels and the return |
| 2177 // label. |
| 2178 int nof_escapes = node->escaping_targets()->length(); |
| 2179 List<ShadowTarget*> shadows(1 + nof_escapes); |
| 2180 |
| 2181 // Add the shadow target for the function return. |
| 2182 static const int kReturnShadowIndex = 0; |
| 2183 shadows.Add(new ShadowTarget(&function_return_)); |
| 2184 bool function_return_was_shadowed = function_return_is_shadowed_; |
| 2185 function_return_is_shadowed_ = true; |
| 2186 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_); |
| 2187 |
| 2188 // Add the remaining shadow targets. |
| 2189 for (int i = 0; i < nof_escapes; i++) { |
| 2190 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i))); |
| 2191 } |
| 2192 |
| 2193 // Generate code for the statements in the try block. |
| 2194 VisitStatementsAndSpill(node->try_block()->statements()); |
| 2195 |
| 2196 // Stop the introduced shadowing and count the number of required unlinks. |
| 2197 // After shadowing stops, the original targets are unshadowed and the
| 2198 // ShadowTargets represent the formerly shadowing targets.
| 2199 int nof_unlinks = 0; |
| 2200 for (int i = 0; i < shadows.length(); i++) { |
| 2201 shadows[i]->StopShadowing(); |
| 2202 if (shadows[i]->is_linked()) nof_unlinks++; |
| 2203 } |
| 2204 function_return_is_shadowed_ = function_return_was_shadowed; |
| 2205 |
| 2206 // Get an external reference to the handler address. |
| 2207 ExternalReference handler_address(Top::k_handler_address); |
| 2208 |
| 2209 // The next handler address is at kNextIndex in the stack. |
| 2210 const int kNextIndex = StackHandlerConstants::kNextOffset / kPointerSize; |
| 2211 // If we can fall off the end of the try block, unlink from the try |
| 2212 // chain and set the state on the frame to FALLING. |
| 2213 if (has_valid_frame()) { |
| 2214 __ ldr(r1, frame_->ElementAt(kNextIndex)); |
| 2215 __ mov(r3, Operand(handler_address)); |
| 2216 __ str(r1, MemOperand(r3)); |
| 2217 frame_->Drop(StackHandlerConstants::kSize / kPointerSize); |
| 2218 |
| 2219 // Fake a top of stack value (unneeded when FALLING) and set the |
| 2220 // state in r2, then jump around the unlink blocks if any. |
| 2221 __ mov(r0, Operand(Factory::undefined_value())); |
| 2222 frame_->EmitPush(r0); |
| 2223 __ mov(r2, Operand(Smi::FromInt(FALLING))); |
| 2224 if (nof_unlinks > 0) { |
| 2225 finally_block.Jump(); |
| 2226 } |
| 2227 } |
| 2228 |
| 2229 // Generate code to unlink and set the state for the (formerly) |
| 2230 // shadowing targets that have been jumped to. |
| 2231 for (int i = 0; i < shadows.length(); i++) { |
| 2232 if (shadows[i]->is_linked()) { |
| 2233 // If we have come from the shadowed return, the return value is |
| 2234 // in (a non-refcounted reference to) r0. We must preserve it |
| 2235 // until it is pushed. |
| 2236 // |
| 2237 // Because we can be jumping here (to spilled code) from |
| 2238 // unspilled code, we need to reestablish a spilled frame at |
| 2239 // this block. |
| 2240 shadows[i]->Bind(); |
| 2241 frame_->SpillAll(); |
| 2242 |
| 2243 // Reload sp from the top handler, because some statements that |
| 2244 // we break from (eg, for...in) may have left stuff on the |
| 2245 // stack. |
| 2246 __ mov(r3, Operand(handler_address)); |
| 2247 __ ldr(sp, MemOperand(r3)); |
| 2248 // The stack pointer was restored to the address slot in the handler. |
| 2249 ASSERT(StackHandlerConstants::kNextOffset == 1 * kPointerSize); |
| 2250 frame_->Forget(frame_->height() - handler_height + 1); |
| 2251 |
| 2252 // Unlink this handler and drop it from the frame. The next |
| 2253 // handler address is now on top of the frame. |
| 2254 frame_->EmitPop(r1); |
| 2255 __ str(r1, MemOperand(r3)); |
| 2256 // The top (code) and the second (handler) slot have both been |
| 2257 // dropped already. |
| 2258 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 2); |
| 2259 |
| 2260 if (i == kReturnShadowIndex) { |
| 2261 // If this label shadowed the function return, materialize the |
| 2262 // return value on the stack. |
| 2263 frame_->EmitPush(r0); |
| 2264 } else { |
| 2265 // Fake TOS for targets that shadowed breaks and continues. |
| 2266 __ mov(r0, Operand(Factory::undefined_value())); |
| 2267 frame_->EmitPush(r0); |
| 2268 } |
| 2269 __ mov(r2, Operand(Smi::FromInt(JUMPING + i))); |
| 2270 if (--nof_unlinks > 0) { |
| 2271 // If this is not the last unlink block, jump around the next. |
| 2272 finally_block.Jump(); |
| 2273 } |
| 2274 } |
| 2275 } |
| 2276 |
| 2277 // --- Finally block --- |
| 2278 finally_block.Bind(); |
| 2279 |
| 2280 // Push the state on the stack. |
| 2281 frame_->EmitPush(r2); |
| 2282 |
| 2283 // We keep two elements on the stack - the (possibly faked) result |
| 2284 // and the state - while evaluating the finally block. |
| 2285 // |
| 2286 // Generate code for the statements in the finally block. |
| 2287 VisitStatementsAndSpill(node->finally_block()->statements()); |
| 2288 |
| 2289 if (has_valid_frame()) { |
| 2290 // Restore state and return value or faked TOS. |
| 2291 frame_->EmitPop(r2); |
| 2292 frame_->EmitPop(r0); |
| 2293 } |
| 2294 |
| 2295 // Generate code to jump to the right destination for all used |
| 2296 // formerly shadowing targets. Deallocate each shadow target. |
| 2297 for (int i = 0; i < shadows.length(); i++) { |
| 2298 if (has_valid_frame() && shadows[i]->is_bound()) { |
| 2299 JumpTarget* original = shadows[i]->other_target(); |
| 2300 __ cmp(r2, Operand(Smi::FromInt(JUMPING + i))); |
| 2301 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) { |
| 2302 JumpTarget skip(this); |
| 2303 skip.Branch(ne); |
| 2304 frame_->PrepareForReturn(); |
| 2305 original->Jump(); |
| 2306 skip.Bind(); |
| 2307 } else { |
| 2308 original->Branch(eq); |
| 2309 } |
| 2310 } |
| 2311 delete shadows[i]; |
| 2312 } |
| 2313 |
| 2314 if (has_valid_frame()) { |
| 2315 // Check if we need to rethrow the exception. |
| 2316 JumpTarget exit(this); |
| 2317 __ cmp(r2, Operand(Smi::FromInt(THROWING))); |
| 2318 exit.Branch(ne); |
| 2319 |
| 2320 // Rethrow exception. |
| 2321 frame_->EmitPush(r0); |
| 2322 frame_->CallRuntime(Runtime::kReThrow, 1); |
| 2323 |
| 2324 // Done. |
| 2325 exit.Bind(); |
| 2326 } |
| 2327 ASSERT(!has_valid_frame() || frame_->height() == original_height); |
| 2328 } |
| 2329 |
| 2330 |
| 2331 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) { |
| 2332 #ifdef DEBUG |
| 2333 int original_height = frame_->height(); |
| 2334 #endif |
| 2335 VirtualFrame::SpilledScope spilled_scope(this); |
| 2336 Comment cmnt(masm_, "[ DebuggerStatement");
| 2337 CodeForStatementPosition(node); |
| 2338 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 2339 frame_->CallRuntime(Runtime::kDebugBreak, 0); |
| 2340 #endif |
| 2341 // Ignore the return value. |
| 2342 ASSERT(frame_->height() == original_height); |
| 2343 } |
| 2344 |
| 2345 |
| 2346 void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) { |
| 2347 VirtualFrame::SpilledScope spilled_scope(this); |
| 2348 ASSERT(boilerplate->IsBoilerplate()); |
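| // A boilerplate function carries compiled code and literals but no context
| // of its own; Runtime::kNewClosure pairs it with the current context (cp)
| // to produce the closure that is left on the stack.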
| 2349 |
| 2350 // Push the boilerplate on the stack. |
| 2351 __ mov(r0, Operand(boilerplate)); |
| 2352 frame_->EmitPush(r0); |
| 2353 |
| 2354 // Create a new closure. |
| 2355 frame_->EmitPush(cp); |
| 2356 frame_->CallRuntime(Runtime::kNewClosure, 2); |
| 2357 frame_->EmitPush(r0); |
| 2358 } |
| 2359 |
| 2360 |
| 2361 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { |
| 2362 #ifdef DEBUG |
| 2363 int original_height = frame_->height(); |
| 2364 #endif |
| 2365 VirtualFrame::SpilledScope spilled_scope(this); |
| 2366 Comment cmnt(masm_, "[ FunctionLiteral"); |
| 2367 |
| 2368 // Build the function boilerplate and instantiate it. |
| 2369 Handle<JSFunction> boilerplate = BuildBoilerplate(node); |
| 2370 // Check for stack-overflow exception. |
| 2371 if (HasStackOverflow()) { |
| 2372 ASSERT(frame_->height() == original_height); |
| 2373 return; |
| 2374 } |
| 2375 InstantiateBoilerplate(boilerplate); |
| 2376 ASSERT(frame_->height() == original_height + 1); |
| 2377 } |
| 2378 |
| 2379 |
| 2380 void CodeGenerator::VisitFunctionBoilerplateLiteral( |
| 2381 FunctionBoilerplateLiteral* node) { |
| 2382 #ifdef DEBUG |
| 2383 int original_height = frame_->height(); |
| 2384 #endif |
| 2385 VirtualFrame::SpilledScope spilled_scope(this); |
| 2386 Comment cmnt(masm_, "[ FunctionBoilerplateLiteral"); |
| 2387 InstantiateBoilerplate(node->boilerplate()); |
| 2388 ASSERT(frame_->height() == original_height + 1); |
| 2389 } |
| 2390 |
| 2391 |
| 2392 void CodeGenerator::VisitConditional(Conditional* node) { |
| 2393 #ifdef DEBUG |
| 2394 int original_height = frame_->height(); |
| 2395 #endif |
| 2396 VirtualFrame::SpilledScope spilled_scope(this); |
| 2397 Comment cmnt(masm_, "[ Conditional"); |
| 2398 JumpTarget then(this); |
| 2399 JumpTarget else_(this); |
| 2400 JumpTarget exit(this); |
| 2401 LoadConditionAndSpill(node->condition(), NOT_INSIDE_TYPEOF, |
| 2402 &then, &else_, true); |
| 2403 Branch(false, &else_); |
| 2404 then.Bind(); |
| 2405 LoadAndSpill(node->then_expression(), typeof_state()); |
| 2406 exit.Jump(); |
| 2407 else_.Bind(); |
| 2408 LoadAndSpill(node->else_expression(), typeof_state()); |
| 2409 exit.Bind(); |
| 2410 ASSERT(frame_->height() == original_height + 1); |
| 2411 } |
| 2412 |
| 2413 |
| 2414 void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) { |
| 2415 VirtualFrame::SpilledScope spilled_scope(this); |
| 2416 if (slot->type() == Slot::LOOKUP) { |
| 2417 ASSERT(slot->var()->is_dynamic()); |
| 2418 |
| 2419 JumpTarget slow(this); |
| 2420 JumpTarget done(this); |
| 2421 |
| 2422 // Generate fast-case code for variables that might be shadowed by |
| 2423 // eval-introduced variables. Eval is used a lot without |
| 2424 // introducing variables. In those cases, we do not want to |
| 2425 // perform a runtime call for all variables in the scope |
| 2426 // containing the eval. |
| 2427 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { |
| 2428 LoadFromGlobalSlotCheckExtensions(slot, typeof_state, r1, r2, &slow); |
| 2429 // If there was no control flow to slow, we can exit early. |
| 2430 if (!slow.is_linked()) { |
| 2431 frame_->EmitPush(r0); |
| 2432 return; |
| 2433 } |
| 2434 |
| 2435 done.Jump(); |
| 2436 |
| 2437 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { |
| 2438 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); |
| 2439 // Only generate the fast case for locals that rewrite to slots. |
| 2440 // This rules out argument loads. |
| 2441 if (potential_slot != NULL) { |
| 2442 __ ldr(r0, |
| 2443 ContextSlotOperandCheckExtensions(potential_slot, |
| 2444 r1, |
| 2445 r2, |
| 2446 &slow)); |
| 2447 if (potential_slot->var()->mode() == Variable::CONST) { |
| 2448 __ cmp(r0, Operand(Factory::the_hole_value())); |
| 2449 __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); |
| 2450 } |
| 2451 // There is always control flow to slow from |
| 2452 // ContextSlotOperandCheckExtensions so we have to jump around |
| 2453 // it. |
| 2454 done.Jump(); |
| 2455 } |
| 2456 } |
| 2457 |
| 2458 slow.Bind(); |
| 2459 frame_->EmitPush(cp); |
| 2460 __ mov(r0, Operand(slot->var()->name())); |
| 2461 frame_->EmitPush(r0); |
| 2462 |
| 2463 if (typeof_state == INSIDE_TYPEOF) { |
| 2464 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
| 2465 } else { |
| 2466 frame_->CallRuntime(Runtime::kLoadContextSlot, 2); |
| 2467 } |
| 2468 |
| 2469 done.Bind(); |
| 2470 frame_->EmitPush(r0); |
| 2471 |
| 2472 } else { |
| 2473 // Note: We would like to keep the assert below, but it fires because of |
| 2474 // some nasty code in LoadTypeofExpression() which should be removed... |
| 2475 // ASSERT(!slot->var()->is_dynamic()); |
| 2476 |
| 2477 // Special handling for locals allocated in registers. |
| 2478 __ ldr(r0, SlotOperand(slot, r2)); |
| 2479 frame_->EmitPush(r0); |
| 2480 if (slot->var()->mode() == Variable::CONST) { |
| 2481 // Const slots may contain 'the hole' value (the constant hasn't been |
| 2482 // initialized yet) which needs to be converted into the 'undefined' |
| 2483 // value. |
| 2484 Comment cmnt(masm_, "[ Unhole const"); |
| 2485 frame_->EmitPop(r0); |
| 2486 __ cmp(r0, Operand(Factory::the_hole_value())); |
| 2487 __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); |
| 2488 frame_->EmitPush(r0); |
| 2489 } |
| 2490 } |
| 2491 } |
| 2492 |
| 2493 |
| 2494 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot, |
| 2495 TypeofState typeof_state, |
| 2496 Register tmp, |
| 2497 Register tmp2, |
| 2498 JumpTarget* slow) { |
| 2499 // Check that no extension objects have been created by calls to |
| 2500 // eval from the current scope to the global scope. |
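| // For instance, in 'function f() { eval(s); return x; }' the variable 'x'
| // is most likely a global, but the eval may have introduced a shadowing
| // binding, so every eval-calling context between here and the global
| // context is checked for an (absent) extension object.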
| 2501 Register context = cp; |
| 2502 Scope* s = scope(); |
| 2503 while (s != NULL) { |
| 2504 if (s->num_heap_slots() > 0) { |
| 2505 if (s->calls_eval()) { |
| 2506 // Check that extension is NULL. |
| 2507 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 2508 __ tst(tmp2, tmp2); |
| 2509 slow->Branch(ne); |
| 2510 } |
| 2511 // Load next context in chain. |
| 2512 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX)); |
| 2513 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
| 2514 context = tmp; |
| 2515 } |
| 2516 // If no outer scope calls eval, we do not need to check more |
| 2517 // context extensions. |
| 2518 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; |
| 2519 s = s->outer_scope(); |
| 2520 } |
| 2521 |
| 2522 if (s->is_eval_scope()) { |
| 2523 Label next, fast; |
| 2524 if (!context.is(tmp)) { |
| 2525 __ mov(tmp, Operand(context)); |
| 2526 } |
| 2527 __ bind(&next); |
| 2528 // Terminate at global context. |
| 2529 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset)); |
| 2530 __ cmp(tmp2, Operand(Factory::global_context_map())); |
| 2531 __ b(eq, &fast); |
| 2532 // Check that extension is NULL. |
| 2533 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); |
| 2534 __ tst(tmp2, tmp2); |
| 2535 slow->Branch(ne); |
| 2536 // Load next context in chain. |
| 2537 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); |
| 2538 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
| 2539 __ b(&next); |
| 2540 __ bind(&fast); |
| 2541 } |
| 2542 |
| 2543 // All extension objects were empty and it is safe to use a global |
| 2544 // load IC call. |
| 2545 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
| 2546 // Load the global object. |
| 2547 LoadGlobal(); |
| 2548 // Setup the name register. |
| 2549 Result name = allocator_->Allocate(r2); |
| 2550 ASSERT(name.is_valid()); // We are in spilled code. |
| 2551 __ mov(name.reg(), Operand(slot->var()->name())); |
| 2552 // Call IC stub. |
| 2553 if (typeof_state == INSIDE_TYPEOF) { |
| 2554 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, &name, 0); |
| 2555 } else { |
| 2556 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET_CONTEXT, &name, 0); |
| 2557 } |
| 2558 |
| 2559 // Drop the global object. The result is in r0. |
| 2560 frame_->Drop(); |
| 2561 } |
| 2562 |
| 2563 |
| 2564 void CodeGenerator::VisitSlot(Slot* node) { |
| 2565 #ifdef DEBUG |
| 2566 int original_height = frame_->height(); |
| 2567 #endif |
| 2568 VirtualFrame::SpilledScope spilled_scope(this); |
| 2569 Comment cmnt(masm_, "[ Slot"); |
| 2570 LoadFromSlot(node, typeof_state()); |
| 2571 ASSERT(frame_->height() == original_height + 1); |
| 2572 } |
| 2573 |
| 2574 |
| 2575 void CodeGenerator::VisitVariableProxy(VariableProxy* node) { |
| 2576 #ifdef DEBUG |
| 2577 int original_height = frame_->height(); |
| 2578 #endif |
| 2579 VirtualFrame::SpilledScope spilled_scope(this); |
| 2580 Comment cmnt(masm_, "[ VariableProxy"); |
| 2581 |
| 2582 Variable* var = node->var(); |
| 2583 Expression* expr = var->rewrite(); |
| 2584 if (expr != NULL) { |
| 2585 Visit(expr); |
| 2586 } else { |
| 2587 ASSERT(var->is_global()); |
| 2588 Reference ref(this, node); |
| 2589 ref.GetValueAndSpill(typeof_state()); |
| 2590 } |
| 2591 ASSERT(frame_->height() == original_height + 1); |
| 2592 } |
| 2593 |
| 2594 |
| 2595 void CodeGenerator::VisitLiteral(Literal* node) { |
| 2596 #ifdef DEBUG |
| 2597 int original_height = frame_->height(); |
| 2598 #endif |
| 2599 VirtualFrame::SpilledScope spilled_scope(this); |
| 2600 Comment cmnt(masm_, "[ Literal"); |
| 2601 __ mov(r0, Operand(node->handle())); |
| 2602 frame_->EmitPush(r0); |
| 2603 ASSERT(frame_->height() == original_height + 1); |
| 2604 } |
| 2605 |
| 2606 |
| 2607 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { |
| 2608 #ifdef DEBUG |
| 2609 int original_height = frame_->height(); |
| 2610 #endif |
| 2611 VirtualFrame::SpilledScope spilled_scope(this); |
| 2612 Comment cmnt(masm_, "[ RegExp Literal");
| 2613 |
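| // JavaScript example: 'var re = /ab+c/gi;' -- the JSRegExp object for the
| // literal is materialized lazily by a runtime call the first time this
| // site executes and is found in the function's literal array afterwards.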
| 2614 // Retrieve the literal array and check the allocated entry. |
| 2615 |
| 2616 // Load the function of this activation. |
| 2617 __ ldr(r1, frame_->Function()); |
| 2618 |
| 2619 // Load the literals array of the function. |
| 2620 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
| 2621 |
| 2622 // Load the literal at the ast saved index. |
| 2623 int literal_offset = |
| 2624 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
| 2625 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
| 2626 |
| 2627 JumpTarget done(this); |
| 2628 __ cmp(r2, Operand(Factory::undefined_value())); |
| 2629 done.Branch(ne); |
| 2630 |
| 2631 // If the entry is undefined we call the runtime system to compute
| 2632 // the literal. |
| 2633 frame_->EmitPush(r1); // literal array (0) |
| 2634 __ mov(r0, Operand(Smi::FromInt(node->literal_index()))); |
| 2635 frame_->EmitPush(r0); // literal index (1) |
| 2636 __ mov(r0, Operand(node->pattern())); // RegExp pattern (2) |
| 2637 frame_->EmitPush(r0); |
| 2638 __ mov(r0, Operand(node->flags())); // RegExp flags (3) |
| 2639 frame_->EmitPush(r0); |
| 2640 frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
| 2641 __ mov(r2, Operand(r0)); |
| 2642 |
| 2643 done.Bind(); |
| 2644 // Push the literal. |
| 2645 frame_->EmitPush(r2); |
| 2646 ASSERT(frame_->height() == original_height + 1); |
| 2647 } |
| 2648 |
| 2649 |
| 2650 // This deferred code stub will be used for creating the boilerplate |
| 2651 // by calling Runtime_CreateObjectLiteralBoilerplate. |
| 2652 // Each created boilerplate is stored in the JSFunction and they are |
| 2653 // therefore context dependent. |
| 2654 class DeferredObjectLiteral: public DeferredCode { |
| 2655 public: |
| 2656 DeferredObjectLiteral(CodeGenerator* generator, ObjectLiteral* node) |
| 2657 : DeferredCode(generator), node_(node) { |
| 2658 set_comment("[ DeferredObjectLiteral"); |
| 2659 } |
| 2660 |
| 2661 virtual void Generate(); |
| 2662 |
| 2663 private: |
| 2664 ObjectLiteral* node_; |
| 2665 }; |
| 2666 |
| 2667 |
| 2668 void DeferredObjectLiteral::Generate() { |
| 2669 // Argument is passed in r1. |
| 2670 enter()->Bind(); |
| 2671 VirtualFrame::SpilledScope spilled_scope(generator()); |
| 2672 |
| 2673 // If the entry is undefined we call the runtime system to compute |
| 2674 // the literal. |
| 2675 |
| 2676 VirtualFrame* frame = generator()->frame(); |
| 2677 // Literal array (0). |
| 2678 frame->EmitPush(r1); |
| 2679 // Literal index (1). |
| 2680 __ mov(r0, Operand(Smi::FromInt(node_->literal_index()))); |
| 2681 frame->EmitPush(r0); |
| 2682 // Constant properties (2). |
| 2683 __ mov(r0, Operand(node_->constant_properties())); |
| 2684 frame->EmitPush(r0); |
| 2685 Result boilerplate = |
| 2686 frame->CallRuntime(Runtime::kCreateObjectLiteralBoilerplate, 3); |
| 2687 __ mov(r2, Operand(boilerplate.reg())); |
| 2688 // Result is returned in r2. |
| 2689 exit_.Jump(); |
| 2690 } |
| 2691 |
| 2692 |
| 2693 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) { |
| 2694 #ifdef DEBUG |
| 2695 int original_height = frame_->height(); |
| 2696 #endif |
| 2697 VirtualFrame::SpilledScope spilled_scope(this); |
| 2698 Comment cmnt(masm_, "[ ObjectLiteral"); |
| 2699 |
| 2700 DeferredObjectLiteral* deferred = new DeferredObjectLiteral(this, node); |
| 2701 |
| 2702 // Retrieve the literal array and check the allocated entry. |
| 2703 |
| 2704 // Load the function of this activation. |
| 2705 __ ldr(r1, frame_->Function()); |
| 2706 |
| 2707 // Load the literals array of the function. |
| 2708 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
| 2709 |
| 2710 // Load the literal at the ast saved index. |
| 2711 int literal_offset = |
| 2712 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
| 2713 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
| 2714 |
| 2715 // Check whether we need to materialize the object literal boilerplate. |
| 2716 // If so, jump to the deferred code. |
| 2717 __ cmp(r2, Operand(Factory::undefined_value())); |
| 2718 deferred->enter()->Branch(eq); |
| 2719 deferred->BindExit(); |
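| // On the fast path r2 already holds the boilerplate loaded above; the
| // deferred code materializes it with a runtime call and also leaves it in
| // r2 before jumping back here.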
| 2720 |
| 2721 // Push the object literal boilerplate. |
| 2722 frame_->EmitPush(r2); |
| 2723 |
| 2724 // Clone the boilerplate object. |
| 2725 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; |
| 2726 if (node->depth() == 1) { |
| 2727 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; |
| 2728 } |
| 2729 frame_->CallRuntime(clone_function_id, 1); |
| 2730 frame_->EmitPush(r0); // save the result |
| 2731 // r0: cloned object literal |
| 2732 |
| 2733 for (int i = 0; i < node->properties()->length(); i++) { |
| 2734 ObjectLiteral::Property* property = node->properties()->at(i); |
| 2735 Literal* key = property->key(); |
| 2736 Expression* value = property->value(); |
| 2737 switch (property->kind()) { |
| 2738 case ObjectLiteral::Property::CONSTANT: |
| 2739 break; |
| 2740 case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
| 2741 if (CompileTimeValue::IsCompileTimeValue(property->value())) break; |
| 2742 // else fall through |
| 2743 case ObjectLiteral::Property::COMPUTED: // fall through |
| 2744 case ObjectLiteral::Property::PROTOTYPE: { |
| 2745 frame_->EmitPush(r0); // dup the result |
| 2746 LoadAndSpill(key); |
| 2747 LoadAndSpill(value); |
| 2748 frame_->CallRuntime(Runtime::kSetProperty, 3); |
| 2749 // restore r0 |
| 2750 __ ldr(r0, frame_->Top()); |
| 2751 break; |
| 2752 } |
| 2753 case ObjectLiteral::Property::SETTER: { |
| 2754 frame_->EmitPush(r0); |
| 2755 LoadAndSpill(key); |
| 2756 __ mov(r0, Operand(Smi::FromInt(1))); |
| 2757 frame_->EmitPush(r0); |
| 2758 LoadAndSpill(value); |
| 2759 frame_->CallRuntime(Runtime::kDefineAccessor, 4); |
| 2760 __ ldr(r0, frame_->Top()); |
| 2761 break; |
| 2762 } |
| 2763 case ObjectLiteral::Property::GETTER: { |
| 2764 frame_->EmitPush(r0); |
| 2765 LoadAndSpill(key); |
| 2766 __ mov(r0, Operand(Smi::FromInt(0))); |
| 2767 frame_->EmitPush(r0); |
| 2768 LoadAndSpill(value); |
| 2769 frame_->CallRuntime(Runtime::kDefineAccessor, 4); |
| 2770 __ ldr(r0, frame_->Top()); |
| 2771 break; |
| 2772 } |
| 2773 } |
| 2774 } |
| 2775 ASSERT(frame_->height() == original_height + 1); |
| 2776 } |
| 2777 |
| 2778 |
| 2779 // This deferred code stub will be used for creating the boilerplate |
| 2780 // by calling Runtime_CreateArrayLiteralBoilerplate. |
| 2781 // Each created boilerplate is stored in the JSFunction and they are |
| 2782 // therefore context dependent. |
| 2783 class DeferredArrayLiteral: public DeferredCode { |
| 2784 public: |
| 2785 DeferredArrayLiteral(CodeGenerator* generator, ArrayLiteral* node) |
| 2786 : DeferredCode(generator), node_(node) { |
| 2787 set_comment("[ DeferredArrayLiteral"); |
| 2788 } |
| 2789 |
| 2790 virtual void Generate(); |
| 2791 |
| 2792 private: |
| 2793 ArrayLiteral* node_; |
| 2794 }; |
| 2795 |
| 2796 |
| 2797 void DeferredArrayLiteral::Generate() { |
| 2798 // Argument is passed in r1. |
| 2799 enter()->Bind(); |
| 2800 VirtualFrame::SpilledScope spilled_scope(generator()); |
| 2801 |
| 2802 // If the entry is undefined we call the runtime system to compute
| 2803 // the literal. |
| 2804 |
| 2805 VirtualFrame* frame = generator()->frame(); |
| 2806 // Literal array (0). |
| 2807 frame->EmitPush(r1); |
| 2808 // Literal index (1). |
| 2809 __ mov(r0, Operand(Smi::FromInt(node_->literal_index()))); |
| 2810 frame->EmitPush(r0); |
| 2811 // Constant elements (2).
| 2812 __ mov(r0, Operand(node_->literals())); |
| 2813 frame->EmitPush(r0); |
| 2814 Result boilerplate = |
| 2815 frame->CallRuntime(Runtime::kCreateArrayLiteralBoilerplate, 3); |
| 2816 __ mov(r2, Operand(boilerplate.reg())); |
| 2817 // Result is returned in r2. |
| 2818 exit_.Jump(); |
| 2819 } |
| 2820 |
| 2821 |
| 2822 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) { |
| 2823 #ifdef DEBUG |
| 2824 int original_height = frame_->height(); |
| 2825 #endif |
| 2826 VirtualFrame::SpilledScope spilled_scope(this); |
| 2827 Comment cmnt(masm_, "[ ArrayLiteral"); |
| 2828 |
| 2829 DeferredArrayLiteral* deferred = new DeferredArrayLiteral(this, node); |
| 2830 |
| 2831 // Retrieve the literal array and check the allocated entry. |
| 2832 |
| 2833 // Load the function of this activation. |
| 2834 __ ldr(r1, frame_->Function()); |
| 2835 |
| 2836 // Load the literals array of the function. |
| 2837 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
| 2838 |
| 2839 // Load the literal at the ast saved index. |
| 2840 int literal_offset = |
| 2841 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
| 2842 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
| 2843 |
| 2844 // Check whether we need to materialize the array literal boilerplate.
| 2845 // If so, jump to the deferred code. |
| 2846 __ cmp(r2, Operand(Factory::undefined_value())); |
| 2847 deferred->enter()->Branch(eq); |
| 2848 deferred->BindExit(); |
| 2849 |
| 2850 // Push the array literal boilerplate.
| 2851 frame_->EmitPush(r2); |
| 2852 |
| 2853 // Clone the boilerplate object. |
| 2854 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; |
| 2855 if (node->depth() == 1) { |
| 2856 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; |
| 2857 } |
| 2858 frame_->CallRuntime(clone_function_id, 1); |
| 2859 frame_->EmitPush(r0); // save the result |
| 2860 // r0: cloned array literal
| 2861 |
| 2862 // Generate code to set the elements in the array that are not |
| 2863 // literals. |
| 2864 for (int i = 0; i < node->values()->length(); i++) { |
| 2865 Expression* value = node->values()->at(i); |
| 2866 |
| 2867 // If value is a literal the property value is already set in the |
| 2868 // boilerplate object. |
| 2869 if (value->AsLiteral() != NULL) continue; |
| 2870 // If value is a materialized literal the property value is already set |
| 2871 // in the boilerplate object if it is simple. |
| 2872 if (CompileTimeValue::IsCompileTimeValue(value)) continue; |
| 2873 |
| 2874 // The property must be set by generated code. |
| 2875 LoadAndSpill(value); |
| 2876 frame_->EmitPop(r0); |
| 2877 |
| 2878 // Fetch the object literal. |
| 2879 __ ldr(r1, frame_->Top()); |
| 2880 // Get the elements array. |
| 2881 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset)); |
| 2882 |
| 2883 // Write to the indexed properties array. |
| 2884 int offset = i * kPointerSize + Array::kHeaderSize; |
| 2885 __ str(r0, FieldMemOperand(r1, offset)); |
| 2886 |
| 2887 // Update the write barrier for the array address. |
| 2888 __ mov(r3, Operand(offset)); |
| 2889 __ RecordWrite(r1, r3, r2); |
| 2890 } |
| 2891 ASSERT(frame_->height() == original_height + 1); |
| 2892 } |
| 2893 |
| 2894 |
| 2895 void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) { |
| 2896 #ifdef DEBUG |
| 2897 int original_height = frame_->height(); |
| 2898 #endif |
| 2899 ASSERT(!in_spilled_code()); |
| 2900 VirtualFrame::SpilledScope spilled_scope(this); |
| 2901 // Call runtime routine to allocate the catch extension object and |
| 2902 // assign the exception value to the catch variable. |
| 2903 Comment cmnt(masm_, "[ CatchExtensionObject"); |
| 2904 LoadAndSpill(node->key()); |
| 2905 LoadAndSpill(node->value()); |
| 2906 Result result = |
| 2907 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2); |
| 2908 frame_->EmitPush(result.reg()); |
| 2909 ASSERT(frame_->height() == original_height + 1); |
| 2910 } |
| 2911 |
| 2912 |
| 2913 void CodeGenerator::VisitAssignment(Assignment* node) { |
| 2914 #ifdef DEBUG |
| 2915 int original_height = frame_->height(); |
| 2916 #endif |
| 2917 VirtualFrame::SpilledScope spilled_scope(this); |
| 2918 Comment cmnt(masm_, "[ Assignment"); |
| 2919 CodeForStatementPosition(node); |
| 2920 |
| 2921 { Reference target(this, node->target()); |
| 2922 if (target.is_illegal()) { |
| 2923 // Fool the virtual frame into thinking that we left the assignment's |
| 2924 // value on the frame. |
| 2925 __ mov(r0, Operand(Smi::FromInt(0))); |
| 2926 frame_->EmitPush(r0); |
| 2927 ASSERT(frame_->height() == original_height + 1); |
| 2928 return; |
| 2929 } |
| 2930 |
| 2931 if (node->op() == Token::ASSIGN || |
| 2932 node->op() == Token::INIT_VAR || |
| 2933 node->op() == Token::INIT_CONST) { |
| 2934 LoadAndSpill(node->value()); |
| 2935 |
| 2936 } else { |
| 2937 // +=, *= and similar binary assignments. |
| 2938 // Get the old value of the lhs. |
| 2939 target.GetValueAndSpill(NOT_INSIDE_TYPEOF); |
| 2940 Literal* literal = node->value()->AsLiteral(); |
| 2941 bool overwrite = |
| 2942 (node->value()->AsBinaryOperation() != NULL && |
| 2943 node->value()->AsBinaryOperation()->ResultOverwriteAllowed()); |
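| // When the right-hand side is itself a binary operation, its result is a
| // temporary that the generic stub may overwrite (OVERWRITE_RIGHT) instead
| // of allocating a fresh heap number for the result.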
| 2944 if (literal != NULL && literal->handle()->IsSmi()) { |
| 2945 SmiOperation(node->binary_op(), |
| 2946 literal->handle(), |
| 2947 false, |
| 2948 overwrite ? OVERWRITE_RIGHT : NO_OVERWRITE); |
| 2949 frame_->EmitPush(r0); |
| 2950 |
| 2951 } else { |
| 2952 LoadAndSpill(node->value()); |
| 2953 GenericBinaryOperation(node->binary_op(), |
| 2954 overwrite ? OVERWRITE_RIGHT : NO_OVERWRITE); |
| 2955 frame_->EmitPush(r0); |
| 2956 } |
| 2957 } |
| 2958 |
| 2959 Variable* var = node->target()->AsVariableProxy()->AsVariable(); |
| 2960 if (var != NULL && |
| 2961 (var->mode() == Variable::CONST) && |
| 2962 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) { |
| 2963 // Assignment ignored - leave the value on the stack. |
| 2964 |
| 2965 } else { |
| 2966 CodeForSourcePosition(node->position()); |
| 2967 if (node->op() == Token::INIT_CONST) { |
| 2968 // Dynamic constant initializations must use the function context |
| 2969 // and initialize the actual constant declared. Dynamic variable |
| 2970 // initializations are simply assignments and use SetValue. |
| 2971 target.SetValue(CONST_INIT); |
| 2972 } else { |
| 2973 target.SetValue(NOT_CONST_INIT); |
| 2974 } |
| 2975 } |
| 2976 } |
| 2977 ASSERT(frame_->height() == original_height + 1); |
| 2978 } |
| 2979 |
| 2980 |
| 2981 void CodeGenerator::VisitThrow(Throw* node) { |
| 2982 #ifdef DEBUG |
| 2983 int original_height = frame_->height(); |
| 2984 #endif |
| 2985 VirtualFrame::SpilledScope spilled_scope(this); |
| 2986 Comment cmnt(masm_, "[ Throw"); |
| 2987 |
| 2988 LoadAndSpill(node->exception()); |
| 2989 CodeForSourcePosition(node->position()); |
| 2990 frame_->CallRuntime(Runtime::kThrow, 1); |
| 2991 frame_->EmitPush(r0); |
| 2992 ASSERT(frame_->height() == original_height + 1); |
| 2993 } |
| 2994 |
| 2995 |
| 2996 void CodeGenerator::VisitProperty(Property* node) { |
| 2997 #ifdef DEBUG |
| 2998 int original_height = frame_->height(); |
| 2999 #endif |
| 3000 VirtualFrame::SpilledScope spilled_scope(this); |
| 3001 Comment cmnt(masm_, "[ Property"); |
| 3002 |
| 3003 { Reference property(this, node); |
| 3004 property.GetValueAndSpill(typeof_state()); |
| 3005 } |
| 3006 ASSERT(frame_->height() == original_height + 1); |
| 3007 } |
| 3008 |
| 3009 |
| 3010 void CodeGenerator::VisitCall(Call* node) { |
| 3011 #ifdef DEBUG |
| 3012 int original_height = frame_->height(); |
| 3013 #endif |
| 3014 VirtualFrame::SpilledScope spilled_scope(this); |
| 3015 Comment cmnt(masm_, "[ Call"); |
| 3016 |
| 3017 ZoneList<Expression*>* args = node->arguments(); |
| 3018 |
| 3019 CodeForStatementPosition(node); |
| 3020 // Standard function call. |
| 3021 |
| 3022 // Check if the function is a variable or a property. |
| 3023 Expression* function = node->expression(); |
| 3024 Variable* var = function->AsVariableProxy()->AsVariable(); |
| 3025 Property* property = function->AsProperty(); |
| 3026 |
| 3027 // ------------------------------------------------------------------------ |
| 3028 // Fast-case: Use inline caching. |
| 3029 // --- |
| 3030 // According to ECMA-262, section 11.2.3, page 44, the function to call |
| 3031 // must be resolved after the arguments have been evaluated. The IC code |
| 3032 // automatically handles this by loading the arguments before the function |
| 3033 // is resolved in cache misses (this also holds for megamorphic calls). |
| 3034 // ------------------------------------------------------------------------ |
| 3035 |
| 3036 if (var != NULL && !var->is_this() && var->is_global()) { |
| 3037 // ---------------------------------- |
| 3038 // JavaScript example: 'foo(1, 2, 3)' // foo is global |
| 3039 // ---------------------------------- |
| 3040 |
| 3041 // Push the name of the function and the receiver onto the stack. |
| 3042 __ mov(r0, Operand(var->name())); |
| 3043 frame_->EmitPush(r0); |
| 3044 |
| 3045 // Pass the global object as the receiver and let the IC stub |
| 3046 // patch the stack to use the global proxy as 'this' in the |
| 3047 // invoked function. |
| 3048 LoadGlobal(); |
| 3049 |
| 3050 // Load the arguments. |
| 3051 int arg_count = args->length(); |
| 3052 for (int i = 0; i < arg_count; i++) { |
| 3053 LoadAndSpill(args->at(i)); |
| 3054 } |
| 3055 |
| 3056 // Setup the receiver register and call the IC initialization code. |
| 3057 Handle<Code> stub = ComputeCallInitialize(arg_count); |
| 3058 CodeForSourcePosition(node->position()); |
| 3059 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT, |
| 3060 arg_count + 1); |
| 3061 __ ldr(cp, frame_->Context()); |
| 3062 // Remove the function from the stack. |
| 3063 frame_->Drop(); |
| 3064 frame_->EmitPush(r0); |
| 3065 |
| 3066 } else if (var != NULL && var->slot() != NULL && |
| 3067 var->slot()->type() == Slot::LOOKUP) { |
| 3068 // ---------------------------------- |
| 3069 // JavaScript example: 'with (obj) foo(1, 2, 3)' // foo is in obj |
| 3070 // ---------------------------------- |
| 3071 |
| 3072 // Load the function |
| 3073 frame_->EmitPush(cp); |
| 3074 __ mov(r0, Operand(var->name())); |
| 3075 frame_->EmitPush(r0); |
| 3076 frame_->CallRuntime(Runtime::kLoadContextSlot, 2); |
| 3077 // r0: slot value; r1: receiver |
| 3078 |
| 3079 // Load the receiver. |
| 3080 frame_->EmitPush(r0); // function |
| 3081 frame_->EmitPush(r1); // receiver |
| 3082 |
| 3083 // Call the function. |
| 3084 CallWithArguments(args, node->position()); |
| 3085 frame_->EmitPush(r0); |
| 3086 |
| 3087 } else if (property != NULL) { |
| 3088 // Check if the key is a literal string. |
| 3089 Literal* literal = property->key()->AsLiteral(); |
| 3090 |
| 3091 if (literal != NULL && literal->handle()->IsSymbol()) { |
| 3092 // ------------------------------------------------------------------ |
| 3093 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)' |
| 3094 // ------------------------------------------------------------------ |
| 3095 |
| 3096 // Push the name of the function and the receiver onto the stack. |
| 3097 __ mov(r0, Operand(literal->handle())); |
| 3098 frame_->EmitPush(r0); |
| 3099 LoadAndSpill(property->obj()); |
| 3100 |
| 3101 // Load the arguments. |
| 3102 int arg_count = args->length(); |
| 3103 for (int i = 0; i < arg_count; i++) { |
| 3104 LoadAndSpill(args->at(i)); |
| 3105 } |
| 3106 |
| 3107 // Set the receiver register and call the IC initialization code. |
| 3108 Handle<Code> stub = ComputeCallInitialize(arg_count); |
| 3109 CodeForSourcePosition(node->position()); |
| 3110 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1); |
| 3111 __ ldr(cp, frame_->Context()); |
| 3112 |
| 3113 // Remove the function from the stack. |
| 3114 frame_->Drop(); |
| 3115 |
| 3116 frame_->EmitPush(r0); // Push the result after removing the function from the stack.
| 3117 |
| 3118 } else { |
| 3119 // ------------------------------------------- |
| 3120 // JavaScript example: 'array[index](1, 2, 3)' |
| 3121 // ------------------------------------------- |
| 3122 |
| 3123 // Load the function to call from the property through a reference. |
| 3124 Reference ref(this, property); |
| 3125 ref.GetValueAndSpill(NOT_INSIDE_TYPEOF); // receiver |
| 3126 |
| 3127 // Pass receiver to called function. |
| 3128 if (property->is_synthetic()) { |
| 3129 LoadGlobalReceiver(r0); |
| 3130 } else { |
| 3131 __ ldr(r0, frame_->ElementAt(ref.size())); |
| 3132 frame_->EmitPush(r0); |
| 3133 } |
| 3134 |
| 3135 // Call the function. |
| 3136 CallWithArguments(args, node->position()); |
| 3137 frame_->EmitPush(r0); |
| 3138 } |
| 3139 |
| 3140 } else { |
| 3141 // ---------------------------------- |
| 3142 // JavaScript example: 'foo(1, 2, 3)' // foo is not global |
| 3143 // ---------------------------------- |
| 3144 |
| 3145 // Load the function. |
| 3146 LoadAndSpill(function); |
| 3147 |
| 3148 // Pass the global proxy as the receiver. |
| 3149 LoadGlobalReceiver(r0); |
| 3150 |
| 3151 // Call the function. |
| 3152 CallWithArguments(args, node->position()); |
| 3153 frame_->EmitPush(r0); |
| 3154 } |
| 3155 ASSERT(frame_->height() == original_height + 1); |
| 3156 } |
| 3157 |
| 3158 |
| 3159 void CodeGenerator::VisitCallEval(CallEval* node) { |
| 3160 #ifdef DEBUG |
| 3161 int original_height = frame_->height(); |
| 3162 #endif |
| 3163 VirtualFrame::SpilledScope spilled_scope(this); |
| 3164 Comment cmnt(masm_, "[ CallEval"); |
| 3165 |
| 3166 // In a call to eval, we first call %ResolvePossiblyDirectEval to resolve |
| 3167 // the function we need to call and the receiver of the call. |
| 3168 // Then we call the resolved function using the given arguments. |
| 3169 |
| 3170 ZoneList<Expression*>* args = node->arguments(); |
| 3171 Expression* function = node->expression(); |
| 3172 |
| 3173 CodeForStatementPosition(node); |
| 3174 |
| 3175 // Prepare stack for call to resolved function. |
| 3176 LoadAndSpill(function); |
| 3177 __ mov(r2, Operand(Factory::undefined_value())); |
| 3178 frame_->EmitPush(r2); // Slot for receiver |
| 3179 int arg_count = args->length(); |
| 3180 for (int i = 0; i < arg_count; i++) { |
| 3181 LoadAndSpill(args->at(i)); |
| 3182 } |
| 3183 |
| 3184 // Prepare stack for call to ResolvePossiblyDirectEval. |
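| // The function was pushed below the receiver slot and the arguments, so it
| // sits at sp + (arg_count + 1) * kPointerSize.  Once it has been pushed
| // again below, the first argument (if any) is at
| // sp + arg_count * kPointerSize.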
| 3185 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize)); |
| 3186 frame_->EmitPush(r1); |
| 3187 if (arg_count > 0) { |
| 3188 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize)); |
| 3189 frame_->EmitPush(r1); |
| 3190 } else { |
| 3191 frame_->EmitPush(r2); |
| 3192 } |
| 3193 |
| 3194 // Resolve the call. |
| 3195 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 2); |
| 3196 |
| 3197 // Touch up stack with the right values for the function and the receiver. |
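| // The runtime call leaves a fixed array in r0: element 0 is the resolved
| // function and element 1 is the receiver.  Copy them over the function and
| // receiver slots pushed earlier.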
| 3198 __ ldr(r1, FieldMemOperand(r0, FixedArray::kHeaderSize)); |
| 3199 __ str(r1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 3200 __ ldr(r1, FieldMemOperand(r0, FixedArray::kHeaderSize + kPointerSize)); |
| 3201 __ str(r1, MemOperand(sp, arg_count * kPointerSize)); |
| 3202 |
| 3203 // Call the function. |
| 3204 CodeForSourcePosition(node->position()); |
| 3205 |
| 3206 CallFunctionStub call_function(arg_count); |
| 3207 frame_->CallStub(&call_function, arg_count + 1); |
| 3208 |
| 3209 __ ldr(cp, frame_->Context()); |
| 3210 // Remove the function from the stack. |
| 3211 frame_->Drop(); |
| 3212 frame_->EmitPush(r0); |
| 3213 ASSERT(frame_->height() == original_height + 1); |
| 3214 } |
| 3215 |
| 3216 |
| 3217 void CodeGenerator::VisitCallNew(CallNew* node) { |
| 3218 #ifdef DEBUG |
| 3219 int original_height = frame_->height(); |
| 3220 #endif |
| 3221 VirtualFrame::SpilledScope spilled_scope(this); |
| 3222 Comment cmnt(masm_, "[ CallNew"); |
| 3223 CodeForStatementPosition(node); |
| 3224 |
| 3225 // According to ECMA-262, section 11.2.2, page 44, the function |
| 3226 // expression in new calls must be evaluated before the |
| 3227 // arguments. This is different from ordinary calls, where the |
| 3228 // actual function to call is resolved after the arguments have been |
| 3229 // evaluated. |
| 3230 |
| 3231 // Compute function to call and use the global object as the |
| 3232 // receiver. There is no need to use the global proxy here because |
| 3233 // it will always be replaced with a newly allocated object. |
| 3234 LoadAndSpill(node->expression()); |
| 3235 LoadGlobal(); |
| 3236 |
| 3237 // Push the arguments ("left-to-right") on the stack. |
| 3238 ZoneList<Expression*>* args = node->arguments(); |
| 3239 int arg_count = args->length(); |
| 3240 for (int i = 0; i < arg_count; i++) { |
| 3241 LoadAndSpill(args->at(i)); |
| 3242 } |
| 3243 |
| 3244 // r0: the number of arguments. |
| 3245 Result num_args = allocator_->Allocate(r0); |
| 3246 ASSERT(num_args.is_valid()); |
| 3247 __ mov(num_args.reg(), Operand(arg_count)); |
| 3248 |
| 3249 // Load the function into r1 as per calling convention. |
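| // The function was pushed before the global object and the arguments, so it
| // is arg_count + 1 elements below the top of the virtual frame.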
| 3250 Result function = allocator_->Allocate(r1); |
| 3251 ASSERT(function.is_valid()); |
| 3252 __ ldr(function.reg(), frame_->ElementAt(arg_count + 1)); |
| 3253 |
| 3254 // Call the construct call builtin that handles allocation and |
| 3255 // constructor invocation. |
| 3256 CodeForSourcePosition(node->position()); |
| 3257 Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall)); |
| 3258 Result result = frame_->CallCodeObject(ic, |
| 3259 RelocInfo::CONSTRUCT_CALL, |
| 3260 &num_args, |
| 3261 &function, |
| 3262 arg_count + 1); |
| 3263 |
| 3264 // Discard old TOS value and push r0 on the stack (same as Pop(), push(r0)). |
| 3265 __ str(r0, frame_->Top()); |
| 3266 ASSERT(frame_->height() == original_height + 1); |
| 3267 } |
| 3268 |
| 3269 |
| 3270 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { |
| 3271 VirtualFrame::SpilledScope spilled_scope(this); |
| 3272 ASSERT(args->length() == 1); |
| 3273 JumpTarget leave(this); |
| 3274 LoadAndSpill(args->at(0)); |
| 3275 frame_->EmitPop(r0); // r0 contains object. |
| 3276 // if (object->IsSmi()) return the object. |
| 3277 __ tst(r0, Operand(kSmiTagMask)); |
| 3278 leave.Branch(eq); |
| 3279 // It is a heap object - get map. |
| 3280 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 3281 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |
| 3282 // if (!object->IsJSValue()) return the object. |
| 3283 __ cmp(r1, Operand(JS_VALUE_TYPE)); |
| 3284 leave.Branch(ne); |
| 3285 // Load the value. |
| 3286 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset)); |
| 3287 leave.Bind(); |
| 3288 frame_->EmitPush(r0); |
| 3289 } |
| 3290 |
| 3291 |
| 3292 void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) { |
| 3293 VirtualFrame::SpilledScope spilled_scope(this); |
| 3294 ASSERT(args->length() == 2); |
| 3295 JumpTarget leave(this); |
| 3296 LoadAndSpill(args->at(0)); // Load the object. |
| 3297 LoadAndSpill(args->at(1)); // Load the value. |
| 3298 frame_->EmitPop(r0); // r0 contains value |
| 3299 frame_->EmitPop(r1); // r1 contains object |
| 3300 // if (object->IsSmi()) return object. |
| 3301 __ tst(r1, Operand(kSmiTagMask)); |
| 3302 leave.Branch(eq); |
| 3303 // It is a heap object - get map. |
| 3304 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 3305 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
| 3306 // if (!object->IsJSValue()) return object. |
| 3307 __ cmp(r2, Operand(JS_VALUE_TYPE)); |
| 3308 leave.Branch(ne); |
| 3309 // Store the value. |
| 3310 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); |
| 3311 // Update the write barrier. |
| 3312 __ mov(r2, Operand(JSValue::kValueOffset - kHeapObjectTag)); |
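| // RecordWrite expects the object in r1, the untagged field offset in r2
| // (hence subtracting the heap-object tag above), and a scratch register.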
| 3313 __ RecordWrite(r1, r2, r3); |
| 3314 // Leave. |
| 3315 leave.Bind(); |
| 3316 frame_->EmitPush(r0); |
| 3317 } |
| 3318 |
| 3319 |
| 3320 void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) { |
| 3321 VirtualFrame::SpilledScope spilled_scope(this); |
| 3322 ASSERT(args->length() == 1); |
| 3323 LoadAndSpill(args->at(0)); |
| 3324 frame_->EmitPop(r0); |
| 3325 __ tst(r0, Operand(kSmiTagMask)); |
| 3326 cc_reg_ = eq; |
| 3327 } |
| 3328 |
| 3329 |
| 3330 void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) { |
| 3331 VirtualFrame::SpilledScope spilled_scope(this); |
| 3332 // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc. |
| 3333 ASSERT_EQ(args->length(), 3); |
| 3334 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 3335 if (ShouldGenerateLog(args->at(0))) { |
| 3336 LoadAndSpill(args->at(1)); |
| 3337 LoadAndSpill(args->at(2)); |
| 3338 __ CallRuntime(Runtime::kLog, 2); |
| 3339 } |
| 3340 #endif |
| 3341 __ mov(r0, Operand(Factory::undefined_value())); |
| 3342 frame_->EmitPush(r0); |
| 3343 } |
| 3344 |
| 3345 |
| 3346 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { |
| 3347 VirtualFrame::SpilledScope spilled_scope(this); |
| 3348 ASSERT(args->length() == 1); |
| 3349 LoadAndSpill(args->at(0)); |
| 3350 frame_->EmitPop(r0); |
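| // A non-negative smi has both the tag bit and the sign bit clear, so a
| // single tst against the combined mask checks both conditions at once.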
| 3351 __ tst(r0, Operand(kSmiTagMask | 0x80000000)); |
| 3352 cc_reg_ = eq; |
| 3353 } |
| 3354 |
| 3355 |
| 3356 // This should generate code that performs a charCodeAt() call or returns |
| 3357 // undefined in order to trigger the slow case, Runtime_StringCharCodeAt. |
| 3358 // It is not yet implemented on ARM, so it always goes to the slow case. |
| 3359 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { |
| 3360 VirtualFrame::SpilledScope spilled_scope(this); |
| 3361 ASSERT(args->length() == 2); |
| 3362 __ mov(r0, Operand(Factory::undefined_value())); |
| 3363 frame_->EmitPush(r0); |
| 3364 } |
| 3365 |
| 3366 |
| 3367 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { |
| 3368 VirtualFrame::SpilledScope spilled_scope(this); |
| 3369 ASSERT(args->length() == 1); |
| 3370 LoadAndSpill(args->at(0)); |
| 3371 JumpTarget answer(this); |
| 3372 // We need the CC bits to come out as not_equal in the case where the |
| 3373 // object is a smi. This can't be done with the usual test opcode so |
| 3374 // we use XOR to get the right CC bits. |
| 3375 frame_->EmitPop(r0); |
| 3376 __ and_(r1, r0, Operand(kSmiTagMask)); |
| 3377 __ eor(r1, r1, Operand(kSmiTagMask), SetCC); |
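| // A smi (tag bit 0) leaves 1 in r1 and sets 'ne', so the branch below is
| // taken and the final 'eq' condition reads as false; a heap object leaves
| // 0 and falls through to the map check.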
| 3378 answer.Branch(ne); |
| 3379 // It is a heap object - get the map. |
| 3380 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 3381 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |
| 3382 // Check if the object is a JS array or not. |
| 3383 __ cmp(r1, Operand(JS_ARRAY_TYPE)); |
| 3384 answer.Bind(); |
| 3385 cc_reg_ = eq; |
| 3386 } |
| 3387 |
| 3388 |
| 3389 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { |
| 3390 VirtualFrame::SpilledScope spilled_scope(this); |
| 3391 ASSERT(args->length() == 0); |
| 3392 |
| 3393 // Seed the result with the formal parameters count, which will be used |
| 3394 // in case no arguments adaptor frame is found below the current frame. |
| 3395 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); |
| 3396 |
| 3397 // Call the shared stub to get to the arguments.length. |
| 3398 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH); |
| 3399 frame_->CallStub(&stub, 0); |
| 3400 frame_->EmitPush(r0); |
| 3401 } |
| 3402 |
| 3403 |
| 3404 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { |
| 3405 VirtualFrame::SpilledScope spilled_scope(this); |
| 3406 ASSERT(args->length() == 1); |
| 3407 |
| 3408 // Satisfy contract with ArgumentsAccessStub: |
| 3409 // Load the key into r1 and the formal parameters count into r0. |
| 3410 LoadAndSpill(args->at(0)); |
| 3411 frame_->EmitPop(r1); |
| 3412 __ mov(r0, Operand(Smi::FromInt(scope_->num_parameters()))); |
| 3413 |
| 3414 // Call the shared stub to get to arguments[key]. |
| 3415 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); |
| 3416 frame_->CallStub(&stub, 0); |
| 3417 frame_->EmitPush(r0); |
| 3418 } |
| 3419 |
| 3420 |
| 3421 void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) { |
| 3422 VirtualFrame::SpilledScope spilled_scope(this); |
| 3423 ASSERT(args->length() == 2); |
| 3424 |
| 3425 // Load the two objects into registers and perform the comparison. |
| 3426 LoadAndSpill(args->at(0)); |
| 3427 LoadAndSpill(args->at(1)); |
| 3428 frame_->EmitPop(r0); |
| 3429 frame_->EmitPop(r1); |
| 3430 __ cmp(r0, Operand(r1)); |
| 3431 cc_reg_ = eq; |
| 3432 } |
| 3433 |
| 3434 |
| 3435 void CodeGenerator::VisitCallRuntime(CallRuntime* node) { |
| 3436 #ifdef DEBUG |
| 3437 int original_height = frame_->height(); |
| 3438 #endif |
| 3439 VirtualFrame::SpilledScope spilled_scope(this); |
| 3440 if (CheckForInlineRuntimeCall(node)) { |
| 3441 ASSERT((has_cc() && frame_->height() == original_height) || |
| 3442 (!has_cc() && frame_->height() == original_height + 1)); |
| 3443 return; |
| 3444 } |
| 3445 |
| 3446 ZoneList<Expression*>* args = node->arguments(); |
| 3447 Comment cmnt(masm_, "[ CallRuntime"); |
| 3448 Runtime::Function* function = node->function(); |
| 3449 |
| 3450 if (function == NULL) { |
| 3451 // Prepare stack for calling JS runtime function. |
| 3452 __ mov(r0, Operand(node->name())); |
| 3453 frame_->EmitPush(r0); |
| 3454 // Push the builtins object found in the current global object. |
| 3455 __ ldr(r1, GlobalObject()); |
| 3456 __ ldr(r0, FieldMemOperand(r1, GlobalObject::kBuiltinsOffset)); |
| 3457 frame_->EmitPush(r0); |
| 3458 } |
| 3459 |
| 3460 // Push the arguments ("left-to-right"). |
| 3461 int arg_count = args->length(); |
| 3462 for (int i = 0; i < arg_count; i++) { |
| 3463 LoadAndSpill(args->at(i)); |
| 3464 } |
| 3465 |
| 3466 if (function == NULL) { |
| 3467 // Call the JS runtime function. |
| 3468 Handle<Code> stub = ComputeCallInitialize(arg_count); |
| 3469 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1); |
| 3470 __ ldr(cp, frame_->Context()); |
| 3471 frame_->Drop(); |
| 3472 frame_->EmitPush(r0); |
| 3473 } else { |
| 3474 // Call the C runtime function. |
| 3475 frame_->CallRuntime(function, arg_count); |
| 3476 frame_->EmitPush(r0); |
| 3477 } |
| 3478 ASSERT(frame_->height() == original_height + 1); |
| 3479 } |
| 3480 |
| 3481 |
| 3482 void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { |
| 3483 #ifdef DEBUG |
| 3484 int original_height = frame_->height(); |
| 3485 #endif |
| 3486 VirtualFrame::SpilledScope spilled_scope(this); |
| 3487 Comment cmnt(masm_, "[ UnaryOperation"); |
| 3488 |
| 3489 Token::Value op = node->op(); |
| 3490 |
| 3491 if (op == Token::NOT) { |
| 3492 LoadConditionAndSpill(node->expression(), |
| 3493 NOT_INSIDE_TYPEOF, |
| 3494 false_target(), |
| 3495 true_target(), |
| 3496 true); |
| 3497 cc_reg_ = NegateCondition(cc_reg_); |
| 3498 |
| 3499 } else if (op == Token::DELETE) { |
| 3500 Property* property = node->expression()->AsProperty(); |
| 3501 Variable* variable = node->expression()->AsVariableProxy()->AsVariable(); |
| 3502 if (property != NULL) { |
| 3503 LoadAndSpill(property->obj()); |
| 3504 LoadAndSpill(property->key()); |
| 3505 Result arg_count = allocator_->Allocate(r0); |
| 3506 ASSERT(arg_count.is_valid()); |
| 3507 __ mov(arg_count.reg(), Operand(1)); // not counting receiver |
| 3508 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2); |
| 3509 |
| 3510 } else if (variable != NULL) { |
| 3511 Slot* slot = variable->slot(); |
| 3512 if (variable->is_global()) { |
| 3513 LoadGlobal(); |
| 3514 __ mov(r0, Operand(variable->name())); |
| 3515 frame_->EmitPush(r0); |
| 3516 Result arg_count = allocator_->Allocate(r0); |
| 3517 ASSERT(arg_count.is_valid()); |
| 3518 __ mov(arg_count.reg(), Operand(1)); // not counting receiver |
| 3519 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2); |
| 3520 |
| 3521 } else if (slot != NULL && slot->type() == Slot::LOOKUP) { |
| 3522 // Look up the context holding the named variable.
| 3523 frame_->EmitPush(cp); |
| 3524 __ mov(r0, Operand(variable->name())); |
| 3525 frame_->EmitPush(r0); |
| 3526 frame_->CallRuntime(Runtime::kLookupContext, 2); |
| 3527 // r0: context |
| 3528 frame_->EmitPush(r0); |
| 3529 __ mov(r0, Operand(variable->name())); |
| 3530 frame_->EmitPush(r0); |
| 3531 Result arg_count = allocator_->Allocate(r0); |
| 3532 ASSERT(arg_count.is_valid()); |
| 3533 __ mov(arg_count.reg(), Operand(1)); // not counting receiver |
| 3534 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2); |
| 3535 |
| 3536 } else { |
| 3537 // Default: Deleting a non-global variable that was not
| 3538 // dynamically introduced yields false.
| 3539 __ mov(r0, Operand(Factory::false_value())); |
| 3540 } |
| 3541 |
| 3542 } else { |
| 3543 // Default: Result of deleting expressions is true. |
| 3544 LoadAndSpill(node->expression()); // may have side-effects |
| 3545 frame_->Drop(); |
| 3546 __ mov(r0, Operand(Factory::true_value())); |
| 3547 } |
| 3548 frame_->EmitPush(r0); |
| 3549 |
| 3550 } else if (op == Token::TYPEOF) { |
| 3551 // Special case for loading the typeof expression; see comment on |
| 3552 // LoadTypeofExpression(). |
| 3553 LoadTypeofExpression(node->expression()); |
| 3554 frame_->CallRuntime(Runtime::kTypeof, 1); |
| 3555 frame_->EmitPush(r0); // r0 has result |
| 3556 |
| 3557 } else { |
| 3558 LoadAndSpill(node->expression()); |
| 3559 frame_->EmitPop(r0); |
| 3560 switch (op) { |
| 3561 case Token::NOT: |
| 3562 case Token::DELETE: |
| 3563 case Token::TYPEOF: |
| 3564 UNREACHABLE(); // handled above |
| 3565 break; |
| 3566 |
| 3567 case Token::SUB: { |
| 3568 UnarySubStub stub; |
| 3569 frame_->CallStub(&stub, 0); |
| 3570 break; |
| 3571 } |
| 3572 |
| 3573 case Token::BIT_NOT: { |
| 3574 // smi check |
| 3575 JumpTarget smi_label(this); |
| 3576 JumpTarget continue_label(this); |
| 3577 __ tst(r0, Operand(kSmiTagMask)); |
| 3578 smi_label.Branch(eq); |
| 3579 |
| 3580 frame_->EmitPush(r0); |
| 3581 Result arg_count = allocator_->Allocate(r0); |
| 3582 ASSERT(arg_count.is_valid()); |
| 3583 __ mov(arg_count.reg(), Operand(0)); // not counting receiver |
| 3584 frame_->InvokeBuiltin(Builtins::BIT_NOT, CALL_JS, &arg_count, 1); |
| 3585 |
| 3586 continue_label.Jump(); |
| 3587 smi_label.Bind(); |
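| // For a smi the bitwise-not can be computed on the tagged value: mvn
| // inverts every bit including the (zero) tag bit, and clearing that bit
| // afterwards re-tags the result, since ~(x << 1) == ((~x) << 1) | 1.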
| 3588 __ mvn(r0, Operand(r0)); |
| 3589 __ bic(r0, r0, Operand(kSmiTagMask)); // bit-clear inverted smi-tag |
| 3590 continue_label.Bind(); |
| 3591 break; |
| 3592 } |
| 3593 |
| 3594 case Token::VOID: |
| 3595 // since the stack top is cached in r0, popping and then |
| 3596 // pushing a value can be done by just writing to r0. |
| 3597 __ mov(r0, Operand(Factory::undefined_value())); |
| 3598 break; |
| 3599 |
| 3600 case Token::ADD: { |
| 3601 // Smi check. |
| 3602 JumpTarget continue_label(this); |
| 3603 __ tst(r0, Operand(kSmiTagMask)); |
| 3604 continue_label.Branch(eq); |
| 3605 frame_->EmitPush(r0); |
| 3606 Result arg_count = allocator_->Allocate(r0); |
| 3607 ASSERT(arg_count.is_valid()); |
| 3608 __ mov(arg_count.reg(), Operand(0)); // not counting receiver |
| 3609 frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, &arg_count, 1); |
| 3610 continue_label.Bind(); |
| 3611 break; |
| 3612 } |
| 3613 default: |
| 3614 UNREACHABLE(); |
| 3615 } |
| 3616 frame_->EmitPush(r0); // r0 has result |
| 3617 } |
| 3618 ASSERT((has_cc() && frame_->height() == original_height) || |
| 3619 (!has_cc() && frame_->height() == original_height + 1)); |
| 3620 } |
| 3621 |
| 3622 |
| 3623 void CodeGenerator::VisitCountOperation(CountOperation* node) { |
| 3624 #ifdef DEBUG |
| 3625 int original_height = frame_->height(); |
| 3626 #endif |
| 3627 VirtualFrame::SpilledScope spilled_scope(this); |
| 3628 Comment cmnt(masm_, "[ CountOperation"); |
| 3629 |
| 3630 bool is_postfix = node->is_postfix(); |
| 3631 bool is_increment = node->op() == Token::INC; |
| 3632 |
| 3633 Variable* var = node->expression()->AsVariableProxy()->AsVariable(); |
| 3634 bool is_const = (var != NULL && var->mode() == Variable::CONST); |
| 3635 |
| 3636 // Postfix: Make room for the result. |
| 3637 if (is_postfix) { |
| 3638 __ mov(r0, Operand(0)); |
| 3639 frame_->EmitPush(r0); |
| 3640 } |
| 3641 |
| 3642 { Reference target(this, node->expression()); |
| 3643 if (target.is_illegal()) { |
| 3644 // Spoof the virtual frame to have the expected height (one higher |
| 3645 // than on entry). |
| 3646 if (!is_postfix) { |
| 3647 __ mov(r0, Operand(Smi::FromInt(0))); |
| 3648 frame_->EmitPush(r0); |
| 3649 } |
| 3650 ASSERT(frame_->height() == original_height + 1); |
| 3651 return; |
| 3652 } |
| 3653 target.GetValueAndSpill(NOT_INSIDE_TYPEOF); |
| 3654 frame_->EmitPop(r0); |
| 3655 |
| 3656 JumpTarget slow(this); |
| 3657 JumpTarget exit(this); |
| 3658 |
| 3659 // Load the value (1) into register r1. |
| 3660 __ mov(r1, Operand(Smi::FromInt(1))); |
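| // Smi::FromInt(1) is the tagged representation of 1, so adding it to (or
| // subtracting it from) a tagged smi in r0 yields a correctly tagged result.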
| 3661 |
| 3662 // Check for smi operand. |
| 3663 __ tst(r0, Operand(kSmiTagMask)); |
| 3664 slow.Branch(ne); |
| 3665 |
| 3666 // Postfix: Store the old value as the result. |
| 3667 if (is_postfix) { |
| 3668 __ str(r0, frame_->ElementAt(target.size())); |
| 3669 } |
| 3670 |
| 3671 // Perform optimistic increment/decrement. |
| 3672 if (is_increment) { |
| 3673 __ add(r0, r0, Operand(r1), SetCC); |
| 3674 } else { |
| 3675 __ sub(r0, r0, Operand(r1), SetCC); |
| 3676 } |
| 3677 |
| 3678 // If the increment/decrement didn't overflow, we're done. |
| 3679 exit.Branch(vc); |
| 3680 |
| 3681 // Revert optimistic increment/decrement. |
| 3682 if (is_increment) { |
| 3683 __ sub(r0, r0, Operand(r1)); |
| 3684 } else { |
| 3685 __ add(r0, r0, Operand(r1)); |
| 3686 } |
| 3687 |
| 3688 // Slow case: Convert to number. |
| 3689 slow.Bind(); |
| 3690 { |
| 3691 // Convert the operand to a number. |
| 3692 frame_->EmitPush(r0); |
| 3693 Result arg_count = allocator_->Allocate(r0); |
| 3694 ASSERT(arg_count.is_valid()); |
| 3695 __ mov(arg_count.reg(), Operand(0)); |
| 3696 frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, &arg_count, 1); |
| 3697 } |
| 3698 if (is_postfix) { |
| 3699 // Postfix: store to result (on the stack). |
| 3700 __ str(r0, frame_->ElementAt(target.size())); |
| 3701 } |
| 3702 |
| 3703 // Compute the new value. |
| 3704 __ mov(r1, Operand(Smi::FromInt(1))); |
| 3705 frame_->EmitPush(r0); |
| 3706 frame_->EmitPush(r1); |
| 3707 if (is_increment) { |
| 3708 frame_->CallRuntime(Runtime::kNumberAdd, 2); |
| 3709 } else { |
| 3710 frame_->CallRuntime(Runtime::kNumberSub, 2); |
| 3711 } |
| 3712 |
| 3713 // Store the new value in the target if not const. |
| 3714 exit.Bind(); |
| 3715 frame_->EmitPush(r0); |
| 3716 if (!is_const) target.SetValue(NOT_CONST_INIT); |
| 3717 } |
| 3718 |
| 3719 // Postfix: Discard the new value and use the old. |
| 3720 if (is_postfix) frame_->EmitPop(r0); |
| 3721 ASSERT(frame_->height() == original_height + 1); |
| 3722 } |
| 3723 |
| 3724 |
| 3725 void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) { |
| 3726 #ifdef DEBUG |
| 3727 int original_height = frame_->height(); |
| 3728 #endif |
| 3729 VirtualFrame::SpilledScope spilled_scope(this); |
| 3730 Comment cmnt(masm_, "[ BinaryOperation"); |
| 3731 Token::Value op = node->op(); |
| 3732 |
| 3733 // According to ECMA-262 section 11.11, page 58, the binary logical |
| 3734 // operators must yield the result of one of the two expressions |
| 3735 // before any ToBoolean() conversions. This means that the value |
| 3736 // produced by a && or || operator is not necessarily a boolean. |
| 3737 |
| 3738 // NOTE: If the left hand side produces a materialized value (not in |
| 3739 // the CC register), we force the right hand side to do the |
| 3740 // same. This is necessary because we may have to branch to the exit |
| 3741 // after evaluating the left hand side (due to the shortcut |
| 3742 // semantics), but the compiler must (statically) know if the result |
| 3743 // of compiling the binary operation is materialized or not. |
| 3744 |
| 3745 if (op == Token::AND) { |
| 3746 JumpTarget is_true(this); |
| 3747 LoadConditionAndSpill(node->left(), |
| 3748 NOT_INSIDE_TYPEOF, |
| 3749 &is_true, |
| 3750 false_target(), |
| 3751 false); |
| 3752 if (has_cc()) { |
| 3753 Branch(false, false_target()); |
| 3754 |
| 3755 // Evaluate right side expression. |
| 3756 is_true.Bind(); |
| 3757 LoadConditionAndSpill(node->right(), |
| 3758 NOT_INSIDE_TYPEOF, |
| 3759 true_target(), |
| 3760 false_target(), |
| 3761 false); |
| 3762 |
| 3763 } else { |
| 3764 JumpTarget pop_and_continue(this); |
| 3765 JumpTarget exit(this); |
| 3766 |
| 3767 __ ldr(r0, frame_->Top()); // dup the stack top |
| 3768 frame_->EmitPush(r0); |
| 3769 // Avoid popping the result if it converts to 'false' using the |
| 3770 // standard ToBoolean() conversion as described in ECMA-262, |
| 3771 // section 9.2, page 30. |
| 3772 ToBoolean(&pop_and_continue, &exit); |
| 3773 Branch(false, &exit); |
| 3774 |
| 3775 // Pop the result of evaluating the first part. |
| 3776 pop_and_continue.Bind(); |
| 3777 frame_->EmitPop(r0); |
| 3778 |
| 3779 // Evaluate right side expression. |
| 3780 is_true.Bind(); |
| 3781 LoadAndSpill(node->right()); |
| 3782 |
| 3783 // Exit (always with a materialized value). |
| 3784 exit.Bind(); |
| 3785 } |
| 3786 |
| 3787 } else if (op == Token::OR) { |
| 3788 JumpTarget is_false(this); |
| 3789 LoadConditionAndSpill(node->left(), |
| 3790 NOT_INSIDE_TYPEOF, |
| 3791 true_target(), |
| 3792 &is_false, |
| 3793 false); |
| 3794 if (has_cc()) { |
| 3795 Branch(true, true_target()); |
| 3796 |
| 3797 // Evaluate right side expression. |
| 3798 is_false.Bind(); |
| 3799 LoadConditionAndSpill(node->right(), |
| 3800 NOT_INSIDE_TYPEOF, |
| 3801 true_target(), |
| 3802 false_target(), |
| 3803 false); |
| 3804 |
| 3805 } else { |
| 3806 JumpTarget pop_and_continue(this); |
| 3807 JumpTarget exit(this); |
| 3808 |
| 3809 __ ldr(r0, frame_->Top()); |
| 3810 frame_->EmitPush(r0); |
| 3811 // Avoid popping the result if it converts to 'true' using the |
| 3812 // standard ToBoolean() conversion as described in ECMA-262, |
| 3813 // section 9.2, page 30. |
| 3814 ToBoolean(&exit, &pop_and_continue); |
| 3815 Branch(true, &exit); |
| 3816 |
| 3817 // Pop the result of evaluating the first part. |
| 3818 pop_and_continue.Bind(); |
| 3819 frame_->EmitPop(r0); |
| 3820 |
| 3821 // Evaluate right side expression. |
| 3822 is_false.Bind(); |
| 3823 LoadAndSpill(node->right()); |
| 3824 |
| 3825 // Exit (always with a materialized value). |
| 3826 exit.Bind(); |
| 3827 } |
| 3828 |
| 3829 } else { |
| 3830 // Optimize for the case where (at least) one of the expressions |
| 3831 // is a literal small integer. |
| 3832 Literal* lliteral = node->left()->AsLiteral(); |
| 3833 Literal* rliteral = node->right()->AsLiteral(); |
| 3834 // NOTE: The code below assumes that the slow cases (calls to runtime) |
| 3835 // never return a constant/immutable object. |
| 3836 bool overwrite_left = |
| 3837 (node->left()->AsBinaryOperation() != NULL && |
| 3838 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()); |
| 3839 bool overwrite_right = |
| 3840 (node->right()->AsBinaryOperation() != NULL && |
| 3841 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()); |
| 3842 |
| 3843 if (rliteral != NULL && rliteral->handle()->IsSmi()) { |
| 3844 LoadAndSpill(node->left()); |
| 3845 SmiOperation(node->op(), |
| 3846 rliteral->handle(), |
| 3847 false, |
| 3848 overwrite_right ? OVERWRITE_RIGHT : NO_OVERWRITE); |
| 3849 |
| 3850 } else if (lliteral != NULL && lliteral->handle()->IsSmi()) { |
| 3851 LoadAndSpill(node->right()); |
| 3852 SmiOperation(node->op(), |
| 3853 lliteral->handle(), |
| 3854 true, |
| 3855 overwrite_left ? OVERWRITE_LEFT : NO_OVERWRITE); |
| 3856 |
| 3857 } else { |
| 3858 OverwriteMode overwrite_mode = NO_OVERWRITE; |
| 3859 if (overwrite_left) { |
| 3860 overwrite_mode = OVERWRITE_LEFT; |
| 3861 } else if (overwrite_right) { |
| 3862 overwrite_mode = OVERWRITE_RIGHT; |
| 3863 } |
| 3864 LoadAndSpill(node->left()); |
| 3865 LoadAndSpill(node->right()); |
| 3866 GenericBinaryOperation(node->op(), overwrite_mode); |
| 3867 } |
| 3868 frame_->EmitPush(r0); |
| 3869 } |
| 3870 ASSERT((has_cc() && frame_->height() == original_height) || |
| 3871 (!has_cc() && frame_->height() == original_height + 1)); |
| 3872 } |
| 3873 |
| 3874 |
| 3875 void CodeGenerator::VisitThisFunction(ThisFunction* node) { |
| 3876 #ifdef DEBUG |
| 3877 int original_height = frame_->height(); |
| 3878 #endif |
| 3879 VirtualFrame::SpilledScope spilled_scope(this); |
| 3880 __ ldr(r0, frame_->Function()); |
| 3881 frame_->EmitPush(r0); |
| 3882 ASSERT(frame_->height() == original_height + 1); |
| 3883 } |
| 3884 |
| 3885 |
| 3886 void CodeGenerator::VisitCompareOperation(CompareOperation* node) { |
| 3887 #ifdef DEBUG |
| 3888 int original_height = frame_->height(); |
| 3889 #endif |
| 3890 VirtualFrame::SpilledScope spilled_scope(this); |
| 3891 Comment cmnt(masm_, "[ CompareOperation"); |
| 3892 |
| 3893 // Get the expressions from the node. |
| 3894 Expression* left = node->left(); |
| 3895 Expression* right = node->right(); |
| 3896 Token::Value op = node->op(); |
| 3897 |
| 3898 // To make null checks efficient, we check if either left or right is the |
| 3899 // literal 'null'. If so, we optimize the code by inlining a null check |
| 3900 // instead of calling the (very) general runtime routine for checking |
| 3901 // equality. |
| 3902 if (op == Token::EQ || op == Token::EQ_STRICT) { |
| 3903 bool left_is_null = |
| 3904 left->AsLiteral() != NULL && left->AsLiteral()->IsNull(); |
| 3905 bool right_is_null = |
| 3906 right->AsLiteral() != NULL && right->AsLiteral()->IsNull(); |
| 3907 // The 'null' value can only be equal to 'null' or 'undefined'. |
| 3908 if (left_is_null || right_is_null) { |
| 3909 LoadAndSpill(left_is_null ? right : left); |
| 3910 frame_->EmitPop(r0); |
| 3911 __ cmp(r0, Operand(Factory::null_value())); |
| 3912 |
| 3913 // The 'null' value is only equal to 'undefined' if using non-strict |
| 3914 // comparisons. |
| 3915 if (op != Token::EQ_STRICT) { |
| 3916 true_target()->Branch(eq); |
| 3917 |
| 3918 __ cmp(r0, Operand(Factory::undefined_value())); |
| 3919 true_target()->Branch(eq); |
| 3920 |
| 3921 __ tst(r0, Operand(kSmiTagMask)); |
| 3922 false_target()->Branch(eq); |
| 3923 |
| 3924 // It can be an undetectable object. |
| 3925 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 3926 __ ldrb(r0, FieldMemOperand(r0, Map::kBitFieldOffset)); |
| 3927 __ and_(r0, r0, Operand(1 << Map::kIsUndetectable)); |
| 3928 __ cmp(r0, Operand(1 << Map::kIsUndetectable)); |
| 3929 } |
| 3930 |
| 3931 cc_reg_ = eq; |
| 3932 ASSERT(has_cc() && frame_->height() == original_height); |
| 3933 return; |
| 3934 } |
| 3935 } |
| 3936 |
| 3937 // To make typeof testing for natives implemented in JavaScript really |
| 3938 // efficient, we generate special code for expressions of the form: |
| 3939 // 'typeof <expression> == <string>'. |
| 3940 UnaryOperation* operation = left->AsUnaryOperation(); |
| 3941 if ((op == Token::EQ || op == Token::EQ_STRICT) && |
| 3942 (operation != NULL && operation->op() == Token::TYPEOF) && |
| 3943 (right->AsLiteral() != NULL && |
| 3944 right->AsLiteral()->handle()->IsString())) { |
| 3945 Handle<String> check(String::cast(*right->AsLiteral()->handle())); |
| 3946 |
| 3947 // Load the operand, move it to register r1. |
| 3948 LoadTypeofExpression(operation->expression()); |
| 3949 frame_->EmitPop(r1); |
| 3950 |
| 3951 if (check->Equals(Heap::number_symbol())) { |
| 3952 __ tst(r1, Operand(kSmiTagMask)); |
| 3953 true_target()->Branch(eq); |
| 3954 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 3955 __ cmp(r1, Operand(Factory::heap_number_map())); |
| 3956 cc_reg_ = eq; |
| 3957 |
| 3958 } else if (check->Equals(Heap::string_symbol())) { |
| 3959 __ tst(r1, Operand(kSmiTagMask)); |
| 3960 false_target()->Branch(eq); |
| 3961 |
| 3962 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 3963 |
| 3964 // It can be an undetectable string object. |
| 3965 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); |
| 3966 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); |
| 3967 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); |
| 3968 false_target()->Branch(eq); |
| 3969 |
| 3970 __ ldrb(r2, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |
| 3971 __ cmp(r2, Operand(FIRST_NONSTRING_TYPE)); |
| 3972 cc_reg_ = lt; |
| 3973 |
| 3974 } else if (check->Equals(Heap::boolean_symbol())) { |
| 3975 __ cmp(r1, Operand(Factory::true_value())); |
| 3976 true_target()->Branch(eq); |
| 3977 __ cmp(r1, Operand(Factory::false_value())); |
| 3978 cc_reg_ = eq; |
| 3979 |
| 3980 } else if (check->Equals(Heap::undefined_symbol())) { |
| 3981 __ cmp(r1, Operand(Factory::undefined_value())); |
| 3982 true_target()->Branch(eq); |
| 3983 |
| 3984 __ tst(r1, Operand(kSmiTagMask)); |
| 3985 false_target()->Branch(eq); |
| 3986 |
| 3987 // It can be an undetectable object. |
| 3988 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 3989 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); |
| 3990 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); |
| 3991 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); |
| 3992 |
| 3993 cc_reg_ = eq; |
| 3994 |
| 3995 } else if (check->Equals(Heap::function_symbol())) { |
| 3996 __ tst(r1, Operand(kSmiTagMask)); |
| 3997 false_target()->Branch(eq); |
| 3998 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 3999 __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |
| 4000 __ cmp(r1, Operand(JS_FUNCTION_TYPE)); |
| 4001 cc_reg_ = eq; |
| 4002 |
| 4003 } else if (check->Equals(Heap::object_symbol())) { |
| 4004 __ tst(r1, Operand(kSmiTagMask)); |
| 4005 false_target()->Branch(eq); |
| 4006 |
| 4007 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 4008 __ cmp(r1, Operand(Factory::null_value())); |
| 4009 true_target()->Branch(eq); |
| 4010 |
| 4011 // It can be an undetectable object. |
| 4012 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); |
| 4013 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable)); |
| 4014 __ cmp(r1, Operand(1 << Map::kIsUndetectable)); |
| 4015 false_target()->Branch(eq); |
| 4016 |
| 4017 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
| 4018 __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE)); |
| 4019 false_target()->Branch(lt); |
| 4020 __ cmp(r2, Operand(LAST_JS_OBJECT_TYPE)); |
| 4021 cc_reg_ = le; |
| 4022 |
| 4023 } else { |
| 4024 // Uncommon case: typeof testing against a string literal that is |
| 4025 // never returned from the typeof operator. |
| 4026 false_target()->Jump(); |
| 4027 } |
| 4028 ASSERT(!has_valid_frame() || |
| 4029 (has_cc() && frame_->height() == original_height)); |
| 4030 return; |
| 4031 } |
| 4032 |
| 4033 LoadAndSpill(left); |
| 4034 LoadAndSpill(right); |
| 4035 switch (op) { |
| 4036 case Token::EQ: |
| 4037 Comparison(eq, false); |
| 4038 break; |
| 4039 |
| 4040 case Token::LT: |
| 4041 Comparison(lt); |
| 4042 break; |
| 4043 |
| 4044 case Token::GT: |
| 4045 Comparison(gt); |
| 4046 break; |
| 4047 |
| 4048 case Token::LTE: |
| 4049 Comparison(le); |
| 4050 break; |
| 4051 |
| 4052 case Token::GTE: |
| 4053 Comparison(ge); |
| 4054 break; |
| 4055 |
| 4056 case Token::EQ_STRICT: |
| 4057 Comparison(eq, true); |
| 4058 break; |
| 4059 |
| 4060 case Token::IN: { |
| 4061 Result arg_count = allocator_->Allocate(r0); |
| 4062 ASSERT(arg_count.is_valid()); |
| 4063 __ mov(arg_count.reg(), Operand(1)); // not counting receiver |
| 4064 Result result = frame_->InvokeBuiltin(Builtins::IN, |
| 4065 CALL_JS, |
| 4066 &arg_count, |
| 4067 2); |
| 4068 frame_->EmitPush(result.reg()); |
| 4069 break; |
| 4070 } |
| 4071 |
| 4072 case Token::INSTANCEOF: { |
| 4073 Result arg_count = allocator_->Allocate(r0); |
| 4074 ASSERT(arg_count.is_valid()); |
| 4075 __ mov(arg_count.reg(), Operand(1)); // not counting receiver |
| 4076 Result result = frame_->InvokeBuiltin(Builtins::INSTANCE_OF, |
| 4077 CALL_JS, |
| 4078 &arg_count, |
| 4079 2); |
| 4080 __ tst(result.reg(), Operand(result.reg())); |
| 4081 cc_reg_ = eq; |
| 4082 break; |
| 4083 } |
| 4084 |
| 4085 default: |
| 4086 UNREACHABLE(); |
| 4087 } |
| 4088 ASSERT((has_cc() && frame_->height() == original_height) || |
| 4089 (!has_cc() && frame_->height() == original_height + 1)); |
| 4090 } |
| 4091 |
| 4092 |
| 4093 #ifdef DEBUG |
| 4094 bool CodeGenerator::HasValidEntryRegisters() { return true; } |
| 4095 #endif |
| 4096 |
| 4097 |
| 4098 #undef __ |
| 4099 #define __ ACCESS_MASM(masm) |
| 4100 |
| 4101 |
| 4102 Handle<String> Reference::GetName() { |
| 4103 ASSERT(type_ == NAMED); |
| 4104 Property* property = expression_->AsProperty(); |
| 4105 if (property == NULL) { |
| 4106 // Global variable reference treated as a named property reference. |
| 4107 VariableProxy* proxy = expression_->AsVariableProxy(); |
| 4108 ASSERT(proxy->AsVariable() != NULL); |
| 4109 ASSERT(proxy->AsVariable()->is_global()); |
| 4110 return proxy->name(); |
| 4111 } else { |
| 4112 Literal* raw_name = property->key()->AsLiteral(); |
| 4113 ASSERT(raw_name != NULL); |
| 4114 return Handle<String>(String::cast(*raw_name->handle())); |
| 4115 } |
| 4116 } |
| 4117 |
| 4118 |
| 4119 void Reference::GetValueAndSpill(TypeofState typeof_state) { |
| 4120 ASSERT(cgen_->in_spilled_code()); |
| 4121 cgen_->set_in_spilled_code(false); |
| 4122 GetValue(typeof_state); |
| 4123 cgen_->frame()->SpillAll(); |
| 4124 cgen_->set_in_spilled_code(true); |
| 4125 } |
| 4126 |
| 4127 |
| 4128 void Reference::GetValue(TypeofState typeof_state) { |
| 4129 ASSERT(!cgen_->in_spilled_code()); |
| 4130 ASSERT(cgen_->HasValidEntryRegisters()); |
| 4131 ASSERT(!is_illegal()); |
| 4132 ASSERT(!cgen_->has_cc()); |
| 4133 MacroAssembler* masm = cgen_->masm(); |
| 4134 Property* property = expression_->AsProperty(); |
| 4135 if (property != NULL) { |
| 4136 cgen_->CodeForSourcePosition(property->position()); |
| 4137 } |
| 4138 |
| 4139 switch (type_) { |
| 4140 case SLOT: { |
| 4141 Comment cmnt(masm, "[ Load from Slot"); |
| 4142 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); |
| 4143 ASSERT(slot != NULL); |
| 4144 cgen_->LoadFromSlot(slot, typeof_state); |
| 4145 break; |
| 4146 } |
| 4147 |
| 4148 case NAMED: { |
| 4149 // TODO(1241834): Make sure that it is safe to ignore the
| 4150 // distinction between expressions in a typeof and not in a typeof. If |
| 4151 // there is a chance that reference errors can be thrown below, we |
| 4152 // must distinguish between the two kinds of loads (typeof expression |
| 4153 // loads must not throw a reference error). |
| 4154 VirtualFrame* frame = cgen_->frame(); |
| 4155 Comment cmnt(masm, "[ Load from named Property"); |
| 4156 Handle<String> name(GetName()); |
| 4157 Variable* var = expression_->AsVariableProxy()->AsVariable(); |
| 4158 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
| 4159 // Set up the name register.
| 4160 Result name_reg = cgen_->allocator()->Allocate(r2); |
| 4161 ASSERT(name_reg.is_valid()); |
| 4162 __ mov(name_reg.reg(), Operand(name)); |
| 4163 ASSERT(var == NULL || var->is_global()); |
| 4164 RelocInfo::Mode rmode = (var == NULL) |
| 4165 ? RelocInfo::CODE_TARGET |
| 4166 : RelocInfo::CODE_TARGET_CONTEXT; |
| 4167 Result answer = frame->CallCodeObject(ic, rmode, &name_reg, 0); |
| 4168 frame->EmitPush(answer.reg()); |
| 4169 break; |
| 4170 } |
| 4171 |
| 4172 case KEYED: { |
| 4173 // TODO(1241834): Make sure that it is safe to ignore the
| 4174 // distinction between expressions in a typeof and not in a typeof. |
| 4175 |
| 4176 // TODO(181): Implement inlined version of array indexing once |
| 4177 // loop nesting is properly tracked on ARM. |
| 4178 VirtualFrame* frame = cgen_->frame(); |
| 4179 Comment cmnt(masm, "[ Load from keyed Property"); |
| 4180 ASSERT(property != NULL); |
| 4181 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
| 4182 Variable* var = expression_->AsVariableProxy()->AsVariable(); |
| 4183 ASSERT(var == NULL || var->is_global()); |
| 4184 RelocInfo::Mode rmode = (var == NULL) |
| 4185 ? RelocInfo::CODE_TARGET |
| 4186 : RelocInfo::CODE_TARGET_CONTEXT; |
| 4187 Result answer = frame->CallCodeObject(ic, rmode, 0); |
| 4188 frame->EmitPush(answer.reg()); |
| 4189 break; |
| 4190 } |
| 4191 |
| 4192 default: |
| 4193 UNREACHABLE(); |
| 4194 } |
| 4195 } |
| 4196 |
| 4197 |
| 4198 void Reference::SetValue(InitState init_state) { |
| 4199 ASSERT(!is_illegal()); |
| 4200 ASSERT(!cgen_->has_cc()); |
| 4201 MacroAssembler* masm = cgen_->masm(); |
| 4202 VirtualFrame* frame = cgen_->frame(); |
| 4203 Property* property = expression_->AsProperty(); |
| 4204 if (property != NULL) { |
| 4205 cgen_->CodeForSourcePosition(property->position()); |
| 4206 } |
| 4207 |
| 4208 switch (type_) { |
| 4209 case SLOT: { |
| 4210 Comment cmnt(masm, "[ Store to Slot"); |
| 4211 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); |
| 4212 ASSERT(slot != NULL); |
| 4213 if (slot->type() == Slot::LOOKUP) { |
| 4214 ASSERT(slot->var()->is_dynamic()); |
| 4215 |
| 4216 // For now, just do a runtime call. |
| 4217 frame->EmitPush(cp); |
| 4218 __ mov(r0, Operand(slot->var()->name())); |
| 4219 frame->EmitPush(r0); |
| 4220 |
| 4221 if (init_state == CONST_INIT) { |
| 4222 // Same as the case for a normal store, but ignores attribute |
| 4223 // (e.g. READ_ONLY) of context slot so that we can initialize |
| 4224 // const properties (introduced via eval("const foo = (some |
| 4225 // expr);")). Also, uses the current function context instead of |
| 4226 // the top context. |
| 4227 // |
| 4228 // Note that we must declare foo upon entry to eval(), via a
| 4229 // context slot declaration, but we cannot initialize it at the |
| 4230 // same time, because the const declaration may be at the end of |
| 4231 // the eval code (sigh...) and the const variable may have been |
| 4232 // used before (where its value is 'undefined'). Thus, we can only |
| 4233 // do the initialization when we actually encounter the expression |
| 4234 // and when the expression operands are defined and valid, and |
| 4235 // thus we need to split this into two operations: declaration of the
| 4236 // context slot followed by initialization. |
| 4237 frame->CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
| 4238 } else { |
| 4239 frame->CallRuntime(Runtime::kStoreContextSlot, 3); |
| 4240 } |
| 4241 // Storing a variable must keep the (new) value on the expression |
| 4242 // stack. This is necessary for compiling assignment expressions. |
| 4243 frame->EmitPush(r0); |
| 4244 |
| 4245 } else { |
| 4246 ASSERT(!slot->var()->is_dynamic()); |
| 4247 |
| 4248 JumpTarget exit(cgen_); |
| 4249 if (init_state == CONST_INIT) { |
| 4250 ASSERT(slot->var()->mode() == Variable::CONST); |
| 4251 // Only the first const initialization must be executed (the slot |
| 4252 // still contains 'the hole' value). When the assignment is |
| 4253 // executed, the code is identical to a normal store (see below). |
| 4254 Comment cmnt(masm, "[ Init const"); |
| 4255 __ ldr(r2, cgen_->SlotOperand(slot, r2)); |
| 4256 __ cmp(r2, Operand(Factory::the_hole_value())); |
| 4257 exit.Branch(ne); |
| 4258 } |
| 4259 |
| 4260 // We must execute the store. Storing a variable must keep the |
| 4261 // (new) value on the stack. This is necessary for compiling |
| 4262 // assignment expressions. |
| 4263 // |
| 4264 // Note: We will reach here even with slot->var()->mode() == |
| 4265 // Variable::CONST because of const declarations which will |
| 4266 // initialize consts to 'the hole' value and by doing so, end up |
| 4267 // calling this code. r2 may be loaded with context; used below in |
| 4268 // RecordWrite. |
| 4269 frame->EmitPop(r0); |
| 4270 __ str(r0, cgen_->SlotOperand(slot, r2)); |
| 4271 frame->EmitPush(r0); |
| 4272 if (slot->type() == Slot::CONTEXT) { |
| 4273 // Skip write barrier if the written value is a smi. |
| 4274 __ tst(r0, Operand(kSmiTagMask)); |
| 4275 exit.Branch(eq); |
| 4276 // r2 is loaded with context when calling SlotOperand above. |
| 4277 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; |
| 4278 __ mov(r3, Operand(offset)); |
| 4279 __ RecordWrite(r2, r3, r1); |
| 4280 } |
| 4281 // If we definitely did not jump over the assignment, we do not need |
| 4282 // to bind the exit label. Doing so can defeat peephole |
| 4283 // optimization. |
| 4284 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) { |
| 4285 exit.Bind(); |
| 4286 } |
| 4287 } |
| 4288 break; |
| 4289 } |
| 4290 |
| 4291 case NAMED: { |
| 4292 Comment cmnt(masm, "[ Store to named Property"); |
| 4293 // Call the appropriate IC code. |
| 4294 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
| 4295 Handle<String> name(GetName()); |
| 4296 |
| 4297 Result value = cgen_->allocator()->Allocate(r0); |
| 4298 ASSERT(value.is_valid()); |
| 4299 frame->EmitPop(value.reg()); |
| 4300 |
| 4301 // Set up the name register.
| 4302 Result property_name = cgen_->allocator()->Allocate(r2); |
| 4303 ASSERT(property_name.is_valid()); |
| 4304 __ mov(property_name.reg(), Operand(name)); |
| 4305 Result answer = frame->CallCodeObject(ic, |
| 4306 RelocInfo::CODE_TARGET, |
| 4307 &value, |
| 4308 &property_name, |
| 4309 0); |
| 4310 frame->EmitPush(answer.reg()); |
| 4311 break; |
| 4312 } |
| 4313 |
| 4314 case KEYED: { |
| 4315 Comment cmnt(masm, "[ Store to keyed Property"); |
| 4316 Property* property = expression_->AsProperty(); |
| 4317 ASSERT(property != NULL); |
| 4318 cgen_->CodeForSourcePosition(property->position()); |
| 4319 |
| 4320 // Call IC code. |
| 4321 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
| 4322 // TODO(1222589): Make the IC grab the values from the stack. |
| 4323 Result value = cgen_->allocator()->Allocate(r0); |
| 4324 ASSERT(value.is_valid()); |
| 4325 frame->EmitPop(value.reg()); // value |
| 4326 Result result = |
| 4327 frame->CallCodeObject(ic, RelocInfo::CODE_TARGET, &value, 0); |
| 4328 frame->EmitPush(result.reg()); |
| 4329 break; |
| 4330 } |
| 4331 |
| 4332 default: |
| 4333 UNREACHABLE(); |
| 4334 } |
| 4335 } |
| 4336 |
| 4337 |
| 4338 static void HandleBinaryOpSlowCases(MacroAssembler* masm, |
| 4339 Label* not_smi, |
| 4340 const Builtins::JavaScript& builtin, |
| 4341 Token::Value operation, |
| 4342 int swi_number, |
| 4343 OverwriteMode mode) { |
| 4344 Label slow; |
| 4345 if (mode == NO_OVERWRITE) { |
| 4346 __ bind(not_smi); |
| 4347 } |
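| // Without an overwritable operand there is nowhere to store a double
| // result in place, so non-smi inputs fall straight through to the generic
| // builtin call below.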
| 4348 __ bind(&slow); |
| 4349 __ push(r1); |
| 4350 __ push(r0); |
| 4351 __ mov(r0, Operand(1)); // Set number of arguments. |
| 4352 __ InvokeBuiltin(builtin, JUMP_JS); // Tail call. |
| 4353 |
| 4354 // Could it be a double-double op? If we already have a place to put |
| 4355 // the answer then we can do the op and skip the builtin and runtime call. |
| 4356 if (mode != NO_OVERWRITE) { |
| 4357 __ bind(not_smi); |
| 4358 __ tst(r0, Operand(kSmiTagMask)); |
| 4359 __ b(eq, &slow); // We can't handle a Smi-double combination yet. |
| 4360 __ tst(r1, Operand(kSmiTagMask)); |
| 4361 __ b(eq, &slow); // We can't handle a Smi-double combination yet. |
| 4362 // Get map of r0 into r2. |
| 4363 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 4364 // Get type of r0 into r3. |
| 4365 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
| 4366 __ cmp(r3, Operand(HEAP_NUMBER_TYPE)); |
| 4367 __ b(ne, &slow); |
| 4368 // Get type of r1 into r3. |
| 4369 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 4370 // Check they are both the same map (heap number map). |
| 4371 __ cmp(r2, r3); |
| 4372 __ b(ne, &slow); |
| 4373 // Both are doubles. |
| 4374 // Calling convention says that second double is in r2 and r3. |
| 4375 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
| 4376 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kValueOffset + kPointerSize)); |
| 4377 __ push(lr); |
| 4378 if (mode == OVERWRITE_LEFT) { |
| 4379 __ push(r1); |
| 4380 } else { |
| 4381 __ push(r0); |
| 4382 } |
| 4383 // Calling convention says that first double is in r0 and r1. |
| 4384 __ ldr(r0, FieldMemOperand(r1, HeapNumber::kValueOffset)); |
| 4385 __ ldr(r1, FieldMemOperand(r1, HeapNumber::kValueOffset + kPointerSize)); |
| 4386 // Call C routine that may not cause GC or other trouble. |
| 4387 __ mov(r5, Operand(ExternalReference::double_fp_operation(operation))); |
| 4388 #if !defined(__arm__) |
| 4389 // Notify the simulator that we are calling a floating-point routine in C.
| 4390 __ swi(swi_number); |
| 4391 #else |
| 4392 // Actually call the floating-point routine written in C.
| 4393 __ Call(r5); |
| 4394 #endif |
| 4395 // Store answer in the overwritable heap number. |
| 4396 __ pop(r4); |
| 4397 #if !defined(__ARM_EABI__) && defined(__arm__) |
| 4398 // Double returned in fp coprocessor register 0 and 1, encoded as register |
| 4399 // cr8. Offsets must be divisible by 4 for coprocessor so we need to |
| 4400 // substract the tag from r4. |
| 4401 __ sub(r5, r4, Operand(kHeapObjectTag)); |
| 4402 __ stc(p1, cr8, MemOperand(r5, HeapNumber::kValueOffset)); |
| 4403 #else |
| 4404 // Double returned in core registers r0 and r1.
| 4405 __ str(r0, FieldMemOperand(r4, HeapNumber::kValueOffset)); |
| 4406 __ str(r1, FieldMemOperand(r4, HeapNumber::kValueOffset + kPointerSize)); |
| 4407 #endif |
| 4408 __ mov(r0, Operand(r4)); |
| 4409 // And we are done. |
| 4410 __ pop(pc); |
| 4411 } |
| 4412 } |
| 4413 |
| 4414 |
| 4415 void GenericBinaryOpStub::Generate(MacroAssembler* masm) { |
| 4416 // r1 : x |
| 4417 // r0 : y |
| 4418 // result : r0 |
| 4419 |
| 4420 // All ops need to know whether we are dealing with two Smis. Set up r2 to |
| 4421 // tell us that. |
| 4422 __ orr(r2, r1, Operand(r0)); // r2 = x | y; |
| 4423 |
| 4424 switch (op_) { |
| 4425 case Token::ADD: { |
| 4426 Label not_smi; |
| 4427 // Fast path. |
| 4428 ASSERT(kSmiTag == 0); // Adjust code below. |
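| // A smi is the integer shifted left by one with a zero tag bit, so two
| // tagged smis can be added directly: the sum is already tagged and a
| // 31-bit overflow shows up as a 32-bit overflow (V flag) on the add.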
| 4429 __ tst(r2, Operand(kSmiTagMask)); |
| 4430 __ b(ne, ¬_smi); |
| 4431 __ add(r0, r1, Operand(r0), SetCC); // Add y optimistically. |
| 4432 // Return if no overflow. |
| 4433 __ Ret(vc); |
| 4434 __ sub(r0, r0, Operand(r1)); // Revert optimistic add. |
| 4435 |
| 4436 HandleBinaryOpSlowCases(masm, |
| 4437 ¬_smi, |
| 4438 Builtins::ADD, |
| 4439 Token::ADD, |
| 4440 assembler::arm::simulator_fp_add, |
| 4441 mode_); |
| 4442 break; |
| 4443 } |
| 4444 |
| 4445 case Token::SUB: { |
| 4446 Label not_smi; |
| 4447 // Fast path. |
| 4448 ASSERT(kSmiTag == 0); // Adjust code below. |
| 4449 __ tst(r2, Operand(kSmiTagMask)); |
| 4450 __ b(ne, ¬_smi); |
| 4451 __ sub(r0, r1, Operand(r0), SetCC); // Subtract y optimistically. |
| 4452 // Return if no overflow. |
| 4453 __ Ret(vc); |
| 4454 __ sub(r0, r1, Operand(r0)); // Revert optimistic subtract. |
| 4455 |
| 4456 HandleBinaryOpSlowCases(masm, |
| 4457 ¬_smi, |
| 4458 Builtins::SUB, |
| 4459 Token::SUB, |
| 4460 assembler::arm::simulator_fp_sub, |
| 4461 mode_); |
| 4462 break; |
| 4463 } |
| 4464 |
| 4465 case Token::MUL: { |
| 4466 Label not_smi, slow; |
| 4467 ASSERT(kSmiTag == 0); // adjust code below |
| 4468 __ tst(r2, Operand(kSmiTagMask)); |
| 4469 __ b(ne, ¬_smi); |
| 4470 // Remove tag from one operand (but keep sign), so that result is Smi. |
| 4471 __ mov(ip, Operand(r0, ASR, kSmiTagSize)); |
| 4472 // Do multiplication |
| 4473 __ smull(r3, r2, r1, ip); // r3 = lower 32 bits of ip*r1. |
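| // r2:r3 now holds the full 64-bit product.  It fits in 32 bits exactly
| // when r2 equals the sign-extension of r3, which the next two instructions
| // verify.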
| 4474 // Go slow on overflows (overflow bit is not set). |
| 4475 __ mov(ip, Operand(r3, ASR, 31)); |
| 4476 __ cmp(ip, Operand(r2)); // no overflow if higher 33 bits are identical |
| 4477 __ b(ne, &slow); |
| 4478 // Go slow on zero result to handle -0. |
| 4479 __ tst(r3, Operand(r3)); |
| 4480 __ mov(r0, Operand(r3), LeaveCC, ne); |
| 4481 __ Ret(ne); |
| 4482 // Slow case. |
| 4483 __ bind(&slow); |
| 4484 |
| 4485 HandleBinaryOpSlowCases(masm, |
| 4486 ¬_smi, |
| 4487 Builtins::MUL, |
| 4488 Token::MUL, |
| 4489 assembler::arm::simulator_fp_mul, |
| 4490 mode_); |
| 4491 break; |
| 4492 } |
| 4493 |
| 4494 case Token::BIT_OR: |
| 4495 case Token::BIT_AND: |
| 4496 case Token::BIT_XOR: { |
| 4497 Label slow; |
| 4498 ASSERT(kSmiTag == 0); // adjust code below |
| 4499 __ tst(r2, Operand(kSmiTagMask)); |
| 4500 __ b(ne, &slow); |
| 4501 switch (op_) { |
| 4502 case Token::BIT_OR: __ orr(r0, r0, Operand(r1)); break; |
| 4503 case Token::BIT_AND: __ and_(r0, r0, Operand(r1)); break; |
| 4504 case Token::BIT_XOR: __ eor(r0, r0, Operand(r1)); break; |
| 4505 default: UNREACHABLE(); |
| 4506 } |
| 4507 __ Ret(); |
| 4508 __ bind(&slow); |
| 4509 __ push(r1); // restore stack |
| 4510 __ push(r0); |
| 4511 __ mov(r0, Operand(1)); // 1 argument (not counting receiver). |
| 4512 switch (op_) { |
| 4513 case Token::BIT_OR: |
| 4514 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS); |
| 4515 break; |
| 4516 case Token::BIT_AND: |
| 4517 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS); |
| 4518 break; |
| 4519 case Token::BIT_XOR: |
| 4520 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS); |
| 4521 break; |
| 4522 default: |
| 4523 UNREACHABLE(); |
| 4524 } |
| 4525 break; |
| 4526 } |
| 4527 |
| 4528 case Token::SHL: |
| 4529 case Token::SHR: |
| 4530 case Token::SAR: { |
| 4531 Label slow; |
| 4532 ASSERT(kSmiTag == 0); // adjust code below |
| 4533 __ tst(r2, Operand(kSmiTagMask)); |
| 4534 __ b(ne, &slow); |
| 4535 // remove tags from operands (but keep sign) |
| 4536 __ mov(r3, Operand(r1, ASR, kSmiTagSize)); // x |
| 4537 __ mov(r2, Operand(r0, ASR, kSmiTagSize)); // y |
| 4538 // use only the 5 least significant bits of the shift count |
| 4539 __ and_(r2, r2, Operand(0x1f)); |
| 4540 // perform operation |
| 4541 switch (op_) { |
| 4542 case Token::SAR: |
| 4543 __ mov(r3, Operand(r3, ASR, r2)); |
| 4544 // no checks of result necessary |
| 4545 break; |
| 4546 |
| 4547 case Token::SHR: |
| 4548 __ mov(r3, Operand(r3, LSR, r2)); |
| 4549 // check that the *unsigned* result fits in a smi |
| 4550 // neither of the two high-order bits can be set: |
| 4551 // - 0x80000000: high bit would be lost when smi tagging |
| 4552 // - 0x40000000: this number would convert to negative when |
| 4553 // smi tagging.  These two cases can only happen with shifts
| 4554 // by 0 or 1 when handed a valid smi.
| 4555 __ and_(r2, r3, Operand(0xc0000000), SetCC); |
| 4556 __ b(ne, &slow); |
| 4557 break; |
| 4558 |
| 4559 case Token::SHL: |
| 4560 __ mov(r3, Operand(r3, LSL, r2)); |
| 4561 // check that the *signed* result fits in a smi |
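| // Adding 0x40000000 maps the valid smi range [-0x40000000, 0x3fffffff]
| // onto the non-negative 32-bit integers; anything outside that range
| // yields a negative sum, which the 'mi' branch below catches.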
| 4562 __ add(r2, r3, Operand(0x40000000), SetCC); |
| 4563 __ b(mi, &slow); |
| 4564 break; |
| 4565 |
| 4566 default: UNREACHABLE(); |
| 4567 } |
| 4568 // tag result and store it in r0 |
| 4569 ASSERT(kSmiTag == 0); // adjust code below |
| 4570 __ mov(r0, Operand(r3, LSL, kSmiTagSize)); |
| 4571 __ Ret(); |
| 4572 // slow case |
| 4573 __ bind(&slow); |
| 4574 __ push(r1); // restore stack |
| 4575 __ push(r0); |
| 4576 __ mov(r0, Operand(1)); // 1 argument (not counting receiver). |
| 4577 switch (op_) { |
| 4578 case Token::SAR: __ InvokeBuiltin(Builtins::SAR, JUMP_JS); break; |
| 4579 case Token::SHR: __ InvokeBuiltin(Builtins::SHR, JUMP_JS); break; |
| 4580 case Token::SHL: __ InvokeBuiltin(Builtins::SHL, JUMP_JS); break; |
| 4581 default: UNREACHABLE(); |
| 4582 } |
| 4583 break; |
| 4584 } |
| 4585 |
| 4586 default: UNREACHABLE(); |
| 4587 } |
| 4588 // This code should be unreachable. |
| 4589 __ stop("Unreachable"); |
| 4590 } |
| 4591 |
| 4592 |
| 4593 void StackCheckStub::Generate(MacroAssembler* masm) { |
| 4594 Label within_limit; |
| 4595 __ mov(ip, Operand(ExternalReference::address_of_stack_guard_limit())); |
| 4596 __ ldr(ip, MemOperand(ip)); |
| 4597 __ cmp(sp, Operand(ip)); |
| 4598 __ b(hs, &within_limit); |
| 4599 // Do tail-call to runtime routine. |
| 4600 __ push(r0); |
| 4601 __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1); |
| 4602 __ bind(&within_limit); |
| 4603 |
| 4604 __ StubReturn(1); |
| 4605 } |
| 4606 |
| 4607 |
| 4608 void UnarySubStub::Generate(MacroAssembler* masm) { |
| 4609 Label undo; |
| 4610 Label slow; |
| 4611 Label done; |
| 4612 |
| 4613 // Enter runtime system if the value is not a smi. |
| 4614 __ tst(r0, Operand(kSmiTagMask)); |
| 4615 __ b(ne, &slow); |
| 4616 |
| 4617 // Enter runtime system if the value of the expression is zero |
| 4618 // to make sure that we switch between 0 and -0. |
| 4619 __ cmp(r0, Operand(0)); |
| 4620 __ b(eq, &slow); |
| 4621 |
| 4622 // The value of the expression is a smi that is not zero. Try |
| 4623 // optimistic subtraction '0 - value'. |
| 4624 __ rsb(r1, r0, Operand(0), SetCC); |
| 4625 __ b(vs, &slow); |
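|   // The overflow check matters for one input only (a sketch, assuming a |
|   // 1-bit smi tag): r0 == 0x80000000 encodes -2^30, and 0 - 0x80000000 |
|   // wraps back to 0x80000000 with the V flag set, so that single case is |
|   // left to the UNARY_MINUS builtin in the slow path below. |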
| 4626 |
| 4627 // If result is a smi we are done. |
| 4628 __ tst(r1, Operand(kSmiTagMask)); |
| 4629 __ mov(r0, Operand(r1), LeaveCC, eq); // conditionally set r0 to result |
| 4630 __ b(eq, &done); |
| 4631 |
| 4632 // Enter runtime system. |
| 4633 __ bind(&slow); |
| 4634 __ push(r0); |
| 4635 __ mov(r0, Operand(0)); // set number of arguments |
| 4636 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS); |
| 4637 |
| 4638 __ bind(&done); |
| 4639 __ StubReturn(1); |
| 4640 } |
| 4641 |
| 4642 |
| 4643 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
| 4644 // r0 holds exception |
| 4645 ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize); // adjust this code |
| 4646 __ mov(r3, Operand(ExternalReference(Top::k_handler_address))); |
| 4647 __ ldr(sp, MemOperand(r3)); |
| 4648 __ pop(r2); // pop next in chain |
| 4649 __ str(r2, MemOperand(r3)); |
| 4650 // restore parameter- and frame-pointer and pop state. |
| 4651 __ ldm(ia_w, sp, r3.bit() | pp.bit() | fp.bit()); |
| 4652 // Before returning we restore the context from the frame pointer if not NULL. |
| 4653 // The frame pointer is NULL in the exception handler of a JS entry frame. |
| 4654 __ cmp(fp, Operand(0)); |
| 4655 // Set cp to NULL if fp is NULL. |
| 4656 __ mov(cp, Operand(0), LeaveCC, eq); |
| 4657 // Restore cp otherwise. |
| 4658 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); |
| 4659 #ifdef DEBUG |
| 4660 if (FLAG_debug_code) { |
| 4661 __ mov(lr, Operand(pc)); |
| 4662 } |
| 4663 #endif |
| 4664 __ pop(pc); |
| 4665 } |
| 4666 |
| 4667 |
| 4668 void CEntryStub::GenerateThrowOutOfMemory(MacroAssembler* masm) { |
| 4669 // Fetch top stack handler. |
| 4670 __ mov(r3, Operand(ExternalReference(Top::k_handler_address))); |
| 4671 __ ldr(r3, MemOperand(r3)); |
| 4672 |
| 4673 // Unwind the handlers until the ENTRY handler is found. |
| 4674 Label loop, done; |
| 4675 __ bind(&loop); |
| 4676 // Load the type of the current stack handler. |
| 4677 const int kStateOffset = StackHandlerConstants::kAddressDisplacement + |
| 4678 StackHandlerConstants::kStateOffset; |
| 4679 __ ldr(r2, MemOperand(r3, kStateOffset)); |
| 4680 __ cmp(r2, Operand(StackHandler::ENTRY)); |
| 4681 __ b(eq, &done); |
| 4682 // Fetch the next handler in the list. |
| 4683 const int kNextOffset = StackHandlerConstants::kAddressDisplacement + |
| 4684 StackHandlerConstants::kNextOffset; |
| 4685 __ ldr(r3, MemOperand(r3, kNextOffset)); |
| 4686 __ jmp(&loop); |
| 4687 __ bind(&done); |
| 4688 |
| 4689   // Set the top handler address to the next handler past the current ENTRY handler. |
| 4690 __ ldr(r0, MemOperand(r3, kNextOffset)); |
| 4691 __ mov(r2, Operand(ExternalReference(Top::k_handler_address))); |
| 4692 __ str(r0, MemOperand(r2)); |
| 4693 |
| 4694 // Set external caught exception to false. |
| 4695 __ mov(r0, Operand(false)); |
| 4696 ExternalReference external_caught(Top::k_external_caught_exception_address); |
| 4697 __ mov(r2, Operand(external_caught)); |
| 4698 __ str(r0, MemOperand(r2)); |
| 4699 |
| 4700 // Set pending exception and r0 to out of memory exception. |
| 4701 Failure* out_of_memory = Failure::OutOfMemoryException(); |
| 4702 __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory))); |
| 4703 __ mov(r2, Operand(ExternalReference(Top::k_pending_exception_address))); |
| 4704 __ str(r0, MemOperand(r2)); |
| 4705 |
| 4706 // Restore the stack to the address of the ENTRY handler |
| 4707 __ mov(sp, Operand(r3)); |
| 4708 |
| 4709 // Stack layout at this point. See also PushTryHandler |
| 4710 // r3, sp -> next handler |
| 4711 // state (ENTRY) |
| 4712 // pp |
| 4713 // fp |
| 4714 // lr |
| 4715 |
| 4716 // Discard ENTRY state (r2 is not used), and restore parameter- |
| 4717 // and frame-pointer and pop state. |
| 4718 __ ldm(ia_w, sp, r2.bit() | r3.bit() | pp.bit() | fp.bit()); |
| 4719 // Before returning we restore the context from the frame pointer if not NULL. |
| 4720 // The frame pointer is NULL in the exception handler of a JS entry frame. |
| 4721 __ cmp(fp, Operand(0)); |
| 4722 // Set cp to NULL if fp is NULL. |
| 4723 __ mov(cp, Operand(0), LeaveCC, eq); |
| 4724 // Restore cp otherwise. |
| 4725 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); |
| 4726 #ifdef DEBUG |
| 4727 if (FLAG_debug_code) { |
| 4728 __ mov(lr, Operand(pc)); |
| 4729 } |
| 4730 #endif |
| 4731 __ pop(pc); |
| 4732 } |
| 4733 |
| 4734 |
| 4735 void CEntryStub::GenerateCore(MacroAssembler* masm, |
| 4736 Label* throw_normal_exception, |
| 4737 Label* throw_out_of_memory_exception, |
| 4738 StackFrame::Type frame_type, |
| 4739 bool do_gc, |
| 4740 bool always_allocate) { |
| 4741 // r0: result parameter for PerformGC, if any |
| 4742 // r4: number of arguments including receiver (C callee-saved) |
| 4743 // r5: pointer to builtin function (C callee-saved) |
| 4744 // r6: pointer to the first argument (C callee-saved) |
| 4745 |
| 4746 if (do_gc) { |
| 4747 // Passing r0. |
| 4748 __ Call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY); |
| 4749 } |
| 4750 |
| 4751 ExternalReference scope_depth = |
| 4752 ExternalReference::heap_always_allocate_scope_depth(); |
| 4753 if (always_allocate) { |
| 4754 __ mov(r0, Operand(scope_depth)); |
| 4755 __ ldr(r1, MemOperand(r0)); |
| 4756 __ add(r1, r1, Operand(1)); |
| 4757 __ str(r1, MemOperand(r0)); |
| 4758 } |
| 4759 |
| 4760 // Call C built-in. |
| 4761 // r0 = argc, r1 = argv |
| 4762 __ mov(r0, Operand(r4)); |
| 4763 __ mov(r1, Operand(r6)); |
| 4764 |
| 4765 // TODO(1242173): To let the GC traverse the return address of the exit |
| 4766 // frames, we need to know where the return address is. Right now, |
| 4767 // we push it on the stack to be able to find it again, but we never |
| 4768 // restore from it in case of changes, which makes it impossible to |
| 4769 // support moving the C entry code stub. This should be fixed, but currently |
| 4770 // this is OK because the CEntryStub gets generated so early in the V8 boot |
| 4771   // sequence that it is never moved. |
| 4772 __ add(lr, pc, Operand(4)); // compute return address: (pc + 8) + 4 |
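|   // Concretely (a sketch): if A is the address of the add above, reading |
|   // pc yields A + 8, so lr = A + 12, which is the first instruction after |
|   // the swi/Jump below (push at A + 4, swi/Jump at A + 8).  The copy |
|   // pushed on the stack only lets the GC find the return address, as the |
|   // TODO above explains; the C built-in returns through lr itself. |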
| 4773 __ push(lr); |
| 4774 #if !defined(__arm__) |
| 4775 // Notify the simulator of the transition to C code. |
| 4776 __ swi(assembler::arm::call_rt_r5); |
| 4777 #else /* !defined(__arm__) */ |
| 4778 __ Jump(r5); |
| 4779 #endif /* !defined(__arm__) */ |
| 4780 |
| 4781 if (always_allocate) { |
| 4782 // It's okay to clobber r2 and r3 here. Don't mess with r0 and r1 |
| 4783 // though (contain the result). |
| 4784 __ mov(r2, Operand(scope_depth)); |
| 4785 __ ldr(r3, MemOperand(r2)); |
| 4786 __ sub(r3, r3, Operand(1)); |
| 4787 __ str(r3, MemOperand(r2)); |
| 4788 } |
| 4789 |
| 4790 // check for failure result |
| 4791 Label failure_returned; |
| 4792 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); |
| 4793 // Lower 2 bits of r2 are 0 iff r0 has failure tag. |
| 4794 __ add(r2, r0, Operand(1)); |
| 4795 __ tst(r2, Operand(kFailureTagMask)); |
| 4796 __ b(eq, &failure_returned); |
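|   // Why the add works (a sketch; the constant values are an assumption, |
|   // the ASSERT above only guarantees (kFailureTag + 1) & kFailureTagMask |
|   // == 0): with kFailureTag == 3 and kFailureTagMask == 3, a failure |
|   // object has both low bits set, so r0 + 1 has both tag bits clear only |
|   // for failures; smis (low bit 0) and ordinary heap objects (low bits 01) |
|   // keep a nonzero tag after adding 1 and fall through to the fast exit. |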
| 4797 |
| 4798 // Exit C frame and return. |
| 4799 // r0:r1: result |
| 4800 // sp: stack pointer |
| 4801 // fp: frame pointer |
| 4802 // pp: caller's parameter pointer pp (restored as C callee-saved) |
| 4803 __ LeaveExitFrame(frame_type); |
| 4804 |
| 4805 // check if we should retry or throw exception |
| 4806 Label retry; |
| 4807 __ bind(&failure_returned); |
| 4808 ASSERT(Failure::RETRY_AFTER_GC == 0); |
| 4809 __ tst(r0, Operand(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); |
| 4810 __ b(eq, &retry); |
| 4811 |
| 4812 Label continue_exception; |
| 4813 // If the returned failure is EXCEPTION then promote Top::pending_exception(). |
| 4814 __ cmp(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); |
| 4815 __ b(ne, &continue_exception); |
| 4816 |
| 4817 // Retrieve the pending exception and clear the variable. |
| 4818 __ mov(ip, Operand(ExternalReference::the_hole_value_location())); |
| 4819 __ ldr(r3, MemOperand(ip)); |
| 4820 __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address))); |
| 4821 __ ldr(r0, MemOperand(ip)); |
| 4822 __ str(r3, MemOperand(ip)); |
| 4823 |
| 4824 __ bind(&continue_exception); |
| 4825 // Special handling of out of memory exception. |
| 4826 Failure* out_of_memory = Failure::OutOfMemoryException(); |
| 4827 __ cmp(r0, Operand(reinterpret_cast<int32_t>(out_of_memory))); |
| 4828 __ b(eq, throw_out_of_memory_exception); |
| 4829 |
| 4830 // Handle normal exception. |
| 4831 __ jmp(throw_normal_exception); |
| 4832 |
| 4833 __ bind(&retry); // pass last failure (r0) as parameter (r0) when retrying |
| 4834 } |
| 4835 |
| 4836 |
| 4837 void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) { |
| 4838 // Called from JavaScript; parameters are on stack as if calling JS function |
| 4839 // r0: number of arguments including receiver |
| 4840 // r1: pointer to builtin function |
| 4841 // fp: frame pointer (restored after C call) |
| 4842 // sp: stack pointer (restored as callee's pp after C call) |
| 4843 // cp: current context (C callee-saved) |
| 4844 // pp: caller's parameter pointer pp (C callee-saved) |
| 4845 |
| 4846 // NOTE: Invocations of builtins may return failure objects |
| 4847 // instead of a proper result. The builtin entry handles |
| 4848 // this by performing a garbage collection and retrying the |
| 4849 // builtin once. |
| 4850 |
| 4851 StackFrame::Type frame_type = is_debug_break |
| 4852 ? StackFrame::EXIT_DEBUG |
| 4853 : StackFrame::EXIT; |
| 4854 |
| 4855 // Enter the exit frame that transitions from JavaScript to C++. |
| 4856 __ EnterExitFrame(frame_type); |
| 4857 |
| 4858 // r4: number of arguments (C callee-saved) |
| 4859 // r5: pointer to builtin function (C callee-saved) |
| 4860 // r6: pointer to first argument (C callee-saved) |
| 4861 |
| 4862 Label throw_out_of_memory_exception; |
| 4863 Label throw_normal_exception; |
| 4864 |
| 4865 // Call into the runtime system. Collect garbage before the call if |
| 4866 // running with --gc-greedy set. |
| 4867 if (FLAG_gc_greedy) { |
| 4868 Failure* failure = Failure::RetryAfterGC(0); |
| 4869 __ mov(r0, Operand(reinterpret_cast<intptr_t>(failure))); |
| 4870 } |
| 4871 GenerateCore(masm, &throw_normal_exception, |
| 4872 &throw_out_of_memory_exception, |
| 4873 frame_type, |
| 4874 FLAG_gc_greedy, |
| 4875 false); |
| 4876 |
| 4877 // Do space-specific GC and retry runtime call. |
| 4878 GenerateCore(masm, |
| 4879 &throw_normal_exception, |
| 4880 &throw_out_of_memory_exception, |
| 4881 frame_type, |
| 4882 true, |
| 4883 false); |
| 4884 |
| 4885 // Do full GC and retry runtime call one final time. |
| 4886 Failure* failure = Failure::InternalError(); |
| 4887 __ mov(r0, Operand(reinterpret_cast<int32_t>(failure))); |
| 4888 GenerateCore(masm, |
| 4889 &throw_normal_exception, |
| 4890 &throw_out_of_memory_exception, |
| 4891 frame_type, |
| 4892 true, |
| 4893 true); |
| 4894 |
| 4895 __ bind(&throw_out_of_memory_exception); |
| 4896 GenerateThrowOutOfMemory(masm); |
| 4897   // Control flow from the generated throw code does not return here. |
| 4898 |
| 4899 __ bind(&throw_normal_exception); |
| 4900 GenerateThrowTOS(masm); |
| 4901 } |
| 4902 |
| 4903 |
| 4904 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { |
| 4905 // r0: code entry |
| 4906 // r1: function |
| 4907 // r2: receiver |
| 4908 // r3: argc |
| 4909 // [sp+0]: argv |
| 4910 |
| 4911 Label invoke, exit; |
| 4912 |
| 4913 // Called from C, so do not pop argc and args on exit (preserve sp) |
| 4914 // No need to save register-passed args |
| 4915 // Save callee-saved registers (incl. cp, pp, and fp), sp, and lr |
| 4916 __ stm(db_w, sp, kCalleeSaved | lr.bit()); |
| 4917 |
| 4918 // Get address of argv, see stm above. |
| 4919 // r0: code entry |
| 4920 // r1: function |
| 4921 // r2: receiver |
| 4922 // r3: argc |
| 4923 __ add(r4, sp, Operand((kNumCalleeSaved + 1)*kPointerSize)); |
| 4924 __ ldr(r4, MemOperand(r4)); // argv |
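|   // Why this offset (a sketch): the stm above pushed kNumCalleeSaved |
|   // registers plus lr, lowering sp by (kNumCalleeSaved + 1) * kPointerSize, |
|   // so adding that amount back yields the caller's sp, where [sp + 0] |
|   // held argv on entry; the ldr then fetches argv itself. |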
| 4925 |
| 4926 // Push a frame with special values setup to mark it as an entry frame. |
| 4927 // r0: code entry |
| 4928 // r1: function |
| 4929 // r2: receiver |
| 4930 // r3: argc |
| 4931 // r4: argv |
| 4932 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
| 4933 __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used. |
| 4934 __ mov(r7, Operand(~ArgumentsAdaptorFrame::SENTINEL)); |
| 4935 __ mov(r6, Operand(Smi::FromInt(marker))); |
| 4936 __ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address))); |
| 4937 __ ldr(r5, MemOperand(r5)); |
| 4938 __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | r8.bit()); |
| 4939 |
| 4940   // Set up the frame pointer for the frame to be pushed. |
| 4941 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); |
| 4942 |
| 4943 // Call a faked try-block that does the invoke. |
| 4944 __ bl(&invoke); |
| 4945 |
| 4946 // Caught exception: Store result (exception) in the pending |
| 4947 // exception field in the JSEnv and return a failure sentinel. |
| 4948 // Coming in here the fp will be invalid because the PushTryHandler below |
| 4949 // sets it to 0 to signal the existence of the JSEntry frame. |
| 4950 __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address))); |
| 4951 __ str(r0, MemOperand(ip)); |
| 4952 __ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); |
| 4953 __ b(&exit); |
| 4954 |
| 4955 // Invoke: Link this frame into the handler chain. |
| 4956 __ bind(&invoke); |
| 4957 // Must preserve r0-r4, r5-r7 are available. |
| 4958 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER); |
| 4959 // If an exception not caught by another handler occurs, this handler returns |
| 4960 // control to the code after the bl(&invoke) above, which restores all |
| 4961 // kCalleeSaved registers (including cp, pp and fp) to their saved values |
| 4962 // before returning a failure to C. |
| 4963 |
| 4964 // Clear any pending exceptions. |
| 4965 __ mov(ip, Operand(ExternalReference::the_hole_value_location())); |
| 4966 __ ldr(r5, MemOperand(ip)); |
| 4967 __ mov(ip, Operand(ExternalReference(Top::k_pending_exception_address))); |
| 4968 __ str(r5, MemOperand(ip)); |
| 4969 |
| 4970 // Invoke the function by calling through JS entry trampoline builtin. |
| 4971 // Notice that we cannot store a reference to the trampoline code directly in |
| 4972 // this stub, because runtime stubs are not traversed when doing GC. |
| 4973 |
| 4974   // Registers expected by Builtins::JSEntryTrampoline: |
| 4975 // r0: code entry |
| 4976 // r1: function |
| 4977 // r2: receiver |
| 4978 // r3: argc |
| 4979 // r4: argv |
| 4980 if (is_construct) { |
| 4981 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline); |
| 4982 __ mov(ip, Operand(construct_entry)); |
| 4983 } else { |
| 4984 ExternalReference entry(Builtins::JSEntryTrampoline); |
| 4985 __ mov(ip, Operand(entry)); |
| 4986 } |
| 4987 __ ldr(ip, MemOperand(ip)); // deref address |
| 4988 |
| 4989 // Branch and link to JSEntryTrampoline. We don't use the double underscore |
| 4990 // macro for the add instruction because we don't want the coverage tool |
| 4991 // inserting instructions here after we read the pc. |
| 4992 __ mov(lr, Operand(pc)); |
| 4993 masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); |
| 4994 |
| 4995 // Unlink this frame from the handler chain. When reading the |
| 4996 // address of the next handler, there is no need to use the address |
| 4997 // displacement since the current stack pointer (sp) points directly |
| 4998 // to the stack handler. |
| 4999 __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset)); |
| 5000 __ mov(ip, Operand(ExternalReference(Top::k_handler_address))); |
| 5001 __ str(r3, MemOperand(ip)); |
| 5002 // No need to restore registers |
| 5003 __ add(sp, sp, Operand(StackHandlerConstants::kSize)); |
| 5004 |
| 5005 |
| 5006 __ bind(&exit); // r0 holds result |
| 5007 // Restore the top frame descriptors from the stack. |
| 5008 __ pop(r3); |
| 5009 __ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address))); |
| 5010 __ str(r3, MemOperand(ip)); |
| 5011 |
| 5012 // Reset the stack to the callee saved registers. |
| 5013 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); |
| 5014 |
| 5015 // Restore callee-saved registers and return. |
| 5016 #ifdef DEBUG |
| 5017 if (FLAG_debug_code) { |
| 5018 __ mov(lr, Operand(pc)); |
| 5019 } |
| 5020 #endif |
| 5021 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); |
| 5022 } |
| 5023 |
| 5024 |
| 5025 void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) { |
| 5026 // Check if the calling frame is an arguments adaptor frame. |
| 5027 Label adaptor; |
| 5028 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 5029 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); |
| 5030 __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL)); |
| 5031 __ b(eq, &adaptor); |
| 5032 |
| 5033 // Nothing to do: The formal number of parameters has already been |
| 5034   // passed in register r0 by the calling function. Just return it. |
| 5035 __ mov(pc, lr); |
| 5036 |
| 5037 // Arguments adaptor case: Read the arguments length from the |
| 5038 // adaptor frame and return it. |
| 5039 __ bind(&adaptor); |
| 5040 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 5041 __ mov(pc, lr); |
| 5042 } |
| 5043 |
| 5044 |
| 5045 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
| 5046 // The displacement is the offset of the last parameter (if any) |
| 5047 // relative to the frame pointer. |
| 5048 static const int kDisplacement = |
| 5049 StandardFrameConstants::kCallerSPOffset - kPointerSize; |
| 5050 |
| 5051 // Check that the key is a smi. |
| 5052 Label slow; |
| 5053 __ tst(r1, Operand(kSmiTagMask)); |
| 5054 __ b(ne, &slow); |
| 5055 |
| 5056 // Check if the calling frame is an arguments adaptor frame. |
| 5057 Label adaptor; |
| 5058 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 5059 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); |
| 5060 __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL)); |
| 5061 __ b(eq, &adaptor); |
| 5062 |
| 5063 // Check index against formal parameters count limit passed in |
| 5064   // through register r0. Use unsigned comparison to get negative |
| 5065 // check for free. |
| 5066 __ cmp(r1, r0); |
| 5067 __ b(cs, &slow); |
| 5068 |
| 5069 // Read the argument from the stack and return it. |
| 5070 __ sub(r3, r0, r1); |
| 5071 __ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 5072 __ ldr(r0, MemOperand(r3, kDisplacement)); |
| 5073 __ mov(pc, lr); |
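|   // Offset arithmetic above (a sketch, assuming 4-byte pointers and a |
|   // 1-bit smi tag): r0 and r1 are smis, so r3 = r0 - r1 holds |
|   // 2 * (argc - key); shifting left by kPointerSizeLog2 - kSmiTagSize |
|   // (i.e. by 1) turns that into the byte offset (argc - key) * 4, and |
|   // kDisplacement rebases it so the load picks out argument 'key' from |
|   // the caller's portion of the stack. |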
| 5074 |
| 5075 // Arguments adaptor case: Check index against actual arguments |
| 5076 // limit found in the arguments adaptor frame. Use unsigned |
| 5077 // comparison to get negative check for free. |
| 5078 __ bind(&adaptor); |
| 5079 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 5080 __ cmp(r1, r0); |
| 5081 __ b(cs, &slow); |
| 5082 |
| 5083 // Read the argument from the adaptor frame and return it. |
| 5084 __ sub(r3, r0, r1); |
| 5085 __ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 5086 __ ldr(r0, MemOperand(r3, kDisplacement)); |
| 5087 __ mov(pc, lr); |
| 5088 |
| 5089 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
| 5090 // by calling the runtime system. |
| 5091 __ bind(&slow); |
| 5092 __ push(r1); |
| 5093 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1); |
| 5094 } |
| 5095 |
| 5096 |
| 5097 void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) { |
| 5098 // Check if the calling frame is an arguments adaptor frame. |
| 5099 Label runtime; |
| 5100 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 5101 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); |
| 5102 __ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL)); |
| 5103 __ b(ne, &runtime); |
| 5104 |
| 5105 // Patch the arguments.length and the parameters pointer. |
| 5106 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 5107 __ str(r0, MemOperand(sp, 0 * kPointerSize)); |
| 5108 __ add(r3, r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 5109 __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 5110 __ str(r3, MemOperand(sp, 1 * kPointerSize)); |
| 5111 |
| 5112 // Do the runtime call to allocate the arguments object. |
| 5113 __ bind(&runtime); |
| 5114 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3); |
| 5115 } |
| 5116 |
| 5117 |
| 5118 void CallFunctionStub::Generate(MacroAssembler* masm) { |
| 5119 Label slow; |
| 5120 // Get the function to call from the stack. |
| 5121 // function, receiver [, arguments] |
| 5122 __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize)); |
| 5123 |
| 5124 // Check that the function is really a JavaScript function. |
| 5125 // r1: pushed function (to be verified) |
| 5126 __ tst(r1, Operand(kSmiTagMask)); |
| 5127 __ b(eq, &slow); |
| 5128 // Get the map of the function object. |
| 5129 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 5130 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
| 5131 __ cmp(r2, Operand(JS_FUNCTION_TYPE)); |
| 5132 __ b(ne, &slow); |
| 5133 |
| 5134 // Fast-case: Invoke the function now. |
| 5135 // r1: pushed function |
| 5136 ParameterCount actual(argc_); |
| 5137 __ InvokeFunction(r1, actual, JUMP_FUNCTION); |
| 5138 |
| 5139 // Slow-case: Non-function called. |
| 5140 __ bind(&slow); |
| 5141   __ mov(r0, Operand(argc_)); // Set up the number of arguments. |
| 5142 __ mov(r2, Operand(0)); |
| 5143 __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION); |
| 5144 __ Jump(Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)), |
| 5145 RelocInfo::CODE_TARGET); |
| 5146 } |
| 5147 |
| 5148 |
| 5149 #undef __ |
| 5150 |
| 5151 } } // namespace v8::internal |