OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 12 matching lines...) |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 | 28 |
29 #include "v8.h" | 29 #include "v8.h" |
30 | 30 |
31 #include "bootstrapper.h" | 31 #include "bootstrapper.h" |
32 #include "codegen-inl.h" | 32 #include "codegen-inl.h" |
| 33 #include "compiler.h" |
33 #include "debug.h" | 34 #include "debug.h" |
34 #include "ic-inl.h" | 35 #include "ic-inl.h" |
35 #include "parser.h" | 36 #include "parser.h" |
36 #include "register-allocator-inl.h" | 37 #include "register-allocator-inl.h" |
37 #include "runtime.h" | 38 #include "runtime.h" |
38 #include "scopes.h" | 39 #include "scopes.h" |
39 #include "compiler.h" | 40 #include "virtual-frame-inl.h" |
40 | 41 |
41 | 42 |
42 | 43 |
43 namespace v8 { | 44 namespace v8 { |
44 namespace internal { | 45 namespace internal { |
45 | 46 |
46 #define __ ACCESS_MASM(masm_) | 47 #define __ ACCESS_MASM(masm_) |
47 | 48 |
48 | 49 |
49 | 50 |
50 // ------------------------------------------------------------------------- | 51 // ----------------------------------------------------------------------------- |
51 // Platform-specific DeferredCode functions. | 52 // Platform-specific DeferredCode functions. |
52 | 53 |
53 | 54 |
54 void DeferredCode::SaveRegisters() { | 55 void DeferredCode::SaveRegisters() { |
55 UNIMPLEMENTED_MIPS(); | 56 UNIMPLEMENTED_MIPS(); |
56 } | 57 } |
57 | 58 |
58 | 59 |
59 void DeferredCode::RestoreRegisters() { | 60 void DeferredCode::RestoreRegisters() { |
60 UNIMPLEMENTED_MIPS(); | 61 UNIMPLEMENTED_MIPS(); |
61 } | 62 } |
62 | 63 |
63 | 64 |
64 // ------------------------------------------------------------------------- | 65 // ----------------------------------------------------------------------------- |
| 66 // CodeGenState implementation. |
| 67 |
| 68 CodeGenState::CodeGenState(CodeGenerator* owner) |
| 69 : owner_(owner), |
| 70 true_target_(NULL), |
| 71 false_target_(NULL), |
| 72 previous_(NULL) { |
| 73 owner_->set_state(this); |
| 74 } |
| 75 |
| 76 |
| 77 CodeGenState::CodeGenState(CodeGenerator* owner, |
| 78 JumpTarget* true_target, |
| 79 JumpTarget* false_target) |
| 80 : owner_(owner), |
| 81 true_target_(true_target), |
| 82 false_target_(false_target), |
| 83 previous_(owner->state()) { |
| 84 owner_->set_state(this); |
| 85 } |
| 86 |
| 87 |
| 88 CodeGenState::~CodeGenState() { |
| 89 ASSERT(owner_->state() == this); |
| 90 owner_->set_state(previous_); |
| 91 } |
| 92 |
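The two CodeGenState constructors and the destructor added above form a small RAII stack: each constructor installs itself as the owner's current state (the second one remembering the previous state and the branch targets), and the destructor restores what was there before. The following standalone sketch models only that push/pop behaviour with simplified stand-in classes; JumpTarget is reduced to an empty placeholder and none of the real V8 types are used.

    #include <cassert>
    #include <cstdio>

    struct JumpTarget {};  // Placeholder for the real jump-target type.

    class CodeGenerator;

    class CodeGenState {
     public:
      explicit CodeGenState(CodeGenerator* owner);  // base state, no targets
      CodeGenState(CodeGenerator* owner, JumpTarget* t, JumpTarget* f);
      ~CodeGenState();
      CodeGenState* previous() const { return previous_; }
     private:
      CodeGenerator* owner_;
      JumpTarget* true_target_;
      JumpTarget* false_target_;
      CodeGenState* previous_;
    };

    class CodeGenerator {
     public:
      CodeGenState* state() const { return state_; }
      void set_state(CodeGenState* state) { state_ = state; }
     private:
      CodeGenState* state_ = nullptr;
    };

    CodeGenState::CodeGenState(CodeGenerator* owner)
        : owner_(owner), true_target_(nullptr), false_target_(nullptr),
          previous_(nullptr) {
      owner_->set_state(this);
    }

    CodeGenState::CodeGenState(CodeGenerator* owner, JumpTarget* t, JumpTarget* f)
        : owner_(owner), true_target_(t), false_target_(f),
          previous_(owner->state()) {
      owner_->set_state(this);
    }

    CodeGenState::~CodeGenState() {
      assert(owner_->state() == this);
      owner_->set_state(previous_);
    }

    int main() {
      CodeGenerator cgen;
      JumpTarget yes, no;
      {
        CodeGenState outer(&cgen);               // as in Generate()
        {
          CodeGenState inner(&cgen, &yes, &no);  // as in LoadCondition()
          assert(cgen.state() == &inner);
          assert(cgen.state()->previous() == &outer);
        }
        assert(cgen.state() == &outer);          // destructor popped the state
      }
      assert(cgen.state() == nullptr);
      std::puts("state stack restored");
      return 0;
    }

This is the same pattern LoadCondition relies on further down: it opens a nested CodeGenState carrying the true/false targets, visits the expression, and the state is popped automatically when the scope closes.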
| 93 |
| 94 // ----------------------------------------------------------------------------- |
65 // CodeGenerator implementation | 95 // CodeGenerator implementation |
66 | 96 |
67 CodeGenerator::CodeGenerator(MacroAssembler* masm) | 97 CodeGenerator::CodeGenerator(MacroAssembler* masm) |
68 : deferred_(8), | 98 : deferred_(8), |
69 masm_(masm), | 99 masm_(masm), |
70 scope_(NULL), | |
71 frame_(NULL), | 100 frame_(NULL), |
72 allocator_(NULL), | 101 allocator_(NULL), |
73 cc_reg_(cc_always), | 102 cc_reg_(cc_always), |
74 state_(NULL), | 103 state_(NULL), |
75 function_return_is_shadowed_(false) { | 104 function_return_is_shadowed_(false) { |
76 } | 105 } |
77 | 106 |
78 | 107 |
79 // Calling conventions: | 108 // Calling conventions: |
80 // s8_fp: caller's frame pointer | 109 // fp: caller's frame pointer |
81 // sp: stack pointer | 110 // sp: stack pointer |
82 // a1: called JS function | 111 // a1: called JS function |
83 // cp: callee's context | 112 // cp: callee's context |
84 | 113 |
85 void CodeGenerator::Generate(CompilationInfo* infomode) { | 114 void CodeGenerator::Generate(CompilationInfo* info) { |
86 UNIMPLEMENTED_MIPS(); | 115 // Record the position for debugging purposes. |
| 116 CodeForFunctionPosition(info->function()); |
| 117 |
| 118 // Initialize state. |
| 119 info_ = info; |
| 120 ASSERT(allocator_ == NULL); |
| 121 RegisterAllocator register_allocator(this); |
| 122 allocator_ = ®ister_allocator; |
| 123 ASSERT(frame_ == NULL); |
| 124 frame_ = new VirtualFrame(); |
| 125 cc_reg_ = cc_always; |
| 126 |
| 127 { |
| 128 CodeGenState state(this); |
| 129 |
| 130 // Registers: |
| 131 // a1: called JS function |
| 132 // ra: return address |
| 133 // fp: caller's frame pointer |
| 134 // sp: stack pointer |
| 135 // cp: callee's context |
| 136 // |
| 137 // Stack: |
| 138 // arguments |
| 139 // receiver |
| 140 |
| 141 frame_->Enter(); |
| 142 |
| 143 // Allocate space for locals and initialize them. |
| 144 frame_->AllocateStackSlots(); |
| 145 |
| 146 // Initialize the function return target. |
| 147 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); |
| 148 function_return_is_shadowed_ = false; |
| 149 |
| 150 VirtualFrame::SpilledScope spilled_scope; |
| 151 if (scope()->num_heap_slots() > 0) { |
| 152 UNIMPLEMENTED_MIPS(); |
| 153 } |
| 154 |
| 155 { |
| 156 Comment cmnt2(masm_, "[ copy context parameters into .context"); |
| 157 |
| 158 // Note that iteration order is relevant here! If we have the same |
| 159 // parameter twice (e.g., function (x, y, x)), and that parameter |
| 160 // needs to be copied into the context, it must be the last argument |
| 161 // passed to the parameter that needs to be copied. This is a rare |
| 162 // case so we don't check for it, instead we rely on the copying |
| 163 // order: such a parameter is copied repeatedly into the same |
| 164 // context location and thus the last value is what is seen inside |
| 165 // the function. |
| 166 for (int i = 0; i < scope()->num_parameters(); i++) { |
| 167 UNIMPLEMENTED_MIPS(); |
| 168 } |
| 169 } |
| 170 |
| 171 // Store the arguments object. This must happen after context |
| 172 // initialization because the arguments object may be stored in the |
| 173 // context. |
| 174 if (scope()->arguments() != NULL) { |
| 175 UNIMPLEMENTED_MIPS(); |
| 176 } |
| 177 |
| 178 // Generate code to 'execute' declarations and initialize functions |
| 179 // (source elements). In case of an illegal redeclaration we need to |
| 180 // handle that instead of processing the declarations. |
| 181 if (scope()->HasIllegalRedeclaration()) { |
| 182 Comment cmnt(masm_, "[ illegal redeclarations"); |
| 183 scope()->VisitIllegalRedeclaration(this); |
| 184 } else { |
| 185 Comment cmnt(masm_, "[ declarations"); |
| 186 ProcessDeclarations(scope()->declarations()); |
| 187 // Bail out if a stack-overflow exception occurred when processing |
| 188 // declarations. |
| 189 if (HasStackOverflow()) return; |
| 190 } |
| 191 |
| 192 if (FLAG_trace) { |
| 193 UNIMPLEMENTED_MIPS(); |
| 194 } |
| 195 |
| 196 // Compile the body of the function in a vanilla state. Don't |
| 197 // bother compiling all the code if the scope has an illegal |
| 198 // redeclaration. |
| 199 if (!scope()->HasIllegalRedeclaration()) { |
| 200 Comment cmnt(masm_, "[ function body"); |
| 201 #ifdef DEBUG |
| 202 bool is_builtin = Bootstrapper::IsActive(); |
| 203 bool should_trace = |
| 204 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; |
| 205 if (should_trace) { |
| 206 UNIMPLEMENTED_MIPS(); |
| 207 } |
| 208 #endif |
| 209 VisitStatementsAndSpill(info->function()->body()); |
| 210 } |
| 211 } |
| 212 |
| 213 if (has_valid_frame() || function_return_.is_linked()) { |
| 214 if (!function_return_.is_linked()) { |
| 215 CodeForReturnPosition(info->function()); |
| 216 } |
| 217 // Registers: |
| 218 // v0: result |
| 219 // sp: stack pointer |
| 220 // fp: frame pointer |
| 221 // cp: callee's context |
| 222 |
| 223 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
| 224 |
| 225 function_return_.Bind(); |
| 226 if (FLAG_trace) { |
| 227 UNIMPLEMENTED_MIPS(); |
| 228 } |
| 229 |
| 230 // Add a label for checking the size of the code used for returning. |
| 231 Label check_exit_codesize; |
| 232 masm_->bind(&check_exit_codesize); |
| 233 |
| 234 masm_->mov(sp, fp); |
| 235 masm_->lw(fp, MemOperand(sp, 0)); |
| 236 masm_->lw(ra, MemOperand(sp, 4)); |
| 237 masm_->addiu(sp, sp, 8); |
| 238 |
| 239 // Here we use masm_-> instead of the __ macro to keep the code coverage |
| 240 // tool from instrumenting this sequence, since we rely on its exact size. |
| 241 // TODO(MIPS): Should we be able to use more than 0x1ffe parameters? |
| 242 masm_->addiu(sp, sp, (scope()->num_parameters() + 1) * kPointerSize); |
| 243 masm_->Jump(ra); |
| 244 // The Jump automatically generates a nop in the branch delay slot. |
| 245 |
| 246 // Check that the size of the code used for returning matches what is |
| 247 // expected by the debugger. |
| 248 ASSERT_EQ(kJSReturnSequenceLength, |
| 249 masm_->InstructionsGeneratedSince(&check_exit_codesize)); |
| 250 } |
| 251 |
| 252 // Code generation state must be reset. |
| 253 ASSERT(!has_cc()); |
| 254 ASSERT(state_ == NULL); |
| 255 ASSERT(!function_return_is_shadowed_); |
| 256 function_return_.Unuse(); |
| 257 DeleteFrame(); |
| 258 |
| 259 // Process any deferred code using the register allocator. |
| 260 if (!HasStackOverflow()) { |
| 261 ProcessDeferred(); |
| 262 } |
| 263 |
| 264 allocator_ = NULL; |
| 265 } |
| 266 |
| 267 |
| 268 void CodeGenerator::LoadReference(Reference* ref) { |
| 269 VirtualFrame::SpilledScope spilled_scope; |
| 270 Comment cmnt(masm_, "[ LoadReference"); |
| 271 Expression* e = ref->expression(); |
| 272 Property* property = e->AsProperty(); |
| 273 Variable* var = e->AsVariableProxy()->AsVariable(); |
| 274 |
| 275 if (property != NULL) { |
| 276 UNIMPLEMENTED_MIPS(); |
| 277 } else if (var != NULL) { |
| 278 // The expression is a variable proxy that does not rewrite to a |
| 279 // property. Global variables are treated as named property references. |
| 280 if (var->is_global()) { |
| 281 LoadGlobal(); |
| 282 ref->set_type(Reference::NAMED); |
| 283 } else { |
| 284 ASSERT(var->slot() != NULL); |
| 285 ref->set_type(Reference::SLOT); |
| 286 } |
| 287 } else { |
| 288 UNIMPLEMENTED_MIPS(); |
| 289 } |
| 290 } |
| 291 |
| 292 |
| 293 void CodeGenerator::UnloadReference(Reference* ref) { |
| 294 VirtualFrame::SpilledScope spilled_scope; |
| 295 // Pop a reference from the stack while preserving TOS. |
| 296 Comment cmnt(masm_, "[ UnloadReference"); |
| 297 int size = ref->size(); |
| 298 if (size > 0) { |
| 299 frame_->EmitPop(a0); |
| 300 frame_->Drop(size); |
| 301 frame_->EmitPush(a0); |
| 302 } |
| 303 ref->set_unloaded(); |
| 304 } |
| 305 |
| 306 |
| 307 MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { |
| 308 // Currently, this assertion will fail if we try to assign to |
| 309 // a constant variable that is constant because it is read-only |
| 310 // (such as the variable referring to a named function expression). |
| 311 // We need to implement assignments to read-only variables. |
| 312 // Ideally, we should do this during AST generation (by converting |
| 313 // such assignments into expression statements); however, in general |
| 314 // we may not be able to make the decision until past AST generation, |
| 315 // that is when the entire program is known. |
| 316 ASSERT(slot != NULL); |
| 317 int index = slot->index(); |
| 318 switch (slot->type()) { |
| 319 case Slot::PARAMETER: |
| 320 UNIMPLEMENTED_MIPS(); |
| 321 return MemOperand(no_reg, 0); |
| 322 |
| 323 case Slot::LOCAL: |
| 324 return frame_->LocalAt(index); |
| 325 |
| 326 case Slot::CONTEXT: { |
| 327 UNIMPLEMENTED_MIPS(); |
| 328 return MemOperand(no_reg, 0); |
| 329 } |
| 330 |
| 331 default: |
| 332 UNREACHABLE(); |
| 333 return MemOperand(no_reg, 0); |
| 334 } |
| 335 } |
| 336 |
| 337 |
| 338 // Loads a value on TOS. If it is a boolean value, the result may have been |
| 339 // (partially) translated into branches, or it may have set the condition |
| 340 // code register. If force_cc is set, the value is forced to set the |
| 341 // condition code register and no value is pushed. If the condition code |
| 342 // register was set, has_cc() is true and cc_reg_ contains the condition to |
| 343 // test for 'true'. |
| 344 void CodeGenerator::LoadCondition(Expression* x, |
| 345 JumpTarget* true_target, |
| 346 JumpTarget* false_target, |
| 347 bool force_cc) { |
| 348 ASSERT(!has_cc()); |
| 349 int original_height = frame_->height(); |
| 350 |
| 351 { CodeGenState new_state(this, true_target, false_target); |
| 352 Visit(x); |
| 353 |
| 354 // If we hit a stack overflow, we may not have actually visited |
| 355 // the expression. In that case, we ensure that we have a |
| 356 // valid-looking frame state because we will continue to generate |
| 357 // code as we unwind the C++ stack. |
| 358 // |
| 359 // It's possible to have both a stack overflow and a valid frame |
| 360 // state (eg, a subexpression overflowed, visiting it returned |
| 361 // with a dummied frame state, and visiting this expression |
| 362 // returned with a normal-looking state). |
| 363 if (HasStackOverflow() && |
| 364 has_valid_frame() && |
| 365 !has_cc() && |
| 366 frame_->height() == original_height) { |
| 367 true_target->Jump(); |
| 368 } |
| 369 } |
| 370 if (force_cc && frame_ != NULL && !has_cc()) { |
| 371 // Convert the TOS value to a boolean in the condition code register. |
| 372 UNIMPLEMENTED_MIPS(); |
| 373 } |
| 374 ASSERT(!force_cc || !has_valid_frame() || has_cc()); |
| 375 ASSERT(!has_valid_frame() || |
| 376 (has_cc() && frame_->height() == original_height) || |
| 377 (!has_cc() && frame_->height() == original_height + 1)); |
| 378 } |
| 379 |
| 380 |
| 381 void CodeGenerator::Load(Expression* x) { |
| 382 #ifdef DEBUG |
| 383 int original_height = frame_->height(); |
| 384 #endif |
| 385 JumpTarget true_target; |
| 386 JumpTarget false_target; |
| 387 LoadCondition(x, &true_target, &false_target, false); |
| 388 |
| 389 if (has_cc()) { |
| 390 UNIMPLEMENTED_MIPS(); |
| 391 } |
| 392 |
| 393 if (true_target.is_linked() || false_target.is_linked()) { |
| 394 UNIMPLEMENTED_MIPS(); |
| 395 } |
| 396 ASSERT(has_valid_frame()); |
| 397 ASSERT(!has_cc()); |
| 398 ASSERT(frame_->height() == original_height + 1); |
| 399 } |
| 400 |
| 401 |
| 402 void CodeGenerator::LoadGlobal() { |
| 403 VirtualFrame::SpilledScope spilled_scope; |
| 404 __ lw(a0, GlobalObject()); |
| 405 frame_->EmitPush(a0); |
| 406 } |
| 407 |
| 408 |
| 409 void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) { |
| 410 VirtualFrame::SpilledScope spilled_scope; |
| 411 if (slot->type() == Slot::LOOKUP) { |
| 412 UNIMPLEMENTED_MIPS(); |
| 413 } else { |
| 414 __ lw(a0, SlotOperand(slot, a2)); |
| 415 frame_->EmitPush(a0); |
| 416 if (slot->var()->mode() == Variable::CONST) { |
| 417 UNIMPLEMENTED_MIPS(); |
| 418 } |
| 419 } |
| 420 } |
| 421 |
| 422 |
| 423 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { |
| 424 ASSERT(slot != NULL); |
| 425 if (slot->type() == Slot::LOOKUP) { |
| 426 UNIMPLEMENTED_MIPS(); |
| 427 } else { |
| 428 ASSERT(!slot->var()->is_dynamic()); |
| 429 |
| 430 JumpTarget exit; |
| 431 if (init_state == CONST_INIT) { |
| 432 UNIMPLEMENTED_MIPS(); |
| 433 } |
| 434 |
| 435 // We must execute the store. Storing a variable must keep the |
| 436 // (new) value on the stack. This is necessary for compiling |
| 437 // assignment expressions. |
| 438 // |
| 439 // Note: We will reach here even with slot->var()->mode() == |
| 440 // Variable::CONST because of const declarations which will |
| 441 // initialize consts to 'the hole' value and by doing so, end up |
| 442 // calling this code. a2 may be loaded with context; used below in |
| 443 // RecordWrite. |
| 444 frame_->EmitPop(a0); |
| 445 __ sw(a0, SlotOperand(slot, a2)); |
| 446 frame_->EmitPush(a0); |
| 447 if (slot->type() == Slot::CONTEXT) { |
| 448 UNIMPLEMENTED_MIPS(); |
| 449 } |
| 450 // If we definitely did not jump over the assignment, we do not need |
| 451 // to bind the exit label. Doing so can defeat peephole |
| 452 // optimization. |
| 453 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) { |
| 454 exit.Bind(); |
| 455 } |
| 456 } |
87 } | 457 } |
88 | 458 |
89 | 459 |
90 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { | 460 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { |
91 UNIMPLEMENTED_MIPS(); | 461 VirtualFrame::SpilledScope spilled_scope; |
92 } | 462 for (int i = 0; frame_ != NULL && i < statements->length(); i++) { |
93 | 463 VisitAndSpill(statements->at(i)); |
94 | 464 } |
| 465 } |
| 466 |
| 467 |
95 void CodeGenerator::VisitBlock(Block* node) { | 468 void CodeGenerator::VisitBlock(Block* node) { |
96 UNIMPLEMENTED_MIPS(); | 469 UNIMPLEMENTED_MIPS(); |
97 } | 470 } |
98 | 471 |
99 | 472 |
100 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 473 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
101 UNIMPLEMENTED_MIPS(); | 474 VirtualFrame::SpilledScope spilled_scope; |
| 475 frame_->EmitPush(cp); |
| 476 __ li(t0, Operand(pairs)); |
| 477 frame_->EmitPush(t0); |
| 478 __ li(t0, Operand(Smi::FromInt(is_eval() ? 1 : 0))); |
| 479 frame_->EmitPush(t0); |
| 480 frame_->CallRuntime(Runtime::kDeclareGlobals, 3); |
| 481 // The result is discarded. |
102 } | 482 } |
103 | 483 |
104 | 484 |
105 void CodeGenerator::VisitDeclaration(Declaration* node) { | 485 void CodeGenerator::VisitDeclaration(Declaration* node) { |
106 UNIMPLEMENTED_MIPS(); | 486 UNIMPLEMENTED_MIPS(); |
107 } | 487 } |
108 | 488 |
109 | 489 |
110 void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) { | 490 void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) { |
111 UNIMPLEMENTED_MIPS(); | 491 #ifdef DEBUG |
| 492 int original_height = frame_->height(); |
| 493 #endif |
| 494 VirtualFrame::SpilledScope spilled_scope; |
| 495 Comment cmnt(masm_, "[ ExpressionStatement"); |
| 496 CodeForStatementPosition(node); |
| 497 Expression* expression = node->expression(); |
| 498 expression->MarkAsStatement(); |
| 499 LoadAndSpill(expression); |
| 500 frame_->Drop(); |
| 501 ASSERT(frame_->height() == original_height); |
112 } | 502 } |
113 | 503 |
114 | 504 |
115 void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) { | 505 void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) { |
116 UNIMPLEMENTED_MIPS(); | 506 UNIMPLEMENTED_MIPS(); |
117 } | 507 } |
118 | 508 |
119 | 509 |
120 void CodeGenerator::VisitIfStatement(IfStatement* node) { | 510 void CodeGenerator::VisitIfStatement(IfStatement* node) { |
121 UNIMPLEMENTED_MIPS(); | 511 UNIMPLEMENTED_MIPS(); |
122 } | 512 } |
123 | 513 |
124 | 514 |
125 void CodeGenerator::VisitContinueStatement(ContinueStatement* node) { | 515 void CodeGenerator::VisitContinueStatement(ContinueStatement* node) { |
126 UNIMPLEMENTED_MIPS(); | 516 UNIMPLEMENTED_MIPS(); |
127 } | 517 } |
128 | 518 |
129 | 519 |
130 void CodeGenerator::VisitBreakStatement(BreakStatement* node) { | 520 void CodeGenerator::VisitBreakStatement(BreakStatement* node) { |
131 UNIMPLEMENTED_MIPS(); | 521 UNIMPLEMENTED_MIPS(); |
132 } | 522 } |
133 | 523 |
134 | 524 |
135 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { | 525 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { |
136 UNIMPLEMENTED_MIPS(); | 526 VirtualFrame::SpilledScope spilled_scope; |
| 527 Comment cmnt(masm_, "[ ReturnStatement"); |
| 528 |
| 529 CodeForStatementPosition(node); |
| 530 LoadAndSpill(node->expression()); |
| 531 if (function_return_is_shadowed_) { |
| 532 frame_->EmitPop(v0); |
| 533 function_return_.Jump(); |
| 534 } else { |
| 535 // Pop the result from the frame and prepare the frame for |
| 536 // returning thus making it easier to merge. |
| 537 frame_->EmitPop(v0); |
| 538 frame_->PrepareForReturn(); |
| 539 |
| 540 function_return_.Jump(); |
| 541 } |
137 } | 542 } |
138 | 543 |
139 | 544 |
140 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) { | 545 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) { |
141 UNIMPLEMENTED_MIPS(); | 546 UNIMPLEMENTED_MIPS(); |
142 } | 547 } |
143 | 548 |
144 | 549 |
145 void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) { | 550 void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) { |
146 UNIMPLEMENTED_MIPS(); | 551 UNIMPLEMENTED_MIPS(); |
(...skipping 38 matching lines...) |
185 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) { | 590 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) { |
186 UNIMPLEMENTED_MIPS(); | 591 UNIMPLEMENTED_MIPS(); |
187 } | 592 } |
188 | 593 |
189 | 594 |
190 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { | 595 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { |
191 UNIMPLEMENTED_MIPS(); | 596 UNIMPLEMENTED_MIPS(); |
192 } | 597 } |
193 | 598 |
194 | 599 |
195 void CodeGenerator::VisitFunctionBoilerplateLiteral( | 600 void CodeGenerator::VisitSharedFunctionInfoLiteral( |
196 FunctionBoilerplateLiteral* node) { | 601 SharedFunctionInfoLiteral* node) { |
197 UNIMPLEMENTED_MIPS(); | 602 UNIMPLEMENTED_MIPS(); |
198 } | 603 } |
199 | 604 |
200 | 605 |
201 void CodeGenerator::VisitConditional(Conditional* node) { | 606 void CodeGenerator::VisitConditional(Conditional* node) { |
202 UNIMPLEMENTED_MIPS(); | 607 UNIMPLEMENTED_MIPS(); |
203 } | 608 } |
204 | 609 |
205 | 610 |
206 void CodeGenerator::VisitSlot(Slot* node) { | 611 void CodeGenerator::VisitSlot(Slot* node) { |
207 UNIMPLEMENTED_MIPS(); | 612 #ifdef DEBUG |
| 613 int original_height = frame_->height(); |
| 614 #endif |
| 615 VirtualFrame::SpilledScope spilled_scope; |
| 616 Comment cmnt(masm_, "[ Slot"); |
| 617 LoadFromSlot(node, typeof_state()); |
| 618 ASSERT(frame_->height() == original_height + 1); |
208 } | 619 } |
209 | 620 |
210 | 621 |
211 void CodeGenerator::VisitVariableProxy(VariableProxy* node) { | 622 void CodeGenerator::VisitVariableProxy(VariableProxy* node) { |
212 UNIMPLEMENTED_MIPS(); | 623 #ifdef DEBUG |
| 624 int original_height = frame_->height(); |
| 625 #endif |
| 626 VirtualFrame::SpilledScope spilled_scope; |
| 627 Comment cmnt(masm_, "[ VariableProxy"); |
| 628 |
| 629 Variable* var = node->var(); |
| 630 Expression* expr = var->rewrite(); |
| 631 if (expr != NULL) { |
| 632 Visit(expr); |
| 633 } else { |
| 634 ASSERT(var->is_global()); |
| 635 Reference ref(this, node); |
| 636 ref.GetValueAndSpill(); |
| 637 } |
| 638 ASSERT(frame_->height() == original_height + 1); |
213 } | 639 } |
214 | 640 |
215 | 641 |
216 void CodeGenerator::VisitLiteral(Literal* node) { | 642 void CodeGenerator::VisitLiteral(Literal* node) { |
217 UNIMPLEMENTED_MIPS(); | 643 #ifdef DEBUG |
| 644 int original_height = frame_->height(); |
| 645 #endif |
| 646 VirtualFrame::SpilledScope spilled_scope; |
| 647 Comment cmnt(masm_, "[ Literal"); |
| 648 __ li(t0, Operand(node->handle())); |
| 649 frame_->EmitPush(t0); |
| 650 ASSERT(frame_->height() == original_height + 1); |
218 } | 651 } |
219 | 652 |
220 | 653 |
221 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { | 654 void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { |
222 UNIMPLEMENTED_MIPS(); | 655 UNIMPLEMENTED_MIPS(); |
223 } | 656 } |
224 | 657 |
225 | 658 |
226 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) { | 659 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) { |
227 UNIMPLEMENTED_MIPS(); | 660 UNIMPLEMENTED_MIPS(); |
228 } | 661 } |
229 | 662 |
230 | 663 |
231 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) { | 664 void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) { |
232 UNIMPLEMENTED_MIPS(); | 665 UNIMPLEMENTED_MIPS(); |
233 } | 666 } |
234 | 667 |
235 | 668 |
236 void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) { | 669 void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) { |
237 UNIMPLEMENTED_MIPS(); | 670 UNIMPLEMENTED_MIPS(); |
238 } | 671 } |
239 | 672 |
240 | 673 |
241 void CodeGenerator::VisitAssignment(Assignment* node) { | 674 void CodeGenerator::VisitAssignment(Assignment* node) { |
242 UNIMPLEMENTED_MIPS(); | 675 #ifdef DEBUG |
| 676 int original_height = frame_->height(); |
| 677 #endif |
| 678 VirtualFrame::SpilledScope spilled_scope; |
| 679 Comment cmnt(masm_, "[ Assignment"); |
| 680 |
| 681 { Reference target(this, node->target()); |
| 682 if (target.is_illegal()) { |
| 683 // Fool the virtual frame into thinking that we left the assignment's |
| 684 // value on the frame. |
| 685 frame_->EmitPush(zero_reg); |
| 686 ASSERT(frame_->height() == original_height + 1); |
| 687 return; |
| 688 } |
| 689 |
| 690 if (node->op() == Token::ASSIGN || |
| 691 node->op() == Token::INIT_VAR || |
| 692 node->op() == Token::INIT_CONST) { |
| 693 LoadAndSpill(node->value()); |
| 694 } else { |
| 695 UNIMPLEMENTED_MIPS(); |
| 696 } |
| 697 |
| 698 Variable* var = node->target()->AsVariableProxy()->AsVariable(); |
| 699 if (var != NULL && |
| 700 (var->mode() == Variable::CONST) && |
| 701 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) { |
| 702 // Assignment ignored - leave the value on the stack. |
| 703 } else { |
| 704 CodeForSourcePosition(node->position()); |
| 705 if (node->op() == Token::INIT_CONST) { |
| 706 // Dynamic constant initializations must use the function context |
| 707 // and initialize the actual constant declared. Dynamic variable |
| 708 // initializations are simply assignments and use SetValue. |
| 709 target.SetValue(CONST_INIT); |
| 710 } else { |
| 711 target.SetValue(NOT_CONST_INIT); |
| 712 } |
| 713 } |
| 714 } |
| 715 ASSERT(frame_->height() == original_height + 1); |
243 } | 716 } |
244 | 717 |
245 | 718 |
246 void CodeGenerator::VisitThrow(Throw* node) { | 719 void CodeGenerator::VisitThrow(Throw* node) { |
247 UNIMPLEMENTED_MIPS(); | 720 UNIMPLEMENTED_MIPS(); |
248 } | 721 } |
249 | 722 |
250 | 723 |
251 void CodeGenerator::VisitProperty(Property* node) { | 724 void CodeGenerator::VisitProperty(Property* node) { |
252 UNIMPLEMENTED_MIPS(); | 725 UNIMPLEMENTED_MIPS(); |
253 } | 726 } |
254 | 727 |
255 | 728 |
256 void CodeGenerator::VisitCall(Call* node) { | 729 void CodeGenerator::VisitCall(Call* node) { |
257 UNIMPLEMENTED_MIPS(); | 730 #ifdef DEBUG |
| 731 int original_height = frame_->height(); |
| 732 #endif |
| 733 VirtualFrame::SpilledScope spilled_scope; |
| 734 Comment cmnt(masm_, "[ Call"); |
| 735 |
| 736 Expression* function = node->expression(); |
| 737 ZoneList<Expression*>* args = node->arguments(); |
| 738 |
| 739 // Standard function call. |
| 740 // Check if the function is a variable or a property. |
| 741 Variable* var = function->AsVariableProxy()->AsVariable(); |
| 742 Property* property = function->AsProperty(); |
| 743 |
| 744 // ------------------------------------------------------------------------ |
| 745 // Fast-case: Use inline caching. |
| 746 // --- |
| 747 // According to ECMA-262, section 11.2.3, page 44, the function to call |
| 748 // must be resolved after the arguments have been evaluated. The IC code |
| 749 // automatically handles this by loading the arguments before the function |
| 750 // is resolved in cache misses (this also holds for megamorphic calls). |
| 751 // ------------------------------------------------------------------------ |
| 752 |
| 753 if (var != NULL && var->is_possibly_eval()) { |
| 754 UNIMPLEMENTED_MIPS(); |
| 755 } else if (var != NULL && !var->is_this() && var->is_global()) { |
| 756 // ---------------------------------- |
| 757 // JavaScript example: 'foo(1, 2, 3)' // foo is global |
| 758 // ---------------------------------- |
| 759 |
| 760 int arg_count = args->length(); |
| 761 |
| 762 // We need sp to be 8-byte aligned when calling the stub. |
| 763 __ SetupAlignedCall(t0, arg_count); |
| 764 |
| 765 // Pass the global object as the receiver and let the IC stub |
| 766 // patch the stack to use the global proxy as 'this' in the |
| 767 // invoked function. |
| 768 LoadGlobal(); |
| 769 |
| 770 // Load the arguments. |
| 771 for (int i = 0; i < arg_count; i++) { |
| 772 LoadAndSpill(args->at(i)); |
| 773 } |
| 774 |
| 775 // Set up the receiver register and call the IC initialization code. |
| 776 __ li(a2, Operand(var->name())); |
| 777 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; |
| 778 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop); |
| 779 CodeForSourcePosition(node->position()); |
| 780 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT, |
| 781 arg_count + 1); |
| 782 __ ReturnFromAlignedCall(); |
| 783 __ lw(cp, frame_->Context()); |
| 784 // Remove the function from the stack. |
| 785 frame_->EmitPush(v0); |
| 786 |
| 787 } else if (var != NULL && var->slot() != NULL && |
| 788 var->slot()->type() == Slot::LOOKUP) { |
| 789 UNIMPLEMENTED_MIPS(); |
| 790 } else if (property != NULL) { |
| 791 UNIMPLEMENTED_MIPS(); |
| 792 } else { |
| 793 UNIMPLEMENTED_MIPS(); |
| 794 } |
| 795 |
| 796 ASSERT(frame_->height() == original_height + 1); |
258 } | 797 } |
259 | 798 |
260 | 799 |
261 void CodeGenerator::VisitCallNew(CallNew* node) { | 800 void CodeGenerator::VisitCallNew(CallNew* node) { |
262 UNIMPLEMENTED_MIPS(); | 801 UNIMPLEMENTED_MIPS(); |
263 } | 802 } |
264 | 803 |
265 | 804 |
266 void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { | 805 void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { |
267 UNIMPLEMENTED_MIPS(); | 806 UNIMPLEMENTED_MIPS(); |
(...skipping 164 matching lines...) |
432 | 971 |
433 | 972 |
434 #ifdef DEBUG | 973 #ifdef DEBUG |
435 bool CodeGenerator::HasValidEntryRegisters() { return true; } | 974 bool CodeGenerator::HasValidEntryRegisters() { return true; } |
436 #endif | 975 #endif |
437 | 976 |
438 | 977 |
439 #undef __ | 978 #undef __ |
440 #define __ ACCESS_MASM(masm) | 979 #define __ ACCESS_MASM(masm) |
441 | 980 |
| 981 // ----------------------------------------------------------------------------- |
| 982 // Reference support |
442 | 983 |
443 Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) { | 984 Reference::Reference(CodeGenerator* cgen, |
444 return Handle<Code>::null(); | 985 Expression* expression, |
| 986 bool persist_after_get) |
| 987 : cgen_(cgen), |
| 988 expression_(expression), |
| 989 type_(ILLEGAL), |
| 990 persist_after_get_(persist_after_get) { |
| 991 cgen->LoadReference(this); |
445 } | 992 } |
446 | 993 |
447 | 994 |
448 // On entry a0 and a1 are the things to be compared. On exit v0 is 0, | 995 Reference::~Reference() { |
| 996 ASSERT(is_unloaded() || is_illegal()); |
| 997 } |
| 998 |
| 999 |
| 1000 Handle<String> Reference::GetName() { |
| 1001 ASSERT(type_ == NAMED); |
| 1002 Property* property = expression_->AsProperty(); |
| 1003 if (property == NULL) { |
| 1004 // Global variable reference treated as a named property reference. |
| 1005 VariableProxy* proxy = expression_->AsVariableProxy(); |
| 1006 ASSERT(proxy->AsVariable() != NULL); |
| 1007 ASSERT(proxy->AsVariable()->is_global()); |
| 1008 return proxy->name(); |
| 1009 } else { |
| 1010 Literal* raw_name = property->key()->AsLiteral(); |
| 1011 ASSERT(raw_name != NULL); |
| 1012 return Handle<String>(String::cast(*raw_name->handle())); |
| 1013 } |
| 1014 } |
| 1015 |
| 1016 |
| 1017 void Reference::GetValue() { |
| 1018 ASSERT(cgen_->HasValidEntryRegisters()); |
| 1019 ASSERT(!is_illegal()); |
| 1020 ASSERT(!cgen_->has_cc()); |
| 1021 Property* property = expression_->AsProperty(); |
| 1022 if (property != NULL) { |
| 1023 cgen_->CodeForSourcePosition(property->position()); |
| 1024 } |
| 1025 |
| 1026 switch (type_) { |
| 1027 case SLOT: { |
| 1028 UNIMPLEMENTED_MIPS(); |
| 1029 break; |
| 1030 } |
| 1031 |
| 1032 case NAMED: { |
| 1033 UNIMPLEMENTED_MIPS(); |
| 1034 break; |
| 1035 } |
| 1036 |
| 1037 case KEYED: { |
| 1038 UNIMPLEMENTED_MIPS(); |
| 1039 break; |
| 1040 } |
| 1041 |
| 1042 default: |
| 1043 UNREACHABLE(); |
| 1044 } |
| 1045 } |
| 1046 |
| 1047 |
| 1048 void Reference::SetValue(InitState init_state) { |
| 1049 ASSERT(!is_illegal()); |
| 1050 ASSERT(!cgen_->has_cc()); |
| 1051 MacroAssembler* masm = cgen_->masm(); |
| 1052 Property* property = expression_->AsProperty(); |
| 1053 if (property != NULL) { |
| 1054 cgen_->CodeForSourcePosition(property->position()); |
| 1055 } |
| 1056 |
| 1057 switch (type_) { |
| 1058 case SLOT: { |
| 1059 Comment cmnt(masm, "[ Store to Slot"); |
| 1060 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); |
| 1061 cgen_->StoreToSlot(slot, init_state); |
| 1062 cgen_->UnloadReference(this); |
| 1063 break; |
| 1064 } |
| 1065 |
| 1066 case NAMED: { |
| 1067 UNIMPLEMENTED_MIPS(); |
| 1068 break; |
| 1069 } |
| 1070 |
| 1071 case KEYED: { |
| 1072 UNIMPLEMENTED_MIPS(); |
| 1073 break; |
| 1074 } |
| 1075 |
| 1076 default: |
| 1077 UNREACHABLE(); |
| 1078 } |
| 1079 } |
| 1080 |
| 1081 |
| 1082 // On entry a0 and a1 are the things to be compared. On exit v0 is 0, |
449 // positive or negative to indicate the result of the comparison. | 1083 // positive or negative to indicate the result of the comparison. |
450 void CompareStub::Generate(MacroAssembler* masm) { | 1084 void CompareStub::Generate(MacroAssembler* masm) { |
451 UNIMPLEMENTED_MIPS(); | 1085 UNIMPLEMENTED_MIPS(); |
452 __ break_(0x765); | 1086 __ break_(0x765); |
453 } | 1087 } |
454 | 1088 |
455 | 1089 |
| 1090 Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) { |
| 1091 UNIMPLEMENTED_MIPS(); |
| 1092 return Handle<Code>::null(); |
| 1093 } |
| 1094 |
| 1095 |
456 void StackCheckStub::Generate(MacroAssembler* masm) { | 1096 void StackCheckStub::Generate(MacroAssembler* masm) { |
457 UNIMPLEMENTED_MIPS(); | 1097 UNIMPLEMENTED_MIPS(); |
458 __ break_(0x790); | 1098 __ break_(0x790); |
459 } | 1099 } |
460 | 1100 |
461 | 1101 |
462 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { | 1102 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
463 UNIMPLEMENTED_MIPS(); | 1103 UNIMPLEMENTED_MIPS(); |
464 __ break_(0x808); | 1104 __ break_(0x808); |
465 } | 1105 } |
466 | 1106 |
467 | 1107 |
468 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, | 1108 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, |
469 UncatchableExceptionType type) { | 1109 UncatchableExceptionType type) { |
470 UNIMPLEMENTED_MIPS(); | 1110 UNIMPLEMENTED_MIPS(); |
471 __ break_(0x815); | 1111 __ break_(0x815); |
472 } | 1112 } |
473 | 1113 |
474 void CEntryStub::GenerateCore(MacroAssembler* masm, | 1114 void CEntryStub::GenerateCore(MacroAssembler* masm, |
475 Label* throw_normal_exception, | 1115 Label* throw_normal_exception, |
476 Label* throw_termination_exception, | 1116 Label* throw_termination_exception, |
477 Label* throw_out_of_memory_exception, | 1117 Label* throw_out_of_memory_exception, |
478 bool do_gc, | 1118 bool do_gc, |
479 bool always_allocate) { | 1119 bool always_allocate) { |
480 UNIMPLEMENTED_MIPS(); | 1120 // s0: number of arguments including receiver (C callee-saved) |
481 __ break_(0x826); | 1121 // s1: pointer to the first argument (C callee-saved) |
| 1122 // s2: pointer to builtin function (C callee-saved) |
| 1123 |
| 1124 if (do_gc) { |
| 1125 UNIMPLEMENTED_MIPS(); |
| 1126 } |
| 1127 |
| 1128 ExternalReference scope_depth = |
| 1129 ExternalReference::heap_always_allocate_scope_depth(); |
| 1130 if (always_allocate) { |
| 1131 UNIMPLEMENTED_MIPS(); |
| 1132 } |
| 1133 |
| 1134 // Call C built-in. |
| 1135 // a0 = argc, a1 = argv |
| 1136 __ mov(a0, s0); |
| 1137 __ mov(a1, s1); |
| 1138 |
| 1139 __ CallBuiltin(s2); |
| 1140 |
| 1141 if (always_allocate) { |
| 1142 UNIMPLEMENTED_MIPS(); |
| 1143 } |
| 1144 |
| 1145 // Check for failure result. |
| 1146 Label failure_returned; |
| 1147 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); |
| 1148 __ addiu(a2, v0, 1); |
| 1149 __ andi(t0, a2, kFailureTagMask); |
| 1150 __ Branch(eq, &failure_returned, t0, Operand(zero_reg)); |
| 1151 |
| 1152 // Exit C frame and return. |
| 1153 // v0:v1: result |
| 1154 // sp: stack pointer |
| 1155 // fp: frame pointer |
| 1156 __ LeaveExitFrame(mode_); |
| 1157 |
| 1158 // Check if we should retry or throw exception. |
| 1159 Label retry; |
| 1160 __ bind(&failure_returned); |
| 1161 ASSERT(Failure::RETRY_AFTER_GC == 0); |
| 1162 __ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize); |
| 1163 __ Branch(eq, &retry, t0, Operand(zero_reg)); |
| 1164 |
| 1165 // Special handling of out of memory exceptions. |
| 1166 Failure* out_of_memory = Failure::OutOfMemoryException(); |
| 1167 __ Branch(eq, throw_out_of_memory_exception, |
| 1168 v0, Operand(reinterpret_cast<int32_t>(out_of_memory))); |
| 1169 |
| 1170 // Retrieve the pending exception and clear the variable. |
| 1171 __ LoadExternalReference(t0, ExternalReference::the_hole_value_location()); |
| 1172 __ lw(a3, MemOperand(t0)); |
| 1173 __ LoadExternalReference(t0, |
| 1174 ExternalReference(Top::k_pending_exception_address)); |
| 1175 __ lw(v0, MemOperand(t0)); |
| 1176 __ sw(a3, MemOperand(t0)); |
| 1177 |
| 1178 // Special handling of termination exceptions which are uncatchable |
| 1179 // by javascript code. |
| 1180 __ Branch(eq, throw_termination_exception, |
| 1181 v0, Operand(Factory::termination_exception())); |
| 1182 |
| 1183 // Handle normal exception. |
| 1184 __ b(throw_normal_exception); |
| 1185 __ nop(); // Branch delay slot nop. |
| 1186 |
| 1187 __ bind(&retry); // Pass the last failure (v0) when retrying. |
482 } | 1188 } |
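The failure check in GenerateCore leans on the heap tagging scheme: a Failure object carries a tag of all ones in its low bits, so adding 1 clears exactly those bits, and the addiu/andi/Branch sequence takes the failure path only for such values. A small standalone sketch of that arithmetic follows; the tag constants (kFailureTagSize == 2, kFailureTag == 3) and the Smi/heap-object tag comments are assumptions about this V8 version, not something stated in the diff.

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Assumed tag layout: failures end in 0b11, Smis in 0, heap objects in 0b01.
    const int kFailureTagSize = 2;
    const intptr_t kFailureTag = 3;
    const intptr_t kFailureTagMask = (1 << kFailureTagSize) - 1;

    // The check the stub performs on v0: add 1, then test the low tag bits.
    bool IsFailure(intptr_t value) {
      return ((value + 1) & kFailureTagMask) == 0;
    }

    int main() {
      // The ASSERT from the stub: adding 1 to the failure tag wraps it to zero.
      assert(((kFailureTag + 1) & kFailureTagMask) == 0);

      intptr_t failure = (42 << kFailureTagSize) | kFailureTag;  // failure-tagged word
      intptr_t smi = 42 << 1;                                    // assumed Smi tagging
      intptr_t heap_object = 0x1000 | 1;                         // assumed pointer tagging

      assert(IsFailure(failure));
      assert(!IsFailure(smi));
      assert(!IsFailure(heap_object));
      std::printf("failure tag check ok\n");
      return 0;
    }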
483 | 1189 |
484 void CEntryStub::Generate(MacroAssembler* masm) { | 1190 void CEntryStub::Generate(MacroAssembler* masm) { |
485 UNIMPLEMENTED_MIPS(); | 1191 // Called from JavaScript; parameters are on stack as if calling JS function |
486 __ break_(0x831); | 1192 // a0: number of arguments including receiver |
| 1193 // a1: pointer to builtin function |
| 1194 // fp: frame pointer (restored after C call) |
| 1195 // sp: stack pointer (restored as callee's sp after C call) |
| 1196 // cp: current context (C callee-saved) |
| 1197 |
| 1198 // NOTE: Invocations of builtins may return failure objects |
| 1199 // instead of a proper result. The builtin entry handles |
| 1200 // this by performing a garbage collection and retrying the |
| 1201 // builtin once. |
| 1202 |
| 1203 // Enter the exit frame that transitions from JavaScript to C++. |
| 1204 __ EnterExitFrame(mode_, s0, s1, s2); |
| 1205 |
| 1206 // s0: number of arguments (C callee-saved) |
| 1207 // s1: pointer to first argument (C callee-saved) |
| 1208 // s2: pointer to builtin function (C callee-saved) |
| 1209 |
| 1210 Label throw_normal_exception; |
| 1211 Label throw_termination_exception; |
| 1212 Label throw_out_of_memory_exception; |
| 1213 |
| 1214 // Call into the runtime system. |
| 1215 GenerateCore(masm, |
| 1216 &throw_normal_exception, |
| 1217 &throw_termination_exception, |
| 1218 &throw_out_of_memory_exception, |
| 1219 false, |
| 1220 false); |
| 1221 |
| 1222 // Do space-specific GC and retry runtime call. |
| 1223 GenerateCore(masm, |
| 1224 &throw_normal_exception, |
| 1225 &throw_termination_exception, |
| 1226 &throw_out_of_memory_exception, |
| 1227 true, |
| 1228 false); |
| 1229 |
| 1230 // Do full GC and retry runtime call one final time. |
| 1231 Failure* failure = Failure::InternalError(); |
| 1232 __ li(v0, Operand(reinterpret_cast<int32_t>(failure))); |
| 1233 GenerateCore(masm, |
| 1234 &throw_normal_exception, |
| 1235 &throw_termination_exception, |
| 1236 &throw_out_of_memory_exception, |
| 1237 true, |
| 1238 true); |
| 1239 |
| 1240 __ bind(&throw_out_of_memory_exception); |
| 1241 GenerateThrowUncatchable(masm, OUT_OF_MEMORY); |
| 1242 |
| 1243 __ bind(&throw_termination_exception); |
| 1244 GenerateThrowUncatchable(masm, TERMINATION); |
| 1245 |
| 1246 __ bind(&throw_normal_exception); |
| 1247 GenerateThrowTOS(masm); |
487 } | 1248 } |
488 | 1249 |
489 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { | 1250 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { |
490 UNIMPLEMENTED_MIPS(); | 1251 Label invoke, exit; |
| 1252 |
| 1253 // Registers: |
| 1254 // a0: entry address |
| 1255 // a1: function |
| 1256 // a2: receiver |
| 1257 // a3: argc |
| 1258 // |
| 1259 // Stack: |
| 1260 // 4 args slots |
| 1261 // args |
491 | 1262 |
492 // Save callee saved registers on the stack. | 1263 // Save callee saved registers on the stack. |
493 __ MultiPush(kCalleeSaved | ra.bit()); | 1264 __ MultiPush((kCalleeSaved | ra.bit()) & ~sp.bit()); |
494 | 1265 |
495 // ********** State ********** | 1266 // We build an EntryFrame. |
496 // | 1267 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used. |
497 // * Registers: | 1268 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
| 1269 __ li(t2, Operand(Smi::FromInt(marker))); |
| 1270 __ li(t1, Operand(Smi::FromInt(marker))); |
| 1271 __ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address)); |
| 1272 __ lw(t0, MemOperand(t0)); |
| 1273 __ MultiPush(t0.bit() | t1.bit() | t2.bit() | t3.bit()); |
| 1274 |
| 1275 // Set up frame pointer for the frame to be pushed. |
| 1276 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset); |
| 1277 |
| 1278 // Load argv in s0 register. |
| 1279 __ lw(s0, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize + |
| 1280 StandardFrameConstants::kCArgsSlotsSize)); |
| 1281 |
| 1282 // Registers: |
498 // a0: entry_address | 1283 // a0: entry_address |
499 // a1: function | 1284 // a1: function |
500 // a2: receiver_pointer | 1285 // a2: receiver_pointer |
501 // a3: argc | 1286 // a3: argc |
| 1287 // s0: argv |
502 // | 1288 // |
503 // * Stack: | 1289 // Stack: |
504 // --------------------------- | 1290 // caller fp | |
| 1291 // function slot | entry frame |
| 1292 // context slot | |
| 1293 // bad fp (0xff...f) | |
| 1294 // callee saved registers + ra |
| 1295 // 4 args slots |
505 // args | 1296 // args |
506 // --------------------------- | 1297 |
| 1298 // Call a faked try-block that does the invoke. |
| 1299 __ bal(&invoke); |
| 1300 __ nop(); // Branch delay slot nop. |
| 1301 |
| 1302 // Caught exception: Store result (exception) in the pending |
| 1303 // exception field in the JSEnv and return a failure sentinel. |
| 1304 // Coming in here the fp will be invalid because the PushTryHandler below |
| 1305 // sets it to 0 to signal the existence of the JSEntry frame. |
| 1306 __ LoadExternalReference(t0, |
| 1307 ExternalReference(Top::k_pending_exception_address)); |
| 1308 __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0. |
| 1309 __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); |
| 1310 __ b(&exit); |
| 1311 __ nop(); // Branch delay slot nop. |
| 1312 |
| 1313 // Invoke: Link this frame into the handler chain. |
| 1314 __ bind(&invoke); |
| 1315 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER); |
| 1316 // If an exception not caught by another handler occurs, this handler |
| 1317 // returns control to the code after the bal(&invoke) above, which |
| 1318 // restores all kCalleeSaved registers (including cp and fp) to their |
| 1319 // saved values before returning a failure to C. |
| 1320 |
| 1321 // Clear any pending exceptions. |
| 1322 __ LoadExternalReference(t0, ExternalReference::the_hole_value_location()); |
| 1323 __ lw(t1, MemOperand(t0)); |
| 1324 __ LoadExternalReference(t0, |
| 1325 ExternalReference(Top::k_pending_exception_address)); |
| 1326 __ sw(t1, MemOperand(t0)); |
| 1327 |
| 1328 // Invoke the function by calling through JS entry trampoline builtin. |
| 1329 // Notice that we cannot store a reference to the trampoline code directly in |
| 1330 // this stub, because runtime stubs are not traversed when doing GC. |
| 1331 |
| 1332 // Registers: |
| 1333 // a0: entry_address |
| 1334 // a1: function |
| 1335 // a2: receiver_pointer |
| 1336 // a3: argc |
| 1337 // s0: argv |
| 1338 // |
| 1339 // Stack: |
| 1340 // handler frame |
| 1341 // entry frame |
| 1342 // callee saved registers + ra |
507 // 4 args slots | 1343 // 4 args slots |
508 // --------------------------- | 1344 // args |
509 // callee saved registers + ra | 1345 |
510 // --------------------------- | 1346 if (is_construct) { |
511 // | 1347 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline); |
512 // *************************** | 1348 __ LoadExternalReference(t0, construct_entry); |
513 | 1349 } else { |
514 __ break_(0x1234); | 1350 ExternalReference entry(Builtins::JSEntryTrampoline); |
| 1351 __ LoadExternalReference(t0, entry); |
| 1352 } |
| 1353 __ lw(t9, MemOperand(t0)); // deref address |
| 1354 |
| 1355 // Call JSEntryTrampoline. |
| 1356 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag); |
| 1357 __ CallBuiltin(t9); |
| 1358 |
| 1359 // Unlink this frame from the handler chain. When reading the |
| 1360 // address of the next handler, there is no need to use the address |
| 1361 // displacement since the current stack pointer (sp) points directly |
| 1362 // to the stack handler. |
| 1363 __ lw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset)); |
| 1364 __ LoadExternalReference(t0, ExternalReference(Top::k_handler_address)); |
| 1365 __ sw(t1, MemOperand(t0)); |
| 1366 |
| 1367 // This restores sp to its position before PushTryHandler. |
| 1368 __ addiu(sp, sp, StackHandlerConstants::kSize); |
| 1369 |
| 1370 __ bind(&exit); // v0 holds result |
| 1371 // Restore the top frame descriptors from the stack. |
| 1372 __ Pop(t1); |
| 1373 __ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address)); |
| 1374 __ sw(t1, MemOperand(t0)); |
| 1375 |
| 1376 // Reset the stack to the callee saved registers. |
| 1377 __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset); |
515 | 1378 |
516 // Restore callee saved registers from the stack. | 1379 // Restore callee saved registers from the stack. |
517 __ MultiPop(kCalleeSaved | ra.bit()); | 1380 __ MultiPop((kCalleeSaved | ra.bit()) & ~sp.bit()); |
518 | 1381 // Return. |
519 // Load a result. | 1382 __ Jump(ra); |
520 __ li(v0, Operand(0x1234)); | |
521 __ jr(ra); | |
522 // Return | |
523 __ nop(); | |
524 } | 1383 } |
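GenerateBody saves and restores registers through register-list bitmasks: each register contributes one bit via bit(), lists are combined with | and filtered with & ~, which is why sp can be excluded from the MultiPush/MultiPop sets above. A minimal standalone model of that convention is sketched below; the register numbers and the contents of kCalleeSaved are illustrative assumptions, not the real MIPS definitions.

    #include <cstdint>
    #include <cstdio>

    // Stand-in register descriptor: bit() gives the register's position in a
    // register-list mask.
    struct Register {
      int code_;
      uint32_t bit() const { return 1u << code_; }
    };

    const Register s0 = {16}, s1 = {17}, sp = {29}, fp = {30}, ra = {31};

    // Illustrative callee-saved set in the spirit of kCalleeSaved.
    const uint32_t kCalleeSaved = s0.bit() | s1.bit() | fp.bit() | sp.bit();

    // Model of MultiPush: walk the mask and "push" each selected register.
    void MultiPush(uint32_t regs) {
      for (int code = 31; code >= 0; code--) {
        if (regs & (1u << code)) std::printf("push $%d\n", code);
      }
    }

    int main() {
      // As in JSEntryStub::GenerateBody: save the callee-saved registers plus
      // ra, but mask sp out so the stack pointer itself is never pushed.
      MultiPush((kCalleeSaved | ra.bit()) & ~sp.bit());
      return 0;
    }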
525 | 1384 |
526 | 1385 |
527 // This stub performs an instanceof, calling the builtin function if | 1386 // This stub performs an instanceof, calling the builtin function if |
528 // necessary. Uses a1 for the object, a0 for the function that it may | 1387 // necessary. Uses a1 for the object, a0 for the function that it may |
529 // be an instance of (these are fetched from the stack). | 1388 // be an instance of (these are fetched from the stack). |
530 void InstanceofStub::Generate(MacroAssembler* masm) { | 1389 void InstanceofStub::Generate(MacroAssembler* masm) { |
531 UNIMPLEMENTED_MIPS(); | 1390 UNIMPLEMENTED_MIPS(); |
532 __ break_(0x845); | 1391 __ break_(0x845); |
533 } | 1392 } |
534 | 1393 |
535 | 1394 |
536 void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) { | 1395 void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) { |
537 UNIMPLEMENTED_MIPS(); | 1396 UNIMPLEMENTED_MIPS(); |
538 __ break_(0x851); | 1397 __ break_(0x851); |
(...skipping 21 matching lines...) |
560 int CompareStub::MinorKey() { | 1419 int CompareStub::MinorKey() { |
561 // Encode the two parameters in a unique 16 bit value. | 1420 // Encode the two parameters in a unique 16 bit value. |
562 ASSERT(static_cast<unsigned>(cc_) >> 28 < (1 << 15)); | 1421 ASSERT(static_cast<unsigned>(cc_) >> 28 < (1 << 15)); |
563 return (static_cast<unsigned>(cc_) >> 27) | (strict_ ? 1 : 0); | 1422 return (static_cast<unsigned>(cc_) >> 27) | (strict_ ? 1 : 0); |
564 } | 1423 } |
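MinorKey packs the stub's two parameters into a single small integer. The shift by 27 only makes sense if the condition code is encoded in the top four bits of an instruction word (the ARM-style layout this port was adapted from); under that assumption, cc_ >> 27 lands the condition in bits 1..4 and bit 0 is left for the strict flag. A hedged, standalone sketch of that packing, with illustrative condition encodings that are an assumption rather than the real MIPS values:

    #include <cassert>
    #include <cstdint>

    // Assumed ARM-style layout: the 4-bit condition sits in bits 28..31.
    uint32_t MinorKey(uint32_t cc, bool strict) {
      assert((cc >> 28) < (1u << 15));       // same sanity check as the stub
      return (cc >> 27) | (strict ? 1 : 0);  // condition in bits 1..4, strict in bit 0
    }

    int main() {
      const uint32_t eq = 0u << 28;          // illustrative encodings only
      const uint32_t ne = 1u << 28;
      assert(MinorKey(eq, false) == 0);
      assert(MinorKey(eq, true) == 1);
      assert(MinorKey(ne, false) == 2);
      assert(MinorKey(ne, true) == 3);
      return 0;
    }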
565 | 1424 |
566 | 1425 |
567 #undef __ | 1426 #undef __ |
568 | 1427 |
569 } } // namespace v8::internal | 1428 } } // namespace v8::internal |