OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 35 matching lines...) |
46 #include "scopes.h" | 46 #include "scopes.h" |
47 #include "stub-cache.h" | 47 #include "stub-cache.h" |
48 | 48 |
49 #include "mips/code-stubs-mips.h" | 49 #include "mips/code-stubs-mips.h" |
50 | 50 |
51 namespace v8 { | 51 namespace v8 { |
52 namespace internal { | 52 namespace internal { |
53 | 53 |
54 #define __ ACCESS_MASM(masm_) | 54 #define __ ACCESS_MASM(masm_) |
55 | 55 |
| 56 |
| 57 // A patch site is a location in the code that can be patched. This class |
| 58 // has a number of methods to emit the patchable code and the method |
| 59 // EmitPatchInfo to record a marker back to the patchable code. The |
| 60 // marker is an andi at, rx, #yyy instruction, where x * 0x0000ffff + yyy |
| 61 // (the raw 16-bit immediate value is used) is the delta from the pc to |
| 62 // the first instruction of the patchable code. |
| 63 class JumpPatchSite BASE_EMBEDDED { |
| 64 public: |
| 65 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { |
| 66 #ifdef DEBUG |
| 67 info_emitted_ = false; |
| 68 #endif |
| 69 } |
| 70 |
| 71 ~JumpPatchSite() { |
| 72 ASSERT(patch_site_.is_bound() == info_emitted_); |
| 73 } |
| 74 |
| 75 // When initially emitting this, ensure that a jump is always generated to skip |
| 76 // the inlined smi code. |
| 77 void EmitJumpIfNotSmi(Register reg, Label* target) { |
| 78 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 79 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 80 __ bind(&patch_site_); |
| 81 __ andi(at, reg, 0); |
| 82 // Always taken before patched. |
| 83 __ Branch(target, eq, at, Operand(zero_reg)); |
| 84 } |
| 85 |
| 86 // When initially emitting this, ensure that a jump is never generated to skip |
| 87 // the inlined smi code. |
| 88 void EmitJumpIfSmi(Register reg, Label* target) { |
| 89 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 90 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 91 __ bind(&patch_site_); |
| 92 __ andi(at, reg, 0); |
| 93 // Never taken before patched. |
| 94 __ Branch(target, ne, at, Operand(zero_reg)); |
| 95 } |
| 96 |
| 97 void EmitPatchInfo() { |
| 98 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); |
| 99 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask); |
| 100 __ andi(at, reg, delta_to_patch_site % kImm16Mask); |
| 101 #ifdef DEBUG |
| 102 info_emitted_ = true; |
| 103 #endif |
| 104 } |
| 105 |
| 106 bool is_bound() const { return patch_site_.is_bound(); } |
| 107 |
| 108 private: |
| 109 MacroAssembler* masm_; |
| 110 Label patch_site_; |
| 111 #ifdef DEBUG |
| 112 bool info_emitted_; |
| 113 #endif |
| 114 }; |
| 115 |
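| // Worked example (a sketch; on MIPS kImm16Mask == 0xffff and register |
| // code 1 is at): a delta of 5 instructions encodes as |
| // "andi at, zero_reg, 5" (register code 0, immediate 5), while a delta |
| // of 65540 encodes as "andi at, at, 5" (code 1, immediate 5). The |
| // patcher recovers the delta as reg.code() * kImm16Mask + immediate. |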
| 116 |
56 // Generate code for a JS function. On entry to the function the receiver | 117 // Generate code for a JS function. On entry to the function the receiver |
57 // and arguments have been pushed on the stack left to right. The actual | 118 // and arguments have been pushed on the stack left to right. The actual |
58 // argument count matches the formal parameter count expected by the | 119 // argument count matches the formal parameter count expected by the |
59 // function. | 120 // function. |
60 // | 121 // |
61 // The live registers are: | 122 // The live registers are: |
62 // o a1: the JS function object being called (ie, ourselves) | 123 // o a1: the JS function object being called (ie, ourselves) |
63 // o cp: our context | 124 // o cp: our context |
64 // o fp: our caller's frame pointer | 125 // o fp: our caller's frame pointer |
65 // o sp: stack pointer | 126 // o sp: stack pointer |
66 // o ra: return address | 127 // o ra: return address |
67 // | 128 // |
68 // The function builds a JS frame. Please see JavaScriptFrameConstants in | 129 // The function builds a JS frame. Please see JavaScriptFrameConstants in |
69 // frames-mips.h for its layout. | 130 // frames-mips.h for its layout. |
70 void FullCodeGenerator::Generate(CompilationInfo* info) { | 131 void FullCodeGenerator::Generate(CompilationInfo* info) { |
71 UNIMPLEMENTED_MIPS(); | 132 ASSERT(info_ == NULL); |
| 133 info_ = info; |
| 134 SetFunctionPosition(function()); |
| 135 Comment cmnt(masm_, "[ function compiled by full code generator"); |
| 136 |
| 137 #ifdef DEBUG |
| 138 if (strlen(FLAG_stop_at) > 0 && |
| 139 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
| 140 __ stop("stop-at"); |
| 141 } |
| 142 #endif |
| 143 |
| 144 int locals_count = scope()->num_stack_slots(); |
| 145 |
| 146 __ Push(ra, fp, cp, a1); |
| 147 if (locals_count > 0) { |
| 148 // Load undefined value here, so the value is ready for the loop |
| 149 // below. |
| 150 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 151 } |
| 152 // Adjust fp to point to caller's fp. |
| 153 __ Addu(fp, sp, Operand(2 * kPointerSize)); |
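| // The frame now looks like this (a sketch, matching |
| // StandardFrameConstants / JavaScriptFrameConstants): |
| // fp + 4: return address (ra) |
| // fp + 0: caller's fp |
| // fp - 4: context (cp) |
| // fp - 8: JS function (a1) |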
| 154 |
| 155 { Comment cmnt(masm_, "[ Allocate locals"); |
| 156 for (int i = 0; i < locals_count; i++) { |
| 157 __ push(at); |
| 158 } |
| 159 } |
| 160 |
| 161 bool function_in_register = true; |
| 162 |
| 163 // Possibly allocate a local context. |
| 164 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
| 165 if (heap_slots > 0) { |
| 166 Comment cmnt(masm_, "[ Allocate local context"); |
| 167 // Argument to NewContext is the function, which is in a1. |
| 168 __ push(a1); |
| 169 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 170 FastNewContextStub stub(heap_slots); |
| 171 __ CallStub(&stub); |
| 172 } else { |
| 173 __ CallRuntime(Runtime::kNewContext, 1); |
| 174 } |
| 175 function_in_register = false; |
| 176 // Context is returned in both v0 and cp. It replaces the context |
| 177 // passed to us. It's saved in the stack and kept live in cp. |
| 178 __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 179 // Copy any necessary parameters into the context. |
| 180 int num_parameters = scope()->num_parameters(); |
| 181 for (int i = 0; i < num_parameters; i++) { |
| 182 Slot* slot = scope()->parameter(i)->AsSlot(); |
| 183 if (slot != NULL && slot->type() == Slot::CONTEXT) { |
| 184 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
| 185 (num_parameters - 1 - i) * kPointerSize; |
| 186 // Load parameter from stack. |
| 187 __ lw(a0, MemOperand(fp, parameter_offset)); |
| 188 // Store it in the context. |
| 189 __ li(a1, Operand(Context::SlotOffset(slot->index()))); |
| 190 __ addu(a2, cp, a1); |
| 191 __ sw(a0, MemOperand(a2, 0)); |
| 192 // Update the write barrier. This clobbers all involved |
| 193 // registers, so we have to use two more registers to avoid |
| 194 // clobbering cp. |
| 195 __ mov(a2, cp); |
| 196 __ RecordWrite(a2, a1, a3); |
| 197 } |
| 198 } |
| 199 } |
| 200 |
| 201 Variable* arguments = scope()->arguments(); |
| 202 if (arguments != NULL) { |
| 203 // Function uses arguments object. |
| 204 Comment cmnt(masm_, "[ Allocate arguments object"); |
| 205 if (!function_in_register) { |
| 206 // Load this again, if it's used by the local context below. |
| 207 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 208 } else { |
| 209 __ mov(a3, a1); |
| 210 } |
| 211 // Receiver is just before the parameters on the caller's stack. |
| 212 int offset = scope()->num_parameters() * kPointerSize; |
| 213 __ Addu(a2, fp, |
| 214 Operand(StandardFrameConstants::kCallerSPOffset + offset)); |
| 215 __ li(a1, Operand(Smi::FromInt(scope()->num_parameters()))); |
| 216 __ Push(a3, a2, a1); |
| 217 |
| 218 // Arguments to ArgumentsAccessStub: |
| 219 // function, receiver address, parameter count. |
| 220 // The stub will rewrite the receiver and parameter count if the previous |
| 221 // stack frame was an arguments adaptor frame. |
| 222 ArgumentsAccessStub stub( |
| 223 is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT |
| 224 : ArgumentsAccessStub::NEW_NON_STRICT); |
| 225 __ CallStub(&stub); |
| 226 |
| 227 Variable* arguments_shadow = scope()->arguments_shadow(); |
| 228 if (arguments_shadow != NULL) { |
| 229 // Duplicate the value; move-to-slot operation might clobber registers. |
| 230 __ mov(a3, v0); |
| 231 Move(arguments_shadow->AsSlot(), a3, a1, a2); |
| 232 } |
| 233 Move(arguments->AsSlot(), v0, a1, a2); |
| 234 } |
| 235 |
| 236 if (FLAG_trace) { |
| 237 __ CallRuntime(Runtime::kTraceEnter, 0); |
| 238 } |
| 239 |
| 240 // Visit the declarations and body unless there is an illegal |
| 241 // redeclaration. |
| 242 if (scope()->HasIllegalRedeclaration()) { |
| 243 Comment cmnt(masm_, "[ Declarations"); |
| 244 scope()->VisitIllegalRedeclaration(this); |
| 245 |
| 246 } else { |
| 247 { Comment cmnt(masm_, "[ Declarations"); |
| 248 // For named function expressions, declare the function name as a |
| 249 // constant. |
| 250 if (scope()->is_function_scope() && scope()->function() != NULL) { |
| 251 EmitDeclaration(scope()->function(), Variable::CONST, NULL); |
| 252 } |
| 253 VisitDeclarations(scope()->declarations()); |
| 254 } |
| 255 |
| 256 { Comment cmnt(masm_, "[ Stack check"); |
| 257 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); |
| 258 Label ok; |
| 259 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
| 260 __ Branch(&ok, hs, sp, Operand(t0)); |
| 261 StackCheckStub stub; |
| 262 __ CallStub(&stub); |
| 263 __ bind(&ok); |
| 264 } |
| 265 |
| 266 { Comment cmnt(masm_, "[ Body"); |
| 267 ASSERT(loop_depth() == 0); |
| 268 VisitStatements(function()->body()); |
| 269 ASSERT(loop_depth() == 0); |
| 270 } |
| 271 } |
| 272 |
| 273 // Always emit a 'return undefined' in case control fell off the end of |
| 274 // the body. |
| 275 { Comment cmnt(masm_, "[ return <undefined>;"); |
| 276 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
| 277 } |
| 278 EmitReturnSequence(); |
72 } | 279 } |
73 | 280 |
74 | 281 |
75 void FullCodeGenerator::ClearAccumulator() { | 282 void FullCodeGenerator::ClearAccumulator() { |
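| // Smis carry a zero tag bit (kSmiTag == 0), so the raw value 0 is also |
| // Smi::FromInt(0): clearing v0 via zero_reg leaves a valid smi behind. |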
76 UNIMPLEMENTED_MIPS(); | 283 ASSERT(Smi::FromInt(0) == 0); |
| 284 __ mov(v0, zero_reg); |
77 } | 285 } |
78 | 286 |
79 | 287 |
80 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { | 288 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { |
81 UNIMPLEMENTED_MIPS(); | 289 Comment cmnt(masm_, "[ Stack check"); |
| 290 Label ok; |
| 291 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
| 292 __ Branch(&ok, hs, sp, Operand(t0)); |
| 293 StackCheckStub stub; |
| 294 // Record a mapping of this PC offset to the OSR id. This is used to find |
| 295 // the AST id from the unoptimized code in order to use it as a key into |
| 296 // the deoptimization input data found in the optimized code. |
| 297 RecordStackCheck(stmt->OsrEntryId()); |
| 298 |
| 299 __ CallStub(&stub); |
| 300 __ bind(&ok); |
| 301 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
| 302 // Record a mapping of the OSR id to this PC. This is used if the OSR |
| 303 // entry becomes the target of a bailout. We don't expect it to be, but |
| 304 // we want it to work if it is. |
| 305 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
82 } | 306 } |
83 | 307 |
84 | 308 |
85 void FullCodeGenerator::EmitReturnSequence() { | 309 void FullCodeGenerator::EmitReturnSequence() { |
86 UNIMPLEMENTED_MIPS(); | 310 Comment cmnt(masm_, "[ Return sequence"); |
| 311 if (return_label_.is_bound()) { |
| 312 __ Branch(&return_label_); |
| 313 } else { |
| 314 __ bind(&return_label_); |
| 315 if (FLAG_trace) { |
| 316 // Push the return value on the stack as the parameter. |
| 317 // Runtime::TraceExit returns its parameter in v0. |
| 318 __ push(v0); |
| 319 __ CallRuntime(Runtime::kTraceExit, 1); |
| 320 } |
| 321 |
| 322 #ifdef DEBUG |
| 323 // Add a label for checking the size of the code used for returning. |
| 324 Label check_exit_codesize; |
| 325 masm_->bind(&check_exit_codesize); |
| 326 #endif |
| 327 // Make sure that the trampoline pool is not emitted inside of the return |
| 328 // sequence. |
| 329 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 330 // Here we use masm_-> instead of the __ macro to prevent the code |
| 331 // coverage tool from instrumenting, as we rely on the code size here. |
| 332 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize; |
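| // The +1 accounts for the implicit receiver pushed below the parameters. |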
| 333 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
| 334 __ RecordJSReturn(); |
| 335 masm_->mov(sp, fp); |
| 336 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit())); |
| 337 masm_->Addu(sp, sp, Operand(sp_delta)); |
| 338 masm_->Jump(ra); |
| 339 } |
| 340 |
| 341 #ifdef DEBUG |
| 342 // Check that the size of the code used for returning is large enough |
| 343 // for the debugger's requirements. |
| 344 ASSERT(Assembler::kJSReturnSequenceInstructions <= |
| 345 masm_->InstructionsGeneratedSince(&check_exit_codesize)); |
| 346 #endif |
| 347 } |
87 } | 348 } |
88 | 349 |
89 | 350 |
90 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { | 351 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const { |
91 UNIMPLEMENTED_MIPS(); | |
92 } | 352 } |
93 | 353 |
94 | 354 |
95 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { | 355 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const { |
96 UNIMPLEMENTED_MIPS(); | 356 codegen()->Move(result_register(), slot); |
97 } | 357 } |
98 | 358 |
99 | 359 |
100 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { | 360 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const { |
101 UNIMPLEMENTED_MIPS(); | 361 codegen()->Move(result_register(), slot); |
| 362 __ push(result_register()); |
102 } | 363 } |
103 | 364 |
104 | 365 |
105 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { | 366 void FullCodeGenerator::TestContext::Plug(Slot* slot) const { |
106 UNIMPLEMENTED_MIPS(); | 367 // For simplicity we always test the accumulator register. |
| 368 codegen()->Move(result_register(), slot); |
| 369 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
| 370 codegen()->DoTest(true_label_, false_label_, fall_through_); |
107 } | 371 } |
108 | 372 |
109 | 373 |
110 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { | 374 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const { |
111 UNIMPLEMENTED_MIPS(); | |
112 } | 375 } |
113 | 376 |
114 | 377 |
115 void FullCodeGenerator::AccumulatorValueContext::Plug( | 378 void FullCodeGenerator::AccumulatorValueContext::Plug( |
116 Heap::RootListIndex index) const { | 379 Heap::RootListIndex index) const { |
117 UNIMPLEMENTED_MIPS(); | 380 __ LoadRoot(result_register(), index); |
118 } | 381 } |
119 | 382 |
120 | 383 |
121 void FullCodeGenerator::StackValueContext::Plug( | 384 void FullCodeGenerator::StackValueContext::Plug( |
122 Heap::RootListIndex index) const { | 385 Heap::RootListIndex index) const { |
123 UNIMPLEMENTED_MIPS(); | 386 __ LoadRoot(result_register(), index); |
| 387 __ push(result_register()); |
124 } | 388 } |
125 | 389 |
126 | 390 |
127 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { | 391 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { |
128 UNIMPLEMENTED_MIPS(); | 392 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, |
| 393 true, |
| 394 true_label_, |
| 395 false_label_); |
| 396 if (index == Heap::kUndefinedValueRootIndex || |
| 397 index == Heap::kNullValueRootIndex || |
| 398 index == Heap::kFalseValueRootIndex) { |
| 399 if (false_label_ != fall_through_) __ Branch(false_label_); |
| 400 } else if (index == Heap::kTrueValueRootIndex) { |
| 401 if (true_label_ != fall_through_) __ Branch(true_label_); |
| 402 } else { |
| 403 __ LoadRoot(result_register(), index); |
| 404 codegen()->DoTest(true_label_, false_label_, fall_through_); |
| 405 } |
129 } | 406 } |
130 | 407 |
131 | 408 |
132 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const { | 409 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const { |
133 UNIMPLEMENTED_MIPS(); | |
134 } | 410 } |
135 | 411 |
136 | 412 |
137 void FullCodeGenerator::AccumulatorValueContext::Plug( | 413 void FullCodeGenerator::AccumulatorValueContext::Plug( |
138 Handle<Object> lit) const { | 414 Handle<Object> lit) const { |
139 UNIMPLEMENTED_MIPS(); | 415 __ li(result_register(), Operand(lit)); |
140 } | 416 } |
141 | 417 |
142 | 418 |
143 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { | 419 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { |
144 UNIMPLEMENTED_MIPS(); | 420 // Immediates cannot be pushed directly. |
| 421 __ li(result_register(), Operand(lit)); |
| 422 __ push(result_register()); |
145 } | 423 } |
146 | 424 |
147 | 425 |
148 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { | 426 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { |
149 UNIMPLEMENTED_MIPS(); | 427 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, |
| 428 true, |
| 429 true_label_, |
| 430 false_label_); |
| 431 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals. |
| 432 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { |
| 433 if (false_label_ != fall_through_) __ Branch(false_label_); |
| 434 } else if (lit->IsTrue() || lit->IsJSObject()) { |
| 435 if (true_label_ != fall_through_) __ Branch(true_label_); |
| 436 } else if (lit->IsString()) { |
| 437 if (String::cast(*lit)->length() == 0) { |
| 438 if (false_label_ != fall_through_) __ Branch(false_label_); |
| 439 } else { |
| 440 if (true_label_ != fall_through_) __ Branch(true_label_); |
| 441 } |
| 442 } else if (lit->IsSmi()) { |
| 443 if (Smi::cast(*lit)->value() == 0) { |
| 444 if (false_label_ != fall_through_) __ Branch(false_label_); |
| 445 } else { |
| 446 if (true_label_ != fall_through_) __ Branch(true_label_); |
| 447 } |
| 448 } else { |
| 449 // For simplicity we always test the accumulator register. |
| 450 __ li(result_register(), Operand(lit)); |
| 451 codegen()->DoTest(true_label_, false_label_, fall_through_); |
| 452 } |
150 } | 453 } |
151 | 454 |
152 | 455 |
153 void FullCodeGenerator::EffectContext::DropAndPlug(int count, | 456 void FullCodeGenerator::EffectContext::DropAndPlug(int count, |
154 Register reg) const { | 457 Register reg) const { |
155 UNIMPLEMENTED_MIPS(); | 458 ASSERT(count > 0); |
| 459 __ Drop(count); |
156 } | 460 } |
157 | 461 |
158 | 462 |
159 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( | 463 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug( |
160 int count, | 464 int count, |
161 Register reg) const { | 465 Register reg) const { |
162 UNIMPLEMENTED_MIPS(); | 466 ASSERT(count > 0); |
| 467 __ Drop(count); |
| 468 __ Move(result_register(), reg); |
163 } | 469 } |
164 | 470 |
165 | 471 |
166 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, | 472 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, |
167 Register reg) const { | 473 Register reg) const { |
168 UNIMPLEMENTED_MIPS(); | 474 ASSERT(count > 0); |
| 475 if (count > 1) __ Drop(count - 1); |
| 476 __ sw(reg, MemOperand(sp, 0)); |
169 } | 477 } |
170 | 478 |
171 | 479 |
172 void FullCodeGenerator::TestContext::DropAndPlug(int count, | 480 void FullCodeGenerator::TestContext::DropAndPlug(int count, |
173 Register reg) const { | 481 Register reg) const { |
174 UNIMPLEMENTED_MIPS(); | 482 ASSERT(count > 0); |
| 483 // For simplicity we always test the accumulator register. |
| 484 __ Drop(count); |
| 485 __ Move(result_register(), reg); |
| 486 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
| 487 codegen()->DoTest(true_label_, false_label_, fall_through_); |
175 } | 488 } |
176 | 489 |
177 | 490 |
178 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, | 491 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, |
179 Label* materialize_false) const { | 492 Label* materialize_false) const { |
180 UNIMPLEMENTED_MIPS(); | 493 ASSERT(materialize_true == materialize_false); |
| 494 __ bind(materialize_true); |
181 } | 495 } |
182 | 496 |
183 | 497 |
184 void FullCodeGenerator::AccumulatorValueContext::Plug( | 498 void FullCodeGenerator::AccumulatorValueContext::Plug( |
185 Label* materialize_true, | 499 Label* materialize_true, |
186 Label* materialize_false) const { | 500 Label* materialize_false) const { |
187 UNIMPLEMENTED_MIPS(); | 501 Label done; |
| 502 __ bind(materialize_true); |
| 503 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); |
| 504 __ Branch(&done); |
| 505 __ bind(materialize_false); |
| 506 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); |
| 507 __ bind(&done); |
188 } | 508 } |
189 | 509 |
190 | 510 |
191 void FullCodeGenerator::StackValueContext::Plug( | 511 void FullCodeGenerator::StackValueContext::Plug( |
192 Label* materialize_true, | 512 Label* materialize_true, |
193 Label* materialize_false) const { | 513 Label* materialize_false) const { |
194 UNIMPLEMENTED_MIPS(); | 514 Label done; |
| 515 __ bind(materialize_true); |
| 516 __ LoadRoot(at, Heap::kTrueValueRootIndex); |
| 517 __ push(at); |
| 518 __ Branch(&done); |
| 519 __ bind(materialize_false); |
| 520 __ LoadRoot(at, Heap::kFalseValueRootIndex); |
| 521 __ push(at); |
| 522 __ bind(&done); |
195 } | 523 } |
196 | 524 |
197 | 525 |
198 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, | 526 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, |
199 Label* materialize_false) const { | 527 Label* materialize_false) const { |
200 UNIMPLEMENTED_MIPS(); | 528 ASSERT(materialize_true == true_label_); |
| 529 ASSERT(materialize_false == false_label_); |
201 } | 530 } |
202 | 531 |
203 | 532 |
204 void FullCodeGenerator::EffectContext::Plug(bool flag) const { | 533 void FullCodeGenerator::EffectContext::Plug(bool flag) const { |
205 UNIMPLEMENTED_MIPS(); | |
206 } | 534 } |
207 | 535 |
208 | 536 |
209 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { | 537 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { |
210 UNIMPLEMENTED_MIPS(); | 538 Heap::RootListIndex value_root_index = |
| 539 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; |
| 540 __ LoadRoot(result_register(), value_root_index); |
211 } | 541 } |
212 | 542 |
213 | 543 |
214 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { | 544 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { |
215 UNIMPLEMENTED_MIPS(); | 545 Heap::RootListIndex value_root_index = |
| 546 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; |
| 547 __ LoadRoot(at, value_root_index); |
| 548 __ push(at); |
216 } | 549 } |
217 | 550 |
218 | 551 |
219 void FullCodeGenerator::TestContext::Plug(bool flag) const { | 552 void FullCodeGenerator::TestContext::Plug(bool flag) const { |
220 UNIMPLEMENTED_MIPS(); | 553 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, |
| 554 true, |
| 555 true_label_, |
| 556 false_label_); |
| 557 if (flag) { |
| 558 if (true_label_ != fall_through_) __ Branch(true_label_); |
| 559 } else { |
| 560 if (false_label_ != fall_through_) __ Branch(false_label_); |
| 561 } |
221 } | 562 } |
222 | 563 |
223 | 564 |
224 void FullCodeGenerator::DoTest(Label* if_true, | 565 void FullCodeGenerator::DoTest(Label* if_true, |
225 Label* if_false, | 566 Label* if_false, |
226 Label* fall_through) { | 567 Label* fall_through) { |
227 UNIMPLEMENTED_MIPS(); | 568 if (CpuFeatures::IsSupported(FPU)) { |
228 } | 569 CpuFeatures::Scope scope(FPU); |
229 | 570 // Emit the inlined tests assumed by the stub. |
230 | 571 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
231 // Original prototype for mips, needs arch-indep change. Leave out for now. | 572 __ Branch(if_false, eq, result_register(), Operand(at)); |
232 // void FullCodeGenerator::Split(Condition cc, | 573 __ LoadRoot(at, Heap::kTrueValueRootIndex); |
233 // Register lhs, | 574 __ Branch(if_true, eq, result_register(), Operand(at)); |
234 // const Operand& rhs, | 575 __ LoadRoot(at, Heap::kFalseValueRootIndex); |
235 // Label* if_true, | 576 __ Branch(if_false, eq, result_register(), Operand(at)); |
236 // Label* if_false, | 577 STATIC_ASSERT(kSmiTag == 0); |
237 // Label* fall_through) { | 578 __ Branch(if_false, eq, result_register(), Operand(zero_reg)); |
| 579 __ JumpIfSmi(result_register(), if_true); |
| 580 |
| 581 // Call the ToBoolean stub for all other cases. |
| 582 ToBooleanStub stub(result_register()); |
| 583 __ CallStub(&stub); |
| 584 __ mov(at, zero_reg); |
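| // at is zero here, so the shared Split below tests the stub result in |
| // v0 against zero; the runtime path instead compares v0 against the |
| // false value it loads into at. |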
| 585 } else { |
| 586 // Call the runtime to find the boolean value of the source and then |
| 587 // translate it into control flow to the pair of labels. |
| 588 __ push(result_register()); |
| 589 __ CallRuntime(Runtime::kToBool, 1); |
| 590 __ LoadRoot(at, Heap::kFalseValueRootIndex); |
| 591 } |
| 592 |
| 593 // The stub returns nonzero for true. |
| 594 Split(ne, v0, Operand(at), if_true, if_false, fall_through); |
| 595 } |
| 596 |
| 597 |
238 void FullCodeGenerator::Split(Condition cc, | 598 void FullCodeGenerator::Split(Condition cc, |
| 599 Register lhs, |
| 600 const Operand& rhs, |
239 Label* if_true, | 601 Label* if_true, |
240 Label* if_false, | 602 Label* if_false, |
241 Label* fall_through) { | 603 Label* fall_through) { |
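| // If one target is the fall-through, a single (possibly negated) |
| // conditional branch suffices; otherwise branch to if_true and jump |
| // unconditionally to if_false. |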
242 UNIMPLEMENTED_MIPS(); | 604 if (if_false == fall_through) { |
| 605 __ Branch(if_true, cc, lhs, rhs); |
| 606 } else if (if_true == fall_through) { |
| 607 __ Branch(if_false, NegateCondition(cc), lhs, rhs); |
| 608 } else { |
| 609 __ Branch(if_true, cc, lhs, rhs); |
| 610 __ Branch(if_false); |
| 611 } |
243 } | 612 } |
244 | 613 |
245 | 614 |
246 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { | 615 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) { |
247 UNIMPLEMENTED_MIPS(); | 616 switch (slot->type()) { |
248 return MemOperand(zero_reg, 0); | 617 case Slot::PARAMETER: |
| 618 case Slot::LOCAL: |
| 619 return MemOperand(fp, SlotOffset(slot)); |
| 620 case Slot::CONTEXT: { |
| 621 int context_chain_length = |
| 622 scope()->ContextChainLength(slot->var()->scope()); |
| 623 __ LoadContext(scratch, context_chain_length); |
| 624 return ContextOperand(scratch, slot->index()); |
| 625 } |
| 626 case Slot::LOOKUP: |
| 627 UNREACHABLE(); |
| 628 } |
| 629 UNREACHABLE(); |
| 630 return MemOperand(v0, 0); |
249 } | 631 } |
250 | 632 |
251 | 633 |
252 void FullCodeGenerator::Move(Register destination, Slot* source) { | 634 void FullCodeGenerator::Move(Register destination, Slot* source) { |
253 UNIMPLEMENTED_MIPS(); | 635 // Use destination as scratch. |
254 } | 636 MemOperand slot_operand = EmitSlotSearch(source, destination); |
255 | 637 __ lw(destination, slot_operand); |
256 | 638 } |
| 639 |
| 640 |
257 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, | 641 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, |
258 bool should_normalize, | 642 bool should_normalize, |
259 Label* if_true, | 643 Label* if_true, |
260 Label* if_false) { | 644 Label* if_false) { |
261 UNIMPLEMENTED_MIPS(); | 645 // Only prepare for bailouts before splits if we're in a test |
| 646 // context. Otherwise, we let the Visit function deal with the |
| 647 // preparation to avoid preparing with the same AST id twice. |
| 648 if (!context()->IsTest() || !info_->IsOptimizable()) return; |
| 649 |
| 650 Label skip; |
| 651 if (should_normalize) __ Branch(&skip); |
| 652 |
| 653 ForwardBailoutStack* current = forward_bailout_stack_; |
| 654 while (current != NULL) { |
| 655 PrepareForBailout(current->expr(), state); |
| 656 current = current->parent(); |
| 657 } |
| 658 |
| 659 if (should_normalize) { |
| 660 __ LoadRoot(t0, Heap::kTrueValueRootIndex); |
| 661 Split(eq, a0, Operand(t0), if_true, if_false, NULL); |
| 662 __ bind(&skip); |
| 663 } |
262 } | 664 } |
263 | 665 |
264 | 666 |
265 void FullCodeGenerator::Move(Slot* dst, | 667 void FullCodeGenerator::Move(Slot* dst, |
266 Register src, | 668 Register src, |
267 Register scratch1, | 669 Register scratch1, |
268 Register scratch2) { | 670 Register scratch2) { |
269 UNIMPLEMENTED_MIPS(); | 671 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented. |
| 672 ASSERT(!scratch1.is(src) && !scratch2.is(src)); |
| 673 MemOperand location = EmitSlotSearch(dst, scratch1); |
| 674 __ sw(src, location); |
| 675 // Emit the write barrier code if the location is in the heap. |
| 676 if (dst->type() == Slot::CONTEXT) { |
| 677 __ RecordWrite(scratch1, |
| 678 Operand(Context::SlotOffset(dst->index())), |
| 679 scratch2, |
| 680 src); |
| 681 } |
270 } | 682 } |
271 | 683 |
272 | 684 |
273 void FullCodeGenerator::EmitDeclaration(Variable* variable, | 685 void FullCodeGenerator::EmitDeclaration(Variable* variable, |
274 Variable::Mode mode, | 686 Variable::Mode mode, |
275 FunctionLiteral* function) { | 687 FunctionLiteral* function) { |
276 UNIMPLEMENTED_MIPS(); | 688 Comment cmnt(masm_, "[ Declaration"); |
| 689 ASSERT(variable != NULL); // Must have been resolved. |
| 690 Slot* slot = variable->AsSlot(); |
| 691 Property* prop = variable->AsProperty(); |
| 692 |
| 693 if (slot != NULL) { |
| 694 switch (slot->type()) { |
| 695 case Slot::PARAMETER: |
| 696 case Slot::LOCAL: |
| 697 if (mode == Variable::CONST) { |
| 698 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 699 __ sw(t0, MemOperand(fp, SlotOffset(slot))); |
| 700 } else if (function != NULL) { |
| 701 VisitForAccumulatorValue(function); |
| 702 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); |
| 703 } |
| 704 break; |
| 705 |
| 706 case Slot::CONTEXT: |
| 707 // We bypass the general EmitSlotSearch because we know more about |
| 708 // this specific context. |
| 709 |
| 710 // The variable in the decl always resides in the current function |
| 711 // context. |
| 712 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope())); |
| 713 if (FLAG_debug_code) { |
| 714 // Check that we're not inside a 'with'. |
| 715 __ lw(a1, ContextOperand(cp, Context::FCONTEXT_INDEX)); |
| 716 __ Check(eq, "Unexpected declaration in current context.", |
| 717 a1, Operand(cp)); |
| 718 } |
| 719 if (mode == Variable::CONST) { |
| 720 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 721 __ sw(at, ContextOperand(cp, slot->index())); |
| 722 // No write barrier since the_hole_value is in old space. |
| 723 } else if (function != NULL) { |
| 724 VisitForAccumulatorValue(function); |
| 725 __ sw(result_register(), ContextOperand(cp, slot->index())); |
| 726 int offset = Context::SlotOffset(slot->index()); |
| 727 // We know that we have written a function, which is not a smi. |
| 728 __ mov(a1, cp); |
| 729 __ RecordWrite(a1, Operand(offset), a2, result_register()); |
| 730 } |
| 731 break; |
| 732 |
| 733 case Slot::LOOKUP: { |
| 734 __ li(a2, Operand(variable->name())); |
| 735 // Declaration nodes are always introduced in one of two modes. |
| 736 ASSERT(mode == Variable::VAR || |
| 737 mode == Variable::CONST); |
| 738 PropertyAttributes attr = |
| 739 (mode == Variable::VAR) ? NONE : READ_ONLY; |
| 740 __ li(a1, Operand(Smi::FromInt(attr))); |
| 741 // Push initial value, if any. |
| 742 // Note: For variables we must not push an initial value (such as |
| 743 // 'undefined') because we may have a (legal) redeclaration and we |
| 744 // must not destroy the current value. |
| 745 if (mode == Variable::CONST) { |
| 746 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); |
| 747 __ Push(cp, a2, a1, a0); |
| 748 } else if (function != NULL) { |
| 749 __ Push(cp, a2, a1); |
| 750 // Push initial value for function declaration. |
| 751 VisitForStackValue(function); |
| 752 } else { |
| 753 ASSERT(Smi::FromInt(0) == 0); |
| 754 // No initial value! |
| 755 __ mov(a0, zero_reg); // Smi::FromInt(0). |
| 756 __ Push(cp, a2, a1, a0); |
| 757 } |
| 758 __ CallRuntime(Runtime::kDeclareContextSlot, 4); |
| 759 break; |
| 760 } |
| 761 } |
| 762 |
| 763 } else if (prop != NULL) { |
| 764 if (function != NULL || mode == Variable::CONST) { |
| 765 // We are declaring a function or constant that rewrites to a |
| 766 // property. Use (keyed) IC to set the initial value. We |
| 767 // cannot visit the rewrite because it's shared and we risk |
| 768 // recording duplicate AST IDs for bailouts from optimized code. |
| 769 ASSERT(prop->obj()->AsVariableProxy() != NULL); |
| 770 { AccumulatorValueContext for_object(this); |
| 771 EmitVariableLoad(prop->obj()->AsVariableProxy()->var()); |
| 772 } |
| 773 if (function != NULL) { |
| 774 __ push(result_register()); |
| 775 VisitForAccumulatorValue(function); |
| 776 __ mov(a0, result_register()); |
| 777 __ pop(a2); |
| 778 } else { |
| 779 __ mov(a2, result_register()); |
| 780 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex); |
| 781 } |
| 782 ASSERT(prop->key()->AsLiteral() != NULL && |
| 783 prop->key()->AsLiteral()->handle()->IsSmi()); |
| 784 __ li(a1, Operand(prop->key()->AsLiteral()->handle())); |
| 785 |
| 786 Handle<Code> ic = is_strict_mode() |
| 787 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 788 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 789 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 790 // Value in v0 is ignored (declarations are statements). |
| 791 } |
| 792 } |
277 } | 793 } |
278 | 794 |
279 | 795 |
280 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { | 796 void FullCodeGenerator::VisitDeclaration(Declaration* decl) { |
281 UNIMPLEMENTED_MIPS(); | 797 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun()); |
282 } | 798 } |
283 | 799 |
284 | 800 |
285 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 801 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
286 UNIMPLEMENTED_MIPS(); | 802 // Call the runtime to declare the globals. |
| 803 // The context is the first argument. |
| 804 __ li(a2, Operand(pairs)); |
| 805 __ li(a1, Operand(Smi::FromInt(is_eval() ? 1 : 0))); |
| 806 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); |
| 807 __ Push(cp, a2, a1, a0); |
| 808 __ CallRuntime(Runtime::kDeclareGlobals, 4); |
| 809 // Return value is ignored. |
287 } | 810 } |
288 | 811 |
289 | 812 |
290 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { | 813 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { |
291 UNIMPLEMENTED_MIPS(); | 814 Comment cmnt(masm_, "[ SwitchStatement"); |
| 815 Breakable nested_statement(this, stmt); |
| 816 SetStatementPosition(stmt); |
| 817 |
| 818 // Keep the switch value on the stack until a case matches. |
| 819 VisitForStackValue(stmt->tag()); |
| 820 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
| 821 |
| 822 ZoneList<CaseClause*>* clauses = stmt->cases(); |
| 823 CaseClause* default_clause = NULL; // Can occur anywhere in the list. |
| 824 |
| 825 Label next_test; // Recycled for each test. |
| 826 // Compile all the tests with branches to their bodies. |
| 827 for (int i = 0; i < clauses->length(); i++) { |
| 828 CaseClause* clause = clauses->at(i); |
| 829 clause->body_target()->Unuse(); |
| 830 |
| 831 // The default is not a test, but remember it as final fall through. |
| 832 if (clause->is_default()) { |
| 833 default_clause = clause; |
| 834 continue; |
| 835 } |
| 836 |
| 837 Comment cmnt(masm_, "[ Case comparison"); |
| 838 __ bind(&next_test); |
| 839 next_test.Unuse(); |
| 840 |
| 841 // Compile the label expression. |
| 842 VisitForAccumulatorValue(clause->label()); |
| 843 __ mov(a0, result_register()); // CompareStub requires args in a0, a1. |
| 844 |
| 845 // Perform the comparison as if via '==='. |
| 846 __ lw(a1, MemOperand(sp, 0)); // Switch value. |
| 847 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
| 848 JumpPatchSite patch_site(masm_); |
| 849 if (inline_smi_code) { |
| 850 Label slow_case; |
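| // The low tag bit of the OR is clear only if both operands are smis, |
| // so a single smi check covers the pair. |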
| 851 __ or_(a2, a1, a0); |
| 852 patch_site.EmitJumpIfNotSmi(a2, &slow_case); |
| 853 |
| 854 __ Branch(&next_test, ne, a1, Operand(a0)); |
| 855 __ Drop(1); // Switch value is no longer needed. |
| 856 __ Branch(clause->body_target()); |
| 857 |
| 858 __ bind(&slow_case); |
| 859 } |
| 860 |
| 861 // Record position before stub call for type feedback. |
| 862 SetSourcePosition(clause->position()); |
| 863 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); |
| 864 EmitCallIC(ic, &patch_site, clause->CompareId()); |
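| // The comparison IC returns zero (strcmp-style) when the operands |
| // are equal. |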
| 865 __ Branch(&next_test, ne, v0, Operand(zero_reg)); |
| 866 __ Drop(1); // Switch value is no longer needed. |
| 867 __ Branch(clause->body_target()); |
| 868 } |
| 869 |
| 870 // Discard the test value and jump to the default if present, otherwise to |
| 871 // the end of the statement. |
| 872 __ bind(&next_test); |
| 873 __ Drop(1); // Switch value is no longer needed. |
| 874 if (default_clause == NULL) { |
| 875 __ Branch(nested_statement.break_target()); |
| 876 } else { |
| 877 __ Branch(default_clause->body_target()); |
| 878 } |
| 879 |
| 880 // Compile all the case bodies. |
| 881 for (int i = 0; i < clauses->length(); i++) { |
| 882 Comment cmnt(masm_, "[ Case body"); |
| 883 CaseClause* clause = clauses->at(i); |
| 884 __ bind(clause->body_target()); |
| 885 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); |
| 886 VisitStatements(clause->statements()); |
| 887 } |
| 888 |
| 889 __ bind(nested_statement.break_target()); |
| 890 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); |
292 } | 891 } |
293 | 892 |
294 | 893 |
295 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { | 894 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { |
296 UNIMPLEMENTED_MIPS(); | 895 Comment cmnt(masm_, "[ ForInStatement"); |
| 896 SetStatementPosition(stmt); |
| 897 |
| 898 Label loop, exit; |
| 899 ForIn loop_statement(this, stmt); |
| 900 increment_loop_depth(); |
| 901 |
| 902 // Get the object to enumerate over. Both SpiderMonkey and JSC |
| 903 // ignore null and undefined in contrast to the specification; see |
| 904 // ECMA-262 section 12.6.4. |
| 905 VisitForAccumulatorValue(stmt->enumerable()); |
| 906 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below. |
| 907 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 908 __ Branch(&exit, eq, a0, Operand(at)); |
| 909 Register null_value = t1; |
| 910 __ LoadRoot(null_value, Heap::kNullValueRootIndex); |
| 911 __ Branch(&exit, eq, a0, Operand(null_value)); |
| 912 |
| 913 // Convert the object to a JS object. |
| 914 Label convert, done_convert; |
| 915 __ JumpIfSmi(a0, &convert); |
| 916 __ GetObjectType(a0, a1, a1); |
| 917 __ Branch(&done_convert, hs, a1, Operand(FIRST_JS_OBJECT_TYPE)); |
| 918 __ bind(&convert); |
| 919 __ push(a0); |
| 920 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 921 __ mov(a0, v0); |
| 922 __ bind(&done_convert); |
| 923 __ push(a0); |
| 924 |
| 925 // Check cache validity in generated code. This is a fast case for |
| 926 // the JSObject::IsSimpleEnum cache validity checks. If we cannot |
| 927 // guarantee cache validity, call the runtime system to check cache |
| 928 // validity or get the property names in a fixed array. |
| 929 Label next, call_runtime; |
| 930 // Preload a couple of values used in the loop. |
| 931 Register empty_fixed_array_value = t2; |
| 932 __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex); |
| 933 Register empty_descriptor_array_value = t3; |
| 934 __ LoadRoot(empty_descriptor_array_value, |
| 935 Heap::kEmptyDescriptorArrayRootIndex); |
| 936 __ mov(a1, a0); |
| 937 __ bind(&next); |
| 938 |
| 939 // Check that there are no elements. Register a1 contains the |
| 940 // current JS object we've reached through the prototype chain. |
| 941 __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset)); |
| 942 __ Branch(&call_runtime, ne, a2, Operand(empty_fixed_array_value)); |
| 943 |
| 944 // Check that instance descriptors are not empty so that we can |
| 945 // check for an enum cache. Leave the map in a2 for the subsequent |
| 946 // prototype load. |
| 947 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset)); |
| 948 __ lw(a3, FieldMemOperand(a2, Map::kInstanceDescriptorsOffset)); |
| 949 __ Branch(&call_runtime, eq, a3, Operand(empty_descriptor_array_value)); |
| 950 |
| 951 // Check that there is an enum cache in the non-empty instance |
| 952 // descriptors (a3). This is the case if the next enumeration |
| 953 // index field does not contain a smi. |
| 954 __ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumerationIndexOffset)); |
| 955 __ JumpIfSmi(a3, &call_runtime); |
| 956 |
| 957 // For all objects but the receiver, check that the cache is empty. |
| 958 Label check_prototype; |
| 959 __ Branch(&check_prototype, eq, a1, Operand(a0)); |
| 960 __ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 961 __ Branch(&call_runtime, ne, a3, Operand(empty_fixed_array_value)); |
| 962 |
| 963 // Load the prototype from the map and loop if non-null. |
| 964 __ bind(&check_prototype); |
| 965 __ lw(a1, FieldMemOperand(a2, Map::kPrototypeOffset)); |
| 966 __ Branch(&next, ne, a1, Operand(null_value)); |
| 967 |
| 968 // The enum cache is valid. Load the map of the object being |
| 969 // iterated over and use the cache for the iteration. |
| 970 Label use_cache; |
| 971 __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset)); |
| 972 __ Branch(&use_cache); |
| 973 |
| 974 // Get the set of properties to enumerate. |
| 975 __ bind(&call_runtime); |
| 976 __ push(a0); // Duplicate the enumerable object on the stack. |
| 977 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
| 978 |
| 979 // If we got a map from the runtime call, we can do a fast |
| 980 // modification check. Otherwise, we got a fixed array, and we have |
| 981 // to do a slow check. |
| 982 Label fixed_array; |
| 983 __ mov(a2, v0); |
| 984 __ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset)); |
| 985 __ LoadRoot(at, Heap::kMetaMapRootIndex); |
| 986 __ Branch(&fixed_array, ne, a1, Operand(at)); |
| 987 |
| 988 // We got a map in register v0. Get the enumeration cache from it. |
| 989 __ bind(&use_cache); |
| 990 __ lw(a1, FieldMemOperand(v0, Map::kInstanceDescriptorsOffset)); |
| 991 __ lw(a1, FieldMemOperand(a1, DescriptorArray::kEnumerationIndexOffset)); |
| 992 __ lw(a2, FieldMemOperand(a1, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 993 |
| 994 // Set up the four remaining stack slots. |
| 995 __ push(v0); // Map. |
| 996 __ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset)); |
| 997 __ li(a0, Operand(Smi::FromInt(0))); |
| 998 // Push enumeration cache, enumeration cache length (as smi) and zero. |
| 999 __ Push(a2, a1, a0); |
| 1000 __ jmp(&loop); |
| 1001 |
| 1002 // We got a fixed array in register v0. Iterate through that. |
| 1003 __ bind(&fixed_array); |
| 1004 __ li(a1, Operand(Smi::FromInt(0))); // Map (0) - force slow check. |
| 1005 __ Push(a1, v0); |
| 1006 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); |
| 1007 __ li(a0, Operand(Smi::FromInt(0))); |
| 1008 __ Push(a1, a0); // Fixed array length (as smi) and initial index. |
| 1009 |
| 1010 // Generate code for doing the condition check. |
| 1011 __ bind(&loop); |
| 1012 // Load the current count to a0, load the length to a1. |
| 1013 __ lw(a0, MemOperand(sp, 0 * kPointerSize)); |
| 1014 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); |
| 1015 __ Branch(loop_statement.break_target(), hs, a0, Operand(a1)); |
| 1016 |
| 1017 // Get the current entry of the array into register a3. |
| 1018 __ lw(a2, MemOperand(sp, 2 * kPointerSize)); |
| 1019 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 1020 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); |
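| // On 32-bit MIPS this shift amount is 2 - 1 == 1, turning the smi |
| // index (value << 1) into a byte offset (value << 2). |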
| 1021 __ addu(t0, a2, t0); // Array base + scaled (smi) index. |
| 1022 __ lw(a3, MemOperand(t0)); // Current entry. |
| 1023 |
| 1024 // Get the expected map from the stack or a zero map in the |
| 1025 // permanent slow case into register a2. |
| 1026 __ lw(a2, MemOperand(sp, 3 * kPointerSize)); |
| 1027 |
| 1028 // Check if the expected map still matches that of the enumerable. |
| 1029 // If not, we have to filter the key. |
| 1030 Label update_each; |
| 1031 __ lw(a1, MemOperand(sp, 4 * kPointerSize)); |
| 1032 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); |
| 1033 __ Branch(&update_each, eq, t0, Operand(a2)); |
| 1034 |
| 1035 // Convert the entry to a string or (smi) 0 if it isn't a property |
| 1036 // any more. If the property has been removed while iterating, we |
| 1037 // just skip it. |
| 1038 __ push(a1); // Enumerable. |
| 1039 __ push(a3); // Current entry. |
| 1040 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); |
| 1041 __ mov(a3, result_register()); |
| 1042 __ Branch(loop_statement.continue_target(), eq, a3, Operand(zero_reg)); |
| 1043 |
| 1044 // Update the 'each' property or variable from the possibly filtered |
| 1045 // entry in register a3. |
| 1046 __ bind(&update_each); |
| 1047 __ mov(result_register(), a3); |
| 1048 // Perform the assignment as if via '='. |
| 1049 { EffectContext context(this); |
| 1050 EmitAssignment(stmt->each(), stmt->AssignmentId()); |
| 1051 } |
| 1052 |
| 1053 // Generate code for the body of the loop. |
| 1054 Visit(stmt->body()); |
| 1055 |
| 1056 // Generate code for going to the next element by incrementing |
| 1057 // the index (smi) stored on top of the stack. |
| 1058 __ bind(loop_statement.continue_target()); |
| 1059 __ pop(a0); |
| 1060 __ Addu(a0, a0, Operand(Smi::FromInt(1))); |
| 1061 __ push(a0); |
| 1062 |
| 1063 EmitStackCheck(stmt); |
| 1064 __ Branch(&loop); |
| 1065 |
| 1066 // Remove the pointers stored on the stack. |
| 1067 __ bind(loop_statement.break_target()); |
| 1068 __ Drop(5); |
| 1069 |
| 1070 // Exit and decrement the loop depth. |
| 1071 __ bind(&exit); |
| 1072 decrement_loop_depth(); |
297 } | 1073 } |
298 | 1074 |
299 | 1075 |
300 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, | 1076 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, |
301 bool pretenure) { | 1077 bool pretenure) { |
302 UNIMPLEMENTED_MIPS(); | 1078 // Use the fast case closure allocation code that allocates in new |
| 1079 // space for nested functions that don't need literals cloning. If |
| 1080 // we're running with the --always-opt or the --prepare-always-opt |
| 1081 // flag, we need to use the runtime function so that the new function |
| 1082 // we are creating here gets a chance to have its code optimized and |
| 1083 // doesn't just get a copy of the existing unoptimized code. |
| 1084 if (!FLAG_always_opt && |
| 1085 !FLAG_prepare_always_opt && |
| 1086 !pretenure && |
| 1087 scope()->is_function_scope() && |
| 1088 info->num_literals() == 0) { |
| 1089 FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode); |
| 1090 __ li(a0, Operand(info)); |
| 1091 __ push(a0); |
| 1092 __ CallStub(&stub); |
| 1093 } else { |
| 1094 __ li(a0, Operand(info)); |
| 1095 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex |
| 1096 : Heap::kFalseValueRootIndex); |
| 1097 __ Push(cp, a0, a1); |
| 1098 __ CallRuntime(Runtime::kNewClosure, 3); |
| 1099 } |
| 1100 context()->Plug(v0); |
303 } | 1101 } |
304 | 1102 |
305 | 1103 |
306 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { | 1104 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) { |
307 UNIMPLEMENTED_MIPS(); | 1105 Comment cmnt(masm_, "[ VariableProxy"); |
| 1106 EmitVariableLoad(expr->var()); |
| 1107 } |
| 1108 |
| 1109 |
| 1110 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( |
| 1111 Slot* slot, |
| 1112 TypeofState typeof_state, |
| 1113 Label* slow) { |
| 1114 Register current = cp; |
| 1115 Register next = a1; |
| 1116 Register temp = a2; |
| 1117 |
| 1118 Scope* s = scope(); |
| 1119 while (s != NULL) { |
| 1120 if (s->num_heap_slots() > 0) { |
| 1121 if (s->calls_eval()) { |
| 1122 // Check that extension is NULL. |
| 1123 __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX)); |
| 1124 __ Branch(slow, ne, temp, Operand(zero_reg)); |
| 1125 } |
| 1126 // Load next context in chain. |
| 1127 __ lw(next, ContextOperand(current, Context::CLOSURE_INDEX)); |
| 1128 __ lw(next, FieldMemOperand(next, JSFunction::kContextOffset)); |
| 1129 // Walk the rest of the chain without clobbering cp. |
| 1130 current = next; |
| 1131 } |
| 1132 // If no outer scope calls eval, we do not need to check more |
| 1133 // context extensions. |
| 1134 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; |
| 1135 s = s->outer_scope(); |
| 1136 } |
| 1137 |
| 1138 if (s->is_eval_scope()) { |
| 1139 Label loop, fast; |
| 1140 if (!current.is(next)) { |
| 1141 __ Move(next, current); |
| 1142 } |
| 1143 __ bind(&loop); |
| 1144 // Terminate at global context. |
| 1145 __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset)); |
| 1146 __ LoadRoot(t0, Heap::kGlobalContextMapRootIndex); |
| 1147 __ Branch(&fast, eq, temp, Operand(t0)); |
| 1148 // Check that extension is NULL. |
| 1149 __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX)); |
| 1150 __ Branch(slow, ne, temp, Operand(zero_reg)); |
| 1151 // Load next context in chain. |
| 1152 __ lw(next, ContextOperand(next, Context::CLOSURE_INDEX)); |
| 1153 __ lw(next, FieldMemOperand(next, JSFunction::kContextOffset)); |
| 1154 __ Branch(&loop); |
| 1155 __ bind(&fast); |
| 1156 } |
| 1157 |
| 1158 __ lw(a0, GlobalObjectOperand()); |
| 1159 __ li(a2, Operand(slot->var()->name())); |
| 1160 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) |
| 1161 ? RelocInfo::CODE_TARGET |
| 1162 : RelocInfo::CODE_TARGET_CONTEXT; |
| 1163 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 1164 EmitCallIC(ic, mode, AstNode::kNoNumber); |
308 } | 1165 } |
309 | 1166 |
310 | 1167 |
311 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( | 1168 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions( |
312 Slot* slot, | 1169 Slot* slot, |
313 Label* slow) { | 1170 Label* slow) { |
314 UNIMPLEMENTED_MIPS(); | 1171 ASSERT(slot->type() == Slot::CONTEXT); |
315 return MemOperand(zero_reg, 0); | 1172 Register context = cp; |
| 1173 Register next = a3; |
| 1174 Register temp = t0; |
| 1175 |
| 1176 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { |
| 1177 if (s->num_heap_slots() > 0) { |
| 1178 if (s->calls_eval()) { |
| 1179 // Check that extension is NULL. |
| 1180 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 1181 __ Branch(slow, ne, temp, Operand(zero_reg)); |
| 1182 } |
| 1183 __ lw(next, ContextOperand(context, Context::CLOSURE_INDEX)); |
| 1184 __ lw(next, FieldMemOperand(next, JSFunction::kContextOffset)); |
| 1185 // Walk the rest of the chain without clobbering cp. |
| 1186 context = next; |
| 1187 } |
| 1188 } |
| 1189 // Check that last extension is NULL. |
| 1190 __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX)); |
| 1191 __ Branch(slow, ne, temp, Operand(zero_reg)); |
| 1192 |
| 1193 // This function is used only for loads, not stores, so it's safe to |
| 1194 // return a cp-based operand (the write barrier cannot be allowed to |
| 1195 // destroy the cp register). |
| 1196 return ContextOperand(context, slot->index()); |
316 } | 1197 } |
317 | 1198 |
318 | 1199 |
319 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( | 1200 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase( |
320 Slot* slot, | 1201 Slot* slot, |
321 TypeofState typeof_state, | 1202 TypeofState typeof_state, |
322 Label* slow, | 1203 Label* slow, |
323 Label* done) { | 1204 Label* done) { |
324 UNIMPLEMENTED_MIPS(); | 1205 // Generate fast-case code for variables that might be shadowed by |
325 } | 1206 // eval-introduced variables. Eval is used a lot without |
326 | 1207 // introducing variables. In those cases, we do not want to |
327 | 1208 // perform a runtime call for all variables in the scope |
328 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions( | 1209 // containing the eval. |
329 Slot* slot, | 1210 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { |
330 TypeofState typeof_state, | 1211 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow); |
331 Label* slow) { | 1212 __ Branch(done); |
332 UNIMPLEMENTED_MIPS(); | 1213 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { |
| 1214 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot(); |
| 1215 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); |
| 1216 if (potential_slot != NULL) { |
| 1217 // Generate fast case for locals that rewrite to slots. |
| 1218 __ lw(v0, ContextSlotOperandCheckExtensions(potential_slot, slow)); |
| 1219 if (potential_slot->var()->mode() == Variable::CONST) { |
| 1220 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 1221 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. |
| 1222 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); |
| 1223 __ movz(v0, a0, at); // Conditional move. |
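| // movz writes a0 (undefined) into v0 only when at == 0, i.e. when the |
| // loaded value was the hole. |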
| 1224 } |
| 1225 __ Branch(done); |
| 1226 } else if (rewrite != NULL) { |
| 1227 // Generate fast case for calls of an argument function. |
| 1228 Property* property = rewrite->AsProperty(); |
| 1229 if (property != NULL) { |
| 1230 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); |
| 1231 Literal* key_literal = property->key()->AsLiteral(); |
| 1232 if (obj_proxy != NULL && |
| 1233 key_literal != NULL && |
| 1234 obj_proxy->IsArguments() && |
| 1235 key_literal->handle()->IsSmi()) { |
| 1236 // Load arguments object if there are no eval-introduced |
| 1237 // variables. Then load the argument from the arguments |
| 1238 // object using keyed load. |
| 1239 __ lw(a1, |
| 1240 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(), |
| 1241 slow)); |
| 1242 __ li(a0, Operand(key_literal->handle())); |
| 1243 Handle<Code> ic = |
| 1244 isolate()->builtins()->KeyedLoadIC_Initialize(); |
| 1245 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 1246 __ Branch(done); |
| 1247 } |
| 1248 } |
| 1249 } |
| 1250 } |
333 } | 1251 } |
334 | 1252 |
335 | 1253 |
336 void FullCodeGenerator::EmitVariableLoad(Variable* var) { | 1254 void FullCodeGenerator::EmitVariableLoad(Variable* var) { |
337 UNIMPLEMENTED_MIPS(); | 1255 // Four cases: non-this global variables, lookup slots, all other |
| 1256 // types of slots, and parameters that rewrite to explicit property |
| 1257 // accesses on the arguments object. |
| 1258 Slot* slot = var->AsSlot(); |
| 1259 Property* property = var->AsProperty(); |
| 1260 |
| 1261 if (var->is_global() && !var->is_this()) { |
| 1262 Comment cmnt(masm_, "Global variable"); |
| 1263 // Use inline caching. Variable name is passed in a2 and the global |
| 1264 // object (receiver) in a0. |
| 1265 __ lw(a0, GlobalObjectOperand()); |
| 1266 __ li(a2, Operand(var->name())); |
| 1267 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 1268 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber); |
| 1269 context()->Plug(v0); |
| 1270 |
| 1271 } else if (slot != NULL && slot->type() == Slot::LOOKUP) { |
| 1272 Label done, slow; |
| 1273 |
| 1274 // Generate code for loading from variables potentially shadowed |
| 1275 // by eval-introduced variables. |
| 1276 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done); |
| 1277 |
| 1278 __ bind(&slow); |
| 1279 Comment cmnt(masm_, "Lookup slot"); |
| 1280 __ li(a1, Operand(var->name())); |
| 1281 __ Push(cp, a1); // Context and name. |
| 1282 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
| 1283 __ bind(&done); |
| 1284 |
| 1285 context()->Plug(v0); |
| 1286 |
| 1287 } else if (slot != NULL) { |
| 1288 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) |
| 1289 ? "Context slot" |
| 1290 : "Stack slot"); |
| 1291 if (var->mode() == Variable::CONST) { |
| 1292 // Constants may be the hole value if they have not been initialized. |
| 1293 // Unhole them. |
| 1294 MemOperand slot_operand = EmitSlotSearch(slot, a0); |
| 1295 __ lw(v0, slot_operand); |
| 1296 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 1297 __ subu(at, v0, at); // Sub as compare: at == 0 on eq. |
| 1298 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); |
| 1299 __ movz(v0, a0, at); // Conditional move. |
| 1300 context()->Plug(v0); |
| 1301 } else { |
| 1302 context()->Plug(slot); |
| 1303 } |
| 1304 } else { |
| 1305 Comment cmnt(masm_, "Rewritten parameter"); |
| 1306 ASSERT_NOT_NULL(property); |
| 1307 // Rewritten parameter accesses are of the form "slot[literal]". |
| 1308 // Assert that the object is in a slot. |
| 1309 Variable* object_var = property->obj()->AsVariableProxy()->AsVariable(); |
| 1310 ASSERT_NOT_NULL(object_var); |
| 1311 Slot* object_slot = object_var->AsSlot(); |
| 1312 ASSERT_NOT_NULL(object_slot); |
| 1313 |
| 1314 // Load the object. |
| 1315 Move(a1, object_slot); |
| 1316 |
| 1317 // Assert that the key is a smi. |
| 1318 Literal* key_literal = property->key()->AsLiteral(); |
| 1319 ASSERT_NOT_NULL(key_literal); |
| 1320 ASSERT(key_literal->handle()->IsSmi()); |
| 1321 |
| 1322 // Load the key. |
| 1323 __ li(a0, Operand(key_literal->handle())); |
| 1324 |
| 1325 // Call keyed load IC. It has arguments key and receiver in a0 and a1. |
| 1326 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
| 1327 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 1328 context()->Plug(v0); |
| 1329 } |
338 } | 1330 } |
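A compact map of the four branches above (inferred from the comments in the patch rather than stated normatively):

    // non-this global       -> LoadIC, name in a2, receiver in a0
    // Slot::LOOKUP          -> dynamic fast case, else Runtime::kLoadContextSlot
    // stack / context slot  -> direct slot read, plus hole check for const
    // rewritten parameter   -> arguments object + KeyedLoadIC on slot[literal]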
339 | 1331 |
340 | 1332 |
341 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | 1333 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { |
342 UNIMPLEMENTED_MIPS(); | 1334 Comment cmnt(masm_, "[ RegExpLiteral"); |
| 1335 Label materialized; |
| 1336 // Registers will be used as follows: |
| 1337 // t1 = materialized value (RegExp literal) |
| 1338 // t0 = JS function, literals array |
| 1339 // a3 = literal index |
| 1340 // a2 = RegExp pattern |
| 1341 // a1 = RegExp flags |
| 1342 // a0 = RegExp literal clone |
| 1343 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1344 __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset)); |
| 1345 int literal_offset = |
| 1346 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; |
| 1347 __ lw(t1, FieldMemOperand(t0, literal_offset)); |
| 1348 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 1349 __ Branch(&materialized, ne, t1, Operand(at)); |
| 1350 |
| 1351 // Create regexp literal using runtime function. |
| 1352 // Result will be in v0. |
| 1353 __ li(a3, Operand(Smi::FromInt(expr->literal_index()))); |
| 1354 __ li(a2, Operand(expr->pattern())); |
| 1355 __ li(a1, Operand(expr->flags())); |
| 1356 __ Push(t0, a3, a2, a1); |
| 1357 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
| 1358 __ mov(t1, v0); |
| 1359 |
| 1360 __ bind(&materialized); |
| 1361 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 1362 Label allocated, runtime_allocate; |
| 1363 __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); |
| 1364 __ jmp(&allocated); |
| 1365 |
| 1366 __ bind(&runtime_allocate); |
| 1367 __ push(t1); |
| 1368 __ li(a0, Operand(Smi::FromInt(size))); |
| 1369 __ push(a0); |
| 1370 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
| 1371 __ pop(t1); |
| 1372 |
| 1373 __ bind(&allocated); |
| 1374 |
| 1375 // After this, registers are used as follows: |
| 1376 // v0: Newly allocated regexp. |
| 1377 // t1: Materialized regexp. |
| 1378 // a2: temp. |
| 1379 __ CopyFields(v0, t1, a2.bit(), size / kPointerSize); |
| 1380 context()->Plug(v0); |
343 } | 1381 } |
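The regexp literal is materialized at most once per literals array and shallow-copied on each evaluation. A hedged pseudocode summary of the emitted sequence:

    // literal = literals[literal_index];
    // if (literal == undefined)
    //   literal = %MaterializeRegExpLiteral(literals, index, pattern, flags);
    // clone = new-space allocation of JSRegExp::kSize plus in-object fields
    //         (falling back to %AllocateInNewSpace on failure);
    // CopyFields(clone, literal);   // clone returned in v0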
344 | 1382 |
345 | 1383 |
346 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { | 1384 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { |
347 UNIMPLEMENTED_MIPS(); | 1385 Comment cmnt(masm_, "[ ObjectLiteral"); |
| 1386 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1387 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); |
| 1388 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); |
| 1389 __ li(a1, Operand(expr->constant_properties())); |
| 1390 int flags = expr->fast_elements() |
| 1391 ? ObjectLiteral::kFastElements |
| 1392 : ObjectLiteral::kNoFlags; |
| 1393 flags |= expr->has_function() |
| 1394 ? ObjectLiteral::kHasFunction |
| 1395 : ObjectLiteral::kNoFlags; |
| 1396 __ li(a0, Operand(Smi::FromInt(flags))); |
| 1397 __ Push(a3, a2, a1, a0); |
| 1398 if (expr->depth() > 1) { |
| 1399 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); |
| 1400 } else { |
| 1401 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4); |
| 1402 } |
| 1403 |
| 1404 // If result_saved is true, the result is on top of the stack. If |
| 1405 // result_saved is false, the result is in v0. |
| 1406 bool result_saved = false; |
| 1407 |
| 1408 // Mark all computed expressions that are bound to a key that |
| 1409 // is shadowed by a later occurrence of the same key. For the |
| 1410 // marked expressions, no store code is emitted. |
| 1411 expr->CalculateEmitStore(); |
| 1412 |
| 1413 for (int i = 0; i < expr->properties()->length(); i++) { |
| 1414 ObjectLiteral::Property* property = expr->properties()->at(i); |
| 1415 if (property->IsCompileTimeValue()) continue; |
| 1416 |
| 1417 Literal* key = property->key(); |
| 1418 Expression* value = property->value(); |
| 1419 if (!result_saved) { |
| 1420 __ push(v0); // Save result on stack. |
| 1421 result_saved = true; |
| 1422 } |
| 1423 switch (property->kind()) { |
| 1424 case ObjectLiteral::Property::CONSTANT: |
| 1425 UNREACHABLE(); |
| 1426 case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
| 1427 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); |
| 1428 // Fall through. |
| 1429 case ObjectLiteral::Property::COMPUTED: |
| 1430 if (key->handle()->IsSymbol()) { |
| 1431 if (property->emit_store()) { |
| 1432 VisitForAccumulatorValue(value); |
| 1433 __ mov(a0, result_register()); |
| 1434 __ li(a2, Operand(key->handle())); |
| 1435 __ lw(a1, MemOperand(sp)); |
| 1436 Handle<Code> ic = is_strict_mode() |
| 1437 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 1438 : isolate()->builtins()->StoreIC_Initialize(); |
| 1439 EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, key->id()); |
| 1440 PrepareForBailoutForId(key->id(), NO_REGISTERS); |
| 1441 } else { |
| 1442 VisitForEffect(value); |
| 1443 } |
| 1444 break; |
| 1445 } |
| 1446 // Fall through. |
| 1447 case ObjectLiteral::Property::PROTOTYPE: |
| 1448 // Duplicate receiver on stack. |
| 1449 __ lw(a0, MemOperand(sp)); |
| 1450 __ push(a0); |
| 1451 VisitForStackValue(key); |
| 1452 VisitForStackValue(value); |
| 1453 if (property->emit_store()) { |
| 1454 __ li(a0, Operand(Smi::FromInt(NONE))); // PropertyAttributes. |
| 1455 __ push(a0); |
| 1456 __ CallRuntime(Runtime::kSetProperty, 4); |
| 1457 } else { |
| 1458 __ Drop(3); |
| 1459 } |
| 1460 break; |
| 1461 case ObjectLiteral::Property::GETTER: |
| 1462 case ObjectLiteral::Property::SETTER: |
| 1463 // Duplicate receiver on stack. |
| 1464 __ lw(a0, MemOperand(sp)); |
| 1465 __ push(a0); |
| 1466 VisitForStackValue(key); |
| 1467 __ li(a1, Operand(property->kind() == ObjectLiteral::Property::SETTER ? |
| 1468 Smi::FromInt(1) : |
| 1469 Smi::FromInt(0))); |
| 1470 __ push(a1); |
| 1471 VisitForStackValue(value); |
| 1472 __ CallRuntime(Runtime::kDefineAccessor, 4); |
| 1473 break; |
| 1474 } |
| 1475 } |
| 1476 |
| 1477 if (expr->has_function()) { |
| 1478 ASSERT(result_saved); |
| 1479 __ lw(a0, MemOperand(sp)); |
| 1480 __ push(a0); |
| 1481 __ CallRuntime(Runtime::kToFastProperties, 1); |
| 1482 } |
| 1483 |
| 1484 if (result_saved) { |
| 1485 context()->PlugTOS(); |
| 1486 } else { |
| 1487 context()->Plug(v0); |
| 1488 } |
348 } | 1489 } |
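Symbol-keyed COMPUTED properties go through the store IC with the receiver reloaded from the top of the stack; PROTOTYPE and non-symbol keys instead call Runtime::kSetProperty, which is why the receiver is duplicated first. An assumed picture of the stack at that runtime call (top first):

    // sp -> attributes (Smi-encoded NONE)
    //       value
    //       key
    //       receiver (duplicate; the original literal stays below)
    __ CallRuntime(Runtime::kSetProperty, 4);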
349 | 1490 |
350 | 1491 |
351 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { | 1492 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { |
352 UNIMPLEMENTED_MIPS(); | 1493 Comment cmnt(masm_, "[ ArrayLiteral"); |
| 1494 |
| 1495 ZoneList<Expression*>* subexprs = expr->values(); |
| 1496 int length = subexprs->length(); |
| 1497 __ mov(a0, result_register()); |
| 1498 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1499 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); |
| 1500 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); |
| 1501 __ li(a1, Operand(expr->constant_elements())); |
| 1502 __ Push(a3, a2, a1); |
| 1503 if (expr->constant_elements()->map() == |
| 1504 isolate()->heap()->fixed_cow_array_map()) { |
| 1505 FastCloneShallowArrayStub stub( |
| 1506 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); |
| 1507 __ CallStub(&stub); |
| 1508 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), |
| 1509 1, a1, a2); |
| 1510 } else if (expr->depth() > 1) { |
| 1511 __ CallRuntime(Runtime::kCreateArrayLiteral, 3); |
| 1512 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
| 1513 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); |
| 1514 } else { |
| 1515 FastCloneShallowArrayStub stub( |
| 1516 FastCloneShallowArrayStub::CLONE_ELEMENTS, length); |
| 1517 __ CallStub(&stub); |
| 1518 } |
| 1519 |
| 1520 bool result_saved = false; // Is the result saved to the stack? |
| 1521 |
| 1522 // Emit code to evaluate all the non-constant subexpressions and to store |
| 1523 // them into the newly cloned array. |
| 1524 for (int i = 0; i < length; i++) { |
| 1525 Expression* subexpr = subexprs->at(i); |
| 1526 // If the subexpression is a literal or a simple materialized literal, it |
| 1527 // is already set in the cloned array. |
| 1528 if (subexpr->AsLiteral() != NULL || |
| 1529 CompileTimeValue::IsCompileTimeValue(subexpr)) { |
| 1530 continue; |
| 1531 } |
| 1532 |
| 1533 if (!result_saved) { |
| 1534 __ push(v0); |
| 1535 result_saved = true; |
| 1536 } |
| 1537 VisitForAccumulatorValue(subexpr); |
| 1538 |
| 1539 // Store the subexpression value in the array's elements. |
| 1540 __ lw(a1, MemOperand(sp)); // Copy of array literal. |
| 1541 __ lw(a1, FieldMemOperand(a1, JSObject::kElementsOffset)); |
| 1542 int offset = FixedArray::kHeaderSize + (i * kPointerSize); |
| 1543 __ sw(result_register(), FieldMemOperand(a1, offset)); |
| 1544 |
| 1545 // Update the write barrier for the array store with v0 as the scratch |
| 1546 // register. |
| 1547 __ li(a2, Operand(offset)); |
| 1548 // TODO(PJ): double check this RecordWrite call. |
| 1549 __ RecordWrite(a1, a2, result_register()); |
| 1550 |
| 1551 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); |
| 1552 } |
| 1553 |
| 1554 if (result_saved) { |
| 1555 context()->PlugTOS(); |
| 1556 } else { |
| 1557 context()->Plug(v0); |
| 1558 } |
353 } | 1559 } |
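Element i of the backing store lives at a fixed offset, so each store is a single sw plus a write barrier. A sketch of the address computation (FieldMemOperand subtracts the heap-object tag):

    // addr = elements + FixedArray::kHeaderSize + i * kPointerSize - kHeapObjectTag
    int offset = FixedArray::kHeaderSize + (i * kPointerSize);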
354 | 1560 |
355 | 1561 |
356 void FullCodeGenerator::VisitAssignment(Assignment* expr) { | 1562 void FullCodeGenerator::VisitAssignment(Assignment* expr) { |
357 UNIMPLEMENTED_MIPS(); | 1563 Comment cmnt(masm_, "[ Assignment"); |
| 1564 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' |
| 1565 // on the left-hand side. |
| 1566 if (!expr->target()->IsValidLeftHandSide()) { |
| 1567 VisitForEffect(expr->target()); |
| 1568 return; |
| 1569 } |
| 1570 |
| 1571 // Left-hand side can only be a property, a global or a (parameter or local) |
| 1572 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY. |
| 1573 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; |
| 1574 LhsKind assign_type = VARIABLE; |
| 1575 Property* property = expr->target()->AsProperty(); |
| 1576 if (property != NULL) { |
| 1577 assign_type = (property->key()->IsPropertyName()) |
| 1578 ? NAMED_PROPERTY |
| 1579 : KEYED_PROPERTY; |
| 1580 } |
| 1581 |
| 1582 // Evaluate LHS expression. |
| 1583 switch (assign_type) { |
| 1584 case VARIABLE: |
| 1585 // Nothing to do here. |
| 1586 break; |
| 1587 case NAMED_PROPERTY: |
| 1588 if (expr->is_compound()) { |
| 1589 // We need the receiver both on the stack and in the accumulator. |
| 1590 VisitForAccumulatorValue(property->obj()); |
| 1591 __ push(result_register()); |
| 1592 } else { |
| 1593 VisitForStackValue(property->obj()); |
| 1594 } |
| 1595 break; |
| 1596 case KEYED_PROPERTY: |
| 1597 // We need the key and receiver on both the stack and in v0 and a1. |
| 1598 if (expr->is_compound()) { |
| 1599 if (property->is_arguments_access()) { |
| 1600 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); |
| 1601 __ lw(v0, EmitSlotSearch(obj_proxy->var()->AsSlot(), v0)); |
| 1602 __ push(v0); |
| 1603 __ li(v0, Operand(property->key()->AsLiteral()->handle())); |
| 1604 } else { |
| 1605 VisitForStackValue(property->obj()); |
| 1606 VisitForAccumulatorValue(property->key()); |
| 1607 } |
| 1608 __ lw(a1, MemOperand(sp, 0)); |
| 1609 __ push(v0); |
| 1610 } else { |
| 1611 if (property->is_arguments_access()) { |
| 1612 VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); |
| 1613 __ lw(a1, EmitSlotSearch(obj_proxy->var()->AsSlot(), v0)); |
| 1614 __ li(v0, Operand(property->key()->AsLiteral()->handle())); |
| 1615 __ Push(a1, v0); |
| 1616 } else { |
| 1617 VisitForStackValue(property->obj()); |
| 1618 VisitForStackValue(property->key()); |
| 1619 } |
| 1620 } |
| 1621 break; |
| 1622 } |
| 1623 |
| 1624 // For compound assignments we need another deoptimization point after the |
| 1625 // variable/property load. |
| 1626 if (expr->is_compound()) { |
| 1627 { AccumulatorValueContext context(this); |
| 1628 switch (assign_type) { |
| 1629 case VARIABLE: |
| 1630 EmitVariableLoad(expr->target()->AsVariableProxy()->var()); |
| 1631 PrepareForBailout(expr->target(), TOS_REG); |
| 1632 break; |
| 1633 case NAMED_PROPERTY: |
| 1634 EmitNamedPropertyLoad(property); |
| 1635 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG); |
| 1636 break; |
| 1637 case KEYED_PROPERTY: |
| 1638 EmitKeyedPropertyLoad(property); |
| 1639 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG); |
| 1640 break; |
| 1641 } |
| 1642 } |
| 1643 |
| 1644 Token::Value op = expr->binary_op(); |
| 1645 __ push(v0); // Left operand goes on the stack. |
| 1646 VisitForAccumulatorValue(expr->value()); |
| 1647 |
| 1648 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() |
| 1649 ? OVERWRITE_RIGHT |
| 1650 : NO_OVERWRITE; |
| 1651 SetSourcePosition(expr->position() + 1); |
| 1652 AccumulatorValueContext context(this); |
| 1653 if (ShouldInlineSmiCase(op)) { |
| 1654 EmitInlineSmiBinaryOp(expr->binary_operation(), |
| 1655 op, |
| 1656 mode, |
| 1657 expr->target(), |
| 1658 expr->value()); |
| 1659 } else { |
| 1660 EmitBinaryOp(expr->binary_operation(), op, mode); |
| 1661 } |
| 1662 |
| 1663 // Deoptimization point in case the binary operation may have side effects. |
| 1664 PrepareForBailout(expr->binary_operation(), TOS_REG); |
| 1665 } else { |
| 1666 VisitForAccumulatorValue(expr->value()); |
| 1667 } |
| 1668 |
| 1669 // Record source position before possible IC call. |
| 1670 SetSourcePosition(expr->position()); |
| 1671 |
| 1672 // Store the value. |
| 1673 switch (assign_type) { |
| 1674 case VARIABLE: |
| 1675 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), |
| 1676 expr->op()); |
| 1677 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 1678 context()->Plug(v0); |
| 1679 break; |
| 1680 case NAMED_PROPERTY: |
| 1681 EmitNamedPropertyAssignment(expr); |
| 1682 break; |
| 1683 case KEYED_PROPERTY: |
| 1684 EmitKeyedPropertyAssignment(expr); |
| 1685 break; |
| 1686 } |
358 } | 1687 } |
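For a compound assignment such as x += y, the sequence above is: load the target into the accumulator, spill it, evaluate the right-hand side, combine via the inline smi path or the TypeRecordingBinaryOpStub, then fall through to the store. Roughly:

    // tmp = Load(target);            // with its own bailout point
    // push(tmp);
    // v0 = Evaluate(value);
    // v0 = BinaryOp(op, pop(), v0);  // inline smi case or stub call
    // Store(target, v0);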
359 | 1688 |
360 | 1689 |
361 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { | 1690 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { |
362 UNIMPLEMENTED_MIPS(); | 1691 SetSourcePosition(prop->position()); |
| 1692 Literal* key = prop->key()->AsLiteral(); |
| 1693 __ mov(a0, result_register()); |
| 1694 __ li(a2, Operand(key->handle())); |
| 1695 // Call load IC. It has arguments receiver and property name in a0 and a2. |
| 1696 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 1697 if (prop->is_synthetic()) { |
| 1698 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 1699 } else { |
| 1700 EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, prop->id()); |
| 1701 } |
363 } | 1702 } |
364 | 1703 |
365 | 1704 |
366 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { | 1705 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { |
367 UNIMPLEMENTED_MIPS(); | 1706 SetSourcePosition(prop->position()); |
| 1707 __ mov(a0, result_register()); |
| 1708 // Call keyed load IC. It has arguments key and receiver in a0 and a1. |
| 1709 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
| 1710 if (prop->is_synthetic()) { |
| 1711 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 1712 } else { |
| 1713 EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, prop->id()); |
| 1714 } |
368 } | 1715 } |
369 | 1716 |
370 | 1717 |
371 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | 1718 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, |
372 Token::Value op, | 1719 Token::Value op, |
373 OverwriteMode mode, | 1720 OverwriteMode mode, |
374 Expression* left, | 1721 Expression* left_expr, |
375 Expression* right) { | 1722 Expression* right_expr) { |
376 UNIMPLEMENTED_MIPS(); | 1723 Label done, smi_case, stub_call; |
| 1724 |
| 1725 Register scratch1 = a2; |
| 1726 Register scratch2 = a3; |
| 1727 |
| 1728 // Get the arguments. |
| 1729 Register left = a1; |
| 1730 Register right = a0; |
| 1731 __ pop(left); |
| 1732 __ mov(a0, result_register()); |
| 1733 |
| 1734 // Perform combined smi check on both operands. |
| 1735 __ Or(scratch1, left, Operand(right)); |
| 1736 STATIC_ASSERT(kSmiTag == 0); |
| 1737 JumpPatchSite patch_site(masm_); |
| 1738 patch_site.EmitJumpIfSmi(scratch1, &smi_case); |
| 1739 |
| 1740 __ bind(&stub_call); |
| 1741 TypeRecordingBinaryOpStub stub(op, mode); |
| 1742 EmitCallIC(stub.GetCode(), &patch_site, expr->id()); |
| 1743 __ jmp(&done); |
| 1744 |
| 1745 __ bind(&smi_case); |
| 1746 // Smi case. This code works the same way as the smi-smi case in the type |
| 1747 // recording binary operation stub; see |
| 1748 // TypeRecordingBinaryOpStub::GenerateSmiSmiOperation for comments. |
| 1749 switch (op) { |
| 1750 case Token::SAR: |
| 1751 __ Branch(&stub_call); |
| 1752 __ GetLeastBitsFromSmi(scratch1, right, 5); |
| 1753 __ srav(right, left, scratch1); |
| 1754 __ And(v0, right, Operand(~kSmiTagMask)); |
| 1755 break; |
| 1756 case Token::SHL: { |
| 1757 __ Branch(&stub_call); |
| 1758 __ SmiUntag(scratch1, left); |
| 1759 __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 1760 __ sllv(scratch1, scratch1, scratch2); |
| 1761 __ Addu(scratch2, scratch1, Operand(0x40000000)); |
| 1762 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); |
| 1763 __ SmiTag(v0, scratch1); |
| 1764 break; |
| 1765 } |
| 1766 case Token::SHR: { |
| 1767 __ Branch(&stub_call); |
| 1768 __ SmiUntag(scratch1, left); |
| 1769 __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 1770 __ srlv(scratch1, scratch1, scratch2); |
| 1771 __ And(scratch2, scratch1, 0xc0000000); |
| 1772 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg)); |
| 1773 __ SmiTag(v0, scratch1); |
| 1774 break; |
| 1775 } |
| 1776 case Token::ADD: |
| 1777 __ AdduAndCheckForOverflow(v0, left, right, scratch1); |
| 1778 __ BranchOnOverflow(&stub_call, scratch1); |
| 1779 break; |
| 1780 case Token::SUB: |
| 1781 __ SubuAndCheckForOverflow(v0, left, right, scratch1); |
| 1782 __ BranchOnOverflow(&stub_call, scratch1); |
| 1783 break; |
| 1784 case Token::MUL: { |
| 1785 __ SmiUntag(scratch1, right); |
| 1786 __ Mult(left, scratch1); |
| 1787 __ mflo(scratch1); |
| 1788 __ mfhi(scratch2); |
| 1789 __ sra(scratch1, scratch1, 31); |
| 1790 __ Branch(&stub_call, ne, scratch1, Operand(scratch2)); |
| 1791 __ mflo(v0); |
| 1792 __ Branch(&done, ne, v0, Operand(zero_reg)); |
| 1793 __ Addu(scratch2, right, left); |
| 1794 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg)); |
| 1795 ASSERT(Smi::FromInt(0) == 0); |
| 1796 __ mov(v0, zero_reg); |
| 1797 break; |
| 1798 } |
| 1799 case Token::BIT_OR: |
| 1800 __ Or(v0, left, Operand(right)); |
| 1801 break; |
| 1802 case Token::BIT_AND: |
| 1803 __ And(v0, left, Operand(right)); |
| 1804 break; |
| 1805 case Token::BIT_XOR: |
| 1806 __ Xor(v0, left, Operand(right)); |
| 1807 break; |
| 1808 default: |
| 1809 UNREACHABLE(); |
| 1810 } |
| 1811 |
| 1812 __ bind(&done); |
| 1813 context()->Plug(v0); |
377 } | 1814 } |
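The combined smi check relies on the smi tag being 0 in the low bit: OR-ing the operands leaves that bit clear only if it is clear in both, so a single test covers both operands:

    // both_smis = ((left | right) & kSmiTagMask) == 0;

Note that the unconditional Branch(&stub_call) at the head of the SAR, SHL and SHR cases sends every shift to the stub, so the inline shift sequences that follow it are currently unreachable.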
378 | 1815 |
379 | 1816 |
380 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, | 1817 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, |
381 Token::Value op, | 1818 Token::Value op, |
382 OverwriteMode mode) { | 1819 OverwriteMode mode) { |
383 UNIMPLEMENTED_MIPS(); | 1820 __ mov(a0, result_register()); |
| 1821 __ pop(a1); |
| 1822 TypeRecordingBinaryOpStub stub(op, mode); |
| 1823 EmitCallIC(stub.GetCode(), NULL, expr->id()); |
| 1824 context()->Plug(v0); |
384 } | 1825 } |
385 | 1826 |
386 | 1827 |
387 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { | 1828 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { |
388 UNIMPLEMENTED_MIPS(); | 1829 // Invalid left-hand sides are rewritten to have a 'throw |
| 1830 // ReferenceError' on the left-hand side. |
| 1831 if (!expr->IsValidLeftHandSide()) { |
| 1832 VisitForEffect(expr); |
| 1833 return; |
| 1834 } |
| 1835 |
| 1836 // Left-hand side can only be a property, a global or a (parameter or local) |
| 1837 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY. |
| 1838 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; |
| 1839 LhsKind assign_type = VARIABLE; |
| 1840 Property* prop = expr->AsProperty(); |
| 1841 if (prop != NULL) { |
| 1842 assign_type = (prop->key()->IsPropertyName()) |
| 1843 ? NAMED_PROPERTY |
| 1844 : KEYED_PROPERTY; |
| 1845 } |
| 1846 |
| 1847 switch (assign_type) { |
| 1848 case VARIABLE: { |
| 1849 Variable* var = expr->AsVariableProxy()->var(); |
| 1850 EffectContext context(this); |
| 1851 EmitVariableAssignment(var, Token::ASSIGN); |
| 1852 break; |
| 1853 } |
| 1854 case NAMED_PROPERTY: { |
| 1855 __ push(result_register()); // Preserve value. |
| 1856 VisitForAccumulatorValue(prop->obj()); |
| 1857 __ mov(a1, result_register()); |
| 1858 __ pop(a0); // Restore value. |
| 1859 __ li(a2, Operand(prop->key()->AsLiteral()->handle())); |
| 1860 Handle<Code> ic = is_strict_mode() |
| 1861 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 1862 : isolate()->builtins()->StoreIC_Initialize(); |
| 1863 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 1864 break; |
| 1865 } |
| 1866 case KEYED_PROPERTY: { |
| 1867 __ push(result_register()); // Preserve value. |
| 1868 if (prop->is_synthetic()) { |
| 1869 ASSERT(prop->obj()->AsVariableProxy() != NULL); |
| 1870 ASSERT(prop->key()->AsLiteral() != NULL); |
| 1871 { AccumulatorValueContext for_object(this); |
| 1872 EmitVariableLoad(prop->obj()->AsVariableProxy()->var()); |
| 1873 } |
| 1874 __ mov(a2, result_register()); |
| 1875 __ li(a1, Operand(prop->key()->AsLiteral()->handle())); |
| 1876 } else { |
| 1877 VisitForStackValue(prop->obj()); |
| 1878 VisitForAccumulatorValue(prop->key()); |
| 1879 __ mov(a1, result_register()); |
| 1880 __ pop(a2); |
| 1881 } |
| 1882 __ pop(a0); // Restore value. |
| 1883 Handle<Code> ic = is_strict_mode() |
| 1884 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 1885 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 1886 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 1887 break; |
| 1888 } |
| 1889 } |
| 1890 PrepareForBailoutForId(bailout_ast_id, TOS_REG); |
| 1891 context()->Plug(v0); |
389 } | 1892 } |
390 | 1893 |
391 | 1894 |
392 void FullCodeGenerator::EmitVariableAssignment(Variable* var, | 1895 void FullCodeGenerator::EmitVariableAssignment(Variable* var, |
393 Token::Value op) { | 1896 Token::Value op) { |
394 UNIMPLEMENTED_MIPS(); | 1897 // Left-hand sides that rewrite to explicit property accesses do not reach |
| 1898 // here. |
| 1899 ASSERT(var != NULL); |
| 1900 ASSERT(var->is_global() || var->AsSlot() != NULL); |
| 1901 |
| 1902 if (var->is_global()) { |
| 1903 ASSERT(!var->is_this()); |
| 1904 // Assignment to a global variable. Use inline caching for the |
| 1905 // assignment. Right-hand-side value is passed in a0, variable name in |
| 1906 // a2, and the global object in a1. |
| 1907 __ mov(a0, result_register()); |
| 1908 __ li(a2, Operand(var->name())); |
| 1909 __ lw(a1, GlobalObjectOperand()); |
| 1910 Handle<Code> ic = is_strict_mode() |
| 1911 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 1912 : isolate()->builtins()->StoreIC_Initialize(); |
| 1913 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber); |
| 1914 |
| 1915 } else if (op == Token::INIT_CONST) { |
| 1916 // Like var declarations, const declarations are hoisted to function |
| 1917 // scope. However, unlike var initializers, const initializers can write |
| 1918 // directly into that function's context, even from inside a 'with' |
| 1919 // context. We thus bypass the normal static scope lookup. |
| 1920 Slot* slot = var->AsSlot(); |
| 1921 Label skip; |
| 1922 switch (slot->type()) { |
| 1923 case Slot::PARAMETER: |
| 1924 // No const parameters. |
| 1925 UNREACHABLE(); |
| 1926 break; |
| 1927 case Slot::LOCAL: |
| 1928 // Detect const reinitialization by checking for the hole value. |
| 1929 __ lw(a1, MemOperand(fp, SlotOffset(slot))); |
| 1930 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 1931 __ Branch(&skip, ne, a1, Operand(t0)); |
| 1932 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); |
| 1933 break; |
| 1934 case Slot::CONTEXT: { |
| 1935 __ lw(a1, ContextOperand(cp, Context::FCONTEXT_INDEX)); |
| 1936 __ lw(a2, ContextOperand(a1, slot->index())); |
| 1937 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 1938 __ Branch(&skip, ne, a2, Operand(t0)); |
| 1939 __ sw(result_register(), ContextOperand(a1, slot->index())); |
| 1940 int offset = Context::SlotOffset(slot->index()); |
| 1941 __ mov(a3, result_register()); // Preserve the stored value in v0. |
| 1942 __ RecordWrite(a1, Operand(offset), a3, a2); |
| 1943 break; |
| 1944 } |
| 1945 case Slot::LOOKUP: |
| 1946 __ push(result_register()); |
| 1947 __ li(a0, Operand(slot->var()->name())); |
| 1948 __ Push(cp, a0); // Context and name. |
| 1949 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
| 1950 break; |
| 1951 } |
| 1952 __ bind(&skip); |
| 1953 |
| 1954 } else if (var->mode() != Variable::CONST) { |
| 1955 // Perform the assignment for non-const variables. Const assignments |
| 1956 // are simply skipped. |
| 1957 Slot* slot = var->AsSlot(); |
| 1958 switch (slot->type()) { |
| 1959 case Slot::PARAMETER: |
| 1960 case Slot::LOCAL: |
| 1961 // Perform the assignment. |
| 1962 __ sw(result_register(), MemOperand(fp, SlotOffset(slot))); |
| 1963 break; |
| 1964 |
| 1965 case Slot::CONTEXT: { |
| 1966 MemOperand target = EmitSlotSearch(slot, a1); |
| 1967 // Perform the assignment and issue the write barrier. |
| 1968 __ sw(result_register(), target); |
| 1969 // RecordWrite may destroy all its register arguments. |
| 1970 __ mov(a3, result_register()); |
| 1971 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; |
| 1972 __ RecordWrite(a1, Operand(offset), a2, a3); |
| 1973 break; |
| 1974 } |
| 1975 |
| 1976 case Slot::LOOKUP: |
| 1977 // Call the runtime for the assignment. |
| 1978 __ push(v0); // Value. |
| 1979 __ li(a1, Operand(slot->var()->name())); |
| 1980 __ li(a0, Operand(Smi::FromInt(strict_mode_flag()))); |
| 1981 __ Push(cp, a1, a0); // Context, name, strict mode. |
| 1982 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
| 1983 break; |
| 1984 } |
| 1985 } |
395 } | 1986 } |
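Const reinitialization is detected via the hole value: the slot is pre-filled with the hole at declaration, and the INIT_CONST path stores only while the hole is still there, so a second initializer is silently skipped. In outline:

    // if (slot == the_hole) slot = value;   // otherwise: no-op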
396 | 1987 |
397 | 1988 |
398 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 1989 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
399 UNIMPLEMENTED_MIPS(); | 1990 // Assignment to a property, using a named store IC. |
| 1991 Property* prop = expr->target()->AsProperty(); |
| 1992 ASSERT(prop != NULL); |
| 1993 ASSERT(prop->key()->AsLiteral() != NULL); |
| 1994 |
| 1995 // If the assignment starts a block of assignments to the same object, |
| 1996 // change to slow case to avoid the quadratic behavior of repeatedly |
| 1997 // adding fast properties. |
| 1998 if (expr->starts_initialization_block()) { |
| 1999 __ push(result_register()); |
| 2000 __ lw(t0, MemOperand(sp, kPointerSize)); // Receiver is now under value. |
| 2001 __ push(t0); |
| 2002 __ CallRuntime(Runtime::kToSlowProperties, 1); |
| 2003 __ pop(result_register()); |
| 2004 } |
| 2005 |
| 2006 // Record source code position before IC call. |
| 2007 SetSourcePosition(expr->position()); |
| 2008 __ mov(a0, result_register()); // Load the value. |
| 2009 __ li(a2, Operand(prop->key()->AsLiteral()->handle())); |
| 2010 // Load the receiver into a1. Leave a copy on the stack if needed for turning |
| 2011 // the receiver into the fast case. |
| 2012 if (expr->ends_initialization_block()) { |
| 2013 __ lw(a1, MemOperand(sp)); |
| 2014 } else { |
| 2015 __ pop(a1); |
| 2016 } |
| 2017 |
| 2018 Handle<Code> ic = is_strict_mode() |
| 2019 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 2020 : isolate()->builtins()->StoreIC_Initialize(); |
| 2021 EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); |
| 2022 |
| 2023 // If the assignment ends an initialization block, revert to fast case. |
| 2024 if (expr->ends_initialization_block()) { |
| 2025 __ push(v0); // Result of assignment, saved even if not needed. |
| 2026 // Receiver is under the result value. |
| 2027 __ lw(t0, MemOperand(sp, kPointerSize)); |
| 2028 __ push(t0); |
| 2029 __ CallRuntime(Runtime::kToFastProperties, 1); |
| 2030 __ pop(v0); |
| 2031 __ Drop(1); |
| 2032 } |
| 2033 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 2034 context()->Plug(v0); |
400 } | 2035 } |
401 | 2036 |
402 | 2037 |
403 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { | 2038 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { |
404 UNIMPLEMENTED_MIPS(); | 2039 // Assignment to a property, using a keyed store IC. |
| 2040 |
| 2041 // If the assignment starts a block of assignments to the same object, |
| 2042 // change to slow case to avoid the quadratic behavior of repeatedly |
| 2043 // adding fast properties. |
| 2044 if (expr->starts_initialization_block()) { |
| 2045 __ push(result_register()); |
| 2046 // Receiver is now under the key and value. |
| 2047 __ lw(t0, MemOperand(sp, 2 * kPointerSize)); |
| 2048 __ push(t0); |
| 2049 __ CallRuntime(Runtime::kToSlowProperties, 1); |
| 2050 __ pop(result_register()); |
| 2051 } |
| 2052 |
| 2053 // Record source code position before IC call. |
| 2054 SetSourcePosition(expr->position()); |
| 2055 // Call keyed store IC. |
| 2056 // The arguments are: |
| 2057 // - a0 is the value, |
| 2058 // - a1 is the key, |
| 2059 // - a2 is the receiver. |
| 2060 __ mov(a0, result_register()); |
| 2061 __ pop(a1); // Key. |
| 2062 // Load the receiver into a2. Leave a copy on the stack if needed for turning |
| 2063 // the receiver into the fast case. |
| 2064 if (expr->ends_initialization_block()) { |
| 2065 __ lw(a2, MemOperand(sp)); |
| 2066 } else { |
| 2067 __ pop(a2); |
| 2068 } |
| 2069 |
| 2070 Handle<Code> ic = is_strict_mode() |
| 2071 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 2072 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 2073 EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); |
| 2074 |
| 2075 // If the assignment ends an initialization block, revert to fast case. |
| 2076 if (expr->ends_initialization_block()) { |
| 2077 __ push(v0); // Result of assignment, saved even if not needed. |
| 2078 // Receiver is under the result value. |
| 2079 __ lw(t0, MemOperand(sp, kPointerSize)); |
| 2080 __ push(t0); |
| 2081 __ CallRuntime(Runtime::kToFastProperties, 1); |
| 2082 __ pop(v0); |
| 2083 __ Drop(1); |
| 2084 } |
| 2085 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 2086 context()->Plug(v0); |
405 } | 2087 } |
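Both the named and keyed assignment paths bracket a block of stores with the same dance: Runtime::kToSlowProperties before the first store (to avoid quadratic fast-property growth) and Runtime::kToFastProperties after the last one. Schematically:

    // if (starts_initialization_block) receiver = %ToSlowProperties(receiver);
    // ... StoreIC / KeyedStoreIC store ...
    // if (ends_initialization_block)   receiver = %ToFastProperties(receiver);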
406 | 2088 |
407 | 2089 |
408 void FullCodeGenerator::VisitProperty(Property* expr) { | 2090 void FullCodeGenerator::VisitProperty(Property* expr) { |
409 UNIMPLEMENTED_MIPS(); | 2091 Comment cmnt(masm_, "[ Property"); |
| 2092 Expression* key = expr->key(); |
| 2093 |
| 2094 if (key->IsPropertyName()) { |
| 2095 VisitForAccumulatorValue(expr->obj()); |
| 2096 EmitNamedPropertyLoad(expr); |
| 2097 context()->Plug(v0); |
| 2098 } else { |
| 2099 VisitForStackValue(expr->obj()); |
| 2100 VisitForAccumulatorValue(expr->key()); |
| 2101 __ pop(a1); |
| 2102 EmitKeyedPropertyLoad(expr); |
| 2103 context()->Plug(v0); |
| 2104 } |
410 } | 2105 } |
411 | 2106 |
412 | 2107 |
413 void FullCodeGenerator::EmitCallWithIC(Call* expr, | 2108 void FullCodeGenerator::EmitCallWithIC(Call* expr, |
414 Handle<Object> name, | 2109 Handle<Object> name, |
415 RelocInfo::Mode mode) { | 2110 RelocInfo::Mode mode) { |
416 UNIMPLEMENTED_MIPS(); | 2111 // Code common for calls using the IC. |
| 2112 ZoneList<Expression*>* args = expr->arguments(); |
| 2113 int arg_count = args->length(); |
| 2114 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2115 for (int i = 0; i < arg_count; i++) { |
| 2116 VisitForStackValue(args->at(i)); |
| 2117 } |
| 2118 __ li(a2, Operand(name)); |
| 2119 } |
| 2120 // Record source position for debugger. |
| 2121 SetSourcePosition(expr->position()); |
| 2122 // Call the IC initialization code. |
| 2123 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 2124 Handle<Code> ic = |
| 2125 isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop); |
| 2126 unsigned ast_id = |
| 2127 (mode == RelocInfo::CODE_TARGET_WITH_ID) ? expr->id() : kNoASTId; |
| 2128 EmitCallIC(ic, mode, ast_id); |
| 2129 RecordJSReturnSite(expr); |
| 2130 // Restore context register. |
| 2131 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2132 context()->Plug(v0); |
417 } | 2133 } |
418 | 2134 |
419 | 2135 |
420 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, | 2136 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, |
421 Expression* key, | 2137 Expression* key, |
422 RelocInfo::Mode mode) { | 2138 RelocInfo::Mode mode) { |
423 UNIMPLEMENTED_MIPS(); | 2139 // Load the key. |
| 2140 VisitForAccumulatorValue(key); |
| 2141 |
| 2142 // Swap the name of the function and the receiver on the stack to follow |
| 2143 // the calling convention for call ICs. |
| 2144 __ pop(a1); |
| 2145 __ push(v0); |
| 2146 __ push(a1); |
| 2147 |
| 2148 // Code common for calls using the IC. |
| 2149 ZoneList<Expression*>* args = expr->arguments(); |
| 2150 int arg_count = args->length(); |
| 2151 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2152 for (int i = 0; i < arg_count; i++) { |
| 2153 VisitForStackValue(args->at(i)); |
| 2154 } |
| 2155 } |
| 2156 // Record source position for debugger. |
| 2157 SetSourcePosition(expr->position()); |
| 2158 // Call the IC initialization code. |
| 2159 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 2160 Handle<Code> ic = |
| 2161 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop); |
| 2162 __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key. |
| 2163 EmitCallIC(ic, mode, expr->id()); |
| 2164 RecordJSReturnSite(expr); |
| 2165 // Restore context register. |
| 2166 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2167 context()->DropAndPlug(1, v0); // Drop the key still on the stack. |
424 } | 2168 } |
425 | 2169 |
426 | 2170 |
427 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { | 2171 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { |
428 UNIMPLEMENTED_MIPS(); | 2172 // Code common for calls using the call stub. |
| 2173 ZoneList<Expression*>* args = expr->arguments(); |
| 2174 int arg_count = args->length(); |
| 2175 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2176 for (int i = 0; i < arg_count; i++) { |
| 2177 VisitForStackValue(args->at(i)); |
| 2178 } |
| 2179 } |
| 2180 // Record source position for debugger. |
| 2181 SetSourcePosition(expr->position()); |
| 2182 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 2183 CallFunctionStub stub(arg_count, in_loop, flags); |
| 2184 __ CallStub(&stub); |
| 2185 RecordJSReturnSite(expr); |
| 2186 // Restore context register. |
| 2187 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2188 context()->DropAndPlug(1, v0); |
| 2189 } |
| 2190 |
| 2191 |
| 2192 void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag, |
| 2193 int arg_count) { |
| 2194 // Push a copy of the first argument, or undefined if it doesn't exist. |
| 2195 if (arg_count > 0) { |
| 2196 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); |
| 2197 } else { |
| 2198 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); |
| 2199 } |
| 2200 __ push(a1); |
| 2201 |
| 2202 // Push the receiver of the enclosing function and do runtime call. |
| 2203 __ lw(a1, MemOperand(fp, (2 + scope()->num_parameters()) * kPointerSize)); |
| 2204 __ push(a1); |
| 2205 // Push the strict mode flag. |
| 2206 __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); |
| 2207 __ push(a1); |
| 2208 |
| 2209 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP |
| 2210 ? Runtime::kResolvePossiblyDirectEvalNoLookup |
| 2211 : Runtime::kResolvePossiblyDirectEval, 4); |
429 } | 2212 } |
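At the CallRuntime the four arguments sit on the stack in push order; an assumed layout, bottom to top:

    // function        (pushed by the caller before invoking this helper)
    // first argument  (or undefined if there are none)
    // receiver of the enclosing function
    // Smi-encoded strict mode flag   <- sp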
430 | 2213 |
431 | 2214 |
432 void FullCodeGenerator::VisitCall(Call* expr) { | 2215 void FullCodeGenerator::VisitCall(Call* expr) { |
433 UNIMPLEMENTED_MIPS(); | 2216 #ifdef DEBUG |
| 2217 // We want to verify that RecordJSReturnSite gets called on all paths |
| 2218 // through this function. Avoid early returns. |
| 2219 expr->return_is_recorded_ = false; |
| 2220 #endif |
| 2221 |
| 2222 Comment cmnt(masm_, "[ Call"); |
| 2223 Expression* fun = expr->expression(); |
| 2224 Variable* var = fun->AsVariableProxy()->AsVariable(); |
| 2225 |
| 2226 if (var != NULL && var->is_possibly_eval()) { |
| 2227 // In a call to eval, we first call %ResolvePossiblyDirectEval to |
| 2228 // resolve the function we need to call and the receiver of the |
| 2229 // call. Then we call the resolved function using the given |
| 2230 // arguments. |
| 2231 ZoneList<Expression*>* args = expr->arguments(); |
| 2232 int arg_count = args->length(); |
| 2233 |
| 2234 { PreservePositionScope pos_scope(masm()->positions_recorder()); |
| 2235 VisitForStackValue(fun); |
| 2236 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
| 2237 __ push(a2); // Reserved receiver slot. |
| 2238 |
| 2239 // Push the arguments. |
| 2240 for (int i = 0; i < arg_count; i++) { |
| 2241 VisitForStackValue(args->at(i)); |
| 2242 } |
| 2243 // If we know that eval can only be shadowed by eval-introduced |
| 2244 // variables, we attempt to load the global eval function directly |
| 2245 // in generated code. If we succeed, there is no need to perform a |
| 2246 // context lookup in the runtime system. |
| 2247 Label done; |
| 2248 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { |
| 2249 Label slow; |
| 2250 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(), |
| 2251 NOT_INSIDE_TYPEOF, |
| 2252 &slow); |
| 2253 // Push the function and resolve eval. |
| 2254 __ push(v0); |
| 2255 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count); |
| 2256 __ jmp(&done); |
| 2257 __ bind(&slow); |
| 2258 } |
| 2259 |
| 2260 // Push a copy of the function (found below the arguments) and |
| 2261 // resolve eval. |
| 2262 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 2263 __ push(a1); |
| 2264 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count); |
| 2265 if (done.is_linked()) { |
| 2266 __ bind(&done); |
| 2267 } |
| 2268 |
| 2269 // The runtime call returns a pair of values in v0 (function) and |
| 2270 // v1 (receiver). Touch up the stack with the right values. |
| 2271 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize)); |
| 2272 __ sw(v1, MemOperand(sp, arg_count * kPointerSize)); |
| 2273 } |
| 2274 // Record source position for debugger. |
| 2275 SetSourcePosition(expr->position()); |
| 2276 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP; |
| 2277 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); |
| 2278 __ CallStub(&stub); |
| 2279 RecordJSReturnSite(expr); |
| 2280 // Restore context register. |
| 2281 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2282 context()->DropAndPlug(1, v0); |
| 2283 } else if (var != NULL && !var->is_this() && var->is_global()) { |
| 2284 // Push global object as receiver for the call IC. |
| 2285 __ lw(a0, GlobalObjectOperand()); |
| 2286 __ push(a0); |
| 2287 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT); |
| 2288 } else if (var != NULL && var->AsSlot() != NULL && |
| 2289 var->AsSlot()->type() == Slot::LOOKUP) { |
| 2290 // Call to a lookup slot (dynamically introduced variable). |
| 2291 Label slow, done; |
| 2292 |
| 2293 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2294 // Generate code for loading from variables potentially shadowed |
| 2295 // by eval-introduced variables. |
| 2296 EmitDynamicLoadFromSlotFastCase(var->AsSlot(), |
| 2297 NOT_INSIDE_TYPEOF, |
| 2298 &slow, |
| 2299 &done); |
| 2300 } |
| 2301 |
| 2302 __ bind(&slow); |
| 2303 // Call the runtime to find the function to call (returned in v0) |
| 2304 // and the object holding it (returned in v1). |
| 2305 __ push(context_register()); |
| 2306 __ li(a2, Operand(var->name())); |
| 2307 __ push(a2); |
| 2308 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
| 2309 __ Push(v0, v1); // Function, receiver. |
| 2310 |
| 2311 // If fast case code has been generated, emit code to push the |
| 2312 // function and receiver and have the slow path jump around this |
| 2313 // code. |
| 2314 if (done.is_linked()) { |
| 2315 Label call; |
| 2316 __ Branch(&call); |
| 2317 __ bind(&done); |
| 2318 // Push function. |
| 2319 __ push(v0); |
| 2320 // Push global receiver. |
| 2321 __ lw(a1, GlobalObjectOperand()); |
| 2322 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); |
| 2323 __ push(a1); |
| 2324 __ bind(&call); |
| 2325 } |
| 2326 |
| 2327 // The receiver is either the global receiver or a JSObject found by |
| 2328 // LoadContextSlot. |
| 2329 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); |
| 2330 } else if (fun->AsProperty() != NULL) { |
| 2331 // Call to an object property. |
| 2332 Property* prop = fun->AsProperty(); |
| 2333 Literal* key = prop->key()->AsLiteral(); |
| 2334 if (key != NULL && key->handle()->IsSymbol()) { |
| 2335 // Call to a named property, use call IC. |
| 2336 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2337 VisitForStackValue(prop->obj()); |
| 2338 } |
| 2339 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET_WITH_ID); |
| 2340 } else { |
| 2341 // Call to a keyed property. |
| 2342 // For a synthetic property, use a keyed load IC followed by a function |
| 2343 // call; for a regular property, use EmitKeyedCallWithIC. |
| 2344 if (prop->is_synthetic()) { |
| 2345 // Do not visit the object and key subexpressions (they are shared |
| 2346 // by all occurrences of the same rewritten parameter). |
| 2347 ASSERT(prop->obj()->AsVariableProxy() != NULL); |
| 2348 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL); |
| 2349 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot(); |
| 2350 MemOperand operand = EmitSlotSearch(slot, a1); |
| 2351 __ lw(a1, operand); |
| 2352 |
| 2353 ASSERT(prop->key()->AsLiteral() != NULL); |
| 2354 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi()); |
| 2355 __ li(a0, Operand(prop->key()->AsLiteral()->handle())); |
| 2356 |
| 2357 // Record source code position for IC call. |
| 2358 SetSourcePosition(prop->position()); |
| 2359 |
| 2360 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); |
| 2361 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 2362 __ lw(a1, GlobalObjectOperand()); |
| 2363 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); |
| 2364 __ Push(v0, a1); // Function, receiver. |
| 2365 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); |
| 2366 } else { |
| 2367 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2368 VisitForStackValue(prop->obj()); |
| 2369 } |
| 2370 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET_WITH_ID); |
| 2371 } |
| 2372 } |
| 2373 } else { |
| 2374 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2375 VisitForStackValue(fun); |
| 2376 } |
| 2377 // Load global receiver object. |
| 2378 __ lw(a1, GlobalObjectOperand()); |
| 2379 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); |
| 2380 __ push(a1); |
| 2381 // Emit function call. |
| 2382 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); |
| 2383 } |
| 2384 |
| 2385 #ifdef DEBUG |
| 2386 // RecordJSReturnSite should have been called. |
| 2387 ASSERT(expr->return_is_recorded_); |
| 2388 #endif |
434 } | 2389 } |
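A condensed dispatch table for the call kinds handled above:

    // possibly-direct eval      -> %ResolvePossiblyDirectEval + CallFunctionStub
    // non-this global variable  -> call IC with CODE_TARGET_CONTEXT
    // LOOKUP slot               -> fast case or Runtime::kLoadContextSlot, then stub
    // named property            -> call IC
    // keyed property            -> keyed call IC (keyed load IC if synthetic)
    // anything else             -> CallFunctionStub with the global receiver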
435 | 2390 |
436 | 2391 |
437 void FullCodeGenerator::VisitCallNew(CallNew* expr) { | 2392 void FullCodeGenerator::VisitCallNew(CallNew* expr) { |
438 UNIMPLEMENTED_MIPS(); | 2393 Comment cmnt(masm_, "[ CallNew"); |
| 2394 // According to ECMA-262, section 11.2.2, page 44, the function |
| 2395 // expression in new calls must be evaluated before the |
| 2396 // arguments. |
| 2397 |
| 2398 // Push constructor on the stack. If it's not a function it's used as |
| 2399 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is |
| 2400 // ignored. |
| 2401 VisitForStackValue(expr->expression()); |
| 2402 |
| 2403 // Push the arguments ("left-to-right") on the stack. |
| 2404 ZoneList<Expression*>* args = expr->arguments(); |
| 2405 int arg_count = args->length(); |
| 2406 for (int i = 0; i < arg_count; i++) { |
| 2407 VisitForStackValue(args->at(i)); |
| 2408 } |
| 2409 |
| 2410 // Call the construct call builtin that handles allocation and |
| 2411 // constructor invocation. |
| 2412 SetSourcePosition(expr->position()); |
| 2413 |
| 2414 // Load function and argument count into a1 and a0. |
| 2415 __ li(a0, Operand(arg_count)); |
| 2416 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); |
| 2417 |
| 2418 Handle<Code> construct_builtin = |
| 2419 isolate()->builtins()->JSConstructCall(); |
| 2420 __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL); |
| 2421 context()->Plug(v0); |
439 } | 2422 } |
440 | 2423 |
441 | 2424 |
442 void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) { | 2425 void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) { |
443 UNIMPLEMENTED_MIPS(); | 2426 ASSERT(args->length() == 1); |
| 2427 |
| 2428 VisitForAccumulatorValue(args->at(0)); |
| 2429 |
| 2430 Label materialize_true, materialize_false; |
| 2431 Label* if_true = NULL; |
| 2432 Label* if_false = NULL; |
| 2433 Label* fall_through = NULL; |
| 2434 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2435 &if_true, &if_false, &fall_through); |
| 2436 |
| 2437 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2438 __ And(t0, v0, Operand(kSmiTagMask)); |
| 2439 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through); |
| 2440 |
| 2441 context()->Plug(if_true, if_false); |
444 } | 2442 } |
445 | 2443 |
446 | 2444 |
447 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) { | 2445 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) { |
448 UNIMPLEMENTED_MIPS(); | 2446 ASSERT(args->length() == 1); |
| 2447 |
| 2448 VisitForAccumulatorValue(args->at(0)); |
| 2449 |
| 2450 Label materialize_true, materialize_false; |
| 2451 Label* if_true = NULL; |
| 2452 Label* if_false = NULL; |
| 2453 Label* fall_through = NULL; |
| 2454 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2455 &if_true, &if_false, &fall_through); |
| 2456 |
| 2457 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2458 __ And(at, v0, Operand(kSmiTagMask | 0x80000000)); |
| 2459 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through); |
| 2460 |
| 2461 context()->Plug(if_true, if_false); |
449 } | 2462 } |
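The non-negative smi test folds two checks into a single mask (assuming 32-bit smis with tag bit 0): bit 0 must be clear (smi) and bit 31 must be clear (non-negative), so both predicates reduce to one AND and one compare:

    // is_smi              = (v0 & kSmiTagMask) == 0;
    // is_non_negative_smi = (v0 & (kSmiTagMask | 0x80000000)) == 0;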
450 | 2463 |
451 | 2464 |
452 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) { | 2465 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) { |
453 UNIMPLEMENTED_MIPS(); | 2466 ASSERT(args->length() == 1); |
| 2467 |
| 2468 VisitForAccumulatorValue(args->at(0)); |
| 2469 |
| 2470 Label materialize_true, materialize_false; |
| 2471 Label* if_true = NULL; |
| 2472 Label* if_false = NULL; |
| 2473 Label* fall_through = NULL; |
| 2474 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2475 &if_true, &if_false, &fall_through); |
| 2476 |
| 2477 __ JumpIfSmi(v0, if_false); |
| 2478 __ LoadRoot(at, Heap::kNullValueRootIndex); |
| 2479 __ Branch(if_true, eq, v0, Operand(at)); |
| 2480 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 2481 // Undetectable objects behave like undefined when tested with typeof. |
| 2482 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset)); |
| 2483 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); |
| 2484 __ Branch(if_false, ne, at, Operand(zero_reg)); |
| 2485 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset)); |
| 2486 __ Branch(if_false, lt, a1, Operand(FIRST_JS_OBJECT_TYPE)); |
| 2487 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2488 Split(le, a1, Operand(LAST_JS_OBJECT_TYPE), if_true, if_false, fall_through); |
| 2489 |
| 2490 context()->Plug(if_true, if_false); |
454 } | 2491 } |
455 | 2492 |
456 | 2493 |
457 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) { | 2494 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) { |
458 UNIMPLEMENTED_MIPS(); | 2495 ASSERT(args->length() == 1); |
| 2496 |
| 2497 VisitForAccumulatorValue(args->at(0)); |
| 2498 |
| 2499 Label materialize_true, materialize_false; |
| 2500 Label* if_true = NULL; |
| 2501 Label* if_false = NULL; |
| 2502 Label* fall_through = NULL; |
| 2503 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2504 &if_true, &if_false, &fall_through); |
| 2505 |
| 2506 __ JumpIfSmi(v0, if_false); |
| 2507 __ GetObjectType(v0, a1, a1); |
| 2508 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2509 Split(ge, a1, Operand(FIRST_JS_OBJECT_TYPE), |
| 2510 if_true, if_false, fall_through); |
| 2511 |
| 2512 context()->Plug(if_true, if_false); |
459 } | 2513 } |
460 | 2514 |
461 | 2515 |
462 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) { | 2516 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) { |
463 UNIMPLEMENTED_MIPS(); | 2517 ASSERT(args->length() == 1); |
| 2518 |
| 2519 VisitForAccumulatorValue(args->at(0)); |
| 2520 |
| 2521 Label materialize_true, materialize_false; |
| 2522 Label* if_true = NULL; |
| 2523 Label* if_false = NULL; |
| 2524 Label* fall_through = NULL; |
| 2525 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2526 &if_true, &if_false, &fall_through); |
| 2527 |
| 2528 __ JumpIfSmi(v0, if_false); |
| 2529 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 2530 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); |
| 2531 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); |
| 2532 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2533 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through); |
| 2534 |
| 2535 context()->Plug(if_true, if_false); |
464 } | 2536 } |
465 | 2537 |
466 | 2538 |
467 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( | 2539 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( |
468 ZoneList<Expression*>* args) { | 2540 ZoneList<Expression*>* args) { |
469 UNIMPLEMENTED_MIPS(); | 2541 |
| 2542 ASSERT(args->length() == 1); |
| 2543 |
| 2544 VisitForAccumulatorValue(args->at(0)); |
| 2545 |
| 2546 Label materialize_true, materialize_false; |
| 2547 Label* if_true = NULL; |
| 2548 Label* if_false = NULL; |
| 2549 Label* fall_through = NULL; |
| 2550 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2551 &if_true, &if_false, &fall_through); |
| 2552 |
| 2553 if (FLAG_debug_code) __ AbortIfSmi(v0); |
| 2554 |
| 2555 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 2556 __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset)); |
| 2557 __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf); |
| 2558 __ Branch(if_true, ne, t0, Operand(zero_reg)); |
| 2559 |
| 2560 // Check for fast case object. Generate false result for slow case object. |
| 2561 __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
| 2562 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); |
| 2563 __ LoadRoot(t0, Heap::kHashTableMapRootIndex); |
| 2564 __ Branch(if_false, eq, a2, Operand(t0)); |
| 2565 |
| 2566 // Look for the valueOf symbol in the descriptor array; if it is found, the |
| 2567 // result is false. The entry type is not checked, so a transition entry |
| 2568 // produces a false negative. |
| 2569 __ lw(t0, FieldMemOperand(a1, Map::kInstanceDescriptorsOffset)); |
| 2570 __ lw(a3, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 2571 // t0: descriptor array |
| 2572 // a3: length of descriptor array |
| 2573 // Calculate the end of the descriptor array. |
| 2574 STATIC_ASSERT(kSmiTag == 0); |
| 2575 STATIC_ASSERT(kSmiTagSize == 1); |
| 2576 STATIC_ASSERT(kPointerSize == 4); |
| 2577 __ Addu(a2, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 2578 __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize); |
| 2579 __ Addu(a2, a2, t1); |
| 2580 |
| 2581 // Calculate location of the first key name. |
| 2582 __ Addu(t0, |
| 2583 t0, |
| 2584 Operand(FixedArray::kHeaderSize - kHeapObjectTag + |
| 2585 DescriptorArray::kFirstIndex * kPointerSize)); |
| 2586 // Loop through all the keys in the descriptor array. If one of these is the |
| 2587 // symbol valueOf, the result is false. |
| 2588 Label entry, loop; |
| 2589 // The use of t2 to store the valueOf symbol assumes that it is not otherwise |
| 2590 // used in the loop below. |
| 2591 __ li(t2, Operand(FACTORY->value_of_symbol())); |
| 2592 __ jmp(&entry); |
| 2593 __ bind(&loop); |
| 2594 __ lw(a3, MemOperand(t0, 0)); |
| 2595 __ Branch(if_false, eq, a3, Operand(t2)); |
| 2596 __ Addu(t0, t0, Operand(kPointerSize)); |
| 2597 __ bind(&entry); |
| 2598 __ Branch(&loop, ne, t0, Operand(a2)); |
| 2599 |
| 2600 // If a valueOf property is not found on the object, check that its |
| 2601 // prototype is the unmodified String prototype. If not, the result is false. |
| 2602 __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset)); |
| 2603 __ JumpIfSmi(a2, if_false); |
| 2604 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); |
| 2605 __ lw(a3, ContextOperand(cp, Context::GLOBAL_INDEX)); |
| 2606 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset)); |
| 2607 __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); |
| 2608 __ Branch(if_false, ne, a2, Operand(a3)); |
| 2609 |
| 2610 // Set the bit in the map to record that this object has been checked as |
| 2611 // safe for the default valueOf, and set a true result. |
| 2612 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset)); |
| 2613 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); |
| 2614 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset)); |
| 2615 __ jmp(if_true); |
| 2616 |
| 2617 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2618 context()->Plug(if_true, if_false); |
470 } | 2619 } |
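A C-style rendering of the descriptor-array scan above (pointer names assumed for illustration):

    for (Object** p = first_entry; p != end; p++) {
      if (*p == value_of_symbol) break;  // branch to if_false
    }

On success, the kStringWrapperSafeForDefaultValueOf bit is set in the map, so later checks can short-circuit on the very first test.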
471 | 2620 |
472 | 2621 |
473 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) { | 2622 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) { |
474 UNIMPLEMENTED_MIPS(); | 2623 ASSERT(args->length() == 1); |
| 2624 |
| 2625 VisitForAccumulatorValue(args->at(0)); |
| 2626 |
| 2627 Label materialize_true, materialize_false; |
| 2628 Label* if_true = NULL; |
| 2629 Label* if_false = NULL; |
| 2630 Label* fall_through = NULL; |
| 2631 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2632 &if_true, &if_false, &fall_through); |
| 2633 |
| 2634 __ JumpIfSmi(v0, if_false); |
| 2635 __ GetObjectType(v0, a1, a2); |
| 2636 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2637 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE)); |
| 2638 __ Branch(if_false); |
| 2639 |
| 2640 context()->Plug(if_true, if_false); |
475 } | 2641 } |
476 | 2642 |
477 | 2643 |
478 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) { | 2644 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) { |
479 UNIMPLEMENTED_MIPS(); | 2645 ASSERT(args->length() == 1); |
| 2646 |
| 2647 VisitForAccumulatorValue(args->at(0)); |
| 2648 |
| 2649 Label materialize_true, materialize_false; |
| 2650 Label* if_true = NULL; |
| 2651 Label* if_false = NULL; |
| 2652 Label* fall_through = NULL; |
| 2653 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2654 &if_true, &if_false, &fall_through); |
| 2655 |
| 2656 __ JumpIfSmi(v0, if_false); |
| 2657 __ GetObjectType(v0, a1, a1); |
| 2658 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2659 Split(eq, a1, Operand(JS_ARRAY_TYPE), |
| 2660 if_true, if_false, fall_through); |
| 2661 |
| 2662 context()->Plug(if_true, if_false); |
480 } | 2663 } |
481 | 2664 |
482 | 2665 |
483 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) { | 2666 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) { |
484 UNIMPLEMENTED_MIPS(); | 2667 ASSERT(args->length() == 1); |
| 2668 |
| 2669 VisitForAccumulatorValue(args->at(0)); |
| 2670 |
| 2671 Label materialize_true, materialize_false; |
| 2672 Label* if_true = NULL; |
| 2673 Label* if_false = NULL; |
| 2674 Label* fall_through = NULL; |
| 2675 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2676 &if_true, &if_false, &fall_through); |
| 2677 |
| 2678 __ JumpIfSmi(v0, if_false); |
| 2679 __ GetObjectType(v0, a1, a1); |
| 2680 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2681 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through); |
| 2682 |
| 2683 context()->Plug(if_true, if_false); |
485 } | 2684 } |
486 | 2685 |
487 | 2686 |
488 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) { | 2687 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) { |
489 UNIMPLEMENTED_MIPS(); | 2688 ASSERT(args->length() == 0); |
| 2689 |
| 2690 Label materialize_true, materialize_false; |
| 2691 Label* if_true = NULL; |
| 2692 Label* if_false = NULL; |
| 2693 Label* fall_through = NULL; |
| 2694 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2695 &if_true, &if_false, &fall_through); |
| 2696 |
| 2697 // Get the frame pointer for the calling frame. |
| 2698 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 2699 |
| 2700 // Skip the arguments adaptor frame if it exists. |
| 2701 Label check_frame_marker; |
| 2702 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset)); |
| 2703 __ Branch(&check_frame_marker, ne, |
| 2704 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 2705 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); |
| 2706 |
| 2707 // Check the marker in the calling frame. |
| 2708 __ bind(&check_frame_marker); |
| 2709 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset)); |
| 2710 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2711 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)), |
| 2712 if_true, if_false, fall_through); |
| 2713 |
| 2714 context()->Plug(if_true, if_false); |
490 } | 2715 } |
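Restated on the host, the frame walk above reads three slots per frame; the struct layout and sentinel parameters below are invented for the sketch (the generated code uses StandardFrameConstants offsets and the ARGUMENTS_ADAPTOR / CONSTRUCT smi markers):

    #include <cstdint>

    // Sketch: is the calling frame a construct frame, skipping at most one
    // arguments adaptor frame on the way up?
    struct FrameSketch {
      FrameSketch* caller_fp;  // kCallerFPOffset
      intptr_t context;        // kContextOffset; adaptors hold a sentinel smi
      intptr_t marker;         // kMarkerOffset
    };

    static bool IsConstructCall(FrameSketch* fp, intptr_t adaptor_sentinel,
                                intptr_t construct_marker) {
      FrameSketch* caller = fp->caller_fp;
      if (caller->context == adaptor_sentinel) caller = caller->caller_fp;
      return caller->marker == construct_marker;
    }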
491 | 2716 |
492 | 2717 |
493 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) { | 2718 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) { |
494 UNIMPLEMENTED_MIPS(); | 2719 ASSERT(args->length() == 2); |
| 2720 |
| 2721 // Load the two objects into registers and perform the comparison. |
| 2722 VisitForStackValue(args->at(0)); |
| 2723 VisitForAccumulatorValue(args->at(1)); |
| 2724 |
| 2725 Label materialize_true, materialize_false; |
| 2726 Label* if_true = NULL; |
| 2727 Label* if_false = NULL; |
| 2728 Label* fall_through = NULL; |
| 2729 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2730 &if_true, &if_false, &fall_through); |
| 2731 |
| 2732 __ pop(a1); |
| 2733 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 2734 Split(eq, v0, Operand(a1), if_true, if_false, fall_through); |
| 2735 |
| 2736 context()->Plug(if_true, if_false); |
495 } | 2737 } |
496 | 2738 |
497 | 2739 |
498 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) { | 2740 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) { |
499 UNIMPLEMENTED_MIPS(); | 2741 ASSERT(args->length() == 1); |
| 2742 |
| 2743 // ArgumentsAccessStub expects the key in a1 and the formal |
| 2744 // parameter count in a0. |
| 2745 VisitForAccumulatorValue(args->at(0)); |
| 2746 __ mov(a1, v0); |
| 2747 __ li(a0, Operand(Smi::FromInt(scope()->num_parameters()))); |
| 2748 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); |
| 2749 __ CallStub(&stub); |
| 2750 context()->Plug(v0); |
500 } | 2751 } |
501 | 2752 |
502 | 2753 |
503 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) { | 2754 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) { |
504 UNIMPLEMENTED_MIPS(); | 2755 ASSERT(args->length() == 0); |
| 2756 |
| 2757 Label exit; |
| 2758 // Get the number of formal parameters. |
| 2759 __ li(v0, Operand(Smi::FromInt(scope()->num_parameters()))); |
| 2760 |
| 2761 // Check if the calling frame is an arguments adaptor frame. |
| 2762 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 2763 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); |
| 2764 __ Branch(&exit, ne, a3, |
| 2765 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 2766 |
| 2767 // Arguments adaptor case: Read the arguments length from the |
| 2768 // adaptor frame. |
| 2769 __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2770 |
| 2771 __ bind(&exit); |
| 2772 context()->Plug(v0); |
505 } | 2773 } |
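The same frame-walking idea gives the arguments length: the formal parameter count stands unless the caller went through an arguments adaptor, which records the actual count in its own frame. A hedged sketch with an invented layout:

    #include <cstdint>

    struct AdaptorFrameSketch {
      AdaptorFrameSketch* caller_fp;
      intptr_t context;  // sentinel smi identifies an arguments adaptor frame
      intptr_t length;   // ArgumentsAdaptorFrameConstants::kLengthOffset (smi)
    };

    static intptr_t ArgumentsLength(AdaptorFrameSketch* fp,
                                    intptr_t formal_count,
                                    intptr_t adaptor_sentinel) {
      AdaptorFrameSketch* caller = fp->caller_fp;
      if (caller->context != adaptor_sentinel) return formal_count;
      return caller->length;  // actual (adapted) argument count
    }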
506 | 2774 |
507 | 2775 |
508 void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) { | 2776 void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) { |
509 UNIMPLEMENTED_MIPS(); | 2777 ASSERT(args->length() == 1); |
| 2778 Label done, null, function, non_function_constructor; |
| 2779 |
| 2780 VisitForAccumulatorValue(args->at(0)); |
| 2781 |
| 2782 // If the object is a smi, we return null. |
| 2783 __ JumpIfSmi(v0, &null); |
| 2784 |
| 2785 // Check that the object is a JS object but take special care of JS |
| 2786 // functions to make sure they have 'Function' as their class. |
| 2787 __ GetObjectType(v0, v0, a1); // Map is now in v0. |
| 2788 __ Branch(&null, lt, a1, Operand(FIRST_JS_OBJECT_TYPE)); |
| 2789 |
| 2790 // As long as JS_FUNCTION_TYPE is the last instance type and it is |
| 2791 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for |
| 2792 // LAST_JS_OBJECT_TYPE. |
| 2793 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
| 2794 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); |
| 2795 __ Branch(&function, eq, a1, Operand(JS_FUNCTION_TYPE)); |
| 2796 |
| 2797 // Check if the constructor in the map is a function. |
| 2798 __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset)); |
| 2799 __ GetObjectType(v0, a1, a1); |
| 2800 __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE)); |
| 2801 |
| 2802 // v0 now contains the constructor function. Grab the |
| 2803 // instance class name from there. |
| 2804 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset)); |
| 2805 __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset)); |
| 2806 __ Branch(&done); |
| 2807 |
| 2808 // Functions have class 'Function'. |
| 2809 __ bind(&function); |
| 2810 __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex); |
| 2811 __ jmp(&done); |
| 2812 |
| 2813 // Objects with a non-function constructor have class 'Object'. |
| 2814 __ bind(&non_function_constructor); |
| 2815 __ LoadRoot(v0, Heap::kObject_symbolRootIndex); |
| 2816 __ jmp(&done); |
| 2817 |
| 2818 // Non-JS objects have class null. |
| 2819 __ bind(&null); |
| 2820 __ LoadRoot(v0, Heap::kNullValueRootIndex); |
| 2821 |
| 2822 // All done. |
| 2823 __ bind(&done); |
| 2824 |
| 2825 context()->Plug(v0); |
510 } | 2826 } |
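The branches above implement a short decision list. A sketch with a hypothetical input record (the real data lives in the map, its constructor slot, and the constructor's SharedFunctionInfo):

    // Sketch only; returning 0 stands for the null class.
    struct ClassOfInput {
      bool is_smi;
      int instance_type;
      bool constructor_is_function;
      const char* instance_class_name;  // from SharedFunctionInfo
    };

    static const char* ClassOf(const ClassOfInput& v,
                               int first_js_object_type,
                               int js_function_type) {
      if (v.is_smi) return 0;                                // null
      if (v.instance_type < first_js_object_type) return 0;  // null
      if (v.instance_type == js_function_type) return "Function";
      if (!v.constructor_is_function) return "Object";
      return v.instance_class_name;
    }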
511 | 2827 |
512 | 2828 |
513 void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) { | 2829 void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) { |
514 UNIMPLEMENTED_MIPS(); | 2830 // Conditionally generate a log call. |
| 2831 // Args: |
| 2832 // 0 (literal string): The type of logging (corresponds to the flags). |
| 2833 // This is used to determine whether or not to generate the log call. |
| 2834 // 1 (string): Format string. Access the string at argument index 2 |
| 2835 // with '%2s' (see Logger::LogRuntime for all the formats). |
| 2836 // 2 (array): Arguments to the format string. |
| 2837 ASSERT_EQ(args->length(), 3); |
| 2838 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 2839 if (CodeGenerator::ShouldGenerateLog(args->at(0))) { |
| 2840 VisitForStackValue(args->at(1)); |
| 2841 VisitForStackValue(args->at(2)); |
| 2842 __ CallRuntime(Runtime::kLog, 2); |
| 2843 } |
| 2844 #endif |
| 2845 // Finally, we're expected to leave a value on the top of the stack. |
| 2846 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
| 2847 context()->Plug(v0); |
515 } | 2848 } |
516 | 2849 |
517 | 2850 |
518 void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) { | 2851 void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) { |
519 UNIMPLEMENTED_MIPS(); | 2852 ASSERT(args->length() == 0); |
| 2853 |
| 2854 Label slow_allocate_heapnumber; |
| 2855 Label heapnumber_allocated; |
| 2856 |
| 2857 // Save the new heap number in callee-saved register s0, since |
| 2858 // we call out to external C code below. |
| 2859 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex); |
| 2860 __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber); |
| 2861 __ jmp(&heapnumber_allocated); |
| 2862 |
| 2863 __ bind(&slow_allocate_heapnumber); |
| 2864 |
| 2865 // Allocate a heap number. |
| 2866 __ CallRuntime(Runtime::kNumberAlloc, 0); |
| 2867 __ mov(s0, v0); // Save result in s0, so it survives the C function call. |
| 2868 |
| 2869 __ bind(&heapnumber_allocated); |
| 2870 |
| 2871 // Convert 32 random bits in v0 to 0.(32 random bits) in a double |
| 2872 // by computing: |
| 2873 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20). |
| 2874 if (CpuFeatures::IsSupported(FPU)) { |
| 2875 __ PrepareCallCFunction(1, a0); |
| 2876 __ li(a0, Operand(ExternalReference::isolate_address())); |
| 2877 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); |
| 2878 |
| 2880 CpuFeatures::Scope scope(FPU); |
| 2881 // 0x41300000 is the top half of 1.0 x 2^20 as a double. |
| 2882 __ li(a1, Operand(0x41300000)); |
| 2883 // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU. |
| 2884 __ mtc1(a1, f13); |
| 2885 __ mtc1(v0, f12); |
| 2886 // Move 0x4130000000000000 to FPU. |
| 2887 __ mtc1(a1, f15); |
| 2888 __ mtc1(zero_reg, f14); |
| 2889 // Subtract and store the result in the heap number. |
| 2890 __ sub_d(f0, f12, f14); |
| 2891 __ sdc1(f0, MemOperand(s0, HeapNumber::kValueOffset - kHeapObjectTag)); |
| 2892 __ mov(v0, s0); |
| 2893 } else { |
| 2894 __ PrepareCallCFunction(2, a0); |
| 2895 __ mov(a0, s0); |
| 2896 __ li(a1, Operand(ExternalReference::isolate_address())); |
| 2897 __ CallCFunction( |
| 2898 ExternalReference::fill_heap_number_with_random_function(isolate()), 2); |
| 2899 } |
| 2900 |
| 2901 context()->Plug(v0); |
520 } | 2902 } |
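The FPU branch's bit trick can be checked on the host, assuming IEEE-754 doubles: the raw pattern 0x41300000'xxxxxxxx decodes to 2^20 + x/2^32, and 0x4130000000000000 is exactly 2^20 (1048576.0), so the subtraction leaves x/2^32 in [0, 1). A minimal sketch of the same computation as the mtc1/sub_d sequence:

    #include <cstdint>
    #include <cstring>

    static double RandomBitsToDouble(uint32_t random_bits) {
      uint64_t bits = (static_cast<uint64_t>(0x41300000) << 32) | random_bits;
      double biased;
      std::memcpy(&biased, &bits, sizeof biased);  // 1.(20 zeros)(bits) x 2^20
      return biased - 1048576.0;                   // subtract 1.0 x 2^20
    }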
521 | 2903 |
522 | 2904 |
523 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) { | 2905 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) { |
524 UNIMPLEMENTED_MIPS(); | 2906 // Load the arguments on the stack and call the stub. |
| 2907 SubStringStub stub; |
| 2908 ASSERT(args->length() == 3); |
| 2909 VisitForStackValue(args->at(0)); |
| 2910 VisitForStackValue(args->at(1)); |
| 2911 VisitForStackValue(args->at(2)); |
| 2912 __ CallStub(&stub); |
| 2913 context()->Plug(v0); |
525 } | 2914 } |
526 | 2915 |
527 | 2916 |
528 void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) { | 2917 void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) { |
529 UNIMPLEMENTED_MIPS(); | 2918 // Load the arguments on the stack and call the stub. |
| 2919 RegExpExecStub stub; |
| 2920 ASSERT(args->length() == 4); |
| 2921 VisitForStackValue(args->at(0)); |
| 2922 VisitForStackValue(args->at(1)); |
| 2923 VisitForStackValue(args->at(2)); |
| 2924 VisitForStackValue(args->at(3)); |
| 2925 __ CallStub(&stub); |
| 2926 context()->Plug(v0); |
530 } | 2927 } |
531 | 2928 |
532 | 2929 |
533 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) { | 2930 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) { |
534 UNIMPLEMENTED_MIPS(); | 2931 ASSERT(args->length() == 1); |
| 2932 |
| 2933 VisitForAccumulatorValue(args->at(0)); // Load the object. |
| 2934 |
| 2935 Label done; |
| 2936 // If the object is a smi return the object. |
| 2937 __ JumpIfSmi(v0, &done); |
| 2938 // If the object is not a value type, return the object. |
| 2939 __ GetObjectType(v0, a1, a1); |
| 2940 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE)); |
| 2941 |
| 2942 __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset)); |
| 2943 |
| 2944 __ bind(&done); |
| 2945 context()->Plug(v0); |
535 } | 2946 } |
536 | 2947 |
537 | 2948 |
538 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) { | 2949 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) { |
539 UNIMPLEMENTED_MIPS(); | 2950 // Load the arguments on the stack and call the runtime function. |
| 2951 ASSERT(args->length() == 2); |
| 2952 VisitForStackValue(args->at(0)); |
| 2953 VisitForStackValue(args->at(1)); |
| 2954 MathPowStub stub; |
| 2955 __ CallStub(&stub); |
| 2956 context()->Plug(v0); |
540 } | 2957 } |
541 | 2958 |
542 | 2959 |
543 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) { | 2960 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) { |
544 UNIMPLEMENTED_MIPS(); | 2961 ASSERT(args->length() == 2); |
| 2962 |
| 2963 VisitForStackValue(args->at(0)); // Load the object. |
| 2964 VisitForAccumulatorValue(args->at(1)); // Load the value. |
| 2965 __ pop(a1); // v0 = value. a1 = object. |
| 2966 |
| 2967 Label done; |
| 2968 // If the object is a smi, return the value. |
| 2969 __ JumpIfSmi(a1, &done); |
| 2970 |
| 2971 // If the object is not a value type, return the value. |
| 2972 __ GetObjectType(a1, a2, a2); |
| 2973 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE)); |
| 2974 |
| 2975 // Store the value. |
| 2976 __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset)); |
| 2977 // Update the write barrier. Save the value as it will be |
| 2978 // overwritten by the write barrier code and is needed afterward. |
| 2979 __ RecordWrite(a1, Operand(JSValue::kValueOffset - kHeapObjectTag), a2, a3); |
| 2980 |
| 2981 __ bind(&done); |
| 2982 context()->Plug(v0); |
545 } | 2983 } |
546 | 2984 |
547 | 2985 |
548 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) { | 2986 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) { |
549 UNIMPLEMENTED_MIPS(); | 2987 ASSERT_EQ(args->length(), 1); |
| 2988 |
| 2989 // Load the argument on the stack and call the stub. |
| 2990 VisitForStackValue(args->at(0)); |
| 2991 |
| 2992 NumberToStringStub stub; |
| 2993 __ CallStub(&stub); |
| 2994 context()->Plug(v0); |
550 } | 2995 } |
551 | 2996 |
552 | 2997 |
553 void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) { | 2998 void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) { |
554 UNIMPLEMENTED_MIPS(); | 2999 ASSERT(args->length() == 1); |
| 3000 |
| 3001 VisitForAccumulatorValue(args->at(0)); |
| 3002 |
| 3003 Label done; |
| 3004 StringCharFromCodeGenerator generator(v0, a1); |
| 3005 generator.GenerateFast(masm_); |
| 3006 __ jmp(&done); |
| 3007 |
| 3008 NopRuntimeCallHelper call_helper; |
| 3009 generator.GenerateSlow(masm_, call_helper); |
| 3010 |
| 3011 __ bind(&done); |
| 3012 context()->Plug(a1); |
555 } | 3013 } |
556 | 3014 |
557 | 3015 |
558 void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) { | 3016 void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) { |
559 UNIMPLEMENTED_MIPS(); | 3017 ASSERT(args->length() == 2); |
| 3018 |
| 3019 VisitForStackValue(args->at(0)); |
| 3020 VisitForAccumulatorValue(args->at(1)); |
| 3021 __ mov(a0, result_register()); |
| 3022 |
| 3023 Register object = a1; |
| 3024 Register index = a0; |
| 3025 Register scratch = a2; |
| 3026 Register result = v0; |
| 3027 |
| 3028 __ pop(object); |
| 3029 |
| 3030 Label need_conversion; |
| 3031 Label index_out_of_range; |
| 3032 Label done; |
| 3033 StringCharCodeAtGenerator generator(object, |
| 3034 index, |
| 3035 scratch, |
| 3036 result, |
| 3037 &need_conversion, |
| 3038 &need_conversion, |
| 3039 &index_out_of_range, |
| 3040 STRING_INDEX_IS_NUMBER); |
| 3041 generator.GenerateFast(masm_); |
| 3042 __ jmp(&done); |
| 3043 |
| 3044 __ bind(&index_out_of_range); |
| 3045 // When the index is out of range, the spec requires us to return |
| 3046 // NaN. |
| 3047 __ LoadRoot(result, Heap::kNanValueRootIndex); |
| 3048 __ jmp(&done); |
| 3049 |
| 3050 __ bind(&need_conversion); |
| 3051 // Load the undefined value into the result register, which will |
| 3052 // trigger conversion. |
| 3053 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 3054 __ jmp(&done); |
| 3055 |
| 3056 NopRuntimeCallHelper call_helper; |
| 3057 generator.GenerateSlow(masm_, call_helper); |
| 3058 |
| 3059 __ bind(&done); |
| 3060 context()->Plug(result); |
560 } | 3061 } |
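At the JavaScript level the three exits above are: a fast in-range read, the spec-mandated NaN for an out-of-range index, and an undefined result that forces the slow path to convert a non-numeric index first. A host-side model of the observable result for an already-numeric index (illustrative only):

    #include <cmath>
    #include <string>

    static double CharCodeAt(const std::string& s, double index) {
      if (index < 0 || index >= static_cast<double>(s.size())) {
        return std::nan("");  // out of range: the spec requires NaN
      }
      return static_cast<unsigned char>(s[static_cast<size_t>(index)]);
    }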
561 | 3062 |
562 | 3063 |
563 void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) { | 3064 void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) { |
564 UNIMPLEMENTED_MIPS(); | 3065 ASSERT(args->length() == 2); |
| 3066 |
| 3067 VisitForStackValue(args->at(0)); |
| 3068 VisitForAccumulatorValue(args->at(1)); |
| 3069 __ mov(a0, result_register()); |
| 3070 |
| 3071 Register object = a1; |
| 3072 Register index = a0; |
| 3073 Register scratch1 = a2; |
| 3074 Register scratch2 = a3; |
| 3075 Register result = v0; |
| 3076 |
| 3077 __ pop(object); |
| 3078 |
| 3079 Label need_conversion; |
| 3080 Label index_out_of_range; |
| 3081 Label done; |
| 3082 StringCharAtGenerator generator(object, |
| 3083 index, |
| 3084 scratch1, |
| 3085 scratch2, |
| 3086 result, |
| 3087 &need_conversion, |
| 3088 &need_conversion, |
| 3089 &index_out_of_range, |
| 3090 STRING_INDEX_IS_NUMBER); |
| 3091 generator.GenerateFast(masm_); |
| 3092 __ jmp(&done); |
| 3093 |
| 3094 __ bind(&index_out_of_range); |
| 3095 // When the index is out of range, the spec requires us to return |
| 3096 // the empty string. |
| 3097 __ LoadRoot(result, Heap::kEmptyStringRootIndex); |
| 3098 __ jmp(&done); |
| 3099 |
| 3100 __ bind(&need_conversion); |
| 3101 // Move smi zero into the result register, which will trigger |
| 3102 // conversion. |
| 3103 __ li(result, Operand(Smi::FromInt(0))); |
| 3104 __ jmp(&done); |
| 3105 |
| 3106 NopRuntimeCallHelper call_helper; |
| 3107 generator.GenerateSlow(masm_, call_helper); |
| 3108 |
| 3109 __ bind(&done); |
| 3110 context()->Plug(result); |
565 } | 3111 } |
566 | 3112 |
567 | 3113 |
568 void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) { | 3114 void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) { |
569 UNIMPLEMENTED_MIPS(); | 3115 ASSERT_EQ(2, args->length()); |
| 3116 |
| 3117 VisitForStackValue(args->at(0)); |
| 3118 VisitForStackValue(args->at(1)); |
| 3119 |
| 3120 StringAddStub stub(NO_STRING_ADD_FLAGS); |
| 3121 __ CallStub(&stub); |
| 3122 context()->Plug(v0); |
570 } | 3123 } |
571 | 3124 |
572 | 3125 |
573 void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) { | 3126 void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) { |
574 UNIMPLEMENTED_MIPS(); | 3127 ASSERT_EQ(2, args->length()); |
| 3128 |
| 3129 VisitForStackValue(args->at(0)); |
| 3130 VisitForStackValue(args->at(1)); |
| 3131 |
| 3132 StringCompareStub stub; |
| 3133 __ CallStub(&stub); |
| 3134 context()->Plug(v0); |
575 } | 3135 } |
576 | 3136 |
577 | 3137 |
578 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) { | 3138 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) { |
579 UNIMPLEMENTED_MIPS(); | 3139 // Load the argument on the stack and call the stub. |
| 3140 TranscendentalCacheStub stub(TranscendentalCache::SIN, |
| 3141 TranscendentalCacheStub::TAGGED); |
| 3142 ASSERT(args->length() == 1); |
| 3143 VisitForStackValue(args->at(0)); |
| 3144 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos. |
| 3145 __ CallStub(&stub); |
| 3146 context()->Plug(v0); |
580 } | 3147 } |
581 | 3148 |
582 | 3149 |
583 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) { | 3150 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) { |
584 UNIMPLEMENTED_MIPS(); | 3151 // Load the argument on the stack and call the stub. |
| 3152 TranscendentalCacheStub stub(TranscendentalCache::COS, |
| 3153 TranscendentalCacheStub::TAGGED); |
| 3154 ASSERT(args->length() == 1); |
| 3155 VisitForStackValue(args->at(0)); |
| 3156 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos. |
| 3157 __ CallStub(&stub); |
| 3158 context()->Plug(v0); |
| 3159 } |
| 3160 |
| 3161 |
| 3162 void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) { |
| 3163 // Load the argument on the stack and call the stub. |
| 3164 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
| 3165 TranscendentalCacheStub::TAGGED); |
| 3166 ASSERT(args->length() == 1); |
| 3167 VisitForStackValue(args->at(0)); |
| 3168 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos. |
| 3169 __ CallStub(&stub); |
| 3170 context()->Plug(v0); |
585 } | 3171 } |
586 | 3172 |
587 | 3173 |
588 void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) { | 3174 void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) { |
589 UNIMPLEMENTED_MIPS(); | 3175 // Load the argument on the stack and call the runtime function. |
590 } | 3176 ASSERT(args->length() == 1); |
591 | 3177 VisitForStackValue(args->at(0)); |
592 | 3178 __ CallRuntime(Runtime::kMath_sqrt, 1); |
593 void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) { | 3179 context()->Plug(v0); |
594 UNIMPLEMENTED_MIPS(); | |
595 } | 3180 } |
596 | 3181 |
597 | 3182 |
598 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) { | 3183 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) { |
599 UNIMPLEMENTED_MIPS(); | 3184 ASSERT(args->length() >= 2); |
| 3185 |
| 3186 int arg_count = args->length() - 2; // 2 ~ receiver and function. |
| 3187 for (int i = 0; i < arg_count + 1; i++) { |
| 3188 VisitForStackValue(args->at(i)); |
| 3189 } |
| 3190 VisitForAccumulatorValue(args->last()); // Function. |
| 3191 |
| 3192 // InvokeFunction requires the function in a1. Move it in there. |
| 3193 __ mov(a1, result_register()); |
| 3194 ParameterCount count(arg_count); |
| 3195 __ InvokeFunction(a1, count, CALL_FUNCTION); |
| 3196 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3197 context()->Plug(v0); |
600 } | 3198 } |
601 | 3199 |
602 | 3200 |
603 void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) { | 3201 void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) { |
604 UNIMPLEMENTED_MIPS(); | 3202 RegExpConstructResultStub stub; |
| 3203 ASSERT(args->length() == 3); |
| 3204 VisitForStackValue(args->at(0)); |
| 3205 VisitForStackValue(args->at(1)); |
| 3206 VisitForStackValue(args->at(2)); |
| 3207 __ CallStub(&stub); |
| 3208 context()->Plug(v0); |
605 } | 3209 } |
606 | 3210 |
607 | 3211 |
608 void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) { | 3212 void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) { |
609 UNIMPLEMENTED_MIPS(); | 3213 ASSERT(args->length() == 3); |
| 3214 VisitForStackValue(args->at(0)); |
| 3215 VisitForStackValue(args->at(1)); |
| 3216 VisitForStackValue(args->at(2)); |
| 3217 Label done; |
| 3218 Label slow_case; |
| 3219 Register object = a0; |
| 3220 Register index1 = a1; |
| 3221 Register index2 = a2; |
| 3222 Register elements = a3; |
| 3223 Register scratch1 = t0; |
| 3224 Register scratch2 = t1; |
| 3225 |
| 3226 __ lw(object, MemOperand(sp, 2 * kPointerSize)); |
| 3227 // Fetch the map and check if array is in fast case. |
| 3228 // Check that object doesn't require security checks and |
| 3229 // has no indexed interceptor. |
| 3230 __ GetObjectType(object, scratch1, scratch2); |
| 3231 __ Branch(&slow_case, ne, scratch2, Operand(JS_ARRAY_TYPE)); |
| 3232 // Map is now in scratch1. |
| 3233 |
| 3234 __ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset)); |
| 3235 __ And(scratch2, scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); |
| 3236 __ Branch(&slow_case, ne, scratch2, Operand(zero_reg)); |
| 3237 |
| 3238 // Check the object's elements are in fast case and writable. |
| 3239 __ lw(elements, FieldMemOperand(object, JSObject::kElementsOffset)); |
| 3240 __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset)); |
| 3241 __ LoadRoot(scratch2, Heap::kFixedArrayMapRootIndex); |
| 3242 __ Branch(&slow_case, ne, scratch1, Operand(scratch2)); |
| 3243 |
| 3244 // Check that both indices are smis. |
| 3245 __ lw(index1, MemOperand(sp, 1 * kPointerSize)); |
| 3246 __ lw(index2, MemOperand(sp, 0)); |
| 3247 __ JumpIfNotBothSmi(index1, index2, &slow_case); |
| 3248 |
| 3249 // Check that both indices are valid. |
| 3250 Label not_hi; |
| 3251 __ lw(scratch1, FieldMemOperand(object, JSArray::kLengthOffset)); |
| 3252 __ Branch(&slow_case, ls, scratch1, Operand(index1)); |
| 3253 __ Branch(¬_hi, NegateCondition(hi), scratch1, Operand(index1)); |
| 3254 __ Branch(&slow_case, ls, scratch1, Operand(index2)); |
| 3255 __ bind(¬_hi); |
| 3256 |
| 3257 // Bring the address of the elements into index1 and index2. |
| 3258 __ Addu(scratch1, elements, |
| 3259 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3260 __ sll(index1, index1, kPointerSizeLog2 - kSmiTagSize); |
| 3261 __ Addu(index1, scratch1, index1); |
| 3262 __ sll(index2, index2, kPointerSizeLog2 - kSmiTagSize); |
| 3263 __ Addu(index2, scratch1, index2); |
| 3264 |
| 3265 // Swap elements. |
| 3266 __ lw(scratch1, MemOperand(index1, 0)); |
| 3267 __ lw(scratch2, MemOperand(index2, 0)); |
| 3268 __ sw(scratch1, MemOperand(index2, 0)); |
| 3269 __ sw(scratch2, MemOperand(index1, 0)); |
| 3270 |
| 3271 Label new_space; |
| 3272 __ InNewSpace(elements, scratch1, eq, &new_space); |
| 3273 // Possible optimization: do a check that both values are Smis |
| 3274 // (or them and test against Smi mask). |
| 3275 |
| 3276 __ mov(scratch1, elements); |
| 3277 __ RecordWriteHelper(elements, index1, scratch2); |
| 3278 __ RecordWriteHelper(scratch1, index2, scratch2); // scratch1 holds elements. |
| 3279 |
| 3280 __ bind(&new_space); |
| 3281 // We are done. Drop elements from the stack, and return undefined. |
| 3282 __ Drop(3); |
| 3283 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
| 3284 __ jmp(&done); |
| 3285 |
| 3286 __ bind(&slow_case); |
| 3287 __ CallRuntime(Runtime::kSwapElements, 3); |
| 3288 |
| 3289 __ bind(&done); |
| 3290 context()->Plug(v0); |
610 } | 3291 } |
611 | 3292 |
612 | 3293 |
613 void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) { | 3294 void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) { |
614 UNIMPLEMENTED_MIPS(); | 3295 ASSERT_EQ(2, args->length()); |
| 3296 |
| 3297 ASSERT_NE(NULL, args->at(0)->AsLiteral()); |
| 3298 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); |
| 3299 |
| 3300 Handle<FixedArray> jsfunction_result_caches( |
| 3301 isolate()->global_context()->jsfunction_result_caches()); |
| 3302 if (jsfunction_result_caches->length() <= cache_id) { |
| 3303 __ Abort("Attempt to use undefined cache."); |
| 3304 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
| 3305 context()->Plug(v0); |
| 3306 return; |
| 3307 } |
| 3308 |
| 3309 VisitForAccumulatorValue(args->at(1)); |
| 3310 |
| 3311 Register key = v0; |
| 3312 Register cache = a1; |
| 3313 __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX)); |
| 3314 __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset)); |
| 3315 __ lw(cache, |
| 3316 ContextOperand( |
| 3317 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); |
| 3318 __ lw(cache, |
| 3319 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); |
| 3320 |
| 3322 Label done, not_found; |
| 3323 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
| 3324 __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); |
| 3325 // a2 now holds finger offset as a smi. |
| 3326 __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3327 // a3 now points to the start of fixed array elements. |
| 3328 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize); |
| 3329 __ addu(a3, a3, at); |
| 3330 // a3 now points to key of indexed element of cache. |
| 3331 __ lw(a2, MemOperand(a3)); |
| 3332 __ Branch(¬_found, ne, key, Operand(a2)); |
| 3333 |
| 3334 __ lw(v0, MemOperand(a3, kPointerSize)); |
| 3335 __ Branch(&done); |
| 3336 |
| 3337 __ bind(¬_found); |
| 3338 // Call runtime to perform the lookup. |
| 3339 __ Push(cache, key); |
| 3340 __ CallRuntime(Runtime::kGetFromCache, 2); |
| 3341 |
| 3342 __ bind(&done); |
| 3343 context()->Plug(v0); |
615 } | 3344 } |
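The probe above leans on the JSFunctionResultCache layout: a FixedArray whose entries alternate keys and values, with a smi "finger" indexing the most recently hit key. Only the finger's slot is probed inline; a miss falls back to Runtime::kGetFromCache. A sketch with invented names:

    // Sketch: inline single-slot probe of a key/value cache.
    static void* CacheProbe(void** entries, int finger_index, void* key,
                            bool* hit) {
      if (entries[finger_index] == key) {
        *hit = true;
        return entries[finger_index + 1];  // value sits right after its key
      }
      *hit = false;  // caller performs the runtime lookup
      return 0;
    }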
616 | 3345 |
617 | 3346 |
618 void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) { | 3347 void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) { |
619 UNIMPLEMENTED_MIPS(); | 3348 ASSERT_EQ(2, args->length()); |
| 3349 |
| 3350 Register right = v0; |
| 3351 Register left = a1; |
| 3352 Register tmp = a2; |
| 3353 Register tmp2 = a3; |
| 3354 |
| 3355 VisitForStackValue(args->at(0)); |
| 3356 VisitForAccumulatorValue(args->at(1)); // Result (right) in v0. |
| 3357 __ pop(left); |
| 3358 |
| 3359 Label done, fail, ok; |
| 3360 __ Branch(&ok, eq, left, Operand(right)); |
| 3361 // Fail if either is a non-HeapObject. |
| 3362 __ And(tmp, left, Operand(right)); |
| 3363 __ And(at, tmp, Operand(kSmiTagMask)); |
| 3364 __ Branch(&fail, eq, at, Operand(zero_reg)); |
| 3365 __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset)); |
| 3366 __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset)); |
| 3367 __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE)); |
| 3368 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); |
| 3369 __ Branch(&fail, ne, tmp, Operand(tmp2)); |
| 3370 __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset)); |
| 3371 __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset)); |
| 3372 __ Branch(&ok, eq, tmp, Operand(tmp2)); |
| 3373 __ bind(&fail); |
| 3374 __ LoadRoot(v0, Heap::kFalseValueRootIndex); |
| 3375 __ jmp(&done); |
| 3376 __ bind(&ok); |
| 3377 __ LoadRoot(v0, Heap::kTrueValueRootIndex); |
| 3378 __ bind(&done); |
| 3379 |
| 3380 context()->Plug(v0); |
620 } | 3381 } |
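The smi test in the middle uses an AND trick: heap objects have tag bit 1, so left & right has a zero tag bit iff at least one operand is a smi. The predicate as a whole, sketched over hypothetical accessors:

    // Sketch: regexps are equivalent here iff they are the same object, or
    // share both their map and their compiled JSRegExp data.
    struct RegExpLike {
      bool is_smi;
      int instance_type;
      void* map;
      void* data;  // JSRegExp::kDataOffset
    };

    static bool RegExpEquivalent(const RegExpLike* l, const RegExpLike* r,
                                 int js_regexp_type) {
      if (l == r) return true;
      if (l->is_smi || r->is_smi) return false;
      if (l->instance_type != js_regexp_type) return false;
      if (l->map != r->map) return false;
      return l->data == r->data;
    }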
621 | 3382 |
622 | 3383 |
623 void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) { | 3384 void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) { |
624 UNIMPLEMENTED_MIPS(); | 3385 VisitForAccumulatorValue(args->at(0)); |
| 3386 |
| 3387 Label materialize_true, materialize_false; |
| 3388 Label* if_true = NULL; |
| 3389 Label* if_false = NULL; |
| 3390 Label* fall_through = NULL; |
| 3391 context()->PrepareTest(&materialize_true, &materialize_false, |
| 3392 &if_true, &if_false, &fall_through); |
| 3393 |
| 3394 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset)); |
| 3395 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask)); |
| 3396 |
| 3397 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 3398 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through); |
| 3399 |
| 3400 context()->Plug(if_true, if_false); |
625 } | 3401 } |
626 | 3402 |
627 | 3403 |
628 void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) { | 3404 void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) { |
629 UNIMPLEMENTED_MIPS(); | 3405 ASSERT(args->length() == 1); |
| 3406 VisitForAccumulatorValue(args->at(0)); |
| 3407 |
| 3408 if (FLAG_debug_code) { |
| 3409 __ AbortIfNotString(v0); |
| 3410 } |
| 3411 |
| 3412 __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset)); |
| 3413 __ IndexFromHash(v0, v0); |
| 3414 |
| 3415 context()->Plug(v0); |
630 } | 3416 } |
631 | 3417 |
632 | 3418 |
633 void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) { | 3419 void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) { |
634 UNIMPLEMENTED_MIPS(); | 3420 Label bailout, done, one_char_separator, long_separator, |
| 3421 non_trivial_array, not_size_one_array, loop, |
| 3422 empty_separator_loop, one_char_separator_loop, |
| 3423 one_char_separator_loop_entry, long_separator_loop; |
| 3424 |
| 3425 ASSERT(args->length() == 2); |
| 3426 VisitForStackValue(args->at(1)); |
| 3427 VisitForAccumulatorValue(args->at(0)); |
| 3428 |
| 3429 // All aliases of the same register have disjoint lifetimes. |
| 3430 Register array = v0; |
| 3431 Register elements = no_reg; // Will be v0. |
| 3432 Register result = no_reg; // Will be v0. |
| 3433 Register separator = a1; |
| 3434 Register array_length = a2; |
| 3435 Register result_pos = no_reg; // Will be a2. |
| 3436 Register string_length = a3; |
| 3437 Register string = t0; |
| 3438 Register element = t1; |
| 3439 Register elements_end = t2; |
| 3440 Register scratch1 = t3; |
| 3441 Register scratch2 = t5; |
| 3442 Register scratch3 = t4; |
| 3443 Register scratch4 = v1; |
| 3444 |
| 3445 // Separator operand is on the stack. |
| 3446 __ pop(separator); |
| 3447 |
| 3448 // Check that the array is a JSArray. |
| 3449 __ JumpIfSmi(array, &bailout); |
| 3450 __ GetObjectType(array, scratch1, scratch2); |
| 3451 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE)); |
| 3452 |
| 3453 // Check that the array has fast elements. |
| 3454 __ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset)); |
| 3455 __ And(scratch3, scratch2, Operand(1 << Map::kHasFastElements)); |
| 3456 __ Branch(&bailout, eq, scratch3, Operand(zero_reg)); |
| 3457 |
| 3458 // If the array has length zero, return the empty string. |
| 3459 __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); |
| 3460 __ SmiUntag(array_length); |
| 3461 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg)); |
| 3462 __ LoadRoot(v0, Heap::kEmptyStringRootIndex); |
| 3463 __ Branch(&done); |
| 3464 |
| 3465 __ bind(&non_trivial_array); |
| 3466 |
| 3467 // Get the FixedArray containing array's elements. |
| 3468 elements = array; |
| 3469 __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset)); |
| 3470 array = no_reg; // End of array's live range. |
| 3471 |
| 3472 // Check that all array elements are sequential ASCII strings, and |
| 3473 // accumulate the sum of their lengths, as a smi-encoded value. |
| 3474 __ mov(string_length, zero_reg); |
| 3475 __ Addu(element, |
| 3476 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3477 __ sll(elements_end, array_length, kPointerSizeLog2); |
| 3478 __ Addu(elements_end, element, elements_end); |
| 3479 // Loop condition: while (element < elements_end). |
| 3480 // Live values in registers: |
| 3481 // elements: Fixed array of strings. |
| 3482 // array_length: Length of the fixed array of strings (not smi) |
| 3483 // separator: Separator string |
| 3484 // string_length: Accumulated sum of string lengths (smi). |
| 3485 // element: Current array element. |
| 3486 // elements_end: Array end. |
| 3487 if (FLAG_debug_code) { |
| 3488 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin", |
| 3489 array_length, Operand(zero_reg)); |
| 3490 } |
| 3491 __ bind(&loop); |
| 3492 __ lw(string, MemOperand(element)); |
| 3493 __ Addu(element, element, kPointerSize); |
| 3494 __ JumpIfSmi(string, &bailout); |
| 3495 __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 3496 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
| 3497 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); |
| 3498 __ lw(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset)); |
| 3499 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3); |
| 3500 __ BranchOnOverflow(&bailout, scratch3); |
| 3501 __ Branch(&loop, lt, element, Operand(elements_end)); |
| 3502 |
| 3503 // If array_length is 1, return elements[0], a string. |
| 3504 __ Branch(¬_size_one_array, ne, array_length, Operand(1)); |
| 3505 __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize)); |
| 3506 __ Branch(&done); |
| 3507 |
| 3508 __ bind(¬_size_one_array); |
| 3509 |
| 3510 // Live values in registers: |
| 3511 // separator: Separator string |
| 3512 // array_length: Length of the array. |
| 3513 // string_length: Sum of string lengths (smi). |
| 3514 // elements: FixedArray of strings. |
| 3515 |
| 3516 // Check that the separator is a flat ASCII string. |
| 3517 __ JumpIfSmi(separator, &bailout); |
| 3518 __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset)); |
| 3519 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
| 3520 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); |
| 3521 |
| 3522 // Add (separator length times array_length) - separator length to the |
| 3523 // string_length to get the length of the result string. array_length is |
| 3524 // not a smi but the other values are, so the result is a smi. |
| 3525 __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset)); |
| 3526 __ Subu(string_length, string_length, Operand(scratch1)); |
| 3527 __ Mult(array_length, scratch1); |
| 3528 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are |
| 3529 // zero. |
| 3530 __ mfhi(scratch2); |
| 3531 __ Branch(&bailout, ne, scratch2, Operand(zero_reg)); |
| 3532 __ mflo(scratch2); |
| 3533 __ And(scratch3, scratch2, Operand(0x80000000)); |
| 3534 __ Branch(&bailout, ne, scratch3, Operand(zero_reg)); |
| 3535 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3); |
| 3536 __ BranchOnOverflow(&bailout, scratch3); |
| 3537 __ SmiUntag(string_length); |
| 3538 |
| 3539 // Get first element in the array to free up the elements register to be used |
| 3540 // for the result. |
| 3541 __ Addu(element, |
| 3542 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3543 result = elements; // End of live range for elements. |
| 3544 elements = no_reg; |
| 3545 // Live values in registers: |
| 3546 // element: First array element |
| 3547 // separator: Separator string |
| 3548 // string_length: Length of result string (not smi) |
| 3549 // array_length: Length of the array. |
| 3550 __ AllocateAsciiString(result, |
| 3551 string_length, |
| 3552 scratch1, |
| 3553 scratch2, |
| 3554 elements_end, |
| 3555 &bailout); |
| 3556 // Prepare for looping. Set up elements_end to point at the end of the |
| 3557 // array. Set result_pos to the position in the result where the first |
| 3558 // character will be written. |
| 3559 __ sll(elements_end, array_length, kPointerSizeLog2); |
| 3560 __ Addu(elements_end, element, elements_end); |
| 3561 result_pos = array_length; // End of live range for array_length. |
| 3562 array_length = no_reg; |
| 3563 __ Addu(result_pos, |
| 3564 result, |
| 3565 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
| 3566 |
| 3567 // Check the length of the separator. |
| 3568 __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset)); |
| 3569 __ li(at, Operand(Smi::FromInt(1))); |
| 3570 __ Branch(&one_char_separator, eq, scratch1, Operand(at)); |
| 3571 __ Branch(&long_separator, gt, scratch1, Operand(at)); |
| 3572 |
| 3573 // Empty separator case. |
| 3574 __ bind(&empty_separator_loop); |
| 3575 // Live values in registers: |
| 3576 // result_pos: the position to which we are currently copying characters. |
| 3577 // element: Current array element. |
| 3578 // elements_end: Array end. |
| 3579 |
| 3580 // Copy next array element to the result. |
| 3581 __ lw(string, MemOperand(element)); |
| 3582 __ Addu(element, element, kPointerSize); |
| 3583 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); |
| 3584 __ SmiUntag(string_length); |
| 3585 __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag); |
| 3586 __ CopyBytes(string, result_pos, string_length, scratch1); |
| 3587 // End while (element < elements_end). |
| 3588 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end)); |
| 3589 ASSERT(result.is(v0)); |
| 3590 __ Branch(&done); |
| 3591 |
| 3592 // One-character separator case. |
| 3593 __ bind(&one_char_separator); |
| 3594 // Replace separator with its ascii character value. |
| 3595 __ lbu(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize)); |
| 3596 // Jump into the loop after the code that copies the separator, so the first |
| 3597 // element is not preceded by a separator. |
| 3598 __ jmp(&one_char_separator_loop_entry); |
| 3599 |
| 3600 __ bind(&one_char_separator_loop); |
| 3601 // Live values in registers: |
| 3602 // result_pos: the position to which we are currently copying characters. |
| 3603 // element: Current array element. |
| 3604 // elements_end: Array end. |
| 3605 // separator: Single separator ascii char (in lower byte). |
| 3606 |
| 3607 // Copy the separator character to the result. |
| 3608 __ sb(separator, MemOperand(result_pos)); |
| 3609 __ Addu(result_pos, result_pos, 1); |
| 3610 |
| 3611 // Copy next array element to the result. |
| 3612 __ bind(&one_char_separator_loop_entry); |
| 3613 __ lw(string, MemOperand(element)); |
| 3614 __ Addu(element, element, kPointerSize); |
| 3615 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); |
| 3616 __ SmiUntag(string_length); |
| 3617 __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag); |
| 3618 __ CopyBytes(string, result_pos, string_length, scratch1); |
| 3619 // End while (element < elements_end). |
| 3620 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end)); |
| 3621 ASSERT(result.is(v0)); |
| 3622 __ Branch(&done); |
| 3623 |
| 3624 // Long separator case (separator is more than one character). Entry is at the |
| 3625 // label long_separator below. |
| 3626 __ bind(&long_separator_loop); |
| 3627 // Live values in registers: |
| 3628 // result_pos: the position to which we are currently copying characters. |
| 3629 // element: Current array element. |
| 3630 // elements_end: Array end. |
| 3631 // separator: Separator string. |
| 3632 |
| 3633 // Copy the separator to the result. |
| 3634 __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset)); |
| 3635 __ SmiUntag(string_length); |
| 3636 __ Addu(string, |
| 3637 separator, |
| 3638 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
| 3639 __ CopyBytes(string, result_pos, string_length, scratch1); |
| 3640 |
| 3641 __ bind(&long_separator); |
| 3642 __ lw(string, MemOperand(element)); |
| 3643 __ Addu(element, element, kPointerSize); |
| 3644 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); |
| 3645 __ SmiUntag(string_length); |
| 3646 __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag); |
| 3647 __ CopyBytes(string, result_pos, string_length, scratch1); |
| 3648 // End while (element < elements_end). |
| 3649 __ Branch(&long_separator_loop, lt, element, Operand(elements_end)); |
| 3650 ASSERT(result.is(v0)); |
| 3651 __ Branch(&done); |
| 3652 |
| 3653 __ bind(&bailout); |
| 3654 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
| 3655 __ bind(&done); |
| 3656 context()->Plug(v0); |
635 } | 3657 } |
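The separator accounting in the middle of this function is plain arithmetic: joining n elements inserts n - 1 separators, computed as sum - sep_len + n * sep_len with the multiply checked for smi overflow via mfhi/mflo. A worked sketch:

    // Result length = sum of element lengths + (n - 1) * separator length.
    // E.g. n = 3 elements "a", "bb", "ccc" (sum 6) and separator ", " (2):
    // 6 - 2 + 3 * 2 = 10 == length of "a, bb, ccc".
    static int JoinedLength(int sum_of_lengths, int n, int separator_length) {
      return sum_of_lengths - separator_length + n * separator_length;
    }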
636 | 3658 |
637 | 3659 |
638 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { | 3660 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { |
639 UNIMPLEMENTED_MIPS(); | 3661 Handle<String> name = expr->name(); |
| 3662 if (name->length() > 0 && name->Get(0) == '_') { |
| 3663 Comment cmnt(masm_, "[ InlineRuntimeCall"); |
| 3664 EmitInlineRuntimeCall(expr); |
| 3665 return; |
| 3666 } |
| 3667 |
| 3668 Comment cmnt(masm_, "[ CallRuntime"); |
| 3669 ZoneList<Expression*>* args = expr->arguments(); |
| 3670 |
| 3671 if (expr->is_jsruntime()) { |
| 3672 // Prepare for calling JS runtime function. |
| 3673 __ lw(a0, GlobalObjectOperand()); |
| 3674 __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset)); |
| 3675 __ push(a0); |
| 3676 } |
| 3677 |
| 3678 // Push the arguments ("left-to-right"). |
| 3679 int arg_count = args->length(); |
| 3680 for (int i = 0; i < arg_count; i++) { |
| 3681 VisitForStackValue(args->at(i)); |
| 3682 } |
| 3683 |
| 3684 if (expr->is_jsruntime()) { |
| 3685 // Call the JS runtime function. |
| 3686 __ li(a2, Operand(expr->name())); |
| 3687 Handle<Code> ic = |
| 3688 isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP); |
| 3689 EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); |
| 3690 // Restore context register. |
| 3691 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3692 } else { |
| 3693 // Call the C runtime function. |
| 3694 __ CallRuntime(expr->function(), arg_count); |
| 3695 } |
| 3696 context()->Plug(v0); |
640 } | 3697 } |
641 | 3698 |
642 | 3699 |
643 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 3700 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
644 UNIMPLEMENTED_MIPS(); | 3701 switch (expr->op()) { |
| 3702 case Token::DELETE: { |
| 3703 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
| 3704 Property* prop = expr->expression()->AsProperty(); |
| 3705 Variable* var = expr->expression()->AsVariableProxy()->AsVariable(); |
| 3706 |
| 3707 if (prop != NULL) { |
| 3708 if (prop->is_synthetic()) { |
| 3709 // Result of deleting parameters is false, even when they rewrite |
| 3710 // to accesses on the arguments object. |
| 3711 context()->Plug(false); |
| 3712 } else { |
| 3713 VisitForStackValue(prop->obj()); |
| 3714 VisitForStackValue(prop->key()); |
| 3715 __ li(a1, Operand(Smi::FromInt(strict_mode_flag()))); |
| 3716 __ push(a1); |
| 3717 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
| 3718 context()->Plug(v0); |
| 3719 } |
| 3720 } else if (var != NULL) { |
| 3721 // Deleting an unqualified identifier is disallowed in strict mode, |
| 3722 // but "delete this" is allowed. |
| 3723 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this()); |
| 3724 if (var->is_global()) { |
| 3725 __ lw(a2, GlobalObjectOperand()); |
| 3726 __ li(a1, Operand(var->name())); |
| 3727 __ li(a0, Operand(Smi::FromInt(kNonStrictMode))); |
| 3728 __ Push(a2, a1, a0); |
| 3729 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); |
| 3730 context()->Plug(v0); |
| 3731 } else if (var->AsSlot() != NULL && |
| 3732 var->AsSlot()->type() != Slot::LOOKUP) { |
| 3733 // Result of deleting non-global, non-dynamic variables is false. |
| 3734 // The subexpression does not have side effects. |
| 3735 context()->Plug(false); |
| 3736 } else { |
| 3737 // Non-global variable. Call the runtime to try to delete from the |
| 3738 // context where the variable was introduced. |
| 3739 __ push(context_register()); |
| 3740 __ li(a2, Operand(var->name())); |
| 3741 __ push(a2); |
| 3742 __ CallRuntime(Runtime::kDeleteContextSlot, 2); |
| 3743 context()->Plug(v0); |
| 3744 } |
| 3745 } else { |
| 3746 // Result of deleting non-property, non-variable reference is true. |
| 3747 // The subexpression may have side effects. |
| 3748 VisitForEffect(expr->expression()); |
| 3749 context()->Plug(true); |
| 3750 } |
| 3751 break; |
| 3752 } |
| 3753 |
| 3754 case Token::VOID: { |
| 3755 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); |
| 3756 VisitForEffect(expr->expression()); |
| 3757 context()->Plug(Heap::kUndefinedValueRootIndex); |
| 3758 break; |
| 3759 } |
| 3760 |
| 3761 case Token::NOT: { |
| 3762 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); |
| 3763 if (context()->IsEffect()) { |
| 3764 // Unary NOT has no side effects so it's only necessary to visit the |
| 3765 // subexpression. Match the optimizing compiler by not branching. |
| 3766 VisitForEffect(expr->expression()); |
| 3767 } else { |
| 3768 Label materialize_true, materialize_false; |
| 3769 Label* if_true = NULL; |
| 3770 Label* if_false = NULL; |
| 3771 Label* fall_through = NULL; |
| 3772 |
| 3773 // Notice that the labels are swapped. |
| 3774 context()->PrepareTest(&materialize_true, &materialize_false, |
| 3775 &if_false, &if_true, &fall_through); |
| 3776 if (context()->IsTest()) ForwardBailoutToChild(expr); |
| 3777 VisitForControl(expr->expression(), if_true, if_false, fall_through); |
| 3778 context()->Plug(if_false, if_true); // Labels swapped. |
| 3779 } |
| 3780 break; |
| 3781 } |
| 3782 |
| 3783 case Token::TYPEOF: { |
| 3784 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); |
| 3785 { StackValueContext context(this); |
| 3786 VisitForTypeofValue(expr->expression()); |
| 3787 } |
| 3788 __ CallRuntime(Runtime::kTypeof, 1); |
| 3789 context()->Plug(v0); |
| 3790 break; |
| 3791 } |
| 3792 |
| 3793 case Token::ADD: { |
| 3794 Comment cmt(masm_, "[ UnaryOperation (ADD)"); |
| 3795 VisitForAccumulatorValue(expr->expression()); |
| 3796 Label no_conversion; |
| 3797 __ JumpIfSmi(result_register(), &no_conversion); |
| 3798 __ mov(a0, result_register()); |
| 3799 ToNumberStub convert_stub; |
| 3800 __ CallStub(&convert_stub); |
| 3801 __ bind(&no_conversion); |
| 3802 context()->Plug(result_register()); |
| 3803 break; |
| 3804 } |
| 3805 |
| 3806 case Token::SUB: |
| 3807 EmitUnaryOperation(expr, "[ UnaryOperation (SUB)"); |
| 3808 break; |
| 3809 |
| 3810 case Token::BIT_NOT: |
| 3811 EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)"); |
| 3812 break; |
| 3813 |
| 3814 default: |
| 3815 UNREACHABLE(); |
| 3816 } |
| 3817 } |
| 3818 |
| 3819 |
| 3820 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr, |
| 3821 const char* comment) { |
| 3822 // TODO(svenpanne): Allowing format strings in Comment would be nice here... |
| 3823 Comment cmt(masm_, comment); |
| 3824 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); |
| 3825 UnaryOverwriteMode overwrite = |
| 3826 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; |
| 3827 TypeRecordingUnaryOpStub stub(expr->op(), overwrite); |
| 3828 // TypeRecordingUnaryOpStub expects the argument to be in a0. |
| 3829 VisitForAccumulatorValue(expr->expression()); |
| 3830 SetSourcePosition(expr->position()); |
| 3831 __ mov(a0, result_register()); |
| 3832 EmitCallIC(stub.GetCode(), NULL, expr->id()); |
| 3833 context()->Plug(v0); |
645 } | 3834 } |
646 | 3835 |
647 | 3836 |
648 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { | 3837 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
649 UNIMPLEMENTED_MIPS(); | 3838 Comment cmnt(masm_, "[ CountOperation"); |
| 3839 SetSourcePosition(expr->position()); |
| 3840 |
| 3841 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' |
| 3842 // as the left-hand side. |
| 3843 if (!expr->expression()->IsValidLeftHandSide()) { |
| 3844 VisitForEffect(expr->expression()); |
| 3845 return; |
| 3846 } |
| 3847 |
| 3848 // Expression can only be a property, a global or a (parameter or local) |
| 3849 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY. |
| 3850 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; |
| 3851 LhsKind assign_type = VARIABLE; |
| 3852 Property* prop = expr->expression()->AsProperty(); |
| 3853 // In case of a property we use the uninitialized expression context |
| 3854 // of the key to detect a named property. |
| 3855 if (prop != NULL) { |
| 3856 assign_type = |
| 3857 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY; |
| 3858 } |
| 3859 |
| 3860 // Evaluate expression and get value. |
| 3861 if (assign_type == VARIABLE) { |
| 3862 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL); |
| 3863 AccumulatorValueContext context(this); |
| 3864 EmitVariableLoad(expr->expression()->AsVariableProxy()->var()); |
| 3865 } else { |
| 3866 // Reserve space for result of postfix operation. |
| 3867 if (expr->is_postfix() && !context()->IsEffect()) { |
| 3868 __ li(at, Operand(Smi::FromInt(0))); |
| 3869 __ push(at); |
| 3870 } |
| 3871 if (assign_type == NAMED_PROPERTY) { |
| 3872 // Put the object both on the stack and in the accumulator. |
| 3873 VisitForAccumulatorValue(prop->obj()); |
| 3874 __ push(v0); |
| 3875 EmitNamedPropertyLoad(prop); |
| 3876 } else { |
| 3877 if (prop->is_arguments_access()) { |
| 3878 VariableProxy* obj_proxy = prop->obj()->AsVariableProxy(); |
| 3879 __ lw(v0, EmitSlotSearch(obj_proxy->var()->AsSlot(), v0)); |
| 3880 __ push(v0); |
| 3881 __ li(v0, Operand(prop->key()->AsLiteral()->handle())); |
| 3882 } else { |
| 3883 VisitForStackValue(prop->obj()); |
| 3884 VisitForAccumulatorValue(prop->key()); |
| 3885 } |
| 3886 __ lw(a1, MemOperand(sp, 0)); |
| 3887 __ push(v0); |
| 3888 EmitKeyedPropertyLoad(prop); |
| 3889 } |
| 3890 } |
| 3891 |
| 3892 // We need a second deoptimization point after loading the value |
| 3893 // in case evaluating the property load may have a side effect. |
| 3894 if (assign_type == VARIABLE) { |
| 3895 PrepareForBailout(expr->expression(), TOS_REG); |
| 3896 } else { |
| 3897 PrepareForBailoutForId(expr->CountId(), TOS_REG); |
| 3898 } |
| 3899 |
| 3900 // Call ToNumber only if operand is not a smi. |
| 3901 Label no_conversion; |
| 3902 __ JumpIfSmi(v0, &no_conversion); |
| 3903 __ mov(a0, v0); |
| 3904 ToNumberStub convert_stub; |
| 3905 __ CallStub(&convert_stub); |
| 3906 __ bind(&no_conversion); |
| 3907 |
| 3908 // Save result for postfix expressions. |
| 3909 if (expr->is_postfix()) { |
| 3910 if (!context()->IsEffect()) { |
| 3911 // Save the result on the stack. If we have a named or keyed property |
| 3912 // we store the result under the receiver that is currently on top |
| 3913 // of the stack. |
| 3914 switch (assign_type) { |
| 3915 case VARIABLE: |
| 3916 __ push(v0); |
| 3917 break; |
| 3918 case NAMED_PROPERTY: |
| 3919 __ sw(v0, MemOperand(sp, kPointerSize)); |
| 3920 break; |
| 3921 case KEYED_PROPERTY: |
| 3922 __ sw(v0, MemOperand(sp, 2 * kPointerSize)); |
| 3923 break; |
| 3924 } |
| 3925 } |
| 3926 } |
| 3927 __ mov(a0, result_register()); |
| 3928 |
| 3929 // Inline smi case if we are in a loop. |
| 3930 Label stub_call, done; |
| 3931 JumpPatchSite patch_site(masm_); |
| 3932 |
| 3933 int count_value = expr->op() == Token::INC ? 1 : -1; |
| 3934 __ li(a1, Operand(Smi::FromInt(count_value))); |
| 3935 |
| 3936 if (ShouldInlineSmiCase(expr->op())) { |
| 3937 __ AdduAndCheckForOverflow(v0, a0, a1, t0); |
| 3938 __ BranchOnOverflow(&stub_call, t0); // Do stub on overflow. |
| 3939 |
| 3940 // We could eliminate this smi check if we split the code at |
| 3941 // the first smi check before calling ToNumber. |
| 3942 patch_site.EmitJumpIfSmi(v0, &done); |
| 3943 __ bind(&stub_call); |
| 3944 } |
| 3945 |
| 3946 // Record position before stub call. |
| 3947 SetSourcePosition(expr->position()); |
| 3948 |
| 3949 TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE); |
| 3950 EmitCallIC(stub.GetCode(), &patch_site, expr->CountId()); |
| 3951 __ bind(&done); |
| 3952 |
| 3953 // Store the value returned in v0. |
| 3954 switch (assign_type) { |
| 3955 case VARIABLE: |
| 3956 if (expr->is_postfix()) { |
| 3957 { EffectContext context(this); |
| 3958 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3959 Token::ASSIGN); |
| 3960 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3961 context.Plug(v0); |
| 3962 } |
| 3963 // For all contexts except EffectContext we have the result on |
| 3964 // top of the stack. |
| 3965 if (!context()->IsEffect()) { |
| 3966 context()->PlugTOS(); |
| 3967 } |
| 3968 } else { |
| 3969 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3970 Token::ASSIGN); |
| 3971 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3972 context()->Plug(v0); |
| 3973 } |
| 3974 break; |
| 3975 case NAMED_PROPERTY: { |
| 3976 __ mov(a0, result_register()); // Value. |
| 3977 __ li(a2, Operand(prop->key()->AsLiteral()->handle())); // Name. |
| 3978 __ pop(a1); // Receiver. |
| 3979 Handle<Code> ic = is_strict_mode() |
| 3980 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 3981 : isolate()->builtins()->StoreIC_Initialize(); |
| 3982 EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); |
| 3983 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3984 if (expr->is_postfix()) { |
| 3985 if (!context()->IsEffect()) { |
| 3986 context()->PlugTOS(); |
| 3987 } |
| 3988 } else { |
| 3989 context()->Plug(v0); |
| 3990 } |
| 3991 break; |
| 3992 } |
| 3993 case KEYED_PROPERTY: { |
| 3994 __ mov(a0, result_register()); // Value. |
| 3995 __ pop(a1); // Key. |
| 3996 __ pop(a2); // Receiver. |
| 3997 Handle<Code> ic = is_strict_mode() |
| 3998 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 3999 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 4000 EmitCallIC(ic, RelocInfo::CODE_TARGET_WITH_ID, expr->id()); |
| 4001 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 4002 if (expr->is_postfix()) { |
| 4003 if (!context()->IsEffect()) { |
| 4004 context()->PlugTOS(); |
| 4005 } |
| 4006 } else { |
| 4007 context()->Plug(v0); |
| 4008 } |
| 4009 break; |
| 4010 } |
| 4011 } |
650 } | 4012 } |
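
The inline fast path above leans on the 32-bit smi representation: kSmiTag is 0 and kSmiTagSize is 1, so Smi::FromInt(count_value) is just count_value shifted left by one bit, and two tagged smis can be added as raw machine words. A stand-alone sketch of that arithmetic and of an overflow test equivalent to the one behind AdduAndCheckForOverflow (toy helpers, not V8's macros):

    #include <cassert>
    #include <cstdint>

    // Toy model of 32-bit smi tagging: 31-bit payload, tag bit 0.
    inline int32_t SmiFromInt(int32_t v) { return v << 1; }
    inline int32_t SmiToInt(int32_t s)   { return s >> 1; }

    // Tagged add with a signed-overflow test: overflow happened iff both
    // operands have the sign the result lacks. Returns false exactly where
    // the generated code would branch to the stub.
    inline bool SmiAddNoOverflow(int32_t a, int32_t b, int32_t* out) {
      int32_t sum = static_cast<int32_t>(
          static_cast<uint32_t>(a) + static_cast<uint32_t>(b));
      if (((a ^ sum) & (b ^ sum)) < 0) return false;  // overflow
      *out = sum;
      return true;
    }

    int main() {
      int32_t r;
      assert(SmiAddNoOverflow(SmiFromInt(41), SmiFromInt(1), &r));
      assert(SmiToInt(r) == 42);
      // 2^30 - 1 is the largest smi; incrementing it must take the stub.
      assert(!SmiAddNoOverflow(SmiFromInt(0x3FFFFFFF), SmiFromInt(1), &r));
    }
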
651 | 4013 |
652 | 4014 |
653 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { | 4015 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { |
654 UNIMPLEMENTED_MIPS(); | 4016 VariableProxy* proxy = expr->AsVariableProxy(); |
655 } | 4017 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) { |
656 | 4018 Comment cmnt(masm_, "Global variable"); |
657 | 4019 __ lw(a0, GlobalObjectOperand()); |
| 4020 __ li(a2, Operand(proxy->name())); |
| 4021 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 4022 // Use a regular load, not a contextual load, to avoid a reference |
| 4023 // error. |
| 4024 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber); |
| 4025 PrepareForBailout(expr, TOS_REG); |
| 4026 context()->Plug(v0); |
| 4027 } else if (proxy != NULL && |
| 4028 proxy->var()->AsSlot() != NULL && |
| 4029 proxy->var()->AsSlot()->type() == Slot::LOOKUP) { |
| 4030 Label done, slow; |
| 4031 |
| 4032 // Generate code for loading from variables potentially shadowed |
| 4033 // by eval-introduced variables. |
| 4034 Slot* slot = proxy->var()->AsSlot(); |
| 4035 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done); |
| 4036 |
| 4037 __ bind(&slow); |
| 4038 __ li(a0, Operand(proxy->name())); |
| 4039 __ Push(cp, a0); |
| 4040 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
| 4041 PrepareForBailout(expr, TOS_REG); |
| 4042 __ bind(&done); |
| 4043 |
| 4044 context()->Plug(v0); |
| 4045 } else { |
| 4046 // This expression cannot throw a reference error at the top level. |
| 4047 context()->HandleExpression(expr); |
| 4048 } |
| 4049 } |
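
Both slow paths deliberately avoid a throwing lookup because typeof x must evaluate to "undefined" when x is not declared, while a plain read of x must raise a ReferenceError. A toy illustration of the two lookup modes (plain C++ containers, nothing here is V8 API):

    #include <cassert>
    #include <map>
    #include <optional>
    #include <stdexcept>
    #include <string>

    // Toy global scope standing in for the real global object.
    static std::map<std::string, int> globals = {{"answer", 42}};

    // Lookup used under 'typeof': a missing binding is reported as empty,
    // so the caller can produce "undefined" rather than an error.
    std::optional<int> LookupNoReferenceError(const std::string& name) {
      auto it = globals.find(name);
      if (it == globals.end()) return std::nullopt;
      return it->second;
    }

    // Lookup used for an ordinary variable read: a missing binding throws.
    int LookupOrThrow(const std::string& name) {
      auto it = globals.find(name);
      if (it == globals.end())
        throw std::runtime_error("ReferenceError: " + name + " is not defined");
      return it->second;
    }

    int main() {
      assert(!LookupNoReferenceError("undeclared"));  // typeof -> "undefined"
      assert(LookupOrThrow("answer") == 42);          // plain read succeeds
    }
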
| 4050 |
| 4051 |
658 bool FullCodeGenerator::TryLiteralCompare(Token::Value op, | 4052 bool FullCodeGenerator::TryLiteralCompare(Token::Value op, |
659 Expression* left, | 4053 Expression* left, |
660 Expression* right, | 4054 Expression* right, |
661 Label* if_true, | 4055 Label* if_true, |
662 Label* if_false, | 4056 Label* if_false, |
663 Label* fall_through) { | 4057 Label* fall_through) { |
664 UNIMPLEMENTED_MIPS(); | 4058 if (op != Token::EQ && op != Token::EQ_STRICT) return false; |
665 return false; | 4059 |
| 4060 // Check for the pattern: typeof <expression> == <string literal>. |
| 4061 Literal* right_literal = right->AsLiteral(); |
| 4062 if (right_literal == NULL) return false; |
| 4063 Handle<Object> right_literal_value = right_literal->handle(); |
| 4064 if (!right_literal_value->IsString()) return false; |
| 4065 UnaryOperation* left_unary = left->AsUnaryOperation(); |
| 4066 if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false; |
| 4067 Handle<String> check = Handle<String>::cast(right_literal_value); |
| 4068 |
| 4069 { AccumulatorValueContext context(this); |
| 4070 VisitForTypeofValue(left_unary->expression()); |
| 4071 } |
| 4072 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 4073 |
| 4074 if (check->Equals(isolate()->heap()->number_symbol())) { |
| 4075 __ JumpIfSmi(v0, if_true); |
| 4076 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 4077 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 4078 Split(eq, v0, Operand(at), if_true, if_false, fall_through); |
| 4079 } else if (check->Equals(isolate()->heap()->string_symbol())) { |
| 4080 __ JumpIfSmi(v0, if_false); |
| 4081 // Check for undetectable objects => false. |
| 4082 __ GetObjectType(v0, v0, a1); |
| 4083 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE)); |
| 4084 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); |
| 4085 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
| 4086 Split(eq, a1, Operand(zero_reg), |
| 4087 if_true, if_false, fall_through); |
| 4088 } else if (check->Equals(isolate()->heap()->boolean_symbol())) { |
| 4089 __ LoadRoot(at, Heap::kTrueValueRootIndex); |
| 4090 __ Branch(if_true, eq, v0, Operand(at)); |
| 4091 __ LoadRoot(at, Heap::kFalseValueRootIndex); |
| 4092 Split(eq, v0, Operand(at), if_true, if_false, fall_through); |
| 4093 } else if (check->Equals(isolate()->heap()->undefined_symbol())) { |
| 4094 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 4095 __ Branch(if_true, eq, v0, Operand(at)); |
| 4096 __ JumpIfSmi(v0, if_false); |
| 4097 // Check for undetectable objects => true. |
| 4098 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 4099 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); |
| 4100 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
| 4101 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); |
| 4102 } else if (check->Equals(isolate()->heap()->function_symbol())) { |
| 4103 __ JumpIfSmi(v0, if_false); |
| 4104 __ GetObjectType(v0, a1, v0); // Leave map in a1. |
| 4105 Split(ge, v0, Operand(FIRST_FUNCTION_CLASS_TYPE), |
| 4106 if_true, if_false, fall_through); |
| 4107 |
| 4108 } else if (check->Equals(isolate()->heap()->object_symbol())) { |
| 4109 __ JumpIfSmi(v0, if_false); |
| 4110 __ LoadRoot(at, Heap::kNullValueRootIndex); |
| 4111 __ Branch(if_true, eq, v0, Operand(at)); |
| 4112 // Check for JS objects => true. |
| 4113 __ GetObjectType(v0, v0, a1); |
| 4114 __ Branch(if_false, lo, a1, Operand(FIRST_JS_OBJECT_TYPE)); |
| 4115 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset)); |
| 4116 __ Branch(if_false, hs, a1, Operand(FIRST_FUNCTION_CLASS_TYPE)); |
| 4117 // Check for undetectable objects => false. |
| 4118 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); |
| 4119 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
| 4120 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through); |
| 4121 } else { |
| 4122 if (if_false != fall_through) __ jmp(if_false); |
| 4123 } |
| 4124 |
| 4125 return true; |
666 } | 4126 } |
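
Stripped of the map and bit-field probing, the branches above amount to a classification from a value's type, plus the undetectable bit, to the typeof string. A sketch of that classification with invented tags rather than real instance types:

    #include <cassert>
    #include <string>

    // Invented tags standing in for the smi/instance-type checks; this is
    // a sketch of the classification only, not V8's object model.
    enum class Kind { kSmi, kHeapNumber, kString, kBoolean, kUndefined,
                      kNull, kFunction, kJSObject };

    std::string TypeofResult(Kind kind, bool undetectable) {
      switch (kind) {
        case Kind::kSmi:
        case Kind::kHeapNumber: return "number";
        case Kind::kString:     return undetectable ? "undefined" : "string";
        case Kind::kBoolean:    return "boolean";
        case Kind::kUndefined:  return "undefined";
        case Kind::kFunction:   return "function";
        case Kind::kNull:       return "object";  // the classic JS quirk
        case Kind::kJSObject:   return undetectable ? "undefined" : "object";
      }
      return "undefined";  // unreachable; keeps the compiler happy
    }

    int main() {
      assert(TypeofResult(Kind::kNull, false) == "object");
      assert(TypeofResult(Kind::kJSObject, true) == "undefined");
    }
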
667 | 4127 |
668 | 4128 |
669 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { | 4129 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
670 UNIMPLEMENTED_MIPS(); | 4130 Comment cmnt(masm_, "[ CompareOperation"); |
| 4131 SetSourcePosition(expr->position()); |
| 4132 |
| 4133 // Always perform the comparison for its control flow. Pack the result |
| 4134 // into the expression's context after the comparison is performed. |
| 4135 |
| 4136 Label materialize_true, materialize_false; |
| 4137 Label* if_true = NULL; |
| 4138 Label* if_false = NULL; |
| 4139 Label* fall_through = NULL; |
| 4140 context()->PrepareTest(&materialize_true, &materialize_false, |
| 4141 &if_true, &if_false, &fall_through); |
| 4142 |
| 4143 // First we try a fast inlined version of the compare when one of |
| 4144 // the operands is a literal. |
| 4145 Token::Value op = expr->op(); |
| 4146 Expression* left = expr->left(); |
| 4147 Expression* right = expr->right(); |
| 4148 if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) { |
| 4149 context()->Plug(if_true, if_false); |
| 4150 return; |
| 4151 } |
| 4152 |
| 4153 VisitForStackValue(expr->left()); |
| 4154 switch (op) { |
| 4155 case Token::IN: |
| 4156 VisitForStackValue(expr->right()); |
| 4157 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); |
| 4158 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
| 4159 __ LoadRoot(t0, Heap::kTrueValueRootIndex); |
| 4160 Split(eq, v0, Operand(t0), if_true, if_false, fall_through); |
| 4161 break; |
| 4162 |
| 4163 case Token::INSTANCEOF: { |
| 4164 VisitForStackValue(expr->right()); |
| 4165 InstanceofStub stub(InstanceofStub::kNoFlags); |
| 4166 __ CallStub(&stub); |
| 4167 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 4168 // The stub returns 0 for true. |
| 4169 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through); |
| 4170 break; |
| 4171 } |
| 4172 |
| 4173 default: { |
| 4174 VisitForAccumulatorValue(expr->right()); |
| 4175 Condition cc = eq; |
| 4176 bool strict = false; |
| 4177 switch (op) { |
| 4178 case Token::EQ_STRICT: |
| 4179 strict = true; |
| 4180 // Fall through. |
| 4181 case Token::EQ: |
| 4182 cc = eq; |
| 4183 __ mov(a0, result_register()); |
| 4184 __ pop(a1); |
| 4185 break; |
| 4186 case Token::LT: |
| 4187 cc = lt; |
| 4188 __ mov(a0, result_register()); |
| 4189 __ pop(a1); |
| 4190 break; |
| 4191 case Token::GT: |
| 4192 // Reverse left and right sides to obtain ECMA-262 conversion order. |
| 4193 cc = lt; |
| 4194 __ mov(a1, result_register()); |
| 4195 __ pop(a0); |
| 4196 break; |
| 4197 case Token::LTE: |
| 4198 // Reverse left and right sides to obtain ECMA-262 conversion order. |
| 4199 cc = ge; |
| 4200 __ mov(a1, result_register()); |
| 4201 __ pop(a0); |
| 4202 break; |
| 4203 case Token::GTE: |
| 4204 cc = ge; |
| 4205 __ mov(a0, result_register()); |
| 4206 __ pop(a1); |
| 4207 break; |
| 4208 case Token::IN: |
| 4209 case Token::INSTANCEOF: |
| 4210 default: |
| 4211 UNREACHABLE(); |
| 4212 } |
| 4213 |
| 4214 bool inline_smi_code = ShouldInlineSmiCase(op); |
| 4215 JumpPatchSite patch_site(masm_); |
| 4216 if (inline_smi_code) { |
| 4217 Label slow_case; |
| 4218 __ Or(a2, a0, Operand(a1)); |
| 4219 patch_site.EmitJumpIfNotSmi(a2, &slow_case); |
| 4220 Split(cc, a1, Operand(a0), if_true, if_false, NULL); |
| 4221 __ bind(&slow_case); |
| 4222 } |
| 4223 // Record position and call the compare IC. |
| 4224 SetSourcePosition(expr->position()); |
| 4225 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 4226 EmitCallIC(ic, &patch_site, expr->id()); |
| 4227 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 4228 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through); |
| 4229 } |
| 4230 } |
| 4231 |
| 4232 // Convert the result of the comparison into one expected for this |
| 4233 // expression's context. |
| 4234 context()->Plug(if_true, if_false); |
671 } | 4235 } |
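
Note how the GT and LTE cases reuse the lt/ge conditions by swapping which register receives the popped left operand: a > b is tested as b < a, and a <= b as b >= a, so operands are still evaluated and converted left to right while the compare machinery only ever sees lt and ge. The identities also hold for NaN, where every relational comparison is false. A quick numbers-only check (the real stub also handles non-number operands):

    #include <cassert>
    #include <cmath>

    // Numbers-only demonstration of the operand-swap identities used above.
    static bool lt(double a, double b) { return a < b; }
    static bool ge(double a, double b) { return a >= b; }

    int main() {
      const double vals[] = {-1.0, 0.0, 2.5, std::nan("")};
      for (double a : vals) {
        for (double b : vals) {
          assert((a > b)  == lt(b, a));   // GT  compiled as swapped lt
          assert((a <= b) == ge(b, a));   // LTE compiled as swapped ge
        }
      }
      return 0;
    }
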
672 | 4236 |
673 | 4237 |
674 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { | 4238 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { |
675 UNIMPLEMENTED_MIPS(); | 4239 Comment cmnt(masm_, "[ CompareToNull"); |
| 4240 Label materialize_true, materialize_false; |
| 4241 Label* if_true = NULL; |
| 4242 Label* if_false = NULL; |
| 4243 Label* fall_through = NULL; |
| 4244 context()->PrepareTest(&materialize_true, &materialize_false, |
| 4245 &if_true, &if_false, &fall_through); |
| 4246 |
| 4247 VisitForAccumulatorValue(expr->expression()); |
| 4248 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 4249 __ mov(a0, result_register()); |
| 4250 __ LoadRoot(a1, Heap::kNullValueRootIndex); |
| 4251 if (expr->is_strict()) { |
| 4252 Split(eq, a0, Operand(a1), if_true, if_false, fall_through); |
| 4253 } else { |
| 4254 __ Branch(if_true, eq, a0, Operand(a1)); |
| 4255 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); |
| 4256 __ Branch(if_true, eq, a0, Operand(a1)); |
| 4257 __ And(at, a0, Operand(kSmiTagMask)); |
| 4258 __ Branch(if_false, eq, at, Operand(zero_reg)); |
| 4259 // It can be an undetectable object. |
| 4260 __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset)); |
| 4261 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); |
| 4262 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
| 4263 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); |
| 4264 } |
| 4265 context()->Plug(if_true, if_false); |
676 } | 4266 } |
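
The two modes implement JavaScript's null comparisons: x === null is true only for null itself, while x == null is also true for undefined and for undetectable objects, but never for a smi. A toy classifier over invented flags (not the real object model):

    #include <cassert>

    // Invented flags standing in for the root and map checks emitted above.
    struct Value { bool is_null, is_undefined, is_smi, is_undetectable; };

    bool CompareToNull(const Value& v, bool strict) {
      if (v.is_null) return true;
      if (strict) return false;          // '=== null' accepts nothing else
      if (v.is_undefined) return true;   // '== null' also matches undefined
      if (v.is_smi) return false;        // smis are never null-ish
      return v.is_undetectable;          // e.g. document.all
    }

    int main() {
      assert(CompareToNull({true, false, false, false}, true));    // null, strict
      assert(!CompareToNull({false, true, false, false}, true));   // undefined, strict
      assert(CompareToNull({false, true, false, false}, false));   // undefined, sloppy
      assert(CompareToNull({false, false, false, true}, false));   // undetectable
    }
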
677 | 4267 |
678 | 4268 |
679 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { | 4269 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { |
680 UNIMPLEMENTED_MIPS(); | 4270 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 4271 context()->Plug(v0); |
681 } | 4272 } |
682 | 4273 |
683 | 4274 |
684 Register FullCodeGenerator::result_register() { | 4275 Register FullCodeGenerator::result_register() { |
685 UNIMPLEMENTED_MIPS(); | |
686 return v0; | 4276 return v0; |
687 } | 4277 } |
688 | 4278 |
689 | 4279 |
690 Register FullCodeGenerator::context_register() { | 4280 Register FullCodeGenerator::context_register() { |
691 UNIMPLEMENTED_MIPS(); | |
692 return cp; | 4281 return cp; |
693 } | 4282 } |
694 | 4283 |
695 | 4284 |
696 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, | 4285 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, |
697 RelocInfo::Mode mode, | 4286 RelocInfo::Mode mode, |
698 unsigned ast_id) { | 4287 unsigned ast_id) { |
699 UNIMPLEMENTED_MIPS(); | 4288 ASSERT(mode == RelocInfo::CODE_TARGET || |
| 4289 mode == RelocInfo::CODE_TARGET_CONTEXT || |
| 4290 mode == RelocInfo::CODE_TARGET_WITH_ID); |
| 4291 Counters* counters = isolate()->counters(); |
| 4292 switch (ic->kind()) { |
| 4293 case Code::LOAD_IC: |
| 4294 __ IncrementCounter(counters->named_load_full(), 1, a1, a2); |
| 4295 break; |
| 4296 case Code::KEYED_LOAD_IC: |
| 4297 __ IncrementCounter(counters->keyed_load_full(), 1, a1, a2); |
| 4298 break; |
| 4299 case Code::STORE_IC: |
| 4300 __ IncrementCounter(counters->named_store_full(), 1, a1, a2); |
| 4301 break; |
| 4302 case Code::KEYED_STORE_IC: |
| 4303 __ IncrementCounter(counters->keyed_store_full(), 1, a1, a2); |
| break; |
| 4304 default: |
| 4305 break; |
| 4306 } |
| 4307 if (mode == RelocInfo::CODE_TARGET_WITH_ID) { |
| 4308 ASSERT(ast_id != kNoASTId); |
| 4309 __ CallWithAstId(ic, mode, ast_id); |
| 4310 } else { |
| 4311 ASSERT(ast_id == kNoASTId); |
| 4312 __ Call(ic, mode); |
| 4313 } |
| 4314 } |
| 4315 |
| 4316 |
| 4317 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, |
| 4318 JumpPatchSite* patch_site, |
| 4319 unsigned ast_id) { |
| 4320 Counters* counters = isolate()->counters(); |
| 4321 switch (ic->kind()) { |
| 4322 case Code::LOAD_IC: |
| 4323 __ IncrementCounter(counters->named_load_full(), 1, a1, a2); |
| 4324 break; |
| 4325 case Code::KEYED_LOAD_IC: |
| 4326 __ IncrementCounter(counters->keyed_load_full(), 1, a1, a2); |
| 4327 break; |
| 4328 case Code::STORE_IC: |
| 4329 __ IncrementCounter(counters->named_store_full(), 1, a1, a2); |
| 4330 break; |
| 4331 case Code::KEYED_STORE_IC: |
| 4332 __ IncrementCounter(counters->keyed_store_full(), 1, a1, a2); |
| break; |
| 4333 default: |
| 4334 break; |
| 4335 } |
| 4336 |
| 4337 if (ast_id != kNoASTId) { |
| 4338 __ CallWithAstId(ic, RelocInfo::CODE_TARGET_WITH_ID, ast_id); |
| 4339 } else { |
| 4340 __ Call(ic, RelocInfo::CODE_TARGET); |
| 4341 } |
| 4342 if (patch_site != NULL && patch_site->is_bound()) { |
| 4343 patch_site->EmitPatchInfo(); |
| 4344 } else { |
| 4345 __ nop(); // Signals no inlined code. |
| 4346 } |
700 } | 4347 } |
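
The word right after the IC call is what the patching machinery later inspects: either the marker emitted by EmitPatchInfo() or, as in the else branch above, a nop meaning there is no inlined smi code to toggle at this site. A minimal sketch of that consumer-side test (invented helper; MIPS encodes nop as an all-zero instruction word):

    #include <cstdint>

    // MIPS nop is sll zero, zero, 0, i.e. the all-zero instruction word.
    const uint32_t kNopInstruction = 0;

    // Invented helper: given the address the IC call returns to, decide
    // whether this call site carries patch info for inlined smi code.
    bool SiteHasInlinedSmiCode(const uint32_t* return_address) {
      return *return_address != kNopInstruction;
    }

    int main() {
      uint32_t marker = 0x30420000;  // hypothetical non-nop marker word
      uint32_t nop = kNopInstruction;
      return SiteHasInlinedSmiCode(&marker) && !SiteHasInlinedSmiCode(&nop)
                 ? 0 : 1;
    }
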
701 | 4348 |
702 | 4349 |
703 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 4350 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
704 UNIMPLEMENTED_MIPS(); | 4351 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); |
| 4352 __ sw(value, MemOperand(fp, frame_offset)); |
705 } | 4353 } |
706 | 4354 |
707 | 4355 |
708 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 4356 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
709 UNIMPLEMENTED_MIPS(); | 4357 __ lw(dst, ContextOperand(cp, context_index)); |
710 } | 4358 } |
711 | 4359 |
712 | 4360 |
713 // ---------------------------------------------------------------------------- | 4361 // ---------------------------------------------------------------------------- |
714 // Non-local control flow support. | 4362 // Non-local control flow support. |
715 | 4363 |
716 void FullCodeGenerator::EnterFinallyBlock() { | 4364 void FullCodeGenerator::EnterFinallyBlock() { |
717 UNIMPLEMENTED_MIPS(); | 4365 ASSERT(!result_register().is(a1)); |
| 4366 // Store result register while executing finally block. |
| 4367 __ push(result_register()); |
| 4368 // Cook the return address in the link register onto the stack as a smi-encoded delta from the code object. |
| 4369 __ Subu(a1, ra, Operand(masm_->CodeObject())); |
| 4370 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); |
| 4371 ASSERT_EQ(0, kSmiTag); |
| 4372 __ Addu(a1, a1, Operand(a1)); // Convert to smi. |
| 4373 __ push(a1); |
718 } | 4374 } |
719 | 4375 |
720 | 4376 |
721 void FullCodeGenerator::ExitFinallyBlock() { | 4377 void FullCodeGenerator::ExitFinallyBlock() { |
722 UNIMPLEMENTED_MIPS(); | 4378 ASSERT(!result_register().is(a1)); |
| 4379 // Restore result register from stack. |
| 4380 __ pop(a1); |
| 4381 // Uncook return address and return. |
| 4382 __ pop(result_register()); |
| 4383 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); |
| 4384 __ sra(a1, a1, 1); // Un-smi-tag value. |
| 4385 __ Addu(at, a1, Operand(masm_->CodeObject())); |
| 4386 __ Jump(at); |
723 } | 4387 } |
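
Cooking keeps the saved return address valid across whatever the finally block does: what sits on the stack is a smi-tagged offset from the code object's start, which is not mistaken for a raw code pointer and which survives the code object moving. The Addu(a1, a1, a1) is the smi tag (shift left by one) and the sra undoes it. A stand-alone sketch of the round trip on plain integers, not real addresses:

    #include <cassert>
    #include <cstdint>

    // Mirrors the Subu/Addu pair in EnterFinallyBlock and the sra/Addu
    // pair in ExitFinallyBlock.
    uint32_t Cook(uint32_t ra, uint32_t code_start) {
      uint32_t delta = ra - code_start;
      return delta << 1;  // smi-tag: kSmiTag == 0, one tag bit
    }

    uint32_t Uncook(uint32_t cooked, uint32_t code_start) {
      return (cooked >> 1) + code_start;  // un-smi-tag, rebase on the code
    }

    int main() {
      // Hypothetical addresses: if the code object moves while the finally
      // block runs, only code_start changes; the cooked delta still works.
      uint32_t cooked = Cook(0x10000123, 0x10000000);
      assert(Uncook(cooked, 0x20000000) == 0x20000123);
    }
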
724 | 4388 |
725 | 4389 |
726 #undef __ | 4390 #undef __ |
727 | 4391 |
728 } } // namespace v8::internal | 4392 } } // namespace v8::internal |
729 | 4393 |
730 #endif // V8_TARGET_ARCH_MIPS | 4394 #endif // V8_TARGET_ARCH_MIPS |