Chromium Code Reviews

Unified Diff: src/mips/lithium-codegen-mips.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes (created 6 years, 4 months ago)
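
Context for reviewers (illustrative, not part of the patch): this CL mechanically renames V8's ASSERT* macro family to the Chromium-style DCHECK* names; the semantics are unchanged and the checks remain debug-only. A minimal before/after sketch (the operands here are made up):

    ASSERT(is_unused());                  // before this CL
    ASSERT_EQ(0, offset % kPointerSize);  // before this CL
    DCHECK(is_unused());                  // after: same check, new name
    DCHECK_EQ(0, offset % kPointerSize);  // after: the *_EQ variants follow suit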
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 46 matching lines...)
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  Safepoint::DeoptMode deopt_mode_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  LPhase phase("Z_Code generation", chunk());
-  ASSERT(is_unused());
+  DCHECK(is_unused());
  status_ = GENERATING;

  // Open a frame scope to indicate that there is a frame on the stack. The
  // NONE indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in GeneratePrologue).
  FrameScope frame_scope(masm_, StackFrame::NONE);

  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
         GenerateDeoptJumpTable() &&
         GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
-  ASSERT(is_done());
+  DCHECK(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
  PopulateDeoptimizationData(code);
}


void LCodeGen::SaveCallerDoubles() {
-  ASSERT(info()->saves_caller_doubles());
-  ASSERT(NeedsEagerFrame());
+  DCHECK(info()->saves_caller_doubles());
+  DCHECK(NeedsEagerFrame());
  Comment(";;; Save clobbered callee double registers");
  int count = 0;
  BitVector* doubles = chunk()->allocated_double_registers();
  BitVector::Iterator save_iterator(doubles);
  while (!save_iterator.Done()) {
    __ sdc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
            MemOperand(sp, count * kDoubleSize));
    save_iterator.Advance();
    count++;
  }
}


void LCodeGen::RestoreCallerDoubles() {
-  ASSERT(info()->saves_caller_doubles());
-  ASSERT(NeedsEagerFrame());
+  DCHECK(info()->saves_caller_doubles());
+  DCHECK(NeedsEagerFrame());
  Comment(";;; Restore clobbered callee double registers");
  BitVector* doubles = chunk()->allocated_double_registers();
  BitVector::Iterator save_iterator(doubles);
  int count = 0;
  while (!save_iterator.Done()) {
    __ ldc1(DoubleRegister::FromAllocationIndex(save_iterator.Current()),
            MemOperand(sp, count * kDoubleSize));
    save_iterator.Advance();
    count++;
  }
}
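
Reviewer note (illustrative, not part of the patch): SaveCallerDoubles and RestoreCallerDoubles must walk the allocated-double bit vector in the same order so every register round-trips through the same slot. Assuming f2 and f4 are the allocated double registers (hypothetical) and kDoubleSize == 8:

    // SaveCallerDoubles emits:
    //   sdc1 f2, MemOperand(sp, 0)    <- count == 0
    //   sdc1 f4, MemOperand(sp, 8)    <- count == 1
    // RestoreCallerDoubles emits the matching ldc1 at the same offsets.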


bool LCodeGen::GeneratePrologue() {
-  ASSERT(is_generating());
+  DCHECK(is_generating());

  if (info()->IsOptimizing()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
      __ stop("stop_at");
    }
#endif
(...skipping 118 matching lines...)
void LCodeGen::GenerateOsrPrologue() {
  // Generate the OSR entry prologue at the first unknown OSR value, or if there
  // are none, at the OSR entrypoint instruction.
  if (osr_pc_offset_ >= 0) return;

  osr_pc_offset_ = masm()->pc_offset();

  // Adjust the frame size, subsuming the unoptimized frame into the
  // optimized frame.
  int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots();
-  ASSERT(slots >= 0);
+  DCHECK(slots >= 0);
  __ Subu(sp, sp, Operand(slots * kPointerSize));
}
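
Reviewer note (illustrative, not part of the patch): the OSR prologue only grows the frame by the difference between the optimized frame and the unoptimized frame already on the stack. With made-up numbers and kPointerSize == 4 on MIPS32:

    // GetStackSlotCount() == 10, UnoptimizedFrameSlots() == 4
    // slots == 6, so the emitted instruction is Subu(sp, sp, Operand(24)),
    // and the DCHECK guards against the optimized frame being the smaller one.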


void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
  if (instr->IsCall()) {
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  }
  if (!instr->IsLazyBailout() && !instr->IsGap()) {
    safepoints_.BumpLastLazySafepointIndex();
  }
}


bool LCodeGen::GenerateDeferredCode() {
-  ASSERT(is_generating());
+  DCHECK(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];

      HValue* value =
          instructions_->at(code->instruction_index())->hydrogen_value();
      RecordAndWritePosition(
          chunk()->graph()->SourcePositionToScriptPosition(value->position()));

      Comment(";;; <@%d,#%d> "
              "-------------------- Deferred %s --------------------",
              code->instruction_index(),
              code->instr()->hydrogen_value()->id(),
              code->instr()->Mnemonic());
      __ bind(code->entry());
      if (NeedsDeferredFrame()) {
        Comment(";;; Build frame");
-        ASSERT(!frame_is_built_);
-        ASSERT(info()->IsStub());
+        DCHECK(!frame_is_built_);
+        DCHECK(info()->IsStub());
        frame_is_built_ = true;
        __ MultiPush(cp.bit() | fp.bit() | ra.bit());
        __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB)));
        __ push(scratch0());
        __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
        Comment(";;; Deferred code");
      }
      code->Generate();
      if (NeedsDeferredFrame()) {
        Comment(";;; Destroy frame");
-        ASSERT(frame_is_built_);
+        DCHECK(frame_is_built_);
        __ pop(at);
        __ MultiPop(cp.bit() | fp.bit() | ra.bit());
        frame_is_built_ = false;
      }
      __ jmp(code->exit());
    }
  }
  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateDeoptJumpTable() {
  if (deopt_jump_table_.length() > 0) {
    Label needs_frame, call_deopt_entry;

    Comment(";;; -------------------- Jump table --------------------");
    Address base = deopt_jump_table_[0].address;

    Register entry_offset = t9;

    int length = deopt_jump_table_.length();
    for (int i = 0; i < length; i++) {
      __ bind(&deopt_jump_table_[i].label);

      Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
-      ASSERT(type == deopt_jump_table_[0].bailout_type);
+      DCHECK(type == deopt_jump_table_[0].bailout_type);
      Address entry = deopt_jump_table_[i].address;
      int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
-      ASSERT(id != Deoptimizer::kNotDeoptimizationEntry);
+      DCHECK(id != Deoptimizer::kNotDeoptimizationEntry);
      Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);

      // Second-level deopt table entries are contiguous and small, so instead
      // of loading the full, absolute address of each one, load an immediate
      // offset which will be added to the base address later.
      __ li(entry_offset, Operand(entry - base));

      if (deopt_jump_table_[i].needs_frame) {
-        ASSERT(!info()->saves_caller_doubles());
+        DCHECK(!info()->saves_caller_doubles());
        if (needs_frame.is_bound()) {
          __ Branch(&needs_frame);
        } else {
          __ bind(&needs_frame);
          Comment(";;; call deopt with frame");
          __ MultiPush(cp.bit() | fp.bit() | ra.bit());
          // This variant of deopt can only be used with stubs. Since we don't
          // have a function pointer to install in the stack frame that we're
          // building, install a special marker there instead.
-          ASSERT(info()->IsStub());
+          DCHECK(info()->IsStub());
          __ li(at, Operand(Smi::FromInt(StackFrame::STUB)));
          __ push(at);
          __ Addu(fp, sp,
                  Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
          __ bind(&call_deopt_entry);
          // Add the base address to the offset previously loaded in
          // entry_offset.
          __ Addu(entry_offset, entry_offset,
                  Operand(ExternalReference::ForDeoptEntry(base)));
          __ Call(entry_offset);
        }
      } else {
        // The last entry can fall through into `call_deopt_entry`, avoiding a
        // branch.
        bool need_branch = ((i + 1) != length) || call_deopt_entry.is_bound();

        if (need_branch) __ Branch(&call_deopt_entry);
      }
    }

    if (!call_deopt_entry.is_bound()) {
      Comment(";;; call deopt");
      __ bind(&call_deopt_entry);

      if (info()->saves_caller_doubles()) {
-        ASSERT(info()->IsStub());
+        DCHECK(info()->IsStub());
        RestoreCallerDoubles();
      }

      // Add the base address to the offset previously loaded in entry_offset.
      __ Addu(entry_offset, entry_offset,
              Operand(ExternalReference::ForDeoptEntry(base)));
      __ Call(entry_offset);
    }
  }
  __ RecordComment("]");

  // The deoptimization jump table is the last part of the instruction
  // sequence. Mark the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}
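
Reviewer note (illustrative, not part of the patch): because second-level deopt entries are contiguous, each jump-table stub only materializes the small offset `entry - base`; the shared tail adds the base address once. With hypothetical addresses:

    // base == 0x40001000, entry == 0x40001020
    //   li   t9, 0x20                      (per-entry stub, small immediate)
    //   addu t9, t9, <ExternalReference::ForDeoptEntry(base)>   (shared tail)
    //   call t9     // t9 now holds the absolute entry address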


bool LCodeGen::GenerateSafepointTable() {
-  ASSERT(is_done());
+  DCHECK(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


DoubleRegister LCodeGen::ToDoubleRegister(int index) const {
  return DoubleRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
-  ASSERT(op->IsRegister());
+  DCHECK(op->IsRegister());
  return ToRegister(op->index());
}


Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
  if (op->IsRegister()) {
    return ToRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    HConstant* constant = chunk_->LookupConstant(const_op);
    Handle<Object> literal = constant->handle(isolate());
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
-      ASSERT(literal->IsNumber());
+      DCHECK(literal->IsNumber());
      __ li(scratch, Operand(static_cast<int32_t>(literal->Number())));
    } else if (r.IsSmi()) {
-      ASSERT(constant->HasSmiValue());
+      DCHECK(constant->HasSmiValue());
      __ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
    } else if (r.IsDouble()) {
      Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
    } else {
-      ASSERT(r.IsSmiOrTagged());
+      DCHECK(r.IsSmiOrTagged());
      __ li(scratch, literal);
    }
    return scratch;
  } else if (op->IsStackSlot()) {
    __ lw(scratch, ToMemOperand(op));
    return scratch;
  }
  UNREACHABLE();
  return scratch;
}


DoubleRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
-  ASSERT(op->IsDoubleRegister());
+  DCHECK(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
                                                FloatRegister flt_scratch,
                                                DoubleRegister dbl_scratch) {
  if (op->IsDoubleRegister()) {
    return ToDoubleRegister(op->index());
  } else if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    HConstant* constant = chunk_->LookupConstant(const_op);
    Handle<Object> literal = constant->handle(isolate());
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsInteger32()) {
-      ASSERT(literal->IsNumber());
+      DCHECK(literal->IsNumber());
      __ li(at, Operand(static_cast<int32_t>(literal->Number())));
      __ mtc1(at, flt_scratch);
      __ cvt_d_w(dbl_scratch, flt_scratch);
      return dbl_scratch;
    } else if (r.IsDouble()) {
      Abort(kUnsupportedDoubleImmediate);
    } else if (r.IsTagged()) {
      Abort(kUnsupportedTaggedImmediate);
    }
  } else if (op->IsStackSlot()) {
    MemOperand mem_op = ToMemOperand(op);
    __ ldc1(dbl_scratch, mem_op);
    return dbl_scratch;
  }
  UNREACHABLE();
  return dbl_scratch;
}


Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
-  ASSERT(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
+  DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
  return constant->handle(isolate());
}


bool LCodeGen::IsInteger32(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
}


bool LCodeGen::IsSmi(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsSmi();
}


int32_t LCodeGen::ToInteger32(LConstantOperand* op) const {
  return ToRepresentation(op, Representation::Integer32());
}


int32_t LCodeGen::ToRepresentation(LConstantOperand* op,
                                   const Representation& r) const {
  HConstant* constant = chunk_->LookupConstant(op);
  int32_t value = constant->Integer32Value();
  if (r.IsInteger32()) return value;
-  ASSERT(r.IsSmiOrTagged());
+  DCHECK(r.IsSmiOrTagged());
  return reinterpret_cast<int32_t>(Smi::FromInt(value));
}
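
Reviewer note (illustrative, not part of the patch): on this 32-bit target a Smi stores its payload shifted left by one with a zero tag bit, so the reinterpret_cast returns the tagged bit pattern rather than the plain integer:

    // value == 5  ->  Smi::FromInt(5) has bit pattern 5 << 1 == 10
    // reinterpret_cast<int32_t>(...) == 10, the immediate a Smi-typed
    // consumer expects to see.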


Smi* LCodeGen::ToSmi(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  return Smi::FromInt(constant->Integer32Value());
}


double LCodeGen::ToDouble(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
-  ASSERT(constant->HasDoubleValue());
+  DCHECK(constant->HasDoubleValue());
  return constant->DoubleValue();
}


Operand LCodeGen::ToOperand(LOperand* op) {
  if (op->IsConstantOperand()) {
    LConstantOperand* const_op = LConstantOperand::cast(op);
    HConstant* constant = chunk()->LookupConstant(const_op);
    Representation r = chunk_->LookupLiteralRepresentation(const_op);
    if (r.IsSmi()) {
-      ASSERT(constant->HasSmiValue());
+      DCHECK(constant->HasSmiValue());
      return Operand(Smi::FromInt(constant->Integer32Value()));
    } else if (r.IsInteger32()) {
-      ASSERT(constant->HasInteger32Value());
+      DCHECK(constant->HasInteger32Value());
      return Operand(constant->Integer32Value());
    } else if (r.IsDouble()) {
      Abort(kToOperandUnsupportedDoubleImmediate);
    }
-    ASSERT(r.IsTagged());
+    DCHECK(r.IsTagged());
    return Operand(constant->handle(isolate()));
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
    Abort(kToOperandIsDoubleRegisterUnimplemented);
    return Operand(0);
  }
  // Stack slots not implemented, use ToMemOperand instead.
  UNREACHABLE();
  return Operand(0);
}


static int ArgumentsOffsetWithoutFrame(int index) {
-  ASSERT(index < 0);
+  DCHECK(index < 0);
  return -(index + 1) * kPointerSize;
}
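
Reviewer note (illustrative, not part of the patch): negative indices denote incoming arguments, addressed sp-relative when no eager frame exists. With kPointerSize == 4 on MIPS32:

    // index == -1  ->  -(-1 + 1) * 4 == 0  ->  MemOperand(sp, 0)
    // index == -2  ->  -(-2 + 1) * 4 == 4  ->  MemOperand(sp, 4)
    // ToMemOperand() below switches to fp-relative offsets whenever
    // NeedsEagerFrame() is true.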


MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
-  ASSERT(!op->IsRegister());
-  ASSERT(!op->IsDoubleRegister());
-  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
+  DCHECK(!op->IsRegister());
+  DCHECK(!op->IsDoubleRegister());
+  DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
  if (NeedsEagerFrame()) {
    return MemOperand(fp, StackSlotOffset(op->index()));
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return MemOperand(sp, ArgumentsOffsetWithoutFrame(op->index()));
  }
}


MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
-  ASSERT(op->IsDoubleStackSlot());
+  DCHECK(op->IsDoubleStackSlot());
  if (NeedsEagerFrame()) {
    return MemOperand(fp, StackSlotOffset(op->index()) + kPointerSize);
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return MemOperand(
        sp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize);
  }
}

(...skipping 15 matching lines...)
          : Translation::kSelfLiteralId;

  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case JS_GETTER:
-      ASSERT(translation_size == 1);
-      ASSERT(height == 0);
+      DCHECK(translation_size == 1);
+      DCHECK(height == 0);
      translation->BeginGetterStubFrame(closure_id);
      break;
    case JS_SETTER:
-      ASSERT(translation_size == 2);
-      ASSERT(height == 0);
+      DCHECK(translation_size == 2);
+      DCHECK(height == 0);
      translation->BeginSetterStubFrame(closure_id);
      break;
    case STUB:
      translation->BeginCompiledStubFrame();
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
  }

(...skipping 84 matching lines...)
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
-  ASSERT(instr != NULL);
+  DCHECK(instr != NULL);
  __ Call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode);
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr,
                           SaveFPRegsMode save_doubles) {
-  ASSERT(instr != NULL);
+  DCHECK(instr != NULL);

  __ CallRuntime(function, num_arguments, save_doubles);

  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::LoadContextFromDeferred(LOperand* context) {
  if (context->IsRegister()) {
    __ Move(cp, ToRegister(context));
(...skipping 56 matching lines...)
  }
}


void LCodeGen::DeoptimizeIf(Condition condition,
                            LEnvironment* environment,
                            Deoptimizer::BailoutType bailout_type,
                            Register src1,
                            const Operand& src2) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
-  ASSERT(environment->HasBeenRegistered());
+  DCHECK(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
-  ASSERT(info()->IsOptimizing() || info()->IsStub());
+  DCHECK(info()->IsOptimizing() || info()->IsStub());
  Address entry =
      Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
  if (entry == NULL) {
    Abort(kBailoutWasNotPrepared);
    return;
  }

  if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
    Register scratch = scratch0();
    ExternalReference count = ExternalReference::stress_deopt_count(isolate());
(...skipping 15 matching lines...)

  if (info()->ShouldTrapOnDeopt()) {
    Label skip;
    if (condition != al) {
      __ Branch(&skip, NegateCondition(condition), src1, src2);
    }
    __ stop("trap_on_deopt");
    __ bind(&skip);
  }

-  ASSERT(info()->IsStub() || frame_is_built_);
+  DCHECK(info()->IsStub() || frame_is_built_);
  // Go through jump table if we need to handle condition, build frame, or
  // restore caller doubles.
  if (condition == al && frame_is_built_ &&
      !info()->saves_caller_doubles()) {
    __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
  } else {
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (deopt_jump_table_.is_empty() ||
        (deopt_jump_table_.last().address != entry) ||
(...skipping 68 matching lines...)
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal, zone());
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
-  ASSERT(deoptimization_literals_.length() == 0);
+  DCHECK(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
-    ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+    DCHECK(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kLazyDeopt);
  }
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
-  ASSERT(expected_safepoint_kind_ == kind);
+  DCHECK(expected_safepoint_kind_ == kind);

  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index(), zone());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer), zone());
(...skipping 68 matching lines...)
  DoGap(instr);
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
-  ASSERT(ToRegister(instr->context()).is(cp));
-  ASSERT(ToRegister(instr->result()).is(v0));
+  DCHECK(ToRegister(instr->context()).is(cp));
+  DCHECK(ToRegister(instr->result()).is(v0));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpExec: {
      RegExpExecStub stub(isolate());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub(isolate());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
(...skipping 10 matching lines...)


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  GenerateOsrPrologue();
}


void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
-  ASSERT(dividend.is(ToRegister(instr->result())));
+  DCHECK(dividend.is(ToRegister(instr->result())));

  // Theoretically, a variation of the branch-free code for integer division by
  // a power of 2 (calculating the remainder via an additional multiplication
  // (which gets simplified to an 'and') and subtraction) should be faster, and
  // this is exactly what GCC and clang emit. Nevertheless, benchmarks seem to
  // indicate that positive dividends are heavily favored, so the branching
  // version performs better.
  HMod* hmod = instr->hydrogen();
  int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
  Label dividend_is_not_negative, done;
(...skipping 13 matching lines...)
  __ bind(&dividend_is_not_negative);
  __ And(dividend, dividend, Operand(mask));
  __ bind(&done);
}
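
Reviewer note (illustrative, not part of the patch): the mask reduces a power-of-2 modulus to one AND for non-negative dividends, and the divisor's sign is irrelevant because the remainder's sign follows the dividend:

    // divisor ==  8  ->  mask ==   8 - 1   == 7
    // divisor == -8  ->  mask == -(-8 + 1) == 7   (same mask)
    // dividend == 13 ->  13 & 7 == 5, and 13 % 8 == 13 % -8 == 5
    // Negative dividends take the branching path in the elided lines.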


void LCodeGen::DoModByConstI(LModByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  Register result = ToRegister(instr->result());
-  ASSERT(!dividend.is(result));
+  DCHECK(!dividend.is(result));

  if (divisor == 0) {
    DeoptimizeIf(al, instr->environment());
    return;
  }

  __ TruncatingDiv(result, dividend, Abs(divisor));
  __ Mul(result, result, Operand(Abs(divisor)));
  __ Subu(result, dividend, Operand(result));

(...skipping 46 matching lines...)
    DeoptimizeIf(eq, instr->environment(), result_reg, Operand(zero_reg));
  }
  __ bind(&done);
}


void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  Register result = ToRegister(instr->result());
-  ASSERT(divisor == kMinInt || IsPowerOf2(Abs(divisor)));
-  ASSERT(!result.is(dividend));
+  DCHECK(divisor == kMinInt || IsPowerOf2(Abs(divisor)));
+  DCHECK(!result.is(dividend));

  // Check for (0 / -x) that will produce negative zero.
  HDiv* hdiv = instr->hydrogen();
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
    DeoptimizeIf(eq, instr->environment(), dividend, Operand(zero_reg));
  }
  // Check for (kMinInt / -1).
  if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) {
    DeoptimizeIf(eq, instr->environment(), dividend, Operand(kMinInt));
  }
(...skipping 22 matching lines...)
  }
  if (shift > 0) __ sra(result, result, shift);
  if (divisor < 0) __ Subu(result, zero_reg, result);
}
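
Reviewer note (illustrative, not part of the patch): the two deopts cover the only power-of-2 divisions an int32 cannot represent:

    //   0 / -8       ->  -0, not an int32 value, so deopt when the dividend
    //                    is zero and the divisor negative;
    //   kMinInt / -1 ->  2^31, one past kMaxInt, so deopt on that dividend.
    // Every remaining case is an arithmetic shift plus an optional negate.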


void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  Register result = ToRegister(instr->result());
-  ASSERT(!dividend.is(result));
+  DCHECK(!dividend.is(result));

  if (divisor == 0) {
    DeoptimizeIf(al, instr->environment());
    return;
  }

  // Check for (0 / -x) that will produce negative zero.
  HDiv* hdiv = instr->hydrogen();
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
    DeoptimizeIf(eq, instr->environment(), dividend, Operand(zero_reg));
(...skipping 52 matching lines...)
  }
}


void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) {
  DoubleRegister addend = ToDoubleRegister(instr->addend());
  DoubleRegister multiplier = ToDoubleRegister(instr->multiplier());
  DoubleRegister multiplicand = ToDoubleRegister(instr->multiplicand());

  // This is computed in-place.
-  ASSERT(addend.is(ToDoubleRegister(instr->result())));
+  DCHECK(addend.is(ToDoubleRegister(instr->result())));

  __ madd_d(addend, addend, multiplier, multiplicand);
}


void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
  Register dividend = ToRegister(instr->dividend());
  Register result = ToRegister(instr->result());
  int32_t divisor = instr->divisor();
  Register scratch = result.is(dividend) ? scratch0() : dividend;
-  ASSERT(!result.is(dividend) || !scratch.is(dividend));
+  DCHECK(!result.is(dividend) || !scratch.is(dividend));

  // If the divisor is 1, return the dividend.
  if (divisor == 1) {
    __ Move(result, dividend);
    return;
  }

  // If the divisor is positive, things are easy: There can be no deopts and we
  // can simply do an arithmetic right shift.
  uint16_t shift = WhichPowerOf2Abs(divisor);
(...skipping 35 matching lines...)
  __ bind(&no_overflow);
  __ sra(result, result, shift);
  __ bind(&done);
}


void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  Register result = ToRegister(instr->result());
-  ASSERT(!dividend.is(result));
+  DCHECK(!dividend.is(result));

  if (divisor == 0) {
    DeoptimizeIf(al, instr->environment());
    return;
  }

  // Check for (0 / -x) that will produce negative zero.
  HMathFloorOfDiv* hdiv = instr->hydrogen();
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
    DeoptimizeIf(eq, instr->environment(), dividend, Operand(zero_reg));
  }

  // Easy case: We need no dynamic check for the dividend and the flooring
  // division is the same as the truncating division.
  if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) ||
      (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) {
    __ TruncatingDiv(result, dividend, Abs(divisor));
    if (divisor < 0) __ Subu(result, zero_reg, result);
    return;
  }

  // In the general case we may need to adjust before and after the truncating
  // division to get a flooring division.
  Register temp = ToRegister(instr->temp());
-  ASSERT(!temp.is(dividend) && !temp.is(result));
+  DCHECK(!temp.is(dividend) && !temp.is(result));
  Label needs_adjustment, done;
  __ Branch(&needs_adjustment, divisor > 0 ? lt : gt,
            dividend, Operand(zero_reg));
  __ TruncatingDiv(result, dividend, Abs(divisor));
  if (divisor < 0) __ Subu(result, zero_reg, result);
  __ jmp(&done);
  __ bind(&needs_adjustment);
  __ Addu(temp, dividend, Operand(divisor > 0 ? 1 : -1));
  __ TruncatingDiv(result, temp, Abs(divisor));
  if (divisor < 0) __ Subu(result, zero_reg, result);
(...skipping 114 matching lines...)
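
Reviewer note (illustrative, not part of the patch): the adjustment above relies on the identity floor(a / b) == trunc((a + 1) / b) - 1 when a and b have opposite signs; the trailing subtract-by-one presumably sits in the elided lines. For example:

    // floor(-7 / 2) == -4, while plain truncation gives trunc(-7 / 2) == -3;
    // adjusted: trunc((-7 + 1) / 2) - 1 == -3 - 1 == -4, as required.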
1539 // Correct the sign of the result if the constant is negative. 1539 // Correct the sign of the result if the constant is negative.
1540 if (constant < 0) __ Subu(result, zero_reg, result); 1540 if (constant < 0) __ Subu(result, zero_reg, result);
1541 } else { 1541 } else {
1542 // Generate standard code. 1542 // Generate standard code.
1543 __ li(at, constant); 1543 __ li(at, constant);
1544 __ Mul(result, left, at); 1544 __ Mul(result, left, at);
1545 } 1545 }
1546 } 1546 }
1547 1547
1548 } else { 1548 } else {
1549 ASSERT(right_op->IsRegister()); 1549 DCHECK(right_op->IsRegister());
1550 Register right = ToRegister(right_op); 1550 Register right = ToRegister(right_op);
1551 1551
1552 if (overflow) { 1552 if (overflow) {
1553 // hi:lo = left * right. 1553 // hi:lo = left * right.
1554 if (instr->hydrogen()->representation().IsSmi()) { 1554 if (instr->hydrogen()->representation().IsSmi()) {
1555 __ SmiUntag(result, left); 1555 __ SmiUntag(result, left);
1556 __ mult(result, right); 1556 __ mult(result, right);
1557 __ mfhi(scratch); 1557 __ mfhi(scratch);
1558 __ mflo(result); 1558 __ mflo(result);
1559 } else { 1559 } else {
(...skipping 23 matching lines...) Expand all
1583 Operand(zero_reg)); 1583 Operand(zero_reg));
1584 __ bind(&done); 1584 __ bind(&done);
1585 } 1585 }
1586 } 1586 }
1587 } 1587 }
1588 1588
1589 1589
1590 void LCodeGen::DoBitI(LBitI* instr) { 1590 void LCodeGen::DoBitI(LBitI* instr) {
1591 LOperand* left_op = instr->left(); 1591 LOperand* left_op = instr->left();
1592 LOperand* right_op = instr->right(); 1592 LOperand* right_op = instr->right();
1593 ASSERT(left_op->IsRegister()); 1593 DCHECK(left_op->IsRegister());
1594 Register left = ToRegister(left_op); 1594 Register left = ToRegister(left_op);
1595 Register result = ToRegister(instr->result()); 1595 Register result = ToRegister(instr->result());
1596 Operand right(no_reg); 1596 Operand right(no_reg);
1597 1597
1598 if (right_op->IsStackSlot()) { 1598 if (right_op->IsStackSlot()) {
1599 right = Operand(EmitLoadRegister(right_op, at)); 1599 right = Operand(EmitLoadRegister(right_op, at));
1600 } else { 1600 } else {
1601 ASSERT(right_op->IsRegister() || right_op->IsConstantOperand()); 1601 DCHECK(right_op->IsRegister() || right_op->IsConstantOperand());
1602 right = ToOperand(right_op); 1602 right = ToOperand(right_op);
1603 } 1603 }
1604 1604
1605 switch (instr->op()) { 1605 switch (instr->op()) {
1606 case Token::BIT_AND: 1606 case Token::BIT_AND:
1607 __ And(result, left, right); 1607 __ And(result, left, right);
1608 break; 1608 break;
1609 case Token::BIT_OR: 1609 case Token::BIT_OR:
1610 __ Or(result, left, right); 1610 __ Or(result, left, right);
1611 break; 1611 break;
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after
1714 LOperand* left = instr->left(); 1714 LOperand* left = instr->left();
1715 LOperand* right = instr->right(); 1715 LOperand* right = instr->right();
1716 LOperand* result = instr->result(); 1716 LOperand* result = instr->result();
1717 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); 1717 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1718 1718
1719 if (!can_overflow) { 1719 if (!can_overflow) {
1720 if (right->IsStackSlot()) { 1720 if (right->IsStackSlot()) {
1721 Register right_reg = EmitLoadRegister(right, at); 1721 Register right_reg = EmitLoadRegister(right, at);
1722 __ Subu(ToRegister(result), ToRegister(left), Operand(right_reg)); 1722 __ Subu(ToRegister(result), ToRegister(left), Operand(right_reg));
1723 } else { 1723 } else {
1724 ASSERT(right->IsRegister() || right->IsConstantOperand()); 1724 DCHECK(right->IsRegister() || right->IsConstantOperand());
1725 __ Subu(ToRegister(result), ToRegister(left), ToOperand(right)); 1725 __ Subu(ToRegister(result), ToRegister(left), ToOperand(right));
1726 } 1726 }
1727 } else { // can_overflow. 1727 } else { // can_overflow.
1728 Register overflow = scratch0(); 1728 Register overflow = scratch0();
1729 Register scratch = scratch1(); 1729 Register scratch = scratch1();
1730 if (right->IsStackSlot() || right->IsConstantOperand()) { 1730 if (right->IsStackSlot() || right->IsConstantOperand()) {
1731 Register right_reg = EmitLoadRegister(right, scratch); 1731 Register right_reg = EmitLoadRegister(right, scratch);
1732 __ SubuAndCheckForOverflow(ToRegister(result), 1732 __ SubuAndCheckForOverflow(ToRegister(result),
1733 ToRegister(left), 1733 ToRegister(left),
1734 right_reg, 1734 right_reg,
1735 overflow); // Reg at is also used as scratch. 1735 overflow); // Reg at is also used as scratch.
1736 } else { 1736 } else {
1737 ASSERT(right->IsRegister()); 1737 DCHECK(right->IsRegister());
1738 // Because the overflow check macros do not support constant operands, 1738 // Because the overflow check macros do not support constant operands,
1739 // the IsConstantOperand case is handled in the previous if clause. 1739 // the IsConstantOperand case is handled in the previous if clause.
1740 __ SubuAndCheckForOverflow(ToRegister(result), 1740 __ SubuAndCheckForOverflow(ToRegister(result),
1741 ToRegister(left), 1741 ToRegister(left),
1742 ToRegister(right), 1742 ToRegister(right),
1743 overflow); // Reg at is also used as scratch. 1743 overflow); // Reg at is also used as scratch.
1744 } 1744 }
1745 DeoptimizeIf(lt, instr->environment(), overflow, Operand(zero_reg)); 1745 DeoptimizeIf(lt, instr->environment(), overflow, Operand(zero_reg));
1746 } 1746 }
1747 } 1747 }
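SubuAndCheckForOverflow leaves a negative value in the overflow register exactly when the signed subtraction wrapped, which is why the DeoptimizeIf above branches on lt against zero_reg. A hedged sketch of the usual sign test behind such macros (plain C++; names are illustrative, not the macro-assembler sequence):

  #include <cstdint>
  bool SubWouldOverflow(int32_t left, int32_t right, int32_t* result) {
    uint32_t res = static_cast<uint32_t>(left) - static_cast<uint32_t>(right);
    *result = static_cast<int32_t>(res);
    // Overflow iff the operands have different signs and the result's sign
    // differs from left's; the expression is negative exactly then.
    return ((left ^ right) & (left ^ *result)) < 0;
  }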
1748 1748
1749 1749
1750 void LCodeGen::DoConstantI(LConstantI* instr) { 1750 void LCodeGen::DoConstantI(LConstantI* instr) {
1751 __ li(ToRegister(instr->result()), Operand(instr->value())); 1751 __ li(ToRegister(instr->result()), Operand(instr->value()));
1752 } 1752 }
1753 1753
1754 1754
1755 void LCodeGen::DoConstantS(LConstantS* instr) { 1755 void LCodeGen::DoConstantS(LConstantS* instr) {
1756 __ li(ToRegister(instr->result()), Operand(instr->value())); 1756 __ li(ToRegister(instr->result()), Operand(instr->value()));
1757 } 1757 }
1758 1758
1759 1759
1760 void LCodeGen::DoConstantD(LConstantD* instr) { 1760 void LCodeGen::DoConstantD(LConstantD* instr) {
1761 ASSERT(instr->result()->IsDoubleRegister()); 1761 DCHECK(instr->result()->IsDoubleRegister());
1762 DoubleRegister result = ToDoubleRegister(instr->result()); 1762 DoubleRegister result = ToDoubleRegister(instr->result());
1763 double v = instr->value(); 1763 double v = instr->value();
1764 __ Move(result, v); 1764 __ Move(result, v);
1765 } 1765 }
1766 1766
1767 1767
1768 void LCodeGen::DoConstantE(LConstantE* instr) { 1768 void LCodeGen::DoConstantE(LConstantE* instr) {
1769 __ li(ToRegister(instr->result()), Operand(instr->value())); 1769 __ li(ToRegister(instr->result()), Operand(instr->value()));
1770 } 1770 }
1771 1771
(...skipping 11 matching lines...)
1783 __ EnumLength(result, map); 1783 __ EnumLength(result, map);
1784 } 1784 }
1785 1785
1786 1786
1787 void LCodeGen::DoDateField(LDateField* instr) { 1787 void LCodeGen::DoDateField(LDateField* instr) {
1788 Register object = ToRegister(instr->date()); 1788 Register object = ToRegister(instr->date());
1789 Register result = ToRegister(instr->result()); 1789 Register result = ToRegister(instr->result());
1790 Register scratch = ToRegister(instr->temp()); 1790 Register scratch = ToRegister(instr->temp());
1791 Smi* index = instr->index(); 1791 Smi* index = instr->index();
1792 Label runtime, done; 1792 Label runtime, done;
1793 ASSERT(object.is(a0)); 1793 DCHECK(object.is(a0));
1794 ASSERT(result.is(v0)); 1794 DCHECK(result.is(v0));
1795 ASSERT(!scratch.is(scratch0())); 1795 DCHECK(!scratch.is(scratch0()));
1796 ASSERT(!scratch.is(object)); 1796 DCHECK(!scratch.is(object));
1797 1797
1798 __ SmiTst(object, at); 1798 __ SmiTst(object, at);
1799 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg)); 1799 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
1800 __ GetObjectType(object, scratch, scratch); 1800 __ GetObjectType(object, scratch, scratch);
1801 DeoptimizeIf(ne, instr->environment(), scratch, Operand(JS_DATE_TYPE)); 1801 DeoptimizeIf(ne, instr->environment(), scratch, Operand(JS_DATE_TYPE));
1802 1802
1803 if (index->value() == 0) { 1803 if (index->value() == 0) {
1804 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset)); 1804 __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
1805 } else { 1805 } else {
1806 if (index->value() < JSDate::kFirstUncachedField) { 1806 if (index->value() < JSDate::kFirstUncachedField) {
(...skipping 20 matching lines...)
1827 String::Encoding encoding) { 1827 String::Encoding encoding) {
1828 if (index->IsConstantOperand()) { 1828 if (index->IsConstantOperand()) {
1829 int offset = ToInteger32(LConstantOperand::cast(index)); 1829 int offset = ToInteger32(LConstantOperand::cast(index));
1830 if (encoding == String::TWO_BYTE_ENCODING) { 1830 if (encoding == String::TWO_BYTE_ENCODING) {
1831 offset *= kUC16Size; 1831 offset *= kUC16Size;
1832 } 1832 }
1833 STATIC_ASSERT(kCharSize == 1); 1833 STATIC_ASSERT(kCharSize == 1);
1834 return FieldMemOperand(string, SeqString::kHeaderSize + offset); 1834 return FieldMemOperand(string, SeqString::kHeaderSize + offset);
1835 } 1835 }
1836 Register scratch = scratch0(); 1836 Register scratch = scratch0();
1837 ASSERT(!scratch.is(string)); 1837 DCHECK(!scratch.is(string));
1838 ASSERT(!scratch.is(ToRegister(index))); 1838 DCHECK(!scratch.is(ToRegister(index)));
1839 if (encoding == String::ONE_BYTE_ENCODING) { 1839 if (encoding == String::ONE_BYTE_ENCODING) {
1840 __ Addu(scratch, string, ToRegister(index)); 1840 __ Addu(scratch, string, ToRegister(index));
1841 } else { 1841 } else {
1842 STATIC_ASSERT(kUC16Size == 2); 1842 STATIC_ASSERT(kUC16Size == 2);
1843 __ sll(scratch, ToRegister(index), 1); 1843 __ sll(scratch, ToRegister(index), 1);
1844 __ Addu(scratch, string, scratch); 1844 __ Addu(scratch, string, scratch);
1845 } 1845 }
1846 return FieldMemOperand(scratch, SeqString::kHeaderSize); 1846 return FieldMemOperand(scratch, SeqString::kHeaderSize);
1847 } 1847 }
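The helper above builds the operand for one character of a sequential string: a constant index folds into the FieldMemOperand displacement, while a register index is first scaled by the character width. The dynamic-index offset, sketched in plain C++ (function name is illustrative):

  #include <cstdint>
  uint32_t SeqStringCharByteOffset(uint32_t index, bool one_byte) {
    // ONE_BYTE_ENCODING: kCharSize == 1, so the index is the byte offset.
    // TWO_BYTE_ENCODING: kUC16Size == 2, so scale by two (the sll by 1 above).
    return one_byte ? index : (index << 1);
  }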
1848 1848
(...skipping 55 matching lines...)
1904 LOperand* left = instr->left(); 1904 LOperand* left = instr->left();
1905 LOperand* right = instr->right(); 1905 LOperand* right = instr->right();
1906 LOperand* result = instr->result(); 1906 LOperand* result = instr->result();
1907 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow); 1907 bool can_overflow = instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1908 1908
1909 if (!can_overflow) { 1909 if (!can_overflow) {
1910 if (right->IsStackSlot()) { 1910 if (right->IsStackSlot()) {
1911 Register right_reg = EmitLoadRegister(right, at); 1911 Register right_reg = EmitLoadRegister(right, at);
1912 __ Addu(ToRegister(result), ToRegister(left), Operand(right_reg)); 1912 __ Addu(ToRegister(result), ToRegister(left), Operand(right_reg));
1913 } else { 1913 } else {
1914 ASSERT(right->IsRegister() || right->IsConstantOperand()); 1914 DCHECK(right->IsRegister() || right->IsConstantOperand());
1915 __ Addu(ToRegister(result), ToRegister(left), ToOperand(right)); 1915 __ Addu(ToRegister(result), ToRegister(left), ToOperand(right));
1916 } 1916 }
1917 } else { // can_overflow. 1917 } else { // can_overflow.
1918 Register overflow = scratch0(); 1918 Register overflow = scratch0();
1919 Register scratch = scratch1(); 1919 Register scratch = scratch1();
1920 if (right->IsStackSlot() || 1920 if (right->IsStackSlot() ||
1921 right->IsConstantOperand()) { 1921 right->IsConstantOperand()) {
1922 Register right_reg = EmitLoadRegister(right, scratch); 1922 Register right_reg = EmitLoadRegister(right, scratch);
1923 __ AdduAndCheckForOverflow(ToRegister(result), 1923 __ AdduAndCheckForOverflow(ToRegister(result),
1924 ToRegister(left), 1924 ToRegister(left),
1925 right_reg, 1925 right_reg,
1926 overflow); // Reg at is also used as scratch. 1926 overflow); // Reg at is also used as scratch.
1927 } else { 1927 } else {
1928 ASSERT(right->IsRegister()); 1928 DCHECK(right->IsRegister());
1929 // Because the overflow check macros do not support constant operands, 1929 // Because the overflow check macros do not support constant operands,
1930 // the IsConstantOperand case is handled in the previous if clause. 1930 // the IsConstantOperand case is handled in the previous if clause.
1931 __ AdduAndCheckForOverflow(ToRegister(result), 1931 __ AdduAndCheckForOverflow(ToRegister(result),
1932 ToRegister(left), 1932 ToRegister(left),
1933 ToRegister(right), 1933 ToRegister(right),
1934 overflow); // Reg at is also used as scratch. 1934 overflow); // Reg at is also used as scratch.
1935 } 1935 }
1936 DeoptimizeIf(lt, instr->environment(), overflow, Operand(zero_reg)); 1936 DeoptimizeIf(lt, instr->environment(), overflow, Operand(zero_reg));
1937 } 1937 }
1938 } 1938 }
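DoAddI mirrors DoSubI: AdduAndCheckForOverflow reports signed wrap-around through the overflow register, consumed by the same DeoptimizeIf(lt, ...) pattern. The addition variant of the sign test, again as an illustrative C++ sketch:

  #include <cstdint>
  bool AddWouldOverflow(int32_t left, int32_t right, int32_t* result) {
    uint32_t res = static_cast<uint32_t>(left) + static_cast<uint32_t>(right);
    *result = static_cast<int32_t>(res);
    // Overflow iff both operands have the same sign and the result's differs.
    return (~(left ^ right) & (left ^ *result)) < 0;
  }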
1939 1939
1940 1940
1941 void LCodeGen::DoMathMinMax(LMathMinMax* instr) { 1941 void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
1942 LOperand* left = instr->left(); 1942 LOperand* left = instr->left();
1943 LOperand* right = instr->right(); 1943 LOperand* right = instr->right();
1944 HMathMinMax::Operation operation = instr->hydrogen()->operation(); 1944 HMathMinMax::Operation operation = instr->hydrogen()->operation();
1945 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge; 1945 Condition condition = (operation == HMathMinMax::kMathMin) ? le : ge;
1946 if (instr->hydrogen()->representation().IsSmiOrInteger32()) { 1946 if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
1947 Register left_reg = ToRegister(left); 1947 Register left_reg = ToRegister(left);
1948 Register right_reg = EmitLoadRegister(right, scratch0()); 1948 Register right_reg = EmitLoadRegister(right, scratch0());
1949 Register result_reg = ToRegister(instr->result()); 1949 Register result_reg = ToRegister(instr->result());
1950 Label return_right, done; 1950 Label return_right, done;
1951 Register scratch = scratch1(); 1951 Register scratch = scratch1();
1952 __ Slt(scratch, left_reg, Operand(right_reg)); 1952 __ Slt(scratch, left_reg, Operand(right_reg));
1953 if (condition == ge) { 1953 if (condition == ge) {
1954 __ Movz(result_reg, left_reg, scratch); 1954 __ Movz(result_reg, left_reg, scratch);
1955 __ Movn(result_reg, right_reg, scratch); 1955 __ Movn(result_reg, right_reg, scratch);
1956 } else { 1956 } else {
1957 ASSERT(condition == le); 1957 DCHECK(condition == le);
1958 __ Movn(result_reg, left_reg, scratch); 1958 __ Movn(result_reg, left_reg, scratch);
1959 __ Movz(result_reg, right_reg, scratch); 1959 __ Movz(result_reg, right_reg, scratch);
1960 } 1960 }
1961 } else { 1961 } else {
1962 ASSERT(instr->hydrogen()->representation().IsDouble()); 1962 DCHECK(instr->hydrogen()->representation().IsDouble());
1963 FPURegister left_reg = ToDoubleRegister(left); 1963 FPURegister left_reg = ToDoubleRegister(left);
1964 FPURegister right_reg = ToDoubleRegister(right); 1964 FPURegister right_reg = ToDoubleRegister(right);
1965 FPURegister result_reg = ToDoubleRegister(instr->result()); 1965 FPURegister result_reg = ToDoubleRegister(instr->result());
1966 Label check_nan_left, check_zero, return_left, return_right, done; 1966 Label check_nan_left, check_zero, return_left, return_right, done;
1967 __ BranchF(&check_zero, &check_nan_left, eq, left_reg, right_reg); 1967 __ BranchF(&check_zero, &check_nan_left, eq, left_reg, right_reg);
1968 __ BranchF(&return_left, NULL, condition, left_reg, right_reg); 1968 __ BranchF(&return_left, NULL, condition, left_reg, right_reg);
1969 __ Branch(&return_right); 1969 __ Branch(&return_right);
1970 1970
1971 __ bind(&check_zero); 1971 __ bind(&check_zero);
1972 // left == right != 0. 1972 // left == right != 0.
(...skipping 61 matching lines...)
2034 break; 2034 break;
2035 } 2035 }
2036 default: 2036 default:
2037 UNREACHABLE(); 2037 UNREACHABLE();
2038 break; 2038 break;
2039 } 2039 }
2040 } 2040 }
2041 2041
2042 2042
2043 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 2043 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
2044 ASSERT(ToRegister(instr->context()).is(cp)); 2044 DCHECK(ToRegister(instr->context()).is(cp));
2045 ASSERT(ToRegister(instr->left()).is(a1)); 2045 DCHECK(ToRegister(instr->left()).is(a1));
2046 ASSERT(ToRegister(instr->right()).is(a0)); 2046 DCHECK(ToRegister(instr->right()).is(a0));
2047 ASSERT(ToRegister(instr->result()).is(v0)); 2047 DCHECK(ToRegister(instr->result()).is(v0));
2048 2048
2049 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE); 2049 BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
2050 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 2050 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2051 // Other architectures use a nop here to signal that there is no inlined 2051 // Other architectures use a nop here to signal that there is no inlined
2052 // patchable code. MIPS does not need the nop, since our marker 2052 // patchable code. MIPS does not need the nop, since our marker
2053 // instruction (andi zero_reg) will never be used in normal code. 2053 // instruction (andi zero_reg) will never be used in normal code.
2054 } 2054 }
2055 2055
2056 2056
2057 template<class InstrType> 2057 template<class InstrType>
(...skipping 66 matching lines...)
2124 2124
2125 2125
2126 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { 2126 void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
2127 __ stop("LDebugBreak"); 2127 __ stop("LDebugBreak");
2128 } 2128 }
2129 2129
2130 2130
2131 void LCodeGen::DoBranch(LBranch* instr) { 2131 void LCodeGen::DoBranch(LBranch* instr) {
2132 Representation r = instr->hydrogen()->value()->representation(); 2132 Representation r = instr->hydrogen()->value()->representation();
2133 if (r.IsInteger32() || r.IsSmi()) { 2133 if (r.IsInteger32() || r.IsSmi()) {
2134 ASSERT(!info()->IsStub()); 2134 DCHECK(!info()->IsStub());
2135 Register reg = ToRegister(instr->value()); 2135 Register reg = ToRegister(instr->value());
2136 EmitBranch(instr, ne, reg, Operand(zero_reg)); 2136 EmitBranch(instr, ne, reg, Operand(zero_reg));
2137 } else if (r.IsDouble()) { 2137 } else if (r.IsDouble()) {
2138 ASSERT(!info()->IsStub()); 2138 DCHECK(!info()->IsStub());
2139 DoubleRegister reg = ToDoubleRegister(instr->value()); 2139 DoubleRegister reg = ToDoubleRegister(instr->value());
2140 // Test the double value. Zero and NaN are false. 2140 // Test the double value. Zero and NaN are false.
2141 EmitBranchF(instr, nue, reg, kDoubleRegZero); 2141 EmitBranchF(instr, nue, reg, kDoubleRegZero);
2142 } else { 2142 } else {
2143 ASSERT(r.IsTagged()); 2143 DCHECK(r.IsTagged());
2144 Register reg = ToRegister(instr->value()); 2144 Register reg = ToRegister(instr->value());
2145 HType type = instr->hydrogen()->value()->type(); 2145 HType type = instr->hydrogen()->value()->type();
2146 if (type.IsBoolean()) { 2146 if (type.IsBoolean()) {
2147 ASSERT(!info()->IsStub()); 2147 DCHECK(!info()->IsStub());
2148 __ LoadRoot(at, Heap::kTrueValueRootIndex); 2148 __ LoadRoot(at, Heap::kTrueValueRootIndex);
2149 EmitBranch(instr, eq, reg, Operand(at)); 2149 EmitBranch(instr, eq, reg, Operand(at));
2150 } else if (type.IsSmi()) { 2150 } else if (type.IsSmi()) {
2151 ASSERT(!info()->IsStub()); 2151 DCHECK(!info()->IsStub());
2152 EmitBranch(instr, ne, reg, Operand(zero_reg)); 2152 EmitBranch(instr, ne, reg, Operand(zero_reg));
2153 } else if (type.IsJSArray()) { 2153 } else if (type.IsJSArray()) {
2154 ASSERT(!info()->IsStub()); 2154 DCHECK(!info()->IsStub());
2155 EmitBranch(instr, al, zero_reg, Operand(zero_reg)); 2155 EmitBranch(instr, al, zero_reg, Operand(zero_reg));
2156 } else if (type.IsHeapNumber()) { 2156 } else if (type.IsHeapNumber()) {
2157 ASSERT(!info()->IsStub()); 2157 DCHECK(!info()->IsStub());
2158 DoubleRegister dbl_scratch = double_scratch0(); 2158 DoubleRegister dbl_scratch = double_scratch0();
2159 __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); 2159 __ ldc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
2160 // Test the double value. Zero and NaN are false. 2160 // Test the double value. Zero and NaN are false.
2161 EmitBranchF(instr, nue, dbl_scratch, kDoubleRegZero); 2161 EmitBranchF(instr, nue, dbl_scratch, kDoubleRegZero);
2162 } else if (type.IsString()) { 2162 } else if (type.IsString()) {
2163 ASSERT(!info()->IsStub()); 2163 DCHECK(!info()->IsStub());
2164 __ lw(at, FieldMemOperand(reg, String::kLengthOffset)); 2164 __ lw(at, FieldMemOperand(reg, String::kLengthOffset));
2165 EmitBranch(instr, ne, at, Operand(zero_reg)); 2165 EmitBranch(instr, ne, at, Operand(zero_reg));
2166 } else { 2166 } else {
2167 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); 2167 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
2168 // Avoid deopts in the case where we've never executed this path before. 2168 // Avoid deopts in the case where we've never executed this path before.
2169 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); 2169 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
2170 2170
2171 if (expected.Contains(ToBooleanStub::UNDEFINED)) { 2171 if (expected.Contains(ToBooleanStub::UNDEFINED)) {
2172 // undefined -> false. 2172 // undefined -> false.
2173 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 2173 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
(...skipping 209 matching lines...)
2383 EmitFalseBranchF(instr, eq, input_reg, input_reg); 2383 EmitFalseBranchF(instr, eq, input_reg, input_reg);
2384 2384
2385 Register scratch = scratch0(); 2385 Register scratch = scratch0();
2386 __ FmoveHigh(scratch, input_reg); 2386 __ FmoveHigh(scratch, input_reg);
2387 EmitBranch(instr, eq, scratch, Operand(kHoleNanUpper32)); 2387 EmitBranch(instr, eq, scratch, Operand(kHoleNanUpper32));
2388 } 2388 }
2389 2389
2390 2390
2391 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) { 2391 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) {
2392 Representation rep = instr->hydrogen()->value()->representation(); 2392 Representation rep = instr->hydrogen()->value()->representation();
2393 ASSERT(!rep.IsInteger32()); 2393 DCHECK(!rep.IsInteger32());
2394 Register scratch = ToRegister(instr->temp()); 2394 Register scratch = ToRegister(instr->temp());
2395 2395
2396 if (rep.IsDouble()) { 2396 if (rep.IsDouble()) {
2397 DoubleRegister value = ToDoubleRegister(instr->value()); 2397 DoubleRegister value = ToDoubleRegister(instr->value());
2398 EmitFalseBranchF(instr, ne, value, kDoubleRegZero); 2398 EmitFalseBranchF(instr, ne, value, kDoubleRegZero);
2399 __ FmoveHigh(scratch, value); 2399 __ FmoveHigh(scratch, value);
2400 __ li(at, 0x80000000); 2400 __ li(at, 0x80000000);
2401 } else { 2401 } else {
2402 Register value = ToRegister(instr->value()); 2402 Register value = ToRegister(instr->value());
2403 __ CheckMap(value, 2403 __ CheckMap(value,
(...skipping 113 matching lines...)
2517 case Token::GTE: 2517 case Token::GTE:
2518 return ge; 2518 return ge;
2519 default: 2519 default:
2520 UNREACHABLE(); 2520 UNREACHABLE();
2521 return kNoCondition; 2521 return kNoCondition;
2522 } 2522 }
2523 } 2523 }
2524 2524
2525 2525
2526 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { 2526 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2527 ASSERT(ToRegister(instr->context()).is(cp)); 2527 DCHECK(ToRegister(instr->context()).is(cp));
2528 Token::Value op = instr->op(); 2528 Token::Value op = instr->op();
2529 2529
2530 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 2530 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2531 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2531 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2532 2532
2533 Condition condition = ComputeCompareCondition(op); 2533 Condition condition = ComputeCompareCondition(op);
2534 2534
2535 EmitBranch(instr, condition, v0, Operand(zero_reg)); 2535 EmitBranch(instr, condition, v0, Operand(zero_reg));
2536 } 2536 }
2537 2537
2538 2538
2539 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) { 2539 static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
2540 InstanceType from = instr->from(); 2540 InstanceType from = instr->from();
2541 InstanceType to = instr->to(); 2541 InstanceType to = instr->to();
2542 if (from == FIRST_TYPE) return to; 2542 if (from == FIRST_TYPE) return to;
2543 ASSERT(from == to || to == LAST_TYPE); 2543 DCHECK(from == to || to == LAST_TYPE);
2544 return from; 2544 return from;
2545 } 2545 }
2546 2546
2547 2547
2548 static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) { 2548 static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
2549 InstanceType from = instr->from(); 2549 InstanceType from = instr->from();
2550 InstanceType to = instr->to(); 2550 InstanceType to = instr->to();
2551 if (from == to) return eq; 2551 if (from == to) return eq;
2552 if (to == LAST_TYPE) return hs; 2552 if (to == LAST_TYPE) return hs;
2553 if (from == FIRST_TYPE) return ls; 2553 if (from == FIRST_TYPE) return ls;
(...skipping 42 matching lines...)
2596 2596
2597 2597
2598 // Branches to a label or falls through with the answer in flags. Trashes 2598 // Branches to a label or falls through with the answer in flags. Trashes
2599 // the temp registers, but not the input. 2599 // the temp registers, but not the input.
2600 void LCodeGen::EmitClassOfTest(Label* is_true, 2600 void LCodeGen::EmitClassOfTest(Label* is_true,
2601 Label* is_false, 2601 Label* is_false,
2602 Handle<String> class_name, 2602 Handle<String> class_name,
2603 Register input, 2603 Register input,
2604 Register temp, 2604 Register temp,
2605 Register temp2) { 2605 Register temp2) {
2606 ASSERT(!input.is(temp)); 2606 DCHECK(!input.is(temp));
2607 ASSERT(!input.is(temp2)); 2607 DCHECK(!input.is(temp2));
2608 ASSERT(!temp.is(temp2)); 2608 DCHECK(!temp.is(temp2));
2609 2609
2610 __ JumpIfSmi(input, is_false); 2610 __ JumpIfSmi(input, is_false);
2611 2611
2612 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Function"))) { 2612 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Function"))) {
2613 // Assuming the following assertions, we can use the same compares to test 2613 // Assuming the following assertions, we can use the same compares to test
2614 // for both being a function type and being in the object type range. 2614 // for both being a function type and being in the object type range.
2615 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 2615 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2616 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == 2616 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2617 FIRST_SPEC_OBJECT_TYPE + 1); 2617 FIRST_SPEC_OBJECT_TYPE + 1);
2618 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == 2618 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
(...skipping 58 matching lines...)
2677 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { 2677 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
2678 Register reg = ToRegister(instr->value()); 2678 Register reg = ToRegister(instr->value());
2679 Register temp = ToRegister(instr->temp()); 2679 Register temp = ToRegister(instr->temp());
2680 2680
2681 __ lw(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); 2681 __ lw(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
2682 EmitBranch(instr, eq, temp, Operand(instr->map())); 2682 EmitBranch(instr, eq, temp, Operand(instr->map()));
2683 } 2683 }
2684 2684
2685 2685
2686 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 2686 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2687 ASSERT(ToRegister(instr->context()).is(cp)); 2687 DCHECK(ToRegister(instr->context()).is(cp));
2688 Label true_label, done; 2688 Label true_label, done;
2689 ASSERT(ToRegister(instr->left()).is(a0)); // Object is in a0. 2689 DCHECK(ToRegister(instr->left()).is(a0)); // Object is in a0.
2690 ASSERT(ToRegister(instr->right()).is(a1)); // Function is in a1. 2690 DCHECK(ToRegister(instr->right()).is(a1)); // Function is in a1.
2691 Register result = ToRegister(instr->result()); 2691 Register result = ToRegister(instr->result());
2692 ASSERT(result.is(v0)); 2692 DCHECK(result.is(v0));
2693 2693
2694 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); 2694 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
2695 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 2695 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2696 2696
2697 __ Branch(&true_label, eq, result, Operand(zero_reg)); 2697 __ Branch(&true_label, eq, result, Operand(zero_reg));
2698 __ li(result, Operand(factory()->false_value())); 2698 __ li(result, Operand(factory()->false_value()));
2699 __ Branch(&done); 2699 __ Branch(&done);
2700 __ bind(&true_label); 2700 __ bind(&true_label);
2701 __ li(result, Operand(factory()->true_value())); 2701 __ li(result, Operand(factory()->true_value()));
2702 __ bind(&done); 2702 __ bind(&done);
(...skipping 18 matching lines...)
2721 }; 2721 };
2722 2722
2723 DeferredInstanceOfKnownGlobal* deferred; 2723 DeferredInstanceOfKnownGlobal* deferred;
2724 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); 2724 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
2725 2725
2726 Label done, false_result; 2726 Label done, false_result;
2727 Register object = ToRegister(instr->value()); 2727 Register object = ToRegister(instr->value());
2728 Register temp = ToRegister(instr->temp()); 2728 Register temp = ToRegister(instr->temp());
2729 Register result = ToRegister(instr->result()); 2729 Register result = ToRegister(instr->result());
2730 2730
2731 ASSERT(object.is(a0)); 2731 DCHECK(object.is(a0));
2732 ASSERT(result.is(v0)); 2732 DCHECK(result.is(v0));
2733 2733
2734 // A Smi is not instance of anything. 2734 // A Smi is not instance of anything.
2735 __ JumpIfSmi(object, &false_result); 2735 __ JumpIfSmi(object, &false_result);
2736 2736
2737 // This is the inlined call site instanceof cache. The two occurrences of the 2737 // This is the inlined call site instanceof cache. The two occurrences of the
2738 // hole value will be patched to the last map/result pair generated by the 2738 // hole value will be patched to the last map/result pair generated by the
2739 // instanceof stub. 2739 // instanceof stub.
2740 Label cache_miss; 2740 Label cache_miss;
2741 Register map = temp; 2741 Register map = temp;
2742 __ lw(map, FieldMemOperand(object, HeapObject::kMapOffset)); 2742 __ lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
(...skipping 33 matching lines...)
2776 // Here result has either true or false. Deferred code also produces true or 2776 // Here result has either true or false. Deferred code also produces true or
2777 // false object. 2777 // false object.
2778 __ bind(deferred->exit()); 2778 __ bind(deferred->exit());
2779 __ bind(&done); 2779 __ bind(&done);
2780 } 2780 }
2781 2781
2782 2782
2783 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, 2783 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2784 Label* map_check) { 2784 Label* map_check) {
2785 Register result = ToRegister(instr->result()); 2785 Register result = ToRegister(instr->result());
2786 ASSERT(result.is(v0)); 2786 DCHECK(result.is(v0));
2787 2787
2788 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; 2788 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2789 flags = static_cast<InstanceofStub::Flags>( 2789 flags = static_cast<InstanceofStub::Flags>(
2790 flags | InstanceofStub::kArgsInRegisters); 2790 flags | InstanceofStub::kArgsInRegisters);
2791 flags = static_cast<InstanceofStub::Flags>( 2791 flags = static_cast<InstanceofStub::Flags>(
2792 flags | InstanceofStub::kCallSiteInlineCheck); 2792 flags | InstanceofStub::kCallSiteInlineCheck);
2793 flags = static_cast<InstanceofStub::Flags>( 2793 flags = static_cast<InstanceofStub::Flags>(
2794 flags | InstanceofStub::kReturnTrueFalseObject); 2794 flags | InstanceofStub::kReturnTrueFalseObject);
2795 InstanceofStub stub(isolate(), flags); 2795 InstanceofStub stub(isolate(), flags);
2796 2796
2797 PushSafepointRegistersScope scope(this); 2797 PushSafepointRegistersScope scope(this);
2798 LoadContextFromDeferred(instr->context()); 2798 LoadContextFromDeferred(instr->context());
2799 2799
2800 // Get the temp register reserved by the instruction. This needs to be t0 as 2800 // Get the temp register reserved by the instruction. This needs to be t0 as
2801 // its slot in the pushed safepoint register set is used to communicate the 2801 // its slot in the pushed safepoint register set is used to communicate the
2802 // offset to the location of the map check. 2802 // offset to the location of the map check.
2803 Register temp = ToRegister(instr->temp()); 2803 Register temp = ToRegister(instr->temp());
2804 ASSERT(temp.is(t0)); 2804 DCHECK(temp.is(t0));
2805 __ li(InstanceofStub::right(), instr->function()); 2805 __ li(InstanceofStub::right(), instr->function());
2806 static const int kAdditionalDelta = 7; 2806 static const int kAdditionalDelta = 7;
2807 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; 2807 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
2808 Label before_push_delta; 2808 Label before_push_delta;
2809 __ bind(&before_push_delta); 2809 __ bind(&before_push_delta);
2810 { 2810 {
2811 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); 2811 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
2812 __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE); 2812 __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE);
2813 __ StoreToSafepointRegisterSlot(temp, temp); 2813 __ StoreToSafepointRegisterSlot(temp, temp);
2814 } 2814 }
2815 CallCodeGeneric(stub.GetCode(), 2815 CallCodeGeneric(stub.GetCode(),
2816 RelocInfo::CODE_TARGET, 2816 RelocInfo::CODE_TARGET,
2817 instr, 2817 instr,
2818 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 2818 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
2819 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); 2819 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2820 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 2820 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2821 // Put the result value into the result register slot and 2821 // Put the result value into the result register slot and
2822 // restore all registers. 2822 // restore all registers.
2823 __ StoreToSafepointRegisterSlot(result, result); 2823 __ StoreToSafepointRegisterSlot(result, result);
2824 } 2824 }
2825 2825
2826 2826
2827 void LCodeGen::DoCmpT(LCmpT* instr) { 2827 void LCodeGen::DoCmpT(LCmpT* instr) {
2828 ASSERT(ToRegister(instr->context()).is(cp)); 2828 DCHECK(ToRegister(instr->context()).is(cp));
2829 Token::Value op = instr->op(); 2829 Token::Value op = instr->op();
2830 2830
2831 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 2831 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
2832 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2832 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2833 // On MIPS there is no need for a "no inlined smi code" marker (nop). 2833 // On MIPS there is no need for a "no inlined smi code" marker (nop).
2834 2834
2835 Condition condition = ComputeCompareCondition(op); 2835 Condition condition = ComputeCompareCondition(op);
2836 // A minor optimization that relies on LoadRoot always emitting one 2836 // A minor optimization that relies on LoadRoot always emitting one
2837 // instruction. 2837 // instruction.
2838 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm()); 2838 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm());
2839 Label done, check; 2839 Label done, check;
2840 __ Branch(USE_DELAY_SLOT, &done, condition, v0, Operand(zero_reg)); 2840 __ Branch(USE_DELAY_SLOT, &done, condition, v0, Operand(zero_reg));
2841 __ bind(&check); 2841 __ bind(&check);
2842 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); 2842 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
2843 ASSERT_EQ(1, masm()->InstructionsGeneratedSince(&check)); 2843 DCHECK_EQ(1, masm()->InstructionsGeneratedSince(&check));
2844 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); 2844 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2845 __ bind(&done); 2845 __ bind(&done);
2846 } 2846 }
2847 2847
2848 2848
2849 void LCodeGen::DoReturn(LReturn* instr) { 2849 void LCodeGen::DoReturn(LReturn* instr) {
2850 if (FLAG_trace && info()->IsOptimizing()) { 2850 if (FLAG_trace && info()->IsOptimizing()) {
2851 // Push the return value on the stack as the parameter. 2851 // Push the return value on the stack as the parameter.
2852 // Runtime::TraceExit returns its parameter in v0. We're leaving the code 2852 // Runtime::TraceExit returns its parameter in v0. We're leaving the code
2853 // managed by the register allocator and tearing down the frame, so it's 2853 // managed by the register allocator and tearing down the frame, so it's
(...skipping 38 matching lines...)
2892 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle()))); 2892 __ li(at, Operand(Handle<Object>(instr->hydrogen()->cell().handle())));
2893 __ lw(result, FieldMemOperand(at, Cell::kValueOffset)); 2893 __ lw(result, FieldMemOperand(at, Cell::kValueOffset));
2894 if (instr->hydrogen()->RequiresHoleCheck()) { 2894 if (instr->hydrogen()->RequiresHoleCheck()) {
2895 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 2895 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2896 DeoptimizeIf(eq, instr->environment(), result, Operand(at)); 2896 DeoptimizeIf(eq, instr->environment(), result, Operand(at));
2897 } 2897 }
2898 } 2898 }
2899 2899
2900 2900
2901 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { 2901 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2902 ASSERT(ToRegister(instr->context()).is(cp)); 2902 DCHECK(ToRegister(instr->context()).is(cp));
2903 ASSERT(ToRegister(instr->global_object()).is(LoadIC::ReceiverRegister())); 2903 DCHECK(ToRegister(instr->global_object()).is(LoadIC::ReceiverRegister()));
2904 ASSERT(ToRegister(instr->result()).is(v0)); 2904 DCHECK(ToRegister(instr->result()).is(v0));
2905 2905
2906 __ li(LoadIC::NameRegister(), Operand(instr->name())); 2906 __ li(LoadIC::NameRegister(), Operand(instr->name()));
2907 if (FLAG_vector_ics) { 2907 if (FLAG_vector_ics) {
2908 Register vector = ToRegister(instr->temp_vector()); 2908 Register vector = ToRegister(instr->temp_vector());
2909 ASSERT(vector.is(LoadIC::VectorRegister())); 2909 DCHECK(vector.is(LoadIC::VectorRegister()));
2910 __ li(vector, instr->hydrogen()->feedback_vector()); 2910 __ li(vector, instr->hydrogen()->feedback_vector());
2911 // No need to allocate this register. 2911 // No need to allocate this register.
2912 ASSERT(LoadIC::SlotRegister().is(a0)); 2912 DCHECK(LoadIC::SlotRegister().is(a0));
2913 __ li(LoadIC::SlotRegister(), 2913 __ li(LoadIC::SlotRegister(),
2914 Operand(Smi::FromInt(instr->hydrogen()->slot()))); 2914 Operand(Smi::FromInt(instr->hydrogen()->slot())));
2915 } 2915 }
2916 ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL; 2916 ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL;
2917 Handle<Code> ic = LoadIC::initialize_stub(isolate(), mode); 2917 Handle<Code> ic = LoadIC::initialize_stub(isolate(), mode);
2918 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2918 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2919 } 2919 }
2920 2920
2921 2921
2922 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { 2922 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
(...skipping 102 matching lines...)
3025 if (!access.IsInobject()) { 3025 if (!access.IsInobject()) {
3026 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); 3026 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
3027 object = result; 3027 object = result;
3028 } 3028 }
3029 MemOperand operand = FieldMemOperand(object, offset); 3029 MemOperand operand = FieldMemOperand(object, offset);
3030 __ Load(result, operand, access.representation()); 3030 __ Load(result, operand, access.representation());
3031 } 3031 }
3032 3032
3033 3033
3034 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { 3034 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
3035 ASSERT(ToRegister(instr->context()).is(cp)); 3035 DCHECK(ToRegister(instr->context()).is(cp));
3036 ASSERT(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); 3036 DCHECK(ToRegister(instr->object()).is(LoadIC::ReceiverRegister()));
3037 ASSERT(ToRegister(instr->result()).is(v0)); 3037 DCHECK(ToRegister(instr->result()).is(v0));
3038 3038
3039 // Name is always in a2. 3039 // Name is always in a2.
3040 __ li(LoadIC::NameRegister(), Operand(instr->name())); 3040 __ li(LoadIC::NameRegister(), Operand(instr->name()));
3041 if (FLAG_vector_ics) { 3041 if (FLAG_vector_ics) {
3042 Register vector = ToRegister(instr->temp_vector()); 3042 Register vector = ToRegister(instr->temp_vector());
3043 ASSERT(vector.is(LoadIC::VectorRegister())); 3043 DCHECK(vector.is(LoadIC::VectorRegister()));
3044 __ li(vector, instr->hydrogen()->feedback_vector()); 3044 __ li(vector, instr->hydrogen()->feedback_vector());
3045 // No need to allocate this register. 3045 // No need to allocate this register.
3046 ASSERT(LoadIC::SlotRegister().is(a0)); 3046 DCHECK(LoadIC::SlotRegister().is(a0));
3047 __ li(LoadIC::SlotRegister(), 3047 __ li(LoadIC::SlotRegister(),
3048 Operand(Smi::FromInt(instr->hydrogen()->slot()))); 3048 Operand(Smi::FromInt(instr->hydrogen()->slot())));
3049 } 3049 }
3050 Handle<Code> ic = LoadIC::initialize_stub(isolate(), NOT_CONTEXTUAL); 3050 Handle<Code> ic = LoadIC::initialize_stub(isolate(), NOT_CONTEXTUAL);
3051 CallCode(ic, RelocInfo::CODE_TARGET, instr); 3051 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3052 } 3052 }
3053 3053
3054 3054
3055 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { 3055 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
3056 Register scratch = scratch0(); 3056 Register scratch = scratch0();
(...skipping 262 matching lines...)
3319 if (key_is_constant) { 3319 if (key_is_constant) {
3320 return MemOperand(base, (constant_key << element_size) + base_offset); 3320 return MemOperand(base, (constant_key << element_size) + base_offset);
3321 } 3321 }
3322 3322
3323 if (base_offset == 0) { 3323 if (base_offset == 0) {
3324 if (shift_size >= 0) { 3324 if (shift_size >= 0) {
3325 __ sll(scratch0(), key, shift_size); 3325 __ sll(scratch0(), key, shift_size);
3326 __ Addu(scratch0(), base, scratch0()); 3326 __ Addu(scratch0(), base, scratch0());
3327 return MemOperand(scratch0()); 3327 return MemOperand(scratch0());
3328 } else { 3328 } else {
3329 ASSERT_EQ(-1, shift_size); 3329 DCHECK_EQ(-1, shift_size);
3330 __ srl(scratch0(), key, 1); 3330 __ srl(scratch0(), key, 1);
3331 __ Addu(scratch0(), base, scratch0()); 3331 __ Addu(scratch0(), base, scratch0());
3332 return MemOperand(scratch0()); 3332 return MemOperand(scratch0());
3333 } 3333 }
3334 } 3334 }
3335 3335
3336 if (shift_size >= 0) { 3336 if (shift_size >= 0) {
3337 __ sll(scratch0(), key, shift_size); 3337 __ sll(scratch0(), key, shift_size);
3338 __ Addu(scratch0(), base, scratch0()); 3338 __ Addu(scratch0(), base, scratch0());
3339 return MemOperand(scratch0(), base_offset); 3339 return MemOperand(scratch0(), base_offset);
3340 } else { 3340 } else {
3341 ASSERT_EQ(-1, shift_size); 3341 DCHECK_EQ(-1, shift_size);
3342 __ sra(scratch0(), key, 1); 3342 __ sra(scratch0(), key, 1);
3343 __ Addu(scratch0(), base, scratch0()); 3343 __ Addu(scratch0(), base, scratch0());
3344 return MemOperand(scratch0(), base_offset); 3344 return MemOperand(scratch0(), base_offset);
3345 } 3345 }
3346 } 3346 }
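The keyed-operand helper above reduces each case to base + (key << shift_size) + base_offset, with shift_size == -1 encoding a smi key that is untagged by an arithmetic shift right of one instead of being scaled up. A compact sketch of that address arithmetic (illustrative C++, assuming 31-bit smis):

  #include <cstdint>
  int32_t KeyedElementAddress(int32_t base, int32_t key, int shift_size,
                              int32_t base_offset) {
    int32_t scaled = (shift_size >= 0)
        ? static_cast<int32_t>(static_cast<uint32_t>(key) << shift_size)
        : (key >> 1);  // sra by 1: smi-untag
    return base + scaled + base_offset;
  }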
3347 3347
3348 3348
3349 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 3349 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
3350 ASSERT(ToRegister(instr->context()).is(cp)); 3350 DCHECK(ToRegister(instr->context()).is(cp));
3351 ASSERT(ToRegister(instr->object()).is(LoadIC::ReceiverRegister())); 3351 DCHECK(ToRegister(instr->object()).is(LoadIC::ReceiverRegister()));
3352 ASSERT(ToRegister(instr->key()).is(LoadIC::NameRegister())); 3352 DCHECK(ToRegister(instr->key()).is(LoadIC::NameRegister()));
3353 3353
3354 if (FLAG_vector_ics) { 3354 if (FLAG_vector_ics) {
3355 Register vector = ToRegister(instr->temp_vector()); 3355 Register vector = ToRegister(instr->temp_vector());
3356 ASSERT(vector.is(LoadIC::VectorRegister())); 3356 DCHECK(vector.is(LoadIC::VectorRegister()));
3357 __ li(vector, instr->hydrogen()->feedback_vector()); 3357 __ li(vector, instr->hydrogen()->feedback_vector());
3358 // No need to allocate this register. 3358 // No need to allocate this register.
3359 ASSERT(LoadIC::SlotRegister().is(a0)); 3359 DCHECK(LoadIC::SlotRegister().is(a0));
3360 __ li(LoadIC::SlotRegister(), 3360 __ li(LoadIC::SlotRegister(),
3361 Operand(Smi::FromInt(instr->hydrogen()->slot()))); 3361 Operand(Smi::FromInt(instr->hydrogen()->slot())));
3362 } 3362 }
3363 3363
3364 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 3364 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
3365 CallCode(ic, RelocInfo::CODE_TARGET, instr); 3365 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3366 } 3366 }
3367 3367
3368 3368
3369 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 3369 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
(...skipping 99 matching lines...)
3469 } 3469 }
3470 } 3470 }
3471 3471
3472 3472
3473 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { 3473 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
3474 Register receiver = ToRegister(instr->receiver()); 3474 Register receiver = ToRegister(instr->receiver());
3475 Register function = ToRegister(instr->function()); 3475 Register function = ToRegister(instr->function());
3476 Register length = ToRegister(instr->length()); 3476 Register length = ToRegister(instr->length());
3477 Register elements = ToRegister(instr->elements()); 3477 Register elements = ToRegister(instr->elements());
3478 Register scratch = scratch0(); 3478 Register scratch = scratch0();
3479 ASSERT(receiver.is(a0)); // Used for parameter count. 3479 DCHECK(receiver.is(a0)); // Used for parameter count.
3480 ASSERT(function.is(a1)); // Required by InvokeFunction. 3480 DCHECK(function.is(a1)); // Required by InvokeFunction.
3481 ASSERT(ToRegister(instr->result()).is(v0)); 3481 DCHECK(ToRegister(instr->result()).is(v0));
3482 3482
3483 // Copy the arguments to this function possibly from the 3483 // Copy the arguments to this function possibly from the
3484 // adaptor frame below it. 3484 // adaptor frame below it.
3485 const uint32_t kArgumentsLimit = 1 * KB; 3485 const uint32_t kArgumentsLimit = 1 * KB;
3486 DeoptimizeIf(hi, instr->environment(), length, Operand(kArgumentsLimit)); 3486 DeoptimizeIf(hi, instr->environment(), length, Operand(kArgumentsLimit));
3487 3487
3488 // Push the receiver and use the register to keep the original 3488 // Push the receiver and use the register to keep the original
3489 // number of arguments. 3489 // number of arguments.
3490 __ push(receiver); 3490 __ push(receiver);
3491 __ Move(receiver, length); 3491 __ Move(receiver, length);
3492 // The arguments are at a one-pointer-size offset from elements. 3492 // The arguments are at a one-pointer-size offset from elements.
3493 __ Addu(elements, elements, Operand(1 * kPointerSize)); 3493 __ Addu(elements, elements, Operand(1 * kPointerSize));
3494 3494
3495 // Loop through the arguments pushing them onto the execution 3495 // Loop through the arguments pushing them onto the execution
3496 // stack. 3496 // stack.
3497 Label invoke, loop; 3497 Label invoke, loop;
3498 // length is a small non-negative integer, due to the test above. 3498 // length is a small non-negative integer, due to the test above.
3499 __ Branch(USE_DELAY_SLOT, &invoke, eq, length, Operand(zero_reg)); 3499 __ Branch(USE_DELAY_SLOT, &invoke, eq, length, Operand(zero_reg));
3500 __ sll(scratch, length, 2); 3500 __ sll(scratch, length, 2);
3501 __ bind(&loop); 3501 __ bind(&loop);
3502 __ Addu(scratch, elements, scratch); 3502 __ Addu(scratch, elements, scratch);
3503 __ lw(scratch, MemOperand(scratch)); 3503 __ lw(scratch, MemOperand(scratch));
3504 __ push(scratch); 3504 __ push(scratch);
3505 __ Subu(length, length, Operand(1)); 3505 __ Subu(length, length, Operand(1));
3506 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg)); 3506 __ Branch(USE_DELAY_SLOT, &loop, ne, length, Operand(zero_reg));
3507 __ sll(scratch, length, 2); 3507 __ sll(scratch, length, 2);
3508 3508
3509 __ bind(&invoke); 3509 __ bind(&invoke);
3510 ASSERT(instr->HasPointerMap()); 3510 DCHECK(instr->HasPointerMap());
3511 LPointerMap* pointers = instr->pointer_map(); 3511 LPointerMap* pointers = instr->pointer_map();
3512 SafepointGenerator safepoint_generator( 3512 SafepointGenerator safepoint_generator(
3513 this, pointers, Safepoint::kLazyDeopt); 3513 this, pointers, Safepoint::kLazyDeopt);
3514 // The number of arguments is stored in receiver which is a0, as expected 3514 // The number of arguments is stored in receiver which is a0, as expected
3515 // by InvokeFunction. 3515 // by InvokeFunction.
3516 ParameterCount actual(receiver); 3516 ParameterCount actual(receiver);
3517 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator); 3517 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
3518 } 3518 }
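DoApplyArguments caps the argument count at kArgumentsLimit (deoptimizing beyond it), then pushes the arguments from the highest index down to 1 before handing the count to InvokeFunction in a0. A hedged model of the copy loop (names are stand-ins; Push becomes a vector append):

  #include <cstdint>
  #include <vector>
  void PushApplyArguments(const intptr_t* elements, int length,
                          std::vector<intptr_t>* stack) {
    // The arguments start one pointer past 'elements' (the Addu above), so
    // slots 1..length are pushed last-to-first, as in the sll/lw/push loop.
    for (int i = length; i > 0; --i) stack->push_back(elements[i]);
  }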
3519 3519
3520 3520
(...skipping 19 matching lines...)
3540 } 3540 }
3541 3541
3542 3542
3543 void LCodeGen::DoContext(LContext* instr) { 3543 void LCodeGen::DoContext(LContext* instr) {
3544 // If there is a non-return use, the context must be moved to a register. 3544 // If there is a non-return use, the context must be moved to a register.
3545 Register result = ToRegister(instr->result()); 3545 Register result = ToRegister(instr->result());
3546 if (info()->IsOptimizing()) { 3546 if (info()->IsOptimizing()) {
3547 __ lw(result, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3547 __ lw(result, MemOperand(fp, StandardFrameConstants::kContextOffset));
3548 } else { 3548 } else {
3549 // If there is no frame, the context must be in cp. 3549 // If there is no frame, the context must be in cp.
3550 ASSERT(result.is(cp)); 3550 DCHECK(result.is(cp));
3551 } 3551 }
3552 } 3552 }
3553 3553
3554 3554
3555 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { 3555 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3556 ASSERT(ToRegister(instr->context()).is(cp)); 3556 DCHECK(ToRegister(instr->context()).is(cp));
3557 __ li(scratch0(), instr->hydrogen()->pairs()); 3557 __ li(scratch0(), instr->hydrogen()->pairs());
3558 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); 3558 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
3559 // The context is the first argument. 3559 // The context is the first argument.
3560 __ Push(cp, scratch0(), scratch1()); 3560 __ Push(cp, scratch0(), scratch1());
3561 CallRuntime(Runtime::kDeclareGlobals, 3, instr); 3561 CallRuntime(Runtime::kDeclareGlobals, 3, instr);
3562 } 3562 }
3563 3563
3564 3564
3565 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, 3565 void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
3566 int formal_parameter_count, 3566 int formal_parameter_count,
(...skipping 30 matching lines...)
3597 } else { 3597 } else {
3598 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 3598 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3599 ParameterCount count(arity); 3599 ParameterCount count(arity);
3600 ParameterCount expected(formal_parameter_count); 3600 ParameterCount expected(formal_parameter_count);
3601 __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator); 3601 __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator);
3602 } 3602 }
3603 } 3603 }
3604 3604
3605 3605
3606 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) { 3606 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
3607 ASSERT(instr->context() != NULL); 3607 DCHECK(instr->context() != NULL);
3608 ASSERT(ToRegister(instr->context()).is(cp)); 3608 DCHECK(ToRegister(instr->context()).is(cp));
3609 Register input = ToRegister(instr->value()); 3609 Register input = ToRegister(instr->value());
3610 Register result = ToRegister(instr->result()); 3610 Register result = ToRegister(instr->result());
3611 Register scratch = scratch0(); 3611 Register scratch = scratch0();
3612 3612
3613 // Deoptimize if not a heap number. 3613 // Deoptimize if not a heap number.
3614 __ lw(scratch, FieldMemOperand(input, HeapObject::kMapOffset)); 3614 __ lw(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
3615 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 3615 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3616 DeoptimizeIf(ne, instr->environment(), scratch, Operand(at)); 3616 DeoptimizeIf(ne, instr->environment(), scratch, Operand(at));
3617 3617
3618 Label done; 3618 Label done;
(...skipping 218 matching lines...)
3837 DoubleRegister result = ToDoubleRegister(instr->result()); 3837 DoubleRegister result = ToDoubleRegister(instr->result());
3838 __ sqrt_d(result, input); 3838 __ sqrt_d(result, input);
3839 } 3839 }
3840 3840
3841 3841
3842 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) { 3842 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
3843 DoubleRegister input = ToDoubleRegister(instr->value()); 3843 DoubleRegister input = ToDoubleRegister(instr->value());
3844 DoubleRegister result = ToDoubleRegister(instr->result()); 3844 DoubleRegister result = ToDoubleRegister(instr->result());
3845 DoubleRegister temp = ToDoubleRegister(instr->temp()); 3845 DoubleRegister temp = ToDoubleRegister(instr->temp());
3846 3846
3847 ASSERT(!input.is(result)); 3847 DCHECK(!input.is(result));
3848 3848
3849 // Note that according to ECMA-262 15.8.2.13: 3849 // Note that according to ECMA-262 15.8.2.13:
3850 // Math.pow(-Infinity, 0.5) == Infinity 3850 // Math.pow(-Infinity, 0.5) == Infinity
3851 // Math.sqrt(-Infinity) == NaN 3851 // Math.sqrt(-Infinity) == NaN
3852 Label done; 3852 Label done;
3853 __ Move(temp, -V8_INFINITY); 3853 __ Move(temp, -V8_INFINITY);
3854 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, temp, input); 3854 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, temp, input);
3855 // Set up Infinity in the delay slot. 3855 // Set up Infinity in the delay slot.
3856 // result is overwritten if the branch is not taken. 3856 // result is overwritten if the branch is not taken.
3857 __ neg_d(result, temp); 3857 __ neg_d(result, temp);
3858 3858
3859 // Add +0 to convert -0 to +0. 3859 // Add +0 to convert -0 to +0.
3860 __ add_d(result, input, kDoubleRegZero); 3860 __ add_d(result, input, kDoubleRegZero);
3861 __ sqrt_d(result, result); 3861 __ sqrt_d(result, result);
3862 __ bind(&done); 3862 __ bind(&done);
3863 } 3863 }
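DoMathPowHalf implements the ECMA-262 15.8.2.13 corner cases called out in its comment: Math.pow(-Infinity, 0.5) is +Infinity even though sqrt(-Infinity) would be NaN, and adding +0 first canonicalizes -0 to +0. A behavioral sketch in plain C++ (not the generated code):

  #include <cmath>
  #include <limits>
  double PowHalf(double x) {
    const double kInf = std::numeric_limits<double>::infinity();
    if (x == -kInf) return kInf;  // the BranchF fast path above
    return std::sqrt(x + 0.0);    // add_d with kDoubleRegZero, then sqrt_d
  }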
3864 3864
3865 3865
3866 void LCodeGen::DoPower(LPower* instr) { 3866 void LCodeGen::DoPower(LPower* instr) {
3867 Representation exponent_type = instr->hydrogen()->right()->representation(); 3867 Representation exponent_type = instr->hydrogen()->right()->representation();
3868 // Having marked this as a call, we can use any registers. 3868 // Having marked this as a call, we can use any registers.
3869 // Just make sure that the input/output registers are the expected ones. 3869 // Just make sure that the input/output registers are the expected ones.
3870 ASSERT(!instr->right()->IsDoubleRegister() || 3870 DCHECK(!instr->right()->IsDoubleRegister() ||
3871 ToDoubleRegister(instr->right()).is(f4)); 3871 ToDoubleRegister(instr->right()).is(f4));
3872 ASSERT(!instr->right()->IsRegister() || 3872 DCHECK(!instr->right()->IsRegister() ||
3873 ToRegister(instr->right()).is(a2)); 3873 ToRegister(instr->right()).is(a2));
3874 ASSERT(ToDoubleRegister(instr->left()).is(f2)); 3874 DCHECK(ToDoubleRegister(instr->left()).is(f2));
3875 ASSERT(ToDoubleRegister(instr->result()).is(f0)); 3875 DCHECK(ToDoubleRegister(instr->result()).is(f0));
3876 3876
3877 if (exponent_type.IsSmi()) { 3877 if (exponent_type.IsSmi()) {
3878 MathPowStub stub(isolate(), MathPowStub::TAGGED); 3878 MathPowStub stub(isolate(), MathPowStub::TAGGED);
3879 __ CallStub(&stub); 3879 __ CallStub(&stub);
3880 } else if (exponent_type.IsTagged()) { 3880 } else if (exponent_type.IsTagged()) {
3881 Label no_deopt; 3881 Label no_deopt;
3882 __ JumpIfSmi(a2, &no_deopt); 3882 __ JumpIfSmi(a2, &no_deopt);
3883 __ lw(t3, FieldMemOperand(a2, HeapObject::kMapOffset)); 3883 __ lw(t3, FieldMemOperand(a2, HeapObject::kMapOffset));
3884 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 3884 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3885 DeoptimizeIf(ne, instr->environment(), t3, Operand(at)); 3885 DeoptimizeIf(ne, instr->environment(), t3, Operand(at));
3886 __ bind(&no_deopt); 3886 __ bind(&no_deopt);
3887 MathPowStub stub(isolate(), MathPowStub::TAGGED); 3887 MathPowStub stub(isolate(), MathPowStub::TAGGED);
3888 __ CallStub(&stub); 3888 __ CallStub(&stub);
3889 } else if (exponent_type.IsInteger32()) { 3889 } else if (exponent_type.IsInteger32()) {
3890 MathPowStub stub(isolate(), MathPowStub::INTEGER); 3890 MathPowStub stub(isolate(), MathPowStub::INTEGER);
3891 __ CallStub(&stub); 3891 __ CallStub(&stub);
3892 } else { 3892 } else {
3893 ASSERT(exponent_type.IsDouble()); 3893 DCHECK(exponent_type.IsDouble());
3894 MathPowStub stub(isolate(), MathPowStub::DOUBLE); 3894 MathPowStub stub(isolate(), MathPowStub::DOUBLE);
3895 __ CallStub(&stub); 3895 __ CallStub(&stub);
3896 } 3896 }
3897 } 3897 }
3898 3898
3899 3899
3900 void LCodeGen::DoMathExp(LMathExp* instr) { 3900 void LCodeGen::DoMathExp(LMathExp* instr) {
3901 DoubleRegister input = ToDoubleRegister(instr->value()); 3901 DoubleRegister input = ToDoubleRegister(instr->value());
3902 DoubleRegister result = ToDoubleRegister(instr->result()); 3902 DoubleRegister result = ToDoubleRegister(instr->result());
3903 DoubleRegister double_scratch1 = ToDoubleRegister(instr->double_temp()); 3903 DoubleRegister double_scratch1 = ToDoubleRegister(instr->double_temp());
(...skipping 17 matching lines...)
3921 3921
3922 3922
3923 void LCodeGen::DoMathClz32(LMathClz32* instr) { 3923 void LCodeGen::DoMathClz32(LMathClz32* instr) {
3924 Register input = ToRegister(instr->value()); 3924 Register input = ToRegister(instr->value());
3925 Register result = ToRegister(instr->result()); 3925 Register result = ToRegister(instr->result());
3926 __ Clz(result, input); 3926 __ Clz(result, input);
3927 } 3927 }
3928 3928
3929 3929
3930 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) { 3930 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3931 ASSERT(ToRegister(instr->context()).is(cp)); 3931 DCHECK(ToRegister(instr->context()).is(cp));
3932 ASSERT(ToRegister(instr->function()).is(a1)); 3932 DCHECK(ToRegister(instr->function()).is(a1));
3933 ASSERT(instr->HasPointerMap()); 3933 DCHECK(instr->HasPointerMap());
3934 3934
3935 Handle<JSFunction> known_function = instr->hydrogen()->known_function(); 3935 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
3936 if (known_function.is_null()) { 3936 if (known_function.is_null()) {
3937 LPointerMap* pointers = instr->pointer_map(); 3937 LPointerMap* pointers = instr->pointer_map();
3938 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 3938 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3939 ParameterCount count(instr->arity()); 3939 ParameterCount count(instr->arity());
3940 __ InvokeFunction(a1, count, CALL_FUNCTION, generator); 3940 __ InvokeFunction(a1, count, CALL_FUNCTION, generator);
3941 } else { 3941 } else {
3942 CallKnownFunction(known_function, 3942 CallKnownFunction(known_function,
3943 instr->hydrogen()->formal_parameter_count(), 3943 instr->hydrogen()->formal_parameter_count(),
3944 instr->arity(), 3944 instr->arity(),
3945 instr, 3945 instr,
3946 A1_CONTAINS_TARGET); 3946 A1_CONTAINS_TARGET);
3947 } 3947 }
3948 } 3948 }
3949 3949
3950 3950
3951 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) { 3951 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
3952 ASSERT(ToRegister(instr->result()).is(v0)); 3952 DCHECK(ToRegister(instr->result()).is(v0));
3953 3953
3954 LPointerMap* pointers = instr->pointer_map(); 3954 LPointerMap* pointers = instr->pointer_map();
3955 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 3955 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3956 3956
3957 if (instr->target()->IsConstantOperand()) { 3957 if (instr->target()->IsConstantOperand()) {
3958 LConstantOperand* target = LConstantOperand::cast(instr->target()); 3958 LConstantOperand* target = LConstantOperand::cast(instr->target());
3959 Handle<Code> code = Handle<Code>::cast(ToHandle(target)); 3959 Handle<Code> code = Handle<Code>::cast(ToHandle(target));
3960 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET)); 3960 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET));
3961 __ Call(code, RelocInfo::CODE_TARGET); 3961 __ Call(code, RelocInfo::CODE_TARGET);
3962 } else { 3962 } else {
3963 ASSERT(instr->target()->IsRegister()); 3963 DCHECK(instr->target()->IsRegister());
3964 Register target = ToRegister(instr->target()); 3964 Register target = ToRegister(instr->target());
3965 generator.BeforeCall(__ CallSize(target)); 3965 generator.BeforeCall(__ CallSize(target));
3966 __ Addu(target, target, Operand(Code::kHeaderSize - kHeapObjectTag)); 3966 __ Addu(target, target, Operand(Code::kHeaderSize - kHeapObjectTag));
3967 __ Call(target); 3967 __ Call(target);
3968 } 3968 }
3969 generator.AfterCall(); 3969 generator.AfterCall();
3970 } 3970 }
3971 3971
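A note on the Addu in the register branch above: V8 heap pointers carry a one-bit tag (kHeapObjectTag is 1) and a Code object's instructions start after its header, so the callable entry address comes from the tagged pointer in a single add. A sketch under those assumptions:

#include <stdint.h>

// Hypothetical helper, not the V8 API: tagged Code pointer -> entry address.
static uintptr_t CodeEntryAddress(uintptr_t tagged_code_ptr,
                                  uintptr_t header_size) {
  const uintptr_t kTag = 1;  // kHeapObjectTag
  return tagged_code_ptr + header_size - kTag;
}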
3972 3972
3973 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { 3973 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
3974 ASSERT(ToRegister(instr->function()).is(a1)); 3974 DCHECK(ToRegister(instr->function()).is(a1));
3975 ASSERT(ToRegister(instr->result()).is(v0)); 3975 DCHECK(ToRegister(instr->result()).is(v0));
3976 3976
3977 if (instr->hydrogen()->pass_argument_count()) { 3977 if (instr->hydrogen()->pass_argument_count()) {
3978 __ li(a0, Operand(instr->arity())); 3978 __ li(a0, Operand(instr->arity()));
3979 } 3979 }
3980 3980
3981 // Change context. 3981 // Change context.
3982 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); 3982 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
3983 3983
3984 // Load the code entry address. 3984 // Load the code entry address.
3985 __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); 3985 __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
3986 __ Call(at); 3986 __ Call(at);
3987 3987
3988 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); 3988 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
3989 } 3989 }
3990 3990
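DoCallJSFunction above is the direct JS call fast path: optionally materialize the argument count, switch to the callee's context, then call through the function's code-entry field. A structural sketch with hypothetical types (the real offsets come from JSFunction):

// Illustrative only, hypothetical layout.
struct JSFunctionLike {
  void* context;          // cf. JSFunction::kContextOffset
  void (*code_entry)();   // cf. JSFunction::kCodeEntryOffset
};

static void CallJSDirect(JSFunctionLike* f, void** cp) {
  *cp = f->context;  // change context to the callee's
  f->code_entry();   // call through the code entry address
}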
3991 3991
3992 void LCodeGen::DoCallFunction(LCallFunction* instr) { 3992 void LCodeGen::DoCallFunction(LCallFunction* instr) {
3993 ASSERT(ToRegister(instr->context()).is(cp)); 3993 DCHECK(ToRegister(instr->context()).is(cp));
3994 ASSERT(ToRegister(instr->function()).is(a1)); 3994 DCHECK(ToRegister(instr->function()).is(a1));
3995 ASSERT(ToRegister(instr->result()).is(v0)); 3995 DCHECK(ToRegister(instr->result()).is(v0));
3996 3996
3997 int arity = instr->arity(); 3997 int arity = instr->arity();
3998 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags()); 3998 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
3999 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3999 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4000 } 4000 }
4001 4001
4002 4002
4003 void LCodeGen::DoCallNew(LCallNew* instr) { 4003 void LCodeGen::DoCallNew(LCallNew* instr) {
4004 ASSERT(ToRegister(instr->context()).is(cp)); 4004 DCHECK(ToRegister(instr->context()).is(cp));
4005 ASSERT(ToRegister(instr->constructor()).is(a1)); 4005 DCHECK(ToRegister(instr->constructor()).is(a1));
4006 ASSERT(ToRegister(instr->result()).is(v0)); 4006 DCHECK(ToRegister(instr->result()).is(v0));
4007 4007
4008 __ li(a0, Operand(instr->arity())); 4008 __ li(a0, Operand(instr->arity()));
4009 // No cell in a2 for construct type feedback in optimized code. 4009 // No cell in a2 for construct type feedback in optimized code.
4010 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 4010 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4011 CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS); 4011 CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
4012 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr); 4012 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
4013 } 4013 }
4014 4014
4015 4015
4016 void LCodeGen::DoCallNewArray(LCallNewArray* instr) { 4016 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
4017 ASSERT(ToRegister(instr->context()).is(cp)); 4017 DCHECK(ToRegister(instr->context()).is(cp));
4018 ASSERT(ToRegister(instr->constructor()).is(a1)); 4018 DCHECK(ToRegister(instr->constructor()).is(a1));
4019 ASSERT(ToRegister(instr->result()).is(v0)); 4019 DCHECK(ToRegister(instr->result()).is(v0));
4020 4020
4021 __ li(a0, Operand(instr->arity())); 4021 __ li(a0, Operand(instr->arity()));
4022 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); 4022 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
4023 ElementsKind kind = instr->hydrogen()->elements_kind(); 4023 ElementsKind kind = instr->hydrogen()->elements_kind();
4024 AllocationSiteOverrideMode override_mode = 4024 AllocationSiteOverrideMode override_mode =
4025 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE) 4025 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE)
4026 ? DISABLE_ALLOCATION_SITES 4026 ? DISABLE_ALLOCATION_SITES
4027 : DONT_OVERRIDE; 4027 : DONT_OVERRIDE;
4028 4028
4029 if (instr->arity() == 0) { 4029 if (instr->arity() == 0) {
(...skipping 65 matching lines...)
4095 4095
4096 if (access.IsExternalMemory()) { 4096 if (access.IsExternalMemory()) {
4097 Register value = ToRegister(instr->value()); 4097 Register value = ToRegister(instr->value());
4098 MemOperand operand = MemOperand(object, offset); 4098 MemOperand operand = MemOperand(object, offset);
4099 __ Store(value, operand, representation); 4099 __ Store(value, operand, representation);
4100 return; 4100 return;
4101 } 4101 }
4102 4102
4103 __ AssertNotSmi(object); 4103 __ AssertNotSmi(object);
4104 4104
4105 ASSERT(!representation.IsSmi() || 4105 DCHECK(!representation.IsSmi() ||
4106 !instr->value()->IsConstantOperand() || 4106 !instr->value()->IsConstantOperand() ||
4107 IsSmi(LConstantOperand::cast(instr->value()))); 4107 IsSmi(LConstantOperand::cast(instr->value())));
4108 if (representation.IsDouble()) { 4108 if (representation.IsDouble()) {
4109 ASSERT(access.IsInobject()); 4109 DCHECK(access.IsInobject());
4110 ASSERT(!instr->hydrogen()->has_transition()); 4110 DCHECK(!instr->hydrogen()->has_transition());
4111 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); 4111 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4112 DoubleRegister value = ToDoubleRegister(instr->value()); 4112 DoubleRegister value = ToDoubleRegister(instr->value());
4113 __ sdc1(value, FieldMemOperand(object, offset)); 4113 __ sdc1(value, FieldMemOperand(object, offset));
4114 return; 4114 return;
4115 } 4115 }
4116 4116
4117 if (instr->hydrogen()->has_transition()) { 4117 if (instr->hydrogen()->has_transition()) {
4118 Handle<Map> transition = instr->hydrogen()->transition_map(); 4118 Handle<Map> transition = instr->hydrogen()->transition_map();
4119 AddDeprecationDependency(transition); 4119 AddDeprecationDependency(transition);
4120 __ li(scratch, Operand(transition)); 4120 __ li(scratch, Operand(transition));
4121 __ sw(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); 4121 __ sw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
(...skipping 40 matching lines...)
4162 kSaveFPRegs, 4162 kSaveFPRegs,
4163 EMIT_REMEMBERED_SET, 4163 EMIT_REMEMBERED_SET,
4164 instr->hydrogen()->SmiCheckForWriteBarrier(), 4164 instr->hydrogen()->SmiCheckForWriteBarrier(),
4165 instr->hydrogen()->PointersToHereCheckForValue()); 4165 instr->hydrogen()->PointersToHereCheckForValue());
4166 } 4166 }
4167 } 4167 }
4168 } 4168 }
4169 4169
4170 4170
4171 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 4171 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
4172 ASSERT(ToRegister(instr->context()).is(cp)); 4172 DCHECK(ToRegister(instr->context()).is(cp));
4173 ASSERT(ToRegister(instr->object()).is(StoreIC::ReceiverRegister())); 4173 DCHECK(ToRegister(instr->object()).is(StoreIC::ReceiverRegister()));
4174 ASSERT(ToRegister(instr->value()).is(StoreIC::ValueRegister())); 4174 DCHECK(ToRegister(instr->value()).is(StoreIC::ValueRegister()));
4175 4175
4176 __ li(StoreIC::NameRegister(), Operand(instr->name())); 4176 __ li(StoreIC::NameRegister(), Operand(instr->name()));
4177 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode()); 4177 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode());
4178 CallCode(ic, RelocInfo::CODE_TARGET, instr); 4178 CallCode(ic, RelocInfo::CODE_TARGET, instr);
4179 } 4179 }
4180 4180
4181 4181
4182 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 4182 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
4183 Condition cc = instr->hydrogen()->allow_equality() ? hi : hs; 4183 Condition cc = instr->hydrogen()->allow_equality() ? hi : hs;
4184 Operand operand(0); 4184 Operand operand(0);
(...skipping 158 matching lines...)
4343 Register value = ToRegister(instr->value()); 4343 Register value = ToRegister(instr->value());
4344 Register elements = ToRegister(instr->elements()); 4344 Register elements = ToRegister(instr->elements());
4345 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) 4345 Register key = instr->key()->IsRegister() ? ToRegister(instr->key())
4346 : no_reg; 4346 : no_reg;
4347 Register scratch = scratch0(); 4347 Register scratch = scratch0();
4348 Register store_base = scratch; 4348 Register store_base = scratch;
4349 int offset = instr->base_offset(); 4349 int offset = instr->base_offset();
4350 4350
4351 // Do the store. 4351 // Do the store.
4352 if (instr->key()->IsConstantOperand()) { 4352 if (instr->key()->IsConstantOperand()) {
4353 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); 4353 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4354 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); 4354 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
4355 offset += ToInteger32(const_operand) * kPointerSize; 4355 offset += ToInteger32(const_operand) * kPointerSize;
4356 store_base = elements; 4356 store_base = elements;
4357 } else { 4357 } else {
4358 // Even though the HStoreKeyed instruction forces the input 4358 // Even though the HStoreKeyed instruction forces the input
4359 // representation for the key to be an integer, the input gets replaced 4359 // representation for the key to be an integer, the input gets replaced
4360 // during bounds check elimination with the index argument to the bounds 4360 // during bounds check elimination with the index argument to the bounds
4361 // check, which can be tagged, so that case must be handled here, too. 4361 // check, which can be tagged, so that case must be handled here, too.
4362 if (instr->hydrogen()->key()->representation().IsSmi()) { 4362 if (instr->hydrogen()->key()->representation().IsSmi()) {
4363 __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize); 4363 __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
(...skipping 29 matching lines...)
4393 DoStoreKeyedExternalArray(instr); 4393 DoStoreKeyedExternalArray(instr);
4394 } else if (instr->hydrogen()->value()->representation().IsDouble()) { 4394 } else if (instr->hydrogen()->value()->representation().IsDouble()) {
4395 DoStoreKeyedFixedDoubleArray(instr); 4395 DoStoreKeyedFixedDoubleArray(instr);
4396 } else { 4396 } else {
4397 DoStoreKeyedFixedArray(instr); 4397 DoStoreKeyedFixedArray(instr);
4398 } 4398 }
4399 } 4399 }
4400 4400
4401 4401
4402 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 4402 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
4403 ASSERT(ToRegister(instr->context()).is(cp)); 4403 DCHECK(ToRegister(instr->context()).is(cp));
4404 ASSERT(ToRegister(instr->object()).is(KeyedStoreIC::ReceiverRegister())); 4404 DCHECK(ToRegister(instr->object()).is(KeyedStoreIC::ReceiverRegister()));
4405 ASSERT(ToRegister(instr->key()).is(KeyedStoreIC::NameRegister())); 4405 DCHECK(ToRegister(instr->key()).is(KeyedStoreIC::NameRegister()));
4406 ASSERT(ToRegister(instr->value()).is(KeyedStoreIC::ValueRegister())); 4406 DCHECK(ToRegister(instr->value()).is(KeyedStoreIC::ValueRegister()));
4407 4407
4408 Handle<Code> ic = (instr->strict_mode() == STRICT) 4408 Handle<Code> ic = (instr->strict_mode() == STRICT)
4409 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() 4409 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
4410 : isolate()->builtins()->KeyedStoreIC_Initialize(); 4410 : isolate()->builtins()->KeyedStoreIC_Initialize();
4411 CallCode(ic, RelocInfo::CODE_TARGET, instr); 4411 CallCode(ic, RelocInfo::CODE_TARGET, instr);
4412 } 4412 }
4413 4413
4414 4414
4415 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { 4415 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4416 Register object_reg = ToRegister(instr->object()); 4416 Register object_reg = ToRegister(instr->object());
(...skipping 12 matching lines...)
4429 Register new_map_reg = ToRegister(instr->new_map_temp()); 4429 Register new_map_reg = ToRegister(instr->new_map_temp());
4430 __ li(new_map_reg, Operand(to_map)); 4430 __ li(new_map_reg, Operand(to_map));
4431 __ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); 4431 __ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
4432 // Write barrier. 4432 // Write barrier.
4433 __ RecordWriteForMap(object_reg, 4433 __ RecordWriteForMap(object_reg,
4434 new_map_reg, 4434 new_map_reg,
4435 scratch, 4435 scratch,
4436 GetRAState(), 4436 GetRAState(),
4437 kDontSaveFPRegs); 4437 kDontSaveFPRegs);
4438 } else { 4438 } else {
4439 ASSERT(object_reg.is(a0)); 4439 DCHECK(object_reg.is(a0));
4440 ASSERT(ToRegister(instr->context()).is(cp)); 4440 DCHECK(ToRegister(instr->context()).is(cp));
4441 PushSafepointRegistersScope scope(this); 4441 PushSafepointRegistersScope scope(this);
4442 __ li(a1, Operand(to_map)); 4442 __ li(a1, Operand(to_map));
4443 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE; 4443 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
4444 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array); 4444 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
4445 __ CallStub(&stub); 4445 __ CallStub(&stub);
4446 RecordSafepointWithRegisters( 4446 RecordSafepointWithRegisters(
4447 instr->pointer_map(), 0, Safepoint::kLazyDeopt); 4447 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
4448 } 4448 }
4449 __ bind(&not_applicable); 4449 __ bind(&not_applicable);
4450 } 4450 }
4451 4451
4452 4452
4453 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) { 4453 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4454 Register object = ToRegister(instr->object()); 4454 Register object = ToRegister(instr->object());
4455 Register temp = ToRegister(instr->temp()); 4455 Register temp = ToRegister(instr->temp());
4456 Label no_memento_found; 4456 Label no_memento_found;
4457 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found, 4457 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found,
4458 ne, &no_memento_found); 4458 ne, &no_memento_found);
4459 DeoptimizeIf(al, instr->environment()); 4459 DeoptimizeIf(al, instr->environment());
4460 __ bind(&no_memento_found); 4460 __ bind(&no_memento_found);
4461 } 4461 }
4462 4462
4463 4463
4464 void LCodeGen::DoStringAdd(LStringAdd* instr) { 4464 void LCodeGen::DoStringAdd(LStringAdd* instr) {
4465 ASSERT(ToRegister(instr->context()).is(cp)); 4465 DCHECK(ToRegister(instr->context()).is(cp));
4466 ASSERT(ToRegister(instr->left()).is(a1)); 4466 DCHECK(ToRegister(instr->left()).is(a1));
4467 ASSERT(ToRegister(instr->right()).is(a0)); 4467 DCHECK(ToRegister(instr->right()).is(a0));
4468 StringAddStub stub(isolate(), 4468 StringAddStub stub(isolate(),
4469 instr->hydrogen()->flags(), 4469 instr->hydrogen()->flags(),
4470 instr->hydrogen()->pretenure_flag()); 4470 instr->hydrogen()->pretenure_flag());
4471 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 4471 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4472 } 4472 }
4473 4473
4474 4474
4475 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 4475 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
4476 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode { 4476 class DeferredStringCharCodeAt V8_FINAL : public LDeferredCode {
4477 public: 4477 public:
(...skipping 58 matching lines...)
4536 codegen()->DoDeferredStringCharFromCode(instr_); 4536 codegen()->DoDeferredStringCharFromCode(instr_);
4537 } 4537 }
4538 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } 4538 virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
4539 private: 4539 private:
4540 LStringCharFromCode* instr_; 4540 LStringCharFromCode* instr_;
4541 }; 4541 };
4542 4542
4543 DeferredStringCharFromCode* deferred = 4543 DeferredStringCharFromCode* deferred =
4544 new(zone()) DeferredStringCharFromCode(this, instr); 4544 new(zone()) DeferredStringCharFromCode(this, instr);
4545 4545
4546 ASSERT(instr->hydrogen()->value()->representation().IsInteger32()); 4546 DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
4547 Register char_code = ToRegister(instr->char_code()); 4547 Register char_code = ToRegister(instr->char_code());
4548 Register result = ToRegister(instr->result()); 4548 Register result = ToRegister(instr->result());
4549 Register scratch = scratch0(); 4549 Register scratch = scratch0();
4550 ASSERT(!char_code.is(result)); 4550 DCHECK(!char_code.is(result));
4551 4551
4552 __ Branch(deferred->entry(), hi, 4552 __ Branch(deferred->entry(), hi,
4553 char_code, Operand(String::kMaxOneByteCharCode)); 4553 char_code, Operand(String::kMaxOneByteCharCode));
4554 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex); 4554 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
4555 __ sll(scratch, char_code, kPointerSizeLog2); 4555 __ sll(scratch, char_code, kPointerSizeLog2);
4556 __ Addu(result, result, scratch); 4556 __ Addu(result, result, scratch);
4557 __ lw(result, FieldMemOperand(result, FixedArray::kHeaderSize)); 4557 __ lw(result, FieldMemOperand(result, FixedArray::kHeaderSize));
4558 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); 4558 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4559 __ Branch(deferred->entry(), eq, result, Operand(scratch)); 4559 __ Branch(deferred->entry(), eq, result, Operand(scratch));
4560 __ bind(deferred->exit()); 4560 __ bind(deferred->exit());
(...skipping 12 matching lines...)
4573 PushSafepointRegistersScope scope(this); 4573 PushSafepointRegistersScope scope(this);
4574 __ SmiTag(char_code); 4574 __ SmiTag(char_code);
4575 __ push(char_code); 4575 __ push(char_code);
4576 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context()); 4576 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
4577 __ StoreToSafepointRegisterSlot(v0, result); 4577 __ StoreToSafepointRegisterSlot(v0, result);
4578 } 4578 }
4579 4579
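The fast path in DoStringCharFromCode above is a sentinel-guarded lookup in the single-character string cache; an undefined entry (or an out-of-range code) routes to the deferred Runtime::kCharFromCode call. Roughly, with hypothetical types:

// Illustrative only: a null return means take the deferred (runtime) path.
static const void* LookupSingleCharCache(const void* const* cache,
                                         const void* undefined_sentinel,
                                         unsigned char_code,
                                         unsigned max_one_byte_code) {
  if (char_code > max_one_byte_code) return nullptr;     // not cacheable
  const void* entry = cache[char_code];
  return entry == undefined_sentinel ? nullptr : entry;  // miss -> deferred
}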
4580 4580
4581 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) { 4581 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4582 LOperand* input = instr->value(); 4582 LOperand* input = instr->value();
4583 ASSERT(input->IsRegister() || input->IsStackSlot()); 4583 DCHECK(input->IsRegister() || input->IsStackSlot());
4584 LOperand* output = instr->result(); 4584 LOperand* output = instr->result();
4585 ASSERT(output->IsDoubleRegister()); 4585 DCHECK(output->IsDoubleRegister());
4586 FPURegister single_scratch = double_scratch0().low(); 4586 FPURegister single_scratch = double_scratch0().low();
4587 if (input->IsStackSlot()) { 4587 if (input->IsStackSlot()) {
4588 Register scratch = scratch0(); 4588 Register scratch = scratch0();
4589 __ lw(scratch, ToMemOperand(input)); 4589 __ lw(scratch, ToMemOperand(input));
4590 __ mtc1(scratch, single_scratch); 4590 __ mtc1(scratch, single_scratch);
4591 } else { 4591 } else {
4592 __ mtc1(ToRegister(input), single_scratch); 4592 __ mtc1(ToRegister(input), single_scratch);
4593 } 4593 }
4594 __ cvt_d_w(ToDoubleRegister(output), single_scratch); 4594 __ cvt_d_w(ToDoubleRegister(output), single_scratch);
4595 } 4595 }
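DoInteger32ToDouble above moves the word into an FPU scratch with mtc1 and converts with cvt_d_w; semantically the pair is just the plain C conversion:

#include <stdint.h>

// Illustrative only: what mtc1 + cvt_d_w compute together.
static double Int32ToDouble(int32_t v) { return static_cast<double>(v); }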
(...skipping 255 matching lines...)
4851 __ bind(&convert); 4851 __ bind(&convert);
4852 // Convert undefined (and hole) to NaN. 4852 // Convert undefined (and hole) to NaN.
4853 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 4853 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4854 DeoptimizeIf(ne, env, input_reg, Operand(at)); 4854 DeoptimizeIf(ne, env, input_reg, Operand(at));
4855 __ LoadRoot(scratch, Heap::kNanValueRootIndex); 4855 __ LoadRoot(scratch, Heap::kNanValueRootIndex);
4856 __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); 4856 __ ldc1(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset));
4857 __ Branch(&done); 4857 __ Branch(&done);
4858 } 4858 }
4859 } else { 4859 } else {
4860 __ SmiUntag(scratch, input_reg); 4860 __ SmiUntag(scratch, input_reg);
4861 ASSERT(mode == NUMBER_CANDIDATE_IS_SMI); 4861 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI);
4862 } 4862 }
4863 // Smi to double register conversion. 4863 // Smi to double register conversion.
4864 __ bind(&load_smi); 4864 __ bind(&load_smi);
4865 // scratch: untagged value of input_reg 4865 // scratch: untagged value of input_reg
4866 __ mtc1(scratch, result_reg); 4866 __ mtc1(scratch, result_reg);
4867 __ cvt_d_w(result_reg, result_reg); 4867 __ cvt_d_w(result_reg, result_reg);
4868 __ bind(&done); 4868 __ bind(&done);
4869 } 4869 }
4870 4870
4871 4871
4872 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { 4872 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
4873 Register input_reg = ToRegister(instr->value()); 4873 Register input_reg = ToRegister(instr->value());
4874 Register scratch1 = scratch0(); 4874 Register scratch1 = scratch0();
4875 Register scratch2 = ToRegister(instr->temp()); 4875 Register scratch2 = ToRegister(instr->temp());
4876 DoubleRegister double_scratch = double_scratch0(); 4876 DoubleRegister double_scratch = double_scratch0();
4877 DoubleRegister double_scratch2 = ToDoubleRegister(instr->temp2()); 4877 DoubleRegister double_scratch2 = ToDoubleRegister(instr->temp2());
4878 4878
4879 ASSERT(!scratch1.is(input_reg) && !scratch1.is(scratch2)); 4879 DCHECK(!scratch1.is(input_reg) && !scratch1.is(scratch2));
4880 ASSERT(!scratch2.is(input_reg) && !scratch2.is(scratch1)); 4880 DCHECK(!scratch2.is(input_reg) && !scratch2.is(scratch1));
4881 4881
4882 Label done; 4882 Label done;
4883 4883
4884 // The input is a tagged HeapObject. 4884 // The input is a tagged HeapObject.
4885 // Heap number map check. 4885 // Heap number map check.
4886 __ lw(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset)); 4886 __ lw(scratch1, FieldMemOperand(input_reg, HeapObject::kMapOffset));
4887 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 4887 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4888 // This 'at' value and scratch1 map value are used for tests in both clauses 4888 // This 'at' value and scratch1 map value are used for tests in both clauses
4889 // of the if. 4889 // of the if.
4890 4890
4891 if (instr->truncating()) { 4891 if (instr->truncating()) {
4892 // Performs a truncating conversion of a floating point number as used by 4892 // Performs a truncating conversion of a floating point number as used by
4893 // the JS bitwise operations. 4893 // the JS bitwise operations.
4894 Label no_heap_number, check_bools, check_false; 4894 Label no_heap_number, check_bools, check_false;
4895 // Check HeapNumber map. 4895 // Check HeapNumber map.
4896 __ Branch(USE_DELAY_SLOT, &no_heap_number, ne, scratch1, Operand(at)); 4896 __ Branch(USE_DELAY_SLOT, &no_heap_number, ne, scratch1, Operand(at));
4897 __ mov(scratch2, input_reg); // In delay slot. 4897 __ mov(scratch2, input_reg); // In delay slot.
4898 __ TruncateHeapNumberToI(input_reg, scratch2); 4898 __ TruncateHeapNumberToI(input_reg, scratch2);
4899 __ Branch(&done); 4899 __ Branch(&done);
4900 4900
4901 // Check for Oddballs. Undefined/False is converted to zero and True to one 4901 // Check for Oddballs. Undefined/False is converted to zero and True to one
4902 // for truncating conversions. 4902 // for truncating conversions.
4903 __ bind(&no_heap_number); 4903 __ bind(&no_heap_number);
4904 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 4904 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4905 __ Branch(&check_bools, ne, input_reg, Operand(at)); 4905 __ Branch(&check_bools, ne, input_reg, Operand(at));
4906 ASSERT(ToRegister(instr->result()).is(input_reg)); 4906 DCHECK(ToRegister(instr->result()).is(input_reg));
4907 __ Branch(USE_DELAY_SLOT, &done); 4907 __ Branch(USE_DELAY_SLOT, &done);
4908 __ mov(input_reg, zero_reg); // In delay slot. 4908 __ mov(input_reg, zero_reg); // In delay slot.
4909 4909
4910 __ bind(&check_bools); 4910 __ bind(&check_bools);
4911 __ LoadRoot(at, Heap::kTrueValueRootIndex); 4911 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4912 __ Branch(&check_false, ne, scratch2, Operand(at)); 4912 __ Branch(&check_false, ne, scratch2, Operand(at));
4913 __ Branch(USE_DELAY_SLOT, &done); 4913 __ Branch(USE_DELAY_SLOT, &done);
4914 __ li(input_reg, Operand(1)); // In delay slot. 4914 __ li(input_reg, Operand(1)); // In delay slot.
4915 4915
4916 __ bind(&check_false); 4916 __ bind(&check_false);
(...skipping 40 matching lines...)
4957 : LDeferredCode(codegen), instr_(instr) { } 4957 : LDeferredCode(codegen), instr_(instr) { }
4958 virtual void Generate() V8_OVERRIDE { 4958 virtual void Generate() V8_OVERRIDE {
4959 codegen()->DoDeferredTaggedToI(instr_); 4959 codegen()->DoDeferredTaggedToI(instr_);
4960 } 4960 }
4961 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } 4961 virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
4962 private: 4962 private:
4963 LTaggedToI* instr_; 4963 LTaggedToI* instr_;
4964 }; 4964 };
4965 4965
4966 LOperand* input = instr->value(); 4966 LOperand* input = instr->value();
4967 ASSERT(input->IsRegister()); 4967 DCHECK(input->IsRegister());
4968 ASSERT(input->Equals(instr->result())); 4968 DCHECK(input->Equals(instr->result()));
4969 4969
4970 Register input_reg = ToRegister(input); 4970 Register input_reg = ToRegister(input);
4971 4971
4972 if (instr->hydrogen()->value()->representation().IsSmi()) { 4972 if (instr->hydrogen()->value()->representation().IsSmi()) {
4973 __ SmiUntag(input_reg); 4973 __ SmiUntag(input_reg);
4974 } else { 4974 } else {
4975 DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr); 4975 DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
4976 4976
4977 // Let the deferred code handle the HeapObject case. 4977 // Let the deferred code handle the HeapObject case.
4978 __ JumpIfNotSmi(input_reg, deferred->entry()); 4978 __ JumpIfNotSmi(input_reg, deferred->entry());
4979 4979
4980 // Smi to int32 conversion. 4980 // Smi to int32 conversion.
4981 __ SmiUntag(input_reg); 4981 __ SmiUntag(input_reg);
4982 __ bind(deferred->exit()); 4982 __ bind(deferred->exit());
4983 } 4983 }
4984 } 4984 }
4985 4985
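The smi fast path in DoTaggedToI above only has to drop the tag bit: on 32-bit V8 a smi stores the value shifted left by one with a zero tag bit. A sketch of that scheme:

#include <stdint.h>

// Illustrative only: the 32-bit smi encoding (kSmiTagSize == 1).
static int32_t SmiTag(int32_t value) {
  return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
}
static int32_t SmiUntag(int32_t tagged) {
  return tagged >> 1;  // arithmetic shift right restores the value
}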
4986 4986
4987 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) { 4987 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
4988 LOperand* input = instr->value(); 4988 LOperand* input = instr->value();
4989 ASSERT(input->IsRegister()); 4989 DCHECK(input->IsRegister());
4990 LOperand* result = instr->result(); 4990 LOperand* result = instr->result();
4991 ASSERT(result->IsDoubleRegister()); 4991 DCHECK(result->IsDoubleRegister());
4992 4992
4993 Register input_reg = ToRegister(input); 4993 Register input_reg = ToRegister(input);
4994 DoubleRegister result_reg = ToDoubleRegister(result); 4994 DoubleRegister result_reg = ToDoubleRegister(result);
4995 4995
4996 HValue* value = instr->hydrogen()->value(); 4996 HValue* value = instr->hydrogen()->value();
4997 NumberUntagDMode mode = value->representation().IsSmi() 4997 NumberUntagDMode mode = value->representation().IsSmi()
4998 ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED; 4998 ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;
4999 4999
5000 EmitNumberUntagD(input_reg, result_reg, 5000 EmitNumberUntagD(input_reg, result_reg,
5001 instr->hydrogen()->can_convert_undefined_to_nan(), 5001 instr->hydrogen()->can_convert_undefined_to_nan(),
(...skipping 107 matching lines...)
5109 if (last != LAST_TYPE) { 5109 if (last != LAST_TYPE) {
5110 DeoptimizeIf(hi, instr->environment(), scratch, Operand(last)); 5110 DeoptimizeIf(hi, instr->environment(), scratch, Operand(last));
5111 } 5111 }
5112 } 5112 }
5113 } else { 5113 } else {
5114 uint8_t mask; 5114 uint8_t mask;
5115 uint8_t tag; 5115 uint8_t tag;
5116 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag); 5116 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
5117 5117
5118 if (IsPowerOf2(mask)) { 5118 if (IsPowerOf2(mask)) {
5119 ASSERT(tag == 0 || IsPowerOf2(tag)); 5119 DCHECK(tag == 0 || IsPowerOf2(tag));
5120 __ And(at, scratch, mask); 5120 __ And(at, scratch, mask);
5121 DeoptimizeIf(tag == 0 ? ne : eq, instr->environment(), 5121 DeoptimizeIf(tag == 0 ? ne : eq, instr->environment(),
5122 at, Operand(zero_reg)); 5122 at, Operand(zero_reg));
5123 } else { 5123 } else {
5124 __ And(scratch, scratch, Operand(mask)); 5124 __ And(scratch, scratch, Operand(mask));
5125 DeoptimizeIf(ne, instr->environment(), scratch, Operand(tag)); 5125 DeoptimizeIf(ne, instr->environment(), scratch, Operand(tag));
5126 } 5126 }
5127 } 5127 }
5128 } 5128 }
5129 5129
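The power-of-two special case above works because a single-bit mask makes (value & mask) either 0 or mask itself, so comparing the AND result against the tag collapses to a zero/non-zero test. The general predicate both branches compile:

// Illustrative only.
static bool InstanceTypeMatches(unsigned value, unsigned mask, unsigned tag) {
  return (value & mask) == tag;
}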
(...skipping 52 matching lines...)
5182 if (instr->hydrogen()->IsStabilityCheck()) { 5182 if (instr->hydrogen()->IsStabilityCheck()) {
5183 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); 5183 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
5184 for (int i = 0; i < maps->size(); ++i) { 5184 for (int i = 0; i < maps->size(); ++i) {
5185 AddStabilityDependency(maps->at(i).handle()); 5185 AddStabilityDependency(maps->at(i).handle());
5186 } 5186 }
5187 return; 5187 return;
5188 } 5188 }
5189 5189
5190 Register map_reg = scratch0(); 5190 Register map_reg = scratch0();
5191 LOperand* input = instr->value(); 5191 LOperand* input = instr->value();
5192 ASSERT(input->IsRegister()); 5192 DCHECK(input->IsRegister());
5193 Register reg = ToRegister(input); 5193 Register reg = ToRegister(input);
5194 __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); 5194 __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
5195 5195
5196 DeferredCheckMaps* deferred = NULL; 5196 DeferredCheckMaps* deferred = NULL;
5197 if (instr->hydrogen()->HasMigrationTarget()) { 5197 if (instr->hydrogen()->HasMigrationTarget()) {
5198 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); 5198 deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
5199 __ bind(deferred->check_maps()); 5199 __ bind(deferred->check_maps());
5200 } 5200 }
5201 5201
5202 const UniqueSet<Map>* maps = instr->hydrogen()->maps(); 5202 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
(...skipping 102 matching lines...)
5305 Register result = ToRegister(instr->result()); 5305 Register result = ToRegister(instr->result());
5306 Register scratch = ToRegister(instr->temp1()); 5306 Register scratch = ToRegister(instr->temp1());
5307 Register scratch2 = ToRegister(instr->temp2()); 5307 Register scratch2 = ToRegister(instr->temp2());
5308 5308
5309 // Allocate memory for the object. 5309 // Allocate memory for the object.
5310 AllocationFlags flags = TAG_OBJECT; 5310 AllocationFlags flags = TAG_OBJECT;
5311 if (instr->hydrogen()->MustAllocateDoubleAligned()) { 5311 if (instr->hydrogen()->MustAllocateDoubleAligned()) {
5312 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); 5312 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
5313 } 5313 }
5314 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { 5314 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5315 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); 5315 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5316 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5316 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5317 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); 5317 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
5318 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { 5318 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5319 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5319 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5320 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); 5320 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
5321 } 5321 }
5322 if (instr->size()->IsConstantOperand()) { 5322 if (instr->size()->IsConstantOperand()) {
5323 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 5323 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5324 if (size <= Page::kMaxRegularHeapObjectSize) { 5324 if (size <= Page::kMaxRegularHeapObjectSize) {
5325 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags); 5325 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
5326 } else { 5326 } else {
5327 __ jmp(deferred->entry()); 5327 __ jmp(deferred->entry());
5328 } 5328 }
5329 } else { 5329 } else {
(...skipping 26 matching lines...)
5356 Register result = ToRegister(instr->result()); 5356 Register result = ToRegister(instr->result());
5357 5357
5358 // TODO(3095996): Get rid of this. For now, we need to make the 5358 // TODO(3095996): Get rid of this. For now, we need to make the
5359 // result register contain a valid pointer because it is already 5359 // result register contain a valid pointer because it is already
5360 // contained in the register pointer map. 5360 // contained in the register pointer map.
5361 __ mov(result, zero_reg); 5361 __ mov(result, zero_reg);
5362 5362
5363 PushSafepointRegistersScope scope(this); 5363 PushSafepointRegistersScope scope(this);
5364 if (instr->size()->IsRegister()) { 5364 if (instr->size()->IsRegister()) {
5365 Register size = ToRegister(instr->size()); 5365 Register size = ToRegister(instr->size());
5366 ASSERT(!size.is(result)); 5366 DCHECK(!size.is(result));
5367 __ SmiTag(size); 5367 __ SmiTag(size);
5368 __ push(size); 5368 __ push(size);
5369 } else { 5369 } else {
5370 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); 5370 int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
5371 if (size >= 0 && size <= Smi::kMaxValue) { 5371 if (size >= 0 && size <= Smi::kMaxValue) {
5372 __ Push(Smi::FromInt(size)); 5372 __ Push(Smi::FromInt(size));
5373 } else { 5373 } else {
5374 // We should never get here at runtime => abort. 5374 // We should never get here at runtime => abort.
5375 __ stop("invalid allocation size"); 5375 __ stop("invalid allocation size");
5376 return; 5376 return;
5377 } 5377 }
5378 } 5378 }
5379 5379
5380 int flags = AllocateDoubleAlignFlag::encode( 5380 int flags = AllocateDoubleAlignFlag::encode(
5381 instr->hydrogen()->MustAllocateDoubleAligned()); 5381 instr->hydrogen()->MustAllocateDoubleAligned());
5382 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { 5382 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5383 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); 5383 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5384 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5384 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5385 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); 5385 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
5386 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { 5386 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5387 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); 5387 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5388 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); 5388 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
5389 } else { 5389 } else {
5390 flags = AllocateTargetSpace::update(flags, NEW_SPACE); 5390 flags = AllocateTargetSpace::update(flags, NEW_SPACE);
5391 } 5391 }
5392 __ Push(Smi::FromInt(flags)); 5392 __ Push(Smi::FromInt(flags));
5393 5393
5394 CallRuntimeFromDeferred( 5394 CallRuntimeFromDeferred(
5395 Runtime::kAllocateInTargetSpace, 2, instr, instr->context()); 5395 Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
5396 __ StoreToSafepointRegisterSlot(v0, result); 5396 __ StoreToSafepointRegisterSlot(v0, result);
5397 } 5397 }
5398 5398
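The deferred path above smi-tags the size and packs the allocation flags for Runtime::kAllocateInTargetSpace using bitfield encode/update helpers. A sketch with a hypothetical layout (one alignment bit, then the target space); V8's real packing comes from AllocateDoubleAlignFlag / AllocateTargetSpace:

// Illustrative only, hypothetical bit layout.
static int EncodeAllocationFlags(bool double_align, int target_space) {
  int flags = double_align ? 1 : 0;    // bit 0: double alignment
  return flags | (target_space << 1);  // higher bits: target space
}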
5399 5399
5400 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { 5400 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5401 ASSERT(ToRegister(instr->value()).is(a0)); 5401 DCHECK(ToRegister(instr->value()).is(a0));
5402 ASSERT(ToRegister(instr->result()).is(v0)); 5402 DCHECK(ToRegister(instr->result()).is(v0));
5403 __ push(a0); 5403 __ push(a0);
5404 CallRuntime(Runtime::kToFastProperties, 1, instr); 5404 CallRuntime(Runtime::kToFastProperties, 1, instr);
5405 } 5405 }
5406 5406
5407 5407
5408 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 5408 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5409 ASSERT(ToRegister(instr->context()).is(cp)); 5409 DCHECK(ToRegister(instr->context()).is(cp));
5410 Label materialized; 5410 Label materialized;
5411 // Registers will be used as follows: 5411 // Registers will be used as follows:
5412 // t3 = literals array. 5412 // t3 = literals array.
5413 // a1 = regexp literal. 5413 // a1 = regexp literal.
5414 // a0 = regexp literal clone. 5414 // a0 = regexp literal clone.
5415 // a2 and t0-t2 are used as temporaries. 5415 // a2 and t0-t2 are used as temporaries.
5416 int literal_offset = 5416 int literal_offset =
5417 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); 5417 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
5418 __ li(t3, instr->hydrogen()->literals()); 5418 __ li(t3, instr->hydrogen()->literals());
5419 __ lw(a1, FieldMemOperand(t3, literal_offset)); 5419 __ lw(a1, FieldMemOperand(t3, literal_offset));
(...skipping 32 matching lines...)
5452 __ sw(a2, FieldMemOperand(v0, i + kPointerSize)); 5452 __ sw(a2, FieldMemOperand(v0, i + kPointerSize));
5453 } 5453 }
5454 if ((size % (2 * kPointerSize)) != 0) { 5454 if ((size % (2 * kPointerSize)) != 0) {
5455 __ lw(a3, FieldMemOperand(a1, size - kPointerSize)); 5455 __ lw(a3, FieldMemOperand(a1, size - kPointerSize));
5456 __ sw(a3, FieldMemOperand(v0, size - kPointerSize)); 5456 __ sw(a3, FieldMemOperand(v0, size - kPointerSize));
5457 } 5457 }
5458 } 5458 }
5459 5459
5460 5460
5461 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 5461 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
5462 ASSERT(ToRegister(instr->context()).is(cp)); 5462 DCHECK(ToRegister(instr->context()).is(cp));
5463 // Use the fast case closure allocation code that allocates in new 5463 // Use the fast case closure allocation code that allocates in new
5464 // space for nested functions that don't need literals cloning. 5464 // space for nested functions that don't need literals cloning.
5465 bool pretenure = instr->hydrogen()->pretenure(); 5465 bool pretenure = instr->hydrogen()->pretenure();
5466 if (!pretenure && instr->hydrogen()->has_no_literals()) { 5466 if (!pretenure && instr->hydrogen()->has_no_literals()) {
5467 FastNewClosureStub stub(isolate(), 5467 FastNewClosureStub stub(isolate(),
5468 instr->hydrogen()->strict_mode(), 5468 instr->hydrogen()->strict_mode(),
5469 instr->hydrogen()->is_generator()); 5469 instr->hydrogen()->is_generator());
5470 __ li(a2, Operand(instr->hydrogen()->shared_info())); 5470 __ li(a2, Operand(instr->hydrogen()->shared_info()));
5471 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 5471 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
5472 } else { 5472 } else {
5473 __ li(a2, Operand(instr->hydrogen()->shared_info())); 5473 __ li(a2, Operand(instr->hydrogen()->shared_info()));
5474 __ li(a1, Operand(pretenure ? factory()->true_value() 5474 __ li(a1, Operand(pretenure ? factory()->true_value()
5475 : factory()->false_value())); 5475 : factory()->false_value()));
5476 __ Push(cp, a2, a1); 5476 __ Push(cp, a2, a1);
5477 CallRuntime(Runtime::kNewClosure, 3, instr); 5477 CallRuntime(Runtime::kNewClosure, 3, instr);
5478 } 5478 }
5479 } 5479 }
5480 5480
5481 5481
5482 void LCodeGen::DoTypeof(LTypeof* instr) { 5482 void LCodeGen::DoTypeof(LTypeof* instr) {
5483 ASSERT(ToRegister(instr->result()).is(v0)); 5483 DCHECK(ToRegister(instr->result()).is(v0));
5484 Register input = ToRegister(instr->value()); 5484 Register input = ToRegister(instr->value());
5485 __ push(input); 5485 __ push(input);
5486 CallRuntime(Runtime::kTypeof, 1, instr); 5486 CallRuntime(Runtime::kTypeof, 1, instr);
5487 } 5487 }
5488 5488
5489 5489
5490 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { 5490 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
5491 Register input = ToRegister(instr->value()); 5491 Register input = ToRegister(instr->value());
5492 5492
5493 Register cmp1 = no_reg; 5493 Register cmp1 = no_reg;
5494 Operand cmp2 = Operand(no_reg); 5494 Operand cmp2 = Operand(no_reg);
5495 5495
5496 Condition final_branch_condition = EmitTypeofIs(instr->TrueLabel(chunk_), 5496 Condition final_branch_condition = EmitTypeofIs(instr->TrueLabel(chunk_),
5497 instr->FalseLabel(chunk_), 5497 instr->FalseLabel(chunk_),
5498 input, 5498 input,
5499 instr->type_literal(), 5499 instr->type_literal(),
5500 &cmp1, 5500 &cmp1,
5501 &cmp2); 5501 &cmp2);
5502 5502
5503 ASSERT(cmp1.is_valid()); 5503 DCHECK(cmp1.is_valid());
5504 ASSERT(!cmp2.is_reg() || cmp2.rm().is_valid()); 5504 DCHECK(!cmp2.is_reg() || cmp2.rm().is_valid());
5505 5505
5506 if (final_branch_condition != kNoCondition) { 5506 if (final_branch_condition != kNoCondition) {
5507 EmitBranch(instr, final_branch_condition, cmp1, cmp2); 5507 EmitBranch(instr, final_branch_condition, cmp1, cmp2);
5508 } 5508 }
5509 } 5509 }
5510 5510
5511 5511
5512 Condition LCodeGen::EmitTypeofIs(Label* true_label, 5512 Condition LCodeGen::EmitTypeofIs(Label* true_label,
5513 Label* false_label, 5513 Label* false_label,
5514 Register input, 5514 Register input,
(...skipping 97 matching lines...)
5612 Register temp1 = ToRegister(instr->temp()); 5612 Register temp1 = ToRegister(instr->temp());
5613 5613
5614 EmitIsConstructCall(temp1, scratch0()); 5614 EmitIsConstructCall(temp1, scratch0());
5615 5615
5616 EmitBranch(instr, eq, temp1, 5616 EmitBranch(instr, eq, temp1,
5617 Operand(Smi::FromInt(StackFrame::CONSTRUCT))); 5617 Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
5618 } 5618 }
5619 5619
5620 5620
5621 void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) { 5621 void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
5622 ASSERT(!temp1.is(temp2)); 5622 DCHECK(!temp1.is(temp2));
5623 // Get the frame pointer for the calling frame. 5623 // Get the frame pointer for the calling frame.
5624 __ lw(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 5624 __ lw(temp1, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
5625 5625
5626 // Skip the arguments adaptor frame if it exists. 5626 // Skip the arguments adaptor frame if it exists.
5627 Label check_frame_marker; 5627 Label check_frame_marker;
5628 __ lw(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset)); 5628 __ lw(temp2, MemOperand(temp1, StandardFrameConstants::kContextOffset));
5629 __ Branch(&check_frame_marker, ne, temp2, 5629 __ Branch(&check_frame_marker, ne, temp2,
5630 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 5630 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5631 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset)); 5631 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kCallerFPOffset));
5632 5632
5633 // Check the marker in the calling frame. 5633 // Check the marker in the calling frame.
5634 __ bind(&check_frame_marker); 5634 __ bind(&check_frame_marker);
5635 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset)); 5635 __ lw(temp1, MemOperand(temp1, StandardFrameConstants::kMarkerOffset));
5636 } 5636 }
5637 5637
5638 5638
5639 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { 5639 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
5640 if (!info()->IsStub()) { 5640 if (!info()->IsStub()) {
5641 // Ensure that we have enough space after the previous lazy-bailout 5641 // Ensure that we have enough space after the previous lazy-bailout
5642 // instruction for patching the code here. 5642 // instruction for patching the code here.
5643 int current_pc = masm()->pc_offset(); 5643 int current_pc = masm()->pc_offset();
5644 if (current_pc < last_lazy_deopt_pc_ + space_needed) { 5644 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5645 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; 5645 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5646 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); 5646 DCHECK_EQ(0, padding_size % Assembler::kInstrSize);
5647 while (padding_size > 0) { 5647 while (padding_size > 0) {
5648 __ nop(); 5648 __ nop();
5649 padding_size -= Assembler::kInstrSize; 5649 padding_size -= Assembler::kInstrSize;
5650 } 5650 }
5651 } 5651 }
5652 } 5652 }
5653 last_lazy_deopt_pc_ = masm()->pc_offset(); 5653 last_lazy_deopt_pc_ = masm()->pc_offset();
5654 } 5654 }
5655 5655
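EnsureSpaceForLazyDeopt above pads with nops so the lazy-deopt patcher always has room between the previous patch site and the current pc. The count its loop emits, as a sketch:

// Illustrative only: number of nops emitted by the padding loop.
static int PaddingNops(int current_pc, int last_lazy_deopt_pc,
                       int space_needed, int instr_size) {
  int padding = last_lazy_deopt_pc + space_needed - current_pc;
  return padding > 0 ? padding / instr_size : 0;  // whole instructions only
}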
5656 5656
5657 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 5657 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
5658 last_lazy_deopt_pc_ = masm()->pc_offset(); 5658 last_lazy_deopt_pc_ = masm()->pc_offset();
5659 ASSERT(instr->HasEnvironment()); 5659 DCHECK(instr->HasEnvironment());
5660 LEnvironment* env = instr->environment(); 5660 LEnvironment* env = instr->environment();
5661 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5661 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5662 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5662 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5663 } 5663 }
5664 5664
5665 5665
5666 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { 5666 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
5667 Deoptimizer::BailoutType type = instr->hydrogen()->type(); 5667 Deoptimizer::BailoutType type = instr->hydrogen()->type();
5668 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the 5668 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
5669 // needed return address), even though the implementation of LAZY and EAGER is 5669 // needed return address), even though the implementation of LAZY and EAGER is
(...skipping 17 matching lines...)
5687 // Nothing to see here, move on! 5687 // Nothing to see here, move on!
5688 } 5688 }
5689 5689
5690 5690
5691 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 5691 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
5692 PushSafepointRegistersScope scope(this); 5692 PushSafepointRegistersScope scope(this);
5693 LoadContextFromDeferred(instr->context()); 5693 LoadContextFromDeferred(instr->context());
5694 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); 5694 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5695 RecordSafepointWithLazyDeopt( 5695 RecordSafepointWithLazyDeopt(
5696 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); 5696 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
5697 ASSERT(instr->HasEnvironment()); 5697 DCHECK(instr->HasEnvironment());
5698 LEnvironment* env = instr->environment(); 5698 LEnvironment* env = instr->environment();
5699 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5699 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5700 } 5700 }
5701 5701
5702 5702
5703 void LCodeGen::DoStackCheck(LStackCheck* instr) { 5703 void LCodeGen::DoStackCheck(LStackCheck* instr) {
5704 class DeferredStackCheck V8_FINAL : public LDeferredCode { 5704 class DeferredStackCheck V8_FINAL : public LDeferredCode {
5705 public: 5705 public:
5706 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) 5706 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
5707 : LDeferredCode(codegen), instr_(instr) { } 5707 : LDeferredCode(codegen), instr_(instr) { }
5708 virtual void Generate() V8_OVERRIDE { 5708 virtual void Generate() V8_OVERRIDE {
5709 codegen()->DoDeferredStackCheck(instr_); 5709 codegen()->DoDeferredStackCheck(instr_);
5710 } 5710 }
5711 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } 5711 virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
5712 private: 5712 private:
5713 LStackCheck* instr_; 5713 LStackCheck* instr_;
5714 }; 5714 };
5715 5715
5716 ASSERT(instr->HasEnvironment()); 5716 DCHECK(instr->HasEnvironment());
5717 LEnvironment* env = instr->environment(); 5717 LEnvironment* env = instr->environment();
5718 // There is no LLazyBailout instruction for stack-checks. We have to 5718 // There is no LLazyBailout instruction for stack-checks. We have to
5719 // prepare for lazy deoptimization explicitly here. 5719 // prepare for lazy deoptimization explicitly here.
5720 if (instr->hydrogen()->is_function_entry()) { 5720 if (instr->hydrogen()->is_function_entry()) {
5721 // Perform stack overflow check. 5721 // Perform stack overflow check.
5722 Label done; 5722 Label done;
5723 __ LoadRoot(at, Heap::kStackLimitRootIndex); 5723 __ LoadRoot(at, Heap::kStackLimitRootIndex);
5724 __ Branch(&done, hs, sp, Operand(at)); 5724 __ Branch(&done, hs, sp, Operand(at));
5725 ASSERT(instr->context()->IsRegister()); 5725 DCHECK(instr->context()->IsRegister());
5726 ASSERT(ToRegister(instr->context()).is(cp)); 5726 DCHECK(ToRegister(instr->context()).is(cp));
5727 CallCode(isolate()->builtins()->StackCheck(), 5727 CallCode(isolate()->builtins()->StackCheck(),
5728 RelocInfo::CODE_TARGET, 5728 RelocInfo::CODE_TARGET,
5729 instr); 5729 instr);
5730 __ bind(&done); 5730 __ bind(&done);
5731 } else { 5731 } else {
5732 ASSERT(instr->hydrogen()->is_backwards_branch()); 5732 DCHECK(instr->hydrogen()->is_backwards_branch());
5733 // Perform stack overflow check if this goto needs it before jumping. 5733 // Perform stack overflow check if this goto needs it before jumping.
5734 DeferredStackCheck* deferred_stack_check = 5734 DeferredStackCheck* deferred_stack_check =
5735 new(zone()) DeferredStackCheck(this, instr); 5735 new(zone()) DeferredStackCheck(this, instr);
5736 __ LoadRoot(at, Heap::kStackLimitRootIndex); 5736 __ LoadRoot(at, Heap::kStackLimitRootIndex);
5737 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at)); 5737 __ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
5738 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); 5738 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
5739 __ bind(instr->done_label()); 5739 __ bind(instr->done_label());
5740 deferred_stack_check->SetExit(instr->done_label()); 5740 deferred_stack_check->SetExit(instr->done_label());
5741 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); 5741 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
5742 // Don't record a deoptimization index for the safepoint here. 5742 // Don't record a deoptimization index for the safepoint here.
5743 // This will be done explicitly when emitting call and the safepoint in 5743 // This will be done explicitly when emitting call and the safepoint in
5744 // the deferred code. 5744 // the deferred code.
5745 } 5745 }
5746 } 5746 }
5747 5747
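Both arms of DoStackCheck above compile the same unsigned guard of sp against the stack-limit root; only the slow path differs (the StackCheck builtin at function entry vs. a deferred runtime call on back edges). The guard, as a sketch:

#include <stdint.h>

// Illustrative only: lo/hs above are unsigned comparisons.
static bool NeedsStackGuard(uintptr_t sp, uintptr_t stack_limit) {
  return sp < stack_limit;
}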
5748 5748
5749 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 5749 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
5750 // This is a pseudo-instruction that ensures that the environment here is 5750 // This is a pseudo-instruction that ensures that the environment here is
5751 // properly registered for deoptimization and records the assembler's PC 5751 // properly registered for deoptimization and records the assembler's PC
5752 // offset. 5752 // offset.
5753 LEnvironment* environment = instr->environment(); 5753 LEnvironment* environment = instr->environment();
5754 5754
5755 // If the environment were already registered, we would have no way of 5755 // If the environment were already registered, we would have no way of
5756 // backpatching it with the spill slot operands. 5756 // backpatching it with the spill slot operands.
5757 ASSERT(!environment->HasBeenRegistered()); 5757 DCHECK(!environment->HasBeenRegistered());
5758 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); 5758 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
5759 5759
5760 GenerateOsrPrologue(); 5760 GenerateOsrPrologue();
5761 } 5761 }
5762 5762
5763 5763
5764 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) { 5764 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5765 Register result = ToRegister(instr->result()); 5765 Register result = ToRegister(instr->result());
5766 Register object = ToRegister(instr->object()); 5766 Register object = ToRegister(instr->object());
5767 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 5767 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5768 DeoptimizeIf(eq, instr->environment(), object, Operand(at)); 5768 DeoptimizeIf(eq, instr->environment(), object, Operand(at));
5769 5769
5770 Register null_value = t1; 5770 Register null_value = t1;
5771 __ LoadRoot(null_value, Heap::kNullValueRootIndex); 5771 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
5772 DeoptimizeIf(eq, instr->environment(), object, Operand(null_value)); 5772 DeoptimizeIf(eq, instr->environment(), object, Operand(null_value));
5773 5773
5774 __ And(at, object, kSmiTagMask); 5774 __ And(at, object, kSmiTagMask);
5775 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg)); 5775 DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
5776 5776
5777 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 5777 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
5778 __ GetObjectType(object, a1, a1); 5778 __ GetObjectType(object, a1, a1);
5779 DeoptimizeIf(le, instr->environment(), a1, Operand(LAST_JS_PROXY_TYPE)); 5779 DeoptimizeIf(le, instr->environment(), a1, Operand(LAST_JS_PROXY_TYPE));
5780 5780
5781 Label use_cache, call_runtime; 5781 Label use_cache, call_runtime;
5782 ASSERT(object.is(a0)); 5782 DCHECK(object.is(a0));
5783 __ CheckEnumCache(null_value, &call_runtime); 5783 __ CheckEnumCache(null_value, &call_runtime);
5784 5784
5785 __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset)); 5785 __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset));
5786 __ Branch(&use_cache); 5786 __ Branch(&use_cache);
5787 5787
5788 // Get the set of properties to enumerate. 5788 // Get the set of properties to enumerate.
5789 __ bind(&call_runtime); 5789 __ bind(&call_runtime);
5790 __ push(object); 5790 __ push(object);
5791 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); 5791 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
5792 5792
5793 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); 5793 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
5794 ASSERT(result.is(v0)); 5794 DCHECK(result.is(v0));
5795 __ LoadRoot(at, Heap::kMetaMapRootIndex); 5795 __ LoadRoot(at, Heap::kMetaMapRootIndex);
5796 DeoptimizeIf(ne, instr->environment(), a1, Operand(at)); 5796 DeoptimizeIf(ne, instr->environment(), a1, Operand(at));
5797 __ bind(&use_cache); 5797 __ bind(&use_cache);
5798 } 5798 }
5799 5799
5800 5800
5801 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { 5801 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5802 Register map = ToRegister(instr->map()); 5802 Register map = ToRegister(instr->map());
5803 Register result = ToRegister(instr->result()); 5803 Register result = ToRegister(instr->result());
5804 Label load_cache, done; 5804 Label load_cache, done;
(...skipping 107 matching lines...)
5912 __ li(at, scope_info); 5912 __ li(at, scope_info);
5913 __ Push(at, ToRegister(instr->function())); 5913 __ Push(at, ToRegister(instr->function()));
5914 CallRuntime(Runtime::kPushBlockContext, 2, instr); 5914 CallRuntime(Runtime::kPushBlockContext, 2, instr);
5915 RecordSafepoint(Safepoint::kNoLazyDeopt); 5915 RecordSafepoint(Safepoint::kNoLazyDeopt);
5916 } 5916 }
5917 5917
5918 5918
5919 #undef __ 5919 #undef __
5920 5920
5921 } } // namespace v8::internal 5921 } } // namespace v8::internal