Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 8139027: Version 3.6.5 (Closed)
Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 52 matching lines...)
63 }; 63 };
64 64
65 65
66 #define __ masm()-> 66 #define __ masm()->
67 67
68 bool LCodeGen::GenerateCode() { 68 bool LCodeGen::GenerateCode() {
69 HPhase phase("Code generation", chunk()); 69 HPhase phase("Code generation", chunk());
70 ASSERT(is_unused()); 70 ASSERT(is_unused());
71 status_ = GENERATING; 71 status_ = GENERATING;
72 CpuFeatures::Scope scope(SSE2); 72 CpuFeatures::Scope scope(SSE2);
73
74 CodeStub::GenerateFPStubs();
75
76 // Open a frame scope to indicate that there is a frame on the stack. The
77 // MANUAL indicates that the scope shouldn't actually generate code to set up
78 // the frame (that is done in GeneratePrologue).
79 FrameScope frame_scope(masm_, StackFrame::MANUAL);
80
81 dynamic_frame_alignment_ = chunk()->num_double_slots() > 2 ||
82 info()->osr_ast_id() != AstNode::kNoNumber;
83
73 return GeneratePrologue() && 84 return GeneratePrologue() &&
74 GenerateBody() && 85 GenerateBody() &&
75 GenerateDeferredCode() && 86 GenerateDeferredCode() &&
76 GenerateSafepointTable(); 87 GenerateSafepointTable();
77 } 88 }
78 89
79 90
80 void LCodeGen::FinishCode(Handle<Code> code) { 91 void LCodeGen::FinishCode(Handle<Code> code) {
81 ASSERT(is_done()); 92 ASSERT(is_done());
82 code->set_stack_slots(GetStackSlotCount()); 93 code->set_stack_slots(GetStackSlotCount());
(...skipping 54 matching lines...)
137 Label ok; 148 Label ok;
138 __ test(ecx, Operand(ecx)); 149 __ test(ecx, Operand(ecx));
139 __ j(zero, &ok, Label::kNear); 150 __ j(zero, &ok, Label::kNear);
140 // +1 for return address. 151 // +1 for return address.
141 int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize; 152 int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
142 __ mov(Operand(esp, receiver_offset), 153 __ mov(Operand(esp, receiver_offset),
143 Immediate(isolate()->factory()->undefined_value())); 154 Immediate(isolate()->factory()->undefined_value()));
144 __ bind(&ok); 155 __ bind(&ok);
145 } 156 }
146 157
158 if (dynamic_frame_alignment_) {
159 Label do_not_pad, align_loop;
160 STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
161 // Align esp to a multiple of 2 * kPointerSize.
162 __ test(esp, Immediate(kPointerSize));
163 __ j(zero, &do_not_pad, Label::kNear);
164 __ push(Immediate(0));
165 __ mov(ebx, esp);
166 // Copy arguments, receiver, and return address.
167 __ mov(ecx, Immediate(scope()->num_parameters() + 2));
168
169 __ bind(&align_loop);
170 __ mov(eax, Operand(ebx, 1 * kPointerSize));
171 __ mov(Operand(ebx, 0), eax);
172 __ add(Operand(ebx), Immediate(kPointerSize));
173 __ dec(ecx);
174 __ j(not_zero, &align_loop, Label::kNear);
175 __ mov(Operand(ebx, 0),
176 Immediate(isolate()->factory()->frame_alignment_marker()));
177
178 __ bind(&do_not_pad);
179 }
180
147 __ push(ebp); // Caller's frame pointer. 181 __ push(ebp); // Caller's frame pointer.
148 __ mov(ebp, esp); 182 __ mov(ebp, esp);
149 __ push(esi); // Callee's context. 183 __ push(esi); // Callee's context.
150 __ push(edi); // Callee's JS function. 184 __ push(edi); // Callee's JS function.
151 185
152 // Reserve space for the stack slots needed by the code. 186 // Reserve space for the stack slots needed by the code.
153 int slots = GetStackSlotCount(); 187 int slots = GetStackSlotCount();
154 if (slots > 0) { 188 if (slots > 0) {
155 if (FLAG_debug_code) { 189 if (FLAG_debug_code) {
156 __ mov(Operand(eax), Immediate(slots)); 190 __ mov(Operand(eax), Immediate(slots));
(...skipping 40 matching lines...)
197 for (int i = 0; i < num_parameters; i++) { 231 for (int i = 0; i < num_parameters; i++) {
198 Variable* var = scope()->parameter(i); 232 Variable* var = scope()->parameter(i);
199 if (var->IsContextSlot()) { 233 if (var->IsContextSlot()) {
200 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 234 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
201 (num_parameters - 1 - i) * kPointerSize; 235 (num_parameters - 1 - i) * kPointerSize;
202 // Load parameter from stack. 236 // Load parameter from stack.
203 __ mov(eax, Operand(ebp, parameter_offset)); 237 __ mov(eax, Operand(ebp, parameter_offset));
204 // Store it in the context. 238 // Store it in the context.
205 int context_offset = Context::SlotOffset(var->index()); 239 int context_offset = Context::SlotOffset(var->index());
206 __ mov(Operand(esi, context_offset), eax); 240 __ mov(Operand(esi, context_offset), eax);
207 // Update the write barrier. This clobbers all involved 241 // Update the write barrier. This clobbers eax and ebx.
208 // registers, so we have to use a third register to avoid 242 __ RecordWriteContextSlot(esi,
209 // clobbering esi. 243 context_offset,
210 __ mov(ecx, esi); 244 eax,
211 __ RecordWrite(ecx, context_offset, eax, ebx); 245 ebx,
246 kDontSaveFPRegs);
212 } 247 }
213 } 248 }
214 Comment(";;; End allocate local context"); 249 Comment(";;; End allocate local context");
215 } 250 }
216 251
217 // Trace the call. 252 // Trace the call.
218 if (FLAG_trace) { 253 if (FLAG_trace) {
219 // We have not executed any compiled code yet, so esi still holds the 254 // We have not executed any compiled code yet, so esi still holds the
220 // incoming context. 255 // incoming context.
221 __ CallRuntime(Runtime::kTraceEnter, 0); 256 __ CallRuntime(Runtime::kTraceEnter, 0);
(...skipping 31 matching lines...)
253 } 288 }
254 } 289 }
255 290
256 291
257 bool LCodeGen::GenerateDeferredCode() { 292 bool LCodeGen::GenerateDeferredCode() {
258 ASSERT(is_generating()); 293 ASSERT(is_generating());
259 if (deferred_.length() > 0) { 294 if (deferred_.length() > 0) {
260 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 295 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
261 LDeferredCode* code = deferred_[i]; 296 LDeferredCode* code = deferred_[i];
262 __ bind(code->entry()); 297 __ bind(code->entry());
298 Comment(";;; Deferred code @%d: %s.",
299 code->instruction_index(),
300 code->instr()->Mnemonic());
263 code->Generate(); 301 code->Generate();
264 __ jmp(code->exit()); 302 __ jmp(code->exit());
265 } 303 }
266 304
267 // Pad code to ensure that the last piece of deferred code have 305 // Pad code to ensure that the last piece of deferred code have
268 // room for lazy bailout. 306 // room for lazy bailout.
269 while ((masm()->pc_offset() - LastSafepointEnd()) 307 while ((masm()->pc_offset() - LastSafepointEnd())
270 < Deoptimizer::patch_size()) { 308 < Deoptimizer::patch_size()) {
271 __ nop(); 309 __ nop();
272 } 310 }
(...skipping 201 matching lines...)
474 __ CallRuntime(fun, argc); 512 __ CallRuntime(fun, argc);
475 513
476 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT); 514 RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
477 } 515 }
478 516
479 517
480 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id, 518 void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
481 int argc, 519 int argc,
482 LInstruction* instr, 520 LInstruction* instr,
483 LOperand* context) { 521 LOperand* context) {
484 ASSERT(context->IsRegister() || context->IsStackSlot());
485 if (context->IsRegister()) { 522 if (context->IsRegister()) {
486 if (!ToRegister(context).is(esi)) { 523 if (!ToRegister(context).is(esi)) {
487 __ mov(esi, ToRegister(context)); 524 __ mov(esi, ToRegister(context));
488 } 525 }
526 } else if (context->IsStackSlot()) {
527 __ mov(esi, ToOperand(context));
528 } else if (context->IsConstantOperand()) {
529 Handle<Object> literal =
530 chunk_->LookupLiteral(LConstantOperand::cast(context));
531 LoadHeapObject(esi, Handle<Context>::cast(literal));
489 } else { 532 } else {
490 // Context is stack slot. 533 UNREACHABLE();
491 __ mov(esi, ToOperand(context));
492 } 534 }
493 535
494 __ CallRuntimeSaveDoubles(id); 536 __ CallRuntimeSaveDoubles(id);
495 RecordSafepointWithRegisters( 537 RecordSafepointWithRegisters(
496 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex); 538 instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
497 } 539 }
498 540
499 541
500 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr, 542 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
501 SafepointMode safepoint_mode) { 543 SafepointMode safepoint_mode) {
(...skipping 160 matching lines...)
662 inlined_function_count_ = deoptimization_literals_.length(); 704 inlined_function_count_ = deoptimization_literals_.length();
663 } 705 }
664 706
665 707
666 void LCodeGen::RecordSafepoint( 708 void LCodeGen::RecordSafepoint(
667 LPointerMap* pointers, 709 LPointerMap* pointers,
668 Safepoint::Kind kind, 710 Safepoint::Kind kind,
669 int arguments, 711 int arguments,
670 int deoptimization_index) { 712 int deoptimization_index) {
671 ASSERT(kind == expected_safepoint_kind_); 713 ASSERT(kind == expected_safepoint_kind_);
672 const ZoneList<LOperand*>* operands = pointers->operands(); 714 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
673 Safepoint safepoint = safepoints_.DefineSafepoint(masm(), 715 Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
674 kind, arguments, deoptimization_index); 716 kind, arguments, deoptimization_index);
675 for (int i = 0; i < operands->length(); i++) { 717 for (int i = 0; i < operands->length(); i++) {
676 LOperand* pointer = operands->at(i); 718 LOperand* pointer = operands->at(i);
677 if (pointer->IsStackSlot()) { 719 if (pointer->IsStackSlot()) {
678 safepoint.DefinePointerSlot(pointer->index()); 720 safepoint.DefinePointerSlot(pointer->index());
679 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) { 721 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
680 safepoint.DefinePointerRegister(ToRegister(pointer)); 722 safepoint.DefinePointerRegister(ToRegister(pointer));
681 } 723 }
682 } 724 }
(...skipping 510 matching lines...)
1193 __ Set(temp, Immediate(lower)); 1235 __ Set(temp, Immediate(lower));
1194 __ movd(xmm0, Operand(temp)); 1236 __ movd(xmm0, Operand(temp));
1195 __ por(res, xmm0); 1237 __ por(res, xmm0);
1196 } 1238 }
1197 } 1239 }
1198 } 1240 }
1199 } 1241 }
1200 1242
1201 1243
1202 void LCodeGen::DoConstantT(LConstantT* instr) { 1244 void LCodeGen::DoConstantT(LConstantT* instr) {
1203 ASSERT(instr->result()->IsRegister()); 1245 Register reg = ToRegister(instr->result());
1204 __ Set(ToRegister(instr->result()), Immediate(instr->value())); 1246 Handle<Object> handle = instr->value();
1247 if (handle->IsHeapObject()) {
1248 LoadHeapObject(reg, Handle<HeapObject>::cast(handle));
1249 } else {
1250 __ Set(reg, Immediate(handle));
1251 }
1205 } 1252 }
1206 1253
1207 1254
1208 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { 1255 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1209 Register result = ToRegister(instr->result()); 1256 Register result = ToRegister(instr->result());
1210 Register array = ToRegister(instr->InputAt(0)); 1257 Register array = ToRegister(instr->InputAt(0));
1211 __ mov(result, FieldOperand(array, JSArray::kLengthOffset)); 1258 __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
1212 } 1259 }
1213 1260
1214 1261
(...skipping 355 matching lines...)
1570 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) { 1617 void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
1571 Register left = ToRegister(instr->InputAt(0)); 1618 Register left = ToRegister(instr->InputAt(0));
1572 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1619 int true_block = chunk_->LookupDestination(instr->true_block_id());
1573 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1620 int false_block = chunk_->LookupDestination(instr->false_block_id());
1574 1621
1575 __ cmp(left, instr->hydrogen()->right()); 1622 __ cmp(left, instr->hydrogen()->right());
1576 EmitBranch(true_block, false_block, equal); 1623 EmitBranch(true_block, false_block, equal);
1577 } 1624 }
1578 1625
1579 1626
1580 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) { 1627 void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
1581 Register reg = ToRegister(instr->InputAt(0)); 1628 Register reg = ToRegister(instr->InputAt(0));
1629 int false_block = chunk_->LookupDestination(instr->false_block_id());
1582 1630
1583 // TODO(fsc): If the expression is known to be a smi, then it's 1631 // If the expression is known to be untagged or a smi, then it's definitely
1584 // definitely not null. Jump to the false block. 1632 // not null, and it can't be an undetectable object.
1633 if (instr->hydrogen()->representation().IsSpecialization() ||
1634 instr->hydrogen()->type().IsSmi()) {
1635 EmitGoto(false_block);
1636 return;
1637 }
1585 1638
1586 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1639 int true_block = chunk_->LookupDestination(instr->true_block_id());
1587 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1640 Handle<Object> nil_value = instr->nil() == kNullValue ?
1588 1641 factory()->null_value() :
1589 __ cmp(reg, factory()->null_value()); 1642 factory()->undefined_value();
1590 if (instr->is_strict()) { 1643 __ cmp(reg, nil_value);
1644 if (instr->kind() == kStrictEquality) {
1591 EmitBranch(true_block, false_block, equal); 1645 EmitBranch(true_block, false_block, equal);
1592 } else { 1646 } else {
1647 Handle<Object> other_nil_value = instr->nil() == kNullValue ?
1648 factory()->undefined_value() :
1649 factory()->null_value();
1593 Label* true_label = chunk_->GetAssemblyLabel(true_block); 1650 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1594 Label* false_label = chunk_->GetAssemblyLabel(false_block); 1651 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1595 __ j(equal, true_label); 1652 __ j(equal, true_label);
1596 __ cmp(reg, factory()->undefined_value()); 1653 __ cmp(reg, other_nil_value);
1597 __ j(equal, true_label); 1654 __ j(equal, true_label);
1598 __ JumpIfSmi(reg, false_label); 1655 __ JumpIfSmi(reg, false_label);
1599 // Check for undetectable objects by looking in the bit field in 1656 // Check for undetectable objects by looking in the bit field in
1600 // the map. The object has already been smi checked. 1657 // the map. The object has already been smi checked.
1601 Register scratch = ToRegister(instr->TempAt(0)); 1658 Register scratch = ToRegister(instr->TempAt(0));
1602 __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset)); 1659 __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1603 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset)); 1660 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
1604 __ test(scratch, Immediate(1 << Map::kIsUndetectable)); 1661 __ test(scratch, Immediate(1 << Map::kIsUndetectable));
1605 EmitBranch(true_block, false_block, not_zero); 1662 EmitBranch(true_block, false_block, not_zero);
1606 } 1663 }
(...skipping 131 matching lines...)
1738 // the temp registers, but not the input. Only input and temp2 may alias. 1795 // the temp registers, but not the input. Only input and temp2 may alias.
1739 void LCodeGen::EmitClassOfTest(Label* is_true, 1796 void LCodeGen::EmitClassOfTest(Label* is_true,
1740 Label* is_false, 1797 Label* is_false,
1741 Handle<String>class_name, 1798 Handle<String>class_name,
1742 Register input, 1799 Register input,
1743 Register temp, 1800 Register temp,
1744 Register temp2) { 1801 Register temp2) {
1745 ASSERT(!input.is(temp)); 1802 ASSERT(!input.is(temp));
1746 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register. 1803 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
1747 __ JumpIfSmi(input, is_false); 1804 __ JumpIfSmi(input, is_false);
1748 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
1749 __ j(below, is_false);
1750 1805
1751 // Map is now in temp.
1752 // Functions have class 'Function'.
1753 __ CmpInstanceType(temp, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
1754 if (class_name->IsEqualTo(CStrVector("Function"))) { 1806 if (class_name->IsEqualTo(CStrVector("Function"))) {
1755 __ j(above_equal, is_true); 1807 // Assuming the following assertions, we can use the same compares to test
1808 // for both being a function type and being in the object type range.
1809 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
1810 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
1811 FIRST_SPEC_OBJECT_TYPE + 1);
1812 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
1813 LAST_SPEC_OBJECT_TYPE - 1);
1814 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1815 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
1816 __ j(below, is_false);
1817 __ j(equal, is_true);
1818 __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE);
1819 __ j(equal, is_true);
1756 } else { 1820 } else {
1757 __ j(above_equal, is_false); 1821 // Faster code path to avoid two compares: subtract lower bound from the
1822 // actual type and do a signed compare with the width of the type range.
1823 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
1824 __ mov(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
1825 __ sub(Operand(temp2), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
1826 __ cmpb(Operand(temp2),
1827 static_cast<int8_t>(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
1828 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
1829 __ j(above, is_false);
1758 } 1830 }
1759 1831
1832 // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
1760 // Check if the constructor in the map is a function. 1833 // Check if the constructor in the map is a function.
1761 __ mov(temp, FieldOperand(temp, Map::kConstructorOffset)); 1834 __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
1762
1763 // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and
1764 // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
1765 // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
1766 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
1767 STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
1768 LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
1769
1770 // Objects with a non-function constructor have class 'Object'. 1835 // Objects with a non-function constructor have class 'Object'.
1771 __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2); 1836 __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
1772 if (class_name->IsEqualTo(CStrVector("Object"))) { 1837 if (class_name->IsEqualTo(CStrVector("Object"))) {
1773 __ j(not_equal, is_true); 1838 __ j(not_equal, is_true);
1774 } else { 1839 } else {
1775 __ j(not_equal, is_false); 1840 __ j(not_equal, is_false);
1776 } 1841 }
1777 1842
1778 // temp now contains the constructor function. Grab the 1843 // temp now contains the constructor function. Grab the
1779 // instance class name from there. 1844 // instance class name from there.
(...skipping 64 matching lines...)
1844 1909
1845 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { 1910 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1846 class DeferredInstanceOfKnownGlobal: public LDeferredCode { 1911 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1847 public: 1912 public:
1848 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, 1913 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1849 LInstanceOfKnownGlobal* instr) 1914 LInstanceOfKnownGlobal* instr)
1850 : LDeferredCode(codegen), instr_(instr) { } 1915 : LDeferredCode(codegen), instr_(instr) { }
1851 virtual void Generate() { 1916 virtual void Generate() {
1852 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_); 1917 codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
1853 } 1918 }
1854 1919 virtual LInstruction* instr() { return instr_; }
1855 Label* map_check() { return &map_check_; } 1920 Label* map_check() { return &map_check_; }
1856
1857 private: 1921 private:
1858 LInstanceOfKnownGlobal* instr_; 1922 LInstanceOfKnownGlobal* instr_;
1859 Label map_check_; 1923 Label map_check_;
1860 }; 1924 };
1861 1925
1862 DeferredInstanceOfKnownGlobal* deferred; 1926 DeferredInstanceOfKnownGlobal* deferred;
1863 deferred = new DeferredInstanceOfKnownGlobal(this, instr); 1927 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1864 1928
1865 Label done, false_result; 1929 Label done, false_result;
1866 Register object = ToRegister(instr->InputAt(1)); 1930 Register object = ToRegister(instr->InputAt(1));
(...skipping 117 matching lines...)
1984 // Preserve the return value on the stack and rely on the runtime call 2048 // Preserve the return value on the stack and rely on the runtime call
1985 // to return the value in the same register. We're leaving the code 2049 // to return the value in the same register. We're leaving the code
1986 // managed by the register allocator and tearing down the frame, it's 2050 // managed by the register allocator and tearing down the frame, it's
1987 // safe to write to the context register. 2051 // safe to write to the context register.
1988 __ push(eax); 2052 __ push(eax);
1989 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); 2053 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1990 __ CallRuntime(Runtime::kTraceExit, 1); 2054 __ CallRuntime(Runtime::kTraceExit, 1);
1991 } 2055 }
1992 __ mov(esp, ebp); 2056 __ mov(esp, ebp);
1993 __ pop(ebp); 2057 __ pop(ebp);
2058 if (dynamic_frame_alignment_) {
2059 Label aligned;
2060 // Frame alignment marker (padding) is below arguments,
2061 // and receiver, so its return-address-relative offset is
2062 // (num_arguments + 2) words.
2063 __ cmp(Operand(esp, (GetParameterCount() + 2) * kPointerSize),
2064 Immediate(factory()->frame_alignment_marker()));
2065 __ j(not_equal, &aligned);
2066 __ Ret((GetParameterCount() + 2) * kPointerSize, ecx);
2067 __ bind(&aligned);
2068 }
1994 __ Ret((GetParameterCount() + 1) * kPointerSize, ecx); 2069 __ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
1995 } 2070 }
1996 2071
1997 2072
1998 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) { 2073 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
1999 Register result = ToRegister(instr->result()); 2074 Register result = ToRegister(instr->result());
2000 __ mov(result, Operand::Cell(instr->hydrogen()->cell())); 2075 __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
2001 if (instr->hydrogen()->check_hole_value()) { 2076 if (instr->hydrogen()->RequiresHoleCheck()) {
2002 __ cmp(result, factory()->the_hole_value()); 2077 __ cmp(result, factory()->the_hole_value());
2003 DeoptimizeIf(equal, instr->environment()); 2078 DeoptimizeIf(equal, instr->environment());
2004 } 2079 }
2005 } 2080 }
2006 2081
2007 2082
2008 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { 2083 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2009 ASSERT(ToRegister(instr->context()).is(esi)); 2084 ASSERT(ToRegister(instr->context()).is(esi));
2010 ASSERT(ToRegister(instr->global_object()).is(eax)); 2085 ASSERT(ToRegister(instr->global_object()).is(eax));
2011 ASSERT(ToRegister(instr->result()).is(eax)); 2086 ASSERT(ToRegister(instr->result()).is(eax));
2012 2087
2013 __ mov(ecx, instr->name()); 2088 __ mov(ecx, instr->name());
2014 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : 2089 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
2015 RelocInfo::CODE_TARGET_CONTEXT; 2090 RelocInfo::CODE_TARGET_CONTEXT;
2016 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2091 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2017 CallCode(ic, mode, instr); 2092 CallCode(ic, mode, instr);
2018 } 2093 }
2019 2094
2020 2095
2021 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { 2096 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2097 Register object = ToRegister(instr->TempAt(0));
2098 Register address = ToRegister(instr->TempAt(1));
2022 Register value = ToRegister(instr->InputAt(0)); 2099 Register value = ToRegister(instr->InputAt(0));
2023 Operand cell_operand = Operand::Cell(instr->hydrogen()->cell()); 2100 ASSERT(!value.is(object));
2101 Handle<JSGlobalPropertyCell> cell_handle(instr->hydrogen()->cell());
2102
2103 int offset = JSGlobalPropertyCell::kValueOffset;
2104 __ mov(object, Immediate(cell_handle));
2024 2105
2025 // If the cell we are storing to contains the hole it could have 2106 // If the cell we are storing to contains the hole it could have
2026 // been deleted from the property dictionary. In that case, we need 2107 // been deleted from the property dictionary. In that case, we need
2027 // to update the property details in the property dictionary to mark 2108 // to update the property details in the property dictionary to mark
2028 // it as no longer deleted. We deoptimize in that case. 2109 // it as no longer deleted. We deoptimize in that case.
2029 if (instr->hydrogen()->check_hole_value()) { 2110 if (instr->hydrogen()->RequiresHoleCheck()) {
2030 __ cmp(cell_operand, factory()->the_hole_value()); 2111 __ cmp(FieldOperand(object, offset), factory()->the_hole_value());
2031 DeoptimizeIf(equal, instr->environment()); 2112 DeoptimizeIf(equal, instr->environment());
2032 } 2113 }
2033 2114
2034 // Store the value. 2115 // Store the value.
2035 __ mov(cell_operand, value); 2116 __ mov(FieldOperand(object, offset), value);
2117
2118 // Cells are always in the remembered set.
2119 __ RecordWriteField(object,
2120 offset,
2121 value,
2122 address,
2123 kSaveFPRegs,
2124 OMIT_REMEMBERED_SET);
2036 } 2125 }
2037 2126
2038 2127
2039 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { 2128 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2040 ASSERT(ToRegister(instr->context()).is(esi)); 2129 ASSERT(ToRegister(instr->context()).is(esi));
2041 ASSERT(ToRegister(instr->global_object()).is(edx)); 2130 ASSERT(ToRegister(instr->global_object()).is(edx));
2042 ASSERT(ToRegister(instr->value()).is(eax)); 2131 ASSERT(ToRegister(instr->value()).is(eax));
2043 2132
2044 __ mov(ecx, instr->name()); 2133 __ mov(ecx, instr->name());
2045 Handle<Code> ic = instr->strict_mode() 2134 Handle<Code> ic = instr->strict_mode()
(...skipping 10 matching lines...)
2056 } 2145 }
2057 2146
2058 2147
2059 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { 2148 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2060 Register context = ToRegister(instr->context()); 2149 Register context = ToRegister(instr->context());
2061 Register value = ToRegister(instr->value()); 2150 Register value = ToRegister(instr->value());
2062 __ mov(ContextOperand(context, instr->slot_index()), value); 2151 __ mov(ContextOperand(context, instr->slot_index()), value);
2063 if (instr->needs_write_barrier()) { 2152 if (instr->needs_write_barrier()) {
2064 Register temp = ToRegister(instr->TempAt(0)); 2153 Register temp = ToRegister(instr->TempAt(0));
2065 int offset = Context::SlotOffset(instr->slot_index()); 2154 int offset = Context::SlotOffset(instr->slot_index());
2066 __ RecordWrite(context, offset, value, temp); 2155 __ RecordWriteContextSlot(context, offset, value, temp, kSaveFPRegs);
2067 } 2156 }
2068 } 2157 }
2069 2158
2070 2159
2071 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { 2160 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2072 Register object = ToRegister(instr->object()); 2161 Register object = ToRegister(instr->object());
2073 Register result = ToRegister(instr->result()); 2162 Register result = ToRegister(instr->result());
2074 if (instr->hydrogen()->is_in_object()) { 2163 if (instr->hydrogen()->is_in_object()) {
2075 __ mov(result, FieldOperand(object, instr->hydrogen()->offset())); 2164 __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
2076 } else { 2165 } else {
(...skipping 196 matching lines...)
2273 __ cmp(result, factory()->the_hole_value()); 2362 __ cmp(result, factory()->the_hole_value());
2274 DeoptimizeIf(equal, instr->environment()); 2363 DeoptimizeIf(equal, instr->environment());
2275 } 2364 }
2276 } 2365 }
2277 2366
2278 2367
2279 void LCodeGen::DoLoadKeyedFastDoubleElement( 2368 void LCodeGen::DoLoadKeyedFastDoubleElement(
2280 LLoadKeyedFastDoubleElement* instr) { 2369 LLoadKeyedFastDoubleElement* instr) {
2281 XMMRegister result = ToDoubleRegister(instr->result()); 2370 XMMRegister result = ToDoubleRegister(instr->result());
2282 2371
2283 if (instr->hydrogen()->RequiresHoleCheck()) { 2372 int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
2284 int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag + 2373 sizeof(kHoleNanLower32);
2285 sizeof(kHoleNanLower32); 2374 Operand hole_check_operand = BuildFastArrayOperand(
2286 Operand hole_check_operand = BuildFastArrayOperand( 2375 instr->elements(), instr->key(),
2287 instr->elements(), instr->key(), 2376 FAST_DOUBLE_ELEMENTS,
2288 FAST_DOUBLE_ELEMENTS, 2377 offset);
2289 offset); 2378 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
2290 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32)); 2379 DeoptimizeIf(equal, instr->environment());
2291 DeoptimizeIf(equal, instr->environment());
2292 }
2293 2380
2294 Operand double_load_operand = BuildFastArrayOperand( 2381 Operand double_load_operand = BuildFastArrayOperand(
2295 instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS, 2382 instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
2296 FixedDoubleArray::kHeaderSize - kHeapObjectTag); 2383 FixedDoubleArray::kHeaderSize - kHeapObjectTag);
2297 __ movdbl(result, double_load_operand); 2384 __ movdbl(result, double_load_operand);
2298 } 2385 }
2299 2386
2300 2387
2301 Operand LCodeGen::BuildFastArrayOperand( 2388 Operand LCodeGen::BuildFastArrayOperand(
2302 LOperand* elements_pointer, 2389 LOperand* elements_pointer,
(...skipping 49 matching lines...)
2352 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 2439 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
2353 __ mov(result, operand); 2440 __ mov(result, operand);
2354 __ test(result, Operand(result)); 2441 __ test(result, Operand(result));
2355 // TODO(danno): we could be more clever here, perhaps having a special 2442 // TODO(danno): we could be more clever here, perhaps having a special
2356 // version of the stub that detects if the overflow case actually 2443 // version of the stub that detects if the overflow case actually
2357 // happens, and generate code that returns a double rather than int. 2444 // happens, and generate code that returns a double rather than int.
2358 DeoptimizeIf(negative, instr->environment()); 2445 DeoptimizeIf(negative, instr->environment());
2359 break; 2446 break;
2360 case EXTERNAL_FLOAT_ELEMENTS: 2447 case EXTERNAL_FLOAT_ELEMENTS:
2361 case EXTERNAL_DOUBLE_ELEMENTS: 2448 case EXTERNAL_DOUBLE_ELEMENTS:
2449 case FAST_SMI_ONLY_ELEMENTS:
2362 case FAST_ELEMENTS: 2450 case FAST_ELEMENTS:
2363 case FAST_DOUBLE_ELEMENTS: 2451 case FAST_DOUBLE_ELEMENTS:
2364 case DICTIONARY_ELEMENTS: 2452 case DICTIONARY_ELEMENTS:
2365 case NON_STRICT_ARGUMENTS_ELEMENTS: 2453 case NON_STRICT_ARGUMENTS_ELEMENTS:
2366 UNREACHABLE(); 2454 UNREACHABLE();
2367 break; 2455 break;
2368 } 2456 }
2369 } 2457 }
2370 } 2458 }
2371 2459
(...skipping 301 matching lines...)
2673 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) { 2761 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2674 // Class for deferred case. 2762 // Class for deferred case.
2675 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode { 2763 class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2676 public: 2764 public:
2677 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, 2765 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2678 LUnaryMathOperation* instr) 2766 LUnaryMathOperation* instr)
2679 : LDeferredCode(codegen), instr_(instr) { } 2767 : LDeferredCode(codegen), instr_(instr) { }
2680 virtual void Generate() { 2768 virtual void Generate() {
2681 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_); 2769 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2682 } 2770 }
2771 virtual LInstruction* instr() { return instr_; }
2683 private: 2772 private:
2684 LUnaryMathOperation* instr_; 2773 LUnaryMathOperation* instr_;
2685 }; 2774 };
2686 2775
2687 ASSERT(instr->value()->Equals(instr->result())); 2776 ASSERT(instr->value()->Equals(instr->result()));
2688 Representation r = instr->hydrogen()->value()->representation(); 2777 Representation r = instr->hydrogen()->value()->representation();
2689 2778
2690 if (r.IsDouble()) { 2779 if (r.IsDouble()) {
2691 XMMRegister scratch = xmm0; 2780 XMMRegister scratch = xmm0;
2692 XMMRegister input_reg = ToDoubleRegister(instr->value()); 2781 XMMRegister input_reg = ToDoubleRegister(instr->value());
(...skipping 305 matching lines...)
2998 __ mov(ecx, instr->name()); 3087 __ mov(ecx, instr->name());
2999 CallCode(ic, mode, instr); 3088 CallCode(ic, mode, instr);
3000 } 3089 }
3001 3090
3002 3091
3003 void LCodeGen::DoCallFunction(LCallFunction* instr) { 3092 void LCodeGen::DoCallFunction(LCallFunction* instr) {
3004 ASSERT(ToRegister(instr->context()).is(esi)); 3093 ASSERT(ToRegister(instr->context()).is(esi));
3005 ASSERT(ToRegister(instr->result()).is(eax)); 3094 ASSERT(ToRegister(instr->result()).is(eax));
3006 3095
3007 int arity = instr->arity(); 3096 int arity = instr->arity();
3008 CallFunctionStub stub(arity, RECEIVER_MIGHT_BE_IMPLICIT); 3097 CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
3009 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 3098 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
3010 __ Drop(1); 3099 __ Drop(1);
3011 } 3100 }
3012 3101
3013 3102
3014 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 3103 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
3015 ASSERT(ToRegister(instr->context()).is(esi)); 3104 ASSERT(ToRegister(instr->context()).is(esi));
3016 ASSERT(ToRegister(instr->result()).is(eax)); 3105 ASSERT(ToRegister(instr->result()).is(eax));
3017 3106
3018 int arity = instr->arity(); 3107 int arity = instr->arity();
(...skipping 36 matching lines...)
3055 if (!instr->transition().is_null()) { 3144 if (!instr->transition().is_null()) {
3056 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition()); 3145 __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
3057 } 3146 }
3058 3147
3059 // Do the store. 3148 // Do the store.
3060 if (instr->is_in_object()) { 3149 if (instr->is_in_object()) {
3061 __ mov(FieldOperand(object, offset), value); 3150 __ mov(FieldOperand(object, offset), value);
3062 if (instr->needs_write_barrier()) { 3151 if (instr->needs_write_barrier()) {
3063 Register temp = ToRegister(instr->TempAt(0)); 3152 Register temp = ToRegister(instr->TempAt(0));
3064 // Update the write barrier for the object for in-object properties. 3153 // Update the write barrier for the object for in-object properties.
3065 __ RecordWrite(object, offset, value, temp); 3154 __ RecordWriteField(object, offset, value, temp, kSaveFPRegs);
3066 } 3155 }
3067 } else { 3156 } else {
3068 Register temp = ToRegister(instr->TempAt(0)); 3157 Register temp = ToRegister(instr->TempAt(0));
3069 __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset)); 3158 __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
3070 __ mov(FieldOperand(temp, offset), value); 3159 __ mov(FieldOperand(temp, offset), value);
3071 if (instr->needs_write_barrier()) { 3160 if (instr->needs_write_barrier()) {
3072 // Update the write barrier for the properties array. 3161 // Update the write barrier for the properties array.
3073 // object is used as a scratch register. 3162 // object is used as a scratch register.
3074 __ RecordWrite(temp, offset, value, object); 3163 __ RecordWriteField(temp, offset, value, object, kSaveFPRegs);
3075 } 3164 }
3076 } 3165 }
3077 } 3166 }
3078 3167
3079 3168
3080 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 3169 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
3081 ASSERT(ToRegister(instr->context()).is(esi)); 3170 ASSERT(ToRegister(instr->context()).is(esi));
3082 ASSERT(ToRegister(instr->object()).is(edx)); 3171 ASSERT(ToRegister(instr->object()).is(edx));
3083 ASSERT(ToRegister(instr->value()).is(eax)); 3172 ASSERT(ToRegister(instr->value()).is(eax));
3084 3173
(...skipping 38 matching lines...)
3123 case EXTERNAL_SHORT_ELEMENTS: 3212 case EXTERNAL_SHORT_ELEMENTS:
3124 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 3213 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3125 __ mov_w(operand, value); 3214 __ mov_w(operand, value);
3126 break; 3215 break;
3127 case EXTERNAL_INT_ELEMENTS: 3216 case EXTERNAL_INT_ELEMENTS:
3128 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 3217 case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3129 __ mov(operand, value); 3218 __ mov(operand, value);
3130 break; 3219 break;
3131 case EXTERNAL_FLOAT_ELEMENTS: 3220 case EXTERNAL_FLOAT_ELEMENTS:
3132 case EXTERNAL_DOUBLE_ELEMENTS: 3221 case EXTERNAL_DOUBLE_ELEMENTS:
3222 case FAST_SMI_ONLY_ELEMENTS:
3133 case FAST_ELEMENTS: 3223 case FAST_ELEMENTS:
3134 case FAST_DOUBLE_ELEMENTS: 3224 case FAST_DOUBLE_ELEMENTS:
3135 case DICTIONARY_ELEMENTS: 3225 case DICTIONARY_ELEMENTS:
3136 case NON_STRICT_ARGUMENTS_ELEMENTS: 3226 case NON_STRICT_ARGUMENTS_ELEMENTS:
3137 UNREACHABLE(); 3227 UNREACHABLE();
3138 break; 3228 break;
3139 } 3229 }
3140 } 3230 }
3141 } 3231 }
3142 3232
3143 3233
3144 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) { 3234 void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3145 Register value = ToRegister(instr->value()); 3235 Register value = ToRegister(instr->value());
3146 Register elements = ToRegister(instr->object()); 3236 Register elements = ToRegister(instr->object());
3147 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; 3237 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3148 3238
3239 // This instruction cannot handle the FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
3240 // conversion, so it deopts in that case.
3241 if (instr->hydrogen()->ValueNeedsSmiCheck()) {
3242 __ test(value, Immediate(kSmiTagMask));
3243 DeoptimizeIf(not_zero, instr->environment());
3244 }
3245
3149 // Do the store. 3246 // Do the store.
3150 if (instr->key()->IsConstantOperand()) { 3247 if (instr->key()->IsConstantOperand()) {
3151 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); 3248 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
3152 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); 3249 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3153 int offset = 3250 int offset =
3154 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; 3251 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
3155 __ mov(FieldOperand(elements, offset), value); 3252 __ mov(FieldOperand(elements, offset), value);
3156 } else { 3253 } else {
3157 __ mov(FieldOperand(elements, 3254 __ mov(FieldOperand(elements,
3158 key, 3255 key,
3159 times_pointer_size, 3256 times_pointer_size,
3160 FixedArray::kHeaderSize), 3257 FixedArray::kHeaderSize),
3161 value); 3258 value);
3162 } 3259 }
3163 3260
3164 if (instr->hydrogen()->NeedsWriteBarrier()) { 3261 if (instr->hydrogen()->NeedsWriteBarrier()) {
3165 // Compute address of modified element and store it into key register. 3262 // Compute address of modified element and store it into key register.
3166 __ lea(key, 3263 __ lea(key,
3167 FieldOperand(elements, 3264 FieldOperand(elements,
3168 key, 3265 key,
3169 times_pointer_size, 3266 times_pointer_size,
3170 FixedArray::kHeaderSize)); 3267 FixedArray::kHeaderSize));
3171 __ RecordWrite(elements, key, value); 3268 __ RecordWrite(elements, key, value, kSaveFPRegs);
3172 } 3269 }
3173 } 3270 }
3174 3271
3175 3272
3176 void LCodeGen::DoStoreKeyedFastDoubleElement( 3273 void LCodeGen::DoStoreKeyedFastDoubleElement(
3177 LStoreKeyedFastDoubleElement* instr) { 3274 LStoreKeyedFastDoubleElement* instr) {
3178 XMMRegister value = ToDoubleRegister(instr->value()); 3275 XMMRegister value = ToDoubleRegister(instr->value());
3179 Label have_value; 3276 Label have_value;
3180 3277
3181 __ ucomisd(value, value); 3278 __ ucomisd(value, value);
(...skipping 23 matching lines...)
3205 CallCode(ic, RelocInfo::CODE_TARGET, instr); 3302 CallCode(ic, RelocInfo::CODE_TARGET, instr);
3206 } 3303 }
3207 3304
3208 3305
3209 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { 3306 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3210 class DeferredStringCharCodeAt: public LDeferredCode { 3307 class DeferredStringCharCodeAt: public LDeferredCode {
3211 public: 3308 public:
3212 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr) 3309 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3213 : LDeferredCode(codegen), instr_(instr) { } 3310 : LDeferredCode(codegen), instr_(instr) { }
3214 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); } 3311 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3312 virtual LInstruction* instr() { return instr_; }
3215 private: 3313 private:
3216 LStringCharCodeAt* instr_; 3314 LStringCharCodeAt* instr_;
3217 }; 3315 };
3218 3316
3219 Register string = ToRegister(instr->string()); 3317 Register string = ToRegister(instr->string());
3220 Register index = ToRegister(instr->index()); 3318 Register index = ToRegister(instr->index());
3221 Register result = ToRegister(instr->result()); 3319 Register result = ToRegister(instr->result());
3222 3320
3223 DeferredStringCharCodeAt* deferred = 3321 DeferredStringCharCodeAt* deferred =
3224 new DeferredStringCharCodeAt(this, instr); 3322 new DeferredStringCharCodeAt(this, instr);
(...skipping 102 matching lines...)
3327 __ StoreToSafepointRegisterSlot(result, eax); 3425 __ StoreToSafepointRegisterSlot(result, eax);
3328 } 3426 }
3329 3427
3330 3428
3331 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) { 3429 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3332 class DeferredStringCharFromCode: public LDeferredCode { 3430 class DeferredStringCharFromCode: public LDeferredCode {
3333 public: 3431 public:
3334 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr) 3432 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3335 : LDeferredCode(codegen), instr_(instr) { } 3433 : LDeferredCode(codegen), instr_(instr) { }
3336 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); } 3434 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3435 virtual LInstruction* instr() { return instr_; }
3337 private: 3436 private:
3338 LStringCharFromCode* instr_; 3437 LStringCharFromCode* instr_;
3339 }; 3438 };
3340 3439
3341 DeferredStringCharFromCode* deferred = 3440 DeferredStringCharFromCode* deferred =
3342 new DeferredStringCharFromCode(this, instr); 3441 new DeferredStringCharFromCode(this, instr);
3343 3442
3344 ASSERT(instr->hydrogen()->value()->representation().IsInteger32()); 3443 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3345 Register char_code = ToRegister(instr->char_code()); 3444 Register char_code = ToRegister(instr->char_code());
3346 Register result = ToRegister(instr->result()); 3445 Register result = ToRegister(instr->result());
(...skipping 59 matching lines...)
3406 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input)); 3505 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
3407 } 3506 }
3408 3507
3409 3508
3410 void LCodeGen::DoNumberTagI(LNumberTagI* instr) { 3509 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3411 class DeferredNumberTagI: public LDeferredCode { 3510 class DeferredNumberTagI: public LDeferredCode {
3412 public: 3511 public:
3413 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr) 3512 DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3414 : LDeferredCode(codegen), instr_(instr) { } 3513 : LDeferredCode(codegen), instr_(instr) { }
3415 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); } 3514 virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3515 virtual LInstruction* instr() { return instr_; }
3416 private: 3516 private:
3417 LNumberTagI* instr_; 3517 LNumberTagI* instr_;
3418 }; 3518 };
3419 3519
3420 LOperand* input = instr->InputAt(0); 3520 LOperand* input = instr->InputAt(0);
3421 ASSERT(input->IsRegister() && input->Equals(instr->result())); 3521 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3422 Register reg = ToRegister(input); 3522 Register reg = ToRegister(input);
3423 3523
3424 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr); 3524 DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
3425 __ SmiTag(reg); 3525 __ SmiTag(reg);
(...skipping 47 matching lines...)
3473 __ StoreToSafepointRegisterSlot(reg, reg); 3573 __ StoreToSafepointRegisterSlot(reg, reg);
3474 } 3574 }
3475 3575
3476 3576
3477 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { 3577 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3478 class DeferredNumberTagD: public LDeferredCode { 3578 class DeferredNumberTagD: public LDeferredCode {
3479 public: 3579 public:
3480 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) 3580 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3481 : LDeferredCode(codegen), instr_(instr) { } 3581 : LDeferredCode(codegen), instr_(instr) { }
3482 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } 3582 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3583 virtual LInstruction* instr() { return instr_; }
3483 private: 3584 private:
3484 LNumberTagD* instr_; 3585 LNumberTagD* instr_;
3485 }; 3586 };
3486 3587
3487 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 3588 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3488 Register reg = ToRegister(instr->result()); 3589 Register reg = ToRegister(instr->result());
3489 Register tmp = ToRegister(instr->TempAt(0)); 3590 Register tmp = ToRegister(instr->TempAt(0));
3490 3591
3491 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr); 3592 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
3492 if (FLAG_inline_new) { 3593 if (FLAG_inline_new) {
(...skipping 81 matching lines...)
3574 3675
3575 // Smi to XMM conversion 3676 // Smi to XMM conversion
3576 __ bind(&load_smi); 3677 __ bind(&load_smi);
3577 __ SmiUntag(input_reg); // Untag smi before converting to float. 3678 __ SmiUntag(input_reg); // Untag smi before converting to float.
3578 __ cvtsi2sd(result_reg, Operand(input_reg)); 3679 __ cvtsi2sd(result_reg, Operand(input_reg));
3579 __ SmiTag(input_reg); // Retag smi. 3680 __ SmiTag(input_reg); // Retag smi.
3580 __ bind(&done); 3681 __ bind(&done);
3581 } 3682 }
3582 3683
3583 3684
3584 class DeferredTaggedToI: public LDeferredCode {
3585 public:
3586 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3587 : LDeferredCode(codegen), instr_(instr) { }
3588 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3589 private:
3590 LTaggedToI* instr_;
3591 };
3592
3593
3594 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { 3685 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3595 Label done, heap_number; 3686 Label done, heap_number;
3596 Register input_reg = ToRegister(instr->InputAt(0)); 3687 Register input_reg = ToRegister(instr->InputAt(0));
3597 3688
3598 // Heap number map check. 3689 // Heap number map check.
3599 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 3690 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3600 factory()->heap_number_map()); 3691 factory()->heap_number_map());
3601 3692
3602 if (instr->truncating()) { 3693 if (instr->truncating()) {
3603 __ j(equal, &heap_number, Label::kNear); 3694 __ j(equal, &heap_number, Label::kNear);
(...skipping 61 matching lines...)
3665 __ movmskpd(input_reg, xmm0); 3756 __ movmskpd(input_reg, xmm0);
3666 __ and_(input_reg, 1); 3757 __ and_(input_reg, 1);
3667 DeoptimizeIf(not_zero, instr->environment()); 3758 DeoptimizeIf(not_zero, instr->environment());
3668 } 3759 }
3669 } 3760 }
3670 __ bind(&done); 3761 __ bind(&done);
3671 } 3762 }
3672 3763
3673 3764
3674 void LCodeGen::DoTaggedToI(LTaggedToI* instr) { 3765 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3766 class DeferredTaggedToI: public LDeferredCode {
3767 public:
3768 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3769 : LDeferredCode(codegen), instr_(instr) { }
3770 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3771 virtual LInstruction* instr() { return instr_; }
3772 private:
3773 LTaggedToI* instr_;
3774 };
3775
3675 LOperand* input = instr->InputAt(0); 3776 LOperand* input = instr->InputAt(0);
3676 ASSERT(input->IsRegister()); 3777 ASSERT(input->IsRegister());
3677 ASSERT(input->Equals(instr->result())); 3778 ASSERT(input->Equals(instr->result()));
3678 3779
3679 Register input_reg = ToRegister(input); 3780 Register input_reg = ToRegister(input);
3680 3781
3681 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr); 3782 DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3682 3783
3683 // Smi check. 3784 // Smi check.
3684 __ JumpIfNotSmi(input_reg, deferred->entry()); 3785 __ JumpIfNotSmi(input_reg, deferred->entry());
(...skipping 190 matching lines...)
3875 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); 3976 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
3876 __ and_(temp, mask); 3977 __ and_(temp, mask);
3877 __ cmpb(Operand(temp), tag); 3978 __ cmpb(Operand(temp), tag);
3878 DeoptimizeIf(not_equal, instr->environment()); 3979 DeoptimizeIf(not_equal, instr->environment());
3879 } 3980 }
3880 } 3981 }
3881 } 3982 }
3882 3983
3883 3984
3884 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { 3985 void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
3885 ASSERT(instr->InputAt(0)->IsRegister()); 3986 Handle<JSFunction> target = instr->hydrogen()->target();
3886 Operand operand = ToOperand(instr->InputAt(0)); 3987 if (isolate()->heap()->InNewSpace(*target)) {
3887 __ cmp(operand, instr->hydrogen()->target()); 3988 Register reg = ToRegister(instr->value());
3989 Handle<JSGlobalPropertyCell> cell =
3990 isolate()->factory()->NewJSGlobalPropertyCell(target);
3991 __ cmp(reg, Operand::Cell(cell));
3992 } else {
3993 Operand operand = ToOperand(instr->value());
3994 __ cmp(operand, instr->hydrogen()->target());
3995 }
3888 DeoptimizeIf(not_equal, instr->environment()); 3996 DeoptimizeIf(not_equal, instr->environment());
3889 } 3997 }
3890 3998
3891 3999
3892 void LCodeGen::DoCheckMap(LCheckMap* instr) { 4000 void LCodeGen::DoCheckMap(LCheckMap* instr) {
3893 LOperand* input = instr->InputAt(0); 4001 LOperand* input = instr->InputAt(0);
3894 ASSERT(input->IsRegister()); 4002 ASSERT(input->IsRegister());
3895 Register reg = ToRegister(input); 4003 Register reg = ToRegister(input);
3896 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), 4004 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3897 instr->hydrogen()->map()); 4005 instr->hydrogen()->map());
(...skipping 283 matching lines...)
4181 __ cmp(input, factory()->undefined_value()); 4289 __ cmp(input, factory()->undefined_value());
4182 __ j(equal, true_label); 4290 __ j(equal, true_label);
4183 __ JumpIfSmi(input, false_label); 4291 __ JumpIfSmi(input, false_label);
4184 // Check for undetectable objects => true. 4292 // Check for undetectable objects => true.
4185 __ mov(input, FieldOperand(input, HeapObject::kMapOffset)); 4293 __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
4186 __ test_b(FieldOperand(input, Map::kBitFieldOffset), 4294 __ test_b(FieldOperand(input, Map::kBitFieldOffset),
4187 1 << Map::kIsUndetectable); 4295 1 << Map::kIsUndetectable);
4188 final_branch_condition = not_zero; 4296 final_branch_condition = not_zero;
4189 4297
4190 } else if (type_name->Equals(heap()->function_symbol())) { 4298 } else if (type_name->Equals(heap()->function_symbol())) {
4191 STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE); 4299 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4192 __ JumpIfSmi(input, false_label); 4300 __ JumpIfSmi(input, false_label);
4193 __ CmpObjectType(input, FIRST_CALLABLE_SPEC_OBJECT_TYPE, input); 4301 __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
4194 final_branch_condition = above_equal; 4302 __ j(equal, true_label);
4303 __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE);
4304 final_branch_condition = equal;
4195 4305
4196 } else if (type_name->Equals(heap()->object_symbol())) { 4306 } else if (type_name->Equals(heap()->object_symbol())) {
4197 __ JumpIfSmi(input, false_label); 4307 __ JumpIfSmi(input, false_label);
4198 if (!FLAG_harmony_typeof) { 4308 if (!FLAG_harmony_typeof) {
4199 __ cmp(input, factory()->null_value()); 4309 __ cmp(input, factory()->null_value());
4200 __ j(equal, true_label); 4310 __ j(equal, true_label);
4201 } 4311 }
4202 __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input); 4312 __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
4203 __ j(below, false_label); 4313 __ j(below, false_label);
4204 __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); 4314 __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
(...skipping 91 matching lines...)
4296 safepoints_.SetPcAfterGap(pc); 4406 safepoints_.SetPcAfterGap(pc);
4297 } 4407 }
4298 4408
4299 4409
4300 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4410 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4301 class DeferredStackCheck: public LDeferredCode { 4411 class DeferredStackCheck: public LDeferredCode {
4302 public: 4412 public:
4303 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr) 4413 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
4304 : LDeferredCode(codegen), instr_(instr) { } 4414 : LDeferredCode(codegen), instr_(instr) { }
4305 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); } 4415 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
4416 virtual LInstruction* instr() { return instr_; }
4306 private: 4417 private:
4307 LStackCheck* instr_; 4418 LStackCheck* instr_;
4308 }; 4419 };
4309 4420
4310 if (instr->hydrogen()->is_function_entry()) { 4421 if (instr->hydrogen()->is_function_entry()) {
4311 // Perform stack overflow check. 4422 // Perform stack overflow check.
4312 Label done; 4423 Label done;
4313 ExternalReference stack_limit = 4424 ExternalReference stack_limit =
4314 ExternalReference::address_of_stack_limit(isolate()); 4425 ExternalReference::address_of_stack_limit(isolate());
4315 __ cmp(esp, Operand::StaticVariable(stack_limit)); 4426 __ cmp(esp, Operand::StaticVariable(stack_limit));
(...skipping 62 matching lines...)
4378 env->deoptimization_index()); 4489 env->deoptimization_index());
4379 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); 4490 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
4380 } 4491 }
4381 4492
4382 4493
4383 #undef __ 4494 #undef __
4384 4495
4385 } } // namespace v8::internal 4496 } } // namespace v8::internal
4386 4497
4387 #endif // V8_TARGET_ARCH_IA32 4498 #endif // V8_TARGET_ARCH_IA32