Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(118)

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 142893003: Merge bleeding_edge 18658:18677 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/ic-x64.cc ('k') | src/x64/lithium-gap-resolver-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
96 void LChunkBuilder::Abort(BailoutReason reason) { 96 void LChunkBuilder::Abort(BailoutReason reason) {
97 info()->set_bailout_reason(reason); 97 info()->set_bailout_reason(reason);
98 status_ = ABORTED; 98 status_ = ABORTED;
99 } 99 }
100 100
101 101
102 #ifdef _MSC_VER 102 #ifdef _MSC_VER
103 void LCodeGen::MakeSureStackPagesMapped(int offset) { 103 void LCodeGen::MakeSureStackPagesMapped(int offset) {
104 const int kPageSize = 4 * KB; 104 const int kPageSize = 4 * KB;
105 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { 105 for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
106 __ movq(Operand(rsp, offset), rax); 106 __ movp(Operand(rsp, offset), rax);
107 } 107 }
108 } 108 }
109 #endif 109 #endif
110 110
111 111
112 void LCodeGen::SaveCallerDoubles() { 112 void LCodeGen::SaveCallerDoubles() {
113 ASSERT(info()->saves_caller_doubles()); 113 ASSERT(info()->saves_caller_doubles());
114 ASSERT(NeedsEagerFrame()); 114 ASSERT(NeedsEagerFrame());
115 Comment(";;; Save clobbered callee double registers"); 115 Comment(";;; Save clobbered callee double registers");
116 int count = 0; 116 int count = 0;
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
154 } 154 }
155 #endif 155 #endif
156 156
157 // Classic mode functions need to replace the receiver with the global proxy 157 // Classic mode functions need to replace the receiver with the global proxy
158 // when called as functions (without an explicit receiver object). 158 // when called as functions (without an explicit receiver object).
159 if (info_->this_has_uses() && 159 if (info_->this_has_uses() &&
160 info_->is_classic_mode() && 160 info_->is_classic_mode() &&
161 !info_->is_native()) { 161 !info_->is_native()) {
162 Label ok; 162 Label ok;
163 StackArgumentsAccessor args(rsp, scope()->num_parameters()); 163 StackArgumentsAccessor args(rsp, scope()->num_parameters());
164 __ movq(rcx, args.GetReceiverOperand()); 164 __ movp(rcx, args.GetReceiverOperand());
165 165
166 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); 166 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
167 __ j(not_equal, &ok, Label::kNear); 167 __ j(not_equal, &ok, Label::kNear);
168 168
169 __ movq(rcx, GlobalObjectOperand()); 169 __ movp(rcx, GlobalObjectOperand());
170 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); 170 __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
171 171
172 __ movq(args.GetReceiverOperand(), rcx); 172 __ movp(args.GetReceiverOperand(), rcx);
173 173
174 __ bind(&ok); 174 __ bind(&ok);
175 } 175 }
176 } 176 }
177 177
178 info()->set_prologue_offset(masm_->pc_offset()); 178 info()->set_prologue_offset(masm_->pc_offset());
179 if (NeedsEagerFrame()) { 179 if (NeedsEagerFrame()) {
180 ASSERT(!frame_is_built_); 180 ASSERT(!frame_is_built_);
181 frame_is_built_ = true; 181 frame_is_built_ = true;
182 __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME); 182 __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME);
183 info()->AddNoFrameRange(0, masm_->pc_offset()); 183 info()->AddNoFrameRange(0, masm_->pc_offset());
184 } 184 }
185 185
186 // Reserve space for the stack slots needed by the code. 186 // Reserve space for the stack slots needed by the code.
187 int slots = GetStackSlotCount(); 187 int slots = GetStackSlotCount();
188 if (slots > 0) { 188 if (slots > 0) {
189 if (FLAG_debug_code) { 189 if (FLAG_debug_code) {
190 __ subq(rsp, Immediate(slots * kPointerSize)); 190 __ subq(rsp, Immediate(slots * kPointerSize));
191 #ifdef _MSC_VER 191 #ifdef _MSC_VER
192 MakeSureStackPagesMapped(slots * kPointerSize); 192 MakeSureStackPagesMapped(slots * kPointerSize);
193 #endif 193 #endif
194 __ push(rax); 194 __ push(rax);
195 __ Set(rax, slots); 195 __ Set(rax, slots);
196 __ movq(kScratchRegister, kSlotsZapValue); 196 __ movq(kScratchRegister, kSlotsZapValue);
197 Label loop; 197 Label loop;
198 __ bind(&loop); 198 __ bind(&loop);
199 __ movq(MemOperand(rsp, rax, times_pointer_size, 0), 199 __ movp(MemOperand(rsp, rax, times_pointer_size, 0),
200 kScratchRegister); 200 kScratchRegister);
201 __ decl(rax); 201 __ decl(rax);
202 __ j(not_zero, &loop); 202 __ j(not_zero, &loop);
203 __ pop(rax); 203 __ pop(rax);
204 } else { 204 } else {
205 __ subq(rsp, Immediate(slots * kPointerSize)); 205 __ subq(rsp, Immediate(slots * kPointerSize));
206 #ifdef _MSC_VER 206 #ifdef _MSC_VER
207 MakeSureStackPagesMapped(slots * kPointerSize); 207 MakeSureStackPagesMapped(slots * kPointerSize);
208 #endif 208 #endif
209 } 209 }
(...skipping 11 matching lines...) Expand all
221 __ push(rdi); 221 __ push(rdi);
222 if (heap_slots <= FastNewContextStub::kMaximumSlots) { 222 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
223 FastNewContextStub stub(heap_slots); 223 FastNewContextStub stub(heap_slots);
224 __ CallStub(&stub); 224 __ CallStub(&stub);
225 } else { 225 } else {
226 __ CallRuntime(Runtime::kNewFunctionContext, 1); 226 __ CallRuntime(Runtime::kNewFunctionContext, 1);
227 } 227 }
228 RecordSafepoint(Safepoint::kNoLazyDeopt); 228 RecordSafepoint(Safepoint::kNoLazyDeopt);
229 // Context is returned in both rax and rsi. It replaces the context 229 // Context is returned in both rax and rsi. It replaces the context
230 // passed to us. It's saved in the stack and kept live in rsi. 230 // passed to us. It's saved in the stack and kept live in rsi.
231 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); 231 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
232 232
233 // Copy any necessary parameters into the context. 233 // Copy any necessary parameters into the context.
234 int num_parameters = scope()->num_parameters(); 234 int num_parameters = scope()->num_parameters();
235 for (int i = 0; i < num_parameters; i++) { 235 for (int i = 0; i < num_parameters; i++) {
236 Variable* var = scope()->parameter(i); 236 Variable* var = scope()->parameter(i);
237 if (var->IsContextSlot()) { 237 if (var->IsContextSlot()) {
238 int parameter_offset = StandardFrameConstants::kCallerSPOffset + 238 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
239 (num_parameters - 1 - i) * kPointerSize; 239 (num_parameters - 1 - i) * kPointerSize;
240 // Load parameter from stack. 240 // Load parameter from stack.
241 __ movq(rax, Operand(rbp, parameter_offset)); 241 __ movp(rax, Operand(rbp, parameter_offset));
242 // Store it in the context. 242 // Store it in the context.
243 int context_offset = Context::SlotOffset(var->index()); 243 int context_offset = Context::SlotOffset(var->index());
244 __ movq(Operand(rsi, context_offset), rax); 244 __ movp(Operand(rsi, context_offset), rax);
245 // Update the write barrier. This clobbers rax and rbx. 245 // Update the write barrier. This clobbers rax and rbx.
246 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs); 246 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
247 } 247 }
248 } 248 }
249 Comment(";;; End allocate local context"); 249 Comment(";;; End allocate local context");
250 } 250 }
251 251
252 // Trace the call. 252 // Trace the call.
253 if (FLAG_trace && info()->IsOptimizing()) { 253 if (FLAG_trace && info()->IsOptimizing()) {
254 __ CallRuntime(Runtime::kTraceEnter, 0); 254 __ CallRuntime(Runtime::kTraceEnter, 0);
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
287 } else { 287 } else {
288 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); 288 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
289 } 289 }
290 if (jump_table_[i].needs_frame) { 290 if (jump_table_[i].needs_frame) {
291 ASSERT(!info()->saves_caller_doubles()); 291 ASSERT(!info()->saves_caller_doubles());
292 __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); 292 __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
293 if (needs_frame.is_bound()) { 293 if (needs_frame.is_bound()) {
294 __ jmp(&needs_frame); 294 __ jmp(&needs_frame);
295 } else { 295 } else {
296 __ bind(&needs_frame); 296 __ bind(&needs_frame);
297 __ movq(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset)); 297 __ movp(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset));
298 __ push(rbp); 298 __ push(rbp);
299 __ movq(rbp, rsp); 299 __ movp(rbp, rsp);
300 __ push(rsi); 300 __ push(rsi);
301 // This variant of deopt can only be used with stubs. Since we don't 301 // This variant of deopt can only be used with stubs. Since we don't
302 // have a function pointer to install in the stack frame that we're 302 // have a function pointer to install in the stack frame that we're
303 // building, install a special marker there instead. 303 // building, install a special marker there instead.
304 ASSERT(info()->IsStub()); 304 ASSERT(info()->IsStub());
305 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); 305 __ Move(rsi, Smi::FromInt(StackFrame::STUB));
306 __ push(rsi); 306 __ push(rsi);
307 __ movq(rsi, MemOperand(rsp, kPointerSize)); 307 __ movp(rsi, MemOperand(rsp, kPointerSize));
308 __ call(kScratchRegister); 308 __ call(kScratchRegister);
309 } 309 }
310 } else { 310 } else {
311 if (info()->saves_caller_doubles()) { 311 if (info()->saves_caller_doubles()) {
312 ASSERT(info()->IsStub()); 312 ASSERT(info()->IsStub());
313 RestoreCallerDoubles(); 313 RestoreCallerDoubles();
314 } 314 }
315 __ call(entry, RelocInfo::RUNTIME_ENTRY); 315 __ call(entry, RelocInfo::RUNTIME_ENTRY);
316 } 316 }
317 } 317 }
(...skipping 28 matching lines...) Expand all
346 __ Push(Smi::FromInt(StackFrame::STUB)); 346 __ Push(Smi::FromInt(StackFrame::STUB));
347 __ lea(rbp, Operand(rsp, 2 * kPointerSize)); 347 __ lea(rbp, Operand(rsp, 2 * kPointerSize));
348 Comment(";;; Deferred code"); 348 Comment(";;; Deferred code");
349 } 349 }
350 code->Generate(); 350 code->Generate();
351 if (NeedsDeferredFrame()) { 351 if (NeedsDeferredFrame()) {
352 __ bind(code->done()); 352 __ bind(code->done());
353 Comment(";;; Destroy frame"); 353 Comment(";;; Destroy frame");
354 ASSERT(frame_is_built_); 354 ASSERT(frame_is_built_);
355 frame_is_built_ = false; 355 frame_is_built_ = false;
356 __ movq(rsp, rbp); 356 __ movp(rsp, rbp);
357 __ pop(rbp); 357 __ pop(rbp);
358 } 358 }
359 __ jmp(code->exit()); 359 __ jmp(code->exit());
360 } 360 }
361 } 361 }
362 362
363 // Deferred code is the last part of the instruction sequence. Mark 363 // Deferred code is the last part of the instruction sequence. Mark
364 // the generated code as done unless we bailed out. 364 // the generated code as done unless we bailed out.
365 if (!is_aborted()) status_ = DONE; 365 if (!is_aborted()) status_ = DONE;
366 return !is_aborted(); 366 return !is_aborted();
(...skipping 262 matching lines...) Expand 10 before | Expand all | Expand 10 after
629 629
630 __ CallRuntime(function, num_arguments, save_doubles); 630 __ CallRuntime(function, num_arguments, save_doubles);
631 631
632 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); 632 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
633 } 633 }
634 634
635 635
636 void LCodeGen::LoadContextFromDeferred(LOperand* context) { 636 void LCodeGen::LoadContextFromDeferred(LOperand* context) {
637 if (context->IsRegister()) { 637 if (context->IsRegister()) {
638 if (!ToRegister(context).is(rsi)) { 638 if (!ToRegister(context).is(rsi)) {
639 __ movq(rsi, ToRegister(context)); 639 __ movp(rsi, ToRegister(context));
640 } 640 }
641 } else if (context->IsStackSlot()) { 641 } else if (context->IsStackSlot()) {
642 __ movq(rsi, ToOperand(context)); 642 __ movp(rsi, ToOperand(context));
643 } else if (context->IsConstantOperand()) { 643 } else if (context->IsConstantOperand()) {
644 HConstant* constant = 644 HConstant* constant =
645 chunk_->LookupConstant(LConstantOperand::cast(context)); 645 chunk_->LookupConstant(LConstantOperand::cast(context));
646 __ Move(rsi, Handle<Object>::cast(constant->handle(isolate()))); 646 __ Move(rsi, Handle<Object>::cast(constant->handle(isolate())));
647 } else { 647 } else {
648 UNREACHABLE(); 648 UNREACHABLE();
649 } 649 }
650 } 650 }
651 651
652 652
(...skipping 608 matching lines...) Expand 10 before | Expand all | Expand 10 after
1261 } 1261 }
1262 } 1262 }
1263 1263
1264 1264
1265 void LCodeGen::DoMulI(LMulI* instr) { 1265 void LCodeGen::DoMulI(LMulI* instr) {
1266 Register left = ToRegister(instr->left()); 1266 Register left = ToRegister(instr->left());
1267 LOperand* right = instr->right(); 1267 LOperand* right = instr->right();
1268 1268
1269 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 1269 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1270 if (instr->hydrogen_value()->representation().IsSmi()) { 1270 if (instr->hydrogen_value()->representation().IsSmi()) {
1271 __ movq(kScratchRegister, left); 1271 __ movp(kScratchRegister, left);
1272 } else { 1272 } else {
1273 __ movl(kScratchRegister, left); 1273 __ movl(kScratchRegister, left);
1274 } 1274 }
1275 } 1275 }
1276 1276
1277 bool can_overflow = 1277 bool can_overflow =
1278 instr->hydrogen()->CheckFlag(HValue::kCanOverflow); 1278 instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
1279 if (right->IsConstantOperand()) { 1279 if (right->IsConstantOperand()) {
1280 int32_t right_value = ToInteger32(LConstantOperand::cast(right)); 1280 int32_t right_value = ToInteger32(LConstantOperand::cast(right));
1281 if (right_value == -1) { 1281 if (right_value == -1) {
(...skipping 294 matching lines...) Expand 10 before | Expand all | Expand 10 after
1576 Register map = ToRegister(instr->value()); 1576 Register map = ToRegister(instr->value());
1577 __ EnumLength(result, map); 1577 __ EnumLength(result, map);
1578 } 1578 }
1579 1579
1580 1580
1581 void LCodeGen::DoElementsKind(LElementsKind* instr) { 1581 void LCodeGen::DoElementsKind(LElementsKind* instr) {
1582 Register result = ToRegister(instr->result()); 1582 Register result = ToRegister(instr->result());
1583 Register input = ToRegister(instr->value()); 1583 Register input = ToRegister(instr->value());
1584 1584
1585 // Load map into |result|. 1585 // Load map into |result|.
1586 __ movq(result, FieldOperand(input, HeapObject::kMapOffset)); 1586 __ movp(result, FieldOperand(input, HeapObject::kMapOffset));
1587 // Load the map's "bit field 2" into |result|. We only need the first byte. 1587 // Load the map's "bit field 2" into |result|. We only need the first byte.
1588 __ movzxbq(result, FieldOperand(result, Map::kBitField2Offset)); 1588 __ movzxbq(result, FieldOperand(result, Map::kBitField2Offset));
1589 // Retrieve elements_kind from bit field 2. 1589 // Retrieve elements_kind from bit field 2.
1590 __ and_(result, Immediate(Map::kElementsKindMask)); 1590 __ and_(result, Immediate(Map::kElementsKindMask));
1591 __ shr(result, Immediate(Map::kElementsKindShift)); 1591 __ shr(result, Immediate(Map::kElementsKindShift));
1592 } 1592 }
1593 1593
1594 1594
1595 void LCodeGen::DoValueOf(LValueOf* instr) { 1595 void LCodeGen::DoValueOf(LValueOf* instr) {
1596 Register input = ToRegister(instr->value()); 1596 Register input = ToRegister(instr->value());
1597 Register result = ToRegister(instr->result()); 1597 Register result = ToRegister(instr->result());
1598 ASSERT(input.is(result)); 1598 ASSERT(input.is(result));
1599 Label done; 1599 Label done;
1600 1600
1601 if (!instr->hydrogen()->value()->IsHeapObject()) { 1601 if (!instr->hydrogen()->value()->IsHeapObject()) {
1602 // If the object is a smi return the object. 1602 // If the object is a smi return the object.
1603 __ JumpIfSmi(input, &done, Label::kNear); 1603 __ JumpIfSmi(input, &done, Label::kNear);
1604 } 1604 }
1605 1605
1606 // If the object is not a value type, return the object. 1606 // If the object is not a value type, return the object.
1607 __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister); 1607 __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister);
1608 __ j(not_equal, &done, Label::kNear); 1608 __ j(not_equal, &done, Label::kNear);
1609 __ movq(result, FieldOperand(input, JSValue::kValueOffset)); 1609 __ movp(result, FieldOperand(input, JSValue::kValueOffset));
1610 1610
1611 __ bind(&done); 1611 __ bind(&done);
1612 } 1612 }
1613 1613
1614 1614
1615 void LCodeGen::DoDateField(LDateField* instr) { 1615 void LCodeGen::DoDateField(LDateField* instr) {
1616 Register object = ToRegister(instr->date()); 1616 Register object = ToRegister(instr->date());
1617 Register result = ToRegister(instr->result()); 1617 Register result = ToRegister(instr->result());
1618 Smi* index = instr->index(); 1618 Smi* index = instr->index();
1619 Label runtime, done, not_date_object; 1619 Label runtime, done, not_date_object;
1620 ASSERT(object.is(result)); 1620 ASSERT(object.is(result));
1621 ASSERT(object.is(rax)); 1621 ASSERT(object.is(rax));
1622 1622
1623 Condition cc = masm()->CheckSmi(object); 1623 Condition cc = masm()->CheckSmi(object);
1624 DeoptimizeIf(cc, instr->environment()); 1624 DeoptimizeIf(cc, instr->environment());
1625 __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister); 1625 __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
1626 DeoptimizeIf(not_equal, instr->environment()); 1626 DeoptimizeIf(not_equal, instr->environment());
1627 1627
1628 if (index->value() == 0) { 1628 if (index->value() == 0) {
1629 __ movq(result, FieldOperand(object, JSDate::kValueOffset)); 1629 __ movp(result, FieldOperand(object, JSDate::kValueOffset));
1630 } else { 1630 } else {
1631 if (index->value() < JSDate::kFirstUncachedField) { 1631 if (index->value() < JSDate::kFirstUncachedField) {
1632 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); 1632 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1633 Operand stamp_operand = __ ExternalOperand(stamp); 1633 Operand stamp_operand = __ ExternalOperand(stamp);
1634 __ movq(kScratchRegister, stamp_operand); 1634 __ movp(kScratchRegister, stamp_operand);
1635 __ cmpq(kScratchRegister, FieldOperand(object, 1635 __ cmpq(kScratchRegister, FieldOperand(object,
1636 JSDate::kCacheStampOffset)); 1636 JSDate::kCacheStampOffset));
1637 __ j(not_equal, &runtime, Label::kNear); 1637 __ j(not_equal, &runtime, Label::kNear);
1638 __ movq(result, FieldOperand(object, JSDate::kValueOffset + 1638 __ movp(result, FieldOperand(object, JSDate::kValueOffset +
1639 kPointerSize * index->value())); 1639 kPointerSize * index->value()));
1640 __ jmp(&done, Label::kNear); 1640 __ jmp(&done, Label::kNear);
1641 } 1641 }
1642 __ bind(&runtime); 1642 __ bind(&runtime);
1643 __ PrepareCallCFunction(2); 1643 __ PrepareCallCFunction(2);
1644 __ movq(arg_reg_1, object); 1644 __ movp(arg_reg_1, object);
1645 __ Move(arg_reg_2, index, RelocInfo::NONE64); 1645 __ Move(arg_reg_2, index, RelocInfo::NONE64);
1646 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); 1646 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1647 __ bind(&done); 1647 __ bind(&done);
1648 } 1648 }
1649 } 1649 }
1650 1650
1651 1651
1652 Operand LCodeGen::BuildSeqStringOperand(Register string, 1652 Operand LCodeGen::BuildSeqStringOperand(Register string,
1653 LOperand* index, 1653 LOperand* index,
1654 String::Encoding encoding) { 1654 String::Encoding encoding) {
(...skipping 12 matching lines...) Expand all
1667 } 1667 }
1668 1668
1669 1669
1670 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) { 1670 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
1671 String::Encoding encoding = instr->hydrogen()->encoding(); 1671 String::Encoding encoding = instr->hydrogen()->encoding();
1672 Register result = ToRegister(instr->result()); 1672 Register result = ToRegister(instr->result());
1673 Register string = ToRegister(instr->string()); 1673 Register string = ToRegister(instr->string());
1674 1674
1675 if (FLAG_debug_code) { 1675 if (FLAG_debug_code) {
1676 __ push(string); 1676 __ push(string);
1677 __ movq(string, FieldOperand(string, HeapObject::kMapOffset)); 1677 __ movp(string, FieldOperand(string, HeapObject::kMapOffset));
1678 __ movzxbq(string, FieldOperand(string, Map::kInstanceTypeOffset)); 1678 __ movzxbq(string, FieldOperand(string, Map::kInstanceTypeOffset));
1679 1679
1680 __ andb(string, Immediate(kStringRepresentationMask | kStringEncodingMask)); 1680 __ andb(string, Immediate(kStringRepresentationMask | kStringEncodingMask));
1681 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 1681 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
1682 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 1682 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
1683 __ cmpq(string, Immediate(encoding == String::ONE_BYTE_ENCODING 1683 __ cmpq(string, Immediate(encoding == String::ONE_BYTE_ENCODING
1684 ? one_byte_seq_type : two_byte_seq_type)); 1684 ? one_byte_seq_type : two_byte_seq_type));
1685 __ Check(equal, kUnexpectedStringType); 1685 __ Check(equal, kUnexpectedStringType);
1686 __ pop(string); 1686 __ pop(string);
1687 } 1687 }
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
1808 Condition condition = (operation == HMathMinMax::kMathMin) 1808 Condition condition = (operation == HMathMinMax::kMathMin)
1809 ? less_equal 1809 ? less_equal
1810 : greater_equal; 1810 : greater_equal;
1811 Register left_reg = ToRegister(left); 1811 Register left_reg = ToRegister(left);
1812 if (right->IsConstantOperand()) { 1812 if (right->IsConstantOperand()) {
1813 Immediate right_imm = 1813 Immediate right_imm =
1814 Immediate(ToInteger32(LConstantOperand::cast(right))); 1814 Immediate(ToInteger32(LConstantOperand::cast(right)));
1815 ASSERT(!instr->hydrogen_value()->representation().IsSmi()); 1815 ASSERT(!instr->hydrogen_value()->representation().IsSmi());
1816 __ cmpl(left_reg, right_imm); 1816 __ cmpl(left_reg, right_imm);
1817 __ j(condition, &return_left, Label::kNear); 1817 __ j(condition, &return_left, Label::kNear);
1818 __ movq(left_reg, right_imm); 1818 __ movp(left_reg, right_imm);
1819 } else if (right->IsRegister()) { 1819 } else if (right->IsRegister()) {
1820 Register right_reg = ToRegister(right); 1820 Register right_reg = ToRegister(right);
1821 if (instr->hydrogen_value()->representation().IsSmi()) { 1821 if (instr->hydrogen_value()->representation().IsSmi()) {
1822 __ cmpq(left_reg, right_reg); 1822 __ cmpq(left_reg, right_reg);
1823 } else { 1823 } else {
1824 __ cmpl(left_reg, right_reg); 1824 __ cmpl(left_reg, right_reg);
1825 } 1825 }
1826 __ j(condition, &return_left, Label::kNear); 1826 __ j(condition, &return_left, Label::kNear);
1827 __ movq(left_reg, right_reg); 1827 __ movp(left_reg, right_reg);
1828 } else { 1828 } else {
1829 Operand right_op = ToOperand(right); 1829 Operand right_op = ToOperand(right);
1830 if (instr->hydrogen_value()->representation().IsSmi()) { 1830 if (instr->hydrogen_value()->representation().IsSmi()) {
1831 __ cmpq(left_reg, right_op); 1831 __ cmpq(left_reg, right_op);
1832 } else { 1832 } else {
1833 __ cmpl(left_reg, right_op); 1833 __ cmpl(left_reg, right_op);
1834 } 1834 }
1835 __ j(condition, &return_left, Label::kNear); 1835 __ j(condition, &return_left, Label::kNear);
1836 __ movq(left_reg, right_op); 1836 __ movp(left_reg, right_op);
1837 } 1837 }
1838 __ bind(&return_left); 1838 __ bind(&return_left);
1839 } else { 1839 } else {
1840 ASSERT(instr->hydrogen()->representation().IsDouble()); 1840 ASSERT(instr->hydrogen()->representation().IsDouble());
1841 Label check_nan_left, check_zero, return_left, return_right; 1841 Label check_nan_left, check_zero, return_left, return_right;
1842 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; 1842 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above;
1843 XMMRegister left_reg = ToDoubleRegister(left); 1843 XMMRegister left_reg = ToDoubleRegister(left);
1844 XMMRegister right_reg = ToDoubleRegister(right); 1844 XMMRegister right_reg = ToDoubleRegister(right);
1845 __ ucomisd(left_reg, right_reg); 1845 __ ucomisd(left_reg, right_reg);
1846 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. 1846 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN.
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after
2032 __ j(equal, instr->FalseLabel(chunk_)); 2032 __ j(equal, instr->FalseLabel(chunk_));
2033 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); 2033 __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
2034 } else if (expected.NeedsMap()) { 2034 } else if (expected.NeedsMap()) {
2035 // If we need a map later and have a Smi -> deopt. 2035 // If we need a map later and have a Smi -> deopt.
2036 __ testb(reg, Immediate(kSmiTagMask)); 2036 __ testb(reg, Immediate(kSmiTagMask));
2037 DeoptimizeIf(zero, instr->environment()); 2037 DeoptimizeIf(zero, instr->environment());
2038 } 2038 }
2039 2039
2040 const Register map = kScratchRegister; 2040 const Register map = kScratchRegister;
2041 if (expected.NeedsMap()) { 2041 if (expected.NeedsMap()) {
2042 __ movq(map, FieldOperand(reg, HeapObject::kMapOffset)); 2042 __ movp(map, FieldOperand(reg, HeapObject::kMapOffset));
2043 2043
2044 if (expected.CanBeUndetectable()) { 2044 if (expected.CanBeUndetectable()) {
2045 // Undetectable -> false. 2045 // Undetectable -> false.
2046 __ testb(FieldOperand(map, Map::kBitFieldOffset), 2046 __ testb(FieldOperand(map, Map::kBitFieldOffset),
2047 Immediate(1 << Map::kIsUndetectable)); 2047 Immediate(1 << Map::kIsUndetectable));
2048 __ j(not_zero, instr->FalseLabel(chunk_)); 2048 __ j(not_zero, instr->FalseLabel(chunk_));
2049 } 2049 }
2050 } 2050 }
2051 2051
2052 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { 2052 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
(...skipping 213 matching lines...) Expand 10 before | Expand all | Expand 10 after
2266 Condition LCodeGen::EmitIsObject(Register input, 2266 Condition LCodeGen::EmitIsObject(Register input,
2267 Label* is_not_object, 2267 Label* is_not_object,
2268 Label* is_object) { 2268 Label* is_object) {
2269 ASSERT(!input.is(kScratchRegister)); 2269 ASSERT(!input.is(kScratchRegister));
2270 2270
2271 __ JumpIfSmi(input, is_not_object); 2271 __ JumpIfSmi(input, is_not_object);
2272 2272
2273 __ CompareRoot(input, Heap::kNullValueRootIndex); 2273 __ CompareRoot(input, Heap::kNullValueRootIndex);
2274 __ j(equal, is_object); 2274 __ j(equal, is_object);
2275 2275
2276 __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); 2276 __ movp(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
2277 // Undetectable objects behave like undefined. 2277 // Undetectable objects behave like undefined.
2278 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), 2278 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
2279 Immediate(1 << Map::kIsUndetectable)); 2279 Immediate(1 << Map::kIsUndetectable));
2280 __ j(not_zero, is_not_object); 2280 __ j(not_zero, is_not_object);
2281 2281
2282 __ movzxbl(kScratchRegister, 2282 __ movzxbl(kScratchRegister,
2283 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); 2283 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
2284 __ cmpb(kScratchRegister, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 2284 __ cmpb(kScratchRegister, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2285 __ j(below, is_not_object); 2285 __ j(below, is_not_object);
2286 __ cmpb(kScratchRegister, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); 2286 __ cmpb(kScratchRegister, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
2340 } 2340 }
2341 2341
2342 2342
2343 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) { 2343 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
2344 Register input = ToRegister(instr->value()); 2344 Register input = ToRegister(instr->value());
2345 Register temp = ToRegister(instr->temp()); 2345 Register temp = ToRegister(instr->temp());
2346 2346
2347 if (!instr->hydrogen()->value()->IsHeapObject()) { 2347 if (!instr->hydrogen()->value()->IsHeapObject()) {
2348 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); 2348 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2349 } 2349 }
2350 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); 2350 __ movp(temp, FieldOperand(input, HeapObject::kMapOffset));
2351 __ testb(FieldOperand(temp, Map::kBitFieldOffset), 2351 __ testb(FieldOperand(temp, Map::kBitFieldOffset),
2352 Immediate(1 << Map::kIsUndetectable)); 2352 Immediate(1 << Map::kIsUndetectable));
2353 EmitBranch(instr, not_zero); 2353 EmitBranch(instr, not_zero);
2354 } 2354 }
2355 2355
2356 2356
2357 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { 2357 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2358 ASSERT(ToRegister(instr->context()).is(rsi)); 2358 ASSERT(ToRegister(instr->context()).is(rsi));
2359 Token::Value op = instr->op(); 2359 Token::Value op = instr->op();
2360 2360
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
2446 LAST_SPEC_OBJECT_TYPE - 1); 2446 LAST_SPEC_OBJECT_TYPE - 1);
2447 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); 2447 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
2448 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp); 2448 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
2449 __ j(below, is_false); 2449 __ j(below, is_false);
2450 __ j(equal, is_true); 2450 __ j(equal, is_true);
2451 __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE); 2451 __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE);
2452 __ j(equal, is_true); 2452 __ j(equal, is_true);
2453 } else { 2453 } else {
2454 // Faster code path to avoid two compares: subtract lower bound from the 2454 // Faster code path to avoid two compares: subtract lower bound from the
2455 // actual type and do a signed compare with the width of the type range. 2455 // actual type and do a signed compare with the width of the type range.
2456 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); 2456 __ movp(temp, FieldOperand(input, HeapObject::kMapOffset));
2457 __ movzxbl(temp2, FieldOperand(temp, Map::kInstanceTypeOffset)); 2457 __ movzxbl(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
2458 __ subq(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 2458 __ subq(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2459 __ cmpq(temp2, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE - 2459 __ cmpq(temp2, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
2460 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 2460 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2461 __ j(above, is_false); 2461 __ j(above, is_false);
2462 } 2462 }
2463 2463
2464 // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range. 2464 // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
2465 // Check if the constructor in the map is a function. 2465 // Check if the constructor in the map is a function.
2466 __ movq(temp, FieldOperand(temp, Map::kConstructorOffset)); 2466 __ movp(temp, FieldOperand(temp, Map::kConstructorOffset));
2467 2467
2468 // Objects with a non-function constructor have class 'Object'. 2468 // Objects with a non-function constructor have class 'Object'.
2469 __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister); 2469 __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
2470 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Object"))) { 2470 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Object"))) {
2471 __ j(not_equal, is_true); 2471 __ j(not_equal, is_true);
2472 } else { 2472 } else {
2473 __ j(not_equal, is_false); 2473 __ j(not_equal, is_false);
2474 } 2474 }
2475 2475
2476 // temp now contains the constructor function. Grab the 2476 // temp now contains the constructor function. Grab the
2477 // instance class name from there. 2477 // instance class name from there.
2478 __ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset)); 2478 __ movp(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
2479 __ movq(temp, FieldOperand(temp, 2479 __ movp(temp, FieldOperand(temp,
2480 SharedFunctionInfo::kInstanceClassNameOffset)); 2480 SharedFunctionInfo::kInstanceClassNameOffset));
2481 // The class name we are testing against is internalized since it's a literal. 2481 // The class name we are testing against is internalized since it's a literal.
2482 // The name in the constructor is internalized because of the way the context 2482 // The name in the constructor is internalized because of the way the context
2483 // is booted. This routine isn't expected to work for random API-created 2483 // is booted. This routine isn't expected to work for random API-created
2484 // classes and it doesn't have to because you can't access it with natives 2484 // classes and it doesn't have to because you can't access it with natives
2485 // syntax. Since both sides are internalized it is sufficient to use an 2485 // syntax. Since both sides are internalized it is sufficient to use an
2486 // identity comparison. 2486 // identity comparison.
2487 ASSERT(class_name->IsInternalizedString()); 2487 ASSERT(class_name->IsInternalizedString());
2488 __ Cmp(temp, class_name); 2488 __ Cmp(temp, class_name);
2489 // End with the answer in the z flag. 2489 // End with the answer in the z flag.
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after
2553 2553
2554 // A Smi is not an instance of anything. 2554 // A Smi is not an instance of anything.
2555 __ JumpIfSmi(object, &false_result, Label::kNear); 2555 __ JumpIfSmi(object, &false_result, Label::kNear);
2556 2556
2557 // This is the inlined call site instanceof cache. The two occurences of the 2557 // This is the inlined call site instanceof cache. The two occurences of the
2558 // hole value will be patched to the last map/result pair generated by the 2558 // hole value will be patched to the last map/result pair generated by the
2559 // instanceof stub. 2559 // instanceof stub.
2560 Label cache_miss; 2560 Label cache_miss;
2561 // Use a temp register to avoid memory operands with variable lengths. 2561 // Use a temp register to avoid memory operands with variable lengths.
2562 Register map = ToRegister(instr->temp()); 2562 Register map = ToRegister(instr->temp());
2563 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); 2563 __ movp(map, FieldOperand(object, HeapObject::kMapOffset));
2564 __ bind(deferred->map_check()); // Label for calculating code patching. 2564 __ bind(deferred->map_check()); // Label for calculating code patching.
2565 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); 2565 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
2566 __ Move(kScratchRegister, cache_cell, RelocInfo::CELL); 2566 __ Move(kScratchRegister, cache_cell, RelocInfo::CELL);
2567 __ cmpq(map, Operand(kScratchRegister, 0)); 2567 __ cmpq(map, Operand(kScratchRegister, 0));
2568 __ j(not_equal, &cache_miss, Label::kNear); 2568 __ j(not_equal, &cache_miss, Label::kNear);
2569 // Patched to load either true or false. 2569 // Patched to load either true or false.
2570 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); 2570 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
2571 #ifdef DEBUG 2571 #ifdef DEBUG
2572 // Check that the code size between patch label and patch sites is invariant. 2572 // Check that the code size between patch label and patch sites is invariant.
2573 Label end_of_patched_code; 2573 Label end_of_patched_code;
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
2617 CallCodeGeneric(stub.GetCode(isolate()), 2617 CallCodeGeneric(stub.GetCode(isolate()),
2618 RelocInfo::CODE_TARGET, 2618 RelocInfo::CODE_TARGET,
2619 instr, 2619 instr,
2620 RECORD_SAFEPOINT_WITH_REGISTERS, 2620 RECORD_SAFEPOINT_WITH_REGISTERS,
2621 2); 2621 2);
2622 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); 2622 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
2623 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); 2623 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2624 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 2624 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2625 // Move result to a register that survives the end of the 2625 // Move result to a register that survives the end of the
2626 // PushSafepointRegisterScope. 2626 // PushSafepointRegisterScope.
2627 __ movq(kScratchRegister, rax); 2627 __ movp(kScratchRegister, rax);
2628 } 2628 }
2629 __ testq(kScratchRegister, kScratchRegister); 2629 __ testq(kScratchRegister, kScratchRegister);
2630 Label load_false; 2630 Label load_false;
2631 Label done; 2631 Label done;
2632 __ j(not_zero, &load_false, Label::kNear); 2632 __ j(not_zero, &load_false, Label::kNear);
2633 __ LoadRoot(rax, Heap::kTrueValueRootIndex); 2633 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2634 __ jmp(&done, Label::kNear); 2634 __ jmp(&done, Label::kNear);
2635 __ bind(&load_false); 2635 __ bind(&load_false);
2636 __ LoadRoot(rax, Heap::kFalseValueRootIndex); 2636 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2637 __ bind(&done); 2637 __ bind(&done);
(...skipping 19 matching lines...) Expand all
2657 } 2657 }
2658 2658
2659 2659
2660 void LCodeGen::DoReturn(LReturn* instr) { 2660 void LCodeGen::DoReturn(LReturn* instr) {
2661 if (FLAG_trace && info()->IsOptimizing()) { 2661 if (FLAG_trace && info()->IsOptimizing()) {
2662 // Preserve the return value on the stack and rely on the runtime call 2662 // Preserve the return value on the stack and rely on the runtime call
2663 // to return the value in the same register. We're leaving the code 2663 // to return the value in the same register. We're leaving the code
2664 // managed by the register allocator and tearing down the frame, it's 2664 // managed by the register allocator and tearing down the frame, it's
2665 // safe to write to the context register. 2665 // safe to write to the context register.
2666 __ push(rax); 2666 __ push(rax);
2667 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2667 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2668 __ CallRuntime(Runtime::kTraceExit, 1); 2668 __ CallRuntime(Runtime::kTraceExit, 1);
2669 } 2669 }
2670 if (info()->saves_caller_doubles()) { 2670 if (info()->saves_caller_doubles()) {
2671 RestoreCallerDoubles(); 2671 RestoreCallerDoubles();
2672 } 2672 }
2673 int no_frame_start = -1; 2673 int no_frame_start = -1;
2674 if (NeedsEagerFrame()) { 2674 if (NeedsEagerFrame()) {
2675 __ movq(rsp, rbp); 2675 __ movp(rsp, rbp);
2676 __ pop(rbp); 2676 __ pop(rbp);
2677 no_frame_start = masm_->pc_offset(); 2677 no_frame_start = masm_->pc_offset();
2678 } 2678 }
2679 if (instr->has_constant_parameter_count()) { 2679 if (instr->has_constant_parameter_count()) {
2680 __ Ret((ToInteger32(instr->constant_parameter_count()) + 1) * kPointerSize, 2680 __ Ret((ToInteger32(instr->constant_parameter_count()) + 1) * kPointerSize,
2681 rcx); 2681 rcx);
2682 } else { 2682 } else {
2683 Register reg = ToRegister(instr->parameter_count()); 2683 Register reg = ToRegister(instr->parameter_count());
2684 // The argument count parameter is a smi 2684 // The argument count parameter is a smi
2685 __ SmiToInteger32(reg, reg); 2685 __ SmiToInteger32(reg, reg);
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
2726 // to update the property details in the property dictionary to mark 2726 // to update the property details in the property dictionary to mark
2727 // it as no longer deleted. We deoptimize in that case. 2727 // it as no longer deleted. We deoptimize in that case.
2728 if (instr->hydrogen()->RequiresHoleCheck()) { 2728 if (instr->hydrogen()->RequiresHoleCheck()) {
2729 // We have a temp because CompareRoot might clobber kScratchRegister. 2729 // We have a temp because CompareRoot might clobber kScratchRegister.
2730 Register cell = ToRegister(instr->temp()); 2730 Register cell = ToRegister(instr->temp());
2731 ASSERT(!value.is(cell)); 2731 ASSERT(!value.is(cell));
2732 __ Move(cell, cell_handle, RelocInfo::CELL); 2732 __ Move(cell, cell_handle, RelocInfo::CELL);
2733 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex); 2733 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
2734 DeoptimizeIf(equal, instr->environment()); 2734 DeoptimizeIf(equal, instr->environment());
2735 // Store the value. 2735 // Store the value.
2736 __ movq(Operand(cell, 0), value); 2736 __ movp(Operand(cell, 0), value);
2737 } else { 2737 } else {
2738 // Store the value. 2738 // Store the value.
2739 __ Move(kScratchRegister, cell_handle, RelocInfo::CELL); 2739 __ Move(kScratchRegister, cell_handle, RelocInfo::CELL);
2740 __ movq(Operand(kScratchRegister, 0), value); 2740 __ movp(Operand(kScratchRegister, 0), value);
2741 } 2741 }
2742 // Cells are always rescanned, so no write barrier here. 2742 // Cells are always rescanned, so no write barrier here.
2743 } 2743 }
2744 2744
2745 2745
2746 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2747 ASSERT(ToRegister(instr->context()).is(rsi));
2748 ASSERT(ToRegister(instr->global_object()).is(rdx));
2749 ASSERT(ToRegister(instr->value()).is(rax));
2750
2751 __ Move(rcx, instr->name());
2752 Handle<Code> ic = StoreIC::initialize_stub(isolate(),
2753 instr->strict_mode_flag(),
2754 CONTEXTUAL);
2755 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2756 }
2757
2758
2759 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { 2746 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2760 Register context = ToRegister(instr->context()); 2747 Register context = ToRegister(instr->context());
2761 Register result = ToRegister(instr->result()); 2748 Register result = ToRegister(instr->result());
2762 __ movq(result, ContextOperand(context, instr->slot_index())); 2749 __ movp(result, ContextOperand(context, instr->slot_index()));
2763 if (instr->hydrogen()->RequiresHoleCheck()) { 2750 if (instr->hydrogen()->RequiresHoleCheck()) {
2764 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 2751 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2765 if (instr->hydrogen()->DeoptimizesOnHole()) { 2752 if (instr->hydrogen()->DeoptimizesOnHole()) {
2766 DeoptimizeIf(equal, instr->environment()); 2753 DeoptimizeIf(equal, instr->environment());
2767 } else { 2754 } else {
2768 Label is_not_hole; 2755 Label is_not_hole;
2769 __ j(not_equal, &is_not_hole, Label::kNear); 2756 __ j(not_equal, &is_not_hole, Label::kNear);
2770 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 2757 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2771 __ bind(&is_not_hole); 2758 __ bind(&is_not_hole);
2772 } 2759 }
2773 } 2760 }
2774 } 2761 }
2775 2762
2776 2763
2777 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { 2764 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2778 Register context = ToRegister(instr->context()); 2765 Register context = ToRegister(instr->context());
2779 Register value = ToRegister(instr->value()); 2766 Register value = ToRegister(instr->value());
2780 2767
2781 Operand target = ContextOperand(context, instr->slot_index()); 2768 Operand target = ContextOperand(context, instr->slot_index());
2782 2769
2783 Label skip_assignment; 2770 Label skip_assignment;
2784 if (instr->hydrogen()->RequiresHoleCheck()) { 2771 if (instr->hydrogen()->RequiresHoleCheck()) {
2785 __ CompareRoot(target, Heap::kTheHoleValueRootIndex); 2772 __ CompareRoot(target, Heap::kTheHoleValueRootIndex);
2786 if (instr->hydrogen()->DeoptimizesOnHole()) { 2773 if (instr->hydrogen()->DeoptimizesOnHole()) {
2787 DeoptimizeIf(equal, instr->environment()); 2774 DeoptimizeIf(equal, instr->environment());
2788 } else { 2775 } else {
2789 __ j(not_equal, &skip_assignment); 2776 __ j(not_equal, &skip_assignment);
2790 } 2777 }
2791 } 2778 }
2792 __ movq(target, value); 2779 __ movp(target, value);
2793 2780
2794 if (instr->hydrogen()->NeedsWriteBarrier()) { 2781 if (instr->hydrogen()->NeedsWriteBarrier()) {
2795 SmiCheck check_needed = 2782 SmiCheck check_needed =
2796 instr->hydrogen()->value()->IsHeapObject() 2783 instr->hydrogen()->value()->IsHeapObject()
2797 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; 2784 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
2798 int offset = Context::SlotOffset(instr->slot_index()); 2785 int offset = Context::SlotOffset(instr->slot_index());
2799 Register scratch = ToRegister(instr->temp()); 2786 Register scratch = ToRegister(instr->temp());
2800 __ RecordWriteContextSlot(context, 2787 __ RecordWriteContextSlot(context,
2801 offset, 2788 offset,
2802 value, 2789 value,
(...skipping 26 matching lines...) Expand all
2829 Register object = ToRegister(instr->object()); 2816 Register object = ToRegister(instr->object());
2830 if (FLAG_track_double_fields && 2817 if (FLAG_track_double_fields &&
2831 instr->hydrogen()->representation().IsDouble()) { 2818 instr->hydrogen()->representation().IsDouble()) {
2832 XMMRegister result = ToDoubleRegister(instr->result()); 2819 XMMRegister result = ToDoubleRegister(instr->result());
2833 __ movsd(result, FieldOperand(object, offset)); 2820 __ movsd(result, FieldOperand(object, offset));
2834 return; 2821 return;
2835 } 2822 }
2836 2823
2837 Register result = ToRegister(instr->result()); 2824 Register result = ToRegister(instr->result());
2838 if (!access.IsInobject()) { 2825 if (!access.IsInobject()) {
2839 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); 2826 __ movp(result, FieldOperand(object, JSObject::kPropertiesOffset));
2840 object = result; 2827 object = result;
2841 } 2828 }
2842 2829
2843 Representation representation = access.representation(); 2830 Representation representation = access.representation();
2844 if (representation.IsSmi() && 2831 if (representation.IsSmi() &&
2845 instr->hydrogen()->representation().IsInteger32()) { 2832 instr->hydrogen()->representation().IsInteger32()) {
2846 // Read int value directly from upper half of the smi. 2833 // Read int value directly from upper half of the smi.
2847 STATIC_ASSERT(kSmiTag == 0); 2834 STATIC_ASSERT(kSmiTag == 0);
2848 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); 2835 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32);
2849 offset += kPointerSize / 2; 2836 offset += kPointerSize / 2;
(...skipping 22 matching lines...) Expand all
2872 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); 2859 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
2873 DeoptimizeIf(not_equal, instr->environment()); 2860 DeoptimizeIf(not_equal, instr->environment());
2874 2861
2875 // Check whether the function has an instance prototype. 2862 // Check whether the function has an instance prototype.
2876 Label non_instance; 2863 Label non_instance;
2877 __ testb(FieldOperand(result, Map::kBitFieldOffset), 2864 __ testb(FieldOperand(result, Map::kBitFieldOffset),
2878 Immediate(1 << Map::kHasNonInstancePrototype)); 2865 Immediate(1 << Map::kHasNonInstancePrototype));
2879 __ j(not_zero, &non_instance, Label::kNear); 2866 __ j(not_zero, &non_instance, Label::kNear);
2880 2867
2881 // Get the prototype or initial map from the function. 2868 // Get the prototype or initial map from the function.
2882 __ movq(result, 2869 __ movp(result,
2883 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 2870 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2884 2871
2885 // Check that the function has a prototype or an initial map. 2872 // Check that the function has a prototype or an initial map.
2886 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 2873 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2887 DeoptimizeIf(equal, instr->environment()); 2874 DeoptimizeIf(equal, instr->environment());
2888 2875
2889 // If the function does not have an initial map, we're done. 2876 // If the function does not have an initial map, we're done.
2890 Label done; 2877 Label done;
2891 __ CmpObjectType(result, MAP_TYPE, kScratchRegister); 2878 __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
2892 __ j(not_equal, &done, Label::kNear); 2879 __ j(not_equal, &done, Label::kNear);
2893 2880
2894 // Get the prototype from the initial map. 2881 // Get the prototype from the initial map.
2895 __ movq(result, FieldOperand(result, Map::kPrototypeOffset)); 2882 __ movp(result, FieldOperand(result, Map::kPrototypeOffset));
2896 __ jmp(&done, Label::kNear); 2883 __ jmp(&done, Label::kNear);
2897 2884
2898 // Non-instance prototype: Fetch prototype from constructor field 2885 // Non-instance prototype: Fetch prototype from constructor field
2899 // in the function's map. 2886 // in the function's map.
2900 __ bind(&non_instance); 2887 __ bind(&non_instance);
2901 __ movq(result, FieldOperand(result, Map::kConstructorOffset)); 2888 __ movp(result, FieldOperand(result, Map::kConstructorOffset));
2902 2889
2903 // All done. 2890 // All done.
2904 __ bind(&done); 2891 __ bind(&done);
2905 } 2892 }
2906 2893
2907 2894
2908 void LCodeGen::DoLoadRoot(LLoadRoot* instr) { 2895 void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
2909 Register result = ToRegister(instr->result()); 2896 Register result = ToRegister(instr->result());
2910 __ LoadRoot(result, instr->index()); 2897 __ LoadRoot(result, instr->index());
2911 } 2898 }
2912 2899
2913 2900
2914 void LCodeGen::DoLoadExternalArrayPointer( 2901 void LCodeGen::DoLoadExternalArrayPointer(
2915 LLoadExternalArrayPointer* instr) { 2902 LLoadExternalArrayPointer* instr) {
2916 Register result = ToRegister(instr->result()); 2903 Register result = ToRegister(instr->result());
2917 Register input = ToRegister(instr->object()); 2904 Register input = ToRegister(instr->object());
2918 __ movq(result, FieldOperand(input, 2905 __ movp(result, FieldOperand(input,
2919 ExternalPixelArray::kExternalPointerOffset)); 2906 ExternalPixelArray::kExternalPointerOffset));
2920 } 2907 }
2921 2908
2922 2909
2923 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { 2910 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2924 Register arguments = ToRegister(instr->arguments()); 2911 Register arguments = ToRegister(instr->arguments());
2925 Register result = ToRegister(instr->result()); 2912 Register result = ToRegister(instr->result());
2926 2913
2927 if (instr->length()->IsConstantOperand() && 2914 if (instr->length()->IsConstantOperand() &&
2928 instr->index()->IsConstantOperand()) { 2915 instr->index()->IsConstantOperand()) {
2929 int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index())); 2916 int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
2930 int32_t const_length = ToInteger32(LConstantOperand::cast(instr->length())); 2917 int32_t const_length = ToInteger32(LConstantOperand::cast(instr->length()));
2931 StackArgumentsAccessor args(arguments, const_length, 2918 StackArgumentsAccessor args(arguments, const_length,
2932 ARGUMENTS_DONT_CONTAIN_RECEIVER); 2919 ARGUMENTS_DONT_CONTAIN_RECEIVER);
2933 __ movq(result, args.GetArgumentOperand(const_index)); 2920 __ movp(result, args.GetArgumentOperand(const_index));
2934 } else { 2921 } else {
2935 Register length = ToRegister(instr->length()); 2922 Register length = ToRegister(instr->length());
2936 // There are two words between the frame pointer and the last argument. 2923 // There are two words between the frame pointer and the last argument.
2937 // Subtracting from length accounts for one of them add one more. 2924 // Subtracting from length accounts for one of them add one more.
2938 if (instr->index()->IsRegister()) { 2925 if (instr->index()->IsRegister()) {
2939 __ subl(length, ToRegister(instr->index())); 2926 __ subl(length, ToRegister(instr->index()));
2940 } else { 2927 } else {
2941 __ subl(length, ToOperand(instr->index())); 2928 __ subl(length, ToOperand(instr->index()));
2942 } 2929 }
2943 StackArgumentsAccessor args(arguments, length, 2930 StackArgumentsAccessor args(arguments, length,
2944 ARGUMENTS_DONT_CONTAIN_RECEIVER); 2931 ARGUMENTS_DONT_CONTAIN_RECEIVER);
2945 __ movq(result, args.GetArgumentOperand(0)); 2932 __ movp(result, args.GetArgumentOperand(0));
2946 } 2933 }
2947 } 2934 }
2948 2935
2949 2936
2950 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { 2937 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
2951 ElementsKind elements_kind = instr->elements_kind(); 2938 ElementsKind elements_kind = instr->elements_kind();
2952 LOperand* key = instr->key(); 2939 LOperand* key = instr->key();
2953 if (!key->IsConstantOperand()) { 2940 if (!key->IsConstantOperand()) {
2954 Register key_reg = ToRegister(key); 2941 Register key_reg = ToRegister(key);
2955 // Even though the HLoad/StoreKeyed (in this case) instructions force 2942 // Even though the HLoad/StoreKeyed (in this case) instructions force
(...skipping 216 matching lines...) Expand 10 before | Expand all | Expand 10 after
3172 3159
3173 3160
3174 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 3161 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
3175 Register result = ToRegister(instr->result()); 3162 Register result = ToRegister(instr->result());
3176 3163
3177 if (instr->hydrogen()->from_inlined()) { 3164 if (instr->hydrogen()->from_inlined()) {
3178 __ lea(result, Operand(rsp, -kFPOnStackSize + -kPCOnStackSize)); 3165 __ lea(result, Operand(rsp, -kFPOnStackSize + -kPCOnStackSize));
3179 } else { 3166 } else {
3180 // Check for arguments adapter frame. 3167 // Check for arguments adapter frame.
3181 Label done, adapted; 3168 Label done, adapted;
3182 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 3169 __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3183 __ Cmp(Operand(result, StandardFrameConstants::kContextOffset), 3170 __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
3184 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 3171 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3185 __ j(equal, &adapted, Label::kNear); 3172 __ j(equal, &adapted, Label::kNear);
3186 3173
3187 // No arguments adaptor frame. 3174 // No arguments adaptor frame.
3188 __ movq(result, rbp); 3175 __ movp(result, rbp);
3189 __ jmp(&done, Label::kNear); 3176 __ jmp(&done, Label::kNear);
3190 3177
3191 // Arguments adaptor frame present. 3178 // Arguments adaptor frame present.
3192 __ bind(&adapted); 3179 __ bind(&adapted);
3193 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 3180 __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3194 3181
3195 // Result is the frame pointer for the frame if not adapted and for the real 3182 // Result is the frame pointer for the frame if not adapted and for the real
3196 // frame below the adaptor frame if adapted. 3183 // frame below the adaptor frame if adapted.
3197 __ bind(&done); 3184 __ bind(&done);
3198 } 3185 }
3199 } 3186 }
3200 3187
3201 3188
3202 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { 3189 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
3203 Register result = ToRegister(instr->result()); 3190 Register result = ToRegister(instr->result());
3204 3191
3205 Label done; 3192 Label done;
3206 3193
3207 // If no arguments adaptor frame the number of arguments is fixed. 3194 // If no arguments adaptor frame the number of arguments is fixed.
3208 if (instr->elements()->IsRegister()) { 3195 if (instr->elements()->IsRegister()) {
3209 __ cmpq(rbp, ToRegister(instr->elements())); 3196 __ cmpq(rbp, ToRegister(instr->elements()));
3210 } else { 3197 } else {
3211 __ cmpq(rbp, ToOperand(instr->elements())); 3198 __ cmpq(rbp, ToOperand(instr->elements()));
3212 } 3199 }
3213 __ movl(result, Immediate(scope()->num_parameters())); 3200 __ movl(result, Immediate(scope()->num_parameters()));
3214 __ j(equal, &done, Label::kNear); 3201 __ j(equal, &done, Label::kNear);
3215 3202
3216 // Arguments adaptor frame present. Get argument length from there. 3203 // Arguments adaptor frame present. Get argument length from there.
3217 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 3204 __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3218 __ SmiToInteger32(result, 3205 __ SmiToInteger32(result,
3219 Operand(result, 3206 Operand(result,
3220 ArgumentsAdaptorFrameConstants::kLengthOffset)); 3207 ArgumentsAdaptorFrameConstants::kLengthOffset));
3221 3208
3222 // Argument length is in result register. 3209 // Argument length is in result register.
3223 __ bind(&done); 3210 __ bind(&done);
3224 } 3211 }
3225 3212
3226 3213
3227 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { 3214 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
3228 Register receiver = ToRegister(instr->receiver()); 3215 Register receiver = ToRegister(instr->receiver());
3229 Register function = ToRegister(instr->function()); 3216 Register function = ToRegister(instr->function());
3230 3217
3231 // If the receiver is null or undefined, we have to pass the global 3218 // If the receiver is null or undefined, we have to pass the global
3232 // object as a receiver to normal functions. Values have to be 3219 // object as a receiver to normal functions. Values have to be
3233 // passed unchanged to builtins and strict-mode functions. 3220 // passed unchanged to builtins and strict-mode functions.
3234 Label global_object, receiver_ok; 3221 Label global_object, receiver_ok;
3235 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; 3222 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
3236 3223
3237 // Do not transform the receiver to object for strict mode 3224 // Do not transform the receiver to object for strict mode
3238 // functions. 3225 // functions.
3239 __ movq(kScratchRegister, 3226 __ movp(kScratchRegister,
3240 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); 3227 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3241 __ testb(FieldOperand(kScratchRegister, 3228 __ testb(FieldOperand(kScratchRegister,
3242 SharedFunctionInfo::kStrictModeByteOffset), 3229 SharedFunctionInfo::kStrictModeByteOffset),
3243 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 3230 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
3244 __ j(not_equal, &receiver_ok, dist); 3231 __ j(not_equal, &receiver_ok, dist);
3245 3232
3246 // Do not transform the receiver to object for builtins. 3233 // Do not transform the receiver to object for builtins.
3247 __ testb(FieldOperand(kScratchRegister, 3234 __ testb(FieldOperand(kScratchRegister,
3248 SharedFunctionInfo::kNativeByteOffset), 3235 SharedFunctionInfo::kNativeByteOffset),
3249 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 3236 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
3250 __ j(not_equal, &receiver_ok, dist); 3237 __ j(not_equal, &receiver_ok, dist);
3251 3238
3252 // Normal function. Replace undefined or null with global receiver. 3239 // Normal function. Replace undefined or null with global receiver.
3253 __ CompareRoot(receiver, Heap::kNullValueRootIndex); 3240 __ CompareRoot(receiver, Heap::kNullValueRootIndex);
3254 __ j(equal, &global_object, Label::kNear); 3241 __ j(equal, &global_object, Label::kNear);
3255 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); 3242 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
3256 __ j(equal, &global_object, Label::kNear); 3243 __ j(equal, &global_object, Label::kNear);
3257 3244
3258 // The receiver should be a JS object. 3245 // The receiver should be a JS object.
3259 Condition is_smi = __ CheckSmi(receiver); 3246 Condition is_smi = __ CheckSmi(receiver);
3260 DeoptimizeIf(is_smi, instr->environment()); 3247 DeoptimizeIf(is_smi, instr->environment());
3261 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); 3248 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
3262 DeoptimizeIf(below, instr->environment()); 3249 DeoptimizeIf(below, instr->environment());
3263 __ jmp(&receiver_ok, Label::kNear); 3250 __ jmp(&receiver_ok, Label::kNear);
3264 3251
3265 __ bind(&global_object); 3252 __ bind(&global_object);
3266 __ movq(receiver, FieldOperand(function, JSFunction::kContextOffset)); 3253 __ movp(receiver, FieldOperand(function, JSFunction::kContextOffset));
3267 __ movq(receiver, 3254 __ movp(receiver,
3268 Operand(receiver, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 3255 Operand(receiver, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3269 __ movq(receiver, 3256 __ movp(receiver,
3270 FieldOperand(receiver, GlobalObject::kGlobalReceiverOffset)); 3257 FieldOperand(receiver, GlobalObject::kGlobalReceiverOffset));
3271 __ bind(&receiver_ok); 3258 __ bind(&receiver_ok);
3272 } 3259 }
3273 3260
3274 3261
3275 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { 3262 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
3276 Register receiver = ToRegister(instr->receiver()); 3263 Register receiver = ToRegister(instr->receiver());
3277 Register function = ToRegister(instr->function()); 3264 Register function = ToRegister(instr->function());
3278 Register length = ToRegister(instr->length()); 3265 Register length = ToRegister(instr->length());
3279 Register elements = ToRegister(instr->elements()); 3266 Register elements = ToRegister(instr->elements());
3280 ASSERT(receiver.is(rax)); // Used for parameter count. 3267 ASSERT(receiver.is(rax)); // Used for parameter count.
3281 ASSERT(function.is(rdi)); // Required by InvokeFunction. 3268 ASSERT(function.is(rdi)); // Required by InvokeFunction.
3282 ASSERT(ToRegister(instr->result()).is(rax)); 3269 ASSERT(ToRegister(instr->result()).is(rax));
3283 3270
3284 // Copy the arguments to this function possibly from the 3271 // Copy the arguments to this function possibly from the
3285 // adaptor frame below it. 3272 // adaptor frame below it.
3286 const uint32_t kArgumentsLimit = 1 * KB; 3273 const uint32_t kArgumentsLimit = 1 * KB;
3287 __ cmpq(length, Immediate(kArgumentsLimit)); 3274 __ cmpq(length, Immediate(kArgumentsLimit));
3288 DeoptimizeIf(above, instr->environment()); 3275 DeoptimizeIf(above, instr->environment());
3289 3276
3290 __ push(receiver); 3277 __ push(receiver);
3291 __ movq(receiver, length); 3278 __ movp(receiver, length);
3292 3279
3293 // Loop through the arguments pushing them onto the execution 3280 // Loop through the arguments pushing them onto the execution
3294 // stack. 3281 // stack.
3295 Label invoke, loop; 3282 Label invoke, loop;
3296 // length is a small non-negative integer, due to the test above. 3283 // length is a small non-negative integer, due to the test above.
3297 __ testl(length, length); 3284 __ testl(length, length);
3298 __ j(zero, &invoke, Label::kNear); 3285 __ j(zero, &invoke, Label::kNear);
3299 __ bind(&loop); 3286 __ bind(&loop);
3300 StackArgumentsAccessor args(elements, length, 3287 StackArgumentsAccessor args(elements, length,
3301 ARGUMENTS_DONT_CONTAIN_RECEIVER); 3288 ARGUMENTS_DONT_CONTAIN_RECEIVER);
(...skipping 18 matching lines...) Expand all
3320 } 3307 }
3321 3308
3322 3309
3323 void LCodeGen::DoDrop(LDrop* instr) { 3310 void LCodeGen::DoDrop(LDrop* instr) {
3324 __ Drop(instr->count()); 3311 __ Drop(instr->count());
3325 } 3312 }
3326 3313
3327 3314
3328 void LCodeGen::DoThisFunction(LThisFunction* instr) { 3315 void LCodeGen::DoThisFunction(LThisFunction* instr) {
3329 Register result = ToRegister(instr->result()); 3316 Register result = ToRegister(instr->result());
3330 __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 3317 __ movp(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
3331 } 3318 }
3332 3319
3333 3320
3334 void LCodeGen::DoContext(LContext* instr) { 3321 void LCodeGen::DoContext(LContext* instr) {
3335 Register result = ToRegister(instr->result()); 3322 Register result = ToRegister(instr->result());
3336 if (info()->IsOptimizing()) { 3323 if (info()->IsOptimizing()) {
3337 __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset)); 3324 __ movp(result, Operand(rbp, StandardFrameConstants::kContextOffset));
3338 } else { 3325 } else {
3339 // If there is no frame, the context must be in rsi. 3326 // If there is no frame, the context must be in rsi.
3340 ASSERT(result.is(rsi)); 3327 ASSERT(result.is(rsi));
3341 } 3328 }
3342 } 3329 }
3343 3330
3344 3331
3345 void LCodeGen::DoOuterContext(LOuterContext* instr) { 3332 void LCodeGen::DoOuterContext(LOuterContext* instr) {
3346 Register context = ToRegister(instr->context()); 3333 Register context = ToRegister(instr->context());
3347 Register result = ToRegister(instr->result()); 3334 Register result = ToRegister(instr->result());
3348 __ movq(result, 3335 __ movp(result,
3349 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); 3336 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
3350 } 3337 }
3351 3338
3352 3339
3353 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { 3340 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3354 ASSERT(ToRegister(instr->context()).is(rsi)); 3341 ASSERT(ToRegister(instr->context()).is(rsi));
3355 __ push(rsi); // The context is the first argument. 3342 __ push(rsi); // The context is the first argument.
3356 __ Push(instr->hydrogen()->pairs()); 3343 __ Push(instr->hydrogen()->pairs());
3357 __ Push(Smi::FromInt(instr->hydrogen()->flags())); 3344 __ Push(Smi::FromInt(instr->hydrogen()->flags()));
3358 CallRuntime(Runtime::kDeclareGlobals, 3, instr); 3345 CallRuntime(Runtime::kDeclareGlobals, 3, instr);
3359 } 3346 }
3360 3347
3361 3348
3362 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { 3349 void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
3363 Register context = ToRegister(instr->context()); 3350 Register context = ToRegister(instr->context());
3364 Register result = ToRegister(instr->result()); 3351 Register result = ToRegister(instr->result());
3365 __ movq(result, 3352 __ movp(result,
3366 Operand(context, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); 3353 Operand(context, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3367 } 3354 }
3368 3355
3369 3356
3370 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { 3357 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
3371 Register global = ToRegister(instr->global()); 3358 Register global = ToRegister(instr->global());
3372 Register result = ToRegister(instr->result()); 3359 Register result = ToRegister(instr->result());
3373 __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); 3360 __ movp(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
3374 } 3361 }
3375 3362
3376 3363
3377 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, 3364 void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
3378 int formal_parameter_count, 3365 int formal_parameter_count,
3379 int arity, 3366 int arity,
3380 LInstruction* instr, 3367 LInstruction* instr,
3381 RDIState rdi_state) { 3368 RDIState rdi_state) {
3382 bool dont_adapt_arguments = 3369 bool dont_adapt_arguments =
3383 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; 3370 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3384 bool can_invoke_directly = 3371 bool can_invoke_directly =
3385 dont_adapt_arguments || formal_parameter_count == arity; 3372 dont_adapt_arguments || formal_parameter_count == arity;
3386 3373
3387 LPointerMap* pointers = instr->pointer_map(); 3374 LPointerMap* pointers = instr->pointer_map();
3388 3375
3389 if (can_invoke_directly) { 3376 if (can_invoke_directly) {
3390 if (rdi_state == RDI_UNINITIALIZED) { 3377 if (rdi_state == RDI_UNINITIALIZED) {
3391 __ Move(rdi, function); 3378 __ Move(rdi, function);
3392 } 3379 }
3393 3380
3394 // Change context. 3381 // Change context.
3395 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 3382 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
3396 3383
3397 // Set rax to arguments count if adaption is not needed. Assumes that rax 3384 // Set rax to arguments count if adaption is not needed. Assumes that rax
3398 // is available to write to at this point. 3385 // is available to write to at this point.
3399 if (dont_adapt_arguments) { 3386 if (dont_adapt_arguments) {
3400 __ Set(rax, arity); 3387 __ Set(rax, arity);
3401 } 3388 }
3402 3389
3403 // Invoke function. 3390 // Invoke function.
3404 if (function.is_identical_to(info()->closure())) { 3391 if (function.is_identical_to(info()->closure())) {
3405 __ CallSelf(); 3392 __ CallSelf();
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
3444 3431
3445 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { 3432 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
3446 ASSERT(ToRegister(instr->function()).is(rdi)); 3433 ASSERT(ToRegister(instr->function()).is(rdi));
3447 ASSERT(ToRegister(instr->result()).is(rax)); 3434 ASSERT(ToRegister(instr->result()).is(rax));
3448 3435
3449 if (instr->hydrogen()->pass_argument_count()) { 3436 if (instr->hydrogen()->pass_argument_count()) {
3450 __ Set(rax, instr->arity()); 3437 __ Set(rax, instr->arity());
3451 } 3438 }
3452 3439
3453 // Change context. 3440 // Change context.
3454 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 3441 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
3455 3442
3456 LPointerMap* pointers = instr->pointer_map(); 3443 LPointerMap* pointers = instr->pointer_map();
3457 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); 3444 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3458 3445
3459 bool is_self_call = false; 3446 bool is_self_call = false;
3460 if (instr->hydrogen()->function()->IsConstant()) { 3447 if (instr->hydrogen()->function()->IsConstant()) {
3461 Handle<JSFunction> jsfun = Handle<JSFunction>::null(); 3448 Handle<JSFunction> jsfun = Handle<JSFunction>::null();
3462 HConstant* fun_const = HConstant::cast(instr->hydrogen()->function()); 3449 HConstant* fun_const = HConstant::cast(instr->hydrogen()->function());
3463 jsfun = Handle<JSFunction>::cast(fun_const->handle(isolate())); 3450 jsfun = Handle<JSFunction>::cast(fun_const->handle(isolate()));
3464 is_self_call = jsfun.is_identical_to(info()->closure()); 3451 is_self_call = jsfun.is_identical_to(info()->closure());
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
3497 __ j(zero, &done); 3484 __ j(zero, &done);
3498 3485
3499 __ AllocateHeapNumber(tmp, tmp2, &slow); 3486 __ AllocateHeapNumber(tmp, tmp2, &slow);
3500 __ jmp(&allocated, Label::kNear); 3487 __ jmp(&allocated, Label::kNear);
3501 3488
3502 // Slow case: Call the runtime system to do the number allocation. 3489 // Slow case: Call the runtime system to do the number allocation.
3503 __ bind(&slow); 3490 __ bind(&slow);
3504 CallRuntimeFromDeferred( 3491 CallRuntimeFromDeferred(
3505 Runtime::kAllocateHeapNumber, 0, instr, instr->context()); 3492 Runtime::kAllocateHeapNumber, 0, instr, instr->context());
3506 // Set the pointer to the new heap number in tmp. 3493 // Set the pointer to the new heap number in tmp.
3507 if (!tmp.is(rax)) __ movq(tmp, rax); 3494 if (!tmp.is(rax)) __ movp(tmp, rax);
3508 // Restore input_reg after call to runtime. 3495 // Restore input_reg after call to runtime.
3509 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); 3496 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
3510 3497
3511 __ bind(&allocated); 3498 __ bind(&allocated);
3512 __ MoveDouble(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); 3499 __ MoveDouble(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
3513 __ shl(tmp2, Immediate(1)); 3500 __ shl(tmp2, Immediate(1));
3514 __ shr(tmp2, Immediate(1)); 3501 __ shr(tmp2, Immediate(1));
3515 __ MoveDouble(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); 3502 __ MoveDouble(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
3516 __ StoreToSafepointRegisterSlot(input_reg, tmp); 3503 __ StoreToSafepointRegisterSlot(input_reg, tmp);
3517 3504
(...skipping 367 matching lines...) Expand 10 before | Expand all | Expand 10 after
3885 3872
3886 if (instr->arity() == 0) { 3873 if (instr->arity() == 0) {
3887 ArrayNoArgumentConstructorStub stub(kind, override_mode); 3874 ArrayNoArgumentConstructorStub stub(kind, override_mode);
3888 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 3875 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3889 } else if (instr->arity() == 1) { 3876 } else if (instr->arity() == 1) {
3890 Label done; 3877 Label done;
3891 if (IsFastPackedElementsKind(kind)) { 3878 if (IsFastPackedElementsKind(kind)) {
3892 Label packed_case; 3879 Label packed_case;
3893 // We might need a change here 3880 // We might need a change here
3894 // look at the first argument 3881 // look at the first argument
3895 __ movq(rcx, Operand(rsp, 0)); 3882 __ movp(rcx, Operand(rsp, 0));
3896 __ testq(rcx, rcx); 3883 __ testq(rcx, rcx);
3897 __ j(zero, &packed_case, Label::kNear); 3884 __ j(zero, &packed_case, Label::kNear);
3898 3885
3899 ElementsKind holey_kind = GetHoleyElementsKind(kind); 3886 ElementsKind holey_kind = GetHoleyElementsKind(kind);
3900 ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); 3887 ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
3901 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); 3888 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
3902 __ jmp(&done, Label::kNear); 3889 __ jmp(&done, Label::kNear);
3903 __ bind(&packed_case); 3890 __ bind(&packed_case);
3904 } 3891 }
3905 3892
(...skipping 10 matching lines...) Expand all
3916 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 3903 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
3917 ASSERT(ToRegister(instr->context()).is(rsi)); 3904 ASSERT(ToRegister(instr->context()).is(rsi));
3918 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); 3905 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles());
3919 } 3906 }
3920 3907
3921 3908
3922 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { 3909 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
3923 Register function = ToRegister(instr->function()); 3910 Register function = ToRegister(instr->function());
3924 Register code_object = ToRegister(instr->code_object()); 3911 Register code_object = ToRegister(instr->code_object());
3925 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); 3912 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize));
3926 __ movq(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); 3913 __ movp(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object);
3927 } 3914 }
3928 3915
3929 3916
3930 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { 3917 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
3931 Register result = ToRegister(instr->result()); 3918 Register result = ToRegister(instr->result());
3932 Register base = ToRegister(instr->base_object()); 3919 Register base = ToRegister(instr->base_object());
3933 if (instr->offset()->IsConstantOperand()) { 3920 if (instr->offset()->IsConstantOperand()) {
3934 LConstantOperand* offset = LConstantOperand::cast(instr->offset()); 3921 LConstantOperand* offset = LConstantOperand::cast(instr->offset());
3935 __ lea(result, Operand(base, ToInteger32(offset))); 3922 __ lea(result, Operand(base, ToInteger32(offset)));
3936 } else { 3923 } else {
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after
3994 __ movsd(FieldOperand(object, offset), value); 3981 __ movsd(FieldOperand(object, offset), value);
3995 return; 3982 return;
3996 } 3983 }
3997 3984
3998 if (!transition.is_null()) { 3985 if (!transition.is_null()) {
3999 if (!hinstr->NeedsWriteBarrierForMap()) { 3986 if (!hinstr->NeedsWriteBarrierForMap()) {
4000 __ Move(FieldOperand(object, HeapObject::kMapOffset), transition); 3987 __ Move(FieldOperand(object, HeapObject::kMapOffset), transition);
4001 } else { 3988 } else {
4002 Register temp = ToRegister(instr->temp()); 3989 Register temp = ToRegister(instr->temp());
4003 __ Move(kScratchRegister, transition); 3990 __ Move(kScratchRegister, transition);
4004 __ movq(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister); 3991 __ movp(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister);
4005 // Update the write barrier for the map field. 3992 // Update the write barrier for the map field.
4006 __ RecordWriteField(object, 3993 __ RecordWriteField(object,
4007 HeapObject::kMapOffset, 3994 HeapObject::kMapOffset,
4008 kScratchRegister, 3995 kScratchRegister,
4009 temp, 3996 temp,
4010 kSaveFPRegs, 3997 kSaveFPRegs,
4011 OMIT_REMEMBERED_SET, 3998 OMIT_REMEMBERED_SET,
4012 OMIT_SMI_CHECK); 3999 OMIT_SMI_CHECK);
4013 } 4000 }
4014 } 4001 }
4015 4002
4016 // Do the store. 4003 // Do the store.
4017 SmiCheck check_needed = hinstr->value()->IsHeapObject() 4004 SmiCheck check_needed = hinstr->value()->IsHeapObject()
4018 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; 4005 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
4019 4006
4020 Register write_register = object; 4007 Register write_register = object;
4021 if (!access.IsInobject()) { 4008 if (!access.IsInobject()) {
4022 write_register = ToRegister(instr->temp()); 4009 write_register = ToRegister(instr->temp());
4023 __ movq(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); 4010 __ movp(write_register, FieldOperand(object, JSObject::kPropertiesOffset));
4024 } 4011 }
4025 4012
4026 if (representation.IsSmi() && 4013 if (representation.IsSmi() &&
4027 hinstr->value()->representation().IsInteger32()) { 4014 hinstr->value()->representation().IsInteger32()) {
4028 ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); 4015 ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY);
4029 // Store int value directly to upper half of the smi. 4016 // Store int value directly to upper half of the smi.
4030 STATIC_ASSERT(kSmiTag == 0); 4017 STATIC_ASSERT(kSmiTag == 0);
4031 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); 4018 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32);
4032 offset += kPointerSize / 2; 4019 offset += kPointerSize / 2;
4033 representation = Representation::Integer32(); 4020 representation = Representation::Integer32();
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
4072 } 4059 }
4073 4060
4074 4061
4075 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 4062 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
4076 ASSERT(ToRegister(instr->context()).is(rsi)); 4063 ASSERT(ToRegister(instr->context()).is(rsi));
4077 ASSERT(ToRegister(instr->object()).is(rdx)); 4064 ASSERT(ToRegister(instr->object()).is(rdx));
4078 ASSERT(ToRegister(instr->value()).is(rax)); 4065 ASSERT(ToRegister(instr->value()).is(rax));
4079 4066
4080 __ Move(rcx, instr->hydrogen()->name()); 4067 __ Move(rcx, instr->hydrogen()->name());
4081 Handle<Code> ic = StoreIC::initialize_stub(isolate(), 4068 Handle<Code> ic = StoreIC::initialize_stub(isolate(),
4082 instr->strict_mode_flag(), 4069 instr->strict_mode_flag());
4083 NOT_CONTEXTUAL);
4084 CallCode(ic, RelocInfo::CODE_TARGET, instr); 4070 CallCode(ic, RelocInfo::CODE_TARGET, instr);
4085 } 4071 }
4086 4072
4087 4073
4088 void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) { 4074 void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) {
4089 if (FLAG_debug_code && check->hydrogen()->skip_check()) { 4075 if (FLAG_debug_code && check->hydrogen()->skip_check()) {
4090 Label done; 4076 Label done;
4091 __ j(NegateCondition(cc), &done, Label::kNear); 4077 __ j(NegateCondition(cc), &done, Label::kNear);
4092 __ int3(); 4078 __ int3();
4093 __ bind(&done); 4079 __ bind(&done);
(...skipping 268 matching lines...) Expand 10 before | Expand all | Expand 10 after
4362 Handle<Map> to_map = instr->transitioned_map(); 4348 Handle<Map> to_map = instr->transitioned_map();
4363 ElementsKind from_kind = instr->from_kind(); 4349 ElementsKind from_kind = instr->from_kind();
4364 ElementsKind to_kind = instr->to_kind(); 4350 ElementsKind to_kind = instr->to_kind();
4365 4351
4366 Label not_applicable; 4352 Label not_applicable;
4367 __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); 4353 __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
4368 __ j(not_equal, &not_applicable); 4354 __ j(not_equal, &not_applicable);
4369 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { 4355 if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
4370 Register new_map_reg = ToRegister(instr->new_map_temp()); 4356 Register new_map_reg = ToRegister(instr->new_map_temp());
4371 __ Move(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); 4357 __ Move(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
4372 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); 4358 __ movp(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
4373 // Write barrier. 4359 // Write barrier.
4374 ASSERT_NE(instr->temp(), NULL); 4360 ASSERT_NE(instr->temp(), NULL);
4375 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, 4361 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
4376 ToRegister(instr->temp()), kDontSaveFPRegs); 4362 ToRegister(instr->temp()), kDontSaveFPRegs);
4377 } else { 4363 } else {
4378 ASSERT(ToRegister(instr->context()).is(rsi)); 4364 ASSERT(ToRegister(instr->context()).is(rsi));
4379 PushSafepointRegistersScope scope(this); 4365 PushSafepointRegistersScope scope(this);
4380 if (!object_reg.is(rax)) { 4366 if (!object_reg.is(rax)) {
4381 __ movq(rax, object_reg); 4367 __ movp(rax, object_reg);
4382 } 4368 }
4383 __ Move(rbx, to_map); 4369 __ Move(rbx, to_map);
4384 TransitionElementsKindStub stub(from_kind, to_kind); 4370 TransitionElementsKindStub stub(from_kind, to_kind);
4385 __ CallStub(&stub); 4371 __ CallStub(&stub);
4386 RecordSafepointWithRegisters( 4372 RecordSafepointWithRegisters(
4387 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); 4373 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4388 } 4374 }
4389 __ bind(&not_applicable); 4375 __ bind(&not_applicable);
4390 } 4376 }
4391 4377
(...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after
4490 4476
4491 ASSERT(instr->hydrogen()->value()->representation().IsInteger32()); 4477 ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
4492 Register char_code = ToRegister(instr->char_code()); 4478 Register char_code = ToRegister(instr->char_code());
4493 Register result = ToRegister(instr->result()); 4479 Register result = ToRegister(instr->result());
4494 ASSERT(!char_code.is(result)); 4480 ASSERT(!char_code.is(result));
4495 4481
4496 __ cmpl(char_code, Immediate(String::kMaxOneByteCharCode)); 4482 __ cmpl(char_code, Immediate(String::kMaxOneByteCharCode));
4497 __ j(above, deferred->entry()); 4483 __ j(above, deferred->entry());
4498 __ movsxlq(char_code, char_code); 4484 __ movsxlq(char_code, char_code);
4499 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex); 4485 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
4500 __ movq(result, FieldOperand(result, 4486 __ movp(result, FieldOperand(result,
4501 char_code, times_pointer_size, 4487 char_code, times_pointer_size,
4502 FixedArray::kHeaderSize)); 4488 FixedArray::kHeaderSize));
4503 __ CompareRoot(result, Heap::kUndefinedValueRootIndex); 4489 __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
4504 __ j(equal, deferred->entry()); 4490 __ j(equal, deferred->entry());
4505 __ bind(deferred->exit()); 4491 __ bind(deferred->exit());
4506 } 4492 }
4507 4493
4508 4494
4509 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { 4495 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
4510 Register char_code = ToRegister(instr->char_code()); 4496 Register char_code = ToRegister(instr->char_code());
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after
4636 // Put a valid pointer value in the stack slot where the result 4622 // Put a valid pointer value in the stack slot where the result
4637 // register is stored, as this register is in the pointer map, but contains an 4623 // register is stored, as this register is in the pointer map, but contains an
4638 // integer value. 4624 // integer value.
4639 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); 4625 __ StoreToSafepointRegisterSlot(reg, Immediate(0));
4640 4626
4641 // NumberTagU uses the context from the frame, rather than 4627 // NumberTagU uses the context from the frame, rather than
4642 // the environment's HContext or HInlinedContext value. 4628 // the environment's HContext or HInlinedContext value.
4643 // They only call Runtime::kAllocateHeapNumber. 4629 // They only call Runtime::kAllocateHeapNumber.
4644 // The corresponding HChange instructions are added in a phase that does 4630 // The corresponding HChange instructions are added in a phase that does
4645 // not have easy access to the local context. 4631 // not have easy access to the local context.
4646 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 4632 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4647 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); 4633 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4648 RecordSafepointWithRegisters( 4634 RecordSafepointWithRegisters(
4649 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); 4635 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4650 4636
4651 if (!reg.is(rax)) __ movq(reg, rax); 4637 if (!reg.is(rax)) __ movp(reg, rax);
4652 4638
4653 // Done. Put the value in temp_xmm into the value of the allocated heap 4639 // Done. Put the value in temp_xmm into the value of the allocated heap
4654 // number. 4640 // number.
4655 __ bind(&done); 4641 __ bind(&done);
4656 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); 4642 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm);
4657 __ StoreToSafepointRegisterSlot(reg, reg); 4643 __ StoreToSafepointRegisterSlot(reg, reg);
4658 } 4644 }
4659 4645
4660 4646
4661 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { 4647 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
4693 Register reg = ToRegister(instr->result()); 4679 Register reg = ToRegister(instr->result());
4694 __ Move(reg, Smi::FromInt(0)); 4680 __ Move(reg, Smi::FromInt(0));
4695 4681
4696 { 4682 {
4697 PushSafepointRegistersScope scope(this); 4683 PushSafepointRegistersScope scope(this);
4698 // NumberTagD uses the context from the frame, rather than 4684 // NumberTagD uses the context from the frame, rather than
4699 // the environment's HContext or HInlinedContext value. 4685 // the environment's HContext or HInlinedContext value.
4700 // They only call Runtime::kAllocateHeapNumber. 4686 // They only call Runtime::kAllocateHeapNumber.
4701 // The corresponding HChange instructions are added in a phase that does 4687 // The corresponding HChange instructions are added in a phase that does
4702 // not have easy access to the local context. 4688 // not have easy access to the local context.
4703 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 4689 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
4704 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); 4690 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4705 RecordSafepointWithRegisters( 4691 RecordSafepointWithRegisters(
4706 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); 4692 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4707 __ movq(kScratchRegister, rax); 4693 __ movp(kScratchRegister, rax);
4708 } 4694 }
4709 __ movq(reg, kScratchRegister); 4695 __ movp(reg, kScratchRegister);
4710 } 4696 }
4711 4697
4712 4698
4713 void LCodeGen::DoSmiTag(LSmiTag* instr) { 4699 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4714 ASSERT(instr->value()->Equals(instr->result())); 4700 ASSERT(instr->value()->Equals(instr->result()));
4715 Register input = ToRegister(instr->value()); 4701 Register input = ToRegister(instr->value());
4716 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); 4702 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
4717 __ Integer32ToSmi(input, input); 4703 __ Integer32ToSmi(input, input);
4718 } 4704 }
4719 4705
(...skipping 228 matching lines...) Expand 10 before | Expand all | Expand 10 after
4948 LOperand* input = instr->value(); 4934 LOperand* input = instr->value();
4949 Condition cc = masm()->CheckSmi(ToRegister(input)); 4935 Condition cc = masm()->CheckSmi(ToRegister(input));
4950 DeoptimizeIf(cc, instr->environment()); 4936 DeoptimizeIf(cc, instr->environment());
4951 } 4937 }
4952 } 4938 }
4953 4939
4954 4940
4955 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { 4941 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
4956 Register input = ToRegister(instr->value()); 4942 Register input = ToRegister(instr->value());
4957 4943
4958 __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); 4944 __ movp(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));
4959 4945
4960 if (instr->hydrogen()->is_interval_check()) { 4946 if (instr->hydrogen()->is_interval_check()) {
4961 InstanceType first; 4947 InstanceType first;
4962 InstanceType last; 4948 InstanceType last;
4963 instr->hydrogen()->GetCheckInterval(&first, &last); 4949 instr->hydrogen()->GetCheckInterval(&first, &last);
4964 4950
4965 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), 4951 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
4966 Immediate(static_cast<int8_t>(first))); 4952 Immediate(static_cast<int8_t>(first)));
4967 4953
4968 // If there is only one type in the interval check for equality. 4954 // If there is only one type in the interval check for equality.
(...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after
5096 5082
5097 // Check for heap number 5083 // Check for heap number
5098 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 5084 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
5099 factory()->heap_number_map()); 5085 factory()->heap_number_map());
5100 __ j(equal, &heap_number, Label::kNear); 5086 __ j(equal, &heap_number, Label::kNear);
5101 5087
5102 // Check for undefined. Undefined is converted to zero for clamping 5088 // Check for undefined. Undefined is converted to zero for clamping
5103 // conversions. 5089 // conversions.
5104 __ Cmp(input_reg, factory()->undefined_value()); 5090 __ Cmp(input_reg, factory()->undefined_value());
5105 DeoptimizeIf(not_equal, instr->environment()); 5091 DeoptimizeIf(not_equal, instr->environment());
5106 __ movq(input_reg, Immediate(0)); 5092 __ movp(input_reg, Immediate(0));
5107 __ jmp(&done, Label::kNear); 5093 __ jmp(&done, Label::kNear);
5108 5094
5109 // Heap number 5095 // Heap number
5110 __ bind(&heap_number); 5096 __ bind(&heap_number);
5111 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); 5097 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset));
5112 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); 5098 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg);
5113 __ jmp(&done, Label::kNear); 5099 __ jmp(&done, Label::kNear);
5114 5100
5115 // smi 5101 // smi
5116 __ bind(&is_smi); 5102 __ bind(&is_smi);
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after
5231 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 5217 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5232 ASSERT(ToRegister(instr->context()).is(rsi)); 5218 ASSERT(ToRegister(instr->context()).is(rsi));
5233 Label materialized; 5219 Label materialized;
5234 // Registers will be used as follows: 5220 // Registers will be used as follows:
5235 // rcx = literals array. 5221 // rcx = literals array.
5236 // rbx = regexp literal. 5222 // rbx = regexp literal.
5237 // rax = regexp literal clone. 5223 // rax = regexp literal clone.
5238 int literal_offset = 5224 int literal_offset =
5239 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); 5225 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
5240 __ Move(rcx, instr->hydrogen()->literals()); 5226 __ Move(rcx, instr->hydrogen()->literals());
5241 __ movq(rbx, FieldOperand(rcx, literal_offset)); 5227 __ movp(rbx, FieldOperand(rcx, literal_offset));
5242 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 5228 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
5243 __ j(not_equal, &materialized, Label::kNear); 5229 __ j(not_equal, &materialized, Label::kNear);
5244 5230
5245 // Create regexp literal using runtime function 5231 // Create regexp literal using runtime function
5246 // Result will be in rax. 5232 // Result will be in rax.
5247 __ push(rcx); 5233 __ push(rcx);
5248 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); 5234 __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
5249 __ Push(instr->hydrogen()->pattern()); 5235 __ Push(instr->hydrogen()->pattern());
5250 __ Push(instr->hydrogen()->flags()); 5236 __ Push(instr->hydrogen()->flags());
5251 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); 5237 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
5252 __ movq(rbx, rax); 5238 __ movp(rbx, rax);
5253 5239
5254 __ bind(&materialized); 5240 __ bind(&materialized);
5255 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 5241 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
5256 Label allocated, runtime_allocate; 5242 Label allocated, runtime_allocate;
5257 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); 5243 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
5258 __ jmp(&allocated, Label::kNear); 5244 __ jmp(&allocated, Label::kNear);
5259 5245
5260 __ bind(&runtime_allocate); 5246 __ bind(&runtime_allocate);
5261 __ push(rbx); 5247 __ push(rbx);
5262 __ Push(Smi::FromInt(size)); 5248 __ Push(Smi::FromInt(size));
5263 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); 5249 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5264 __ pop(rbx); 5250 __ pop(rbx);
5265 5251
5266 __ bind(&allocated); 5252 __ bind(&allocated);
5267 // Copy the content into the newly allocated memory. 5253 // Copy the content into the newly allocated memory.
5268 // (Unroll copy loop once for better throughput). 5254 // (Unroll copy loop once for better throughput).
5269 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { 5255 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
5270 __ movq(rdx, FieldOperand(rbx, i)); 5256 __ movp(rdx, FieldOperand(rbx, i));
5271 __ movq(rcx, FieldOperand(rbx, i + kPointerSize)); 5257 __ movp(rcx, FieldOperand(rbx, i + kPointerSize));
5272 __ movq(FieldOperand(rax, i), rdx); 5258 __ movp(FieldOperand(rax, i), rdx);
5273 __ movq(FieldOperand(rax, i + kPointerSize), rcx); 5259 __ movp(FieldOperand(rax, i + kPointerSize), rcx);
5274 } 5260 }
5275 if ((size % (2 * kPointerSize)) != 0) { 5261 if ((size % (2 * kPointerSize)) != 0) {
5276 __ movq(rdx, FieldOperand(rbx, size - kPointerSize)); 5262 __ movp(rdx, FieldOperand(rbx, size - kPointerSize));
5277 __ movq(FieldOperand(rax, size - kPointerSize), rdx); 5263 __ movp(FieldOperand(rax, size - kPointerSize), rdx);
5278 } 5264 }
5279 } 5265 }
5280 5266
5281 5267
5282 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 5268 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
5283 ASSERT(ToRegister(instr->context()).is(rsi)); 5269 ASSERT(ToRegister(instr->context()).is(rsi));
5284 // Use the fast case closure allocation code that allocates in new 5270 // Use the fast case closure allocation code that allocates in new
5285 // space for nested functions that don't need literals cloning. 5271 // space for nested functions that don't need literals cloning.
5286 bool pretenure = instr->hydrogen()->pretenure(); 5272 bool pretenure = instr->hydrogen()->pretenure();
5287 if (!pretenure && instr->hydrogen()->has_no_literals()) { 5273 if (!pretenure && instr->hydrogen()->has_no_literals()) {
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after
5369 5355
5370 } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { 5356 } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) {
5371 __ CompareRoot(input, Heap::kNullValueRootIndex); 5357 __ CompareRoot(input, Heap::kNullValueRootIndex);
5372 final_branch_condition = equal; 5358 final_branch_condition = equal;
5373 5359
5374 } else if (type_name->Equals(heap()->undefined_string())) { 5360 } else if (type_name->Equals(heap()->undefined_string())) {
5375 __ CompareRoot(input, Heap::kUndefinedValueRootIndex); 5361 __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
5376 __ j(equal, true_label, true_distance); 5362 __ j(equal, true_label, true_distance);
5377 __ JumpIfSmi(input, false_label, false_distance); 5363 __ JumpIfSmi(input, false_label, false_distance);
5378 // Check for undetectable objects => true. 5364 // Check for undetectable objects => true.
5379 __ movq(input, FieldOperand(input, HeapObject::kMapOffset)); 5365 __ movp(input, FieldOperand(input, HeapObject::kMapOffset));
5380 __ testb(FieldOperand(input, Map::kBitFieldOffset), 5366 __ testb(FieldOperand(input, Map::kBitFieldOffset),
5381 Immediate(1 << Map::kIsUndetectable)); 5367 Immediate(1 << Map::kIsUndetectable));
5382 final_branch_condition = not_zero; 5368 final_branch_condition = not_zero;
5383 5369
5384 } else if (type_name->Equals(heap()->function_string())) { 5370 } else if (type_name->Equals(heap()->function_string())) {
5385 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 5371 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
5386 __ JumpIfSmi(input, false_label, false_distance); 5372 __ JumpIfSmi(input, false_label, false_distance);
5387 __ CmpObjectType(input, JS_FUNCTION_TYPE, input); 5373 __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
5388 __ j(equal, true_label, true_distance); 5374 __ j(equal, true_label, true_distance);
5389 __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE); 5375 __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE);
(...skipping 25 matching lines...) Expand all
5415 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { 5401 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
5416 Register temp = ToRegister(instr->temp()); 5402 Register temp = ToRegister(instr->temp());
5417 5403
5418 EmitIsConstructCall(temp); 5404 EmitIsConstructCall(temp);
5419 EmitBranch(instr, equal); 5405 EmitBranch(instr, equal);
5420 } 5406 }
5421 5407
5422 5408
5423 void LCodeGen::EmitIsConstructCall(Register temp) { 5409 void LCodeGen::EmitIsConstructCall(Register temp) {
5424 // Get the frame pointer for the calling frame. 5410 // Get the frame pointer for the calling frame.
5425 __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 5411 __ movp(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
5426 5412
5427 // Skip the arguments adaptor frame if it exists. 5413 // Skip the arguments adaptor frame if it exists.
5428 Label check_frame_marker; 5414 Label check_frame_marker;
5429 __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset), 5415 __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
5430 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 5416 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
5431 __ j(not_equal, &check_frame_marker, Label::kNear); 5417 __ j(not_equal, &check_frame_marker, Label::kNear);
5432 __ movq(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); 5418 __ movp(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));
5433 5419
5434 // Check the marker in the calling frame. 5420 // Check the marker in the calling frame.
5435 __ bind(&check_frame_marker); 5421 __ bind(&check_frame_marker);
5436 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), 5422 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
5437 Smi::FromInt(StackFrame::CONSTRUCT)); 5423 Smi::FromInt(StackFrame::CONSTRUCT));
5438 } 5424 }
5439 5425
5440 5426
5441 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { 5427 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
5442 if (!info()->IsStub()) { 5428 if (!info()->IsStub()) {
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
5481 } 5467 }
5482 5468
5483 5469
5484 void LCodeGen::DoDummyUse(LDummyUse* instr) { 5470 void LCodeGen::DoDummyUse(LDummyUse* instr) {
5485 // Nothing to see here, move on! 5471 // Nothing to see here, move on!
5486 } 5472 }
5487 5473
5488 5474
5489 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { 5475 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
5490 PushSafepointRegistersScope scope(this); 5476 PushSafepointRegistersScope scope(this);
5491 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 5477 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
5492 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); 5478 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5493 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); 5479 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
5494 ASSERT(instr->HasEnvironment()); 5480 ASSERT(instr->HasEnvironment());
5495 LEnvironment* env = instr->environment(); 5481 LEnvironment* env = instr->environment();
5496 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); 5482 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5497 } 5483 }
5498 5484
5499 5485
5500 void LCodeGen::DoStackCheck(LStackCheck* instr) { 5486 void LCodeGen::DoStackCheck(LStackCheck* instr) {
5501 class DeferredStackCheck V8_FINAL : public LDeferredCode { 5487 class DeferredStackCheck V8_FINAL : public LDeferredCode {
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after
5575 Condition cc = masm()->CheckSmi(rax); 5561 Condition cc = masm()->CheckSmi(rax);
5576 DeoptimizeIf(cc, instr->environment()); 5562 DeoptimizeIf(cc, instr->environment());
5577 5563
5578 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); 5564 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
5579 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx); 5565 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
5580 DeoptimizeIf(below_equal, instr->environment()); 5566 DeoptimizeIf(below_equal, instr->environment());
5581 5567
5582 Label use_cache, call_runtime; 5568 Label use_cache, call_runtime;
5583 __ CheckEnumCache(null_value, &call_runtime); 5569 __ CheckEnumCache(null_value, &call_runtime);
5584 5570
5585 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); 5571 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
5586 __ jmp(&use_cache, Label::kNear); 5572 __ jmp(&use_cache, Label::kNear);
5587 5573
5588 // Get the set of properties to enumerate. 5574 // Get the set of properties to enumerate.
5589 __ bind(&call_runtime); 5575 __ bind(&call_runtime);
5590 __ push(rax); 5576 __ push(rax);
5591 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); 5577 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
5592 5578
5593 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), 5579 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
5594 Heap::kMetaMapRootIndex); 5580 Heap::kMetaMapRootIndex);
5595 DeoptimizeIf(not_equal, instr->environment()); 5581 DeoptimizeIf(not_equal, instr->environment());
5596 __ bind(&use_cache); 5582 __ bind(&use_cache);
5597 } 5583 }
5598 5584
5599 5585
5600 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { 5586 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5601 Register map = ToRegister(instr->map()); 5587 Register map = ToRegister(instr->map());
5602 Register result = ToRegister(instr->result()); 5588 Register result = ToRegister(instr->result());
5603 Label load_cache, done; 5589 Label load_cache, done;
5604 __ EnumLength(result, map); 5590 __ EnumLength(result, map);
5605 __ Cmp(result, Smi::FromInt(0)); 5591 __ Cmp(result, Smi::FromInt(0));
5606 __ j(not_equal, &load_cache, Label::kNear); 5592 __ j(not_equal, &load_cache, Label::kNear);
5607 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex); 5593 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
5608 __ jmp(&done, Label::kNear); 5594 __ jmp(&done, Label::kNear);
5609 __ bind(&load_cache); 5595 __ bind(&load_cache);
5610 __ LoadInstanceDescriptors(map, result); 5596 __ LoadInstanceDescriptors(map, result);
5611 __ movq(result, 5597 __ movp(result,
5612 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); 5598 FieldOperand(result, DescriptorArray::kEnumCacheOffset));
5613 __ movq(result, 5599 __ movp(result,
5614 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); 5600 FieldOperand(result, FixedArray::SizeFor(instr->idx())));
5615 __ bind(&done); 5601 __ bind(&done);
5616 Condition cc = masm()->CheckSmi(result); 5602 Condition cc = masm()->CheckSmi(result);
5617 DeoptimizeIf(cc, instr->environment()); 5603 DeoptimizeIf(cc, instr->environment());
5618 } 5604 }
5619 5605
5620 5606
5621 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { 5607 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5622 Register object = ToRegister(instr->value()); 5608 Register object = ToRegister(instr->value());
5623 __ cmpq(ToRegister(instr->map()), 5609 __ cmpq(ToRegister(instr->map()),
5624 FieldOperand(object, HeapObject::kMapOffset)); 5610 FieldOperand(object, HeapObject::kMapOffset));
5625 DeoptimizeIf(not_equal, instr->environment()); 5611 DeoptimizeIf(not_equal, instr->environment());
5626 } 5612 }
5627 5613
5628 5614
5629 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { 5615 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5630 Register object = ToRegister(instr->object()); 5616 Register object = ToRegister(instr->object());
5631 Register index = ToRegister(instr->index()); 5617 Register index = ToRegister(instr->index());
5632 5618
5633 Label out_of_object, done; 5619 Label out_of_object, done;
5634 __ SmiToInteger32(index, index); 5620 __ SmiToInteger32(index, index);
5635 __ cmpl(index, Immediate(0)); 5621 __ cmpl(index, Immediate(0));
5636 __ j(less, &out_of_object, Label::kNear); 5622 __ j(less, &out_of_object, Label::kNear);
5637 __ movq(object, FieldOperand(object, 5623 __ movp(object, FieldOperand(object,
5638 index, 5624 index,
5639 times_pointer_size, 5625 times_pointer_size,
5640 JSObject::kHeaderSize)); 5626 JSObject::kHeaderSize));
5641 __ jmp(&done, Label::kNear); 5627 __ jmp(&done, Label::kNear);
5642 5628
5643 __ bind(&out_of_object); 5629 __ bind(&out_of_object);
5644 __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset)); 5630 __ movp(object, FieldOperand(object, JSObject::kPropertiesOffset));
5645 __ negl(index); 5631 __ negl(index);
5646 // Index is now equal to out of object property index plus 1. 5632 // Index is now equal to out of object property index plus 1.
5647 __ movq(object, FieldOperand(object, 5633 __ movp(object, FieldOperand(object,
5648 index, 5634 index,
5649 times_pointer_size, 5635 times_pointer_size,
5650 FixedArray::kHeaderSize - kPointerSize)); 5636 FixedArray::kHeaderSize - kPointerSize));
5651 __ bind(&done); 5637 __ bind(&done);
5652 } 5638 }
5653 5639
5654 5640
5655 #undef __ 5641 #undef __
5656 5642
5657 } } // namespace v8::internal 5643 } } // namespace v8::internal
5658 5644
5659 #endif // V8_TARGET_ARCH_X64 5645 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/ic-x64.cc ('k') | src/x64/lithium-gap-resolver-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698