| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 85 matching lines...) |
| 96 void LChunkBuilder::Abort(BailoutReason reason) { | 96 void LChunkBuilder::Abort(BailoutReason reason) { |
| 97 info()->set_bailout_reason(reason); | 97 info()->set_bailout_reason(reason); |
| 98 status_ = ABORTED; | 98 status_ = ABORTED; |
| 99 } | 99 } |
| 100 | 100 |
| 101 | 101 |
| 102 #ifdef _MSC_VER | 102 #ifdef _MSC_VER |
| 103 void LCodeGen::MakeSureStackPagesMapped(int offset) { | 103 void LCodeGen::MakeSureStackPagesMapped(int offset) { |
| 104 const int kPageSize = 4 * KB; | 104 const int kPageSize = 4 * KB; |
| 105 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { | 105 for (offset -= kPageSize; offset > 0; offset -= kPageSize) { |
| 106 __ movq(Operand(rsp, offset), rax); | 106 __ movp(Operand(rsp, offset), rax); |
| 107 } | 107 } |
| 108 } | 108 } |
| 109 #endif | 109 #endif |
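For context on the MSVC-only probe above: Windows commits stack pages lazily behind a single guard page, so a store landing more than one page below the committed region faults instead of growing the stack. The loop therefore touches one word per 4 KB page, walking down from the committed region. A minimal sketch of the same idea in plain C++ (names and signature are illustrative, not V8's):

```cpp
#include <cstddef>
#include <cstdint>

// Touch one byte per 4 KB page below |top|, walking downward like
// MakeSureStackPagesMapped: each store stays within one page of already
// committed memory, so the OS guard-page mechanism extends the stack.
void TouchPagesBelow(volatile uint8_t* top, size_t size) {
  const size_t kPageSize = 4 * 1024;
  for (size_t offset = kPageSize; offset < size; offset += kPageSize) {
    top[-static_cast<ptrdiff_t>(offset)] = 0;
  }
}
```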
| 110 | 110 |
| 111 | 111 |
| 112 void LCodeGen::SaveCallerDoubles() { | 112 void LCodeGen::SaveCallerDoubles() { |
| 113 ASSERT(info()->saves_caller_doubles()); | 113 ASSERT(info()->saves_caller_doubles()); |
| 114 ASSERT(NeedsEagerFrame()); | 114 ASSERT(NeedsEagerFrame()); |
| 115 Comment(";;; Save clobbered callee double registers"); | 115 Comment(";;; Save clobbered callee double registers"); |
| 116 int count = 0; | 116 int count = 0; |
| (...skipping 37 matching lines...) |
| 154 } | 154 } |
| 155 #endif | 155 #endif |
| 156 | 156 |
| 157 // Classic mode functions need to replace the receiver with the global proxy | 157 // Classic mode functions need to replace the receiver with the global proxy |
| 158 // when called as functions (without an explicit receiver object). | 158 // when called as functions (without an explicit receiver object). |
| 159 if (info_->this_has_uses() && | 159 if (info_->this_has_uses() && |
| 160 info_->is_classic_mode() && | 160 info_->is_classic_mode() && |
| 161 !info_->is_native()) { | 161 !info_->is_native()) { |
| 162 Label ok; | 162 Label ok; |
| 163 StackArgumentsAccessor args(rsp, scope()->num_parameters()); | 163 StackArgumentsAccessor args(rsp, scope()->num_parameters()); |
| 164 __ movq(rcx, args.GetReceiverOperand()); | 164 __ movp(rcx, args.GetReceiverOperand()); |
| 165 | 165 |
| 166 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); | 166 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); |
| 167 __ j(not_equal, &ok, Label::kNear); | 167 __ j(not_equal, &ok, Label::kNear); |
| 168 | 168 |
| 169 __ movq(rcx, GlobalObjectOperand()); | 169 __ movp(rcx, GlobalObjectOperand()); |
| 170 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); | 170 __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset)); |
| 171 | 171 |
| 172 __ movq(args.GetReceiverOperand(), rcx); | 172 __ movp(args.GetReceiverOperand(), rcx); |
| 173 | 173 |
| 174 __ bind(&ok); | 174 __ bind(&ok); |
| 175 } | 175 } |
| 176 } | 176 } |
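A note on the receiver fix-up above: only classic (sloppy) mode functions observe the global proxy as `this` when called without an explicit receiver; strict-mode and native functions keep the `undefined` that was passed. A hedged one-function summary of the combined condition (parameter names are illustrative):

```cpp
// True when the prologue must swap an undefined receiver for the global
// proxy, mirroring the guards above: |this| must actually be used, the
// function must be in classic (sloppy) mode, and it must not be a native.
bool NeedsGlobalProxyReceiver(bool this_has_uses, bool is_classic_mode,
                              bool is_native, bool receiver_is_undefined) {
  return this_has_uses && is_classic_mode && !is_native &&
         receiver_is_undefined;
}
```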
| 177 | 177 |
| 178 info()->set_prologue_offset(masm_->pc_offset()); | 178 info()->set_prologue_offset(masm_->pc_offset()); |
| 179 if (NeedsEagerFrame()) { | 179 if (NeedsEagerFrame()) { |
| 180 ASSERT(!frame_is_built_); | 180 ASSERT(!frame_is_built_); |
| 181 frame_is_built_ = true; | 181 frame_is_built_ = true; |
| 182 __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME); | 182 __ Prologue(info()->IsStub() ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME); |
| 183 info()->AddNoFrameRange(0, masm_->pc_offset()); | 183 info()->AddNoFrameRange(0, masm_->pc_offset()); |
| 184 } | 184 } |
| 185 | 185 |
| 186 // Reserve space for the stack slots needed by the code. | 186 // Reserve space for the stack slots needed by the code. |
| 187 int slots = GetStackSlotCount(); | 187 int slots = GetStackSlotCount(); |
| 188 if (slots > 0) { | 188 if (slots > 0) { |
| 189 if (FLAG_debug_code) { | 189 if (FLAG_debug_code) { |
| 190 __ subq(rsp, Immediate(slots * kPointerSize)); | 190 __ subq(rsp, Immediate(slots * kPointerSize)); |
| 191 #ifdef _MSC_VER | 191 #ifdef _MSC_VER |
| 192 MakeSureStackPagesMapped(slots * kPointerSize); | 192 MakeSureStackPagesMapped(slots * kPointerSize); |
| 193 #endif | 193 #endif |
| 194 __ push(rax); | 194 __ push(rax); |
| 195 __ Set(rax, slots); | 195 __ Set(rax, slots); |
| 196 __ movq(kScratchRegister, kSlotsZapValue); | 196 __ movq(kScratchRegister, kSlotsZapValue); |
| 197 Label loop; | 197 Label loop; |
| 198 __ bind(&loop); | 198 __ bind(&loop); |
| 199 __ movq(MemOperand(rsp, rax, times_pointer_size, 0), | 199 __ movp(MemOperand(rsp, rax, times_pointer_size, 0), |
| 200 kScratchRegister); | 200 kScratchRegister); |
| 201 __ decl(rax); | 201 __ decl(rax); |
| 202 __ j(not_zero, &loop); | 202 __ j(not_zero, &loop); |
| 203 __ pop(rax); | 203 __ pop(rax); |
| 204 } else { | 204 } else { |
| 205 __ subq(rsp, Immediate(slots * kPointerSize)); | 205 __ subq(rsp, Immediate(slots * kPointerSize)); |
| 206 #ifdef _MSC_VER | 206 #ifdef _MSC_VER |
| 207 MakeSureStackPagesMapped(slots * kPointerSize); | 207 MakeSureStackPagesMapped(slots * kPointerSize); |
| 208 #endif | 208 #endif |
| 209 } | 209 } |
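The debug-only branch above zaps every freshly reserved slot with kSlotsZapValue so that a read of an uninitialized slot shows up as a recognizable pattern in a crash dump. The same loop in plain C++ (a sketch; the zap constant is whatever kSlotsZapValue is defined as, which this hunk does not show):

```cpp
#include <cstdint>

// Fill |count| freshly reserved stack slots with a recognizable pattern,
// counting down the way the movp/decl loop above does.
void ZapSlots(uint64_t* slots, int count, uint64_t zap_value) {
  for (int i = count; i > 0; --i) {
    slots[i - 1] = zap_value;
  }
}
```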
| (...skipping 11 matching lines...) |
| 221 __ push(rdi); | 221 __ push(rdi); |
| 222 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 222 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 223 FastNewContextStub stub(heap_slots); | 223 FastNewContextStub stub(heap_slots); |
| 224 __ CallStub(&stub); | 224 __ CallStub(&stub); |
| 225 } else { | 225 } else { |
| 226 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 226 __ CallRuntime(Runtime::kNewFunctionContext, 1); |
| 227 } | 227 } |
| 228 RecordSafepoint(Safepoint::kNoLazyDeopt); | 228 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 229 // Context is returned in both rax and rsi. It replaces the context | 229 // Context is returned in both rax and rsi. It replaces the context |
| 230 // passed to us. It's saved on the stack and kept live in rsi. | 230 // passed to us. It's saved on the stack and kept live in rsi. |
| 231 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); | 231 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); |
| 232 | 232 |
| 233 // Copy any necessary parameters into the context. | 233 // Copy any necessary parameters into the context. |
| 234 int num_parameters = scope()->num_parameters(); | 234 int num_parameters = scope()->num_parameters(); |
| 235 for (int i = 0; i < num_parameters; i++) { | 235 for (int i = 0; i < num_parameters; i++) { |
| 236 Variable* var = scope()->parameter(i); | 236 Variable* var = scope()->parameter(i); |
| 237 if (var->IsContextSlot()) { | 237 if (var->IsContextSlot()) { |
| 238 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 238 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
| 239 (num_parameters - 1 - i) * kPointerSize; | 239 (num_parameters - 1 - i) * kPointerSize; |
| 240 // Load parameter from stack. | 240 // Load parameter from stack. |
| 241 __ movq(rax, Operand(rbp, parameter_offset)); | 241 __ movp(rax, Operand(rbp, parameter_offset)); |
| 242 // Store it in the context. | 242 // Store it in the context. |
| 243 int context_offset = Context::SlotOffset(var->index()); | 243 int context_offset = Context::SlotOffset(var->index()); |
| 244 __ movq(Operand(rsi, context_offset), rax); | 244 __ movp(Operand(rsi, context_offset), rax); |
| 245 // Update the write barrier. This clobbers rax and rbx. | 245 // Update the write barrier. This clobbers rax and rbx. |
| 246 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs); | 246 __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs); |
| 247 } | 247 } |
| 248 } | 248 } |
| 249 Comment(";;; End allocate local context"); | 249 Comment(";;; End allocate local context"); |
| 250 } | 250 } |
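In the parameter-copy loop above, the only subtle part is the offset arithmetic: parameter i of n sits (n - 1 - i) words above the caller SP, which in a standard frame is two words above the frame pointer (saved rbp plus return address). A sketch of the computation, with the frame constants inlined as assumptions:

```cpp
// Byte offset from rbp to parameter |i|, for |num_parameters| arguments
// pushed left to right. Assumes the standard x64 frame layout, where
// StandardFrameConstants::kCallerSPOffset == 2 * kPointerSize.
int ParameterOffsetFromFP(int i, int num_parameters) {
  const int kPointerSize = 8;
  const int kCallerSPOffset = 2 * kPointerSize;
  return kCallerSPOffset + (num_parameters - 1 - i) * kPointerSize;
}
```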
| 251 | 251 |
| 252 // Trace the call. | 252 // Trace the call. |
| 253 if (FLAG_trace && info()->IsOptimizing()) { | 253 if (FLAG_trace && info()->IsOptimizing()) { |
| 254 __ CallRuntime(Runtime::kTraceEnter, 0); | 254 __ CallRuntime(Runtime::kTraceEnter, 0); |
| (...skipping 32 matching lines...) |
| 287 } else { | 287 } else { |
| 288 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 288 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 289 } | 289 } |
| 290 if (jump_table_[i].needs_frame) { | 290 if (jump_table_[i].needs_frame) { |
| 291 ASSERT(!info()->saves_caller_doubles()); | 291 ASSERT(!info()->saves_caller_doubles()); |
| 292 __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); | 292 __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); |
| 293 if (needs_frame.is_bound()) { | 293 if (needs_frame.is_bound()) { |
| 294 __ jmp(&needs_frame); | 294 __ jmp(&needs_frame); |
| 295 } else { | 295 } else { |
| 296 __ bind(&needs_frame); | 296 __ bind(&needs_frame); |
| 297 __ movq(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset)); | 297 __ movp(rsi, MemOperand(rbp, StandardFrameConstants::kContextOffset)); |
| 298 __ push(rbp); | 298 __ push(rbp); |
| 299 __ movq(rbp, rsp); | 299 __ movp(rbp, rsp); |
| 300 __ push(rsi); | 300 __ push(rsi); |
| 301 // This variant of deopt can only be used with stubs. Since we don't | 301 // This variant of deopt can only be used with stubs. Since we don't |
| 302 // have a function pointer to install in the stack frame that we're | 302 // have a function pointer to install in the stack frame that we're |
| 303 // building, install a special marker there instead. | 303 // building, install a special marker there instead. |
| 304 ASSERT(info()->IsStub()); | 304 ASSERT(info()->IsStub()); |
| 305 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); | 305 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); |
| 306 __ push(rsi); | 306 __ push(rsi); |
| 307 __ movq(rsi, MemOperand(rsp, kPointerSize)); | 307 __ movp(rsi, MemOperand(rsp, kPointerSize)); |
| 308 __ call(kScratchRegister); | 308 __ call(kScratchRegister); |
| 309 } | 309 } |
| 310 } else { | 310 } else { |
| 311 if (info()->saves_caller_doubles()) { | 311 if (info()->saves_caller_doubles()) { |
| 312 ASSERT(info()->IsStub()); | 312 ASSERT(info()->IsStub()); |
| 313 RestoreCallerDoubles(); | 313 RestoreCallerDoubles(); |
| 314 } | 314 } |
| 315 __ call(entry, RelocInfo::RUNTIME_ENTRY); | 315 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
| 316 } | 316 } |
| 317 } | 317 } |
| (...skipping 28 matching lines...) |
| 346 __ Push(Smi::FromInt(StackFrame::STUB)); | 346 __ Push(Smi::FromInt(StackFrame::STUB)); |
| 347 __ lea(rbp, Operand(rsp, 2 * kPointerSize)); | 347 __ lea(rbp, Operand(rsp, 2 * kPointerSize)); |
| 348 Comment(";;; Deferred code"); | 348 Comment(";;; Deferred code"); |
| 349 } | 349 } |
| 350 code->Generate(); | 350 code->Generate(); |
| 351 if (NeedsDeferredFrame()) { | 351 if (NeedsDeferredFrame()) { |
| 352 __ bind(code->done()); | 352 __ bind(code->done()); |
| 353 Comment(";;; Destroy frame"); | 353 Comment(";;; Destroy frame"); |
| 354 ASSERT(frame_is_built_); | 354 ASSERT(frame_is_built_); |
| 355 frame_is_built_ = false; | 355 frame_is_built_ = false; |
| 356 __ movq(rsp, rbp); | 356 __ movp(rsp, rbp); |
| 357 __ pop(rbp); | 357 __ pop(rbp); |
| 358 } | 358 } |
| 359 __ jmp(code->exit()); | 359 __ jmp(code->exit()); |
| 360 } | 360 } |
| 361 } | 361 } |
| 362 | 362 |
| 363 // Deferred code is the last part of the instruction sequence. Mark | 363 // Deferred code is the last part of the instruction sequence. Mark |
| 364 // the generated code as done unless we bailed out. | 364 // the generated code as done unless we bailed out. |
| 365 if (!is_aborted()) status_ = DONE; | 365 if (!is_aborted()) status_ = DONE; |
| 366 return !is_aborted(); | 366 return !is_aborted(); |
| (...skipping 262 matching lines...) |
| 629 | 629 |
| 630 __ CallRuntime(function, num_arguments, save_doubles); | 630 __ CallRuntime(function, num_arguments, save_doubles); |
| 631 | 631 |
| 632 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); | 632 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0); |
| 633 } | 633 } |
| 634 | 634 |
| 635 | 635 |
| 636 void LCodeGen::LoadContextFromDeferred(LOperand* context) { | 636 void LCodeGen::LoadContextFromDeferred(LOperand* context) { |
| 637 if (context->IsRegister()) { | 637 if (context->IsRegister()) { |
| 638 if (!ToRegister(context).is(rsi)) { | 638 if (!ToRegister(context).is(rsi)) { |
| 639 __ movq(rsi, ToRegister(context)); | 639 __ movp(rsi, ToRegister(context)); |
| 640 } | 640 } |
| 641 } else if (context->IsStackSlot()) { | 641 } else if (context->IsStackSlot()) { |
| 642 __ movq(rsi, ToOperand(context)); | 642 __ movp(rsi, ToOperand(context)); |
| 643 } else if (context->IsConstantOperand()) { | 643 } else if (context->IsConstantOperand()) { |
| 644 HConstant* constant = | 644 HConstant* constant = |
| 645 chunk_->LookupConstant(LConstantOperand::cast(context)); | 645 chunk_->LookupConstant(LConstantOperand::cast(context)); |
| 646 __ Move(rsi, Handle<Object>::cast(constant->handle(isolate()))); | 646 __ Move(rsi, Handle<Object>::cast(constant->handle(isolate()))); |
| 647 } else { | 647 } else { |
| 648 UNREACHABLE(); | 648 UNREACHABLE(); |
| 649 } | 649 } |
| 650 } | 650 } |
| 651 | 651 |
| 652 | 652 |
| (...skipping 608 matching lines...) |
| 1261 } | 1261 } |
| 1262 } | 1262 } |
| 1263 | 1263 |
| 1264 | 1264 |
| 1265 void LCodeGen::DoMulI(LMulI* instr) { | 1265 void LCodeGen::DoMulI(LMulI* instr) { |
| 1266 Register left = ToRegister(instr->left()); | 1266 Register left = ToRegister(instr->left()); |
| 1267 LOperand* right = instr->right(); | 1267 LOperand* right = instr->right(); |
| 1268 | 1268 |
| 1269 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 1269 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 1270 if (instr->hydrogen_value()->representation().IsSmi()) { | 1270 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1271 __ movq(kScratchRegister, left); | 1271 __ movp(kScratchRegister, left); |
| 1272 } else { | 1272 } else { |
| 1273 __ movl(kScratchRegister, left); | 1273 __ movl(kScratchRegister, left); |
| 1274 } | 1274 } |
| 1275 } | 1275 } |
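The copy above exists only to detect -0: imul destroys its destination, so if the product turns out to be integer 0, the original left operand is needed to decide whether the JS result is +0 or -0. The check, written out as a sketch (assuming overflow was already ruled out by the kCanOverflow path):

```cpp
#include <cstdint>

// A zero product is the JS value -0 exactly when either factor was
// negative (e.g. -5 * 0). -0 is not representable as an int32/smi, so the
// optimized code must deoptimize in that case.
bool ProductIsMinusZero(int32_t saved_left, int32_t right, int32_t product) {
  return product == 0 && (saved_left < 0 || right < 0);
}
```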
| 1276 | 1276 |
| 1277 bool can_overflow = | 1277 bool can_overflow = |
| 1278 instr->hydrogen()->CheckFlag(HValue::kCanOverflow); | 1278 instr->hydrogen()->CheckFlag(HValue::kCanOverflow); |
| 1279 if (right->IsConstantOperand()) { | 1279 if (right->IsConstantOperand()) { |
| 1280 int32_t right_value = ToInteger32(LConstantOperand::cast(right)); | 1280 int32_t right_value = ToInteger32(LConstantOperand::cast(right)); |
| 1281 if (right_value == -1) { | 1281 if (right_value == -1) { |
| (...skipping 294 matching lines...) |
| 1576 Register map = ToRegister(instr->value()); | 1576 Register map = ToRegister(instr->value()); |
| 1577 __ EnumLength(result, map); | 1577 __ EnumLength(result, map); |
| 1578 } | 1578 } |
| 1579 | 1579 |
| 1580 | 1580 |
| 1581 void LCodeGen::DoElementsKind(LElementsKind* instr) { | 1581 void LCodeGen::DoElementsKind(LElementsKind* instr) { |
| 1582 Register result = ToRegister(instr->result()); | 1582 Register result = ToRegister(instr->result()); |
| 1583 Register input = ToRegister(instr->value()); | 1583 Register input = ToRegister(instr->value()); |
| 1584 | 1584 |
| 1585 // Load map into |result|. | 1585 // Load map into |result|. |
| 1586 __ movq(result, FieldOperand(input, HeapObject::kMapOffset)); | 1586 __ movp(result, FieldOperand(input, HeapObject::kMapOffset)); |
| 1587 // Load the map's "bit field 2" into |result|. We only need the first byte. | 1587 // Load the map's "bit field 2" into |result|. We only need the first byte. |
| 1588 __ movzxbq(result, FieldOperand(result, Map::kBitField2Offset)); | 1588 __ movzxbq(result, FieldOperand(result, Map::kBitField2Offset)); |
| 1589 // Retrieve elements_kind from bit field 2. | 1589 // Retrieve elements_kind from bit field 2. |
| 1590 __ and_(result, Immediate(Map::kElementsKindMask)); | 1590 __ and_(result, Immediate(Map::kElementsKindMask)); |
| 1591 __ shr(result, Immediate(Map::kElementsKindShift)); | 1591 __ shr(result, Immediate(Map::kElementsKindShift)); |
| 1592 } | 1592 } |
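The four instructions in DoElementsKind amount to a plain bitfield read from the map's bit_field2 byte. Equivalent C++, with the mask and shift passed in since the concrete Map constants are not shown in this hunk:

```cpp
#include <cstdint>

// Extract the elements kind from a map's bit_field2 byte.
uint32_t ExtractElementsKind(uint8_t bit_field2, uint32_t mask, int shift) {
  return (bit_field2 & mask) >> shift;
}
```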
| 1593 | 1593 |
| 1594 | 1594 |
| 1595 void LCodeGen::DoValueOf(LValueOf* instr) { | 1595 void LCodeGen::DoValueOf(LValueOf* instr) { |
| 1596 Register input = ToRegister(instr->value()); | 1596 Register input = ToRegister(instr->value()); |
| 1597 Register result = ToRegister(instr->result()); | 1597 Register result = ToRegister(instr->result()); |
| 1598 ASSERT(input.is(result)); | 1598 ASSERT(input.is(result)); |
| 1599 Label done; | 1599 Label done; |
| 1600 | 1600 |
| 1601 if (!instr->hydrogen()->value()->IsHeapObject()) { | 1601 if (!instr->hydrogen()->value()->IsHeapObject()) { |
| 1602 // If the object is a smi return the object. | 1602 // If the object is a smi return the object. |
| 1603 __ JumpIfSmi(input, &done, Label::kNear); | 1603 __ JumpIfSmi(input, &done, Label::kNear); |
| 1604 } | 1604 } |
| 1605 | 1605 |
| 1606 // If the object is not a value type, return the object. | 1606 // If the object is not a value type, return the object. |
| 1607 __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister); | 1607 __ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister); |
| 1608 __ j(not_equal, &done, Label::kNear); | 1608 __ j(not_equal, &done, Label::kNear); |
| 1609 __ movq(result, FieldOperand(input, JSValue::kValueOffset)); | 1609 __ movp(result, FieldOperand(input, JSValue::kValueOffset)); |
| 1610 | 1610 |
| 1611 __ bind(&done); | 1611 __ bind(&done); |
| 1612 } | 1612 } |
| 1613 | 1613 |
| 1614 | 1614 |
| 1615 void LCodeGen::DoDateField(LDateField* instr) { | 1615 void LCodeGen::DoDateField(LDateField* instr) { |
| 1616 Register object = ToRegister(instr->date()); | 1616 Register object = ToRegister(instr->date()); |
| 1617 Register result = ToRegister(instr->result()); | 1617 Register result = ToRegister(instr->result()); |
| 1618 Smi* index = instr->index(); | 1618 Smi* index = instr->index(); |
| 1619 Label runtime, done, not_date_object; | 1619 Label runtime, done, not_date_object; |
| 1620 ASSERT(object.is(result)); | 1620 ASSERT(object.is(result)); |
| 1621 ASSERT(object.is(rax)); | 1621 ASSERT(object.is(rax)); |
| 1622 | 1622 |
| 1623 Condition cc = masm()->CheckSmi(object); | 1623 Condition cc = masm()->CheckSmi(object); |
| 1624 DeoptimizeIf(cc, instr->environment()); | 1624 DeoptimizeIf(cc, instr->environment()); |
| 1625 __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister); | 1625 __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister); |
| 1626 DeoptimizeIf(not_equal, instr->environment()); | 1626 DeoptimizeIf(not_equal, instr->environment()); |
| 1627 | 1627 |
| 1628 if (index->value() == 0) { | 1628 if (index->value() == 0) { |
| 1629 __ movq(result, FieldOperand(object, JSDate::kValueOffset)); | 1629 __ movp(result, FieldOperand(object, JSDate::kValueOffset)); |
| 1630 } else { | 1630 } else { |
| 1631 if (index->value() < JSDate::kFirstUncachedField) { | 1631 if (index->value() < JSDate::kFirstUncachedField) { |
| 1632 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 1632 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
| 1633 Operand stamp_operand = __ ExternalOperand(stamp); | 1633 Operand stamp_operand = __ ExternalOperand(stamp); |
| 1634 __ movq(kScratchRegister, stamp_operand); | 1634 __ movp(kScratchRegister, stamp_operand); |
| 1635 __ cmpq(kScratchRegister, FieldOperand(object, | 1635 __ cmpq(kScratchRegister, FieldOperand(object, |
| 1636 JSDate::kCacheStampOffset)); | 1636 JSDate::kCacheStampOffset)); |
| 1637 __ j(not_equal, &runtime, Label::kNear); | 1637 __ j(not_equal, &runtime, Label::kNear); |
| 1638 __ movq(result, FieldOperand(object, JSDate::kValueOffset + | 1638 __ movp(result, FieldOperand(object, JSDate::kValueOffset + |
| 1639 kPointerSize * index->value())); | 1639 kPointerSize * index->value())); |
| 1640 __ jmp(&done, Label::kNear); | 1640 __ jmp(&done, Label::kNear); |
| 1641 } | 1641 } |
| 1642 __ bind(&runtime); | 1642 __ bind(&runtime); |
| 1643 __ PrepareCallCFunction(2); | 1643 __ PrepareCallCFunction(2); |
| 1644 __ movq(arg_reg_1, object); | 1644 __ movp(arg_reg_1, object); |
| 1645 __ Move(arg_reg_2, index, RelocInfo::NONE64); | 1645 __ Move(arg_reg_2, index, RelocInfo::NONE64); |
| 1646 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 1646 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
| 1647 __ bind(&done); | 1647 __ bind(&done); |
| 1648 } | 1648 } |
| 1649 } | 1649 } |
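The stamp comparison in DoDateField is a simple cache-invalidation scheme: each JSDate records the global date-cache stamp that was current when its derived fields (year, month, ...) were computed, and the whole cache is invalidated by bumping that stamp, for example on a time-zone change. A sketch with illustrative types:

```cpp
#include <cstdint>

struct DateCache {
  uint64_t stamp;  // Bumped whenever cached date fields become invalid.
};

struct JSDateFields {
  uint64_t stamp;     // Global stamp when the fields below were computed.
  int64_t cached[8];  // Year, month, day, ... (illustrative layout).
};

// Fast path: a cached field is valid only while the stamps match;
// otherwise fall back to the runtime, as the code above does.
bool CanUseCachedField(const DateCache& cache, const JSDateFields& date) {
  return date.stamp == cache.stamp;
}
```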
| 1650 | 1650 |
| 1651 | 1651 |
| 1652 Operand LCodeGen::BuildSeqStringOperand(Register string, | 1652 Operand LCodeGen::BuildSeqStringOperand(Register string, |
| 1653 LOperand* index, | 1653 LOperand* index, |
| 1654 String::Encoding encoding) { | 1654 String::Encoding encoding) { |
| (...skipping 12 matching lines...) |
| 1667 } | 1667 } |
| 1668 | 1668 |
| 1669 | 1669 |
| 1670 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) { | 1670 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) { |
| 1671 String::Encoding encoding = instr->hydrogen()->encoding(); | 1671 String::Encoding encoding = instr->hydrogen()->encoding(); |
| 1672 Register result = ToRegister(instr->result()); | 1672 Register result = ToRegister(instr->result()); |
| 1673 Register string = ToRegister(instr->string()); | 1673 Register string = ToRegister(instr->string()); |
| 1674 | 1674 |
| 1675 if (FLAG_debug_code) { | 1675 if (FLAG_debug_code) { |
| 1676 __ push(string); | 1676 __ push(string); |
| 1677 __ movq(string, FieldOperand(string, HeapObject::kMapOffset)); | 1677 __ movp(string, FieldOperand(string, HeapObject::kMapOffset)); |
| 1678 __ movzxbq(string, FieldOperand(string, Map::kInstanceTypeOffset)); | 1678 __ movzxbq(string, FieldOperand(string, Map::kInstanceTypeOffset)); |
| 1679 | 1679 |
| 1680 __ andb(string, Immediate(kStringRepresentationMask | kStringEncodingMask)); | 1680 __ andb(string, Immediate(kStringRepresentationMask | kStringEncodingMask)); |
| 1681 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 1681 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 1682 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 1682 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 1683 __ cmpq(string, Immediate(encoding == String::ONE_BYTE_ENCODING | 1683 __ cmpq(string, Immediate(encoding == String::ONE_BYTE_ENCODING |
| 1684 ? one_byte_seq_type : two_byte_seq_type)); | 1684 ? one_byte_seq_type : two_byte_seq_type)); |
| 1685 __ Check(equal, kUnexpectedStringType); | 1685 __ Check(equal, kUnexpectedStringType); |
| 1686 __ pop(string); | 1686 __ pop(string); |
| 1687 } | 1687 } |
| (...skipping 120 matching lines...) |
| 1808 Condition condition = (operation == HMathMinMax::kMathMin) | 1808 Condition condition = (operation == HMathMinMax::kMathMin) |
| 1809 ? less_equal | 1809 ? less_equal |
| 1810 : greater_equal; | 1810 : greater_equal; |
| 1811 Register left_reg = ToRegister(left); | 1811 Register left_reg = ToRegister(left); |
| 1812 if (right->IsConstantOperand()) { | 1812 if (right->IsConstantOperand()) { |
| 1813 Immediate right_imm = | 1813 Immediate right_imm = |
| 1814 Immediate(ToInteger32(LConstantOperand::cast(right))); | 1814 Immediate(ToInteger32(LConstantOperand::cast(right))); |
| 1815 ASSERT(!instr->hydrogen_value()->representation().IsSmi()); | 1815 ASSERT(!instr->hydrogen_value()->representation().IsSmi()); |
| 1816 __ cmpl(left_reg, right_imm); | 1816 __ cmpl(left_reg, right_imm); |
| 1817 __ j(condition, &return_left, Label::kNear); | 1817 __ j(condition, &return_left, Label::kNear); |
| 1818 __ movq(left_reg, right_imm); | 1818 __ movp(left_reg, right_imm); |
| 1819 } else if (right->IsRegister()) { | 1819 } else if (right->IsRegister()) { |
| 1820 Register right_reg = ToRegister(right); | 1820 Register right_reg = ToRegister(right); |
| 1821 if (instr->hydrogen_value()->representation().IsSmi()) { | 1821 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1822 __ cmpq(left_reg, right_reg); | 1822 __ cmpq(left_reg, right_reg); |
| 1823 } else { | 1823 } else { |
| 1824 __ cmpl(left_reg, right_reg); | 1824 __ cmpl(left_reg, right_reg); |
| 1825 } | 1825 } |
| 1826 __ j(condition, &return_left, Label::kNear); | 1826 __ j(condition, &return_left, Label::kNear); |
| 1827 __ movq(left_reg, right_reg); | 1827 __ movp(left_reg, right_reg); |
| 1828 } else { | 1828 } else { |
| 1829 Operand right_op = ToOperand(right); | 1829 Operand right_op = ToOperand(right); |
| 1830 if (instr->hydrogen_value()->representation().IsSmi()) { | 1830 if (instr->hydrogen_value()->representation().IsSmi()) { |
| 1831 __ cmpq(left_reg, right_op); | 1831 __ cmpq(left_reg, right_op); |
| 1832 } else { | 1832 } else { |
| 1833 __ cmpl(left_reg, right_op); | 1833 __ cmpl(left_reg, right_op); |
| 1834 } | 1834 } |
| 1835 __ j(condition, &return_left, Label::kNear); | 1835 __ j(condition, &return_left, Label::kNear); |
| 1836 __ movq(left_reg, right_op); | 1836 __ movp(left_reg, right_op); |
| 1837 } | 1837 } |
| 1838 __ bind(&return_left); | 1838 __ bind(&return_left); |
| 1839 } else { | 1839 } else { |
| 1840 ASSERT(instr->hydrogen()->representation().IsDouble()); | 1840 ASSERT(instr->hydrogen()->representation().IsDouble()); |
| 1841 Label check_nan_left, check_zero, return_left, return_right; | 1841 Label check_nan_left, check_zero, return_left, return_right; |
| 1842 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; | 1842 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; |
| 1843 XMMRegister left_reg = ToDoubleRegister(left); | 1843 XMMRegister left_reg = ToDoubleRegister(left); |
| 1844 XMMRegister right_reg = ToDoubleRegister(right); | 1844 XMMRegister right_reg = ToDoubleRegister(right); |
| 1845 __ ucomisd(left_reg, right_reg); | 1845 __ ucomisd(left_reg, right_reg); |
| 1846 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. | 1846 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. |
| (...skipping 185 matching lines...) |
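The integer paths above are straightforward compare-and-move; the double path beginning before this skip is subtle because ucomisd reports NaN through the parity flag and treats -0 and +0 as equal. The semantics being implemented, written out as a C++ sketch of the kMathMin case (kMathMax mirrors it with the signs flipped):

```cpp
#include <cmath>

// JS Math.min over doubles: NaN propagates, and -0 is smaller than +0
// even though the two compare equal.
double JsMathMin(double a, double b) {
  if (std::isnan(a) || std::isnan(b)) return std::nan("");
  if (a == b) {
    // The -0 vs +0 case: prefer the negatively signed zero.
    return std::signbit(a) ? a : b;
  }
  return a < b ? a : b;
}
```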
| 2032 __ j(equal, instr->FalseLabel(chunk_)); | 2032 __ j(equal, instr->FalseLabel(chunk_)); |
| 2033 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | 2033 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); |
| 2034 } else if (expected.NeedsMap()) { | 2034 } else if (expected.NeedsMap()) { |
| 2035 // If we need a map later and have a Smi -> deopt. | 2035 // If we need a map later and have a Smi -> deopt. |
| 2036 __ testb(reg, Immediate(kSmiTagMask)); | 2036 __ testb(reg, Immediate(kSmiTagMask)); |
| 2037 DeoptimizeIf(zero, instr->environment()); | 2037 DeoptimizeIf(zero, instr->environment()); |
| 2038 } | 2038 } |
| 2039 | 2039 |
| 2040 const Register map = kScratchRegister; | 2040 const Register map = kScratchRegister; |
| 2041 if (expected.NeedsMap()) { | 2041 if (expected.NeedsMap()) { |
| 2042 __ movq(map, FieldOperand(reg, HeapObject::kMapOffset)); | 2042 __ movp(map, FieldOperand(reg, HeapObject::kMapOffset)); |
| 2043 | 2043 |
| 2044 if (expected.CanBeUndetectable()) { | 2044 if (expected.CanBeUndetectable()) { |
| 2045 // Undetectable -> false. | 2045 // Undetectable -> false. |
| 2046 __ testb(FieldOperand(map, Map::kBitFieldOffset), | 2046 __ testb(FieldOperand(map, Map::kBitFieldOffset), |
| 2047 Immediate(1 << Map::kIsUndetectable)); | 2047 Immediate(1 << Map::kIsUndetectable)); |
| 2048 __ j(not_zero, instr->FalseLabel(chunk_)); | 2048 __ j(not_zero, instr->FalseLabel(chunk_)); |
| 2049 } | 2049 } |
| 2050 } | 2050 } |
| 2051 | 2051 |
| 2052 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { | 2052 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { |
| (...skipping 213 matching lines...) |
| 2266 Condition LCodeGen::EmitIsObject(Register input, | 2266 Condition LCodeGen::EmitIsObject(Register input, |
| 2267 Label* is_not_object, | 2267 Label* is_not_object, |
| 2268 Label* is_object) { | 2268 Label* is_object) { |
| 2269 ASSERT(!input.is(kScratchRegister)); | 2269 ASSERT(!input.is(kScratchRegister)); |
| 2270 | 2270 |
| 2271 __ JumpIfSmi(input, is_not_object); | 2271 __ JumpIfSmi(input, is_not_object); |
| 2272 | 2272 |
| 2273 __ CompareRoot(input, Heap::kNullValueRootIndex); | 2273 __ CompareRoot(input, Heap::kNullValueRootIndex); |
| 2274 __ j(equal, is_object); | 2274 __ j(equal, is_object); |
| 2275 | 2275 |
| 2276 __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); | 2276 __ movp(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); |
| 2277 // Undetectable objects behave like undefined. | 2277 // Undetectable objects behave like undefined. |
| 2278 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), | 2278 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), |
| 2279 Immediate(1 << Map::kIsUndetectable)); | 2279 Immediate(1 << Map::kIsUndetectable)); |
| 2280 __ j(not_zero, is_not_object); | 2280 __ j(not_zero, is_not_object); |
| 2281 | 2281 |
| 2282 __ movzxbl(kScratchRegister, | 2282 __ movzxbl(kScratchRegister, |
| 2283 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); | 2283 FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); |
| 2284 __ cmpb(kScratchRegister, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2284 __ cmpb(kScratchRegister, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 2285 __ j(below, is_not_object); | 2285 __ j(below, is_not_object); |
| 2286 __ cmpb(kScratchRegister, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2286 __ cmpb(kScratchRegister, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| (...skipping 53 matching lines...) |
| 2340 } | 2340 } |
| 2341 | 2341 |
| 2342 | 2342 |
| 2343 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) { | 2343 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) { |
| 2344 Register input = ToRegister(instr->value()); | 2344 Register input = ToRegister(instr->value()); |
| 2345 Register temp = ToRegister(instr->temp()); | 2345 Register temp = ToRegister(instr->temp()); |
| 2346 | 2346 |
| 2347 if (!instr->hydrogen()->value()->IsHeapObject()) { | 2347 if (!instr->hydrogen()->value()->IsHeapObject()) { |
| 2348 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); | 2348 __ JumpIfSmi(input, instr->FalseLabel(chunk_)); |
| 2349 } | 2349 } |
| 2350 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); | 2350 __ movp(temp, FieldOperand(input, HeapObject::kMapOffset)); |
| 2351 __ testb(FieldOperand(temp, Map::kBitFieldOffset), | 2351 __ testb(FieldOperand(temp, Map::kBitFieldOffset), |
| 2352 Immediate(1 << Map::kIsUndetectable)); | 2352 Immediate(1 << Map::kIsUndetectable)); |
| 2353 EmitBranch(instr, not_zero); | 2353 EmitBranch(instr, not_zero); |
| 2354 } | 2354 } |
| 2355 | 2355 |
| 2356 | 2356 |
| 2357 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { | 2357 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { |
| 2358 ASSERT(ToRegister(instr->context()).is(rsi)); | 2358 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 2359 Token::Value op = instr->op(); | 2359 Token::Value op = instr->op(); |
| 2360 | 2360 |
| (...skipping 85 matching lines...) |
| 2446 LAST_SPEC_OBJECT_TYPE - 1); | 2446 LAST_SPEC_OBJECT_TYPE - 1); |
| 2447 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 2447 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
| 2448 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp); | 2448 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp); |
| 2449 __ j(below, is_false); | 2449 __ j(below, is_false); |
| 2450 __ j(equal, is_true); | 2450 __ j(equal, is_true); |
| 2451 __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE); | 2451 __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE); |
| 2452 __ j(equal, is_true); | 2452 __ j(equal, is_true); |
| 2453 } else { | 2453 } else { |
| 2454 // Faster code path to avoid two compares: subtract lower bound from the | 2454 // Faster code path to avoid two compares: subtract lower bound from the |
| 2455 // actual type and do an unsigned compare with the width of the type range. | 2455 // actual type and do an unsigned compare with the width of the type range. |
| 2456 __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); | 2456 __ movp(temp, FieldOperand(input, HeapObject::kMapOffset)); |
| 2457 __ movzxbl(temp2, FieldOperand(temp, Map::kInstanceTypeOffset)); | 2457 __ movzxbl(temp2, FieldOperand(temp, Map::kInstanceTypeOffset)); |
| 2458 __ subq(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2458 __ subq(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 2459 __ cmpq(temp2, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE - | 2459 __ cmpq(temp2, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE - |
| 2460 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2460 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 2461 __ j(above, is_false); | 2461 __ j(above, is_false); |
| 2462 } | 2462 } |
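The "faster code path" above is the classic single-branch range check: once the lower bound is subtracted, any type below the range wraps around to a large unsigned value, so one unsigned compare against the range width replaces a pair of ordered compares. In C++:

```cpp
#include <cstdint>

// One-branch range check: |type| is in [first, last] iff the subtraction
// does not exceed the range width under unsigned comparison.
bool InstanceTypeInRange(uint8_t type, uint8_t first, uint8_t last) {
  return static_cast<uint8_t>(type - first) <= last - first;
}
```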
| 2463 | 2463 |
| 2464 // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range. | 2464 // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range. |
| 2465 // Check if the constructor in the map is a function. | 2465 // Check if the constructor in the map is a function. |
| 2466 __ movq(temp, FieldOperand(temp, Map::kConstructorOffset)); | 2466 __ movp(temp, FieldOperand(temp, Map::kConstructorOffset)); |
| 2467 | 2467 |
| 2468 // Objects with a non-function constructor have class 'Object'. | 2468 // Objects with a non-function constructor have class 'Object'. |
| 2469 __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister); | 2469 __ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister); |
| 2470 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Object"))) { | 2470 if (class_name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("Object"))) { |
| 2471 __ j(not_equal, is_true); | 2471 __ j(not_equal, is_true); |
| 2472 } else { | 2472 } else { |
| 2473 __ j(not_equal, is_false); | 2473 __ j(not_equal, is_false); |
| 2474 } | 2474 } |
| 2475 | 2475 |
| 2476 // temp now contains the constructor function. Grab the | 2476 // temp now contains the constructor function. Grab the |
| 2477 // instance class name from there. | 2477 // instance class name from there. |
| 2478 __ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset)); | 2478 __ movp(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset)); |
| 2479 __ movq(temp, FieldOperand(temp, | 2479 __ movp(temp, FieldOperand(temp, |
| 2480 SharedFunctionInfo::kInstanceClassNameOffset)); | 2480 SharedFunctionInfo::kInstanceClassNameOffset)); |
| 2481 // The class name we are testing against is internalized since it's a literal. | 2481 // The class name we are testing against is internalized since it's a literal. |
| 2482 // The name in the constructor is internalized because of the way the context | 2482 // The name in the constructor is internalized because of the way the context |
| 2483 // is booted. This routine isn't expected to work for random API-created | 2483 // is booted. This routine isn't expected to work for random API-created |
| 2484 // classes and it doesn't have to because you can't access it with natives | 2484 // classes and it doesn't have to because you can't access it with natives |
| 2485 // syntax. Since both sides are internalized it is sufficient to use an | 2485 // syntax. Since both sides are internalized it is sufficient to use an |
| 2486 // identity comparison. | 2486 // identity comparison. |
| 2487 ASSERT(class_name->IsInternalizedString()); | 2487 ASSERT(class_name->IsInternalizedString()); |
| 2488 __ Cmp(temp, class_name); | 2488 __ Cmp(temp, class_name); |
| 2489 // End with the answer in the z flag. | 2489 // End with the answer in the z flag. |
| (...skipping 63 matching lines...) |
| 2553 | 2553 |
| 2554 // A Smi is not an instance of anything. | 2554 // A Smi is not an instance of anything. |
| 2555 __ JumpIfSmi(object, &false_result, Label::kNear); | 2555 __ JumpIfSmi(object, &false_result, Label::kNear); |
| 2556 | 2556 |
| 2557 // This is the inlined call site instanceof cache. The two occurrences of the | 2557 // This is the inlined call site instanceof cache. The two occurrences of the |
| 2558 // hole value will be patched to the last map/result pair generated by the | 2558 // hole value will be patched to the last map/result pair generated by the |
| 2559 // instanceof stub. | 2559 // instanceof stub. |
| 2560 Label cache_miss; | 2560 Label cache_miss; |
| 2561 // Use a temp register to avoid memory operands with variable lengths. | 2561 // Use a temp register to avoid memory operands with variable lengths. |
| 2562 Register map = ToRegister(instr->temp()); | 2562 Register map = ToRegister(instr->temp()); |
| 2563 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); | 2563 __ movp(map, FieldOperand(object, HeapObject::kMapOffset)); |
| 2564 __ bind(deferred->map_check()); // Label for calculating code patching. | 2564 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 2565 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); | 2565 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); |
| 2566 __ Move(kScratchRegister, cache_cell, RelocInfo::CELL); | 2566 __ Move(kScratchRegister, cache_cell, RelocInfo::CELL); |
| 2567 __ cmpq(map, Operand(kScratchRegister, 0)); | 2567 __ cmpq(map, Operand(kScratchRegister, 0)); |
| 2568 __ j(not_equal, &cache_miss, Label::kNear); | 2568 __ j(not_equal, &cache_miss, Label::kNear); |
| 2569 // Patched to load either true or false. | 2569 // Patched to load either true or false. |
| 2570 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); | 2570 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); |
| 2571 #ifdef DEBUG | 2571 #ifdef DEBUG |
| 2572 // Check that the code size between patch label and patch sites is invariant. | 2572 // Check that the code size between patch label and patch sites is invariant. |
| 2573 Label end_of_patched_code; | 2573 Label end_of_patched_code; |
| (...skipping 43 matching lines...) |
| 2617 CallCodeGeneric(stub.GetCode(isolate()), | 2617 CallCodeGeneric(stub.GetCode(isolate()), |
| 2618 RelocInfo::CODE_TARGET, | 2618 RelocInfo::CODE_TARGET, |
| 2619 instr, | 2619 instr, |
| 2620 RECORD_SAFEPOINT_WITH_REGISTERS, | 2620 RECORD_SAFEPOINT_WITH_REGISTERS, |
| 2621 2); | 2621 2); |
| 2622 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); | 2622 ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check)); |
| 2623 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | 2623 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); |
| 2624 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 2624 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 2625 // Move result to a register that survives the end of the | 2625 // Move result to a register that survives the end of the |
| 2626 // PushSafepointRegisterScope. | 2626 // PushSafepointRegisterScope. |
| 2627 __ movq(kScratchRegister, rax); | 2627 __ movp(kScratchRegister, rax); |
| 2628 } | 2628 } |
| 2629 __ testq(kScratchRegister, kScratchRegister); | 2629 __ testq(kScratchRegister, kScratchRegister); |
| 2630 Label load_false; | 2630 Label load_false; |
| 2631 Label done; | 2631 Label done; |
| 2632 __ j(not_zero, &load_false, Label::kNear); | 2632 __ j(not_zero, &load_false, Label::kNear); |
| 2633 __ LoadRoot(rax, Heap::kTrueValueRootIndex); | 2633 __ LoadRoot(rax, Heap::kTrueValueRootIndex); |
| 2634 __ jmp(&done, Label::kNear); | 2634 __ jmp(&done, Label::kNear); |
| 2635 __ bind(&load_false); | 2635 __ bind(&load_false); |
| 2636 __ LoadRoot(rax, Heap::kFalseValueRootIndex); | 2636 __ LoadRoot(rax, Heap::kFalseValueRootIndex); |
| 2637 __ bind(&done); | 2637 __ bind(&done); |
| (...skipping 19 matching lines...) |
| 2657 } | 2657 } |
| 2658 | 2658 |
| 2659 | 2659 |
| 2660 void LCodeGen::DoReturn(LReturn* instr) { | 2660 void LCodeGen::DoReturn(LReturn* instr) { |
| 2661 if (FLAG_trace && info()->IsOptimizing()) { | 2661 if (FLAG_trace && info()->IsOptimizing()) { |
| 2662 // Preserve the return value on the stack and rely on the runtime call | 2662 // Preserve the return value on the stack and rely on the runtime call |
| 2663 // to return the value in the same register. We're leaving the code | 2663 // to return the value in the same register. We're leaving the code |
| 2664 // managed by the register allocator and tearing down the frame, it's | 2664 // managed by the register allocator and tearing down the frame, it's |
| 2665 // safe to write to the context register. | 2665 // safe to write to the context register. |
| 2666 __ push(rax); | 2666 __ push(rax); |
| 2667 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2667 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 2668 __ CallRuntime(Runtime::kTraceExit, 1); | 2668 __ CallRuntime(Runtime::kTraceExit, 1); |
| 2669 } | 2669 } |
| 2670 if (info()->saves_caller_doubles()) { | 2670 if (info()->saves_caller_doubles()) { |
| 2671 RestoreCallerDoubles(); | 2671 RestoreCallerDoubles(); |
| 2672 } | 2672 } |
| 2673 int no_frame_start = -1; | 2673 int no_frame_start = -1; |
| 2674 if (NeedsEagerFrame()) { | 2674 if (NeedsEagerFrame()) { |
| 2675 __ movq(rsp, rbp); | 2675 __ movp(rsp, rbp); |
| 2676 __ pop(rbp); | 2676 __ pop(rbp); |
| 2677 no_frame_start = masm_->pc_offset(); | 2677 no_frame_start = masm_->pc_offset(); |
| 2678 } | 2678 } |
| 2679 if (instr->has_constant_parameter_count()) { | 2679 if (instr->has_constant_parameter_count()) { |
| 2680 __ Ret((ToInteger32(instr->constant_parameter_count()) + 1) * kPointerSize, | 2680 __ Ret((ToInteger32(instr->constant_parameter_count()) + 1) * kPointerSize, |
| 2681 rcx); | 2681 rcx); |
| 2682 } else { | 2682 } else { |
| 2683 Register reg = ToRegister(instr->parameter_count()); | 2683 Register reg = ToRegister(instr->parameter_count()); |
| 2684 // The argument count parameter is a smi | 2684 // The argument count parameter is a smi |
| 2685 __ SmiToInteger32(reg, reg); | 2685 __ SmiToInteger32(reg, reg); |
| (...skipping 40 matching lines...) |
| 2726 // to update the property details in the property dictionary to mark | 2726 // to update the property details in the property dictionary to mark |
| 2727 // it as no longer deleted. We deoptimize in that case. | 2727 // it as no longer deleted. We deoptimize in that case. |
| 2728 if (instr->hydrogen()->RequiresHoleCheck()) { | 2728 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2729 // We have a temp because CompareRoot might clobber kScratchRegister. | 2729 // We have a temp because CompareRoot might clobber kScratchRegister. |
| 2730 Register cell = ToRegister(instr->temp()); | 2730 Register cell = ToRegister(instr->temp()); |
| 2731 ASSERT(!value.is(cell)); | 2731 ASSERT(!value.is(cell)); |
| 2732 __ Move(cell, cell_handle, RelocInfo::CELL); | 2732 __ Move(cell, cell_handle, RelocInfo::CELL); |
| 2733 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex); | 2733 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex); |
| 2734 DeoptimizeIf(equal, instr->environment()); | 2734 DeoptimizeIf(equal, instr->environment()); |
| 2735 // Store the value. | 2735 // Store the value. |
| 2736 __ movq(Operand(cell, 0), value); | 2736 __ movp(Operand(cell, 0), value); |
| 2737 } else { | 2737 } else { |
| 2738 // Store the value. | 2738 // Store the value. |
| 2739 __ Move(kScratchRegister, cell_handle, RelocInfo::CELL); | 2739 __ Move(kScratchRegister, cell_handle, RelocInfo::CELL); |
| 2740 __ movq(Operand(kScratchRegister, 0), value); | 2740 __ movp(Operand(kScratchRegister, 0), value); |
| 2741 } | 2741 } |
| 2742 // Cells are always rescanned, so no write barrier here. | 2742 // Cells are always rescanned, so no write barrier here. |
| 2743 } | 2743 } |
| 2744 | 2744 |
| 2745 | 2745 |
| 2746 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2746 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
| 2747 Register context = ToRegister(instr->context()); | 2747 Register context = ToRegister(instr->context()); |
| 2748 Register result = ToRegister(instr->result()); | 2748 Register result = ToRegister(instr->result()); |
| 2749 __ movq(result, ContextOperand(context, instr->slot_index())); | 2749 __ movp(result, ContextOperand(context, instr->slot_index())); |
| 2750 if (instr->hydrogen()->RequiresHoleCheck()) { | 2750 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2751 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2751 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 2752 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2752 if (instr->hydrogen()->DeoptimizesOnHole()) { |
| 2753 DeoptimizeIf(equal, instr->environment()); | 2753 DeoptimizeIf(equal, instr->environment()); |
| 2754 } else { | 2754 } else { |
| 2755 Label is_not_hole; | 2755 Label is_not_hole; |
| 2756 __ j(not_equal, &is_not_hole, Label::kNear); | 2756 __ j(not_equal, &is_not_hole, Label::kNear); |
| 2757 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 2757 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 2758 __ bind(&is_not_hole); | 2758 __ bind(&is_not_hole); |
| 2759 } | 2759 } |
| 2760 } | 2760 } |
| 2761 } | 2761 } |
| 2762 | 2762 |
| 2763 | 2763 |
| 2764 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 2764 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
| 2765 Register context = ToRegister(instr->context()); | 2765 Register context = ToRegister(instr->context()); |
| 2766 Register value = ToRegister(instr->value()); | 2766 Register value = ToRegister(instr->value()); |
| 2767 | 2767 |
| 2768 Operand target = ContextOperand(context, instr->slot_index()); | 2768 Operand target = ContextOperand(context, instr->slot_index()); |
| 2769 | 2769 |
| 2770 Label skip_assignment; | 2770 Label skip_assignment; |
| 2771 if (instr->hydrogen()->RequiresHoleCheck()) { | 2771 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2772 __ CompareRoot(target, Heap::kTheHoleValueRootIndex); | 2772 __ CompareRoot(target, Heap::kTheHoleValueRootIndex); |
| 2773 if (instr->hydrogen()->DeoptimizesOnHole()) { | 2773 if (instr->hydrogen()->DeoptimizesOnHole()) { |
| 2774 DeoptimizeIf(equal, instr->environment()); | 2774 DeoptimizeIf(equal, instr->environment()); |
| 2775 } else { | 2775 } else { |
| 2776 __ j(not_equal, &skip_assignment); | 2776 __ j(not_equal, &skip_assignment); |
| 2777 } | 2777 } |
| 2778 } | 2778 } |
| 2779 __ movq(target, value); | 2779 __ movp(target, value); |
| 2780 | 2780 |
| 2781 if (instr->hydrogen()->NeedsWriteBarrier()) { | 2781 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 2782 SmiCheck check_needed = | 2782 SmiCheck check_needed = |
| 2783 instr->hydrogen()->value()->IsHeapObject() | 2783 instr->hydrogen()->value()->IsHeapObject() |
| 2784 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 2784 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 2785 int offset = Context::SlotOffset(instr->slot_index()); | 2785 int offset = Context::SlotOffset(instr->slot_index()); |
| 2786 Register scratch = ToRegister(instr->temp()); | 2786 Register scratch = ToRegister(instr->temp()); |
| 2787 __ RecordWriteContextSlot(context, | 2787 __ RecordWriteContextSlot(context, |
| 2788 offset, | 2788 offset, |
| 2789 value, | 2789 value, |
| (...skipping 26 matching lines...) |
| 2816 Register object = ToRegister(instr->object()); | 2816 Register object = ToRegister(instr->object()); |
| 2817 if (FLAG_track_double_fields && | 2817 if (FLAG_track_double_fields && |
| 2818 instr->hydrogen()->representation().IsDouble()) { | 2818 instr->hydrogen()->representation().IsDouble()) { |
| 2819 XMMRegister result = ToDoubleRegister(instr->result()); | 2819 XMMRegister result = ToDoubleRegister(instr->result()); |
| 2820 __ movsd(result, FieldOperand(object, offset)); | 2820 __ movsd(result, FieldOperand(object, offset)); |
| 2821 return; | 2821 return; |
| 2822 } | 2822 } |
| 2823 | 2823 |
| 2824 Register result = ToRegister(instr->result()); | 2824 Register result = ToRegister(instr->result()); |
| 2825 if (!access.IsInobject()) { | 2825 if (!access.IsInobject()) { |
| 2826 __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); | 2826 __ movp(result, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 2827 object = result; | 2827 object = result; |
| 2828 } | 2828 } |
| 2829 | 2829 |
| 2830 Representation representation = access.representation(); | 2830 Representation representation = access.representation(); |
| 2831 if (representation.IsSmi() && | 2831 if (representation.IsSmi() && |
| 2832 instr->hydrogen()->representation().IsInteger32()) { | 2832 instr->hydrogen()->representation().IsInteger32()) { |
| 2833 // Read int value directly from upper half of the smi. | 2833 // Read int value directly from upper half of the smi. |
| 2834 STATIC_ASSERT(kSmiTag == 0); | 2834 STATIC_ASSERT(kSmiTag == 0); |
| 2835 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); | 2835 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); |
| 2836 offset += kPointerSize / 2; | 2836 offset += kPointerSize / 2; |
| (...skipping 22 matching lines...) |
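The `offset += kPointerSize / 2` above leans on the x64 smi layout asserted just before it: kSmiTagSize (1) plus kSmiShiftSize (31) put the 32-bit payload entirely in the upper half of the tagged word, so on a little-endian machine the int32 can be read four bytes above the field start with no untagging shift. A sketch:

```cpp
#include <cstdint>

// x64 smi encoding: payload shifted into the upper 32 bits, tag bits zero.
int64_t MakeSmi(int32_t value) {
  return static_cast<int64_t>(value) << 32;
}

// Little-endian read of the payload without untagging: the upper half of
// the 8-byte field is the int32 itself.
int32_t ReadSmiUpperHalf(const int64_t* field) {
  return reinterpret_cast<const int32_t*>(field)[1];
}
```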
| 2859 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); | 2859 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); |
| 2860 DeoptimizeIf(not_equal, instr->environment()); | 2860 DeoptimizeIf(not_equal, instr->environment()); |
| 2861 | 2861 |
| 2862 // Check whether the function has an instance prototype. | 2862 // Check whether the function has an instance prototype. |
| 2863 Label non_instance; | 2863 Label non_instance; |
| 2864 __ testb(FieldOperand(result, Map::kBitFieldOffset), | 2864 __ testb(FieldOperand(result, Map::kBitFieldOffset), |
| 2865 Immediate(1 << Map::kHasNonInstancePrototype)); | 2865 Immediate(1 << Map::kHasNonInstancePrototype)); |
| 2866 __ j(not_zero, &non_instance, Label::kNear); | 2866 __ j(not_zero, &non_instance, Label::kNear); |
| 2867 | 2867 |
| 2868 // Get the prototype or initial map from the function. | 2868 // Get the prototype or initial map from the function. |
| 2869 __ movq(result, | 2869 __ movp(result, |
| 2870 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); | 2870 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); |
| 2871 | 2871 |
| 2872 // Check that the function has a prototype or an initial map. | 2872 // Check that the function has a prototype or an initial map. |
| 2873 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2873 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 2874 DeoptimizeIf(equal, instr->environment()); | 2874 DeoptimizeIf(equal, instr->environment()); |
| 2875 | 2875 |
| 2876 // If the function does not have an initial map, we're done. | 2876 // If the function does not have an initial map, we're done. |
| 2877 Label done; | 2877 Label done; |
| 2878 __ CmpObjectType(result, MAP_TYPE, kScratchRegister); | 2878 __ CmpObjectType(result, MAP_TYPE, kScratchRegister); |
| 2879 __ j(not_equal, &done, Label::kNear); | 2879 __ j(not_equal, &done, Label::kNear); |
| 2880 | 2880 |
| 2881 // Get the prototype from the initial map. | 2881 // Get the prototype from the initial map. |
| 2882 __ movq(result, FieldOperand(result, Map::kPrototypeOffset)); | 2882 __ movp(result, FieldOperand(result, Map::kPrototypeOffset)); |
| 2883 __ jmp(&done, Label::kNear); | 2883 __ jmp(&done, Label::kNear); |
| 2884 | 2884 |
| 2885 // Non-instance prototype: Fetch prototype from constructor field | 2885 // Non-instance prototype: Fetch prototype from constructor field |
| 2886 // in the function's map. | 2886 // in the function's map. |
| 2887 __ bind(&non_instance); | 2887 __ bind(&non_instance); |
| 2888 __ movq(result, FieldOperand(result, Map::kConstructorOffset)); | 2888 __ movp(result, FieldOperand(result, Map::kConstructorOffset)); |
| 2889 | 2889 |
| 2890 // All done. | 2890 // All done. |
| 2891 __ bind(&done); | 2891 __ bind(&done); |
| 2892 } | 2892 } |
| 2893 | 2893 |
| 2894 | 2894 |
| 2895 void LCodeGen::DoLoadRoot(LLoadRoot* instr) { | 2895 void LCodeGen::DoLoadRoot(LLoadRoot* instr) { |
| 2896 Register result = ToRegister(instr->result()); | 2896 Register result = ToRegister(instr->result()); |
| 2897 __ LoadRoot(result, instr->index()); | 2897 __ LoadRoot(result, instr->index()); |
| 2898 } | 2898 } |
| 2899 | 2899 |
| 2900 | 2900 |
| 2901 void LCodeGen::DoLoadExternalArrayPointer( | 2901 void LCodeGen::DoLoadExternalArrayPointer( |
| 2902 LLoadExternalArrayPointer* instr) { | 2902 LLoadExternalArrayPointer* instr) { |
| 2903 Register result = ToRegister(instr->result()); | 2903 Register result = ToRegister(instr->result()); |
| 2904 Register input = ToRegister(instr->object()); | 2904 Register input = ToRegister(instr->object()); |
| 2905 __ movq(result, FieldOperand(input, | 2905 __ movp(result, FieldOperand(input, |
| 2906 ExternalPixelArray::kExternalPointerOffset)); | 2906 ExternalPixelArray::kExternalPointerOffset)); |
| 2907 } | 2907 } |
| 2908 | 2908 |
| 2909 | 2909 |
| 2910 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { | 2910 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { |
| 2911 Register arguments = ToRegister(instr->arguments()); | 2911 Register arguments = ToRegister(instr->arguments()); |
| 2912 Register result = ToRegister(instr->result()); | 2912 Register result = ToRegister(instr->result()); |
| 2913 | 2913 |
| 2914 if (instr->length()->IsConstantOperand() && | 2914 if (instr->length()->IsConstantOperand() && |
| 2915 instr->index()->IsConstantOperand()) { | 2915 instr->index()->IsConstantOperand()) { |
| 2916 int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index())); | 2916 int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index())); |
| 2917 int32_t const_length = ToInteger32(LConstantOperand::cast(instr->length())); | 2917 int32_t const_length = ToInteger32(LConstantOperand::cast(instr->length())); |
| 2918 StackArgumentsAccessor args(arguments, const_length, | 2918 StackArgumentsAccessor args(arguments, const_length, |
| 2919 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 2919 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
| 2920 __ movq(result, args.GetArgumentOperand(const_index)); | 2920 __ movp(result, args.GetArgumentOperand(const_index)); |
| 2921 } else { | 2921 } else { |
| 2922 Register length = ToRegister(instr->length()); | 2922 Register length = ToRegister(instr->length()); |
| 2923 // There are two words between the frame pointer and the last argument. | 2923 // There are two words between the frame pointer and the last argument. |
| 2924 // Subtracting the index from length accounts for one of them; add one more. | 2924 // Subtracting the index from length accounts for one of them; add one more. |
| 2925 if (instr->index()->IsRegister()) { | 2925 if (instr->index()->IsRegister()) { |
| 2926 __ subl(length, ToRegister(instr->index())); | 2926 __ subl(length, ToRegister(instr->index())); |
| 2927 } else { | 2927 } else { |
| 2928 __ subl(length, ToOperand(instr->index())); | 2928 __ subl(length, ToOperand(instr->index())); |
| 2929 } | 2929 } |
| 2930 StackArgumentsAccessor args(arguments, length, | 2930 StackArgumentsAccessor args(arguments, length, |
| 2931 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 2931 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
| 2932 __ movq(result, args.GetArgumentOperand(0)); | 2932 __ movp(result, args.GetArgumentOperand(0)); |
| 2933 } | 2933 } |
| 2934 } | 2934 } |
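
A hedged reading of the index arithmetic above, assuming the standard x64 JavaScript frame layout: two words (the saved rbp and the return address) sit between the frame pointer and the last pushed argument, so 0-based argument i of an n-argument frame lives at fp + (n + 1 - i) * kPointerSize:

    // fp + 1 * kPointerSize           : return address
    // fp + 2 * kPointerSize           : argument n-1 (pushed last)
    // fp + (n + 1 - i) * kPointerSize : argument i
    // After `length -= index`, the accessor only needs to add the one
    // remaining word:
    Operand slot(arguments, length, times_pointer_size, 1 * kPointerSize);
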
| 2935 | 2935 |
| 2936 | 2936 |
| 2937 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { | 2937 void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) { |
| 2938 ElementsKind elements_kind = instr->elements_kind(); | 2938 ElementsKind elements_kind = instr->elements_kind(); |
| 2939 LOperand* key = instr->key(); | 2939 LOperand* key = instr->key(); |
| 2940 if (!key->IsConstantOperand()) { | 2940 if (!key->IsConstantOperand()) { |
| 2941 Register key_reg = ToRegister(key); | 2941 Register key_reg = ToRegister(key); |
| 2942 // Even though the HLoad/StoreKeyed (in this case) instructions force | 2942 // Even though the HLoad/StoreKeyed (in this case) instructions force |
| (...skipping 216 matching lines...) |
| 3159 | 3159 |
| 3160 | 3160 |
| 3161 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { | 3161 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { |
| 3162 Register result = ToRegister(instr->result()); | 3162 Register result = ToRegister(instr->result()); |
| 3163 | 3163 |
| 3164 if (instr->hydrogen()->from_inlined()) { | 3164 if (instr->hydrogen()->from_inlined()) { |
| 3165 __ lea(result, Operand(rsp, -kFPOnStackSize + -kPCOnStackSize)); | 3165 __ lea(result, Operand(rsp, -kFPOnStackSize + -kPCOnStackSize)); |
| 3166 } else { | 3166 } else { |
| 3167 // Check for the arguments adaptor frame. | 3167 // Check for the arguments adaptor frame. |
| 3168 Label done, adapted; | 3168 Label done, adapted; |
| 3169 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 3169 __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 3170 __ Cmp(Operand(result, StandardFrameConstants::kContextOffset), | 3170 __ Cmp(Operand(result, StandardFrameConstants::kContextOffset), |
| 3171 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 3171 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 3172 __ j(equal, &adapted, Label::kNear); | 3172 __ j(equal, &adapted, Label::kNear); |
| 3173 | 3173 |
| 3174 // No arguments adaptor frame. | 3174 // No arguments adaptor frame. |
| 3175 __ movq(result, rbp); | 3175 __ movp(result, rbp); |
| 3176 __ jmp(&done, Label::kNear); | 3176 __ jmp(&done, Label::kNear); |
| 3177 | 3177 |
| 3178 // Arguments adaptor frame present. | 3178 // Arguments adaptor frame present. |
| 3179 __ bind(&adapted); | 3179 __ bind(&adapted); |
| 3180 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 3180 __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 3181 | 3181 |
| 3182 // Result is the frame pointer for the frame if not adapted and for the real | 3182 // Result is the frame pointer for the frame if not adapted and for the real |
| 3183 // frame below the adaptor frame if adapted. | 3183 // frame below the adaptor frame if adapted. |
| 3184 __ bind(&done); | 3184 __ bind(&done); |
| 3185 } | 3185 } |
| 3186 } | 3186 } |
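
In the from_inlined branch the lea fabricates a frame pointer: an inlined function has no frame of its own, so the result is the address where a saved rbp would sit. As arithmetic (a sketch using the constants from the code, x64 sizes assumed):

    // result = rsp - kFPOnStackSize - kPCOnStackSize   // rsp - 16 on x64
    // so result + 2 * kPointerSize points at the arguments just pushed at
    // rsp, matching the layout of a real frame.
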
| 3187 | 3187 |
| 3188 | 3188 |
| 3189 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { | 3189 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { |
| 3190 Register result = ToRegister(instr->result()); | 3190 Register result = ToRegister(instr->result()); |
| 3191 | 3191 |
| 3192 Label done; | 3192 Label done; |
| 3193 | 3193 |
| 3194 // If there is no arguments adaptor frame, the number of arguments is fixed. | 3194 // If there is no arguments adaptor frame, the number of arguments is fixed. |
| 3195 if (instr->elements()->IsRegister()) { | 3195 if (instr->elements()->IsRegister()) { |
| 3196 __ cmpq(rbp, ToRegister(instr->elements())); | 3196 __ cmpq(rbp, ToRegister(instr->elements())); |
| 3197 } else { | 3197 } else { |
| 3198 __ cmpq(rbp, ToOperand(instr->elements())); | 3198 __ cmpq(rbp, ToOperand(instr->elements())); |
| 3199 } | 3199 } |
| 3200 __ movl(result, Immediate(scope()->num_parameters())); | 3200 __ movl(result, Immediate(scope()->num_parameters())); |
| 3201 __ j(equal, &done, Label::kNear); | 3201 __ j(equal, &done, Label::kNear); |
| 3202 | 3202 |
| 3203 // Arguments adaptor frame present. Get argument length from there. | 3203 // Arguments adaptor frame present. Get argument length from there. |
| 3204 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 3204 __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 3205 __ SmiToInteger32(result, | 3205 __ SmiToInteger32(result, |
| 3206 Operand(result, | 3206 Operand(result, |
| 3207 ArgumentsAdaptorFrameConstants::kLengthOffset)); | 3207 ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 3208 | 3208 |
| 3209 // Argument length is in result register. | 3209 // Argument length is in result register. |
| 3210 __ bind(&done); | 3210 __ bind(&done); |
| 3211 } | 3211 } |
| 3212 | 3212 |
| 3213 | 3213 |
| 3214 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { | 3214 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) { |
| 3215 Register receiver = ToRegister(instr->receiver()); | 3215 Register receiver = ToRegister(instr->receiver()); |
| 3216 Register function = ToRegister(instr->function()); | 3216 Register function = ToRegister(instr->function()); |
| 3217 | 3217 |
| 3218 // If the receiver is null or undefined, we have to pass the global | 3218 // If the receiver is null or undefined, we have to pass the global |
| 3219 // object as a receiver to normal functions. Values have to be | 3219 // object as a receiver to normal functions. Values have to be |
| 3220 // passed unchanged to builtins and strict-mode functions. | 3220 // passed unchanged to builtins and strict-mode functions. |
| 3221 Label global_object, receiver_ok; | 3221 Label global_object, receiver_ok; |
| 3222 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; | 3222 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear; |
| 3223 | 3223 |
| 3224 // Do not transform the receiver to object for strict mode | 3224 // Do not transform the receiver to object for strict mode |
| 3225 // functions. | 3225 // functions. |
| 3226 __ movq(kScratchRegister, | 3226 __ movp(kScratchRegister, |
| 3227 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 3227 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
| 3228 __ testb(FieldOperand(kScratchRegister, | 3228 __ testb(FieldOperand(kScratchRegister, |
| 3229 SharedFunctionInfo::kStrictModeByteOffset), | 3229 SharedFunctionInfo::kStrictModeByteOffset), |
| 3230 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); | 3230 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 3231 __ j(not_equal, &receiver_ok, dist); | 3231 __ j(not_equal, &receiver_ok, dist); |
| 3232 | 3232 |
| 3233 // Do not transform the receiver to object for builtins. | 3233 // Do not transform the receiver to object for builtins. |
| 3234 __ testb(FieldOperand(kScratchRegister, | 3234 __ testb(FieldOperand(kScratchRegister, |
| 3235 SharedFunctionInfo::kNativeByteOffset), | 3235 SharedFunctionInfo::kNativeByteOffset), |
| 3236 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); | 3236 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); |
| 3237 __ j(not_equal, &receiver_ok, dist); | 3237 __ j(not_equal, &receiver_ok, dist); |
| 3238 | 3238 |
| 3239 // Normal function. Replace undefined or null with global receiver. | 3239 // Normal function. Replace undefined or null with global receiver. |
| 3240 __ CompareRoot(receiver, Heap::kNullValueRootIndex); | 3240 __ CompareRoot(receiver, Heap::kNullValueRootIndex); |
| 3241 __ j(equal, &global_object, Label::kNear); | 3241 __ j(equal, &global_object, Label::kNear); |
| 3242 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); | 3242 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); |
| 3243 __ j(equal, &global_object, Label::kNear); | 3243 __ j(equal, &global_object, Label::kNear); |
| 3244 | 3244 |
| 3245 // The receiver should be a JS object. | 3245 // The receiver should be a JS object. |
| 3246 Condition is_smi = __ CheckSmi(receiver); | 3246 Condition is_smi = __ CheckSmi(receiver); |
| 3247 DeoptimizeIf(is_smi, instr->environment()); | 3247 DeoptimizeIf(is_smi, instr->environment()); |
| 3248 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); | 3248 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); |
| 3249 DeoptimizeIf(below, instr->environment()); | 3249 DeoptimizeIf(below, instr->environment()); |
| 3250 __ jmp(&receiver_ok, Label::kNear); | 3250 __ jmp(&receiver_ok, Label::kNear); |
| 3251 | 3251 |
| 3252 __ bind(&global_object); | 3252 __ bind(&global_object); |
| 3253 __ movq(receiver, FieldOperand(function, JSFunction::kContextOffset)); | 3253 __ movp(receiver, FieldOperand(function, JSFunction::kContextOffset)); |
| 3254 __ movq(receiver, | 3254 __ movp(receiver, |
| 3255 Operand(receiver, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 3255 Operand(receiver, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 3256 __ movq(receiver, | 3256 __ movp(receiver, |
| 3257 FieldOperand(receiver, GlobalObject::kGlobalReceiverOffset)); | 3257 FieldOperand(receiver, GlobalObject::kGlobalReceiverOffset)); |
| 3258 __ bind(&receiver_ok); | 3258 __ bind(&receiver_ok); |
| 3259 } | 3259 } |
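
The two testb instructions read one byte each out of the SharedFunctionInfo's packed compiler-hints field. A hedged C++ equivalent (byte_at is a hypothetical accessor; the offset and bit constants are the ones named above):

    bool strict = shared->byte_at(SharedFunctionInfo::kStrictModeByteOffset) &
                  (1 << SharedFunctionInfo::kStrictModeBitWithinByte);
    bool native = shared->byte_at(SharedFunctionInfo::kNativeByteOffset) &
                  (1 << SharedFunctionInfo::kNativeBitWithinByte);
    if (!strict && !native) {
      // Normal function: null/undefined => global receiver; a smi or a
      // non-spec-object receiver deoptimizes instead.
    }
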
| 3260 | 3260 |
| 3261 | 3261 |
| 3262 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { | 3262 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { |
| 3263 Register receiver = ToRegister(instr->receiver()); | 3263 Register receiver = ToRegister(instr->receiver()); |
| 3264 Register function = ToRegister(instr->function()); | 3264 Register function = ToRegister(instr->function()); |
| 3265 Register length = ToRegister(instr->length()); | 3265 Register length = ToRegister(instr->length()); |
| 3266 Register elements = ToRegister(instr->elements()); | 3266 Register elements = ToRegister(instr->elements()); |
| 3267 ASSERT(receiver.is(rax)); // Used for parameter count. | 3267 ASSERT(receiver.is(rax)); // Used for parameter count. |
| 3268 ASSERT(function.is(rdi)); // Required by InvokeFunction. | 3268 ASSERT(function.is(rdi)); // Required by InvokeFunction. |
| 3269 ASSERT(ToRegister(instr->result()).is(rax)); | 3269 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3270 | 3270 |
| 3271 // Copy the arguments to this function, possibly from the | 3271 // Copy the arguments to this function, possibly from the |
| 3272 // adaptor frame below it. | 3272 // adaptor frame below it. |
| 3273 const uint32_t kArgumentsLimit = 1 * KB; | 3273 const uint32_t kArgumentsLimit = 1 * KB; |
| 3274 __ cmpq(length, Immediate(kArgumentsLimit)); | 3274 __ cmpq(length, Immediate(kArgumentsLimit)); |
| 3275 DeoptimizeIf(above, instr->environment()); | 3275 DeoptimizeIf(above, instr->environment()); |
| 3276 | 3276 |
| 3277 __ push(receiver); | 3277 __ push(receiver); |
| 3278 __ movq(receiver, length); | 3278 __ movp(receiver, length); |
| 3279 | 3279 |
| 3280 // Loop through the arguments pushing them onto the execution | 3280 // Loop through the arguments pushing them onto the execution |
| 3281 // stack. | 3281 // stack. |
| 3282 Label invoke, loop; | 3282 Label invoke, loop; |
| 3283 // length is a small non-negative integer, due to the test above. | 3283 // length is a small non-negative integer, due to the test above. |
| 3284 __ testl(length, length); | 3284 __ testl(length, length); |
| 3285 __ j(zero, &invoke, Label::kNear); | 3285 __ j(zero, &invoke, Label::kNear); |
| 3286 __ bind(&loop); | 3286 __ bind(&loop); |
| 3287 StackArgumentsAccessor args(elements, length, | 3287 StackArgumentsAccessor args(elements, length, |
| 3288 ARGUMENTS_DONT_CONTAIN_RECEIVER); | 3288 ARGUMENTS_DONT_CONTAIN_RECEIVER); |
| (...skipping 18 matching lines...) |
| 3307 } | 3307 } |
| 3308 | 3308 |
| 3309 | 3309 |
| 3310 void LCodeGen::DoDrop(LDrop* instr) { | 3310 void LCodeGen::DoDrop(LDrop* instr) { |
| 3311 __ Drop(instr->count()); | 3311 __ Drop(instr->count()); |
| 3312 } | 3312 } |
| 3313 | 3313 |
| 3314 | 3314 |
| 3315 void LCodeGen::DoThisFunction(LThisFunction* instr) { | 3315 void LCodeGen::DoThisFunction(LThisFunction* instr) { |
| 3316 Register result = ToRegister(instr->result()); | 3316 Register result = ToRegister(instr->result()); |
| 3317 __ movq(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 3317 __ movp(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3318 } | 3318 } |
| 3319 | 3319 |
| 3320 | 3320 |
| 3321 void LCodeGen::DoContext(LContext* instr) { | 3321 void LCodeGen::DoContext(LContext* instr) { |
| 3322 Register result = ToRegister(instr->result()); | 3322 Register result = ToRegister(instr->result()); |
| 3323 if (info()->IsOptimizing()) { | 3323 if (info()->IsOptimizing()) { |
| 3324 __ movq(result, Operand(rbp, StandardFrameConstants::kContextOffset)); | 3324 __ movp(result, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 3325 } else { | 3325 } else { |
| 3326 // If there is no frame, the context must be in rsi. | 3326 // If there is no frame, the context must be in rsi. |
| 3327 ASSERT(result.is(rsi)); | 3327 ASSERT(result.is(rsi)); |
| 3328 } | 3328 } |
| 3329 } | 3329 } |
| 3330 | 3330 |
| 3331 | 3331 |
| 3332 void LCodeGen::DoOuterContext(LOuterContext* instr) { | 3332 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| 3333 Register context = ToRegister(instr->context()); | 3333 Register context = ToRegister(instr->context()); |
| 3334 Register result = ToRegister(instr->result()); | 3334 Register result = ToRegister(instr->result()); |
| 3335 __ movq(result, | 3335 __ movp(result, |
| 3336 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 3336 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 3337 } | 3337 } |
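
Context::SlotOffset, used here and in DoGlobalObject below, is the same tag-folding trick as FieldOperand, applied to the context's slot array. A sketch that mirrors the definition in contexts.h:

    inline int ContextSlotOffset(int index) {  // mirrors Context::SlotOffset
      return Context::kHeaderSize + index * kPointerSize - kHeapObjectTag;
    }
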
| 3338 | 3338 |
| 3339 | 3339 |
| 3340 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3340 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
| 3341 ASSERT(ToRegister(instr->context()).is(rsi)); | 3341 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3342 __ push(rsi); // The context is the first argument. | 3342 __ push(rsi); // The context is the first argument. |
| 3343 __ Push(instr->hydrogen()->pairs()); | 3343 __ Push(instr->hydrogen()->pairs()); |
| 3344 __ Push(Smi::FromInt(instr->hydrogen()->flags())); | 3344 __ Push(Smi::FromInt(instr->hydrogen()->flags())); |
| 3345 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3345 CallRuntime(Runtime::kDeclareGlobals, 3, instr); |
| 3346 } | 3346 } |
| 3347 | 3347 |
| 3348 | 3348 |
| 3349 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { | 3349 void LCodeGen::DoGlobalObject(LGlobalObject* instr) { |
| 3350 Register context = ToRegister(instr->context()); | 3350 Register context = ToRegister(instr->context()); |
| 3351 Register result = ToRegister(instr->result()); | 3351 Register result = ToRegister(instr->result()); |
| 3352 __ movq(result, | 3352 __ movp(result, |
| 3353 Operand(context, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 3353 Operand(context, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 3354 } | 3354 } |
| 3355 | 3355 |
| 3356 | 3356 |
| 3357 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { | 3357 void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) { |
| 3358 Register global = ToRegister(instr->global()); | 3358 Register global = ToRegister(instr->global()); |
| 3359 Register result = ToRegister(instr->result()); | 3359 Register result = ToRegister(instr->result()); |
| 3360 __ movq(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); | 3360 __ movp(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset)); |
| 3361 } | 3361 } |
| 3362 | 3362 |
| 3363 | 3363 |
| 3364 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 3364 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
| 3365 int formal_parameter_count, | 3365 int formal_parameter_count, |
| 3366 int arity, | 3366 int arity, |
| 3367 LInstruction* instr, | 3367 LInstruction* instr, |
| 3368 RDIState rdi_state) { | 3368 RDIState rdi_state) { |
| 3369 bool dont_adapt_arguments = | 3369 bool dont_adapt_arguments = |
| 3370 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 3370 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; |
| 3371 bool can_invoke_directly = | 3371 bool can_invoke_directly = |
| 3372 dont_adapt_arguments || formal_parameter_count == arity; | 3372 dont_adapt_arguments || formal_parameter_count == arity; |
| 3373 | 3373 |
| 3374 LPointerMap* pointers = instr->pointer_map(); | 3374 LPointerMap* pointers = instr->pointer_map(); |
| 3375 | 3375 |
| 3376 if (can_invoke_directly) { | 3376 if (can_invoke_directly) { |
| 3377 if (rdi_state == RDI_UNINITIALIZED) { | 3377 if (rdi_state == RDI_UNINITIALIZED) { |
| 3378 __ Move(rdi, function); | 3378 __ Move(rdi, function); |
| 3379 } | 3379 } |
| 3380 | 3380 |
| 3381 // Change context. | 3381 // Change context. |
| 3382 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 3382 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 3383 | 3383 |
| 3384 // Set rax to the argument count if adaptation is not needed. Assumes | 3384 // Set rax to the argument count if adaptation is not needed. Assumes |
| 3385 // that rax is available to write to at this point. | 3385 // that rax is available to write to at this point. |
| 3386 if (dont_adapt_arguments) { | 3386 if (dont_adapt_arguments) { |
| 3387 __ Set(rax, arity); | 3387 __ Set(rax, arity); |
| 3388 } | 3388 } |
| 3389 | 3389 |
| 3390 // Invoke function. | 3390 // Invoke function. |
| 3391 if (function.is_identical_to(info()->closure())) { | 3391 if (function.is_identical_to(info()->closure())) { |
| 3392 __ CallSelf(); | 3392 __ CallSelf(); |
| (...skipping 38 matching lines...) |
| 3431 | 3431 |
| 3432 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { | 3432 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) { |
| 3433 ASSERT(ToRegister(instr->function()).is(rdi)); | 3433 ASSERT(ToRegister(instr->function()).is(rdi)); |
| 3434 ASSERT(ToRegister(instr->result()).is(rax)); | 3434 ASSERT(ToRegister(instr->result()).is(rax)); |
| 3435 | 3435 |
| 3436 if (instr->hydrogen()->pass_argument_count()) { | 3436 if (instr->hydrogen()->pass_argument_count()) { |
| 3437 __ Set(rax, instr->arity()); | 3437 __ Set(rax, instr->arity()); |
| 3438 } | 3438 } |
| 3439 | 3439 |
| 3440 // Change context. | 3440 // Change context. |
| 3441 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 3441 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
| 3442 | 3442 |
| 3443 LPointerMap* pointers = instr->pointer_map(); | 3443 LPointerMap* pointers = instr->pointer_map(); |
| 3444 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); | 3444 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt); |
| 3445 | 3445 |
| 3446 bool is_self_call = false; | 3446 bool is_self_call = false; |
| 3447 if (instr->hydrogen()->function()->IsConstant()) { | 3447 if (instr->hydrogen()->function()->IsConstant()) { |
| 3448 Handle<JSFunction> jsfun = Handle<JSFunction>::null(); | 3448 Handle<JSFunction> jsfun = Handle<JSFunction>::null(); |
| 3449 HConstant* fun_const = HConstant::cast(instr->hydrogen()->function()); | 3449 HConstant* fun_const = HConstant::cast(instr->hydrogen()->function()); |
| 3450 jsfun = Handle<JSFunction>::cast(fun_const->handle(isolate())); | 3450 jsfun = Handle<JSFunction>::cast(fun_const->handle(isolate())); |
| 3451 is_self_call = jsfun.is_identical_to(info()->closure()); | 3451 is_self_call = jsfun.is_identical_to(info()->closure()); |
| (...skipping 32 matching lines...) |
| 3484 __ j(zero, &done); | 3484 __ j(zero, &done); |
| 3485 | 3485 |
| 3486 __ AllocateHeapNumber(tmp, tmp2, &slow); | 3486 __ AllocateHeapNumber(tmp, tmp2, &slow); |
| 3487 __ jmp(&allocated, Label::kNear); | 3487 __ jmp(&allocated, Label::kNear); |
| 3488 | 3488 |
| 3489 // Slow case: Call the runtime system to do the number allocation. | 3489 // Slow case: Call the runtime system to do the number allocation. |
| 3490 __ bind(&slow); | 3490 __ bind(&slow); |
| 3491 CallRuntimeFromDeferred( | 3491 CallRuntimeFromDeferred( |
| 3492 Runtime::kAllocateHeapNumber, 0, instr, instr->context()); | 3492 Runtime::kAllocateHeapNumber, 0, instr, instr->context()); |
| 3493 // Set the pointer to the new heap number in tmp. | 3493 // Set the pointer to the new heap number in tmp. |
| 3494 if (!tmp.is(rax)) __ movq(tmp, rax); | 3494 if (!tmp.is(rax)) __ movp(tmp, rax); |
| 3495 // Restore input_reg after call to runtime. | 3495 // Restore input_reg after call to runtime. |
| 3496 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); | 3496 __ LoadFromSafepointRegisterSlot(input_reg, input_reg); |
| 3497 | 3497 |
| 3498 __ bind(&allocated); | 3498 __ bind(&allocated); |
| 3499 __ MoveDouble(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 3499 __ MoveDouble(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 3500 __ shl(tmp2, Immediate(1)); | 3500 __ shl(tmp2, Immediate(1)); |
| 3501 __ shr(tmp2, Immediate(1)); | 3501 __ shr(tmp2, Immediate(1)); |
| 3502 __ MoveDouble(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); | 3502 __ MoveDouble(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2); |
| 3503 __ StoreToSafepointRegisterSlot(input_reg, tmp); | 3503 __ StoreToSafepointRegisterSlot(input_reg, tmp); |
| 3504 | 3504 |
| (...skipping 367 matching lines...) |
| 3872 | 3872 |
| 3873 if (instr->arity() == 0) { | 3873 if (instr->arity() == 0) { |
| 3874 ArrayNoArgumentConstructorStub stub(kind, override_mode); | 3874 ArrayNoArgumentConstructorStub stub(kind, override_mode); |
| 3875 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3875 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 3876 } else if (instr->arity() == 1) { | 3876 } else if (instr->arity() == 1) { |
| 3877 Label done; | 3877 Label done; |
| 3878 if (IsFastPackedElementsKind(kind)) { | 3878 if (IsFastPackedElementsKind(kind)) { |
| 3879 Label packed_case; | 3879 Label packed_case; |
| 3880 // A packed array may need the holey variant of the stub: | 3880 // A packed array may need the holey variant of the stub: |
| 3881 // look at the first argument (the length) | 3881 // look at the first argument (the length) |
| 3882 __ movq(rcx, Operand(rsp, 0)); | 3882 __ movp(rcx, Operand(rsp, 0)); |
| 3883 __ testq(rcx, rcx); | 3883 __ testq(rcx, rcx); |
| 3884 __ j(zero, &packed_case, Label::kNear); | 3884 __ j(zero, &packed_case, Label::kNear); |
| 3885 | 3885 |
| 3886 ElementsKind holey_kind = GetHoleyElementsKind(kind); | 3886 ElementsKind holey_kind = GetHoleyElementsKind(kind); |
| 3887 ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); | 3887 ArraySingleArgumentConstructorStub stub(holey_kind, override_mode); |
| 3888 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); | 3888 CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); |
| 3889 __ jmp(&done, Label::kNear); | 3889 __ jmp(&done, Label::kNear); |
| 3890 __ bind(&packed_case); | 3890 __ bind(&packed_case); |
| 3891 } | 3891 } |
| 3892 | 3892 |
| (...skipping 10 matching lines...) |
| 3903 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { | 3903 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { |
| 3904 ASSERT(ToRegister(instr->context()).is(rsi)); | 3904 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 3905 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); | 3905 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles()); |
| 3906 } | 3906 } |
| 3907 | 3907 |
| 3908 | 3908 |
| 3909 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { | 3909 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) { |
| 3910 Register function = ToRegister(instr->function()); | 3910 Register function = ToRegister(instr->function()); |
| 3911 Register code_object = ToRegister(instr->code_object()); | 3911 Register code_object = ToRegister(instr->code_object()); |
| 3912 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); | 3912 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize)); |
| 3913 __ movq(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); | 3913 __ movp(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object); |
| 3914 } | 3914 } |
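
The lea/movp pair stores a raw instruction pointer: a Code object's entry point is its untagged address plus the header size, and that raw address is what JSFunction::kCodeEntryOffset holds. A hedged sketch of the invariant:

    // after the lea:
    //   code_object == tagged Code pointer + Code::kHeaderSize - kHeapObjectTag
    //               == address of the first instruction of the code body
    // The stored value is not a tagged pointer, which is presumably why no
    // write barrier is emitted on this path.
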
| 3915 | 3915 |
| 3916 | 3916 |
| 3917 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { | 3917 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) { |
| 3918 Register result = ToRegister(instr->result()); | 3918 Register result = ToRegister(instr->result()); |
| 3919 Register base = ToRegister(instr->base_object()); | 3919 Register base = ToRegister(instr->base_object()); |
| 3920 if (instr->offset()->IsConstantOperand()) { | 3920 if (instr->offset()->IsConstantOperand()) { |
| 3921 LConstantOperand* offset = LConstantOperand::cast(instr->offset()); | 3921 LConstantOperand* offset = LConstantOperand::cast(instr->offset()); |
| 3922 __ lea(result, Operand(base, ToInteger32(offset))); | 3922 __ lea(result, Operand(base, ToInteger32(offset))); |
| 3923 } else { | 3923 } else { |
| (...skipping 57 matching lines...) |
| 3981 __ movsd(FieldOperand(object, offset), value); | 3981 __ movsd(FieldOperand(object, offset), value); |
| 3982 return; | 3982 return; |
| 3983 } | 3983 } |
| 3984 | 3984 |
| 3985 if (!transition.is_null()) { | 3985 if (!transition.is_null()) { |
| 3986 if (!hinstr->NeedsWriteBarrierForMap()) { | 3986 if (!hinstr->NeedsWriteBarrierForMap()) { |
| 3987 __ Move(FieldOperand(object, HeapObject::kMapOffset), transition); | 3987 __ Move(FieldOperand(object, HeapObject::kMapOffset), transition); |
| 3988 } else { | 3988 } else { |
| 3989 Register temp = ToRegister(instr->temp()); | 3989 Register temp = ToRegister(instr->temp()); |
| 3990 __ Move(kScratchRegister, transition); | 3990 __ Move(kScratchRegister, transition); |
| 3991 __ movq(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister); | 3991 __ movp(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister); |
| 3992 // Update the write barrier for the map field. | 3992 // Update the write barrier for the map field. |
| 3993 __ RecordWriteField(object, | 3993 __ RecordWriteField(object, |
| 3994 HeapObject::kMapOffset, | 3994 HeapObject::kMapOffset, |
| 3995 kScratchRegister, | 3995 kScratchRegister, |
| 3996 temp, | 3996 temp, |
| 3997 kSaveFPRegs, | 3997 kSaveFPRegs, |
| 3998 OMIT_REMEMBERED_SET, | 3998 OMIT_REMEMBERED_SET, |
| 3999 OMIT_SMI_CHECK); | 3999 OMIT_SMI_CHECK); |
| 4000 } | 4000 } |
| 4001 } | 4001 } |
| 4002 | 4002 |
| 4003 // Do the store. | 4003 // Do the store. |
| 4004 SmiCheck check_needed = hinstr->value()->IsHeapObject() | 4004 SmiCheck check_needed = hinstr->value()->IsHeapObject() |
| 4005 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; | 4005 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 4006 | 4006 |
| 4007 Register write_register = object; | 4007 Register write_register = object; |
| 4008 if (!access.IsInobject()) { | 4008 if (!access.IsInobject()) { |
| 4009 write_register = ToRegister(instr->temp()); | 4009 write_register = ToRegister(instr->temp()); |
| 4010 __ movq(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); | 4010 __ movp(write_register, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 4011 } | 4011 } |
| 4012 | 4012 |
| 4013 if (representation.IsSmi() && | 4013 if (representation.IsSmi() && |
| 4014 hinstr->value()->representation().IsInteger32()) { | 4014 hinstr->value()->representation().IsInteger32()) { |
| 4015 ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); | 4015 ASSERT(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); |
| 4016 // Store int value directly to upper half of the smi. | 4016 // Store int value directly to upper half of the smi. |
| 4017 STATIC_ASSERT(kSmiTag == 0); | 4017 STATIC_ASSERT(kSmiTag == 0); |
| 4018 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); | 4018 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 32); |
| 4019 offset += kPointerSize / 2; | 4019 offset += kPointerSize / 2; |
| 4020 representation = Representation::Integer32(); | 4020 representation = Representation::Integer32(); |
| (...skipping 327 matching lines...) |
| 4348 Handle<Map> to_map = instr->transitioned_map(); | 4348 Handle<Map> to_map = instr->transitioned_map(); |
| 4349 ElementsKind from_kind = instr->from_kind(); | 4349 ElementsKind from_kind = instr->from_kind(); |
| 4350 ElementsKind to_kind = instr->to_kind(); | 4350 ElementsKind to_kind = instr->to_kind(); |
| 4351 | 4351 |
| 4352 Label not_applicable; | 4352 Label not_applicable; |
| 4353 __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); | 4353 __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map); |
| 4354 __ j(not_equal, ¬_applicable); | 4354 __ j(not_equal, ¬_applicable); |
| 4355 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { | 4355 if (IsSimpleMapChangeTransition(from_kind, to_kind)) { |
| 4356 Register new_map_reg = ToRegister(instr->new_map_temp()); | 4356 Register new_map_reg = ToRegister(instr->new_map_temp()); |
| 4357 __ Move(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); | 4357 __ Move(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT); |
| 4358 __ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); | 4358 __ movp(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg); |
| 4359 // Write barrier. | 4359 // Write barrier. |
| 4360 ASSERT_NE(instr->temp(), NULL); | 4360 ASSERT_NE(instr->temp(), NULL); |
| 4361 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, | 4361 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
| 4362 ToRegister(instr->temp()), kDontSaveFPRegs); | 4362 ToRegister(instr->temp()), kDontSaveFPRegs); |
| 4363 } else { | 4363 } else { |
| 4364 ASSERT(ToRegister(instr->context()).is(rsi)); | 4364 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 4365 PushSafepointRegistersScope scope(this); | 4365 PushSafepointRegistersScope scope(this); |
| 4366 if (!object_reg.is(rax)) { | 4366 if (!object_reg.is(rax)) { |
| 4367 __ movq(rax, object_reg); | 4367 __ movp(rax, object_reg); |
| 4368 } | 4368 } |
| 4369 __ Move(rbx, to_map); | 4369 __ Move(rbx, to_map); |
| 4370 TransitionElementsKindStub stub(from_kind, to_kind); | 4370 TransitionElementsKindStub stub(from_kind, to_kind); |
| 4371 __ CallStub(&stub); | 4371 __ CallStub(&stub); |
| 4372 RecordSafepointWithRegisters( | 4372 RecordSafepointWithRegisters( |
| 4373 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4373 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4374 } | 4374 } |
| 4375 __ bind(¬_applicable); | 4375 __ bind(¬_applicable); |
| 4376 } | 4376 } |
| 4377 | 4377 |
| (...skipping 98 matching lines...) |
| 4476 | 4476 |
| 4477 ASSERT(instr->hydrogen()->value()->representation().IsInteger32()); | 4477 ASSERT(instr->hydrogen()->value()->representation().IsInteger32()); |
| 4478 Register char_code = ToRegister(instr->char_code()); | 4478 Register char_code = ToRegister(instr->char_code()); |
| 4479 Register result = ToRegister(instr->result()); | 4479 Register result = ToRegister(instr->result()); |
| 4480 ASSERT(!char_code.is(result)); | 4480 ASSERT(!char_code.is(result)); |
| 4481 | 4481 |
| 4482 __ cmpl(char_code, Immediate(String::kMaxOneByteCharCode)); | 4482 __ cmpl(char_code, Immediate(String::kMaxOneByteCharCode)); |
| 4483 __ j(above, deferred->entry()); | 4483 __ j(above, deferred->entry()); |
| 4484 __ movsxlq(char_code, char_code); | 4484 __ movsxlq(char_code, char_code); |
| 4485 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex); | 4485 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex); |
| 4486 __ movq(result, FieldOperand(result, | 4486 __ movp(result, FieldOperand(result, |
| 4487 char_code, times_pointer_size, | 4487 char_code, times_pointer_size, |
| 4488 FixedArray::kHeaderSize)); | 4488 FixedArray::kHeaderSize)); |
| 4489 __ CompareRoot(result, Heap::kUndefinedValueRootIndex); | 4489 __ CompareRoot(result, Heap::kUndefinedValueRootIndex); |
| 4490 __ j(equal, deferred->entry()); | 4490 __ j(equal, deferred->entry()); |
| 4491 __ bind(deferred->exit()); | 4491 __ bind(deferred->exit()); |
| 4492 } | 4492 } |
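
What the fast path above implements, as hedged pseudocode (the accessor spelling is illustrative, not the V8 API):

    // One-byte char codes go through a root cache array; an undefined entry
    // is a miss and falls through to the deferred runtime call.
    if (char_code <= String::kMaxOneByteCharCode &&
        single_character_string_cache->get(char_code) != undefined_value) {
      result = single_character_string_cache->get(char_code);  // cache hit
    } else {
      // deferred->entry(): allocate the string in the runtime instead
    }
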
| 4493 | 4493 |
| 4494 | 4494 |
| 4495 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { | 4495 void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) { |
| 4496 Register char_code = ToRegister(instr->char_code()); | 4496 Register char_code = ToRegister(instr->char_code()); |
| (...skipping 125 matching lines...) |
| 4622 // Put a valid pointer value in the stack slot where the result | 4622 // Put a valid pointer value in the stack slot where the result |
| 4623 // register is stored, as this register is in the pointer map, but contains an | 4623 // register is stored, as this register is in the pointer map, but contains an |
| 4624 // integer value. | 4624 // integer value. |
| 4625 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); | 4625 __ StoreToSafepointRegisterSlot(reg, Immediate(0)); |
| 4626 | 4626 |
| 4627 // NumberTagU uses the context from the frame, rather than | 4627 // NumberTagU uses the context from the frame, rather than |
| 4628 // the environment's HContext or HInlinedContext value. | 4628 // the environment's HContext or HInlinedContext value. |
| 4629 // It only calls Runtime::kAllocateHeapNumber. | 4629 // It only calls Runtime::kAllocateHeapNumber. |
| 4630 // The corresponding HChange instructions are added in a phase that does | 4630 // The corresponding HChange instructions are added in a phase that does |
| 4631 // not have easy access to the local context. | 4631 // not have easy access to the local context. |
| 4632 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 4632 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4633 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 4633 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 4634 RecordSafepointWithRegisters( | 4634 RecordSafepointWithRegisters( |
| 4635 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4635 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4636 | 4636 |
| 4637 if (!reg.is(rax)) __ movq(reg, rax); | 4637 if (!reg.is(rax)) __ movp(reg, rax); |
| 4638 | 4638 |
| 4639 // Done. Store the value in temp_xmm into the allocated heap | 4639 // Done. Store the value in temp_xmm into the allocated heap |
| 4640 // number. | 4640 // number. |
| 4641 __ bind(&done); | 4641 __ bind(&done); |
| 4642 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); | 4642 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm); |
| 4643 __ StoreToSafepointRegisterSlot(reg, reg); | 4643 __ StoreToSafepointRegisterSlot(reg, reg); |
| 4644 } | 4644 } |
| 4645 | 4645 |
| 4646 | 4646 |
| 4647 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { | 4647 void LCodeGen::DoNumberTagD(LNumberTagD* instr) { |
| (...skipping 31 matching lines...) |
| 4679 Register reg = ToRegister(instr->result()); | 4679 Register reg = ToRegister(instr->result()); |
| 4680 __ Move(reg, Smi::FromInt(0)); | 4680 __ Move(reg, Smi::FromInt(0)); |
| 4681 | 4681 |
| 4682 { | 4682 { |
| 4683 PushSafepointRegistersScope scope(this); | 4683 PushSafepointRegistersScope scope(this); |
| 4684 // NumberTagD uses the context from the frame, rather than | 4684 // NumberTagD uses the context from the frame, rather than |
| 4685 // the environment's HContext or HInlinedContext value. | 4685 // the environment's HContext or HInlinedContext value. |
| 4686 // They only call Runtime::kAllocateHeapNumber. | 4686 // They only call Runtime::kAllocateHeapNumber. |
| 4687 // The corresponding HChange instructions are added in a phase that does | 4687 // The corresponding HChange instructions are added in a phase that does |
| 4688 // not have easy access to the local context. | 4688 // not have easy access to the local context. |
| 4689 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 4689 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 4690 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 4690 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 4691 RecordSafepointWithRegisters( | 4691 RecordSafepointWithRegisters( |
| 4692 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4692 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
| 4693 __ movq(kScratchRegister, rax); | 4693 __ movp(kScratchRegister, rax); |
| 4694 } | 4694 } |
| 4695 __ movq(reg, kScratchRegister); | 4695 __ movp(reg, kScratchRegister); |
| 4696 } | 4696 } |
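
The detour through kScratchRegister is deliberate: when the PushSafepointRegistersScope closes, it restores every safepoint register, which would clobber a result left in rax, while kScratchRegister (r10 on x64) is outside the saved set. Annotated, as a reading of the code above rather than new behavior:

    {
      PushSafepointRegistersScope scope(this);  // pushes rax and friends
      // ... runtime call leaves the heap number in rax ...
      __ movp(kScratchRegister, rax);           // park it in r10
    }                                           // scope exit pops/restores rax
    __ movp(reg, kScratchRegister);             // now move it home
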
| 4697 | 4697 |
| 4698 | 4698 |
| 4699 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4699 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
| 4700 ASSERT(instr->value()->Equals(instr->result())); | 4700 ASSERT(instr->value()->Equals(instr->result())); |
| 4701 Register input = ToRegister(instr->value()); | 4701 Register input = ToRegister(instr->value()); |
| 4702 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); | 4702 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow)); |
| 4703 __ Integer32ToSmi(input, input); | 4703 __ Integer32ToSmi(input, input); |
| 4704 } | 4704 } |
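
DoSmiTag can use a bare Integer32ToSmi with no overflow check because an x64 smi keeps its 32-bit payload in the upper half of the word; this is also the layout DoStoreNamedField's STATIC_ASSERTs exploit when writing an int32 directly to the upper half. A sketch of the encoding:

    // x64 smi layout, high to low: [32-bit payload][31 zero bits][tag bit 0]
    inline int64_t SmiTag(int32_t value) {
      return static_cast<int64_t>(value) << 32;  // cannot overflow
    }
    inline int32_t SmiUntag(int64_t smi) {
      return static_cast<int32_t>(smi >> 32);    // arithmetic shift right
    }
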
| 4705 | 4705 |
| (...skipping 228 matching lines...) |
| 4934 LOperand* input = instr->value(); | 4934 LOperand* input = instr->value(); |
| 4935 Condition cc = masm()->CheckSmi(ToRegister(input)); | 4935 Condition cc = masm()->CheckSmi(ToRegister(input)); |
| 4936 DeoptimizeIf(cc, instr->environment()); | 4936 DeoptimizeIf(cc, instr->environment()); |
| 4937 } | 4937 } |
| 4938 } | 4938 } |
| 4939 | 4939 |
| 4940 | 4940 |
| 4941 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { | 4941 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { |
| 4942 Register input = ToRegister(instr->value()); | 4942 Register input = ToRegister(instr->value()); |
| 4943 | 4943 |
| 4944 __ movq(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); | 4944 __ movp(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset)); |
| 4945 | 4945 |
| 4946 if (instr->hydrogen()->is_interval_check()) { | 4946 if (instr->hydrogen()->is_interval_check()) { |
| 4947 InstanceType first; | 4947 InstanceType first; |
| 4948 InstanceType last; | 4948 InstanceType last; |
| 4949 instr->hydrogen()->GetCheckInterval(&first, &last); | 4949 instr->hydrogen()->GetCheckInterval(&first, &last); |
| 4950 | 4950 |
| 4951 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), | 4951 __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset), |
| 4952 Immediate(static_cast<int8_t>(first))); | 4952 Immediate(static_cast<int8_t>(first))); |
| 4953 | 4953 |
| 4954 // If there is only one type in the interval, check for equality. | 4954 // If there is only one type in the interval, check for equality. |
| (...skipping 127 matching lines...) |
| 5082 | 5082 |
| 5083 // Check for heap number | 5083 // Check for heap number |
| 5084 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 5084 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 5085 factory()->heap_number_map()); | 5085 factory()->heap_number_map()); |
| 5086 __ j(equal, &heap_number, Label::kNear); | 5086 __ j(equal, &heap_number, Label::kNear); |
| 5087 | 5087 |
| 5088 // Check for undefined. Undefined is converted to zero for clamping | 5088 // Check for undefined. Undefined is converted to zero for clamping |
| 5089 // conversions. | 5089 // conversions. |
| 5090 __ Cmp(input_reg, factory()->undefined_value()); | 5090 __ Cmp(input_reg, factory()->undefined_value()); |
| 5091 DeoptimizeIf(not_equal, instr->environment()); | 5091 DeoptimizeIf(not_equal, instr->environment()); |
| 5092 __ movq(input_reg, Immediate(0)); | 5092 __ movp(input_reg, Immediate(0)); |
| 5093 __ jmp(&done, Label::kNear); | 5093 __ jmp(&done, Label::kNear); |
| 5094 | 5094 |
| 5095 // Heap number | 5095 // Heap number |
| 5096 __ bind(&heap_number); | 5096 __ bind(&heap_number); |
| 5097 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 5097 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 5098 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); | 5098 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); |
| 5099 __ jmp(&done, Label::kNear); | 5099 __ jmp(&done, Label::kNear); |
| 5100 | 5100 |
| 5101 // smi | 5101 // smi |
| 5102 __ bind(&is_smi); | 5102 __ bind(&is_smi); |
| (...skipping 114 matching lines...) |
| 5217 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { | 5217 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { |
| 5218 ASSERT(ToRegister(instr->context()).is(rsi)); | 5218 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5219 Label materialized; | 5219 Label materialized; |
| 5220 // Registers will be used as follows: | 5220 // Registers will be used as follows: |
| 5221 // rcx = literals array. | 5221 // rcx = literals array. |
| 5222 // rbx = regexp literal. | 5222 // rbx = regexp literal. |
| 5223 // rax = regexp literal clone. | 5223 // rax = regexp literal clone. |
| 5224 int literal_offset = | 5224 int literal_offset = |
| 5225 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); | 5225 FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index()); |
| 5226 __ Move(rcx, instr->hydrogen()->literals()); | 5226 __ Move(rcx, instr->hydrogen()->literals()); |
| 5227 __ movq(rbx, FieldOperand(rcx, literal_offset)); | 5227 __ movp(rbx, FieldOperand(rcx, literal_offset)); |
| 5228 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 5228 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 5229 __ j(not_equal, &materialized, Label::kNear); | 5229 __ j(not_equal, &materialized, Label::kNear); |
| 5230 | 5230 |
| 5231 // Create regexp literal using runtime function | 5231 // Create regexp literal using runtime function |
| 5232 // Result will be in rax. | 5232 // Result will be in rax. |
| 5233 __ push(rcx); | 5233 __ push(rcx); |
| 5234 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); | 5234 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); |
| 5235 __ Push(instr->hydrogen()->pattern()); | 5235 __ Push(instr->hydrogen()->pattern()); |
| 5236 __ Push(instr->hydrogen()->flags()); | 5236 __ Push(instr->hydrogen()->flags()); |
| 5237 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 5237 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); |
| 5238 __ movq(rbx, rax); | 5238 __ movp(rbx, rax); |
| 5239 | 5239 |
| 5240 __ bind(&materialized); | 5240 __ bind(&materialized); |
| 5241 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 5241 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 5242 Label allocated, runtime_allocate; | 5242 Label allocated, runtime_allocate; |
| 5243 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); | 5243 __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT); |
| 5244 __ jmp(&allocated, Label::kNear); | 5244 __ jmp(&allocated, Label::kNear); |
| 5245 | 5245 |
| 5246 __ bind(&runtime_allocate); | 5246 __ bind(&runtime_allocate); |
| 5247 __ push(rbx); | 5247 __ push(rbx); |
| 5248 __ Push(Smi::FromInt(size)); | 5248 __ Push(Smi::FromInt(size)); |
| 5249 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5249 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
| 5250 __ pop(rbx); | 5250 __ pop(rbx); |
| 5251 | 5251 |
| 5252 __ bind(&allocated); | 5252 __ bind(&allocated); |
| 5253 // Copy the content into the newly allocated memory. | 5253 // Copy the content into the newly allocated memory. |
| 5254 // (Unroll copy loop once for better throughput). | 5254 // (Unroll copy loop once for better throughput). |
| 5255 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { | 5255 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { |
| 5256 __ movq(rdx, FieldOperand(rbx, i)); | 5256 __ movp(rdx, FieldOperand(rbx, i)); |
| 5257 __ movq(rcx, FieldOperand(rbx, i + kPointerSize)); | 5257 __ movp(rcx, FieldOperand(rbx, i + kPointerSize)); |
| 5258 __ movq(FieldOperand(rax, i), rdx); | 5258 __ movp(FieldOperand(rax, i), rdx); |
| 5259 __ movq(FieldOperand(rax, i + kPointerSize), rcx); | 5259 __ movp(FieldOperand(rax, i + kPointerSize), rcx); |
| 5260 } | 5260 } |
| 5261 if ((size % (2 * kPointerSize)) != 0) { | 5261 if ((size % (2 * kPointerSize)) != 0) { |
| 5262 __ movq(rdx, FieldOperand(rbx, size - kPointerSize)); | 5262 __ movp(rdx, FieldOperand(rbx, size - kPointerSize)); |
| 5263 __ movq(FieldOperand(rax, size - kPointerSize), rdx); | 5263 __ movp(FieldOperand(rax, size - kPointerSize), rdx); |
| 5264 } | 5264 } |
| 5265 } | 5265 } |
| 5266 | 5266 |
| 5267 | 5267 |
| 5268 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 5268 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 5269 ASSERT(ToRegister(instr->context()).is(rsi)); | 5269 ASSERT(ToRegister(instr->context()).is(rsi)); |
| 5270 // Use the fast case closure allocation code that allocates in new | 5270 // Use the fast case closure allocation code that allocates in new |
| 5271 // space for nested functions that don't need literals cloning. | 5271 // space for nested functions that don't need literals cloning. |
| 5272 bool pretenure = instr->hydrogen()->pretenure(); | 5272 bool pretenure = instr->hydrogen()->pretenure(); |
| 5273 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5273 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
| (...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5355 | 5355 |
| 5356 } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { | 5356 } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_string())) { |
| 5357 __ CompareRoot(input, Heap::kNullValueRootIndex); | 5357 __ CompareRoot(input, Heap::kNullValueRootIndex); |
| 5358 final_branch_condition = equal; | 5358 final_branch_condition = equal; |
| 5359 | 5359 |
| 5360 } else if (type_name->Equals(heap()->undefined_string())) { | 5360 } else if (type_name->Equals(heap()->undefined_string())) { |
| 5361 __ CompareRoot(input, Heap::kUndefinedValueRootIndex); | 5361 __ CompareRoot(input, Heap::kUndefinedValueRootIndex); |
| 5362 __ j(equal, true_label, true_distance); | 5362 __ j(equal, true_label, true_distance); |
| 5363 __ JumpIfSmi(input, false_label, false_distance); | 5363 __ JumpIfSmi(input, false_label, false_distance); |
| 5364 // Check for undetectable objects => true. | 5364 // Check for undetectable objects => true. |
| 5365 __ movq(input, FieldOperand(input, HeapObject::kMapOffset)); | 5365 __ movp(input, FieldOperand(input, HeapObject::kMapOffset)); |
| 5366 __ testb(FieldOperand(input, Map::kBitFieldOffset), | 5366 __ testb(FieldOperand(input, Map::kBitFieldOffset), |
| 5367 Immediate(1 << Map::kIsUndetectable)); | 5367 Immediate(1 << Map::kIsUndetectable)); |
| 5368 final_branch_condition = not_zero; | 5368 final_branch_condition = not_zero; |
| 5369 | 5369 |
| 5370 } else if (type_name->Equals(heap()->function_string())) { | 5370 } else if (type_name->Equals(heap()->function_string())) { |
| 5371 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | 5371 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); |
| 5372 __ JumpIfSmi(input, false_label, false_distance); | 5372 __ JumpIfSmi(input, false_label, false_distance); |
| 5373 __ CmpObjectType(input, JS_FUNCTION_TYPE, input); | 5373 __ CmpObjectType(input, JS_FUNCTION_TYPE, input); |
| 5374 __ j(equal, true_label, true_distance); | 5374 __ j(equal, true_label, true_distance); |
| 5375 __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE); | 5375 __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE); |
| (...skipping 25 matching lines...) |
| 5401 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { | 5401 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { |
| 5402 Register temp = ToRegister(instr->temp()); | 5402 Register temp = ToRegister(instr->temp()); |
| 5403 | 5403 |
| 5404 EmitIsConstructCall(temp); | 5404 EmitIsConstructCall(temp); |
| 5405 EmitBranch(instr, equal); | 5405 EmitBranch(instr, equal); |
| 5406 } | 5406 } |
| 5407 | 5407 |
| 5408 | 5408 |
| 5409 void LCodeGen::EmitIsConstructCall(Register temp) { | 5409 void LCodeGen::EmitIsConstructCall(Register temp) { |
| 5410 // Get the frame pointer for the calling frame. | 5410 // Get the frame pointer for the calling frame. |
| 5411 __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); | 5411 __ movp(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 5412 | 5412 |
| 5413 // Skip the arguments adaptor frame if it exists. | 5413 // Skip the arguments adaptor frame if it exists. |
| 5414 Label check_frame_marker; | 5414 Label check_frame_marker; |
| 5415 __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset), | 5415 __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset), |
| 5416 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); | 5416 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 5417 __ j(not_equal, &check_frame_marker, Label::kNear); | 5417 __ j(not_equal, &check_frame_marker, Label::kNear); |
| 5418 __ movq(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); | 5418 __ movp(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset)); |
| 5419 | 5419 |
| 5420 // Check the marker in the calling frame. | 5420 // Check the marker in the calling frame. |
| 5421 __ bind(&check_frame_marker); | 5421 __ bind(&check_frame_marker); |
| 5422 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), | 5422 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), |
| 5423 Smi::FromInt(StackFrame::CONSTRUCT)); | 5423 Smi::FromInt(StackFrame::CONSTRUCT)); |
| 5424 } | 5424 } |
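
The frame walk in EmitIsConstructCall, as hedged pseudocode (the slot/Memory helpers are illustrative):

    // Go one frame up, skip an arguments-adaptor frame if there is one,
    // then compare the frame marker; EmitBranch tests the flags with equal.
    Address fp = Memory::Address_at(rbp + kCallerFPOffset);
    if (slot(fp, kContextOffset) == Smi(ARGUMENTS_ADAPTOR)) {
      fp = Memory::Address_at(fp + kCallerFPOffset);
    }
    bool is_construct_call = slot(fp, kMarkerOffset) == Smi(CONSTRUCT);
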
| 5425 | 5425 |
| 5426 | 5426 |
| 5427 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { | 5427 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) { |
| 5428 if (!info()->IsStub()) { | 5428 if (!info()->IsStub()) { |
| (...skipping 38 matching lines...) |
| 5467 } | 5467 } |
| 5468 | 5468 |
| 5469 | 5469 |
| 5470 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5470 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
| 5471 // Nothing to see here, move on! | 5471 // Nothing to see here, move on! |
| 5472 } | 5472 } |
| 5473 | 5473 |
| 5474 | 5474 |
| 5475 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5475 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
| 5476 PushSafepointRegistersScope scope(this); | 5476 PushSafepointRegistersScope scope(this); |
| 5477 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 5477 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 5478 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 5478 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); |
| 5479 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); | 5479 RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0); |
| 5480 ASSERT(instr->HasEnvironment()); | 5480 ASSERT(instr->HasEnvironment()); |
| 5481 LEnvironment* env = instr->environment(); | 5481 LEnvironment* env = instr->environment(); |
| 5482 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5482 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5483 } | 5483 } |
| 5484 | 5484 |
| 5485 | 5485 |
| 5486 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 5486 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
| 5487 class DeferredStackCheck V8_FINAL : public LDeferredCode { | 5487 class DeferredStackCheck V8_FINAL : public LDeferredCode { |
| (...skipping 73 matching lines...) |
| 5561 Condition cc = masm()->CheckSmi(rax); | 5561 Condition cc = masm()->CheckSmi(rax); |
| 5562 DeoptimizeIf(cc, instr->environment()); | 5562 DeoptimizeIf(cc, instr->environment()); |
| 5563 | 5563 |
| 5564 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 5564 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
| 5565 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx); | 5565 __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx); |
| 5566 DeoptimizeIf(below_equal, instr->environment()); | 5566 DeoptimizeIf(below_equal, instr->environment()); |
| 5567 | 5567 |
| 5568 Label use_cache, call_runtime; | 5568 Label use_cache, call_runtime; |
| 5569 __ CheckEnumCache(null_value, &call_runtime); | 5569 __ CheckEnumCache(null_value, &call_runtime); |
| 5570 | 5570 |
| 5571 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); | 5571 __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset)); |
| 5572 __ jmp(&use_cache, Label::kNear); | 5572 __ jmp(&use_cache, Label::kNear); |
| 5573 | 5573 |
| 5574 // Get the set of properties to enumerate. | 5574 // Get the set of properties to enumerate. |
| 5575 __ bind(&call_runtime); | 5575 __ bind(&call_runtime); |
| 5576 __ push(rax); | 5576 __ push(rax); |
| 5577 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); | 5577 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr); |
| 5578 | 5578 |
| 5579 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | 5579 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
| 5580 Heap::kMetaMapRootIndex); | 5580 Heap::kMetaMapRootIndex); |
| 5581 DeoptimizeIf(not_equal, instr->environment()); | 5581 DeoptimizeIf(not_equal, instr->environment()); |
| 5582 __ bind(&use_cache); | 5582 __ bind(&use_cache); |
| 5583 } | 5583 } |
| 5584 | 5584 |
| 5585 | 5585 |
| 5586 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { | 5586 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) { |
| 5587 Register map = ToRegister(instr->map()); | 5587 Register map = ToRegister(instr->map()); |
| 5588 Register result = ToRegister(instr->result()); | 5588 Register result = ToRegister(instr->result()); |
| 5589 Label load_cache, done; | 5589 Label load_cache, done; |
| 5590 __ EnumLength(result, map); | 5590 __ EnumLength(result, map); |
| 5591 __ Cmp(result, Smi::FromInt(0)); | 5591 __ Cmp(result, Smi::FromInt(0)); |
| 5592 __ j(not_equal, &load_cache, Label::kNear); | 5592 __ j(not_equal, &load_cache, Label::kNear); |
| 5593 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex); | 5593 __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex); |
| 5594 __ jmp(&done, Label::kNear); | 5594 __ jmp(&done, Label::kNear); |
| 5595 __ bind(&load_cache); | 5595 __ bind(&load_cache); |
| 5596 __ LoadInstanceDescriptors(map, result); | 5596 __ LoadInstanceDescriptors(map, result); |
| 5597 __ movq(result, | 5597 __ movp(result, |
| 5598 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); | 5598 FieldOperand(result, DescriptorArray::kEnumCacheOffset)); |
| 5599 __ movq(result, | 5599 __ movp(result, |
| 5600 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); | 5600 FieldOperand(result, FixedArray::SizeFor(instr->idx()))); |
| 5601 __ bind(&done); | 5601 __ bind(&done); |
| 5602 Condition cc = masm()->CheckSmi(result); | 5602 Condition cc = masm()->CheckSmi(result); |
| 5603 DeoptimizeIf(cc, instr->environment()); | 5603 DeoptimizeIf(cc, instr->environment()); |
| 5604 } | 5604 } |
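
A hedged summary of the enum-cache lookup (accessor names illustrative):

    // EnumLength == 0 means no enumerable own properties, so the canonical
    // empty FixedArray is used; otherwise the bridge array comes from the
    // descriptor array's enum cache. The trailing CheckSmi deoptimizes if
    // the loaded entry is not a heap object.
    result = (EnumLength(map) == 0)
        ? heap->empty_fixed_array()
        : descriptors(map)->enum_cache()->get(instr->idx());
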
| 5605 | 5605 |
| 5606 | 5606 |
| 5607 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { | 5607 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) { |
| 5608 Register object = ToRegister(instr->value()); | 5608 Register object = ToRegister(instr->value()); |
| 5609 __ cmpq(ToRegister(instr->map()), | 5609 __ cmpq(ToRegister(instr->map()), |
| 5610 FieldOperand(object, HeapObject::kMapOffset)); | 5610 FieldOperand(object, HeapObject::kMapOffset)); |
| 5611 DeoptimizeIf(not_equal, instr->environment()); | 5611 DeoptimizeIf(not_equal, instr->environment()); |
| 5612 } | 5612 } |
| 5613 | 5613 |
| 5614 | 5614 |
| 5615 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { | 5615 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) { |
| 5616 Register object = ToRegister(instr->object()); | 5616 Register object = ToRegister(instr->object()); |
| 5617 Register index = ToRegister(instr->index()); | 5617 Register index = ToRegister(instr->index()); |
| 5618 | 5618 |
| 5619 Label out_of_object, done; | 5619 Label out_of_object, done; |
| 5620 __ SmiToInteger32(index, index); | 5620 __ SmiToInteger32(index, index); |
| 5621 __ cmpl(index, Immediate(0)); | 5621 __ cmpl(index, Immediate(0)); |
| 5622 __ j(less, &out_of_object, Label::kNear); | 5622 __ j(less, &out_of_object, Label::kNear); |
| 5623 __ movq(object, FieldOperand(object, | 5623 __ movp(object, FieldOperand(object, |
| 5624 index, | 5624 index, |
| 5625 times_pointer_size, | 5625 times_pointer_size, |
| 5626 JSObject::kHeaderSize)); | 5626 JSObject::kHeaderSize)); |
| 5627 __ jmp(&done, Label::kNear); | 5627 __ jmp(&done, Label::kNear); |
| 5628 | 5628 |
| 5629 __ bind(&out_of_object); | 5629 __ bind(&out_of_object); |
| 5630 __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset)); | 5630 __ movp(object, FieldOperand(object, JSObject::kPropertiesOffset)); |
| 5631 __ negl(index); | 5631 __ negl(index); |
| 5632 // Index is now equal to the out-of-object property index plus 1. | 5632 // Index is now equal to the out-of-object property index plus 1. |
| 5633 __ movq(object, FieldOperand(object, | 5633 __ movp(object, FieldOperand(object, |
| 5634 index, | 5634 index, |
| 5635 times_pointer_size, | 5635 times_pointer_size, |
| 5636 FixedArray::kHeaderSize - kPointerSize)); | 5636 FixedArray::kHeaderSize - kPointerSize)); |
| 5637 __ bind(&done); | 5637 __ bind(&done); |
| 5638 } | 5638 } |
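
A hedged sketch of the index encoding DoLoadFieldByIndex relies on: the smi index is signed, non-negative for in-object fields and negative (biased by one) for the out-of-object properties backing store:

    if (index >= 0) {
      // In-object: the field lives directly inside the JSObject body.
      value = *(object_base + JSObject::kHeaderSize + index * kPointerSize);
    } else {
      // Out-of-object: in the properties FixedArray, undoing the +1 bias,
      // which is what the negl plus the kHeaderSize - kPointerSize
      // displacement implement.
      value = properties->get(-index - 1);
    }
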
| 5639 | 5639 |
| 5640 | 5640 |
| 5641 #undef __ | 5641 #undef __ |
| 5642 | 5642 |
| 5643 } } // namespace v8::internal | 5643 } } // namespace v8::internal |
| 5644 | 5644 |
| 5645 #endif // V8_TARGET_ARCH_X64 | 5645 #endif // V8_TARGET_ARCH_X64 |