| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 73 matching lines...) |
| 84 __ push(rdx); | 84 __ push(rdx); |
| 85 __ Push(Factory::false_value()); | 85 __ Push(Factory::false_value()); |
| 86 __ push(rcx); // Restore return address. | 86 __ push(rcx); // Restore return address. |
| 87 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 87 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
| 88 } | 88 } |
| 89 | 89 |
| 90 | 90 |
| 91 void FastNewContextStub::Generate(MacroAssembler* masm) { | 91 void FastNewContextStub::Generate(MacroAssembler* masm) { |
| 92 // Try to allocate the context in new space. | 92 // Try to allocate the context in new space. |
| 93 Label gc; | 93 Label gc; |
| 94 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 94 __ AllocateInNewSpace((slots_ * kPointerSize) + FixedArray::kHeaderSize, |
| 95 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, | |
| 96 rax, rbx, rcx, &gc, TAG_OBJECT); | 95 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 97 | 96 |
| 98 // Get the function from the stack. | 97 // Get the function from the stack. |
| 99 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 98 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
| 100 | 99 |
| 101 // Set up the object header. | 100 // Set up the object header. |
| 102 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex); | 101 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex); |
| 103 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 102 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
| 104 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 103 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(slots_)); |
| 105 | 104 |
| 106 // Set up the fixed slots. | 105 // Set up the fixed slots. |
| 107 __ Set(rbx, 0); // Set to NULL. | 106 __ Set(rbx, 0); // Set to NULL. |
| 108 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); | 107 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); |
| 109 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax); | 108 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax); |
| 110 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx); | 109 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx); |
| 111 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); | 110 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); |
| 112 | 111 |
| 113 // Copy the global object from the surrounding context. | 112 // Copy the global object from the surrounding context. |
| 114 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 113 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 115 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx); | 114 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx); |
| 116 | 115 |
| 117 // Initialize the rest of the slots to undefined. | 116 // Initialize the rest of the slots to undefined. |
| 118 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); | 117 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 119 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | 118 for (int i = Context::MIN_CONTEXT_SLOTS; i < slots_; i++) { |
| 120 __ movq(Operand(rax, Context::SlotOffset(i)), rbx); | 119 __ movq(Operand(rax, Context::SlotOffset(i)), rbx); |
| 121 } | 120 } |
| 122 | 121 |
| 123 // Return and remove the on-stack parameter. | 122 // Return and remove the on-stack parameter. |
| 124 __ movq(rsi, rax); | 123 __ movq(rsi, rax); |
| 125 __ ret(1 * kPointerSize); | 124 __ ret(1 * kPointerSize); |
| 126 | 125 |
| 127 // Need to collect. Call into runtime system. | 126 // Need to collect. Call into runtime system. |
| 128 __ bind(&gc); | 127 __ bind(&gc); |
| 129 __ TailCallRuntime(Runtime::kNewContext, 1, 1); | 128 __ TailCallRuntime(Runtime::kNewContext, 1, 1); |
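Note: the hunk above only stays correct if slots_ now includes Context::MIN_CONTEXT_SLOTS, since both the AllocateInNewSpace size and the stored length use slots_ directly where the old code added MIN_CONTEXT_SLOTS itself. A minimal sketch of the size arithmetic, under assumed x64 constants (kPointerSize == 8, FixedArray::kHeaderSize == 2 * kPointerSize for map plus length, MIN_CONTEXT_SLOTS == 5 for the five fixed slots written above); the standalone names are illustrative, not from this CL:

    #include <cassert>

    const int kPointerSize = 8;                          // x64 (assumption)
    const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length (assumption)
    const int kMinContextSlots = 5;                      // CLOSURE..GLOBAL (assumption)

    // Mirrors (slots_ * kPointerSize) + FixedArray::kHeaderSize in the new code.
    int ContextSizeInBytes(int slots) {
      return slots * kPointerSize + kFixedArrayHeaderSize;
    }

    int main() {
      // A context with 3 locals: slots_ == kMinContextSlots + 3 == 8.
      assert(ContextSizeInBytes(8) == 80);
      return 0;
    }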
| (...skipping 3111 matching lines...) |
| 3241 // We have to store a non-zero value in the cache. | 3240 // We have to store a non-zero value in the cache. |
| 3242 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 3241 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
| 3243 __ ret(2 * kPointerSize); | 3242 __ ret(2 * kPointerSize); |
| 3244 | 3243 |
| 3245 // Slow-case: Go through the JavaScript implementation. | 3244 // Slow-case: Go through the JavaScript implementation. |
| 3246 __ bind(&slow); | 3245 __ bind(&slow); |
| 3247 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 3246 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
| 3248 } | 3247 } |
| 3249 | 3248 |
| 3250 | 3249 |
| 3250 Register InstanceofStub::left() { return rax; } |
| 3251 |
| 3252 |
| 3253 Register InstanceofStub::right() { return rdx; } |
| 3254 |
| 3255 |
| 3251 int CompareStub::MinorKey() { | 3256 int CompareStub::MinorKey() { |
| 3252 // Encode the three parameters in a unique 16-bit value. To avoid duplicate | 3257 // Encode the three parameters in a unique 16-bit value. To avoid duplicate |
| 3253 // stubs the never-NaN-NaN condition is only taken into account if the | 3258 // stubs the never-NaN-NaN condition is only taken into account if the |
| 3254 // condition is equal. | 3259 // condition is equal. |
| 3255 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); | 3260 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); |
| 3256 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | 3261 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); |
| 3257 return ConditionField::encode(static_cast<unsigned>(cc_)) | 3262 return ConditionField::encode(static_cast<unsigned>(cc_)) |
| 3258 | RegisterField::encode(false) // lhs_ and rhs_ are not used | 3263 | RegisterField::encode(false) // lhs_ and rhs_ are not used |
| 3259 | StrictField::encode(strict_) | 3264 | StrictField::encode(strict_) |
| 3260 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) | 3265 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) |
| (...skipping 1004 matching lines...) |
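Note on the MinorKey hunk above: the stub parameters are packed into one 16-bit minor key via V8's BitField template, and the ASSERT guarantees the condition fits the 12 bits reserved for it. A miniature of that packing, with assumed field positions (the real layout comes from the ConditionField/RegisterField/StrictField/NeverNanNanField declarations, which are outside this diff):

    // Shape of V8's BitField utility (reconstruction, not copied from source).
    template <typename T, int shift, int size>
    struct BitField {
      static const unsigned kMask = ((1u << size) - 1u) << shift;
      static unsigned encode(T value) { return static_cast<unsigned>(value) << shift; }
      static T decode(unsigned v) { return static_cast<T>((v & kMask) >> shift); }
    };

    typedef BitField<unsigned, 0, 12> ConditionField;  // assumed: bits 0-11
    typedef BitField<bool, 12, 1> RegisterField;       // assumed: bit 12
    typedef BitField<bool, 13, 1> StrictField;         // assumed: bit 13
    typedef BitField<bool, 14, 1> NeverNanNanField;    // assumed: bit 14

    unsigned MinorKey(unsigned cc, bool is_equal_cc, bool strict, bool never_nan_nan) {
      return ConditionField::encode(cc)         // cc < (1 << 12), per the ASSERT
           | RegisterField::encode(false)       // lhs_/rhs_ unused on x64
           | StrictField::encode(strict)
           | NeverNanNanField::encode(is_equal_cc ? never_nan_nan : false);
    }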
| 4265 __ push(rcx); | 4270 __ push(rcx); |
| 4266 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); | 4271 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); |
| 4267 | 4272 |
| 4268 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 4273 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 4269 // tagged as a small integer. | 4274 // tagged as a small integer. |
| 4270 __ bind(&runtime); | 4275 __ bind(&runtime); |
| 4271 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 4276 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 4272 } | 4277 } |
| 4273 | 4278 |
| 4274 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 4279 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| 4275 UNIMPLEMENTED(); | 4280 ASSERT(state_ == CompareIC::SMIS); |
| 4281 NearLabel miss; |
| 4282 __ JumpIfNotBothSmi(rdx, rax, &miss); |
| 4283 |
| 4284 if (GetCondition() == equal) { |
| 4285 // For equality we do not care about the sign of the result. |
| 4286 __ SmiSub(rax, rax, rdx); |
| 4287 } else { |
| 4288 NearLabel done; |
| 4289 __ SmiSub(rdx, rdx, rax); |
| 4290 __ j(no_overflow, &done); |
| 4291 // Correct sign of result in case of overflow. |
| 4292 __ SmiNot(rdx, rdx); |
| 4293 __ bind(&done); |
| 4294 __ movq(rax, rdx); |
| 4295 } |
| 4296 __ ret(0); |
| 4297 |
| 4298 __ bind(&miss); |
| 4299 GenerateMiss(masm); |
| 4276 } | 4300 } |
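Note: in the non-equality path above, rdx - rax has the wrong sign exactly when the subtraction overflows, and SmiNot flips the bits to restore it. A minimal sketch of that sign correction on plain 32-bit integers (hypothetical helper; the real stub works on 64-bit registers holding tagged smis, so the widths differ):

    #include <cstdint>

    // Mirrors the SmiSub / j(no_overflow) / SmiNot sequence: the result's
    // sign tracks x <=> y even when x - y overflows.
    int32_t CompareSmis(int32_t x, int32_t y) {
      int32_t diff;
      if (!__builtin_sub_overflow(x, y, &diff)) return diff;  // GCC/Clang builtin
      // On overflow the sign bit of diff is inverted; NOT flips it back.
      return ~diff;
    }
    // Example: x == INT32_MIN, y == 1. The wrapped difference is INT32_MAX
    // (positive, wrong); ~INT32_MAX == INT32_MIN (negative, correct).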
| 4277 | 4301 |
| 4278 | 4302 |
| 4279 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { | 4303 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { |
| 4280 UNIMPLEMENTED(); | 4304 ASSERT(state_ == CompareIC::HEAP_NUMBERS); |
| 4305 |
| 4306 NearLabel generic_stub; |
| 4307 NearLabel unordered; |
| 4308 NearLabel miss; |
| 4309 Condition either_smi = masm->CheckEitherSmi(rax, rdx); |
| 4310 __ j(either_smi, &generic_stub); |
| 4311 |
| 4312 __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx); |
| 4313 __ j(not_equal, &miss); |
| 4314 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); |
| 4315 __ j(not_equal, &miss); |
| 4316 |
| 4317 // Load the left and right operands. |
| 4318 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); |
| 4319 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 4320 |
| 4321 // Compare operands |
| 4322 __ ucomisd(xmm0, xmm1); |
| 4323 |
| 4324 // Don't base result on EFLAGS when a NaN is involved. |
| 4325 __ j(parity_even, &unordered); |
| 4326 |
| 4327 // Return a result of -1, 0, or 1, based on EFLAGS. |
| 4328 // Use mov, because xor would destroy the flag register. |
| 4329 __ movl(rax, Immediate(0)); |
| 4330 __ movl(rcx, Immediate(0)); |
| 4331 __ setcc(above, rax); // rax = 1 if above (carry and zero flags clear). |
| 4332 __ sbbq(rax, rcx); // Subtract one if below (i.e. carry set). |
| 4333 __ ret(0); |
| 4334 |
| 4335 __ bind(&unordered); |
| 4336 |
| 4337 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); |
| 4338 __ bind(&generic_stub); |
| 4339 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 4340 |
| 4341 __ bind(&miss); |
| 4342 GenerateMiss(masm); |
| 4281 } | 4343 } |
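Note: the movl/setcc/sbbq sequence above is the standard trick for turning the ucomisd flags into -1, 0, or 1 without an extra compare; mov (rather than xor) zeroes the registers precisely because it leaves EFLAGS intact. The arithmetic it performs, as a C++ sketch (left is the rdx operand loaded into xmm0, right the rax operand in xmm1; NaN never reaches this point because parity jumps to &unordered first):

    // After ucomisd(left, right): ZF == equal, CF == (left < right).
    int FlagsToResult(double left, double right) {
      int above = left > right;  // setcc(above, rax): 1 iff CF == 0 && ZF == 0
      int carry = left < right;  // CF as left by ucomisd
      return above - carry;      // sbbq(rax, rcx) with rcx == 0: rax -= CF
    }
    // left > right: 1 - 0 == 1;  equal: 0 - 0 == 0;  left < right: 0 - 1 == -1.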
| 4282 | 4344 |
| 4283 | 4345 |
| 4284 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 4346 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
| 4285 UNIMPLEMENTED(); | 4347 ASSERT(state_ == CompareIC::OBJECTS); |
| 4348 NearLabel miss; |
| 4349 Condition either_smi = masm->CheckEitherSmi(rdx, rax); |
| 4350 __ j(either_smi, &miss); |
| 4351 |
| 4352 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx); |
| 4353 __ j(not_equal, &miss, not_taken); |
| 4354 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); |
| 4355 __ j(not_equal, &miss, not_taken); |
| 4356 |
| 4357 ASSERT(GetCondition() == equal); |
| 4358 __ subq(rax, rdx); |
| 4359 __ ret(0); |
| 4360 |
| 4361 __ bind(&miss); |
| 4362 GenerateMiss(masm); |
| 4286 } | 4363 } |
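Note: since the stub asserts GetCondition() == equal, a raw pointer subtraction suffices: the result is zero exactly when rax and rdx refer to the same JSObject, and the caller only tests against zero. As a standalone sketch:

    #include <cstdint>

    // Identity comparison; only the zero/non-zero distinction is meaningful.
    intptr_t CompareObjects(const void* left, const void* right) {
      return reinterpret_cast<intptr_t>(left) - reinterpret_cast<intptr_t>(right);
    }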
| 4287 | 4364 |
| 4288 | 4365 |
| 4289 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 4366 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
| 4290 UNIMPLEMENTED(); | 4367 // Save the registers. |
| 4368 __ pop(rcx); |
| 4369 __ push(rdx); |
| 4370 __ push(rax); |
| 4371 __ push(rcx); |
| 4372 |
| 4373 // Call the runtime system in a fresh internal frame. |
| 4374 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss)); |
| 4375 __ EnterInternalFrame(); |
| 4376 __ push(rdx); |
| 4377 __ push(rax); |
| 4378 __ Push(Smi::FromInt(op_)); |
| 4379 __ CallExternalReference(miss, 3); |
| 4380 __ LeaveInternalFrame(); |
| 4381 |
| 4382 // Compute the entry point of the rewritten stub. |
| 4383 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); |
| 4384 |
| 4385 // Restore registers. |
| 4386 __ pop(rcx); |
| 4387 __ pop(rax); |
| 4388 __ pop(rdx); |
| 4389 __ push(rcx); |
| 4390 |
| 4391 // Do a tail call to the rewritten stub. |
| 4392 __ jmp(rdi); |
| 4291 } | 4393 } |
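Note: GenerateMiss is the inline-cache patching protocol: the operands are preserved across a runtime call that selects (and installs) a better stub, whose Code object comes back in rax; the lea folds Code::kHeaderSize minus the heap-object tag to reach the first instruction, and the final jmp runs the new stub as if it had been called directly. A hypothetical miniature of that protocol in C++ (names invented for illustration):

    #include <cstdio>

    typedef int (*CompareFn)(int, int);

    int SmiCompare(int a, int b) { return a - b; }

    // Stand-in for IC::kCompareIC_Miss: choose a specialized stub, patch it
    // into the call site, and return it so the caller can tail-call it.
    CompareFn CompareICMiss(int left, int right, CompareFn* site) {
      (void)left; (void)right;  // a real miss handler inspects operand types
      *site = SmiCompare;
      return SmiCompare;
    }

    CompareFn call_site = nullptr;  // the patchable "rewritten stub" slot

    int Compare(int left, int right) {
      if (call_site == nullptr) {
        CompareFn next = CompareICMiss(left, right, &call_site);
        return next(left, right);  // the "jmp rdi": run the new stub now
      }
      return call_site(left, right);  // later calls skip the miss path
    }

    int main() {
      printf("%d\n", Compare(3, 5));  // first call goes through the miss handler
      printf("%d\n", Compare(7, 2));  // subsequent calls hit the patched stub
      return 0;
    }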
| 4292 | 4394 |
| 4293 #undef __ | 4395 #undef __ |
| 4294 | 4396 |
| 4295 } } // namespace v8::internal | 4397 } } // namespace v8::internal |
| 4296 | 4398 |
| 4297 #endif // V8_TARGET_ARCH_X64 | 4399 #endif // V8_TARGET_ARCH_X64 |