Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3230 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 3241 // We have to store a non-zero value in the cache. | 3241 // We have to store a non-zero value in the cache. |
| 3242 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 3242 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
| 3243 __ ret(2 * kPointerSize); | 3243 __ ret(2 * kPointerSize); |
| 3244 | 3244 |
| 3245 // Slow-case: Go through the JavaScript implementation. | 3245 // Slow-case: Go through the JavaScript implementation. |
| 3246 __ bind(&slow); | 3246 __ bind(&slow); |
| 3247 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 3247 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
| 3248 } | 3248 } |
| 3249 | 3249 |
| 3250 | 3250 |
| 3251 Register InstanceofStub::left() { return rax; } | |
| 3252 | |
| 3253 | |
| 3254 Register InstanceofStub::right() { return rdx; } | |
| 3255 | |
| 3256 | |
| 3251 int CompareStub::MinorKey() { | 3257 int CompareStub::MinorKey() { |
| 3252 // Encode the three parameters in a unique 16 bit value. To avoid duplicate | 3258 // Encode the three parameters in a unique 16 bit value. To avoid duplicate |
| 3253 // stubs the never NaN NaN condition is only taken into account if the | 3259 // stubs the never NaN NaN condition is only taken into account if the |
| 3254 // condition is equals. | 3260 // condition is equals. |
| 3255 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); | 3261 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); |
| 3256 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | 3262 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); |
| 3257 return ConditionField::encode(static_cast<unsigned>(cc_)) | 3263 return ConditionField::encode(static_cast<unsigned>(cc_)) |
| 3258 | RegisterField::encode(false) // lhs_ and rhs_ are not used | 3264 | RegisterField::encode(false) // lhs_ and rhs_ are not used |
| 3259 | StrictField::encode(strict_) | 3265 | StrictField::encode(strict_) |
| 3260 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) | 3266 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) |
| (...skipping 1004 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 4265 __ push(rcx); | 4271 __ push(rcx); |
| 4266 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); | 4272 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); |
| 4267 | 4273 |
| 4268 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 4274 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 4269 // tagged as a small integer. | 4275 // tagged as a small integer. |
| 4270 __ bind(&runtime); | 4276 __ bind(&runtime); |
| 4271 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 4277 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 4272 } | 4278 } |
| 4273 | 4279 |
| 4274 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 4280 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| 4275 UNIMPLEMENTED(); | 4281 ASSERT(state_ == CompareIC::SMIS); |
| 4282 NearLabel miss; | |
| 4283 __ JumpIfNotBothSmi(rdx, rax, &miss); | |
| 4284 | |
| 4285 if (GetCondition() == equal) { | |
| 4286 // For equality we do not care about the sign of the result. | |
| 4287 __ SmiSub(rax, rax, rdx); | |
| 4288 } else { | |
| 4289 NearLabel done; | |
| 4290 __ SmiSub(rdx, rdx, rax); | |
| 4291 __ j(no_overflow, &done); | |
| 4292 // Correct sign of result in case of overflow. | |
| 4293 __ SmiNot(rdx, rdx); | |
| 4294 __ bind(&done); | |
| 4295 __ movq(rax, rdx); | |
| 4296 } | |
| 4297 __ ret(0); | |
| 4298 | |
| 4299 __ bind(&miss); | |
| 4300 GenerateMiss(masm); | |
| 4276 } | 4301 } |
| 4277 | 4302 |
| 4278 | 4303 |
| 4279 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { | 4304 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { |
| 4280 UNIMPLEMENTED(); | 4305 ASSERT(state_ == CompareIC::HEAP_NUMBERS); |
| 4306 | |
| 4307 NearLabel generic_stub; | |
| 4308 NearLabel unordered; | |
| 4309 NearLabel miss; | |
| 4310 Condition either_smi = masm->CheckEitherSmi(rax, rdx); | |
| 4311 __ j(either_smi, &generic_stub); | |
| 4312 | |
| 4313 __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx); | |
| 4314 __ j(not_equal, &miss); | |
| 4315 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); | |
| 4316 __ j(not_equal, &miss); | |
| 4317 | |
| 4318 // Load left and right operand | |
| 4319 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); | |
| 4320 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | |
| 4321 | |
| 4322 // Compare operands | |
| 4323 __ ucomisd(xmm0, xmm1); | |
| 4324 | |
| 4325 // Don't base result on EFLAGS when a NaN is involved. | |
| 4326 __ j(parity_even, &unordered); | |
| 4327 | |
| 4328 // Return a result of -1, 0, or 1, based on EFLAGS. | |
| 4329 // Performing mov, because xor would destroy the flag register. | |
| 4330 __ movl(rax, Immediate(0)); | |
| 4331 __ movl(rcx, Immediate(0)); | |
| 4332 __ setcc(above, rax); // Add one to zero if carry clear and not equal. | |
| 4333 __ sbbl(rax, rcx); // Subtract one if below (aka. carry set). | |
|
William Hesse (2011/01/18 14:01:50): Should be a 64-bit subtraction, to return a 64-bit result.
Lasse Reichstein (2011/01/19 09:20:35): Done. Well spotted.
| |
| 4334 __ ret(0); | |
| 4335 | |
| 4336 __ bind(&unordered); | |
| 4337 | |
| 4338 CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS); | |
| 4339 __ bind(&generic_stub); | |
| 4340 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 4341 | |
| 4342 __ bind(&miss); | |
| 4343 GenerateMiss(masm); | |
| 4281 } | 4344 } |
| 4282 | 4345 |
| 4283 | 4346 |
| 4284 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 4347 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
| 4285 UNIMPLEMENTED(); | 4348 ASSERT(state_ == CompareIC::OBJECTS); |
| 4349 NearLabel miss; | |
| 4350 Condition either_smi = masm->CheckEitherSmi(rdx, rax); | |
| 4351 __ j(either_smi, &miss); | |
| 4352 | |
| 4353 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx); | |
| 4354 __ j(not_equal, &miss, not_taken); | |
| 4355 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); | |
| 4356 __ j(not_equal, &miss, not_taken); | |
| 4357 | |
| 4358 ASSERT(GetCondition() == equal); | |
| 4359 __ subq(rax, rdx); | |
| 4360 __ ret(0); | |
| 4361 | |
| 4362 __ bind(&miss); | |
| 4363 GenerateMiss(masm); | |
| 4286 } | 4364 } |
| 4287 | 4365 |
| 4288 | 4366 |
| 4289 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 4367 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
| 4290 UNIMPLEMENTED(); | 4368 // Save the registers. |
| 4369 __ pop(rcx); | |
| 4370 __ push(rdx); | |
| 4371 __ push(rax); | |
| 4372 __ push(rcx); | |
| 4373 | |
| 4374 // Call the runtime system in a fresh internal frame. | |
| 4375 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss)); | |
| 4376 __ EnterInternalFrame(); | |
| 4377 __ push(rdx); | |
| 4378 __ push(rax); | |
| 4379 __ Push(Smi::FromInt(op_)); | |
| 4380 __ CallExternalReference(miss, 3); | |
| 4381 __ LeaveInternalFrame(); | |
| 4382 | |
| 4383 // Compute the entry point of the rewritten stub. | |
| 4384 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); | |
| 4385 | |
| 4386 // Restore registers. | |
| 4387 __ pop(rcx); | |
| 4388 __ pop(rax); | |
| 4389 __ pop(rdx); | |
| 4390 __ push(rcx); | |
| 4391 | |
| 4392 // Do a tail call to the rewritten stub. | |
| 4393 __ jmp(rdi); | |
| 4291 } | 4394 } |
| 4292 | 4395 |
| 4293 #undef __ | 4396 #undef __ |
| 4294 | 4397 |
| 4295 } } // namespace v8::internal | 4398 } } // namespace v8::internal |
| 4296 | 4399 |
| 4297 #endif // V8_TARGET_ARCH_X64 | 4400 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |