OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2606 matching lines...)
2617 | 2617 |
2618 // Skip cache and return answer directly, only in untagged case. | 2618 // Skip cache and return answer directly, only in untagged case. |
2619 __ bind(&skip_cache); | 2619 __ bind(&skip_cache); |
2620 __ sub(Operand(esp), Immediate(kDoubleSize)); | 2620 __ sub(Operand(esp), Immediate(kDoubleSize)); |
2621 __ movdbl(Operand(esp, 0), xmm1); | 2621 __ movdbl(Operand(esp, 0), xmm1); |
2622 __ fld_d(Operand(esp, 0)); | 2622 __ fld_d(Operand(esp, 0)); |
2623 GenerateOperation(masm); | 2623 GenerateOperation(masm); |
2624 __ fstp_d(Operand(esp, 0)); | 2624 __ fstp_d(Operand(esp, 0)); |
2625 __ movdbl(xmm1, Operand(esp, 0)); | 2625 __ movdbl(xmm1, Operand(esp, 0)); |
2626 __ add(Operand(esp), Immediate(kDoubleSize)); | 2626 __ add(Operand(esp), Immediate(kDoubleSize)); |
| 2627 // We return the value in xmm1 without adding it to the cache, but |
| 2628 // we cause a scavenging GC so that future allocations will succeed. |
| 2629 __ EnterInternalFrame(); |
| 2630 // Allocate an unused object bigger than a HeapNumber. |
| 2631 __ push(Immediate(Smi::FromInt(2 * kDoubleSize))); |
| 2632 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
| 2633 __ LeaveInternalFrame(); |
2627 __ Ret(); | 2634 __ Ret(); |
2628 } | 2635 } |
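
The new lines 2627-2633 are the point of this change: on the untagged skip_cache path the result is returned in xmm1 without being added to the transcendental cache, and the Runtime::kAllocateInNewSpace call is made purely for its side effect. A minimal sketch of that side effect, using a hypothetical toy bump allocator in place of V8's new space (ToyNewSpace, AllocateInNewSpace, and kCapacity are illustrative names, not V8 API):

    #include <cstddef>

    // Toy stand-in for V8's new space: a bump allocator that fails when
    // full and whose Scavenge() reclaims everything (hypothetical).
    struct ToyNewSpace {
      static const size_t kCapacity = 1 << 10;
      unsigned char bytes[kCapacity];
      size_t top;
      ToyNewSpace() : top(0) {}

      void* Allocate(size_t n) {
        if (top + n > kCapacity) return NULL;  // new space is full
        void* result = bytes + top;
        top += n;
        return result;
      }
      void Scavenge() { top = 0; }  // toy GC: evacuate live objects, reset
    };

    // What the stub relies on: the runtime allocator collects on failure,
    // so merely requesting an oversized dummy block triggers a scavenge
    // exactly when new space is nearly full. The block itself is garbage.
    void* AllocateInNewSpace(ToyNewSpace* space, size_t n) {
      void* result = space->Allocate(n);
      if (result == NULL) {  // would have failed: collect and retry once
        space->Scavenge();
        result = space->Allocate(n);
      }
      return result;
    }

Requesting a dummy block of 2 * kDoubleSize (bigger than a HeapNumber) and discarding it forces a scavenge whenever new space is nearly full, which is what lets the comment above promise that future allocations will succeed.
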
2629 | 2636 |
2630 // Call runtime, doing whatever allocation and cleanup is necessary. | 2637 // Call runtime, doing whatever allocation and cleanup is necessary. |
2631 if (tagged) { | 2638 if (tagged) { |
2632 __ bind(&runtime_call_clear_stack); | 2639 __ bind(&runtime_call_clear_stack); |
2633 __ fstp(0); | 2640 __ fstp(0); |
2634 __ bind(&runtime_call); | 2641 __ bind(&runtime_call); |
2635 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1); | 2642 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1); |
2636 } else { // UNTAGGED. | 2643 } else { // UNTAGGED. |
(...skipping 3761 matching lines...)
6398 // Do a tail call to the rewritten stub. | 6405 // Do a tail call to the rewritten stub. |
6399 __ jmp(Operand(edi)); | 6406 __ jmp(Operand(edi)); |
6400 } | 6407 } |
6401 | 6408 |
6402 | 6409 |
6403 #undef __ | 6410 #undef __ |
6404 | 6411 |
6405 } } // namespace v8::internal | 6412 } } // namespace v8::internal |
6406 | 6413 |
6407 #endif // V8_TARGET_ARCH_IA32 | 6414 #endif // V8_TARGET_ARCH_IA32 |
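
A closing note on the skip_cache hunk above: the movdbl/fld_d/fstp_d shuffle around GenerateOperation is needed because IA-32 has no instruction that moves a value directly between an SSE register and the x87 stack, so the operand round-trips through the reserved stack slot. A sketch of the same pattern in GCC inline assembly, assuming a 32-bit target and using fsin as a stand-in for whatever instruction GenerateOperation actually emits:

    // 'slot' plays the role of the kDoubleSize stack slot the stub
    // reserves with sub esp.
    double TranscendentalViaX87(double input) {
      double slot = input;        // movdbl [esp], xmm1: spill to memory
      __asm__ volatile(
          "fldl  %0\n\t"          // fld_d: load onto the x87 stack
          "fsin\n\t"              // GenerateOperation (fsin as an example)
          "fstpl %0\n\t"          // fstp_d: store the result and pop
          : "+m"(slot));
      return slot;                // movdbl xmm1, [esp]: reload into SSE
    }

The compiler supplies the movdbl halves here (spilling the argument to slot and reloading the return value), so only the x87 portion needs explicit assembly; the asm block leaves the x87 stack balanced, just as the stub does.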