OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 221 matching lines...)
232 __ ret(3 * kPointerSize); | 232 __ ret(3 * kPointerSize); |
233 | 233 |
234 __ bind(&slow_case); | 234 __ bind(&slow_case); |
235 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); | 235 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); |
236 } | 236 } |
237 | 237 |
238 | 238 |
239 // The stub expects its argument on the stack and returns its result in tos_: | 239 // The stub expects its argument on the stack and returns its result in tos_: |
240 // zero for false, and a non-zero value for true. | 240 // zero for false, and a non-zero value for true. |
241 void ToBooleanStub::Generate(MacroAssembler* masm) { | 241 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 242 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 243 // we cannot call anything that could cause a GC from this stub. |
242 Label patch; | 244 Label patch; |
243 Factory* factory = masm->isolate()->factory(); | 245 Factory* factory = masm->isolate()->factory(); |
244 const Register argument = eax; | 246 const Register argument = eax; |
245 const Register map = edx; | 247 const Register map = edx; |
246 | 248 |
247 if (!types_.IsEmpty()) { | 249 if (!types_.IsEmpty()) { |
248 __ mov(argument, Operand(esp, 1 * kPointerSize)); | 250 __ mov(argument, Operand(esp, 1 * kPointerSize)); |
249 } | 251 } |
250 | 252 |
251 // undefined -> false | 253 // undefined -> false |
(...skipping 509 matching lines...)
761 Immediate(HeapNumber::kSignMask)); // Flip sign. | 763 Immediate(HeapNumber::kSignMask)); // Flip sign. |
762 } else { | 764 } else { |
763 __ mov(edx, Operand(eax)); | 765 __ mov(edx, Operand(eax)); |
764 // edx: operand | 766 // edx: operand |
765 | 767 |
766 Label slow_allocate_heapnumber, heapnumber_allocated; | 768 Label slow_allocate_heapnumber, heapnumber_allocated; |
767 __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); | 769 __ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber); |
768 __ jmp(&heapnumber_allocated, Label::kNear); | 770 __ jmp(&heapnumber_allocated, Label::kNear); |
769 | 771 |
770 __ bind(&slow_allocate_heapnumber); | 772 __ bind(&slow_allocate_heapnumber); |
771 __ EnterInternalFrame(); | 773 { |
772 __ push(edx); | 774 FrameScope scope(masm, StackFrame::INTERNAL); |
773 __ CallRuntime(Runtime::kNumberAlloc, 0); | 775 __ push(edx); |
774 __ pop(edx); | 776 __ CallRuntime(Runtime::kNumberAlloc, 0); |
775 __ LeaveInternalFrame(); | 777 __ pop(edx); |
| 778 } |
776 | 779 |
777 __ bind(&heapnumber_allocated); | 780 __ bind(&heapnumber_allocated); |
778 // eax: allocated 'empty' number | 781 // eax: allocated 'empty' number |
779 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); | 782 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset)); |
780 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign. | 783 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign. |
781 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx); | 784 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx); |
782 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset)); | 785 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset)); |
783 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx); | 786 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx); |
784 } | 787 } |
785 __ ret(0); | 788 __ ret(0); |
(...skipping 22 matching lines...)
808 | 811 |
809 // Try to store the result in a heap number. | 812 // Try to store the result in a heap number. |
810 __ bind(&try_float); | 813 __ bind(&try_float); |
811 if (mode_ == UNARY_NO_OVERWRITE) { | 814 if (mode_ == UNARY_NO_OVERWRITE) { |
812 Label slow_allocate_heapnumber, heapnumber_allocated; | 815 Label slow_allocate_heapnumber, heapnumber_allocated; |
813 __ mov(ebx, eax); | 816 __ mov(ebx, eax); |
814 __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber); | 817 __ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber); |
815 __ jmp(&heapnumber_allocated); | 818 __ jmp(&heapnumber_allocated); |
816 | 819 |
817 __ bind(&slow_allocate_heapnumber); | 820 __ bind(&slow_allocate_heapnumber); |
818 __ EnterInternalFrame(); | 821 { |
819 // Push the original HeapNumber on the stack. The integer value can't | 822 FrameScope scope(masm, StackFrame::INTERNAL); |
820 // be stored since it's untagged and not in the smi range (so we can't | 823 // Push the original HeapNumber on the stack. The integer value can't |
821 // smi-tag it). We'll recalculate the value after the GC instead. | 824 // be stored since it's untagged and not in the smi range (so we can't |
822 __ push(ebx); | 825 // smi-tag it). We'll recalculate the value after the GC instead. |
823 __ CallRuntime(Runtime::kNumberAlloc, 0); | 826 __ push(ebx); |
824 // New HeapNumber is in eax. | 827 __ CallRuntime(Runtime::kNumberAlloc, 0); |
825 __ pop(edx); | 828 // New HeapNumber is in eax. |
826 __ LeaveInternalFrame(); | 829 __ pop(edx); |
| 830 } |
827 // IntegerConvert uses ebx and edi as scratch registers. | 831 // IntegerConvert uses ebx and edi as scratch registers. |
828 // This conversion won't go slow-case. | 832 // This conversion won't go slow-case. |
829 IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow); | 833 IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow); |
830 __ not_(ecx); | 834 __ not_(ecx); |
831 | 835 |
832 __ bind(&heapnumber_allocated); | 836 __ bind(&heapnumber_allocated); |
833 } | 837 } |
834 if (CpuFeatures::IsSupported(SSE2)) { | 838 if (CpuFeatures::IsSupported(SSE2)) { |
835 CpuFeatures::Scope use_sse2(SSE2); | 839 CpuFeatures::Scope use_sse2(SSE2); |
836 __ cvtsi2sd(xmm0, Operand(ecx)); | 840 __ cvtsi2sd(xmm0, Operand(ecx)); |
(...skipping 1464 matching lines...)
2301 __ bind(&skip_cache); | 2305 __ bind(&skip_cache); |
2302 __ sub(Operand(esp), Immediate(kDoubleSize)); | 2306 __ sub(Operand(esp), Immediate(kDoubleSize)); |
2303 __ movdbl(Operand(esp, 0), xmm1); | 2307 __ movdbl(Operand(esp, 0), xmm1); |
2304 __ fld_d(Operand(esp, 0)); | 2308 __ fld_d(Operand(esp, 0)); |
2305 GenerateOperation(masm); | 2309 GenerateOperation(masm); |
2306 __ fstp_d(Operand(esp, 0)); | 2310 __ fstp_d(Operand(esp, 0)); |
2307 __ movdbl(xmm1, Operand(esp, 0)); | 2311 __ movdbl(xmm1, Operand(esp, 0)); |
2308 __ add(Operand(esp), Immediate(kDoubleSize)); | 2312 __ add(Operand(esp), Immediate(kDoubleSize)); |
2309 // We return the value in xmm1 without adding it to the cache, but | 2313 // We return the value in xmm1 without adding it to the cache, but |
2310 // we cause a scavenging GC so that future allocations will succeed. | 2314 // we cause a scavenging GC so that future allocations will succeed. |
2311 __ EnterInternalFrame(); | 2315 { |
2312 // Allocate an unused object bigger than a HeapNumber. | 2316 FrameScope scope(masm, StackFrame::INTERNAL); |
2313 __ push(Immediate(Smi::FromInt(2 * kDoubleSize))); | 2317 // Allocate an unused object bigger than a HeapNumber. |
2314 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); | 2318 __ push(Immediate(Smi::FromInt(2 * kDoubleSize))); |
2315 __ LeaveInternalFrame(); | 2319 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
| 2320 } |
2316 __ Ret(); | 2321 __ Ret(); |
2317 } | 2322 } |
2318 | 2323 |
2319 // Call runtime, doing whatever allocation and cleanup is necessary. | 2324 // Call runtime, doing whatever allocation and cleanup is necessary. |
2320 if (tagged) { | 2325 if (tagged) { |
2321 __ bind(&runtime_call_clear_stack); | 2326 __ bind(&runtime_call_clear_stack); |
2322 __ fstp(0); | 2327 __ fstp(0); |
2323 __ bind(&runtime_call); | 2328 __ bind(&runtime_call); |
2324 ExternalReference runtime = | 2329 ExternalReference runtime = |
2325 ExternalReference(RuntimeFunction(), masm->isolate()); | 2330 ExternalReference(RuntimeFunction(), masm->isolate()); |
2326 __ TailCallExternalReference(runtime, 1, 1); | 2331 __ TailCallExternalReference(runtime, 1, 1); |
2327 } else { // UNTAGGED. | 2332 } else { // UNTAGGED. |
2328 __ bind(&runtime_call_clear_stack); | 2333 __ bind(&runtime_call_clear_stack); |
2329 __ bind(&runtime_call); | 2334 __ bind(&runtime_call); |
2330 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache); | 2335 __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache); |
2331 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1); | 2336 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1); |
2332 __ EnterInternalFrame(); | 2337 { |
2333 __ push(eax); | 2338 FrameScope scope(masm, StackFrame::INTERNAL); |
2334 __ CallRuntime(RuntimeFunction(), 1); | 2339 __ push(eax); |
2335 __ LeaveInternalFrame(); | 2340 __ CallRuntime(RuntimeFunction(), 1); |
| 2341 } |
2336 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); | 2342 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); |
2337 __ Ret(); | 2343 __ Ret(); |
2338 } | 2344 } |
2339 } | 2345 } |
2340 | 2346 |
2341 | 2347 |
2342 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { | 2348 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { |
2343 switch (type_) { | 2349 switch (type_) { |
2344 case TranscendentalCache::SIN: return Runtime::kMath_sin; | 2350 case TranscendentalCache::SIN: return Runtime::kMath_sin; |
2345 case TranscendentalCache::COS: return Runtime::kMath_cos; | 2351 case TranscendentalCache::COS: return Runtime::kMath_cos; |
(...skipping 2435 matching lines...)
4781 if (HasArgsInRegisters()) { | 4787 if (HasArgsInRegisters()) { |
4782 // Push arguments below return address. | 4788 // Push arguments below return address. |
4783 __ pop(scratch); | 4789 __ pop(scratch); |
4784 __ push(object); | 4790 __ push(object); |
4785 __ push(function); | 4791 __ push(function); |
4786 __ push(scratch); | 4792 __ push(scratch); |
4787 } | 4793 } |
4788 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 4794 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
4789 } else { | 4795 } else { |
4790 // Call the builtin and convert 0/1 to true/false. | 4796 // Call the builtin and convert 0/1 to true/false. |
4791 __ EnterInternalFrame(); | 4797 { |
4792 __ push(object); | 4798 FrameScope scope(masm, StackFrame::INTERNAL); |
4793 __ push(function); | 4799 __ push(object); |
4794 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); | 4800 __ push(function); |
4795 __ LeaveInternalFrame(); | 4801 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); |
| 4802 } |
4796 Label true_value, done; | 4803 Label true_value, done; |
4797 __ test(eax, Operand(eax)); | 4804 __ test(eax, Operand(eax)); |
4798 __ j(zero, &true_value, Label::kNear); | 4805 __ j(zero, &true_value, Label::kNear); |
4799 __ mov(eax, factory->false_value()); | 4806 __ mov(eax, factory->false_value()); |
4800 __ jmp(&done, Label::kNear); | 4807 __ jmp(&done, Label::kNear); |
4801 __ bind(&true_value); | 4808 __ bind(&true_value); |
4802 __ mov(eax, factory->true_value()); | 4809 __ mov(eax, factory->true_value()); |
4803 __ bind(&done); | 4810 __ bind(&done); |
4804 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); | 4811 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
4805 } | 4812 } |
(...skipping 1461 matching lines...)
6267 } | 6274 } |
6268 | 6275 |
6269 | 6276 |
6270 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { | 6277 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { |
6271 // Save the registers. | 6278 // Save the registers. |
6272 __ pop(ecx); | 6279 __ pop(ecx); |
6273 __ push(edx); | 6280 __ push(edx); |
6274 __ push(eax); | 6281 __ push(eax); |
6275 __ push(ecx); | 6282 __ push(ecx); |
6276 | 6283 |
6277 // Call the runtime system in a fresh internal frame. | 6284 { |
6278 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), | 6285 // Call the runtime system in a fresh internal frame. |
6279 masm->isolate()); | 6286 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), |
6280 __ EnterInternalFrame(); | 6287 masm->isolate()); |
6281 __ push(edx); | 6288 FrameScope scope(masm, StackFrame::INTERNAL); |
6282 __ push(eax); | 6289 __ push(edx); |
6283 __ push(Immediate(Smi::FromInt(op_))); | 6290 __ push(eax); |
6284 __ CallExternalReference(miss, 3); | 6291 __ push(Immediate(Smi::FromInt(op_))); |
6285 __ LeaveInternalFrame(); | 6292 __ CallExternalReference(miss, 3); |
| 6293 } |
6286 | 6294 |
6287 // Compute the entry point of the rewritten stub. | 6295 // Compute the entry point of the rewritten stub. |
6288 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); | 6296 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); |
6289 | 6297 |
6290 // Restore registers. | 6298 // Restore registers. |
6291 __ pop(ecx); | 6299 __ pop(ecx); |
6292 __ pop(eax); | 6300 __ pop(eax); |
6293 __ pop(edx); | 6301 __ pop(edx); |
6294 __ push(ecx); | 6302 __ push(ecx); |
6295 | 6303 |
(...skipping 120 matching lines...)
6416 __ push(r0); | 6424 __ push(r0); |
6417 __ CallStub(&stub); | 6425 __ CallStub(&stub); |
6418 | 6426 |
6419 __ test(r1, Operand(r1)); | 6427 __ test(r1, Operand(r1)); |
6420 __ j(zero, miss); | 6428 __ j(zero, miss); |
6421 __ jmp(done); | 6429 __ jmp(done); |
6422 } | 6430 } |
6423 | 6431 |
6424 | 6432 |
6425 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { | 6433 void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
| 6434 // This stub overrides SometimesSetsUpAFrame() to return false. That means |
| 6435 // we cannot call anything that could cause a GC from this stub. |
6426 // Stack frame on entry: | 6436 // Stack frame on entry: |
6427 // esp[0 * kPointerSize]: return address. | 6437 // esp[0 * kPointerSize]: return address. |
6428 // esp[1 * kPointerSize]: key's hash. | 6438 // esp[1 * kPointerSize]: key's hash. |
6429 // esp[2 * kPointerSize]: key. | 6439 // esp[2 * kPointerSize]: key. |
6430 // Registers: | 6440 // Registers: |
6431 // dictionary_: StringDictionary to probe. | 6441 // dictionary_: StringDictionary to probe. |
6432 // result_: used as scratch. | 6442 // result_: used as scratch. |
6433 // index_: will hold an index of entry if lookup is successful. | 6443 // index_: will hold an index of entry if lookup is successful. |
6434 // might alias with result_. | 6444 // might alias with result_. |
6435 // Returns: | 6445 // Returns: |
(...skipping 72 matching lines...)
6508 __ Drop(1); | 6518 __ Drop(1); |
6509 __ ret(2 * kPointerSize); | 6519 __ ret(2 * kPointerSize); |
6510 } | 6520 } |
6511 | 6521 |
6512 | 6522 |
6513 #undef __ | 6523 #undef __ |
6514 | 6524 |
6515 } } // namespace v8::internal | 6525 } } // namespace v8::internal |
6516 | 6526 |
6517 #endif // V8_TARGET_ARCH_IA32 | 6527 #endif // V8_TARGET_ARCH_IA32 |
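
Note on the recurring change in this patch: each explicit EnterInternalFrame() / LeaveInternalFrame() pair becomes a block-scoped FrameScope(masm, StackFrame::INTERNAL), so the frame teardown is emitted automatically when the C++ block closes and no code path can forget it. Stubs that never set up a frame (ToBooleanStub, StringDictionaryLookupStub) instead gain a comment that SometimesSetsUpAFrame() returns false, meaning nothing GC-triggering may be called from them. Below is a minimal, self-contained sketch of the RAII idea only; Masm and FrameScopeSketch are hypothetical stand-ins for illustration, not the real V8 MacroAssembler and FrameScope classes.

#include <cstdio>

// Hypothetical stand-in for the assembler: in the real patch the calls wrapped
// by FrameScope are the internal-frame setup/teardown emitted by MacroAssembler.
struct Masm {
  void EnterInternalFrame() { std::puts("enter internal frame"); }
  void LeaveInternalFrame() { std::puts("leave internal frame"); }
};

// RAII sketch: the constructor emits the frame setup, the destructor emits the
// teardown when the C++ block ends, so the teardown instruction is always
// generated and the Enter/Leave calls can never get out of balance.
class FrameScopeSketch {
 public:
  explicit FrameScopeSketch(Masm* masm) : masm_(masm) { masm_->EnterInternalFrame(); }
  ~FrameScopeSketch() { masm_->LeaveInternalFrame(); }
  FrameScopeSketch(const FrameScopeSketch&) = delete;
  FrameScopeSketch& operator=(const FrameScopeSketch&) = delete;
 private:
  Masm* masm_;
};

int main() {
  Masm masm;
  {
    FrameScopeSketch scope(&masm);              // was: masm.EnterInternalFrame();
    std::puts("  push args, CallRuntime, pop results");
  }                                             // was: masm.LeaveInternalFrame();
  return 0;
}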