| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| (...skipping 19 matching lines...) |
| 31 | 31 |
| 32 #include "code-stubs.h" | 32 #include "code-stubs.h" |
| 33 #include "bootstrapper.h" | 33 #include "bootstrapper.h" |
| 34 #include "jsregexp.h" | 34 #include "jsregexp.h" |
| 35 #include "regexp-macro-assembler.h" | 35 #include "regexp-macro-assembler.h" |
| 36 | 36 |
| 37 namespace v8 { | 37 namespace v8 { |
| 38 namespace internal { | 38 namespace internal { |
| 39 | 39 |
| 40 #define __ ACCESS_MASM(masm) | 40 #define __ ACCESS_MASM(masm) |
| 41 |
| 42 void ToNumberStub::Generate(MacroAssembler* masm) { |
| 43 // The ToNumber stub takes one argument in eax. |
| 44 NearLabel check_heap_number, call_builtin; |
| 45 __ test(eax, Immediate(kSmiTagMask)); |
| 46 __ j(not_zero, &check_heap_number); |
| 47 __ ret(0); |
| 48 |
| 49 __ bind(&check_heap_number); |
| 50 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); |
| 51 __ cmp(Operand(ebx), Immediate(Factory::heap_number_map())); |
| 52 __ j(not_equal, &call_builtin); |
| 53 __ ret(0); |
| 54 |
| 55 __ bind(&call_builtin); |
| 56 __ pop(ecx); // Pop return address. |
| 57 __ push(eax); |
| 58 __ push(ecx); // Push return address. |
| 59 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); |
| 60 } |
| 61 |
| 62 |
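The added ToNumberStub returns smis and heap numbers unchanged and falls back to the TO_NUMBER builtin for everything else. A standalone C++ sketch of that decision, where Kind and Value are illustrative stand-ins for the smi-tag test and heap-number map compare, not V8 types:

```cpp
// Illustration only (not V8 code): the decision logic the stub encodes in eax.
#include <iostream>

enum class Kind { kSmi, kHeapNumber, kOther };
struct Value { Kind kind; };

// Returns true when the value can be returned as-is (the stub's two fast
// paths); false means the stub tail-calls the TO_NUMBER builtin.
bool ToNumberFastPath(const Value& v) {
  if (v.kind == Kind::kSmi) return true;         // smi tag check passed
  if (v.kind == Kind::kHeapNumber) return true;  // map equals heap_number_map
  return false;                                  // strings, objects, undefined, ...
}

int main() {
  std::cout << ToNumberFastPath({Kind::kSmi}) << " "
            << ToNumberFastPath({Kind::kOther}) << "\n";  // prints "1 0"
}
```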
| 41 void FastNewClosureStub::Generate(MacroAssembler* masm) { | 63 void FastNewClosureStub::Generate(MacroAssembler* masm) { |
| 42 // Create a new closure from the given function info in new | 64 // Create a new closure from the given function info in new |
| 43 // space. Set the context to the current context in esi. | 65 // space. Set the context to the current context in esi. |
| 44 Label gc; | 66 Label gc; |
| 45 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); | 67 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); |
| 46 | 68 |
| 47 // Get the function info from the stack. | 69 // Get the function info from the stack. |
| 48 __ mov(edx, Operand(esp, 1 * kPointerSize)); | 70 __ mov(edx, Operand(esp, 1 * kPointerSize)); |
| 49 | 71 |
| 50 // Compute the function map in the current global context and set that | 72 // Compute the function map in the current global context and set that |
| (...skipping 1745 matching lines...) |
| 1796 case Token::SHL: | 1818 case Token::SHL: |
| 1797 case Token::SHR: | 1819 case Token::SHR: |
| 1798 GenerateTypeTransitionWithSavedArgs(masm); | 1820 GenerateTypeTransitionWithSavedArgs(masm); |
| 1799 break; | 1821 break; |
| 1800 default: | 1822 default: |
| 1801 UNREACHABLE(); | 1823 UNREACHABLE(); |
| 1802 } | 1824 } |
| 1803 } | 1825 } |
| 1804 | 1826 |
| 1805 | 1827 |
| 1806 | |
| 1807 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { | 1828 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
| 1808 Label call_runtime; | |
| 1809 ASSERT(operands_type_ == TRBinaryOpIC::STRING); | 1829 ASSERT(operands_type_ == TRBinaryOpIC::STRING); |
| 1810 ASSERT(op_ == Token::ADD); | 1830 ASSERT(op_ == Token::ADD); |
| 1811 // If one of the arguments is a string, call the string add stub. | 1831 // Try to add arguments as strings, otherwise, transition to the generic |
| 1812 // Otherwise, transition to the generic TRBinaryOpIC type. | 1832 // TRBinaryOpIC type. |
| 1813 | 1833 GenerateAddStrings(masm); |
| 1814 // Registers containing left and right operands respectively. | |
| 1815 Register left = edx; | |
| 1816 Register right = eax; | |
| 1817 | |
| 1818 // Test if left operand is a string. | |
| 1819 NearLabel left_not_string; | |
| 1820 __ test(left, Immediate(kSmiTagMask)); | |
| 1821 __ j(zero, &left_not_string); | |
| 1822 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); | |
| 1823 __ j(above_equal, &left_not_string); | |
| 1824 | |
| 1825 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB); | |
| 1826 GenerateRegisterArgsPush(masm); | |
| 1827 __ TailCallStub(&string_add_left_stub); | |
| 1828 | |
| 1829 // Left operand is not a string, test right. | |
| 1830 __ bind(&left_not_string); | |
| 1831 __ test(right, Immediate(kSmiTagMask)); | |
| 1832 __ j(zero, &call_runtime); | |
| 1833 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); | |
| 1834 __ j(above_equal, &call_runtime); | |
| 1835 | |
| 1836 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); | |
| 1837 GenerateRegisterArgsPush(masm); | |
| 1838 __ TailCallStub(&string_add_right_stub); | |
| 1839 | |
| 1840 // Neither argument is a string. | |
| 1841 __ bind(&call_runtime); | |
| 1842 GenerateTypeTransition(masm); | 1834 GenerateTypeTransition(masm); |
| 1843 } | 1835 } |
| 1844 | 1836 |
| 1845 | 1837 |
| 1846 void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { | 1838 void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { |
| 1847 Label call_runtime; | 1839 Label call_runtime; |
| 1848 ASSERT(operands_type_ == TRBinaryOpIC::INT32); | 1840 ASSERT(operands_type_ == TRBinaryOpIC::INT32); |
| 1849 | 1841 |
| 1850 // Floating point case. | 1842 // Floating point case. |
| 1851 switch (op_) { | 1843 switch (op_) { |
| (...skipping 188 matching lines...) |
| 2040 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); | 2032 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); |
| 2041 break; | 2033 break; |
| 2042 default: | 2034 default: |
| 2043 UNREACHABLE(); | 2035 UNREACHABLE(); |
| 2044 } | 2036 } |
| 2045 } | 2037 } |
| 2046 | 2038 |
| 2047 | 2039 |
| 2048 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 2040 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 2049 Label call_runtime; | 2041 Label call_runtime; |
| 2050 ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER || | 2042 ASSERT(operands_type_ == TRBinaryOpIC::HEAP_NUMBER); |
| 2051 operands_type_ == TRBinaryOpIC::INT32); | |
| 2052 | 2043 |
| 2053 // Floating point case. | 2044 // Floating point case. |
| 2054 switch (op_) { | 2045 switch (op_) { |
| 2055 case Token::ADD: | 2046 case Token::ADD: |
| 2056 case Token::SUB: | 2047 case Token::SUB: |
| 2057 case Token::MUL: | 2048 case Token::MUL: |
| 2058 case Token::DIV: { | 2049 case Token::DIV: { |
| 2059 Label not_floats; | 2050 Label not_floats; |
| 2060 if (CpuFeatures::IsSupported(SSE2)) { | 2051 if (CpuFeatures::IsSupported(SSE2)) { |
| 2061 CpuFeatures::Scope use_sse2(SSE2); | 2052 CpuFeatures::Scope use_sse2(SSE2); |
| (...skipping 311 matching lines...) |
| 2373 break; | 2364 break; |
| 2374 } | 2365 } |
| 2375 default: UNREACHABLE(); break; | 2366 default: UNREACHABLE(); break; |
| 2376 } | 2367 } |
| 2377 | 2368 |
| 2378 // If all else fails, use the runtime system to get the correct | 2369 // If all else fails, use the runtime system to get the correct |
| 2379 // result. | 2370 // result. |
| 2380 __ bind(&call_runtime); | 2371 __ bind(&call_runtime); |
| 2381 switch (op_) { | 2372 switch (op_) { |
| 2382 case Token::ADD: { | 2373 case Token::ADD: { |
| 2374 GenerateAddStrings(masm); |
| 2383 GenerateRegisterArgsPush(masm); | 2375 GenerateRegisterArgsPush(masm); |
| 2384 // Test for string arguments before calling runtime. | |
| 2385 // Registers containing left and right operands respectively. | |
| 2386 Register lhs, rhs; | |
| 2387 lhs = edx; | |
| 2388 rhs = eax; | |
| 2389 | |
| 2390 // Test if left operand is a string. | |
| 2391 NearLabel lhs_not_string; | |
| 2392 __ test(lhs, Immediate(kSmiTagMask)); | |
| 2393 __ j(zero, &lhs_not_string); | |
| 2394 __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx); | |
| 2395 __ j(above_equal, &lhs_not_string); | |
| 2396 | |
| 2397 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB); | |
| 2398 __ TailCallStub(&string_add_left_stub); | |
| 2399 | |
| 2400 NearLabel call_add_runtime; | |
| 2401 // Left operand is not a string, test right. | |
| 2402 __ bind(&lhs_not_string); | |
| 2403 __ test(rhs, Immediate(kSmiTagMask)); | |
| 2404 __ j(zero, &call_add_runtime); | |
| 2405 __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx); | |
| 2406 __ j(above_equal, &call_add_runtime); | |
| 2407 | |
| 2408 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); | |
| 2409 __ TailCallStub(&string_add_right_stub); | |
| 2410 | |
| 2411 // Neither argument is a string. | |
| 2412 __ bind(&call_add_runtime); | |
| 2413 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); | 2376 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); |
| 2414 break; | 2377 break; |
| 2415 } | 2378 } |
| 2416 case Token::SUB: | 2379 case Token::SUB: |
| 2417 GenerateRegisterArgsPush(masm); | 2380 GenerateRegisterArgsPush(masm); |
| 2418 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); | 2381 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); |
| 2419 break; | 2382 break; |
| 2420 case Token::MUL: | 2383 case Token::MUL: |
| 2421 GenerateRegisterArgsPush(masm); | 2384 GenerateRegisterArgsPush(masm); |
| 2422 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); | 2385 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); |
| (...skipping 22 matching lines...) |
| 2445 break; | 2408 break; |
| 2446 case Token::SHR: | 2409 case Token::SHR: |
| 2447 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); | 2410 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); |
| 2448 break; | 2411 break; |
| 2449 default: | 2412 default: |
| 2450 UNREACHABLE(); | 2413 UNREACHABLE(); |
| 2451 } | 2414 } |
| 2452 } | 2415 } |
| 2453 | 2416 |
| 2454 | 2417 |
| 2418 void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { |
| 2419 NearLabel call_runtime; |
| 2420 |
| 2421 // Registers containing left and right operands respectively. |
| 2422 Register left = edx; |
| 2423 Register right = eax; |
| 2424 |
| 2425 // Test if left operand is a string. |
| 2426 NearLabel left_not_string; |
| 2427 __ test(left, Immediate(kSmiTagMask)); |
| 2428 __ j(zero, &left_not_string); |
| 2429 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx); |
| 2430 __ j(above_equal, &left_not_string); |
| 2431 |
| 2432 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB); |
| 2433 GenerateRegisterArgsPush(masm); |
| 2434 __ TailCallStub(&string_add_left_stub); |
| 2435 |
| 2436 // Left operand is not a string, test right. |
| 2437 __ bind(&left_not_string); |
| 2438 __ test(right, Immediate(kSmiTagMask)); |
| 2439 __ j(zero, &call_runtime); |
| 2440 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx); |
| 2441 __ j(above_equal, &call_runtime); |
| 2442 |
| 2443 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); |
| 2444 GenerateRegisterArgsPush(masm); |
| 2445 __ TailCallStub(&string_add_right_stub); |
| 2446 |
| 2447 // Neither argument is a string. |
| 2448 __ bind(&call_runtime); |
| 2449 } |
| 2450 |
| 2451 |
| 2455 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( | 2452 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( |
| 2456 MacroAssembler* masm, | 2453 MacroAssembler* masm, |
| 2457 Label* alloc_failure) { | 2454 Label* alloc_failure) { |
| 2458 Label skip_allocation; | 2455 Label skip_allocation; |
| 2459 OverwriteMode mode = mode_; | 2456 OverwriteMode mode = mode_; |
| 2460 switch (mode) { | 2457 switch (mode) { |
| 2461 case OVERWRITE_LEFT: { | 2458 case OVERWRITE_LEFT: { |
| 2462 // If the argument in edx is already an object, we skip the | 2459 // If the argument in edx is already an object, we skip the |
| 2463 // allocation of a heap number. | 2460 // allocation of a heap number. |
| 2464 __ test(edx, Immediate(kSmiTagMask)); | 2461 __ test(edx, Immediate(kSmiTagMask)); |
| (...skipping 1061 matching lines...) |
| 3526 // Test for -0.5. | 3523 // Test for -0.5. |
| 3527 // Load xmm2 with -0.5. | 3524 // Load xmm2 with -0.5. |
| 3528 __ mov(ecx, Immediate(0xBF000000)); | 3525 __ mov(ecx, Immediate(0xBF000000)); |
| 3529 __ movd(xmm2, Operand(ecx)); | 3526 __ movd(xmm2, Operand(ecx)); |
| 3530 __ cvtss2sd(xmm2, xmm2); | 3527 __ cvtss2sd(xmm2, xmm2); |
| 3531 // xmm2 now has -0.5. | 3528 // xmm2 now has -0.5. |
| 3532 __ ucomisd(xmm2, xmm1); | 3529 __ ucomisd(xmm2, xmm1); |
| 3533 __ j(not_equal, ¬_minus_half); | 3530 __ j(not_equal, ¬_minus_half); |
| 3534 | 3531 |
| 3535 // Calculates reciprocal of square root. | 3532 // Calculates reciprocal of square root. |
| 3536 // Note that 1/sqrt(x) = sqrt(1/x)) | 3533 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. |
| 3537 __ divsd(xmm3, xmm0); | 3534 __ xorpd(xmm1, xmm1); |
| 3535 __ addsd(xmm1, xmm0); |
| 3536 __ sqrtsd(xmm1, xmm1); |
| 3537 __ divsd(xmm3, xmm1); |
| 3538 __ movsd(xmm1, xmm3); | 3538 __ movsd(xmm1, xmm3); |
| 3539 __ sqrtsd(xmm1, xmm1); | |
| 3540 __ jmp(&allocate_return); | 3539 __ jmp(&allocate_return); |
| 3541 | 3540 |
| 3542 // Test for 0.5. | 3541 // Test for 0.5. |
| 3543 __ bind(¬_minus_half); | 3542 __ bind(¬_minus_half); |
| 3544 // Load xmm2 with 0.5. | 3543 // Load xmm2 with 0.5. |
| 3545 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3. | 3544 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3. |
| 3546 __ addsd(xmm2, xmm3); | 3545 __ addsd(xmm2, xmm3); |
| 3547 // xmm2 now has 0.5. | 3546 // xmm2 now has 0.5. |
| 3548 __ ucomisd(xmm2, xmm1); | 3547 __ ucomisd(xmm2, xmm1); |
| 3549 __ j(not_equal, &call_runtime); | 3548 __ j(not_equal, &call_runtime); |
| 3550 // Calculates square root. | 3549 // Calculates square root. |
| 3551 __ movsd(xmm1, xmm0); | 3550 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. |
| 3551 __ xorpd(xmm1, xmm1); |
| 3552 __ addsd(xmm1, xmm0); |
| 3552 __ sqrtsd(xmm1, xmm1); | 3553 __ sqrtsd(xmm1, xmm1); |
| 3553 | 3554 |
| 3554 __ bind(&allocate_return); | 3555 __ bind(&allocate_return); |
| 3555 __ AllocateHeapNumber(ecx, eax, edx, &call_runtime); | 3556 __ AllocateHeapNumber(ecx, eax, edx, &call_runtime); |
| 3556 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1); | 3557 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1); |
| 3557 __ mov(eax, ecx); | 3558 __ mov(eax, ecx); |
| 3558 __ ret(2); | 3559 __ ret(2); |
| 3559 | 3560 |
| 3560 __ bind(&call_runtime); | 3561 __ bind(&call_runtime); |
| 3561 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); | 3562 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); |
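The xorpd/addsd pairs added above exist because IEEE 754 square root preserves the sign of zero, while ECMAScript requires Math.pow(-0, 0.5) to be +0 and Math.pow(-0, -0.5) to be +Infinity (the old reciprocal path computed sqrt(1/x), which for x = -0 yields NaN). A small standalone C++ check of that behaviour, assuming IEEE 754 doubles (illustration only, not part of this patch):

```cpp
#include <cmath>
#include <cstdio>

int main() {
  double neg_zero = -0.0;
  std::printf("sqrt(-0.0)          = %g\n", std::sqrt(neg_zero));        // -0: sign preserved
  std::printf("sqrt(1 / -0.0)      = %g\n", std::sqrt(1.0 / neg_zero));  // NaN: old pow(x, -0.5) route
  std::printf("sqrt(0.0 + -0.0)    = %g\n", std::sqrt(0.0 + neg_zero));  // 0: -0 normalized by adding +0
  std::printf("1 / sqrt(0.0+-0.0)  = %g\n",
              1.0 / std::sqrt(0.0 + neg_zero));                          // inf: new pow(x, -0.5) result
  return 0;
}
```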
| (...skipping 348 matching lines...) |
| 3910 __ SmiUntag(ebx); // Previous index from smi. | 3911 __ SmiUntag(ebx); // Previous index from smi. |
| 3911 | 3912 |
| 3912 // eax: subject string | 3913 // eax: subject string |
| 3913 // ebx: previous index | 3914 // ebx: previous index |
| 3914 // edx: code | 3915 // edx: code |
| 3915 // edi: encoding of subject string (1 if ascii 0 if two_byte); | 3916 // edi: encoding of subject string (1 if ascii 0 if two_byte); |
| 3916 // All checks done. Now push arguments for native regexp code. | 3917 // All checks done. Now push arguments for native regexp code. |
| 3917 __ IncrementCounter(&Counters::regexp_entry_native, 1); | 3918 __ IncrementCounter(&Counters::regexp_entry_native, 1); |
| 3918 | 3919 |
| 3919 static const int kRegExpExecuteArguments = 7; | 3920 static const int kRegExpExecuteArguments = 7; |
| 3920 __ PrepareCallCFunction(kRegExpExecuteArguments, ecx); | 3921 __ EnterApiExitFrame(kRegExpExecuteArguments); |
| 3921 | 3922 |
| 3922 // Argument 7: Indicate that this is a direct call from JavaScript. | 3923 // Argument 7: Indicate that this is a direct call from JavaScript. |
| 3923 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1)); | 3924 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1)); |
| 3924 | 3925 |
| 3925 // Argument 6: Start (high end) of backtracking stack memory area. | 3926 // Argument 6: Start (high end) of backtracking stack memory area. |
| 3926 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address)); | 3927 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address)); |
| 3927 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); | 3928 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); |
| 3928 __ mov(Operand(esp, 5 * kPointerSize), ecx); | 3929 __ mov(Operand(esp, 5 * kPointerSize), ecx); |
| 3929 | 3930 |
| 3930 // Argument 5: static offsets vector buffer. | 3931 // Argument 5: static offsets vector buffer. |
| (...skipping 24 matching lines...) |
| 3955 __ bind(&setup_rest); | 3956 __ bind(&setup_rest); |
| 3956 | 3957 |
| 3957 // Argument 2: Previous index. | 3958 // Argument 2: Previous index. |
| 3958 __ mov(Operand(esp, 1 * kPointerSize), ebx); | 3959 __ mov(Operand(esp, 1 * kPointerSize), ebx); |
| 3959 | 3960 |
| 3960 // Argument 1: Subject string. | 3961 // Argument 1: Subject string. |
| 3961 __ mov(Operand(esp, 0 * kPointerSize), eax); | 3962 __ mov(Operand(esp, 0 * kPointerSize), eax); |
| 3962 | 3963 |
| 3963 // Locate the code entry and call it. | 3964 // Locate the code entry and call it. |
| 3964 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); | 3965 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| 3965 __ CallCFunction(edx, kRegExpExecuteArguments); | 3966 __ call(Operand(edx)); |
| 3967 |
| 3968 // Drop arguments and come back to JS mode. |
| 3969 __ LeaveApiExitFrame(); |
| 3966 | 3970 |
| 3967 // Check the result. | 3971 // Check the result. |
| 3968 Label success; | 3972 Label success; |
| 3969 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS); | 3973 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS); |
| 3970 __ j(equal, &success, taken); | 3974 __ j(equal, &success, taken); |
| 3971 Label failure; | 3975 Label failure; |
| 3972 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); | 3976 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); |
| 3973 __ j(equal, &failure, taken); | 3977 __ j(equal, &failure, taken); |
| 3974 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); | 3978 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); |
| 3975 // If not exception it can only be retry. Handle that in the runtime system. | 3979 // If not exception it can only be retry. Handle that in the runtime system. |
| 3976 __ j(not_equal, &runtime); | 3980 __ j(not_equal, &runtime); |
| 3977 // Result must now be exception. If there is no pending exception already a | 3981 // Result must now be exception. If there is no pending exception already a |
| 3978 // stack overflow (on the backtrack stack) was detected in RegExp code but | 3982 // stack overflow (on the backtrack stack) was detected in RegExp code but |
| 3979 // haven't created the exception yet. Handle that in the runtime system. | 3983 // haven't created the exception yet. Handle that in the runtime system. |
| 3980 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | 3984 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
| 3981 ExternalReference pending_exception(Top::k_pending_exception_address); | 3985 ExternalReference pending_exception(Top::k_pending_exception_address); |
| 3982 __ mov(eax, | 3986 __ mov(edx, |
| 3983 Operand::StaticVariable(ExternalReference::the_hole_value_location())); | 3987 Operand::StaticVariable(ExternalReference::the_hole_value_location())); |
| 3984 __ cmp(eax, Operand::StaticVariable(pending_exception)); | 3988 __ mov(eax, Operand::StaticVariable(pending_exception)); |
| 3989 __ cmp(edx, Operand(eax)); |
| 3985 __ j(equal, &runtime); | 3990 __ j(equal, &runtime); |
| 3991 // For exception, throw the exception again. |
| 3992 |
| 3993 // Clear the pending exception variable. |
| 3994 __ mov(Operand::StaticVariable(pending_exception), edx); |
| 3995 |
| 3996 // Special handling of termination exceptions which are uncatchable |
| 3997 // by javascript code. |
| 3998 __ cmp(eax, Factory::termination_exception()); |
| 3999 Label throw_termination_exception; |
| 4000 __ j(equal, &throw_termination_exception); |
| 4001 |
| 4002 // Handle normal exception by following handler chain. |
| 4003 __ Throw(eax); |
| 4004 |
| 4005 __ bind(&throw_termination_exception); |
| 4006 __ ThrowUncatchable(TERMINATION, eax); |
| 4007 |
| 3986 __ bind(&failure); | 4008 __ bind(&failure); |
| 3987 // For failure and exception return null. | 4009 // For failure to match, return null. |
| 3988 __ mov(Operand(eax), Factory::null_value()); | 4010 __ mov(Operand(eax), Factory::null_value()); |
| 3989 __ ret(4 * kPointerSize); | 4011 __ ret(4 * kPointerSize); |
| 3990 | 4012 |
| 3991 // Load RegExp data. | 4013 // Load RegExp data. |
| 3992 __ bind(&success); | 4014 __ bind(&success); |
| 3993 __ mov(eax, Operand(esp, kJSRegExpOffset)); | 4015 __ mov(eax, Operand(esp, kJSRegExpOffset)); |
| 3994 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); | 4016 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); |
| 3995 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); | 4017 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); |
| 3996 // Calculate number of capture registers (number_of_captures + 1) * 2. | 4018 // Calculate number of capture registers (number_of_captures + 1) * 2. |
| 3997 STATIC_ASSERT(kSmiTag == 0); | 4019 STATIC_ASSERT(kSmiTag == 0); |
| (...skipping 657 matching lines...) |
| 4655 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); | 4677 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi); |
| 4656 __ Set(eax, Immediate(argc_)); | 4678 __ Set(eax, Immediate(argc_)); |
| 4657 __ Set(ebx, Immediate(0)); | 4679 __ Set(ebx, Immediate(0)); |
| 4658 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); | 4680 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); |
| 4659 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); | 4681 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline)); |
| 4660 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 4682 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
| 4661 } | 4683 } |
| 4662 | 4684 |
| 4663 | 4685 |
| 4664 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { | 4686 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
| 4665 // eax holds the exception. | 4687 __ Throw(eax); |
| 4666 | |
| 4667 // Adjust this code if not the case. | |
| 4668 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); | |
| 4669 | |
| 4670 // Drop the sp to the top of the handler. | |
| 4671 ExternalReference handler_address(Top::k_handler_address); | |
| 4672 __ mov(esp, Operand::StaticVariable(handler_address)); | |
| 4673 | |
| 4674 // Restore next handler and frame pointer, discard handler state. | |
| 4675 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | |
| 4676 __ pop(Operand::StaticVariable(handler_address)); | |
| 4677 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); | |
| 4678 __ pop(ebp); | |
| 4679 __ pop(edx); // Remove state. | |
| 4680 | |
| 4681 // Before returning we restore the context from the frame pointer if | |
| 4682 // not NULL. The frame pointer is NULL in the exception handler of | |
| 4683 // a JS entry frame. | |
| 4684 __ Set(esi, Immediate(0)); // Tentatively set context pointer to NULL. | |
| 4685 NearLabel skip; | |
| 4686 __ cmp(ebp, 0); | |
| 4687 __ j(equal, &skip, not_taken); | |
| 4688 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | |
| 4689 __ bind(&skip); | |
| 4690 | |
| 4691 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); | |
| 4692 __ ret(0); | |
| 4693 } | 4688 } |
| 4694 | 4689 |
| 4695 | 4690 |
| 4696 void CEntryStub::GenerateCore(MacroAssembler* masm, | 4691 void CEntryStub::GenerateCore(MacroAssembler* masm, |
| 4697 Label* throw_normal_exception, | 4692 Label* throw_normal_exception, |
| 4698 Label* throw_termination_exception, | 4693 Label* throw_termination_exception, |
| 4699 Label* throw_out_of_memory_exception, | 4694 Label* throw_out_of_memory_exception, |
| 4700 bool do_gc, | 4695 bool do_gc, |
| 4701 bool always_allocate_scope, | 4696 bool always_allocate_scope) { |
| 4702 int /* alignment_skew */) { | |
| 4703 // eax: result parameter for PerformGC, if any | 4697 // eax: result parameter for PerformGC, if any |
| 4704 // ebx: pointer to C function (C callee-saved) | 4698 // ebx: pointer to C function (C callee-saved) |
| 4705 // ebp: frame pointer (restored after C call) | 4699 // ebp: frame pointer (restored after C call) |
| 4706 // esp: stack pointer (restored after C call) | 4700 // esp: stack pointer (restored after C call) |
| 4707 // edi: number of arguments including receiver (C callee-saved) | 4701 // edi: number of arguments including receiver (C callee-saved) |
| 4708 // esi: pointer to the first argument (C callee-saved) | 4702 // esi: pointer to the first argument (C callee-saved) |
| 4709 | 4703 |
| 4710 // Result returned in eax, or eax+edx if result_size_ is 2. | 4704 // Result returned in eax, or eax+edx if result_size_ is 2. |
| 4711 | 4705 |
| 4712 // Check stack alignment. | 4706 // Check stack alignment. |
| (...skipping 38 matching lines...) |
| 4751 } | 4745 } |
| 4752 | 4746 |
| 4753 // Check for failure result. | 4747 // Check for failure result. |
| 4754 Label failure_returned; | 4748 Label failure_returned; |
| 4755 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); | 4749 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); |
| 4756 __ lea(ecx, Operand(eax, 1)); | 4750 __ lea(ecx, Operand(eax, 1)); |
| 4757 // Lower 2 bits of ecx are 0 iff eax has failure tag. | 4751 // Lower 2 bits of ecx are 0 iff eax has failure tag. |
| 4758 __ test(ecx, Immediate(kFailureTagMask)); | 4752 __ test(ecx, Immediate(kFailureTagMask)); |
| 4759 __ j(zero, &failure_returned, not_taken); | 4753 __ j(zero, &failure_returned, not_taken); |
| 4760 | 4754 |
| 4755 ExternalReference pending_exception_address(Top::k_pending_exception_address); |
| 4756 |
| 4757 // Check that there is no pending exception, otherwise we |
| 4758 // should have returned some failure value. |
| 4759 if (FLAG_debug_code) { |
| 4760 __ push(edx); |
| 4761 __ mov(edx, Operand::StaticVariable( |
| 4762 ExternalReference::the_hole_value_location())); |
| 4763 NearLabel okay; |
| 4764 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); |
| 4765 // Cannot use check here as it attempts to generate call into runtime. |
| 4766 __ j(equal, &okay); |
| 4767 __ int3(); |
| 4768 __ bind(&okay); |
| 4769 __ pop(edx); |
| 4770 } |
| 4771 |
| 4761 // Exit the JavaScript to C++ exit frame. | 4772 // Exit the JavaScript to C++ exit frame. |
| 4762 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); | 4773 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs); |
| 4763 __ ret(0); | 4774 __ ret(0); |
| 4764 | 4775 |
| 4765 // Handling of failure. | 4776 // Handling of failure. |
| 4766 __ bind(&failure_returned); | 4777 __ bind(&failure_returned); |
| 4767 | 4778 |
| 4768 Label retry; | 4779 Label retry; |
| 4769 // If the returned exception is RETRY_AFTER_GC continue at retry label | 4780 // If the returned exception is RETRY_AFTER_GC continue at retry label |
| 4770 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); | 4781 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0); |
| 4771 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); | 4782 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); |
| 4772 __ j(zero, &retry, taken); | 4783 __ j(zero, &retry, taken); |
| 4773 | 4784 |
| 4774 // Special handling of out of memory exceptions. | 4785 // Special handling of out of memory exceptions. |
| 4775 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); | 4786 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); |
| 4776 __ j(equal, throw_out_of_memory_exception); | 4787 __ j(equal, throw_out_of_memory_exception); |
| 4777 | 4788 |
| 4778 // Retrieve the pending exception and clear the variable. | 4789 // Retrieve the pending exception and clear the variable. |
| 4779 ExternalReference pending_exception_address(Top::k_pending_exception_address); | |
| 4780 __ mov(eax, Operand::StaticVariable(pending_exception_address)); | 4790 __ mov(eax, Operand::StaticVariable(pending_exception_address)); |
| 4781 __ mov(edx, | 4791 __ mov(edx, |
| 4782 Operand::StaticVariable(ExternalReference::the_hole_value_location())); | 4792 Operand::StaticVariable(ExternalReference::the_hole_value_location())); |
| 4783 __ mov(Operand::StaticVariable(pending_exception_address), edx); | 4793 __ mov(Operand::StaticVariable(pending_exception_address), edx); |
| 4784 | 4794 |
| 4785 // Special handling of termination exceptions which are uncatchable | 4795 // Special handling of termination exceptions which are uncatchable |
| 4786 // by javascript code. | 4796 // by javascript code. |
| 4787 __ cmp(eax, Factory::termination_exception()); | 4797 __ cmp(eax, Factory::termination_exception()); |
| 4788 __ j(equal, throw_termination_exception); | 4798 __ j(equal, throw_termination_exception); |
| 4789 | 4799 |
| 4790 // Handle normal exception. | 4800 // Handle normal exception. |
| 4791 __ jmp(throw_normal_exception); | 4801 __ jmp(throw_normal_exception); |
| 4792 | 4802 |
| 4793 // Retry. | 4803 // Retry. |
| 4794 __ bind(&retry); | 4804 __ bind(&retry); |
| 4795 } | 4805 } |
| 4796 | 4806 |
| 4797 | 4807 |
| 4798 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, | 4808 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, |
| 4799 UncatchableExceptionType type) { | 4809 UncatchableExceptionType type) { |
| 4800 // Adjust this code if not the case. | 4810 __ ThrowUncatchable(type, eax); |
| 4801 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); | |
| 4802 | |
| 4803 // Drop sp to the top stack handler. | |
| 4804 ExternalReference handler_address(Top::k_handler_address); | |
| 4805 __ mov(esp, Operand::StaticVariable(handler_address)); | |
| 4806 | |
| 4807 // Unwind the handlers until the ENTRY handler is found. | |
| 4808 NearLabel loop, done; | |
| 4809 __ bind(&loop); | |
| 4810 // Load the type of the current stack handler. | |
| 4811 const int kStateOffset = StackHandlerConstants::kStateOffset; | |
| 4812 __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY)); | |
| 4813 __ j(equal, &done); | |
| 4814 // Fetch the next handler in the list. | |
| 4815 const int kNextOffset = StackHandlerConstants::kNextOffset; | |
| 4816 __ mov(esp, Operand(esp, kNextOffset)); | |
| 4817 __ jmp(&loop); | |
| 4818 __ bind(&done); | |
| 4819 | |
| 4820 // Set the top handler address to next handler past the current ENTRY handler. | |
| 4821 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); | |
| 4822 __ pop(Operand::StaticVariable(handler_address)); | |
| 4823 | |
| 4824 if (type == OUT_OF_MEMORY) { | |
| 4825 // Set external caught exception to false. | |
| 4826 ExternalReference external_caught(Top::k_external_caught_exception_address); | |
| 4827 __ mov(eax, false); | |
| 4828 __ mov(Operand::StaticVariable(external_caught), eax); | |
| 4829 | |
| 4830 // Set pending exception and eax to out of memory exception. | |
| 4831 ExternalReference pending_exception(Top::k_pending_exception_address); | |
| 4832 __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException())); | |
| 4833 __ mov(Operand::StaticVariable(pending_exception), eax); | |
| 4834 } | |
| 4835 | |
| 4836 // Clear the context pointer. | |
| 4837 __ Set(esi, Immediate(0)); | |
| 4838 | |
| 4839 // Restore fp from handler and discard handler state. | |
| 4840 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize); | |
| 4841 __ pop(ebp); | |
| 4842 __ pop(edx); // State. | |
| 4843 | |
| 4844 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); | |
| 4845 __ ret(0); | |
| 4846 } | 4811 } |
| 4847 | 4812 |
| 4848 | 4813 |
| 4849 void CEntryStub::Generate(MacroAssembler* masm) { | 4814 void CEntryStub::Generate(MacroAssembler* masm) { |
| 4850 // eax: number of arguments including receiver | 4815 // eax: number of arguments including receiver |
| 4851 // ebx: pointer to C function (C callee-saved) | 4816 // ebx: pointer to C function (C callee-saved) |
| 4852 // ebp: frame pointer (restored after C call) | 4817 // ebp: frame pointer (restored after C call) |
| 4853 // esp: stack pointer (restored after C call) | 4818 // esp: stack pointer (restored after C call) |
| 4854 // esi: current context (C callee-saved) | 4819 // esi: current context (C callee-saved) |
| 4855 // edi: JS function of the caller (C callee-saved) | 4820 // edi: JS function of the caller (C callee-saved) |
| (...skipping 145 matching lines...) |
| 5001 __ pop(esi); | 4966 __ pop(esi); |
| 5002 __ pop(edi); | 4967 __ pop(edi); |
| 5003 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers | 4968 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers |
| 5004 | 4969 |
| 5005 // Restore frame pointer and return. | 4970 // Restore frame pointer and return. |
| 5006 __ pop(ebp); | 4971 __ pop(ebp); |
| 5007 __ ret(0); | 4972 __ ret(0); |
| 5008 } | 4973 } |
| 5009 | 4974 |
| 5010 | 4975 |
| 4976 // Generate stub code for instanceof. |
| 4977 // This code can patch a call site inlined cache of the instance of check, |
| 4978 // which looks like this. |
| 4979 // |
| 4980 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map> |
| 4981 // 75 0a jne <some near label> |
| 4982 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false> |
| 4983 // |
| 4984 // If call site patching is requested the stack will have the delta from the |
| 4985 // return address to the cmp instruction just below the return address. This |
| 4986 // also means that call site patching can only take place with arguments in |
| 4987 // registers. TOS looks like this when call site patching is requested |
| 4988 // |
| 4989 // esp[0] : return address |
| 4990 // esp[4] : delta from return address to cmp instruction |
| 4991 // |
| 5011 void InstanceofStub::Generate(MacroAssembler* masm) { | 4992 void InstanceofStub::Generate(MacroAssembler* masm) { |
| 4993 // Call site inlining and patching implies arguments in registers. |
| 4994 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck()); |
| 4995 |
| 5012 // Fixed register usage throughout the stub. | 4996 // Fixed register usage throughout the stub. |
| 5013 Register object = eax; // Object (lhs). | 4997 Register object = eax; // Object (lhs). |
| 5014 Register map = ebx; // Map of the object. | 4998 Register map = ebx; // Map of the object. |
| 5015 Register function = edx; // Function (rhs). | 4999 Register function = edx; // Function (rhs). |
| 5016 Register prototype = edi; // Prototype of the function. | 5000 Register prototype = edi; // Prototype of the function. |
| 5017 Register scratch = ecx; | 5001 Register scratch = ecx; |
| 5018 | 5002 |
| 5003 // Constants describing the call site code to patch. |
| 5004 static const int kDeltaToCmpImmediate = 2; |
| 5005 static const int kDeltaToMov = 8; |
| 5006 static const int kDeltaToMovImmediate = 9; |
| 5007 static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81); |
| 5008 static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff); |
| 5009 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8); |
| 5010 |
| 5011 ExternalReference roots_address = ExternalReference::roots_address(); |
| 5012 |
| 5013 ASSERT_EQ(object.code(), InstanceofStub::left().code()); |
| 5014 ASSERT_EQ(function.code(), InstanceofStub::right().code()); |
| 5015 |
| 5019 // Get the object and function - they are always both needed. | 5016 // Get the object and function - they are always both needed. |
| 5020 Label slow, not_js_object; | 5017 Label slow, not_js_object; |
| 5021 if (!args_in_registers()) { | 5018 if (!HasArgsInRegisters()) { |
| 5022 __ mov(object, Operand(esp, 2 * kPointerSize)); | 5019 __ mov(object, Operand(esp, 2 * kPointerSize)); |
| 5023 __ mov(function, Operand(esp, 1 * kPointerSize)); | 5020 __ mov(function, Operand(esp, 1 * kPointerSize)); |
| 5024 } | 5021 } |
| 5025 | 5022 |
| 5026 // Check that the left hand is a JS object. | 5023 // Check that the left hand is a JS object. |
| 5027 __ test(object, Immediate(kSmiTagMask)); | 5024 __ test(object, Immediate(kSmiTagMask)); |
| 5028 __ j(zero, ¬_js_object, not_taken); | 5025 __ j(zero, ¬_js_object, not_taken); |
| 5029 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); | 5026 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); |
| 5030 | 5027 |
| 5031 // Look up the function and the map in the instanceof cache. | 5028 // If there is a call site cache don't look in the global cache, but do the |
| 5032 NearLabel miss; | 5029 // real lookup and update the call site cache. |
| 5033 ExternalReference roots_address = ExternalReference::roots_address(); | 5030 if (!HasCallSiteInlineCheck()) { |
| 5034 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); | 5031 // Look up the function and the map in the instanceof cache. |
| 5035 __ cmp(function, | 5032 NearLabel miss; |
| 5036 Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5033 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); |
| 5037 __ j(not_equal, &miss); | 5034 __ cmp(function, |
| 5038 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); | 5035 Operand::StaticArray(scratch, times_pointer_size, roots_address)); |
| 5039 __ cmp(map, Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5036 __ j(not_equal, &miss); |
| 5040 __ j(not_equal, &miss); | 5037 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); |
| 5041 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5038 __ cmp(map, Operand::StaticArray( |
| 5042 __ mov(eax, Operand::StaticArray(scratch, times_pointer_size, roots_address)); | 5039 scratch, times_pointer_size, roots_address)); |
| 5043 __ IncrementCounter(&Counters::instance_of_cache, 1); | 5040 __ j(not_equal, &miss); |
| 5044 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5041 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
| 5042 __ mov(eax, Operand::StaticArray( |
| 5043 scratch, times_pointer_size, roots_address)); |
| 5044 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5045 __ bind(&miss); |
| 5046 } |
| 5045 | 5047 |
| 5046 __ bind(&miss); | |
| 5047 // Get the prototype of the function. | 5048 // Get the prototype of the function. |
| 5048 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); | 5049 __ TryGetFunctionPrototype(function, prototype, scratch, &slow); |
| 5049 | 5050 |
| 5050 // Check that the function prototype is a JS object. | 5051 // Check that the function prototype is a JS object. |
| 5051 __ test(prototype, Immediate(kSmiTagMask)); | 5052 __ test(prototype, Immediate(kSmiTagMask)); |
| 5052 __ j(zero, &slow, not_taken); | 5053 __ j(zero, &slow, not_taken); |
| 5053 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); | 5054 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); |
| 5054 | 5055 |
| 5055 // Update the golbal instanceof cache with the current map and function. The | 5056 // Update the global instanceof or call site inlined cache with the current |
| 5056 // cached answer will be set when it is known. | 5057 // map and function. The cached answer will be set when it is known below. |
| 5058 if (!HasCallSiteInlineCheck()) { |
| 5057 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); | 5059 __ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex)); |
| 5058 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); | 5060 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map); |
| 5059 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); | 5061 __ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex)); |
| 5060 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), | 5062 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), |
| 5061 function); | 5063 function); |
| 5064 } else { |
| 5065 // The constants for the code patching are based on no push instructions |
| 5066 // at the call site. |
| 5067 ASSERT(HasArgsInRegisters()); |
| 5068 // Get return address and delta to inlined map check. |
| 5069 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5070 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5071 if (FLAG_debug_code) { |
| 5072 __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1); |
| 5073 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)"); |
| 5074 __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2); |
| 5075 __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)"); |
| 5076 } |
| 5077 __ mov(Operand(scratch, kDeltaToCmpImmediate), map); |
| 5078 } |
| 5062 | 5079 |
| 5063 // Loop through the prototype chain of the object looking for the function | 5080 // Loop through the prototype chain of the object looking for the function |
| 5064 // prototype. | 5081 // prototype. |
| 5065 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); | 5082 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset)); |
| 5066 NearLabel loop, is_instance, is_not_instance; | 5083 NearLabel loop, is_instance, is_not_instance; |
| 5067 __ bind(&loop); | 5084 __ bind(&loop); |
| 5068 __ cmp(scratch, Operand(prototype)); | 5085 __ cmp(scratch, Operand(prototype)); |
| 5069 __ j(equal, &is_instance); | 5086 __ j(equal, &is_instance); |
| 5070 __ cmp(Operand(scratch), Immediate(Factory::null_value())); | 5087 __ cmp(Operand(scratch), Immediate(Factory::null_value())); |
| 5071 __ j(equal, &is_not_instance); | 5088 __ j(equal, &is_not_instance); |
| 5072 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); | 5089 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); |
| 5073 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); | 5090 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset)); |
| 5074 __ jmp(&loop); | 5091 __ jmp(&loop); |
| 5075 | 5092 |
| 5076 __ bind(&is_instance); | 5093 __ bind(&is_instance); |
| 5077 __ IncrementCounter(&Counters::instance_of_stub_true, 1); | 5094 if (!HasCallSiteInlineCheck()) { |
| 5078 __ Set(eax, Immediate(0)); | 5095 __ Set(eax, Immediate(0)); |
| 5079 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5096 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
| 5080 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax); | 5097 __ mov(Operand::StaticArray(scratch, |
| 5081 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5098 times_pointer_size, roots_address), eax); |
| 5099 } else { |
| 5100 // Get return address and delta to inlined map check. |
| 5101 __ mov(eax, Factory::true_value()); |
| 5102 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5103 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5104 if (FLAG_debug_code) { |
| 5105 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); |
| 5106 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 5107 } |
| 5108 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); |
| 5109 if (!ReturnTrueFalseObject()) { |
| 5110 __ Set(eax, Immediate(0)); |
| 5111 } |
| 5112 } |
| 5113 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5082 | 5114 |
| 5083 __ bind(&is_not_instance); | 5115 __ bind(&is_not_instance); |
| 5084 __ IncrementCounter(&Counters::instance_of_stub_false, 1); | 5116 if (!HasCallSiteInlineCheck()) { |
| 5085 __ Set(eax, Immediate(Smi::FromInt(1))); | 5117 __ Set(eax, Immediate(Smi::FromInt(1))); |
| 5086 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); | 5118 __ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex)); |
| 5087 __ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax); | 5119 __ mov(Operand::StaticArray( |
| 5088 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5120 scratch, times_pointer_size, roots_address), eax); |
| 5121 } else { |
| 5122 // Get return address and delta to inlined map check. |
| 5123 __ mov(eax, Factory::false_value()); |
| 5124 __ mov(scratch, Operand(esp, 0 * kPointerSize)); |
| 5125 __ sub(scratch, Operand(esp, 1 * kPointerSize)); |
| 5126 if (FLAG_debug_code) { |
| 5127 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte); |
| 5128 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
| 5129 } |
| 5130 __ mov(Operand(scratch, kDeltaToMovImmediate), eax); |
| 5131 if (!ReturnTrueFalseObject()) { |
| 5132 __ Set(eax, Immediate(Smi::FromInt(1))); |
| 5133 } |
| 5134 } |
| 5135 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5089 | 5136 |
| 5090 Label object_not_null, object_not_null_or_smi; | 5137 Label object_not_null, object_not_null_or_smi; |
| 5091 __ bind(¬_js_object); | 5138 __ bind(¬_js_object); |
| 5092 // Before null, smi and string value checks, check that the rhs is a function | 5139 // Before null, smi and string value checks, check that the rhs is a function |
| 5093 // as for a non-function rhs an exception needs to be thrown. | 5140 // as for a non-function rhs an exception needs to be thrown. |
| 5094 __ test(function, Immediate(kSmiTagMask)); | 5141 __ test(function, Immediate(kSmiTagMask)); |
| 5095 __ j(zero, &slow, not_taken); | 5142 __ j(zero, &slow, not_taken); |
| 5096 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); | 5143 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch); |
| 5097 __ j(not_equal, &slow, not_taken); | 5144 __ j(not_equal, &slow, not_taken); |
| 5098 | 5145 |
| 5099 // Null is not instance of anything. | 5146 // Null is not instance of anything. |
| 5100 __ cmp(object, Factory::null_value()); | 5147 __ cmp(object, Factory::null_value()); |
| 5101 __ j(not_equal, &object_not_null); | 5148 __ j(not_equal, &object_not_null); |
| 5102 __ IncrementCounter(&Counters::instance_of_stub_false_null, 1); | |
| 5103 __ Set(eax, Immediate(Smi::FromInt(1))); | 5149 __ Set(eax, Immediate(Smi::FromInt(1))); |
| 5104 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5150 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5105 | 5151 |
| 5106 __ bind(&object_not_null); | 5152 __ bind(&object_not_null); |
| 5107 // Smi values is not instance of anything. | 5153 // Smi values is not instance of anything. |
| 5108 __ test(object, Immediate(kSmiTagMask)); | 5154 __ test(object, Immediate(kSmiTagMask)); |
| 5109 __ j(not_zero, &object_not_null_or_smi, not_taken); | 5155 __ j(not_zero, &object_not_null_or_smi, not_taken); |
| 5110 __ Set(eax, Immediate(Smi::FromInt(1))); | 5156 __ Set(eax, Immediate(Smi::FromInt(1))); |
| 5111 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5157 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5112 | 5158 |
| 5113 __ bind(&object_not_null_or_smi); | 5159 __ bind(&object_not_null_or_smi); |
| 5114 // String values is not instance of anything. | 5160 // String values is not instance of anything. |
| 5115 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); | 5161 Condition is_string = masm->IsObjectStringType(object, scratch, scratch); |
| 5116 __ j(NegateCondition(is_string), &slow); | 5162 __ j(NegateCondition(is_string), &slow); |
| 5117 __ IncrementCounter(&Counters::instance_of_stub_false_string, 1); | |
| 5118 __ Set(eax, Immediate(Smi::FromInt(1))); | 5163 __ Set(eax, Immediate(Smi::FromInt(1))); |
| 5119 __ ret((args_in_registers() ? 0 : 2) * kPointerSize); | 5164 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5120 | 5165 |
| 5121 // Slow-case: Go through the JavaScript implementation. | 5166 // Slow-case: Go through the JavaScript implementation. |
| 5122 __ bind(&slow); | 5167 __ bind(&slow); |
| 5123 if (args_in_registers()) { | 5168 if (!ReturnTrueFalseObject()) { |
| 5124 // Push arguments below return address. | 5169 // Tail call the builtin which returns 0 or 1. |
| 5125 __ pop(scratch); | 5170 if (HasArgsInRegisters()) { |
| 5171 // Push arguments below return address. |
| 5172 __ pop(scratch); |
| 5173 __ push(object); |
| 5174 __ push(function); |
| 5175 __ push(scratch); |
| 5176 } |
| 5177 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
| 5178 } else { |
| 5179 // Call the builtin and convert 0/1 to true/false. |
| 5180 __ EnterInternalFrame(); |
| 5126 __ push(object); | 5181 __ push(object); |
| 5127 __ push(function); | 5182 __ push(function); |
| 5128 __ push(scratch); | 5183 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); |
| 5184 __ LeaveInternalFrame(); |
| 5185 NearLabel true_value, done; |
| 5186 __ test(eax, Operand(eax)); |
| 5187 __ j(zero, &true_value); |
| 5188 __ mov(eax, Factory::false_value()); |
| 5189 __ jmp(&done); |
| 5190 __ bind(&true_value); |
| 5191 __ mov(eax, Factory::true_value()); |
| 5192 __ bind(&done); |
| 5193 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize); |
| 5129 } | 5194 } |
| 5130 __ IncrementCounter(&Counters::instance_of_slow, 1); | |
| 5131 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | |
| 5132 } | 5195 } |
| 5133 | 5196 |
| 5134 | 5197 |
| 5198 Register InstanceofStub::left() { return eax; } |
| 5199 |
| 5200 |
| 5201 Register InstanceofStub::right() { return edx; } |
| 5202 |
| 5203 |
| 5135 int CompareStub::MinorKey() { | 5204 int CompareStub::MinorKey() { |
| 5136 // Encode the three parameters in a unique 16 bit value. To avoid duplicate | 5205 // Encode the three parameters in a unique 16 bit value. To avoid duplicate |
| 5137 // stubs the never NaN NaN condition is only taken into account if the | 5206 // stubs the never NaN NaN condition is only taken into account if the |
| 5138 // condition is equals. | 5207 // condition is equals. |
| 5139 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); | 5208 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); |
| 5140 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | 5209 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); |
| 5141 return ConditionField::encode(static_cast<unsigned>(cc_)) | 5210 return ConditionField::encode(static_cast<unsigned>(cc_)) |
| 5142 | RegisterField::encode(false) // lhs_ and rhs_ are not used | 5211 | RegisterField::encode(false) // lhs_ and rhs_ are not used |
| 5143 | StrictField::encode(strict_) | 5212 | StrictField::encode(strict_) |
| 5144 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) | 5213 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false) |
| (...skipping 1290 matching lines...) |
| 6435 __ pop(ecx); | 6504 __ pop(ecx); |
| 6436 __ pop(eax); | 6505 __ pop(eax); |
| 6437 __ pop(edx); | 6506 __ pop(edx); |
| 6438 __ push(ecx); | 6507 __ push(ecx); |
| 6439 | 6508 |
| 6440 // Do a tail call to the rewritten stub. | 6509 // Do a tail call to the rewritten stub. |
| 6441 __ jmp(Operand(edi)); | 6510 __ jmp(Operand(edi)); |
| 6442 } | 6511 } |
| 6443 | 6512 |
| 6444 | 6513 |
| 6514 // Loads a indexed element from a pixel array. |
| 6515 void GenerateFastPixelArrayLoad(MacroAssembler* masm, |
| 6516 Register receiver, |
| 6517 Register key, |
| 6518 Register elements, |
| 6519 Register untagged_key, |
| 6520 Register result, |
| 6521 Label* not_pixel_array, |
| 6522 Label* key_not_smi, |
| 6523 Label* out_of_range) { |
| 6524 // Register use: |
| 6525 // receiver - holds the receiver and is unchanged. |
| 6526 // key - holds the key and is unchanged (must be a smi). |
| 6527 // elements - is set to the receiver's elements if |
| 6528 // the receiver doesn't have a pixel array or the |
| 6529 // key is not a smi, otherwise it's the elements' |
| 6530 // external pointer. |
| 6531 // untagged_key - is set to the untagged key |
| 6532 |
| 6533 // Some callers already have verified that the key is a smi. key_not_smi is |
| 6534 // set to NULL as a sentinel for that case. Otherwise, an explicit check |
| 6535 // to ensure the key is a smi must be added. |
| 6536 if (key_not_smi != NULL) { |
| 6537 __ JumpIfNotSmi(key, key_not_smi); |
| 6538 } else { |
| 6539 if (FLAG_debug_code) { |
| 6540 __ AbortIfNotSmi(key); |
| 6541 } |
| 6542 } |
| 6543 __ mov(untagged_key, key); |
| 6544 __ SmiUntag(untagged_key); |
| 6545 |
| 6546 __ mov(elements, FieldOperand(receiver, JSObject::kElementsOffset)); |
| 6547 // By passing NULL as not_pixel_array, callers signal that they have already |
| 6548 // verified that the receiver has pixel array elements. |
| 6549 if (not_pixel_array != NULL) { |
| 6550 __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true); |
| 6551 } else { |
| 6552 if (FLAG_debug_code) { |
| 6553 // Map check should have already made sure that elements is a pixel array. |
| 6554 __ cmp(FieldOperand(elements, HeapObject::kMapOffset), |
| 6555 Immediate(Factory::pixel_array_map())); |
| 6556 __ Assert(equal, "Elements isn't a pixel array"); |
| 6557 } |
| 6558 } |
| 6559 |
| 6560 // Key must be in range. |
| 6561 __ cmp(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset)); |
| 6562 __ j(above_equal, out_of_range); // unsigned check handles negative keys. |
| 6563 |
| 6564 // Perform the indexed load and tag the result as a smi. |
| 6565 __ mov(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset)); |
| 6566 __ movzx_b(result, Operand(elements, untagged_key, times_1, 0)); |
| 6567 __ SmiTag(result); |
| 6568 __ ret(0); |
| 6569 } |
| 6570 |
| 6571 |
| 6572 // Stores an indexed element into a pixel array, clamping the stored value. |
| 6573 void GenerateFastPixelArrayStore(MacroAssembler* masm, |
| 6574 Register receiver, |
| 6575 Register key, |
| 6576 Register value, |
| 6577 Register elements, |
| 6578 Register scratch1, |
| 6579 bool load_elements_from_receiver, |
| 6580 Label* key_not_smi, |
| 6581 Label* value_not_smi, |
| 6582 Label* not_pixel_array, |
| 6583 Label* out_of_range) { |
| 6584 // Register use: |
| 6585 // receiver - holds the receiver and is unchanged unless the |
| 6586 // store succeeds. |
| 6587 // key - holds the key (must be a smi) and is unchanged. |
| 6588 // value - holds the value (must be a smi) and is unchanged. |
| 6589 // elements - holds the element object of the receiver on entry if |
| 6590 // load_elements_from_receiver is false, otherwise used |
| 6591 // internally to store the pixel arrays elements and |
| 6592 // external array pointer. |
| 6593 // |
| 6594 // receiver, key and value remain unmodified until it's guaranteed that the |
| 6595 // store will succeed. |
| 6596 Register external_pointer = elements; |
| 6597 Register untagged_key = scratch1; |
| 6598 Register untagged_value = receiver; // Only set once success guaranteed. |
| 6599 |
| 6600 // Fetch the receiver's elements if the caller hasn't already done so. |
| 6601 if (load_elements_from_receiver) { |
| 6602 __ mov(elements, FieldOperand(receiver, JSObject::kElementsOffset)); |
| 6603 } |
| 6604 |
| 6605 // By passing NULL as not_pixel_array, callers signal that they have already |
| 6606 // verified that the receiver has pixel array elements. |
| 6607 if (not_pixel_array != NULL) { |
| 6608 __ CheckMap(elements, Factory::pixel_array_map(), not_pixel_array, true); |
| 6609 } else { |
| 6610 if (FLAG_debug_code) { |
| 6611 // Map check should have already made sure that elements is a pixel array. |
| 6612 __ cmp(FieldOperand(elements, HeapObject::kMapOffset), |
| 6613 Immediate(Factory::pixel_array_map())); |
| 6614 __ Assert(equal, "Elements isn't a pixel array"); |
| 6615 } |
| 6616 } |
| 6617 |
| 6618 // Some callers already have verified that the key is a smi. key_not_smi is |
| 6619 // set to NULL as a sentinel for that case. Otherwise, an explicit check |
| 6620 // to ensure the key is a smi must be added. |
| 6621 if (key_not_smi != NULL) { |
| 6622 __ JumpIfNotSmi(key, key_not_smi); |
| 6623 } else { |
| 6624 if (FLAG_debug_code) { |
| 6625 __ AbortIfNotSmi(key); |
| 6626 } |
| 6627 } |
| 6628 |
| 6629 // Key must be a smi and it must be in range. |
| 6630 __ mov(untagged_key, key); |
| 6631 __ SmiUntag(untagged_key); |
| 6632 __ cmp(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset)); |
| 6633 __ j(above_equal, out_of_range); // unsigned check handles negative keys. |
| 6634 |
| 6635 // Value must be a smi. |
| 6636 __ JumpIfNotSmi(value, value_not_smi); |
| 6637 __ mov(untagged_value, value); |
| 6638 __ SmiUntag(untagged_value); |
| 6639 |
| 6640 { // Clamp the value to [0..255]. |
| 6641 NearLabel done; |
| 6642 __ test(untagged_value, Immediate(0xFFFFFF00)); |
| 6643 __ j(zero, &done); |
| 6644 __ setcc(negative, untagged_value); // 1 if negative, 0 if positive. |
| 6645 __ dec_b(untagged_value); // 0 if negative, 255 if positive. |
| 6646 __ bind(&done); |
| 6647 } |
| 6648 |
| 6649 __ mov(external_pointer, |
| 6650 FieldOperand(elements, PixelArray::kExternalPointerOffset)); |
| 6651 __ mov_b(Operand(external_pointer, untagged_key, times_1, 0), untagged_value); |
| 6652 __ ret(0); // Return value in eax. |
| 6653 } |
| 6654 |
| 6655 |
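The clamp block in GenerateFastPixelArrayStore is branch-light: after the range test, setcc(negative) produces 1 for negative values and 0 for values above 255, and dec_b turns that into 0x00 or 0xFF respectively. A standalone C++ rendering of the same logic (illustration only, not V8 code):

```cpp
#include <cstdint>
#include <cstdio>

uint8_t ClampToByte(int32_t value) {
  if ((value & 0xFFFFFF00) == 0) {
    return static_cast<uint8_t>(value);       // already in [0, 255]
  }
  uint8_t negative = value < 0 ? 1 : 0;       // setcc(negative, ...)
  return static_cast<uint8_t>(negative - 1);  // dec_b: 1 -> 0, 0 -> 255 (wraps)
}

int main() {
  std::printf("%d %d %d %d\n",
              ClampToByte(-5), ClampToByte(0), ClampToByte(200), ClampToByte(300));
  // prints "0 0 200 255"
  return 0;
}
```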
| 6445 #undef __ | 6656 #undef __ |
| 6446 | 6657 |
| 6447 } } // namespace v8::internal | 6658 } } // namespace v8::internal |
| 6448 | 6659 |
| 6449 #endif // V8_TARGET_ARCH_IA32 | 6660 #endif // V8_TARGET_ARCH_IA32 |