| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3242 matching lines...) |
| 3253 | 3253 |
| 3254 // Register mapping: | 3254 // Register mapping: |
| 3255 // rax is object map. | 3255 // rax is object map. |
| 3256 // rdx is function. | 3256 // rdx is function. |
| 3257 // rbx is function prototype. | 3257 // rbx is function prototype. |
| 3258 if (!HasCallSiteInlineCheck()) { | 3258 if (!HasCallSiteInlineCheck()) { |
| 3259 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 3259 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
| 3260 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 3260 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
| 3261 } else { | 3261 } else { |
| 3262 // Get return address and delta to inlined map check. | 3262 // Get return address and delta to inlined map check. |
| 3263 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); | 3263 __ MoveReturnAddress(kScratchRegister, Operand(rsp, 0)); |
| 3264 __ subq(kScratchRegister, args.GetArgumentOperand(2)); | 3264 __ subq(kScratchRegister, args.GetArgumentOperand(2)); |
| 3265 if (FLAG_debug_code) { | 3265 if (FLAG_debug_code) { |
| 3266 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); | 3266 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); |
| 3267 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); | 3267 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); |
| 3268 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck); | 3268 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck); |
| 3269 } | 3269 } |
| 3270 __ movq(kScratchRegister, | 3270 __ movq(kScratchRegister, |
| 3271 Operand(kScratchRegister, kOffsetToMapCheckValue)); | 3271 Operand(kScratchRegister, kOffsetToMapCheckValue)); |
| 3272 __ movq(Operand(kScratchRegister, 0), rax); | 3272 __ movq(Operand(kScratchRegister, 0), rax); |
| 3273 } | 3273 } |
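Reviewer note: the hunk above relies on a delta trick that is easy to miss. The call site passes the distance between its return address and the inlined map check as an extra stub argument (read via args.GetArgumentOperand(2)); subtracting it from the live return address recovers the exact code address to patch. A minimal C++ sketch of the idea, not V8's API; PatchInlineMapCheck and offset_to_map_cell are illustrative names standing in for the stub code and kOffsetToMapCheckValue:

```cpp
#include <cstdint>
#include <cstring>

// Illustrative sketch (not V8's API) of the call-site patch in the hunk
// above. 'delta' is the extra argument the compiler baked in: the distance
// from the stub's return address back to the inlined map check.
void PatchInlineMapCheck(uint8_t* return_address, intptr_t delta,
                         int offset_to_map_cell, void* object_map) {
  uint8_t* site = return_address - delta;  // start of the inlined check
  void** cache_cell;                       // the check embeds a pointer to
  std::memcpy(&cache_cell, site + offset_to_map_cell,  // its map cache cell
              sizeof cache_cell);          // ~ movq kScratchRegister, [...]
  *cache_cell = object_map;                // ~ movq [kScratchRegister], rax
}
```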
| (...skipping 20 matching lines...) |
| 3294 // Store bitwise zero in the cache. This is a Smi in GC terms. | 3294 // Store bitwise zero in the cache. This is a Smi in GC terms. |
| 3295 STATIC_ASSERT(kSmiTag == 0); | 3295 STATIC_ASSERT(kSmiTag == 0); |
| 3296 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); | 3296 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); |
| 3297 } else { | 3297 } else { |
| 3298 // Store offset of true in the root array at the inline check site. | 3298 // Store offset of true in the root array at the inline check site. |
| 3299 int true_offset = 0x100 + | 3299 int true_offset = 0x100 + |
| 3300 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; | 3300 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
| 3301 // Assert the biased offset fits in one byte (decoded as a signed disp8). | 3301 // Assert the biased offset fits in one byte (decoded as a signed disp8). |
| 3302 ASSERT(true_offset >= 0 && true_offset < 0x100); | 3302 ASSERT(true_offset >= 0 && true_offset < 0x100); |
| 3303 __ movl(rax, Immediate(true_offset)); | 3303 __ movl(rax, Immediate(true_offset)); |
| 3304 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); | 3304 __ MoveReturnAddress(kScratchRegister, Operand(rsp, 0)); |
| 3305 __ subq(kScratchRegister, args.GetArgumentOperand(2)); | 3305 __ subq(kScratchRegister, args.GetArgumentOperand(2)); |
| 3306 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | 3306 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
| 3307 if (FLAG_debug_code) { | 3307 if (FLAG_debug_code) { |
| 3308 __ movl(rax, Immediate(kWordBeforeResultValue)); | 3308 __ movl(rax, Immediate(kWordBeforeResultValue)); |
| 3309 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 3309 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
| 3310 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); | 3310 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); |
| 3311 } | 3311 } |
| 3312 __ Set(rax, 0); | 3312 __ Set(rax, 0); |
| 3313 } | 3313 } |
| 3314 __ ret((2 + extra_argument_offset) * kPointerSize); | 3314 __ ret((2 + extra_argument_offset) * kPointerSize); |
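Reviewer note: the true_offset arithmetic above (and the mirrored false_offset below) packs a root-array displacement into the single byte written by movb. A worked example, assuming the x64 values kPointerSizeLog2 == 3 and kRootRegisterBias == 128; the root index 4 is illustrative, not Heap::kTrueValueRootIndex's actual value:

```cpp
// Worked example of the byte patched into the inlined root load, assuming
// the x64 values kPointerSizeLog2 == 3 (8-byte pointers) and
// kRootRegisterBias == 128 (the root register points 128 bytes past the
// roots array so signed disp8 operands can reach more entries).
constexpr int kPointerSizeLog2 = 3;
constexpr int kRootRegisterBias = 128;

constexpr int RootOffsetByte(int root_index) {
  // Adding 0x100 biases the result into [0, 0x100), so the stub's ASSERT
  // proves the low byte, read back as a signed disp8 at the call site,
  // equals root_index * 8 - kRootRegisterBias.
  return 0x100 + (root_index << kPointerSizeLog2) - kRootRegisterBias;
}

// Illustrative index 4: byte 0xA0, which the CPU sign-extends to -96,
// i.e. 4 * 8 - 128.
static_assert(RootOffsetByte(4) == 0xA0, "low byte encodes disp8 -96");
```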
| 3315 | 3315 |
| 3316 __ bind(&is_not_instance); | 3316 __ bind(&is_not_instance); |
| 3317 if (!HasCallSiteInlineCheck()) { | 3317 if (!HasCallSiteInlineCheck()) { |
| 3318 // We have to store a non-zero value in the cache. | 3318 // We have to store a non-zero value in the cache. |
| 3319 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 3319 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
| 3320 } else { | 3320 } else { |
| 3321 // Store offset of false in the root array at the inline check site. | 3321 // Store offset of false in the root array at the inline check site. |
| 3322 int false_offset = 0x100 + | 3322 int false_offset = 0x100 + |
| 3323 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; | 3323 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
| 3324 // Assert the biased offset fits in one byte (decoded as a signed disp8). | 3324 // Assert the biased offset fits in one byte (decoded as a signed disp8). |
| 3325 ASSERT(false_offset >= 0 && false_offset < 0x100); | 3325 ASSERT(false_offset >= 0 && false_offset < 0x100); |
| 3326 __ movl(rax, Immediate(false_offset)); | 3326 __ movl(rax, Immediate(false_offset)); |
| 3327 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); | 3327 __ MoveReturnAddress(kScratchRegister, Operand(rsp, 0)); |
| 3328 __ subq(kScratchRegister, args.GetArgumentOperand(2)); | 3328 __ subq(kScratchRegister, args.GetArgumentOperand(2)); |
| 3329 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | 3329 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
| 3330 if (FLAG_debug_code) { | 3330 if (FLAG_debug_code) { |
| 3331 __ movl(rax, Immediate(kWordBeforeResultValue)); | 3331 __ movl(rax, Immediate(kWordBeforeResultValue)); |
| 3332 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 3332 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
| 3333 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); | 3333 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); |
| 3334 } | 3334 } |
| 3335 } | 3335 } |
| 3336 __ ret((2 + extra_argument_offset) * kPointerSize); | 3336 __ ret((2 + extra_argument_offset) * kPointerSize); |
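Reviewer note: both patch paths guard themselves under FLAG_debug_code by re-reading the 32-bit word just before the byte they are about to patch and asserting it matches the instruction bytes known to precede it, so a miscomputed delta trips the Assert instead of silently corrupting code. A hedged sketch of that check; SafeToPatch and expected_marker are illustrative names, with expected_marker standing in for kWordBeforeResultValue:

```cpp
#include <cstdint>
#include <cstring>

// Illustrative sketch of the FLAG_debug_code guard; 'expected_marker'
// stands in for kWordBeforeResultValue, the instruction bytes known to sit
// immediately before the byte being patched.
bool SafeToPatch(const uint8_t* site, int offset_to_result,
                 uint32_t expected_marker) {
  uint32_t word;
  std::memcpy(&word, site + offset_to_result - 4, sizeof word);
  return word == expected_marker;  // false => the delta was miscomputed
}
```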
| 3337 | 3337 |
| (...skipping 2475 matching lines...) |
| 5813 __ bind(&fast_elements_case); | 5813 __ bind(&fast_elements_case); |
| 5814 GenerateCase(masm, FAST_ELEMENTS); | 5814 GenerateCase(masm, FAST_ELEMENTS); |
| 5815 } | 5815 } |
| 5816 | 5816 |
| 5817 | 5817 |
| 5818 #undef __ | 5818 #undef __ |
| 5819 | 5819 |
| 5820 } } // namespace v8::internal | 5820 } } // namespace v8::internal |
| 5821 | 5821 |
| 5822 #endif // V8_TARGET_ARCH_X64 | 5822 #endif // V8_TARGET_ARCH_X64 |