| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 106 matching lines...) |
| 117 void FastNewContextStub::Generate(MacroAssembler* masm) { | 117 void FastNewContextStub::Generate(MacroAssembler* masm) { |
| 118 // Try to allocate the context in new space. | 118 // Try to allocate the context in new space. |
| 119 Label gc; | 119 Label gc; |
| 120 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 120 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 121 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, | 121 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, |
| 122 rax, rbx, rcx, &gc, TAG_OBJECT); | 122 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 123 | 123 |
| 124 // Get the function from the stack. | 124 // Get the function from the stack. |
| 125 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 125 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
| 126 | 126 |
| 127 // Setup the object header. | 127 // Set up the object header. |
| 128 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); | 128 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); |
| 129 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 129 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
| 130 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 130 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
| 131 | 131 |
| 132 // Setup the fixed slots. | 132 // Set up the fixed slots. |
| 133 __ Set(rbx, 0); // Set to NULL. | 133 __ Set(rbx, 0); // Set to NULL. |
| 134 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); | 134 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); |
| 135 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); | 135 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); |
| 136 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); | 136 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); |
| 137 | 137 |
| 138 // Copy the global object from the previous context. | 138 // Copy the global object from the previous context. |
| 139 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 139 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 140 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx); | 140 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx); |
| 141 | 141 |
| 142 // Initialize the rest of the slots to undefined. | 142 // Initialize the rest of the slots to undefined. |
| (...skipping 23 matching lines...) |
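
For orientation: both FastNewContextStub (above) and FastNewBlockContextStub (below) allocate the context as a fixed-array-shaped object, so the byte size is the fixed-array header plus one pointer per slot. A minimal C++ sketch of that computation, using placeholder values for the V8 constants (the real definitions live in contexts.h / objects.h; treat the numbers below as assumptions):

```cpp
#include <cstdio>

int main() {
  const int kPointerSize = 8;                          // x64
  const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length (assumed)
  const int kMinContextSlots = 4;                      // CLOSURE, PREVIOUS, EXTENSION, GLOBAL (assumed)

  int slots = 3;                                       // example: three context-allocated locals
  int length = slots + kMinContextSlots;

  // FastNewContextStub spells the size out by hand...
  int by_hand = length * kPointerSize + kFixedArrayHeaderSize;
  // ...while FastNewBlockContextStub calls FixedArray::SizeFor(length),
  // which computes the same quantity.
  int size_for = kFixedArrayHeaderSize + length * kPointerSize;

  std::printf("by hand: %d, SizeFor: %d\n", by_hand, size_for);
  return 0;
}
```
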
| 166 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 166 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 167 __ AllocateInNewSpace(FixedArray::SizeFor(length), | 167 __ AllocateInNewSpace(FixedArray::SizeFor(length), |
| 168 rax, rbx, rcx, &gc, TAG_OBJECT); | 168 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 169 | 169 |
| 170 // Get the function from the stack. | 170 // Get the function from the stack. |
| 171 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 171 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
| 172 | 172 |
| 173 // Get the serialized scope info from the stack. | 173 // Get the serialized scope info from the stack. |
| 174 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); | 174 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); |
| 175 | 175 |
| 176 // Setup the object header. | 176 // Set up the object header. |
| 177 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); | 177 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); |
| 178 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 178 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
| 179 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 179 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
| 180 | 180 |
| 181 // If this block context is nested in the global context we get a smi | 181 // If this block context is nested in the global context we get a smi |
| 182 // sentinel instead of a function. The block context should get the | 182 // sentinel instead of a function. The block context should get the |
| 183 // canonical empty function of the global context as its closure which | 183 // canonical empty function of the global context as its closure which |
| 184 // we still have to look up. | 184 // we still have to look up. |
| 185 Label after_sentinel; | 185 Label after_sentinel; |
| 186 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); | 186 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); |
| 187 if (FLAG_debug_code) { | 187 if (FLAG_debug_code) { |
| 188 const char* message = "Expected 0 as a Smi sentinel"; | 188 const char* message = "Expected 0 as a Smi sentinel"; |
| 189 __ cmpq(rcx, Immediate(0)); | 189 __ cmpq(rcx, Immediate(0)); |
| 190 __ Assert(equal, message); | 190 __ Assert(equal, message); |
| 191 } | 191 } |
| 192 __ movq(rcx, GlobalObjectOperand()); | 192 __ movq(rcx, GlobalObjectOperand()); |
| 193 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset)); | 193 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset)); |
| 194 __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); | 194 __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); |
| 195 __ bind(&after_sentinel); | 195 __ bind(&after_sentinel); |
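
The sentinel handling just above is easier to see in scalar form: a block context created at global scope receives smi 0 instead of a closure, and the stub substitutes the global context's canonical closure. A hedged sketch with illustrative-only types (not the real V8 classes):

```cpp
#include <cstdint>

struct Object {};
struct JSFunction : Object {};
struct GlobalContext { JSFunction* closure; };

// V8 tags values in the low bit: a cleared bit means smi, so the zero
// sentinel pushed for a global-scope block context is a smi.
bool IsSmi(const Object* value) {
  return (reinterpret_cast<uintptr_t>(value) & 1) == 0;
}

// Mirrors the after_sentinel check above: use the real closure if one was
// passed, otherwise fall back to the global context's empty function.
JSFunction* ResolveClosure(Object* maybe_function, GlobalContext* global) {
  if (IsSmi(maybe_function)) return global->closure;
  return static_cast<JSFunction*>(maybe_function);
}
```
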
| 196 | 196 |
| 197 // Setup the fixed slots. | 197 // Set up the fixed slots. |
| 198 __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); | 198 __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); |
| 199 __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); | 199 __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); |
| 200 __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); | 200 __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); |
| 201 | 201 |
| 202 // Copy the global object from the previous context. | 202 // Copy the global object from the previous context. |
| 203 __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX)); | 203 __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX)); |
| 204 __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx); | 204 __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx); |
| 205 | 205 |
| 206 // Initialize the rest of the slots to the hole value. | 206 // Initialize the rest of the slots to the hole value. |
| 207 __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex); | 207 __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex); |
| (...skipping 2184 matching lines...) |
| 2392 // rax = address of new object (tagged) | 2392 // rax = address of new object (tagged) |
| 2393 // rbx = mapped parameter count (untagged) | 2393 // rbx = mapped parameter count (untagged) |
| 2394 // rcx = argument count (untagged) | 2394 // rcx = argument count (untagged) |
| 2395 // rdi = address of boilerplate object (tagged) | 2395 // rdi = address of boilerplate object (tagged) |
| 2396 // Copy the JS object part. | 2396 // Copy the JS object part. |
| 2397 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { | 2397 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) { |
| 2398 __ movq(rdx, FieldOperand(rdi, i)); | 2398 __ movq(rdx, FieldOperand(rdi, i)); |
| 2399 __ movq(FieldOperand(rax, i), rdx); | 2399 __ movq(FieldOperand(rax, i), rdx); |
| 2400 } | 2400 } |
| 2401 | 2401 |
| 2402 // Setup the callee in-object property. | 2402 // Set up the callee in-object property. |
| 2403 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); | 2403 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); |
| 2404 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); | 2404 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); |
| 2405 __ movq(FieldOperand(rax, JSObject::kHeaderSize + | 2405 __ movq(FieldOperand(rax, JSObject::kHeaderSize + |
| 2406 Heap::kArgumentsCalleeIndex * kPointerSize), | 2406 Heap::kArgumentsCalleeIndex * kPointerSize), |
| 2407 rdx); | 2407 rdx); |
| 2408 | 2408 |
| 2409 // Use the length (smi tagged) and set that as an in-object property too. | 2409 // Use the length (smi tagged) and set that as an in-object property too. |
| 2410 // Note: rcx is tagged from here on. | 2410 // Note: rcx is tagged from here on. |
| 2411 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 2411 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
| 2412 __ Integer32ToSmi(rcx, rcx); | 2412 __ Integer32ToSmi(rcx, rcx); |
| 2413 __ movq(FieldOperand(rax, JSObject::kHeaderSize + | 2413 __ movq(FieldOperand(rax, JSObject::kHeaderSize + |
| 2414 Heap::kArgumentsLengthIndex * kPointerSize), | 2414 Heap::kArgumentsLengthIndex * kPointerSize), |
| 2415 rcx); | 2415 rcx); |
| 2416 | 2416 |
| 2417 // Setup the elements pointer in the allocated arguments object. | 2417 // Set up the elements pointer in the allocated arguments object. |
| 2418 // If we allocated a parameter map, edi will point there, otherwise to the | 2418 // If we allocated a parameter map, edi will point there, otherwise to the |
| 2419 // backing store. | 2419 // backing store. |
| 2420 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); | 2420 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); |
| 2421 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); | 2421 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); |
| 2422 | 2422 |
| 2423 // rax = address of new object (tagged) | 2423 // rax = address of new object (tagged) |
| 2424 // rbx = mapped parameter count (untagged) | 2424 // rbx = mapped parameter count (untagged) |
| 2425 // rcx = argument count (tagged) | 2425 // rcx = argument count (tagged) |
| 2426 // rdi = address of parameter map or backing store (tagged) | 2426 // rdi = address of parameter map or backing store (tagged) |
| 2427 | 2427 |
| (...skipping 186 matching lines...) |
| 2614 rcx); | 2614 rcx); |
| 2615 | 2615 |
| 2616 // If there are no actual arguments, we're done. | 2616 // If there are no actual arguments, we're done. |
| 2617 Label done; | 2617 Label done; |
| 2618 __ testq(rcx, rcx); | 2618 __ testq(rcx, rcx); |
| 2619 __ j(zero, &done); | 2619 __ j(zero, &done); |
| 2620 | 2620 |
| 2621 // Get the parameters pointer from the stack. | 2621 // Get the parameters pointer from the stack. |
| 2622 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); | 2622 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); |
| 2623 | 2623 |
| 2624 // Setup the elements pointer in the allocated arguments object and | 2624 // Set up the elements pointer in the allocated arguments object and |
| 2625 // initialize the header in the elements fixed array. | 2625 // initialize the header in the elements fixed array. |
| 2626 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); | 2626 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); |
| 2627 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); | 2627 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); |
| 2628 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); | 2628 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); |
| 2629 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); | 2629 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); |
| 2630 | 2630 |
| 2631 | 2631 |
| 2632 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); | 2632 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); |
| 2633 // Untag the length for the loop below. | 2633 // Untag the length for the loop below. |
| 2634 __ SmiToInteger64(rcx, rcx); | 2634 __ SmiToInteger64(rcx, rcx); |
| (...skipping 1300 matching lines...) |
| 3935 __ bind(&throw_normal_exception); | 3935 __ bind(&throw_normal_exception); |
| 3936 GenerateThrowTOS(masm); | 3936 GenerateThrowTOS(masm); |
| 3937 } | 3937 } |
| 3938 | 3938 |
| 3939 | 3939 |
| 3940 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { | 3940 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { |
| 3941 Label invoke, handler_entry, exit; | 3941 Label invoke, handler_entry, exit; |
| 3942 Label not_outermost_js, not_outermost_js_2; | 3942 Label not_outermost_js, not_outermost_js_2; |
| 3943 { // NOLINT. Scope block confuses linter. | 3943 { // NOLINT. Scope block confuses linter. |
| 3944 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); | 3944 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); |
| 3945 // Setup frame. | 3945 // Set up frame. |
| 3946 __ push(rbp); | 3946 __ push(rbp); |
| 3947 __ movq(rbp, rsp); | 3947 __ movq(rbp, rsp); |
| 3948 | 3948 |
| 3949 // Push the stack frame type marker twice. | 3949 // Push the stack frame type marker twice. |
| 3950 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; | 3950 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; |
| 3951 // Scratch register is neither callee-save, nor an argument register on any | 3951 // Scratch register is neither callee-save, nor an argument register on any |
| 3952 // platform. It's free to use at this point. | 3952 // platform. It's free to use at this point. |
| 3953 // Cannot use smi-register for loading yet. | 3953 // Cannot use smi-register for loading yet. |
| 3954 __ movq(kScratchRegister, | 3954 __ movq(kScratchRegister, |
| 3955 reinterpret_cast<uint64_t>(Smi::FromInt(marker)), | 3955 reinterpret_cast<uint64_t>(Smi::FromInt(marker)), |
| (...skipping 193 matching lines...) |
| 4149 // rax is object map. | 4149 // rax is object map. |
| 4150 // rdx is function. | 4150 // rdx is function. |
| 4151 // rbx is function prototype. | 4151 // rbx is function prototype. |
| 4152 if (!HasCallSiteInlineCheck()) { | 4152 if (!HasCallSiteInlineCheck()) { |
| 4153 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 4153 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
| 4154 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 4154 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
| 4155 } else { | 4155 } else { |
| 4156 // Get return address and delta to inlined map check. | 4156 // Get return address and delta to inlined map check. |
| 4157 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | 4157 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
| 4158 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | 4158 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
| 4159 __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax); | |
| 4160 if (FLAG_debug_code) { | 4159 if (FLAG_debug_code) { |
| 4161 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); | 4160 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); |
| 4162 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); | 4161 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); |
| 4163 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); | 4162 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); |
| 4164 } | 4163 } |
| 4164 __ movq(kScratchRegister, |
| 4165 Operand(kScratchRegister, kOffsetToMapCheckValue)); |
| 4166 __ movq(Operand(kScratchRegister, 0), rax); |
| 4165 } | 4167 } |
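
The behavioral change in this hunk: the old code stored the map directly into the word at kOffsetToMapCheckValue from the inlined check, while the new code treats that word as the address of a cell and stores the map through it (the extra movq pair). A hedged C++ analogue, with illustrative names only:

```cpp
#include <cstddef>
#include <cstdint>

// Old behaviour: write the map straight into the code-relative slot.
void StoreMapOld(uint8_t* map_check_site, std::ptrdiff_t offset_to_value,
                 uintptr_t map) {
  *reinterpret_cast<uintptr_t*>(map_check_site + offset_to_value) = map;
}

// New behaviour: the slot holds a pointer to a cell; load it first,
// then store the map through the cell.
void StoreMapNew(uint8_t* map_check_site, std::ptrdiff_t offset_to_value,
                 uintptr_t map) {
  uintptr_t* cell =
      *reinterpret_cast<uintptr_t**>(map_check_site + offset_to_value);
  *cell = map;
}
```
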
| 4166 | 4168 |
| 4167 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); | 4169 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); |
| 4168 | 4170 |
| 4169 // Loop through the prototype chain looking for the function prototype. | 4171 // Loop through the prototype chain looking for the function prototype. |
| 4170 Label loop, is_instance, is_not_instance; | 4172 Label loop, is_instance, is_not_instance; |
| 4171 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); | 4173 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); |
| 4172 __ bind(&loop); | 4174 __ bind(&loop); |
| 4173 __ cmpq(rcx, rbx); | 4175 __ cmpq(rcx, rbx); |
| 4174 __ j(equal, &is_instance, Label::kNear); | 4176 __ j(equal, &is_instance, Label::kNear); |
| (...skipping 777 matching lines...) |
| 4952 __ movq(rax, result); | 4954 __ movq(rax, result); |
| 4953 } | 4955 } |
| 4954 } | 4956 } |
| 4955 | 4957 |
| 4956 | 4958 |
| 4957 void StringHelper::GenerateHashInit(MacroAssembler* masm, | 4959 void StringHelper::GenerateHashInit(MacroAssembler* masm, |
| 4958 Register hash, | 4960 Register hash, |
| 4959 Register character, | 4961 Register character, |
| 4960 Register scratch) { | 4962 Register scratch) { |
| 4961 // hash = (seed + character) + ((seed + character) << 10); | 4963 // hash = (seed + character) + ((seed + character) << 10); |
| 4962 __ LoadRoot(scratch, Heap::kStringHashSeedRootIndex); | 4964 __ LoadRoot(scratch, Heap::kHashSeedRootIndex); |
| 4963 __ SmiToInteger32(scratch, scratch); | 4965 __ SmiToInteger32(scratch, scratch); |
| 4964 __ addl(scratch, character); | 4966 __ addl(scratch, character); |
| 4965 __ movl(hash, scratch); | 4967 __ movl(hash, scratch); |
| 4966 __ shll(scratch, Immediate(10)); | 4968 __ shll(scratch, Immediate(10)); |
| 4967 __ addl(hash, scratch); | 4969 __ addl(hash, scratch); |
| 4968 // hash ^= hash >> 6; | 4970 // hash ^= hash >> 6; |
| 4969 __ movl(scratch, hash); | 4971 __ movl(scratch, hash); |
| 4970 __ shrl(scratch, Immediate(6)); | 4972 __ shrl(scratch, Immediate(6)); |
| 4971 __ xorl(hash, scratch); | 4973 __ xorl(hash, scratch); |
| 4972 } | 4974 } |
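
The comments spell out the arithmetic GenerateHashInit performs; as a scalar sketch (the seed is loaded from a heap root at runtime, so it is just a parameter here):

```cpp
#include <cstdint>

// Per-character hash initialization, following the commented formulas above.
uint32_t HashInit(uint32_t seed, uint32_t character) {
  uint32_t hash = seed + character;
  hash += hash << 10;   // hash = (seed + character) + ((seed + character) << 10)
  hash ^= hash >> 6;    // hash ^= hash >> 6
  return hash;
}
```
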
| (...skipping 23 matching lines...) |
| 4996 __ leal(hash, Operand(hash, hash, times_8, 0)); | 4998 __ leal(hash, Operand(hash, hash, times_8, 0)); |
| 4997 // hash ^= hash >> 11; | 4999 // hash ^= hash >> 11; |
| 4998 __ movl(scratch, hash); | 5000 __ movl(scratch, hash); |
| 4999 __ shrl(scratch, Immediate(11)); | 5001 __ shrl(scratch, Immediate(11)); |
| 5000 __ xorl(hash, scratch); | 5002 __ xorl(hash, scratch); |
| 5001 // hash += hash << 15; | 5003 // hash += hash << 15; |
| 5002 __ movl(scratch, hash); | 5004 __ movl(scratch, hash); |
| 5003 __ shll(scratch, Immediate(15)); | 5005 __ shll(scratch, Immediate(15)); |
| 5004 __ addl(hash, scratch); | 5006 __ addl(hash, scratch); |
| 5005 | 5007 |
| 5006 uint32_t kHashShiftCutOffMask = (1 << (32 - String::kHashShift)) - 1; | 5008 __ andl(hash, Immediate(String::kHashBitMask)); |
| 5007 __ andl(hash, Immediate(kHashShiftCutOffMask)); | |
| 5008 | 5009 |
| 5009 // if (hash == 0) hash = 27; | 5010 // if (hash == 0) hash = 27; |
| 5010 Label hash_not_zero; | 5011 Label hash_not_zero; |
| 5011 __ j(not_zero, &hash_not_zero); | 5012 __ j(not_zero, &hash_not_zero); |
| 5012 __ Set(hash, 27); | 5013 __ Set(hash, StringHasher::kZeroHash); |
| 5013 __ bind(&hash_not_zero); | 5014 __ bind(&hash_not_zero); |
| 5014 } | 5015 } |
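
GenerateHashGetHash finalizes the running hash; the diff swaps a locally computed mask for String::kHashBitMask and the literal 27 for StringHasher::kZeroHash (the same value, per the comment). A scalar sketch, with the mask left as a parameter since its exact value is not shown here:

```cpp
#include <cstdint>

// Hash finalization, following the commented formulas above.
uint32_t HashGetHash(uint32_t hash, uint32_t hash_bit_mask) {
  hash += hash << 3;           // leal hash, [hash + hash*8]  ==  hash += hash << 3
  hash ^= hash >> 11;          // hash ^= hash >> 11
  hash += hash << 15;          // hash += hash << 15
  hash &= hash_bit_mask;       // keep only the hash-field bits (String::kHashBitMask)
  if (hash == 0) hash = 27;    // StringHasher::kZeroHash, per the comment above
  return hash;
}
```
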
| 5015 | 5016 |
| 5016 void SubStringStub::Generate(MacroAssembler* masm) { | 5017 void SubStringStub::Generate(MacroAssembler* masm) { |
| 5017 Label runtime; | 5018 Label runtime; |
| 5018 | 5019 |
| 5019 // Stack frame on entry. | 5020 // Stack frame on entry. |
| 5020 // rsp[0]: return address | 5021 // rsp[0]: return address |
| 5021 // rsp[8]: to | 5022 // rsp[8]: to |
| 5022 // rsp[16]: from | 5023 // rsp[16]: from |
| (...skipping 50 matching lines...) |
| 5073 FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1)); | 5074 FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1)); |
| 5074 | 5075 |
| 5075 // Try to lookup two character string in symbol table. | 5076 // Try to lookup two character string in symbol table. |
| 5076 Label make_two_character_string; | 5077 Label make_two_character_string; |
| 5077 StringHelper::GenerateTwoCharacterSymbolTableProbe( | 5078 StringHelper::GenerateTwoCharacterSymbolTableProbe( |
| 5078 masm, rbx, rdi, r9, r11, r14, r15, &make_two_character_string); | 5079 masm, rbx, rdi, r9, r11, r14, r15, &make_two_character_string); |
| 5079 __ IncrementCounter(counters->sub_string_native(), 1); | 5080 __ IncrementCounter(counters->sub_string_native(), 1); |
| 5080 __ ret(3 * kPointerSize); | 5081 __ ret(3 * kPointerSize); |
| 5081 | 5082 |
| 5082 __ bind(&make_two_character_string); | 5083 __ bind(&make_two_character_string); |
| 5083 // Setup registers for allocating the two character string. | 5084 // Set up registers for allocating the two character string. |
| 5084 __ movzxwq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize)); | 5085 __ movzxwq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize)); |
| 5085 __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime); | 5086 __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime); |
| 5086 __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx); | 5087 __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx); |
| 5087 __ IncrementCounter(counters->sub_string_native(), 1); | 5088 __ IncrementCounter(counters->sub_string_native(), 1); |
| 5088 __ ret(3 * kPointerSize); | 5089 __ ret(3 * kPointerSize); |
| 5089 | 5090 |
| 5090 __ bind(&result_longer_than_two); | 5091 __ bind(&result_longer_than_two); |
| 5091 // rax: string | 5092 // rax: string |
| 5092 // rbx: instance type | 5093 // rbx: instance type |
| 5093 // rcx: sub string length | 5094 // rcx: sub string length |
| (...skipping 1140 matching lines...) |
| 6234 xmm0, | 6235 xmm0, |
| 6235 &slow_elements); | 6236 &slow_elements); |
| 6236 __ ret(0); | 6237 __ ret(0); |
| 6237 } | 6238 } |
| 6238 | 6239 |
| 6239 #undef __ | 6240 #undef __ |
| 6240 | 6241 |
| 6241 } } // namespace v8::internal | 6242 } } // namespace v8::internal |
| 6242 | 6243 |
| 6243 #endif // V8_TARGET_ARCH_X64 | 6244 #endif // V8_TARGET_ARCH_X64 |