| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 122 matching lines...) |
| 133 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); | 133 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); |
| 134 } | 134 } |
| 135 | 135 |
| 136 | 136 |
| 137 void FastNewClosureStub::Generate(MacroAssembler* masm) { | 137 void FastNewClosureStub::Generate(MacroAssembler* masm) { |
| 138 // Create a new closure from the given function info in new | 138 // Create a new closure from the given function info in new |
| 139 // space. Set the context to the current context in rsi. | 139 // space. Set the context to the current context in rsi. |
| 140 Counters* counters = masm->isolate()->counters(); | 140 Counters* counters = masm->isolate()->counters(); |
| 141 | 141 |
| 142 Label gc; | 142 Label gc; |
| 143 __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); | 143 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); |
| 144 | 144 |
| 145 __ IncrementCounter(counters->fast_new_closure_total(), 1); | 145 __ IncrementCounter(counters->fast_new_closure_total(), 1); |
| 146 | 146 |
| 147 // Get the function info from the stack. | 147 // Get the function info from the stack. |
| 148 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 148 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); |
| 149 | 149 |
| 150 int map_index = (language_mode_ == CLASSIC_MODE) | 150 int map_index = (language_mode_ == CLASSIC_MODE) |
| 151 ? Context::FUNCTION_MAP_INDEX | 151 ? Context::FUNCTION_MAP_INDEX |
| 152 : Context::STRICT_MODE_FUNCTION_MAP_INDEX; | 152 : Context::STRICT_MODE_FUNCTION_MAP_INDEX; |
| 153 | 153 |
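The first renamed call (line 143) sets the pattern for the rest of the patch: only the macro name changes from `AllocateInNewSpace` to `Allocate`, while the argument list keeps the same shape of (object size, result register, two scratch registers, GC bailout label, allocation flags). A stand-in sketch of that call shape, using hypothetical types that merely mimic the MacroAssembler interface rather than the real v8::internal declarations:

```cpp
// Hypothetical stand-ins, not the real v8::internal types; shown only to make
// the shared argument order of the renamed call sites explicit.
struct Register {};
struct Label {};
enum AllocationFlags { TAG_OBJECT = 1 };

// (object size, result, scratch1, scratch2, GC bailout label, flags)
void Allocate(int object_size, Register result, Register scratch1,
              Register scratch2, Label* gc_required, AllocationFlags flags) {
  // The real macro bump-allocates in the heap and jumps to gc_required on failure.
}

int main() {
  Register rax, rbx, rcx;
  Label gc;
  const int kAssumedJSFunctionSize = 72;  // placeholder, not the real JSFunction::kSize
  Allocate(kAssumedJSFunctionSize, rax, rbx, rcx, &gc, TAG_OBJECT);  // as at line 143
}
```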
| (...skipping 113 matching lines...) |
| 267 __ PushRoot(Heap::kFalseValueRootIndex); | 267 __ PushRoot(Heap::kFalseValueRootIndex); |
| 268 __ push(rcx); // Restore return address. | 268 __ push(rcx); // Restore return address. |
| 269 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 269 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
| 270 } | 270 } |
| 271 | 271 |
| 272 | 272 |
| 273 void FastNewContextStub::Generate(MacroAssembler* masm) { | 273 void FastNewContextStub::Generate(MacroAssembler* masm) { |
| 274 // Try to allocate the context in new space. | 274 // Try to allocate the context in new space. |
| 275 Label gc; | 275 Label gc; |
| 276 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 276 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 277 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, | 277 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, |
| 278 rax, rbx, rcx, &gc, TAG_OBJECT); | 278 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 279 | 279 |
| 280 // Get the function from the stack. | 280 // Get the function from the stack. |
| 281 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 281 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
| 282 | 282 |
| 283 // Set up the object header. | 283 // Set up the object header. |
| 284 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); | 284 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); |
| 285 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 285 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
| 286 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 286 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
| 287 | 287 |
| 288 // Set up the fixed slots. | 288 // Set up the fixed slots. |
| (...skipping 24 matching lines...) |
| 313 | 313 |
| 314 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | 314 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { |
| 315 // Stack layout on entry: | 315 // Stack layout on entry: |
| 316 // | 316 // |
| 317 // [rsp + (1 * kPointerSize)]: function | 317 // [rsp + (1 * kPointerSize)]: function |
| 318 // [rsp + (2 * kPointerSize)]: serialized scope info | 318 // [rsp + (2 * kPointerSize)]: serialized scope info |
| 319 | 319 |
| 320 // Try to allocate the context in new space. | 320 // Try to allocate the context in new space. |
| 321 Label gc; | 321 Label gc; |
| 322 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 322 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 323 __ AllocateInNewSpace(FixedArray::SizeFor(length), | 323 __ Allocate(FixedArray::SizeFor(length), |
| 324 rax, rbx, rcx, &gc, TAG_OBJECT); | 324 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 325 | 325 |
| 326 // Get the function from the stack. | 326 // Get the function from the stack. |
| 327 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 327 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
| 328 | 328 |
| 329 // Get the serialized scope info from the stack. | 329 // Get the serialized scope info from the stack. |
| 330 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); | 330 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); |
| 331 | 331 |
| 332 // Set up the object header. | 332 // Set up the object header. |
| 333 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); | 333 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); |
| 334 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 334 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
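This stub requests `FixedArray::SizeFor(length)` instead of spelling the arithmetic out. Under its usual definition that helper computes the same header-plus-slots size as the explicit expression in FastNewContextStub above; a hedged sketch of that equivalence, with placeholder constants rather than the real definitions from objects.h:

```cpp
// Hedged sketch: a plausible shape for FixedArray::SizeFor on x64, shown only
// to relate it to "(length * kPointerSize) + FixedArray::kHeaderSize" above.
#include <cassert>

constexpr int kPointerSize = 8;               // x64 word size
constexpr int kFixedArrayHeaderSize = 2 * 8;  // assumed: map + length words

constexpr int SizeFor(int length) {
  return kFixedArrayHeaderSize + length * kPointerSize;
}

int main() {
  // Same result as the explicit arithmetic for any slot count.
  assert(SizeFor(4) == 4 * kPointerSize + kFixedArrayHeaderSize);
  return 0;
}
```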
| (...skipping 64 matching lines...) |
| 399 size += AllocationSiteInfo::kSize; | 399 size += AllocationSiteInfo::kSize; |
| 400 } | 400 } |
| 401 size += elements_size; | 401 size += elements_size; |
| 402 | 402 |
| 403 // Allocate both the JS array and the elements array in one big | 403 // Allocate both the JS array and the elements array in one big |
| 404 // allocation. This avoids multiple limit checks. | 404 // allocation. This avoids multiple limit checks. |
| 405 AllocationFlags flags = TAG_OBJECT; | 405 AllocationFlags flags = TAG_OBJECT; |
| 406 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { | 406 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { |
| 407 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); | 407 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); |
| 408 } | 408 } |
| 409 __ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags); | 409 __ Allocate(size, rax, rbx, rdx, fail, flags); |
| 410 | 410 |
| 411 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { | 411 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { |
| 412 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex); | 412 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex); |
| 413 __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister); | 413 __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister); |
| 414 __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx); | 414 __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx); |
| 415 } | 415 } |
| 416 | 416 |
| 417 // Copy the JS array part. | 417 // Copy the JS array part. |
| 418 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 418 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
| 419 if ((i != JSArray::kElementsOffset) || (length == 0)) { | 419 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
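Two details in this hunk are worth spelling out: the JS array, its elements, and (optionally) the AllocationSiteInfo are carved out of one allocation so only a single limit check is needed, and combining enum flags with `|` in C++ yields an `int`, which is why line 407 casts the result back to `AllocationFlags`. A minimal illustration of that flag-combining pattern, with placeholder enumerator values rather than V8's real AllocationFlags definition:

```cpp
// Illustration of the flag-combining pattern at line 407. The enumerators and
// their values are placeholders, not V8's real AllocationFlags definition.
enum AllocationFlags {
  NO_ALLOCATION_FLAGS = 0,
  TAG_OBJECT = 1 << 0,
  DOUBLE_ALIGNMENT = 1 << 1
};

int main() {
  AllocationFlags flags = TAG_OBJECT;
  // operator| on two enumerators yields an int, so a cast back to the enum
  // type is required -- exactly the shape used in the stub.
  flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags);
  return flags == (TAG_OBJECT | DOUBLE_ALIGNMENT) ? 0 : 1;
}
```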
| (...skipping 889 matching lines...) |
| 1309 // Logical shift right can produce an unsigned int32 that is not | 1309 // Logical shift right can produce an unsigned int32 that is not |
| 1310 // an int32, and so is not in the smi range. Allocate a heap number | 1310 // an int32, and so is not in the smi range. Allocate a heap number |
| 1311 // in that case. | 1311 // in that case. |
| 1312 if (op == Token::SHR) { | 1312 if (op == Token::SHR) { |
| 1313 __ bind(&non_smi_shr_result); | 1313 __ bind(&non_smi_shr_result); |
| 1314 Label allocation_failed; | 1314 Label allocation_failed; |
| 1315 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). | 1315 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). |
| 1316 // Allocate heap number in new space. | 1316 // Allocate heap number in new space. |
| 1317 // Not using AllocateHeapNumber macro in order to reuse | 1317 // Not using AllocateHeapNumber macro in order to reuse |
| 1318 // already loaded heap_number_map. | 1318 // already loaded heap_number_map. |
| 1319 __ AllocateInNewSpace(HeapNumber::kSize, | 1319 __ Allocate(HeapNumber::kSize, rax, rdx, no_reg, &allocation_failed, |
| 1320 rax, | 1320 TAG_OBJECT); |
| 1321 rdx, | |
| 1322 no_reg, | |
| 1323 &allocation_failed, | |
| 1324 TAG_OBJECT); | |
| 1325 // Set the map. | 1321 // Set the map. |
| 1326 __ AssertRootValue(heap_number_map, | 1322 __ AssertRootValue(heap_number_map, |
| 1327 Heap::kHeapNumberMapRootIndex, | 1323 Heap::kHeapNumberMapRootIndex, |
| 1328 "HeapNumberMap register clobbered."); | 1324 "HeapNumberMap register clobbered."); |
| 1329 __ movq(FieldOperand(rax, HeapObject::kMapOffset), | 1325 __ movq(FieldOperand(rax, HeapObject::kMapOffset), |
| 1330 heap_number_map); | 1326 heap_number_map); |
| 1331 __ cvtqsi2sd(xmm0, rbx); | 1327 __ cvtqsi2sd(xmm0, rbx); |
| 1332 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); | 1328 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); |
| 1333 __ Ret(); | 1329 __ Ret(); |
| 1334 | 1330 |
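The comment at lines 1309-1311 is the key to this hunk: `>>>` (Token::SHR) produces an unsigned 32-bit result, and anything at or above 2^31 is not representable as a signed int32, hence not as a smi, so the stub boxes it in a freshly allocated HeapNumber, zero-extending via `movl` and converting the 64-bit value to double with `cvtqsi2sd`. A plain C++ illustration of the boundary case, with no V8 types involved:

```cpp
// Why the SHR path may need a HeapNumber: the unsigned 32-bit result can
// exceed INT32_MAX (e.g. the JS expression -2147483648 >>> 0).
#include <cstdint>
#include <cstdio>

int main() {
  uint32_t x = 0x80000000u;
  uint32_t shr = x >> 0;  // logical shift right keeps the value unsigned

  // 2147483648 does not fit in int32_t, so it cannot be a smi.
  std::printf("result = %u, fits int32: %s\n",
              static_cast<unsigned>(shr), shr <= INT32_MAX ? "yes" : "no");

  // Mirror of the stub's fix: zero-extend to 64 bits (movl) and convert the
  // signed 64-bit value to double (cvtqsi2sd) before storing it.
  int64_t widened = static_cast<int64_t>(shr);
  double boxed = static_cast<double>(widened);
  std::printf("boxed as double: %.1f\n", boxed);
}
```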
| (...skipping 5550 matching lines...) |
| 6885 #endif | 6881 #endif |
| 6886 | 6882 |
| 6887 __ Ret(); | 6883 __ Ret(); |
| 6888 } | 6884 } |
| 6889 | 6885 |
| 6890 #undef __ | 6886 #undef __ |
| 6891 | 6887 |
| 6892 } } // namespace v8::internal | 6888 } } // namespace v8::internal |
| 6893 | 6889 |
| 6894 #endif // V8_TARGET_ARCH_X64 | 6890 #endif // V8_TARGET_ARCH_X64 |