OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 84 matching lines...)
95 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); | 95 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); |
96 } | 96 } |
97 | 97 |
98 | 98 |
99 void FastNewClosureStub::Generate(MacroAssembler* masm) { | 99 void FastNewClosureStub::Generate(MacroAssembler* masm) { |
100 // Create a new closure from the given function info in new | 100 // Create a new closure from the given function info in new |
101 // space. Set the context to the current context in rsi. | 101 // space. Set the context to the current context in rsi. |
102 Counters* counters = masm->isolate()->counters(); | 102 Counters* counters = masm->isolate()->counters(); |
103 | 103 |
104 Label gc; | 104 Label gc; |
105 __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); | 105 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); |
106 | 106 |
107 __ IncrementCounter(counters->fast_new_closure_total(), 1); | 107 __ IncrementCounter(counters->fast_new_closure_total(), 1); |
108 | 108 |
109 // Get the function info from the stack. | 109 // Get the function info from the stack. |
110 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 110 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); |
111 | 111 |
112 int map_index = (language_mode_ == CLASSIC_MODE) | 112 int map_index = (language_mode_ == CLASSIC_MODE) |
113 ? Context::FUNCTION_MAP_INDEX | 113 ? Context::FUNCTION_MAP_INDEX |
114 : Context::STRICT_MODE_FUNCTION_MAP_INDEX; | 114 : Context::STRICT_MODE_FUNCTION_MAP_INDEX; |
115 | 115 |
(...skipping 113 matching lines...)
229 __ PushRoot(Heap::kFalseValueRootIndex); | 229 __ PushRoot(Heap::kFalseValueRootIndex); |
230 __ push(rcx); // Restore return address. | 230 __ push(rcx); // Restore return address. |
231 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 231 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
232 } | 232 } |
233 | 233 |
234 | 234 |
235 void FastNewContextStub::Generate(MacroAssembler* masm) { | 235 void FastNewContextStub::Generate(MacroAssembler* masm) { |
236 // Try to allocate the context in new space. | 236 // Try to allocate the context in new space. |
237 Label gc; | 237 Label gc; |
238 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 238 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
239 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, | 239 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, |
240 rax, rbx, rcx, &gc, TAG_OBJECT); | 240 rax, rbx, rcx, &gc, TAG_OBJECT); |
241 | 241 |
242 // Get the function from the stack. | 242 // Get the function from the stack. |
243 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 243 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
244 | 244 |
245 // Set up the object header. | 245 // Set up the object header. |
246 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); | 246 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); |
247 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 247 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
248 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 248 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
249 | 249 |
250 // Set up the fixed slots. | 250 // Set up the fixed slots. |
(...skipping 24 matching lines...)
275 | 275 |
276 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | 276 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { |
277 // Stack layout on entry: | 277 // Stack layout on entry: |
278 // | 278 // |
279 // [rsp + (1 * kPointerSize)]: function | 279 // [rsp + (1 * kPointerSize)]: function |
280 // [rsp + (2 * kPointerSize)]: serialized scope info | 280 // [rsp + (2 * kPointerSize)]: serialized scope info |
281 | 281 |
282 // Try to allocate the context in new space. | 282 // Try to allocate the context in new space. |
283 Label gc; | 283 Label gc; |
284 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 284 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
285 __ AllocateInNewSpace(FixedArray::SizeFor(length), | 285 __ Allocate(FixedArray::SizeFor(length), |
286 rax, rbx, rcx, &gc, TAG_OBJECT); | 286 rax, rbx, rcx, &gc, TAG_OBJECT); |
287 | 287 |
288 // Get the function from the stack. | 288 // Get the function from the stack. |
289 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 289 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
290 | 290 |
291 // Get the serialized scope info from the stack. | 291 // Get the serialized scope info from the stack. |
292 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); | 292 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); |
293 | 293 |
294 // Set up the object header. | 294 // Set up the object header. |
295 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); | 295 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); |
296 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 296 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
(...skipping 64 matching lines...)
361 size += AllocationSiteInfo::kSize; | 361 size += AllocationSiteInfo::kSize; |
362 } | 362 } |
363 size += elements_size; | 363 size += elements_size; |
364 | 364 |
365 // Allocate both the JS array and the elements array in one big | 365 // Allocate both the JS array and the elements array in one big |
366 // allocation. This avoids multiple limit checks. | 366 // allocation. This avoids multiple limit checks. |
367 AllocationFlags flags = TAG_OBJECT; | 367 AllocationFlags flags = TAG_OBJECT; |
368 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { | 368 if (mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS) { |
369 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); | 369 flags = static_cast<AllocationFlags>(DOUBLE_ALIGNMENT | flags); |
370 } | 370 } |
371 __ AllocateInNewSpace(size, rax, rbx, rdx, fail, flags); | 371 __ Allocate(size, rax, rbx, rdx, fail, flags); |
372 | 372 |
373 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { | 373 if (allocation_site_mode == TRACK_ALLOCATION_SITE) { |
374 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex); | 374 __ LoadRoot(kScratchRegister, Heap::kAllocationSiteInfoMapRootIndex); |
375 __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister); | 375 __ movq(FieldOperand(rax, allocation_info_start), kScratchRegister); |
376 __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx); | 376 __ movq(FieldOperand(rax, allocation_info_start + kPointerSize), rcx); |
377 } | 377 } |
378 | 378 |
379 // Copy the JS array part. | 379 // Copy the JS array part. |
380 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { | 380 for (int i = 0; i < JSArray::kSize; i += kPointerSize) { |
381 if ((i != JSArray::kElementsOffset) || (length == 0)) { | 381 if ((i != JSArray::kElementsOffset) || (length == 0)) { |
(...skipping 889 matching lines...)
1271 // Logical shift right can produce an unsigned int32 that is not | 1271 // Logical shift right can produce an unsigned int32 that is not |
1272 // an int32, and so is not in the smi range. Allocate a heap number | 1272 // an int32, and so is not in the smi range. Allocate a heap number |
1273 // in that case. | 1273 // in that case. |
1274 if (op == Token::SHR) { | 1274 if (op == Token::SHR) { |
1275 __ bind(&non_smi_shr_result); | 1275 __ bind(&non_smi_shr_result); |
1276 Label allocation_failed; | 1276 Label allocation_failed; |
1277 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). | 1277 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). |
1278 // Allocate heap number in new space. | 1278 // Allocate heap number in new space. |
1279 // Not using AllocateHeapNumber macro in order to reuse | 1279 // Not using AllocateHeapNumber macro in order to reuse |
1280 // already loaded heap_number_map. | 1280 // already loaded heap_number_map. |
1281 __ AllocateInNewSpace(HeapNumber::kSize, | 1281 __ Allocate(HeapNumber::kSize, rax, rdx, no_reg, &allocation_failed, |
1282 rax, | 1282 TAG_OBJECT); |
1283 rdx, | |
1284 no_reg, | |
1285 &allocation_failed, | |
1286 TAG_OBJECT); | |
1287 // Set the map. | 1283 // Set the map. |
1288 __ AssertRootValue(heap_number_map, | 1284 __ AssertRootValue(heap_number_map, |
1289 Heap::kHeapNumberMapRootIndex, | 1285 Heap::kHeapNumberMapRootIndex, |
1290 "HeapNumberMap register clobbered."); | 1286 "HeapNumberMap register clobbered."); |
1291 __ movq(FieldOperand(rax, HeapObject::kMapOffset), | 1287 __ movq(FieldOperand(rax, HeapObject::kMapOffset), |
1292 heap_number_map); | 1288 heap_number_map); |
1293 __ cvtqsi2sd(xmm0, rbx); | 1289 __ cvtqsi2sd(xmm0, rbx); |
1294 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); | 1290 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); |
1295 __ Ret(); | 1291 __ Ret(); |
1296 | 1292 |
(...skipping 5398 matching lines...)
6695 #endif | 6691 #endif |
6696 | 6692 |
6697 __ Ret(); | 6693 __ Ret(); |
6698 } | 6694 } |
6699 | 6695 |
6700 #undef __ | 6696 #undef __ |
6701 | 6697 |
6702 } } // namespace v8::internal | 6698 } } // namespace v8::internal |
6703 | 6699 |
6704 #endif // V8_TARGET_ARCH_X64 | 6700 #endif // V8_TARGET_ARCH_X64 |
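
A minimal sketch of the allocation pattern this change converges on, assembled from the FastNewClosureStub call site above. SomeStub is a hypothetical stand-in, and the Allocate signature is inferred from the call sites in this diff rather than from the MacroAssembler header, so treat the exact declaration as an assumption:

// Hypothetical stub illustrating the renamed Allocate helper; the
// register choices and the runtime fallback mirror FastNewClosureStub.
void SomeStub::Generate(MacroAssembler* masm) {
  Label gc;
  // Reserve JSFunction::kSize bytes, jumping to |gc| if allocation
  // fails; rax receives the tagged result, rbx/rcx are scratch.
  __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
  // ... initialize the object via FieldOperand stores ...
  __ Ret();

  __ bind(&gc);
  // Fall back to the runtime allocator when inline allocation fails.
  __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
}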