| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 73 matching lines...) | |
| 84 __ push(rdx); | 84 __ push(rdx); |
| 85 __ Push(Factory::false_value()); | 85 __ Push(Factory::false_value()); |
| 86 __ push(rcx); // Restore return address. | 86 __ push(rcx); // Restore return address. |
| 87 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 87 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
| 88 } | 88 } |
| 89 | 89 |
| 90 | 90 |
| 91 void FastNewContextStub::Generate(MacroAssembler* masm) { | 91 void FastNewContextStub::Generate(MacroAssembler* masm) { |
| 92 // Try to allocate the context in new space. | 92 // Try to allocate the context in new space. |
| 93 Label gc; | 93 Label gc; |
| 94 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 94 __ AllocateInNewSpace((slots_ * kPointerSize) + FixedArray::kHeaderSize, |
| 95 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, | |
| 96 rax, rbx, rcx, &gc, TAG_OBJECT); | 95 rax, rbx, rcx, &gc, TAG_OBJECT); |
| 97 | 96 |
| 98 // Get the function from the stack. | 97 // Get the function from the stack. |
| 99 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); | 98 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); |
| 100 | 99 |
| 101 // Setup the object header. | 100 // Setup the object header. |
| 102 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex); | 101 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex); |
| 103 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 102 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
| 104 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 103 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(slots_)); |
| 105 | 104 |
| 106 // Setup the fixed slots. | 105 // Setup the fixed slots. |
| 107 __ Set(rbx, 0); // Set to NULL. | 106 __ Set(rbx, 0); // Set to NULL. |
| 108 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); | 107 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); |
| 109 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax); | 108 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax); |
| 110 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx); | 109 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx); |
| 111 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); | 110 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); |
| 112 | 111 |
| 113 // Copy the global object from the surrounding context. | 112 // Copy the global object from the surrounding context. |
| 114 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 113 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 115 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx); | 114 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx); |
| 116 | 115 |
| 117 // Initialize the rest of the slots to undefined. | 116 // Initialize the rest of the slots to undefined. |
| 118 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); | 117 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); |
| 119 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { | 118 for (int i = Context::MIN_CONTEXT_SLOTS; i < slots_; i++) { |
| 120 __ movq(Operand(rax, Context::SlotOffset(i)), rbx); | 119 __ movq(Operand(rax, Context::SlotOffset(i)), rbx); |
| 121 } | 120 } |
| 122 | 121 |
| 123 // Return and remove the on-stack parameter. | 122 // Return and remove the on-stack parameter. |
| 124 __ movq(rsi, rax); | 123 __ movq(rsi, rax); |
| 125 __ ret(1 * kPointerSize); | 124 __ ret(1 * kPointerSize); |
| 126 | 125 |
| 127 // Need to collect. Call into runtime system. | 126 // Need to collect. Call into runtime system. |
| 128 __ bind(&gc); | 127 __ bind(&gc); |
| 129 __ TailCallRuntime(Runtime::kNewContext, 1, 1); | 128 __ TailCallRuntime(Runtime::kNewContext, 1, 1); |
| (...skipping 4261 matching lines...) | |
| 4391 | 4390 |
| 4392 // Do a tail call to the rewritten stub. | 4391 // Do a tail call to the rewritten stub. |
| 4393 __ jmp(rdi); | 4392 __ jmp(rdi); |
| 4394 } | 4393 } |
| 4395 | 4394 |
| 4396 #undef __ | 4395 #undef __ |
| 4397 | 4396 |
| 4398 } } // namespace v8::internal | 4397 } } // namespace v8::internal |
| 4399 | 4398 |
| 4400 #endif // V8_TARGET_ARCH_X64 | 4399 #endif // V8_TARGET_ARCH_X64 |
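
For readers skimming the `FastNewContextStub::Generate` hunk: the visible change is that `slots_` becomes the total context length (fixed slots included) instead of an extra-slot count that the old code padded with `Context::MIN_CONTEXT_SLOTS`. That reading follows from the NEW side using `slots_` directly for the allocation size, the stored `FixedArray::kLengthOffset` value, and the undefined-fill loop bound. Below is a self-contained sketch of that arithmetic, with illustrative constants standing in for the real V8 definitions (it is not V8 source):

```cpp
// Sketch of the size/offset arithmetic changed by this hunk.
// All constants are illustrative stand-ins, not the real V8 values.
#include <cstdio>

const int kPointerSize = 8;            // x64 word size
const int kFixedArrayHeaderSize = 16;  // map word + length word
const int kHeapObjectTag = 1;          // tag bit on heap pointers
const int kMinContextSlots = 5;        // closure, fcontext, previous,
                                       // extension, global (per the hunk)

// OLD convention: slots_ counted only the extra slots, so the stub
// padded the allocation with the fixed minimum itself.
int OldContextBytes(int slots) {
  int length = slots + kMinContextSlots;
  return length * kPointerSize + kFixedArrayHeaderSize;
}

// NEW convention: slots_ is already the full context length, so it is
// used directly for the allocation size, the stored length, and the
// undefined-fill loop bound.
int NewContextBytes(int slots) {
  return slots * kPointerSize + kFixedArrayHeaderSize;
}

// Approximation of Context::SlotOffset(i): a field offset relative to
// the tagged context pointer the stub keeps in rax.
int SlotOffset(int index) {
  return kFixedArrayHeaderSize + index * kPointerSize - kHeapObjectTag;
}

int main() {
  // The two conventions agree once the caller folds in the fixed slots:
  std::printf("%d == %d\n",
              OldContextBytes(3), NewContextBytes(3 + kMinContextSlots));
  std::printf("slot 0 at offset %d\n", SlotOffset(0));
  return 0;
}
```

If that reading is right, call sites constructing `FastNewContextStub` must now pass the total slot count; otherwise every context would be allocated `Context::MIN_CONTEXT_SLOTS` words short.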