OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 47 matching lines...)
58 __ pop(ecx); // Pop return address. | 58 __ pop(ecx); // Pop return address. |
59 __ push(eax); | 59 __ push(eax); |
60 __ push(ecx); // Push return address. | 60 __ push(ecx); // Push return address. |
61 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); | 61 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); |
62 } | 62 } |
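The pop/push/push sequence above rewrites the stack so that the value in eax ends up beneath the return address, i.e. becomes the on-stack argument, before the tail jump to the TO_NUMBER builtin. A rough sketch of the stack layout (illustration only, not generated output):

    // Stack before:              Stack after:
    //   return address  <- esp     return address  <- esp
    //   ...                        eax (value to convert to a number)
    //                              ...
    // InvokeBuiltin(..., JUMP_FUNCTION) jumps rather than calls, so the
    // builtin returns directly to this stub's original caller.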
63 | 63 |
64 | 64 |
65 void FastNewClosureStub::Generate(MacroAssembler* masm) { | 65 void FastNewClosureStub::Generate(MacroAssembler* masm) { |
66 // Create a new closure from the given function info in new | 66 // Create a new closure from the given function info in new |
67 // space. Set the context to the current context in esi. | 67 // space. Set the context to the current context in esi. |
| 68 Counters* counters = masm->isolate()->counters(); |
| 69 |
68 Label gc; | 70 Label gc; |
69 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); | 71 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); |
70 | 72 |
| 73 __ IncrementCounter(counters->fast_new_closure_total(), 1); |
| 74 |
71 // Get the function info from the stack. | 75 // Get the function info from the stack. |
72 __ mov(edx, Operand(esp, 1 * kPointerSize)); | 76 __ mov(edx, Operand(esp, 1 * kPointerSize)); |
73 | 77 |
74 int map_index = strict_mode_ == kStrictMode | 78 int map_index = strict_mode_ == kStrictMode |
75 ? Context::STRICT_MODE_FUNCTION_MAP_INDEX | 79 ? Context::STRICT_MODE_FUNCTION_MAP_INDEX |
76 : Context::FUNCTION_MAP_INDEX; | 80 : Context::FUNCTION_MAP_INDEX; |
77 | 81 |
78 // Compute the function map in the current global context and set that | 82 // Compute the function map in the current global context and set that |
79 // as the map of the allocated object. | 83 // as the map of the allocated object. |
80 __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); | 84 __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
81 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset)); | 85 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset)); |
82 __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index))); | 86 __ mov(ebx, Operand(ecx, Context::SlotOffset(map_index))); |
83 __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx); | 87 __ mov(FieldOperand(eax, JSObject::kMapOffset), ebx); |
84 | 88 |
85 // Initialize the rest of the function. We don't have to update the | 89 // Initialize the rest of the function. We don't have to update the |
86 // write barrier because the allocated object is in new space. | 90 // write barrier because the allocated object is in new space. |
87 Factory* factory = masm->isolate()->factory(); | 91 Factory* factory = masm->isolate()->factory(); |
88 __ mov(ebx, Immediate(factory->empty_fixed_array())); | 92 __ mov(ebx, Immediate(factory->empty_fixed_array())); |
89 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx); | 93 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx); |
90 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx); | 94 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx); |
91 __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset), | 95 __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset), |
92 Immediate(factory->the_hole_value())); | 96 Immediate(factory->the_hole_value())); |
93 __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx); | 97 __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx); |
94 __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi); | 98 __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi); |
95 __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx); | 99 __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx); |
96 __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset), | |
97 Immediate(factory->undefined_value())); | |
98 | 100 |
99 // Initialize the code pointer in the function to be the one | 101 // Initialize the code pointer in the function to be the one |
100 // found in the shared function info object. | 102 // found in the shared function info object. |
| 103 // But first, check if there is an optimized version for our context. |
| 104 NearLabel check_optimized; |
| 105 Label install_unoptimized; |
| 106 if (FLAG_cache_optimized_code) { |
| 107 __ mov(ebx, FieldOperand(edx, SharedFunctionInfo::kOptimizedCodeMapOffset)); |
| 108 __ test(ebx, Operand(ebx)); |
| 109 __ j(not_zero, &check_optimized); |
| 110 } |
| 111 __ bind(&install_unoptimized); |
| 112 __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset), |
| 113 Immediate(factory->undefined_value())); |
101 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset)); | 114 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset)); |
102 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); | 115 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); |
103 __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx); | 116 __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx); |
104 | 117 |
105 // Return and remove the on-stack parameter. | 118 // Return and remove the on-stack parameter. |
106 __ ret(1 * kPointerSize); | 119 __ ret(1 * kPointerSize); |
107 | 120 |
| 121 __ bind(&check_optimized); |
| 122 |
| 123 __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1); |
| 124 |
| 125 // ecx holds the global context, ebx points to the fixed array of |
| 126 // (global context, optimized code) pairs. |
| 127 // The map must never be empty, so check the first pair unconditionally. |
| 128 Label install_optimized; |
| 129 // Speculatively move the first pair's code object into edx. |
| 130 __ mov(edx, FieldOperand(ebx, FixedArray::kHeaderSize + kPointerSize)); |
| 131 __ cmp(ecx, FieldOperand(ebx, FixedArray::kHeaderSize)); |
| 132 __ j(equal, &install_optimized); |
| 133 |
| 134 // Iterate through the rest of the map backwards. edx holds an index as a Smi. |
| 135 NearLabel loop; |
| 136 Label restore; |
| 137 __ mov(edx, FieldOperand(ebx, FixedArray::kLengthOffset)); |
| 138 __ bind(&loop); |
| 139 __ cmp(edx, 4); // Do not double-check the first entry. |
| 140 __ j(equal, &restore); |
| 141 __ sub(Operand(edx), Immediate(4)); // Skip a pair. |
| 142 __ cmp(ecx, FixedArrayElementOperand(ebx, edx, 0)); |
| 143 __ j(not_equal, &loop); |
| 144 // Hit: fetch the optimized code. |
| 145 __ mov(edx, FixedArrayElementOperand(ebx, edx, 1)); |
| 146 |
| 147 __ bind(&install_optimized); |
| 148 |
| 149 __ IncrementCounter(counters->fast_new_closure_install_optimized(), 1); |
| 150 |
| 151 // Idea: store proper code pointers in the map and either unmangle them |
| 152 // on marking, or do nothing, as the whole map is discarded on major GC anyway. |
| 153 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); |
| 154 __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx); |
| 155 |
| 156 // Now link the function into the context's list of optimized functions. |
| 157 __ mov(edx, ContextOperand(ecx, Context::OPTIMIZED_FUNCTIONS_LIST)); |
| 158 |
| 159 __ mov(FieldOperand(eax, JSFunction::kNextFunctionLinkOffset), edx); |
| 160 // No write barrier is needed, as the JSFunction (eax) is in new space. |
| 161 if (FLAG_debug_code) { |
| 162 NearLabel ok; |
| 163 __ InNewSpace(eax, ebx, equal, &ok); |
| 164 __ Abort("New closure has been allocated in old space!"); |
| 165 __ bind(&ok); |
| 166 } |
| 167 |
| 168 __ mov(ContextOperand(ecx, Context::OPTIMIZED_FUNCTIONS_LIST), eax); |
| 169 // Store the JSFunction (eax) into edx before issuing the write barrier, |
| 170 // as it clobbers all the registers passed to it. |
| 171 __ mov(edx, eax); |
| 172 __ RecordWrite(ecx, Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST), |
| 173 edx, ebx); |
| 174 |
| 175 // Return and remove the on-stack parameter. |
| 176 __ ret(1 * kPointerSize); |
| 177 |
| 178 __ bind(&restore); |
| 179 // Restore SharedFunctionInfo into edx. |
| 180 __ mov(edx, Operand(esp, 1 * kPointerSize)); |
| 181 __ jmp(&install_unoptimized); |
| 182 |
108 // Create a new closure through the slower runtime call. | 183 // Create a new closure through the slower runtime call. |
109 __ bind(&gc); | 184 __ bind(&gc); |
110 __ pop(ecx); // Temporarily remove return address. | 185 __ pop(ecx); // Temporarily remove return address. |
111 __ pop(edx); | 186 __ pop(edx); |
112 __ push(esi); | 187 __ push(esi); |
113 __ push(edx); | 188 __ push(edx); |
114 __ push(Immediate(factory->false_value())); | 189 __ push(Immediate(factory->false_value())); |
115 __ push(ecx); // Restore return address. | 190 __ push(ecx); // Restore return address. |
116 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 191 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
117 } | 192 } |
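To make the new fast path easier to follow, here is the same lookup expressed as ordinary C++. This is an illustrative sketch only, using hypothetical stand-in types (CodeMapEntry, LookupOptimizedCode) rather than the real V8 heap layout: the optimized code map hanging off the SharedFunctionInfo is conceptually a flat array of (global context, optimized code) pairs; the stub scans it for the current global context, installs the cached code on a hit and links the closure into the context's OPTIMIZED_FUNCTIONS_LIST, and otherwise falls back to the unoptimized code from the SharedFunctionInfo.

    #include <cstddef>
    #include <vector>

    // Hypothetical stand-ins for the V8 heap objects involved; illustration only.
    struct Context;
    struct Code;

    // Conceptually, the optimized code map is a flat array of
    // (global context, optimized code) pairs.
    struct CodeMapEntry {
      Context* global_context;
      Code* code;
    };

    // Sketch of the lookup the stub implements: scan the map backwards and
    // return the optimized code cached for the given global context, or NULL
    // if there is none (the caller then installs the unoptimized code).
    Code* LookupOptimizedCode(const std::vector<CodeMapEntry>& code_map,
                              Context* global_context) {
      for (std::size_t i = code_map.size(); i-- > 0; ) {
        if (code_map[i].global_context == global_context) return code_map[i].code;
      }
      return NULL;
    }

The assembler version additionally checks the first pair speculatively before entering the backwards loop, and works with Smi-tagged indices, but the effect is the same.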
(...skipping 6422 matching lines...)
6540 // Do a tail call to the rewritten stub. | 6615 // Do a tail call to the rewritten stub. |
6541 __ jmp(Operand(edi)); | 6616 __ jmp(Operand(edi)); |
6542 } | 6617 } |
6543 | 6618 |
6544 | 6619 |
6545 #undef __ | 6620 #undef __ |
6546 | 6621 |
6547 } } // namespace v8::internal | 6622 } } // namespace v8::internal |
6548 | 6623 |
6549 #endif // V8_TARGET_ARCH_IA32 | 6624 #endif // V8_TARGET_ARCH_IA32 |