OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
107 ASM_LOCATION("Builtins::Generate_InternalArrayCode"); | 107 ASM_LOCATION("Builtins::Generate_InternalArrayCode"); |
108 Label generic_array_code; | 108 Label generic_array_code; |
109 | 109 |
110 // Get the InternalArray function. | 110 // Get the InternalArray function. |
111 GenerateLoadInternalArrayFunction(masm, x1); | 111 GenerateLoadInternalArrayFunction(masm, x1); |
112 | 112 |
113 if (FLAG_debug_code) { | 113 if (FLAG_debug_code) { |
114 // Initial map for the builtin InternalArray functions should be maps. | 114 // Initial map for the builtin InternalArray functions should be maps. |
115 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); | 115 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); |
116 __ Tst(x10, kSmiTagMask); | 116 __ Tst(x10, kSmiTagMask); |
117 __ Assert(ne, "Unexpected initial map for InternalArray function"); | 117 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction); |
118 __ CompareObjectType(x10, x11, x12, MAP_TYPE); | 118 __ CompareObjectType(x10, x11, x12, MAP_TYPE); |
119 __ Assert(eq, "Unexpected initial map for InternalArray function"); | 119 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); |
120 } | 120 } |
121 | 121 |
122 // Run the native code for the InternalArray function called as a normal | 122 // Run the native code for the InternalArray function called as a normal |
123 // function. | 123 // function. |
124 InternalArrayConstructorStub stub(masm->isolate()); | 124 InternalArrayConstructorStub stub(masm->isolate()); |
125 __ TailCallStub(&stub); | 125 __ TailCallStub(&stub); |
126 } | 126 } |
127 | 127 |
128 | 128 |
129 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { | 129 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { |
130 // ----------- S t a t e ------------- | 130 // ----------- S t a t e ------------- |
131 // -- x0 : number of arguments | 131 // -- x0 : number of arguments |
132 // -- lr : return address | 132 // -- lr : return address |
133 // -- sp[...]: constructor arguments | 133 // -- sp[...]: constructor arguments |
134 // ----------------------------------- | 134 // ----------------------------------- |
135 ASM_LOCATION("Builtins::Generate_ArrayCode"); | 135 ASM_LOCATION("Builtins::Generate_ArrayCode"); |
136 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; | 136 Label generic_array_code, one_or_more_arguments, two_or_more_arguments; |
137 | 137 |
138 // Get the Array function. | 138 // Get the Array function. |
139 GenerateLoadArrayFunction(masm, x1); | 139 GenerateLoadArrayFunction(masm, x1); |
140 | 140 |
141 if (FLAG_debug_code) { | 141 if (FLAG_debug_code) { |
142 // Initial map for the builtin Array functions should be maps. | 142 // Initial map for the builtin Array functions should be maps. |
143 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); | 143 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); |
144 __ Tst(x10, kSmiTagMask); | 144 __ Tst(x10, kSmiTagMask); |
145 __ Assert(ne, "Unexpected initial map for Array function"); | 145 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); |
146 __ CompareObjectType(x10, x11, x12, MAP_TYPE); | 146 __ CompareObjectType(x10, x11, x12, MAP_TYPE); |
147 __ Assert(eq, "Unexpected initial map for Array function"); | 147 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); |
148 } | 148 } |
149 | 149 |
150 // Run the native code for the Array function called as a normal function. | 150 // Run the native code for the Array function called as a normal function. |
151 Handle<Object> undefined_sentinel( | 151 Handle<Object> undefined_sentinel( |
152 masm->isolate()->heap()->undefined_value(), | 152 masm->isolate()->heap()->undefined_value(), |
153 masm->isolate()); | 153 masm->isolate()); |
154 __ Mov(x2, Operand(undefined_sentinel)); | 154 __ Mov(x2, Operand(undefined_sentinel)); |
155 ArrayConstructorStub stub(masm->isolate()); | 155 ArrayConstructorStub stub(masm->isolate()); |
156 __ TailCallStub(&stub); | 156 __ TailCallStub(&stub); |
157 } | 157 } |
158 | 158 |
159 | 159 |
160 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { | 160 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) { |
161 // ----------- S t a t e ------------- | 161 // ----------- S t a t e ------------- |
162 // -- x0 : number of arguments | 162 // -- x0 : number of arguments |
163 // -- x1 : constructor function | 163 // -- x1 : constructor function |
164 // -- lr : return address | 164 // -- lr : return address |
165 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) | 165 // -- sp[(argc - n - 1) * 8] : arg[n] (zero based) |
166 // -- sp[argc * 8] : receiver | 166 // -- sp[argc * 8] : receiver |
167 // ----------------------------------- | 167 // ----------------------------------- |
168 ASM_LOCATION("Builtins::Generate_StringConstructCode"); | 168 ASM_LOCATION("Builtins::Generate_StringConstructCode"); |
169 Counters* counters = masm->isolate()->counters(); | 169 Counters* counters = masm->isolate()->counters(); |
170 __ IncrementCounter(counters->string_ctor_calls(), 1, x10, x11); | 170 __ IncrementCounter(counters->string_ctor_calls(), 1, x10, x11); |
171 | 171 |
172 Register argc = x0; | 172 Register argc = x0; |
173 Register function = x1; | 173 Register function = x1; |
174 if (FLAG_debug_code) { | 174 if (FLAG_debug_code) { |
175 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10); | 175 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10); |
176 __ Cmp(function, x10); | 176 __ Cmp(function, x10); |
177 __ Assert(eq, "Unexpected String function"); | 177 __ Assert(eq, kUnexpectedStringFunction); |
178 } | 178 } |
179 | 179 |
180 // Load the first argument in x0 and get rid of the rest. | 180 // Load the first argument in x0 and get rid of the rest. |
181 Label no_arguments; | 181 Label no_arguments; |
182 __ Cbz(argc, &no_arguments); | 182 __ Cbz(argc, &no_arguments); |
183 // First arg = sp[(argc - 1) * 8]. | 183 // First arg = sp[(argc - 1) * 8]. |
184 __ Sub(argc, argc, 1); | 184 __ Sub(argc, argc, 1); |
185 __ Claim(argc, kXRegSizeInBytes); | 185 __ Claim(argc, kXRegSizeInBytes); |
186 // jssp now points to args[0], load and drop args[0] + receiver. | 186 // jssp now points to args[0], load and drop args[0] + receiver. |
187 // TODO(jbramley): Consider adding ClaimAndPoke. | 187 // TODO(jbramley): Consider adding ClaimAndPoke. |
(...skipping 21 matching lines...) Expand all Loading... |
209 Label gc_required; | 209 Label gc_required; |
210 Register new_obj = x0; | 210 Register new_obj = x0; |
211 __ Allocate(JSValue::kSize, new_obj, x10, x11, &gc_required, TAG_OBJECT); | 211 __ Allocate(JSValue::kSize, new_obj, x10, x11, &gc_required, TAG_OBJECT); |
212 | 212 |
213 // Initialize the String object. | 213 // Initialize the String object. |
214 Register map = x3; | 214 Register map = x3; |
215 __ LoadGlobalFunctionInitialMap(function, map, x10); | 215 __ LoadGlobalFunctionInitialMap(function, map, x10); |
216 if (FLAG_debug_code) { | 216 if (FLAG_debug_code) { |
217 __ Ldrb(x4, FieldMemOperand(map, Map::kInstanceSizeOffset)); | 217 __ Ldrb(x4, FieldMemOperand(map, Map::kInstanceSizeOffset)); |
218 __ Cmp(x4, JSValue::kSize >> kPointerSizeLog2); | 218 __ Cmp(x4, JSValue::kSize >> kPointerSizeLog2); |
219 __ Assert(eq, "Unexpected string wrapper instance size"); | 219 __ Assert(eq, kUnexpectedStringWrapperInstanceSize); |
220 __ Ldrb(x4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset)); | 220 __ Ldrb(x4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset)); |
221 __ Cmp(x4, 0); | 221 __ Cmp(x4, 0); |
222 __ Assert(eq, "Unexpected unused properties of string wrapper"); | 222 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper); |
223 } | 223 } |
224 __ Str(map, FieldMemOperand(new_obj, HeapObject::kMapOffset)); | 224 __ Str(map, FieldMemOperand(new_obj, HeapObject::kMapOffset)); |
225 | 225 |
226 Register empty = x3; | 226 Register empty = x3; |
227 __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex); | 227 __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex); |
228 __ Str(empty, FieldMemOperand(new_obj, JSObject::kPropertiesOffset)); | 228 __ Str(empty, FieldMemOperand(new_obj, JSObject::kPropertiesOffset)); |
229 __ Str(empty, FieldMemOperand(new_obj, JSObject::kElementsOffset)); | 229 __ Str(empty, FieldMemOperand(new_obj, JSObject::kElementsOffset)); |
230 | 230 |
231 __ Str(argument, FieldMemOperand(new_obj, JSValue::kValueOffset)); | 231 __ Str(argument, FieldMemOperand(new_obj, JSValue::kValueOffset)); |
232 | 232 |
(...skipping 221 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
454 | 454 |
455 if (count_constructions) { | 455 if (count_constructions) { |
456 // Register first_non_prealloc is the offset of the first field after | 456 // Register first_non_prealloc is the offset of the first field after |
457 // pre-allocated fields. | 457 // pre-allocated fields. |
458 Register first_non_prealloc = x12; | 458 Register first_non_prealloc = x12; |
459 __ Add(first_non_prealloc, first_prop, | 459 __ Add(first_non_prealloc, first_prop, |
460 Operand(prealloc_fields, LSL, kPointerSizeLog2)); | 460 Operand(prealloc_fields, LSL, kPointerSizeLog2)); |
461 | 461 |
462 if (FLAG_debug_code) { | 462 if (FLAG_debug_code) { |
463 __ Cmp(first_non_prealloc, obj_end); | 463 __ Cmp(first_non_prealloc, obj_end); |
464 __ Assert(le, "Unexpected number of pre-allocated property fields"); | 464 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields); |
465 } | 465 } |
466 __ InitializeFieldsWithFiller(first_prop, first_non_prealloc, undef); | 466 __ InitializeFieldsWithFiller(first_prop, first_non_prealloc, undef); |
467 // To allow for truncation. | 467 // To allow for truncation. |
468 __ LoadRoot(x12, Heap::kOnePointerFillerMapRootIndex); | 468 __ LoadRoot(x12, Heap::kOnePointerFillerMapRootIndex); |
469 __ InitializeFieldsWithFiller(first_prop, obj_end, x12); | 469 __ InitializeFieldsWithFiller(first_prop, obj_end, x12); |
470 } else { | 470 } else { |
471 __ InitializeFieldsWithFiller(first_prop, obj_end, undef); | 471 __ InitializeFieldsWithFiller(first_prop, obj_end, undef); |
472 } | 472 } |
473 | 473 |
474 // Add the object tag to make the JSObject real, so that we can continue | 474 // Add the object tag to make the JSObject real, so that we can continue |
475 // and jump into the continuation code at any time from now on. Any | 475 // and jump into the continuation code at any time from now on. Any |
476 // failures need to undo the allocation, so that the heap is in a | 476 // failures need to undo the allocation, so that the heap is in a |
477 // consistent state and verifiable. | 477 // consistent state and verifiable. |
478 __ Add(new_obj, new_obj, kHeapObjectTag); | 478 __ Add(new_obj, new_obj, kHeapObjectTag); |
479 | 479 |
480 // Check if a non-empty properties array is needed. Continue with | 480 // Check if a non-empty properties array is needed. Continue with |
481 // allocated object if not, or fall through to runtime call if it is. | 481 // allocated object if not, or fall through to runtime call if it is. |
482 Register element_count = x3; | 482 Register element_count = x3; |
483 __ Ldrb(x3, FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset)); | 483 __ Ldrb(x3, FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset)); |
484 // The instance sizes field contains both pre-allocated property fields | 484 // The instance sizes field contains both pre-allocated property fields |
485 // and in-object properties. | 485 // and in-object properties. |
486 __ Add(x3, x3, prealloc_fields); | 486 __ Add(x3, x3, prealloc_fields); |
487 __ Subs(element_count, x3, inobject_props); | 487 __ Subs(element_count, x3, inobject_props); |
488 | 488 |
489 // Done if no extra properties are to be allocated. | 489 // Done if no extra properties are to be allocated. |
490 __ B(eq, &allocated); | 490 __ B(eq, &allocated); |
491 __ Assert(pl, "Property allocation count failed"); | 491 __ Assert(pl, kPropertyAllocationCountFailed); |
492 | 492 |
493 // Scale the number of elements by pointer size and add the header for | 493 // Scale the number of elements by pointer size and add the header for |
494 // FixedArrays to the start of the next object calculation from above. | 494 // FixedArrays to the start of the next object calculation from above. |
495 Register new_array = x5; | 495 Register new_array = x5; |
496 Register array_size = x6; | 496 Register array_size = x6; |
497 __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize); | 497 __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize); |
498 __ Allocate(array_size, new_array, x11, x12, &undo_allocation, | 498 __ Allocate(array_size, new_array, x11, x12, &undo_allocation, |
499 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | | 499 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | |
500 SIZE_IN_WORDS)); | 500 SIZE_IN_WORDS)); |
501 | 501 |
(...skipping 391 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
893 __ Ret(); | 893 __ Ret(); |
894 | 894 |
895 __ Bind(&with_tos_register); | 895 __ Bind(&with_tos_register); |
896 // Reload TOS register. | 896 // Reload TOS register. |
897 __ Peek(x0, kPointerSize); | 897 __ Peek(x0, kPointerSize); |
898 __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state); | 898 __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state); |
899 __ Drop(2); // Remove state and TOS. | 899 __ Drop(2); // Remove state and TOS. |
900 __ Ret(); | 900 __ Ret(); |
901 | 901 |
902 __ Bind(&unknown_state); | 902 __ Bind(&unknown_state); |
903 __ Abort("Invalid fullcodegen state."); | 903 __ Abort(kInvalidFullCodegenState); |
904 } | 904 } |
905 | 905 |
906 | 906 |
907 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 907 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
908 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 908 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
909 } | 909 } |
910 | 910 |
911 | 911 |
912 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { | 912 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { |
913 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); | 913 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); |
(...skipping 278 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1192 __ Sub(x10, jssp, x10); | 1192 __ Sub(x10, jssp, x10); |
1193 // Check if the arguments will overflow the stack. | 1193 // Check if the arguments will overflow the stack. |
1194 __ Cmp(x10, Operand(argc, LSR, kSmiShift - kPointerSizeLog2)); | 1194 __ Cmp(x10, Operand(argc, LSR, kSmiShift - kPointerSizeLog2)); |
1195 __ B(gt, &enough_stack_space); | 1195 __ B(gt, &enough_stack_space); |
1196 // There is not enough stack space, so use a builtin to throw an appropriate | 1196 // There is not enough stack space, so use a builtin to throw an appropriate |
1197 // error. | 1197 // error. |
1198 __ Push(function, argc); | 1198 __ Push(function, argc); |
1199 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); | 1199 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); |
1200 // We should never return from the APPLY_OVERFLOW builtin. | 1200 // We should never return from the APPLY_OVERFLOW builtin. |
1201 if (__ emit_debug_code()) { | 1201 if (__ emit_debug_code()) { |
1202 __ Abort("Unreachable code."); | 1202 __ Unreachable(); |
1203 } | 1203 } |
1204 | 1204 |
1205 __ Bind(&enough_stack_space); | 1205 __ Bind(&enough_stack_space); |
1206 // Push current limit and index. | 1206 // Push current limit and index. |
1207 __ Mov(x1, 0); // Initial index. | 1207 __ Mov(x1, 0); // Initial index. |
1208 __ Push(argc, x1); | 1208 __ Push(argc, x1); |
1209 | 1209 |
1210 Label push_receiver; | 1210 Label push_receiver; |
1211 __ Ldr(receiver, MemOperand(fp, kReceiverOffset)); | 1211 __ Ldr(receiver, MemOperand(fp, kReceiverOffset)); |
1212 | 1212 |
(...skipping 244 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1457 __ Bind(&dont_adapt_arguments); | 1457 __ Bind(&dont_adapt_arguments); |
1458 __ Jump(x3); | 1458 __ Jump(x3); |
1459 } | 1459 } |
1460 | 1460 |
1461 | 1461 |
1462 #undef __ | 1462 #undef __ |
1463 | 1463 |
1464 } } // namespace v8::internal | 1464 } } // namespace v8::internal |
1465 | 1465 |
1466 #endif // V8_TARGET_ARCH_ARM | 1466 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |