| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1097 matching lines...) |
| 1108 } else { | 1108 } else { |
| 1109 // Get the elements array of the object. | 1109 // Get the elements array of the object. |
| 1110 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); | 1110 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); |
| 1111 | 1111 |
| 1112 // Check that the elements are in fast mode (not dictionary). | 1112 // Check that the elements are in fast mode (not dictionary). |
| 1113 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), | 1113 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), |
| 1114 Factory::fixed_array_map()); | 1114 Factory::fixed_array_map()); |
| 1115 __ j(not_equal, &miss); | 1115 __ j(not_equal, &miss); |
| 1116 | 1116 |
| 1117 if (argc == 1) { // Otherwise fall through to call builtin. | 1117 if (argc == 1) { // Otherwise fall through to call builtin. |
| 1118 Label call_builtin, exit, with_rset_update, attempt_to_grow_elements; | 1118 Label call_builtin, exit, with_write_barrier, attempt_to_grow_elements; |
| 1119 | 1119 |
| 1120 // Get the array's length into rax and calculate new length. | 1120 // Get the array's length into rax and calculate new length. |
| 1121 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset)); | 1121 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset)); |
| 1122 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); | 1122 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); |
| 1123 __ SmiAddConstant(rax, rax, Smi::FromInt(argc)); | 1123 __ SmiAddConstant(rax, rax, Smi::FromInt(argc)); |
| 1124 | 1124 |
| 1125 // Get the element's length into rcx. | 1125 // Get the element's length into rcx. |
| 1126 __ movl(rcx, FieldOperand(rbx, FixedArray::kLengthOffset)); | 1126 __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset)); |
| 1127 __ Integer32ToSmi(rcx, rcx); | |
| 1128 | 1127 |
| 1129 // Check if we could survive without allocation. | 1128 // Check if we could survive without allocation. |
| 1130 __ SmiCompare(rax, rcx); | 1129 __ SmiCompare(rax, rcx); |
| 1131 __ j(greater, &attempt_to_grow_elements); | 1130 __ j(greater, &attempt_to_grow_elements); |
| 1132 | 1131 |
| 1133 // Save new length. | 1132 // Save new length. |
| 1134 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1133 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
| 1135 | 1134 |
| 1136 // Push the element. | 1135 // Push the element. |
| 1137 __ movq(rcx, Operand(rsp, argc * kPointerSize)); | 1136 __ movq(rcx, Operand(rsp, argc * kPointerSize)); |
| 1138 SmiIndex index = | 1137 SmiIndex index = |
| 1139 masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size); | 1138 masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size); |
| 1140 __ lea(rdx, FieldOperand(rbx, | 1139 __ lea(rdx, FieldOperand(rbx, |
| 1141 index.reg, index.scale, | 1140 index.reg, index.scale, |
| 1142 FixedArray::kHeaderSize - argc * kPointerSize)); | 1141 FixedArray::kHeaderSize - argc * kPointerSize)); |
| 1143 __ movq(Operand(rdx, 0), rcx); | 1142 __ movq(Operand(rdx, 0), rcx); |
| 1144 | 1143 |
| 1145 // Check if value is a smi. | 1144 // Check if value is a smi. |
| 1146 __ JumpIfNotSmi(rcx, &with_rset_update); | 1145 __ JumpIfNotSmi(rcx, &with_write_barrier); |
| 1147 | 1146 |
| 1148 __ bind(&exit); | 1147 __ bind(&exit); |
| 1149 __ ret((argc + 1) * kPointerSize); | 1148 __ ret((argc + 1) * kPointerSize); |
| 1150 | 1149 |
| 1151 __ bind(&with_rset_update); | 1150 __ bind(&with_write_barrier); |
| 1152 | 1151 |
| 1153 __ InNewSpace(rbx, rcx, equal, &exit); | 1152 __ InNewSpace(rbx, rcx, equal, &exit); |
| 1154 | 1153 |
| 1155 RecordWriteStub stub(rbx, rdx, rcx); | 1154 RecordWriteStub stub(rbx, rdx, rcx); |
| 1156 __ CallStub(&stub); | 1155 __ CallStub(&stub); |
| 1157 __ ret((argc + 1) * kPointerSize); | 1156 __ ret((argc + 1) * kPointerSize); |
| 1158 | 1157 |
| 1159 __ bind(&attempt_to_grow_elements); | 1158 __ bind(&attempt_to_grow_elements); |
| 1160 ExternalReference new_space_allocation_top = | 1159 ExternalReference new_space_allocation_top = |
| 1161 ExternalReference::new_space_allocation_top_address(); | 1160 ExternalReference::new_space_allocation_top_address(); |
| (...skipping 27 matching lines...) |
| 1189 // ... and fill the rest with holes. | 1188 // ... and fill the rest with holes. |
| 1190 __ Move(kScratchRegister, Factory::the_hole_value()); | 1189 __ Move(kScratchRegister, Factory::the_hole_value()); |
| 1191 for (int i = 1; i < kAllocationDelta; i++) { | 1190 for (int i = 1; i < kAllocationDelta; i++) { |
| 1192 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); | 1191 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); |
| 1193 } | 1192 } |
| 1194 | 1193 |
| 1195 // Restore receiver to rdx as finish sequence assumes it's here. | 1194 // Restore receiver to rdx as finish sequence assumes it's here. |
| 1196 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); | 1195 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); |
| 1197 | 1196 |
| 1198 // Increment element's and array's sizes. | 1197 // Increment element's and array's sizes. |
| 1199 __ addl(FieldOperand(rbx, FixedArray::kLengthOffset), | 1198 __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset), |
| 1200 Immediate(kAllocationDelta)); | 1199 Smi::FromInt(kAllocationDelta)); |
| 1201 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1200 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
| 1202 | 1201 |
| 1203 // Elements are in new space, so no remembered set updates are necessary. | 1202 // Elements are in new space, so write barrier is not required. |
| 1204 __ ret((argc + 1) * kPointerSize); | 1203 __ ret((argc + 1) * kPointerSize); |
| 1205 | 1204 |
| 1206 __ bind(&call_builtin); | 1205 __ bind(&call_builtin); |
| 1207 } | 1206 } |
| 1208 | 1207 |
| 1209 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), | 1208 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), |
| 1210 argc + 1, | 1209 argc + 1, |
| 1211 1); | 1210 1); |
| 1212 } | 1211 } |
| 1213 | 1212 |
| (...skipping 1145 matching lines...) |
| 2359 // Return the generated code. | 2358 // Return the generated code. |
| 2360 return GetCode(); | 2359 return GetCode(); |
| 2361 } | 2360 } |
| 2362 | 2361 |
| 2363 | 2362 |
| 2364 #undef __ | 2363 #undef __ |
| 2365 | 2364 |
| 2366 } } // namespace v8::internal | 2365 } } // namespace v8::internal |
| 2367 | 2366 |
| 2368 #endif // V8_TARGET_ARCH_X64 | 2367 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |
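
Two changes in this hunk are worth a note for readers outside the GC code. First, `FixedArray::kLengthOffset` now holds a Smi rather than a raw int32, which is why the old `movl` + `Integer32ToSmi` pair collapses into a single `movq`, and why the capacity bump switches from `addl ... Immediate(kAllocationDelta)` to `SmiAddConstant(... Smi::FromInt(kAllocationDelta))`. Below is a minimal sketch of the arithmetic this relies on, assuming the x64 Smi layout of the time (32-bit payload in the upper half of the word, zero tag bits); the helper names are illustrative, not V8's.

```cpp
#include <cassert>
#include <cstdint>

// Toy model of the x64 Smi encoding assumed above: payload shifted into the
// upper 32 bits, low bits left zero. Names are hypothetical, not V8's API.
constexpr int kSmiShift = 32;

inline int64_t SmiFromInt(int32_t value) {
  return static_cast<int64_t>(value) << kSmiShift;
}

inline int32_t SmiToInt(int64_t smi) {
  return static_cast<int32_t>(smi >> kSmiShift);
}

int main() {
  // Old scheme: the length field was a raw int32, grown with
  // `addl length, kAllocationDelta`.
  int32_t raw_length = 7;
  raw_length += 4;  // kAllocationDelta

  // New scheme: the length field is a Smi, grown with
  // `SmiAddConstant(length, Smi::FromInt(kAllocationDelta))`, which is just
  // a 64-bit add of the tagged constant.
  int64_t smi_length = SmiFromInt(7);
  smi_length += SmiFromInt(4);

  assert(SmiToInt(smi_length) == raw_length);
  return 0;
}
```

Second, the `with_rset_update` label becomes `with_write_barrier`, matching the newer write-barrier terminology for the same remembered-set bookkeeping: after the pushed value is stored, the slot only has to be recorded when the value is a heap object and the elements array is not itself in new space. A rough model of that decision, under a simplified generational-heap assumption (all names below are stand-ins, not V8's API):

```cpp
#include <cstdint>
#include <unordered_set>
#include <vector>

// Simplified generational heap: pointers from old space into new space must
// be remembered so the scavenger can find them.
struct Heap {
  std::unordered_set<const void*> new_space_objects;
  std::vector<void**> remembered_slots;

  bool InNewSpace(const void* object) const {
    return new_space_objects.count(object) != 0;
  }
};

// Toy tagging: low bit clear means Smi, which carries no pointer.
inline bool IsSmi(const void* value) {
  return (reinterpret_cast<uintptr_t>(value) & 1) == 0;
}

// Mirrors the stub's order of operations: store first, then decide whether
// the write barrier (remembered-set update) is needed.
void StoreElement(Heap* heap, void* elements, void** slot, void* value) {
  *slot = value;
  if (IsSmi(value)) return;                // JumpIfNotSmi: no barrier for smis
  if (heap->InNewSpace(elements)) return;  // InNewSpace check: host is scanned anyway
  heap->remembered_slots.push_back(slot);  // RecordWriteStub: remember the slot
}
```

The same reasoning explains the updated comment in the grow-elements path: the freshly extended backing store is allocated in new space, so no slot recording is needed there either.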