| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1130 matching lines...) |
| 1141 } else { | 1141 } else { |
| 1142 // Get the elements array of the object. | 1142 // Get the elements array of the object. |
| 1143 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); | 1143 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); |
| 1144 | 1144 |
| 1145 // Check that the elements are in fast mode (not dictionary). | 1145 // Check that the elements are in fast mode (not dictionary). |
| 1146 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), | 1146 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), |
| 1147 Factory::fixed_array_map()); | 1147 Factory::fixed_array_map()); |
| 1148 __ j(not_equal, &miss); | 1148 __ j(not_equal, &miss); |
| 1149 | 1149 |
| 1150 if (argc == 1) { // Otherwise fall through to call builtin. | 1150 if (argc == 1) { // Otherwise fall through to call builtin. |
| 1151 Label call_builtin, exit, with_write_barrier, attempt_to_grow_elements; | 1151 Label call_builtin, exit, with_rset_update, attempt_to_grow_elements; |
| 1152 | 1152 |
| 1153 // Get the array's length into rax and calculate new length. | 1153 // Get the array's length into rax and calculate new length. |
| 1154 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset)); | 1154 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset)); |
| 1155 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); | 1155 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); |
| 1156 __ SmiAddConstant(rax, rax, Smi::FromInt(argc)); | 1156 __ SmiAddConstant(rax, rax, Smi::FromInt(argc)); |
| 1157 | 1157 |
| 1158 // Get the elements' length into rcx. | 1158 // Get the elements' length into rcx. |
| 1159 __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset)); | 1159 __ movl(rcx, FieldOperand(rbx, FixedArray::kLengthOffset)); |
| | 1160 __ Integer32ToSmi(rcx, rcx); |
| 1160 | 1161 |
| 1161 // Check if we could survive without allocation. | 1162 // Check if we could survive without allocation. |
| 1162 __ SmiCompare(rax, rcx); | 1163 __ SmiCompare(rax, rcx); |
| 1163 __ j(greater, &attempt_to_grow_elements); | 1164 __ j(greater, &attempt_to_grow_elements); |
| 1164 | 1165 |
| 1165 // Save new length. | 1166 // Save new length. |
| 1166 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1167 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
| 1167 | 1168 |
| 1168 // Push the element. | 1169 // Push the element. |
| 1169 __ movq(rcx, Operand(rsp, argc * kPointerSize)); | 1170 __ movq(rcx, Operand(rsp, argc * kPointerSize)); |
| 1170 SmiIndex index = | 1171 SmiIndex index = |
| 1171 masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size); | 1172 masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size); |
| 1172 __ lea(rdx, FieldOperand(rbx, | 1173 __ lea(rdx, FieldOperand(rbx, |
| 1173 index.reg, index.scale, | 1174 index.reg, index.scale, |
| 1174 FixedArray::kHeaderSize - argc * kPointerSize)); | 1175 FixedArray::kHeaderSize - argc * kPointerSize)); |
| 1175 __ movq(Operand(rdx, 0), rcx); | 1176 __ movq(Operand(rdx, 0), rcx); |
| 1176 | 1177 |
| 1177 // Check if value is a smi. | 1178 // Check if value is a smi. |
| 1178 __ JumpIfNotSmi(rcx, &with_write_barrier); | 1179 __ JumpIfNotSmi(rcx, &with_rset_update); |
| 1179 | 1180 |
| 1180 __ bind(&exit); | 1181 __ bind(&exit); |
| 1181 __ ret((argc + 1) * kPointerSize); | 1182 __ ret((argc + 1) * kPointerSize); |
| 1182 | 1183 |
| 1183 __ bind(&with_write_barrier); | 1184 __ bind(&with_rset_update); |
| 1184 | 1185 |
| 1185 __ InNewSpace(rbx, rcx, equal, &exit); | 1186 __ InNewSpace(rbx, rcx, equal, &exit); |
| 1186 | 1187 |
| 1187 RecordWriteStub stub(rbx, rdx, rcx); | 1188 RecordWriteStub stub(rbx, rdx, rcx); |
| 1188 __ CallStub(&stub); | 1189 __ CallStub(&stub); |
| 1189 __ ret((argc + 1) * kPointerSize); | 1190 __ ret((argc + 1) * kPointerSize); |
| 1190 | 1191 |
| 1191 __ bind(&attempt_to_grow_elements); | 1192 __ bind(&attempt_to_grow_elements); |
| 1192 ExternalReference new_space_allocation_top = | 1193 ExternalReference new_space_allocation_top = |
| 1193 ExternalReference::new_space_allocation_top_address(); | 1194 ExternalReference::new_space_allocation_top_address(); |
| (...skipping 27 matching lines...) |
| 1221 // ... and fill the rest with holes. | 1222 // ... and fill the rest with holes. |
| 1222 __ Move(kScratchRegister, Factory::the_hole_value()); | 1223 __ Move(kScratchRegister, Factory::the_hole_value()); |
| 1223 for (int i = 1; i < kAllocationDelta; i++) { | 1224 for (int i = 1; i < kAllocationDelta; i++) { |
| 1224 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); | 1225 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); |
| 1225 } | 1226 } |
| 1226 | 1227 |
| 1227 // Restore receiver to rdx as finish sequence assumes it's here. | 1228 // Restore receiver to rdx as finish sequence assumes it's here. |
| 1228 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); | 1229 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); |
| 1229 | 1230 |
| 1230 // Increment elements' and array's sizes. | 1231 // Increment elements' and array's sizes. |
| 1231 __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset), | 1232 __ addl(FieldOperand(rbx, FixedArray::kLengthOffset), |
| 1232 Smi::FromInt(kAllocationDelta)); | 1233 Immediate(kAllocationDelta)); |
| 1233 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1234 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
| 1234 | 1235 |
| 1235 // Elements are in new space, so write barrier is not required. | 1236 // Elements are in new space, so no remembered set updates are necessary. |
| 1236 __ ret((argc + 1) * kPointerSize); | 1237 __ ret((argc + 1) * kPointerSize); |
| 1237 | 1238 |
| 1238 __ bind(&call_builtin); | 1239 __ bind(&call_builtin); |
| 1239 } | 1240 } |
| 1240 | 1241 |
| 1241 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), | 1242 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), |
| 1242 argc + 1, | 1243 argc + 1, |
| 1243 1); | 1244 1); |
| 1244 } | 1245 } |
| 1245 | 1246 |
| (...skipping 1145 matching lines...) |
| 2391 // Return the generated code. | 2392 // Return the generated code. |
| 2392 return GetCode(); | 2393 return GetCode(); |
| 2393 } | 2394 } |
| 2394 | 2395 |
| 2395 | 2396 |
| 2396 #undef __ | 2397 #undef __ |
| 2397 | 2398 |
| 2398 } } // namespace v8::internal | 2399 } } // namespace v8::internal |
| 2399 | 2400 |
| 2400 #endif // V8_TARGET_ARCH_X64 | 2401 #endif // V8_TARGET_ARCH_X64 |