| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1120 matching lines...) |
| 1131 } else { | 1131 } else { |
| 1132 // Get the elements array of the object. | 1132 // Get the elements array of the object. |
| 1133 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); | 1133 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); |
| 1134 | 1134 |
| 1135 // Check that the elements are in fast mode (not dictionary). | 1135 // Check that the elements are in fast mode (not dictionary). |
| 1136 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), | 1136 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), |
| 1137 Factory::fixed_array_map()); | 1137 Factory::fixed_array_map()); |
| 1138 __ j(not_equal, &miss); | 1138 __ j(not_equal, &miss); |
| 1139 | 1139 |
| 1140 if (argc == 1) { // Otherwise fall through to call builtin. | 1140 if (argc == 1) { // Otherwise fall through to call builtin. |
| 1141 Label call_builtin, exit, with_rset_update, attempt_to_grow_elements; | 1141 Label call_builtin, exit, with_write_barrier, attempt_to_grow_elements; |
| 1142 | 1142 |
| 1143 // Get the array's length into rax and calculate new length. | 1143 // Get the array's length into rax and calculate new length. |
| 1144 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset)); | 1144 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset)); |
| 1145 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); | 1145 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); |
| 1146 __ SmiAddConstant(rax, rax, Smi::FromInt(argc)); | 1146 __ SmiAddConstant(rax, rax, Smi::FromInt(argc)); |
| 1147 | 1147 |
| 1148 // Get the element's length into rcx. | 1148 // Get the element's length into rcx. |
| 1149 __ movl(rcx, FieldOperand(rbx, FixedArray::kLengthOffset)); | 1149 __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset)); |
| 1150 __ Integer32ToSmi(rcx, rcx); | |
| 1151 | 1150 |
| 1152 // Check if we could survive without allocation. | 1151 // Check if we could survive without allocation. |
| 1153 __ SmiCompare(rax, rcx); | 1152 __ SmiCompare(rax, rcx); |
| 1154 __ j(greater, &attempt_to_grow_elements); | 1153 __ j(greater, &attempt_to_grow_elements); |
| 1155 | 1154 |
| 1156 // Save new length. | 1155 // Save new length. |
| 1157 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1156 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
| 1158 | 1157 |
| 1159 // Push the element. | 1158 // Push the element. |
| 1160 __ movq(rcx, Operand(rsp, argc * kPointerSize)); | 1159 __ movq(rcx, Operand(rsp, argc * kPointerSize)); |
| 1161 SmiIndex index = | 1160 SmiIndex index = |
| 1162 masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size); | 1161 masm()->SmiToIndex(kScratchRegister, rax, times_pointer_size); |
| 1163 __ lea(rdx, FieldOperand(rbx, | 1162 __ lea(rdx, FieldOperand(rbx, |
| 1164 index.reg, index.scale, | 1163 index.reg, index.scale, |
| 1165 FixedArray::kHeaderSize - argc * kPointerSize)); | 1164 FixedArray::kHeaderSize - argc * kPointerSize)); |
| 1166 __ movq(Operand(rdx, 0), rcx); | 1165 __ movq(Operand(rdx, 0), rcx); |
| 1167 | 1166 |
| 1168 // Check if value is a smi. | 1167 // Check if value is a smi. |
| 1169 __ JumpIfNotSmi(rcx, &with_rset_update); | 1168 __ JumpIfNotSmi(rcx, &with_write_barrier); |
| 1170 | 1169 |
| 1171 __ bind(&exit); | 1170 __ bind(&exit); |
| 1172 __ ret((argc + 1) * kPointerSize); | 1171 __ ret((argc + 1) * kPointerSize); |
| 1173 | 1172 |
| 1174 __ bind(&with_rset_update); | 1173 __ bind(&with_write_barrier); |
| 1175 | 1174 |
| 1176 __ InNewSpace(rbx, rcx, equal, &exit); | 1175 __ InNewSpace(rbx, rcx, equal, &exit); |
| 1177 | 1176 |
| 1178 RecordWriteStub stub(rbx, rdx, rcx); | 1177 RecordWriteStub stub(rbx, rdx, rcx); |
| 1179 __ CallStub(&stub); | 1178 __ CallStub(&stub); |
| 1180 __ ret((argc + 1) * kPointerSize); | 1179 __ ret((argc + 1) * kPointerSize); |
| 1181 | 1180 |
| 1182 __ bind(&attempt_to_grow_elements); | 1181 __ bind(&attempt_to_grow_elements); |
| 1183 ExternalReference new_space_allocation_top = | 1182 ExternalReference new_space_allocation_top = |
| 1184 ExternalReference::new_space_allocation_top_address(); | 1183 ExternalReference::new_space_allocation_top_address(); |
| (...skipping 27 matching lines...) |
| 1212 // ... and fill the rest with holes. | 1211 // ... and fill the rest with holes. |
| 1213 __ Move(kScratchRegister, Factory::the_hole_value()); | 1212 __ Move(kScratchRegister, Factory::the_hole_value()); |
| 1214 for (int i = 1; i < kAllocationDelta; i++) { | 1213 for (int i = 1; i < kAllocationDelta; i++) { |
| 1215 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); | 1214 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); |
| 1216 } | 1215 } |
| 1217 | 1216 |
| 1218 // Restore receiver to rdx as finish sequence assumes it's here. | 1217 // Restore receiver to rdx as finish sequence assumes it's here. |
| 1219 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); | 1218 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); |
| 1220 | 1219 |
| 1221 // Increment element's and array's sizes. | 1220 // Increment element's and array's sizes. |
| 1222 __ addl(FieldOperand(rbx, FixedArray::kLengthOffset), | 1221 __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset), |
| 1223 Immediate(kAllocationDelta)); | 1222 Smi::FromInt(kAllocationDelta)); |
| 1224 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); | 1223 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); |
| 1225 | 1224 |
| 1226 // Elements are in new space, so no remembered set updates are necessary. | 1225 // Elements are in new space, so write barrier is not required. |
| 1227 __ ret((argc + 1) * kPointerSize); | 1226 __ ret((argc + 1) * kPointerSize); |
| 1228 | 1227 |
| 1229 __ bind(&call_builtin); | 1228 __ bind(&call_builtin); |
| 1230 } | 1229 } |
| 1231 | 1230 |
| 1232 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), | 1231 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), |
| 1233 argc + 1, | 1232 argc + 1, |
| 1234 1); | 1233 1); |
| 1235 } | 1234 } |
| 1236 | 1235 |
| (...skipping 1164 matching lines...) |
| 2401 // Return the generated code. | 2400 // Return the generated code. |
| 2402 return GetCode(); | 2401 return GetCode(); |
| 2403 } | 2402 } |
| 2404 | 2403 |
| 2405 | 2404 |
| 2406 #undef __ | 2405 #undef __ |
| 2407 | 2406 |
| 2408 } } // namespace v8::internal | 2407 } } // namespace v8::internal |
| 2409 | 2408 |
| 2410 #endif // V8_TARGET_ARCH_X64 | 2409 #endif // V8_TARGET_ARCH_X64 |
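
The hunks above switch the length handling from raw 32-bit integers to smi-tagged values: the `movl`/`Integer32ToSmi` pair collapses into a single `movq`, and `addl ... Immediate(kAllocationDelta)` becomes `SmiAddConstant(..., Smi::FromInt(kAllocationDelta))`, because the `FixedArray` length field is now stored as a smi. The sketch below is a minimal, standalone illustration of the x64 smi encoding this relies on (the payload lives in the upper 32 bits, so tagged values can be added directly); the constant names and the delta value are illustrative assumptions, not copied from the V8 headers.

```cpp
#include <cassert>
#include <cstdint>

// On x64, V8 keeps a smi's 32-bit payload in the upper half of a 64-bit word,
// leaving the low 32 bits (including the tag bit) zero. This constant mirrors
// that layout for illustration only.
constexpr int kSmiShift = 32;

// Tag a 32-bit integer as a smi-like 64-bit word.
inline int64_t SmiFromInt(int32_t value) {
  return static_cast<int64_t>(value) << kSmiShift;
}

// Recover the untagged 32-bit value.
inline int32_t SmiToInt(int64_t smi) {
  return static_cast<int32_t>(smi >> kSmiShift);
}

int main() {
  // Because both operands carry the same fixed shift, adding a tagged
  // constant to a tagged length produces the correctly tagged sum, with no
  // untag/retag round trip. That is what lets the patch grow the smi-encoded
  // FixedArray length with a single SmiAddConstant.
  int64_t length = SmiFromInt(16);  // elements length == 16
  int64_t delta  = SmiFromInt(4);   // kAllocationDelta == 4 (illustrative)
  int64_t grown  = length + delta;
  assert(SmiToInt(grown) == 20);
  return 0;
}
```

The renaming in the same patch (`with_rset_update` to `with_write_barrier`) is orthogonal to this arithmetic: a smi never needs a barrier because it is not a heap pointer, which is why the fast path can return immediately after `JumpIfNotSmi` falls through.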