| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/x64/codegen-x64.h" | 5 #include "src/x64/codegen-x64.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
| 8 | 8 |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/macro-assembler.h" | 10 #include "src/macro-assembler.h" |
| (...skipping 44 matching lines...) | (...skipping 44 matching lines...) |
| 55 return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer); | 55 return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer); |
| 56 } | 56 } |
| 57 | 57 |
| 58 #undef __ | 58 #undef __ |
| 59 | 59 |
| 60 // ------------------------------------------------------------------------- | 60 // ------------------------------------------------------------------------- |
| 61 // Code generators | 61 // Code generators |
| 62 | 62 |
| 63 #define __ ACCESS_MASM(masm) | 63 #define __ ACCESS_MASM(masm) |
| 64 | 64 |
| 65 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition( | |
| 66 MacroAssembler* masm, | |
| 67 Register receiver, | |
| 68 Register key, | |
| 69 Register value, | |
| 70 Register target_map, | |
| 71 AllocationSiteMode mode, | |
| 72 Label* allocation_memento_found) { | |
| 73 // Return address is on the stack. | |
| 74 Register scratch = rdi; | |
| 75 DCHECK(!AreAliased(receiver, key, value, target_map, scratch)); | |
| 76 | |
| 77 if (mode == TRACK_ALLOCATION_SITE) { | |
| 78 DCHECK(allocation_memento_found != NULL); | |
| 79 __ JumpIfJSArrayHasAllocationMemento( | |
| 80 receiver, scratch, allocation_memento_found); | |
| 81 } | |
| 82 | |
| 83 // Set transitioned map. | |
| 84 __ movp(FieldOperand(receiver, HeapObject::kMapOffset), target_map); | |
| 85 __ RecordWriteField(receiver, | |
| 86 HeapObject::kMapOffset, | |
| 87 target_map, | |
| 88 scratch, | |
| 89 kDontSaveFPRegs, | |
| 90 EMIT_REMEMBERED_SET, | |
| 91 OMIT_SMI_CHECK); | |
| 92 } | |
| 93 | |
| 94 | |
| 95 void ElementsTransitionGenerator::GenerateSmiToDouble( | |
| 96 MacroAssembler* masm, | |
| 97 Register receiver, | |
| 98 Register key, | |
| 99 Register value, | |
| 100 Register target_map, | |
| 101 AllocationSiteMode mode, | |
| 102 Label* fail) { | |
| 103 // Return address is on the stack. | |
| 104 DCHECK(receiver.is(rdx)); | |
| 105 DCHECK(key.is(rcx)); | |
| 106 DCHECK(value.is(rax)); | |
| 107 DCHECK(target_map.is(rbx)); | |
| 108 | |
| 109 // The fail label is not actually used since we do not allocate. | |
| 110 Label allocated, new_backing_store, only_change_map, done; | |
| 111 | |
| 112 if (mode == TRACK_ALLOCATION_SITE) { | |
| 113 __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, fail); | |
| 114 } | |
| 115 | |
| 116 // Check for empty arrays, which only require a map transition and no changes | |
| 117 // to the backing store. | |
| 118 __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset)); | |
| 119 __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex); | |
| 120 __ j(equal, &only_change_map); | |
| 121 | |
| 122 __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset)); | |
| 123 if (kPointerSize == kDoubleSize) { | |
| 124 // Check backing store for COW-ness. For COW arrays we have to | |
| 125 // allocate a new backing store. | |
| 126 __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset), | |
| 127 Heap::kFixedCOWArrayMapRootIndex); | |
| 128 __ j(equal, &new_backing_store); | |
| 129 } else { | |
| 130 // For the x32 port we have to allocate a new backing store as SMI size is | |
| 131 // not equal to double size. | |
| 132 DCHECK(kDoubleSize == 2 * kPointerSize); | |
| 133 __ jmp(&new_backing_store); | |
| 134 } | |
| 135 | |
| 136 // Check if the backing store is in new-space. If not, we need to allocate | |
| 137 // a new one since the old one is in pointer-space. | |
| 138 // If in new space, we can reuse the old backing store because it is | |
| 139 // the same size. | |
| 140 __ JumpIfNotInNewSpace(r8, rdi, &new_backing_store); | |
| 141 | |
| 142 __ movp(r14, r8); // Destination array equals source array. | |
| 143 | |
| 144 // r8 : source FixedArray | |
| 145 // r9 : elements array length | |
| 146 // r14: destination FixedDoubleArray | |
| 147 // Set backing store's map | |
| 148 __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); | |
| 149 __ movp(FieldOperand(r14, HeapObject::kMapOffset), rdi); | |
| 150 | |
| 151 __ bind(&allocated); | |
| 152 // Set transitioned map. | |
| 153 __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx); | |
| 154 __ RecordWriteField(rdx, | |
| 155 HeapObject::kMapOffset, | |
| 156 rbx, | |
| 157 rdi, | |
| 158 kDontSaveFPRegs, | |
| 159 EMIT_REMEMBERED_SET, | |
| 160 OMIT_SMI_CHECK); | |
| 161 | |
| 162 // Convert smis to doubles and holes to hole NaNs. The Array's length | |
| 163 // remains unchanged. | |
| 164 STATIC_ASSERT(FixedDoubleArray::kLengthOffset == FixedArray::kLengthOffset); | |
| 165 STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize); | |
| 166 | |
| 167 Label loop, entry, convert_hole; | |
| 168 __ movq(r15, bit_cast<int64_t, uint64_t>(kHoleNanInt64)); | |
| 169 // r15: the-hole NaN | |
| 170 __ jmp(&entry); | |
| 171 | |
| 172 // Allocate new backing store. | |
| 173 __ bind(&new_backing_store); | |
| 174 __ leap(rdi, Operand(r9, times_8, FixedArray::kHeaderSize)); | |
| 175 __ Allocate(rdi, r14, r11, r15, fail, NO_ALLOCATION_FLAGS); | |
| 176 // Set backing store's map | |
| 177 __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex); | |
| 178 __ movp(FieldOperand(r14, HeapObject::kMapOffset), rdi); | |
| 179 // Set receiver's backing store. | |
| 180 __ movp(FieldOperand(rdx, JSObject::kElementsOffset), r14); | |
| 181 __ movp(r11, r14); | |
| 182 __ RecordWriteField(rdx, | |
| 183 JSObject::kElementsOffset, | |
| 184 r11, | |
| 185 r15, | |
| 186 kDontSaveFPRegs, | |
| 187 EMIT_REMEMBERED_SET, | |
| 188 OMIT_SMI_CHECK); | |
| 189 // Set backing store's length. | |
| 190 __ Integer32ToSmi(r11, r9); | |
| 191 __ movp(FieldOperand(r14, FixedDoubleArray::kLengthOffset), r11); | |
| 192 __ jmp(&allocated); | |
| 193 | |
| 194 __ bind(&only_change_map); | |
| 195 // Set transitioned map. | |
| 196 __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx); | |
| 197 __ RecordWriteField(rdx, | |
| 198 HeapObject::kMapOffset, | |
| 199 rbx, | |
| 200 rdi, | |
| 201 kDontSaveFPRegs, | |
| 202 OMIT_REMEMBERED_SET, | |
| 203 OMIT_SMI_CHECK); | |
| 204 __ jmp(&done); | |
| 205 | |
| 206 // Conversion loop. | |
| 207 __ bind(&loop); | |
| 208 __ movp(rbx, | |
| 209 FieldOperand(r8, r9, times_pointer_size, FixedArray::kHeaderSize)); | |
| 210 // r9 : current element's index | |
| 211 // rbx: current element (smi-tagged) | |
| 212 __ JumpIfNotSmi(rbx, &convert_hole); | |
| 213 __ SmiToInteger32(rbx, rbx); | |
| 214 __ Cvtlsi2sd(kScratchDoubleReg, rbx); | |
| 215 __ Movsd(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), | |
| 216 kScratchDoubleReg); | |
| 217 __ jmp(&entry); | |
| 218 __ bind(&convert_hole); | |
| 219 | |
| 220 if (FLAG_debug_code) { | |
| 221 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); | |
| 222 __ Assert(equal, kObjectFoundInSmiOnlyArray); | |
| 223 } | |
| 224 | |
| 225 __ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15); | |
| 226 __ bind(&entry); | |
| 227 __ decp(r9); | |
| 228 __ j(not_sign, &loop); | |
| 229 | |
| 230 __ bind(&done); | |
| 231 } | |
| 232 | |
| 233 | |
| 234 void ElementsTransitionGenerator::GenerateDoubleToObject( | |
| 235 MacroAssembler* masm, | |
| 236 Register receiver, | |
| 237 Register key, | |
| 238 Register value, | |
| 239 Register target_map, | |
| 240 AllocationSiteMode mode, | |
| 241 Label* fail) { | |
| 242 // Return address is on the stack. | |
| 243 DCHECK(receiver.is(rdx)); | |
| 244 DCHECK(key.is(rcx)); | |
| 245 DCHECK(value.is(rax)); | |
| 246 DCHECK(target_map.is(rbx)); | |
| 247 | |
| 248 Label loop, entry, convert_hole, gc_required, only_change_map; | |
| 249 | |
| 250 if (mode == TRACK_ALLOCATION_SITE) { | |
| 251 __ JumpIfJSArrayHasAllocationMemento(rdx, rdi, fail); | |
| 252 } | |
| 253 | |
| 254 // Check for empty arrays, which only require a map transition and no changes | |
| 255 // to the backing store. | |
| 256 __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset)); | |
| 257 __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex); | |
| 258 __ j(equal, &only_change_map); | |
| 259 | |
| 260 __ Push(rsi); | |
| 261 __ Push(rax); | |
| 262 | |
| 263 __ movp(r8, FieldOperand(rdx, JSObject::kElementsOffset)); | |
| 264 __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset)); | |
| 265 // r8 : source FixedDoubleArray | |
| 266 // r9 : number of elements | |
| 267 __ leap(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize)); | |
| 268 __ Allocate(rdi, r11, r14, r15, &gc_required, NO_ALLOCATION_FLAGS); | |
| 269 // r11: destination FixedArray | |
| 270 __ LoadRoot(rdi, Heap::kFixedArrayMapRootIndex); | |
| 271 __ movp(FieldOperand(r11, HeapObject::kMapOffset), rdi); | |
| 272 __ Integer32ToSmi(r14, r9); | |
| 273 __ movp(FieldOperand(r11, FixedArray::kLengthOffset), r14); | |
| 274 | |
| 275 // Prepare for conversion loop. | |
| 276 __ movq(rsi, bit_cast<int64_t, uint64_t>(kHoleNanInt64)); | |
| 277 __ LoadRoot(rdi, Heap::kTheHoleValueRootIndex); | |
| 278 // rsi: the-hole NaN | |
| 279 // rdi: pointer to the-hole | |
| 280 | |
| 281 // Allocating heap numbers in the loop below can fail and cause a jump to | |
| 282 // gc_required. We can't leave a partly initialized FixedArray behind, | |
| 283 // so pessimistically fill it with holes now. | |
| 284 Label initialization_loop, initialization_loop_entry; | |
| 285 __ jmp(&initialization_loop_entry, Label::kNear); | |
| 286 __ bind(&initialization_loop); | |
| 287 __ movp(FieldOperand(r11, r9, times_pointer_size, FixedArray::kHeaderSize), | |
| 288 rdi); | |
| 289 __ bind(&initialization_loop_entry); | |
| 290 __ decp(r9); | |
| 291 __ j(not_sign, &initialization_loop); | |
| 292 | |
| 293 __ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset)); | |
| 294 __ jmp(&entry); | |
| 295 | |
| 296 // Call into runtime if GC is required. | |
| 297 __ bind(&gc_required); | |
| 298 __ Pop(rax); | |
| 299 __ Pop(rsi); | |
| 300 __ jmp(fail); | |
| 301 | |
| 302 // Box doubles into heap numbers. | |
| 303 __ bind(&loop); | |
| 304 __ movq(r14, FieldOperand(r8, | |
| 305 r9, | |
| 306 times_8, | |
| 307 FixedDoubleArray::kHeaderSize)); | |
| 308 // r9 : current element's index | |
| 309 // r14: current element | |
| 310 __ cmpq(r14, rsi); | |
| 311 __ j(equal, &convert_hole); | |
| 312 | |
| 313 // Non-hole double, copy value into a heap number. | |
| 314 __ AllocateHeapNumber(rax, r15, &gc_required); | |
| 315 // rax: new heap number | |
| 316 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), r14); | |
| 317 __ movp(FieldOperand(r11, | |
| 318 r9, | |
| 319 times_pointer_size, | |
| 320 FixedArray::kHeaderSize), | |
| 321 rax); | |
| 322 __ movp(r15, r9); | |
| 323 __ RecordWriteArray(r11, | |
| 324 rax, | |
| 325 r15, | |
| 326 kDontSaveFPRegs, | |
| 327 EMIT_REMEMBERED_SET, | |
| 328 OMIT_SMI_CHECK); | |
| 329 __ jmp(&entry, Label::kNear); | |
| 330 | |
| 331 // Replace the-hole NaN with the-hole pointer. | |
| 332 __ bind(&convert_hole); | |
| 333 __ movp(FieldOperand(r11, | |
| 334 r9, | |
| 335 times_pointer_size, | |
| 336 FixedArray::kHeaderSize), | |
| 337 rdi); | |
| 338 | |
| 339 __ bind(&entry); | |
| 340 __ decp(r9); | |
| 341 __ j(not_sign, &loop); | |
| 342 | |
| 343 // Replace receiver's backing store with newly created and filled FixedArray. | |
| 344 __ movp(FieldOperand(rdx, JSObject::kElementsOffset), r11); | |
| 345 __ RecordWriteField(rdx, | |
| 346 JSObject::kElementsOffset, | |
| 347 r11, | |
| 348 r15, | |
| 349 kDontSaveFPRegs, | |
| 350 EMIT_REMEMBERED_SET, | |
| 351 OMIT_SMI_CHECK); | |
| 352 __ Pop(rax); | |
| 353 __ Pop(rsi); | |
| 354 | |
| 355 __ bind(&only_change_map); | |
| 356 // Set transitioned map. | |
| 357 __ movp(FieldOperand(rdx, HeapObject::kMapOffset), rbx); | |
| 358 __ RecordWriteField(rdx, | |
| 359 HeapObject::kMapOffset, | |
| 360 rbx, | |
| 361 rdi, | |
| 362 kDontSaveFPRegs, | |
| 363 OMIT_REMEMBERED_SET, | |
| 364 OMIT_SMI_CHECK); | |
| 365 } | |
| 366 | |
| 367 | |
| 368 void StringCharLoadGenerator::Generate(MacroAssembler* masm, | 65 void StringCharLoadGenerator::Generate(MacroAssembler* masm, |
| 369 Register string, | 66 Register string, |
| 370 Register index, | 67 Register index, |
| 371 Register result, | 68 Register result, |
| 372 Label* call_runtime) { | 69 Label* call_runtime) { |
| 373 // Fetch the instance type of the receiver into result register. | 70 // Fetch the instance type of the receiver into result register. |
| 374 __ movp(result, FieldOperand(string, HeapObject::kMapOffset)); | 71 __ movp(result, FieldOperand(string, HeapObject::kMapOffset)); |
| 375 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset)); | 72 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset)); |
| 376 | 73 |
| 377 // We need special handling for indirect strings. | 74 // We need special handling for indirect strings. |
| (...skipping 172 matching lines...) | (...skipping 172 matching lines...) |
| 550 return Operand(base_reg_, argument_count_reg_, times_pointer_size, | 247 return Operand(base_reg_, argument_count_reg_, times_pointer_size, |
| 551 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); | 248 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize); |
| 552 } | 249 } |
| 553 } | 250 } |
| 554 | 251 |
| 555 | 252 |
| 556 } // namespace internal | 253 } // namespace internal |
| 557 } // namespace v8 | 254 } // namespace v8 |
| 558 | 255 |
| 559 #endif // V8_TARGET_ARCH_X64 | 256 #endif // V8_TARGET_ARCH_X64 |