| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 374 matching lines...) |
| 385 ASSERT(!representation.IsNone()); | 385 ASSERT(!representation.IsNone()); |
| 386 | 386 |
| 387 if (details.type() == CONSTANT) { | 387 if (details.type() == CONSTANT) { |
| 388 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); | 388 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); |
| 389 __ LoadObject(scratch1, constant); | 389 __ LoadObject(scratch1, constant); |
| 390 __ Cmp(value_reg, scratch1); | 390 __ Cmp(value_reg, scratch1); |
| 391 __ B(ne, miss_label); | 391 __ B(ne, miss_label); |
| 392 } else if (representation.IsSmi()) { | 392 } else if (representation.IsSmi()) { |
| 393 __ JumpIfNotSmi(value_reg, miss_label); | 393 __ JumpIfNotSmi(value_reg, miss_label); |
| 394 } else if (representation.IsHeapObject()) { | 394 } else if (representation.IsHeapObject()) { |
| 395 __ JumpIfSmi(value_reg, miss_label); |
| 395 HeapType* field_type = descriptors->GetFieldType(descriptor); | 396 HeapType* field_type = descriptors->GetFieldType(descriptor); |
| 396 if (field_type->IsClass()) { | 397 HeapType::Iterator<Map> it = field_type->Classes(); |
| 397 __ CheckMap(value_reg, scratch1, field_type->AsClass()->Map(), | 398 if (!it.Done()) { |
| 398 miss_label, DO_SMI_CHECK); | 399 __ Ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 399 } else { | 400 Label do_store; |
| 400 ASSERT(HeapType::Any()->Is(field_type)); | 401 while (true) { |
| 401 __ JumpIfSmi(value_reg, miss_label); | 402 __ CompareMap(scratch1, it.Current()); |
| 403 it.Advance(); |
| 404 if (it.Done()) { |
| 405 __ B(ne, miss_label); |
| 406 break; |
| 407 } |
| 408 __ B(eq, &do_store); |
| 409 } |
| 410 __ Bind(&do_store); |
| 402 } | 411 } |
| 403 } else if (representation.IsDouble()) { | 412 } else if (representation.IsDouble()) { |
| 404 UseScratchRegisterScope temps(masm); | 413 UseScratchRegisterScope temps(masm); |
| 405 DoubleRegister temp_double = temps.AcquireD(); | 414 DoubleRegister temp_double = temps.AcquireD(); |
| 406 __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag); | 415 __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag); |
| 407 | 416 |
| 408 Label do_store; | 417 Label do_store; |
| 409 __ JumpIfSmi(value_reg, &do_store); | 418 __ JumpIfSmi(value_reg, &do_store); |
| 410 | 419 |
| 411 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, | 420 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, |
| (...skipping 129 matching lines...) |
| 541 // Adjust for the number of properties stored in the object. Even in the | 550 // Adjust for the number of properties stored in the object. Even in the |
| 542 // face of a transition we can use the old map here because the size of the | 551 // face of a transition we can use the old map here because the size of the |
| 543 // object and the number of in-object properties is not going to change. | 552 // object and the number of in-object properties is not going to change. |
| 544 index -= object->map()->inobject_properties(); | 553 index -= object->map()->inobject_properties(); |
| 545 | 554 |
| 546 Representation representation = lookup->representation(); | 555 Representation representation = lookup->representation(); |
| 547 ASSERT(!representation.IsNone()); | 556 ASSERT(!representation.IsNone()); |
| 548 if (representation.IsSmi()) { | 557 if (representation.IsSmi()) { |
| 549 __ JumpIfNotSmi(value_reg, miss_label); | 558 __ JumpIfNotSmi(value_reg, miss_label); |
| 550 } else if (representation.IsHeapObject()) { | 559 } else if (representation.IsHeapObject()) { |
| 560 __ JumpIfSmi(value_reg, miss_label); |
| 551 HeapType* field_type = lookup->GetFieldType(); | 561 HeapType* field_type = lookup->GetFieldType(); |
| 552 if (field_type->IsClass()) { | 562 HeapType::Iterator<Map> it = field_type->Classes(); |
| 553 __ CheckMap(value_reg, scratch1, field_type->AsClass()->Map(), | 563 if (!it.Done()) { |
| 554 miss_label, DO_SMI_CHECK); | 564 __ Ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 555 } else { | 565 Label do_store; |
| 556 ASSERT(HeapType::Any()->Is(field_type)); | 566 while (true) { |
| 557 __ JumpIfSmi(value_reg, miss_label); | 567 __ CompareMap(scratch1, it.Current()); |
| 568 it.Advance(); |
| 569 if (it.Done()) { |
| 570 __ B(ne, miss_label); |
| 571 break; |
| 572 } |
| 573 __ B(eq, &do_store); |
| 574 } |
| 575 __ Bind(&do_store); |
| 558 } | 576 } |
| 559 } else if (representation.IsDouble()) { | 577 } else if (representation.IsDouble()) { |
| 560 UseScratchRegisterScope temps(masm); | 578 UseScratchRegisterScope temps(masm); |
| 561 DoubleRegister temp_double = temps.AcquireD(); | 579 DoubleRegister temp_double = temps.AcquireD(); |
| 562 | 580 |
| 563 __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag); | 581 __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag); |
| 564 | 582 |
| 565 // Load the double storage. | 583 // Load the double storage. |
| 566 if (index < 0) { | 584 if (index < 0) { |
| 567 int offset = (index * kPointerSize) + object->map()->instance_size(); | 585 int offset = (index * kPointerSize) + object->map()->instance_size(); |
| (...skipping 931 matching lines...) |
| 1499 | 1517 |
| 1500 // Miss case, call the runtime. | 1518 // Miss case, call the runtime. |
| 1501 __ Bind(&miss); | 1519 __ Bind(&miss); |
| 1502 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 1520 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
| 1503 } | 1521 } |
| 1504 | 1522 |
| 1505 | 1523 |
| 1506 } } // namespace v8::internal | 1524 } } // namespace v8::internal |
| 1507 | 1525 |
| 1508 #endif // V8_TARGET_ARCH_ARM64 | 1526 #endif // V8_TARGET_ARCH_ARM64 |
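
For reference: in both hunks the IsHeapObject() branch now jumps to miss_label on a smi first, then compares the value's map against every class in the field's HeapType via HeapType::Iterator, instead of a single CheckMap against field_type->AsClass()->Map(). A minimal sketch of that control flow in plain C++ (Map, HeapObject and FieldTypeMatches here are hypothetical stand-ins, not the V8 API; they only mirror what the generated ARM64 code does):

    #include <vector>

    struct Map {};

    struct HeapObject {
      const Map* map;  // every heap object carries a pointer to its map
    };

    // The stub loads the value's map once (Ldr ... kMapOffset) and compares it
    // against each class in the field's HeapType: a match branches to do_store
    // (B eq); only after the last candidate fails does it branch to miss_label
    // (B ne).
    bool FieldTypeMatches(const HeapObject& value,
                          const std::vector<const Map*>& field_classes) {
      // An empty class list corresponds to HeapType::Any(): the stub emits no
      // map comparison at all, so any heap object is accepted.
      if (field_classes.empty()) return true;
      for (const Map* candidate : field_classes) {
        if (value.map == candidate) return true;  // __ B(eq, &do_store)
      }
      return false;  // __ B(ne, miss_label) after the final comparison
    }

Because the iterator drives the emitted comparisons, the stub stays correct when the tracked field type lists more than one map, which the old single-class CheckMap path could not express.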