| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 412 matching lines...) | |
| 423 ASSERT(!representation.IsNone()); | 423 ASSERT(!representation.IsNone()); |
| 424 | 424 |
| 425 if (details.type() == CONSTANT) { | 425 if (details.type() == CONSTANT) { |
| 426 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); | 426 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); |
| 427 __ Move(scratch1, constant); | 427 __ Move(scratch1, constant); |
| 428 __ cmp(value_reg, scratch1); | 428 __ cmp(value_reg, scratch1); |
| 429 __ b(ne, miss_label); | 429 __ b(ne, miss_label); |
| 430 } else if (representation.IsSmi()) { | 430 } else if (representation.IsSmi()) { |
| 431 __ JumpIfNotSmi(value_reg, miss_label); | 431 __ JumpIfNotSmi(value_reg, miss_label); |
| 432 } else if (representation.IsHeapObject()) { | 432 } else if (representation.IsHeapObject()) { |
| 433 __ JumpIfSmi(value_reg, miss_label); |
| 433 HeapType* field_type = descriptors->GetFieldType(descriptor); | 434 HeapType* field_type = descriptors->GetFieldType(descriptor); |
| 434 if (field_type->IsClass()) { | 435 HeapType::Iterator<Map> it = field_type->Classes(); |
| 435 __ CheckMap(value_reg, scratch1, field_type->AsClass()->Map(), | 436 if (!it.Done()) { |
| 436 miss_label, DO_SMI_CHECK); | 437 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 437 } else { | 438 Label do_store; |
| 438 ASSERT(HeapType::Any()->Is(field_type)); | 439 while (true) { |
| 439 __ JumpIfSmi(value_reg, miss_label); | 440 __ CompareMap(scratch1, it.Current(), &do_store); |
| 441 it.Advance(); |
| 442 if (it.Done()) { |
| 443 __ b(ne, miss_label); |
| 444 break; |
| 445 } |
| 446 __ b(eq, &do_store); |
| 447 } |
| 448 __ bind(&do_store); |
| 440 } | 449 } |
| 441 } else if (representation.IsDouble()) { | 450 } else if (representation.IsDouble()) { |
| 442 Label do_store, heap_number; | 451 Label do_store, heap_number; |
| 443 __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); | 452 __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); |
| 444 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow); | 453 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow); |
| 445 | 454 |
| 446 __ JumpIfNotSmi(value_reg, &heap_number); | 455 __ JumpIfNotSmi(value_reg, &heap_number); |
| 447 __ SmiUntag(scratch1, value_reg); | 456 __ SmiUntag(scratch1, value_reg); |
| 448 __ vmov(s0, scratch1); | 457 __ vmov(s0, scratch1); |
| 449 __ vcvt_f64_s32(d0, s0); | 458 __ vcvt_f64_s32(d0, s0); |
| (...skipping 142 matching lines...) | |
| 592 // Adjust for the number of properties stored in the object. Even in the | 601 // Adjust for the number of properties stored in the object. Even in the |
| 593 // face of a transition we can use the old map here because the size of the | 602 // face of a transition we can use the old map here because the size of the |
| 594 // object and the number of in-object properties is not going to change. | 603 // object and the number of in-object properties is not going to change. |
| 595 index -= object->map()->inobject_properties(); | 604 index -= object->map()->inobject_properties(); |
| 596 | 605 |
| 597 Representation representation = lookup->representation(); | 606 Representation representation = lookup->representation(); |
| 598 ASSERT(!representation.IsNone()); | 607 ASSERT(!representation.IsNone()); |
| 599 if (representation.IsSmi()) { | 608 if (representation.IsSmi()) { |
| 600 __ JumpIfNotSmi(value_reg, miss_label); | 609 __ JumpIfNotSmi(value_reg, miss_label); |
| 601 } else if (representation.IsHeapObject()) { | 610 } else if (representation.IsHeapObject()) { |
| 611 __ JumpIfSmi(value_reg, miss_label); |
| 602 HeapType* field_type = lookup->GetFieldType(); | 612 HeapType* field_type = lookup->GetFieldType(); |
| 603 if (field_type->IsClass()) { | 613 HeapType::Iterator<Map> it = field_type->Classes(); |
| 604 __ CheckMap(value_reg, scratch1, field_type->AsClass()->Map(), | 614 if (!it.Done()) { |
| 605 miss_label, DO_SMI_CHECK); | 615 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 606 } else { | 616 Label do_store; |
| 607 ASSERT(HeapType::Any()->Is(field_type)); | 617 while (true) { |
| 608 __ JumpIfSmi(value_reg, miss_label); | 618 __ CompareMap(scratch1, it.Current(), &do_store); |
| 619 it.Advance(); |
| 620 if (it.Done()) { |
| 621 __ b(ne, miss_label); |
| 622 break; |
| 623 } |
| 624 __ b(eq, &do_store); |
| 625 } |
| 626 __ bind(&do_store); |
| 609 } | 627 } |
| 610 } else if (representation.IsDouble()) { | 628 } else if (representation.IsDouble()) { |
| 611 // Load the double storage. | 629 // Load the double storage. |
| 612 if (index < 0) { | 630 if (index < 0) { |
| 613 int offset = object->map()->instance_size() + (index * kPointerSize); | 631 int offset = object->map()->instance_size() + (index * kPointerSize); |
| 614 __ ldr(scratch1, FieldMemOperand(receiver_reg, offset)); | 632 __ ldr(scratch1, FieldMemOperand(receiver_reg, offset)); |
| 615 } else { | 633 } else { |
| 616 __ ldr(scratch1, | 634 __ ldr(scratch1, |
| 617 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | 635 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 618 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 636 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| (...skipping 919 matching lines...) | |
| 1538 // ----------------------------------- | 1556 // ----------------------------------- |
| 1539 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 1557 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
| 1540 } | 1558 } |
| 1541 | 1559 |
| 1542 | 1560 |
| 1543 #undef __ | 1561 #undef __ |
| 1544 | 1562 |
| 1545 } } // namespace v8::internal | 1563 } } // namespace v8::internal |
| 1546 | 1564 |
| 1547 #endif // V8_TARGET_ARCH_ARM | 1565 #endif // V8_TARGET_ARCH_ARM |