| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2671 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2682 cmp(map_in_out, FieldOperand(scratch, offset)); | 2682 cmp(map_in_out, FieldOperand(scratch, offset)); |
| 2683 j(not_equal, no_map_match); | 2683 j(not_equal, no_map_match); |
| 2684 | 2684 |
| 2685 // Use the transitioned cached map. | 2685 // Use the transitioned cached map. |
| 2686 offset = transitioned_kind * kPointerSize + | 2686 offset = transitioned_kind * kPointerSize + |
| 2687 FixedArrayBase::kHeaderSize; | 2687 FixedArrayBase::kHeaderSize; |
| 2688 mov(map_in_out, FieldOperand(scratch, offset)); | 2688 mov(map_in_out, FieldOperand(scratch, offset)); |
| 2689 } | 2689 } |
| 2690 | 2690 |
| 2691 | 2691 |
| 2692 void MacroAssembler::LoadInitialArrayMap( |
| 2693 Register function_in, Register scratch, |
| 2694 Register map_out, bool can_have_holes) { |
| 2695 ASSERT(!function_in.is(map_out)); |
| 2696 Label done; |
| 2697 mov(map_out, FieldOperand(function_in, |
| 2698 JSFunction::kPrototypeOrInitialMapOffset)); |
| 2699 if (!FLAG_smi_only_arrays) { |
| 2700 ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS; |
| 2701 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
| 2702 kind, |
| 2703 map_out, |
| 2704 scratch, |
| 2705 &done); |
| 2706 } else if (can_have_holes) { |
| 2707 LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, |
| 2708 FAST_HOLEY_SMI_ELEMENTS, |
| 2709 map_out, |
| 2710 scratch, |
| 2711 &done); |
| 2712 } |
| 2713 bind(&done); |
| 2714 } |
| 2715 |
| 2716 |
| 2692 void MacroAssembler::LoadGlobalContext(Register global_context) { | 2717 void MacroAssembler::LoadGlobalContext(Register global_context) { |
| 2693 // Load the global or builtins object from the current context. | 2718 // Load the global or builtins object from the current context. |
| 2694 mov(global_context, | 2719 mov(global_context, |
| 2695 Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 2720 Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 2696 // Load the native context from the global or builtins object. | 2721 // Load the native context from the global or builtins object. |
| 2697 mov(global_context, | 2722 mov(global_context, |
| 2698 FieldOperand(global_context, GlobalObject::kNativeContextOffset)); | 2723 FieldOperand(global_context, GlobalObject::kNativeContextOffset)); |
| 2699 } | 2724 } |
| 2700 | 2725 |
| 2701 | 2726 |
| (...skipping 246 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2948 test(esp, Immediate(frame_alignment_mask)); | 2973 test(esp, Immediate(frame_alignment_mask)); |
| 2949 j(zero, &alignment_as_expected); | 2974 j(zero, &alignment_as_expected); |
| 2950 // Abort if stack is not aligned. | 2975 // Abort if stack is not aligned. |
| 2951 int3(); | 2976 int3(); |
| 2952 bind(&alignment_as_expected); | 2977 bind(&alignment_as_expected); |
| 2953 } | 2978 } |
| 2954 } | 2979 } |
| 2955 | 2980 |
| 2956 | 2981 |
// Emits code that aborts execution, reporting |reason|'s message string
// through Runtime::kAbort. Does not return (ends in int3).
void MacroAssembler::Abort(BailoutReason reason) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  const char* msg = GetBailoutReason(reason);
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  // p0 is p1 with the low tag bits cleared, so it reads as a valid smi;
  // the dropped difference (p1 - p0) is passed alongside as a real smi
  // so the runtime can reconstruct the original pointer.
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    // Make the abort reason visible in the disassembly.
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    // Debug escape hatch: trap right here instead of calling the runtime.
    int3();
    return;
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // will not return here
  int3();
}
| 2985 | 3019 |
| 2986 | 3020 |
| 2987 void MacroAssembler::Throw(BailoutReason reason) { | 3021 void MacroAssembler::Throw(BailoutReason reason) { |
| 2988 #ifdef DEBUG | 3022 #ifdef DEBUG |
| 2989 const char* msg = GetBailoutReason(reason); | 3023 const char* msg = GetBailoutReason(reason); |
| 2990 if (msg != NULL) { | 3024 if (msg != NULL) { |
| (...skipping 618 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3609 cmp(scratch1, Immediate(DICTIONARY_ELEMENTS)); | 3643 cmp(scratch1, Immediate(DICTIONARY_ELEMENTS)); |
| 3610 j(equal, found); | 3644 j(equal, found); |
| 3611 mov(current, FieldOperand(current, Map::kPrototypeOffset)); | 3645 mov(current, FieldOperand(current, Map::kPrototypeOffset)); |
| 3612 cmp(current, Immediate(factory->null_value())); | 3646 cmp(current, Immediate(factory->null_value())); |
| 3613 j(not_equal, &loop_again); | 3647 j(not_equal, &loop_again); |
| 3614 } | 3648 } |
| 3615 | 3649 |
| 3616 } } // namespace v8::internal | 3650 } } // namespace v8::internal |
| 3617 | 3651 |
| 3618 #endif // V8_TARGET_ARCH_IA32 | 3652 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |