OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 55 matching lines...)
66 V(CounterOp) \ | 66 V(CounterOp) \ |
67 V(ArgumentsAccess) \ | 67 V(ArgumentsAccess) \ |
68 V(RegExpExec) \ | 68 V(RegExpExec) \ |
69 V(RegExpConstructResult) \ | 69 V(RegExpConstructResult) \ |
70 V(NumberToString) \ | 70 V(NumberToString) \ |
71 V(CEntry) \ | 71 V(CEntry) \ |
72 V(JSEntry) \ | 72 V(JSEntry) \ |
73 V(KeyedLoadElement) \ | 73 V(KeyedLoadElement) \ |
74 V(KeyedStoreElement) \ | 74 V(KeyedStoreElement) \ |
75 V(DebuggerStatement) \ | 75 V(DebuggerStatement) \ |
76 V(StringDictionaryNegativeLookup) | 76 V(StringDictionaryLookup) |
77 | 77 |
78 // List of code stubs only used on ARM platforms. | 78 // List of code stubs only used on ARM platforms. |
79 #ifdef V8_TARGET_ARCH_ARM | 79 #ifdef V8_TARGET_ARCH_ARM |
80 #define CODE_STUB_LIST_ARM(V) \ | 80 #define CODE_STUB_LIST_ARM(V) \ |
81 V(GetProperty) \ | 81 V(GetProperty) \ |
82 V(SetProperty) \ | 82 V(SetProperty) \ |
83 V(InvokeBuiltin) \ | 83 V(InvokeBuiltin) \ |
84 V(RegExpCEntry) \ | 84 V(RegExpCEntry) \ |
85 V(DirectCEntry) | 85 V(DirectCEntry) |
86 #else | 86 #else |
(...skipping 48 matching lines...)
135 | 135 |
136 // Gets the major key from a code object that is a code stub or binary op IC. | 136 // Gets the major key from a code object that is a code stub or binary op IC. |
137 static Major GetMajorKey(Code* code_stub) { | 137 static Major GetMajorKey(Code* code_stub) { |
138 return static_cast<Major>(code_stub->major_key()); | 138 return static_cast<Major>(code_stub->major_key()); |
139 } | 139 } |
140 | 140 |
141 static const char* MajorName(Major major_key, bool allow_unknown_keys); | 141 static const char* MajorName(Major major_key, bool allow_unknown_keys); |
142 | 142 |
143 virtual ~CodeStub() {} | 143 virtual ~CodeStub() {} |
144 | 144 |
| 145 // See comment above, where Instanceof is defined. |
| 146 virtual bool CompilingCallsToThisStubIsGCSafe() { |
| 147 return MajorKey() <= Instanceof; |
| 148 } |
| 149 |
| 150 // Some stubs put untagged junk on the stack that cannot be scanned by the |
| 151 // GC. This means that we must be statically sure that no GC can occur while |
| 152 // they are running. If that is the case they should override this to return |
| 153 // false, which will cause an assertion if we try to call something that can |
| 154 // GC or if we try to put a stack frame on top of the junk, which would not |
| 155 // result in a traversable stack. |
| 156 virtual bool SometimesSetsUpAFrame() { return true; } |
| 157 |
145 protected: | 158 protected: |
146 static const int kMajorBits = 6; | 159 static const int kMajorBits = 6; |
147 static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits; | 160 static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits; |
148 | 161 |
149 private: | 162 private: |
150 // Lookup the code in the (possibly custom) cache. | 163 // Lookup the code in the (possibly custom) cache. |
151 bool FindCodeInCache(Code** code_out); | 164 bool FindCodeInCache(Code** code_out); |
152 | 165 |
153 // Nonvirtual wrapper around the stub-specific Generate function. Call | 166 // Nonvirtual wrapper around the stub-specific Generate function. Call |
154 // this function to set up the macro assembler and generate the code. | 167 // this function to set up the macro assembler and generate the code. |
(...skipping 31 matching lines...)
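A minimal standalone sketch of the frame/GC-safety contract added in the hunk above, assuming simplified stand-in classes (StubSketch, FramelessStubSketch and EmitCallThatCanGC are illustrative names, not the real V8 types); the override-to-false pattern mirrors what ToBooleanStub does further down in this file:

#include <cassert>
#include <cstdio>

// Stand-in for the CodeStub base: by default a stub may set up a frame,
// so operations that can trigger a GC are allowed while it runs.
class StubSketch {
 public:
  virtual ~StubSketch() {}
  virtual bool SometimesSetsUpAFrame() { return true; }
};

// A stub that leaves untagged junk on the stack and never builds a frame;
// it opts out so callers can assert that nothing GC-triggering is emitted
// and that no frame is pushed on top of the junk.
class FramelessStubSketch : public StubSketch {
 public:
  virtual bool SometimesSetsUpAFrame() { return false; }
};

// Caller-side check: before emitting code that can GC, assert the stub
// allows it (this is where the assertion mentioned in the comment fires).
void EmitCallThatCanGC(StubSketch* stub) {
  assert(stub->SometimesSetsUpAFrame());
  printf("emitted a call that may trigger GC\n");
}

int main() {
  StubSketch framed;
  EmitCallThatCanGC(&framed);      // fine: default stubs may set up a frame
  // FramelessStubSketch frameless;
  // EmitCallThatCanGC(&frameless); // would hit the assertion
  return 0;
}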
186 // a fixed (non-moveable) code object. | 199 // a fixed (non-moveable) code object. |
187 virtual bool NeedsImmovableCode() { return false; } | 200 virtual bool NeedsImmovableCode() { return false; } |
188 | 201 |
189 // Computes the key based on major and minor. | 202 // Computes the key based on major and minor. |
190 uint32_t GetKey() { | 203 uint32_t GetKey() { |
191 ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS); | 204 ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS); |
192 return MinorKeyBits::encode(MinorKey()) | | 205 return MinorKeyBits::encode(MinorKey()) | |
193 MajorKeyBits::encode(MajorKey()); | 206 MajorKeyBits::encode(MajorKey()); |
194 } | 207 } |
195 | 208 |
196 // See comment above, where Instanceof is defined. | |
197 bool AllowsStubCalls() { return MajorKey() <= Instanceof; } | |
198 | |
199 class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {}; | 209 class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {}; |
200 class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {}; | 210 class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {}; |
201 | 211 |
202 friend class BreakPointIterator; | 212 friend class BreakPointIterator; |
203 }; | 213 }; |
204 | 214 |
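A short sketch of how GetKey() above packs the two keys with the BitField helpers, assuming a simplified stand-in for V8's BitField template (BitFieldSketch and the *Sketch constants are illustrative) and the values declared in the protected section: kMajorBits = 6, with kBitsPerInt taken as 32 and kSmiTagSize as 1:

#include <cassert>
#include <cstdint>

// Simplified stand-in for v8::internal::BitField<T, shift, size>.
template <class T, int shift, int size>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << size) - 1) << shift;
  static uint32_t encode(T value) { return static_cast<uint32_t>(value) << shift; }
  static T decode(uint32_t key) { return static_cast<T>((key & kMask) >> shift); }
};

static const int kMajorBitsSketch = 6;
static const int kMinorBitsSketch = 32 - 1 - kMajorBitsSketch;  // kBitsPerInt - kSmiTagSize - kMajorBits

typedef BitFieldSketch<uint32_t, 0, kMajorBitsSketch> MajorKeyBitsSketch;
typedef BitFieldSketch<uint32_t, kMajorBitsSketch, kMinorBitsSketch> MinorKeyBitsSketch;

int main() {
  uint32_t major = 7;     // hypothetical MajorKey()
  uint32_t minor = 1234;  // hypothetical MinorKey()
  // Same shape as CodeStub::GetKey(): minor key in the high bits, major key
  // in the low 6 bits of a single uint32_t.
  uint32_t key = MinorKeyBitsSketch::encode(minor) | MajorKeyBitsSketch::encode(major);
  assert(MajorKeyBitsSketch::decode(key) == major);
  assert(MinorKeyBitsSketch::decode(key) == minor);
  return 0;
}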
205 | 215 |
206 // Helper interface to prepare to/restore after making runtime calls. | 216 // Helper interface to prepare to/restore after making runtime calls. |
207 class RuntimeCallHelper { | 217 class RuntimeCallHelper { |
208 public: | 218 public: |
(...skipping 321 matching lines...)
530 | 540 |
531 | 541 |
532 class CEntryStub : public CodeStub { | 542 class CEntryStub : public CodeStub { |
533 public: | 543 public: |
534 explicit CEntryStub(int result_size) | 544 explicit CEntryStub(int result_size) |
535 : result_size_(result_size), save_doubles_(false) { } | 545 : result_size_(result_size), save_doubles_(false) { } |
536 | 546 |
537 void Generate(MacroAssembler* masm); | 547 void Generate(MacroAssembler* masm); |
538 void SaveDoubles() { save_doubles_ = true; } | 548 void SaveDoubles() { save_doubles_ = true; } |
539 | 549 |
| 550 // The version of this stub that doesn't save doubles is generated ahead of |
| 551 // time, so it's OK to call it from other stubs that can't cope with GC during |
| 552 // their code generation. |
| 553 virtual bool CompilingCallsToThisStubIsGCSafe() { return !save_doubles_; } |
| 554 |
540 private: | 555 private: |
541 void GenerateCore(MacroAssembler* masm, | 556 void GenerateCore(MacroAssembler* masm, |
542 Label* throw_normal_exception, | 557 Label* throw_normal_exception, |
543 Label* throw_termination_exception, | 558 Label* throw_termination_exception, |
544 Label* throw_out_of_memory_exception, | 559 Label* throw_out_of_memory_exception, |
545 bool do_gc, | 560 bool do_gc, |
546 bool always_allocate_scope); | 561 bool always_allocate_scope); |
547 void GenerateThrowTOS(MacroAssembler* masm); | 562 void GenerateThrowTOS(MacroAssembler* masm); |
548 void GenerateThrowUncatchable(MacroAssembler* masm, | 563 void GenerateThrowUncatchable(MacroAssembler* masm, |
549 UncatchableExceptionType type); | 564 UncatchableExceptionType type); |
(...skipping 377 matching lines...)
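A sketch of the intent behind the new CEntryStub override above, under the assumption (not shown in this hunk) that a stub which cannot cope with GC during its own code generation checks CompilingCallsToThisStubIsGCSafe() on the callee before emitting a call; CEntryStubSketch and EmitCallToStub are illustrative stand-ins, not the real V8 code:

#include <cassert>
#include <cstdio>

// Stand-in for the relevant slice of CEntryStub.
class CEntryStubSketch {
 public:
  explicit CEntryStubSketch(int result_size)
      : result_size_(result_size), save_doubles_(false) {}
  virtual ~CEntryStubSketch() {}
  void SaveDoubles() { save_doubles_ = true; }
  // Only the no-doubles flavour is generated ahead of time, so only calls to
  // that flavour are safe to emit while another stub is being compiled.
  virtual bool CompilingCallsToThisStubIsGCSafe() { return !save_doubles_; }
 private:
  int result_size_;
  bool save_doubles_;
};

// Hypothetical caller-side guard used during another stub's code generation.
void EmitCallToStub(CEntryStubSketch* callee) {
  assert(callee->CompilingCallsToThisStubIsGCSafe());
  printf("call emitted\n");
}

int main() {
  CEntryStubSketch plain(1);
  EmitCallToStub(&plain);        // OK: pregenerated, GC-safe to call
  CEntryStubSketch doubles(1);
  doubles.SaveDoubles();
  // EmitCallToStub(&doubles);   // would assert: not generated ahead of time
  return 0;
}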
927 static Types no_types() { return Types(); } | 942 static Types no_types() { return Types(); } |
928 static Types all_types() { return Types((1 << NUMBER_OF_TYPES) - 1); } | 943 static Types all_types() { return Types((1 << NUMBER_OF_TYPES) - 1); } |
929 | 944 |
930 explicit ToBooleanStub(Register tos, Types types = Types()) | 945 explicit ToBooleanStub(Register tos, Types types = Types()) |
931 : tos_(tos), types_(types) { } | 946 : tos_(tos), types_(types) { } |
932 | 947 |
933 void Generate(MacroAssembler* masm); | 948 void Generate(MacroAssembler* masm); |
934 virtual int GetCodeKind() { return Code::TO_BOOLEAN_IC; } | 949 virtual int GetCodeKind() { return Code::TO_BOOLEAN_IC; } |
935 virtual void PrintName(StringStream* stream); | 950 virtual void PrintName(StringStream* stream); |
936 | 951 |
| 952 virtual bool SometimesSetsUpAFrame() { return false; } |
| 953 |
937 private: | 954 private: |
938 Major MajorKey() { return ToBoolean; } | 955 Major MajorKey() { return ToBoolean; } |
939 int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); } | 956 int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); } |
940 | 957 |
941 virtual void FinishCode(Code* code) { | 958 virtual void FinishCode(Code* code) { |
942 code->set_to_boolean_state(types_.ToByte()); | 959 code->set_to_boolean_state(types_.ToByte()); |
943 } | 960 } |
944 | 961 |
945 void CheckOddball(MacroAssembler* masm, | 962 void CheckOddball(MacroAssembler* masm, |
946 Type type, | 963 Type type, |
947 Heap::RootListIndex value, | 964 Heap::RootListIndex value, |
948 bool result); | 965 bool result); |
949 void GenerateTypeTransition(MacroAssembler* masm); | 966 void GenerateTypeTransition(MacroAssembler* masm); |
950 | 967 |
951 Register tos_; | 968 Register tos_; |
952 Types types_; | 969 Types types_; |
953 }; | 970 }; |
954 | 971 |
955 } } // namespace v8::internal | 972 } } // namespace v8::internal |
956 | 973 |
957 #endif // V8_CODE_STUBS_H_ | 974 #endif // V8_CODE_STUBS_H_ |