| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 43 matching lines...) |
| 54 }; | 54 }; |
| 55 | 55 |
| 56 // Maps handle location (slot) to the containing node. | 56 // Maps handle location (slot) to the containing node. |
| 57 static Node* FromLocation(Object** location) { | 57 static Node* FromLocation(Object** location) { |
| 58 ASSERT(OFFSET_OF(Node, object_) == 0); | 58 ASSERT(OFFSET_OF(Node, object_) == 0); |
| 59 return reinterpret_cast<Node*>(location); | 59 return reinterpret_cast<Node*>(location); |
| 60 } | 60 } |
| 61 | 61 |
| 62 Node() { | 62 Node() { |
| 63 ASSERT(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset); | 63 ASSERT(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset); |
| 64 ASSERT(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset); |
| 64 ASSERT(static_cast<int>(IsIndependent::kShift) == | 65 ASSERT(static_cast<int>(IsIndependent::kShift) == |
| 65 Internals::kNodeIsIndependentShift); | 66 Internals::kNodeIsIndependentShift); |
| 66 ASSERT(static_cast<int>(IsPartiallyDependent::kShift) == | 67 ASSERT(static_cast<int>(IsPartiallyDependent::kShift) == |
| 67 Internals::kNodeIsPartiallyDependentShift); | 68 Internals::kNodeIsPartiallyDependentShift); |
| 68 } | 69 } |
| 69 | 70 |
| 70 #ifdef DEBUG | 71 #ifdef DEBUG |
| 71 ~Node() { | 72 ~Node() { |
| 72 // TODO(1428): if it's a weak handle we should have invoked its callback. | 73 // TODO(1428): if it's a weak handle we should have invoked its callback. |
| 73 // Zap the values for eager trapping. | 74 // Zap the values for eager trapping. |
| (...skipping 751 matching lines...) |
| 825 implicit_ref_groups_.Clear(); | 826 implicit_ref_groups_.Clear(); |
| 826 } | 827 } |
| 827 | 828 |
| 828 | 829 |
| 829 void GlobalHandles::TearDown() { | 830 void GlobalHandles::TearDown() { |
| 830 // TODO(1428): invoke weak callbacks. | 831 // TODO(1428): invoke weak callbacks. |
| 831 } | 832 } |
| 832 | 833 |
| 833 | 834 |
| 834 } } // namespace v8::internal | 835 } } // namespace v8::internal |
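
Side note on the layout checks being extended here: the new ASSERT ties OFFSET_OF(Node, class_id_) to Internals::kNodeClassIdOffset the same way the existing ones tie flags_ and the bitfield shifts to their published constants, and FromLocation() only works because object_ sits at offset 0. Below is a minimal standalone sketch of both patterns; this is not V8's actual code, the constant values are assumptions derived from the field order in the sketch, and static_assert stands in for the runtime ASSERT macro used in the diff.

#include <cstddef>   // offsetof, size_t
#include <cstdint>   // uint16_t, uint8_t
#include <cstdio>    // printf

struct Object {};  // stand-in for v8::internal::Object

// Hypothetical stand-ins for the Internals::kNode*Offset constants in
// v8.h; the values are assumptions that match the field order below,
// not V8's real layout.
constexpr size_t kNodeClassIdOffset = sizeof(Object*);
constexpr size_t kNodeFlagsOffset = sizeof(Object*) + sizeof(uint16_t);

struct Node {
  Object* object_;     // must stay at offset 0 for FromLocation()
  uint16_t class_id_;
  uint8_t flags_;

  // Maps a handle location (slot) back to the containing Node. Only
  // valid because object_ is the first field, so the slot address and
  // the node address coincide.
  static Node* FromLocation(Object** location) {
    static_assert(offsetof(Node, object_) == 0, "object_ must be first");
    return reinterpret_cast<Node*>(location);
  }
};

// Compile-time equivalents of the runtime ASSERTs added in this diff:
// if the struct layout drifts from the published constants, the build
// breaks instead of misbehaving at runtime.
static_assert(offsetof(Node, class_id_) == kNodeClassIdOffset,
              "class_id_ must match the published offset");
static_assert(offsetof(Node, flags_) == kNodeFlagsOffset,
              "flags_ must match the published offset");

int main() {
  Node n;
  n.object_ = nullptr;
  Object** slot = &n.object_;  // a "handle location" is the slot address
  std::printf("round-trip ok: %d\n", Node::FromLocation(slot) == &n);
  return 0;
}

Pinning the offsets down like this is presumably what lets inline code on the embedder side read fields such as the class id directly from a handle location without a call into the library, which is why layout drift has to fail loudly.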