OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
46 Register receiver, | 46 Register receiver, |
47 Register name, | 47 Register name, |
48 // The offset is scaled by 4, based on | 48 // The offset is scaled by 4, based on |
49 // kHeapObjectTagSize, which is two bits | 49 // kHeapObjectTagSize, which is two bits |
50 Register offset) { | 50 Register offset) { |
51 // We need to scale up the pointer by 2 because the offset is scaled by less | 51 // We need to scale up the pointer by 2 because the offset is scaled by less |
52 // than the pointer size. | 52 // than the pointer size. |
53 ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1); | 53 ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1); |
54 ScaleFactor scale_factor = times_2; | 54 ScaleFactor scale_factor = times_2; |
55 | 55 |
56 ASSERT_EQ(24, sizeof(StubCache::Entry)); | 56 ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry)); |
57 // The offset register holds the entry offset times four (due to masking | 57 // The offset register holds the entry offset times four (due to masking |
58 // and shifting optimizations). | 58 // and shifting optimizations). |
59 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | 59 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
60 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); | 60 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); |
61 Label miss; | 61 Label miss; |
62 | 62 |
63 // Multiply by 3 because there are 3 fields per entry (name, code, map). | 63 // Multiply by 3 because there are 3 fields per entry (name, code, map). |
64 __ lea(offset, Operand(offset, offset, times_2, 0)); | 64 __ lea(offset, Operand(offset, offset, times_2, 0)); |
65 | 65 |
66 __ LoadAddress(kScratchRegister, key_offset); | 66 __ LoadAddress(kScratchRegister, key_offset); |
(...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
164 Register scratch, | 164 Register scratch, |
165 Register extra, | 165 Register extra, |
166 Register extra2, | 166 Register extra2, |
167 Register extra3) { | 167 Register extra3) { |
168 Isolate* isolate = masm->isolate(); | 168 Isolate* isolate = masm->isolate(); |
169 Label miss; | 169 Label miss; |
170 USE(extra); // The register extra is not used on the X64 platform. | 170 USE(extra); // The register extra is not used on the X64 platform. |
171 USE(extra2); // The register extra2 is not used on the X64 platform. | 171 USE(extra2); // The register extra2 is not used on the X64 platform. |
172 USE(extra3); // The register extra3 is not used on the X64 platform. | 172 USE(extra3); // The register extra3 is not used on the X64 platform. |
173 // Make sure that code is valid. The multiplying code relies on the | 173 // Make sure that code is valid. The multiplying code relies on the |
174 // entry size being 24. | 174 // entry size being 3 * kPointerSize. |
175 ASSERT(sizeof(Entry) == 24); | 175 ASSERT(sizeof(Entry) == 3 * kPointerSize); |
176 | 176 |
177 // Make sure the flags do not name a specific type. | 177 // Make sure the flags do not name a specific type. |
178 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); | 178 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); |
179 | 179 |
180 // Make sure that there are no register conflicts. | 180 // Make sure that there are no register conflicts. |
181 ASSERT(!scratch.is(receiver)); | 181 ASSERT(!scratch.is(receiver)); |
182 ASSERT(!scratch.is(name)); | 182 ASSERT(!scratch.is(name)); |
183 | 183 |
184 // Check scratch register is valid, extra and extra2 are unused. | 184 // Check scratch register is valid, extra and extra2 are unused. |
185 ASSERT(!scratch.is(no_reg)); | 185 ASSERT(!scratch.is(no_reg)); |
(...skipping 3386 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3572 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); | 3572 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); |
3573 } | 3573 } |
3574 } | 3574 } |
3575 | 3575 |
3576 | 3576 |
3577 #undef __ | 3577 #undef __ |
3578 | 3578 |
3579 } } // namespace v8::internal | 3579 } } // namespace v8::internal |
3580 | 3580 |
3581 #endif // V8_TARGET_ARCH_X64 | 3581 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |