Chromium Code Reviews

Side by Side Diff: src/ic/x64/ic-compiler-x64.cc

Issue 483683005: Move IC code into a subdir and move ic-compilation-related code from stub-cache into ic-compiler (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix BUILD.gn (created 6 years, 3 months ago)
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_X64 7 #if V8_TARGET_ARCH_X64
8 8
9 #include "src/arguments.h" 9 #include "src/ic/ic-compiler.h"
10 #include "src/codegen.h"
11 #include "src/ic-inl.h"
12 #include "src/stub-cache.h"
13 10
14 namespace v8 { 11 namespace v8 {
15 namespace internal { 12 namespace internal {
16 13
17 #define __ ACCESS_MASM(masm) 14 #define __ ACCESS_MASM(masm)
18 15
19 16
20 static void ProbeTable(Isolate* isolate,
21 MacroAssembler* masm,
22 Code::Flags flags,
23 StubCache::Table table,
24 Register receiver,
25 Register name,
26 // The offset is scaled by 4, based on
27 // kCacheIndexShift, which is two bits
28 Register offset) {
29 // We need to scale up the pointer by 2 when the offset is scaled by less
30 // than the pointer size.
31 DCHECK(kPointerSize == kInt64Size
32 ? kPointerSizeLog2 == StubCache::kCacheIndexShift + 1
33 : kPointerSizeLog2 == StubCache::kCacheIndexShift);
34 ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1;
35
36 DCHECK_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
37 // The offset register holds the entry offset times four (due to masking
38 // and shifting optimizations).
39 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
40 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
41 Label miss;
42
43 // Multiply by 3 because there are 3 fields per entry (name, code, map).
44 __ leap(offset, Operand(offset, offset, times_2, 0));
45
46 __ LoadAddress(kScratchRegister, key_offset);
47
48 // Check that the key in the entry matches the name.
49 // Multiply entry offset by 16 to get the entry address. Since the
50 // offset register already holds the entry offset times four, multiply
51 // by a further four.
52 __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
53 __ j(not_equal, &miss);
54
55 // Get the map entry from the cache.
56 // Use key_offset + kPointerSize * 2, rather than loading map_offset.
57 __ movp(kScratchRegister,
58 Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
59 __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
60 __ j(not_equal, &miss);
61
62 // Get the code entry from the cache.
63 __ LoadAddress(kScratchRegister, value_offset);
64 __ movp(kScratchRegister,
65 Operand(kScratchRegister, offset, scale_factor, 0));
66
67 // Check that the flags match what we're looking for.
68 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
69 __ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup));
70 __ cmpl(offset, Immediate(flags));
71 __ j(not_equal, &miss);
72
73 #ifdef DEBUG
74 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
75 __ jmp(&miss);
76 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
77 __ jmp(&miss);
78 }
79 #endif
80
81 // Jump to the first instruction in the code stub.
82 __ addp(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
83 __ jmp(kScratchRegister);
84
85 __ bind(&miss);
86 }
87
88
89 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( 17 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
90 MacroAssembler* masm, Label* miss_label, Register receiver, 18 MacroAssembler* masm, Label* miss_label, Register receiver,
91 Handle<Name> name, Register scratch0, Register scratch1) { 19 Handle<Name> name, Register scratch0, Register scratch1) {
92 DCHECK(name->IsUniqueName()); 20 DCHECK(name->IsUniqueName());
93 DCHECK(!receiver.is(scratch0)); 21 DCHECK(!receiver.is(scratch0));
94 Counters* counters = masm->isolate()->counters(); 22 Counters* counters = masm->isolate()->counters();
95 __ IncrementCounter(counters->negative_lookups(), 1); 23 __ IncrementCounter(counters->negative_lookups(), 1);
96 __ IncrementCounter(counters->negative_lookups_miss(), 1); 24 __ IncrementCounter(counters->negative_lookups_miss(), 1);
97 25
98 __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset)); 26 __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
(...skipping 13 matching lines...)
112 // Load properties array. 40 // Load properties array.
113 Register properties = scratch0; 41 Register properties = scratch0;
114 __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset)); 42 __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
115 43
116 // Check that the properties array is a dictionary. 44 // Check that the properties array is a dictionary.
117 __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset), 45 __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
118 Heap::kHashTableMapRootIndex); 46 Heap::kHashTableMapRootIndex);
119 __ j(not_equal, miss_label); 47 __ j(not_equal, miss_label);
120 48
121 Label done; 49 Label done;
122 NameDictionaryLookupStub::GenerateNegativeLookup(masm, 50 NameDictionaryLookupStub::GenerateNegativeLookup(masm, miss_label, &done,
123 miss_label, 51 properties, name, scratch1);
124 &done,
125 properties,
126 name,
127 scratch1);
128 __ bind(&done); 52 __ bind(&done);
129 __ DecrementCounter(counters->negative_lookups_miss(), 1); 53 __ DecrementCounter(counters->negative_lookups_miss(), 1);
130 } 54 }
131 55
132 56
133 void StubCache::GenerateProbe(MacroAssembler* masm,
134 Code::Flags flags,
135 Register receiver,
136 Register name,
137 Register scratch,
138 Register extra,
139 Register extra2,
140 Register extra3) {
141 Isolate* isolate = masm->isolate();
142 Label miss;
143 USE(extra); // The register extra is not used on the X64 platform.
144 USE(extra2); // The register extra2 is not used on the X64 platform.
145 USE(extra3); // The register extra2 is not used on the X64 platform.
146 // Make sure that code is valid. The multiplying code relies on the
147 // entry size being 3 * kPointerSize.
148 DCHECK(sizeof(Entry) == 3 * kPointerSize);
149
150 // Make sure the flags do not name a specific type.
151 DCHECK(Code::ExtractTypeFromFlags(flags) == 0);
152
153 // Make sure that there are no register conflicts.
154 DCHECK(!scratch.is(receiver));
155 DCHECK(!scratch.is(name));
156
157 // Check scratch register is valid, extra and extra2 are unused.
158 DCHECK(!scratch.is(no_reg));
159 DCHECK(extra2.is(no_reg));
160 DCHECK(extra3.is(no_reg));
161
162 Counters* counters = masm->isolate()->counters();
163 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
164
165 // Check that the receiver isn't a smi.
166 __ JumpIfSmi(receiver, &miss);
167
168 // Get the map of the receiver and compute the hash.
169 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
170 // Use only the low 32 bits of the map pointer.
171 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
172 __ xorp(scratch, Immediate(flags));
173 // We mask out the last two bits because they are not part of the hash and
174 // they are always 01 for maps. Also in the two 'and' instructions below.
175 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
176
177 // Probe the primary table.
178 ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);
179
180 // Primary miss: Compute hash for secondary probe.
181 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
182 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
183 __ xorp(scratch, Immediate(flags));
184 __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
185 __ subl(scratch, name);
186 __ addl(scratch, Immediate(flags));
187 __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kCacheIndexShift));
188
189 // Probe the secondary table.
190 ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);
191
192 // Cache miss: Fall-through and let caller handle the miss by
193 // entering the runtime system.
194 __ bind(&miss);
195 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
196 }
197
198
199 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype( 57 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
200 MacroAssembler* masm, int index, Register prototype, Label* miss) { 58 MacroAssembler* masm, int index, Register prototype, Label* miss) {
201 Isolate* isolate = masm->isolate(); 59 Isolate* isolate = masm->isolate();
202 // Get the global function with the given index. 60 // Get the global function with the given index.
203 Handle<JSFunction> function( 61 Handle<JSFunction> function(
204 JSFunction::cast(isolate->native_context()->get(index))); 62 JSFunction::cast(isolate->native_context()->get(index)));
205 63
206 // Check we're still in the same context. 64 // Check we're still in the same context.
207 Register scratch = prototype; 65 Register scratch = prototype;
208 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); 66 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
(...skipping 11 matching lines...)
220 78
221 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype( 79 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
222 MacroAssembler* masm, Register receiver, Register result, Register scratch, 80 MacroAssembler* masm, Register receiver, Register result, Register scratch,
223 Label* miss_label) { 81 Label* miss_label) {
224 __ TryGetFunctionPrototype(receiver, result, miss_label); 82 __ TryGetFunctionPrototype(receiver, result, miss_label);
225 if (!result.is(rax)) __ movp(rax, result); 83 if (!result.is(rax)) __ movp(rax, result);
226 __ ret(0); 84 __ ret(0);
227 } 85 }
228 86
229 87
230 static void PushInterceptorArguments(MacroAssembler* masm, 88 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
231 Register receiver, 89 Register holder, Register name,
232 Register holder,
233 Register name,
234 Handle<JSObject> holder_obj) { 90 Handle<JSObject> holder_obj) {
235 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); 91 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
236 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); 92 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
237 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); 93 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
238 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); 94 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
239 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4); 95 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
240 __ Push(name); 96 __ Push(name);
241 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor()); 97 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
242 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor)); 98 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
243 __ Move(kScratchRegister, interceptor); 99 __ Move(kScratchRegister, interceptor);
244 __ Push(kScratchRegister); 100 __ Push(kScratchRegister);
245 __ Push(receiver); 101 __ Push(receiver);
246 __ Push(holder); 102 __ Push(holder);
247 } 103 }
248 104
249 105
250 static void CompileCallLoadPropertyWithInterceptor( 106 static void CompileCallLoadPropertyWithInterceptor(
251 MacroAssembler* masm, 107 MacroAssembler* masm, Register receiver, Register holder, Register name,
252 Register receiver, 108 Handle<JSObject> holder_obj, IC::UtilityId id) {
253 Register holder,
254 Register name,
255 Handle<JSObject> holder_obj,
256 IC::UtilityId id) {
257 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 109 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
258 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()), 110 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
259 NamedLoadHandlerCompiler::kInterceptorArgsLength); 111 NamedLoadHandlerCompiler::kInterceptorArgsLength);
260 } 112 }
261 113
262 114
263 // Generate call to api function. 115 // Generate call to api function.
264 void PropertyHandlerCompiler::GenerateFastApiCall( 116 void PropertyHandlerCompiler::GenerateFastApiCall(
265 MacroAssembler* masm, const CallOptimization& optimization, 117 MacroAssembler* masm, const CallOptimization& optimization,
266 Handle<Map> receiver_map, Register receiver, Register scratch_in, 118 Handle<Map> receiver_map, Register receiver, Register scratch_in,
267 bool is_store, int argc, Register* values) { 119 bool is_store, int argc, Register* values) {
268 DCHECK(optimization.is_simple_api_call()); 120 DCHECK(optimization.is_simple_api_call());
269 121
270 __ PopReturnAddressTo(scratch_in); 122 __ PopReturnAddressTo(scratch_in);
271 // receiver 123 // receiver
272 __ Push(receiver); 124 __ Push(receiver);
273 // Write the arguments to stack frame. 125 // Write the arguments to stack frame.
274 for (int i = 0; i < argc; i++) { 126 for (int i = 0; i < argc; i++) {
275 Register arg = values[argc-1-i]; 127 Register arg = values[argc - 1 - i];
276 DCHECK(!receiver.is(arg)); 128 DCHECK(!receiver.is(arg));
277 DCHECK(!scratch_in.is(arg)); 129 DCHECK(!scratch_in.is(arg));
278 __ Push(arg); 130 __ Push(arg);
279 } 131 }
280 __ PushReturnAddressFrom(scratch_in); 132 __ PushReturnAddressFrom(scratch_in);
281 // Stack now matches JSFunction abi. 133 // Stack now matches JSFunction abi.
282 134
283 // Abi for CallApiFunctionStub. 135 // Abi for CallApiFunctionStub.
284 Register callee = rax; 136 Register callee = rax;
285 Register call_data = rbx; 137 Register call_data = rbx;
286 Register holder = rcx; 138 Register holder = rcx;
287 Register api_function_address = rdx; 139 Register api_function_address = rdx;
288 Register scratch = rdi; // scratch_in is no longer valid. 140 Register scratch = rdi; // scratch_in is no longer valid.
289 141
290 // Put holder in place. 142 // Put holder in place.
291 CallOptimization::HolderLookup holder_lookup; 143 CallOptimization::HolderLookup holder_lookup;
292 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType( 144 Handle<JSObject> api_holder =
293 receiver_map, 145 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
294 &holder_lookup);
295 switch (holder_lookup) { 146 switch (holder_lookup) {
296 case CallOptimization::kHolderIsReceiver: 147 case CallOptimization::kHolderIsReceiver:
297 __ Move(holder, receiver); 148 __ Move(holder, receiver);
298 break; 149 break;
299 case CallOptimization::kHolderFound: 150 case CallOptimization::kHolderFound:
300 __ Move(holder, api_holder); 151 __ Move(holder, api_holder);
301 break; 152 break;
302 case CallOptimization::kHolderNotFound: 153 case CallOptimization::kHolderNotFound:
303 UNREACHABLE(); 154 UNREACHABLE();
304 break; 155 break;
305 } 156 }
306 157
307 Isolate* isolate = masm->isolate(); 158 Isolate* isolate = masm->isolate();
308 Handle<JSFunction> function = optimization.constant_function(); 159 Handle<JSFunction> function = optimization.constant_function();
309 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); 160 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
310 Handle<Object> call_data_obj(api_call_info->data(), isolate); 161 Handle<Object> call_data_obj(api_call_info->data(), isolate);
311 162
312 // Put callee in place. 163 // Put callee in place.
313 __ Move(callee, function); 164 __ Move(callee, function);
314 165
315 bool call_data_undefined = false; 166 bool call_data_undefined = false;
316 // Put call_data in place. 167 // Put call_data in place.
317 if (isolate->heap()->InNewSpace(*call_data_obj)) { 168 if (isolate->heap()->InNewSpace(*call_data_obj)) {
318 __ Move(scratch, api_call_info); 169 __ Move(scratch, api_call_info);
319 __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset)); 170 __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
320 } else if (call_data_obj->IsUndefined()) { 171 } else if (call_data_obj->IsUndefined()) {
321 call_data_undefined = true; 172 call_data_undefined = true;
322 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); 173 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
323 } else { 174 } else {
324 __ Move(call_data, call_data_obj); 175 __ Move(call_data, call_data_obj);
325 } 176 }
326 177
327 // Put api_function_address in place. 178 // Put api_function_address in place.
328 Address function_address = v8::ToCData<Address>(api_call_info->callback()); 179 Address function_address = v8::ToCData<Address>(api_call_info->callback());
329 __ Move( 180 __ Move(api_function_address, function_address,
330 api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE); 181 RelocInfo::EXTERNAL_REFERENCE);
331 182
332 // Jump to stub. 183 // Jump to stub.
333 CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); 184 CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
334 __ TailCallStub(&stub); 185 __ TailCallStub(&stub);
335 } 186 }
336 187
337 188
338 void PropertyHandlerCompiler::GenerateCheckPropertyCell( 189 void PropertyHandlerCompiler::GenerateCheckPropertyCell(
339 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, 190 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
340 Register scratch, Label* miss) { 191 Register scratch, Label* miss) {
341 Handle<PropertyCell> cell = 192 Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
342 JSGlobalObject::EnsurePropertyCell(global, name);
343 DCHECK(cell->value()->IsTheHole()); 193 DCHECK(cell->value()->IsTheHole());
344 __ Move(scratch, cell); 194 __ Move(scratch, cell);
345 __ Cmp(FieldOperand(scratch, Cell::kValueOffset), 195 __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
346 masm->isolate()->factory()->the_hole_value()); 196 masm->isolate()->factory()->the_hole_value());
347 __ j(not_equal, miss); 197 __ j(not_equal, miss);
348 } 198 }
349 199
350 200
351 void PropertyAccessCompiler::GenerateTailCall(MacroAssembler* masm, 201 void PropertyAccessCompiler::GenerateTailCall(MacroAssembler* masm,
352 Handle<Code> code) { 202 Handle<Code> code) {
(...skipping 85 matching lines...)
438 isolate()), 288 isolate()),
439 3, 1); 289 3, 1);
440 return; 290 return;
441 } 291 }
442 292
443 // Update the map of the object. 293 // Update the map of the object.
444 __ Move(scratch1, transition); 294 __ Move(scratch1, transition);
445 __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); 295 __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
446 296
447 // Update the write barrier for the map field. 297 // Update the write barrier for the map field.
448 __ RecordWriteField(receiver_reg, 298 __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2,
449 HeapObject::kMapOffset, 299 kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
450 scratch1,
451 scratch2,
452 kDontSaveFPRegs,
453 OMIT_REMEMBERED_SET,
454 OMIT_SMI_CHECK);
455 300
456 if (details.type() == CONSTANT) { 301 if (details.type() == CONSTANT) {
457 DCHECK(value_reg.is(rax)); 302 DCHECK(value_reg.is(rax));
458 __ ret(0); 303 __ ret(0);
459 return; 304 return;
460 } 305 }
461 306
462 int index = transition->instance_descriptors()->GetFieldIndex( 307 int index = transition->instance_descriptors()->GetFieldIndex(
463 transition->LastAdded()); 308 transition->LastAdded());
464 309
465 // Adjust for the number of properties stored in the object. Even in the 310 // Adjust for the number of properties stored in the object. Even in the
466 // face of a transition we can use the old map here because the size of the 311 // face of a transition we can use the old map here because the size of the
467 // object and the number of in-object properties is not going to change. 312 // object and the number of in-object properties is not going to change.
468 index -= transition->inobject_properties(); 313 index -= transition->inobject_properties();
469 314
470 // TODO(verwaest): Share this code as a code stub. 315 // TODO(verwaest): Share this code as a code stub.
471 SmiCheck smi_check = representation.IsTagged() 316 SmiCheck smi_check =
472 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; 317 representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
473 if (index < 0) { 318 if (index < 0) {
474 // Set the property straight into the object. 319 // Set the property straight into the object.
475 int offset = transition->instance_size() + (index * kPointerSize); 320 int offset = transition->instance_size() + (index * kPointerSize);
476 if (representation.IsDouble()) { 321 if (representation.IsDouble()) {
477 __ movp(FieldOperand(receiver_reg, offset), storage_reg); 322 __ movp(FieldOperand(receiver_reg, offset), storage_reg);
478 } else { 323 } else {
479 __ movp(FieldOperand(receiver_reg, offset), value_reg); 324 __ movp(FieldOperand(receiver_reg, offset), value_reg);
480 } 325 }
481 326
482 if (!representation.IsSmi()) { 327 if (!representation.IsSmi()) {
483 // Update the write barrier for the array address. 328 // Update the write barrier for the array address.
484 if (!representation.IsDouble()) { 329 if (!representation.IsDouble()) {
485 __ movp(storage_reg, value_reg); 330 __ movp(storage_reg, value_reg);
486 } 331 }
487 __ RecordWriteField( 332 __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1,
488 receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs, 333 kDontSaveFPRegs, EMIT_REMEMBERED_SET, smi_check);
489 EMIT_REMEMBERED_SET, smi_check);
490 } 334 }
491 } else { 335 } else {
492 // Write to the properties array. 336 // Write to the properties array.
493 int offset = index * kPointerSize + FixedArray::kHeaderSize; 337 int offset = index * kPointerSize + FixedArray::kHeaderSize;
494 // Get the properties array (optimistically). 338 // Get the properties array (optimistically).
495 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); 339 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
496 if (representation.IsDouble()) { 340 if (representation.IsDouble()) {
497 __ movp(FieldOperand(scratch1, offset), storage_reg); 341 __ movp(FieldOperand(scratch1, offset), storage_reg);
498 } else { 342 } else {
499 __ movp(FieldOperand(scratch1, offset), value_reg); 343 __ movp(FieldOperand(scratch1, offset), value_reg);
500 } 344 }
501 345
502 if (!representation.IsSmi()) { 346 if (!representation.IsSmi()) {
503 // Update the write barrier for the array address. 347 // Update the write barrier for the array address.
504 if (!representation.IsDouble()) { 348 if (!representation.IsDouble()) {
505 __ movp(storage_reg, value_reg); 349 __ movp(storage_reg, value_reg);
506 } 350 }
507 __ RecordWriteField( 351 __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg,
508 scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs, 352 kDontSaveFPRegs, EMIT_REMEMBERED_SET, smi_check);
509 EMIT_REMEMBERED_SET, smi_check);
510 } 353 }
511 } 354 }
512 355
513 // Return the value (register rax). 356 // Return the value (register rax).
514 DCHECK(value_reg.is(rax)); 357 DCHECK(value_reg.is(rax));
515 __ ret(0); 358 __ ret(0);
516 } 359 }
517 360
518 361
519 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup, 362 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup,
(...skipping 21 matching lines...)
541 384
542 385
543 Register PropertyHandlerCompiler::CheckPrototypes( 386 Register PropertyHandlerCompiler::CheckPrototypes(
544 Register object_reg, Register holder_reg, Register scratch1, 387 Register object_reg, Register holder_reg, Register scratch1,
545 Register scratch2, Handle<Name> name, Label* miss, 388 Register scratch2, Handle<Name> name, Label* miss,
546 PrototypeCheckType check) { 389 PrototypeCheckType check) {
547 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); 390 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
548 391
549 // Make sure there's no overlap between holder and object registers. 392 // Make sure there's no overlap between holder and object registers.
550 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); 393 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
551 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) 394 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
552 && !scratch2.is(scratch1)); 395 !scratch2.is(scratch1));
553 396
554 // Keep track of the current object in register reg. On the first 397 // Keep track of the current object in register reg. On the first
555 // iteration, reg is an alias for object_reg, on later iterations, 398 // iteration, reg is an alias for object_reg, on later iterations,
556 // it is an alias for holder_reg. 399 // it is an alias for holder_reg.
557 Register reg = object_reg; 400 Register reg = object_reg;
558 int depth = 0; 401 int depth = 0;
559 402
560 Handle<JSObject> current = Handle<JSObject>::null(); 403 Handle<JSObject> current = Handle<JSObject>::null();
561 if (type()->IsConstant()) { 404 if (type()->IsConstant()) {
562 current = Handle<JSObject>::cast(type()->AsConstant()->Value()); 405 current = Handle<JSObject>::cast(type()->AsConstant()->Value());
(...skipping 14 matching lines...)
577 prototype = handle(JSObject::cast(current_map->prototype())); 420 prototype = handle(JSObject::cast(current_map->prototype()));
578 if (current_map->is_dictionary_map() && 421 if (current_map->is_dictionary_map() &&
579 !current_map->IsJSGlobalObjectMap()) { 422 !current_map->IsJSGlobalObjectMap()) {
580 DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast. 423 DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast.
581 if (!name->IsUniqueName()) { 424 if (!name->IsUniqueName()) {
582 DCHECK(name->IsString()); 425 DCHECK(name->IsString());
583 name = factory()->InternalizeString(Handle<String>::cast(name)); 426 name = factory()->InternalizeString(Handle<String>::cast(name));
584 } 427 }
585 DCHECK(current.is_null() || 428 DCHECK(current.is_null() ||
586 current->property_dictionary()->FindEntry(name) == 429 current->property_dictionary()->FindEntry(name) ==
587 NameDictionary::kNotFound); 430 NameDictionary::kNotFound);
588 431
589 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, 432 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
590 scratch1, scratch2); 433 scratch2);
591 434
592 __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); 435 __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
593 reg = holder_reg; // From now on the object will be in holder_reg. 436 reg = holder_reg; // From now on the object will be in holder_reg.
594 __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); 437 __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
595 } else { 438 } else {
596 bool in_new_space = heap()->InNewSpace(*prototype); 439 bool in_new_space = heap()->InNewSpace(*prototype);
597 // Two possible reasons for loading the prototype from the map: 440 // Two possible reasons for loading the prototype from the map:
598 // (1) Can't store references to new space in code. 441 // (1) Can't store references to new space in code.
599 // (2) Handler is shared for all receivers with the same prototype 442 // (2) Handler is shared for all receivers with the same prototype
600 // map (but not necessarily the same prototype instance). 443 // map (but not necessarily the same prototype instance).
601 bool load_prototype_from_map = in_new_space || depth == 1; 444 bool load_prototype_from_map = in_new_space || depth == 1;
602 if (load_prototype_from_map) { 445 if (load_prototype_from_map) {
603 // Save the map in scratch1 for later. 446 // Save the map in scratch1 for later.
604 __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); 447 __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
605 } 448 }
606 if (depth != 1 || check == CHECK_ALL_MAPS) { 449 if (depth != 1 || check == CHECK_ALL_MAPS) {
607 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK); 450 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
608 } 451 }
609 452
610 // Check access rights to the global object. This has to happen after 453 // Check access rights to the global object. This has to happen after
611 // the map check so that we know that the object is actually a global 454 // the map check so that we know that the object is actually a global
612 // object. 455 // object.
613 // This allows us to install generated handlers for accesses to the 456 // This allows us to install generated handlers for accesses to the
614 // global proxy (as opposed to using slow ICs). See corresponding code 457 // global proxy (as opposed to using slow ICs). See corresponding code
615 // in LookupForRead(). 458 // in LookupForRead().
616 if (current_map->IsJSGlobalProxyMap()) { 459 if (current_map->IsJSGlobalProxyMap()) {
617 __ CheckAccessGlobalProxy(reg, scratch2, miss); 460 __ CheckAccessGlobalProxy(reg, scratch2, miss);
618 } else if (current_map->IsJSGlobalObjectMap()) { 461 } else if (current_map->IsJSGlobalObjectMap()) {
619 GenerateCheckPropertyCell( 462 GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
620 masm(), Handle<JSGlobalObject>::cast(current), name, 463 name, scratch2, miss);
621 scratch2, miss);
622 } 464 }
623 reg = holder_reg; // From now on the object will be in holder_reg. 465 reg = holder_reg; // From now on the object will be in holder_reg.
624 466
625 if (load_prototype_from_map) { 467 if (load_prototype_from_map) {
626 __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); 468 __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
627 } else { 469 } else {
628 __ Move(reg, prototype); 470 __ Move(reg, prototype);
629 } 471 }
630 } 472 }
631 473
(...skipping 64 matching lines...)
696 __ Push(FieldOperand(scratch2(), 538 __ Push(FieldOperand(scratch2(),
697 ExecutableAccessorInfo::kDataOffset)); // data 539 ExecutableAccessorInfo::kDataOffset)); // data
698 } else { 540 } else {
699 __ Push(Handle<Object>(callback->data(), isolate())); 541 __ Push(Handle<Object>(callback->data(), isolate()));
700 } 542 }
701 DCHECK(!kScratchRegister.is(reg)); 543 DCHECK(!kScratchRegister.is(reg));
702 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 544 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
703 __ Push(kScratchRegister); // return value 545 __ Push(kScratchRegister); // return value
704 __ Push(kScratchRegister); // return value default 546 __ Push(kScratchRegister); // return value default
705 __ PushAddress(ExternalReference::isolate_address(isolate())); 547 __ PushAddress(ExternalReference::isolate_address(isolate()));
706 __ Push(reg); // holder 548 __ Push(reg); // holder
707 __ Push(name()); // name 549 __ Push(name()); // name
708 // Save a pointer to where we pushed the arguments pointer. This will be 550 // Save a pointer to where we pushed the arguments pointer. This will be
709 // passed as the const PropertyAccessorInfo& to the C++ callback. 551 // passed as the const PropertyAccessorInfo& to the C++ callback.
710 552
711 __ PushReturnAddressFrom(scratch4()); 553 __ PushReturnAddressFrom(scratch4());
712 554
713 // Abi for CallApiGetter 555 // Abi for CallApiGetter
714 Register api_function_address = r8; 556 Register api_function_address = r8;
715 Address getter_address = v8::ToCData<Address>(callback->getter()); 557 Address getter_address = v8::ToCData<Address>(callback->getter());
716 __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE); 558 __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);
(...skipping 130 matching lines...)
847 // Call the JavaScript setter with receiver and value on the stack. 689 // Call the JavaScript setter with receiver and value on the stack.
848 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { 690 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
849 // Swap in the global receiver. 691 // Swap in the global receiver.
850 __ movp(receiver, 692 __ movp(receiver,
851 FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); 693 FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
852 } 694 }
853 __ Push(receiver); 695 __ Push(receiver);
854 __ Push(value()); 696 __ Push(value());
855 ParameterCount actual(1); 697 ParameterCount actual(1);
856 ParameterCount expected(setter); 698 ParameterCount expected(setter);
857 __ InvokeFunction(setter, expected, actual, 699 __ InvokeFunction(setter, expected, actual, CALL_FUNCTION,
858 CALL_FUNCTION, NullCallWrapper()); 700 NullCallWrapper());
859 } else { 701 } else {
860 // If we generate a global code snippet for deoptimization only, remember 702 // If we generate a global code snippet for deoptimization only, remember
861 // the place to continue after deoptimization. 703 // the place to continue after deoptimization.
862 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); 704 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
863 } 705 }
864 706
865 // We have to return the passed value, not the return value of the setter. 707 // We have to return the passed value, not the return value of the setter.
866 __ Pop(rax); 708 __ Pop(rax);
867 709
868 // Restore context register. 710 // Restore context register.
(...skipping 34 matching lines...)
903 __ movp(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset)); 745 __ movp(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
904 int receiver_count = receiver_maps->length(); 746 int receiver_count = receiver_maps->length();
905 for (int i = 0; i < receiver_count; ++i) { 747 for (int i = 0; i < receiver_count; ++i) {
906 // Check map and tail call if there's a match 748 // Check map and tail call if there's a match
907 __ Cmp(scratch1(), receiver_maps->at(i)); 749 __ Cmp(scratch1(), receiver_maps->at(i));
908 if (transitioned_maps->at(i).is_null()) { 750 if (transitioned_maps->at(i).is_null()) {
909 __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET); 751 __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
910 } else { 752 } else {
911 Label next_map; 753 Label next_map;
912 __ j(not_equal, &next_map, Label::kNear); 754 __ j(not_equal, &next_map, Label::kNear);
913 __ Move(transition_map(), 755 __ Move(transition_map(), transitioned_maps->at(i),
914 transitioned_maps->at(i),
915 RelocInfo::EMBEDDED_OBJECT); 756 RelocInfo::EMBEDDED_OBJECT);
916 __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET); 757 __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
917 __ bind(&next_map); 758 __ bind(&next_map);
918 } 759 }
919 } 760 }
920 761
921 __ bind(&miss); 762 __ bind(&miss);
922 763
923 TailCallBuiltin(masm(), MissBuiltin(kind())); 764 TailCallBuiltin(masm(), MissBuiltin(kind()));
924 765
925 // Return the generated code. 766 // Return the generated code.
926 return GetCode(kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC); 767 return GetCode(kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
927 } 768 }
928 769
929 770
930 Register* PropertyAccessCompiler::load_calling_convention() { 771 Register* PropertyAccessCompiler::load_calling_convention() {
931 // receiver, name, scratch1, scratch2, scratch3, scratch4. 772 // receiver, name, scratch1, scratch2, scratch3, scratch4.
932 Register receiver = LoadIC::ReceiverRegister(); 773 Register receiver = LoadIC::ReceiverRegister();
933 Register name = LoadIC::NameRegister(); 774 Register name = LoadIC::NameRegister();
934 static Register registers[] = { receiver, name, rax, rbx, rdi, r8 }; 775 static Register registers[] = {receiver, name, rax, rbx, rdi, r8};
935 return registers; 776 return registers;
936 } 777 }
937 778
938 779
939 Register* PropertyAccessCompiler::store_calling_convention() { 780 Register* PropertyAccessCompiler::store_calling_convention() {
940 // receiver, name, scratch1, scratch2, scratch3. 781 // receiver, name, scratch1, scratch2, scratch3.
941 Register receiver = KeyedStoreIC::ReceiverRegister(); 782 Register receiver = KeyedStoreIC::ReceiverRegister();
942 Register name = KeyedStoreIC::NameRegister(); 783 Register name = KeyedStoreIC::NameRegister();
943 DCHECK(rbx.is(KeyedStoreIC::MapRegister())); 784 DCHECK(rbx.is(KeyedStoreIC::MapRegister()));
944 static Register registers[] = { receiver, name, rbx, rdi, r8 }; 785 static Register registers[] = {receiver, name, rbx, rdi, r8};
945 return registers; 786 return registers;
946 } 787 }
947 788
948 789
949 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); } 790 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); }
950 791
951 792
952 #undef __ 793 #undef __
953 #define __ ACCESS_MASM(masm) 794 #define __ ACCESS_MASM(masm)
954 795
(...skipping 12 matching lines...)
967 if (!getter.is_null()) { 808 if (!getter.is_null()) {
968 // Call the JavaScript getter with the receiver on the stack. 809 // Call the JavaScript getter with the receiver on the stack.
969 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { 810 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
970 // Swap in the global receiver. 811 // Swap in the global receiver.
971 __ movp(receiver, 812 __ movp(receiver,
972 FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); 813 FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
973 } 814 }
974 __ Push(receiver); 815 __ Push(receiver);
975 ParameterCount actual(0); 816 ParameterCount actual(0);
976 ParameterCount expected(getter); 817 ParameterCount expected(getter);
977 __ InvokeFunction(getter, expected, actual, 818 __ InvokeFunction(getter, expected, actual, CALL_FUNCTION,
978 CALL_FUNCTION, NullCallWrapper()); 819 NullCallWrapper());
979 } else { 820 } else {
980 // If we generate a global code snippet for deoptimization only, remember 821 // If we generate a global code snippet for deoptimization only, remember
981 // the place to continue after deoptimization. 822 // the place to continue after deoptimization.
982 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); 823 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
983 } 824 }
984 825
985 // Restore context register. 826 // Restore context register.
986 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 827 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
987 } 828 }
988 __ ret(0); 829 __ ret(0);
(...skipping 73 matching lines...)
1062 __ Cmp(map_reg, map); 903 __ Cmp(map_reg, map);
1063 if (type->Is(HeapType::Number())) { 904 if (type->Is(HeapType::Number())) {
1064 DCHECK(!number_case.is_unused()); 905 DCHECK(!number_case.is_unused());
1065 __ bind(&number_case); 906 __ bind(&number_case);
1066 } 907 }
1067 __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET); 908 __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
1068 } 909 }
1069 } 910 }
1070 DCHECK(number_of_handled_maps > 0); 911 DCHECK(number_of_handled_maps > 0);
1071 912
1072 __ bind(&miss); 913 __ bind(&miss);
1073 TailCallBuiltin(masm(), MissBuiltin(kind())); 914 TailCallBuiltin(masm(), MissBuiltin(kind()));
1074 915
1075 // Return the generated code. 916 // Return the generated code.
1076 InlineCacheState state = 917 InlineCacheState state =
1077 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; 918 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1078 return GetCode(kind(), type, name, state); 919 return GetCode(kind(), type, name, state);
1079 } 920 }
1080 921
1081 922
1082 #undef __ 923 #undef __
(...skipping 38 matching lines...)
1121 // ----------- S t a t e ------------- 962 // ----------- S t a t e -------------
1122 // -- rcx : key 963 // -- rcx : key
1123 // -- rdx : receiver 964 // -- rdx : receiver
1124 // -- rsp[0] : return address 965 // -- rsp[0] : return address
1125 // ----------------------------------- 966 // -----------------------------------
1126 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); 967 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1127 } 968 }
1128 969
1129 970
1130 #undef __ 971 #undef __
1131 972 }
1132 } } // namespace v8::internal 973 } // namespace v8::internal
1133 974
1134 #endif // V8_TARGET_ARCH_X64 975 #endif // V8_TARGET_ARCH_X64