Chromium Code Reviews

Unified Diff: src/x64/ic-x64.cc

Issue 159266: Add inline caching for keyed loads and stores. Remove extra parentheses from... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 11 years, 5 months ago
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 24 matching lines...)
 namespace v8 {
 namespace internal {

 // ----------------------------------------------------------------------------
 // Static IC stub generators.
 //

 #define __ ACCESS_MASM(masm)


+// Helper function used to load a property from a dictionary backing storage.
+// This function may return false negatives, so miss_label
+// must always call a backup property load that is complete.
+// This function is safe to call if the receiver has fast properties,
+// or if name is not a symbol, and will jump to the miss_label in that case.
+static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
+                                   Register r0, Register r1, Register r2,
+                                   Register name) {
+  // Register use:
+  //
+  // r0 - used to hold the property dictionary.
+  //
+  // r1 - initially the receiver
+  //    - used for the index into the property dictionary
+  //    - holds the result on exit.
+  //
+  // r2 - used to hold the capacity of the property dictionary.
+  //
+  // name - holds the name of the property and is unchanged.
+
+  Label done;
+
+  // Check for the absence of an interceptor.
+  // Load the map into r0.
+  __ movq(r0, FieldOperand(r1, JSObject::kMapOffset));
+  // Test the has_named_interceptor bit in the map.
+  __ testl(FieldOperand(r0, Map::kInstanceAttributesOffset),
+           Immediate(1 << (Map::kHasNamedInterceptor + (3 * 8))));
+
+  // Jump to miss if the interceptor bit is set.
+  __ j(not_zero, miss_label);
+
+  // Bail out if we have a JS global proxy object.
+  __ movzxbq(r0, FieldOperand(r0, Map::kInstanceTypeOffset));
+  __ cmpb(r0, Immediate(JS_GLOBAL_PROXY_TYPE));
+  __ j(equal, miss_label);
+
+  // Possible work-around for http://crbug.com/16276.
+  __ cmpb(r0, Immediate(JS_GLOBAL_OBJECT_TYPE));
+  __ j(equal, miss_label);
+  __ cmpb(r0, Immediate(JS_BUILTINS_OBJECT_TYPE));
+  __ j(equal, miss_label);
+
+  // Check that the properties array is a dictionary.
+  __ movq(r0, FieldOperand(r1, JSObject::kPropertiesOffset));
+  __ Cmp(FieldOperand(r0, HeapObject::kMapOffset),
+         Factory::hash_table_map());
+  __ j(not_equal, miss_label);
+
+  // Compute the capacity mask.
+  const int kCapacityOffset =
+      StringDictionary::kHeaderSize +
+      StringDictionary::kCapacityIndex * kPointerSize;
+  __ movq(r2, FieldOperand(r0, kCapacityOffset));
+  __ shrl(r2, Immediate(kSmiTagSize));  // convert smi to int
+  __ decl(r2);
+
+  // Generate an unrolled loop that performs a few probes before
+  // giving up. Measurements done on Gmail indicate that 2 probes
+  // cover ~93% of loads from dictionaries.
+  static const int kProbes = 4;
+  const int kElementsStartOffset =
+      StringDictionary::kHeaderSize +
+      StringDictionary::kElementsStartIndex * kPointerSize;
+  for (int i = 0; i < kProbes; i++) {
+    // Compute the masked index: (hash + i + i * i) & mask.
+    __ movl(r1, FieldOperand(name, String::kLengthOffset));
+    __ shrl(r1, Immediate(String::kHashShift));
+    if (i > 0) {
+      __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i)));
+    }
+    __ and_(r1, r2);
+
+    // Scale the index by multiplying by the entry size.
+    ASSERT(StringDictionary::kEntrySize == 3);
+    __ lea(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3
+
+    // Check if the key is identical to the name.
+    __ cmpq(name, Operand(r0, r1, times_pointer_size,
+                          kElementsStartOffset - kHeapObjectTag));
+    if (i != kProbes - 1) {
+      __ j(equal, &done);
+    } else {
+      __ j(not_equal, miss_label);
+    }
+  }
+
+  // Check that the value is a normal property.
+  __ bind(&done);
+  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
+  __ testl(Operand(r0, r1, times_pointer_size, kDetailsOffset - kHeapObjectTag),
+           Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize));
+  __ j(not_zero, miss_label);
+
+  // Get the value at the masked, scaled index.
+  const int kValueOffset = kElementsStartOffset + kPointerSize;
+  __ movq(r1,
+          Operand(r0, r1, times_pointer_size, kValueOffset - kHeapObjectTag));
+}
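
A sketch of the lookup this stub unrolls, for readers who prefer plain C++ to macro-assembler: it is not part of the patch, and every name in it (FakeEntry, ProbeStringDictionary, kSketchTypeMask) is a stand-in rather than V8 API. The probe offsets i + i*i and the entry layout (key, value, details) follow the comments above; returning NULL corresponds to jumping to miss_label so the complete runtime lookup takes over.

  #include <cstddef>

  struct FakeEntry { const void* key; const void* value; unsigned details; };

  static const int kSketchProbes = 4;          // matches kProbes above
  static const unsigned kSketchTypeMask = 7;   // stand-in for TypeField::mask()

  // Returns the value slot on a hit, or NULL to signal "jump to miss_label"
  // so the complete (runtime) property load takes over.
  static const void* ProbeStringDictionary(const FakeEntry* elements,
                                           unsigned capacity,  // power of two
                                           unsigned hash,
                                           const void* name) {
    unsigned mask = capacity - 1;
    for (int i = 0; i < kSketchProbes; i++) {
      unsigned index = (hash + i + i * i) & mask;   // masked, quadratic probe
      if (elements[index].key != name) continue;    // false negatives allowed
      // A hit must also be a normal property; other kinds go to the miss.
      if ((elements[index].details & kSketchTypeMask) != 0) return NULL;
      return elements[index].value;
    }
    return NULL;  // not found within kSketchProbes probes: take the miss path
  }
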
+
+
+// Helper function used to check that a value is either not an object
+// or is loaded if it is an object.
+static void GenerateCheckNonObjectOrLoaded(MacroAssembler* masm, Label* miss,
+                                           Register value) {
+  Label done;
+  // Check if the value is a Smi.
+  __ testl(value, Immediate(kSmiTagMask));
+  __ j(zero, &done);
+  // Check if the object has been loaded.
+  __ movq(kScratchRegister, FieldOperand(value, JSFunction::kMapOffset));
+  __ testb(FieldOperand(kScratchRegister, Map::kBitField2Offset),
+           Immediate(1 << Map::kNeedsLoading));
+  __ j(not_zero, miss);
+  __ bind(&done);
+}
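
The same guard in ordinary C++, again only as a sketch: smis pass immediately, and a heap object passes only when the "needs loading" map bit is clear. The bit position and the separately passed bit field are stand-ins, since the sketch has no heap-object model.

  #include <stdint.h>

  // The caller is assumed to have fetched the map's second bit field for a
  // heap object; for a smi the argument is ignored.
  static bool NonObjectOrLoaded(uintptr_t value_word, unsigned map_bit_field2) {
    if ((value_word & 1) == 0) return true;       // low tag bit clear: a smi
    const unsigned kNeedsLoadingBitSketch = 0;    // stand-in bit position
    return (map_bit_field2 & (1u << kNeedsLoadingBitSketch)) == 0;
  }
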
+
+
 void KeyedLoadIC::ClearInlinedVersion(Address address) {
   // TODO(X64): Implement this when LoadIC is enabled.
 }

 void KeyedStoreIC::ClearInlinedVersion(Address address) {
   // TODO(X64): Implement this when LoadIC is enabled.
 }

 void KeyedStoreIC::RestoreInlinedVersion(Address address) {
   UNIMPLEMENTED();
(...skipping 15 matching lines...)
   __ pop(rbx);
   __ push(rcx);
   __ push(rax);
   __ push(rbx);

   // Perform tail call to the entry.
   __ TailCallRuntime(f, 2);
 }


+#ifdef DEBUG
+// For use in assert below.
+static int TenToThe(int exponent) {
+  ASSERT(exponent <= 9);
+  ASSERT(exponent >= 1);
+  int answer = 10;
+  for (int i = 1; i < exponent; i++) answer *= 10;
+  return answer;
+}
+#endif
+
+
 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- rsp[0] : return address
   // -- rsp[8] : name
   // -- rsp[16] : receiver
   // -----------------------------------
+  Label slow, fast, check_string, index_int, index_string;

-  Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
+  // Load name and receiver.
+  __ movq(rax, Operand(rsp, kPointerSize));
+  __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+
+  // Check that the object isn't a smi.
+  __ testl(rcx, Immediate(kSmiTagMask));
+  __ j(zero, &slow);
+
+  // Check that the object is some kind of JS object EXCEPT JS Value type.
+  // In the case that the object is a value-wrapper object,
+  // we enter the runtime system to make sure that indexing
+  // into string objects works as intended.
+  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
+  __ CmpObjectType(rcx, JS_OBJECT_TYPE, rdx);
+  __ j(below, &slow);
+  // Check that the receiver does not require access checks. We need
+  // to check this explicitly since this generic stub does not perform
+  // map checks. The map is already in rdx.
+  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
+           Immediate(1 << Map::kIsAccessCheckNeeded));
+  __ j(not_zero, &slow);
+
+  // Check that the key is a smi.
+  __ testl(rax, Immediate(kSmiTagMask));
+  __ j(not_zero, &check_string);
+  __ sarl(rax, Immediate(kSmiTagSize));
+  // Get the elements array of the object.
+  __ bind(&index_int);
+  __ movq(rcx, FieldOperand(rcx, JSObject::kElementsOffset));
+  // Check that the object is in fast mode (not dictionary).
+  __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
+  __ j(equal, &slow);
+  // Check that the key (index) is within bounds.
+  __ cmpl(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
+  __ j(below, &fast);  // Unsigned comparison rejects negative indices.
+  // Slow case: Load name and receiver from stack and jump to runtime.
+  __ bind(&slow);
+  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
+  KeyedLoadIC::Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
+  __ bind(&check_string);
+  // The key is not a smi.
+  // Is it a string?
+  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
+  __ j(above_equal, &slow);
+  // Is the string an array index, with cached numeric value?
+  __ movl(rbx, FieldOperand(rax, String::kLengthOffset));
+  __ testl(rbx, Immediate(String::kIsArrayIndexMask));
+
+  // If the string is a symbol, do a quick inline probe of the receiver's
+  // dictionary, if it exists.
+  __ j(not_zero, &index_string);  // The value in rbx is used at jump target.
+  __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset),
+           Immediate(kIsSymbolMask));
+  __ j(zero, &slow);
+  // Probe the dictionary, leaving the result in rcx.
+  GenerateDictionaryLoad(masm, &slow, rbx, rcx, rdx, rax);
+  GenerateCheckNonObjectOrLoaded(masm, &slow, rcx);
+  __ movq(rax, rcx);
+  __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
+  __ ret(0);
+  // Array index string: If short enough, use the cache in the length/hash
+  // field (rbx).
+  // We assert that there are enough bits in an int32_t after the hash shift
+  // bits have been subtracted to allow space for the length and the cached
+  // array index.
+  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
+         (1 << (String::kShortLengthShift - String::kHashShift)));
+  __ bind(&index_string);
+  const int kLengthFieldLimit =
+      (String::kMaxCachedArrayIndexLength + 1) << String::kShortLengthShift;
+  __ cmpl(rbx, Immediate(kLengthFieldLimit));
+  __ j(above_equal, &slow);
+  __ movl(rax, rbx);
+  __ and_(rax, Immediate((1 << String::kShortLengthShift) - 1));
+  __ shrl(rax, Immediate(String::kLongLengthShift));
+  __ jmp(&index_int);
+  // Fast case: Do the load.
+  __ bind(&fast);
+  __ movq(rax, Operand(rcx, rax, times_pointer_size,
+                       FixedArray::kHeaderSize - kHeapObjectTag));
+  __ Cmp(rax, Factory::the_hole_value());
+  // In case the loaded value is the_hole we have to consult GetProperty
+  // to ensure the prototype chain is searched.
+  __ j(equal, &slow);
+  __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
+  __ ret(0);
 }
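
A sketch (stand-in types, not V8 API) of the fast path the generated code above takes for smi keys; returning false corresponds to jumping to &slow, i.e. falling back to Runtime::kKeyedGetProperty, while string keys take the symbol-probe and cached-array-index paths shown in the stub.

  #include <stdint.h>

  struct FakeFastElements {
    const void* const* slots;   // FixedArray element slots
    uint32_t length;            // untagged element count
  };

  // Smi-keyed fast case only. receiver_is_plain_js_object stands in for the
  // smi and instance-type checks (JS object, not a JSValue wrapper).
  static bool KeyedLoadFastPath(const FakeFastElements& elements,
                                bool receiver_is_plain_js_object,
                                bool receiver_needs_access_check,
                                bool elements_are_dictionary,
                                uint32_t index,
                                const void* the_hole_sentinel,
                                const void** result) {
    if (!receiver_is_plain_js_object || receiver_needs_access_check)
      return false;
    if (elements_are_dictionary) return false;     // fast mode only
    if (index >= elements.length) return false;    // unsigned check also
                                                   // rejects negative indices
    const void* value = elements.slots[index];
    if (value == the_hole_sentinel) return false;  // consult the prototype
                                                   // chain via GetProperty
    *result = value;
    return true;
  }
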

 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- rsp[0] : return address
   // -- rsp[8] : name
   // -- rsp[16] : receiver
   // -----------------------------------

   Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
(...skipping 23 matching lines...)
 }

 Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                    JSObject* object,
                                                    JSObject* holder,
                                                    Object* callback) {
   UNIMPLEMENTED();
   return NULL;
 }

-Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
-                                                JSObject* object,
-                                                JSObject* holder,
-                                                int index) {
-  UNIMPLEMENTED();
-  return NULL;
-}

 Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
   UNIMPLEMENTED();
   return NULL;
 }

 Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* object,
                                                       JSObject* holder,
                                                       String* name) {
   UNIMPLEMENTED();
(...skipping 21 matching lines...)
   __ push(rcx);

   // Do tail-call to runtime routine.
   __ TailCallRuntime(f, 3);
 }

 void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
   Generate(masm, ExternalReference(IC_Utility(kKeyedStoreIC_Miss)));
 }

+
 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
-  Generate(masm, ExternalReference(IC_Utility(kKeyedStoreIC_Miss)));
+  // ----------- S t a t e -------------
+  // -- rax : value
+  // -- rsp[0] : return address
+  // -- rsp[8] : key
+  // -- rsp[16] : receiver
+  // -----------------------------------
+  Label slow, fast, array, extra;
+
+  // Get the receiver from the stack.
+  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // 2 ~ return address, key
+  // Check that the object isn't a smi.
+  __ testl(rdx, Immediate(kSmiTagMask));
+  __ j(zero, &slow);
+  // Get the map from the receiver.
+  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
+  // Check that the receiver does not require access checks. We need
+  // to do this because this generic stub does not perform map checks.
+  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
+           Immediate(1 << Map::kIsAccessCheckNeeded));
+  __ j(not_zero, &slow);
+  // Get the key from the stack.
+  __ movq(rbx, Operand(rsp, 1 * kPointerSize));  // 1 ~ return address
+  // Check that the key is a smi.
+  __ testl(rbx, Immediate(kSmiTagMask));
+  __ j(not_zero, &slow);
+
+  __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
+  __ j(equal, &array);
+  // Check that the object is some kind of JS object.
+  __ CmpInstanceType(rcx, FIRST_JS_OBJECT_TYPE);
+  __ j(below, &slow);
+
+  // Object case: Check key against length in the elements array.
+  // rax: value
+  // rdx: JSObject
+  // rbx: index (as a smi)
+  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
+  // Check that the object is in fast mode (not dictionary).
+  __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
+  __ j(equal, &slow);
+  // Untag the key (for checking against untagged length in the fixed array).
+  __ movl(rdx, rbx);
+  __ sarl(rdx, Immediate(kSmiTagSize));
+  __ cmpl(rdx, FieldOperand(rcx, Array::kLengthOffset));
+  // rax: value
+  // rcx: FixedArray
+  // rbx: index (as a smi)
+  __ j(below, &fast);
+
+
+  // Slow case: Push extra copies of the arguments (3).
+  __ bind(&slow);
+  __ pop(rcx);
+  __ push(Operand(rsp, 1 * kPointerSize));
+  __ push(Operand(rsp, 1 * kPointerSize));
+  __ push(rax);
+  __ push(rcx);
+  // Do tail-call to runtime routine.
+  __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
+
+
+  // Extra capacity case: Check if there is extra capacity to
+  // perform the store and update the length. Used for adding one
+  // element to the array by writing to array[array.length].
+  __ bind(&extra);
+  // rax: value
+  // rdx: JSArray
+  // rcx: FixedArray
+  // rbx: index (as a smi)
+  // flags: compare (rbx, rdx.length())
+  __ j(not_equal, &slow);  // do not leave holes in the array
+  __ sarl(rbx, Immediate(kSmiTagSize));  // untag
+  __ cmpl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
+  __ j(above_equal, &slow);
+  // Restore tag and increment.
+  __ lea(rbx, Operand(rbx, rbx, times_1, 1 << kSmiTagSize));
+  __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rbx);
+  __ subl(rbx, Immediate(1 << kSmiTagSize));  // decrement rbx again
+  __ jmp(&fast);
+
+
+  // Array case: Get the length and the elements array from the JS
+  // array. Check that the array is in fast mode; if it is, the
+  // length is always a smi.
+  __ bind(&array);
+  // rax: value
+  // rdx: JSArray
+  // rbx: index (as a smi)
+  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
+  __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
+  __ j(equal, &slow);
+
+  // Check the key against the length in the array, compute the
+  // address to store into and fall through to fast case.
+  __ cmpl(rbx, FieldOperand(rdx, JSArray::kLengthOffset));
+  __ j(above_equal, &extra);
+
+
+  // Fast case: Do the store.
+  __ bind(&fast);
+  // rax: value
+  // rcx: FixedArray
+  // rbx: index (as a smi)
+  __ movq(Operand(rcx, rbx, times_4, FixedArray::kHeaderSize - kHeapObjectTag),
+          rax);
+  // Update write barrier for the elements array address.
+  __ movq(rdx, rax);
+  __ RecordWrite(rcx, 0, rdx, rbx);
+  __ ret(0);
 }
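
The store-side counterpart as a sketch of the JSArray path (stand-in types again, not V8 API): the only growth performed inline is a write exactly at array.length when the backing store already has room, so the fast array never acquires holes; every other case is left to Runtime::kSetProperty.

  #include <stdint.h>

  struct FakeJSArray {
    const void** slots;     // fast-mode FixedArray backing store
    uint32_t capacity;      // FixedArray length
    uint32_t length;        // JSArray length (<= capacity)
  };

  enum FakeStoreResult { kStoredFast, kStoredAndGrew, kGoSlow };

  static FakeStoreResult KeyedStoreFastPath(FakeJSArray* array,
                                            uint32_t index,
                                            const void* value) {
    if (index < array->length) {
      array->slots[index] = value;    // fast store, then the write barrier
      return kStoredFast;
    }
    if (index == array->length && index < array->capacity) {
      array->slots[index] = value;    // array[array.length] = v: grow by one
      array->length++;                // keeps the array hole-free
      return kStoredAndGrew;
    }
    return kGoSlow;                   // dictionary elements, OOB, non-smi key, ...
  }
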

+
 Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                   int index,
                                                   Map* transition,
                                                   String* name) {
   UNIMPLEMENTED();
   return NULL;
 }


 void CallIC::Generate(MacroAssembler* masm,
(...skipping 160 matching lines...)

 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
   Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
 }


 #undef __


 } }  // namespace v8::internal