Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(537)

Side by Side Diff: src/ic/arm/ic-arm.cc

Issue 483683005: Move IC code into a subdir and move ic-compilation related code from stub-cache into ic-compiler (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix BUILD.gn Created 6 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/ic-inl.h ('k') | src/ic/arm/ic-compiler-arm.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM 7 #if V8_TARGET_ARCH_ARM
8 8
9 #include "src/arm/assembler-arm.h"
10 #include "src/code-stubs.h"
11 #include "src/codegen.h" 9 #include "src/codegen.h"
12 #include "src/disasm.h" 10 #include "src/ic/ic.h"
13 #include "src/ic-inl.h" 11 #include "src/ic/stub-cache.h"
14 #include "src/runtime.h"
15 #include "src/stub-cache.h"
16 12
17 namespace v8 { 13 namespace v8 {
18 namespace internal { 14 namespace internal {
19 15
20 16
21 // ---------------------------------------------------------------------------- 17 // ----------------------------------------------------------------------------
22 // Static IC stub generators. 18 // Static IC stub generators.
23 // 19 //
24 20
25 #define __ ACCESS_MASM(masm) 21 #define __ ACCESS_MASM(masm)
26 22
27 23
28 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, 24 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
29 Register type,
30 Label* global_object) { 25 Label* global_object) {
31 // Register usage: 26 // Register usage:
32 // type: holds the receiver instance type on entry. 27 // type: holds the receiver instance type on entry.
33 __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE)); 28 __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
34 __ b(eq, global_object); 29 __ b(eq, global_object);
35 __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE)); 30 __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
36 __ b(eq, global_object); 31 __ b(eq, global_object);
37 __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE)); 32 __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
38 __ b(eq, global_object); 33 __ b(eq, global_object);
39 } 34 }
40 35
41 36
42 // Helper function used from LoadIC GenerateNormal. 37 // Helper function used from LoadIC GenerateNormal.
43 // 38 //
44 // elements: Property dictionary. It is not clobbered if a jump to the miss 39 // elements: Property dictionary. It is not clobbered if a jump to the miss
45 // label is done. 40 // label is done.
46 // name: Property name. It is not clobbered if a jump to the miss label is 41 // name: Property name. It is not clobbered if a jump to the miss label is
47 // done 42 // done
48 // result: Register for the result. It is only updated if a jump to the miss 43 // result: Register for the result. It is only updated if a jump to the miss
49 // label is not done. Can be the same as elements or name clobbering 44 // label is not done. Can be the same as elements or name clobbering
50 // one of these in the case of not jumping to the miss label. 45 // one of these in the case of not jumping to the miss label.
51 // The two scratch registers need to be different from elements, name and 46 // The two scratch registers need to be different from elements, name and
52 // result. 47 // result.
53 // The generated code assumes that the receiver has slow properties, 48 // The generated code assumes that the receiver has slow properties,
54 // is not a global object and does not have interceptors. 49 // is not a global object and does not have interceptors.
55 static void GenerateDictionaryLoad(MacroAssembler* masm, 50 static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss,
56 Label* miss, 51 Register elements, Register name,
57 Register elements, 52 Register result, Register scratch1,
58 Register name,
59 Register result,
60 Register scratch1,
61 Register scratch2) { 53 Register scratch2) {
62 // Main use of the scratch registers. 54 // Main use of the scratch registers.
63 // scratch1: Used as temporary and to hold the capacity of the property 55 // scratch1: Used as temporary and to hold the capacity of the property
64 // dictionary. 56 // dictionary.
65 // scratch2: Used as temporary. 57 // scratch2: Used as temporary.
66 Label done; 58 Label done;
67 59
68 // Probe the dictionary. 60 // Probe the dictionary.
69 NameDictionaryLookupStub::GeneratePositiveLookup(masm, 61 NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
70 miss, 62 name, scratch1, scratch2);
71 &done,
72 elements,
73 name,
74 scratch1,
75 scratch2);
76 63
77 // If probing finds an entry check that the value is a normal 64 // If probing finds an entry check that the value is a normal
78 // property. 65 // property.
79 __ bind(&done); // scratch2 == elements + 4 * index 66 __ bind(&done); // scratch2 == elements + 4 * index
80 const int kElementsStartOffset = NameDictionary::kHeaderSize + 67 const int kElementsStartOffset =
68 NameDictionary::kHeaderSize +
81 NameDictionary::kElementsStartIndex * kPointerSize; 69 NameDictionary::kElementsStartIndex * kPointerSize;
82 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; 70 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
83 __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset)); 71 __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
84 __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize)); 72 __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
85 __ b(ne, miss); 73 __ b(ne, miss);
86 74
87 // Get the value at the masked, scaled index and return. 75 // Get the value at the masked, scaled index and return.
88 __ ldr(result, 76 __ ldr(result,
89 FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize)); 77 FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
90 } 78 }
91 79
92 80
93 // Helper function used from StoreIC::GenerateNormal. 81 // Helper function used from StoreIC::GenerateNormal.
94 // 82 //
95 // elements: Property dictionary. It is not clobbered if a jump to the miss 83 // elements: Property dictionary. It is not clobbered if a jump to the miss
96 // label is done. 84 // label is done.
97 // name: Property name. It is not clobbered if a jump to the miss label is 85 // name: Property name. It is not clobbered if a jump to the miss label is
98 // done 86 // done
99 // value: The value to store. 87 // value: The value to store.
100 // The two scratch registers need to be different from elements, name and 88 // The two scratch registers need to be different from elements, name and
101 // result. 89 // result.
102 // The generated code assumes that the receiver has slow properties, 90 // The generated code assumes that the receiver has slow properties,
103 // is not a global object and does not have interceptors. 91 // is not a global object and does not have interceptors.
104 static void GenerateDictionaryStore(MacroAssembler* masm, 92 static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss,
105 Label* miss, 93 Register elements, Register name,
106 Register elements, 94 Register value, Register scratch1,
107 Register name,
108 Register value,
109 Register scratch1,
110 Register scratch2) { 95 Register scratch2) {
111 // Main use of the scratch registers. 96 // Main use of the scratch registers.
112 // scratch1: Used as temporary and to hold the capacity of the property 97 // scratch1: Used as temporary and to hold the capacity of the property
113 // dictionary. 98 // dictionary.
114 // scratch2: Used as temporary. 99 // scratch2: Used as temporary.
115 Label done; 100 Label done;
116 101
117 // Probe the dictionary. 102 // Probe the dictionary.
118 NameDictionaryLookupStub::GeneratePositiveLookup(masm, 103 NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
119 miss, 104 name, scratch1, scratch2);
120 &done,
121 elements,
122 name,
123 scratch1,
124 scratch2);
125 105
126 // If probing finds an entry in the dictionary check that the value 106 // If probing finds an entry in the dictionary check that the value
127 // is a normal property that is not read only. 107 // is a normal property that is not read only.
128 __ bind(&done); // scratch2 == elements + 4 * index 108 __ bind(&done); // scratch2 == elements + 4 * index
129 const int kElementsStartOffset = NameDictionary::kHeaderSize + 109 const int kElementsStartOffset =
110 NameDictionary::kHeaderSize +
130 NameDictionary::kElementsStartIndex * kPointerSize; 111 NameDictionary::kElementsStartIndex * kPointerSize;
131 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; 112 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
132 const int kTypeAndReadOnlyMask = 113 const int kTypeAndReadOnlyMask =
133 (PropertyDetails::TypeField::kMask | 114 (PropertyDetails::TypeField::kMask |
134 PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize; 115 PropertyDetails::AttributesField::encode(READ_ONLY))
116 << kSmiTagSize;
135 __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset)); 117 __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
136 __ tst(scratch1, Operand(kTypeAndReadOnlyMask)); 118 __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
137 __ b(ne, miss); 119 __ b(ne, miss);
138 120
139 // Store the value at the masked, scaled index and return. 121 // Store the value at the masked, scaled index and return.
140 const int kValueOffset = kElementsStartOffset + kPointerSize; 122 const int kValueOffset = kElementsStartOffset + kPointerSize;
141 __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag)); 123 __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
142 __ str(value, MemOperand(scratch2)); 124 __ str(value, MemOperand(scratch2));
143 125
144 // Update the write barrier. Make sure not to clobber the value. 126 // Update the write barrier. Make sure not to clobber the value.
145 __ mov(scratch1, value); 127 __ mov(scratch1, value);
146 __ RecordWrite( 128 __ RecordWrite(elements, scratch2, scratch1, kLRHasNotBeenSaved,
147 elements, scratch2, scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs); 129 kDontSaveFPRegs);
148 } 130 }
149 131
150 132
151 // Checks the receiver for special cases (value type, slow case bits). 133 // Checks the receiver for special cases (value type, slow case bits).
152 // Falls through for regular JS object. 134 // Falls through for regular JS object.
153 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm, 135 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
154 Register receiver, 136 Register receiver, Register map,
155 Register map,
156 Register scratch, 137 Register scratch,
157 int interceptor_bit, 138 int interceptor_bit, Label* slow) {
158 Label* slow) {
159 // Check that the object isn't a smi. 139 // Check that the object isn't a smi.
160 __ JumpIfSmi(receiver, slow); 140 __ JumpIfSmi(receiver, slow);
161 // Get the map of the receiver. 141 // Get the map of the receiver.
162 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); 142 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
163 // Check bit field. 143 // Check bit field.
164 __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset)); 144 __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
165 __ tst(scratch, 145 __ tst(scratch,
166 Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit))); 146 Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
167 __ b(ne, slow); 147 __ b(ne, slow);
168 // Check that the object is some kind of JS object EXCEPT JS Value type. 148 // Check that the object is some kind of JS object EXCEPT JS Value type.
169 // In the case that the object is a value-wrapper object, 149 // In the case that the object is a value-wrapper object,
170 // we enter the runtime system to make sure that indexing into string 150 // we enter the runtime system to make sure that indexing into string
171 // objects work as intended. 151 // objects work as intended.
172 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); 152 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);
173 __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); 153 __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
174 __ cmp(scratch, Operand(JS_OBJECT_TYPE)); 154 __ cmp(scratch, Operand(JS_OBJECT_TYPE));
175 __ b(lt, slow); 155 __ b(lt, slow);
176 } 156 }
177 157
178 158
179 // Loads an indexed element from a fast case array. 159 // Loads an indexed element from a fast case array.
180 // If not_fast_array is NULL, doesn't perform the elements map check. 160 // If not_fast_array is NULL, doesn't perform the elements map check.
181 static void GenerateFastArrayLoad(MacroAssembler* masm, 161 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
182 Register receiver, 162 Register key, Register elements,
183 Register key, 163 Register scratch1, Register scratch2,
184 Register elements, 164 Register result, Label* not_fast_array,
185 Register scratch1,
186 Register scratch2,
187 Register result,
188 Label* not_fast_array,
189 Label* out_of_range) { 165 Label* out_of_range) {
190 // Register use: 166 // Register use:
191 // 167 //
192 // receiver - holds the receiver on entry. 168 // receiver - holds the receiver on entry.
193 // Unchanged unless 'result' is the same register. 169 // Unchanged unless 'result' is the same register.
194 // 170 //
195 // key - holds the smi key on entry. 171 // key - holds the smi key on entry.
196 // Unchanged unless 'result' is the same register. 172 // Unchanged unless 'result' is the same register.
197 // 173 //
198 // elements - holds the elements of the receiver on exit. 174 // elements - holds the elements of the receiver on exit.
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
230 __ cmp(scratch2, ip); 206 __ cmp(scratch2, ip);
231 // In case the loaded value is the_hole we have to consult GetProperty 207 // In case the loaded value is the_hole we have to consult GetProperty
232 // to ensure the prototype chain is searched. 208 // to ensure the prototype chain is searched.
233 __ b(eq, out_of_range); 209 __ b(eq, out_of_range);
234 __ mov(result, scratch2); 210 __ mov(result, scratch2);
235 } 211 }
236 212
237 213
238 // Checks whether a key is an array index string or a unique name. 214 // Checks whether a key is an array index string or a unique name.
239 // Falls through if a key is a unique name. 215 // Falls through if a key is a unique name.
240 static void GenerateKeyNameCheck(MacroAssembler* masm, 216 static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
241 Register key, 217 Register map, Register hash,
242 Register map, 218 Label* index_string, Label* not_unique) {
243 Register hash,
244 Label* index_string,
245 Label* not_unique) {
246 // The key is not a smi. 219 // The key is not a smi.
247 Label unique; 220 Label unique;
248 // Is it a name? 221 // Is it a name?
249 __ CompareObjectType(key, map, hash, LAST_UNIQUE_NAME_TYPE); 222 __ CompareObjectType(key, map, hash, LAST_UNIQUE_NAME_TYPE);
250 __ b(hi, not_unique); 223 __ b(hi, not_unique);
251 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE); 224 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
252 __ b(eq, &unique); 225 __ b(eq, &unique);
253 226
254 // Is the string an array index, with cached numeric value? 227 // Is the string an array index, with cached numeric value?
255 __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset)); 228 __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset));
(...skipping 15 matching lines...) Expand all
271 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) { 244 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
272 // The return address is in lr. 245 // The return address is in lr.
273 Register receiver = ReceiverRegister(); 246 Register receiver = ReceiverRegister();
274 Register name = NameRegister(); 247 Register name = NameRegister();
275 DCHECK(receiver.is(r1)); 248 DCHECK(receiver.is(r1));
276 DCHECK(name.is(r2)); 249 DCHECK(name.is(r2));
277 250
278 // Probe the stub cache. 251 // Probe the stub cache.
279 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( 252 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
280 Code::ComputeHandlerFlags(Code::LOAD_IC)); 253 Code::ComputeHandlerFlags(Code::LOAD_IC));
281 masm->isolate()->stub_cache()->GenerateProbe( 254 masm->isolate()->stub_cache()->GenerateProbe(masm, flags, receiver, name, r3,
282 masm, flags, receiver, name, r3, r4, r5, r6); 255 r4, r5, r6);
283 256
284 // Cache miss: Jump to runtime. 257 // Cache miss: Jump to runtime.
285 GenerateMiss(masm); 258 GenerateMiss(masm);
286 } 259 }
287 260
288 261
289 void LoadIC::GenerateNormal(MacroAssembler* masm) { 262 void LoadIC::GenerateNormal(MacroAssembler* masm) {
290 Register dictionary = r0; 263 Register dictionary = r0;
291 DCHECK(!dictionary.is(ReceiverRegister())); 264 DCHECK(!dictionary.is(ReceiverRegister()));
292 DCHECK(!dictionary.is(NameRegister())); 265 DCHECK(!dictionary.is(NameRegister()));
(...skipping 18 matching lines...) Expand all
311 void LoadIC::GenerateMiss(MacroAssembler* masm) { 284 void LoadIC::GenerateMiss(MacroAssembler* masm) {
312 // The return address is in lr. 285 // The return address is in lr.
313 Isolate* isolate = masm->isolate(); 286 Isolate* isolate = masm->isolate();
314 287
315 __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4); 288 __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);
316 289
317 __ mov(LoadIC_TempRegister(), ReceiverRegister()); 290 __ mov(LoadIC_TempRegister(), ReceiverRegister());
318 __ Push(LoadIC_TempRegister(), NameRegister()); 291 __ Push(LoadIC_TempRegister(), NameRegister());
319 292
320 // Perform tail call to the entry. 293 // Perform tail call to the entry.
321 ExternalReference ref = 294 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
322 ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
323 __ TailCallExternalReference(ref, 2, 1); 295 __ TailCallExternalReference(ref, 2, 1);
324 } 296 }
325 297
326 298
327 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { 299 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
328 // The return address is in lr. 300 // The return address is in lr.
329 301
330 __ mov(LoadIC_TempRegister(), ReceiverRegister()); 302 __ mov(LoadIC_TempRegister(), ReceiverRegister());
331 __ Push(LoadIC_TempRegister(), NameRegister()); 303 __ Push(LoadIC_TempRegister(), NameRegister());
332 304
333 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); 305 __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
334 } 306 }
335 307
336 308
337 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm, 309 static MemOperand GenerateMappedArgumentsLookup(
338 Register object, 310 MacroAssembler* masm, Register object, Register key, Register scratch1,
339 Register key, 311 Register scratch2, Register scratch3, Label* unmapped_case,
340 Register scratch1, 312 Label* slow_case) {
341 Register scratch2,
342 Register scratch3,
343 Label* unmapped_case,
344 Label* slow_case) {
345 Heap* heap = masm->isolate()->heap(); 313 Heap* heap = masm->isolate()->heap();
346 314
347 // Check that the receiver is a JSObject. Because of the map check 315 // Check that the receiver is a JSObject. Because of the map check
348 // later, we do not need to check for interceptors or whether it 316 // later, we do not need to check for interceptors or whether it
349 // requires access checks. 317 // requires access checks.
350 __ JumpIfSmi(object, slow_case); 318 __ JumpIfSmi(object, slow_case);
351 // Check that the object is some kind of JSObject. 319 // Check that the object is some kind of JSObject.
352 __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE); 320 __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
353 __ b(lt, slow_case); 321 __ b(lt, slow_case);
354 322
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
405 Register backing_store = parameter_map; 373 Register backing_store = parameter_map;
406 __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset)); 374 __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
407 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map()); 375 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
408 __ CheckMap(backing_store, scratch, fixed_array_map, slow_case, 376 __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
409 DONT_DO_SMI_CHECK); 377 DONT_DO_SMI_CHECK);
410 __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset)); 378 __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
411 __ cmp(key, Operand(scratch)); 379 __ cmp(key, Operand(scratch));
412 __ b(cs, slow_case); 380 __ b(cs, slow_case);
413 __ mov(scratch, Operand(kPointerSize >> 1)); 381 __ mov(scratch, Operand(kPointerSize >> 1));
414 __ mul(scratch, key, scratch); 382 __ mul(scratch, key, scratch);
415 __ add(scratch, 383 __ add(scratch, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
416 scratch,
417 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
418 return MemOperand(backing_store, scratch); 384 return MemOperand(backing_store, scratch);
419 } 385 }
420 386
421 387
422 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) { 388 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) {
423 // The return address is in lr. 389 // The return address is in lr.
424 Register receiver = ReceiverRegister(); 390 Register receiver = ReceiverRegister();
425 Register key = NameRegister(); 391 Register key = NameRegister();
426 DCHECK(receiver.is(r1)); 392 DCHECK(receiver.is(r1));
427 DCHECK(key.is(r2)); 393 DCHECK(key.is(r2));
428 394
429 Label slow, notin; 395 Label slow, notin;
430 MemOperand mapped_location = 396 MemOperand mapped_location = GenerateMappedArgumentsLookup(
431 GenerateMappedArgumentsLookup( 397 masm, receiver, key, r0, r3, r4, &notin, &slow);
432 masm, receiver, key, r0, r3, r4, &notin, &slow);
433 __ ldr(r0, mapped_location); 398 __ ldr(r0, mapped_location);
434 __ Ret(); 399 __ Ret();
435 __ bind(&notin); 400 __ bind(&notin);
436 // The unmapped lookup expects that the parameter map is in r0. 401 // The unmapped lookup expects that the parameter map is in r0.
437 MemOperand unmapped_location = 402 MemOperand unmapped_location =
438 GenerateUnmappedArgumentsLookup(masm, key, r0, r3, &slow); 403 GenerateUnmappedArgumentsLookup(masm, key, r0, r3, &slow);
439 __ ldr(r0, unmapped_location); 404 __ ldr(r0, unmapped_location);
440 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); 405 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
441 __ cmp(r0, r3); 406 __ cmp(r0, r3);
442 __ b(eq, &slow); 407 __ b(eq, &slow);
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
507 DCHECK(FLAG_vector_ics); 472 DCHECK(FLAG_vector_ics);
508 return r3; 473 return r3;
509 } 474 }
510 475
511 476
512 const Register StoreIC::ReceiverRegister() { return r1; } 477 const Register StoreIC::ReceiverRegister() { return r1; }
513 const Register StoreIC::NameRegister() { return r2; } 478 const Register StoreIC::NameRegister() { return r2; }
514 const Register StoreIC::ValueRegister() { return r0; } 479 const Register StoreIC::ValueRegister() { return r0; }
515 480
516 481
517 const Register KeyedStoreIC::MapRegister() { 482 const Register KeyedStoreIC::MapRegister() { return r3; }
518 return r3;
519 }
520 483
521 484
522 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { 485 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
523 // The return address is in lr. 486 // The return address is in lr.
524 487
525 __ Push(ReceiverRegister(), NameRegister()); 488 __ Push(ReceiverRegister(), NameRegister());
526 489
527 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); 490 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
528 } 491 }
529 492
530 493
531 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { 494 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
532 // The return address is in lr. 495 // The return address is in lr.
533 Label slow, check_name, index_smi, index_name, property_array_property; 496 Label slow, check_name, index_smi, index_name, property_array_property;
534 Label probe_dictionary, check_number_dictionary; 497 Label probe_dictionary, check_number_dictionary;
535 498
536 Register key = NameRegister(); 499 Register key = NameRegister();
537 Register receiver = ReceiverRegister(); 500 Register receiver = ReceiverRegister();
538 DCHECK(key.is(r2)); 501 DCHECK(key.is(r2));
539 DCHECK(receiver.is(r1)); 502 DCHECK(receiver.is(r1));
540 503
541 Isolate* isolate = masm->isolate(); 504 Isolate* isolate = masm->isolate();
542 505
543 // Check that the key is a smi. 506 // Check that the key is a smi.
544 __ JumpIfNotSmi(key, &check_name); 507 __ JumpIfNotSmi(key, &check_name);
545 __ bind(&index_smi); 508 __ bind(&index_smi);
546 // Now the key is known to be a smi. This place is also jumped to from below 509 // Now the key is known to be a smi. This place is also jumped to from below
547 // where a numeric string is converted to a smi. 510 // where a numeric string is converted to a smi.
548 511
549 GenerateKeyedLoadReceiverCheck( 512 GenerateKeyedLoadReceiverCheck(masm, receiver, r0, r3,
550 masm, receiver, r0, r3, Map::kHasIndexedInterceptor, &slow); 513 Map::kHasIndexedInterceptor, &slow);
551 514
552 // Check the receiver's map to see if it has fast elements. 515 // Check the receiver's map to see if it has fast elements.
553 __ CheckFastElements(r0, r3, &check_number_dictionary); 516 __ CheckFastElements(r0, r3, &check_number_dictionary);
554 517
555 GenerateFastArrayLoad( 518 GenerateFastArrayLoad(masm, receiver, key, r0, r3, r4, r0, NULL, &slow);
556 masm, receiver, key, r0, r3, r4, r0, NULL, &slow);
557 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r4, r3); 519 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r4, r3);
558 __ Ret(); 520 __ Ret();
559 521
560 __ bind(&check_number_dictionary); 522 __ bind(&check_number_dictionary);
561 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset)); 523 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
562 __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset)); 524 __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset));
563 525
564 // Check whether the elements is a number dictionary. 526 // Check whether the elements is a number dictionary.
565 // r3: elements map 527 // r3: elements map
566 // r4: elements 528 // r4: elements
567 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 529 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
568 __ cmp(r3, ip); 530 __ cmp(r3, ip);
569 __ b(ne, &slow); 531 __ b(ne, &slow);
570 __ SmiUntag(r0, key); 532 __ SmiUntag(r0, key);
571 __ LoadFromNumberDictionary(&slow, r4, key, r0, r0, r3, r5); 533 __ LoadFromNumberDictionary(&slow, r4, key, r0, r0, r3, r5);
572 __ Ret(); 534 __ Ret();
573 535
574 // Slow case, key and receiver still in r2 and r1. 536 // Slow case, key and receiver still in r2 and r1.
575 __ bind(&slow); 537 __ bind(&slow);
576 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 538 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, r4,
577 1, r4, r3); 539 r3);
578 GenerateRuntimeGetProperty(masm); 540 GenerateRuntimeGetProperty(masm);
579 541
580 __ bind(&check_name); 542 __ bind(&check_name);
581 GenerateKeyNameCheck(masm, key, r0, r3, &index_name, &slow); 543 GenerateKeyNameCheck(masm, key, r0, r3, &index_name, &slow);
582 544
583 GenerateKeyedLoadReceiverCheck( 545 GenerateKeyedLoadReceiverCheck(masm, receiver, r0, r3,
584 masm, receiver, r0, r3, Map::kHasNamedInterceptor, &slow); 546 Map::kHasNamedInterceptor, &slow);
585 547
586 // If the receiver is a fast-case object, check the keyed lookup 548 // If the receiver is a fast-case object, check the keyed lookup
587 // cache. Otherwise probe the dictionary. 549 // cache. Otherwise probe the dictionary.
588 __ ldr(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 550 __ ldr(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
589 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset)); 551 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
590 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); 552 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
591 __ cmp(r4, ip); 553 __ cmp(r4, ip);
592 __ b(eq, &probe_dictionary); 554 __ b(eq, &probe_dictionary);
593 555
594 // Load the map of the receiver, compute the keyed lookup cache hash 556 // Load the map of the receiver, compute the keyed lookup cache hash
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after
652 __ jmp(&load_in_object_property); 614 __ jmp(&load_in_object_property);
653 } 615 }
654 } 616 }
655 617
656 // Load in-object property. 618 // Load in-object property.
657 __ bind(&load_in_object_property); 619 __ bind(&load_in_object_property);
658 __ ldrb(r6, FieldMemOperand(r0, Map::kInstanceSizeOffset)); 620 __ ldrb(r6, FieldMemOperand(r0, Map::kInstanceSizeOffset));
659 __ add(r6, r6, r5); // Index from start of object. 621 __ add(r6, r6, r5); // Index from start of object.
660 __ sub(receiver, receiver, Operand(kHeapObjectTag)); // Remove the heap tag. 622 __ sub(receiver, receiver, Operand(kHeapObjectTag)); // Remove the heap tag.
661 __ ldr(r0, MemOperand(receiver, r6, LSL, kPointerSizeLog2)); 623 __ ldr(r0, MemOperand(receiver, r6, LSL, kPointerSizeLog2));
662 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 624 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
663 1, r4, r3); 625 r4, r3);
664 __ Ret(); 626 __ Ret();
665 627
666 // Load property array property. 628 // Load property array property.
667 __ bind(&property_array_property); 629 __ bind(&property_array_property);
668 __ ldr(receiver, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 630 __ ldr(receiver, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
669 __ add(receiver, receiver, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 631 __ add(receiver, receiver, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
670 __ ldr(r0, MemOperand(receiver, r5, LSL, kPointerSizeLog2)); 632 __ ldr(r0, MemOperand(receiver, r5, LSL, kPointerSizeLog2));
671 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 633 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
672 1, r4, r3); 634 r4, r3);
673 __ Ret(); 635 __ Ret();
674 636
675 // Do a quick inline probe of the receiver's dictionary, if it 637 // Do a quick inline probe of the receiver's dictionary, if it
676 // exists. 638 // exists.
677 __ bind(&probe_dictionary); 639 __ bind(&probe_dictionary);
678 // r3: elements 640 // r3: elements
679 __ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset)); 641 __ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset));
680 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); 642 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
681 GenerateGlobalInstanceTypeCheck(masm, r0, &slow); 643 GenerateGlobalInstanceTypeCheck(masm, r0, &slow);
682 // Load the property to r0. 644 // Load the property to r0.
683 GenerateDictionaryLoad(masm, &slow, r3, key, r0, r5, r4); 645 GenerateDictionaryLoad(masm, &slow, r3, key, r0, r5, r4);
684 __ IncrementCounter( 646 __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(), 1, r4,
685 isolate->counters()->keyed_load_generic_symbol(), 1, r4, r3); 647 r3);
686 __ Ret(); 648 __ Ret();
687 649
688 __ bind(&index_name); 650 __ bind(&index_name);
689 __ IndexFromHash(r3, key); 651 __ IndexFromHash(r3, key);
690 // Now jump to the place where smi keys are handled. 652 // Now jump to the place where smi keys are handled.
691 __ jmp(&index_smi); 653 __ jmp(&index_smi);
692 } 654 }
693 655
694 656
695 void KeyedLoadIC::GenerateString(MacroAssembler* masm) { 657 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
696 // Return address is in lr. 658 // Return address is in lr.
697 Label miss; 659 Label miss;
698 660
699 Register receiver = ReceiverRegister(); 661 Register receiver = ReceiverRegister();
700 Register index = NameRegister(); 662 Register index = NameRegister();
701 Register scratch = r3; 663 Register scratch = r3;
702 Register result = r0; 664 Register result = r0;
703 DCHECK(!scratch.is(receiver) && !scratch.is(index)); 665 DCHECK(!scratch.is(receiver) && !scratch.is(index));
704 666
705 StringCharAtGenerator char_at_generator(receiver, 667 StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
706 index,
707 scratch,
708 result,
709 &miss, // When not a string. 668 &miss, // When not a string.
710 &miss, // When not a number. 669 &miss, // When not a number.
711 &miss, // When index out of range. 670 &miss, // When index out of range.
712 STRING_INDEX_IS_ARRAY_INDEX); 671 STRING_INDEX_IS_ARRAY_INDEX);
713 char_at_generator.GenerateFast(masm); 672 char_at_generator.GenerateFast(masm);
714 __ Ret(); 673 __ Ret();
715 674
716 StubRuntimeCallHelper call_helper; 675 StubRuntimeCallHelper call_helper;
717 char_at_generator.GenerateSlow(masm, call_helper); 676 char_at_generator.GenerateSlow(masm, call_helper);
718 677
(...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after
795 ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); 754 ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
796 __ TailCallExternalReference(ref, 3, 1); 755 __ TailCallExternalReference(ref, 3, 1);
797 } 756 }
798 757
799 758
// Emits the generic keyed-store slow path: hands the store off to the
// Runtime::kSetProperty C++ runtime function instead of handling it in
// generated code.
//
// masm        - assembler to emit code into.
// strict_mode - language mode to pass through to the runtime call.
//
// Calling convention (see ReceiverRegister/NameRegister/ValueRegister):
// receiver, key and value arrive in the IC's fixed registers; the runtime
// expects them on the stack followed by the strict-mode flag.
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictMode strict_mode) {
  // Push receiver, key and value for runtime call.
  __ Push(ReceiverRegister(), NameRegister(), ValueRegister());

  // Smi-encode the strict-mode flag and push it as the fourth argument.
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));  // Strict mode.
  __ Push(r0);

  // Tail call: 4 arguments on the stack, 1 result register; the runtime's
  // return becomes this stub's return.
  __ TailCallRuntime(Runtime::kSetProperty, 4, 1);
}
810 769
811 770
812 static void KeyedStoreGenerateGenericHelper( 771 static void KeyedStoreGenerateGenericHelper(
813 MacroAssembler* masm, 772 MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
814 Label* fast_object, 773 KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length,
815 Label* fast_double, 774 Register value, Register key, Register receiver, Register receiver_map,
816 Label* slow, 775 Register elements_map, Register elements) {
817 KeyedStoreCheckMap check_map,
818 KeyedStoreIncrementLength increment_length,
819 Register value,
820 Register key,
821 Register receiver,
822 Register receiver_map,
823 Register elements_map,
824 Register elements) {
825 Label transition_smi_elements; 776 Label transition_smi_elements;
826 Label finish_object_store, non_double_value, transition_double_elements; 777 Label finish_object_store, non_double_value, transition_double_elements;
827 Label fast_double_without_map_check; 778 Label fast_double_without_map_check;
828 779
829 // Fast case: Do the store, could be either Object or double. 780 // Fast case: Do the store, could be either Object or double.
830 __ bind(fast_object); 781 __ bind(fast_object);
831 Register scratch_value = r4; 782 Register scratch_value = r4;
832 Register address = r5; 783 Register address = r5;
833 if (check_map == kCheckMap) { 784 if (check_map == kCheckMap) {
834 __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); 785 __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
875 if (increment_length == kIncrementLength) { 826 if (increment_length == kIncrementLength) {
876 // Add 1 to receiver->length. 827 // Add 1 to receiver->length.
877 __ add(scratch_value, key, Operand(Smi::FromInt(1))); 828 __ add(scratch_value, key, Operand(Smi::FromInt(1)));
878 __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset)); 829 __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
879 } 830 }
880 __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 831 __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
881 __ add(address, address, Operand::PointerOffsetFromSmiKey(key)); 832 __ add(address, address, Operand::PointerOffsetFromSmiKey(key));
882 __ str(value, MemOperand(address)); 833 __ str(value, MemOperand(address));
883 // Update write barrier for the elements array address. 834 // Update write barrier for the elements array address.
884 __ mov(scratch_value, value); // Preserve the value which is returned. 835 __ mov(scratch_value, value); // Preserve the value which is returned.
885 __ RecordWrite(elements, 836 __ RecordWrite(elements, address, scratch_value, kLRHasNotBeenSaved,
886 address, 837 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
887 scratch_value,
888 kLRHasNotBeenSaved,
889 kDontSaveFPRegs,
890 EMIT_REMEMBERED_SET,
891 OMIT_SMI_CHECK);
892 __ Ret(); 838 __ Ret();
893 839
894 __ bind(fast_double); 840 __ bind(fast_double);
895 if (check_map == kCheckMap) { 841 if (check_map == kCheckMap) {
896 // Check for fast double array case. If this fails, call through to the 842 // Check for fast double array case. If this fails, call through to the
897 // runtime. 843 // runtime.
898 __ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex); 844 __ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex);
899 __ b(ne, slow); 845 __ b(ne, slow);
900 } 846 }
901 847
902 // HOLECHECK: guards "A[i] double hole?" 848 // HOLECHECK: guards "A[i] double hole?"
903 // We have to see if the double version of the hole is present. If so 849 // We have to see if the double version of the hole is present. If so
904 // go to the runtime. 850 // go to the runtime.
905 __ add(address, elements, 851 __ add(address, elements,
906 Operand((FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32)) 852 Operand((FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32)) -
907 - kHeapObjectTag)); 853 kHeapObjectTag));
908 __ ldr(scratch_value, 854 __ ldr(scratch_value,
909 MemOperand(address, key, LSL, kPointerSizeLog2, PreIndex)); 855 MemOperand(address, key, LSL, kPointerSizeLog2, PreIndex));
910 __ cmp(scratch_value, Operand(kHoleNanUpper32)); 856 __ cmp(scratch_value, Operand(kHoleNanUpper32));
911 __ b(ne, &fast_double_without_map_check); 857 __ b(ne, &fast_double_without_map_check);
912 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value, 858 __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
913 slow); 859 slow);
914 860
915 __ bind(&fast_double_without_map_check); 861 __ bind(&fast_double_without_map_check);
916 __ StoreNumberToDoubleElements(value, key, elements, r3, d0, 862 __ StoreNumberToDoubleElements(value, key, elements, r3, d0,
917 &transition_double_elements); 863 &transition_double_elements);
918 if (increment_length == kIncrementLength) { 864 if (increment_length == kIncrementLength) {
919 // Add 1 to receiver->length. 865 // Add 1 to receiver->length.
920 __ add(scratch_value, key, Operand(Smi::FromInt(1))); 866 __ add(scratch_value, key, Operand(Smi::FromInt(1)));
921 __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset)); 867 __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
922 } 868 }
923 __ Ret(); 869 __ Ret();
924 870
925 __ bind(&transition_smi_elements); 871 __ bind(&transition_smi_elements);
926 // Transition the array appropriately depending on the value type. 872 // Transition the array appropriately depending on the value type.
927 __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset)); 873 __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
928 __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex); 874 __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
929 __ b(ne, &non_double_value); 875 __ b(ne, &non_double_value);
930 876
931 // Value is a double. Transition FAST_SMI_ELEMENTS -> 877 // Value is a double. Transition FAST_SMI_ELEMENTS ->
932 // FAST_DOUBLE_ELEMENTS and complete the store. 878 // FAST_DOUBLE_ELEMENTS and complete the store.
933 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, 879 __ LoadTransitionedArrayMapConditional(
934 FAST_DOUBLE_ELEMENTS, 880 FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS, receiver_map, r4, slow);
935 receiver_map, 881 AllocationSiteMode mode =
936 r4, 882 AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
937 slow); 883 ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
938 AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, 884 receiver_map, mode, slow);
939 FAST_DOUBLE_ELEMENTS);
940 ElementsTransitionGenerator::GenerateSmiToDouble(
941 masm, receiver, key, value, receiver_map, mode, slow);
942 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); 885 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
943 __ jmp(&fast_double_without_map_check); 886 __ jmp(&fast_double_without_map_check);
944 887
945 __ bind(&non_double_value); 888 __ bind(&non_double_value);
946 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS 889 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
947 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, 890 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS,
948 FAST_ELEMENTS, 891 receiver_map, r4, slow);
949 receiver_map,
950 r4,
951 slow);
952 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS); 892 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
953 ElementsTransitionGenerator::GenerateMapChangeElementsTransition( 893 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
954 masm, receiver, key, value, receiver_map, mode, slow); 894 masm, receiver, key, value, receiver_map, mode, slow);
955 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); 895 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
956 __ jmp(&finish_object_store); 896 __ jmp(&finish_object_store);
957 897
958 __ bind(&transition_double_elements); 898 __ bind(&transition_double_elements);
959 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a 899 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
960 // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and 900 // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
961 // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS 901 // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
962 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, 902 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
963 FAST_ELEMENTS, 903 receiver_map, r4, slow);
964 receiver_map,
965 r4,
966 slow);
967 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS); 904 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
968 ElementsTransitionGenerator::GenerateDoubleToObject( 905 ElementsTransitionGenerator::GenerateDoubleToObject(
969 masm, receiver, key, value, receiver_map, mode, slow); 906 masm, receiver, key, value, receiver_map, mode, slow);
970 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); 907 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
971 __ jmp(&finish_object_store); 908 __ jmp(&finish_object_store);
972 } 909 }
973 910
974 911
975 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm, 912 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
976 StrictMode strict_mode) { 913 StrictMode strict_mode) {
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
1035 // element to the array by writing to array[array.length]. 972 // element to the array by writing to array[array.length].
1036 __ bind(&extra); 973 __ bind(&extra);
1037 // Condition code from comparing key and array length is still available. 974 // Condition code from comparing key and array length is still available.
1038 __ b(ne, &slow); // Only support writing to writing to array[array.length]. 975 __ b(ne, &slow); // Only support writing to writing to array[array.length].
1039 // Check for room in the elements backing store. 976 // Check for room in the elements backing store.
1040 // Both the key and the length of FixedArray are smis. 977 // Both the key and the length of FixedArray are smis.
1041 __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset)); 978 __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
1042 __ cmp(key, Operand(ip)); 979 __ cmp(key, Operand(ip));
1043 __ b(hs, &slow); 980 __ b(hs, &slow);
1044 __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset)); 981 __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1045 __ cmp(elements_map, 982 __ cmp(elements_map, Operand(masm->isolate()->factory()->fixed_array_map()));
1046 Operand(masm->isolate()->factory()->fixed_array_map()));
1047 __ b(ne, &check_if_double_array); 983 __ b(ne, &check_if_double_array);
1048 __ jmp(&fast_object_grow); 984 __ jmp(&fast_object_grow);
1049 985
1050 __ bind(&check_if_double_array); 986 __ bind(&check_if_double_array);
1051 __ cmp(elements_map, 987 __ cmp(elements_map,
1052 Operand(masm->isolate()->factory()->fixed_double_array_map())); 988 Operand(masm->isolate()->factory()->fixed_double_array_map()));
1053 __ b(ne, &slow); 989 __ b(ne, &slow);
1054 __ jmp(&fast_double_grow); 990 __ jmp(&fast_double_grow);
1055 991
1056 // Array case: Get the length and the elements array from the JS 992 // Array case: Get the length and the elements array from the JS
1057 // array. Check that the array is in fast mode (and writable); if it 993 // array. Check that the array is in fast mode (and writable); if it
1058 // is the length is always a smi. 994 // is the length is always a smi.
1059 __ bind(&array); 995 __ bind(&array);
1060 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); 996 __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1061 997
1062 // Check the key against the length in the array. 998 // Check the key against the length in the array.
1063 __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset)); 999 __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
1064 __ cmp(key, Operand(ip)); 1000 __ cmp(key, Operand(ip));
1065 __ b(hs, &extra); 1001 __ b(hs, &extra);
1066 1002
1067 KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double, 1003 KeyedStoreGenerateGenericHelper(
1068 &slow, kCheckMap, kDontIncrementLength, 1004 masm, &fast_object, &fast_double, &slow, kCheckMap, kDontIncrementLength,
1069 value, key, receiver, receiver_map, 1005 value, key, receiver, receiver_map, elements_map, elements);
1070 elements_map, elements);
1071 KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow, 1006 KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
1072 &slow, kDontCheckMap, kIncrementLength, 1007 &slow, kDontCheckMap, kIncrementLength, value,
1073 value, key, receiver, receiver_map, 1008 key, receiver, receiver_map, elements_map,
1074 elements_map, elements); 1009 elements);
1075 } 1010 }
1076 1011
1077 1012
// Emits the megamorphic StoreIC stub: probes the isolate-wide stub cache for
// a handler matching (receiver map, name) and jumps to it on a hit; on a
// miss, falls through to the generic miss handler.
//
// masm - assembler to emit code into.
//
// Register convention on entry (asserted below): receiver in r1, name in r2,
// value in r0. r3-r6 are used as scratch registers by the probe.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  DCHECK(receiver.is(r1));
  DCHECK(name.is(r2));
  DCHECK(ValueRegister().is(r0));

  // Get the receiver from the stack and probe the stub cache.
  // Type and holder are stripped so the flags match any STORE_IC handler
  // entry in the cache.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));

  masm->isolate()->stub_cache()->GenerateProbe(masm, flags, receiver, name, r3,
                                               r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
1095 1030
1096 1031
1097 void StoreIC::GenerateMiss(MacroAssembler* masm) { 1032 void StoreIC::GenerateMiss(MacroAssembler* masm) {
1098 __ Push(ReceiverRegister(), NameRegister(), ValueRegister()); 1033 __ Push(ReceiverRegister(), NameRegister(), ValueRegister());
1099 1034
1100 // Perform tail call to the entry. 1035 // Perform tail call to the entry.
(...skipping 10 matching lines...) Expand all
1111 Register value = ValueRegister(); 1046 Register value = ValueRegister();
1112 Register dictionary = r3; 1047 Register dictionary = r3;
1113 DCHECK(receiver.is(r1)); 1048 DCHECK(receiver.is(r1));
1114 DCHECK(name.is(r2)); 1049 DCHECK(name.is(r2));
1115 DCHECK(value.is(r0)); 1050 DCHECK(value.is(r0));
1116 1051
1117 __ ldr(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 1052 __ ldr(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
1118 1053
1119 GenerateDictionaryStore(masm, &miss, dictionary, name, value, r4, r5); 1054 GenerateDictionaryStore(masm, &miss, dictionary, name, value, r4, r5);
1120 Counters* counters = masm->isolate()->counters(); 1055 Counters* counters = masm->isolate()->counters();
1121 __ IncrementCounter(counters->store_normal_hit(), 1056 __ IncrementCounter(counters->store_normal_hit(), 1, r4, r5);
1122 1, r4, r5);
1123 __ Ret(); 1057 __ Ret();
1124 1058
1125 __ bind(&miss); 1059 __ bind(&miss);
1126 __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5); 1060 __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
1127 GenerateMiss(masm); 1061 GenerateMiss(masm);
1128 } 1062 }
1129 1063
1130 1064
1131 void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, 1065 void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
1132 StrictMode strict_mode) { 1066 StrictMode strict_mode) {
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
1182 // If the instruction following the call is not a cmp rx, #yyy, nothing 1116 // If the instruction following the call is not a cmp rx, #yyy, nothing
1183 // was inlined. 1117 // was inlined.
1184 Instr instr = Assembler::instr_at(cmp_instruction_address); 1118 Instr instr = Assembler::instr_at(cmp_instruction_address);
1185 if (!Assembler::IsCmpImmediate(instr)) { 1119 if (!Assembler::IsCmpImmediate(instr)) {
1186 return; 1120 return;
1187 } 1121 }
1188 1122
1189 // The delta to the start of the map check instruction and the 1123 // The delta to the start of the map check instruction and the
1190 // condition code uses at the patched jump. 1124 // condition code uses at the patched jump.
1191 int delta = Assembler::GetCmpImmediateRawImmediate(instr); 1125 int delta = Assembler::GetCmpImmediateRawImmediate(instr);
1192 delta += 1126 delta += Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
1193 Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
1194 // If the delta is 0 the instruction is cmp r0, #0 which also signals that 1127 // If the delta is 0 the instruction is cmp r0, #0 which also signals that
1195 // nothing was inlined. 1128 // nothing was inlined.
1196 if (delta == 0) { 1129 if (delta == 0) {
1197 return; 1130 return;
1198 } 1131 }
1199 1132
1200 if (FLAG_trace_ic) { 1133 if (FLAG_trace_ic) {
1201 PrintF("[ patching ic at %p, cmp=%p, delta=%d\n", 1134 PrintF("[ patching ic at %p, cmp=%p, delta=%d\n", address,
1202 address, cmp_instruction_address, delta); 1135 cmp_instruction_address, delta);
1203 } 1136 }
1204 1137
1205 Address patch_address = 1138 Address patch_address =
1206 cmp_instruction_address - delta * Instruction::kInstrSize; 1139 cmp_instruction_address - delta * Instruction::kInstrSize;
1207 Instr instr_at_patch = Assembler::instr_at(patch_address); 1140 Instr instr_at_patch = Assembler::instr_at(patch_address);
1208 Instr branch_instr = 1141 Instr branch_instr =
1209 Assembler::instr_at(patch_address + Instruction::kInstrSize); 1142 Assembler::instr_at(patch_address + Instruction::kInstrSize);
1210 // This is patching a conditional "jump if not smi/jump if smi" site. 1143 // This is patching a conditional "jump if not smi/jump if smi" site.
1211 // Enabling by changing from 1144 // Enabling by changing from
1212 // cmp rx, rx 1145 // cmp rx, rx
(...skipping 15 matching lines...) Expand all
1228 patcher.masm()->cmp(reg, reg); 1161 patcher.masm()->cmp(reg, reg);
1229 } 1162 }
1230 DCHECK(Assembler::IsBranch(branch_instr)); 1163 DCHECK(Assembler::IsBranch(branch_instr));
1231 if (Assembler::GetCondition(branch_instr) == eq) { 1164 if (Assembler::GetCondition(branch_instr) == eq) {
1232 patcher.EmitCondition(ne); 1165 patcher.EmitCondition(ne);
1233 } else { 1166 } else {
1234 DCHECK(Assembler::GetCondition(branch_instr) == ne); 1167 DCHECK(Assembler::GetCondition(branch_instr) == ne);
1235 patcher.EmitCondition(eq); 1168 patcher.EmitCondition(eq);
1236 } 1169 }
1237 } 1170 }
1238 1171 }
1239 1172 } // namespace v8::internal
1240 } } // namespace v8::internal
1241 1173
1242 #endif // V8_TARGET_ARCH_ARM 1174 #endif // V8_TARGET_ARCH_ARM
OLDNEW
« no previous file with comments | « src/ic-inl.h ('k') | src/ic/arm/ic-compiler-arm.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698