Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(144)

Side by Side Diff: src/mips/ic-mips.cc

Issue 6973024: Introduce ic-mips.cc. (Closed) Base URL: http://github.com/v8/v8.git@bleeding_edge
Patch Set: Created 9 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/mips/code-stubs-mips.cc ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 30 matching lines...) Expand all
41 namespace internal { 41 namespace internal {
42 42
43 43
44 // ---------------------------------------------------------------------------- 44 // ----------------------------------------------------------------------------
45 // Static IC stub generators. 45 // Static IC stub generators.
46 // 46 //
47 47
48 #define __ ACCESS_MASM(masm) 48 #define __ ACCESS_MASM(masm)
49 49
50 50
// Jumps to |global_object| if |type| is one of the global-object instance
// types (JSGlobalObject, JSBuiltinsObject or JSGlobalProxy); falls through
// for every other instance type.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ Branch(global_object, eq, type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE));
}
60
61
// Generated code falls through if the receiver is a regular non-global
// JS object with slow (dictionary) properties and no interceptors.
// Jumps to |miss| otherwise.
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register elements,
                                                  Register scratch0,
                                                  Register scratch1,
                                                  Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   elements: holds the property dictionary on fall through.
  // Scratch registers:
  //   scratch0: used to hold the receiver map.
  //   scratch1: used to hold the receiver instance type, receiver bit mask
  //             and elements map.

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ GetObjectType(receiver, scratch0, scratch1);
  __ Branch(miss, lt, scratch1, Operand(FIRST_JS_OBJECT_TYPE));

  // Only the lower bound is checked above; if this assert fails, we have to
  // check the upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Global objects never use string dictionary probing directly.
  GenerateGlobalInstanceTypeCheck(masm, scratch1, miss);

  // Check that the receiver does not require access checks and has no
  // named interceptor (both disqualify the fast dictionary path).
  __ lbu(scratch1, FieldMemOperand(scratch0, Map::kBitFieldOffset));
  __ And(scratch1, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) |
                                     (1 << Map::kHasNamedInterceptor)));
  __ Branch(miss, ne, scratch1, Operand(zero_reg));

  // Verify the properties backing store really is a dictionary (hash table).
  __ lw(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(scratch0, Heap::kHashTableMapRootIndex);
  __ Branch(miss, ne, scratch1, Operand(scratch0));
}
101
102
// Helper function used from LoadIC/CallIC GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// result:   Register for the result. It is only updated if a jump to the
//           miss label is not done. Can be the same as elements or name,
//           clobbering one of these in the case of not jumping to the miss
//           label.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
// The address returned from GenerateStringDictionaryProbes() in scratch2
// is used.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register result,
                                   Register scratch1,
                                   Register scratch2) {
  // Main use of the scratch registers.
  //   scratch1: Used as temporary and to hold the capacity of the property
  //             dictionary.
  //   scratch2: Used as temporary.
  Label done;

  // Probe the dictionary; on success &done is bound with the entry address
  // in scratch2, on failure control transfers to miss.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry check that the value is a normal
  // property.
  __ bind(&done);  // scratch2 == elements + 4 * index.
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  // Entry layout is (key, value, details); details is the third word.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  // Details are stored as a smi, hence the extra kSmiTagSize shift on the
  // type-field mask; a non-zero type means "not a NORMAL property" -> miss.
  __ And(at,
         scratch1,
         Operand(PropertyDetails::TypeField::mask() << kSmiTagSize));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  __ lw(result,
        FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}
156
157
// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// value:    The value to store.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
// The address returned from GenerateStringDictionaryProbes() in scratch2
// is used.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch1,
                                    Register scratch2) {
  // Main use of the scratch registers.
  //   scratch1: Used as temporary and to hold the capacity of the property
  //             dictionary.
  //   scratch2: Used as temporary.
  Label done;

  // Probe the dictionary; on success &done is bound with the entry address
  // in scratch2, on failure control transfers to miss.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry in the dictionary check that the value
  // is a normal property that is not read only.
  __ bind(&done);  // scratch2 == elements + 4 * index.
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject non-NORMAL properties and READ_ONLY properties in one mask;
  // the details word is a smi, hence the kSmiTagSize shift.
  const int kTypeAndReadOnlyMask
      = (PropertyDetails::TypeField::mask() |
         PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ And(at, scratch1, Operand(kTypeAndReadOnlyMask));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ Addu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ sw(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mov(scratch1, value);
  __ RecordWrite(elements, scratch2, scratch1);
}
215
216
// Probes a NumberDictionary for |key| and, on success, loads the value into
// |result|. Jumps to |miss| if the key is absent or the property is not a
// normal data property.
static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
                                         Label* miss,
                                         Register elements,
                                         Register key,
                                         Register result,
                                         Register reg0,
                                         Register reg1,
                                         Register reg2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'result'.
  //            Unchanged on bailout so 'key' or 'result' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // reg0     - holds the untagged key on entry and holds the hash once
  //            computed.
  //
  // reg1     - used to hold the capacity mask of the dictionary.
  //
  // reg2     - used for the index into the dictionary.
  // at       - temporary (avoid MacroAssembler instructions also using 'at').
  Label done;

  // Compute the hash code from the untagged key. This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  __ nor(reg1, reg0, zero_reg);
  __ sll(at, reg0, 15);
  __ addu(reg0, reg1, at);

  // hash = hash ^ (hash >> 12);
  __ srl(at, reg0, 12);
  __ xor_(reg0, reg0, at);

  // hash = hash + (hash << 2);
  __ sll(at, reg0, 2);
  __ addu(reg0, reg0, at);

  // hash = hash ^ (hash >> 4);
  __ srl(at, reg0, 4);
  __ xor_(reg0, reg0, at);

  // hash = hash * 2057;
  __ li(reg1, Operand(2057));
  __ mul(reg0, reg0, reg1);

  // hash = hash ^ (hash >> 16);
  __ srl(at, reg0, 16);
  __ xor_(reg0, reg0, at);

  // Compute the capacity mask (capacity is a smi and always a power of two,
  // so capacity - 1 is a valid bit mask).
  __ lw(reg1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
  __ sra(reg1, reg1, kSmiTagSize);
  __ Subu(reg1, reg1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  static const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use reg2 for index calculations and keep the hash intact in reg0.
    __ mov(reg2, reg0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      __ Addu(reg2, reg2, Operand(NumberDictionary::GetProbeOffset(i)));
    }
    __ and_(reg2, reg2, reg1);

    // Scale the index by multiplying by the element size.
    ASSERT(NumberDictionary::kEntrySize == 3);
    __ sll(at, reg2, 1);  // 2x.
    __ addu(reg2, reg2, at);  // reg2 = reg2 * 3.

    // Check if the key is identical to the name.
    __ sll(at, reg2, kPointerSizeLog2);
    __ addu(reg2, elements, at);

    __ lw(at, FieldMemOperand(reg2, NumberDictionary::kElementsStartOffset));
    if (i != kProbes - 1) {
      // Intermediate probe: continue to the next probe on mismatch.
      __ Branch(&done, eq, key, Operand(at));
    } else {
      // Last probe: give up on mismatch.
      __ Branch(miss, ne, key, Operand(at));
    }
  }

  __ bind(&done);
  // Check that the value is a normal property.
  // reg2: elements + (index * kPointerSize).
  const int kDetailsOffset =
      NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  __ lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
  __ And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::mask())));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      NumberDictionary::kElementsStartOffset + kPointerSize;
  __ lw(result, FieldMemOperand(reg2, kValueOffset));
}
324
325
// Fast path for loading the 'length' property of a JSArray; falls back to
// the LoadIC miss stub for anything else.
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadArrayLength(masm, a0, a3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
54 339
55 340
// Fast path for loading the 'length' property of a string (and, when
// support_wrappers is set, of a String wrapper object); falls back to the
// LoadIC miss stub otherwise.
void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address  (was mislabeled 'lr' — an ARM register name)
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadStringLength(masm, a0, a1, a3, &miss,
                                         support_wrappers);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
59 356
60 357
// Fast path for loading the 'prototype' property of a JSFunction; falls
// back to the LoadIC miss stub otherwise.
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address  (was mislabeled 'lr' — an ARM register name)
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadFunctionPrototype(masm, a0, a1, a3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
371
372
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS objects; jumps to |slow| otherwise.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           Register scratch,
                                           int interceptor_bit,
                                           Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check bit field: any slow-case bit set forces the slow path.
  __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(at, scratch, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
  __ Branch(slow, ne, at, Operand(zero_reg));
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE));
}
397
398
// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch1,
                                  Register scratch2,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // scratch1 - used to hold elements map and elements length.
  //            Holds the elements map if not_fast_array branch is taken.
  //
  // scratch2 - used to hold the loaded value.

  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode (not dictionary).
    __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ Branch(not_fast_array, ne, scratch1, Operand(at));
  } else {
    __ AssertFastElements(elements);
  }

  // Check that the key (index) is within bounds (unsigned compare also
  // rejects negative untagged indices).
  __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(out_of_range, hs, key, Operand(scratch1));

  // Fast case: Do the load.
  __ Addu(scratch1, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // The key is a smi, so shifting by (kPointerSizeLog2 - kSmiTagSize)
  // converts it directly into a byte offset.
  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ sll(at, key, kPointerSizeLog2 - kSmiTagSize);
  __ addu(at, at, scratch1);
  __ lw(scratch2, MemOperand(at));

  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ Branch(out_of_range, eq, scratch2, Operand(at));
  __ mov(result, scratch2);
}
461
462
// Checks whether a key is an array index string or a symbol string.
// Falls through if the key is a symbol; jumps to |index_string| for
// strings with a cached array index and to |not_symbol| otherwise.
static void GenerateKeyStringCheck(MacroAssembler* masm,
                                   Register key,
                                   Register map,
                                   Register hash,
                                   Label* index_string,
                                   Label* not_symbol) {
  // The key is not a smi.
  // Is it a string?
  __ GetObjectType(key, map, hash);
  __ Branch(not_symbol, ge, hash, Operand(FIRST_NONSTRING_TYPE));

  // Is the string an array index, with cached numeric value?
  __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset));
  __ And(at, hash, Operand(String::kContainsCachedArrayIndexMask));
  // A zero mask result means the cached-index bit pattern is present.
  __ Branch(index_string, eq, at, Operand(zero_reg));

  // Is the string a symbol?
  // map: key map
  __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kSymbolTag != 0);
  __ And(at, hash, Operand(kIsSymbolMask));
  __ Branch(not_symbol, eq, at, Operand(zero_reg));
}
64 488
65 489
66 // Defined in ic.cc. 490 // Defined in ic.cc.
67 Object* CallIC_Miss(Arguments args); 491 Object* CallIC_Miss(Arguments args);
68 492
// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                          int argc,
                                          Code::Kind kind) {
  // ----------- S t a t e -------------
  //  -- a1    : receiver
  //  -- a2    : name
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC,
                                         Code::kNoExtraICState,
                                         NORMAL,
                                         argc);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a1, a2, a3, t0, t1);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ JumpIfSmi(a1, &number, t1);
  __ GetObjectType(a1, a3, a3);
  __ Branch(&non_number, ne, a3, Operand(HEAP_NUMBER_TYPE));
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, a1);
  __ Branch(&probe);

  // Check for string (a3 still holds the instance type from above).
  __ bind(&non_number);
  __ Branch(&non_string, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, a1);
  __ Branch(&probe);

  // Check for boolean (true or false root values).
  __ bind(&non_string);
  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
  __ Branch(&boolean, eq, a1, Operand(t0));
  __ LoadRoot(t1, Heap::kFalseValueRootIndex);
  __ Branch(&miss, ne, a1, Operand(t1));
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, a1);

  // Probe the stub cache for the value object (a1 now holds the
  // corresponding prototype object).
  __ bind(&probe);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a1, a2, a3, t0, t1);

  // Both probes missed: fall through.
  __ bind(&miss);
}
552
553
// Tail-calls the JSFunction in a1 with |argc| arguments; jumps to |miss|
// if a1 does not hold a JSFunction.
static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss,
                                     Register scratch) {
  // a1: function

  // Check that the value isn't a smi.
  __ JumpIfSmi(a1, miss);

  // Check that the value is a JSFunction.
  __ GetObjectType(a1, scratch, scratch);
  __ Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(a1, actual, JUMP_FUNCTION);
}
571
572
// Shared body of CallIC/KeyedCallIC GenerateNormal: looks the callee up in
// the receiver's property dictionary and tail-calls it. Falls through on
// miss (the caller emits the miss handler after this).
static void GenerateCallNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack into a1.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  GenerateStringDictionaryReceiverCheck(masm, a1, a0, a3, t0, &miss);

  // a0: elements
  // Search the dictionary - put result in register a1.
  GenerateDictionaryLoad(masm, &miss, a0, a2, a1, a3, t0);

  GenerateFunctionTailCall(masm, argc, &miss, t0);

  // Cache miss: fall through to the caller-emitted miss code.
  __ bind(&miss);
}
594
595
// Shared body of CallIC/KeyedCallIC GenerateMiss: calls the IC miss
// runtime entry identified by |id| and invokes the function it returns.
static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, a3, t0);
  } else {
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, a3, t0);
  }

  // Get the receiver of the function from the stack.
  __ lw(a3, MemOperand(sp, argc*kPointerSize));

  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
  __ Push(a3, a2);

  // Call the entry with 2 arguments (receiver, name).
  __ li(a0, Operand(2));
  __ li(a1, Operand(ExternalReference(IC_Utility(id), isolate)));

  CEntryStub stub(1);
  __ CallStub(&stub);

  // Move result to a1 and leave the internal frame.
  __ mov(a1, v0);
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ lw(a2, MemOperand(sp, argc * kPointerSize));
    __ andi(t0, a2, kSmiTagMask);
    __ Branch(&invoke, eq, t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, a3);
    __ Branch(&global, eq, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
    __ Branch(&invoke, ne, a3, Operand(JS_BUILTINS_OBJECT_TYPE));

    // Patch the receiver on the stack with the global receiver proxy.
    __ bind(&global);
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
    __ sw(a2, MemOperand(sp, argc * kPointerSize));
    __ bind(&invoke);
  }
  // Invoke the function returned by the miss handler.
  ParameterCount actual(argc);
  __ InvokeFunction(a1, actual, JUMP_FUNCTION);
}
649
69 650
// CallIC miss handler: delegates to the shared GenerateCallMiss helper.
void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  GenerateCallMiss(masm, argc, IC::kCallIC_Miss);
}
73 659
74 660
// CallIC megamorphic case: probe the monomorphic stub cache first and fall
// back to the miss handler.
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into a1.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC);
  // The probe falls through on miss.
  GenerateMiss(masm, argc);
}
78 672
79 673
// CallIC normal (dictionary-properties) case: dictionary lookup, then miss.
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  GenerateCallNormal(masm, argc);
  // GenerateCallNormal falls through on miss.
  GenerateMiss(masm, argc);
}
83 683
84 684
// KeyedCallIC miss handler: delegates to the shared GenerateCallMiss helper.
void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss);
}
88 693
89 694
// KeyedCallIC megamorphic case: dispatches on the key kind (smi index,
// numeric string, symbol, other) to the fastest available lookup, falling
// back through slower paths to the generic miss handler.
void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into a1.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  // Check that the key is a smi.
  __ JumpIfNotSmi(a2, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from
  // below where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, a1, a0, a3, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, a1, a2, t0, a3, a0, a1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, a0, a3);

  __ bind(&do_call);
  // receiver in a1 is not used after this point.
  // a2: key
  // a1: function

  GenerateFunctionTailCall(masm, argc, &slow_call, a0);

  __ bind(&check_number_dictionary);
  // a2: key
  // a3: elements map
  // t0: elements pointer
  // Check whether the elements is a number dictionary.
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&slow_load, ne, a3, Operand(at));
  __ sra(a0, a2, kSmiTagSize);
  // a0: untagged index
  GenerateNumberDictionaryLoad(masm, &slow_load, t0, a2, a1, a0, a3, t1);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, a0, a3);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, a0, a3);
  __ EnterInternalFrame();
  __ push(a2);  // Save the key.
  __ Push(a1, a2);  // Pass the receiver and the key.
  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
  __ pop(a2);  // Restore the key.
  __ LeaveInternalFrame();
  __ mov(a1, v0);
  __ jmp(&do_call);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call);

  // The key is known to be a symbol.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset));
  __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at));

  GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3);
  GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  //  - the receiver requires boxing or access check,
  //  - the key is neither smi nor symbol,
  //  - the value loaded is not a function,
  //  - there is hope that the runtime will create a monomorphic call stub,
  //    that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3);
  GenerateMiss(masm, argc);

  __ bind(&index_string);
  __ IndexFromHash(a3, a2);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
93 794
94 795
// KeyedCallIC normal case: only string names can hit the dictionary path;
// everything else goes straight to the miss handler.
void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // Check if the name is a string.
  Label miss;
  __ JumpIfSmi(a2, &miss);
  __ IsObjectJSStringType(a2, a0, &miss);

  GenerateCallNormal(masm, argc);
  // GenerateCallNormal falls through on miss.
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
811
812
100 // Defined in ic.cc. 813 // Defined in ic.cc.
101 Object* LoadIC_Miss(Arguments args); 814 Object* LoadIC_Miss(Arguments args);
102 815
// LoadIC megamorphic case: probe the stub cache for a monomorphic LOAD_IC
// stub; jump to the miss handler if nothing is found.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a0, a2, a3, t0, t1);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
106 834
107 835
// LoadIC normal (dictionary-properties) case: inline dictionary lookup with
// the result returned in v0; jumps to the miss handler on failure.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address  (was mislabeled 'lr' — an ARM register name)
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, a0, a1, a3, t0, &miss);

  // a1: elements
  GenerateDictionaryLoad(masm, &miss, a1, a2, v0, a3, t0);
  __ Ret();

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}
111 855
112 856
113 void LoadIC::GenerateMiss(MacroAssembler* masm) { 857 void LoadIC::GenerateMiss(MacroAssembler* masm) {
114 UNIMPLEMENTED_MIPS(); 858 // ----------- S t a t e -------------
859 // -- a2 : name
860 // -- ra : return address
861 // -- a0 : receiver
862 // -- sp[0] : receiver
863 // -----------------------------------
864 Isolate* isolate = masm->isolate();
865
866 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);
867
868 __ mov(a3, a0);
869 __ Push(a3, a2);
870
871 // Perform tail call to the entry.
872 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
873 __ TailCallExternalReference(ref, 2, 1);
115 } 874 }
116 875
117 876
118 Object* KeyedLoadIC_Miss(Arguments args); 877 Object* KeyedLoadIC_Miss(Arguments args);
119 878
120 879
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra    : return address
  //  -- a0    : key
  //  -- a1    : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);

  // Push receiver and key as arguments for the runtime miss handler.
  __ Push(a1, a0);

  // Perform tail call to the entry.
  ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss),
                                            isolate);
  __ TailCallExternalReference(ref, 2, 1);
}
124 896
125 897
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra    : return address
  //  -- a0    : key
  //  -- a1    : receiver
  // -----------------------------------

  // Push receiver and key, then defer the whole lookup to the runtime.
  __ Push(a1, a0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
129 909
130 910
131 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { 911 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
132 UNIMPLEMENTED_MIPS(); 912 // ---------- S t a t e --------------
913 // -- ra : return address
914 // -- a0 : key
915 // -- a1 : receiver
916 // -----------------------------------
917 Label slow, check_string, index_smi, index_string, property_array_property;
918 Label probe_dictionary, check_number_dictionary;
919
920 Register key = a0;
921 Register receiver = a1;
922
923 Isolate* isolate = masm->isolate();
924
925 // Check that the key is a smi.
926 __ JumpIfNotSmi(key, &check_string);
927 __ bind(&index_smi);
928 // Now the key is known to be a smi. This place is also jumped to from below
929 // where a numeric string is converted to a smi.
930
931 GenerateKeyedLoadReceiverCheck(
932 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow);
933
934 // Check the "has fast elements" bit in the receiver's map which is
935 // now in a2.
936 __ lbu(a3, FieldMemOperand(a2, Map::kBitField2Offset));
937 __ And(at, a3, Operand(1 << Map::kHasFastElements));
938 __ Branch(&check_number_dictionary, eq, at, Operand(zero_reg));
939
940 GenerateFastArrayLoad(
941 masm, receiver, key, t0, a3, a2, v0, NULL, &slow);
942
943 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a2, a3);
944 __ Ret();
945
946 __ bind(&check_number_dictionary);
947 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
948 __ lw(a3, FieldMemOperand(t0, JSObject::kMapOffset));
949
950 // Check whether the elements is a number dictionary.
951 // a0: key
952 // a3: elements map
953 // t0: elements
954 __ LoadRoot(at, Heap::kHashTableMapRootIndex);
955 __ Branch(&slow, ne, a3, Operand(at));
956 __ sra(a2, a0, kSmiTagSize);
957 GenerateNumberDictionaryLoad(masm, &slow, t0, a0, v0, a2, a3, t1);
958 __ Ret();
959
960 // Slow case, key and receiver still in a0 and a1.
961 __ bind(&slow);
962 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
963 1,
964 a2,
965 a3);
966 GenerateRuntimeGetProperty(masm);
967
968 __ bind(&check_string);
969 GenerateKeyStringCheck(masm, key, a2, a3, &index_string, &slow);
970
971 GenerateKeyedLoadReceiverCheck(
972 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow);
973
974
975 // If the receiver is a fast-case object, check the keyed lookup
976 // cache. Otherwise probe the dictionary.
977 __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset));
978 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
979 __ LoadRoot(at, Heap::kHashTableMapRootIndex);
980 __ Branch(&probe_dictionary, eq, t0, Operand(at));
981
982 // Load the map of the receiver, compute the keyed lookup cache hash
983 // based on 32 bits of the map pointer and the string hash.
984 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
985 __ sra(a3, a2, KeyedLookupCache::kMapHashShift);
986 __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset));
987 __ sra(at, t0, String::kHashShift);
988 __ xor_(a3, a3, at);
989 __ And(a3, a3, Operand(KeyedLookupCache::kCapacityMask));
990
991 // Load the key (consisting of map and symbol) from the cache and
992 // check for match.
993 ExternalReference cache_keys =
994 ExternalReference::keyed_lookup_cache_keys(isolate);
995 __ li(t0, Operand(cache_keys));
996 __ sll(at, a3, kPointerSizeLog2 + 1);
997 __ addu(t0, t0, at);
998 __ lw(t1, MemOperand(t0)); // Move t0 to symbol.
999 __ Addu(t0, t0, Operand(kPointerSize));
1000 __ Branch(&slow, ne, a2, Operand(t1));
1001 __ lw(t1, MemOperand(t0));
1002 __ Branch(&slow, ne, a0, Operand(t1));
1003
1004 // Get field offset.
1005 // a0 : key
1006 // a1 : receiver
1007 // a2 : receiver's map
1008 // a3 : lookup cache index
1009 ExternalReference cache_field_offsets =
1010 ExternalReference::keyed_lookup_cache_field_offsets(isolate);
1011 __ li(t0, Operand(cache_field_offsets));
1012 __ sll(at, a3, kPointerSizeLog2);
1013 __ addu(at, t0, at);
1014 __ lw(t1, MemOperand(at));
1015 __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
1016 __ Subu(t1, t1, t2);
1017 __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
1018
1019 // Load in-object property.
1020 __ lbu(t2, FieldMemOperand(a2, Map::kInstanceSizeOffset));
1021 __ addu(t2, t2, t1); // Index from start of object.
1022 __ Subu(a1, a1, Operand(kHeapObjectTag)); // Remove the heap tag.
1023 __ sll(at, t2, kPointerSizeLog2);
1024 __ addu(at, a1, at);
1025 __ lw(v0, MemOperand(at));
1026 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1027 1,
1028 a2,
1029 a3);
1030 __ Ret();
1031
1032 // Load property array property.
1033 __ bind(&property_array_property);
1034 __ lw(a1, FieldMemOperand(a1, JSObject::kPropertiesOffset));
1035 __ Addu(a1, a1, FixedArray::kHeaderSize - kHeapObjectTag);
1036 __ sll(t0, t1, kPointerSizeLog2);
1037 __ Addu(t0, t0, a1);
1038 __ lw(v0, MemOperand(t0));
1039 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1040 1,
1041 a2,
1042 a3);
1043 __ Ret();
1044
1045
1046 // Do a quick inline probe of the receiver's dictionary, if it
1047 // exists.
1048 __ bind(&probe_dictionary);
1049 // a1: receiver
1050 // a0: key
1051 // a3: elements
1052 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1053 __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset));
1054 GenerateGlobalInstanceTypeCheck(masm, a2, &slow);
1055 // Load the property to v0.
1056 GenerateDictionaryLoad(masm, &slow, a3, a0, v0, a2, t0);
1057 __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
1058 1,
1059 a2,
1060 a3);
1061 __ Ret();
1062
1063 __ bind(&index_string);
1064 __ IndexFromHash(a3, key);
1065 // Now jump to the place where smi keys are handled.
1066 __ Branch(&index_smi);
133 } 1067 }
134 1068
135 1069
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra    : return address
  //  -- a0    : key (index)
  //  -- a1    : receiver
  // -----------------------------------
  Label miss;

  Register receiver = a1;
  Register index = a0;
  Register scratch1 = a2;
  Register scratch2 = a3;
  Register result = v0;

  // Specialized load for string[index]: delegate everything to the
  // shared character-at code generator; any unexpected case misses.
  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  // Slow path of the generator (e.g. non-flat string); may call runtime.
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
}
139 1102
140 1103
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- a0    : value
  //  -- a1    : key
  //  -- a2    : receiver
  //  -- ra    : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(a2, a1, a0);
  // Runtime::kSetProperty takes 5 arguments: the two extra ones are the
  // property attributes and the language (strict) mode.
  __ li(a1, Operand(Smi::FromInt(NONE)));          // PropertyAttributes.
  __ li(a0, Operand(Smi::FromInt(strict_mode)));   // Strict mode.
  __ Push(a1, a0);

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
145 1121
146 1122
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- a0    : value
  //  -- a1    : key
  //  -- a2    : receiver
  //  -- ra    : return address
  // -----------------------------------

  Label slow, fast, array, extra, exit;

  // Register usage.
  Register value = a0;
  Register key = a1;
  Register receiver = a2;
  Register elements = a3;  // Elements array of the receiver.
  // t0 is used as ip in the arm version.
  // t3-t4 are used as temporaries.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);

  // Get the map of the object.
  __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ lbu(t0, FieldMemOperand(t3, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
  __ Branch(&slow, ne, t0, Operand(zero_reg));
  // Check if the object is a JS array or not.
  __ lbu(t3, FieldMemOperand(t3, Map::kInstanceTypeOffset));

  __ Branch(&array, eq, t3, Operand(JS_ARRAY_TYPE));
  // Check that the object is some kind of JS object.
  __ Branch(&slow, lt, t3, Operand(FIRST_JS_OBJECT_TYPE));

  // Object case: Check key against length in the elements array.
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check that the object is in fast mode and writable.
  __ lw(t3, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kFixedArrayMapRootIndex);
  __ Branch(&slow, ne, t3, Operand(t0));
  // Check array bounds. Both the key and the length of FixedArray are smis,
  // so an unsigned (lo) compare works directly on the tagged values.
  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(&fast, lo, key, Operand(t0));
  // Fall thru to slow if un-tagged index >= length.

  // Slow case, handle jump to runtime.
  __ bind(&slow);

  // Entry registers are intact.
  // a0: value.
  // a1: key.
  // a2: receiver.

  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].

  __ bind(&extra);
  // Only support writing to array[array.length]; t0 still holds the
  // JSArray length from the bounds check in the array case below.
  __ Branch(&slow, ne, key, Operand(t0));
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(&slow, hs, key, Operand(t0));
  // Calculate key + 1 as smi (smi tag is 0, so plain addition works).
  ASSERT_EQ(0, kSmiTag);
  __ Addu(t3, key, Operand(Smi::FromInt(1)));
  __ sw(t3, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Branch(&fast);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.

  __ bind(&array);
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ lw(t3, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(t0, Heap::kFixedArrayMapRootIndex);
  __ Branch(&slow, ne, t3, Operand(t0));

  // Check the key against the length in the array.
  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Branch(&extra, hs, key, Operand(t0));
  // Fall through to fast case.

  __ bind(&fast);
  // Fast case, store the value to the elements backing store.
  __ Addu(t4, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t1, key, kPointerSizeLog2 - kSmiTagSize);  // Smi key -> byte offset.
  __ Addu(t4, t4, Operand(t1));
  __ sw(value, MemOperand(t4));
  // Skip write barrier if the written value is a smi.
  __ JumpIfSmi(value, &exit);

  // Update write barrier for the elements array address.
  __ Subu(t3, t4, Operand(elements));

  __ RecordWrite(elements, Operand(t3), t4, t5);
  __ bind(&exit);

  __ mov(v0, a0);  // Return the value written.
  __ Ret();
}
151 1233
152 1234
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra    : return address
  //  -- a0    : key
  //  -- a1    : receiver
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(a1, &slow);

  // Check that the key is an array index, that is Uint32 (a non-negative
  // smi: both the tag bit and the sign bit must be clear).
  __ And(t0, a0, Operand(kSmiTagMask | kSmiSignMask));
  __ Branch(&slow, ne, t0, Operand(zero_reg));

  // Get the map of the receiver.
  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ lbu(a3, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(a3, a3, Operand(kSlowCaseBitFieldMask));
  __ Branch(&slow, ne, a3, Operand(1 << Map::kHasIndexedInterceptor));
  // Everything is fine, call runtime.
  __ Push(a1, a0);  // Receiver, key.

  // Perform tail call to the entry.
  __ TailCallExternalReference(ExternalReference(
       IC_Utility(kKeyedLoadPropertyWithInterceptor), masm->isolate()), 2, 1);

  __ bind(&slow);
  GenerateMiss(masm);
}
156 1268
157 1269
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- a0    : value
  //  -- a1    : key
  //  -- a2    : receiver
  //  -- ra    : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  // We can't use MultiPush as the order of the registers is important.
  __ Push(a2, a1, a0);

  ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss),
                                            masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
161 1286
162 1287
163 void StoreIC::GenerateMegamorphic(MacroAssembler* masm, 1288 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
164 StrictModeFlag strict_mode) { 1289 StrictModeFlag strict_mode) {
165 UNIMPLEMENTED_MIPS(); 1290 // ----------- S t a t e -------------
1291 // -- a0 : value
1292 // -- a1 : receiver
1293 // -- a2 : name
1294 // -- ra : return address
1295 // -----------------------------------
1296
1297 // Get the receiver from the stack and probe the stub cache.
1298 Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
1299 NOT_IN_LOOP,
1300 MONOMORPHIC,
1301 strict_mode);
1302 Isolate::Current()->stub_cache()->GenerateProbe(
1303 masm, flags, a1, a2, a3, t0, t1);
1304
1305 // Cache miss: Jump to runtime.
1306 GenerateMiss(masm);
166 } 1307 }
167 1308
168 1309
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // Push receiver, name and value for the runtime miss handler.
  __ Push(a1, a2, a0);
  // Perform tail call to the entry.
  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss),
                                            masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
172 1324
173 1325
void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSObject::SetElementsLength accepts
  // (currently anything except for external and pixel arrays which means
  // anything with elements of FixedArray type.), but currently is restricted
  // to JSArray.
  // Value must be a number, but only smis are accepted as the most common case.

  Label miss;

  Register receiver = a1;
  Register value = a0;
  Register scratch = a3;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(&miss, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ lw(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
  __ GetObjectType(scratch, scratch, scratch);
  __ Branch(&miss, ne, scratch, Operand(FIXED_ARRAY_TYPE));

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver, value);

  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength),
                                            masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  // Anything unexpected: fall back to the generic store miss handler.
  __ bind(&miss);

  GenerateMiss(masm);
}
177 1374
178 1375
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Label miss;

  // Receiver must hold its properties in a string dictionary; the
  // dictionary is left in a3.
  GenerateStringDictionaryReceiverCheck(masm, a1, a3, t0, t1, &miss);

  // Store value (a0) under name (a2) in the dictionary (a3).
  GenerateDictionaryStore(masm, &miss, a3, a2, a0, t0, t1);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1);
  GenerateMiss(masm);
}
182 1396
183 1397
void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // Push receiver, name and value for the runtime call.
  __ Push(a1, a2, a0);

  // Runtime::kSetProperty takes 5 arguments: the two extra ones are the
  // property attributes and the language (strict) mode.
  __ li(a1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
  __ li(a0, Operand(Smi::FromInt(strict_mode)));
  __ Push(a1, a0);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1416
1417
190 #undef __ 1418 #undef __
191 1419
192 1420
193 Condition CompareIC::ComputeCondition(Token::Value op) { 1421 Condition CompareIC::ComputeCondition(Token::Value op) {
194 UNIMPLEMENTED_MIPS(); 1422 switch (op) {
195 return kNoCondition; 1423 case Token::EQ_STRICT:
1424 case Token::EQ:
1425 return eq;
1426 case Token::LT:
1427 return lt;
1428 case Token::GT:
1429 // Reverse left and right operands to obtain ECMA-262 conversion order.
1430 return lt;
1431 case Token::LTE:
1432 // Reverse left and right operands to obtain ECMA-262 conversion order.
1433 return ge;
1434 case Token::GTE:
1435 return ge;
1436 default:
1437 UNREACHABLE();
1438 return kNoCondition;
1439 }
196 } 1440 }
197 1441
198 1442
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
  HandleScope scope;
  Handle<Code> rewritten;
  State previous_state = GetState();
  // Pick the next state based on the operand types just observed.
  State state = TargetState(previous_state, false, x, y);
  if (state == GENERIC) {
    // Fully generic: fall back to the ordinary compare stub.
    CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, a1, a0);
    rewritten = stub.GetCode();
  } else {
    // Otherwise install the specialized IC compare stub for this state.
    ICCompareStub stub(op_, state);
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
#endif

  // Activate inlined smi code.
  if (previous_state == UNINITIALIZED) {
    PatchInlinedSmiCode(address());
  }
}
202 1471
203 1472
// Flips the inlined smi-check at a CompareIC call site from the inactive
// form (andi at, rx, 0) to an active check against kSmiTagMask, inverting
// the following branch so the smi/non-smi fast path is taken.
void PatchInlinedSmiCode(Address address) {
  Address andi_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a andi at, rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(andi_instruction_address);
  if (!Assembler::IsAndImmediate(instr)) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  // NOTE(review): the delta is reconstructed from both the immediate field
  // and the rs field of the andi — this must match the encoding emitted by
  // the full code generator.
  int delta = Assembler::GetImmediate16(instr);
  delta += Assembler::GetRs(instr) * kImm16Mask;
  // If the delta is 0 the instruction is andi at, zero_reg, #0 which also
  // signals that nothing was inlined.
  if (delta == 0) {
    return;
  }

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, andi=%p, delta=%d\n",
           address, andi_instruction_address, delta);
  }
#endif

  Address patch_address =
      andi_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  ASSERT(Assembler::IsAndImmediate(instr_at_patch));
  ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
  ASSERT(Assembler::IsBranch(branch_instr));
  if (Assembler::IsBeq(branch_instr)) {
    // This is patching a "jump if not smi" site to be active.
    // Changing:
    //   andi at, rx, 0
    //   Branch <target>, eq, at, Operand(zero_reg)
    // to:
    //   andi at, rx, #kSmiTagMask
    //   Branch <target>, ne, at, Operand(zero_reg)
    CodePatcher patcher(patch_address, 2);
    Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
    patcher.masm()->andi(at, reg, kSmiTagMask);
    patcher.ChangeBranchCondition(ne);
  } else {
    ASSERT(Assembler::IsBne(branch_instr));
    // This is patching a "jump if smi" site to be active.
    // Changing:
    //   andi at, rx, 0
    //   Branch <target>, ne, at, Operand(zero_reg)
    // to:
    //   andi at, rx, #kSmiTagMask
    //   Branch <target>, eq, at, Operand(zero_reg)
    CodePatcher patcher(patch_address, 2);
    Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
    patcher.masm()->andi(at, reg, kSmiTagMask);
    patcher.ChangeBranchCondition(eq);
  }
}
207 1536
208 1537
209 } } // namespace v8::internal 1538 } } // namespace v8::internal
210 1539
211 #endif // V8_TARGET_ARCH_MIPS 1540 #endif // V8_TARGET_ARCH_MIPS
OLDNEW
« no previous file with comments | « src/mips/code-stubs-mips.cc ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698