Chromium Code Reviews
Side by Side Diff: src/mips/stub-cache-mips.cc

Issue 7043003: Version 3.3.8 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Created 9 years, 7 months ago
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
(...skipping 20 matching lines...)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register offsets_base_addr = scratch;

  // Check that the key in the entry matches the name.
  __ li(offsets_base_addr, Operand(key_offset));
  __ sll(scratch2, offset, 1);
  __ addu(scratch2, offsets_base_addr, scratch2);
  __ lw(scratch2, MemOperand(scratch2));
  __ Branch(&miss, ne, name, Operand(scratch2));

  // Get the code entry from the cache.
  __ Addu(offsets_base_addr, offsets_base_addr,
          Operand(value_off_addr - key_off_addr));
  __ sll(scratch2, offset, 1);
  __ addu(scratch2, offsets_base_addr, scratch2);
  __ lw(scratch2, MemOperand(scratch2));

  // Check that the flags match what we're looking for.
  __ lw(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
  __ And(scratch2, scratch2, Operand(~Code::kFlagsNotUsedInLookup));
  __ Branch(&miss, ne, scratch2, Operand(flags));

  // Re-load code entry from cache.
  __ sll(offset, offset, 1);
  __ addu(offset, offset, offsets_base_addr);
  __ lw(offset, MemOperand(offset));

  // Jump to the first instruction in the code stub.
  __ Addu(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: fall through.
  __ bind(&miss);
}
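
// Reviewer sketch (illustrative, not part of this patch): the address
// arithmetic above assumes the cache is one array of interleaved 8-byte
// entries, roughly
//
//   struct Entry { String* key; Code* value; };   // sizeof(Entry) == 8
//
// with key_off_addr pointing at &table[0].key. 'offset' arrives pre-scaled
// by kPointerSize (see GenerateProbe below), so one extra left shift by 1
// turns it into a byte offset:
//
//   uint32_t key_addr   = key_off_addr + (offset << 1);            // &table[i].key
//   uint32_t value_addr = key_addr + (value_off_addr - key_off_addr);  // &table[i].value
//
// which is why the asserts only allow a small, word-aligned delta between
// the key and value references.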


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
    MacroAssembler* masm,
    Label* miss_label,
    Register receiver,
    String* name,
    Register scratch0,
    Register scratch1) {
  ASSERT(name->IsSymbol());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(at, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, at, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_OBJECT_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
      masm,
      miss_label,
      &done,
      receiver,
      properties,
      name,
      scratch1);
  if (result->IsFailure()) return result;

  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  return result;
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that the code is valid. The shifting code below relies on
  // the entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure that the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check that the scratch, extra and extra2 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss, t0);

  // Get the map of the receiver and compute the hash.
  __ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ lw(t8, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Addu(scratch, scratch, Operand(t8));
  __ Xor(scratch, scratch, Operand(flags));
  __ And(scratch,
         scratch,
         Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);

  // Primary miss: compute the hash for the secondary probe.
  __ Subu(scratch, scratch, Operand(name));
  __ Addu(scratch, scratch, Operand(flags));
  __ And(scratch,
         scratch,
         Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);

  // Cache miss: fall through and let the caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
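
// Reviewer sketch (illustrative, not part of this patch): the two probe
// offsets computed above correspond to the following C++ arithmetic, with
// 'name_hash' the string hash field, 'map' the raw map address, and the
// mask keeping the offset a multiple of kPointerSize:
//
//   uint32_t primary =
//       ((name_hash + map) ^ flags) &
//       ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
//   uint32_t secondary =
//       ((primary - name) + flags) &
//       ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
//
// A miss in both tables falls through to the generic runtime path.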


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ lw(prototype,
        FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ lw(prototype,
        FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ASSERT(!prototype.is(at));
  __ li(at, isolate->global());
  __ Branch(miss, ne, prototype, Operand(at));
  // Get the global function with the given index.
  JSFunction* function =
      JSFunction::cast(isolate->global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly, otherwise the property is loaded from the properties
// fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst, Register src,
                                            JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ lw(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ lw(dst, FieldMemOperand(dst, offset));
  }
}
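
// Reviewer sketch (illustrative, not part of this patch): a worked example
// of the index adjustment above, for a holder whose map reports 4 in-object
// property slots and an instance size of 48 bytes (hypothetical numbers;
// kPointerSize == 4 on MIPS32):
//
//   index = 2  ->  2 - 4 = -2  ->  offset = 48 + (-2 * 4) = 40
//                  (slot inside the object body)
//   index = 6  ->  6 - 4 =  2  ->  offset = 2 * 4 + FixedArray::kHeaderSize
//                  (slot in the external properties FixedArray)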


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ And(scratch, receiver, Operand(kSmiTagMask));
  __ Branch(miss_label, eq, scratch, Operand(zero_reg));

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Load length directly from the JS array.
  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


// Generate code to check if an object is a string. If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi, t0);

  // Check that the object is a string.
  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ Branch(non_string_object,
            ne,
            scratch2,
            Operand(static_cast<int32_t>(kStringTag)));
}


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object, the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string, leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));

    // Unwrap the value and check if the wrapped value is a string.
    __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
    __ Ret();
  }
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(v0, scratch1);
  __ Ret();
}


// Generate StoreField code. The value is passed in register a0.
// After executing the generated code, the receiver_reg and name_reg
// registers may be clobbered.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // a0 : value.
  Label exit;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver_reg, miss_label, scratch);

  // Check that the map of the receiver hasn't changed.
  __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ Branch(miss_label, ne, scratch, Operand(Handle<Map>(object->map())));

  // Perform a global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform a map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(Handle<Map>(transition)));
    __ Push(a2, a0);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3, 1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ li(t0, Operand(Handle<Map>(transition)));
    __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ sw(a0, FieldMemOperand(receiver_reg, offset));

    // Skip updating the write barrier if storing a smi.
    __ JumpIfSmi(a0, &exit, scratch);

    // Update the write barrier for the object address.
    // Pass the now unused name_reg as a scratch register.
    __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ sw(a0, FieldMemOperand(scratch, offset));

    // Skip updating the write barrier if storing a smi.
    __ JumpIfSmi(a0, &exit);

    // Update the write barrier for the array address.
    // It is OK to clobber receiver_reg and name_reg, since we return.
    __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
  }

  // Return the value (register v0).
  __ bind(&exit);
  __ mov(v0, a0);
  __ Ret();
}


void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Code* code = NULL;
  if (kind == Code::LOAD_IC) {
    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
  } else {
    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
  }

  Handle<Code> ic(code);
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss) {
  // ----------- S t a t e -------------
  //  -- a0: receiver
  //  -- a1: function to call
  // -----------------------------------
  // Check that the function really is a function.
  __ JumpIfSmi(a1, miss);
  __ GetObjectType(a1, a3, a3);
  __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
    __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  __ InvokeFunction(a1, arguments, JUMP_FUNCTION);
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
  Register scratch = name;
  __ li(scratch, Operand(Handle<Object>(interceptor)));
  __ Push(scratch, receiver, holder);
  __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}
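
// Reviewer sketch (illustrative, not part of this patch): after the pushes
// above, the five interceptor arguments sit on the stack like this,
// assuming Push(a, b, c) stores its first operand deepest:
//
//   sp[16] : name
//   sp[12] : InterceptorInfo
//   sp[8]  : receiver
//   sp[4]  : holder
//   sp[0]  : interceptor data (InterceptorInfo::kDataOffset)
//
// which matches the argument count of 5 used by the interceptor runtime
// calls below.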


static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  __ li(a0, Operand(5));
  __ li(a1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}


static const int kFastApiCallArguments = 3;


// Reserves space for the extra arguments to FastHandleApiCall in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  ASSERT(Smi::FromInt(0) == 0);
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(zero_reg);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
                                              const CallOptimization& optimization,
                                              int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee js function
  //  -- sp[8]              : call data
  //  -- sp[12]             : last js argument
  //  -- ...
  //  -- sp[(argc + 3) * 4] : first js argument
  //  -- sp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and set up the context.
  JSFunction* function = optimization.constant_function();
  __ li(t1, Operand(Handle<JSFunction>(function)));
  __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));

  // Pass the additional arguments that FastHandleApiCall expects.
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (masm->isolate()->heap()->InNewSpace(call_data)) {
    __ li(a0, api_call_info_handle);
    __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
  } else {
    __ li(t2, Operand(Handle<Object>(call_data)));
  }

  // Store the js function and call data.
  __ sw(t1, MemOperand(sp, 1 * kPointerSize));
  __ sw(t2, MemOperand(sp, 2 * kPointerSize));

  // a2 points to call data as expected by Arguments
  // (refer to layout above).
  __ Addu(a2, sp, Operand(2 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

  const int kApiStackSpace = 4;

  __ EnterExitFrame(false, kApiStackSpace);

  // NOTE: the O32 ABI requires a0 to hold a special pointer when returning a
  // struct from the function (which is currently the case). This means we pass
  // the first argument in a1 instead of a0. TryCallApiFunctionAndReturn
  // will handle setting up a0.

  // a1 = v8::Arguments&
  // Arguments is built at sp + 1 (sp is a reserved spot for ra).
  __ Addu(a1, sp, kPointerSize);

  // v8::Arguments::implicit_args = data
  __ sw(a2, MemOperand(a1, 0 * kPointerSize));
  // v8::Arguments::values = last argument
  __ Addu(t0, a2, Operand(argc * kPointerSize));
  __ sw(t0, MemOperand(a1, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ li(t0, Operand(argc));
  __ sw(t0, MemOperand(a1, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_API_CALL,
                        masm->isolate());
  return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
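
// Reviewer sketch (illustrative, not part of this patch): the four words
// stored above line up with the approximate shape of v8::Arguments in this
// V8 version; the struct below and its field names are assumptions, not
// the real declaration:
//
//   struct ArgumentsShape {
//     internal::Object** implicit_args_;  // -> call data block
//     internal::Object** values_;         // -> last JS argument
//     int length_;                        // argc
//     int is_construct_call_;             // 0 for a regular call
//   };
//
// so the callee can recover the receiver and arguments from values_
// without further marshalling.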


class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}

  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
                       String* name,
                       LookupResult* lookup,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return masm->isolate()->heap()->undefined_value();
    }
  }

 private:
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    Counters* counters = masm->isolate()->counters();

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from the receiver to the interceptor's holder
    // haven't changed, and thus we can invoke the interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke the interceptor and, if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // The interceptor returned nothing for this property. Try to use the
    // cached constant function.

    // Check that the maps from the interceptor's holder to the constant
    // function's holder haven't changed, and thus we can use the cached
    // constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for the API (an object which is instanceof for the signature). It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke the function.
    if (can_do_fast_api_call) {
      MaybeObject* result = GenerateFastApiDirectCall(masm,
                                                      optimization,
                                                      arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for the fast API call case: clean up the preallocated
    // space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ Branch(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }

    return masm->isolate()->heap()->undefined_value();
  }

  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    // Call a runtime function to load the interceptor property.
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(
            IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
            masm->isolate()),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();

    __ Push(holder, name_);

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    // If the interceptor returns the no-result sentinel, call the constant
    // function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
};


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(Handle<Object>(cell)));
  __ lw(scratch,
        FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
  return cell;
}


// Calls GenerateCheckPropertyCell for each global object in the prototype
// chain from object to (but not including) holder.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
    MacroAssembler* masm,
    JSObject* object,
    JSObject* holder,
    String* name,
    Register scratch,
    Label* miss) {
  JSObject* current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      // Returns a cell or a failure.
      MaybeObject* result = GenerateCheckPropertyCell(
          masm,
          GlobalObject::cast(current),
          name,
          scratch,
          miss);
      if (result->IsFailure()) return result;
    }
    ASSERT(current->IsJSObject());
    current = JSObject::cast(current->GetPrototype());
  }
  return NULL;
}


// Convert and store an int passed in register ival to an IEEE 754 single
// precision floating point value at memory location (dst + 4 * wordoffset).
// If the FPU is available, use it for the conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
  if (CpuFeatures::IsSupported(FPU)) {
    CpuFeatures::Scope scope(FPU);
    __ mtc1(ival, f0);
    __ cvt_s_w(f0, f0);
    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);
    __ swc1(f0, MemOperand(scratch1, 0));
  } else {
    // The FPU is not available; do the conversion manually.

    Label not_special, done;
    // Move the sign bit from the source to the destination. This works
    // because the sign bit in the exponent word of the double has the same
    // position and polarity as the 2's complement sign bit in a Smi.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ And(fval, ival, Operand(kBinary32SignMask));
    // Negate the value if it is negative.
    __ subu(scratch1, zero_reg, ival);
    __ movn(ival, scratch1, fval);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // the absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ Branch(&not_special, gt, ival, Operand(1));

    // For 1 or -1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    __ Xor(scratch1, ival, Operand(1));
    __ li(scratch2, exponent_word_for_1);
    __ or_(scratch2, fval, scratch2);
    __ movz(fval, scratch2, scratch1);  // Only if ival is equal to 1.
    __ Branch(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // This gets the wrong answer for 0, but we already checked for that
    // case above.
    Register zeros = scratch2;
    __ clz(zeros, ival);

    // Compute the exponent and or it into the exponent register.
    __ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias);
    __ subu(scratch1, scratch1, zeros);

    __ sll(scratch1, scratch1, kBinary32ExponentShift);
    __ or_(fval, fval, scratch1);

    // Shift up the source, chopping the top bit off.
    __ Addu(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32, which means 0.
    __ sllv(ival, ival, zeros);
    // Or in the top kBinary32MantissaBits (23) bits of the mantissa.
    __ srl(scratch1, ival, kBitsPerInt - kBinary32MantissaBits);
    __ or_(fval, fval, scratch1);

    __ bind(&done);

    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);
    __ sw(fval, MemOperand(scratch1, 0));
  }
}
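
// Reviewer sketch (illustrative, not part of this patch): a worked example
// of the manual path above for ival = -6, with all arithmetic mod 2^32:
//
//   sign     = -6 & 0x80000000                = 0x80000000
//   |ival|   = 6 (binary 110), clz = 29
//   exponent = (31 + 127) - 29                = 129
//   mantissa = (6 << (29 + 1)) >> 9           = 0x00400000  (leading 1 chopped)
//   fval     = 0x80000000 | (129 << 23) | 0x00400000 = 0xC0C00000
//
// and 0xC0C00000 is exactly the IEEE 754 single precision encoding of -6.0f.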


// Convert an unsigned integer with a specified number of leading zeroes in
// its binary representation to an IEEE 754 double.
// The integer to convert is passed in register hiword.
// The resulting double is returned in registers hiword:loword.
// This function does not work correctly for 0.
static void GenerateUInt2Double(MacroAssembler* masm,
                                Register hiword,
                                Register loword,
                                Register scratch,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;

  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;

  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  __ li(scratch, biased_exponent << HeapNumber::kExponentShift);
  if (mantissa_shift_for_hi_word > 0) {
    __ sll(loword, hiword, mantissa_shift_for_lo_word);
    __ srl(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);
  } else {
    __ mov(loword, zero_reg);
    __ sll(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);
  }

  // If the least significant bit of the biased exponent was not 1, it was
  // corrupted by the most significant bit of the mantissa, so we should
  // fix that.
  if (!(biased_exponent & 1)) {
    __ li(scratch, 1 << HeapNumber::kExponentShift);
    __ nor(scratch, scratch, scratch);
    __ and_(hiword, hiword, scratch);
  }
}
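
// Reviewer sketch (illustrative, not part of this patch): a worked example
// for leading_zeroes = 0 and hiword = 0x80000000 (i.e. 2^31):
//
//   meaningful_bits           = 32 - 0 - 1 = 31
//   biased_exponent           = 1023 + 31  = 1054 (0x41E)
//   mantissa_shift_for_hi     = 31 - 20    = 11
//   loword                    = 0x80000000 << 21 = 0          (mod 2^32)
//   hiword                    = (0x80000000 >> 11) | (1054 << 20) = 0x41F00000
//
// The input's implicit leading 1 landed on the exponent's least significant
// bit; since 1054 is even, the fix-up step clears it, giving hiword =
// 0x41E00000. The double with high word 0x41E00000 and low word 0 is
// exactly 2147483648.0 == 2^31.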


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between the holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ sw(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    ASSERT(current->GetPrototype()->IsJSObject());
    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
        Object* lookup_result = NULL;  // Initialization to please the compiler.
        if (!maybe_lookup_result->ToObject(&lookup_result)) {
          set_failure(Failure::cast(maybe_lookup_result));
          return reg;
        }
        name = String::cast(lookup_result);
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
                                                                      miss,
                                                                      reg,
                                                                      name,
                                                                      scratch1,
                                                                      scratch2);
      if (negative_lookup->IsFailure()) {
        set_failure(Failure::cast(negative_lookup));
        return reg;
      }

      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on, the object is in holder_reg.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else if (heap()->InNewSpace(prototype)) {
      // Get the map of the current object.
      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));

      // Branch on the result of the map check.
      __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));

      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
        // Restore the scratch register to be the map of the object. In the
        // new space case below, we load the prototype from the map in
        // the scratch register.
        __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // From now on, the object is in holder_reg.
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      // Branch on the result of the map check.
      __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on, the object is in holder_reg.
      __ li(reg, Operand(Handle<JSObject>(prototype)));
    }

    if (save_at_depth == depth) {
      __ sw(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));

  // Log the check depth.
  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  // Perform a security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed. We also need to check that the
  // property cell for the property is still empty.
  MaybeObject* result = GenerateCheckPropertyCells(masm(),
                                                   object,
                                                   holder,
                                                   name,
                                                   scratch1,
                                                   miss);
  if (result->IsFailure()) set_failure(Failure::cast(result));

  // Return the register containing the holder.
  return reg;
}
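
// Reviewer sketch (illustrative, not part of this patch): the generated
// checks behave like this C++ walk, with every map pointer baked into the
// stub as a constant at compile time:
//
//   JSObject* current = object;
//   while (current != holder) {
//     if (current->map() != expected_map(current)) goto miss;
//     current = prototype(current);   // constant, or loaded from the map
//   }                                 // for new-space prototypes
//   if (holder->map() != expected_map(holder)) goto miss;
//
// 'expected_map' and 'prototype' are placeholders for the embedded
// constants, not real functions.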


void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ And(scratch1, receiver, Operand(kSmiTagMask));
  __ Branch(miss, eq, scratch1, Operand(zero_reg));

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);
  GenerateFastPropertyLoad(masm(), v0, reg, holder, index);
  __ Ret();
}


void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss, scratch1);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ li(v0, Operand(Handle<Object>(value)));
  __ Ret();
}


MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss, scratch1);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);

  // Build the AccessorInfo::args_ list on the stack and push the property
  // name below the exit frame to make the GC aware of them and store
  // pointers to them.
  __ push(receiver);
  __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
  Handle<AccessorInfo> callback_handle(callback);
  if (heap()->InNewSpace(callback_handle->data())) {
    __ li(scratch3, callback_handle);
    __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
  } else {
    __ li(scratch3, Handle<Object>(callback_handle->data()));
  }
  __ Push(reg, scratch3, name_reg);
  __ mov(a2, scratch2);  // Saved in case scratch2 == a1.
  __ mov(a1, sp);  // a1 (first argument - see note below) = Handle<String>

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // NOTE: the O32 ABI requires a0 to hold a special pointer when returning a
  // struct from the function (which is currently the case). This means we pass
  // the arguments in a1-a2 instead of a0-a1. TryCallApiFunctionAndReturn
  // will handle setting up a0.

  const int kApiStackSpace = 1;

  __ EnterExitFrame(false, kApiStackSpace);
  // Create an AccessorInfo instance on the stack above the exit frame with
  // scratch2 (internal::Object** args_) as the data.
  __ sw(a2, MemOperand(sp, kPointerSize));
  // a2 (second argument - see note above) = AccessorInfo&
  __ Addu(a2, sp, kPointerSize);

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
                        masm()->isolate());
  // 4 args - will be freed later by LeaveExitFrame.
  return masm()->TryCallApiFunctionAndReturn(ref, 4);
}


void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow-ups for interceptor loads are FIELD
  // and CALLBACKS, so we inline only them; other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsAccessorInfo() &&
               AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save the necessary data before invoking the interceptor.
    // Requires a frame to make the GC aware of the pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // The CALLBACKS case needs a receiver to be passed into the C++
      // callback.
      __ Push(receiver, holder_reg, name_reg);
    } else {
      __ Push(holder_reg, name_reg);
    }

    // Invoke the interceptor. Note: the map checks from the receiver to the
    // interceptor's holder have been compiled before (see a caller
    // of this method).
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if the interceptor provided a value for the property. If so,
    // return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
    __ LeaveInternalFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from the interceptor's holder to the lookup's
    // holder haven't changed, and load the lookup's holder into the
    // |holder_reg| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found a FIELD property in the prototype chain of the
      // interceptor's holder. Retrieve the field from the field's holder.
      GenerateFastPropertyLoad(masm(), v0, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ Ret();
    } else {
      // We found a CALLBACKS property in the prototype chain of the
      // interceptor's holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to the runtime.
      // Important invariant in the CALLBACKS case: the code above must be
      // structured to never clobber the |receiver| register.
      __ li(scratch2, Handle<AccessorInfo>(callback));
      // holder_reg is either receiver or scratch1.
      if (!receiver.is(holder_reg)) {
        ASSERT(scratch1.is(holder_reg));
        __ Push(receiver, holder_reg);
        __ lw(scratch3,
              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(scratch3, scratch2, name_reg);
      } else {
        __ push(receiver);
        __ lw(scratch3,
              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(holder_reg, scratch3, scratch2, name_reg);
      }

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                            masm()->isolate());
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
    __ TailCallExternalReference(ref, 5, 1);
  }
}


void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ Branch(miss, ne, a2, Operand(Handle<String>(name)));
  }
}


void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
                                                   JSObject* holder,
                                                   String* name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ lw(a0, MemOperand(sp, argc * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ JumpIfSmi(a0, miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
}
275 1441
276 1442
277 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, 1443 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
278 JSFunction* function, 1444 JSFunction* function,
279 Label* miss) { 1445 Label* miss) {
280 UNIMPLEMENTED_MIPS(); 1446 // Get the value from the cell.
1447 __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell)));
1448 __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
1449
1450 // Check that the cell contains the same function.
1451 if (heap()->InNewSpace(function)) {
1452 // We can't embed a pointer to a function in new space so we have
1453 // to verify that the shared function info is unchanged. This has
1454 // the nice side effect that multiple closures based on the same
1455 // function can all use this call IC. Before we load through the
1456 // function, we have to verify that it still is a function.
1457 __ JumpIfSmi(a1, miss);
1458 __ GetObjectType(a1, a3, a3);
1459 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
1460
1461 // Check the shared function info. Make sure it hasn't changed.
1462 __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1463 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1464 __ Branch(miss, ne, t0, Operand(a3));
1465 } else {
1466 __ Branch(miss, ne, a1, Operand(Handle<JSFunction>(function)));
1467 }
281 } 1468 }
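The new-space branch above works because every closure created from the same function literal shares one SharedFunctionInfo, so comparing shared infos lets a single call IC serve all such closures. A minimal sketch of that identity relation, with stand-in types rather than V8's:

    struct SharedFunctionInfo { const char* name; };
    struct JSFunction { SharedFunctionInfo* shared; };

    // Two distinct closures over the same literal compare equal here.
    bool SameUnderlyingFunction(const JSFunction& a, const JSFunction& b) {
      return a.shared == b.shared;
    }

    int main() {
      SharedFunctionInfo info = { "f" };
      JSFunction closure1 = { &info };
      JSFunction closure2 = { &info };  // Same literal, different closure.
      return SameUnderlyingFunction(closure1, closure2) ? 0 : 1;
    }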
282 1469
283 1470
284 MaybeObject* CallStubCompiler::GenerateMissBranch() { 1471 MaybeObject* CallStubCompiler::GenerateMissBranch() {
285 UNIMPLEMENTED_MIPS(); 1472 MaybeObject* maybe_obj = masm()->isolate()->stub_cache()->ComputeCallMiss(
286 return NULL; 1473 arguments().immediate(), kind_);
1474 Object* obj;
1475 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1476 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1477 return obj;
287 } 1478 }
288 1479
289 1480
290 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, 1481 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
291 JSObject* holder, 1482 JSObject* holder,
292 int index, 1483 int index,
293 String* name) { 1484 String* name) {
294 UNIMPLEMENTED_MIPS(); 1485 // ----------- S t a t e -------------
295 return NULL; 1486 // -- a2 : name
1487 // -- ra : return address
1488 // -----------------------------------
1489 Label miss;
1490
1491 GenerateNameCheck(name, &miss);
1492
1493 const int argc = arguments().immediate();
1494
1495 // Get the receiver of the function from the stack into a0.
1496 __ lw(a0, MemOperand(sp, argc * kPointerSize));
1497 // Check that the receiver isn't a smi.
1498 __ JumpIfSmi(a0, &miss, t0);
1499
1500 // Do the right check and compute the holder register.
1501 Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
1502 GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
1503
1504 GenerateCallFunction(masm(), object, arguments(), &miss);
1505
1506 // Handle call cache miss.
1507 __ bind(&miss);
1508 MaybeObject* maybe_result = GenerateMissBranch();
1509 if (maybe_result->IsFailure()) return maybe_result;
1510
1511 // Return the generated code.
1512 return GetCode(FIELD, name);
296 } 1513 }
297 1514
298 1515
299 MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, 1516 MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
300 JSObject* holder, 1517 JSObject* holder,
301 JSGlobalPropertyCell* cell, 1518 JSGlobalPropertyCell* cell,
302 JSFunction* function, 1519 JSFunction* function,
303 String* name) { 1520 String* name) {
304 UNIMPLEMENTED_MIPS(); 1521 // ----------- S t a t e -------------
305 return NULL; 1522 // -- a2 : name
1523 // -- ra : return address
1524 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1525 // -- ...
1526 // -- sp[argc * 4] : receiver
1527 // -----------------------------------
1528
1529 // If object is not an array, bail out to regular call.
1530 if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1531
1532 Label miss;
1533
1534 GenerateNameCheck(name, &miss);
1535
1536 Register receiver = a1;
1537
1538 // Get the receiver from the stack.
1539 const int argc = arguments().immediate();
1540 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1541
1542 // Check that the receiver isn't a smi.
1543 __ JumpIfSmi(receiver, &miss);
1544
1545 // Check that the maps haven't changed.
1546 CheckPrototypes(JSObject::cast(object), receiver,
1547 holder, a3, v0, t0, name, &miss);
1548
1549 if (argc == 0) {
1550 // Nothing to do, just return the length.
1551 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1552 __ Drop(argc + 1);
1553 __ Ret();
1554 } else {
1555 Label call_builtin;
1556
1557 Register elements = a3;
1558 Register end_elements = t1;
1559
1560 // Get the elements array of the object.
1561 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1562
1563 // Check that the elements are in fast mode and writable.
1564 __ CheckMap(elements, v0,
1565 Heap::kFixedArrayMapRootIndex, &call_builtin, true);
1566
1567 if (argc == 1) { // Otherwise fall through to call the builtin.
1568 Label exit, with_write_barrier, attempt_to_grow_elements;
1569
1570 // Get the array's length into v0 and calculate new length.
1571 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1572 STATIC_ASSERT(kSmiTagSize == 1);
1573 STATIC_ASSERT(kSmiTag == 0);
1574 __ Addu(v0, v0, Operand(Smi::FromInt(argc)));
1575
1576 // Get the elements array's length.
1577 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1578
1579 // Check if we could survive without allocation.
1580 __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
1581
1582 // Save new length.
1583 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1584
1585 // Push the element.
1586 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1587 // We may need the end-of-array address below, so compute it now
1588 // and keep it in end_elements.
1589 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1590 __ Addu(end_elements, elements, end_elements);
1591 const int kEndElementsOffset =
1592 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1593 __ sw(t0, MemOperand(end_elements, kEndElementsOffset));
1594 __ Addu(end_elements, end_elements, kPointerSize);
1595
1596 // Check for a smi.
1597 __ JumpIfNotSmi(t0, &with_write_barrier);
1598 __ bind(&exit);
1599 __ Drop(argc + 1);
1600 __ Ret();
1601
1602 __ bind(&with_write_barrier);
1603 __ InNewSpace(elements, t0, eq, &exit);
1604 __ RecordWriteHelper(elements, end_elements, t0);
1605 __ Drop(argc + 1);
1606 __ Ret();
1607
1608 __ bind(&attempt_to_grow_elements);
1609 // v0: array's length + 1.
1610 // t0: elements' length.
1611
1612 if (!FLAG_inline_new) {
1613 __ Branch(&call_builtin);
1614 }
1615
1616 ExternalReference new_space_allocation_top =
1617 ExternalReference::new_space_allocation_top_address(
1618 masm()->isolate());
1619 ExternalReference new_space_allocation_limit =
1620 ExternalReference::new_space_allocation_limit_address(
1621 masm()->isolate());
1622
1623 const int kAllocationDelta = 4;
1624 // Load top and check if it is the end of elements.
1625 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1626 __ Addu(end_elements, elements, end_elements);
1627 __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1628 __ li(t3, Operand(new_space_allocation_top));
1629 __ lw(t2, MemOperand(t3));
1630 __ Branch(&call_builtin, ne, end_elements, Operand(t2));
1631
1632 __ li(t5, Operand(new_space_allocation_limit));
1633 __ lw(t5, MemOperand(t5));
1634 __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
1635 __ Branch(&call_builtin, hi, t2, Operand(t5));
1636
1637 // We fit and could grow elements.
1638 // Update new_space_allocation_top.
1639 __ sw(t2, MemOperand(t3));
1640 // Push the argument.
1641 __ lw(t2, MemOperand(sp, (argc - 1) * kPointerSize));
1642 __ sw(t2, MemOperand(end_elements));
1643 // Fill the rest with holes.
1644 __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1645 for (int i = 1; i < kAllocationDelta; i++) {
1646 __ sw(t2, MemOperand(end_elements, i * kPointerSize));
1647 }
1648
1649 // Update elements' and array's sizes.
1650 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1651 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1652 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1653
1654 // Elements are in new space, so write barrier is not required.
1655 __ Drop(argc + 1);
1656 __ Ret();
1657 }
1658 __ bind(&call_builtin);
1659 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1660 masm()->isolate()),
1661 argc + 1,
1662 1);
1663 }
1664
1665 // Handle call cache miss.
1666 __ bind(&miss);
1667 MaybeObject* maybe_result = GenerateMissBranch();
1668 if (maybe_result->IsFailure()) return maybe_result;
1669
1670 // Return the generated code.
1671 return GetCode(function);
306 } 1672 }
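The fast path above is easier to follow against a scalar model: if the new length fits the backing store, store and bump the length; otherwise, when the store ends exactly at the allocation top, claim kAllocationDelta more slots, store the element, and pre-fill the remaining new slots with holes. A simplified standalone sketch under those assumptions (a fixed-size buffer stands in for the new-space bump allocator; assumes capacity + kAllocationDelta <= 64):

    const int kAllocationDelta = 4;
    const int kHole = -1;  // Stand-in for the-hole sentinel.

    struct Array {
      int length;
      int capacity;
      int elements[64];
    };

    void Push(Array* a, int value) {
      if (a->length < a->capacity) {
        a->elements[a->length++] = value;  // Fast path: room available.
        return;
      }
      // Grow path: store the new element, fill the rest of the freshly
      // claimed slots with holes, then bump capacity and length.
      a->elements[a->length] = value;
      for (int i = 1; i < kAllocationDelta; i++) {
        a->elements[a->length + i] = kHole;
      }
      a->capacity += kAllocationDelta;
      a->length++;
    }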
307 1673
308 1674
309 MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, 1675 MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
310 JSObject* holder, 1676 JSObject* holder,
311 JSGlobalPropertyCell* cell, 1677 JSGlobalPropertyCell* cell,
312 JSFunction* function, 1678 JSFunction* function,
313 String* name) { 1679 String* name) {
314 UNIMPLEMENTED_MIPS(); 1680 // ----------- S t a t e -------------
315 return NULL; 1681 // -- a2 : name
1682 // -- ra : return address
1683 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1684 // -- ...
1685 // -- sp[argc * 4] : receiver
1686 // -----------------------------------
1687
1688 // If object is not an array, bail out to regular call.
1689 if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1690
1691 Label miss, return_undefined, call_builtin;
1692
1693 Register receiver = a1;
1694 Register elements = a3;
1695
1696 GenerateNameCheck(name, &miss);
1697
1698 // Get the receiver from the stack.
1699 const int argc = arguments().immediate();
1700 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1701
1702 // Check that the receiver isn't a smi.
1703 __ JumpIfSmi(receiver, &miss);
1704
1705 // Check that the maps haven't changed.
1706 CheckPrototypes(JSObject::cast(object),
1707 receiver, holder, elements, t0, v0, name, &miss);
1708
1709 // Get the elements array of the object.
1710 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1711
1712 // Check that the elements are in fast mode and writable.
1713 __ CheckMap(elements, v0, Heap::kFixedArrayMapRootIndex, &call_builtin, true);
1714
1715 // Get the array's length into t0 and calculate new length.
1716 __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1717 __ Subu(t0, t0, Operand(Smi::FromInt(1)));
1718 __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
1719
1720 // Get the last element.
1721 __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1722 STATIC_ASSERT(kSmiTagSize == 1);
1723 STATIC_ASSERT(kSmiTag == 0);
1724 // We can't address the last element in one operation. Compute the more
1725 // expensive shift first, and use an offset later on.
1726 __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
1727 __ Addu(elements, elements, t1);
1728 __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1729 __ Branch(&call_builtin, eq, v0, Operand(t2));
1730
1731 // Set the array's length.
1732 __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1733
1734 // Fill with the hole.
1735 __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1736 __ Drop(argc + 1);
1737 __ Ret();
1738
1739 __ bind(&return_undefined);
1740 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1741 __ Drop(argc + 1);
1742 __ Ret();
1743
1744 __ bind(&call_builtin);
1745 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1746 masm()->isolate()),
1747 argc + 1,
1748 1);
1749
1750 // Handle call cache miss.
1751 __ bind(&miss);
1752 MaybeObject* maybe_result = GenerateMissBranch();
1753 if (maybe_result->IsFailure()) return maybe_result;
1754
1755 // Return the generated code.
1756 return GetCode(function);
316 } 1757 }
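A companion sketch for the pop fast path above: compute the decremented length, bail out to the builtin if the last slot holds the hole (the real value may live on the prototype chain), and otherwise commit the new length and re-hole the slot. Uses the same illustrative Array model as the push sketch:

    // Returns false when the stub would defer to the c_ArrayPop builtin.
    bool Pop(Array* a, int* out) {
      if (a->length == 0) {
        *out = 0;  // Stand-in for returning undefined.
        return true;
      }
      int last = a->elements[a->length - 1];
      if (last == kHole) return false;  // Slow path: go to the builtin.
      a->length--;
      a->elements[a->length] = kHole;
      *out = last;
      return true;
    }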
317 1758
318 1759
319 MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( 1760 MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
320 Object* object, 1761 Object* object,
321 JSObject* holder, 1762 JSObject* holder,
322 JSGlobalPropertyCell* cell, 1763 JSGlobalPropertyCell* cell,
323 JSFunction* function, 1764 JSFunction* function,
324 String* name) { 1765 String* name) {
325 UNIMPLEMENTED_MIPS(); 1766 // ----------- S t a t e -------------
326 return NULL; 1767 // -- a2 : function name
1768 // -- ra : return address
1769 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1770 // -- ...
1771 // -- sp[argc * 4] : receiver
1772 // -----------------------------------
1773
1774 // If object is not a string, bail out to regular call.
1775 if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1776
1777 const int argc = arguments().immediate();
1778
1779 Label miss;
1780 Label name_miss;
1781 Label index_out_of_range;
1782
1783 Label* index_out_of_range_label = &index_out_of_range;
1784
1785 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1786 index_out_of_range_label = &miss;
1787 }
1788
1789 GenerateNameCheck(name, &name_miss);
1790
1791 // Check that the maps starting from the prototype haven't changed.
1792 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1793 Context::STRING_FUNCTION_INDEX,
1794 v0,
1795 &miss);
1796 ASSERT(object != holder);
1797 CheckPrototypes(JSObject::cast(object->GetPrototype()), v0, holder,
1798 a1, a3, t0, name, &miss);
1799
1800 Register receiver = a1;
1801 Register index = t1;
1802 Register scratch = a3;
1803 Register result = v0;
1804 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1805 if (argc > 0) {
1806 __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1807 } else {
1808 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1809 }
1810
1811 StringCharCodeAtGenerator char_code_at_generator(receiver,
1812 index,
1813 scratch,
1814 result,
1815 &miss, // When not a string.
1816 &miss, // When not a number.
1817 index_out_of_range_label,
1818 STRING_INDEX_IS_NUMBER);
1819 char_code_at_generator.GenerateFast(masm());
1820 __ Drop(argc + 1);
1821 __ Ret();
1822
1823 StubRuntimeCallHelper call_helper;
1824 char_code_at_generator.GenerateSlow(masm(), call_helper);
1825
1826 if (index_out_of_range.is_linked()) {
1827 __ bind(&index_out_of_range);
1828 __ LoadRoot(v0, Heap::kNanValueRootIndex);
1829 __ Drop(argc + 1);
1830 __ Ret();
1831 }
1832
1833 __ bind(&miss);
1834 // Restore function name in a2.
1835 __ li(a2, Handle<String>(name));
1836 __ bind(&name_miss);
1837 MaybeObject* maybe_result = GenerateMissBranch();
1838 if (maybe_result->IsFailure()) return maybe_result;
1839
1840 // Return the generated code.
1841 return GetCode(function);
327 } 1842 }
328 1843
329 1844
330 MaybeObject* CallStubCompiler::CompileStringCharAtCall( 1845 MaybeObject* CallStubCompiler::CompileStringCharAtCall(
331 Object* object, 1846 Object* object,
332 JSObject* holder, 1847 JSObject* holder,
333 JSGlobalPropertyCell* cell, 1848 JSGlobalPropertyCell* cell,
334 JSFunction* function, 1849 JSFunction* function,
335 String* name) { 1850 String* name) {
336 UNIMPLEMENTED_MIPS(); 1851 // ----------- S t a t e -------------
337 return NULL; 1852 // -- a2 : function name
1853 // -- ra : return address
1854 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1855 // -- ...
1856 // -- sp[argc * 4] : receiver
1857 // -----------------------------------
1858
1859 // If object is not a string, bail out to regular call.
1860 if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1861
1862 const int argc = arguments().immediate();
1863
1864 Label miss;
1865 Label name_miss;
1866 Label index_out_of_range;
1867 Label* index_out_of_range_label = &index_out_of_range;
1868
1869 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1870 index_out_of_range_label = &miss;
1871 }
1872
1873 GenerateNameCheck(name, &name_miss);
1874
1875 // Check that the maps starting from the prototype haven't changed.
1876 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1877 Context::STRING_FUNCTION_INDEX,
1878 v0,
1879 &miss);
1880 ASSERT(object != holder);
1881 CheckPrototypes(JSObject::cast(object->GetPrototype()), v0, holder,
1882 a1, a3, t0, name, &miss);
1883
1884 Register receiver = v0;
1885 Register index = t1;
1886 Register scratch1 = a1;
1887 Register scratch2 = a3;
1888 Register result = v0;
1889 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1890 if (argc > 0) {
1891 __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1892 } else {
1893 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1894 }
1895
1896 StringCharAtGenerator char_at_generator(receiver,
1897 index,
1898 scratch1,
1899 scratch2,
1900 result,
1901 &miss, // When not a string.
1902 &miss, // When not a number.
1903 index_out_of_range_label,
1904 STRING_INDEX_IS_NUMBER);
1905 char_at_generator.GenerateFast(masm());
1906 __ Drop(argc + 1);
1907 __ Ret();
1908
1909 StubRuntimeCallHelper call_helper;
1910 char_at_generator.GenerateSlow(masm(), call_helper);
1911
1912 if (index_out_of_range.is_linked()) {
1913 __ bind(&index_out_of_range);
1914 __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
1915 __ Drop(argc + 1);
1916 __ Ret();
1917 }
1918
1919 __ bind(&miss);
1920 // Restore function name in a2.
1921 __ li(a2, Handle<String>(name));
1922 __ bind(&name_miss);
1923 MaybeObject* maybe_result = GenerateMissBranch();
1924 if (maybe_result->IsFailure()) return maybe_result;
1925
1926 // Return the generated code.
1927 return GetCode(function);
338 } 1928 }
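The two string stubs differ only in their out-of-range result, loaded from root constants above: charCodeAt yields NaN while charAt yields the empty string. A plain C++ rendering of that contract (std::string stands in for V8 strings; single-byte characters only, for simplicity):

    #include <cmath>
    #include <string>

    double CharCodeAt(const std::string& s, int i) {
      if (i < 0 || i >= static_cast<int>(s.size())) return NAN;
      return static_cast<unsigned char>(s[i]);
    }

    std::string CharAt(const std::string& s, int i) {
      if (i < 0 || i >= static_cast<int>(s.size())) return "";
      return std::string(1, s[i]);
    }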
339 1929
340 1930
341 MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall( 1931 MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
342 Object* object, 1932 Object* object,
343 JSObject* holder, 1933 JSObject* holder,
344 JSGlobalPropertyCell* cell, 1934 JSGlobalPropertyCell* cell,
345 JSFunction* function, 1935 JSFunction* function,
346 String* name) { 1936 String* name) {
347 UNIMPLEMENTED_MIPS(); 1937 // ----------- S t a t e -------------
348 return NULL; 1938 // -- a2 : function name
1939 // -- ra : return address
1940 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1941 // -- ...
1942 // -- sp[argc * 4] : receiver
1943 // -----------------------------------
1944
1945 const int argc = arguments().immediate();
1946
1947 // If the object is not a JSObject or we got an unexpected number of
1948 // arguments, bail out to the regular call.
1949 if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
1950
1951 Label miss;
1952 GenerateNameCheck(name, &miss);
1953
1954 if (cell == NULL) {
1955 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1956
1957 STATIC_ASSERT(kSmiTag == 0);
1958 __ JumpIfSmi(a1, &miss);
1959
1960 CheckPrototypes(JSObject::cast(object), a1, holder, v0, a3, t0, name,
1961 &miss);
1962 } else {
1963 ASSERT(cell->value() == function);
1964 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
1965 GenerateLoadFunctionFromCell(cell, function, &miss);
1966 }
1967
1968 // Load the char code argument.
1969 Register code = a1;
1970 __ lw(code, MemOperand(sp, 0 * kPointerSize));
1971
1972 // Check the code is a smi.
1973 Label slow;
1974 STATIC_ASSERT(kSmiTag == 0);
1975 __ JumpIfNotSmi(code, &slow);
1976
1977 // Convert the smi code to uint16.
1978 __ And(code, code, Operand(Smi::FromInt(0xffff)));
1979
1980 StringCharFromCodeGenerator char_from_code_generator(code, v0);
1981 char_from_code_generator.GenerateFast(masm());
1982 __ Drop(argc + 1);
1983 __ Ret();
1984
1985 StubRuntimeCallHelper call_helper;
1986 char_from_code_generator.GenerateSlow(masm(), call_helper);
1987
1988 // Tail call the full function. We do not have to patch the receiver
1989 // because the function makes no use of it.
1990 __ bind(&slow);
1991 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
1992
1993 __ bind(&miss);
1994 // a2: function name.
1995 MaybeObject* maybe_result = GenerateMissBranch();
1996 if (maybe_result->IsFailure()) return maybe_result;
1997
1998 // Return the generated code.
1999 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
349 } 2000 }
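The And with Smi::FromInt(0xffff) above operates directly on the tagged value: with kSmiTag == 0 and a one-bit tag, a smi is value << 1, so masking a tagged smi with a tagged constant masks the payload without untagging. A small standalone check of that property, assuming the 32-bit smi layout used here:

    #include <cassert>
    #include <cstdint>

    const int kSmiTagSize = 1;

    int32_t SmiFromInt(int32_t v) { return v << kSmiTagSize; }
    int32_t SmiToInt(int32_t smi) { return smi >> kSmiTagSize; }

    int main() {
      int32_t code = SmiFromInt(0x12345);
      int32_t masked = code & SmiFromInt(0xffff);  // Still a valid smi.
      assert(SmiToInt(masked) == (0x12345 & 0xffff));
      return 0;
    }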
350 2001
351 2002
352 MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object, 2003 MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
353 JSObject* holder, 2004 JSObject* holder,
354 JSGlobalPropertyCell* cell, 2005 JSGlobalPropertyCell* cell,
355 JSFunction* function, 2006 JSFunction* function,
356 String* name) { 2007 String* name) {
357 UNIMPLEMENTED_MIPS(); 2008 // ----------- S t a t e -------------
358 return NULL; 2009 // -- a2 : function name
2010 // -- ra : return address
2011 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2012 // -- ...
2013 // -- sp[argc * 4] : receiver
2014 // -----------------------------------
2015
2016 if (!CpuFeatures::IsSupported(FPU)) return heap()->undefined_value();
2017 
2018 CpuFeatures::Scope scope_fpu(FPU);
2019
2020 const int argc = arguments().immediate();
2021
2022 // If the object is not a JSObject or we got an unexpected number of
2023 // arguments, bail out to the regular call.
2024 if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
2025
2026 Label miss, slow;
2027 GenerateNameCheck(name, &miss);
2028
2029 if (cell == NULL) {
2030 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2031
2032 STATIC_ASSERT(kSmiTag == 0);
2033 __ JumpIfSmi(a1, &miss);
2034
2035 CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2036 &miss);
2037 } else {
2038 ASSERT(cell->value() == function);
2039 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
2040 GenerateLoadFunctionFromCell(cell, function, &miss);
2041 }
2042
2043 // Load the (only) argument into v0.
2044 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2045
2046 // If the argument is a smi, just return.
2047 STATIC_ASSERT(kSmiTag == 0);
2048 __ And(t0, v0, Operand(kSmiTagMask));
2049 __ Drop(argc + 1, eq, t0, Operand(zero_reg));
2050 __ Ret(eq, t0, Operand(zero_reg));
2051
2052 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, true);
2053
2054 Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;
2055
2056 // FPU is available here (checked above), so use the floor instruction.
2057
2058 // Load the HeapNumber value.
2059 __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2060
2061 // Back up FCSR.
2062 __ cfc1(a3, FCSR);
2063 // Clearing FCSR clears the exception mask with no side-effects.
2064 __ ctc1(zero_reg, FCSR);
2065 // Convert the argument to an integer.
2066 __ floor_w_d(f0, f0);
2067
2068 // Start checking for special cases.
2069 // Get the argument exponent and clear the sign bit.
2070 __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
2071 __ And(t2, t1, Operand(~HeapNumber::kSignMask));
2072 __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
2073
2074 // Retrieve FCSR and check for fpu errors.
2075 __ cfc1(t5, FCSR);
2076 __ srl(t5, t5, kFCSRFlagShift);
2077 // Flag 1 marks an inexact but still usable result, so we ignore it.
2078 __ And(t5, t5, Operand(kFCSRFlagMask ^ 1));
2079 __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));
2080
2081 // Check for NaN, Infinity, and -Infinity.
2082 // They are invariant under a Math.floor call, so just
2083 // return the original argument.
2084 __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
2085 >> HeapNumber::kMantissaBitsInTopWord));
2086 __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
2087 // We had an overflow or underflow in the conversion. Check if we
2088 // have a big exponent.
2089 // If greater or equal, the argument is already rounded and in v0.
2090 __ Branch(&restore_fcsr_and_return, ge, t3,
2091 Operand(HeapNumber::kMantissaBits));
2092 __ Branch(&wont_fit_smi);
2093
2094 __ bind(&no_fpu_error);
2095 // Move the result back to v0.
2096 __ mfc1(v0, f0);
2097 // Check if the result fits into a smi.
2098 __ Addu(a1, v0, Operand(0x40000000));
2099 __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
2100 // Tag the result.
2101 STATIC_ASSERT(kSmiTag == 0);
2102 __ sll(v0, v0, kSmiTagSize);
2103
2104 // Check for -0.
2105 __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
2106 // t1 already holds the HeapNumber exponent.
2107 __ And(t0, t1, Operand(HeapNumber::kSignMask));
2108 // If the sign bit is set the number was -0, so reload the original
2109 // HeapNumber from the stack and return it. Else v0 already holds smi 0.
2110 __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
2111 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2112
2113 __ bind(&restore_fcsr_and_return);
2114 // Restore FCSR and return.
2115 __ ctc1(a3, FCSR);
2116
2117 __ Drop(argc + 1);
2118 __ Ret();
2119
2120 __ bind(&wont_fit_smi);
2121 // Restore FCSR and fall through to the slow case.
2122 __ ctc1(a3, FCSR);
2123
2124 __ bind(&slow);
2125 // Tail call the full function. We do not have to patch the receiver
2126 // because the function makes no use of it.
2127 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2128
2129 __ bind(&miss);
2130 // a2: function name.
2131 MaybeObject* obj = GenerateMissBranch();
2132 if (obj->IsFailure()) return obj;
2133
2134 // Return the generated code.
2135 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
359 } 2136 }
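The smi range check above (add 0x40000000, then test the sign) exploits the 31-bit payload: a 32-bit integer fits a smi exactly when adding 2^30 leaves the result non-negative in two's complement. A standalone check of that bias trick, using unsigned arithmetic so the wrap-around stays well-defined in C++:

    #include <cassert>
    #include <cstdint>

    bool FitsSmi(int32_t v) {
      uint32_t biased = static_cast<uint32_t>(v) + 0x40000000u;
      return static_cast<int32_t>(biased) >= 0;
    }

    int main() {
      assert(FitsSmi(0) && FitsSmi((1 << 30) - 1) && FitsSmi(-(1 << 30)));
      assert(!FitsSmi(1 << 30) && !FitsSmi(-(1 << 30) - 1));
      return 0;
    }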
360 2137
361 2138
362 MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, 2139 MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
363 JSObject* holder, 2140 JSObject* holder,
364 JSGlobalPropertyCell* cell, 2141 JSGlobalPropertyCell* cell,
365 JSFunction* function, 2142 JSFunction* function,
366 String* name) { 2143 String* name) {
367 UNIMPLEMENTED_MIPS(); 2144 // ----------- S t a t e -------------
368 return NULL; 2145 // -- a2 : function name
2146 // -- ra : return address
2147 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2148 // -- ...
2149 // -- sp[argc * 4] : receiver
2150 // -----------------------------------
2151
2152 const int argc = arguments().immediate();
2153
2154 // If the object is not a JSObject or we got an unexpected number of
2155 // arguments, bail out to the regular call.
2156 if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
2157
2158 Label miss;
2159 GenerateNameCheck(name, &miss);
2160
2161 if (cell == NULL) {
2162 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2163
2164 STATIC_ASSERT(kSmiTag == 0);
2165 __ JumpIfSmi(a1, &miss);
2166
2167 CheckPrototypes(JSObject::cast(object), a1, holder, v0, a3, t0, name,
2168 &miss);
2169 } else {
2170 ASSERT(cell->value() == function);
2171 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
2172 GenerateLoadFunctionFromCell(cell, function, &miss);
2173 }
2174
2175 // Load the (only) argument into v0.
2176 __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2177
2178 // Check if the argument is a smi.
2179 Label not_smi;
2180 STATIC_ASSERT(kSmiTag == 0);
2181 __ JumpIfNotSmi(v0, &not_smi);
2182
2183 // Do bitwise not or do nothing depending on the sign of the
2184 // argument.
2185 __ sra(t0, v0, kBitsPerInt - 1);
2186 __ Xor(a1, v0, t0);
2187
2188 // Add 1 or do nothing depending on the sign of the argument.
2189 __ Subu(v0, a1, t0);
2190
2191 // If the result is still negative, go to the slow case.
2192 // This only happens for the most negative smi.
2193 Label slow;
2194 __ Branch(&slow, lt, v0, Operand(zero_reg));
2195
2196 // Smi case done.
2197 __ Drop(argc + 1);
2198 __ Ret();
2199
2200 // Check if the argument is a heap number and load its exponent and
2201 // sign.
2202 __ bind(&not_smi);
2203 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, true);
2204 __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2205
2206 // Check the sign of the argument. If the argument is positive,
2207 // just return it.
2208 Label negative_sign;
2209 __ And(t0, a1, Operand(HeapNumber::kSignMask));
2210 __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
2211 __ Drop(argc + 1);
2212 __ Ret();
2213
2214 // If the argument is negative, clear the sign, and return a new
2215 // number.
2216 __ bind(&negative_sign);
2217 __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
2218 __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2219 __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
2220 __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
2221 __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2222 __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2223 __ Drop(argc + 1);
2224 __ Ret();
2225
2226 // Tail call the full function. We do not have to patch the receiver
2227 // because the function makes no use of it.
2228 __ bind(&slow);
2229 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2230
2231 __ bind(&miss);
2232 // a2: function name.
2233 MaybeObject* maybe_result = GenerateMissBranch();
2234 if (maybe_result->IsFailure()) return maybe_result;
2235
2236 // Return the generated code.
2237 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
369 } 2238 }
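The smi case above computes the absolute value without branches: an arithmetic right shift by 31 yields 0 for non-negative inputs and -1 for negative ones, so (v ^ t) - t conditionally negates. Only the most negative smi overflows, which is exactly what the "still negative" check catches. A standalone sketch, assuming arithmetic right shift of negative ints as on the compilers V8 targets:

    #include <cassert>
    #include <cstdint>

    int32_t BranchlessAbs(int32_t v) {
      int32_t t = v >> 31;  // 0 if v >= 0, -1 if v < 0.
      return (v ^ t) - t;   // Flips the bits and adds 1 only when v < 0.
    }

    int main() {
      assert(BranchlessAbs(5) == 5);
      assert(BranchlessAbs(-5) == 5);
      assert(BranchlessAbs(0) == 0);
      return 0;
    }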
370 2239
371 2240
372 MaybeObject* CallStubCompiler::CompileFastApiCall( 2241 MaybeObject* CallStubCompiler::CompileFastApiCall(
373 const CallOptimization& optimization, 2242 const CallOptimization& optimization,
374 Object* object, 2243 Object* object,
375 JSObject* holder, 2244 JSObject* holder,
376 JSGlobalPropertyCell* cell, 2245 JSGlobalPropertyCell* cell,
377 JSFunction* function, 2246 JSFunction* function,
378 String* name) { 2247 String* name) {
379 UNIMPLEMENTED_MIPS(); 2248
380 return NULL; 2249 Isolate* isolate = masm()->isolate();
2250 Heap* heap = isolate->heap();
2251 Counters* counters = isolate->counters();
2252
2253 ASSERT(optimization.is_simple_api_call());
2254 // Bail out if the object is a global object, as we don't want to
2255 // repatch it to the global receiver.
2256 if (object->IsGlobalObject()) return heap->undefined_value();
2257 if (cell != NULL) return heap->undefined_value();
2258 int depth = optimization.GetPrototypeDepthOfExpectedType(
2259 JSObject::cast(object), holder);
2260 if (depth == kInvalidProtoDepth) return heap->undefined_value();
2261
2262 Label miss, miss_before_stack_reserved;
2263
2264 GenerateNameCheck(name, &miss_before_stack_reserved);
2265
2266 // Get the receiver from the stack.
2267 const int argc = arguments().immediate();
2268 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2269
2270 // Check that the receiver isn't a smi.
2271 __ JumpIfSmi(a1, &miss_before_stack_reserved);
2272
2273 __ IncrementCounter(counters->call_const(), 1, a0, a3);
2274 __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
2275
2276 ReserveSpaceForFastApiCall(masm(), a0);
2277
2278 // Check that the maps haven't changed and find the holder as a side effect.
2279 CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2280 depth, &miss);
2281
2282 MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
2283 if (result->IsFailure()) return result;
2284
2285 __ bind(&miss);
2286 FreeSpaceForFastApiCall(masm());
2287
2288 __ bind(&miss_before_stack_reserved);
2289 MaybeObject* maybe_result = GenerateMissBranch();
2290 if (maybe_result->IsFailure()) return maybe_result;
2291
2292 // Return the generated code.
2293 return GetCode(function);
381 } 2294 }
382 2295
383 2296
384 MaybeObject* CallStubCompiler::CompileCallConstant(Object* object, 2297 MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
385 JSObject* holder, 2298 JSObject* holder,
386 JSFunction* function, 2299 JSFunction* function,
387 String* name, 2300 String* name,
388 CheckType check) { 2301 CheckType check) {
389 UNIMPLEMENTED_MIPS(); 2302 // ----------- S t a t e -------------
390 return NULL; 2303 // -- a2 : name
2304 // -- ra : return address
2305 // -----------------------------------
2306 if (HasCustomCallGenerator(function)) {
2307 MaybeObject* maybe_result = CompileCustomCall(
2308 object, holder, NULL, function, name);
2309 Object* result;
2310 if (!maybe_result->ToObject(&result)) return maybe_result;
2311 // Undefined means bail out to regular compiler.
2312 if (!result->IsUndefined()) return result;
2313 }
2314
2315 Label miss;
2316
2317 GenerateNameCheck(name, &miss);
2318
2319 // Get the receiver from the stack.
2320 const int argc = arguments().immediate();
2321 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2322
2323 // Check that the receiver isn't a smi.
2324 if (check != NUMBER_CHECK) {
2325 __ And(t1, a1, Operand(kSmiTagMask));
2326 __ Branch(&miss, eq, t1, Operand(zero_reg));
2327 }
2328
2329 // Make sure that it's okay not to patch the on-stack receiver
2330 // unless we're doing a receiver map check.
2331 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2332
2333 SharedFunctionInfo* function_info = function->shared();
2334 switch (check) {
2335 case RECEIVER_MAP_CHECK:
2336 __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2337 1, a0, a3);
2338
2339 // Check that the maps haven't changed.
2340 CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2341 &miss);
2342
2343 // Patch the receiver on the stack with the global proxy if
2344 // necessary.
2345 if (object->IsGlobalObject()) {
2346 __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2347 __ sw(a3, MemOperand(sp, argc * kPointerSize));
2348 }
2349 break;
2350
2351 case STRING_CHECK:
2352 if (!function->IsBuiltin() && !function_info->strict_mode()) {
2353 // Calling non-strict non-builtins with a value as the receiver
2354 // requires boxing.
2355 __ jmp(&miss);
2356 } else {
2357 // Check that the object is a two-byte string or a symbol.
2358 __ GetObjectType(a1, a3, a3);
2359 __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2360 // Check that the maps starting from the prototype haven't changed.
2361 GenerateDirectLoadGlobalFunctionPrototype(
2362 masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
2363 CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2364 a1, t0, name, &miss);
2365 }
2366 break;
2367
2368 case NUMBER_CHECK: {
2369 if (!function->IsBuiltin() && !function_info->strict_mode()) {
2370 // Calling non-strict non-builtins with a value as the receiver
2371 // requires boxing.
2372 __ jmp(&miss);
2373 } else {
2374 Label fast;
2375 // Check that the object is a smi or a heap number.
2376 __ And(t1, a1, Operand(kSmiTagMask));
2377 __ Branch(&fast, eq, t1, Operand(zero_reg));
2378 __ GetObjectType(a1, a0, a0);
2379 __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2380 __ bind(&fast);
2381 // Check that the maps starting from the prototype haven't changed.
2382 GenerateDirectLoadGlobalFunctionPrototype(
2383 masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
2384 CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2385 a1, t0, name, &miss);
2386 }
2387 break;
2388 }
2389
2390 case BOOLEAN_CHECK: {
2391 if (!function->IsBuiltin() && !function_info->strict_mode()) {
2392 // Calling non-strict non-builtins with a value as the receiver
2393 // requires boxing.
2394 __ jmp(&miss);
2395 } else {
2396 Label fast;
2397 // Check that the object is a boolean.
2398 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2399 __ Branch(&fast, eq, a1, Operand(t0));
2400 __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2401 __ Branch(&miss, ne, a1, Operand(t0));
2402 __ bind(&fast);
2403 // Check that the maps starting from the prototype haven't changed.
2404 GenerateDirectLoadGlobalFunctionPrototype(
2405 masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
2406 CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2407 a1, t0, name, &miss);
2408 }
2409 break;
2410 }
2411
2412 default:
2413 UNREACHABLE();
2414 }
2415
2416 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2417
2418 // Handle call cache miss.
2419 __ bind(&miss);
2420
2421 MaybeObject* maybe_result = GenerateMissBranch();
2422 if (maybe_result->IsFailure()) return maybe_result;
2423
2424 // Return the generated code.
2425 return GetCode(function);
391 } 2426 }
392 2427
393 2428
394 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, 2429 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
395 JSObject* holder, 2430 JSObject* holder,
396 String* name) { 2431 String* name) {
397 UNIMPLEMENTED_MIPS(); 2432 // ----------- S t a t e -------------
398 return NULL; 2433 // -- a2 : name
2434 // -- ra : return address
2435 // -----------------------------------
2436
2437 Label miss;
2438
2439 GenerateNameCheck(name, &miss);
2440
2441 // Get the number of arguments.
2442 const int argc = arguments().immediate();
2443
2444 LookupResult lookup;
2445 LookupPostInterceptor(holder, name, &lookup);
2446
2447 // Get the receiver from the stack.
2448 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2449
2450 CallInterceptorCompiler compiler(this, arguments(), a2);
2451 MaybeObject* result = compiler.Compile(masm(),
2452 object,
2453 holder,
2454 name,
2455 &lookup,
2456 a1,
2457 a3,
2458 t0,
2459 a0,
2460 &miss);
2461 if (result->IsFailure()) {
2462 return result;
2463 }
2464
2465 // Move returned value, the function to call, to a1.
2466 __ mov(a1, v0);
2467 // Restore receiver.
2468 __ lw(a0, MemOperand(sp, argc * kPointerSize));
2469
2470 GenerateCallFunction(masm(), object, arguments(), &miss);
2471
2472 // Handle call cache miss.
2473 __ bind(&miss);
2474 MaybeObject* maybe_result = GenerateMissBranch();
2475 if (maybe_result->IsFailure()) return maybe_result;
2476
2477 // Return the generated code.
2478 return GetCode(INTERCEPTOR, name);
399 } 2479 }
400 2480
401 2481
402 MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object, 2482 MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
403 GlobalObject* holder, 2483 GlobalObject* holder,
404 JSGlobalPropertyCell* cell, 2484 JSGlobalPropertyCell* cell,
405 JSFunction* function, 2485 JSFunction* function,
406 String* name) { 2486 String* name) {
407 UNIMPLEMENTED_MIPS(); 2487 // ----------- S t a t e -------------
408 return NULL; 2488 // -- a2 : name
2489 // -- ra : return address
2490 // -----------------------------------
2491
2492 if (HasCustomCallGenerator(function)) {
2493 MaybeObject* maybe_result = CompileCustomCall(
2494 object, holder, cell, function, name);
2495 Object* result;
2496 if (!maybe_result->ToObject(&result)) return maybe_result;
2497 // Undefined means bail out to regular compiler.
2498 if (!result->IsUndefined()) return result;
2499 }
2500
2501 Label miss;
2502
2503 GenerateNameCheck(name, &miss);
2504
2505 // Get the number of arguments.
2506 const int argc = arguments().immediate();
2507
2508 GenerateGlobalReceiverCheck(object, holder, name, &miss);
2509 GenerateLoadFunctionFromCell(cell, function, &miss);
2510
2511 // Patch the receiver on the stack with the global proxy if
2512 // necessary.
2513 if (object->IsGlobalObject()) {
2514 __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
2515 __ sw(a3, MemOperand(sp, argc * kPointerSize));
2516 }
2517
2518 // Set up the context (function already in a1).
2519 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2520
2521 // Jump to the cached code (tail call).
2522 Counters* counters = masm()->isolate()->counters();
2523 __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
2524 ASSERT(function->is_compiled());
2525 Handle<Code> code(function->code());
2526 ParameterCount expected(function->shared()->formal_parameter_count());
2527 if (V8::UseCrankshaft()) {
2528 UNIMPLEMENTED_MIPS();
2529 } else {
2530 __ InvokeCode(code, expected, arguments(),
2531 RelocInfo::CODE_TARGET, JUMP_FUNCTION);
2532 }
2533
2534 // Handle call cache miss.
2535 __ bind(&miss);
2536 __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
2537 MaybeObject* maybe_result = GenerateMissBranch();
2538 if (maybe_result->IsFailure()) return maybe_result;
2539
2540 // Return the generated code.
2541 return GetCode(NORMAL, name);
409 } 2542 }
410 2543
411 2544
412 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, 2545 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
413 int index, 2546 int index,
414 Map* transition, 2547 Map* transition,
415 String* name) { 2548 String* name) {
416 UNIMPLEMENTED_MIPS(); 2549 // ----------- S t a t e -------------
417 return NULL; 2550 // -- a0 : value
2551 // -- a1 : receiver
2552 // -- a2 : name
2553 // -- ra : return address
2554 // -----------------------------------
2555 Label miss;
2556
2557 // Name register might be clobbered.
2558 GenerateStoreField(masm(),
2559 object,
2560 index,
2561 transition,
2562 a1, a2, a3,
2563 &miss);
2564 __ bind(&miss);
2565 __ li(a2, Operand(Handle<String>(name))); // Restore name.
2566 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2567 __ Jump(ic, RelocInfo::CODE_TARGET);
2568
2569 // Return the generated code.
2570 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
418 } 2571 }
419 2572
420 2573
421 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, 2574 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
422 AccessorInfo* callback, 2575 AccessorInfo* callback,
423 String* name) { 2576 String* name) {
424 UNIMPLEMENTED_MIPS(); 2577 // ----------- S t a t e -------------
425 return NULL; 2578 // -- a0 : value
2579 // -- a1 : receiver
2580 // -- a2 : name
2581 // -- ra : return address
2582 // -----------------------------------
2583 Label miss;
2584
2585 // Check that the object isn't a smi.
2586 __ JumpIfSmi(a1, &miss);
2587
2588 // Check that the map of the object hasn't changed.
2589 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2590 __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2591
2592 // Perform global security token check if needed.
2593 if (object->IsJSGlobalProxy()) {
2594 __ CheckAccessGlobalProxy(a1, a3, &miss);
2595 }
2596
2597 // The stub is never generated for non-global objects that require
2598 // access checks.
2599 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2600
2601 __ push(a1); // Receiver.
2602 __ li(a3, Operand(Handle<AccessorInfo>(callback))); // Callback info.
2603 __ Push(a3, a2, a0);
2604
2605 // Do tail-call to the runtime system.
2606 ExternalReference store_callback_property =
2607 ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2608 masm()->isolate());
2609 __ TailCallExternalReference(store_callback_property, 4, 1);
2610
2611 // Handle store cache miss.
2612 __ bind(&miss);
2613 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2614 __ Jump(ic, RelocInfo::CODE_TARGET);
2615
2616 // Return the generated code.
2617 return GetCode(CALLBACKS, name);
426 } 2618 }
427 2619
428 2620
429 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, 2621 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
430 String* name) { 2622 String* name) {
431 UNIMPLEMENTED_MIPS(); 2623 // ----------- S t a t e -------------
432 return NULL; 2624 // -- a0 : value
2625 // -- a1 : receiver
2626 // -- a2 : name
2627 // -- ra : return address
2628 // -----------------------------------
2629 Label miss;
2630
2631 // Check that the object isn't a smi.
2632 __ JumpIfSmi(a1, &miss);
2633
2634 // Check that the map of the object hasn't changed.
2635 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2636 __ Branch(&miss, ne, a3, Operand(Handle<Map>(receiver->map())));
2637
2638 // Perform global security token check if needed.
2639 if (receiver->IsJSGlobalProxy()) {
2640 __ CheckAccessGlobalProxy(a1, a3, &miss);
2641 }
2642
2643 // Stub is never generated for non-global objects that require access
2644 // checks.
2645 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2646
2647 __ Push(a1, a2, a0); // Receiver, name, value.
2648
2649 __ li(a0, Operand(Smi::FromInt(strict_mode_)));
2650 __ push(a0); // Strict mode.
2651
2652 // Do tail-call to the runtime system.
2653 ExternalReference store_ic_property =
2654 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2655 masm()->isolate());
2656 __ TailCallExternalReference(store_ic_property, 4, 1);
2657
2658 // Handle store cache miss.
2659 __ bind(&miss);
2660 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2661 __ Jump(ic, RelocInfo::CODE_TARGET);
2662
2663 // Return the generated code.
2664 return GetCode(INTERCEPTOR, name);
433 } 2665 }
434 2666
435 2667
436 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, 2668 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
437 JSGlobalPropertyCell* cell, 2669 JSGlobalPropertyCell* cell,
438 String* name) { 2670 String* name) {
439 UNIMPLEMENTED_MIPS(); 2671 // ----------- S t a t e -------------
440 return NULL; 2672 // -- a0 : value
2673 // -- a1 : receiver
2674 // -- a2 : name
2675 // -- ra : return address
2676 // -----------------------------------
2677 Label miss;
2678
2679 // Check that the map of the global has not changed.
2680 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2681 __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2682
2683 // Check that the value in the cell is not the hole. If it is, this
2684 // cell could have been deleted, and reintroducing the global would
2685 // require updating the property details in the global object's
2686 // property dictionary. We bail out to the runtime system to do that.
2687 __ li(t0, Operand(Handle<JSGlobalPropertyCell>(cell)));
2688 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
2689 __ lw(t2, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2690 __ Branch(&miss, eq, t1, Operand(t2));
2691
2692 // Store the value in the cell.
2693 __ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2694 __ mov(v0, a0); // Stored value must be returned in v0.
2695 Counters* counters = masm()->isolate()->counters();
2696 __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);
2697 __ Ret();
2698
2699 // Handle store cache miss.
2700 __ bind(&miss);
2701 __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
2702 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2703 __ Jump(ic, RelocInfo::CODE_TARGET);
2704
2705 // Return the generated code.
2706 return GetCode(NORMAL, name);
441 } 2707 }
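The hole check above encodes a simple contract: generated code may write a global's property cell only while the cell does not hold the hole sentinel; a deleted-then-recreated property must go through the runtime so its property details get fixed up. A hedged scalar model of that contract, with stand-in types rather than V8's JSGlobalPropertyCell:

    struct PropertyCell {
      int value;
      bool is_hole;  // Stand-in for holding the-hole value.
    };

    // Returns false when the stub would miss to the runtime.
    bool FastStore(PropertyCell* cell, int value) {
      if (cell->is_hole) return false;
      cell->value = value;
      return true;
    }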
442 2708
443 2709
444 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, 2710 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
445 JSObject* object, 2711 JSObject* object,
446 JSObject* last) { 2712 JSObject* last) {
447 UNIMPLEMENTED_MIPS(); 2713 // ----------- S t a t e -------------
448 return NULL; 2714 // -- a0 : receiver
449 } 2715 // -- ra : return address
450 2716 // -----------------------------------
451 2717 Label miss;
2718
2719 // Check that the receiver is not a smi.
2720 __ JumpIfSmi(a0, &miss);
2721
2722 // Check the maps of the full prototype chain.
2723 CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);
2724
2725 // If the last object in the prototype chain is a global object,
2726 // check that the global property cell is empty.
2727 if (last->IsGlobalObject()) {
2728 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2729 GlobalObject::cast(last),
2730 name,
2731 a1,
2732 &miss);
2733 if (cell->IsFailure()) {
2734 miss.Unuse();
2735 return cell;
2736 }
2737 }
2738
2739 // Return undefined if the maps of the full prototype chain are still the same.
2740 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2741 __ Ret();
2742
2743 __ bind(&miss);
2744 GenerateLoadMiss(masm(), Code::LOAD_IC);
2745
2746 // Return the generated code.
2747 return GetCode(NONEXISTENT, heap()->empty_string());
2748 }
2749
2750
452 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, 2751 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
453 JSObject* holder, 2752 JSObject* holder,
454 int index, 2753 int index,
455 String* name) { 2754 String* name) {
456 UNIMPLEMENTED_MIPS(); 2755 // ----------- S t a t e -------------
457 return NULL; 2756 // -- a0 : receiver
2757 // -- a2 : name
2758 // -- ra : return address
2759 // -----------------------------------
2760 Label miss;
2761
2762 __ mov(v0, a0);
2763
2764 GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);
2765 __ bind(&miss);
2766 GenerateLoadMiss(masm(), Code::LOAD_IC);
2767
2768 // Return the generated code.
2769 return GetCode(FIELD, name);
458 } 2770 }
459 2771
460 2772
461 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, 2773 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
462 JSObject* object, 2774 JSObject* object,
463 JSObject* holder, 2775 JSObject* holder,
464 AccessorInfo* callback) { 2776 AccessorInfo* callback) {
465 UNIMPLEMENTED_MIPS(); 2777 // ----------- S t a t e -------------
466 return NULL; 2778 // -- a0 : receiver
2779 // -- a2 : name
2780 // -- ra : return address
2781 // -----------------------------------
2782 Label miss;
2783
2784 MaybeObject* result = GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0,
2785 callback, name, &miss);
2786 if (result->IsFailure()) {
2787 miss.Unuse();
2788 return result;
2789 }
2790
2791 __ bind(&miss);
2792 GenerateLoadMiss(masm(), Code::LOAD_IC);
2793
2794 // Return the generated code.
2795 return GetCode(CALLBACKS, name);
467 } 2796 }
468 2797
469 2798
470 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, 2799 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
471 JSObject* holder, 2800 JSObject* holder,
472 Object* value, 2801 Object* value,
473 String* name) { 2802 String* name) {
474 UNIMPLEMENTED_MIPS(); 2803 // ----------- S t a t e -------------
475 return NULL; 2804 // -- a0 : receiver
2805 // -- a2 : name
2806 // -- ra : return address
2807 // -----------------------------------
2808 Label miss;
2809
2810 GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);
2811 __ bind(&miss);
2812 GenerateLoadMiss(masm(), Code::LOAD_IC);
2813
2814 // Return the generated code.
2815 return GetCode(CONSTANT_FUNCTION, name);
476 } 2816 }
477 2817
478 2818
479 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, 2819 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
480 JSObject* holder, 2820 JSObject* holder,
481 String* name) { 2821 String* name) {
482 UNIMPLEMENTED_MIPS(); 2822 // ----------- S t a t e -------------
483 return NULL; 2823 // -- a0 : receiver
2824 // -- a2 : name
2825 // -- ra : return address
2826 // -- [sp] : receiver
2827 // -----------------------------------
2828 Label miss;
2829
2830 LookupResult lookup;
2831 LookupPostInterceptor(holder, name, &lookup);
2832 GenerateLoadInterceptor(object,
2833 holder,
2834 &lookup,
2835 a0,
2836 a2,
2837 a3,
2838 a1,
2839 t0,
2840 name,
2841 &miss);
2842 __ bind(&miss);
2843 GenerateLoadMiss(masm(), Code::LOAD_IC);
2844
2845 // Return the generated code.
2846 return GetCode(INTERCEPTOR, name);
484 } 2847 }
485 2848
486 2849
487 MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object, 2850 MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
488 GlobalObject* holder, 2851 GlobalObject* holder,
489 JSGlobalPropertyCell* cell, 2852 JSGlobalPropertyCell* cell,
490 String* name, 2853 String* name,
491 bool is_dont_delete) { 2854 bool is_dont_delete) {
492 UNIMPLEMENTED_MIPS(); 2855 // ----------- S t a t e -------------
493 return NULL; 2856 // -- a0 : receiver
2857 // -- a2 : name
2858 // -- ra : return address
2859 // -----------------------------------
2860 Label miss;
2861
2862 // If the object is the holder then we know it's a global object,
2863 // which can only happen for contextual calls. In this case, the
2864 // receiver cannot be a smi.
2865 if (object != holder) {
2866 __ And(t0, a0, Operand(kSmiTagMask));
2867 __ Branch(&miss, eq, t0, Operand(zero_reg));
2868 }
2869
2870 // Check that the map of the global has not changed.
2871 CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);
2872
2873 // Get the value from the cell.
2874 __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell)));
2875 __ lw(t0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
2876
2877 // Check for a deleted property if the property can actually be deleted.
2878 if (!is_dont_delete) {
2879 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2880 __ Branch(&miss, eq, t0, Operand(at));
2881 }
2882
2883 __ mov(v0, t0);
2884 Counters* counters = masm()->isolate()->counters();
2885 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
2886 __ Ret();
2887
2888 __ bind(&miss);
2889 __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);
2890 GenerateLoadMiss(masm(), Code::LOAD_IC);
2891
2892 // Return the generated code.
2893 return GetCode(NORMAL, name);
494 } 2894 }
495 2895
496 2896
497 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, 2897 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
498 JSObject* receiver, 2898 JSObject* receiver,
499 JSObject* holder, 2899 JSObject* holder,
500 int index) { 2900 int index) {
501 UNIMPLEMENTED_MIPS(); 2901 // ----------- S t a t e -------------
502 return NULL; 2902 // -- ra : return address
2903 // -- a0 : key
2904 // -- a1 : receiver
2905 // -----------------------------------
2906 Label miss;
2907
2908 // Check the key is the cached one.
2909 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2910
2911 GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);
2912 __ bind(&miss);
2913 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2914
2915 return GetCode(FIELD, name);
503 } 2916 }
504 2917
505 2918
506 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( 2919 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
507 String* name, 2920 String* name,
508 JSObject* receiver, 2921 JSObject* receiver,
509 JSObject* holder, 2922 JSObject* holder,
510 AccessorInfo* callback) { 2923 AccessorInfo* callback) {
511 UNIMPLEMENTED_MIPS(); 2924 // ----------- S t a t e -------------
512 return NULL; 2925 // -- ra : return address
2926 // -- a0 : key
2927 // -- a1 : receiver
2928 // -----------------------------------
2929 Label miss;
2930
2931 // Check the key is the cached one.
2932 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2933
2934 MaybeObject* result = GenerateLoadCallback(receiver, holder, a1, a0, a2, a3,
2935 t0, callback, name, &miss);
2936 if (result->IsFailure()) {
2937 miss.Unuse();
2938 return result;
2939 }
2940
2941 __ bind(&miss);
2942 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2943
2944 return GetCode(CALLBACKS, name);
513 } 2945 }
514 2946
515 2947
516 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, 2948 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
517 JSObject* receiver, 2949 JSObject* receiver,
518 JSObject* holder, 2950 JSObject* holder,
519 Object* value) { 2951 Object* value) {
520 UNIMPLEMENTED_MIPS(); 2952 // ----------- S t a t e -------------
521 return NULL; 2953 // -- ra : return address
2954 // -- a0 : key
2955 // -- a1 : receiver
2956 // -----------------------------------
2957 Label miss;
2958
2959 // Check the key is the cached one.
2960 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2961
2962 GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
2963 __ bind(&miss);
2964 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2965
2966 // Return the generated code.
2967 return GetCode(CONSTANT_FUNCTION, name);
522 } 2968 }
523 2969
524 2970
525 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2971 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
526 JSObject* holder, 2972 JSObject* holder,
527 String* name) { 2973 String* name) {
528 UNIMPLEMENTED_MIPS(); 2974 // ----------- S t a t e -------------
529 return NULL; 2975 // -- ra : return address
2976 // -- a0 : key
2977 // -- a1 : receiver
2978 // -----------------------------------
2979 Label miss;
2980
2981 // Check the key is the cached one.
2982 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2983
2984 LookupResult lookup;
2985 LookupPostInterceptor(holder, name, &lookup);
2986 GenerateLoadInterceptor(receiver,
2987 holder,
2988 &lookup,
2989 a1,
2990 a0,
2991 a2,
2992 a3,
2993 t0,
2994 name,
2995 &miss);
2996 __ bind(&miss);
2997 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2998
2999 return GetCode(INTERCEPTOR, name);
530 } 3000 }
531 3001
532 3002
533 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) { 3003 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
534 UNIMPLEMENTED_MIPS(); 3004 // ----------- S t a t e -------------
535 return NULL; 3005 // -- ra : return address
3006 // -- a0 : key
3007 // -- a1 : receiver
3008 // -----------------------------------
3009 Label miss;
3010
3011 // Check the key is the cached one.
3012 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3013
3014 GenerateLoadArrayLength(masm(), a1, a2, &miss);
3015 __ bind(&miss);
3016 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3017
3018 return GetCode(CALLBACKS, name);
536 } 3019 }
537 3020
538 3021
539 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { 3022 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
540 UNIMPLEMENTED_MIPS(); 3023 // ----------- S t a t e -------------
541 return NULL; 3024 // -- ra : return address
3025 // -- a0 : key
3026 // -- a1 : receiver
3027 // -----------------------------------
3028 Label miss;
3029
3030 Counters* counters = masm()->isolate()->counters();
3031 __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3032
3033 // Check the key is the cached one.
3034 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3035
3036 GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);
3037 __ bind(&miss);
3038 __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3039
3040 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3041
3042 return GetCode(CALLBACKS, name);
542 } 3043 }
543 3044
544 3045
545 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { 3046 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
546 UNIMPLEMENTED_MIPS(); 3047 // ----------- S t a t e -------------
547 return NULL; 3048 // -- ra : return address
3049 // -- a0 : key
3050 // -- a1 : receiver
3051 // -----------------------------------
3052 Label miss;
3053
3054 Counters* counters = masm()->isolate()->counters();
3055 __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3056
3057 // Check the name hasn't changed.
3058 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3059
3060 GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);
3061 __ bind(&miss);
3062 __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3063 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3064
3065 return GetCode(CALLBACKS, name);
548 } 3066 }
549 3067
550 3068
551 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { 3069 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
552 UNIMPLEMENTED_MIPS(); 3070 // ----------- S t a t e -------------
553 return NULL; 3071 // -- ra : return address
3072 // -- a0 : key
3073 // -- a1 : receiver
3074 // -----------------------------------
3075 Label miss;
3076
3077 // Check that the receiver isn't a smi.
3078 __ JumpIfSmi(a1, &miss);
3079
3080 // Check that the map matches.
3081 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
3082 __ Branch(&miss, ne, a2, Operand(Handle<Map>(receiver->map())));
3083
3084 // Check that the key is a smi.
3085 __ JumpIfNotSmi(a0, &miss);
3086
3087 // Get the elements array.
3088 __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
3089 __ AssertFastElements(a2);
3090
3091 // Check that the key is within bounds.
3092 __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
3093 __ Branch(&miss, hs, a0, Operand(a3));
3094
3095 // Load the result and make sure it's not the hole.
3096 __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3097 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
3098 __ sll(t1, a0, kPointerSizeLog2 - kSmiTagSize);
3099 __ Addu(t1, t1, a3);
3100 __ lw(t0, MemOperand(t1));
3101 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
3102 __ Branch(&miss, eq, t0, Operand(t1));
3103 __ mov(v0, t0);
3104 __ Ret();
3105
3106 __ bind(&miss);
3107 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3108
3109 // Return the generated code.
3110 return GetCode(NORMAL, NULL);
554 } 3111 }
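
For reference: the element access above leans on smi tagging to do half of the index arithmetic. A minimal C++ sketch of the same computation, assuming the 32-bit layout used here (kSmiTag == 0, kSmiTagSize == 1, kPointerSizeLog2 == 2); the function name is illustrative, not part of the patch:

#include <cassert>
#include <cstdint>

// A 32-bit smi stores the integer shifted left by one (tag bit is 0), so a
// smi key already equals index * 2; shifting left by
// (kPointerSizeLog2 - kSmiTagSize) == 1 more yields index * 4, the byte
// offset of a pointer-sized element.
int32_t SmiKeyToByteOffset(int32_t smi_key) {
  return smi_key << (2 - 1);
}

int main() {
  int32_t smi_for_3 = 3 << 1;  // smi encoding of the index 3
  assert(SmiKeyToByteOffset(smi_for_3) == 3 * 4);
  return 0;
}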
555 3112
556 3113
557 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, 3114 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
558 int index, 3115 int index,
559 Map* transition, 3116 Map* transition,
560 String* name) { 3117 String* name) {
561 UNIMPLEMENTED_MIPS(); 3118 // ----------- S t a t e -------------
562 return NULL; 3119 // -- a0 : value
3120 // -- a1 : key
3121 // -- a2 : receiver
3122 // -- ra : return address
3123 // -----------------------------------
3124
3125 Label miss;
3126
3127 Counters* counters = masm()->isolate()->counters();
3128 __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);
3129
3130 // Check that the name has not changed.
3131 __ Branch(&miss, ne, a1, Operand(Handle<String>(name)));
3132
3133 // a3 is used as a scratch register. a1 and a2 keep their values if a jump
3134 // to the miss label is generated.
3135 GenerateStoreField(masm(),
3136 object,
3137 index,
3138 transition,
3139 a2, a1, a3,
3140 &miss);
3141 __ bind(&miss);
3142
3143 __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
3144 Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3145 __ Jump(ic, RelocInfo::CODE_TARGET);
3146
3147 // Return the generated code.
3148 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
563 } 3149 }
564 3150
565 3151
566 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( 3152 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
567 JSObject* receiver) { 3153 JSObject* receiver) {
568 UNIMPLEMENTED_MIPS(); 3154 // ----------- S t a t e -------------
569 return NULL; 3155 // -- a0 : value
3156 // -- a1 : key
3157 // -- a2 : receiver
3158 // -- ra : return address
3159 // -- a3 : scratch
3160 // -- t0 : scratch (elements)
3161 // -----------------------------------
3162 Label miss;
3163 Register value_reg = a0;
3164 Register key_reg = a1;
3165 Register receiver_reg = a2;
3166 Register scratch = a3;
3167 Register elements_reg = t0;
3168
3169 // Check that the receiver isn't a smi.
3170 __ JumpIfSmi(receiver_reg, &miss);
3171
3172 // Check that the map matches.
3173 __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
3174 __ Branch(&miss, ne, scratch, Operand(Handle<Map>(receiver->map())));
3175
3176 // Check that the key is a smi.
3177 __ JumpIfNotSmi(key_reg, &miss);
3178
3179 // Get the elements array and make sure it is a fast elements array, not copy-on-write ('cow').
3180 __ lw(elements_reg,
3181 FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
3182 __ lw(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
3183 __ Branch(&miss, ne, scratch,
3184 Operand(Handle<Map>(FACTORY->fixed_array_map())));
3185
3186 // Check that the key is within bounds.
3187 if (receiver->IsJSArray()) {
3188 __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
3189 } else {
3190 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
3191 }
3192 // Compare smis.
3193 __ Branch(&miss, hs, key_reg, Operand(scratch));
3194 __ Addu(scratch,
3195 elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3196 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
3197 __ sll(key_reg, key_reg, kPointerSizeLog2 - kSmiTagSize);
3198 __ Addu(v0, scratch, key_reg);
3199 __ sw(value_reg, MemOperand(v0));
3200 __ RecordWrite(scratch, Operand(key_reg), receiver_reg, elements_reg);
3201
3202 // value_reg (a0) is preserved.
3203 // Done.
3204 __ mov(v0, value_reg);
3205 __ Ret();
3206
3207 __ bind(&miss);
3208 Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3209 __ Jump(ic, RelocInfo::CODE_TARGET);
3210
3211 // Return the generated code.
3212 return GetCode(NORMAL, NULL);
570 } 3213 }
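
A note on the bounds check above: key_reg and scratch both hold smis, and a single unsigned comparison (hs) rejects negative keys as well as out-of-range ones, because a negative word reinterpreted as unsigned becomes very large; tagging does not disturb this, since it is just a left shift of both operands. A minimal C++ sketch of the idea (names are illustrative):

#include <cassert>
#include <cstdint>

bool KeyInBounds(int32_t key, int32_t length) {
  // One unsigned compare replaces "key >= 0 && key < length": a negative
  // key wraps to a value >= 2^31 and always fails the test.
  return static_cast<uint32_t>(key) < static_cast<uint32_t>(length);
}

int main() {
  assert(KeyInBounds(3, 10));
  assert(!KeyInBounds(10, 10));  // one past the end
  assert(!KeyInBounds(-1, 10));  // wraps to 0xFFFFFFFF
  return 0;
}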
571 3214
572 3215
573 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { 3216 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
574 UNIMPLEMENTED_MIPS(); 3217 // a0 : argc
575 return NULL; 3218 // a1 : constructor
576 } 3219 // ra : return address
577 3220 // [sp] : last argument
578 3221 Label generic_stub_call;
3222
3223 // Use t7 for holding undefined which is used in several places below.
3224 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
3225
3226 #ifdef ENABLE_DEBUGGER_SUPPORT
3227 // Check to see whether there are any break points in the function code. If
3228 // there are, jump to the generic constructor stub, which calls the actual
3229 // code for the function, thereby hitting the break points.
3230 __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
3231 __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset));
3232 __ Branch(&generic_stub_call, ne, a2, Operand(t7));
3233 #endif
3234
3235 // Load the initial map and verify that it is in fact a map.
3236 // a1: constructor function
3237 // t7: undefined
3238 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
3239 __ And(t0, a2, Operand(kSmiTagMask));
3240 __ Branch(&generic_stub_call, eq, t0, Operand(zero_reg));
3241 __ GetObjectType(a2, a3, t0);
3242 __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
3243
3244 #ifdef DEBUG
3245 // Cannot construct functions this way.
3246 // a0: argc
3247 // a1: constructor function
3248 // a2: initial map
3249 // t7: undefined
3250 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3251 __ Check(ne, "Function constructed by construct stub.",
3252 a3, Operand(JS_FUNCTION_TYPE));
3253 #endif
3254
3255 // Now allocate the JSObject in new space.
3256 // a0: argc
3257 // a1: constructor function
3258 // a2: initial map
3259 // t7: undefined
3260 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
3261 __ AllocateInNewSpace(a3,
3262 t4,
3263 t5,
3264 t6,
3265 &generic_stub_call,
3266 SIZE_IN_WORDS);
3267
3268 // The JSObject has been allocated; now initialize the fields. The map is set
3269 // to the initial map, and properties and elements to the empty fixed array.
3270 // a0: argc
3271 // a1: constructor function
3272 // a2: initial map
3273 // a3: object size (in words)
3274 // t4: JSObject (not tagged)
3275 // t7: undefined
3276 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
3277 __ mov(t5, t4);
3278 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
3279 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
3280 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
3281 __ Addu(t5, t5, Operand(3 * kPointerSize));
3282 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3283 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3284 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3285
3286
3287 // Calculate the location of the first argument. The stack contains only the
3288 // argc arguments.
3289 __ sll(a1, a0, kPointerSizeLog2);
3290 __ Addu(a1, a1, sp);
3291
3292 // Fill all the in-object properties with undefined.
3293 // a0: argc
3294 // a1: first argument
3295 // a3: object size (in words)
3296 // t4: JSObject (not tagged)
3297 // t5: First in-object property of JSObject (not tagged)
3298 // t7: undefined
3299 // Fill the initialized properties with a constant value or a passed argument,
3300 // depending on the this.x = ...; assignment in the function.
3301 SharedFunctionInfo* shared = function->shared();
3302 for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3303 if (shared->IsThisPropertyAssignmentArgument(i)) {
3304 Label not_passed, next;
3305 // Check if the argument assigned to the property is actually passed.
3306 int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3307 __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
3308 // Argument passed - find it on the stack.
3309 __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
3310 __ sw(a2, MemOperand(t5));
3311 __ Addu(t5, t5, kPointerSize);
3312 __ jmp(&next);
3313 __ bind(&not_passed);
3314 // Set the property to undefined.
3315 __ sw(t7, MemOperand(t5));
3316 __ Addu(t5, t5, Operand(kPointerSize));
3317 __ bind(&next);
3318 } else {
3319 // Set the property to the constant value.
3320 Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3321 __ li(a2, Operand(constant));
3322 __ sw(a2, MemOperand(t5));
3323 __ Addu(t5, t5, kPointerSize);
3324 }
3325 }
3326
3327 // Fill the unused in-object property fields with undefined.
3328 ASSERT(function->has_initial_map());
3329 for (int i = shared->this_property_assignments_count();
3330 i < function->initial_map()->inobject_properties();
3331 i++) {
3332 __ sw(t7, MemOperand(t5));
3333 __ Addu(t5, t5, kPointerSize);
3334 }
3335
3336 // a0: argc
3337 // t4: JSObject (not tagged)
3338 // Move argc to a1, move the JSObject to be returned into v0, and tag it.
3339 __ mov(a1, a0);
3340 __ mov(v0, t4);
3341 __ Or(v0, v0, Operand(kHeapObjectTag));
3342
3343 // v0: JSObject
3344 // a1: argc
3345 // Remove caller arguments and receiver from the stack and return.
3346 __ sll(t0, a1, kPointerSizeLog2);
3347 __ Addu(sp, sp, t0);
3348 __ Addu(sp, sp, Operand(kPointerSize));
3349 Counters* counters = masm()->isolate()->counters();
3350 __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
3351 __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);
3352 __ Ret();
3353
3354 // Jump to the generic stub in case the specialized code cannot handle the
3355 // construction.
3356 __ bind(&generic_stub_call);
3357 Handle<Code> generic_construct_stub =
3358 masm()->isolate()->builtins()->JSConstructStubGeneric();
3359 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3360
3361 // Return the generated code.
3362 return GetCode();
3363 }
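
The argument loads in the property-filling loop above use negative offsets from a1, which was set to sp + argc * kPointerSize, i.e. one word past the slot of argument 0. A minimal C++ sketch of that slot arithmetic, assuming the stack layout stated at the top of the stub ([sp] holds the last argument); names are illustrative:

#include <cassert>
#include <cstdint>

const int kPointerSize = 4;

// Address of 0-based argument arg_number, given sp and argc.
uintptr_t ArgumentSlot(uintptr_t sp, int argc, int arg_number) {
  uintptr_t past_first = sp + argc * kPointerSize;      // the value put in a1
  return past_first - (arg_number + 1) * kPointerSize;  // MemOperand(a1, ...)
}

int main() {
  // With argc == 3: argument 0 is at the highest address, argument 2 at sp.
  assert(ArgumentSlot(0x1000, 3, 0) == 0x1008);
  assert(ArgumentSlot(0x1000, 3, 2) == 0x1000);
  return 0;
}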
3364
3365
3366 static bool IsElementTypeSigned(ExternalArrayType array_type) {
3367 switch (array_type) {
3368 case kExternalByteArray:
3369 case kExternalShortArray:
3370 case kExternalIntArray:
3371 return true;
3372
3373 case kExternalUnsignedByteArray:
3374 case kExternalUnsignedShortArray:
3375 case kExternalUnsignedIntArray:
3376 return false;
3377
3378 default:
3379 UNREACHABLE();
3380 return false;
3381 }
3382 }
3383
3384
579 MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub( 3385 MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
580 JSObject* receiver_object, 3386 JSObject* receiver_object,
581 ExternalArrayType array_type, 3387 ExternalArrayType array_type,
582 Code::Flags flags) { 3388 Code::Flags flags) {
583 UNIMPLEMENTED_MIPS(); 3389 // ---------- S t a t e --------------
584 return NULL; 3390 // -- ra : return address
3391 // -- a0 : key
3392 // -- a1 : receiver
3393 // -----------------------------------
3394 Label slow, failed_allocation;
3395
3396 Register key = a0;
3397 Register receiver = a1;
3398
3399 // Check that the object isn't a smi.
3400 __ JumpIfSmi(receiver, &slow);
3401
3402 // Check that the key is a smi.
3403 __ JumpIfNotSmi(key, &slow);
3404
3405 // Make sure that we've got the right map.
3406 __ lw(a2, FieldMemOperand(receiver, HeapObject::kMapOffset));
3407 __ Branch(&slow, ne, a2, Operand(Handle<Map>(receiver_object->map())));
3408
3409 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3410 // a3: elements array
3411
3412 // Check that the index is in range.
3413 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3414 __ sra(t2, key, kSmiTagSize);
3415 // Unsigned comparison catches both negative and too-large values.
3416 __ Branch(&slow, Uless, t1, Operand(t2));
3417
3418
3419 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3420 // a3: base pointer of external storage
3421
3422 // We do not untag the smi key; instead we work with it
3423 // as if it were premultiplied by 2.
3424 ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
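 // For example, the smi for index 3 is 6: srl by 1 gives the byte offset
 // for 1-byte elements, the smi itself is the offset for 2-byte elements,
 // and sll by 1 gives the offset for 4-byte elements.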
3425
3426 Register value = a2;
3427 switch (array_type) {
3428 case kExternalByteArray:
3429 __ srl(t2, key, 1);
3430 __ addu(t3, a3, t2);
3431 __ lb(value, MemOperand(t3, 0));
3432 break;
3433 case kExternalPixelArray:
3434 case kExternalUnsignedByteArray:
3435 __ srl(t2, key, 1);
3436 __ addu(t3, a3, t2);
3437 __ lbu(value, MemOperand(t3, 0));
3438 break;
3439 case kExternalShortArray:
3440 __ addu(t3, a3, key);
3441 __ lh(value, MemOperand(t3, 0));
3442 break;
3443 case kExternalUnsignedShortArray:
3444 __ addu(t3, a3, key);
3445 __ lhu(value, MemOperand(t3, 0));
3446 break;
3447 case kExternalIntArray:
3448 case kExternalUnsignedIntArray:
3449 __ sll(t2, key, 1);
3450 __ addu(t3, a3, t2);
3451 __ lw(value, MemOperand(t3, 0));
3452 break;
3453 case kExternalFloatArray:
3454 __ sll(t3, t2, 2);
3455 __ addu(t3, a3, t3);
3456 if (CpuFeatures::IsSupported(FPU)) {
3457 CpuFeatures::Scope scope(FPU);
3458 __ lwc1(f0, MemOperand(t3, 0));
3459 } else {
3460 __ lw(value, MemOperand(t3, 0));
3461 }
3462 break;
3463 case kExternalDoubleArray:
3464 __ sll(t2, key, 2);
3465 __ addu(t3, a3, t2);
3466 if (CpuFeatures::IsSupported(FPU)) {
3467 CpuFeatures::Scope scope(FPU);
3468 __ ldc1(f0, MemOperand(t3, 0));
3469 } else {
3470 // t3: pointer to the beginning of the double we want to load.
3471 __ lw(a2, MemOperand(t3, 0));
3472 __ lw(a3, MemOperand(t3, Register::kSizeInBytes));
3473 }
3474 break;
3475 default:
3476 UNREACHABLE();
3477 break;
3478 }
3479
3480 // For integer array types:
3481 // a2: value
3482 // For float array type:
3483 // f0: value (if FPU is supported)
3484 // a2: value (if FPU is not supported)
3485 // For double array type:
3486 // f0: value (if FPU is supported)
3487 // a2/a3: value (if FPU is not supported)
3488
3489 if (array_type == kExternalIntArray) {
3490 // For the Int and UnsignedInt array types, we need to see whether
3491 // the value can be represented in a Smi. If not, we need to convert
3492 // it to a HeapNumber.
3493 Label box_int;
3494 __ Subu(t3, value, Operand(0xC0000000)); // Non-smi value gives neg result.
3495 __ Branch(&box_int, lt, t3, Operand(zero_reg));
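 // (The Subu above computes value + 2^30 mod 2^32, which is non-negative
 // as a signed word exactly when value lies in the 31-bit smi range
 // [-2^30, 2^30); a negative result therefore means the value must be
 // boxed in a HeapNumber.)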
3496 // Tag integer as smi and return it.
3497 __ sll(v0, value, kSmiTagSize);
3498 __ Ret();
3499
3500 __ bind(&box_int);
3501 // Allocate a HeapNumber for the result and perform int-to-double
3502 // conversion.
3503 // The ARM version uses a temporary here to save r0, but we don't need to
3504 // (a0 is not modified).
3505 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3506 __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
3507
3508 if (CpuFeatures::IsSupported(FPU)) {
3509 CpuFeatures::Scope scope(FPU);
3510 __ mtc1(value, f0);
3511 __ cvt_d_w(f0, f0);
3512 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3513 __ Ret();
3514 } else {
3515 WriteInt32ToHeapNumberStub stub(value, v0, t2, t3);
3516 __ TailCallStub(&stub);
3517 }
3518 } else if (array_type == kExternalUnsignedIntArray) {
3519 // The test is different for unsigned int values. Since we need
3520 // the value to be in the range of a positive smi, we can't
3521 // handle either of the top two bits being set in the value.
3522 if (CpuFeatures::IsSupported(FPU)) {
3523 CpuFeatures::Scope scope(FPU);
3524 Label pl_box_int;
3525 __ And(t2, value, Operand(0xC0000000));
3526 __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));
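 // (If either of the top two bits is set, the value is >= 2^30 and does
 // not fit in the 31-bit positive smi range, so it has to be boxed.)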
3527
3528 // It can fit in a Smi.
3529 // Tag integer as smi and return it.
3530 __ sll(v0, value, kSmiTagSize);
3531 __ Ret();
3532
3533 __ bind(&pl_box_int);
3534 // Allocate a HeapNumber for the result and perform int-to-double
3535 // conversion. Don't use a0 and a1, as AllocateHeapNumber clobbers all
3536 // registers, including when it jumps because young space is exhausted.
3537 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3538 __ AllocateHeapNumber(v0, t2, t3, t6, &slow);
3539
3540 // The following sequence is implemented by the Cvt_d_uw macro:
3541 // __ mtc1(value, f0); // LS 32-bits.
3542 // __ mtc1(zero_reg, f1); // MS 32-bits are all zero.
3543 // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
3544
3545 __ Cvt_d_uw(f0, value);
3546
3547 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3548
3549 __ Ret();
3550 } else {
3551 // Check whether the unsigned integer fits into a smi.
3552 Label box_int_0, box_int_1, done;
3553 __ And(t2, value, Operand(0x80000000));
3554 __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
3555 __ And(t2, value, Operand(0x40000000));
3556 __ Branch(&box_int_1, ne, t2, Operand(zero_reg));
3557
3558 // Tag integer as smi and return it.
3559 __ sll(v0, value, kSmiTagSize);
3560 __ Ret();
3561
3562 Register hiword = value; // a2.
3563 Register loword = a3;
3564
3565 __ bind(&box_int_0);
3566 // Integer does not have leading zeros.
3567 GenerateUInt2Double(masm(), hiword, loword, t0, 0);
3568 __ Branch(&done);
3569
3570 __ bind(&box_int_1);
3571 // Integer has one leading zero.
3572 GenerateUInt2Double(masm(), hiword, loword, t0, 1);
3573
3574
3575 __ bind(&done);
3576 // Integer was converted to double in registers hiword:loword.
3577 // Wrap it into a HeapNumber. Don't use a0 and a1, as AllocateHeapNumber
3578 // clobbers all registers, including when it jumps because young space
3579 // is exhausted.
3580 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3581 __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
3582
3583 __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset));
3584 __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset));
3585
3586 __ mov(v0, t2);
3587 __ Ret();
3588 }
3589 } else if (array_type == kExternalFloatArray) {
3590 // For the floating-point array type, we always need to allocate a
3591 // HeapNumber.
3592 if (CpuFeatures::IsSupported(FPU)) {
3593 CpuFeatures::Scope scope(FPU);
3594 // Allocate a HeapNumber for the result. Don't use a0 and a1, as
3595 // AllocateHeapNumber clobbers all registers, including when it jumps
3596 // because young space is exhausted.
3597 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3598 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3599 // The single-precision value is already in FPU register f0.
3600 __ cvt_d_s(f0, f0);
3601 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3602 __ Ret();
3603 } else {
3604 // Allocate a HeapNumber for the result. Don't use a0 and a1, as
3605 // AllocateHeapNumber clobbers all registers, including when it jumps
3606 // because young space is exhausted.
3607 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3608 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3609 // FPU is not available; do a manual single-to-double conversion (see the sketch after this function).
3610
3611 // a2: floating point value (binary32).
3612 // v0: heap number for result
3613
3614 // Extract mantissa to t4.
3615 __ And(t4, value, Operand(kBinary32MantissaMask));
3616
3617 // Extract exponent to t5.
3618 __ srl(t5, value, kBinary32MantissaBits);
3619 __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3620
3621 Label exponent_rebiased;
3622 __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));
3623
3624 __ li(t0, 0x7ff);
3625 __ Xor(t1, t5, Operand(0xFF));
3626 __ movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
3627 __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg)); // Exponent was 0xff.
3628
3629 // Rebias exponent.
3630 __ Addu(t5,
3631 t5,
3632 Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3633
3634 __ bind(&exponent_rebiased);
3635 __ And(a2, value, Operand(kBinary32SignMask));
3636 value = no_reg;
3637 __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);
3638 __ or_(a2, a2, t0);
3639
3640 // Shift mantissa.
3641 static const int kMantissaShiftForHiWord =
3642 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3643
3644 static const int kMantissaShiftForLoWord =
3645 kBitsPerInt - kMantissaShiftForHiWord;
3646
3647 __ srl(t0, t4, kMantissaShiftForHiWord);
3648 __ or_(a2, a2, t0);
3649 __ sll(a0, t4, kMantissaShiftForLoWord);
3650
3651 __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3652 __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3653 __ Ret();
3654 }
3655
3656 } else if (array_type == kExternalDoubleArray) {
3657 if (CpuFeatures::IsSupported(FPU)) {
3658 CpuFeatures::Scope scope(FPU);
3659 // Allocate a HeapNumber for the result. Don't use a0 and a1, as
3660 // AllocateHeapNumber clobbers all registers, including when it jumps
3661 // because young space is exhausted.
3662 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3663 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3664 // The double value is already in f0.
3665 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3666 __ Ret();
3667 } else {
3668 // Allocate a HeapNumber for the result. Don't use a0 and a1, as
3669 // AllocateHeapNumber clobbers all registers, including when it jumps
3670 // because young space is exhausted.
3671 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3672 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3673
3674 __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3675 __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3676 __ Ret();
3677 }
3678
3679 } else {
3680 // Tag integer as smi and return it.
3681 __ sll(v0, value, kSmiTagSize);
3682 __ Ret();
3683 }
3684
3685 // Slow case, key and receiver still in a0 and a1.
3686 __ bind(&slow);
3687 __ IncrementCounter(
3688 masm()->isolate()->counters()->keyed_load_external_array_slow(),
3689 1, a2, a3);
3690
3691 // ---------- S t a t e --------------
3692 // -- ra : return address
3693 // -- a0 : key
3694 // -- a1 : receiver
3695 // -----------------------------------
3696
3697 __ Push(a1, a0);
3698
3699 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3700
3701 return GetCode(flags);
585 } 3702 }
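
The no-FPU path above widens a binary32 value into the two-word binary64 layout of a HeapNumber by hand. A minimal self-contained C++ sketch of the same bit manipulation, using the constants visible in the stub (23-bit source mantissa of which 20 bits land in the high word, exponent rebias from 127 to 1023); it mirrors the stub's behavior, including passing a zero exponent through unchanged, and is illustrative rather than part of the patch:

#include <cassert>
#include <cstdint>
#include <cstring>

void Binary32ToBinary64(uint32_t bits, uint32_t* hi, uint32_t* lo) {
  uint32_t sign = bits & 0x80000000u;
  uint32_t exponent = (bits >> 23) & 0xFFu;  // kBinary32MantissaBits == 23
  uint32_t mantissa = bits & 0x007FFFFFu;
  if (exponent == 0xFFu) {
    exponent = 0x7FFu;                       // Inf/NaN: all-ones exponent.
  } else if (exponent != 0) {
    exponent += 1023 - 127;                  // Rebias.
  }                                          // Zero exponent stays zero.
  *hi = sign | (exponent << 20) | (mantissa >> 3);  // Top 20 mantissa bits.
  *lo = mantissa << 29;                             // Remaining 3 bits.
}

int main() {
  float f = 1.5f;
  uint32_t bits, hi, lo;
  std::memcpy(&bits, &f, sizeof(bits));
  Binary32ToBinary64(bits, &hi, &lo);
  uint64_t dbits = (static_cast<uint64_t>(hi) << 32) | lo;
  double d;
  std::memcpy(&d, &dbits, sizeof(d));
  assert(d == 1.5);
  return 0;
}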
586 3703
587 3704
3705
3706
588 MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub( 3707 MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
589 JSObject* receiver_object, 3708 JSObject* receiver_object,
590 ExternalArrayType array_type, 3709 ExternalArrayType array_type,
591 Code::Flags flags) { 3710 Code::Flags flags) {
592 UNIMPLEMENTED_MIPS(); 3711 // ---------- S t a t e --------------
593 return NULL; 3712 // -- a0 : value
3713 // -- a1 : key
3714 // -- a2 : receiver
3715 // -- ra : return address
3716 // -----------------------------------
3717
3718 Label slow, check_heap_number;
3719
3720 // Register usage.
3721 Register value = a0;
3722 Register key = a1;
3723 Register receiver = a2;
3724 // a3 mostly holds the elements array or the destination external array.
3725
3726 // Check that the object isn't a smi.
3727 __ JumpIfSmi(receiver, &slow);
3728
3729 // Make sure that we've got the right map.
3730 __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
3731 __ Branch(&slow, ne, a3, Operand(Handle<Map>(receiver_object->map())));
3732
3733 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3734
3735 // Check that the key is a smi.
3736 __ JumpIfNotSmi(key, &slow);
3737
3738 // Check that the index is in range.
3739 __ SmiUntag(t0, key);
3740 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3741 // Unsigned comparison catches both negative and too-large values.
3742 __ Branch(&slow, Ugreater_equal, t0, Operand(t1));
3743
3744 // Handle both smis and HeapNumbers in the fast path. Go to the
3745 // runtime for all other kinds of values.
3746 // a3: external array.
3747 // t0: key (integer).
3748
3749 if (array_type == kExternalPixelArray) {
3750 // Double to pixel conversion is only implemented in the runtime for now.
3751 __ JumpIfNotSmi(value, &slow);
3752 } else {
3753 __ JumpIfNotSmi(value, &check_heap_number);
3754 }
3755 __ SmiUntag(t1, value);
3756 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3757
3758 // a3: base pointer of external storage.
3759 // t0: key (integer).
3760 // t1: value (integer).
3761
3762 switch (array_type) {
3763 case kExternalPixelArray: {
3764 // Clamp the value to [0..255].
3765 // v0 is used as a scratch register here.
3766 Label done;
3767 __ li(v0, Operand(255));
3768 // Normal branch: nop in delay slot.
3769 __ Branch(&done, gt, t1, Operand(v0));
3770 // Use delay slot in this branch.
3771 __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg));
3772 __ mov(v0, zero_reg); // In delay slot.
3773 __ mov(v0, t1); // Value is in range 0..255.
3774 __ bind(&done);
3775 __ mov(t1, v0);
3776 __ addu(t8, a3, t0);
3777 __ sb(t1, MemOperand(t8, 0));
3778 }
3779 break;
3780 case kExternalByteArray:
3781 case kExternalUnsignedByteArray:
3782 __ addu(t8, a3, t0);
3783 __ sb(t1, MemOperand(t8, 0));
3784 break;
3785 case kExternalShortArray:
3786 case kExternalUnsignedShortArray:
3787 __ sll(t8, t0, 1);
3788 __ addu(t8, a3, t8);
3789 __ sh(t1, MemOperand(t8, 0));
3790 break;
3791 case kExternalIntArray:
3792 case kExternalUnsignedIntArray:
3793 __ sll(t8, t0, 2);
3794 __ addu(t8, a3, t8);
3795 __ sw(t1, MemOperand(t8, 0));
3796 break;
3797 case kExternalFloatArray:
3798 // Perform int-to-float conversion and store to memory.
3799 StoreIntAsFloat(masm(), a3, t0, t1, t2, t3, t4);
3800 break;
3801 case kExternalDoubleArray:
3802 __ sll(t8, t0, 3);
3803 __ addu(a3, a3, t8);
3804 // a3: effective address of the double element
3805 FloatingPointHelper::Destination destination;
3806 if (CpuFeatures::IsSupported(FPU)) {
3807 destination = FloatingPointHelper::kFPURegisters;
3808 } else {
3809 destination = FloatingPointHelper::kCoreRegisters;
3810 }
3811 FloatingPointHelper::ConvertIntToDouble(
3812 masm(), t1, destination,
3813 f0, t2, t3, // These are: double_dst, dst1, dst2.
3814 t0, f2); // These are: scratch2, single_scratch.
3815 if (destination == FloatingPointHelper::kFPURegisters) {
3816 CpuFeatures::Scope scope(FPU);
3817 __ sdc1(f0, MemOperand(a3, 0));
3818 } else {
3819 __ sw(t2, MemOperand(a3, 0));
3820 __ sw(t3, MemOperand(a3, Register::kSizeInBytes));
3821 }
3822 break;
3823 default:
3824 UNREACHABLE();
3825 break;
3826 }
3827
3828 // Entry registers are intact, a0 holds the value which is the return value.
3829 __ mov(v0, value);
3830 __ Ret();
3831
3832 if (array_type != kExternalPixelArray) {
3833 // a3: external array.
3834 // t0: index (integer).
3835 __ bind(&check_heap_number);
3836 __ GetObjectType(value, t1, t2);
3837 __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE));
3838
3839 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3840
3841 // a3: base pointer of external storage.
3842 // t0: key (integer).
3843
3844 // The WebGL specification leaves the behavior of storing NaN and
3845 // +/-Infinity into integer arrays basically undefined. For more
3846 // reproducible behavior, convert these to zero.
3847
3848 if (CpuFeatures::IsSupported(FPU)) {
3849 CpuFeatures::Scope scope(FPU);
3850
3851 __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset));
3852
3853 if (array_type == kExternalFloatArray) {
3854 __ cvt_s_d(f0, f0);
3855 __ sll(t8, t0, 2);
3856 __ addu(t8, a3, t8);
3857 __ swc1(f0, MemOperand(t8, 0));
3858 } else if (array_type == kExternalDoubleArray) {
3859 __ sll(t8, t0, 3);
3860 __ addu(t8, a3, t8);
3861 __ sdc1(f0, MemOperand(t8, 0));
3862 } else {
3863 Label done;
3864
3865 // Need to perform float-to-int conversion.
3866 // Test whether the exponent is equal to 0x7FF (infinity or NaN).
3867
3868 __ mfc1(t3, f1); // Move exponent word of double to t3 (as raw bits).
3869 __ li(t1, Operand(0x7FF00000));
3870 __ And(t3, t3, Operand(t1));
3871 __ Branch(USE_DELAY_SLOT, &done, eq, t3, Operand(t1));
3872 __ mov(t3, zero_reg); // In delay slot.
3873
3874 // Not infinity or NaN: simply convert to int.
3875 if (IsElementTypeSigned(array_type)) {
3876 __ trunc_w_d(f0, f0);
3877 __ mfc1(t3, f0);
3878 } else {
3879 __ Trunc_uw_d(f0, t3);
3880 }
3881
3882 // t3: HeapNumber converted to integer
3883 __ bind(&done);
3884 switch (array_type) {
3885 case kExternalByteArray:
3886 case kExternalUnsignedByteArray:
3887 __ addu(t8, a3, t0);
3888 __ sb(t3, MemOperand(t8, 0));
3889 break;
3890 case kExternalShortArray:
3891 case kExternalUnsignedShortArray:
3892 __ sll(t8, t0, 1);
3893 __ addu(t8, a3, t8);
3894 __ sh(t3, MemOperand(t8, 0));
3895 break;
3896 case kExternalIntArray:
3897 case kExternalUnsignedIntArray:
3898 __ sll(t8, t0, 2);
3899 __ addu(t8, a3, t8);
3900 __ sw(t3, MemOperand(t8, 0));
3901 break;
3902 default:
3903 UNREACHABLE();
3904 break;
3905 }
3906 }
3907
3908 // Entry registers are intact, a0 holds the value
3909 // which is the return value.
3910 __ mov(v0, value);
3911 __ Ret();
3912 } else {
3913 // FPU is not available; do the conversions manually (see the sketch after this function).
3914
3915 __ lw(t3, FieldMemOperand(value, HeapNumber::kExponentOffset));
3916 __ lw(t4, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3917
3918 if (array_type == kExternalFloatArray) {
3919 Label done, nan_or_infinity_or_zero;
3920 static const int kMantissaInHiWordShift =
3921 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3922
3923 static const int kMantissaInLoWordShift =
3924 kBitsPerInt - kMantissaInHiWordShift;
3925
3926 // Test for all special exponent values: zeros, subnormal numbers, NaNs
3927 // and infinities. All these should be converted to 0.
3928 __ li(t5, HeapNumber::kExponentMask);
3929 __ and_(t6, t3, t5);
3930 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));
3931
3932 __ xor_(t1, t6, t5);
3933 __ li(t2, kBinary32ExponentMask);
3934 __ movz(t6, t2, t1); // Only if t6 is equal to t5.
3935 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5));
3936
3937 // Rebias exponent.
3938 __ srl(t6, t6, HeapNumber::kExponentShift);
3939 __ Addu(t6,
3940 t6,
3941 Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
3942
3943 __ li(t1, Operand(kBinary32MaxExponent));
3944 __ Slt(t1, t1, t6);
3945 __ And(t2, t3, Operand(HeapNumber::kSignMask));
3946 __ Or(t2, t2, Operand(kBinary32ExponentMask));
3947 __ movn(t3, t2, t1); // Only if t6 is gt kBinary32MaxExponent.
3948 __ Branch(&done, gt, t6, Operand(kBinary32MaxExponent));
3949
3950 __ Slt(t1, t6, Operand(kBinary32MinExponent));
3951 __ And(t2, t3, Operand(HeapNumber::kSignMask));
3952 __ movn(t3, t2, t1); // Only if t6 is lt kBinary32MinExponent.
3953 __ Branch(&done, lt, t6, Operand(kBinary32MinExponent));
3954
3955 __ And(t7, t3, Operand(HeapNumber::kSignMask));
3956 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3957 __ sll(t3, t3, kMantissaInHiWordShift);
3958 __ or_(t7, t7, t3);
3959 __ srl(t4, t4, kMantissaInLoWordShift);
3960 __ or_(t7, t7, t4);
3961 __ sll(t6, t6, kBinary32ExponentShift);
3962 __ or_(t3, t7, t6);
3963
3964 __ bind(&done);
3965 __ sll(t9, t0, 2); // t0: untagged key.
3966 __ addu(t9, a3, t9); // a3: base pointer of external storage.
3967 __ sw(t3, MemOperand(t9, 0));
3968
3969 // Entry registers are intact, a0 holds the value which is the return
3970 // value.
3971 __ mov(v0, value);
3972 __ Ret();
3973
3974 __ bind(&nan_or_infinity_or_zero);
3975 __ And(t7, t3, Operand(HeapNumber::kSignMask));
3976 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3977 __ or_(t6, t6, t7);
3978 __ sll(t3, t3, kMantissaInHiWordShift);
3979 __ or_(t6, t6, t3);
3980 __ srl(t4, t4, kMantissaInLoWordShift);
3981 __ or_(t3, t6, t4);
3982 __ Branch(&done);
3983 } else if (array_type == kExternalDoubleArray) {
3984 __ sll(t8, t0, 3);
3985 __ addu(t8, a3, t8);
3986 // t8: effective address of destination element.
3987 __ sw(t4, MemOperand(t8, 0));
3988 __ sw(t3, MemOperand(t8, Register::kSizeInBytes));
3989 __ Ret();
3990 } else {
3991 bool is_signed_type = IsElementTypeSigned(array_type);
3992 int meaningful_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
3993 int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
3994
3995 Label done, sign;
3996
3997 // Test for all special exponent values: zeros, subnormal numbers, NaNs
3998 // and infinities. All these should be converted to 0.
3999 __ li(t5, HeapNumber::kExponentMask);
4000 __ and_(t6, t3, t5);
4001 __ movz(t3, zero_reg, t6); // Only if t6 is equal to zero.
4002 __ Branch(&done, eq, t6, Operand(zero_reg));
4003
4004 __ xor_(t2, t6, t5);
4005 __ movz(t3, zero_reg, t2); // Only if t6 is equal to t5.
4006 __ Branch(&done, eq, t6, Operand(t5));
4007
4008 // Unbias exponent.
4009 __ srl(t6, t6, HeapNumber::kExponentShift);
4010 __ Subu(t6, t6, Operand(HeapNumber::kExponentBias));
4011 // If the exponent is negative, the result is 0.
4012 __ slt(t2, t6, zero_reg);
4013 __ movn(t3, zero_reg, t2); // Only if exponent is negative.
4014 __ Branch(&done, lt, t6, Operand(zero_reg));
4015
4016 // If the exponent is too big, the result is the minimal value.
4017 __ slti(t1, t6, meaningful_bits - 1);
4018 __ li(t2, min_value);
4019 __ movz(t3, t2, t1); // Only if t6 is ge meaningful_bits - 1.
4020 __ Branch(&done, ge, t6, Operand(meaningful_bits - 1));
4021
4022 __ And(t5, t3, Operand(HeapNumber::kSignMask));
4023 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4024 __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4025
4026 __ li(t9, HeapNumber::kMantissaBitsInTopWord);
4027 __ subu(t6, t9, t6);
4028 __ slt(t1, t6, zero_reg);
4029 __ srlv(t2, t3, t6);
4030 __ movz(t3, t2, t1); // Only if t6 is non-negative.
4031 __ Branch(&sign, ge, t6, Operand(zero_reg));
4032
4033 __ subu(t6, zero_reg, t6);
4034 __ sllv(t3, t3, t6);
4035 __ li(t9, kBitsPerInt); // The two shift amounts must total 32 bits.
4036 __ subu(t6, t9, t6);
4037 __ srlv(t4, t4, t6);
4038 __ or_(t3, t3, t4);
4039
4040 __ bind(&sign);
4041 __ subu(t2, zero_reg, t3);
4042 __ movn(t3, t2, t5); // Negate the result if the sign bit (t5) is set.
4043
4044 __ bind(&done);
4045
4046 // Result is in t3.
4047 // This switch block should be exactly the same as above (FPU mode).
4048 switch (array_type) {
4049 case kExternalByteArray:
4050 case kExternalUnsignedByteArray:
4051 __ addu(t8, a3, t0);
4052 __ sb(t3, MemOperand(t8, 0));
4053 break;
4054 case kExternalShortArray:
4055 case kExternalUnsignedShortArray:
4056 __ sll(t8, t0, 1);
4057 __ addu(t8, a3, t8);
4058 __ sh(t3, MemOperand(t8, 0));
4059 break;
4060 case kExternalIntArray:
4061 case kExternalUnsignedIntArray:
4062 __ sll(t8, t0, 2);
4063 __ addu(t8, a3, t8);
4064 __ sw(t3, MemOperand(t8, 0));
4065 break;
4066 default:
4067 UNREACHABLE();
4068 break;
4069 }
4070 }
4071 }
4072 }
4073
4074 // Slow case: call runtime.
4075 __ bind(&slow);
4076 // Entry registers are intact.
4077 // ---------- S t a t e --------------
4078 // -- a0 : value
4079 // -- a1 : key
4080 // -- a2 : receiver
4081 // -- ra : return address
4082 // -----------------------------------
4083
4084 // Push receiver, key and value for runtime call.
4085 __ Push(a2, a1, a0);
4086
4087 __ li(a1, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
4088 __ li(a0, Operand(Smi::FromInt(
4089 Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
4090 __ Push(a1, a0);
4091
4092 __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
4093
4094 return GetCode(flags);
594 } 4095 }
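
The no-FPU store path above truncates a HeapNumber to an integer with shifts alone. A minimal C++ sketch of the intended computation, written against the raw high/low words of the double; it follows the stub's conventions (zeros, denormals, NaNs, infinities and negative exponents produce 0; an exponent too large for the element type produces min_value). Names are illustrative, not part of the patch:

#include <cassert>
#include <cstdint>
#include <cstring>

int32_t TruncateDoubleBits(uint32_t hi, uint32_t lo, bool is_signed) {
  const int meaningful_bits = is_signed ? 31 : 32;
  uint32_t exp_field = (hi >> 20) & 0x7FFu;
  if (exp_field == 0 || exp_field == 0x7FFu) return 0;  // zero/denormal, Inf/NaN
  int exponent = static_cast<int>(exp_field) - 1023;    // Unbias.
  if (exponent < 0) return 0;                           // |value| < 1.
  if (exponent >= meaningful_bits - 1) {
    return is_signed ? INT32_MIN : 0;                   // min_value in the stub.
  }
  uint32_t mantissa = (hi & 0x000FFFFFu) | 0x00100000u; // Add the implicit 1.
  int shift = 20 - exponent;                            // kMantissaBitsInTopWord
  uint32_t magnitude;
  if (shift >= 0) {
    magnitude = mantissa >> shift;
  } else {
    // The two shift amounts total 32 so no mantissa bit is duplicated.
    magnitude = (mantissa << -shift) | (lo >> (32 + shift));
  }
  if (hi & 0x80000000u) magnitude = 0u - magnitude;     // Apply the sign.
  return static_cast<int32_t>(magnitude);
}

int main() {
  double d = -5.75;
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  assert(TruncateDoubleBits(static_cast<uint32_t>(bits >> 32),
                            static_cast<uint32_t>(bits), true) == -5);
  return 0;
}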
595 4096
596 4097
597 #undef __ 4098 #undef __
598 4099
599 } } // namespace v8::internal 4100 } } // namespace v8::internal
600 4101
601 #endif // V8_TARGET_ARCH_MIPS 4102 #endif // V8_TARGET_ARCH_MIPS