Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(607)

Side by Side Diff: src/mips/stub-cache-mips.cc

Issue 7060010: Merge bleeding edge into the GC branch up to 7948. The asserts (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/mips/simulator-mips.cc ('k') | src/mips/virtual-frame-mips.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 11 matching lines...) Expand all
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_MIPS) 30 #if defined(V8_TARGET_ARCH_MIPS)
31 31
32 #include "ic-inl.h" 32 #include "ic-inl.h"
33 #include "codegen-inl.h" 33 #include "codegen.h"
34 #include "stub-cache.h" 34 #include "stub-cache.h"
35 35
36 namespace v8 { 36 namespace v8 {
37 namespace internal { 37 namespace internal {
38 38
39 #define __ ACCESS_MASM(masm) 39 #define __ ACCESS_MASM(masm)
40 40
41 41
42 static void ProbeTable(Isolate* isolate,
43 MacroAssembler* masm,
44 Code::Flags flags,
45 StubCache::Table table,
46 Register name,
47 Register offset,
48 Register scratch,
49 Register scratch2) {
50 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
51 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
52
53 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
54 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
55
56 // Check the relative positions of the address fields.
57 ASSERT(value_off_addr > key_off_addr);
58 ASSERT((value_off_addr - key_off_addr) % 4 == 0);
59 ASSERT((value_off_addr - key_off_addr) < (256 * 4));
60
61 Label miss;
62 Register offsets_base_addr = scratch;
63
64 // Check that the key in the entry matches the name.
65 __ li(offsets_base_addr, Operand(key_offset));
66 __ sll(scratch2, offset, 1);
67 __ addu(scratch2, offsets_base_addr, scratch2);
68 __ lw(scratch2, MemOperand(scratch2));
69 __ Branch(&miss, ne, name, Operand(scratch2));
70
71 // Get the code entry from the cache.
72 __ Addu(offsets_base_addr, offsets_base_addr,
73 Operand(value_off_addr - key_off_addr));
74 __ sll(scratch2, offset, 1);
75 __ addu(scratch2, offsets_base_addr, scratch2);
76 __ lw(scratch2, MemOperand(scratch2));
77
78 // Check that the flags match what we're looking for.
79 __ lw(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
80 __ And(scratch2, scratch2, Operand(~Code::kFlagsNotUsedInLookup));
81 __ Branch(&miss, ne, scratch2, Operand(flags));
82
83 // Re-load code entry from cache.
84 __ sll(offset, offset, 1);
85 __ addu(offset, offset, offsets_base_addr);
86 __ lw(offset, MemOperand(offset));
87
88 // Jump to the first instruction in the code stub.
89 __ Addu(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
90 __ Jump(offset);
91
92 // Miss: fall through.
93 __ bind(&miss);
94 }
95
96
97 // Helper function used to check that the dictionary doesn't contain
98 // the property. This function may return false negatives, so miss_label
99 // must always call a backup property check that is complete.
100 // This function is safe to call if the receiver has fast properties.
101 // Name must be a symbol and receiver must be a heap object.
102 MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
103 MacroAssembler* masm,
104 Label* miss_label,
105 Register receiver,
106 String* name,
107 Register scratch0,
108 Register scratch1) {
109 ASSERT(name->IsSymbol());
110 Counters* counters = masm->isolate()->counters();
111 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
112 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
113
114 Label done;
115
116 const int kInterceptorOrAccessCheckNeededMask =
117 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
118
119 // Bail out if the receiver has a named interceptor or requires access checks.
120 Register map = scratch1;
121 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
122 __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
123 __ And(at, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
124 __ Branch(miss_label, ne, at, Operand(zero_reg));
125
126
127 // Check that receiver is a JSObject.
128 __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
129 __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_OBJECT_TYPE));
130
131 // Load properties array.
132 Register properties = scratch0;
133 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
134 // Check that the properties array is a dictionary.
135 __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
136 Register tmp = properties;
137 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
138 __ Branch(miss_label, ne, map, Operand(tmp));
139
140 // Restore the temporarily used register.
141 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
142
143 MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
144 masm,
145 miss_label,
146 &done,
147 receiver,
148 properties,
149 name,
150 scratch1);
151 if (result->IsFailure()) return result;
152
153 __ bind(&done);
154 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
155
156 return result;
157 }
158
159
42 void StubCache::GenerateProbe(MacroAssembler* masm, 160 void StubCache::GenerateProbe(MacroAssembler* masm,
43 Code::Flags flags, 161 Code::Flags flags,
44 Register receiver, 162 Register receiver,
45 Register name, 163 Register name,
46 Register scratch, 164 Register scratch,
47 Register extra, 165 Register extra,
48 Register extra2) { 166 Register extra2) {
49 UNIMPLEMENTED_MIPS(); 167 Isolate* isolate = masm->isolate();
168 Label miss;
169
170 // Make sure that code is valid. The shifting code relies on the
171 // entry size being 8.
172 ASSERT(sizeof(Entry) == 8);
173
174 // Make sure the flags does not name a specific type.
175 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
176
177 // Make sure that there are no register conflicts.
178 ASSERT(!scratch.is(receiver));
179 ASSERT(!scratch.is(name));
180 ASSERT(!extra.is(receiver));
181 ASSERT(!extra.is(name));
182 ASSERT(!extra.is(scratch));
183 ASSERT(!extra2.is(receiver));
184 ASSERT(!extra2.is(name));
185 ASSERT(!extra2.is(scratch));
186 ASSERT(!extra2.is(extra));
187
188 // Check scratch, extra and extra2 registers are valid.
189 ASSERT(!scratch.is(no_reg));
190 ASSERT(!extra.is(no_reg));
191 ASSERT(!extra2.is(no_reg));
192
193 // Check that the receiver isn't a smi.
194 __ JumpIfSmi(receiver, &miss, t0);
195
196 // Get the map of the receiver and compute the hash.
197 __ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
198 __ lw(t8, FieldMemOperand(receiver, HeapObject::kMapOffset));
199 __ Addu(scratch, scratch, Operand(t8));
200 __ Xor(scratch, scratch, Operand(flags));
201 __ And(scratch,
202 scratch,
203 Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));
204
205 // Probe the primary table.
206 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);
207
208 // Primary miss: Compute hash for secondary probe.
209 __ Subu(scratch, scratch, Operand(name));
210 __ Addu(scratch, scratch, Operand(flags));
211 __ And(scratch,
212 scratch,
213 Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));
214
215 // Probe the secondary table.
216 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);
217
218 // Cache miss: Fall-through and let caller handle the miss by
219 // entering the runtime system.
220 __ bind(&miss);
50 } 221 }
51 222
52 223
53 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, 224 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
54 int index, 225 int index,
55 Register prototype) { 226 Register prototype) {
56 UNIMPLEMENTED_MIPS(); 227 // Load the global or builtins object from the current context.
228 __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
229 // Load the global context from the global or builtins object.
230 __ lw(prototype,
231 FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
232 // Load the function from the global context.
233 __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
234 // Load the initial map. The global functions all have initial maps.
235 __ lw(prototype,
236 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
237 // Load the prototype from the initial map.
238 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
57 } 239 }
58 240
59 241
60 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( 242 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
61 MacroAssembler* masm, int index, Register prototype, Label* miss) { 243 MacroAssembler* masm, int index, Register prototype, Label* miss) {
62 UNIMPLEMENTED_MIPS(); 244 Isolate* isolate = masm->isolate();
245 // Check we're still in the same context.
246 __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
247 ASSERT(!prototype.is(at));
248 __ li(at, isolate->global());
249 __ Branch(miss, ne, prototype, Operand(at));
250 // Get the global function with the given index.
251 JSFunction* function =
252 JSFunction::cast(isolate->global_context()->get(index));
253 // Load its initial map. The global functions all have initial maps.
254 __ li(prototype, Handle<Map>(function->initial_map()));
255 // Load the prototype from the initial map.
256 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
63 } 257 }
64 258
65 259
66 // Load a fast property out of a holder object (src). In-object properties 260 // Load a fast property out of a holder object (src). In-object properties
67 // are loaded directly otherwise the property is loaded from the properties 261 // are loaded directly otherwise the property is loaded from the properties
68 // fixed array. 262 // fixed array.
69 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, 263 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
70 Register dst, Register src, 264 Register dst, Register src,
71 JSObject* holder, int index) { 265 JSObject* holder, int index) {
72 UNIMPLEMENTED_MIPS(); 266 // Adjust for the number of properties stored in the holder.
267 index -= holder->map()->inobject_properties();
268 if (index < 0) {
269 // Get the property straight out of the holder.
270 int offset = holder->map()->instance_size() + (index * kPointerSize);
271 __ lw(dst, FieldMemOperand(src, offset));
272 } else {
273 // Calculate the offset into the properties array.
274 int offset = index * kPointerSize + FixedArray::kHeaderSize;
275 __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
276 __ lw(dst, FieldMemOperand(dst, offset));
277 }
73 } 278 }
74 279
75 280
76 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, 281 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
77 Register receiver, 282 Register receiver,
78 Register scratch, 283 Register scratch,
79 Label* miss_label) { 284 Label* miss_label) {
80 UNIMPLEMENTED_MIPS(); 285 // Check that the receiver isn't a smi.
286 __ And(scratch, receiver, Operand(kSmiTagMask));
287 __ Branch(miss_label, eq, scratch, Operand(zero_reg));
288
289 // Check that the object is a JS array.
290 __ GetObjectType(receiver, scratch, scratch);
291 __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));
292
293 // Load length directly from the JS array.
294 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
295 __ Ret();
296 }
297
298
299 // Generate code to check if an object is a string. If the object is a
300 // heap object, its map's instance type is left in the scratch1 register.
301 // If this is not needed, scratch1 and scratch2 may be the same register.
302 static void GenerateStringCheck(MacroAssembler* masm,
303 Register receiver,
304 Register scratch1,
305 Register scratch2,
306 Label* smi,
307 Label* non_string_object) {
308 // Check that the receiver isn't a smi.
309 __ JumpIfSmi(receiver, smi, t0);
310
311 // Check that the object is a string.
312 __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
313 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
314 __ And(scratch2, scratch1, Operand(kIsNotStringMask));
315 // The cast is to resolve the overload for the argument of 0x0.
316 __ Branch(non_string_object,
317 ne,
318 scratch2,
319 Operand(static_cast<int32_t>(kStringTag)));
81 } 320 }
82 321
83 322
84 // Generate code to load the length from a string object and return the length. 323 // Generate code to load the length from a string object and return the length.
85 // If the receiver object is not a string or a wrapped string object the 324 // If the receiver object is not a string or a wrapped string object the
86 // execution continues at the miss label. The register containing the 325 // execution continues at the miss label. The register containing the
87 // receiver is potentially clobbered. 326 // receiver is potentially clobbered.
88 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm, 327 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
89 Register receiver, 328 Register receiver,
90 Register scratch1, 329 Register scratch1,
91 Register scratch2, 330 Register scratch2,
92 Label* miss, 331 Label* miss,
93 bool support_wrappers) { 332 bool support_wrappers) {
94 UNIMPLEMENTED_MIPS(); 333 Label check_wrapper;
334
335 // Check if the object is a string leaving the instance type in the
336 // scratch1 register.
337 GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
338 support_wrappers ? &check_wrapper : miss);
339
340 // Load length directly from the string.
341 __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
342 __ Ret();
343
344 if (support_wrappers) {
345 // Check if the object is a JSValue wrapper.
346 __ bind(&check_wrapper);
347 __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));
348
349 // Unwrap the value and check if the wrapped value is a string.
350 __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
351 GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
352 __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
353 __ Ret();
354 }
95 } 355 }
96 356
97 357
98 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, 358 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
99 Register receiver, 359 Register receiver,
100 Register scratch1, 360 Register scratch1,
101 Register scratch2, 361 Register scratch2,
102 Label* miss_label) { 362 Label* miss_label) {
103 UNIMPLEMENTED_MIPS(); 363 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
364 __ mov(v0, scratch1);
365 __ Ret();
104 } 366 }
105 367
106 368
107 // Generate StoreField code, value is passed in a0 register. 369 // Generate StoreField code, value is passed in a0 register.
108 // After executing generated code, the receiver_reg and name_reg 370 // After executing generated code, the receiver_reg and name_reg
109 // may be clobbered. 371 // may be clobbered.
110 void StubCompiler::GenerateStoreField(MacroAssembler* masm, 372 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
111 JSObject* object, 373 JSObject* object,
112 int index, 374 int index,
113 Map* transition, 375 Map* transition,
114 Register receiver_reg, 376 Register receiver_reg,
115 Register name_reg, 377 Register name_reg,
116 Register scratch, 378 Register scratch,
117 Label* miss_label) { 379 Label* miss_label) {
118 UNIMPLEMENTED_MIPS(); 380 // a0 : value.
381 Label exit;
382
383 // Check that the receiver isn't a smi.
384 __ JumpIfSmi(receiver_reg, miss_label, scratch);
385
386 // Check that the map of the receiver hasn't changed.
387 __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
388 __ Branch(miss_label, ne, scratch, Operand(Handle<Map>(object->map())));
389
390 // Perform global security token check if needed.
391 if (object->IsJSGlobalProxy()) {
392 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
393 }
394
395 // Stub never generated for non-global objects that require access
396 // checks.
397 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
398
399 // Perform map transition for the receiver if necessary.
400 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
401 // The properties must be extended before we can store the value.
402 // We jump to a runtime call that extends the properties array.
403 __ push(receiver_reg);
404 __ li(a2, Operand(Handle<Map>(transition)));
405 __ Push(a2, a0);
406 __ TailCallExternalReference(
407 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
408 masm->isolate()),
409 3, 1);
410 return;
411 }
412
413 if (transition != NULL) {
414 // Update the map of the object; no write barrier updating is
415 // needed because the map is never in new space.
416 __ li(t0, Operand(Handle<Map>(transition)));
417 __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
418 }
419
420 // Adjust for the number of properties stored in the object. Even in the
421 // face of a transition we can use the old map here because the size of the
422 // object and the number of in-object properties is not going to change.
423 index -= object->map()->inobject_properties();
424
425 if (index < 0) {
426 // Set the property straight into the object.
427 int offset = object->map()->instance_size() + (index * kPointerSize);
428 __ sw(a0, FieldMemOperand(receiver_reg, offset));
429
430 // Skip updating write barrier if storing a smi.
431 __ JumpIfSmi(a0, &exit, scratch);
432
433 // Update the write barrier for the array address.
434 // Pass the now unused name_reg as a scratch register.
435 __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
436 } else {
437 // Write to the properties array.
438 int offset = index * kPointerSize + FixedArray::kHeaderSize;
439 // Get the properties array.
440 __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
441 __ sw(a0, FieldMemOperand(scratch, offset));
442
443 // Skip updating write barrier if storing a smi.
444 __ JumpIfSmi(a0, &exit);
445
446 // Update the write barrier for the array address.
447 // Ok to clobber receiver_reg and name_reg, since we return.
448 __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
449 }
450
451 // Return the value (register v0).
452 __ bind(&exit);
453 __ mov(v0, a0);
454 __ Ret();
119 } 455 }
120 456
121 457
122 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { 458 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
123 UNIMPLEMENTED_MIPS(); 459 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
124 } 460 Code* code = NULL;
125 461 if (kind == Code::LOAD_IC) {
462 code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
463 } else {
464 code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
465 }
466
467 Handle<Code> ic(code);
468 __ Jump(ic, RelocInfo::CODE_TARGET);
469 }
470
471
472 static void GenerateCallFunction(MacroAssembler* masm,
473 Object* object,
474 const ParameterCount& arguments,
475 Label* miss) {
476 // ----------- S t a t e -------------
477 // -- a0: receiver
478 // -- a1: function to call
479 // -----------------------------------
480 // Check that the function really is a function.
481 __ JumpIfSmi(a1, miss);
482 __ GetObjectType(a1, a3, a3);
483 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
484
485 // Patch the receiver on the stack with the global proxy if
486 // necessary.
487 if (object->IsGlobalObject()) {
488 __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
489 __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
490 }
491
492 // Invoke the function.
493 __ InvokeFunction(a1, arguments, JUMP_FUNCTION);
494 }
495
496
497 static void PushInterceptorArguments(MacroAssembler* masm,
498 Register receiver,
499 Register holder,
500 Register name,
501 JSObject* holder_obj) {
502 __ push(name);
503 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
504 ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
505 Register scratch = name;
506 __ li(scratch, Operand(Handle<Object>(interceptor)));
507 __ Push(scratch, receiver, holder);
508 __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
509 __ push(scratch);
510 }
511
512
513 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
514 Register receiver,
515 Register holder,
516 Register name,
517 JSObject* holder_obj) {
518 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
519
520 ExternalReference ref =
521 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
522 masm->isolate());
523 __ li(a0, Operand(5));
524 __ li(a1, Operand(ref));
525
526 CEntryStub stub(1);
527 __ CallStub(&stub);
528 }
529
530
531 static const int kFastApiCallArguments = 3;
532
533
534 // Reserves space for the extra arguments to FastHandleApiCall in the
535 // caller's frame.
536 //
537 // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
538 static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
539 Register scratch) {
540 ASSERT(Smi::FromInt(0) == 0);
541 for (int i = 0; i < kFastApiCallArguments; i++) {
542 __ push(zero_reg);
543 }
544 }
545
546
547 // Undoes the effects of ReserveSpaceForFastApiCall.
548 static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
549 __ Drop(kFastApiCallArguments);
550 }
551
552
553 static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
554 const CallOptimization& optimization,
555 int argc) {
556 // ----------- S t a t e -------------
557 // -- sp[0] : holder (set by CheckPrototypes)
558 // -- sp[4] : callee js function
559 // -- sp[8] : call data
560 // -- sp[12] : last js argument
561 // -- ...
562 // -- sp[(argc + 3) * 4] : first js argument
563 // -- sp[(argc + 4) * 4] : receiver
564 // -----------------------------------
565 // Get the function and setup the context.
566 JSFunction* function = optimization.constant_function();
567 __ li(t1, Operand(Handle<JSFunction>(function)));
568 __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
569
570 // Pass the additional arguments FastHandleApiCall expects.
571 Object* call_data = optimization.api_call_info()->data();
572 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
573 if (masm->isolate()->heap()->InNewSpace(call_data)) {
574 __ li(a0, api_call_info_handle);
575 __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
576 } else {
577 __ li(t2, Operand(Handle<Object>(call_data)));
578 }
579
580 // Store js function and call data.
581 __ sw(t1, MemOperand(sp, 1 * kPointerSize));
582 __ sw(t2, MemOperand(sp, 2 * kPointerSize));
583
584 // a2 points to call data as expected by Arguments
585 // (refer to layout above).
586 __ Addu(a2, sp, Operand(2 * kPointerSize));
587
588 Object* callback = optimization.api_call_info()->callback();
589 Address api_function_address = v8::ToCData<Address>(callback);
590 ApiFunction fun(api_function_address);
591
592 const int kApiStackSpace = 4;
593
594 __ EnterExitFrame(false, kApiStackSpace);
595
596 // NOTE: the O32 abi requires a0 to hold a special pointer when returning a
597 // struct from the function (which is currently the case). This means we pass
598 // the first argument in a1 instead of a0. TryCallApiFunctionAndReturn
599 // will handle setting up a0.
600
601 // a1 = v8::Arguments&
602 // Arguments is built at sp + 1 (sp is a reserved spot for ra).
603 __ Addu(a1, sp, kPointerSize);
604
605 // v8::Arguments::implicit_args = data
606 __ sw(a2, MemOperand(a1, 0 * kPointerSize));
607 // v8::Arguments::values = last argument
608 __ Addu(t0, a2, Operand(argc * kPointerSize));
609 __ sw(t0, MemOperand(a1, 1 * kPointerSize));
610 // v8::Arguments::length_ = argc
611 __ li(t0, Operand(argc));
612 __ sw(t0, MemOperand(a1, 2 * kPointerSize));
613 // v8::Arguments::is_construct_call = 0
614 __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize));
615
616 // Emitting a stub call may try to allocate (if the code is not
617 // already generated). Do not allow the assembler to perform a
618 // garbage collection but instead return the allocation failure
619 // object.
620 const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
621 ExternalReference ref =
622 ExternalReference(&fun,
623 ExternalReference::DIRECT_API_CALL,
624 masm->isolate());
625 return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
626 }
126 627
127 class CallInterceptorCompiler BASE_EMBEDDED { 628 class CallInterceptorCompiler BASE_EMBEDDED {
128 public: 629 public:
129 CallInterceptorCompiler(StubCompiler* stub_compiler, 630 CallInterceptorCompiler(StubCompiler* stub_compiler,
130 const ParameterCount& arguments, 631 const ParameterCount& arguments,
131 Register name) 632 Register name)
132 : stub_compiler_(stub_compiler), 633 : stub_compiler_(stub_compiler),
133 arguments_(arguments), 634 arguments_(arguments),
134 name_(name) {} 635 name_(name) {}
135 636
136 void Compile(MacroAssembler* masm, 637 MaybeObject* Compile(MacroAssembler* masm,
137 JSObject* object,
138 JSObject* holder,
139 String* name,
140 LookupResult* lookup,
141 Register receiver,
142 Register scratch1,
143 Register scratch2,
144 Register scratch3,
145 Label* miss) {
146 UNIMPLEMENTED_MIPS();
147 }
148
149 private:
150 void CompileCacheable(MacroAssembler* masm,
151 JSObject* object, 638 JSObject* object,
639 JSObject* holder,
640 String* name,
641 LookupResult* lookup,
152 Register receiver, 642 Register receiver,
153 Register scratch1, 643 Register scratch1,
154 Register scratch2, 644 Register scratch2,
155 Register scratch3, 645 Register scratch3,
156 JSObject* interceptor_holder, 646 Label* miss) {
157 LookupResult* lookup, 647 ASSERT(holder->HasNamedInterceptor());
158 String* name, 648 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
159 const CallOptimization& optimization, 649
160 Label* miss_label) { 650 // Check that the receiver isn't a smi.
161 UNIMPLEMENTED_MIPS(); 651 __ JumpIfSmi(receiver, miss);
652
653 CallOptimization optimization(lookup);
654
655 if (optimization.is_constant_call()) {
656 return CompileCacheable(masm,
657 object,
658 receiver,
659 scratch1,
660 scratch2,
661 scratch3,
662 holder,
663 lookup,
664 name,
665 optimization,
666 miss);
667 } else {
668 CompileRegular(masm,
669 object,
670 receiver,
671 scratch1,
672 scratch2,
673 scratch3,
674 name,
675 holder,
676 miss);
677 return masm->isolate()->heap()->undefined_value();
678 }
679 }
680
681 private:
682 MaybeObject* CompileCacheable(MacroAssembler* masm,
683 JSObject* object,
684 Register receiver,
685 Register scratch1,
686 Register scratch2,
687 Register scratch3,
688 JSObject* interceptor_holder,
689 LookupResult* lookup,
690 String* name,
691 const CallOptimization& optimization,
692 Label* miss_label) {
693 ASSERT(optimization.is_constant_call());
694 ASSERT(!lookup->holder()->IsGlobalObject());
695
696 Counters* counters = masm->isolate()->counters();
697
698 int depth1 = kInvalidProtoDepth;
699 int depth2 = kInvalidProtoDepth;
700 bool can_do_fast_api_call = false;
701 if (optimization.is_simple_api_call() &&
702 !lookup->holder()->IsGlobalObject()) {
703 depth1 =
704 optimization.GetPrototypeDepthOfExpectedType(object,
705 interceptor_holder);
706 if (depth1 == kInvalidProtoDepth) {
707 depth2 =
708 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
709 lookup->holder());
710 }
711 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
712 (depth2 != kInvalidProtoDepth);
713 }
714
715 __ IncrementCounter(counters->call_const_interceptor(), 1,
716 scratch1, scratch2);
717
718 if (can_do_fast_api_call) {
719 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
720 scratch1, scratch2);
721 ReserveSpaceForFastApiCall(masm, scratch1);
722 }
723
724 // Check that the maps from receiver to interceptor's holder
725 // haven't changed and thus we can invoke interceptor.
726 Label miss_cleanup;
727 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
728 Register holder =
729 stub_compiler_->CheckPrototypes(object, receiver,
730 interceptor_holder, scratch1,
731 scratch2, scratch3, name, depth1, miss);
732
733 // Invoke an interceptor and if it provides a value,
734 // branch to |regular_invoke|.
735 Label regular_invoke;
736 LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
737 &regular_invoke);
738
739 // Interceptor returned nothing for this property. Try to use cached
740 // constant function.
741
742 // Check that the maps from interceptor's holder to constant function's
743 // holder haven't changed and thus we can use cached constant function.
744 if (interceptor_holder != lookup->holder()) {
745 stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
746 lookup->holder(), scratch1,
747 scratch2, scratch3, name, depth2, miss);
748 } else {
749 // CheckPrototypes has a side effect of fetching a 'holder'
750 // for API (object which is instanceof for the signature). It's
751 // safe to omit it here, as if present, it should be fetched
752 // by the previous CheckPrototypes.
753 ASSERT(depth2 == kInvalidProtoDepth);
754 }
755
756 // Invoke function.
757 if (can_do_fast_api_call) {
758 MaybeObject* result = GenerateFastApiDirectCall(masm,
759 optimization,
760 arguments_.immediate());
761 if (result->IsFailure()) return result;
762 } else {
763 __ InvokeFunction(optimization.constant_function(), arguments_,
764 JUMP_FUNCTION);
765 }
766
767 // Deferred code for fast API call case---clean preallocated space.
768 if (can_do_fast_api_call) {
769 __ bind(&miss_cleanup);
770 FreeSpaceForFastApiCall(masm);
771 __ Branch(miss_label);
772 }
773
774 // Invoke a regular function.
775 __ bind(&regular_invoke);
776 if (can_do_fast_api_call) {
777 FreeSpaceForFastApiCall(masm);
778 }
779
780 return masm->isolate()->heap()->undefined_value();
162 } 781 }
163 782
164 void CompileRegular(MacroAssembler* masm, 783 void CompileRegular(MacroAssembler* masm,
165 JSObject* object, 784 JSObject* object,
166 Register receiver, 785 Register receiver,
167 Register scratch1, 786 Register scratch1,
168 Register scratch2, 787 Register scratch2,
169 Register scratch3, 788 Register scratch3,
170 String* name, 789 String* name,
171 JSObject* interceptor_holder, 790 JSObject* interceptor_holder,
172 Label* miss_label) { 791 Label* miss_label) {
173 UNIMPLEMENTED_MIPS(); 792 Register holder =
793 stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
794 scratch1, scratch2, scratch3, name,
795 miss_label);
796
797 // Call a runtime function to load the interceptor property.
798 __ EnterInternalFrame();
799 // Save the name_ register across the call.
800 __ push(name_);
801
802 PushInterceptorArguments(masm,
803 receiver,
804 holder,
805 name_,
806 interceptor_holder);
807
808 __ CallExternalReference(
809 ExternalReference(
810 IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
811 masm->isolate()),
812 5);
813
814 // Restore the name_ register.
815 __ pop(name_);
816 __ LeaveInternalFrame();
174 } 817 }
175 818
176 void LoadWithInterceptor(MacroAssembler* masm, 819 void LoadWithInterceptor(MacroAssembler* masm,
177 Register receiver, 820 Register receiver,
178 Register holder, 821 Register holder,
179 JSObject* holder_obj, 822 JSObject* holder_obj,
180 Register scratch, 823 Register scratch,
181 Label* interceptor_succeeded) { 824 Label* interceptor_succeeded) {
182 UNIMPLEMENTED_MIPS(); 825 __ EnterInternalFrame();
826
827 __ Push(holder, name_);
828
829 CompileCallLoadPropertyWithInterceptor(masm,
830 receiver,
831 holder,
832 name_,
833 holder_obj);
834
835 __ pop(name_); // Restore the name.
836 __ pop(receiver); // Restore the holder.
837 __ LeaveInternalFrame();
838
839 // If interceptor returns no-result sentinel, call the constant function.
840 __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
841 __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
183 } 842 }
184 843
185 StubCompiler* stub_compiler_; 844 StubCompiler* stub_compiler_;
186 const ParameterCount& arguments_; 845 const ParameterCount& arguments_;
187 Register name_; 846 Register name_;
188 }; 847 };
189 848
190 849
850
851 // Generate code to check that a global property cell is empty. Create
852 // the property cell at compilation time if no cell exists for the
853 // property.
854 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
855 MacroAssembler* masm,
856 GlobalObject* global,
857 String* name,
858 Register scratch,
859 Label* miss) {
860 Object* probe;
861 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
862 if (!maybe_probe->ToObject(&probe)) return maybe_probe;
863 }
864 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
865 ASSERT(cell->value()->IsTheHole());
866 __ li(scratch, Operand(Handle<Object>(cell)));
867 __ lw(scratch,
868 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
869 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
870 __ Branch(miss, ne, scratch, Operand(at));
871 return cell;
872 }
873
874
875 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
876 // from object to (but not including) holder.
877 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
878 MacroAssembler* masm,
879 JSObject* object,
880 JSObject* holder,
881 String* name,
882 Register scratch,
883 Label* miss) {
884 JSObject* current = object;
885 while (current != holder) {
886 if (current->IsGlobalObject()) {
887 // Returns a cell or a failure.
888 MaybeObject* result = GenerateCheckPropertyCell(
889 masm,
890 GlobalObject::cast(current),
891 name,
892 scratch,
893 miss);
894 if (result->IsFailure()) return result;
895 }
896 ASSERT(current->IsJSObject());
897 current = JSObject::cast(current->GetPrototype());
898 }
899 return NULL;
900 }
901
902
903 // Convert and store int passed in register ival to IEEE 754 single precision
904 // floating point value at memory location (dst + 4 * wordoffset)
905 // If FPU is available use it for conversion.
906 static void StoreIntAsFloat(MacroAssembler* masm,
907 Register dst,
908 Register wordoffset,
909 Register ival,
910 Register fval,
911 Register scratch1,
912 Register scratch2) {
913 if (CpuFeatures::IsSupported(FPU)) {
914 CpuFeatures::Scope scope(FPU);
915 __ mtc1(ival, f0);
916 __ cvt_s_w(f0, f0);
917 __ sll(scratch1, wordoffset, 2);
918 __ addu(scratch1, dst, scratch1);
919 __ swc1(f0, MemOperand(scratch1, 0));
920 } else {
921 // FPU is not available, do manual conversions.
922
923 Label not_special, done;
924 // Move sign bit from source to destination. This works because the sign
925 // bit in the exponent word of the double has the same position and polarity
926 // as the 2's complement sign bit in a Smi.
927 ASSERT(kBinary32SignMask == 0x80000000u);
928
929 __ And(fval, ival, Operand(kBinary32SignMask));
930 // Negate value if it is negative.
931 __ subu(scratch1, zero_reg, ival);
932 __ movn(ival, scratch1, fval);
933
934 // We have -1, 0 or 1, which we treat specially. Register ival contains
935 // absolute value: it is either equal to 1 (special case of -1 and 1),
936 // greater than 1 (not a special case) or less than 1 (special case of 0).
937 __ Branch(&not_special, gt, ival, Operand(1));
938
939 // For 1 or -1 we need to or in the 0 exponent (biased).
940 static const uint32_t exponent_word_for_1 =
941 kBinary32ExponentBias << kBinary32ExponentShift;
942
943 __ Xor(scratch1, ival, Operand(1));
944 __ li(scratch2, exponent_word_for_1);
945 __ or_(scratch2, fval, scratch2);
946 __ movz(fval, scratch2, scratch1); // Only if ival is equal to 1.
947 __ Branch(&done);
948
949 __ bind(&not_special);
950 // Count leading zeros.
951 // Gets the wrong answer for 0, but we already checked for that case above.
952 Register zeros = scratch2;
953 __ clz(zeros, ival);
954
955 // Compute exponent and or it into the exponent register.
956 __ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias);
957 __ subu(scratch1, scratch1, zeros);
958
959 __ sll(scratch1, scratch1, kBinary32ExponentShift);
960 __ or_(fval, fval, scratch1);
961
962 // Shift up the source chopping the top bit off.
963 __ Addu(zeros, zeros, Operand(1));
964 // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
965 __ sllv(ival, ival, zeros);
966 // And the top (top 20 bits).
967 __ srl(scratch1, ival, kBitsPerInt - kBinary32MantissaBits);
968 __ or_(fval, fval, scratch1);
969
970 __ bind(&done);
971
972 __ sll(scratch1, wordoffset, 2);
973 __ addu(scratch1, dst, scratch1);
974 __ sw(fval, MemOperand(scratch1, 0));
975 }
976 }
977
978
979 // Convert unsigned integer with specified number of leading zeroes in binary
980 // representation to IEEE 754 double.
981 // Integer to convert is passed in register hiword.
982 // Resulting double is returned in registers hiword:loword.
983 // This functions does not work correctly for 0.
984 static void GenerateUInt2Double(MacroAssembler* masm,
985 Register hiword,
986 Register loword,
987 Register scratch,
988 int leading_zeroes) {
989 const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
990 const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;
991
992 const int mantissa_shift_for_hi_word =
993 meaningful_bits - HeapNumber::kMantissaBitsInTopWord;
994
995 const int mantissa_shift_for_lo_word =
996 kBitsPerInt - mantissa_shift_for_hi_word;
997
998 __ li(scratch, biased_exponent << HeapNumber::kExponentShift);
999 if (mantissa_shift_for_hi_word > 0) {
1000 __ sll(loword, hiword, mantissa_shift_for_lo_word);
1001 __ srl(hiword, hiword, mantissa_shift_for_hi_word);
1002 __ or_(hiword, scratch, hiword);
1003 } else {
1004 __ mov(loword, zero_reg);
1005 __ sll(hiword, hiword, mantissa_shift_for_hi_word);
1006 __ or_(hiword, scratch, hiword);
1007 }
1008
1009 // If least significant bit of biased exponent was not 1 it was corrupted
1010 // by most significant bit of mantissa so we should fix that.
1011 if (!(biased_exponent & 1)) {
1012 __ li(scratch, 1 << HeapNumber::kExponentShift);
1013 __ nor(scratch, scratch, scratch);
1014 __ and_(hiword, hiword, scratch);
1015 }
1016 }
1017
1018
191 #undef __ 1019 #undef __
192 #define __ ACCESS_MASM(masm()) 1020 #define __ ACCESS_MASM(masm())
193 1021
194 1022
195 Register StubCompiler::CheckPrototypes(JSObject* object, 1023 Register StubCompiler::CheckPrototypes(JSObject* object,
196 Register object_reg, 1024 Register object_reg,
197 JSObject* holder, 1025 JSObject* holder,
198 Register holder_reg, 1026 Register holder_reg,
199 Register scratch1, 1027 Register scratch1,
200 Register scratch2, 1028 Register scratch2,
201 String* name, 1029 String* name,
202 int save_at_depth, 1030 int save_at_depth,
203 Label* miss) { 1031 Label* miss) {
204 UNIMPLEMENTED_MIPS(); 1032 // Make sure there's no overlap between holder and object registers.
205 return no_reg; 1033 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1034 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1035 && !scratch2.is(scratch1));
1036
1037 // Keep track of the current object in register reg.
1038 Register reg = object_reg;
1039 int depth = 0;
1040
1041 if (save_at_depth == depth) {
1042 __ sw(reg, MemOperand(sp));
1043 }
1044
1045 // Check the maps in the prototype chain.
1046 // Traverse the prototype chain from the object and do map checks.
1047 JSObject* current = object;
1048 while (current != holder) {
1049 depth++;
1050
1051 // Only global objects and objects that do not require access
1052 // checks are allowed in stubs.
1053 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
1054
1055 ASSERT(current->GetPrototype()->IsJSObject());
1056 JSObject* prototype = JSObject::cast(current->GetPrototype());
1057 if (!current->HasFastProperties() &&
1058 !current->IsJSGlobalObject() &&
1059 !current->IsJSGlobalProxy()) {
1060 if (!name->IsSymbol()) {
1061 MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
1062 Object* lookup_result = NULL; // Initialization to please compiler.
1063 if (!maybe_lookup_result->ToObject(&lookup_result)) {
1064 set_failure(Failure::cast(maybe_lookup_result));
1065 return reg;
1066 }
1067 name = String::cast(lookup_result);
1068 }
1069 ASSERT(current->property_dictionary()->FindEntry(name) ==
1070 StringDictionary::kNotFound);
1071
1072 MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
1073 miss,
1074 reg,
1075 name,
1076 scratch1,
1077 scratch2);
1078 if (negative_lookup->IsFailure()) {
1079 set_failure(Failure::cast(negative_lookup));
1080 return reg;
1081 }
1082
1083 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1084 reg = holder_reg; // From now the object is in holder_reg.
1085 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1086 } else if (heap()->InNewSpace(prototype)) {
1087 // Get the map of the current object.
1088 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1089
1090 // Branch on the result of the map check.
1091 __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
1092
1093 // Check access rights to the global object. This has to happen
1094 // after the map check so that we know that the object is
1095 // actually a global object.
1096 if (current->IsJSGlobalProxy()) {
1097 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1098 // Restore scratch register to be the map of the object. In the
1099 // new space case below, we load the prototype from the map in
1100 // the scratch register.
1101 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1102 }
1103
1104 reg = holder_reg; // From now the object is in holder_reg.
1105 // The prototype is in new space; we cannot store a reference
1106 // to it in the code. Load it from the map.
1107 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1108 } else {
1109 // Check the map of the current object.
1110 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1111 // Branch on the result of the map check.
1112 __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
1113 // Check access rights to the global object. This has to happen
1114 // after the map check so that we know that the object is
1115 // actually a global object.
1116 if (current->IsJSGlobalProxy()) {
1117 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1118 }
1119 // The prototype is in old space; load it directly.
1120 reg = holder_reg; // From now the object is in holder_reg.
1121 __ li(reg, Operand(Handle<JSObject>(prototype)));
1122 }
1123
1124 if (save_at_depth == depth) {
1125 __ sw(reg, MemOperand(sp));
1126 }
1127
1128 // Go to the next object in the prototype chain.
1129 current = prototype;
1130 }
1131
1132 // Check the holder map.
1133 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1134 __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
1135
1136 // Log the check depth.
1137 LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
1138 // Perform security check for access to the global object.
1139 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1140 if (holder->IsJSGlobalProxy()) {
1141 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1142 };
1143
1144 // If we've skipped any global objects, it's not enough to verify
1145 // that their maps haven't changed. We also need to check that the
1146 // property cell for the property is still empty.
1147
1148 MaybeObject* result = GenerateCheckPropertyCells(masm(),
1149 object,
1150 holder,
1151 name,
1152 scratch1,
1153 miss);
1154 if (result->IsFailure()) set_failure(Failure::cast(result));
1155
1156 // Return the register containing the holder.
1157 return reg;
206 } 1158 }
207 1159
208 1160
209 void StubCompiler::GenerateLoadField(JSObject* object, 1161 void StubCompiler::GenerateLoadField(JSObject* object,
210 JSObject* holder, 1162 JSObject* holder,
211 Register receiver, 1163 Register receiver,
212 Register scratch1, 1164 Register scratch1,
213 Register scratch2, 1165 Register scratch2,
214 Register scratch3, 1166 Register scratch3,
215 int index, 1167 int index,
216 String* name, 1168 String* name,
217 Label* miss) { 1169 Label* miss) {
218 UNIMPLEMENTED_MIPS(); 1170 // Check that the receiver isn't a smi.
1171 __ And(scratch1, receiver, Operand(kSmiTagMask));
1172 __ Branch(miss, eq, scratch1, Operand(zero_reg));
1173
1174 // Check that the maps haven't changed.
1175 Register reg =
1176 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
1177 name, miss);
1178 GenerateFastPropertyLoad(masm(), v0, reg, holder, index);
1179 __ Ret();
219 } 1180 }
220 1181
221 1182
222 void StubCompiler::GenerateLoadConstant(JSObject* object, 1183 void StubCompiler::GenerateLoadConstant(JSObject* object,
223 JSObject* holder, 1184 JSObject* holder,
224 Register receiver, 1185 Register receiver,
225 Register scratch1, 1186 Register scratch1,
226 Register scratch2, 1187 Register scratch2,
227 Register scratch3, 1188 Register scratch3,
228 Object* value, 1189 Object* value,
229 String* name, 1190 String* name,
230 Label* miss) { 1191 Label* miss) {
231 UNIMPLEMENTED_MIPS(); 1192 // Check that the receiver isn't a smi.
1193 __ JumpIfSmi(receiver, miss, scratch1);
1194
1195 // Check that the maps haven't changed.
1196 Register reg =
1197 CheckPrototypes(object, receiver, holder,
1198 scratch1, scratch2, scratch3, name, miss);
1199
1200 // Return the constant value.
1201 __ li(v0, Operand(Handle<Object>(value)));
1202 __ Ret();
232 } 1203 }
233 1204
234 1205
235 MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object, 1206 MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
236 JSObject* holder, 1207 JSObject* holder,
237 Register receiver, 1208 Register receiver,
238 Register name_reg, 1209 Register name_reg,
239 Register scratch1, 1210 Register scratch1,
240 Register scratch2, 1211 Register scratch2,
241 Register scratch3, 1212 Register scratch3,
242 AccessorInfo* callback, 1213 AccessorInfo* callback,
243 String* name, 1214 String* name,
244 Label* miss) { 1215 Label* miss) {
245 UNIMPLEMENTED_MIPS(); 1216 // Check that the receiver isn't a smi.
246 return NULL; 1217 __ JumpIfSmi(receiver, miss, scratch1);
1218
1219 // Check that the maps haven't changed.
1220 Register reg =
1221 CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
1222 name, miss);
1223
1224 // Build AccessorInfo::args_ list on the stack and push property name below
1225 // the exit frame to make GC aware of them and store pointers to them.
1226 __ push(receiver);
1227 __ mov(scratch2, sp); // scratch2 = AccessorInfo::args_
1228 Handle<AccessorInfo> callback_handle(callback);
1229 if (heap()->InNewSpace(callback_handle->data())) {
1230 __ li(scratch3, callback_handle);
1231 __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
1232 } else {
1233 __ li(scratch3, Handle<Object>(callback_handle->data()));
1234 }
1235 __ Push(reg, scratch3, name_reg);
1236 __ mov(a2, scratch2); // Saved in case scratch2 == a1.
1237 __ mov(a1, sp); // a1 (first argument - see note below) = Handle<String>
1238
1239 Address getter_address = v8::ToCData<Address>(callback->getter());
1240 ApiFunction fun(getter_address);
1241
1242 // NOTE: the O32 abi requires a0 to hold a special pointer when returning a
1243 // struct from the function (which is currently the case). This means we pass
1244 // the arguments in a1-a2 instead of a0-a1. TryCallApiFunctionAndReturn
1245 // will handle setting up a0.
1246
1247 const int kApiStackSpace = 1;
1248
1249 __ EnterExitFrame(false, kApiStackSpace);
1250 // Create AccessorInfo instance on the stack above the exit frame with
1251 // scratch2 (internal::Object **args_) as the data.
1252 __ sw(a2, MemOperand(sp, kPointerSize));
1253 // a2 (second argument - see note above) = AccessorInfo&
1254 __ Addu(a2, sp, kPointerSize);
1255
1256 // Emitting a stub call may try to allocate (if the code is not
1257 // already generated). Do not allow the assembler to perform a
1258 // garbage collection but instead return the allocation failure
1259 // object.
1260 ExternalReference ref =
1261 ExternalReference(&fun,
1262 ExternalReference::DIRECT_GETTER_CALL,
1263 masm()->isolate());
1264 // 4 args - will be freed later by LeaveExitFrame.
1265 return masm()->TryCallApiFunctionAndReturn(ref, 4);
247 } 1266 }
248 1267
249 1268
250 void StubCompiler::GenerateLoadInterceptor(JSObject* object, 1269 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
251 JSObject* interceptor_holder, 1270 JSObject* interceptor_holder,
252 LookupResult* lookup, 1271 LookupResult* lookup,
253 Register receiver, 1272 Register receiver,
254 Register name_reg, 1273 Register name_reg,
255 Register scratch1, 1274 Register scratch1,
256 Register scratch2, 1275 Register scratch2,
257 Register scratch3, 1276 Register scratch3,
258 String* name, 1277 String* name,
259 Label* miss) { 1278 Label* miss) {
260 UNIMPLEMENTED_MIPS(); 1279 ASSERT(interceptor_holder->HasNamedInterceptor());
1280 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1281
1282 // Check that the receiver isn't a smi.
1283 __ JumpIfSmi(receiver, miss);
1284
1285 // So far the most popular follow ups for interceptor loads are FIELD
1286 // and CALLBACKS, so inline only them, other cases may be added
1287 // later.
1288 bool compile_followup_inline = false;
1289 if (lookup->IsProperty() && lookup->IsCacheable()) {
1290 if (lookup->type() == FIELD) {
1291 compile_followup_inline = true;
1292 } else if (lookup->type() == CALLBACKS &&
1293 lookup->GetCallbackObject()->IsAccessorInfo() &&
1294 AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
1295 compile_followup_inline = true;
1296 }
1297 }
1298
1299 if (compile_followup_inline) {
1300 // Compile the interceptor call, followed by inline code to load the
1301 // property from further up the prototype chain if the call fails.
1302 // Check that the maps haven't changed.
1303 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1304 scratch1, scratch2, scratch3,
1305 name, miss);
1306 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1307
1308 // Save necessary data before invoking an interceptor.
1309 // Requires a frame to make GC aware of pushed pointers.
1310 __ EnterInternalFrame();
1311
1312 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1313 // CALLBACKS case needs a receiver to be passed into C++ callback.
1314 __ Push(receiver, holder_reg, name_reg);
1315 } else {
1316 __ Push(holder_reg, name_reg);
1317 }
1318
1319 // Invoke an interceptor. Note: map checks from receiver to
1320 // interceptor's holder has been compiled before (see a caller
1321 // of this method).
1322 CompileCallLoadPropertyWithInterceptor(masm(),
1323 receiver,
1324 holder_reg,
1325 name_reg,
1326 interceptor_holder);
1327
1328 // Check if interceptor provided a value for property. If it's
1329 // the case, return immediately.
1330 Label interceptor_failed;
1331 __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1332 __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
1333 __ LeaveInternalFrame();
1334 __ Ret();
1335
1336 __ bind(&interceptor_failed);
1337 __ pop(name_reg);
1338 __ pop(holder_reg);
1339 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1340 __ pop(receiver);
1341 }
1342
1343 __ LeaveInternalFrame();
1344
1345 // Check that the maps from interceptor's holder to lookup's holder
1346 // haven't changed. And load lookup's holder into |holder| register.
1347 if (interceptor_holder != lookup->holder()) {
1348 holder_reg = CheckPrototypes(interceptor_holder,
1349 holder_reg,
1350 lookup->holder(),
1351 scratch1,
1352 scratch2,
1353 scratch3,
1354 name,
1355 miss);
1356 }
1357
1358 if (lookup->type() == FIELD) {
1359 // We found FIELD property in prototype chain of interceptor's holder.
1360 // Retrieve a field from field's holder.
1361 GenerateFastPropertyLoad(masm(), v0, holder_reg,
1362 lookup->holder(), lookup->GetFieldIndex());
1363 __ Ret();
1364 } else {
1365 // We found CALLBACKS property in prototype chain of interceptor's
1366 // holder.
1367 ASSERT(lookup->type() == CALLBACKS);
1368 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
1369 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1370 ASSERT(callback != NULL);
1371 ASSERT(callback->getter() != NULL);
1372
1373 // Tail call to runtime.
1374 // Important invariant in CALLBACKS case: the code above must be
1375 // structured to never clobber |receiver| register.
1376 __ li(scratch2, Handle<AccessorInfo>(callback));
1377 // holder_reg is either receiver or scratch1.
1378 if (!receiver.is(holder_reg)) {
1379 ASSERT(scratch1.is(holder_reg));
1380 __ Push(receiver, holder_reg);
1381 __ lw(scratch3,
1382 FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1383 __ Push(scratch3, scratch2, name_reg);
1384 } else {
1385 __ push(receiver);
1386 __ lw(scratch3,
1387 FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1388 __ Push(holder_reg, scratch3, scratch2, name_reg);
1389 }
1390
1391 ExternalReference ref =
1392 ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1393 masm()->isolate());
1394 __ TailCallExternalReference(ref, 5, 1);
1395 }
1396 } else { // !compile_followup_inline
1397 // Call the runtime system to load the interceptor.
1398 // Check that the maps haven't changed.
1399 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1400 scratch1, scratch2, scratch3,
1401 name, miss);
1402 PushInterceptorArguments(masm(), receiver, holder_reg,
1403 name_reg, interceptor_holder);
1404
1405 ExternalReference ref = ExternalReference(
1406 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
1407 __ TailCallExternalReference(ref, 5, 1);
1408 }
261 } 1409 }
262 1410
263 1411
264 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { 1412 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
265 UNIMPLEMENTED_MIPS(); 1413 if (kind_ == Code::KEYED_CALL_IC) {
1414 __ Branch(miss, ne, a2, Operand(Handle<String>(name)));
1415 }
266 } 1416 }
267 1417
268 1418
269 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object, 1419 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
270 JSObject* holder, 1420 JSObject* holder,
271 String* name, 1421 String* name,
272 Label* miss) { 1422 Label* miss) {
273 UNIMPLEMENTED_MIPS(); 1423 ASSERT(holder->IsGlobalObject());
1424
1425 // Get the number of arguments.
1426 const int argc = arguments().immediate();
1427
1428 // Get the receiver from the stack.
1429 __ lw(a0, MemOperand(sp, argc * kPointerSize));
1430
1431 // If the object is the holder then we know that it's a global
1432 // object which can only happen for contextual calls. In this case,
1433 // the receiver cannot be a smi.
1434 if (object != holder) {
1435 __ JumpIfSmi(a0, miss);
1436 }
1437
1438 // Check that the maps haven't changed.
1439 CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
274 } 1440 }
275 1441
276 1442
277 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, 1443 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
278 JSFunction* function, 1444 JSFunction* function,
279 Label* miss) { 1445 Label* miss) {
280 UNIMPLEMENTED_MIPS(); 1446 // Get the value from the cell.
1447 __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell)));
1448 __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
1449
1450 // Check that the cell contains the same function.
1451 if (heap()->InNewSpace(function)) {
1452 // We can't embed a pointer to a function in new space so we have
1453 // to verify that the shared function info is unchanged. This has
1454 // the nice side effect that multiple closures based on the same
1455 // function can all use this call IC. Before we load through the
1456 // function, we have to verify that it still is a function.
1457 __ JumpIfSmi(a1, miss);
1458 __ GetObjectType(a1, a3, a3);
1459 __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
1460
1461 // Check the shared function info. Make sure it hasn't changed.
1462 __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1463 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1464 __ Branch(miss, ne, t0, Operand(a3));
1465 } else {
1466 __ Branch(miss, ne, a1, Operand(Handle<JSFunction>(function)));
1467 }
281 } 1468 }
282 1469
283 1470
284 MaybeObject* CallStubCompiler::GenerateMissBranch() { 1471 MaybeObject* CallStubCompiler::GenerateMissBranch() {
285 UNIMPLEMENTED_MIPS(); 1472 MaybeObject* maybe_obj = masm()->isolate()->stub_cache()->ComputeCallMiss(
286 return NULL; 1473 arguments().immediate(), kind_);
1474 Object* obj;
1475 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1476 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1477 return obj;
287 } 1478 }
288 1479
289 1480
290 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, 1481 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
291 JSObject* holder, 1482 JSObject* holder,
292 int index, 1483 int index,
293 String* name) { 1484 String* name) {
294 UNIMPLEMENTED_MIPS(); 1485 // ----------- S t a t e -------------
295 return NULL; 1486 // -- a2 : name
1487 // -- ra : return address
1488 // -----------------------------------
1489 Label miss;
1490
1491 GenerateNameCheck(name, &miss);
1492
1493 const int argc = arguments().immediate();
1494
1495 // Get the receiver of the function from the stack into a0.
1496 __ lw(a0, MemOperand(sp, argc * kPointerSize));
1497 // Check that the receiver isn't a smi.
1498 __ JumpIfSmi(a0, &miss, t0);
1499
1500 // Do the right check and compute the holder register.
1501 Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
1502 GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
1503
1504 GenerateCallFunction(masm(), object, arguments(), &miss);
1505
1506 // Handle call cache miss.
1507 __ bind(&miss);
1508 MaybeObject* maybe_result = GenerateMissBranch();
1509 if (maybe_result->IsFailure()) return maybe_result;
1510
1511 // Return the generated code.
1512 return GetCode(FIELD, name);
296 } 1513 }
297 1514
298 1515
299 MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, 1516 MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
300 JSObject* holder, 1517 JSObject* holder,
301 JSGlobalPropertyCell* cell, 1518 JSGlobalPropertyCell* cell,
302 JSFunction* function, 1519 JSFunction* function,
303 String* name) { 1520 String* name) {
304 UNIMPLEMENTED_MIPS(); 1521 // ----------- S t a t e -------------
305 return NULL; 1522 // -- a2 : name
1523 // -- ra : return address
1524 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1525 // -- ...
1526 // -- sp[argc * 4] : receiver
1527 // -----------------------------------
1528
1529 // If object is not an array, bail out to regular call.
1530 if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1531
1532 Label miss;
1533
1534 GenerateNameCheck(name, &miss);
1535
1536 Register receiver = a1;
1537
1538 // Get the receiver from the stack.
1539 const int argc = arguments().immediate();
1540 __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1541
1542 // Check that the receiver isn't a smi.
1543 __ JumpIfSmi(receiver, &miss);
1544
1545 // Check that the maps haven't changed.
1546 CheckPrototypes(JSObject::cast(object), receiver,
1547 holder, a3, v0, t0, name, &miss);
1548
1549 if (argc == 0) {
1550 // Nothing to do, just return the length.
1551 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1552 __ Drop(argc + 1);
1553 __ Ret();
1554 } else {
1555 Label call_builtin;
1556
1557 Register elements = a3;
1558 Register end_elements = t1;
1559
1560 // Get the elements array of the object.
1561 __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1562
1563 // Check that the elements are in fast mode and writable.
1564 __ CheckMap(elements, v0,
1565 Heap::kFixedArrayMapRootIndex, &call_builtin, true);
1566
1567 if (argc == 1) { // Otherwise fall through to call the builtin.
1568 Label exit, with_write_barrier, attempt_to_grow_elements;
1569
1570 // Get the array's length into v0 and calculate new length.
1571 __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1572 STATIC_ASSERT(kSmiTagSize == 1);
1573 STATIC_ASSERT(kSmiTag == 0);
1574 __ Addu(v0, v0, Operand(Smi::FromInt(argc)));
1575
1576 // Get the element's length.
1577 __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1578
1579 // Check if we could survive without allocation.
1580 __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
1581
1582 // Save new length.
1583 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1584
1585 // Push the element.
1586 __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1587 // We may need a register containing the address end_elements below,
1588 // so write back the value in end_elements.
1589 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1590 __ Addu(end_elements, elements, end_elements);
1591 const int kEndElementsOffset =
1592 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1593 __ sw(t0, MemOperand(end_elements, kEndElementsOffset));
1594 __ Addu(end_elements, end_elements, kPointerSize);
1595
1596 // Check for a smi.
1597 __ JumpIfNotSmi(t0, &with_write_barrier);
1598 __ bind(&exit);
1599 __ Drop(argc + 1);
1600 __ Ret();
1601
1602 __ bind(&with_write_barrier);
1603 __ InNewSpace(elements, t0, eq, &exit);
1604 __ RecordWriteHelper(elements, end_elements, t0);
1605 __ Drop(argc + 1);
1606 __ Ret();
1607
1608 __ bind(&attempt_to_grow_elements);
1609 // v0: array's length + 1.
1610 // t0: elements' length.
1611
1612 if (!FLAG_inline_new) {
1613 __ Branch(&call_builtin);
1614 }
1615
1616 ExternalReference new_space_allocation_top =
1617 ExternalReference::new_space_allocation_top_address(
1618 masm()->isolate());
1619 ExternalReference new_space_allocation_limit =
1620 ExternalReference::new_space_allocation_limit_address(
1621 masm()->isolate());
1622
1623 const int kAllocationDelta = 4;
1624 // Load top and check if it is the end of elements.
1625 __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1626 __ Addu(end_elements, elements, end_elements);
1627 __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1628 __ li(t3, Operand(new_space_allocation_top));
1629 __ lw(t2, MemOperand(t3));
1630 __ Branch(&call_builtin, ne, end_elements, Operand(t2));
1631
1632 __ li(t5, Operand(new_space_allocation_limit));
1633 __ lw(t5, MemOperand(t5));
1634 __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
1635 __ Branch(&call_builtin, hi, t2, Operand(t5));
1636
1637 // We fit and could grow elements.
1638 // Update new_space_allocation_top.
1639 __ sw(t2, MemOperand(t3));
1640 // Push the argument.
1641 __ lw(t2, MemOperand(sp, (argc - 1) * kPointerSize));
1642 __ sw(t2, MemOperand(end_elements));
1643 // Fill the rest with holes.
1644 __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1645 for (int i = 1; i < kAllocationDelta; i++) {
1646 __ sw(t2, MemOperand(end_elements, i * kPointerSize));
1647 }
1648
1649 // Update elements' and array's sizes.
1650 __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1651 __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1652 __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1653
1654 // Elements are in new space, so write barrier is not required.
1655 __ Drop(argc + 1);
1656 __ Ret();
1657 }
1658 __ bind(&call_builtin);
1659 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1660 masm()->isolate()),
1661 argc + 1,
1662 1);
1663 }
1664
1665 // Handle call cache miss.
1666 __ bind(&miss);
1667 MaybeObject* maybe_result = GenerateMissBranch();
1668 if (maybe_result->IsFailure()) return maybe_result;
1669
1670 // Return the generated code.
1671 return GetCode(function);
306 } 1672 }
307 1673
308 1674
MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
                                                   JSObject* holder,
                                                   JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
                                                   String* name) {
  // Custom call IC stub for Array.prototype.pop on fast-elements arrays.
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();

  Label miss, return_undefined, call_builtin;

  Register receiver = a1;
  Register elements = a3;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ lw(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object),
                  receiver, holder, elements, t0, v0, name, &miss);

  // Get the elements array of the object.
  __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CheckMap(elements, v0, Heap::kFixedArrayMapRootIndex, &call_builtin, true);

  // Get the array's length into t0 and calculate new length.
  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Subu(t0, t0, Operand(Smi::FromInt(1)));
  // Popping from an empty array yields undefined.
  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));

  // Get the last element.
  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  // We can't address the last element in one operation. Compute the more
  // expensive shift first, and use an offset later on.
  __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(elements, elements, t1);
  __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  // If the last element is the hole, fall back to the builtin.
  __ Branch(&call_builtin, eq, v0, Operand(t2));

  // Set the array's length.
  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));

  // Fill with the hole.
  __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&return_undefined);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
                                                 masm()->isolate()),
                               argc + 1,
                               1);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
317 1758
318 1759
MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // Custom call IC stub for String.prototype.charCodeAt.
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return heap()->undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;

  Label* index_out_of_range_label = &index_out_of_range;

  // The default string stub treats an out-of-range index as a plain miss.
  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            v0,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), v0, holder,
                  a1, a3, t0, name, &miss);

  Register receiver = a1;
  Register index = t1;
  Register scratch = a3;
  Register result = v0;
  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    // charCodeAt() with no argument uses index undefined (treated as 0).
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator char_code_at_generator(receiver,
                                                   index,
                                                   scratch,
                                                   result,
                                                   &miss,  // When not a string.
                                                   &miss,  // When not a number.
                                                   index_out_of_range_label,
                                                   STRING_INDEX_IS_NUMBER);
  char_code_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_code_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    // Out-of-range charCodeAt returns NaN.
    __ LoadRoot(v0, Heap::kNanValueRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in a2.
  __ li(a2, Handle<String>(name));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
328 1843
329 1844
MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // Custom call IC stub for String.prototype.charAt.
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return heap()->undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  // The default string stub treats an out-of-range index as a plain miss.
  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            v0,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), v0, holder,
                  a1, a3, t0, name, &miss);

  Register receiver = v0;
  Register index = t1;
  Register scratch1 = a1;
  Register scratch2 = a3;
  Register result = v0;
  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    // charAt() with no argument uses index undefined (treated as 0).
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          index_out_of_range_label,
                                          STRING_INDEX_IS_NUMBER);
  char_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    // Out-of-range charAt returns the empty string.
    __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore function name in a2.
  __ li(a2, Handle<String>(name));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
339 1929
340 1930
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // Custom call IC stub for String.fromCharCode with exactly one argument.
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: load and type-check the receiver from the stack.
    __ lw(a1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(a1, &miss);

    CheckPrototypes(JSObject::cast(object), a1, holder, v0, a3, t0, name,
                    &miss);
  } else {
    // Global case: verify the cell still holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = a1;
  __ lw(code, MemOperand(sp, 0 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16.
  __ And(code, code, Operand(Smi::FromInt(0xffff)));

  StringCharFromCodeGenerator char_from_code_generator(code, v0);
  char_from_code_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // a2: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
350 2001
351 2002
MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // Custom call IC stub for Math.floor with exactly one argument.
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // This stub requires the FPU (floor.w.d); without it, bail out to the
  // regular call.
  if (!CpuFeatures::IsSupported(FPU))
    return heap()->undefined_value();
  CpuFeatures::Scope scope_fpu(FPU);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss, slow;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: load and type-check the receiver from the stack.
    __ lw(a1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(a1, &miss);

    CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
                    &miss);
  } else {
    // Global case: verify the cell still holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into v0.
  __ lw(v0, MemOperand(sp, 0 * kPointerSize));

  // If the argument is a smi, just return it (already an integer).
  STATIC_ASSERT(kSmiTag == 0);
  __ And(t0, v0, Operand(kSmiTagMask));
  __ Drop(argc + 1, eq, t0, Operand(zero_reg));
  __ Ret(eq, t0, Operand(zero_reg));

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, true);

  Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;

  // If fpu is enabled, we use the floor instruction.

  // Load the HeapNumber value.
  __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));

  // Backup FCSR.
  __ cfc1(a3, FCSR);
  // Clearing FCSR clears the exception mask with no side-effects.
  __ ctc1(zero_reg, FCSR);
  // Convert the argument to an integer.
  __ floor_w_d(f0, f0);

  // Start checking for special cases.
  // Get the argument exponent and clear the sign bit.
  __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
  __ And(t2, t1, Operand(~HeapNumber::kSignMask));
  __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);

  // Retrieve FCSR and check for fpu errors.
  __ cfc1(t5, FCSR);
  __ srl(t5, t5, kFCSRFlagShift);
  // Flag 1 marks an inaccurate but still good result so we ignore it.
  __ And(t5, t5, Operand(kFCSRFlagMask ^ 1));
  __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));

  // Check for NaN, Infinity, and -Infinity.
  // They are invariant through a Math.Floor call, so just
  // return the original argument.
  __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
        >> HeapNumber::kMantissaBitsInTopWord));
  __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
  // We had an overflow or underflow in the conversion. Check if we
  // have a big exponent.
  // If greater or equal, the argument is already round and in v0.
  __ Branch(&restore_fcsr_and_return, ge, t3,
      Operand(HeapNumber::kMantissaBits));
  __ Branch(&wont_fit_smi);

  __ bind(&no_fpu_error);
  // Move the result back to v0.
  __ mfc1(v0, f0);
  // Check if the result fits into a smi (needs a spare sign bit after tag).
  __ Addu(a1, v0, Operand(0x40000000));
  __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
  // Tag the result.
  STATIC_ASSERT(kSmiTag == 0);
  __ sll(v0, v0, kSmiTagSize);

  // Check for -0.
  __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
  // t1 already holds the HeapNumber exponent.
  __ And(t0, t1, Operand(HeapNumber::kSignMask));
  // If our HeapNumber is negative it was -0, so load its address and return.
  // Else v0 is loaded with 0, so we can also just return.
  __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
  __ lw(v0, MemOperand(sp, 0 * kPointerSize));

  __ bind(&restore_fcsr_and_return);
  // Restore FCSR and return.
  __ ctc1(a3, FCSR);

  __ Drop(argc + 1);
  __ Ret();

  __ bind(&wont_fit_smi);
  // Restore FCSR and fall to slow case.
  __ ctc1(a3, FCSR);

  __ bind(&slow);
  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // a2: function name.
  MaybeObject* obj = GenerateMissBranch();
  if (obj->IsFailure()) return obj;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
360 2137
361 2138
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // Custom call IC stub for Math.abs with exactly one argument.
  // ----------- S t a t e -------------
  //  -- a2                     : function name
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: load and type-check the receiver from the stack.
    __ lw(a1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(a1, &miss);

    CheckPrototypes(JSObject::cast(object), a1, holder, v0, a3, t0, name,
                    &miss);
  } else {
    // Global case: verify the cell still holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into v0.
  __ lw(v0, MemOperand(sp, 0 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(v0, &not_smi);

  // Do bitwise not or do nothing depending on the sign of the
  // argument (branch-free abs: x ^ (x >> 31) - (x >> 31)).
  __ sra(t0, v0, kBitsPerInt - 1);
  __ Xor(a1, v0, t0);

  // Add 1 or do nothing depending on the sign of the argument.
  __ Subu(v0, a1, t0);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ Branch(&slow, lt, v0, Operand(zero_reg));

  // Smi case done.
  __ Drop(argc + 1);
  __ Ret();

  // Check if the argument is a heap number and load its exponent and
  // sign.
  __ bind(&not_smi);
  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, true);
  __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ And(t0, a1, Operand(HeapNumber::kSignMask));
  __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
  __ Drop(argc + 1);
  __ Ret();

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
  __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
  __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ Drop(argc + 1);
  __ Ret();

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // a2: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
370 2239
371 2240
372 MaybeObject* CallStubCompiler::CompileFastApiCall( 2241 MaybeObject* CallStubCompiler::CompileFastApiCall(
373 const CallOptimization& optimization, 2242 const CallOptimization& optimization,
374 Object* object, 2243 Object* object,
375 JSObject* holder, 2244 JSObject* holder,
376 JSGlobalPropertyCell* cell, 2245 JSGlobalPropertyCell* cell,
377 JSFunction* function, 2246 JSFunction* function,
378 String* name) { 2247 String* name) {
379 UNIMPLEMENTED_MIPS(); 2248
380 return NULL; 2249 Isolate* isolate = masm()->isolate();
2250 Heap* heap = isolate->heap();
2251 Counters* counters = isolate->counters();
2252
2253 ASSERT(optimization.is_simple_api_call());
2254 // Bail out if object is a global object as we don't want to
2255 // repatch it to global receiver.
2256 if (object->IsGlobalObject()) return heap->undefined_value();
2257 if (cell != NULL) return heap->undefined_value();
2258 if (!object->IsJSObject()) return heap()->undefined_value();
2259 int depth = optimization.GetPrototypeDepthOfExpectedType(
2260 JSObject::cast(object), holder);
2261 if (depth == kInvalidProtoDepth) return heap->undefined_value();
2262
2263 Label miss, miss_before_stack_reserved;
2264
2265 GenerateNameCheck(name, &miss_before_stack_reserved);
2266
2267 // Get the receiver from the stack.
2268 const int argc = arguments().immediate();
2269 __ lw(a1, MemOperand(sp, argc * kPointerSize));
2270
2271 // Check that the receiver isn't a smi.
2272 __ JumpIfSmi(a1, &miss_before_stack_reserved);
2273
2274 __ IncrementCounter(counters->call_const(), 1, a0, a3);
2275 __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
2276
2277 ReserveSpaceForFastApiCall(masm(), a0);
2278
2279 // Check that the maps haven't changed and find a Holder as a side effect.
2280 CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2281 depth, &miss);
2282
2283 MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
2284 if (result->IsFailure()) return result;
2285
2286 __ bind(&miss);
2287 FreeSpaceForFastApiCall(masm());
2288
2289 __ bind(&miss_before_stack_reserved);
2290 MaybeObject* maybe_result = GenerateMissBranch();
2291 if (maybe_result->IsFailure()) return maybe_result;
2292
2293 // Return the generated code.
2294 return GetCode(function);
381 } 2295 }
382 2296
383 2297
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // Compiles a monomorphic call stub for a constant function, guarded by
  // the receiver check selected by `check`.
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // Undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  // (NUMBER_CHECK must accept smis, so it skips this guard.)
  if (check != NUMBER_CHECK) {
    __ And(t1, a1, Operand(kSmiTagMask));
    __ Branch(&miss, eq, t1, Operand(zero_reg));
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  SharedFunctionInfo* function_info = function->shared();
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(masm()->isolate()->counters()->call_const(),
          1, a0, a3);

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
                      &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
        __ sw(a3, MemOperand(sp, argc * kPointerSize));
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ GetObjectType(a1, a3, a3);
        __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
                        a1, t0, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ And(t1, a1, Operand(kSmiTagMask));
        __ Branch(&fast, eq, t1, Operand(zero_reg));
        __ GetObjectType(a1, a0, a0);
        __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
                        a1, t0, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ LoadRoot(t0, Heap::kTrueValueRootIndex);
        __ Branch(&fast, eq, a1, Operand(t0));
        __ LoadRoot(t0, Heap::kFalseValueRootIndex);
        __ Branch(&miss, ne, a1, Operand(t0));
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
                        a1, t0, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);

  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
392 2428
393 2429
MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // Compiles a call stub that loads the callee through an interceptor,
  // then invokes whatever function the interceptor produced.
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), a2);
  MaybeObject* result = compiler.Compile(masm(),
                                         object,
                                         holder,
                                         name,
                                         &lookup,
                                         a1,
                                         a3,
                                         t0,
                                         a0,
                                         &miss);
  if (result->IsFailure()) {
    return result;
  }

  // Move returned value, the function to call, to a1.
  __ mov(a1, v0);
  // Restore receiver.
  __ lw(a0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
400 2481
401 2482
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 JSFunction* function,
                                                 String* name) {
  // Compiles a call stub for a function stored in a global property cell.
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // Undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);
  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
    __ sw(a3, MemOperand(sp, argc * kPointerSize));
  }

  // Setup the context (function already in a1).
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // Crankshaft support is not yet implemented for the MIPS port.
    UNIMPLEMENTED_MIPS();
  } else {
    __ InvokeCode(code, expected, arguments(),
                  RelocInfo::CODE_TARGET, JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(NORMAL, name);
}
410 2544
411 2545
412 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object, 2546 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
413 int index, 2547 int index,
414 Map* transition, 2548 Map* transition,
415 String* name) { 2549 String* name) {
416 UNIMPLEMENTED_MIPS(); 2550 // ----------- S t a t e -------------
417 return NULL; 2551 // -- a0 : value
2552 // -- a1 : receiver
2553 // -- a2 : name
2554 // -- ra : return address
2555 // -----------------------------------
2556 Label miss;
2557
2558 // Name register might be clobbered.
2559 GenerateStoreField(masm(),
2560 object,
2561 index,
2562 transition,
2563 a1, a2, a3,
2564 &miss);
2565 __ bind(&miss);
2566 __ li(a2, Operand(Handle<String>(name))); // Restore name.
2567 Handle<Code> ic = masm()->isolate()->builtins()->Builtins::StoreIC_Miss();
2568 __ Jump(ic, RelocInfo::CODE_TARGET);
2569
2570 // Return the generated code.
2571 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
418 } 2572 }
419 2573
420 2574
421 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, 2575 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
422 AccessorInfo* callback, 2576 AccessorInfo* callback,
423 String* name) { 2577 String* name) {
424 UNIMPLEMENTED_MIPS(); 2578 // ----------- S t a t e -------------
425 return NULL; 2579 // -- a0 : value
2580 // -- a1 : receiver
2581 // -- a2 : name
2582 // -- ra : return address
2583 // -----------------------------------
2584 Label miss;
2585
2586 // Check that the object isn't a smi.
2587 __ JumpIfSmi(a1, &miss);
2588
2589 // Check that the map of the object hasn't changed.
2590 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2591 __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2592
2593 // Perform global security token check if needed.
2594 if (object->IsJSGlobalProxy()) {
2595 __ CheckAccessGlobalProxy(a1, a3, &miss);
2596 }
2597
2598 // Stub never generated for non-global objects that require access
2599 // checks.
2600 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2601
2602 __ push(a1); // Receiver.
2603 __ li(a3, Operand(Handle<AccessorInfo>(callback))); // Callback info.
2604 __ Push(a3, a2, a0);
2605
2606 // Do tail-call to the runtime system.
2607 ExternalReference store_callback_property =
2608 ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2609 masm()->isolate());
2610 __ TailCallExternalReference(store_callback_property, 4, 1);
2611
2612 // Handle store cache miss.
2613 __ bind(&miss);
2614 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2615 __ Jump(ic, RelocInfo::CODE_TARGET);
2616
2617 // Return the generated code.
2618 return GetCode(CALLBACKS, name);
426 } 2619 }
427 2620
428 2621
429 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, 2622 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
430 String* name) { 2623 String* name) {
431 UNIMPLEMENTED_MIPS(); 2624 // ----------- S t a t e -------------
432 return NULL; 2625 // -- a0 : value
2626 // -- a1 : receiver
2627 // -- a2 : name
2628 // -- ra : return address
2629 // -----------------------------------
2630 Label miss;
2631
2632 // Check that the object isn't a smi.
2633 __ JumpIfSmi(a1, &miss);
2634
2635 // Check that the map of the object hasn't changed.
2636 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2637 __ Branch(&miss, ne, a3, Operand(Handle<Map>(receiver->map())));
2638
2639 // Perform global security token check if needed.
2640 if (receiver->IsJSGlobalProxy()) {
2641 __ CheckAccessGlobalProxy(a1, a3, &miss);
2642 }
2643
2644 // Stub is never generated for non-global objects that require access
2645 // checks.
2646 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2647
2648 __ Push(a1, a2, a0); // Receiver, name, value.
2649
2650 __ li(a0, Operand(Smi::FromInt(strict_mode_)));
2651 __ push(a0); // Strict mode.
2652
2653 // Do tail-call to the runtime system.
2654 ExternalReference store_ic_property =
2655 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2656 masm()->isolate());
2657 __ TailCallExternalReference(store_ic_property, 4, 1);
2658
2659 // Handle store cache miss.
2660 __ bind(&miss);
2661 Handle<Code> ic = masm()->isolate()->builtins()->Builtins::StoreIC_Miss();
2662 __ Jump(ic, RelocInfo::CODE_TARGET);
2663
2664 // Return the generated code.
2665 return GetCode(INTERCEPTOR, name);
433 } 2666 }
434 2667
435 2668
436 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, 2669 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
437 JSGlobalPropertyCell* cell, 2670 JSGlobalPropertyCell* cell,
438 String* name) { 2671 String* name) {
439 UNIMPLEMENTED_MIPS(); 2672 // ----------- S t a t e -------------
440 return NULL; 2673 // -- a0 : value
2674 // -- a1 : receiver
2675 // -- a2 : name
2676 // -- ra : return address
2677 // -----------------------------------
2678 Label miss;
2679
2680 // Check that the map of the global has not changed.
2681 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2682 __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2683
2684 // Check that the value in the cell is not the hole. If it is, this
2685 // cell could have been deleted and reintroducing the global needs
2686 // to update the property details in the property dictionary of the
2687 // global object. We bail out to the runtime system to do that.
2688 __ li(t0, Operand(Handle<JSGlobalPropertyCell>(cell)));
2689 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
2690 __ lw(t2, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2691 __ Branch(&miss, eq, t1, Operand(t2));
2692
2693 // Store the value in the cell.
2694 __ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2695 __ mov(v0, a0); // Stored value must be returned in v0.
2696 Counters* counters = masm()->isolate()->counters();
2697 __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);
2698 __ Ret();
2699
2700 // Handle store cache miss.
2701 __ bind(&miss);
2702 __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
2703 Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2704 __ Jump(ic, RelocInfo::CODE_TARGET);
2705
2706 // Return the generated code.
2707 return GetCode(NORMAL, name);
441 } 2708 }
442 2709
443 2710
444 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, 2711 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
445 JSObject* object, 2712 JSObject* object,
446 JSObject* last) { 2713 JSObject* last) {
447 UNIMPLEMENTED_MIPS(); 2714 // ----------- S t a t e -------------
448 return NULL; 2715 // -- a0 : receiver
449 } 2716 // -- ra : return address
450 2717 // -----------------------------------
451 2718 Label miss;
2719
2720 // Check that the receiver is not a smi.
2721 __ JumpIfSmi(a0, &miss);
2722
2723 // Check the maps of the full prototype chain.
2724 CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);
2725
2726 // If the last object in the prototype chain is a global object,
2727 // check that the global property cell is empty.
2728 if (last->IsGlobalObject()) {
2729 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2730 GlobalObject::cast(last),
2731 name,
2732 a1,
2733 &miss);
2734 if (cell->IsFailure()) {
2735 miss.Unuse();
2736 return cell;
2737 }
2738 }
2739
2740 // Return undefined if maps of the full prototype chain is still the same.
2741 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2742 __ Ret();
2743
2744 __ bind(&miss);
2745 GenerateLoadMiss(masm(), Code::LOAD_IC);
2746
2747 // Return the generated code.
2748 return GetCode(NONEXISTENT, heap()->empty_string());
2749 }
2750
2751
452 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, 2752 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
453 JSObject* holder, 2753 JSObject* holder,
454 int index, 2754 int index,
455 String* name) { 2755 String* name) {
456 UNIMPLEMENTED_MIPS(); 2756 // ----------- S t a t e -------------
457 return NULL; 2757 // -- a0 : receiver
2758 // -- a2 : name
2759 // -- ra : return address
2760 // -----------------------------------
2761 Label miss;
2762
2763 __ mov(v0, a0);
2764
2765 GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);
2766 __ bind(&miss);
2767 GenerateLoadMiss(masm(), Code::LOAD_IC);
2768
2769 // Return the generated code.
2770 return GetCode(FIELD, name);
458 } 2771 }
459 2772
460 2773
461 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, 2774 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
462 JSObject* object, 2775 JSObject* object,
463 JSObject* holder, 2776 JSObject* holder,
464 AccessorInfo* callback) { 2777 AccessorInfo* callback) {
465 UNIMPLEMENTED_MIPS(); 2778 // ----------- S t a t e -------------
466 return NULL; 2779 // -- a0 : receiver
2780 // -- a2 : name
2781 // -- ra : return address
2782 // -----------------------------------
2783 Label miss;
2784
2785 MaybeObject* result = GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0,
2786 callback, name, &miss);
2787 if (result->IsFailure()) {
2788 miss.Unuse();
2789 return result;
2790 }
2791
2792 __ bind(&miss);
2793 GenerateLoadMiss(masm(), Code::LOAD_IC);
2794
2795 // Return the generated code.
2796 return GetCode(CALLBACKS, name);
467 } 2797 }
468 2798
469 2799
470 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, 2800 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
471 JSObject* holder, 2801 JSObject* holder,
472 Object* value, 2802 Object* value,
473 String* name) { 2803 String* name) {
474 UNIMPLEMENTED_MIPS(); 2804 // ----------- S t a t e -------------
475 return NULL; 2805 // -- a0 : receiver
2806 // -- a2 : name
2807 // -- ra : return address
2808 // -----------------------------------
2809 Label miss;
2810
2811 GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);
2812 __ bind(&miss);
2813 GenerateLoadMiss(masm(), Code::LOAD_IC);
2814
2815 // Return the generated code.
2816 return GetCode(CONSTANT_FUNCTION, name);
476 } 2817 }
477 2818
478 2819
479 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, 2820 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
480 JSObject* holder, 2821 JSObject* holder,
481 String* name) { 2822 String* name) {
482 UNIMPLEMENTED_MIPS(); 2823 // ----------- S t a t e -------------
483 return NULL; 2824 // -- a0 : receiver
2825 // -- a2 : name
2826 // -- ra : return address
2827 // -- [sp] : receiver
2828 // -----------------------------------
2829 Label miss;
2830
2831 LookupResult lookup;
2832 LookupPostInterceptor(holder, name, &lookup);
2833 GenerateLoadInterceptor(object,
2834 holder,
2835 &lookup,
2836 a0,
2837 a2,
2838 a3,
2839 a1,
2840 t0,
2841 name,
2842 &miss);
2843 __ bind(&miss);
2844 GenerateLoadMiss(masm(), Code::LOAD_IC);
2845
2846 // Return the generated code.
2847 return GetCode(INTERCEPTOR, name);
484 } 2848 }
485 2849
486 2850
487 MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object, 2851 MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
488 GlobalObject* holder, 2852 GlobalObject* holder,
489 JSGlobalPropertyCell* cell, 2853 JSGlobalPropertyCell* cell,
490 String* name, 2854 String* name,
491 bool is_dont_delete) { 2855 bool is_dont_delete) {
492 UNIMPLEMENTED_MIPS(); 2856 // ----------- S t a t e -------------
493 return NULL; 2857 // -- a0 : receiver
2858 // -- a2 : name
2859 // -- ra : return address
2860 // -----------------------------------
2861 Label miss;
2862
2863 // If the object is the holder then we know that it's a global
2864 // object which can only happen for contextual calls. In this case,
2865 // the receiver cannot be a smi.
2866 if (object != holder) {
2867 __ And(t0, a0, Operand(kSmiTagMask));
2868 __ Branch(&miss, eq, t0, Operand(zero_reg));
2869 }
2870
2871 // Check that the map of the global has not changed.
2872 CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);
2873
2874 // Get the value from the cell.
2875 __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell)));
2876 __ lw(t0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
2877
2878 // Check for deleted property if property can actually be deleted.
2879 if (!is_dont_delete) {
2880 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2881 __ Branch(&miss, eq, t0, Operand(at));
2882 }
2883
2884 __ mov(v0, t0);
2885 Counters* counters = masm()->isolate()->counters();
2886 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
2887 __ Ret();
2888
2889 __ bind(&miss);
2890 __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);
2891 GenerateLoadMiss(masm(), Code::LOAD_IC);
2892
2893 // Return the generated code.
2894 return GetCode(NORMAL, name);
494 } 2895 }
495 2896
496 2897
497 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, 2898 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
498 JSObject* receiver, 2899 JSObject* receiver,
499 JSObject* holder, 2900 JSObject* holder,
500 int index) { 2901 int index) {
501 UNIMPLEMENTED_MIPS(); 2902 // ----------- S t a t e -------------
502 return NULL; 2903 // -- ra : return address
2904 // -- a0 : key
2905 // -- a1 : receiver
2906 // -----------------------------------
2907 Label miss;
2908
2909 // Check the key is the cached one.
2910 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2911
2912 GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);
2913 __ bind(&miss);
2914 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2915
2916 return GetCode(FIELD, name);
503 } 2917 }
504 2918
505 2919
506 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( 2920 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
507 String* name, 2921 String* name,
508 JSObject* receiver, 2922 JSObject* receiver,
509 JSObject* holder, 2923 JSObject* holder,
510 AccessorInfo* callback) { 2924 AccessorInfo* callback) {
511 UNIMPLEMENTED_MIPS(); 2925 // ----------- S t a t e -------------
512 return NULL; 2926 // -- ra : return address
2927 // -- a0 : key
2928 // -- a1 : receiver
2929 // -----------------------------------
2930 Label miss;
2931
2932 // Check the key is the cached one.
2933 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2934
2935 MaybeObject* result = GenerateLoadCallback(receiver, holder, a1, a0, a2, a3,
2936 t0, callback, name, &miss);
2937 if (result->IsFailure()) {
2938 miss.Unuse();
2939 return result;
2940 }
2941
2942 __ bind(&miss);
2943 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2944
2945 return GetCode(CALLBACKS, name);
513 } 2946 }
514 2947
515 2948
516 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, 2949 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
517 JSObject* receiver, 2950 JSObject* receiver,
518 JSObject* holder, 2951 JSObject* holder,
519 Object* value) { 2952 Object* value) {
520 UNIMPLEMENTED_MIPS(); 2953 // ----------- S t a t e -------------
521 return NULL; 2954 // -- ra : return address
2955 // -- a0 : key
2956 // -- a1 : receiver
2957 // -----------------------------------
2958 Label miss;
2959
2960 // Check the key is the cached one.
2961 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2962
2963 GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
2964 __ bind(&miss);
2965 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2966
2967 // Return the generated code.
2968 return GetCode(CONSTANT_FUNCTION, name);
522 } 2969 }
523 2970
524 2971
525 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2972 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
526 JSObject* holder, 2973 JSObject* holder,
527 String* name) { 2974 String* name) {
528 UNIMPLEMENTED_MIPS(); 2975 // ----------- S t a t e -------------
529 return NULL; 2976 // -- ra : return address
2977 // -- a0 : key
2978 // -- a1 : receiver
2979 // -----------------------------------
2980 Label miss;
2981
2982 // Check the key is the cached one.
2983 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2984
2985 LookupResult lookup;
2986 LookupPostInterceptor(holder, name, &lookup);
2987 GenerateLoadInterceptor(receiver,
2988 holder,
2989 &lookup,
2990 a1,
2991 a0,
2992 a2,
2993 a3,
2994 t0,
2995 name,
2996 &miss);
2997 __ bind(&miss);
2998 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2999
3000 return GetCode(INTERCEPTOR, name);
530 } 3001 }
531 3002
532 3003
533 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) { 3004 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
534 UNIMPLEMENTED_MIPS(); 3005 // ----------- S t a t e -------------
535 return NULL; 3006 // -- ra : return address
3007 // -- a0 : key
3008 // -- a1 : receiver
3009 // -----------------------------------
3010 Label miss;
3011
3012 // Check the key is the cached one.
3013 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3014
3015 GenerateLoadArrayLength(masm(), a1, a2, &miss);
3016 __ bind(&miss);
3017 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3018
3019 return GetCode(CALLBACKS, name);
536 } 3020 }
537 3021
538 3022
539 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { 3023 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
540 UNIMPLEMENTED_MIPS(); 3024 // ----------- S t a t e -------------
541 return NULL; 3025 // -- ra : return address
3026 // -- a0 : key
3027 // -- a1 : receiver
3028 // -----------------------------------
3029 Label miss;
3030
3031 Counters* counters = masm()->isolate()->counters();
3032 __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3033
3034 // Check the key is the cached one.
3035 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3036
3037 GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);
3038 __ bind(&miss);
3039 __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3040
3041 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3042
3043 return GetCode(CALLBACKS, name);
542 } 3044 }
543 3045
544 3046
545 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { 3047 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
546 UNIMPLEMENTED_MIPS(); 3048 // ----------- S t a t e -------------
547 return NULL; 3049 // -- ra : return address
3050 // -- a0 : key
3051 // -- a1 : receiver
3052 // -----------------------------------
3053 Label miss;
3054
3055 Counters* counters = masm()->isolate()->counters();
3056 __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3057
3058 // Check the name hasn't changed.
3059 __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3060
3061 GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);
3062 __ bind(&miss);
3063 __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3064 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3065
3066 return GetCode(CALLBACKS, name);
548 } 3067 }
549 3068
550 3069
551 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { 3070 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
552 UNIMPLEMENTED_MIPS(); 3071 // ----------- S t a t e -------------
553 return NULL; 3072 // -- ra : return address
3073 // -- a0 : key
3074 // -- a1 : receiver
3075 // -----------------------------------
3076 Label miss;
3077
3078 // Check that the receiver isn't a smi.
3079 __ JumpIfSmi(a1, &miss);
3080
3081 // Check that the map matches.
3082 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
3083 __ Branch(&miss, ne, a2, Operand(Handle<Map>(receiver->map())));
3084
3085 // Check that the key is a smi.
3086 __ JumpIfNotSmi(a0, &miss);
3087
3088 // Get the elements array.
3089 __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
3090 __ AssertFastElements(a2);
3091
3092 // Check that the key is within bounds.
3093 __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
3094 __ Branch(&miss, hs, a0, Operand(a3));
3095
3096 // Load the result and make sure it's not the hole.
3097 __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3098 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
3099 __ sll(t1, a0, kPointerSizeLog2 - kSmiTagSize);
3100 __ Addu(t1, t1, a3);
3101 __ lw(t0, MemOperand(t1));
3102 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
3103 __ Branch(&miss, eq, t0, Operand(t1));
3104 __ mov(v0, t0);
3105 __ Ret();
3106
3107 __ bind(&miss);
3108 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3109
3110 // Return the generated code.
3111 return GetCode(NORMAL, NULL);
554 } 3112 }
555 3113
556 3114
557 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, 3115 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
558 int index, 3116 int index,
559 Map* transition, 3117 Map* transition,
560 String* name) { 3118 String* name) {
561 UNIMPLEMENTED_MIPS(); 3119 // ----------- S t a t e -------------
562 return NULL; 3120 // -- a0 : value
3121 // -- a1 : key
3122 // -- a2 : receiver
3123 // -- ra : return address
3124 // -----------------------------------
3125
3126 Label miss;
3127
3128 Counters* counters = masm()->isolate()->counters();
3129 __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);
3130
3131 // Check that the name has not changed.
3132 __ Branch(&miss, ne, a1, Operand(Handle<String>(name)));
3133
3134 // a3 is used as scratch register. a1 and a2 keep their values if a jump to
3135 // the miss label is generated.
3136 GenerateStoreField(masm(),
3137 object,
3138 index,
3139 transition,
3140 a2, a1, a3,
3141 &miss);
3142 __ bind(&miss);
3143
3144 __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
3145 Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3146 __ Jump(ic, RelocInfo::CODE_TARGET);
3147
3148 // Return the generated code.
3149 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
563 } 3150 }
564 3151
565 3152
566 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( 3153 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
567 JSObject* receiver) { 3154 JSObject* receiver) {
568 UNIMPLEMENTED_MIPS(); 3155 // ----------- S t a t e -------------
569 return NULL; 3156 // -- a0 : value
3157 // -- a1 : key
3158 // -- a2 : receiver
3159 // -- ra : return address
3160 // -- a3 : scratch
3161 // -- t0 : scratch (elements)
3162 // -----------------------------------
3163 Label miss;
3164 Register value_reg = a0;
3165 Register key_reg = a1;
3166 Register receiver_reg = a2;
3167 Register scratch = a3;
3168 Register elements_reg = t0;
3169
3170 // Check that the receiver isn't a smi.
3171 __ JumpIfSmi(receiver_reg, &miss);
3172
3173 // Check that the map matches.
3174 __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
3175 __ Branch(&miss, ne, scratch, Operand(Handle<Map>(receiver->map())));
3176
3177 // Check that the key is a smi.
3178 __ JumpIfNotSmi(key_reg, &miss);
3179
3180 // Get the elements array and make sure it is a fast element array, not 'cow'.
3181 __ lw(elements_reg,
3182 FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
3183 __ lw(scratch, FieldMemOperand(elements_reg, HeapObject::kMapOffset));
3184 __ Branch(&miss, ne, scratch,
3185 Operand(Handle<Map>(FACTORY->fixed_array_map())));
3186
3187 // Check that the key is within bounds.
3188 if (receiver->IsJSArray()) {
3189 __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
3190 } else {
3191 __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
3192 }
3193 // Compare smis.
3194 __ Branch(&miss, hs, key_reg, Operand(scratch));
3195 __ Addu(scratch,
3196 elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3197 ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
3198 __ sll(key_reg, key_reg, kPointerSizeLog2 - kSmiTagSize);
3199 __ Addu(v0, scratch, key_reg);
3200 __ sw(value_reg, MemOperand(v0));
3201 __ RecordWrite(scratch, Operand(key_reg), receiver_reg , elements_reg);
3202
3203 // value_reg (a0) is preserved.
3204 // Done.
3205 __ mov(v0, value_reg);
3206 __ Ret();
3207
3208 __ bind(&miss);
3209 Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3210 __ Jump(ic, RelocInfo::CODE_TARGET);
3211
3212 // Return the generated code.
3213 return GetCode(NORMAL, NULL);
570 } 3214 }
571 3215
572 3216
573 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { 3217 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
574 UNIMPLEMENTED_MIPS(); 3218 // a0 : argc
575 return NULL; 3219 // a1 : constructor
3220 // ra : return address
3221 // [sp] : last argument
3222 Label generic_stub_call;
3223
3224 // Use t7 for holding undefined which is used in several places below.
3225 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
3226
3227 #ifdef ENABLE_DEBUGGER_SUPPORT
3228 // Check to see whether there are any break points in the function code. If
3229 // there are jump to the generic constructor stub which calls the actual
3230 // code for the function thereby hitting the break points.
3231 __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
3232 __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset));
3233 __ Branch(&generic_stub_call, ne, a2, Operand(t7));
3234 #endif
3235
3236 // Load the initial map and verify that it is in fact a map.
3237 // a1: constructor function
3238 // t7: undefined
3239 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
3240 __ And(t0, a2, Operand(kSmiTagMask));
3241 __ Branch(&generic_stub_call, eq, t0, Operand(zero_reg));
3242 __ GetObjectType(a2, a3, t0);
3243 __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
3244
3245 #ifdef DEBUG
3246 // Cannot construct functions this way.
3247 // a0: argc
3248 // a1: constructor function
3249 // a2: initial map
3250 // t7: undefined
3251 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3252 __ Check(ne, "Function constructed by construct stub.",
3253 a3, Operand(JS_FUNCTION_TYPE));
3254 #endif
3255
3256 // Now allocate the JSObject in new space.
3257 // a0: argc
3258 // a1: constructor function
3259 // a2: initial map
3260 // t7: undefined
3261 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
3262 __ AllocateInNewSpace(a3,
3263 t4,
3264 t5,
3265 t6,
3266 &generic_stub_call,
3267 SIZE_IN_WORDS);
3268
3269 // Allocated the JSObject, now initialize the fields. Map is set to initial
3270 // map and properties and elements are set to empty fixed array.
3271 // a0: argc
3272 // a1: constructor function
3273 // a2: initial map
3274 // a3: object size (in words)
3275 // t4: JSObject (not tagged)
3276 // t7: undefined
3277 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
3278 __ mov(t5, t4);
3279 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
3280 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
3281 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
3282 __ Addu(t5, t5, Operand(3 * kPointerSize));
3283 ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3284 ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3285 ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3286
3287
3288 // Calculate the location of the first argument. The stack contains only the
3289 // argc arguments.
3290 __ sll(a1, a0, kPointerSizeLog2);
3291 __ Addu(a1, a1, sp);
3292
3293 // Fill all the in-object properties with undefined.
3294 // a0: argc
3295 // a1: first argument
3296 // a3: object size (in words)
3297 // t4: JSObject (not tagged)
3298 // t5: First in-object property of JSObject (not tagged)
3299 // t7: undefined
3300 // Fill the initialized properties with a constant value or a passed argument
3301 // depending on the this.x = ...; assignment in the function.
3302 SharedFunctionInfo* shared = function->shared();
3303 for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3304 if (shared->IsThisPropertyAssignmentArgument(i)) {
3305 Label not_passed, next;
3306 // Check if the argument assigned to the property is actually passed.
3307 int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3308 __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
3309 // Argument passed - find it on the stack.
3310 __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
3311 __ sw(a2, MemOperand(t5));
3312 __ Addu(t5, t5, kPointerSize);
3313 __ jmp(&next);
3314 __ bind(&not_passed);
3315 // Set the property to undefined.
3316 __ sw(t7, MemOperand(t5));
3317 __ Addu(t5, t5, Operand(kPointerSize));
3318 __ bind(&next);
3319 } else {
3320 // Set the property to the constant value.
3321 Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3322 __ li(a2, Operand(constant));
3323 __ sw(a2, MemOperand(t5));
3324 __ Addu(t5, t5, kPointerSize);
3325 }
3326 }
3327
3328 // Fill the unused in-object property fields with undefined.
3329 ASSERT(function->has_initial_map());
3330 for (int i = shared->this_property_assignments_count();
3331 i < function->initial_map()->inobject_properties();
3332 i++) {
3333 __ sw(t7, MemOperand(t5));
3334 __ Addu(t5, t5, kPointerSize);
3335 }
3336
3337 // a0: argc
3338 // t4: JSObject (not tagged)
3339 // Move argc to a1 and the JSObject to return to v0 and tag it.
3340 __ mov(a1, a0);
3341 __ mov(v0, t4);
3342 __ Or(v0, v0, Operand(kHeapObjectTag));
3343
3344 // v0: JSObject
3345 // a1: argc
3346 // Remove caller arguments and receiver from the stack and return.
3347 __ sll(t0, a1, kPointerSizeLog2);
3348 __ Addu(sp, sp, t0);
3349 __ Addu(sp, sp, Operand(kPointerSize));
3350 Counters* counters = masm()->isolate()->counters();
3351 __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
3352 __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);
3353 __ Ret();
3354
3355 // Jump to the generic stub in case the specialized code cannot handle the
3356 // construction.
3357 __ bind(&generic_stub_call);
3358 Handle<Code> generic_construct_stub =
3359 masm()->isolate()->builtins()->JSConstructStubGeneric();
3360 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3361
3362 // Return the generated code.
3363 return GetCode();
3364 }
3365
3366
3367 static bool IsElementTypeSigned(ExternalArrayType array_type) {
3368 switch (array_type) {
3369 case kExternalByteArray:
3370 case kExternalShortArray:
3371 case kExternalIntArray:
3372 return true;
3373
3374 case kExternalUnsignedByteArray:
3375 case kExternalUnsignedShortArray:
3376 case kExternalUnsignedIntArray:
3377 return false;
3378
3379 default:
3380 UNREACHABLE();
3381 return false;
3382 }
576 } 3383 }
577 3384
578 3385
579 MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub( 3386 MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
580 JSObject* receiver_object, 3387 JSObject* receiver_object,
581 ExternalArrayType array_type, 3388 ExternalArrayType array_type,
582 Code::Flags flags) { 3389 Code::Flags flags) {
583 UNIMPLEMENTED_MIPS(); 3390 // ---------- S t a t e --------------
584 return NULL; 3391 // -- ra : return address
3392 // -- a0 : key
3393 // -- a1 : receiver
3394 // -----------------------------------
3395 Label slow, failed_allocation;
3396
3397 Register key = a0;
3398 Register receiver = a1;
3399
3400 // Check that the object isn't a smi.
3401 __ JumpIfSmi(receiver, &slow);
3402
3403 // Check that the key is a smi.
3404 __ JumpIfNotSmi(key, &slow);
3405
3406 // Make sure that we've got the right map.
3407 __ lw(a2, FieldMemOperand(receiver, HeapObject::kMapOffset));
3408 __ Branch(&slow, ne, a2, Operand(Handle<Map>(receiver_object->map())));
3409
3410 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3411 // a3: elements array
3412
3413 // Check that the index is in range.
3414 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3415 __ sra(t2, key, kSmiTagSize);
3416 // Unsigned comparison catches both negative and too-large values.
3417 __ Branch(&slow, Uless, t1, Operand(t2));
3418
3419
3420 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3421 // a3: base pointer of external storage
3422
3423 // We are not untagging smi key and instead work with it
3424 // as if it was premultiplied by 2.
3425 ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
3426
3427 Register value = a2;
3428 switch (array_type) {
3429 case kExternalByteArray:
3430 __ srl(t2, key, 1);
3431 __ addu(t3, a3, t2);
3432 __ lb(value, MemOperand(t3, 0));
3433 break;
3434 case kExternalPixelArray:
3435 case kExternalUnsignedByteArray:
3436 __ srl(t2, key, 1);
3437 __ addu(t3, a3, t2);
3438 __ lbu(value, MemOperand(t3, 0));
3439 break;
3440 case kExternalShortArray:
3441 __ addu(t3, a3, key);
3442 __ lh(value, MemOperand(t3, 0));
3443 break;
3444 case kExternalUnsignedShortArray:
3445 __ addu(t3, a3, key);
3446 __ lhu(value, MemOperand(t3, 0));
3447 break;
3448 case kExternalIntArray:
3449 case kExternalUnsignedIntArray:
3450 __ sll(t2, key, 1);
3451 __ addu(t3, a3, t2);
3452 __ lw(value, MemOperand(t3, 0));
3453 break;
3454 case kExternalFloatArray:
3455 __ sll(t3, t2, 2);
3456 __ addu(t3, a3, t3);
3457 if (CpuFeatures::IsSupported(FPU)) {
3458 CpuFeatures::Scope scope(FPU);
3459 __ lwc1(f0, MemOperand(t3, 0));
3460 } else {
3461 __ lw(value, MemOperand(t3, 0));
3462 }
3463 break;
3464 case kExternalDoubleArray:
3465 __ sll(t2, key, 2);
3466 __ addu(t3, a3, t2);
3467 if (CpuFeatures::IsSupported(FPU)) {
3468 CpuFeatures::Scope scope(FPU);
3469 __ ldc1(f0, MemOperand(t3, 0));
3470 } else {
3471 // t3: pointer to the beginning of the double we want to load.
3472 __ lw(a2, MemOperand(t3, 0));
3473 __ lw(a3, MemOperand(t3, Register::kSizeInBytes));
3474 }
3475 break;
3476 default:
3477 UNREACHABLE();
3478 break;
3479 }
3480
3481 // For integer array types:
3482 // a2: value
3483 // For float array type:
3484 // f0: value (if FPU is supported)
3485 // a2: value (if FPU is not supported)
3486 // For double array type:
3487 // f0: value (if FPU is supported)
3488 // a2/a3: value (if FPU is not supported)
3489
3490 if (array_type == kExternalIntArray) {
3491 // For the Int and UnsignedInt array types, we need to see whether
3492 // the value can be represented in a Smi. If not, we need to convert
3493 // it to a HeapNumber.
3494 Label box_int;
3495 __ Subu(t3, value, Operand(0xC0000000)); // Non-smi value gives neg result.
3496 __ Branch(&box_int, lt, t3, Operand(zero_reg));
3497 // Tag integer as smi and return it.
3498 __ sll(v0, value, kSmiTagSize);
3499 __ Ret();
3500
3501 __ bind(&box_int);
3502 // Allocate a HeapNumber for the result and perform int-to-double
3503 // conversion.
3504 // The arm version uses a temporary here to save r0, but we don't need to
3505 // (a0 is not modified).
3506 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3507 __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
3508
3509 if (CpuFeatures::IsSupported(FPU)) {
3510 CpuFeatures::Scope scope(FPU);
3511 __ mtc1(value, f0);
3512 __ cvt_d_w(f0, f0);
3513 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3514 __ Ret();
3515 } else {
3516 WriteInt32ToHeapNumberStub stub(value, v0, t2, t3);
3517 __ TailCallStub(&stub);
3518 }
3519 } else if (array_type == kExternalUnsignedIntArray) {
3520 // The test is different for unsigned int values. Since we need
3521 // the value to be in the range of a positive smi, we can't
3522 // handle either of the top two bits being set in the value.
3523 if (CpuFeatures::IsSupported(FPU)) {
3524 CpuFeatures::Scope scope(FPU);
3525 Label pl_box_int;
3526 __ And(t2, value, Operand(0xC0000000));
3527 __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));
3528
3529 // It can fit in an Smi.
3530 // Tag integer as smi and return it.
3531 __ sll(v0, value, kSmiTagSize);
3532 __ Ret();
3533
3534 __ bind(&pl_box_int);
3535 // Allocate a HeapNumber for the result and perform int-to-double
3536 // conversion. Don't use a0 and a1 as AllocateHeapNumber clobbers all
3537 // registers - also when jumping due to exhausted young space.
3538 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3539 __ AllocateHeapNumber(v0, t2, t3, t6, &slow);
3540
3541 // This is replaced by a macro:
3542 // __ mtc1(value, f0); // LS 32-bits.
3543 // __ mtc1(zero_reg, f1); // MS 32-bits are all zero.
3544 // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
3545
3546 __ Cvt_d_uw(f0, value);
3547
3548 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3549
3550 __ Ret();
3551 } else {
3552 // Check whether unsigned integer fits into smi.
3553 Label box_int_0, box_int_1, done;
3554 __ And(t2, value, Operand(0x80000000));
3555 __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
3556 __ And(t2, value, Operand(0x40000000));
3557 __ Branch(&box_int_1, ne, t2, Operand(zero_reg));
3558
3559 // Tag integer as smi and return it.
3560 __ sll(v0, value, kSmiTagSize);
3561 __ Ret();
3562
3563 Register hiword = value; // a2.
3564 Register loword = a3;
3565
3566 __ bind(&box_int_0);
3567 // Integer does not have leading zeros.
3568 GenerateUInt2Double(masm(), hiword, loword, t0, 0);
3569 __ Branch(&done);
3570
3571 __ bind(&box_int_1);
3572 // Integer has one leading zero.
3573 GenerateUInt2Double(masm(), hiword, loword, t0, 1);
3574
3575
3576 __ bind(&done);
3577 // Integer was converted to double in registers hiword:loword.
3578 // Wrap it into a HeapNumber. Don't use a0 and a1 as AllocateHeapNumber
3579 // clobbers all registers - also when jumping due to exhausted young
3580 // space.
3581 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3582 __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
3583
3584 __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset));
3585 __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset));
3586
3587 __ mov(v0, t2);
3588 __ Ret();
3589 }
3590 } else if (array_type == kExternalFloatArray) {
3591 // For the floating-point array type, we need to always allocate a
3592 // HeapNumber.
3593 if (CpuFeatures::IsSupported(FPU)) {
3594 CpuFeatures::Scope scope(FPU);
3595 // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3596 // AllocateHeapNumber clobbers all registers - also when jumping due to
3597 // exhausted young space.
3598 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3599 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3600 // The float (single) value is already in fpu reg f0 (if we use float).
3601 __ cvt_d_s(f0, f0);
3602 __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3603 __ Ret();
3604 } else {
3605 // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3606 // AllocateHeapNumber clobbers all registers - also when jumping due to
3607 // exhausted young space.
3608 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3609 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3610 // FPU is not available, do manual single to double conversion.
3611
3612 // a2: floating point value (binary32).
3613 // v0: heap number for result
3614
3615 // Extract mantissa to t4.
3616 __ And(t4, value, Operand(kBinary32MantissaMask));
3617
3618 // Extract exponent to t5.
3619 __ srl(t5, value, kBinary32MantissaBits);
3620 __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3621
3622 Label exponent_rebiased;
3623 __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));
3624
3625 __ li(t0, 0x7ff);
3626 __ Xor(t1, t5, Operand(0xFF));
3627 __ movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
3628 __ Branch(&exponent_rebiased, eq, t0, Operand(0xff));
3629
3630 // Rebias exponent.
3631 __ Addu(t5,
3632 t5,
3633 Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3634
3635 __ bind(&exponent_rebiased);
3636 __ And(a2, value, Operand(kBinary32SignMask));
3637 value = no_reg;
3638 __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);
3639 __ or_(a2, a2, t0);
3640
3641 // Shift mantissa.
3642 static const int kMantissaShiftForHiWord =
3643 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3644
3645 static const int kMantissaShiftForLoWord =
3646 kBitsPerInt - kMantissaShiftForHiWord;
3647
3648 __ srl(t0, t4, kMantissaShiftForHiWord);
3649 __ or_(a2, a2, t0);
3650 __ sll(a0, t4, kMantissaShiftForLoWord);
3651
3652 __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3653 __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3654 __ Ret();
3655 }
3656
3657 } else if (array_type == kExternalDoubleArray) {
3658 if (CpuFeatures::IsSupported(FPU)) {
3659 CpuFeatures::Scope scope(FPU);
3660 // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3661 // AllocateHeapNumber clobbers all registers - also when jumping due to
3662 // exhausted young space.
3663 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3664 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3665 // The double value is already in f0
3666 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3667 __ Ret();
3668 } else {
3669 // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3670 // AllocateHeapNumber clobbers all registers - also when jumping due to
3671 // exhausted young space.
3672 __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3673 __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3674
3675 __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3676 __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3677 __ Ret();
3678 }
3679
3680 } else {
3681 // Tag integer as smi and return it.
3682 __ sll(v0, value, kSmiTagSize);
3683 __ Ret();
3684 }
3685
3686 // Slow case, key and receiver still in a0 and a1.
3687 __ bind(&slow);
3688 __ IncrementCounter(
3689 masm()->isolate()->counters()->keyed_load_external_array_slow(),
3690 1, a2, a3);
3691
3692 // ---------- S t a t e --------------
3693 // -- ra : return address
3694 // -- a0 : key
3695 // -- a1 : receiver
3696 // -----------------------------------
3697
3698 __ Push(a1, a0);
3699
3700 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3701
3702 return GetCode(flags);
585 } 3703 }
586 3704
587 3705
3706
3707
588 MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub( 3708 MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
589 JSObject* receiver_object, 3709 JSObject* receiver_object,
590 ExternalArrayType array_type, 3710 ExternalArrayType array_type,
591 Code::Flags flags) { 3711 Code::Flags flags) {
592 UNIMPLEMENTED_MIPS(); 3712 // ---------- S t a t e --------------
593 return NULL; 3713 // -- a0 : value
3714 // -- a1 : key
3715 // -- a2 : receiver
3716 // -- ra : return address
3717 // -----------------------------------
3718
3719 Label slow, check_heap_number;
3720
3721 // Register usage.
3722 Register value = a0;
3723 Register key = a1;
3724 Register receiver = a2;
3725 // a3 mostly holds the elements array or the destination external array.
3726
3727 // Check that the object isn't a smi.
3728 __ JumpIfSmi(receiver, &slow);
3729
3730 // Make sure that we've got the right map.
3731 __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
3732 __ Branch(&slow, ne, a3, Operand(Handle<Map>(receiver_object->map())));
3733
3734 __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3735
3736 // Check that the key is a smi.
3737 __ JumpIfNotSmi(key, &slow);
3738
3739 // Check that the index is in range.
3740 __ SmiUntag(t0, key);
3741 __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3742 // Unsigned comparison catches both negative and too-large values.
3743 __ Branch(&slow, Ugreater_equal, t0, Operand(t1));
3744
3745 // Handle both smis and HeapNumbers in the fast path. Go to the
3746 // runtime for all other kinds of values.
3747 // a3: external array.
3748 // t0: key (integer).
3749
3750 if (array_type == kExternalPixelArray) {
3751 // Double to pixel conversion is only implemented in the runtime for now.
3752 __ JumpIfNotSmi(value, &slow);
3753 } else {
3754 __ JumpIfNotSmi(value, &check_heap_number);
3755 }
3756 __ SmiUntag(t1, value);
3757 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3758
3759 // a3: base pointer of external storage.
3760 // t0: key (integer).
3761 // t1: value (integer).
3762
3763 switch (array_type) {
3764 case kExternalPixelArray: {
3765 // Clamp the value to [0..255].
3766 // v0 is used as a scratch register here.
3767 Label done;
3768 __ li(v0, Operand(255));
3769 // Normal branch: nop in delay slot.
3770 __ Branch(&done, gt, t1, Operand(v0));
3771 // Use delay slot in this branch.
3772 __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg));
3773 __ mov(v0, zero_reg); // In delay slot.
3774 __ mov(v0, t1); // Value is in range 0..255.
3775 __ bind(&done);
3776 __ mov(t1, v0);
3777 __ addu(t8, a3, t0);
3778 __ sb(t1, MemOperand(t8, 0));
3779 }
3780 break;
3781 case kExternalByteArray:
3782 case kExternalUnsignedByteArray:
3783 __ addu(t8, a3, t0);
3784 __ sb(t1, MemOperand(t8, 0));
3785 break;
3786 case kExternalShortArray:
3787 case kExternalUnsignedShortArray:
3788 __ sll(t8, t0, 1);
3789 __ addu(t8, a3, t8);
3790 __ sh(t1, MemOperand(t8, 0));
3791 break;
3792 case kExternalIntArray:
3793 case kExternalUnsignedIntArray:
3794 __ sll(t8, t0, 2);
3795 __ addu(t8, a3, t8);
3796 __ sw(t1, MemOperand(t8, 0));
3797 break;
3798 case kExternalFloatArray:
3799 // Perform int-to-float conversion and store to memory.
3800 StoreIntAsFloat(masm(), a3, t0, t1, t2, t3, t4);
3801 break;
3802 case kExternalDoubleArray:
3803 __ sll(t8, t0, 3);
3804 __ addu(a3, a3, t8);
3805 // a3: effective address of the double element
3806 FloatingPointHelper::Destination destination;
3807 if (CpuFeatures::IsSupported(FPU)) {
3808 destination = FloatingPointHelper::kFPURegisters;
3809 } else {
3810 destination = FloatingPointHelper::kCoreRegisters;
3811 }
3812 FloatingPointHelper::ConvertIntToDouble(
3813 masm(), t1, destination,
3814 f0, t2, t3, // These are: double_dst, dst1, dst2.
3815 t0, f2); // These are: scratch2, single_scratch.
3816 if (destination == FloatingPointHelper::kFPURegisters) {
3817 CpuFeatures::Scope scope(FPU);
3818 __ sdc1(f0, MemOperand(a3, 0));
3819 } else {
3820 __ sw(t2, MemOperand(a3, 0));
3821 __ sw(t3, MemOperand(a3, Register::kSizeInBytes));
3822 }
3823 break;
3824 default:
3825 UNREACHABLE();
3826 break;
3827 }
3828
3829 // Entry registers are intact, a0 holds the value which is the return value.
3830 __ mov(v0, value);
3831 __ Ret();
3832
3833 if (array_type != kExternalPixelArray) {
3834 // a3: external array.
3835 // t0: index (integer).
3836 __ bind(&check_heap_number);
3837 __ GetObjectType(value, t1, t2);
3838 __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE));
3839
3840 __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3841
3842 // a3: base pointer of external storage.
3843 // t0: key (integer).
3844
3845 // The WebGL specification leaves the behavior of storing NaN and
3846 // +/-Infinity into integer arrays basically undefined. For more
3847 // reproducible behavior, convert these to zero.
3848
3849 if (CpuFeatures::IsSupported(FPU)) {
3850 CpuFeatures::Scope scope(FPU);
3851
3852 __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset));
3853
3854 if (array_type == kExternalFloatArray) {
3855 __ cvt_s_d(f0, f0);
3856 __ sll(t8, t0, 2);
3857 __ addu(t8, a3, t8);
3858 __ swc1(f0, MemOperand(t8, 0));
3859 } else if (array_type == kExternalDoubleArray) {
3860 __ sll(t8, t0, 3);
3861 __ addu(t8, a3, t8);
3862 __ sdc1(f0, MemOperand(t8, 0));
3863 } else {
3864 Label done;
3865
3866 // Need to perform float-to-int conversion.
3867 // Test whether exponent equal to 0x7FF (infinity or NaN).
3868
3869 __ mfc1(t3, f1); // Move exponent word of double to t3 (as raw bits).
3870 __ li(t1, Operand(0x7FF00000));
3871 __ And(t3, t3, Operand(t1));
3872 __ Branch(USE_DELAY_SLOT, &done, eq, t3, Operand(t1));
3873 __ mov(t3, zero_reg); // In delay slot.
3874
3875 // Not infinity or NaN simply convert to int.
3876 if (IsElementTypeSigned(array_type)) {
3877 __ trunc_w_d(f0, f0);
3878 __ mfc1(t3, f0);
3879 } else {
3880 __ Trunc_uw_d(f0, t3);
3881 }
3882
3883 // t3: HeapNumber converted to integer
3884 __ bind(&done);
3885 switch (array_type) {
3886 case kExternalByteArray:
3887 case kExternalUnsignedByteArray:
3888 __ addu(t8, a3, t0);
3889 __ sb(t3, MemOperand(t8, 0));
3890 break;
3891 case kExternalShortArray:
3892 case kExternalUnsignedShortArray:
3893 __ sll(t8, t0, 1);
3894 __ addu(t8, a3, t8);
3895 __ sh(t3, MemOperand(t8, 0));
3896 break;
3897 case kExternalIntArray:
3898 case kExternalUnsignedIntArray:
3899 __ sll(t8, t0, 2);
3900 __ addu(t8, a3, t8);
3901 __ sw(t3, MemOperand(t8, 0));
3902 break;
3903 default:
3904 UNREACHABLE();
3905 break;
3906 }
3907 }
3908
3909 // Entry registers are intact, a0 holds the value
3910 // which is the return value.
3911 __ mov(v0, value);
3912 __ Ret();
3913 } else {
3914 // FPU is not available, do manual conversions.
3915
3916 __ lw(t3, FieldMemOperand(value, HeapNumber::kExponentOffset));
3917 __ lw(t4, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3918
3919 if (array_type == kExternalFloatArray) {
3920 Label done, nan_or_infinity_or_zero;
3921 static const int kMantissaInHiWordShift =
3922 kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3923
3924 static const int kMantissaInLoWordShift =
3925 kBitsPerInt - kMantissaInHiWordShift;
3926
3927 // Test for all special exponent values: zeros, subnormal numbers, NaNs
3928 // and infinities. All these should be converted to 0.
3929 __ li(t5, HeapNumber::kExponentMask);
3930 __ and_(t6, t3, t5);
3931 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));
3932
3933 __ xor_(t1, t6, t5);
3934 __ li(t2, kBinary32ExponentMask);
3935 __ movz(t6, t2, t1); // Only if t6 is equal to t5.
3936 __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5));
3937
3938 // Rebias exponent.
3939 __ srl(t6, t6, HeapNumber::kExponentShift);
3940 __ Addu(t6,
3941 t6,
3942 Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
3943
3944 __ li(t1, Operand(kBinary32MaxExponent));
3945 __ Slt(t1, t1, t6);
3946 __ And(t2, t3, Operand(HeapNumber::kSignMask));
3947 __ Or(t2, t2, Operand(kBinary32ExponentMask));
3948 __ movn(t3, t2, t1); // Only if t6 is gt kBinary32MaxExponent.
3949 __ Branch(&done, gt, t6, Operand(kBinary32MaxExponent));
3950
3951 __ Slt(t1, t6, Operand(kBinary32MinExponent));
3952 __ And(t2, t3, Operand(HeapNumber::kSignMask));
3953 __ movn(t3, t2, t1); // Only if t6 is lt kBinary32MinExponent.
3954 __ Branch(&done, lt, t6, Operand(kBinary32MinExponent));
3955
3956 __ And(t7, t3, Operand(HeapNumber::kSignMask));
3957 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3958 __ sll(t3, t3, kMantissaInHiWordShift);
3959 __ or_(t7, t7, t3);
3960 __ srl(t4, t4, kMantissaInLoWordShift);
3961 __ or_(t7, t7, t4);
3962 __ sll(t6, t6, kBinary32ExponentShift);
3963 __ or_(t3, t7, t6);
3964
3965 __ bind(&done);
3966 __ sll(t9, a1, 2);
3967 __ addu(t9, a2, t9);
3968 __ sw(t3, MemOperand(t9, 0));
3969
3970 // Entry registers are intact, a0 holds the value which is the return
3971 // value.
3972 __ mov(v0, value);
3973 __ Ret();
3974
3975 __ bind(&nan_or_infinity_or_zero);
3976 __ And(t7, t3, Operand(HeapNumber::kSignMask));
3977 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3978 __ or_(t6, t6, t7);
3979 __ sll(t3, t3, kMantissaInHiWordShift);
3980 __ or_(t6, t6, t3);
3981 __ srl(t4, t4, kMantissaInLoWordShift);
3982 __ or_(t3, t6, t4);
3983 __ Branch(&done);
3984 } else if (array_type == kExternalDoubleArray) {
3985 __ sll(t8, t0, 3);
3986 __ addu(t8, a3, t8);
3987 // t8: effective address of destination element.
3988 __ sw(t4, MemOperand(t8, 0));
3989 __ sw(t3, MemOperand(t8, Register::kSizeInBytes));
3990 __ Ret();
3991 } else {
3992 bool is_signed_type = IsElementTypeSigned(array_type);
3993 int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
3994 int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
3995
3996 Label done, sign;
3997
3998 // Test for all special exponent values: zeros, subnormal numbers, NaNs
3999 // and infinities. All these should be converted to 0.
4000 __ li(t5, HeapNumber::kExponentMask);
4001 __ and_(t6, t3, t5);
4002 __ movz(t3, zero_reg, t6); // Only if t6 is equal to zero.
4003 __ Branch(&done, eq, t6, Operand(zero_reg));
4004
4005 __ xor_(t2, t6, t5);
4006 __ movz(t3, zero_reg, t2); // Only if t6 is equal to t5.
4007 __ Branch(&done, eq, t6, Operand(t5));
4008
4009 // Unbias exponent.
4010 __ srl(t6, t6, HeapNumber::kExponentShift);
4011 __ Subu(t6, t6, Operand(HeapNumber::kExponentBias));
4012 // If exponent is negative then result is 0.
4013 __ slt(t2, t6, zero_reg);
4014 __ movn(t3, zero_reg, t2); // Only if exponent is negative.
4015 __ Branch(&done, lt, t6, Operand(zero_reg));
4016
4017 // If exponent is too big then result is minimal value.
4018 __ slti(t1, t6, meaningfull_bits - 1);
4019 __ li(t2, min_value);
4020 __ movz(t3, t2, t1); // Only if t6 is ge meaningfull_bits - 1.
4021 __ Branch(&done, ge, t6, Operand(meaningfull_bits - 1));
4022
4023 __ And(t5, t3, Operand(HeapNumber::kSignMask));
4024 __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4025 __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4026
4027 __ li(t9, HeapNumber::kMantissaBitsInTopWord);
4028 __ subu(t6, t9, t6);
4029 __ slt(t1, t6, zero_reg);
4030 __ srlv(t2, t3, t6);
4031 __ movz(t3, t2, t1); // Only if t6 is positive.
4032 __ Branch(&sign, ge, t6, Operand(zero_reg));
4033
4034 __ subu(t6, zero_reg, t6);
4035 __ sllv(t3, t3, t6);
4036 __ li(t9, meaningfull_bits);
4037 __ subu(t6, t9, t6);
4038 __ srlv(t4, t4, t6);
4039 __ or_(t3, t3, t4);
4040
4041 __ bind(&sign);
4042 __ subu(t2, t3, zero_reg);
4043 __ movz(t3, t2, t5); // Only if t5 is zero.
4044
4045 __ bind(&done);
4046
4047 // Result is in t3.
4048 // This switch block should be exactly the same as above (FPU mode).
4049 switch (array_type) {
4050 case kExternalByteArray:
4051 case kExternalUnsignedByteArray:
4052 __ addu(t8, a3, t0);
4053 __ sb(t3, MemOperand(t8, 0));
4054 break;
4055 case kExternalShortArray:
4056 case kExternalUnsignedShortArray:
4057 __ sll(t8, t0, 1);
4058 __ addu(t8, a3, t8);
4059 __ sh(t3, MemOperand(t8, 0));
4060 break;
4061 case kExternalIntArray:
4062 case kExternalUnsignedIntArray:
4063 __ sll(t8, t0, 2);
4064 __ addu(t8, a3, t8);
4065 __ sw(t3, MemOperand(t8, 0));
4066 break;
4067 default:
4068 UNREACHABLE();
4069 break;
4070 }
4071 }
4072 }
4073 }
4074
4075 // Slow case: call runtime.
4076 __ bind(&slow);
4077 // Entry registers are intact.
4078 // ---------- S t a t e --------------
4079 // -- a0 : value
4080 // -- a1 : key
4081 // -- a2 : receiver
4082 // -- ra : return address
4083 // -----------------------------------
4084
4085 // Push receiver, key and value for runtime call.
4086 __ Push(a2, a1, a0);
4087
4088 __ li(a1, Operand(Smi::FromInt(NONE))); // PropertyAttributes.
4089 __ li(a0, Operand(Smi::FromInt(
4090 Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
4091 __ Push(a1, a0);
4092
4093 __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
4094
4095 return GetCode(flags);
594 } 4096 }
595 4097
596 4098
597 #undef __ 4099 #undef __
598 4100
599 } } // namespace v8::internal 4101 } } // namespace v8::internal
600 4102
601 #endif // V8_TARGET_ARCH_MIPS 4103 #endif // V8_TARGET_ARCH_MIPS
OLDNEW
« no previous file with comments | « src/mips/simulator-mips.cc ('k') | src/mips/virtual-frame-mips.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698