Chromium Code Reviews

Side by Side Diff: src/ic/mips/ic-compiler-mips.cc

Issue 501023002: MIPS: Move handler compilers to handler-compiler. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 3 months ago
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #if V8_TARGET_ARCH_MIPS
8
9 #include "src/ic/call-optimization.h"
10 #include "src/ic/ic-compiler.h"
11
12 namespace v8 {
13 namespace internal {
14
15 #define __ ACCESS_MASM(masm)
16
17
18 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
19 MacroAssembler* masm, Label* miss_label, Register receiver,
20 Handle<Name> name, Register scratch0, Register scratch1) {
21 DCHECK(name->IsUniqueName());
22 DCHECK(!receiver.is(scratch0));
23 Counters* counters = masm->isolate()->counters();
24 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
25 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
26
27 Label done;
28
29 const int kInterceptorOrAccessCheckNeededMask =
30 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
31
32 // Bail out if the receiver has a named interceptor or requires access checks.
33 Register map = scratch1;
34 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
35 __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
36 __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
37 __ Branch(miss_label, ne, scratch0, Operand(zero_reg));
38
39 // Check that receiver is a JSObject.
40 __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
41 __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
42
43 // Load properties array.
44 Register properties = scratch0;
45 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
46 // Check that the properties array is a dictionary.
47 __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
48 Register tmp = properties;
49 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
50 __ Branch(miss_label, ne, map, Operand(tmp));
51
52 // Restore the temporarily used register.
53 __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
54
55
56 NameDictionaryLookupStub::GenerateNegativeLookup(
57 masm, miss_label, &done, receiver, properties, name, scratch1);
58 __ bind(&done);
59 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
60 }
61
62
63 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
64 MacroAssembler* masm, int index, Register prototype, Label* miss) {
65 Isolate* isolate = masm->isolate();
66 // Get the global function with the given index.
67 Handle<JSFunction> function(
68 JSFunction::cast(isolate->native_context()->get(index)));
69
70 // Check we're still in the same context.
71 Register scratch = prototype;
72 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
73 __ lw(scratch, MemOperand(cp, offset));
74 __ lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
75 __ lw(scratch, MemOperand(scratch, Context::SlotOffset(index)));
76 __ li(at, function);
77 __ Branch(miss, ne, at, Operand(scratch));
78
79 // Load its initial map. The global functions all have initial maps.
80 __ li(prototype, Handle<Map>(function->initial_map()));
81 // Load the prototype from the initial map.
82 __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
83 }
84
85
86 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
87 MacroAssembler* masm, Register receiver, Register scratch1,
88 Register scratch2, Label* miss_label) {
89 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
90 __ Ret(USE_DELAY_SLOT);
91 __ mov(v0, scratch1);
92 }
93
94
95 void PropertyHandlerCompiler::GenerateCheckPropertyCell(
96 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
97 Register scratch, Label* miss) {
98 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
99 DCHECK(cell->value()->IsTheHole());
100 __ li(scratch, Operand(cell));
101 __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
102 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
103 __ Branch(miss, ne, scratch, Operand(at));
104 }
105
106
107 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
108 Register holder, Register name,
109 Handle<JSObject> holder_obj) {
110 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
111 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
112 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
113 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
114 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
115 __ push(name);
116 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
117 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
118 Register scratch = name;
119 __ li(scratch, Operand(interceptor));
120 __ Push(scratch, receiver, holder);
121 }
122
123
124 static void CompileCallLoadPropertyWithInterceptor(
125 MacroAssembler* masm, Register receiver, Register holder, Register name,
126 Handle<JSObject> holder_obj, IC::UtilityId id) {
127 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
128 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
129 NamedLoadHandlerCompiler::kInterceptorArgsLength);
130 }
131
132
133 // Generate call to api function.
134 void PropertyHandlerCompiler::GenerateFastApiCall(
135 MacroAssembler* masm, const CallOptimization& optimization,
136 Handle<Map> receiver_map, Register receiver, Register scratch_in,
137 bool is_store, int argc, Register* values) {
138 DCHECK(!receiver.is(scratch_in));
139 // Preparing to push, adjust sp.
140 __ Subu(sp, sp, Operand((argc + 1) * kPointerSize));
141 __ sw(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver.
142 // Write the arguments to stack frame.
143 for (int i = 0; i < argc; i++) {
144 Register arg = values[argc - 1 - i];
145 DCHECK(!receiver.is(arg));
146 DCHECK(!scratch_in.is(arg));
147 __ sw(arg, MemOperand(sp, (argc - 1 - i) * kPointerSize)); // Push arg.
148 }
149 DCHECK(optimization.is_simple_api_call());
150
151 // Abi for CallApiFunctionStub.
152 Register callee = a0;
153 Register call_data = t0;
154 Register holder = a2;
155 Register api_function_address = a1;
156
157 // Put holder in place.
158 CallOptimization::HolderLookup holder_lookup;
159 Handle<JSObject> api_holder =
160 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
161 switch (holder_lookup) {
162 case CallOptimization::kHolderIsReceiver:
163 __ Move(holder, receiver);
164 break;
165 case CallOptimization::kHolderFound:
166 __ li(holder, api_holder);
167 break;
168 case CallOptimization::kHolderNotFound:
169 UNREACHABLE();
170 break;
171 }
172
173 Isolate* isolate = masm->isolate();
174 Handle<JSFunction> function = optimization.constant_function();
175 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
176 Handle<Object> call_data_obj(api_call_info->data(), isolate);
177
178 // Put callee in place.
179 __ li(callee, function);
180
181 bool call_data_undefined = false;
182 // Put call_data in place.
183 if (isolate->heap()->InNewSpace(*call_data_obj)) {
184 __ li(call_data, api_call_info);
185 __ lw(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
186 } else if (call_data_obj->IsUndefined()) {
187 call_data_undefined = true;
188 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
189 } else {
190 __ li(call_data, call_data_obj);
191 }
192 // Put api_function_address in place.
193 Address function_address = v8::ToCData<Address>(api_call_info->callback());
194 ApiFunction fun(function_address);
195 ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
196 ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
197 __ li(api_function_address, Operand(ref));
198
199 // Jump to stub.
200 CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
201 __ TailCallStub(&stub);
202 }
203
204
205 #undef __
206 #define __ ACCESS_MASM(masm())
207
208
209 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
210 Handle<Name> name) {
211 if (!label->is_unused()) {
212 __ bind(label);
213 __ li(this->name(), Operand(name));
214 }
215 }
216
217
218 // Generate StoreTransition code, value is passed in a0 register.
219 // After executing generated code, the receiver_reg and name_reg
220 // may be clobbered.
221 void NamedStoreHandlerCompiler::GenerateStoreTransition(
222 Handle<Map> transition, Handle<Name> name, Register receiver_reg,
223 Register storage_reg, Register value_reg, Register scratch1,
224 Register scratch2, Register scratch3, Label* miss_label, Label* slow) {
225 // a0 : value.
226 Label exit;
227
228 int descriptor = transition->LastAdded();
229 DescriptorArray* descriptors = transition->instance_descriptors();
230 PropertyDetails details = descriptors->GetDetails(descriptor);
231 Representation representation = details.representation();
232 DCHECK(!representation.IsNone());
233
234 if (details.type() == CONSTANT) {
235 Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
236 __ li(scratch1, constant);
237 __ Branch(miss_label, ne, value_reg, Operand(scratch1));
238 } else if (representation.IsSmi()) {
239 __ JumpIfNotSmi(value_reg, miss_label);
240 } else if (representation.IsHeapObject()) {
241 __ JumpIfSmi(value_reg, miss_label);
242 HeapType* field_type = descriptors->GetFieldType(descriptor);
243 HeapType::Iterator<Map> it = field_type->Classes();
244 Handle<Map> current;
245 if (!it.Done()) {
246 __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
247 Label do_store;
248 while (true) {
249 // Do the CompareMap() directly within the Branch() functions.
250 current = it.Current();
251 it.Advance();
252 if (it.Done()) {
253 __ Branch(miss_label, ne, scratch1, Operand(current));
254 break;
255 }
256 __ Branch(&do_store, eq, scratch1, Operand(current));
257 }
258 __ bind(&do_store);
259 }
260 } else if (representation.IsDouble()) {
261 Label do_store, heap_number;
262 __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex);
263 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow,
264 TAG_RESULT, MUTABLE);
265
266 __ JumpIfNotSmi(value_reg, &heap_number);
267 __ SmiUntag(scratch1, value_reg);
268 __ mtc1(scratch1, f6);
269 __ cvt_d_w(f4, f6);
270 __ jmp(&do_store);
271
272 __ bind(&heap_number);
273 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, miss_label,
274 DONT_DO_SMI_CHECK);
275 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
276
277 __ bind(&do_store);
278 __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
279 }
280
281 // Stub never generated for objects that require access checks.
282 DCHECK(!transition->is_access_check_needed());
283
284 // Perform map transition for the receiver if necessary.
285 if (details.type() == FIELD &&
286 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
287 // The properties must be extended before we can store the value.
288 // We jump to a runtime call that extends the properties array.
289 __ push(receiver_reg);
290 __ li(a2, Operand(transition));
291 __ Push(a2, a0);
292 __ TailCallExternalReference(
293 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
294 isolate()),
295 3, 1);
296 return;
297 }
298
299 // Update the map of the object.
300 __ li(scratch1, Operand(transition));
301 __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
302
303 // Update the write barrier for the map field.
304 __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2,
305 kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
306 OMIT_SMI_CHECK);
307
308 if (details.type() == CONSTANT) {
309 DCHECK(value_reg.is(a0));
310 __ Ret(USE_DELAY_SLOT);
311 __ mov(v0, a0);
312 return;
313 }
314
315 int index = transition->instance_descriptors()->GetFieldIndex(
316 transition->LastAdded());
317
318 // Adjust for the number of properties stored in the object. Even in the
319 // face of a transition we can use the old map here because the size of the
320 // object and the number of in-object properties is not going to change.
321 index -= transition->inobject_properties();
322
323 // TODO(verwaest): Share this code as a code stub.
324 SmiCheck smi_check =
325 representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
326 if (index < 0) {
327 // Set the property straight into the object.
328 int offset = transition->instance_size() + (index * kPointerSize);
329 if (representation.IsDouble()) {
330 __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
331 } else {
332 __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
333 }
334
335 if (!representation.IsSmi()) {
336 // Update the write barrier for the array address.
337 if (!representation.IsDouble()) {
338 __ mov(storage_reg, value_reg);
339 }
340 __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1,
341 kRAHasNotBeenSaved, kDontSaveFPRegs,
342 EMIT_REMEMBERED_SET, smi_check);
343 }
344 } else {
345 // Write to the properties array.
346 int offset = index * kPointerSize + FixedArray::kHeaderSize;
347 // Get the properties array
348 __ lw(scratch1, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
349 if (representation.IsDouble()) {
350 __ sw(storage_reg, FieldMemOperand(scratch1, offset));
351 } else {
352 __ sw(value_reg, FieldMemOperand(scratch1, offset));
353 }
354
355 if (!representation.IsSmi()) {
356 // Update the write barrier for the array address.
357 if (!representation.IsDouble()) {
358 __ mov(storage_reg, value_reg);
359 }
360 __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg,
361 kRAHasNotBeenSaved, kDontSaveFPRegs,
362 EMIT_REMEMBERED_SET, smi_check);
363 }
364 }
365
366 // Return the value (register v0).
367 DCHECK(value_reg.is(a0));
368 __ bind(&exit);
369 __ Ret(USE_DELAY_SLOT);
370 __ mov(v0, a0);
371 }
372
373
374 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup,
375 Register value_reg,
376 Label* miss_label) {
377 DCHECK(lookup->representation().IsHeapObject());
378 __ JumpIfSmi(value_reg, miss_label);
379 HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes();
380 __ lw(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset));
381 Label do_store;
382 Handle<Map> current;
383 while (true) {
384 // Do the CompareMap() directly within the Branch() functions.
385 current = it.Current();
386 it.Advance();
387 if (it.Done()) {
388 __ Branch(miss_label, ne, scratch1(), Operand(current));
389 break;
390 }
391 __ Branch(&do_store, eq, scratch1(), Operand(current));
392 }
393 __ bind(&do_store);
394
395 StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
396 lookup->representation());
397 GenerateTailCall(masm(), stub.GetCode());
398 }
399
400
401 Register PropertyHandlerCompiler::CheckPrototypes(
402 Register object_reg, Register holder_reg, Register scratch1,
403 Register scratch2, Handle<Name> name, Label* miss,
404 PrototypeCheckType check) {
405 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
406
407 // Make sure there's no overlap between holder and object registers.
408 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
409 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
410 !scratch2.is(scratch1));
411
412 // Keep track of the current object in register reg.
413 Register reg = object_reg;
414 int depth = 0;
415
416 Handle<JSObject> current = Handle<JSObject>::null();
417 if (type()->IsConstant()) {
418 current = Handle<JSObject>::cast(type()->AsConstant()->Value());
419 }
420 Handle<JSObject> prototype = Handle<JSObject>::null();
421 Handle<Map> current_map = receiver_map;
422 Handle<Map> holder_map(holder()->map());
423 // Traverse the prototype chain and check the maps in the prototype chain for
424 // fast and global objects or do negative lookup for normal objects.
425 while (!current_map.is_identical_to(holder_map)) {
426 ++depth;
427
428 // Only global objects and objects that do not require access
429 // checks are allowed in stubs.
430 DCHECK(current_map->IsJSGlobalProxyMap() ||
431 !current_map->is_access_check_needed());
432
433 prototype = handle(JSObject::cast(current_map->prototype()));
434 if (current_map->is_dictionary_map() &&
435 !current_map->IsJSGlobalObjectMap()) {
436 DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast.
437 if (!name->IsUniqueName()) {
438 DCHECK(name->IsString());
439 name = factory()->InternalizeString(Handle<String>::cast(name));
440 }
441 DCHECK(current.is_null() ||
442 current->property_dictionary()->FindEntry(name) ==
443 NameDictionary::kNotFound);
444
445 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
446 scratch2);
447
448 __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
449 reg = holder_reg; // From now on the object will be in holder_reg.
450 __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
451 } else {
452 Register map_reg = scratch1;
453 if (depth != 1 || check == CHECK_ALL_MAPS) {
454 // CheckMap implicitly loads the map of |reg| into |map_reg|.
455 __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
456 } else {
457 __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
458 }
459
460 // Check access rights to the global object. This has to happen after
461 // the map check so that we know that the object is actually a global
462 // object.
463 // This allows us to install generated handlers for accesses to the
464 // global proxy (as opposed to using slow ICs). See corresponding code
465 // in LookupForRead().
466 if (current_map->IsJSGlobalProxyMap()) {
467 __ CheckAccessGlobalProxy(reg, scratch2, miss);
468 } else if (current_map->IsJSGlobalObjectMap()) {
469 GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
470 name, scratch2, miss);
471 }
472
473 reg = holder_reg; // From now on the object will be in holder_reg.
474
475 // Two possible reasons for loading the prototype from the map:
476 // (1) Can't store references to new space in code.
477 // (2) Handler is shared for all receivers with the same prototype
478 // map (but not necessarily the same prototype instance).
479 bool load_prototype_from_map =
480 heap()->InNewSpace(*prototype) || depth == 1;
481 if (load_prototype_from_map) {
482 __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
483 } else {
484 __ li(reg, Operand(prototype));
485 }
486 }
487
488 // Go to the next object in the prototype chain.
489 current = prototype;
490 current_map = handle(current->map());
491 }
492
493 // Log the check depth.
494 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
495
496 if (depth != 0 || check == CHECK_ALL_MAPS) {
497 // Check the holder map.
498 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
499 }
500
501 // Perform security check for access to the global object.
502 DCHECK(current_map->IsJSGlobalProxyMap() ||
503 !current_map->is_access_check_needed());
504 if (current_map->IsJSGlobalProxyMap()) {
505 __ CheckAccessGlobalProxy(reg, scratch1, miss);
506 }
507
508 // Return the register containing the holder.
509 return reg;
510 }
511
512
513 void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
514 if (!miss->is_unused()) {
515 Label success;
516 __ Branch(&success);
517 __ bind(miss);
518 TailCallBuiltin(masm(), MissBuiltin(kind()));
519 __ bind(&success);
520 }
521 }
522
523
524 void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
525 if (!miss->is_unused()) {
526 Label success;
527 __ Branch(&success);
528 GenerateRestoreName(miss, name);
529 TailCallBuiltin(masm(), MissBuiltin(kind()));
530 __ bind(&success);
531 }
532 }
533
534
535 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
536 // Return the constant value.
537 __ li(v0, value);
538 __ Ret();
539 }
540
541
542 void NamedLoadHandlerCompiler::GenerateLoadCallback(
543 Register reg, Handle<ExecutableAccessorInfo> callback) {
544 // Build AccessorInfo::args_ list on the stack and push property name below
545 // the exit frame to make GC aware of them and store pointers to them.
546 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
547 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
548 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
549 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
550 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
551 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
552 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
553 DCHECK(!scratch2().is(reg));
554 DCHECK(!scratch3().is(reg));
555 DCHECK(!scratch4().is(reg));
556 __ push(receiver());
557 if (heap()->InNewSpace(callback->data())) {
558 __ li(scratch3(), callback);
559 __ lw(scratch3(),
560 FieldMemOperand(scratch3(), ExecutableAccessorInfo::kDataOffset));
561 } else {
562 __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
563 }
564 __ Subu(sp, sp, 6 * kPointerSize);
565 __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
566 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
567 __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
568 __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
569 __ li(scratch4(), Operand(ExternalReference::isolate_address(isolate())));
570 __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
571 __ sw(reg, MemOperand(sp, 1 * kPointerSize));
572 __ sw(name(), MemOperand(sp, 0 * kPointerSize));
573 __ Addu(scratch2(), sp, 1 * kPointerSize);
574
575 __ mov(a2, scratch2()); // Saved in case scratch2 == a1.
576 // Abi for CallApiGetter.
577 Register getter_address_reg = a2;
578
579 Address getter_address = v8::ToCData<Address>(callback->getter());
580 ApiFunction fun(getter_address);
581 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
582 ExternalReference ref = ExternalReference(&fun, type, isolate());
583 __ li(getter_address_reg, Operand(ref));
584
585 CallApiGetterStub stub(isolate());
586 __ TailCallStub(&stub);
587 }
588
589
590 void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
591 LookupIterator* it, Register holder_reg) {
592 DCHECK(holder()->HasNamedInterceptor());
593 DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
594
595 // Compile the interceptor call, followed by inline code to load the
596 // property from further up the prototype chain if the call fails.
597 // Check that the maps haven't changed.
598 DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
599
600 // Preserve the receiver register explicitly whenever it is different from the
601 // holder and it is needed should the interceptor return without any result.
602 // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
603 // case might cause a miss during the prototype check.
604 bool must_perform_prototype_check =
605 !holder().is_identical_to(it->GetHolder<JSObject>());
606 bool must_preserve_receiver_reg =
607 !receiver().is(holder_reg) &&
608 (it->property_kind() == LookupIterator::ACCESSOR ||
609 must_perform_prototype_check);
610
611 // Save necessary data before invoking an interceptor.
612 // Requires a frame to make GC aware of pushed pointers.
613 {
614 FrameScope frame_scope(masm(), StackFrame::INTERNAL);
615 if (must_preserve_receiver_reg) {
616 __ Push(receiver(), holder_reg, this->name());
617 } else {
618 __ Push(holder_reg, this->name());
619 }
620 // Invoke an interceptor. Note: map checks from receiver to
621 // interceptor's holder has been compiled before (see a caller
622 // of this method).
623 CompileCallLoadPropertyWithInterceptor(
624 masm(), receiver(), holder_reg, this->name(), holder(),
625 IC::kLoadPropertyWithInterceptorOnly);
626
627 // Check if interceptor provided a value for property. If it's
628 // the case, return immediately.
629 Label interceptor_failed;
630 __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
631 __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
632 frame_scope.GenerateLeaveFrame();
633 __ Ret();
634
635 __ bind(&interceptor_failed);
636 if (must_preserve_receiver_reg) {
637 __ Pop(receiver(), holder_reg, this->name());
638 } else {
639 __ Pop(holder_reg, this->name());
640 }
641 // Leave the internal frame.
642 }
643
644 GenerateLoadPostInterceptor(it, holder_reg);
645 }
646
647
648 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
649 // Call the runtime system to load the interceptor.
650 DCHECK(holder()->HasNamedInterceptor());
651 DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
652 PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
653 holder());
654
655 ExternalReference ref = ExternalReference(
656 IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
657 __ TailCallExternalReference(
658 ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1);
659 }
660
661
662 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
663 Handle<JSObject> object, Handle<Name> name,
664 Handle<ExecutableAccessorInfo> callback) {
665 Register holder_reg = Frontend(receiver(), name);
666
667 __ Push(receiver(), holder_reg); // Receiver.
668 __ li(at, Operand(callback)); // Callback info.
669 __ push(at);
670 __ li(at, Operand(name));
671 __ Push(at, value());
672
673 // Do tail-call to the runtime system.
674 ExternalReference store_callback_property =
675 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
676 __ TailCallExternalReference(store_callback_property, 5, 1);
677
678 // Return the generated code.
679 return GetCode(kind(), Code::FAST, name);
680 }
681
682
683 #undef __
684 #define __ ACCESS_MASM(masm)
685
686
687 void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
688 MacroAssembler* masm, Handle<HeapType> type, Register receiver,
689 Handle<JSFunction> setter) {
690 // ----------- S t a t e -------------
691 // -- ra : return address
692 // -----------------------------------
693 {
694 FrameScope scope(masm, StackFrame::INTERNAL);
695
696 // Save value register, so we can restore it later.
697 __ push(value());
698
699 if (!setter.is_null()) {
700 // Call the JavaScript setter with receiver and value on the stack.
701 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
702 // Swap in the global receiver.
703 __ lw(receiver,
704 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
705 }
706 __ Push(receiver, value());
707 ParameterCount actual(1);
708 ParameterCount expected(setter);
709 __ InvokeFunction(setter, expected, actual, CALL_FUNCTION,
710 NullCallWrapper());
711 } else {
712 // If we generate a global code snippet for deoptimization only, remember
713 // the place to continue after deoptimization.
714 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
715 }
716
717 // We have to return the passed value, not the return value of the setter.
718 __ pop(v0);
719
720 // Restore context register.
721 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
722 }
723 __ Ret();
724 }
725
726
727 #undef __
728 #define __ ACCESS_MASM(masm())
729
730
731 Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
732 Handle<Name> name) {
733 __ Push(receiver(), this->name(), value());
734
735 // Do tail-call to the runtime system.
736 ExternalReference store_ic_property = ExternalReference(
737 IC_Utility(IC::kStorePropertyWithInterceptor), isolate());
738 __ TailCallExternalReference(store_ic_property, 3, 1);
739
740 // Return the generated code.
741 return GetCode(kind(), Code::FAST, name);
742 }
743
744
745 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); }
746
747
748 #undef __
749 #define __ ACCESS_MASM(masm)
750
751
752 void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
753 MacroAssembler* masm, Handle<HeapType> type, Register receiver,
754 Handle<JSFunction> getter) {
755 // ----------- S t a t e -------------
756 // -- a0 : receiver
757 // -- a2 : name
758 // -- ra : return address
759 // -----------------------------------
760 {
761 FrameScope scope(masm, StackFrame::INTERNAL);
762
763 if (!getter.is_null()) {
764 // Call the JavaScript getter with the receiver on the stack.
765 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
766 // Swap in the global receiver.
767 __ lw(receiver,
768 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
769 }
770 __ push(receiver);
771 ParameterCount actual(0);
772 ParameterCount expected(getter);
773 __ InvokeFunction(getter, expected, actual, CALL_FUNCTION,
774 NullCallWrapper());
775 } else {
776 // If we generate a global code snippet for deoptimization only, remember
777 // the place to continue after deoptimization.
778 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
779 }
780
781 // Restore context register.
782 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
783 }
784 __ Ret();
785 }
786
787
788 #undef __
789 #define __ ACCESS_MASM(masm())
790
791
792 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
793 Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
794 Label miss;
795
796 FrontendHeader(receiver(), name, &miss);
797
798 // Get the value from the cell.
799 Register result = StoreIC::ValueRegister();
800 __ li(result, Operand(cell));
801 __ lw(result, FieldMemOperand(result, Cell::kValueOffset));
802
803 // Check for deleted property if property can actually be deleted.
804 if (is_configurable) {
805 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
806 __ Branch(&miss, eq, result, Operand(at));
807 }
808
809 Counters* counters = isolate()->counters();
810 __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
811 __ Ret(USE_DELAY_SLOT);
812 __ mov(v0, result);
813
814 FrontendFooter(name, &miss);
815
816 // Return the generated code.
817 return GetCode(kind(), Code::NORMAL, name);
818 }
819
820
821 Handle<Code> PropertyICCompiler::CompilePolymorphic(TypeHandleList* types,
822 CodeHandleList* handlers,
823 Handle<Name> name,
824 Code::StubType type,
825 IcCheckType check) {
826 Label miss;
827
828 if (check == PROPERTY &&
829 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
830 // In case we are compiling an IC for dictionary loads and stores, just
(...skipping 72 matching lines...)
903
904 // Return the generated code.
905 return GetCode(kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
906 }
907
908
909 #undef __
910 #define __ ACCESS_MASM(masm)
911
912
913 void ElementHandlerCompiler::GenerateLoadDictionaryElement(
914 MacroAssembler* masm) {
915 // The return address is in ra.
916 Label slow, miss;
917
918 Register key = LoadIC::NameRegister();
919 Register receiver = LoadIC::ReceiverRegister();
920 DCHECK(receiver.is(a1));
921 DCHECK(key.is(a2));
922
923 __ UntagAndJumpIfNotSmi(t2, key, &miss);
924 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
925 __ LoadFromNumberDictionary(&slow, t0, key, v0, t2, a3, t1);
926 __ Ret();
927
928 // Slow case, key and receiver still unmodified.
929 __ bind(&slow);
930 __ IncrementCounter(
931 masm->isolate()->counters()->keyed_load_external_array_slow(), 1, a2, a3);
932
933 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
934
935 // Miss case, call the runtime.
936 __ bind(&miss);
937
938 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
939 }
940
941
942 void PropertyICCompiler::GenerateRuntimeSetProperty(MacroAssembler* masm,
943 StrictMode strict_mode) {
944 __ Push(StoreIC::ReceiverRegister(), StoreIC::NameRegister(),
945 StoreIC::ValueRegister());
946
947 __ li(a0, Operand(Smi::FromInt(strict_mode)));
948 __ Push(a0);
949
950 // Do tail-call to runtime routine.
951 __ TailCallRuntime(Runtime::kSetProperty, 4, 1);
952 }
953
954
955 #undef __
956 }
957 } // namespace v8::internal
958
959 #endif // V8_TARGET_ARCH_MIPS