OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/codegen.h" | 9 #include "src/ic/ic-compiler.h" |
10 #include "src/ic-inl.h" | |
11 #include "src/stub-cache.h" | |
12 | 10 |
13 namespace v8 { | 11 namespace v8 { |
14 namespace internal { | 12 namespace internal { |
15 | 13 |
16 #define __ ACCESS_MASM(masm) | 14 #define __ ACCESS_MASM(masm) |
17 | 15 |
18 | 16 |
19 static void ProbeTable(Isolate* isolate, | |
20 MacroAssembler* masm, | |
21 Code::Flags flags, | |
22 StubCache::Table table, | |
23 Register receiver, | |
24 Register name, | |
25 // Number of the cache entry, not scaled. | |
26 Register offset, | |
27 Register scratch, | |
28 Register scratch2, | |
29 Register offset_scratch) { | |
30 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | |
31 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); | |
32 ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); | |
33 | |
34 uint64_t key_off_addr = reinterpret_cast<uint64_t>(key_offset.address()); | |
35 uint64_t value_off_addr = reinterpret_cast<uint64_t>(value_offset.address()); | |
36 uint64_t map_off_addr = reinterpret_cast<uint64_t>(map_offset.address()); | |
37 | |
38 // Check the relative positions of the address fields. | |
39 DCHECK(value_off_addr > key_off_addr); | |
40 DCHECK((value_off_addr - key_off_addr) % 4 == 0); | |
41 DCHECK((value_off_addr - key_off_addr) < (256 * 4)); | |
42 DCHECK(map_off_addr > key_off_addr); | |
43 DCHECK((map_off_addr - key_off_addr) % 4 == 0); | |
44 DCHECK((map_off_addr - key_off_addr) < (256 * 4)); | |
45 | |
46 Label miss; | |
47 Register base_addr = scratch; | |
48 scratch = no_reg; | |
49 | |
50 // Multiply by 3 because there are 3 fields per entry (name, code, map). | |
51 __ dsll(offset_scratch, offset, 1); | |
52 __ Daddu(offset_scratch, offset_scratch, offset); | |
53 | |
54 // Calculate the base address of the entry. | |
55 __ li(base_addr, Operand(key_offset)); | |
56 __ dsll(at, offset_scratch, kPointerSizeLog2); | |
57 __ Daddu(base_addr, base_addr, at); | |
58 | |
59 // Check that the key in the entry matches the name. | |
60 __ ld(at, MemOperand(base_addr, 0)); | |
61 __ Branch(&miss, ne, name, Operand(at)); | |
62 | |
63 // Check the map matches. | |
64 __ ld(at, MemOperand(base_addr, map_off_addr - key_off_addr)); | |
65 __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
66 __ Branch(&miss, ne, at, Operand(scratch2)); | |
67 | |
68 // Get the code entry from the cache. | |
69 Register code = scratch2; | |
70 scratch2 = no_reg; | |
71 __ ld(code, MemOperand(base_addr, value_off_addr - key_off_addr)); | |
72 | |
73 // Check that the flags match what we're looking for. | |
74 Register flags_reg = base_addr; | |
75 base_addr = no_reg; | |
76 __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset)); | |
77 __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup)); | |
78 __ Branch(&miss, ne, flags_reg, Operand(flags)); | |
79 | |
80 #ifdef DEBUG | |
81 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { | |
82 __ jmp(&miss); | |
83 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { | |
84 __ jmp(&miss); | |
85 } | |
86 #endif | |
87 | |
88 // Jump to the first instruction in the code stub. | |
89 __ Daddu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag)); | |
90 __ Jump(at); | |
91 | |
92 // Miss: fall through. | |
93 __ bind(&miss); | |
94 } | |
95 | |
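A minimal standalone C++ sketch of the address arithmetic in the ProbeTable helper deleted above, assuming a stub-cache entry of three pointer-sized fields (key, value, map); the names Entry and EntryAddress are illustrative, not V8 API:

struct Entry { void* key; void* value; void* map; };  // three pointer fields per entry

// table_base is the address of the key column (key_offset above); offset is
// the unscaled entry number passed in by the caller.
static inline uintptr_t EntryAddress(uintptr_t table_base, uint64_t offset,
                                     int pointer_size_log2) {
  // dsll by 1 plus a Daddu gives offset * 3 (three fields per entry); the
  // final shift converts field slots to bytes, as in the assembly above.
  uint64_t scaled = (offset << 1) + offset;  // offset * 3
  return table_base + (scaled << pointer_size_log2);
}

The key is then read at byte 0 of that address, the map at (map_off_addr - key_off_addr), and the code object at (value_off_addr - key_off_addr), exactly as the three ld instructions above do.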
96 | |
97 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( | 17 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( |
98 MacroAssembler* masm, Label* miss_label, Register receiver, | 18 MacroAssembler* masm, Label* miss_label, Register receiver, |
99 Handle<Name> name, Register scratch0, Register scratch1) { | 19 Handle<Name> name, Register scratch0, Register scratch1) { |
100 DCHECK(name->IsUniqueName()); | 20 DCHECK(name->IsUniqueName()); |
101 DCHECK(!receiver.is(scratch0)); | 21 DCHECK(!receiver.is(scratch0)); |
102 Counters* counters = masm->isolate()->counters(); | 22 Counters* counters = masm->isolate()->counters(); |
103 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); | 23 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); |
104 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); | 24 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); |
105 | 25 |
106 Label done; | 26 Label done; |
(...skipping 18 matching lines...) |
125 // Check that the properties array is a dictionary. | 45 // Check that the properties array is a dictionary. |
126 __ ld(map, FieldMemOperand(properties, HeapObject::kMapOffset)); | 46 __ ld(map, FieldMemOperand(properties, HeapObject::kMapOffset)); |
127 Register tmp = properties; | 47 Register tmp = properties; |
128 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); | 48 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); |
129 __ Branch(miss_label, ne, map, Operand(tmp)); | 49 __ Branch(miss_label, ne, map, Operand(tmp)); |
130 | 50 |
131 // Restore the temporarily used register. | 51 // Restore the temporarily used register. |
132 __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 52 __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
133 | 53 |
134 | 54 |
135 NameDictionaryLookupStub::GenerateNegativeLookup(masm, | 55 NameDictionaryLookupStub::GenerateNegativeLookup( |
136 miss_label, | 56 masm, miss_label, &done, receiver, properties, name, scratch1); |
137 &done, | |
138 receiver, | |
139 properties, | |
140 name, | |
141 scratch1); | |
142 __ bind(&done); | 57 __ bind(&done); |
143 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); | 58 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); |
144 } | 59 } |
145 | 60 |
146 | 61 |
147 void StubCache::GenerateProbe(MacroAssembler* masm, | |
148 Code::Flags flags, | |
149 Register receiver, | |
150 Register name, | |
151 Register scratch, | |
152 Register extra, | |
153 Register extra2, | |
154 Register extra3) { | |
155 Isolate* isolate = masm->isolate(); | |
156 Label miss; | |
157 | |
158 // Make sure that code is valid. The multiplying code relies on the | |
159 // entry size being 12. | |
160 // DCHECK(sizeof(Entry) == 12); | |
161 // DCHECK(sizeof(Entry) == 3 * kPointerSize); | |
162 | |
163 // Make sure the flags does not name a specific type. | |
164 DCHECK(Code::ExtractTypeFromFlags(flags) == 0); | |
165 | |
166 // Make sure that there are no register conflicts. | |
167 DCHECK(!scratch.is(receiver)); | |
168 DCHECK(!scratch.is(name)); | |
169 DCHECK(!extra.is(receiver)); | |
170 DCHECK(!extra.is(name)); | |
171 DCHECK(!extra.is(scratch)); | |
172 DCHECK(!extra2.is(receiver)); | |
173 DCHECK(!extra2.is(name)); | |
174 DCHECK(!extra2.is(scratch)); | |
175 DCHECK(!extra2.is(extra)); | |
176 | |
177 // Check register validity. | |
178 DCHECK(!scratch.is(no_reg)); | |
179 DCHECK(!extra.is(no_reg)); | |
180 DCHECK(!extra2.is(no_reg)); | |
181 DCHECK(!extra3.is(no_reg)); | |
182 | |
183 Counters* counters = masm->isolate()->counters(); | |
184 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, | |
185 extra2, extra3); | |
186 | |
187 // Check that the receiver isn't a smi. | |
188 __ JumpIfSmi(receiver, &miss); | |
189 | |
190 // Get the map of the receiver and compute the hash. | |
191 __ ld(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); | |
192 __ ld(at, FieldMemOperand(receiver, HeapObject::kMapOffset)); | |
193 __ Daddu(scratch, scratch, at); | |
194 uint64_t mask = kPrimaryTableSize - 1; | |
195 // We shift out the last two bits because they are not part of the hash and | |
196 // they are always 01 for maps. | |
197 __ dsrl(scratch, scratch, kCacheIndexShift); | |
198 __ Xor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask)); | |
199 __ And(scratch, scratch, Operand(mask)); | |
200 | |
201 // Probe the primary table. | |
202 ProbeTable(isolate, | |
203 masm, | |
204 flags, | |
205 kPrimary, | |
206 receiver, | |
207 name, | |
208 scratch, | |
209 extra, | |
210 extra2, | |
211 extra3); | |
212 | |
213 // Primary miss: Compute hash for secondary probe. | |
214 __ dsrl(at, name, kCacheIndexShift); | |
215 __ Dsubu(scratch, scratch, at); | |
216 uint64_t mask2 = kSecondaryTableSize - 1; | |
217 __ Daddu(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2)); | |
218 __ And(scratch, scratch, Operand(mask2)); | |
219 | |
220 // Probe the secondary table. | |
221 ProbeTable(isolate, | |
222 masm, | |
223 flags, | |
224 kSecondary, | |
225 receiver, | |
226 name, | |
227 scratch, | |
228 extra, | |
229 extra2, | |
230 extra3); | |
231 | |
232 // Cache miss: Fall-through and let caller handle the miss by | |
233 // entering the runtime system. | |
234 __ bind(&miss); | |
235 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, | |
236 extra2, extra3); | |
237 } | |
238 | |
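A rough C++ restatement, as a sketch only, of the hash arithmetic in the deleted StubCache::GenerateProbe above; the function and parameter names (PrimaryOffset, SecondaryOffset, map_word, name_word) are illustrative and not part of V8:

static inline uint64_t PrimaryOffset(uint64_t name_hash, uint64_t map_word,
                                     uint64_t flags, uint64_t table_size,
                                     int cache_index_shift) {
  uint64_t mask = table_size - 1;  // table sizes are powers of two
  // Mirror the dsrl/Xor/And sequence: drop the low bits that are not part
  // of the hash, mix in the code flags, and wrap to the table size.
  return (((name_hash + map_word) >> cache_index_shift) ^
          ((flags >> cache_index_shift) & mask)) & mask;
}

static inline uint64_t SecondaryOffset(uint64_t primary, uint64_t name_word,
                                       uint64_t flags, uint64_t table_size,
                                       int cache_index_shift) {
  uint64_t mask = table_size - 1;
  // Dsubu/Daddu/And: subtract the shifted name word, add the flags term,
  // and wrap again (unsigned arithmetic wraps, matching the hardware).
  return ((primary - (name_word >> cache_index_shift)) +
          ((flags >> cache_index_shift) & mask)) & mask;
}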
239 | |
240 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype( | 62 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype( |
241 MacroAssembler* masm, int index, Register prototype, Label* miss) { | 63 MacroAssembler* masm, int index, Register prototype, Label* miss) { |
242 Isolate* isolate = masm->isolate(); | 64 Isolate* isolate = masm->isolate(); |
243 // Get the global function with the given index. | 65 // Get the global function with the given index. |
244 Handle<JSFunction> function( | 66 Handle<JSFunction> function( |
245 JSFunction::cast(isolate->native_context()->get(index))); | 67 JSFunction::cast(isolate->native_context()->get(index))); |
246 | 68 |
247 // Check we're still in the same context. | 69 // Check we're still in the same context. |
248 Register scratch = prototype; | 70 Register scratch = prototype; |
249 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); | 71 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); |
(...skipping 24 matching lines...) |
274 Register scratch, Label* miss) { | 96 Register scratch, Label* miss) { |
275 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); | 97 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); |
276 DCHECK(cell->value()->IsTheHole()); | 98 DCHECK(cell->value()->IsTheHole()); |
277 __ li(scratch, Operand(cell)); | 99 __ li(scratch, Operand(cell)); |
278 __ ld(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); | 100 __ ld(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); |
279 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 101 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
280 __ Branch(miss, ne, scratch, Operand(at)); | 102 __ Branch(miss, ne, scratch, Operand(at)); |
281 } | 103 } |
282 | 104 |
283 | 105 |
284 static void PushInterceptorArguments(MacroAssembler* masm, | 106 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver, |
285 Register receiver, | 107 Register holder, Register name, |
286 Register holder, | |
287 Register name, | |
288 Handle<JSObject> holder_obj) { | 108 Handle<JSObject> holder_obj) { |
289 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); | 109 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); |
290 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); | 110 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); |
291 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); | 111 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); |
292 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); | 112 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); |
293 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4); | 113 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4); |
294 __ push(name); | 114 __ push(name); |
295 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor()); | 115 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor()); |
296 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor)); | 116 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor)); |
297 Register scratch = name; | 117 Register scratch = name; |
298 __ li(scratch, Operand(interceptor)); | 118 __ li(scratch, Operand(interceptor)); |
299 __ Push(scratch, receiver, holder); | 119 __ Push(scratch, receiver, holder); |
300 } | 120 } |
301 | 121 |
302 | 122 |
303 static void CompileCallLoadPropertyWithInterceptor( | 123 static void CompileCallLoadPropertyWithInterceptor( |
304 MacroAssembler* masm, | 124 MacroAssembler* masm, Register receiver, Register holder, Register name, |
305 Register receiver, | 125 Handle<JSObject> holder_obj, IC::UtilityId id) { |
306 Register holder, | |
307 Register name, | |
308 Handle<JSObject> holder_obj, | |
309 IC::UtilityId id) { | |
310 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); | 126 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); |
311 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()), | 127 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()), |
312 NamedLoadHandlerCompiler::kInterceptorArgsLength); | 128 NamedLoadHandlerCompiler::kInterceptorArgsLength); |
313 } | 129 } |
314 | 130 |
315 | 131 |
316 // Generate call to api function. | 132 // Generate call to api function. |
317 void PropertyHandlerCompiler::GenerateFastApiCall( | 133 void PropertyHandlerCompiler::GenerateFastApiCall( |
318 MacroAssembler* masm, const CallOptimization& optimization, | 134 MacroAssembler* masm, const CallOptimization& optimization, |
319 Handle<Map> receiver_map, Register receiver, Register scratch_in, | 135 Handle<Map> receiver_map, Register receiver, Register scratch_in, |
320 bool is_store, int argc, Register* values) { | 136 bool is_store, int argc, Register* values) { |
321 DCHECK(!receiver.is(scratch_in)); | 137 DCHECK(!receiver.is(scratch_in)); |
322 // Preparing to push, adjust sp. | 138 // Preparing to push, adjust sp. |
323 __ Dsubu(sp, sp, Operand((argc + 1) * kPointerSize)); | 139 __ Dsubu(sp, sp, Operand((argc + 1) * kPointerSize)); |
324 __ sd(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver. | 140 __ sd(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver. |
325 // Write the arguments to stack frame. | 141 // Write the arguments to stack frame. |
326 for (int i = 0; i < argc; i++) { | 142 for (int i = 0; i < argc; i++) { |
327 Register arg = values[argc-1-i]; | 143 Register arg = values[argc - 1 - i]; |
328 DCHECK(!receiver.is(arg)); | 144 DCHECK(!receiver.is(arg)); |
329 DCHECK(!scratch_in.is(arg)); | 145 DCHECK(!scratch_in.is(arg)); |
330 __ sd(arg, MemOperand(sp, (argc-1-i) * kPointerSize)); // Push arg. | 146 __ sd(arg, MemOperand(sp, (argc - 1 - i) * kPointerSize)); // Push arg. |
331 } | 147 } |
332 DCHECK(optimization.is_simple_api_call()); | 148 DCHECK(optimization.is_simple_api_call()); |
333 | 149 |
334 // Abi for CallApiFunctionStub. | 150 // Abi for CallApiFunctionStub. |
335 Register callee = a0; | 151 Register callee = a0; |
336 Register call_data = a4; | 152 Register call_data = a4; |
337 Register holder = a2; | 153 Register holder = a2; |
338 Register api_function_address = a1; | 154 Register api_function_address = a1; |
339 | 155 |
340 // Put holder in place. | 156 // Put holder in place. |
341 CallOptimization::HolderLookup holder_lookup; | 157 CallOptimization::HolderLookup holder_lookup; |
342 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType( | 158 Handle<JSObject> api_holder = |
343 receiver_map, | 159 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup); |
344 &holder_lookup); | |
345 switch (holder_lookup) { | 160 switch (holder_lookup) { |
346 case CallOptimization::kHolderIsReceiver: | 161 case CallOptimization::kHolderIsReceiver: |
347 __ Move(holder, receiver); | 162 __ Move(holder, receiver); |
348 break; | 163 break; |
349 case CallOptimization::kHolderFound: | 164 case CallOptimization::kHolderFound: |
350 __ li(holder, api_holder); | 165 __ li(holder, api_holder); |
351 break; | 166 break; |
352 case CallOptimization::kHolderNotFound: | 167 case CallOptimization::kHolderNotFound: |
353 UNREACHABLE(); | 168 UNREACHABLE(); |
354 break; | 169 break; |
355 } | 170 } |
356 | 171 |
357 Isolate* isolate = masm->isolate(); | 172 Isolate* isolate = masm->isolate(); |
358 Handle<JSFunction> function = optimization.constant_function(); | 173 Handle<JSFunction> function = optimization.constant_function(); |
359 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | 174 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); |
360 Handle<Object> call_data_obj(api_call_info->data(), isolate); | 175 Handle<Object> call_data_obj(api_call_info->data(), isolate); |
361 | 176 |
362 // Put callee in place. | 177 // Put callee in place. |
363 __ li(callee, function); | 178 __ li(callee, function); |
364 | 179 |
365 bool call_data_undefined = false; | 180 bool call_data_undefined = false; |
366 // Put call_data in place. | 181 // Put call_data in place. |
367 if (isolate->heap()->InNewSpace(*call_data_obj)) { | 182 if (isolate->heap()->InNewSpace(*call_data_obj)) { |
368 __ li(call_data, api_call_info); | 183 __ li(call_data, api_call_info); |
369 __ ld(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset)); | 184 __ ld(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset)); |
370 } else if (call_data_obj->IsUndefined()) { | 185 } else if (call_data_obj->IsUndefined()) { |
371 call_data_undefined = true; | 186 call_data_undefined = true; |
372 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); | 187 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); |
373 } else { | 188 } else { |
374 __ li(call_data, call_data_obj); | 189 __ li(call_data, call_data_obj); |
375 } | 190 } |
376 // Put api_function_address in place. | 191 // Put api_function_address in place. |
377 Address function_address = v8::ToCData<Address>(api_call_info->callback()); | 192 Address function_address = v8::ToCData<Address>(api_call_info->callback()); |
378 ApiFunction fun(function_address); | 193 ApiFunction fun(function_address); |
379 ExternalReference::Type type = ExternalReference::DIRECT_API_CALL; | 194 ExternalReference::Type type = ExternalReference::DIRECT_API_CALL; |
380 ExternalReference ref = | 195 ExternalReference ref = ExternalReference(&fun, type, masm->isolate()); |
381 ExternalReference(&fun, | |
382 type, | |
383 masm->isolate()); | |
384 __ li(api_function_address, Operand(ref)); | 196 __ li(api_function_address, Operand(ref)); |
385 | 197 |
386 // Jump to stub. | 198 // Jump to stub. |
387 CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); | 199 CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); |
388 __ TailCallStub(&stub); | 200 __ TailCallStub(&stub); |
389 } | 201 } |
390 | 202 |
391 | 203 |
392 void PropertyAccessCompiler::GenerateTailCall(MacroAssembler* masm, | 204 void PropertyAccessCompiler::GenerateTailCall(MacroAssembler* masm, |
393 Handle<Code> code) { | 205 Handle<Code> code) { |
(...skipping 62 matching lines...) |
456 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow, | 268 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow, |
457 TAG_RESULT, MUTABLE); | 269 TAG_RESULT, MUTABLE); |
458 | 270 |
459 __ JumpIfNotSmi(value_reg, &heap_number); | 271 __ JumpIfNotSmi(value_reg, &heap_number); |
460 __ SmiUntag(scratch1, value_reg); | 272 __ SmiUntag(scratch1, value_reg); |
461 __ mtc1(scratch1, f6); | 273 __ mtc1(scratch1, f6); |
462 __ cvt_d_w(f4, f6); | 274 __ cvt_d_w(f4, f6); |
463 __ jmp(&do_store); | 275 __ jmp(&do_store); |
464 | 276 |
465 __ bind(&heap_number); | 277 __ bind(&heap_number); |
466 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, | 278 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, miss_label, |
467 miss_label, DONT_DO_SMI_CHECK); | 279 DONT_DO_SMI_CHECK); |
468 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | 280 __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); |
469 | 281 |
470 __ bind(&do_store); | 282 __ bind(&do_store); |
471 __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); | 283 __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); |
472 } | 284 } |
473 | 285 |
474 // Stub never generated for objects that require access checks. | 286 // Stub never generated for objects that require access checks. |
475 DCHECK(!transition->is_access_check_needed()); | 287 DCHECK(!transition->is_access_check_needed()); |
476 | 288 |
477 // Perform map transition for the receiver if necessary. | 289 // Perform map transition for the receiver if necessary. |
478 if (details.type() == FIELD && | 290 if (details.type() == FIELD && |
479 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { | 291 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { |
480 // The properties must be extended before we can store the value. | 292 // The properties must be extended before we can store the value. |
481 // We jump to a runtime call that extends the properties array. | 293 // We jump to a runtime call that extends the properties array. |
482 __ push(receiver_reg); | 294 __ push(receiver_reg); |
483 __ li(a2, Operand(transition)); | 295 __ li(a2, Operand(transition)); |
484 __ Push(a2, a0); | 296 __ Push(a2, a0); |
485 __ TailCallExternalReference( | 297 __ TailCallExternalReference( |
486 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), | 298 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), |
487 isolate()), | 299 isolate()), |
488 3, 1); | 300 3, 1); |
489 return; | 301 return; |
490 } | 302 } |
491 | 303 |
492 // Update the map of the object. | 304 // Update the map of the object. |
493 __ li(scratch1, Operand(transition)); | 305 __ li(scratch1, Operand(transition)); |
494 __ sd(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); | 306 __ sd(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
495 | 307 |
496 // Update the write barrier for the map field. | 308 // Update the write barrier for the map field. |
497 __ RecordWriteField(receiver_reg, | 309 __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2, |
498 HeapObject::kMapOffset, | 310 kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET, |
499 scratch1, | |
500 scratch2, | |
501 kRAHasNotBeenSaved, | |
502 kDontSaveFPRegs, | |
503 OMIT_REMEMBERED_SET, | |
504 OMIT_SMI_CHECK); | 311 OMIT_SMI_CHECK); |
505 | 312 |
506 if (details.type() == CONSTANT) { | 313 if (details.type() == CONSTANT) { |
507 DCHECK(value_reg.is(a0)); | 314 DCHECK(value_reg.is(a0)); |
508 __ Ret(USE_DELAY_SLOT); | 315 __ Ret(USE_DELAY_SLOT); |
509 __ mov(v0, a0); | 316 __ mov(v0, a0); |
510 return; | 317 return; |
511 } | 318 } |
512 | 319 |
513 int index = transition->instance_descriptors()->GetFieldIndex( | 320 int index = transition->instance_descriptors()->GetFieldIndex( |
514 transition->LastAdded()); | 321 transition->LastAdded()); |
515 | 322 |
516 // Adjust for the number of properties stored in the object. Even in the | 323 // Adjust for the number of properties stored in the object. Even in the |
517 // face of a transition we can use the old map here because the size of the | 324 // face of a transition we can use the old map here because the size of the |
518 // object and the number of in-object properties is not going to change. | 325 // object and the number of in-object properties is not going to change. |
519 index -= transition->inobject_properties(); | 326 index -= transition->inobject_properties(); |
520 | 327 |
521 // TODO(verwaest): Share this code as a code stub. | 328 // TODO(verwaest): Share this code as a code stub. |
522 SmiCheck smi_check = representation.IsTagged() | 329 SmiCheck smi_check = |
523 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 330 representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
524 if (index < 0) { | 331 if (index < 0) { |
525 // Set the property straight into the object. | 332 // Set the property straight into the object. |
526 int offset = transition->instance_size() + (index * kPointerSize); | 333 int offset = transition->instance_size() + (index * kPointerSize); |
527 if (representation.IsDouble()) { | 334 if (representation.IsDouble()) { |
528 __ sd(storage_reg, FieldMemOperand(receiver_reg, offset)); | 335 __ sd(storage_reg, FieldMemOperand(receiver_reg, offset)); |
529 } else { | 336 } else { |
530 __ sd(value_reg, FieldMemOperand(receiver_reg, offset)); | 337 __ sd(value_reg, FieldMemOperand(receiver_reg, offset)); |
531 } | 338 } |
532 | 339 |
533 if (!representation.IsSmi()) { | 340 if (!representation.IsSmi()) { |
534 // Update the write barrier for the array address. | 341 // Update the write barrier for the array address. |
535 if (!representation.IsDouble()) { | 342 if (!representation.IsDouble()) { |
536 __ mov(storage_reg, value_reg); | 343 __ mov(storage_reg, value_reg); |
537 } | 344 } |
538 __ RecordWriteField(receiver_reg, | 345 __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1, |
539 offset, | 346 kRAHasNotBeenSaved, kDontSaveFPRegs, |
540 storage_reg, | 347 EMIT_REMEMBERED_SET, smi_check); |
541 scratch1, | |
542 kRAHasNotBeenSaved, | |
543 kDontSaveFPRegs, | |
544 EMIT_REMEMBERED_SET, | |
545 smi_check); | |
546 } | 348 } |
547 } else { | 349 } else { |
548 // Write to the properties array. | 350 // Write to the properties array. |
549 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 351 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
550 // Get the properties array | 352 // Get the properties array |
551 __ ld(scratch1, | 353 __ ld(scratch1, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
552 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | |
553 if (representation.IsDouble()) { | 354 if (representation.IsDouble()) { |
554 __ sd(storage_reg, FieldMemOperand(scratch1, offset)); | 355 __ sd(storage_reg, FieldMemOperand(scratch1, offset)); |
555 } else { | 356 } else { |
556 __ sd(value_reg, FieldMemOperand(scratch1, offset)); | 357 __ sd(value_reg, FieldMemOperand(scratch1, offset)); |
557 } | 358 } |
558 | 359 |
559 if (!representation.IsSmi()) { | 360 if (!representation.IsSmi()) { |
560 // Update the write barrier for the array address. | 361 // Update the write barrier for the array address. |
561 if (!representation.IsDouble()) { | 362 if (!representation.IsDouble()) { |
562 __ mov(storage_reg, value_reg); | 363 __ mov(storage_reg, value_reg); |
563 } | 364 } |
564 __ RecordWriteField(scratch1, | 365 __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg, |
565 offset, | 366 kRAHasNotBeenSaved, kDontSaveFPRegs, |
566 storage_reg, | 367 EMIT_REMEMBERED_SET, smi_check); |
567 receiver_reg, | |
568 kRAHasNotBeenSaved, | |
569 kDontSaveFPRegs, | |
570 EMIT_REMEMBERED_SET, | |
571 smi_check); | |
572 } | 368 } |
573 } | 369 } |
574 | 370 |
575 // Return the value (register v0). | 371 // Return the value (register v0). |
576 DCHECK(value_reg.is(a0)); | 372 DCHECK(value_reg.is(a0)); |
577 __ bind(&exit); | 373 __ bind(&exit); |
578 __ Ret(USE_DELAY_SLOT); | 374 __ Ret(USE_DELAY_SLOT); |
579 __ mov(v0, a0); | 375 __ mov(v0, a0); |
580 } | 376 } |
581 | 377 |
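A small C++ sketch of how the store path above chooses between an in-object slot and the out-of-line properties array; the helper name FieldByteOffset and its parameters are illustrative only:

// After subtracting the number of in-object properties, a negative index
// addresses an in-object field counted back from the end of the instance;
// a non-negative index addresses a slot in the FixedArray properties store,
// matching the two branches above.
static inline int FieldByteOffset(int index, int inobject_properties,
                                  int instance_size, int pointer_size,
                                  int fixed_array_header_size) {
  index -= inobject_properties;
  if (index < 0) {
    return instance_size + index * pointer_size;           // in-object field
  }
  return index * pointer_size + fixed_array_header_size;   // properties array
}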
(...skipping 26 matching lines...) |
608 | 404 |
609 | 405 |
610 Register PropertyHandlerCompiler::CheckPrototypes( | 406 Register PropertyHandlerCompiler::CheckPrototypes( |
611 Register object_reg, Register holder_reg, Register scratch1, | 407 Register object_reg, Register holder_reg, Register scratch1, |
612 Register scratch2, Handle<Name> name, Label* miss, | 408 Register scratch2, Handle<Name> name, Label* miss, |
613 PrototypeCheckType check) { | 409 PrototypeCheckType check) { |
614 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); | 410 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); |
615 | 411 |
616 // Make sure there's no overlap between holder and object registers. | 412 // Make sure there's no overlap between holder and object registers. |
617 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); | 413 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); |
618 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) | 414 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) && |
619 && !scratch2.is(scratch1)); | 415 !scratch2.is(scratch1)); |
620 | 416 |
621 // Keep track of the current object in register reg. | 417 // Keep track of the current object in register reg. |
622 Register reg = object_reg; | 418 Register reg = object_reg; |
623 int depth = 0; | 419 int depth = 0; |
624 | 420 |
625 Handle<JSObject> current = Handle<JSObject>::null(); | 421 Handle<JSObject> current = Handle<JSObject>::null(); |
626 if (type()->IsConstant()) { | 422 if (type()->IsConstant()) { |
627 current = Handle<JSObject>::cast(type()->AsConstant()->Value()); | 423 current = Handle<JSObject>::cast(type()->AsConstant()->Value()); |
628 } | 424 } |
629 Handle<JSObject> prototype = Handle<JSObject>::null(); | 425 Handle<JSObject> prototype = Handle<JSObject>::null(); |
(...skipping 12 matching lines...) |
642 prototype = handle(JSObject::cast(current_map->prototype())); | 438 prototype = handle(JSObject::cast(current_map->prototype())); |
643 if (current_map->is_dictionary_map() && | 439 if (current_map->is_dictionary_map() && |
644 !current_map->IsJSGlobalObjectMap()) { | 440 !current_map->IsJSGlobalObjectMap()) { |
645 DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast. | 441 DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast. |
646 if (!name->IsUniqueName()) { | 442 if (!name->IsUniqueName()) { |
647 DCHECK(name->IsString()); | 443 DCHECK(name->IsString()); |
648 name = factory()->InternalizeString(Handle<String>::cast(name)); | 444 name = factory()->InternalizeString(Handle<String>::cast(name)); |
649 } | 445 } |
650 DCHECK(current.is_null() || | 446 DCHECK(current.is_null() || |
651 current->property_dictionary()->FindEntry(name) == | 447 current->property_dictionary()->FindEntry(name) == |
652 NameDictionary::kNotFound); | 448 NameDictionary::kNotFound); |
653 | 449 |
654 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, | 450 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1, |
655 scratch1, scratch2); | 451 scratch2); |
656 | 452 |
657 __ ld(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 453 __ ld(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
658 reg = holder_reg; // From now on the object will be in holder_reg. | 454 reg = holder_reg; // From now on the object will be in holder_reg. |
659 __ ld(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); | 455 __ ld(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); |
660 } else { | 456 } else { |
661 // Two possible reasons for loading the prototype from the map: | 457 // Two possible reasons for loading the prototype from the map: |
662 // (1) Can't store references to new space in code. | 458 // (1) Can't store references to new space in code. |
663 // (2) Handler is shared for all receivers with the same prototype | 459 // (2) Handler is shared for all receivers with the same prototype |
664 // map (but not necessarily the same prototype instance). | 460 // map (but not necessarily the same prototype instance). |
665 bool load_prototype_from_map = | 461 bool load_prototype_from_map = |
666 heap()->InNewSpace(*prototype) || depth == 1; | 462 heap()->InNewSpace(*prototype) || depth == 1; |
667 Register map_reg = scratch1; | 463 Register map_reg = scratch1; |
668 if (depth != 1 || check == CHECK_ALL_MAPS) { | 464 if (depth != 1 || check == CHECK_ALL_MAPS) { |
669 // CheckMap implicitly loads the map of |reg| into |map_reg|. | 465 // CheckMap implicitly loads the map of |reg| into |map_reg|. |
670 __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK); | 466 __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK); |
671 } else { | 467 } else { |
672 __ ld(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); | 468 __ ld(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); |
673 } | 469 } |
674 | 470 |
675 // Check access rights to the global object. This has to happen after | 471 // Check access rights to the global object. This has to happen after |
676 // the map check so that we know that the object is actually a global | 472 // the map check so that we know that the object is actually a global |
677 // object. | 473 // object. |
678 // This allows us to install generated handlers for accesses to the | 474 // This allows us to install generated handlers for accesses to the |
679 // global proxy (as opposed to using slow ICs). See corresponding code | 475 // global proxy (as opposed to using slow ICs). See corresponding code |
680 // in LookupForRead(). | 476 // in LookupForRead(). |
681 if (current_map->IsJSGlobalProxyMap()) { | 477 if (current_map->IsJSGlobalProxyMap()) { |
682 __ CheckAccessGlobalProxy(reg, scratch2, miss); | 478 __ CheckAccessGlobalProxy(reg, scratch2, miss); |
683 } else if (current_map->IsJSGlobalObjectMap()) { | 479 } else if (current_map->IsJSGlobalObjectMap()) { |
684 GenerateCheckPropertyCell( | 480 GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current), |
685 masm(), Handle<JSGlobalObject>::cast(current), name, | 481 name, scratch2, miss); |
686 scratch2, miss); | |
687 } | 482 } |
688 | 483 |
689 reg = holder_reg; // From now on the object will be in holder_reg. | 484 reg = holder_reg; // From now on the object will be in holder_reg. |
690 | 485 |
691 if (load_prototype_from_map) { | 486 if (load_prototype_from_map) { |
692 __ ld(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset)); | 487 __ ld(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset)); |
693 } else { | 488 } else { |
694 __ li(reg, Operand(prototype)); | 489 __ li(reg, Operand(prototype)); |
695 } | 490 } |
696 } | 491 } |
(...skipping 62 matching lines...) |
759 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); | 554 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); |
760 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); | 555 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); |
761 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); | 556 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); |
762 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6); | 557 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6); |
763 DCHECK(!scratch2().is(reg)); | 558 DCHECK(!scratch2().is(reg)); |
764 DCHECK(!scratch3().is(reg)); | 559 DCHECK(!scratch3().is(reg)); |
765 DCHECK(!scratch4().is(reg)); | 560 DCHECK(!scratch4().is(reg)); |
766 __ push(receiver()); | 561 __ push(receiver()); |
767 if (heap()->InNewSpace(callback->data())) { | 562 if (heap()->InNewSpace(callback->data())) { |
768 __ li(scratch3(), callback); | 563 __ li(scratch3(), callback); |
769 __ ld(scratch3(), FieldMemOperand(scratch3(), | 564 __ ld(scratch3(), |
770 ExecutableAccessorInfo::kDataOffset)); | 565 FieldMemOperand(scratch3(), ExecutableAccessorInfo::kDataOffset)); |
771 } else { | 566 } else { |
772 __ li(scratch3(), Handle<Object>(callback->data(), isolate())); | 567 __ li(scratch3(), Handle<Object>(callback->data(), isolate())); |
773 } | 568 } |
774 __ Dsubu(sp, sp, 6 * kPointerSize); | 569 __ Dsubu(sp, sp, 6 * kPointerSize); |
775 __ sd(scratch3(), MemOperand(sp, 5 * kPointerSize)); | 570 __ sd(scratch3(), MemOperand(sp, 5 * kPointerSize)); |
776 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex); | 571 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex); |
777 __ sd(scratch3(), MemOperand(sp, 4 * kPointerSize)); | 572 __ sd(scratch3(), MemOperand(sp, 4 * kPointerSize)); |
778 __ sd(scratch3(), MemOperand(sp, 3 * kPointerSize)); | 573 __ sd(scratch3(), MemOperand(sp, 3 * kPointerSize)); |
779 __ li(scratch4(), | 574 __ li(scratch4(), Operand(ExternalReference::isolate_address(isolate()))); |
780 Operand(ExternalReference::isolate_address(isolate()))); | |
781 __ sd(scratch4(), MemOperand(sp, 2 * kPointerSize)); | 575 __ sd(scratch4(), MemOperand(sp, 2 * kPointerSize)); |
782 __ sd(reg, MemOperand(sp, 1 * kPointerSize)); | 576 __ sd(reg, MemOperand(sp, 1 * kPointerSize)); |
783 __ sd(name(), MemOperand(sp, 0 * kPointerSize)); | 577 __ sd(name(), MemOperand(sp, 0 * kPointerSize)); |
784 __ Daddu(scratch2(), sp, 1 * kPointerSize); | 578 __ Daddu(scratch2(), sp, 1 * kPointerSize); |
785 | 579 |
786 __ mov(a2, scratch2()); // Saved in case scratch2 == a1. | 580 __ mov(a2, scratch2()); // Saved in case scratch2 == a1. |
787 // Abi for CallApiGetter. | 581 // Abi for CallApiGetter. |
788 Register getter_address_reg = a2; | 582 Register getter_address_reg = a2; |
789 | 583 |
790 Address getter_address = v8::ToCData<Address>(callback->getter()); | 584 Address getter_address = v8::ToCData<Address>(callback->getter()); |
(...skipping 78 matching lines...) |
869 ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1); | 663 ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1); |
870 } | 664 } |
871 | 665 |
872 | 666 |
873 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback( | 667 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback( |
874 Handle<JSObject> object, Handle<Name> name, | 668 Handle<JSObject> object, Handle<Name> name, |
875 Handle<ExecutableAccessorInfo> callback) { | 669 Handle<ExecutableAccessorInfo> callback) { |
876 Register holder_reg = Frontend(receiver(), name); | 670 Register holder_reg = Frontend(receiver(), name); |
877 | 671 |
878 __ Push(receiver(), holder_reg); // Receiver. | 672 __ Push(receiver(), holder_reg); // Receiver. |
879 __ li(at, Operand(callback)); // Callback info. | 673 __ li(at, Operand(callback)); // Callback info. |
880 __ push(at); | 674 __ push(at); |
881 __ li(at, Operand(name)); | 675 __ li(at, Operand(name)); |
882 __ Push(at, value()); | 676 __ Push(at, value()); |
883 | 677 |
884 // Do tail-call to the runtime system. | 678 // Do tail-call to the runtime system. |
885 ExternalReference store_callback_property = | 679 ExternalReference store_callback_property = |
886 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); | 680 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); |
887 __ TailCallExternalReference(store_callback_property, 5, 1); | 681 __ TailCallExternalReference(store_callback_property, 5, 1); |
888 | 682 |
889 // Return the generated code. | 683 // Return the generated code. |
(...skipping 15 matching lines...) |
905 FrameScope scope(masm, StackFrame::INTERNAL); | 699 FrameScope scope(masm, StackFrame::INTERNAL); |
906 | 700 |
907 // Save value register, so we can restore it later. | 701 // Save value register, so we can restore it later. |
908 __ push(value()); | 702 __ push(value()); |
909 | 703 |
910 if (!setter.is_null()) { | 704 if (!setter.is_null()) { |
911 // Call the JavaScript setter with receiver and value on the stack. | 705 // Call the JavaScript setter with receiver and value on the stack. |
912 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { | 706 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { |
913 // Swap in the global receiver. | 707 // Swap in the global receiver. |
914 __ ld(receiver, | 708 __ ld(receiver, |
915 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); | 709 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); |
916 } | 710 } |
917 __ Push(receiver, value()); | 711 __ Push(receiver, value()); |
918 ParameterCount actual(1); | 712 ParameterCount actual(1); |
919 ParameterCount expected(setter); | 713 ParameterCount expected(setter); |
920 __ InvokeFunction(setter, expected, actual, | 714 __ InvokeFunction(setter, expected, actual, CALL_FUNCTION, |
921 CALL_FUNCTION, NullCallWrapper()); | 715 NullCallWrapper()); |
922 } else { | 716 } else { |
923 // If we generate a global code snippet for deoptimization only, remember | 717 // If we generate a global code snippet for deoptimization only, remember |
924 // the place to continue after deoptimization. | 718 // the place to continue after deoptimization. |
925 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); | 719 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); |
926 } | 720 } |
927 | 721 |
928 // We have to return the passed value, not the return value of the setter. | 722 // We have to return the passed value, not the return value of the setter. |
929 __ pop(v0); | 723 __ pop(v0); |
930 | 724 |
931 // Restore context register. | 725 // Restore context register. |
(...skipping 18 matching lines...) |
950 | 744 |
951 // Return the generated code. | 745 // Return the generated code. |
952 return GetCode(kind(), Code::FAST, name); | 746 return GetCode(kind(), Code::FAST, name); |
953 } | 747 } |
954 | 748 |
955 | 749 |
956 Register* PropertyAccessCompiler::load_calling_convention() { | 750 Register* PropertyAccessCompiler::load_calling_convention() { |
957 // receiver, name, scratch1, scratch2, scratch3, scratch4. | 751 // receiver, name, scratch1, scratch2, scratch3, scratch4. |
958 Register receiver = LoadIC::ReceiverRegister(); | 752 Register receiver = LoadIC::ReceiverRegister(); |
959 Register name = LoadIC::NameRegister(); | 753 Register name = LoadIC::NameRegister(); |
960 static Register registers[] = { receiver, name, a3, a0, a4, a5 }; | 754 static Register registers[] = {receiver, name, a3, a0, a4, a5}; |
961 return registers; | 755 return registers; |
962 } | 756 } |
963 | 757 |
964 | 758 |
965 Register* PropertyAccessCompiler::store_calling_convention() { | 759 Register* PropertyAccessCompiler::store_calling_convention() { |
966 // receiver, name, scratch1, scratch2, scratch3. | 760 // receiver, name, scratch1, scratch2, scratch3. |
967 Register receiver = StoreIC::ReceiverRegister(); | 761 Register receiver = StoreIC::ReceiverRegister(); |
968 Register name = StoreIC::NameRegister(); | 762 Register name = StoreIC::NameRegister(); |
969 DCHECK(a3.is(KeyedStoreIC::MapRegister())); | 763 DCHECK(a3.is(KeyedStoreIC::MapRegister())); |
970 static Register registers[] = { receiver, name, a3, a4, a5 }; | 764 static Register registers[] = {receiver, name, a3, a4, a5}; |
971 return registers; | 765 return registers; |
972 } | 766 } |
973 | 767 |
974 | 768 |
975 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); } | 769 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); } |
976 | 770 |
977 | 771 |
978 #undef __ | 772 #undef __ |
979 #define __ ACCESS_MASM(masm) | 773 #define __ ACCESS_MASM(masm) |
980 | 774 |
981 | 775 |
982 void NamedLoadHandlerCompiler::GenerateLoadViaGetter( | 776 void NamedLoadHandlerCompiler::GenerateLoadViaGetter( |
983 MacroAssembler* masm, Handle<HeapType> type, Register receiver, | 777 MacroAssembler* masm, Handle<HeapType> type, Register receiver, |
984 Handle<JSFunction> getter) { | 778 Handle<JSFunction> getter) { |
985 // ----------- S t a t e ------------- | 779 // ----------- S t a t e ------------- |
986 // -- a0 : receiver | 780 // -- a0 : receiver |
987 // -- a2 : name | 781 // -- a2 : name |
988 // -- ra : return address | 782 // -- ra : return address |
989 // ----------------------------------- | 783 // ----------------------------------- |
990 { | 784 { |
991 FrameScope scope(masm, StackFrame::INTERNAL); | 785 FrameScope scope(masm, StackFrame::INTERNAL); |
992 | 786 |
993 if (!getter.is_null()) { | 787 if (!getter.is_null()) { |
994 // Call the JavaScript getter with the receiver on the stack. | 788 // Call the JavaScript getter with the receiver on the stack. |
995 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { | 789 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { |
996 // Swap in the global receiver. | 790 // Swap in the global receiver. |
997 __ ld(receiver, | 791 __ ld(receiver, |
998 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); | 792 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); |
999 } | 793 } |
1000 __ push(receiver); | 794 __ push(receiver); |
1001 ParameterCount actual(0); | 795 ParameterCount actual(0); |
1002 ParameterCount expected(getter); | 796 ParameterCount expected(getter); |
1003 __ InvokeFunction(getter, expected, actual, | 797 __ InvokeFunction(getter, expected, actual, CALL_FUNCTION, |
1004 CALL_FUNCTION, NullCallWrapper()); | 798 NullCallWrapper()); |
1005 } else { | 799 } else { |
1006 // If we generate a global code snippet for deoptimization only, remember | 800 // If we generate a global code snippet for deoptimization only, remember |
1007 // the place to continue after deoptimization. | 801 // the place to continue after deoptimization. |
1008 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); | 802 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); |
1009 } | 803 } |
1010 | 804 |
1011 // Restore context register. | 805 // Restore context register. |
1012 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 806 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
1013 } | 807 } |
1014 __ Ret(); | 808 __ Ret(); |
(...skipping 69 matching lines...) |
1084 Handle<Map> map = IC::TypeToMap(*type, isolate()); | 878 Handle<Map> map = IC::TypeToMap(*type, isolate()); |
1085 if (!map->is_deprecated()) { | 879 if (!map->is_deprecated()) { |
1086 number_of_handled_maps++; | 880 number_of_handled_maps++; |
1087 // Check map and tail call if there's a match. | 881 // Check map and tail call if there's a match. |
1088 // Separate compare from branch, to provide path for above JumpIfSmi(). | 882 // Separate compare from branch, to provide path for above JumpIfSmi(). |
1089 __ Dsubu(match, map_reg, Operand(map)); | 883 __ Dsubu(match, map_reg, Operand(map)); |
1090 if (type->Is(HeapType::Number())) { | 884 if (type->Is(HeapType::Number())) { |
1091 DCHECK(!number_case.is_unused()); | 885 DCHECK(!number_case.is_unused()); |
1092 __ bind(&number_case); | 886 __ bind(&number_case); |
1093 } | 887 } |
1094 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, | 888 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq, match, |
1095 eq, match, Operand(zero_reg)); | 889 Operand(zero_reg)); |
1096 } | 890 } |
1097 } | 891 } |
1098 DCHECK(number_of_handled_maps != 0); | 892 DCHECK(number_of_handled_maps != 0); |
1099 | 893 |
1100 __ bind(&miss); | 894 __ bind(&miss); |
1101 TailCallBuiltin(masm(), MissBuiltin(kind())); | 895 TailCallBuiltin(masm(), MissBuiltin(kind())); |
1102 | 896 |
1103 // Return the generated code. | 897 // Return the generated code. |
1104 InlineCacheState state = | 898 InlineCacheState state = |
1105 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; | 899 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; |
1106 return GetCode(kind(), type, name, state); | 900 return GetCode(kind(), type, name, state); |
1107 } | 901 } |
1108 | 902 |
1109 | 903 |
1110 Handle<Code> PropertyICCompiler::CompileKeyedStorePolymorphic( | 904 Handle<Code> PropertyICCompiler::CompileKeyedStorePolymorphic( |
1111 MapHandleList* receiver_maps, CodeHandleList* handler_stubs, | 905 MapHandleList* receiver_maps, CodeHandleList* handler_stubs, |
1112 MapHandleList* transitioned_maps) { | 906 MapHandleList* transitioned_maps) { |
1113 Label miss; | 907 Label miss; |
1114 __ JumpIfSmi(receiver(), &miss); | 908 __ JumpIfSmi(receiver(), &miss); |
1115 | 909 |
1116 int receiver_count = receiver_maps->length(); | 910 int receiver_count = receiver_maps->length(); |
1117 __ ld(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset)); | 911 __ ld(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset)); |
1118 for (int i = 0; i < receiver_count; ++i) { | 912 for (int i = 0; i < receiver_count; ++i) { |
1119 if (transitioned_maps->at(i).is_null()) { | 913 if (transitioned_maps->at(i).is_null()) { |
1120 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq, | 914 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq, scratch1(), |
1121 scratch1(), Operand(receiver_maps->at(i))); | 915 Operand(receiver_maps->at(i))); |
1122 } else { | 916 } else { |
1123 Label next_map; | 917 Label next_map; |
1124 __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i))); | 918 __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i))); |
1125 __ li(transition_map(), Operand(transitioned_maps->at(i))); | 919 __ li(transition_map(), Operand(transitioned_maps->at(i))); |
1126 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET); | 920 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET); |
1127 __ bind(&next_map); | 921 __ bind(&next_map); |
1128 } | 922 } |
1129 } | 923 } |
1130 | 924 |
1131 __ bind(&miss); | 925 __ bind(&miss); |
(...skipping 20 matching lines...) |
1152 | 946 |
1153 __ UntagAndJumpIfNotSmi(a6, key, &miss); | 947 __ UntagAndJumpIfNotSmi(a6, key, &miss); |
1154 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 948 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
1155 DCHECK(kSmiTagSize + kSmiShiftSize == 32); | 949 DCHECK(kSmiTagSize + kSmiShiftSize == 32); |
1156 __ LoadFromNumberDictionary(&slow, a4, key, v0, a6, a3, a5); | 950 __ LoadFromNumberDictionary(&slow, a4, key, v0, a6, a3, a5); |
1157 __ Ret(); | 951 __ Ret(); |
1158 | 952 |
1159 // Slow case, key and receiver still unmodified. | 953 // Slow case, key and receiver still unmodified. |
1160 __ bind(&slow); | 954 __ bind(&slow); |
1161 __ IncrementCounter( | 955 __ IncrementCounter( |
1162 masm->isolate()->counters()->keyed_load_external_array_slow(), | 956 masm->isolate()->counters()->keyed_load_external_array_slow(), 1, a2, a3); |
1163 1, a2, a3); | |
1164 | 957 |
1165 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow); | 958 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow); |
1166 | 959 |
1167 // Miss case, call the runtime. | 960 // Miss case, call the runtime. |
1168 __ bind(&miss); | 961 __ bind(&miss); |
1169 | 962 |
1170 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); | 963 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); |
1171 } | 964 } |
1172 | 965 |
1173 | 966 |
1174 #undef __ | 967 #undef __ |
1175 | 968 } |
1176 } } // namespace v8::internal | 969 } // namespace v8::internal |
1177 | 970 |
1178 #endif // V8_TARGET_ARCH_MIPS64 | 971 #endif // V8_TARGET_ARCH_MIPS64 |