OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 | 5 |
6 #include "src/v8.h" | 6 #include "src/v8.h" |
7 | 7 |
8 #if V8_TARGET_ARCH_MIPS64 | 8 #if V8_TARGET_ARCH_MIPS64 |
9 | 9 |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 144 matching lines...)
155 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); | 155 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE); |
156 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 156 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
157 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); | 157 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE)); |
158 } | 158 } |
159 | 159 |
160 | 160 |
161 // Loads an indexed element from a fast case array. | 161 // Loads an indexed element from a fast case array. |
162 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, | 162 static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver, |
163 Register key, Register elements, | 163 Register key, Register elements, |
164 Register scratch1, Register scratch2, | 164 Register scratch1, Register scratch2, |
165 Register result, Label* slow) { | 165 Register result, Label* slow, |
| 166 LanguageMode language_mode) { |
166 // Register use: | 167 // Register use: |
167 // | 168 // |
168 // receiver - holds the receiver on entry. | 169 // receiver - holds the receiver on entry. |
169 // Unchanged unless 'result' is the same register. | 170 // Unchanged unless 'result' is the same register. |
170 // | 171 // |
171 // key - holds the smi key on entry. | 172 // key - holds the smi key on entry. |
172 // Unchanged unless 'result' is the same register. | 173 // Unchanged unless 'result' is the same register. |
173 // | 174 // |
174 // result - holds the result on exit if the load succeeded. | 175 // result - holds the result on exit if the load succeeded. |
175 // Allowed to be the same as 'receiver' or 'key'. | 176 // Allowed to be the same as 'receiver' or 'key'. |
176 // Unchanged on bailout so 'receiver' and 'key' can be safely | 177 // Unchanged on bailout so 'receiver' and 'key' can be safely |
177 // used by further computation. | 178 // used by further computation. |
178 // | 179 // |
179 // Scratch registers: | 180 // Scratch registers: |
180 // | 181 // |
181 // elements - holds the elements of the receiver and its prototypes. | 182 // elements - holds the elements of the receiver and its prototypes. |
182 // | 183 // |
183 // scratch1 - used to hold elements length, bit fields, base addresses. | 184 // scratch1 - used to hold elements length, bit fields, base addresses. |
184 // | 185 // |
185 // scratch2 - used to hold maps, prototypes, and the loaded value. | 186 // scratch2 - used to hold maps, prototypes, and the loaded value. |
186 Label check_prototypes, check_next_prototype; | 187 Label check_prototypes, check_next_prototype; |
187 Label done, in_bounds, return_undefined; | 188 Label done, in_bounds, absent; |
188 | 189 |
189 __ ld(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 190 __ ld(elements, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
190 __ AssertFastElements(elements); | 191 __ AssertFastElements(elements); |
191 | 192 |
192 // Check that the key (index) is within bounds. | 193 // Check that the key (index) is within bounds. |
193 __ ld(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); | 194 __ ld(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset)); |
194 __ Branch(&in_bounds, lo, key, Operand(scratch1)); | 195 __ Branch(&in_bounds, lo, key, Operand(scratch1)); |
195 // Out-of-bounds. Check the prototype chain to see if we can just return | 196 // Out-of-bounds. Check the prototype chain to see if we can just return |
196 // 'undefined'. | 197 // 'undefined'. |
197 // Negative keys can't take the fast OOB path. | 198 // Negative keys can't take the fast OOB path. |
198 __ Branch(slow, lt, key, Operand(zero_reg)); | 199 __ Branch(slow, lt, key, Operand(zero_reg)); |
199 __ bind(&check_prototypes); | 200 __ bind(&check_prototypes); |
200 __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 201 __ ld(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
201 __ bind(&check_next_prototype); | 202 __ bind(&check_next_prototype); |
202 __ ld(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); | 203 __ ld(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset)); |
203 // scratch2: current prototype | 204 // scratch2: current prototype |
204 __ LoadRoot(at, Heap::kNullValueRootIndex); | 205 __ LoadRoot(at, Heap::kNullValueRootIndex); |
205 __ Branch(&return_undefined, eq, scratch2, Operand(at)); | 206 __ Branch(&absent, eq, scratch2, Operand(at)); |
206 __ ld(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); | 207 __ ld(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset)); |
207 __ ld(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); | 208 __ ld(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset)); |
208 // elements: elements of current prototype | 209 // elements: elements of current prototype |
209 // scratch2: map of current prototype | 210 // scratch2: map of current prototype |
210 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); | 211 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kInstanceTypeOffset)); |
211 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); | 212 __ Branch(slow, lo, scratch1, Operand(JS_OBJECT_TYPE)); |
212 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); | 213 __ lbu(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset)); |
213 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | | 214 __ And(at, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) | |
214 (1 << Map::kHasIndexedInterceptor))); | 215 (1 << Map::kHasIndexedInterceptor))); |
215 __ Branch(slow, ne, at, Operand(zero_reg)); | 216 __ Branch(slow, ne, at, Operand(zero_reg)); |
216 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); | 217 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex); |
217 __ Branch(slow, ne, elements, Operand(at)); | 218 __ Branch(slow, ne, elements, Operand(at)); |
218 __ Branch(&check_next_prototype); | 219 __ Branch(&check_next_prototype); |
219 | 220 |
220 __ bind(&return_undefined); | 221 __ bind(&absent); |
221 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 222 if (is_strong(language_mode)) { |
222 __ Branch(&done); | 223 __ Branch(slow); |
| 224 } else { |
| 225 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 226 __ Branch(&done); |
| 227 } |
223 | 228 |
224 __ bind(&in_bounds); | 229 __ bind(&in_bounds); |
225 // Fast case: Do the load. | 230 // Fast case: Do the load. |
226 __ Daddu(scratch1, elements, | 231 __ Daddu(scratch1, elements, |
227 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 232 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
228 // The key is a smi. | 233 // The key is a smi. |
229 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); | 234 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2); |
230 __ SmiScale(at, key, kPointerSizeLog2); | 235 __ SmiScale(at, key, kPointerSizeLog2); |
231 __ daddu(at, at, scratch1); | 236 __ daddu(at, at, scratch1); |
232 __ ld(scratch2, MemOperand(at)); | 237 __ ld(scratch2, MemOperand(at)); |
(...skipping 29 matching lines...)
262 // map: key map | 267 // map: key map |
263 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 268 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
264 STATIC_ASSERT(kInternalizedTag == 0); | 269 STATIC_ASSERT(kInternalizedTag == 0); |
265 __ And(at, hash, Operand(kIsNotInternalizedMask)); | 270 __ And(at, hash, Operand(kIsNotInternalizedMask)); |
266 __ Branch(not_unique, ne, at, Operand(zero_reg)); | 271 __ Branch(not_unique, ne, at, Operand(zero_reg)); |
267 | 272 |
268 __ bind(&unique); | 273 __ bind(&unique); |
269 } | 274 } |
270 | 275 |
271 | 276 |
272 void LoadIC::GenerateNormal(MacroAssembler* masm) { | 277 void LoadIC::GenerateNormal(MacroAssembler* masm, LanguageMode language_mode) { |
273 Register dictionary = a0; | 278 Register dictionary = a0; |
274 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); | 279 DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister())); |
275 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); | 280 DCHECK(!dictionary.is(LoadDescriptor::NameRegister())); |
276 Label slow; | 281 Label slow; |
277 | 282 |
278 __ ld(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), | 283 __ ld(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(), |
279 JSObject::kPropertiesOffset)); | 284 JSObject::kPropertiesOffset)); |
280 GenerateDictionaryLoad(masm, &slow, dictionary, | 285 GenerateDictionaryLoad(masm, &slow, dictionary, |
281 LoadDescriptor::NameRegister(), v0, a3, a4); | 286 LoadDescriptor::NameRegister(), v0, a3, a4); |
282 __ Ret(); | 287 __ Ret(); |
283 | 288 |
284 // Dictionary load failed, go slow (but don't miss). | 289 // Dictionary load failed, go slow (but don't miss). |
285 __ bind(&slow); | 290 __ bind(&slow); |
286 GenerateRuntimeGetProperty(masm); | 291 GenerateRuntimeGetProperty(masm, language_mode); |
287 } | 292 } |
288 | 293 |
289 | 294 |
290 // A register that isn't one of the parameters to the load ic. | 295 // A register that isn't one of the parameters to the load ic. |
291 static const Register LoadIC_TempRegister() { return a3; } | 296 static const Register LoadIC_TempRegister() { return a3; } |
292 | 297 |
293 | 298 |
294 static void LoadIC_PushArgs(MacroAssembler* masm) { | 299 static void LoadIC_PushArgs(MacroAssembler* masm) { |
295 Register receiver = LoadDescriptor::ReceiverRegister(); | 300 Register receiver = LoadDescriptor::ReceiverRegister(); |
296 Register name = LoadDescriptor::NameRegister(); | 301 Register name = LoadDescriptor::NameRegister(); |
(...skipping 14 matching lines...)
311 | 316 |
312 LoadIC_PushArgs(masm); | 317 LoadIC_PushArgs(masm); |
313 | 318 |
314 // Perform tail call to the entry. | 319 // Perform tail call to the entry. |
315 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); | 320 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate); |
316 int arg_count = 4; | 321 int arg_count = 4; |
317 __ TailCallExternalReference(ref, arg_count, 1); | 322 __ TailCallExternalReference(ref, arg_count, 1); |
318 } | 323 } |
319 | 324 |
320 | 325 |
321 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 326 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm, |
| 327 LanguageMode language_mode) { |
322 // The return address is in ra. | 328 // The return address is in ra. |
323 | 329 |
324 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); | 330 __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister()); |
325 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); | 331 __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister()); |
326 | 332 |
327 __ TailCallRuntime(Runtime::kGetProperty, 2, 1); | 333 // Do tail-call to runtime routine. |
| 334 __ TailCallRuntime(is_strong(language_mode) ? Runtime::kGetPropertyStrong |
| 335 : Runtime::kGetProperty, |
| 336 2, 1); |
328 } | 337 } |
329 | 338 |
330 | 339 |
331 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 340 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
332 // The return address is in ra. | 341 // The return address is in ra. |
333 Isolate* isolate = masm->isolate(); | 342 Isolate* isolate = masm->isolate(); |
334 | 343 |
335 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::SlotRegister(), | 344 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::SlotRegister(), |
336 LoadWithVectorDescriptor::VectorRegister())); | 345 LoadWithVectorDescriptor::VectorRegister())); |
337 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a4, a5); | 346 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a4, a5); |
338 | 347 |
339 LoadIC_PushArgs(masm); | 348 LoadIC_PushArgs(masm); |
340 | 349 |
341 // Perform tail call to the entry. | 350 // Perform tail call to the entry. |
342 ExternalReference ref = | 351 ExternalReference ref = |
343 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); | 352 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate); |
344 | 353 |
345 int arg_count = 4; | 354 int arg_count = 4; |
346 __ TailCallExternalReference(ref, arg_count, 1); | 355 __ TailCallExternalReference(ref, arg_count, 1); |
347 } | 356 } |
348 | 357 |
349 | 358 |
350 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { | 359 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm, |
| 360 LanguageMode language_mode) { |
351 // The return address is in ra. | 361 // The return address is in ra. |
352 | 362 |
353 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); | 363 __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister()); |
354 | 364 |
355 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1); | 365 // Do tail-call to runtime routine. |
| 366 __ TailCallRuntime(is_strong(language_mode) ? Runtime::kKeyedGetPropertyStrong |
| 367 : Runtime::kKeyedGetProperty, |
| 368 2, 1); |
356 } | 369 } |
357 | 370 |
358 | 371 |
359 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { | 372 void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm, |
| 373 LanguageMode language_mode) { |
360 // The return address is in ra. | 374 // The return address is in ra. |
361 Label slow, check_name, index_smi, index_name, property_array_property; | 375 Label slow, check_name, index_smi, index_name, property_array_property; |
362 Label probe_dictionary, check_number_dictionary; | 376 Label probe_dictionary, check_number_dictionary; |
363 | 377 |
364 Register key = LoadDescriptor::NameRegister(); | 378 Register key = LoadDescriptor::NameRegister(); |
365 Register receiver = LoadDescriptor::ReceiverRegister(); | 379 Register receiver = LoadDescriptor::ReceiverRegister(); |
366 DCHECK(key.is(a2)); | 380 DCHECK(key.is(a2)); |
367 DCHECK(receiver.is(a1)); | 381 DCHECK(receiver.is(a1)); |
368 | 382 |
369 Isolate* isolate = masm->isolate(); | 383 Isolate* isolate = masm->isolate(); |
370 | 384 |
371 // Check that the key is a smi. | 385 // Check that the key is a smi. |
372 __ JumpIfNotSmi(key, &check_name); | 386 __ JumpIfNotSmi(key, &check_name); |
373 __ bind(&index_smi); | 387 __ bind(&index_smi); |
374 // Now the key is known to be a smi. This place is also jumped to from below | 388 // Now the key is known to be a smi. This place is also jumped to from below |
375 // where a numeric string is converted to a smi. | 389 // where a numeric string is converted to a smi. |
376 | 390 |
377 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 391 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
378 Map::kHasIndexedInterceptor, &slow); | 392 Map::kHasIndexedInterceptor, &slow); |
379 | 393 |
380 // Check the receiver's map to see if it has fast elements. | 394 // Check the receiver's map to see if it has fast elements. |
381 __ CheckFastElements(a0, a3, &check_number_dictionary); | 395 __ CheckFastElements(a0, a3, &check_number_dictionary); |
382 | 396 |
383 GenerateFastArrayLoad(masm, receiver, key, a0, a3, a4, v0, &slow); | 397 GenerateFastArrayLoad(masm, receiver, key, a0, a3, a4, v0, &slow, |
| 398 language_mode); |
384 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a4, a3); | 399 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a4, a3); |
385 __ Ret(); | 400 __ Ret(); |
386 | 401 |
387 __ bind(&check_number_dictionary); | 402 __ bind(&check_number_dictionary); |
388 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 403 __ ld(a4, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
389 __ ld(a3, FieldMemOperand(a4, JSObject::kMapOffset)); | 404 __ ld(a3, FieldMemOperand(a4, JSObject::kMapOffset)); |
390 | 405 |
391 // Check whether the elements object is a number dictionary. | 406 // Check whether the elements object is a number dictionary. |
392 // a3: elements map | 407 // a3: elements map |
393 // a4: elements | 408 // a4: elements |
394 __ LoadRoot(at, Heap::kHashTableMapRootIndex); | 409 __ LoadRoot(at, Heap::kHashTableMapRootIndex); |
395 __ Branch(&slow, ne, a3, Operand(at)); | 410 __ Branch(&slow, ne, a3, Operand(at)); |
396 __ dsra32(a0, key, 0); | 411 __ dsra32(a0, key, 0); |
397 __ LoadFromNumberDictionary(&slow, a4, key, v0, a0, a3, a5); | 412 __ LoadFromNumberDictionary(&slow, a4, key, v0, a0, a3, a5); |
398 __ Ret(); | 413 __ Ret(); |
399 | 414 |
400 // Slow case, key and receiver still in a2 and a1. | 415 // Slow case, key and receiver still in a2 and a1. |
401 __ bind(&slow); | 416 __ bind(&slow); |
402 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, a4, | 417 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(), 1, a4, |
403 a3); | 418 a3); |
404 GenerateRuntimeGetProperty(masm); | 419 GenerateRuntimeGetProperty(masm, language_mode); |
405 | 420 |
406 __ bind(&check_name); | 421 __ bind(&check_name); |
407 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); | 422 GenerateKeyNameCheck(masm, key, a0, a3, &index_name, &slow); |
408 | 423 |
409 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, | 424 GenerateKeyedLoadReceiverCheck(masm, receiver, a0, a3, |
410 Map::kHasNamedInterceptor, &slow); | 425 Map::kHasNamedInterceptor, &slow); |
411 | 426 |
412 | 427 |
413 // If the receiver is a fast-case object, check the stub cache. Otherwise | 428 // If the receiver is a fast-case object, check the stub cache. Otherwise |
414 // probe the dictionary. | 429 // probe the dictionary. |
(...skipping 466 matching lines...)
881 patcher.ChangeBranchCondition(ne); | 896 patcher.ChangeBranchCondition(ne); |
882 } else { | 897 } else { |
883 DCHECK(Assembler::IsBne(branch_instr)); | 898 DCHECK(Assembler::IsBne(branch_instr)); |
884 patcher.ChangeBranchCondition(eq); | 899 patcher.ChangeBranchCondition(eq); |
885 } | 900 } |
886 } | 901 } |
887 } // namespace internal | 902 } // namespace internal |
888 } // namespace v8 | 903 } // namespace v8 |
889 | 904 |
890 #endif // V8_TARGET_ARCH_MIPS64 | 905 #endif // V8_TARGET_ARCH_MIPS64 |