OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 | 5 |
6 #include "src/v8.h" | 6 #include "src/v8.h" |
7 | 7 |
8 #if V8_TARGET_ARCH_MIPS64 | 8 #if V8_TARGET_ARCH_MIPS64 |
9 | 9 |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 310 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// Slow-path stub for LoadIC: tail-calls the generic %GetProperty runtime
// function with (receiver, name) pushed on the stack.
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is in ra.

  // Stage the receiver through the IC temp register so both operands can
  // be pushed together below.  NOTE(review): presumably the copy also
  // protects the descriptor register across the Push — confirm against
  // LoadIC_TempRegister()'s definition.
  __ mov(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister());
  __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister());

  // Tail call with 2 arguments and 1 result slot; the runtime call
  // returns directly to our caller.
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
329 | 329 |
330 | 330 |
// Computes the address of a mapped element of a sloppy-arguments object.
// On the fall-through path, returns a MemOperand (register scratch2,
// offset 0) addressing the context slot that holds the element's value.
// Jumps to |unmapped_case| — with the parameter map left in scratch1 —
// when the key is outside the mapped range or the mapped slot holds the
// hole, and to |slow_case| on any receiver/key/elements check failure.
// Clobbers scratch1, scratch2 and scratch3.
static MemOperand GenerateMappedArgumentsLookup(
    MacroAssembler* masm, Register object, Register key, Register scratch1,
    Register scratch2, Register scratch3, Label* unmapped_case,
    Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the map check
  // later, we do not need to check for interceptors or whether it
  // requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ GetObjectType(object, scratch1, scratch2);
  __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE));

  // Check that the key is a positive smi.
  __ NonNegativeSmiTst(key, scratch1);
  __ Branch(slow_case, ne, scratch1, Operand(zero_reg));

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
  __ ld(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  // The first two slots of the parameter map are not mapped entries (see
  // the 2 * kPointerSize skip below), hence length - 2.
  __ ld(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
  __ Dsubu(scratch2, scratch2, Operand(Smi::FromInt(2)));
  // Both key and length are smis here, so the tagged unsigned compare is
  // valid and also catches any stray negative key.
  __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2));

  // Load element index and check whether it is the hole.
  // kOffset skips the FixedArray header plus the two leading
  // non-mapped slots, and compensates for the heap-object tag.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;

  // scratch3 = untagged key scaled to a byte offset into the map.
  __ SmiUntag(scratch3, key);
  __ dsll(scratch3, scratch3, kPointerSizeLog2);
  __ Daddu(scratch3, scratch3, Operand(kOffset));

  // scratch2 = the mapped entry (a smi context index, or the hole).
  __ Daddu(scratch2, scratch1, scratch3);
  __ ld(scratch2, MemOperand(scratch2));
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  // scratch1 = the context (element 0 of the parameter map).
  __ ld(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  // scratch2 = address of the context slot indexed by the mapped entry.
  __ SmiUntag(scratch3, scratch2);
  __ dsll(scratch3, scratch3, kPointerSizeLog2);
  __ Daddu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
  __ Daddu(scratch2, scratch1, scratch3);
  return MemOperand(scratch2);
}
382 | |
383 | |
// Computes the address of an element in the arguments backing store —
// the unmapped case of a sloppy-arguments lookup.  Expects the parameter
// map in |parameter_map| (which is clobbered) and returns a MemOperand
// (register scratch, offset 0) addressing the element.  Jumps to
// |slow_case| when the backing store is not a plain FixedArray or the
// key is out of bounds.
static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ ld(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
  // Bail out if the backing store has been replaced by something that is
  // not a plain FixedArray (e.g. dictionary elements).
  __ CheckMap(backing_store, scratch, Heap::kFixedArrayMapRootIndex, slow_case,
              DONT_DO_SMI_CHECK);
  // Bounds check: unsigned compare of the (smi) key against the (smi)
  // length; tagged compare is fine since both carry the same tag.
  __ ld(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
  __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
  // scratch = backing_store + untagged(key) * kPointerSize
  //           + FixedArray header - heap-object tag.
  __ SmiUntag(scratch, key);
  __ dsll(scratch, scratch, kPointerSizeLog2);
  __ Daddu(scratch, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ Daddu(scratch, backing_store, scratch);
  return MemOperand(scratch);
}
406 | |
407 | |
408 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) { | |
409 Register receiver = StoreDescriptor::ReceiverRegister(); | |
410 Register key = StoreDescriptor::NameRegister(); | |
411 Register value = StoreDescriptor::ValueRegister(); | |
412 DCHECK(value.is(a0)); | |
413 | |
414 Label slow, notin; | |
415 // Store address is returned in register (of MemOperand) mapped_location. | |
416 MemOperand mapped_location = GenerateMappedArgumentsLookup( | |
417 masm, receiver, key, a3, a4, a5, ¬in, &slow); | |
418 __ sd(value, mapped_location); | |
419 __ mov(t1, value); | |
420 DCHECK_EQ(mapped_location.offset(), 0); | |
421 __ RecordWrite(a3, mapped_location.rm(), t1, kRAHasNotBeenSaved, | |
422 kDontSaveFPRegs); | |
423 __ Ret(USE_DELAY_SLOT); | |
424 __ mov(v0, value); // (In delay slot) return the value stored in v0. | |
425 __ bind(¬in); | |
426 // The unmapped lookup expects that the parameter map is in a3. | |
427 // Store address is returned in register (of MemOperand) unmapped_location. | |
428 MemOperand unmapped_location = | |
429 GenerateUnmappedArgumentsLookup(masm, key, a3, a4, &slow); | |
430 __ sd(value, unmapped_location); | |
431 __ mov(t1, value); | |
432 DCHECK_EQ(unmapped_location.offset(), 0); | |
433 __ RecordWrite(a3, unmapped_location.rm(), t1, kRAHasNotBeenSaved, | |
434 kDontSaveFPRegs); | |
435 __ Ret(USE_DELAY_SLOT); | |
436 __ mov(v0, a0); // (In delay slot) return the value stored in v0. | |
437 __ bind(&slow); | |
438 GenerateMiss(masm); | |
439 } | |
440 | |
441 | |
442 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { | 331 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { |
443 // The return address is in ra. | 332 // The return address is in ra. |
444 Isolate* isolate = masm->isolate(); | 333 Isolate* isolate = masm->isolate(); |
445 | 334 |
446 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::SlotRegister(), | 335 DCHECK(!AreAliased(a4, a5, LoadWithVectorDescriptor::SlotRegister(), |
447 LoadWithVectorDescriptor::VectorRegister())); | 336 LoadWithVectorDescriptor::VectorRegister())); |
448 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a4, a5); | 337 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a4, a5); |
449 | 338 |
450 LoadIC_PushArgs(masm); | 339 LoadIC_PushArgs(masm); |
451 | 340 |
(...skipping 526 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
978 patcher.ChangeBranchCondition(ne); | 867 patcher.ChangeBranchCondition(ne); |
979 } else { | 868 } else { |
980 DCHECK(Assembler::IsBne(branch_instr)); | 869 DCHECK(Assembler::IsBne(branch_instr)); |
981 patcher.ChangeBranchCondition(eq); | 870 patcher.ChangeBranchCondition(eq); |
982 } | 871 } |
983 } | 872 } |
984 } | 873 } |
985 } // namespace v8::internal | 874 } // namespace v8::internal |
986 | 875 |
987 #endif // V8_TARGET_ARCH_MIPS64 | 876 #endif // V8_TARGET_ARCH_MIPS64 |
OLD | NEW |