| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <assert.h> // For assert | 5 #include <assert.h> // For assert |
| 6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
| 7 | 7 |
| 8 #if V8_TARGET_ARCH_S390 | 8 #if V8_TARGET_ARCH_S390 |
| 9 | 9 |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 1938 matching lines...) |
| 1949 STATIC_ASSERT(Map::kInstanceTypeOffset < 4096); | 1949 STATIC_ASSERT(Map::kInstanceTypeOffset < 4096); |
| 1950 STATIC_ASSERT(LAST_TYPE < 256); | 1950 STATIC_ASSERT(LAST_TYPE < 256); |
| 1951 LoadlB(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 1951 LoadlB(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
| 1952 CmpP(type_reg, Operand(type)); | 1952 CmpP(type_reg, Operand(type)); |
| 1953 } | 1953 } |
| 1954 | 1954 |
| 1955 void MacroAssembler::CompareRoot(Register obj, Heap::RootListIndex index) { | 1955 void MacroAssembler::CompareRoot(Register obj, Heap::RootListIndex index) { |
| 1956 CmpP(obj, MemOperand(kRootRegister, index << kPointerSizeLog2)); | 1956 CmpP(obj, MemOperand(kRootRegister, index << kPointerSizeLog2)); |
| 1957 } | 1957 } |
| 1958 | 1958 |
| 1959 void MacroAssembler::CheckFastObjectElements(Register map, Register scratch, | |
| 1960 Label* fail) { | |
| 1961 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
| 1962 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
| 1963 STATIC_ASSERT(FAST_ELEMENTS == 2); | |
| 1964 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); | |
| 1965 CmpLogicalByte(FieldMemOperand(map, Map::kBitField2Offset), | |
| 1966 Operand(Map::kMaximumBitField2FastHoleySmiElementValue)); | |
| 1967 ble(fail); | |
| 1968 CmpLogicalByte(FieldMemOperand(map, Map::kBitField2Offset), | |
| 1969 Operand(Map::kMaximumBitField2FastHoleyElementValue)); | |
| 1970 bgt(fail); | |
| 1971 } | |
| 1972 | |
| 1973 void MacroAssembler::CheckFastSmiElements(Register map, Register scratch, | |
| 1974 Label* fail) { | |
| 1975 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
| 1976 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
| 1977 CmpLogicalByte(FieldMemOperand(map, Map::kBitField2Offset), | |
| 1978 Operand(Map::kMaximumBitField2FastHoleySmiElementValue)); | |
| 1979 bgt(fail); | |
| 1980 } | |
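The two checkers removed above lean on the STATIC_ASSERTed ordering FAST_SMI_ELEMENTS..FAST_HOLEY_ELEMENTS = 0..3: two unsigned byte compares (CmpLogicalByte plus ble/bgt) against precomputed maxima classify the elements kind. A minimal C++ sketch of the predicates, assuming the kind occupies the low bits of bit field 2 (the real Map::kMaximumBitField2* constants bake in the field's shift):

#include <cstdint>

enum ElementsKind : uint8_t {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
};

// CheckFastObjectElements: fail if kind <= holey-smi max (ble) or
// kind > holey-element max (bgt); pass only in between.
bool IsFastObjectElements(uint8_t kind) {
  return kind > FAST_HOLEY_SMI_ELEMENTS && kind <= FAST_HOLEY_ELEMENTS;
}

// CheckFastSmiElements: fail only if kind > holey-smi max (bgt).
bool IsFastSmiElements(uint8_t kind) {
  return kind <= FAST_HOLEY_SMI_ELEMENTS;
}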
| 1981 | |
| 1982 void MacroAssembler::SmiToDouble(DoubleRegister value, Register smi) { | 1959 void MacroAssembler::SmiToDouble(DoubleRegister value, Register smi) { |
| 1983 SmiUntag(ip, smi); | 1960 SmiUntag(ip, smi); |
| 1984 ConvertIntToDouble(ip, value); | 1961 ConvertIntToDouble(ip, value); |
| 1985 } | 1962 } |
| 1986 void MacroAssembler::StoreNumberToDoubleElements( | |
| 1987 Register value_reg, Register key_reg, Register elements_reg, | |
| 1988 Register scratch1, DoubleRegister double_scratch, Label* fail, | |
| 1989 int elements_offset) { | |
| 1990 DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1)); | |
| 1991 Label smi_value, store; | |
| 1992 | |
| 1993 // Handle smi values specially. | |
| 1994 JumpIfSmi(value_reg, &smi_value); | |
| 1995 | |
| 1996 // Ensure that the object is a heap number | |
| 1997 CheckMap(value_reg, scratch1, isolate()->factory()->heap_number_map(), fail, | |
| 1998 DONT_DO_SMI_CHECK); | |
| 1999 | |
| 2000 LoadDouble(double_scratch, | |
| 2001 FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | |
| 2002 // Force a canonical NaN. | |
| 2003 CanonicalizeNaN(double_scratch); | |
| 2004 b(&store); | |
| 2005 | |
| 2006 bind(&smi_value); | |
| 2007 SmiToDouble(double_scratch, value_reg); | |
| 2008 | |
| 2009 bind(&store); | |
| 2010 SmiToDoubleArrayOffset(scratch1, key_reg); | |
| 2011 StoreDouble(double_scratch, | |
| 2012 FieldMemOperand(elements_reg, scratch1, | |
| 2013 FixedDoubleArray::kHeaderSize - elements_offset)); | |
| 2014 } | |
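The CanonicalizeNaN call in the removed store path exists because holey double arrays reserve one NaN bit pattern as the hole sentinel, and a heap number may carry an arbitrary NaN payload that would alias it. A hedged C++ sketch of the invariant (the sentinel value below is illustrative, not V8's exact constant):

#include <cmath>
#include <cstdint>
#include <cstring>
#include <limits>

// Hypothetical hole pattern; V8 uses a specific quiet-NaN payload.
constexpr uint64_t kHoleNanBits = 0x7FF7FFFFFFFFFFFFull;

// Collapse every NaN to the default quiet NaN before storing, so no
// user-visible value can collide with the hole sentinel.
double Canonicalize(double v) {
  return std::isnan(v) ? std::numeric_limits<double>::quiet_NaN() : v;
}

// The hole test must compare raw bits: as doubles, NaN != NaN.
bool IsHole(double v) {
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof bits);
  return bits == kHoleNanBits;
}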
| 2015 | 1963 |
| 2016 void MacroAssembler::CompareMap(Register obj, Register scratch, Handle<Map> map, | 1964 void MacroAssembler::CompareMap(Register obj, Register scratch, Handle<Map> map, |
| 2017 Label* early_success) { | 1965 Label* early_success) { |
| 2018 LoadP(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); | 1966 LoadP(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); |
| 2019 CompareMap(obj, map, early_success); | 1967 CompareMap(obj, map, early_success); |
| 2020 } | 1968 } |
| 2021 | 1969 |
| 2022 void MacroAssembler::CompareMap(Register obj_map, Handle<Map> map, | 1970 void MacroAssembler::CompareMap(Register obj_map, Handle<Map> map, |
| 2023 Label* early_success) { | 1971 Label* early_success) { |
| 2024 mov(r0, Operand(map)); | 1972 mov(r0, Operand(map)); |
| (...skipping 459 matching lines...) |
| 2484 LoadP(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 2432 LoadP(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 2485 } | 2433 } |
| 2486 } else { | 2434 } else { |
| 2487 // Slot is in the current function context. Move it into the | 2435 // Slot is in the current function context. Move it into the |
| 2488 // destination register in case we store into it (the write barrier | 2436 // destination register in case we store into it (the write barrier |
| 2489 // cannot be allowed to destroy the context in esi). | 2437 // cannot be allowed to destroy the context in esi). |
| 2490 LoadRR(dst, cp); | 2438 LoadRR(dst, cp); |
| 2491 } | 2439 } |
| 2492 } | 2440 } |
| 2493 | 2441 |
| 2494 void MacroAssembler::LoadTransitionedArrayMapConditional( | |
| 2495 ElementsKind expected_kind, ElementsKind transitioned_kind, | |
| 2496 Register map_in_out, Register scratch, Label* no_map_match) { | |
| 2497 DCHECK(IsFastElementsKind(expected_kind)); | |
| 2498 DCHECK(IsFastElementsKind(transitioned_kind)); | |
| 2499 | |
| 2500 // Check that the function's map is the same as the expected cached map. | |
| 2501 LoadP(scratch, NativeContextMemOperand()); | |
| 2502 LoadP(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind))); | |
| 2503 CmpP(map_in_out, ip); | |
| 2504 bne(no_map_match); | |
| 2505 | |
| 2506 // Use the transitioned cached map. | |
| 2507 LoadP(map_in_out, | |
| 2508 ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind))); | |
| 2509 } | |
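The removed transition helper swaps maps only when the incoming map is exactly the native context's cached initial Array map for expected_kind; anything else bails to no_map_match. The shape of the lookup as plain C++ over a hypothetical array-map cache (illustrative types, not V8's context layout):

struct Map;

struct NativeContext {
  // One cached initial JSArray map per fast ElementsKind, as in
  // Context::ArrayMapIndex(kind).
  Map* array_maps[6];
};

// Returns the transitioned map, or nullptr for the no_map_match path.
Map* LoadTransitionedArrayMap(const NativeContext& cx, Map* map_in,
                              int expected_kind, int transitioned_kind) {
  if (map_in != cx.array_maps[expected_kind]) return nullptr;
  return cx.array_maps[transitioned_kind];
}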
| 2510 | |
| 2511 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { | 2442 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { |
| 2512 LoadP(dst, NativeContextMemOperand()); | 2443 LoadP(dst, NativeContextMemOperand()); |
| 2513 LoadP(dst, ContextMemOperand(dst, index)); | 2444 LoadP(dst, ContextMemOperand(dst, index)); |
| 2514 } | 2445 } |
| 2515 | 2446 |
| 2516 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 2447 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
| 2517 Register map, | 2448 Register map, |
| 2518 Register scratch) { | 2449 Register scratch) { |
| 2519 // Load the initial map. The global functions all have initial maps. | 2450 // Load the initial map. The global functions all have initial maps. |
| 2520 LoadP(map, | 2451 LoadP(map, |
| (...skipping 64 matching lines...) |
| 2585 Label* smi_case) { | 2516 Label* smi_case) { |
| 2586 STATIC_ASSERT(kSmiTag == 0); | 2517 STATIC_ASSERT(kSmiTag == 0); |
| 2587 STATIC_ASSERT(kSmiTagSize == 1); | 2518 STATIC_ASSERT(kSmiTagSize == 1); |
| 2588 // this won't work if src == dst | 2519 // this won't work if src == dst |
| 2589 DCHECK(src.code() != dst.code()); | 2520 DCHECK(src.code() != dst.code()); |
| 2590 SmiUntag(dst, src); | 2521 SmiUntag(dst, src); |
| 2591 TestIfSmi(src); | 2522 TestIfSmi(src); |
| 2592 beq(smi_case); | 2523 beq(smi_case); |
| 2593 } | 2524 } |
| 2594 | 2525 |
| 2595 void MacroAssembler::UntagAndJumpIfNotSmi(Register dst, Register src, | |
| 2596 Label* non_smi_case) { | |
| 2597 STATIC_ASSERT(kSmiTag == 0); | |
| 2598 STATIC_ASSERT(kSmiTagSize == 1); | |
| 2599 | |
| 2600 // We can more optimally use TestIfSmi if dst != src | |
| 2601 // otherwise, the UnTag operation will kill the CC and we cannot | |
| 2602 // test the Tag bit. | |
| 2603 if (src.code() != dst.code()) { | |
| 2604 SmiUntag(dst, src); | |
| 2605 TestIfSmi(src); | |
| 2606 } else { | |
| 2607 TestBit(src, 0, r0); | |
| 2608 SmiUntag(dst, src); | |
| 2609 LoadAndTestRR(r0, r0); | |
| 2610 } | |
| 2611 bne(non_smi_case); | |
| 2612 } | |
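The comment in the removed helper captures the one subtlety it shares with UntagAndJumpIfSmi above: on S390 the untagging shift clobbers the condition code, so when dst aliases src the tag bit must be extracted into r0 before the shift and retested afterwards. The smi arithmetic both helpers assume, sketched under the 32-bit layout the STATIC_ASSERTs pin down (64-bit targets shift smis further left):

#include <cstdint>

constexpr int kSmiTagSize = 1;  // kSmiTag == 0: tag bit is bit 0, clear for smis

bool IsSmi(intptr_t v) { return (v & 1) == 0; }

// Arithmetic right shift drops the tag and restores the signed value.
intptr_t SmiUntag(intptr_t v) { return v >> kSmiTagSize; }

intptr_t SmiTag(intptr_t v) { return v << kSmiTagSize; }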
| 2613 | |
| 2614 void MacroAssembler::JumpIfEitherSmi(Register reg1, Register reg2, | 2526 void MacroAssembler::JumpIfEitherSmi(Register reg1, Register reg2, |
| 2615 Label* on_either_smi) { | 2527 Label* on_either_smi) { |
| 2616 STATIC_ASSERT(kSmiTag == 0); | 2528 STATIC_ASSERT(kSmiTag == 0); |
| 2617 JumpIfSmi(reg1, on_either_smi); | 2529 JumpIfSmi(reg1, on_either_smi); |
| 2618 JumpIfSmi(reg2, on_either_smi); | 2530 JumpIfSmi(reg2, on_either_smi); |
| 2619 } | 2531 } |
| 2620 | 2532 |
| 2621 void MacroAssembler::AssertNotNumber(Register object) { | 2533 void MacroAssembler::AssertNotNumber(Register object) { |
| 2622 if (emit_debug_code()) { | 2534 if (emit_debug_code()) { |
| 2623 STATIC_ASSERT(kSmiTag == 0); | 2535 STATIC_ASSERT(kSmiTag == 0); |
| (...skipping 782 matching lines...) |
| 3406 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) { | 3318 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) { |
| 3407 int code = config->GetAllocatableGeneralCode(i); | 3319 int code = config->GetAllocatableGeneralCode(i); |
| 3408 Register candidate = Register::from_code(code); | 3320 Register candidate = Register::from_code(code); |
| 3409 if (regs & candidate.bit()) continue; | 3321 if (regs & candidate.bit()) continue; |
| 3410 return candidate; | 3322 return candidate; |
| 3411 } | 3323 } |
| 3412 UNREACHABLE(); | 3324 UNREACHABLE(); |
| 3413 return no_reg; | 3325 return no_reg; |
| 3414 } | 3326 } |
| 3415 | 3327 |
| 3416 void MacroAssembler::JumpIfDictionaryInPrototypeChain(Register object, | |
| 3417 Register scratch0, | |
| 3418 Register scratch1, | |
| 3419 Label* found) { | |
| 3420 DCHECK(!scratch1.is(scratch0)); | |
| 3421 Register current = scratch0; | |
| 3422 Label loop_again, end; | |
| 3423 | |
| 3424 // scratch contained elements pointer. | |
| 3425 LoadRR(current, object); | |
| 3426 LoadP(current, FieldMemOperand(current, HeapObject::kMapOffset)); | |
| 3427 LoadP(current, FieldMemOperand(current, Map::kPrototypeOffset)); | |
| 3428 CompareRoot(current, Heap::kNullValueRootIndex); | |
| 3429 beq(&end); | |
| 3430 | |
| 3431 // Loop based on the map going up the prototype chain. | |
| 3432 bind(&loop_again); | |
| 3433 LoadP(current, FieldMemOperand(current, HeapObject::kMapOffset)); | |
| 3434 | |
| 3435 STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE); | |
| 3436 STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE); | |
| 3437 LoadlB(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset)); | |
| 3438 CmpP(scratch1, Operand(JS_OBJECT_TYPE)); | |
| 3439 blt(found); | |
| 3440 | |
| 3441 LoadlB(scratch1, FieldMemOperand(current, Map::kBitField2Offset)); | |
| 3442 DecodeField<Map::ElementsKindBits>(scratch1); | |
| 3443 CmpP(scratch1, Operand(DICTIONARY_ELEMENTS)); | |
| 3444 beq(found); | |
| 3445 LoadP(current, FieldMemOperand(current, Map::kPrototypeOffset)); | |
| 3446 CompareRoot(current, Heap::kNullValueRootIndex); | |
| 3447 bne(&loop_again); | |
| 3448 | |
| 3449 bind(&end); | |
| 3450 } | |
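The walker removed above follows map->prototype links until the null sentinel, jumping to found either for anything below JS_OBJECT_TYPE (proxies and value wrappers are handled conservatively) or for dictionary-mode elements. Its loop shape in plain C++, over hypothetical Object/Map structs that mirror the fields the assembly loads (not V8's real object layout):

struct Map;

struct Object {
  Map* map;
};

struct Map {
  Object* prototype;             // null terminates the chain
  bool is_special_receiver;      // stands in for the instance-type check
  bool has_dictionary_elements;  // stands in for DecodeField<ElementsKindBits>
};

bool DictionaryInPrototypeChain(const Object* object) {
  for (const Object* cur = object->map->prototype; cur != nullptr;
       cur = cur->map->prototype) {
    if (cur->map->is_special_receiver) return true;
    if (cur->map->has_dictionary_elements) return true;
  }
  return false;
}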
| 3451 | |
| 3452 void MacroAssembler::mov(Register dst, const Operand& src) { | 3328 void MacroAssembler::mov(Register dst, const Operand& src) { |
| 3453 if (src.rmode_ != kRelocInfo_NONEPTR) { | 3329 if (src.rmode_ != kRelocInfo_NONEPTR) { |
| 3454 // some form of relocation needed | 3330 // some form of relocation needed |
| 3455 RecordRelocInfo(src.rmode_, src.imm_); | 3331 RecordRelocInfo(src.rmode_, src.imm_); |
| 3456 } | 3332 } |
| 3457 | 3333 |
| 3458 #if V8_TARGET_ARCH_S390X | 3334 #if V8_TARGET_ARCH_S390X |
| 3459 int64_t value = src.immediate(); | 3335 int64_t value = src.immediate(); |
| 3460 int32_t hi_32 = static_cast<int64_t>(value) >> 32; | 3336 int32_t hi_32 = static_cast<int64_t>(value) >> 32; |
| 3461 int32_t lo_32 = static_cast<int32_t>(value); | 3337 int32_t lo_32 = static_cast<int32_t>(value); |
| (...skipping 1845 matching lines...) |
| 5307 } | 5183 } |
| 5308 if (mag.shift > 0) ShiftRightArith(result, result, Operand(mag.shift)); | 5184 if (mag.shift > 0) ShiftRightArith(result, result, Operand(mag.shift)); |
| 5309 ExtractBit(r0, dividend, 31); | 5185 ExtractBit(r0, dividend, 31); |
| 5310 AddP(result, r0); | 5186 AddP(result, r0); |
| 5311 } | 5187 } |
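The visible tail here finishes signed division by a constant via magic-number multiplication: take the high half of the widened product, arithmetic-shift it, then add the dividend's sign bit so the quotient truncates toward zero. A hedged C++ sketch of the steps shown (multiplier and shift come from an offline per-divisor computation, Hacker's Delight style; this covers divisors whose magic needs no add/subtract fixup, which, when required, sits in the elided lines above):

#include <cstdint>

int32_t TruncatingDivide(int32_t dividend, int32_t multiplier, int shift) {
  // High 32 bits of the 64-bit product approximate dividend / divisor.
  int32_t result = static_cast<int32_t>(
      (static_cast<int64_t>(dividend) * multiplier) >> 32);
  if (shift > 0) result >>= shift;                  // ShiftRightArith
  result += static_cast<uint32_t>(dividend) >> 31;  // ExtractBit(31) + AddP
  return result;
}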
| 5312 | 5188 |
| 5313 } // namespace internal | 5189 } // namespace internal |
| 5314 } // namespace v8 | 5190 } // namespace v8 |
| 5315 | 5191 |
| 5316 #endif // V8_TARGET_ARCH_S390 | 5192 #endif // V8_TARGET_ARCH_S390 |