| Index: src/ppc/code-stubs-ppc.cc
|
| diff --git a/src/ppc/code-stubs-ppc.cc b/src/ppc/code-stubs-ppc.cc
|
| index 015a6ef1afbdbfe7763b01e79a03de41a6d98d3e..ce1149fe7f32df0ed2fa61726dfb05d4a563d73f 100644
|
| --- a/src/ppc/code-stubs-ppc.cc
|
| +++ b/src/ppc/code-stubs-ppc.cc
|
| @@ -260,6 +260,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
|
| // They are both equal and they are not both Smis so both of them are not
|
| // Smis. If it's not a heap number, then return equal.
|
| if (cond == lt || cond == gt) {
|
| + Label not_simd;
|
| // Call runtime on identical JSObjects.
|
| __ CompareObjectType(r3, r7, r7, FIRST_SPEC_OBJECT_TYPE);
|
| __ bge(slow);
|
| @@ -267,8 +268,11 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
|
| __ cmpi(r7, Operand(SYMBOL_TYPE));
|
| __ beq(slow);
|
| // Call runtime on identical SIMD values since we must throw a TypeError.
|
| - __ cmpi(r7, Operand(FLOAT32X4_TYPE));
|
| - __ beq(slow);
|
| + __ cmpi(r7, Operand(FIRST_SIMD_VALUE_TYPE));
|
| + __ blt(&not_simd);
|
| + __ cmpi(r7, Operand(LAST_SIMD_VALUE_TYPE));
|
| + __ ble(slow);
|
| + __ bind(&not_simd);
|
| if (is_strong(strength)) {
|
| // Call the runtime on anything that is converted in the semantics, since
|
| // we need to throw a TypeError. Smis have already been ruled out.
|
| @@ -282,14 +286,18 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
|
| __ beq(&heap_number);
|
| // Comparing JS objects with <=, >= is complicated.
|
| if (cond != eq) {
|
| + Label not_simd;
|
| __ cmpi(r7, Operand(FIRST_SPEC_OBJECT_TYPE));
|
| __ bge(slow);
|
| // Call runtime on identical symbols since we need to throw a TypeError.
|
| __ cmpi(r7, Operand(SYMBOL_TYPE));
|
| __ beq(slow);
|
| // Call runtime on identical SIMD values since we must throw a TypeError.
|
| - __ cmpi(r7, Operand(FLOAT32X4_TYPE));
|
| - __ beq(slow);
|
| + __ cmpi(r7, Operand(FIRST_SIMD_VALUE_TYPE));
|
| + __ blt(&not_simd);
|
| + __ cmpi(r7, Operand(LAST_SIMD_VALUE_TYPE));
|
| + __ ble(slow);
|
| + __ bind(&not_simd);
|
| if (is_strong(strength)) {
|
| // Call the runtime on anything that is converted in the semantics,
|
| // since we need to throw a TypeError. Smis and heap numbers have
|
| @@ -361,8 +369,11 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
|
| __ li(r4, Operand((cond == le) ? GREATER : LESS));
|
| __ isel(eq, r3, r3, r4);
|
| } else {
|
| + Label not_equal;
|
| + __ bne(&not_equal);
|
| // All-zero means Infinity means equal.
|
| - __ Ret(eq);
|
| + __ Ret();
|
| + __ bind(&not_equal);
|
| if (cond == le) {
|
| __ li(r3, Operand(GREATER)); // NaN <= NaN should fail.
|
| } else {
|
| @@ -393,15 +404,13 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm, Register lhs,
|
| // If rhs is not a number and lhs is a Smi then strict equality cannot
|
| // succeed. Return non-equal
|
| // If rhs is r3 then there is already a non zero value in it.
|
| + Label skip;
|
| + __ beq(&skip);
|
| if (!rhs.is(r3)) {
|
| - Label skip;
|
| - __ beq(&skip);
|
| __ mov(r3, Operand(NOT_EQUAL));
|
| - __ Ret();
|
| - __ bind(&skip);
|
| - } else {
|
| - __ Ret(ne);
|
| }
|
| + __ Ret();
|
| + __ bind(&skip);
|
| } else {
|
| // Smi compared non-strictly with a non-Smi non-heap-number. Call
|
| // the runtime.
|
| @@ -425,15 +434,13 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm, Register lhs,
|
| // If lhs is not a number and rhs is a smi then strict equality cannot
|
| // succeed. Return non-equal.
|
| // If lhs is r3 then there is already a non zero value in it.
|
| + Label skip;
|
| + __ beq(&skip);
|
| if (!lhs.is(r3)) {
|
| - Label skip;
|
| - __ beq(&skip);
|
| __ mov(r3, Operand(NOT_EQUAL));
|
| - __ Ret();
|
| - __ bind(&skip);
|
| - } else {
|
| - __ Ret(ne);
|
| }
|
| + __ Ret();
|
| + __ bind(&skip);
|
| } else {
|
| // Smi compared non-strictly with a non-smi non-heap-number. Call
|
| // the runtime.
|
| @@ -3967,10 +3974,12 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
|
| STATIC_ASSERT(kInternalizedTag == 0);
|
| __ orx(tmp3, tmp1, tmp2);
|
| __ andi(r0, tmp3, Operand(kIsNotInternalizedMask));
|
| + __ bne(&is_symbol, cr0);
|
| // Make sure r3 is non-zero. At this point input operands are
|
| // guaranteed to be non-zero.
|
| DCHECK(right.is(r3));
|
| - __ Ret(eq, cr0);
|
| + __ Ret();
|
| + __ bind(&is_symbol);
|
| }
|
|
|
| // Check that both strings are sequential one-byte.
|
| @@ -4207,7 +4216,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(
|
| __ srwi(scratch2, scratch2, Operand(Name::kHashShift));
|
| __ and_(scratch2, scratch1, scratch2);
|
|
|
| - // Scale the index by multiplying by the entry size.
|
| + // Scale the index by multiplying by the entry size (kEntrySize elements).
|
| STATIC_ASSERT(NameDictionary::kEntrySize == 3);
|
| // scratch2 = scratch2 * 3.
|
| __ ShiftLeftImm(ip, scratch2, Operand(1));
|
| @@ -5312,163 +5321,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| -void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
|
| - Register context = cp;
|
| - Register result = r3;
|
| - Register slot = r5;
|
| -
|
| - // Go up the context chain to the script context.
|
| - for (int i = 0; i < depth(); ++i) {
|
| - __ LoadP(result, ContextOperand(context, Context::PREVIOUS_INDEX));
|
| - context = result;
|
| - }
|
| -
|
| - // Load the PropertyCell value at the specified slot.
|
| - __ ShiftLeftImm(r0, slot, Operand(kPointerSizeLog2));
|
| - __ add(result, context, r0);
|
| - __ LoadP(result, ContextOperand(result));
|
| - __ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset));
|
| -
|
| - // If the result is not the_hole, return. Otherwise, handle in the runtime.
|
| - __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
|
| - __ Ret(ne);
|
| -
|
| - // Fallback to runtime.
|
| - __ SmiTag(slot);
|
| - __ Push(slot);
|
| - __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1);
|
| -}
|
| -
|
| -
|
| -void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
|
| - Register value = r3;
|
| - Register slot = r5;
|
| -
|
| - Register cell = r4;
|
| - Register cell_details = r6;
|
| - Register cell_value = r7;
|
| - Register cell_value_map = r8;
|
| - Register scratch = r9;
|
| -
|
| - Register context = cp;
|
| - Register context_temp = cell;
|
| -
|
| - Label fast_heapobject_case, fast_smi_case, slow_case;
|
| -
|
| - if (FLAG_debug_code) {
|
| - __ CompareRoot(value, Heap::kTheHoleValueRootIndex);
|
| - __ Check(ne, kUnexpectedValue);
|
| - }
|
| -
|
| - // Go up the context chain to the script context.
|
| - for (int i = 0; i < depth(); i++) {
|
| - __ LoadP(context_temp, ContextOperand(context, Context::PREVIOUS_INDEX));
|
| - context = context_temp;
|
| - }
|
| -
|
| - // Load the PropertyCell at the specified slot.
|
| - __ ShiftLeftImm(r0, slot, Operand(kPointerSizeLog2));
|
| - __ add(cell, context, r0);
|
| - __ LoadP(cell, ContextOperand(cell));
|
| -
|
| - // Load PropertyDetails for the cell (actually only the cell_type and kind).
|
| - __ LoadP(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
|
| - __ SmiUntag(cell_details);
|
| - __ andi(cell_details, cell_details,
|
| - Operand(PropertyDetails::PropertyCellTypeField::kMask |
|
| - PropertyDetails::KindField::kMask |
|
| - PropertyDetails::kAttributesReadOnlyMask));
|
| -
|
| - // Check if PropertyCell holds mutable data.
|
| - Label not_mutable_data;
|
| - __ cmpi(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
|
| - PropertyCellType::kMutable) |
|
| - PropertyDetails::KindField::encode(kData)));
|
| - __ bne(&not_mutable_data);
|
| - __ JumpIfSmi(value, &fast_smi_case);
|
| -
|
| - __ bind(&fast_heapobject_case);
|
| - __ StoreP(value, FieldMemOperand(cell, PropertyCell::kValueOffset), r0);
|
| - // RecordWriteField clobbers the value register, so we copy it before the
|
| - // call.
|
| - __ mr(r6, value);
|
| - __ RecordWriteField(cell, PropertyCell::kValueOffset, r6, scratch,
|
| - kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
|
| - OMIT_SMI_CHECK);
|
| - __ Ret();
|
| -
|
| - __ bind(&not_mutable_data);
|
| - // Check if PropertyCell value matches the new value (relevant for Constant,
|
| - // ConstantType and Undefined cells).
|
| - Label not_same_value;
|
| - __ LoadP(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
|
| - __ cmp(cell_value, value);
|
| - __ bne(&not_same_value);
|
| -
|
| - // Make sure the PropertyCell is not marked READ_ONLY.
|
| - __ andi(r0, cell_details, Operand(PropertyDetails::kAttributesReadOnlyMask));
|
| - __ bne(&slow_case, cr0);
|
| -
|
| - if (FLAG_debug_code) {
|
| - Label done;
|
| - // This can only be true for Constant, ConstantType and Undefined cells,
|
| - // because we never store the_hole via this stub.
|
| - __ cmpi(cell_details,
|
| - Operand(PropertyDetails::PropertyCellTypeField::encode(
|
| - PropertyCellType::kConstant) |
|
| - PropertyDetails::KindField::encode(kData)));
|
| - __ beq(&done);
|
| - __ cmpi(cell_details,
|
| - Operand(PropertyDetails::PropertyCellTypeField::encode(
|
| - PropertyCellType::kConstantType) |
|
| - PropertyDetails::KindField::encode(kData)));
|
| - __ beq(&done);
|
| - __ cmpi(cell_details,
|
| - Operand(PropertyDetails::PropertyCellTypeField::encode(
|
| - PropertyCellType::kUndefined) |
|
| - PropertyDetails::KindField::encode(kData)));
|
| - __ Check(eq, kUnexpectedValue);
|
| - __ bind(&done);
|
| - }
|
| - __ Ret();
|
| - __ bind(&not_same_value);
|
| -
|
| - // Check if PropertyCell contains data with constant type (and is not
|
| - // READ_ONLY).
|
| - __ cmpi(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
|
| - PropertyCellType::kConstantType) |
|
| - PropertyDetails::KindField::encode(kData)));
|
| - __ bne(&slow_case);
|
| -
|
| - // Now either both old and new values must be smis or both must be heap
|
| - // objects with same map.
|
| - Label value_is_heap_object;
|
| - __ JumpIfNotSmi(value, &value_is_heap_object);
|
| - __ JumpIfNotSmi(cell_value, &slow_case);
|
| - // Old and new values are smis, no need for a write barrier here.
|
| - __ bind(&fast_smi_case);
|
| - __ StoreP(value, FieldMemOperand(cell, PropertyCell::kValueOffset), r0);
|
| - __ Ret();
|
| -
|
| - __ bind(&value_is_heap_object);
|
| - __ JumpIfSmi(cell_value, &slow_case);
|
| -
|
| - __ LoadP(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
|
| - __ LoadP(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
|
| - __ cmp(cell_value_map, scratch);
|
| - __ beq(&fast_heapobject_case);
|
| -
|
| - // Fallback to runtime.
|
| - __ bind(&slow_case);
|
| - __ SmiTag(slot);
|
| - __ Push(slot, value);
|
| - __ TailCallRuntime(is_strict(language_mode())
|
| - ? Runtime::kStoreGlobalViaContext_Strict
|
| - : Runtime::kStoreGlobalViaContext_Sloppy,
|
| - 2, 1);
|
| -}
|
| -
|
| -
|
| static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
|
| return ref0.address() - ref1.address();
|
| }
|
|
|