| Index: gcc/gcc/tree-ssa-forwprop.c
|
| diff --git a/gcc/gcc/tree-ssa-forwprop.c b/gcc/gcc/tree-ssa-forwprop.c
|
| index 17c45ff7c0a6dabe5375ba03902e4fa6db205b62..0b9925ea773e74c77dcfbdf946419a9b21b7d31d 100644
|
| --- a/gcc/gcc/tree-ssa-forwprop.c
|
| +++ b/gcc/gcc/tree-ssa-forwprop.c
|
| @@ -41,7 +41,7 @@ along with GCC; see the file COPYING3. If not see
|
| when we have a generalized tree combiner.
|
|
|
| One class of common cases we handle is forward propagating a single use
|
| - variable into a COND_EXPR.
|
| + variable into a COND_EXPR.
|
|
|
| bb0:
|
| x = a COND b;
|
| @@ -51,13 +51,13 @@ along with GCC; see the file COPYING3. If not see
|
|
|
| bb0:
|
| if (a COND b) goto ... else goto ...
|
| -
|
| +
|
| Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
|
|
|
| Or (assuming c1 and c2 are constants):
|
|
|
| bb0:
|
| - x = a + c1;
|
| + x = a + c1;
|
| if (x EQ/NEQ c2) goto ... else goto ...
|
|
|
| Will be transformed into:
|
| @@ -66,7 +66,7 @@ along with GCC; see the file COPYING3. If not see
|
| if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
|
|
|
| Similarly for x = a - c1.
|
| -
|
| +
|
| Or
|
|
|
| bb0:
|
| @@ -147,6 +147,14 @@ along with GCC; see the file COPYING3. If not see
|
|
|
| ptr2 = &x[index];
|
|
|
| + Or
|
| + ssa = (int) decl
|
| + res = ssa & 1
|
| +
|
| + Provided that decl has known alignment >= 2, this will get turned into
|
| +
|
| + res = 0
|
| +
|
| We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
|
| allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
|
| {NOT_EXPR,NEG_EXPR}.
|
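| A minimal sketch (not part of the patch; names hypothetical) of C source
|
| that the new alignment-based folding above applies to. Note that decl is
|
| taken by address, since simplify_bitwise_and below looks for an ADDR_EXPR
|
| behind the cast:
|
| static short x; /* known alignment >= 2 */
|
| int f (void)
|
| {
|
| int p = (int) &x; /* ssa = (int) decl */
|
| return p & 1; /* res = ssa & 1, folded to res = 0 */
|
| }
|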
| @@ -178,8 +186,7 @@ get_prop_dest_stmt (tree name, tree *final_name_p)
|
| return NULL;
|
|
|
| /* If this is not a trivial copy, we found it. */
|
| - if (!gimple_assign_copy_p (use_stmt)
|
| - || TREE_CODE (gimple_assign_lhs (use_stmt)) != SSA_NAME
|
| + if (!gimple_assign_ssa_name_copy_p (use_stmt)
|
| || gimple_assign_rhs1 (use_stmt) != name)
|
| break;
|
|
|
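| For reference, a sketch (hypothetical GIMPLE) of what the tightened
|
| predicate matches: gimple_assign_ssa_name_copy_p accepts
|
| x_1 = y_2; /* SSA name copied to an SSA name */
|
| but rejects
|
| x_1 = y; /* RHS is a bare decl, not an SSA name */
|
| which is what the replaced two-part test spelled out.
|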
| @@ -217,12 +224,11 @@ get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
|
| }
|
|
|
| /* If name is defined by a PHI node or is the default def, bail out. */
|
| - if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
|
| + if (!is_gimple_assign (def_stmt))
|
| return NULL;
|
|
|
| - /* If name is not a simple copy destination, we found it. */
|
| - if (!gimple_assign_copy_p (def_stmt)
|
| - || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
|
| + /* If def_stmt is not a simple copy, we may have found it. */
|
| + if (!gimple_assign_ssa_name_copy_p (def_stmt))
|
| {
|
| tree rhs;
|
|
|
| @@ -258,6 +264,7 @@ can_propagate_from (gimple def_stmt)
|
| ssa_op_iter iter;
|
|
|
| gcc_assert (is_gimple_assign (def_stmt));
|
| +
|
| /* If the rhs has side-effects we cannot propagate from it. */
|
| if (gimple_has_volatile_ops (def_stmt))
|
| return false;
|
| @@ -268,8 +275,8 @@ can_propagate_from (gimple def_stmt)
|
| return false;
|
|
|
| /* Constants can always be propagated. */
|
| - if (is_gimple_min_invariant
|
| - (rhs_to_tree (TREE_TYPE (gimple_assign_lhs (def_stmt)), def_stmt)))
|
| + if (gimple_assign_single_p (def_stmt)
|
| + && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
|
| return true;
|
|
|
| /* We cannot propagate ssa names that occur in abnormal phi nodes. */
|
| @@ -281,14 +288,14 @@ can_propagate_from (gimple def_stmt)
|
| then we cannot apply optimizations as some targets require
|
| function pointers to be canonicalized and in this case this
|
| optimization could eliminate a necessary canonicalization. */
|
| - if (is_gimple_assign (def_stmt)
|
| - && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
|
| + if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
|
| {
|
| tree rhs = gimple_assign_rhs1 (def_stmt);
|
| if (POINTER_TYPE_P (TREE_TYPE (rhs))
|
| && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
|
| return false;
|
| }
|
| +
|
| return true;
|
| }
|
|
|
| @@ -324,7 +331,7 @@ remove_prop_source_from_use (tree name, gimple up_to_stmt)
|
|
|
| /* Return the rhs of a gimple_assign STMT in a form of a single tree,
|
| converted to type TYPE.
|
| -
|
| +
|
| This should disappear, but is needed so we can combine expressions and use
|
| the fold() interfaces. Long term, we need to develop folding and combine
|
| routines that deal with gimple exclusively. */
|
| @@ -332,9 +339,10 @@ remove_prop_source_from_use (tree name, gimple up_to_stmt)
|
| static tree
|
| rhs_to_tree (tree type, gimple stmt)
|
| {
|
| + location_t loc = gimple_location (stmt);
|
| enum tree_code code = gimple_assign_rhs_code (stmt);
|
| if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
|
| - return fold_build2 (code, type, gimple_assign_rhs1 (stmt),
|
| + return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
|
| gimple_assign_rhs2 (stmt));
|
| else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
|
| return build1 (code, type, gimple_assign_rhs1 (stmt));
|
| @@ -351,14 +359,14 @@ rhs_to_tree (tree type, gimple stmt)
|
| considered simplified. */
|
|
|
| static tree
|
| -combine_cond_expr_cond (enum tree_code code, tree type,
|
| +combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
|
| tree op0, tree op1, bool invariant_only)
|
| {
|
| tree t;
|
|
|
| gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
|
|
|
| - t = fold_binary (code, type, op0, op1);
|
| + t = fold_binary_loc (loc, code, type, op0, op1);
|
| if (!t)
|
| return NULL_TREE;
|
|
|
| @@ -379,13 +387,14 @@ combine_cond_expr_cond (enum tree_code code, tree type,
|
| in GIMPLE_COND statement STMT into the conditional if that simplifies it.
|
| Returns zero if no statement was changed, one if there were
|
| changes and two if cfg_cleanup needs to run.
|
| -
|
| +
|
| This must be kept in sync with forward_propagate_into_cond. */
|
|
|
| static int
|
| forward_propagate_into_gimple_cond (gimple stmt)
|
| {
|
| - int did_something = 0;
|
| + int did_something = 0;
|
| + location_t loc = gimple_location (stmt);
|
|
|
| do {
|
| tree tmp = NULL_TREE;
|
| @@ -406,7 +415,7 @@ forward_propagate_into_gimple_cond (gimple stmt)
|
| {
|
| tree op1 = gimple_cond_rhs (stmt);
|
| rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
|
| - tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
|
| + tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
|
| op1, !single_use0_p);
|
| }
|
| /* If that wasn't successful, try the second operand. */
|
| @@ -420,15 +429,17 @@ forward_propagate_into_gimple_cond (gimple stmt)
|
| return did_something;
|
|
|
| rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
|
| - tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
|
| - !single_use1_p);
|
| + tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
|
| + rhs1, !single_use1_p);
|
| }
|
| /* If that wasn't successful either, try both operands. */
|
| if (tmp == NULL_TREE
|
| && rhs0 != NULL_TREE
|
| && rhs1 != NULL_TREE)
|
| - tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
|
| - fold_convert (TREE_TYPE (rhs0), rhs1),
|
| + tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
|
| + fold_convert_loc (loc,
|
| + TREE_TYPE (rhs0),
|
| + rhs1),
|
| !(single_use0_p && single_use1_p));
|
| }
|
|
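| A sketch of the combination this performs (hypothetical SSA names):
|
| /* x_1 = a_2 < b_3; if (x_1 != 0) is combined, via rhs_to_tree and
|
| combine_cond_expr_cond, into if (a_2 < b_3), with the folded result
|
| now carrying the location of the GIMPLE_COND. */
|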
|
| @@ -480,6 +491,7 @@ static int
|
| forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
|
| {
|
| gimple stmt = gsi_stmt (*gsi_p);
|
| + location_t loc = gimple_location (stmt);
|
| int did_something = 0;
|
|
|
| do {
|
| @@ -501,7 +513,8 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
|
| {
|
| tree op1 = TREE_OPERAND (cond, 1);
|
| rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
|
| - tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
|
| + tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
|
| + boolean_type_node,
|
| rhs0, op1, !single_use0_p);
|
| }
|
| /* If that wasn't successful, try the second operand. */
|
| @@ -515,16 +528,20 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
|
| return did_something;
|
|
|
| rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
|
| - tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
|
| + tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
|
| + boolean_type_node,
|
| op0, rhs1, !single_use1_p);
|
| }
|
| /* If that wasn't successful either, try both operands. */
|
| if (tmp == NULL_TREE
|
| && rhs0 != NULL_TREE
|
| && rhs1 != NULL_TREE)
|
| - tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
|
| - rhs0, fold_convert (TREE_TYPE (rhs0),
|
| - rhs1),
|
| + tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
|
| + boolean_type_node,
|
| + rhs0,
|
| + fold_convert_loc (loc,
|
| + TREE_TYPE (rhs0),
|
| + rhs1),
|
| !(single_use0_p && single_use1_p));
|
| }
|
| else if (TREE_CODE (cond) == SSA_NAME)
|
| @@ -535,7 +552,7 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
|
| return did_something;
|
|
|
| rhs0 = gimple_assign_rhs1 (def_stmt);
|
| - tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
|
| + tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
|
| build_int_cst (TREE_TYPE (rhs0), 0),
|
| false);
|
| }
|
| @@ -573,7 +590,7 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
|
| return did_something;
|
| }
|
|
|
| -/* We've just substituted an ADDR_EXPR into stmt. Update all the
|
| +/* We've just substituted an ADDR_EXPR into stmt. Update all the
|
| relevant data structures to match. */
|
|
|
| static void
|
| @@ -586,8 +603,6 @@ tidy_after_forward_propagate_addr (gimple stmt)
|
|
|
| if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
|
| recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
|
| -
|
| - mark_symbols_for_renaming (stmt);
|
| }
|
|
|
| /* DEF_RHS contains the address of the 0th element in an array.
|
| @@ -610,8 +625,13 @@ forward_propagate_addr_into_variable_array_index (tree offset,
|
| tree def_rhs,
|
| gimple_stmt_iterator *use_stmt_gsi)
|
| {
|
| - tree index;
|
| + tree index, tunit;
|
| gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
|
| + tree tmp;
|
| +
|
| + tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
|
| + if (!host_integerp (tunit, 1))
|
| + return false;
|
|
|
| /* Get the offset's defining statement. */
|
| offset_def = SSA_NAME_DEF_STMT (offset);
|
| @@ -621,7 +641,7 @@ forward_propagate_addr_into_variable_array_index (tree offset,
|
| along in case the element size is one. In that case, however, we do not
|
| allow multiplications because they can be computing an index into a
|
| higher-level dimension (PR 37861). */
|
| - if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
|
| + if (integer_onep (tunit))
|
| {
|
| if (is_gimple_assign (offset_def)
|
| && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
|
| @@ -637,21 +657,45 @@ forward_propagate_addr_into_variable_array_index (tree offset,
|
| return false;
|
|
|
| /* The RHS of the statement which defines OFFSET must be a
|
| - multiplication of an object by the size of the array elements.
|
| + multiplication of an object by the size of the array elements.
|
| This implicitly verifies that the size of the array elements
|
| is constant. */
|
| - offset = gimple_assign_rhs1 (offset_def);
|
| - if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
|
| - || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
|
| - || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
|
| - TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
|
| + if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
|
| + && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
|
| + && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
|
| + {
|
| + /* The first operand to the MULT_EXPR is the desired index. */
|
| + index = gimple_assign_rhs1 (offset_def);
|
| + }
|
| + /* If we have idx * tunit + CST * tunit, re-associate that. */
|
| + else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
|
| + || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
|
| + && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
|
| + && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
|
| + && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
|
| + gimple_assign_rhs2 (offset_def),
|
| + tunit)) != NULL_TREE)
|
| + {
|
| + gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
|
| + if (is_gimple_assign (offset_def2)
|
| + && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
|
| + && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
|
| + && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
|
| + {
|
| + index = fold_build2 (gimple_assign_rhs_code (offset_def),
|
| + TREE_TYPE (offset),
|
| + gimple_assign_rhs1 (offset_def2), tmp);
|
| + }
|
| + else
|
| + return false;
|
| + }
|
| + else
|
| return false;
|
| -
|
| - /* The first operand to the MULT_EXPR is the desired index. */
|
| - index = offset;
|
| }
|
|
|
| /* Replace the pointer addition with array indexing. */
|
| + index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
|
| + true, GSI_SAME_STMT);
|
| gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
|
| use_stmt = gsi_stmt (*use_stmt_gsi);
|
| TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
|
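| A worked instance of the new re-association case (a sketch; assume int
|
| elements, so tunit == 4, and hypothetical SSA names):
|
| /* ptr_1 = &x[0]; off_2 = i_3 * 4; off_4 = off_2 + 8;
|
| p_5 = ptr_1 p+ off_4;
|
| 8 divides exactly by tunit (tmp == 2), offset_def2 is the
|
| multiplication by tunit, so index becomes i_3 + 2 and the use
|
| statement is rewritten as p_5 = &x[i_3 + 2]; */
|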
| @@ -684,6 +728,7 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| gimple use_stmt = gsi_stmt (*use_stmt_gsi);
|
| enum tree_code rhs_code;
|
| bool res = true;
|
| + bool addr_p = false;
|
|
|
| gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
|
|
|
| @@ -706,7 +751,11 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| address which we cannot do in a single statement. */
|
| if (!single_use_p
|
| || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
|
| - && !is_gimple_min_invariant (def_rhs)))
|
| + && (!is_gimple_min_invariant (def_rhs)
|
| + || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
|
| + && POINTER_TYPE_P (TREE_TYPE (def_rhs))
|
| + && (TYPE_PRECISION (TREE_TYPE (lhs))
|
| + > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
|
| return forward_propagate_addr_expr (lhs, def_rhs);
|
|
|
| gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
|
| @@ -717,14 +766,14 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| return true;
|
| }
|
|
|
| - /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
|
| + /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
|
| ADDR_EXPR will not appear on the LHS. */
|
| lhsp = gimple_assign_lhs_ptr (use_stmt);
|
| while (handled_component_p (*lhsp))
|
| lhsp = &TREE_OPERAND (*lhsp, 0);
|
| lhs = *lhsp;
|
|
|
| - /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
|
| + /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
|
| propagate the ADDR_EXPR into the use of NAME and fold the result. */
|
| if (TREE_CODE (lhs) == INDIRECT_REF
|
| && TREE_OPERAND (lhs, 0) == name)
|
| @@ -752,8 +801,12 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
|
| nodes from the RHS. */
|
| rhsp = gimple_assign_rhs1_ptr (use_stmt);
|
| - while (handled_component_p (*rhsp)
|
| - || TREE_CODE (*rhsp) == ADDR_EXPR)
|
| + if (TREE_CODE (*rhsp) == ADDR_EXPR)
|
| + {
|
| + rhsp = &TREE_OPERAND (*rhsp, 0);
|
| + addr_p = true;
|
| + }
|
| + while (handled_component_p (*rhsp))
|
| rhsp = &TREE_OPERAND (*rhsp, 0);
|
| rhs = *rhsp;
|
|
|
| @@ -769,7 +822,7 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| return res;
|
| }
|
|
|
| - /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
|
| + /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
|
| propagate the ADDR_EXPR into the use of NAME and try to
|
| create a VCE and fold the result. */
|
| if (TREE_CODE (rhs) == INDIRECT_REF
|
| @@ -783,7 +836,9 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| && !TYPE_VOLATILE (TREE_TYPE (rhs))
|
| && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
|
| && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
|
| - TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0))
|
| + TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
|
| + /* Make sure we only do TBAA compatible replacements. */
|
| + && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
|
| {
|
| tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
|
| new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
|
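| The new alias-set test keeps this path from introducing a type-punning
|
| VIEW_CONVERT_EXPR; a sketch of a case it now rejects (hypothetical):
|
| /* With float f; and a dereference *(int *) p_1 where p_1 = &f,
|
| TYPE_SIZE matches, but get_alias_set differs between f and the
|
| int-typed load, so the V_C_E replacement is not done. */
|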
| @@ -800,11 +855,14 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| return res;
|
| }
|
| /* If the defining rhs comes from an indirect reference, then do not
|
| - convert into a VIEW_CONVERT_EXPR. */
|
| + convert into a VIEW_CONVERT_EXPR. Likewise if we'll end up taking
|
| + the address of a V_C_E of a constant. */
|
| def_rhs_base = TREE_OPERAND (def_rhs, 0);
|
| while (handled_component_p (def_rhs_base))
|
| def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
|
| - if (!INDIRECT_REF_P (def_rhs_base))
|
| + if (!INDIRECT_REF_P (def_rhs_base)
|
| + && (!addr_p
|
| + || !is_gimple_min_invariant (def_rhs)))
|
| {
|
| /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
|
| reference. Place it there and fold the thing. */
|
| @@ -828,18 +886,29 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| array_ref = TREE_OPERAND (def_rhs, 0);
|
| if (TREE_CODE (array_ref) != ARRAY_REF
|
| || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
|
| - || !integer_zerop (TREE_OPERAND (array_ref, 1)))
|
| + || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
|
| return false;
|
|
|
| rhs2 = gimple_assign_rhs2 (use_stmt);
|
| - /* Try to optimize &x[0] p+ C where C is a multiple of the size
|
| - of the elements in X into &x[C/element size]. */
|
| + /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
|
| + of the elements in X into &x[C1 + C2/element size]. */
|
| if (TREE_CODE (rhs2) == INTEGER_CST)
|
| {
|
| - tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
|
| - array_ref, rhs2);
|
| + tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
|
| + TREE_TYPE (def_rhs),
|
| + def_rhs, rhs2);
|
| if (new_rhs)
|
| {
|
| + tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
|
| + new_rhs = unshare_expr (new_rhs);
|
| + if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
|
| + {
|
| + if (!is_gimple_min_invariant (new_rhs))
|
| + new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
|
| + true, NULL_TREE,
|
| + true, GSI_SAME_STMT);
|
| + new_rhs = fold_convert (type, new_rhs);
|
| + }
|
| gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
|
| use_stmt = gsi_stmt (*use_stmt_gsi);
|
| update_stmt (use_stmt);
|
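| A worked instance of the generalized constant case (a sketch; assume
|
| int x[10], i.e. 4-byte elements):
|
| /* p_1 = &x[2]; q_2 = p_1 p+ 8;
|
| 8 is a multiple of the element size, so maybe_fold_stmt_addition
|
| produces q_2 = &x[2 + 8/4], i.e. &x[4]. */
|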
| @@ -853,6 +922,7 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
|
| array elements, then the result is converted into the proper
|
| type for the arithmetic. */
|
| if (TREE_CODE (rhs2) == SSA_NAME
|
| + && integer_zerop (TREE_OPERAND (array_ref, 1))
|
| && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
|
| /* Avoid problems with IVopts creating PLUS_EXPRs with a
|
| different type than their operands. */
|
| @@ -887,7 +957,8 @@ forward_propagate_addr_expr (tree name, tree rhs)
|
| there is nothing we can do. */
|
| if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
|
| {
|
| - all = false;
|
| + if (!is_gimple_debug (use_stmt))
|
| + all = false;
|
| continue;
|
| }
|
|
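| A sketch of why debug statements are now skipped (hypothetical GIMPLE):
|
| /* # DEBUG D#1 => ptr_1 is not a real use, so ptr_1 = &a; can still
|
| be fully propagated and the copy removed; the debug machinery
|
| adjusts or resets the bind on its own. */
|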
|
| @@ -900,25 +971,28 @@ forward_propagate_addr_expr (tree name, tree rhs)
|
| continue;
|
| }
|
|
|
| - push_stmt_changes (&use_stmt);
|
| -
|
| {
|
| gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
|
| result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
|
| single_use_p);
|
| - use_stmt = gsi_stmt (gsi);
|
| + /* If the use has moved to a different statement adjust
|
| + the update machinery for the old statement too. */
|
| + if (use_stmt != gsi_stmt (gsi))
|
| + {
|
| + update_stmt (use_stmt);
|
| + use_stmt = gsi_stmt (gsi);
|
| + }
|
| +
|
| + update_stmt (use_stmt);
|
| }
|
| all &= result;
|
|
|
| - pop_stmt_changes (&use_stmt);
|
| -
|
| /* Remove intermediate now unused copy and conversion chains. */
|
| use_rhs = gimple_assign_rhs1 (use_stmt);
|
| if (result
|
| && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
|
| - && (TREE_CODE (use_rhs) == SSA_NAME
|
| - || (CONVERT_EXPR_P (use_rhs)
|
| - && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
|
| + && TREE_CODE (use_rhs) == SSA_NAME
|
| + && has_zero_uses (gimple_assign_lhs (use_stmt)))
|
| {
|
| gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
|
| release_defs (use_stmt);
|
| @@ -989,7 +1063,9 @@ forward_propagate_comparison (gimple stmt)
|
| gimple_assign_rhs1 (stmt),
|
| gimple_assign_rhs2 (stmt));
|
|
|
| - tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
|
| + tmp = combine_cond_expr_cond (gimple_location (use_stmt),
|
| + code, TREE_TYPE (lhs),
|
| + cond, cst, false);
|
| if (tmp == NULL_TREE)
|
| return false;
|
| }
|
| @@ -1039,9 +1115,9 @@ forward_propagate_comparison (gimple stmt)
|
|
|
| /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
|
| If so, we can change STMT into lhs = y which can later be copy
|
| - propagated. Similarly for negation.
|
| + propagated. Similarly for negation.
|
|
|
| - This could trivially be formulated as a forward propagation
|
| + This could trivially be formulated as a forward propagation
|
| to immediate uses. However, we already had an implementation
|
| from DOM which used backward propagation via the use-def links.
|
|
|
| @@ -1132,6 +1208,46 @@ simplify_gimple_switch (gimple stmt)
|
| }
|
| }
|
|
|
| +/* Run bitwise AND assignments through the folder. If the first argument is
|
| + an SSA name that is itself the result of a typecast of an ADDR_EXPR to an
|
| + integer, feed the ADDR_EXPR to the folder rather than the SSA name. */
|
| +
|
| +static void
|
| +simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
|
| +{
|
| + tree res;
|
| + tree arg1 = gimple_assign_rhs1 (stmt);
|
| + tree arg2 = gimple_assign_rhs2 (stmt);
|
| +
|
| + if (TREE_CODE (arg2) != INTEGER_CST)
|
| + return;
|
| +
|
| + if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
|
| + {
|
| + gimple def = SSA_NAME_DEF_STMT (arg1);
|
| +
|
| + if (gimple_assign_cast_p (def)
|
| + && INTEGRAL_TYPE_P (gimple_expr_type (def)))
|
| + {
|
| + tree op = gimple_assign_rhs1 (def);
|
| +
|
| + if (TREE_CODE (op) == ADDR_EXPR)
|
| + arg1 = op;
|
| + }
|
| + }
|
| +
|
| + res = fold_binary_loc (gimple_location (stmt),
|
| + BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
|
| + arg1, arg2);
|
| + if (res && is_gimple_min_invariant (res))
|
| + {
|
| + gimple_assign_set_rhs_from_tree (gsi, res);
|
| + update_stmt (stmt);
|
| + }
|
| + return;
|
| +}
|
| +
|
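| In GIMPLE terms, a sketch of what the new helper folds (hypothetical
|
| names; assume a is known to be 4-byte aligned):
|
| /* D.1_1 = (long) &a; D.2_2 = D.1_1 & 3;
|
| The ADDR_EXPR &a is handed to fold_binary_loc, which can use the
|
| alignment of a to fold the AND to 0, turning the second statement
|
| into D.2_2 = 0; */
|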
| /* Main entry point for the forward propagation optimizer. */
|
|
|
| static unsigned int
|
| @@ -1182,6 +1298,15 @@ tree_ssa_forward_propagate_single_use_vars (void)
|
| else
|
| gsi_next (&gsi);
|
| }
|
| + else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
|
| + && is_gimple_min_invariant (rhs))
|
| + {
|
| + /* Make sure to fold &a[0] + off_1 here. */
|
| + fold_stmt_inplace (stmt);
|
| + update_stmt (stmt);
|
| + if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
|
| + gsi_next (&gsi);
|
| + }
|
| else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
|
| || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
|
| && TREE_CODE (rhs) == SSA_NAME)
|
| @@ -1214,6 +1339,11 @@ tree_ssa_forward_propagate_single_use_vars (void)
|
| else
|
| gsi_next (&gsi);
|
| }
|
| + else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
|
| + {
|
| + simplify_bitwise_and (&gsi, stmt);
|
| + gsi_next (&gsi);
|
| + }
|
| else
|
| gsi_next (&gsi);
|
| }
|
| @@ -1247,10 +1377,10 @@ tree_ssa_forward_propagate_single_use_vars (void)
|
| static bool
|
| gate_forwprop (void)
|
| {
|
| - return 1;
|
| + return flag_tree_forwprop;
|
| }
|
|
|
| -struct gimple_opt_pass pass_forwprop =
|
| +struct gimple_opt_pass pass_forwprop =
|
| {
|
| {
|
| GIMPLE_PASS,
|
|
|
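| With the gate change the pass is controlled by -ftree-forwprop (on by
|
| default when optimizing) instead of running unconditionally; for
|
| example, the illustrative invocation
|
| gcc -O2 -fno-tree-forwprop foo.c
|
| compiles with the pass disabled.
|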