Index: gcc/gcc/expr.c
diff --git a/gcc/gcc/expr.c b/gcc/gcc/expr.c
index 8cd5dc9b62a931e50394b9ce044d41a75192a754..93c9f98c17cdb2076f9d18c0b86f756ebd08bc4c 100644
--- a/gcc/gcc/expr.c
+++ b/gcc/gcc/expr.c
@@ -54,6 +54,7 @@ along with GCC; see the file COPYING3. If not see
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"
+#include "ssaexpand.h"
/* Decide whether a function's arguments should be processed
from first to last or from last to first.
@@ -90,7 +91,7 @@ int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
be performed. */
-struct move_by_pieces
+struct move_by_pieces_d
{
rtx to;
rtx to_addr;
@@ -108,7 +109,7 @@ struct move_by_pieces
/* This structure is used by store_by_pieces to describe the clear to
be performed. */
-struct store_by_pieces
+struct store_by_pieces_d
{
rtx to;
rtx to_addr;
@@ -125,16 +126,16 @@ static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
unsigned int,
unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
- struct move_by_pieces *);
+ struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
-static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
+static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
- struct store_by_pieces *);
+ struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
@@ -151,7 +152,7 @@ static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
-static rtx do_store_flag (tree, rtx, enum machine_mode, int);
+static rtx do_store_flag (sepops, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
@@ -234,7 +235,6 @@ enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
-enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
@@ -268,7 +268,7 @@ init_expr_target (void)
reg = gen_rtx_REG (VOIDmode, -1);
insn = rtx_alloc (INSN);
- pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
+ pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
PATTERN (insn) = pat;
for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
@@ -588,27 +588,9 @@ convert_move (rtx to, rtx from, int unsignedp)
if (unsignedp)
fill_value = const0_rtx;
else
- {
-#ifdef HAVE_slt
- if (HAVE_slt
- && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
- && STORE_FLAG_VALUE == -1)
- {
- emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
- lowpart_mode, 0);
- fill_value = gen_reg_rtx (word_mode);
- emit_insn (gen_slt (fill_value));
- }
- else
-#endif
- {
- fill_value
- = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
- size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
- NULL_RTX, 0);
- fill_value = convert_to_mode (word_mode, fill_value, 1);
- }
- }
+ fill_value = emit_store_flag (gen_reg_rtx (word_mode),
+ LT, lowfrom, const0_rtx,
+ VOIDmode, 0, -1);
/* Fill the remaining words. */
for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
@@ -790,7 +772,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
- && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
+ && CONST_INT_P (x) && INTVAL (x) < 0)
{
HOST_WIDE_INT val = INTVAL (x);
@@ -811,7 +793,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
non-volatile MEM. Except for the constant case where MODE is no
wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
- if ((GET_CODE (x) == CONST_INT
+ if ((CONST_INT_P (x)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
|| (GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_CLASS (oldmode) == MODE_INT
@@ -828,7 +810,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
/* ?? If we don't know OLDMODE, we have to assume here that
X does not need sign- or zero-extension. This may not be
the case, but it's the best we can do. */
- if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
+ if (CONST_INT_P (x) && oldmode != VOIDmode
&& GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
{
HOST_WIDE_INT val = INTVAL (x);
@@ -894,7 +876,9 @@ rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
unsigned int align, int endp)
{
- struct move_by_pieces data;
+ struct move_by_pieces_d data;
+ enum machine_mode to_addr_mode, from_addr_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
rtx to_addr, from_addr = XEXP (from, 0);
unsigned int max_size = MOVE_MAX_PIECES + 1;
enum machine_mode mode = VOIDmode, tmode;
@@ -906,6 +890,7 @@ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
data.from_addr = from_addr;
if (to)
{
+ to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
to_addr = XEXP (to, 0);
data.to = to;
data.autinc_to
@@ -916,6 +901,7 @@ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
}
else
{
+ to_addr_mode = VOIDmode;
to_addr = NULL_RTX;
data.to = NULL_RTX;
data.autinc_to = 1;
@@ -951,32 +937,34 @@ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
{
- data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
+ data.from_addr = copy_to_mode_reg (from_addr_mode,
+ plus_constant (from_addr, len));
data.autinc_from = 1;
data.explicit_inc_from = -1;
}
if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
{
- data.from_addr = copy_addr_to_reg (from_addr);
+ data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
data.autinc_from = 1;
data.explicit_inc_from = 1;
}
if (!data.autinc_from && CONSTANT_P (from_addr))
- data.from_addr = copy_addr_to_reg (from_addr);
+ data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
{
- data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
+ data.to_addr = copy_to_mode_reg (to_addr_mode,
+ plus_constant (to_addr, len));
data.autinc_to = 1;
data.explicit_inc_to = -1;
}
if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
{
- data.to_addr = copy_addr_to_reg (to_addr);
+ data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
data.autinc_to = 1;
data.explicit_inc_to = 1;
}
if (!data.autinc_to && CONSTANT_P (to_addr))
- data.to_addr = copy_addr_to_reg (to_addr);
+ data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
}
tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
@@ -1031,7 +1019,8 @@ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
else
- data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
+ data.to_addr = copy_to_mode_reg (to_addr_mode,
+ plus_constant (data.to_addr,
-1));
}
to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
@@ -1106,7 +1095,7 @@ move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
- struct move_by_pieces *data)
+ struct move_by_pieces_d *data)
{
unsigned int size = GET_MODE_SIZE (mode);
rtx to1 = NULL_RTX, from1;
@@ -1205,6 +1194,7 @@ emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
}
align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
+ gcc_assert (align >= BITS_PER_UNIT);
gcc_assert (MEM_P (x));
gcc_assert (MEM_P (y));
@@ -1217,7 +1207,7 @@ emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
/* Set MEM_SIZE as appropriate for this block copy. The main place this
can be incorrect is coming from __builtin_memcpy. */
- if (GET_CODE (size) == CONST_INT)
+ if (CONST_INT_P (size))
{
if (INTVAL (size) == 0)
return 0;
@@ -1228,12 +1218,14 @@ emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
set_mem_size (y, size);
}
- if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
+ if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
move_by_pieces (x, y, INTVAL (size), align, 0);
else if (emit_block_move_via_movmem (x, y, size, align,
expected_align, expected_size))
;
- else if (may_use_call)
+ else if (may_use_call
+ && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
+ && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
retval = emit_block_move_via_libcall (x, y, size,
method == BLOCK_OP_TAILCALL);
else
@@ -1331,7 +1323,7 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
here because if SIZE is less than the mode mask, as it is
returned by the macro, it will definitely be less than the
actual mode mask. */
- && ((GET_CODE (size) == CONST_INT
+ && ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
@@ -1442,7 +1434,7 @@ init_block_move_fn (const char *asmspec)
const_ptr_type_node, sizetype,
NULL_TREE);
- fn = build_decl (FUNCTION_DECL, fn, args);
+ fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
DECL_EXTERNAL (fn) = 1;
TREE_PUBLIC (fn) = 1;
DECL_ARTIFICIAL (fn) = 1;
@@ -1484,6 +1476,10 @@ emit_block_move_via_loop (rtx x, rtx y, rtx size,
unsigned int align ATTRIBUTE_UNUSED)
{
rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
+ enum machine_mode x_addr_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
+ enum machine_mode y_addr_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
enum machine_mode iter_mode;
iter_mode = GET_MODE (size);
@@ -1503,9 +1499,13 @@ emit_block_move_via_loop (rtx x, rtx y, rtx size,
emit_jump (cmp_label);
emit_label (top_label);
- tmp = convert_modes (Pmode, iter_mode, iter, true);
- x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
- y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
+ tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
+ x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
+
+ if (x_addr_mode != y_addr_mode)
+ tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
+ y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
+
x = change_address (x, QImode, x_addr);
y = change_address (y, QImode, y_addr);
@@ -2267,6 +2267,26 @@ use_group_regs (rtx *call_fusage, rtx regs)
use_reg (call_fusage, reg);
}
}
+
+/* Return the defining gimple statement for SSA_NAME NAME if it is an
+ assignment and the code of the expression on the RHS is CODE. Return
+ NULL otherwise. */
+
+static gimple
+get_def_for_expr (tree name, enum tree_code code)
+{
+ gimple def_stmt;
+
+ if (TREE_CODE (name) != SSA_NAME)
+ return NULL;
+
+ def_stmt = get_gimple_for_ssa_name (name);
+ if (!def_stmt
+ || gimple_assign_rhs_code (def_stmt) != code)
+ return NULL;
+
+ return def_stmt;
+}
/* Determine whether the LEN bytes generated by CONSTFUN can be
@@ -2292,7 +2312,7 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len,
if (len == 0)
return 1;
- if (! (memsetp
+ if (! (memsetp
? SET_BY_PIECES_P (len, align)
: STORE_BY_PIECES_P (len, align)))
return 0;
@@ -2380,7 +2400,9 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
void *constfundata, unsigned int align, bool memsetp, int endp)
{
- struct store_by_pieces data;
+ enum machine_mode to_addr_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
+ struct store_by_pieces_d data;
if (len == 0)
{
@@ -2408,7 +2430,8 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
else
- data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
+ data.to_addr = copy_to_mode_reg (to_addr_mode,
+ plus_constant (data.to_addr,
-1));
}
to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
@@ -2432,7 +2455,7 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
- struct store_by_pieces data;
+ struct store_by_pieces_d data;
if (len == 0)
return;
@@ -2460,9 +2483,11 @@ clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
rtx with BLKmode). ALIGN is maximum alignment we can assume. */
static void
-store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
+store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
unsigned int align ATTRIBUTE_UNUSED)
{
+ enum machine_mode to_addr_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
rtx to_addr = XEXP (data->to, 0);
unsigned int max_size = STORE_MAX_PIECES + 1;
enum machine_mode mode = VOIDmode, tmode;
@@ -2494,7 +2519,8 @@ store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
{
- data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
+ data->to_addr = copy_to_mode_reg (to_addr_mode,
+ plus_constant (to_addr, data->len));
data->autinc_to = 1;
data->explicit_inc_to = -1;
}
@@ -2502,13 +2528,13 @@ store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
&& ! data->autinc_to)
{
- data->to_addr = copy_addr_to_reg (to_addr);
+ data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
data->autinc_to = 1;
data->explicit_inc_to = 1;
}
if ( !data->autinc_to && CONSTANT_P (to_addr))
- data->to_addr = copy_addr_to_reg (to_addr);
+ data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
}
tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
@@ -2558,7 +2584,7 @@ store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
- struct store_by_pieces *data)
+ struct store_by_pieces_d *data)
{
unsigned int size = GET_MODE_SIZE (mode);
rtx to1, cst;
@@ -2606,7 +2632,7 @@ clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
/* If OBJECT is not BLKmode and SIZE is the same size as its mode,
just move a zero. Otherwise, do this a piece at a time. */
if (mode != BLKmode
- && GET_CODE (size) == CONST_INT
+ && CONST_INT_P (size)
&& INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
{
rtx zero = CONST0_RTX (mode);
@@ -2633,15 +2659,17 @@ clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
align = MEM_ALIGN (object);
- if (GET_CODE (size) == CONST_INT
+ if (CONST_INT_P (size)
&& CLEAR_BY_PIECES_P (INTVAL (size), align))
clear_by_pieces (object, INTVAL (size), align);
else if (set_storage_via_setmem (object, size, const0_rtx, align,
expected_align, expected_size))
;
- else
+ else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
return set_storage_via_libcall (object, size, const0_rtx,
method == BLOCK_OP_TAILCALL);
+ else
+ gcc_unreachable ();
return NULL;
}
@@ -2679,14 +2707,13 @@ set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
for returning pointers, we could end up generating incorrect code. */
object_tree = make_tree (ptr_type_node, object);
- if (GET_CODE (val) != CONST_INT)
+ if (!CONST_INT_P (val))
val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
size_tree = make_tree (sizetype, size);
val_tree = make_tree (integer_type_node, val);
fn = clear_storage_libcall_fn (true);
- call_expr = build_call_expr (fn, 3,
- object_tree, integer_zero_node, size_tree);
+ call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
CALL_EXPR_TAILCALL (call_expr) = tailcall;
retval = expand_normal (call_expr);
@@ -2712,7 +2739,7 @@ init_block_clear_fn (const char *asmspec)
integer_type_node, sizetype,
NULL_TREE);
- fn = build_decl (FUNCTION_DECL, fn, args);
+ fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
DECL_EXTERNAL (fn) = 1;
TREE_PUBLIC (fn) = 1;
DECL_ARTIFICIAL (fn) = 1;
@@ -2772,7 +2799,7 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
BITS_PER_HOST_WIDE_INT here because if SIZE is less than
the mode mask, as it is returned by the macro, it will
definitely be less than the actual mode mask. */
- && ((GET_CODE (size) == CONST_INT
+ && ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
@@ -3044,7 +3071,7 @@ emit_move_resolve_push (enum machine_mode mode, rtx x)
HOST_WIDE_INT val;
gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
- gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
+ gcc_assert (CONST_INT_P (XEXP (expr, 1)));
val = INTVAL (XEXP (expr, 1));
if (GET_CODE (expr) == MINUS)
val = -val;
@@ -3430,12 +3457,14 @@ emit_move_insn (rtx x, rtx y)
/* If X or Y are memory references, verify that their addresses are valid
for the machine. */
if (MEM_P (x)
- && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
+ && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
+ MEM_ADDR_SPACE (x))
&& ! push_operand (x, GET_MODE (x))))
x = validize_mem (x);
if (MEM_P (y)
- && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
+ && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
+ MEM_ADDR_SPACE (y)))
y = validize_mem (y);
gcc_assert (mode != BLKmode);
@@ -3570,7 +3599,7 @@ push_block (rtx size, int extra, int below)
}
else
{
- if (GET_CODE (size) == CONST_INT)
+ if (CONST_INT_P (size))
temp = plus_constant (virtual_outgoing_args_rtx,
-INTVAL (size) - (below ? 0 : extra));
else if (extra != 0 && !below)
@@ -3781,7 +3810,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
on the stack for alignment purposes. */
if (args_addr == 0
&& PUSH_ARGS
- && GET_CODE (size) == CONST_INT
+ && CONST_INT_P (size)
&& skip == 0
&& MEM_ALIGN (xinner) >= align
&& (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
@@ -3814,7 +3843,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
/* Deduct words put into registers from the size we must copy. */
if (partial != 0)
{
- if (GET_CODE (size) == CONST_INT)
+ if (CONST_INT_P (size))
size = GEN_INT (INTVAL (size) - used);
else
size = expand_binop (GET_MODE (size), sub_optab, size,
@@ -3830,7 +3859,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
temp = push_block (size, extra, where_pad == downward);
extra = 0;
}
- else if (GET_CODE (args_so_far) == CONST_INT)
+ else if (CONST_INT_P (args_so_far))
temp = memory_address (BLKmode,
plus_constant (args_addr,
skip + INTVAL (args_so_far)));
@@ -3946,7 +3975,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
else
#endif
{
- if (GET_CODE (args_so_far) == CONST_INT)
+ if (CONST_INT_P (args_so_far))
addr
= memory_address (mode,
plus_constant (args_addr,
@@ -4206,6 +4235,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
if (offset != 0)
{
+ enum machine_mode address_mode;
rtx offset_rtx;
if (!MEM_P (to_rtx))
@@ -4218,13 +4248,10 @@ expand_assignment (tree to, tree from, bool nontemporal)
}
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (offset_rtx) != Pmode)
- offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
-#else
- if (GET_MODE (offset_rtx) != ptr_mode)
- offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
-#endif
+ address_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
+ if (GET_MODE (offset_rtx) != address_mode)
+ offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
/* A constant address in TO_RTX can have VOIDmode, we must not try
to call force_reg for that case. Avoid that case. */
@@ -4245,8 +4272,19 @@ expand_assignment (tree to, tree from, bool nontemporal)
offset));
}
+ /* No action is needed if the target is not a memory and the field
+ lies completely outside that target. This can occur if the source
+ code contains an out-of-bounds access to a small array. */
+ if (!MEM_P (to_rtx)
+ && GET_MODE (to_rtx) != BLKmode
+ && (unsigned HOST_WIDE_INT) bitpos
+ >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
+ {
+ expand_normal (from);
+ result = NULL;
+ }
/* Handle expand_expr of a complex value returning a CONCAT. */
- if (GET_CODE (to_rtx) == CONCAT)
+ else if (GET_CODE (to_rtx) == CONCAT)
{
if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
{
@@ -4294,6 +4332,41 @@ expand_assignment (tree to, tree from, bool nontemporal)
return;
}
+ else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
+ {
+ addr_space_t as = ADDR_SPACE_GENERIC;
+ enum machine_mode mode, op_mode1;
+ enum insn_code icode;
+ rtx reg, addr, mem, insn;
+
+ if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
+ as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
+
+ reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ reg = force_not_mem (reg);
+
+ mode = TYPE_MODE (TREE_TYPE (to));
+ addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
+ EXPAND_SUM);
+ addr = memory_address_addr_space (mode, addr, as);
+ mem = gen_rtx_MEM (mode, addr);
+
+ set_mem_attributes (mem, to, 0);
+ set_mem_addr_space (mem, as);
+
+ icode = movmisalign_optab->handlers[mode].insn_code;
+ gcc_assert (icode != CODE_FOR_nothing);
+
+ op_mode1 = insn_data[icode].operand[1].mode;
+ if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
+ && op_mode1 != VOIDmode)
+ reg = copy_to_mode_reg (op_mode1, reg);
+
+ insn = GEN_FCN (icode) (mem, reg);
+ emit_insn (insn);
+ return;
+ }
+
/* If the rhs is a function call and its value is not an aggregate,
call the function before we start to compute the lhs.
This is needed for correct code for cases such as
@@ -4303,12 +4376,13 @@ expand_assignment (tree to, tree from, bool nontemporal)
Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
since it might be a promoted variable where the zero- or sign- extension
needs to be done. Handling this in the normal way is safe because no
- computation is done before the call. */
+ computation is done before the call. The same is true for SSA names. */
if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
&& COMPLETE_TYPE_P (TREE_TYPE (from))
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
- && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
- && REG_P (DECL_RTL (to))))
+ && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
+ && REG_P (DECL_RTL (to)))
+ || TREE_CODE (to) == SSA_NAME))
{
rtx value;
@@ -4327,7 +4401,10 @@ expand_assignment (tree to, tree from, bool nontemporal)
else
{
if (POINTER_TYPE_P (TREE_TYPE (to)))
- value = convert_memory_address (GET_MODE (to_rtx), value);
+ value = convert_memory_address_addr_space
+ (GET_MODE (to_rtx), value,
+ TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
+
emit_move_insn (to_rtx, value);
}
preserve_temp_slots (to_rtx);
@@ -4366,7 +4443,11 @@ expand_assignment (tree to, tree from, bool nontemporal)
/* In case we are returning the contents of an object which overlaps
the place the value is being stored, use a safe function when copying
a value through a pointer into a structure value return block. */
- if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
+ if (TREE_CODE (to) == RESULT_DECL
+ && TREE_CODE (from) == INDIRECT_REF
+ && ADDR_SPACE_GENERIC_P
+ (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
+ && refs_may_alias_p (to, from)
&& cfun->returns_struct
&& !cfun->returns_pcc_struct)
{
@@ -4402,7 +4483,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
/* Emits nontemporal store insn that moves FROM to TO. Returns true if this
succeeded, false otherwise. */
-static bool
+bool
emit_storent_insn (rtx to, rtx from)
{
enum machine_mode mode = GET_MODE (to), imode;
@@ -4444,7 +4525,7 @@ emit_storent_insn (rtx to, rtx from)
If CALL_PARAM_P is nonzero, this is a store into a call param on the
stack, and block moves may need to be treated specially.
-
+
If NONTEMPORAL is true, try using a nontemporal store instruction. */
rtx
@@ -4452,7 +4533,7 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
rtx temp;
rtx alt_rtl = NULL_RTX;
- int dont_return_target = 0;
+ location_t loc = EXPR_LOCATION (exp);
if (VOID_TYPE_P (TREE_TYPE (exp)))
{
@@ -4483,7 +4564,7 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
do_pending_stack_adjust ();
NO_DEFER_POP;
- jumpifnot (TREE_OPERAND (exp, 0), lab1);
+ jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
nontemporal);
emit_jump_insn (gen_jump (lab2));
@@ -4528,13 +4609,13 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
(TYPE_MODE (TREE_TYPE (exp)),
SUBREG_PROMOTED_UNSIGNED_P (target));
- exp = fold_convert (ntype, exp);
+ exp = fold_convert_loc (loc, ntype, exp);
}
- exp = fold_convert (lang_hooks.types.type_for_mode
- (GET_MODE (SUBREG_REG (target)),
- SUBREG_PROMOTED_UNSIGNED_P (target)),
- exp);
+ exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
+ (GET_MODE (SUBREG_REG (target)),
+ SUBREG_PROMOTED_UNSIGNED_P (target)),
+ exp);
inner_target = SUBREG_REG (target);
}
@@ -4612,19 +4693,6 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
(call_param_p
? EXPAND_STACK_PARM : EXPAND_NORMAL),
&alt_rtl);
- /* Return TARGET if it's a specified hardware register.
- If TARGET is a volatile mem ref, either return TARGET
- or return a reg copied *from* TARGET; ANSI requires this.
-
- Otherwise, if TEMP is not TARGET, return TEMP
- if it is constant (for efficiency),
- or if we really want the correct value. */
- if (!(target && REG_P (target)
- && REGNO (target) < FIRST_PSEUDO_REGISTER)
- && !(MEM_P (target) && MEM_VOLATILE_P (target))
- && ! rtx_equal_p (temp, target)
- && CONSTANT_P (temp))
- dont_return_target = 1;
}
/* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
@@ -4673,15 +4741,7 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
&& GET_MODE (temp) != VOIDmode)
{
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
- if (dont_return_target)
- {
- /* In this case, we will return TEMP,
- so make sure it has the proper mode.
- But don't forget to store the value into TARGET. */
- temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
- emit_move_insn (target, temp);
- }
- else if (GET_MODE (target) == BLKmode
+ if (GET_MODE (target) == BLKmode
|| GET_MODE (temp) == BLKmode)
emit_block_move (target, temp, expr_size (exp),
(call_param_p
@@ -4699,18 +4759,23 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
type of the string, which is actually the size of the target. */
rtx size = expr_size (exp);
- if (GET_CODE (size) == CONST_INT
+ if (CONST_INT_P (size)
&& INTVAL (size) < TREE_STRING_LENGTH (exp))
emit_block_move (target, temp, size,
(call_param_p
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
{
+ enum machine_mode pointer_mode
+ = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
+ enum machine_mode address_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
+
/* Compute the size of the data to copy from the string. */
tree copy_size
- = size_binop (MIN_EXPR,
- make_tree (sizetype, size),
- size_int (TREE_STRING_LENGTH (exp)));
+ = size_binop_loc (loc, MIN_EXPR,
+ make_tree (sizetype, size),
+ size_int (TREE_STRING_LENGTH (exp)));
rtx copy_size_rtx
= expand_expr (copy_size, NULL_RTX, VOIDmode,
(call_param_p
@@ -4718,15 +4783,15 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
rtx label = 0;
/* Copy that much. */
- copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
+ copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
TYPE_UNSIGNED (sizetype));
emit_block_move (target, temp, copy_size_rtx,
(call_param_p
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
/* Figure out how much is left in TARGET that we have to clear.
- Do all calculations in ptr_mode. */
- if (GET_CODE (copy_size_rtx) == CONST_INT)
+ Do all calculations in pointer_mode. */
+ if (CONST_INT_P (copy_size_rtx))
{
size = plus_constant (size, -INTVAL (copy_size_rtx));
target = adjust_address (target, BLKmode,
@@ -4738,11 +4803,10 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
copy_size_rtx, NULL_RTX, 0,
OPTAB_LIB_WIDEN);
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (copy_size_rtx) != Pmode)
- copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
+ if (GET_MODE (copy_size_rtx) != address_mode)
+ copy_size_rtx = convert_to_mode (address_mode,
+ copy_size_rtx,
TYPE_UNSIGNED (sizetype));
-#endif
target = offset_address (target, copy_size_rtx,
highest_pow2_factor (copy_size));
@@ -5232,6 +5296,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
if (offset)
{
+ enum machine_mode address_mode;
rtx offset_rtx;
offset
@@ -5242,13 +5307,10 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
offset_rtx = expand_normal (offset);
gcc_assert (MEM_P (to_rtx));
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (offset_rtx) != Pmode)
- offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
-#else
- if (GET_MODE (offset_rtx) != ptr_mode)
- offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
-#endif
+ address_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
+ if (GET_MODE (offset_rtx) != address_mode)
+ offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
to_rtx = offset_address (to_rtx, offset_rtx,
highest_pow2_factor (offset));
@@ -5401,13 +5463,11 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
enum machine_mode mode;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
- int unsignedp;
rtx xtarget = target;
if (cleared && initializer_zerop (value))
continue;
- unsignedp = TYPE_UNSIGNED (elttype);
mode = TYPE_MODE (elttype);
if (mode == BLKmode)
bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
@@ -5463,13 +5523,10 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
tree exit_cond;
expand_normal (hi_index);
- unsignedp = TYPE_UNSIGNED (domain);
-
- index = build_decl (VAR_DECL, NULL_TREE, domain);
- index_r
- = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
- &unsignedp, 0));
+ index = build_decl (EXPR_LOCATION (exp),
+ VAR_DECL, NULL_TREE, domain);
+ index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
SET_DECL_RTL (index, index_r);
store_expr (lo_index, index_r, 0, false);
@@ -5503,7 +5560,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
/* Generate a conditional jump to exit the loop. */
exit_cond = build2 (LT_EXPR, integer_type_node,
index, hi_index);
- jumpif (exit_cond, loop_end);
+ jumpif (exit_cond, loop_end, -1);
/* Update the loop counter, and jump to the head of
the loop. */
@@ -5710,7 +5767,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
ALIAS_SET is the alias set for the destination. This value will
(in general) be different from that for TARGET, since TARGET is a
reference to the containing structure.
-
+
If NONTEMPORAL is true, try generating a nontemporal store. */
static rtx
@@ -5718,8 +5775,6 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
enum machine_mode mode, tree exp, tree type,
alias_set_type alias_set, bool nontemporal)
{
- HOST_WIDE_INT width_mask = 0;
-
if (TREE_CODE (exp) == ERROR_MARK)
return const0_rtx;
@@ -5727,8 +5782,6 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
side-effects. */
if (bitsize == 0)
return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
- else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
- width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
/* If we are storing into an unaligned field of an aligned union that is
in a register, we may have the mode of TARGET being an integer mode but
@@ -5793,22 +5846,25 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
&& compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
{
rtx temp;
+ gimple nop_def;
/* If EXP is a NOP_EXPR of precision less than its mode, then that
implies a mask operation. If the precision is the same size as
the field we're storing into, that mask is redundant. This is
particularly common with bit field assignments generated by the
C front end. */
- if (TREE_CODE (exp) == NOP_EXPR)
+ nop_def = get_def_for_expr (exp, NOP_EXPR);
+ if (nop_def)
{
tree type = TREE_TYPE (exp);
if (INTEGRAL_TYPE_P (type)
&& TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
&& bitsize == TYPE_PRECISION (type))
{
- type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ tree op = gimple_assign_rhs1 (nop_def);
+ type = TREE_TYPE (op);
if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
- exp = TREE_OPERAND (exp, 0);
+ exp = op;
}
}
@@ -5925,6 +5981,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
/* First get the mode, signedness, and size. We do this from just the
outermost expression. */
+ *pbitsize = -1;
if (TREE_CODE (exp) == COMPONENT_REF)
{
tree field = TREE_OPERAND (exp, 1);
@@ -6101,7 +6158,7 @@ contains_packed_reference (const_tree exp)
case COMPONENT_REF:
{
tree field = TREE_OPERAND (exp, 1);
- packed_p = DECL_PACKED (field)
+ packed_p = DECL_PACKED (field)
|| TYPE_PACKED (TREE_TYPE (field))
|| TYPE_PACKED (TREE_TYPE (exp));
if (packed_p)
@@ -6134,6 +6191,7 @@ array_ref_element_size (tree exp)
{
tree aligned_size = TREE_OPERAND (exp, 3);
tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ location_t loc = EXPR_LOCATION (exp);
/* If a size was specified in the ARRAY_REF, it's the size measured
in alignment units of the element type. So multiply by that value. */
@@ -6142,9 +6200,9 @@ array_ref_element_size (tree exp)
/* ??? tree_ssa_useless_type_conversion will eliminate casts to
sizetype from another type of the same width and signedness. */
if (TREE_TYPE (aligned_size) != sizetype)
- aligned_size = fold_convert (sizetype, aligned_size);
- return size_binop (MULT_EXPR, aligned_size,
- size_int (TYPE_ALIGN_UNIT (elmt_type)));
+ aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
+ return size_binop_loc (loc, MULT_EXPR, aligned_size,
+ size_int (TYPE_ALIGN_UNIT (elmt_type)));
}
/* Otherwise, take the size from that of the element type. Substitute
@@ -6199,6 +6257,7 @@ component_ref_field_offset (tree exp)
{
tree aligned_offset = TREE_OPERAND (exp, 2);
tree field = TREE_OPERAND (exp, 1);
+ location_t loc = EXPR_LOCATION (exp);
/* If an offset was specified in the COMPONENT_REF, it's the offset measured
in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
@@ -6208,9 +6267,10 @@ component_ref_field_offset (tree exp)
/* ??? tree_ssa_useless_type_conversion will eliminate casts to
sizetype from another type of the same width and signedness. */
if (TREE_TYPE (aligned_offset) != sizetype)
- aligned_offset = fold_convert (sizetype, aligned_offset);
- return size_binop (MULT_EXPR, aligned_offset,
- size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
+ aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
+ return size_binop_loc (loc, MULT_EXPR, aligned_offset,
+ size_int (DECL_OFFSET_ALIGN (field)
+ / BITS_PER_UNIT));
}
/* Otherwise, take the offset from that of the field. Substitute
@@ -6219,26 +6279,44 @@ component_ref_field_offset (tree exp)
return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
-/* Return 1 if T is an expression that get_inner_reference handles. */
+/* Alignment in bits the TARGET of an assignment may be assumed to have. */
-int
-handled_component_p (const_tree t)
+static unsigned HOST_WIDE_INT
+target_align (const_tree target)
{
- switch (TREE_CODE (t))
+ /* We might have a chain of nested references with intermediate misaligning
+ bitfield components, so we need to recurse to find out. */
+
+ unsigned HOST_WIDE_INT this_align, outer_align;
+
+ switch (TREE_CODE (target))
{
case BIT_FIELD_REF:
+ return 1;
+
case COMPONENT_REF:
+ this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
+ outer_align = target_align (TREE_OPERAND (target, 0));
+ return MIN (this_align, outer_align);
+
case ARRAY_REF:
case ARRAY_RANGE_REF:
+ this_align = TYPE_ALIGN (TREE_TYPE (target));
+ outer_align = target_align (TREE_OPERAND (target, 0));
+ return MIN (this_align, outer_align);
+
+ CASE_CONVERT:
+ case NON_LVALUE_EXPR:
case VIEW_CONVERT_EXPR:
- case REALPART_EXPR:
- case IMAGPART_EXPR:
- return 1;
+ this_align = TYPE_ALIGN (TREE_TYPE (target));
+ outer_align = target_align (TREE_OPERAND (target, 0));
+ return MAX (this_align, outer_align);
default:
- return 0;
+ return TYPE_ALIGN (TREE_TYPE (target));
}
}
+
/* Given an rtx VALUE that may contain additions and multiplications, return
an equivalent value that just refers to a register, memory, or constant.
@@ -6288,7 +6366,7 @@ force_operand (rtx value, rtx target)
op2 = XEXP (value, 1);
if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
subtarget = 0;
- if (code == MINUS && GET_CODE (op2) == CONST_INT)
+ if (code == MINUS && CONST_INT_P (op2))
{
code = PLUS;
op2 = negate_rtx (GET_MODE (value), op2);
@@ -6300,7 +6378,7 @@ force_operand (rtx value, rtx target)
constant first and then add the other value. This allows virtual
register instantiation to simply modify the constant rather than
creating another one around this addition. */
- if (code == PLUS && GET_CODE (op2) == CONST_INT
+ if (code == PLUS && CONST_INT_P (op2)
&& GET_CODE (XEXP (value, 0)) == PLUS
&& REG_P (XEXP (XEXP (value, 0), 0))
&& REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
@@ -6685,14 +6763,10 @@ highest_pow2_factor (const_tree exp)
static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
- unsigned HOST_WIDE_INT target_align, factor;
+ unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
+ unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
- factor = highest_pow2_factor (exp);
- if (TREE_CODE (target) == COMPONENT_REF)
- target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
- else
- target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
- return MAX (factor, target_align);
+ return MAX (factor, talign);
}
/* Return &VAR expression for emulated thread local VAR. */
@@ -6704,7 +6778,7 @@ emutls_var_address (tree var)
tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
tree arglist = build_tree_list (NULL_TREE, arg);
- tree call = build_function_call_expr (fn, arglist);
+ tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
}
@@ -6756,7 +6830,7 @@ expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
- enum expand_modifier modifier)
+ enum expand_modifier modifier, addr_space_t as)
{
rtx result, subtarget;
tree inner, offset;
@@ -6781,9 +6855,8 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
case CONST_DECL:
- /* Recurse and make the output_constant_def clause above handle this. */
- return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
- tmode, modifier);
+ /* Expand the initializer like constants above. */
+ return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
case REALPART_EXPR:
/* The real part of the complex number is always first, therefore
@@ -6821,9 +6894,10 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
CONSTRUCTORs too, which should yield a memory reference for the
constructor's contents. Assume language specific tree nodes can
be expanded in some interesting way. */
+ gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
if (DECL_P (exp)
|| TREE_CODE (exp) == CONSTRUCTOR
- || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
+ || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
{
result = expand_expr (exp, target, tmode,
modifier == EXPAND_INITIALIZER
@@ -6872,7 +6946,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
}
- result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
+ result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
if (offset)
{
@@ -6880,12 +6954,12 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
if (modifier != EXPAND_NORMAL)
result = force_operand (result, NULL);
- tmp = expand_expr (offset, NULL_RTX, tmode,
+ tmp = expand_expr (offset, NULL_RTX, tmode,
modifier == EXPAND_INITIALIZER
? EXPAND_INITIALIZER : EXPAND_NORMAL);
- result = convert_memory_address (tmode, result);
- tmp = convert_memory_address (tmode, tmp);
+ result = convert_memory_address_addr_space (tmode, result, as);
+ tmp = convert_memory_address_addr_space (tmode, tmp, as);
if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
result = gen_rtx_PLUS (tmode, result, tmp);
@@ -6918,6 +6992,9 @@ static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
enum expand_modifier modifier)
{
+ addr_space_t as = ADDR_SPACE_GENERIC;
+ enum machine_mode address_mode = Pmode;
+ enum machine_mode pointer_mode = ptr_mode;
enum machine_mode rmode;
rtx result;
@@ -6925,14 +7002,21 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
if (tmode == VOIDmode)
tmode = TYPE_MODE (TREE_TYPE (exp));
+ if (POINTER_TYPE_P (TREE_TYPE (exp)))
+ {
+ as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
+ address_mode = targetm.addr_space.address_mode (as);
+ pointer_mode = targetm.addr_space.pointer_mode (as);
+ }
+
/* We can get called with some Weird Things if the user does silliness
like "(short) &a". In that case, convert_memory_address won't do
the right thing, so ignore the given target mode. */
- if (tmode != Pmode && tmode != ptr_mode)
- tmode = Pmode;
+ if (tmode != address_mode && tmode != pointer_mode)
+ tmode = address_mode;
result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
- tmode, modifier);
+ tmode, modifier, as);
/* Despite expand_expr claims concerning ignoring TMODE when not
strictly convenient, stuff breaks if we don't honor it. Note
@@ -6941,7 +7025,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
if (rmode == VOIDmode)
rmode = tmode;
if (rmode != tmode)
- result = convert_memory_address (tmode, result);
+ result = convert_memory_address_addr_space (tmode, result, as);
return result;
}
@@ -7078,15 +7162,11 @@ expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
recursively. */
-static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
- enum expand_modifier, rtx *);
-
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
enum expand_modifier modifier, rtx *alt_rtl)
{
- int rn = -1;
- rtx ret, last = NULL;
+ rtx ret;
/* Handle ERROR_MARK before anybody tries to access its type. */
if (TREE_CODE (exp) == ERROR_MARK
@@ -7096,15 +7176,6 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
return ret ? ret : const0_rtx;
}
- if (flag_non_call_exceptions)
- {
- rn = lookup_expr_eh_region (exp);
-
- /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
- if (rn >= 0)
- last = get_last_insn ();
- }
-
/* If this is an expression of some kind and it has an associated line
number, then emit the line number before expanding the expression.
@@ -7116,6 +7187,8 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
if (cfun && EXPR_HAS_LOCATION (exp))
{
location_t saved_location = input_location;
+ location_t saved_curr_loc = get_curr_insn_source_location ();
+ tree saved_block = get_curr_insn_block ();
input_location = EXPR_LOCATION (exp);
set_curr_insn_source_location (input_location);
@@ -7125,39 +7198,1104 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
input_location = saved_location;
+ set_curr_insn_block (saved_block);
+ set_curr_insn_source_location (saved_curr_loc);
}
else
{
ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
}
- /* If using non-call exceptions, mark all insns that may trap.
- expand_call() will mark CALL_INSNs before we get to this code,
- but it doesn't handle libcalls, and these may trap. */
- if (rn >= 0)
+ return ret;
+}
+
+rtx
+expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
+ enum expand_modifier modifier)
+{
+ rtx op0, op1, op2, temp;
+ tree type;
+ int unsignedp;
+ enum machine_mode mode;
+ enum tree_code code = ops->code;
+ optab this_optab;
+ rtx subtarget, original_target;
+ int ignore;
+ tree subexp0, subexp1;
+ bool reduce_bit_field;
+ gimple subexp0_def, subexp1_def;
+ tree top0, top1;
+ location_t loc = ops->location;
+ tree treeop0, treeop1;
+#define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
+ ? reduce_to_bit_field_precision ((expr), \
+ target, \
+ type) \
+ : (expr))
+
+ type = ops->type;
+ mode = TYPE_MODE (type);
+ unsignedp = TYPE_UNSIGNED (type);
+
+ treeop0 = ops->op0;
+ treeop1 = ops->op1;
+
+ /* We should be called only on simple (binary or unary) expressions,
+ exactly those that are valid in gimple expressions that aren't
+ GIMPLE_SINGLE_RHS (or invalid). */
+ gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
+ || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
+
+ ignore = (target == const0_rtx
+ || ((CONVERT_EXPR_CODE_P (code)
+ || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
+ && TREE_CODE (type) == VOID_TYPE));
+
+ /* We should be called only if we need the result. */
+ gcc_assert (!ignore);
+
+ /* An operation in what may be a bit-field type needs the
+ result to be reduced to the precision of the bit-field type,
+ which is narrower than that of the type's mode. */
+ reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
+ && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
+
+ if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
+ target = 0;
+
+ /* Use subtarget as the target for operand 0 of a binary operation. */
+ subtarget = get_subtarget (target);
+ original_target = target;
+
+ switch (code)
{
- rtx insn;
- for (insn = next_real_insn (last); insn;
- insn = next_real_insn (insn))
+ case NON_LVALUE_EXPR:
+ case PAREN_EXPR:
+ CASE_CONVERT:
+ if (treeop0 == error_mark_node)
+ return const0_rtx;
+
+ if (TREE_CODE (type) == UNION_TYPE)
+ {
+ tree valtype = TREE_TYPE (treeop0);
+
+ /* If both input and output are BLKmode, this conversion isn't doing
+ anything except possibly changing memory attribute. */
+ if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
+ {
+ rtx result = expand_expr (treeop0, target, tmode,
+ modifier);
+
+ result = copy_rtx (result);
+ set_mem_attributes (result, type, 0);
+ return result;
+ }
+
+ if (target == 0)
+ {
+ if (TYPE_MODE (type) != BLKmode)
+ target = gen_reg_rtx (TYPE_MODE (type));
+ else
+ target = assign_temp (type, 0, 1, 1);
+ }
+
+ if (MEM_P (target))
+ /* Store data into beginning of memory target. */
+ store_expr (treeop0,
+ adjust_address (target, TYPE_MODE (valtype), 0),
+ modifier == EXPAND_STACK_PARM,
+ false);
+
+ else
+ {
+ gcc_assert (REG_P (target));
+
+ /* Store this field into a union of the proper type. */
+ store_field (target,
+ MIN ((int_size_in_bytes (TREE_TYPE
+ (treeop0))
+ * BITS_PER_UNIT),
+ (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
+ 0, TYPE_MODE (valtype), treeop0,
+ type, 0, false);
+ }
+
+ /* Return the entire union. */
+ return target;
+ }
+
+ if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
+ {
+ op0 = expand_expr (treeop0, target, VOIDmode,
+ modifier);
+
+ /* If the signedness of the conversion differs and OP0 is
+ a promoted SUBREG, clear that indication since we now
+ have to do the proper extension. */
+ if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
+ && GET_CODE (op0) == SUBREG)
+ SUBREG_PROMOTED_VAR_P (op0) = 0;
+
+ return REDUCE_BIT_FIELD (op0);
+ }
+
+ op0 = expand_expr (treeop0, NULL_RTX, mode,
+ modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
+ if (GET_MODE (op0) == mode)
+ ;
+
+ /* If OP0 is a constant, just convert it into the proper mode. */
+ else if (CONSTANT_P (op0))
+ {
+ tree inner_type = TREE_TYPE (treeop0);
+ enum machine_mode inner_mode = TYPE_MODE (inner_type);
+
+ if (modifier == EXPAND_INITIALIZER)
+ op0 = simplify_gen_subreg (mode, op0, inner_mode,
+ subreg_lowpart_offset (mode,
+ inner_mode));
+ else
+ op0 = convert_modes (mode, inner_mode, op0,
+ TYPE_UNSIGNED (inner_type));
+ }
+
+ else if (modifier == EXPAND_INITIALIZER)
+ op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
+
+ else if (target == 0)
+ op0 = convert_to_mode (mode, op0,
+ TYPE_UNSIGNED (TREE_TYPE
+ (treeop0)));
+ else
+ {
+ convert_move (target, op0,
+ TYPE_UNSIGNED (TREE_TYPE (treeop0)));
+ op0 = target;
+ }
+
+ return REDUCE_BIT_FIELD (op0);
+
+ case ADDR_SPACE_CONVERT_EXPR:
+ {
+ tree treeop0_type = TREE_TYPE (treeop0);
+ addr_space_t as_to;
+ addr_space_t as_from;
+
+ gcc_assert (POINTER_TYPE_P (type));
+ gcc_assert (POINTER_TYPE_P (treeop0_type));
+
+ as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
+ as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
+
+ /* Conversions between pointers to the same address space should
+ have been implemented via CONVERT_EXPR / NOP_EXPR. */
+ gcc_assert (as_to != as_from);
+
+ /* Ask target code to handle conversion between pointers
+ to overlapping address spaces. */
+ if (targetm.addr_space.subset_p (as_to, as_from)
+ || targetm.addr_space.subset_p (as_from, as_to))
+ {
+ op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
+ op0 = targetm.addr_space.convert (op0, treeop0_type, type);
+ gcc_assert (op0);
+ return op0;
+ }
+
+ /* For disjoint address spaces, converting anything but
+ a null pointer invokes undefined behaviour. We simply
+ always return a null pointer here. */
+ return CONST0_RTX (mode);
+ }
+
+ case POINTER_PLUS_EXPR:
+ /* Even though the sizetype mode and the pointer's mode can be different
+ expand is able to handle this correctly and get the correct result out
+ of the PLUS_EXPR code. */
+ /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
+ if sizetype precision is smaller than pointer precision. */
+ if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
+ treeop1 = fold_convert_loc (loc, type,
+ fold_convert_loc (loc, ssizetype,
+ treeop1));
+ case PLUS_EXPR:
+
+ /* Check if this is a case for multiplication and addition. */
+ if ((TREE_CODE (type) == INTEGER_TYPE
+ || TREE_CODE (type) == FIXED_POINT_TYPE)
+ && (subexp0_def = get_def_for_expr (treeop0,
+ MULT_EXPR)))
+ {
+ tree subsubexp0, subsubexp1;
+ gimple subsubexp0_def, subsubexp1_def;
+ enum tree_code this_code;
+
+ this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
+ : FIXED_CONVERT_EXPR;
+ subsubexp0 = gimple_assign_rhs1 (subexp0_def);
+ subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
+ subsubexp1 = gimple_assign_rhs2 (subexp0_def);
+ subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
+ if (subsubexp0_def && subsubexp1_def
+ && (top0 = gimple_assign_rhs1 (subsubexp0_def))
+ && (top1 = gimple_assign_rhs1 (subsubexp1_def))
+ && (TYPE_PRECISION (TREE_TYPE (top0))
+ < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
+ && (TYPE_PRECISION (TREE_TYPE (top0))
+ == TYPE_PRECISION (TREE_TYPE (top1)))
+ && (TYPE_UNSIGNED (TREE_TYPE (top0))
+ == TYPE_UNSIGNED (TREE_TYPE (top1))))
+ {
+ tree op0type = TREE_TYPE (top0);
+ enum machine_mode innermode = TYPE_MODE (op0type);
+ bool zextend_p = TYPE_UNSIGNED (op0type);
+ bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
+ if (sat_p == 0)
+ this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
+ else
+ this_optab = zextend_p ? usmadd_widen_optab
+ : ssmadd_widen_optab;
+ if (mode == GET_MODE_2XWIDER_MODE (innermode)
+ && (optab_handler (this_optab, mode)->insn_code
+ != CODE_FOR_nothing))
+ {
+ expand_operands (top0, top1, NULL_RTX, &op0, &op1,
+ EXPAND_NORMAL);
+ op2 = expand_expr (treeop1, subtarget,
+ VOIDmode, EXPAND_NORMAL);
+ temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+ target, unsignedp);
+ gcc_assert (temp);
+ return REDUCE_BIT_FIELD (temp);
+ }
+ }
+ }
+
+ /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
+ something else, make sure we add the register to the constant and
+ then to the other thing. This case can occur during strength
+ reduction and doing it this way will produce better code if the
+ frame pointer or argument pointer is eliminated.
+
+ fold-const.c will ensure that the constant is always in the inner
+ PLUS_EXPR, so the only case we need to do anything about is if
+ sp, ap, or fp is our second argument, in which case we must swap
+ the innermost first argument and our second argument. */
+
+ if (TREE_CODE (treeop0) == PLUS_EXPR
+ && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
+ && TREE_CODE (treeop1) == VAR_DECL
+ && (DECL_RTL (treeop1) == frame_pointer_rtx
+ || DECL_RTL (treeop1) == stack_pointer_rtx
+ || DECL_RTL (treeop1) == arg_pointer_rtx))
+ {
+ tree t = treeop1;
+
+ treeop1 = TREE_OPERAND (treeop0, 0);
+ TREE_OPERAND (treeop0, 0) = t;
+ }
+
+ /* If the result is to be ptr_mode and we are adding an integer to
+ something, we might be forming a constant. So try to use
+ plus_constant. If it produces a sum and we can't accept it,
+ use force_operand. This allows P = &ARR[const] to generate
+ efficient code on machines where a SYMBOL_REF is not a valid
+ address.
+
+ If this is an EXPAND_SUM call, always return the sum. */
+ if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
+ || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
+ {
+ if (modifier == EXPAND_STACK_PARM)
+ target = 0;
+ if (TREE_CODE (treeop0) == INTEGER_CST
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && TREE_CONSTANT (treeop1))
+ {
+ rtx constant_part;
+
+ op1 = expand_expr (treeop1, subtarget, VOIDmode,
+ EXPAND_SUM);
+ /* Use immed_double_const to ensure that the constant is
+ truncated according to the mode of OP1, then sign extended
+ to a HOST_WIDE_INT. Using the constant directly can result
+ in non-canonical RTL in a 64x32 cross compile. */
+ constant_part
+ = immed_double_const (TREE_INT_CST_LOW (treeop0),
+ (HOST_WIDE_INT) 0,
+ TYPE_MODE (TREE_TYPE (treeop1)));
+ op1 = plus_constant (op1, INTVAL (constant_part));
+ if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+ op1 = force_operand (op1, target);
+ return REDUCE_BIT_FIELD (op1);
+ }
+
+ else if (TREE_CODE (treeop1) == INTEGER_CST
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && TREE_CONSTANT (treeop0))
+ {
+ rtx constant_part;
+ |
+ op0 = expand_expr (treeop0, subtarget, VOIDmode, |
+ (modifier == EXPAND_INITIALIZER |
+ ? EXPAND_INITIALIZER : EXPAND_SUM)); |
+ if (! CONSTANT_P (op0)) |
+ { |
+ op1 = expand_expr (treeop1, NULL_RTX, |
+ VOIDmode, modifier); |
+ /* Return a PLUS if modifier says it's OK. */ |
+ if (modifier == EXPAND_SUM |
+ || modifier == EXPAND_INITIALIZER) |
+ return simplify_gen_binary (PLUS, mode, op0, op1); |
+ goto binop2; |
+ } |
+ /* Use immed_double_const to ensure that the constant is |
+ truncated according to the mode of OP0, then sign extended |
+ to a HOST_WIDE_INT. Using the constant directly can result |
+ in non-canonical RTL in a 64x32 cross compile. */ |
+ constant_part |
+ = immed_double_const (TREE_INT_CST_LOW (treeop1), |
+ (HOST_WIDE_INT) 0, |
+ TYPE_MODE (TREE_TYPE (treeop0))); |
+ op0 = plus_constant (op0, INTVAL (constant_part)); |
+ if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
+ op0 = force_operand (op0, target); |
+ return REDUCE_BIT_FIELD (op0); |
+ } |
+ } |
+ |
+ /* No sense saving up arithmetic to be done |
+ if it's all in the wrong mode to form part of an address. |
+ And force_operand won't know whether to sign-extend or |
+ zero-extend. */ |
+ if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
+ || mode != ptr_mode) |
+ { |
+ expand_operands (treeop0, treeop1, |
+ subtarget, &op0, &op1, EXPAND_NORMAL); |
+ if (op0 == const0_rtx) |
+ return op1; |
+ if (op1 == const0_rtx) |
+ return op0; |
+ goto binop2; |
+ } |
+ |
+ expand_operands (treeop0, treeop1, |
+ subtarget, &op0, &op1, modifier); |
+ return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); |
+ |
+ case MINUS_EXPR: |
+ /* Check if this is a case for multiplication and subtraction. */ |
+ if ((TREE_CODE (type) == INTEGER_TYPE |
+ || TREE_CODE (type) == FIXED_POINT_TYPE) |
+ && (subexp1_def = get_def_for_expr (treeop1, |
+ MULT_EXPR))) |
+ { |
+ tree subsubexp0, subsubexp1; |
+ gimple subsubexp0_def, subsubexp1_def; |
+ enum tree_code this_code; |
+ |
+ this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR |
+ : FIXED_CONVERT_EXPR; |
+ subsubexp0 = gimple_assign_rhs1 (subexp1_def); |
+ subsubexp0_def = get_def_for_expr (subsubexp0, this_code); |
+ subsubexp1 = gimple_assign_rhs2 (subexp1_def); |
+ subsubexp1_def = get_def_for_expr (subsubexp1, this_code); |
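+ /* As in the PLUS_EXPR case: a widening multiply-subtract needs both |
+ multiplication operands to be conversions from the same narrower |
+ precision with matching signedness. */ |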
+ if (subsubexp0_def && subsubexp1_def |
+ && (top0 = gimple_assign_rhs1 (subsubexp0_def)) |
+ && (top1 = gimple_assign_rhs1 (subsubexp1_def)) |
+ && (TYPE_PRECISION (TREE_TYPE (top0)) |
+ < TYPE_PRECISION (TREE_TYPE (subsubexp0))) |
+ && (TYPE_PRECISION (TREE_TYPE (top0)) |
+ == TYPE_PRECISION (TREE_TYPE (top1))) |
+ && (TYPE_UNSIGNED (TREE_TYPE (top0)) |
+ == TYPE_UNSIGNED (TREE_TYPE (top1)))) |
+ { |
+ tree op0type = TREE_TYPE (top0); |
+ enum machine_mode innermode = TYPE_MODE (op0type); |
+ bool zextend_p = TYPE_UNSIGNED (op0type); |
+ bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0)); |
+ if (sat_p == 0) |
+ this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab; |
+ else |
+ this_optab = zextend_p ? usmsub_widen_optab |
+ : ssmsub_widen_optab; |
+ if (mode == GET_MODE_2XWIDER_MODE (innermode) |
+ && (optab_handler (this_optab, mode)->insn_code |
+ != CODE_FOR_nothing)) |
+ { |
+ expand_operands (top0, top1, NULL_RTX, &op0, &op1, |
+ EXPAND_NORMAL); |
+ op2 = expand_expr (treeop0, subtarget, |
+ VOIDmode, EXPAND_NORMAL); |
+ temp = expand_ternary_op (mode, this_optab, op0, op1, op2, |
+ target, unsignedp); |
+ gcc_assert (temp); |
+ return REDUCE_BIT_FIELD (temp); |
+ } |
+ } |
+ } |
+ |
+ /* For initializers, we are allowed to return a MINUS of two |
+ symbolic constants. Here we handle all cases when both operands |
+ are constant. */ |
+ if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) |
+ && really_constant_p (treeop0) |
+ && really_constant_p (treeop1)) |
+ { |
+ expand_operands (treeop0, treeop1, |
+ NULL_RTX, &op0, &op1, modifier); |
+ |
+ /* If the last operand is a CONST_INT, use plus_constant of |
+ the negated constant. Else make the MINUS. */ |
+ if (CONST_INT_P (op1)) |
+ return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1))); |
+ else |
+ return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1)); |
+ } |
+ |
+ /* No sense saving up arithmetic to be done |
+ if it's all in the wrong mode to form part of an address. |
+ And force_operand won't know whether to sign-extend or |
+ zero-extend. */ |
+ if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
+ || mode != ptr_mode) |
+ goto binop; |
+ |
+ expand_operands (treeop0, treeop1, |
+ subtarget, &op0, &op1, modifier); |
+ |
+ /* Convert A - const to A + (-const). */ |
+ if (CONST_INT_P (op1)) |
+ { |
+ op1 = negate_rtx (mode, op1); |
+ return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); |
+ } |
+ |
+ goto binop2; |
+ |
+ case MULT_EXPR: |
+ /* If this is a fixed-point operation, then we cannot use the code |
+ below because "expand_mult" doesn't support sat/no-sat fixed-point |
+ multiplications. */ |
+ if (ALL_FIXED_POINT_MODE_P (mode)) |
+ goto binop; |
+ |
+ /* If first operand is constant, swap them. |
+ Thus the following special case checks need only |
+ check the second operand. */ |
+ if (TREE_CODE (treeop0) == INTEGER_CST) |
+ { |
+ tree t1 = treeop0; |
+ treeop0 = treeop1; |
+ treeop1 = t1; |
+ } |
+ |
+ /* Attempt to return something suitable for generating an |
+ indexed address, for machines that support that. */ |
+ |
+ if (modifier == EXPAND_SUM && mode == ptr_mode |
+ && host_integerp (treeop1, 0)) |
+ { |
+ tree exp1 = treeop1; |
+ |
+ op0 = expand_expr (treeop0, subtarget, VOIDmode, |
+ EXPAND_SUM); |
+ |
+ if (!REG_P (op0)) |
+ op0 = force_operand (op0, NULL_RTX); |
+ if (!REG_P (op0)) |
+ op0 = copy_to_mode_reg (mode, op0); |
+ |
+ return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0, |
+ gen_int_mode (tree_low_cst (exp1, 0), |
+ TYPE_MODE (TREE_TYPE (exp1))))); |
+ } |
+ |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ |
+ /* Check for multiplying things that have been extended |
+ from a narrower type. If this machine supports multiplying |
+ in that narrower type with a result in the desired type, |
+ do it that way, and avoid the explicit type-conversion. */ |
+ |
+ subexp0 = treeop0; |
+ subexp1 = treeop1; |
+ subexp0_def = get_def_for_expr (subexp0, NOP_EXPR); |
+ subexp1_def = get_def_for_expr (subexp1, NOP_EXPR); |
+ top0 = top1 = NULL_TREE; |
+ |
+ /* First, check if we have a multiplication of one signed and one |
+ unsigned operand. */ |
+ if (subexp0_def |
+ && (top0 = gimple_assign_rhs1 (subexp0_def)) |
+ && subexp1_def |
+ && (top1 = gimple_assign_rhs1 (subexp1_def)) |
+ && TREE_CODE (type) == INTEGER_TYPE |
+ && (TYPE_PRECISION (TREE_TYPE (top0)) |
+ < TYPE_PRECISION (TREE_TYPE (subexp0))) |
+ && (TYPE_PRECISION (TREE_TYPE (top0)) |
+ == TYPE_PRECISION (TREE_TYPE (top1))) |
+ && (TYPE_UNSIGNED (TREE_TYPE (top0)) |
+ != TYPE_UNSIGNED (TREE_TYPE (top1)))) |
+ { |
+ enum machine_mode innermode |
+ = TYPE_MODE (TREE_TYPE (top0)); |
+ this_optab = usmul_widen_optab; |
+ if (mode == GET_MODE_WIDER_MODE (innermode)) |
+ { |
+ if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing) |
+ { |
+ if (TYPE_UNSIGNED (TREE_TYPE (top0))) |
+ expand_operands (top0, top1, NULL_RTX, &op0, &op1, |
+ EXPAND_NORMAL); |
+ else |
+ expand_operands (top0, top1, NULL_RTX, &op1, &op0, |
+ EXPAND_NORMAL); |
+ |
+ goto binop3; |
+ } |
+ } |
+ } |
+ /* Check for a multiplication with matching signedness. If |
+ valid, TOP0 and TOP1 were set in the previous if |
+ condition. */ |
+ else if (top0 |
+ && TREE_CODE (type) == INTEGER_TYPE |
+ && (TYPE_PRECISION (TREE_TYPE (top0)) |
+ < TYPE_PRECISION (TREE_TYPE (subexp0))) |
+ && ((TREE_CODE (subexp1) == INTEGER_CST |
+ && int_fits_type_p (subexp1, TREE_TYPE (top0)) |
+ /* Don't use a widening multiply if a shift will do. */ |
+ && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1))) |
+ > HOST_BITS_PER_WIDE_INT) |
+ || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0)) |
+ || |
+ (top1 |
+ && (TYPE_PRECISION (TREE_TYPE (top1)) |
+ == TYPE_PRECISION (TREE_TYPE (top0)) |
+ /* If both operands are extended, they must either both |
+ be zero-extended or both be sign-extended. */ |
+ && (TYPE_UNSIGNED (TREE_TYPE (top1)) |
+ == TYPE_UNSIGNED (TREE_TYPE (top0))))))) |
+ { |
+ tree op0type = TREE_TYPE (top0); |
+ enum machine_mode innermode = TYPE_MODE (op0type); |
+ bool zextend_p = TYPE_UNSIGNED (op0type); |
+ optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab; |
+ this_optab = zextend_p ? umul_widen_optab : smul_widen_optab; |
+ |
+ if (mode == GET_MODE_2XWIDER_MODE (innermode)) |
+ { |
+ if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing) |
+ { |
+ if (TREE_CODE (subexp1) == INTEGER_CST) |
+ expand_operands (top0, subexp1, NULL_RTX, &op0, &op1, |
+ EXPAND_NORMAL); |
+ else |
+ expand_operands (top0, top1, NULL_RTX, &op0, &op1, |
+ EXPAND_NORMAL); |
+ goto binop3; |
+ } |
+ else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing |
+ && innermode == word_mode) |
+ { |
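+ /* Only the opposite-signedness widening multiply is available; |
+ use it, then correct the high part of the result. */ |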
+ rtx htem, hipart; |
+ op0 = expand_normal (top0); |
+ if (TREE_CODE (subexp1) == INTEGER_CST) |
+ op1 = convert_modes (innermode, mode, |
+ expand_normal (subexp1), unsignedp); |
+ else |
+ op1 = expand_normal (top1); |
+ temp = expand_binop (mode, other_optab, op0, op1, target, |
+ unsignedp, OPTAB_LIB_WIDEN); |
+ hipart = gen_highpart (innermode, temp); |
+ htem = expand_mult_highpart_adjust (innermode, hipart, |
+ op0, op1, hipart, |
+ zextend_p); |
+ if (htem != hipart) |
+ emit_move_insn (hipart, htem); |
+ return REDUCE_BIT_FIELD (temp); |
+ } |
+ } |
+ } |
+ expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL); |
+ return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); |
+ |
+ case TRUNC_DIV_EXPR: |
+ case FLOOR_DIV_EXPR: |
+ case CEIL_DIV_EXPR: |
+ case ROUND_DIV_EXPR: |
+ case EXACT_DIV_EXPR: |
+ /* If this is a fixed-point operation, then we cannot use the code |
+ below because "expand_divmod" doesn't support sat/no-sat fixed-point |
+ divisions. */ |
+ if (ALL_FIXED_POINT_MODE_P (mode)) |
+ goto binop; |
+ |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ /* Possible optimization: compute the dividend with EXPAND_SUM |
+ then if the divisor is constant can optimize the case |
+ where some terms of the dividend have coeffs divisible by it. */ |
+ expand_operands (treeop0, treeop1, |
+ subtarget, &op0, &op1, EXPAND_NORMAL); |
+ return expand_divmod (0, code, mode, op0, op1, target, unsignedp); |
+ |
+ case RDIV_EXPR: |
+ goto binop; |
+ |
+ case TRUNC_MOD_EXPR: |
+ case FLOOR_MOD_EXPR: |
+ case CEIL_MOD_EXPR: |
+ case ROUND_MOD_EXPR: |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ expand_operands (treeop0, treeop1, |
+ subtarget, &op0, &op1, EXPAND_NORMAL); |
+ return expand_divmod (1, code, mode, op0, op1, target, unsignedp); |
+ |
+ case FIXED_CONVERT_EXPR: |
+ op0 = expand_normal (treeop0); |
+ if (target == 0 || modifier == EXPAND_STACK_PARM) |
+ target = gen_reg_rtx (mode); |
+ |
+ if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE |
+ && TYPE_UNSIGNED (TREE_TYPE (treeop0))) |
+ || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type))) |
+ expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type)); |
+ else |
+ expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type)); |
+ return target; |
+ |
+ case FIX_TRUNC_EXPR: |
+ op0 = expand_normal (treeop0); |
+ if (target == 0 || modifier == EXPAND_STACK_PARM) |
+ target = gen_reg_rtx (mode); |
+ expand_fix (target, op0, unsignedp); |
+ return target; |
+ |
+ case FLOAT_EXPR: |
+ op0 = expand_normal (treeop0); |
+ if (target == 0 || modifier == EXPAND_STACK_PARM) |
+ target = gen_reg_rtx (mode); |
+ /* expand_float can't figure out what to do if FROM has VOIDmode. |
+ So give it the correct mode. With -O, cse will optimize this. */ |
+ if (GET_MODE (op0) == VOIDmode) |
+ op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)), |
+ op0); |
+ expand_float (target, op0, |
+ TYPE_UNSIGNED (TREE_TYPE (treeop0))); |
+ return target; |
+ |
+ case NEGATE_EXPR: |
+ op0 = expand_expr (treeop0, subtarget, |
+ VOIDmode, EXPAND_NORMAL); |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ temp = expand_unop (mode, |
+ optab_for_tree_code (NEGATE_EXPR, type, |
+ optab_default), |
+ op0, target, 0); |
+ gcc_assert (temp); |
+ return REDUCE_BIT_FIELD (temp); |
+ |
+ case ABS_EXPR: |
+ op0 = expand_expr (treeop0, subtarget, |
+ VOIDmode, EXPAND_NORMAL); |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ |
+ /* ABS_EXPR is not valid for complex arguments. */ |
+ gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT |
+ && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT); |
+ |
+ /* Unsigned abs is simply the operand. Testing here means we don't |
+ risk generating incorrect code below. */ |
+ if (TYPE_UNSIGNED (type)) |
+ return op0; |
+ |
+ return expand_abs (mode, op0, target, unsignedp, |
+ safe_from_p (target, treeop0, 1)); |
+ |
+ case MAX_EXPR: |
+ case MIN_EXPR: |
+ target = original_target; |
+ if (target == 0 |
+ || modifier == EXPAND_STACK_PARM |
+ || (MEM_P (target) && MEM_VOLATILE_P (target)) |
+ || GET_MODE (target) != mode |
+ || (REG_P (target) |
+ && REGNO (target) < FIRST_PSEUDO_REGISTER)) |
+ target = gen_reg_rtx (mode); |
+ expand_operands (treeop0, treeop1, |
+ target, &op0, &op1, EXPAND_NORMAL); |
+ |
+ /* First try to do it with a special MIN or MAX instruction. |
+ If that does not win, use a conditional jump to select the proper |
+ value. */ |
+ this_optab = optab_for_tree_code (code, type, optab_default); |
+ temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, |
+ OPTAB_WIDEN); |
+ if (temp != 0) |
+ return temp; |
+ |
+ /* At this point, a MEM target is no longer useful; we will get better |
+ code without it. */ |
+ |
+ if (! REG_P (target)) |
+ target = gen_reg_rtx (mode); |
+ |
+ /* If op1 was placed in target, swap op0 and op1. */ |
+ if (target != op0 && target == op1) |
{ |
- if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) |
- /* If we want exceptions for non-call insns, any |
- may_trap_p instruction may throw. */ |
- && GET_CODE (PATTERN (insn)) != CLOBBER |
- && GET_CODE (PATTERN (insn)) != USE |
- && (CALL_P (insn) || may_trap_p (PATTERN (insn)))) |
- add_reg_note (insn, REG_EH_REGION, GEN_INT (rn)); |
+ temp = op0; |
+ op0 = op1; |
+ op1 = temp; |
} |
+ |
+ /* We generate better code and avoid problems with op1 mentioning |
+ target by forcing op1 into a pseudo if it isn't a constant. */ |
+ if (! CONSTANT_P (op1)) |
+ op1 = force_reg (mode, op1); |
+ |
+ { |
+ enum rtx_code comparison_code; |
+ rtx cmpop1 = op1; |
+ |
+ if (code == MAX_EXPR) |
+ comparison_code = unsignedp ? GEU : GE; |
+ else |
+ comparison_code = unsignedp ? LEU : LE; |
+ |
+ /* Canonicalize to comparisons against 0. */ |
+ if (op1 == const1_rtx) |
+ { |
+ /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1) |
+ or (a != 0 ? a : 1) for unsigned. |
+ For MIN we are safe converting (a <= 1 ? a : 1) |
+ into (a <= 0 ? a : 1) */ |
+ cmpop1 = const0_rtx; |
+ if (code == MAX_EXPR) |
+ comparison_code = unsignedp ? NE : GT; |
+ } |
+ if (op1 == constm1_rtx && !unsignedp) |
+ { |
+ /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1) |
+ and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */ |
+ cmpop1 = const0_rtx; |
+ if (code == MIN_EXPR) |
+ comparison_code = LT; |
+ } |
+#ifdef HAVE_conditional_move |
+ /* Use a conditional move if possible. */ |
+ if (can_conditionally_move_p (mode)) |
+ { |
+ rtx insn; |
+ |
+ /* ??? Same problem as in expmed.c: emit_conditional_move |
+ forces a stack adjustment via compare_from_rtx, and we |
+ lose the stack adjustment if the sequence we are about |
+ to create is discarded. */ |
+ do_pending_stack_adjust (); |
+ |
+ start_sequence (); |
+ |
+ /* Try to emit the conditional move. */ |
+ insn = emit_conditional_move (target, comparison_code, |
+ op0, cmpop1, mode, |
+ op0, op1, mode, |
+ unsignedp); |
+ |
+ /* If we could do the conditional move, emit the sequence, |
+ and return. */ |
+ if (insn) |
+ { |
+ rtx seq = get_insns (); |
+ end_sequence (); |
+ emit_insn (seq); |
+ return target; |
+ } |
+ |
+ /* Otherwise discard the sequence and fall back to code with |
+ branches. */ |
+ end_sequence (); |
+ } |
+#endif |
+ if (target != op0) |
+ emit_move_insn (target, op0); |
+ |
+ temp = gen_label_rtx (); |
+ do_compare_rtx_and_jump (target, cmpop1, comparison_code, |
+ unsignedp, mode, NULL_RTX, NULL_RTX, temp, |
+ -1); |
+ } |
+ emit_move_insn (target, op1); |
+ emit_label (temp); |
+ return target; |
+ |
+ case BIT_NOT_EXPR: |
+ op0 = expand_expr (treeop0, subtarget, |
+ VOIDmode, EXPAND_NORMAL); |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); |
+ gcc_assert (temp); |
+ return temp; |
+ |
+ /* ??? Can optimize bitwise operations with one arg constant. |
+ Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b) |
+ and (a bitwise1 b) bitwise2 b (etc) |
+ but that is probably not worthwhile. */ |
+ |
+ /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two |
+ boolean values when we want in all cases to compute both of them. In |
+ general it is fastest to do TRUTH_AND_EXPR by computing both operands |
+ as actual zero-or-1 values and then bitwise anding. In cases where |
+ there cannot be any side effects, better code would be made by |
+ treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is |
+ how to recognize those cases. */ |
+ |
+ case TRUTH_AND_EXPR: |
+ code = BIT_AND_EXPR; |
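+ /* fall through */ |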
+ case BIT_AND_EXPR: |
+ goto binop; |
+ |
+ case TRUTH_OR_EXPR: |
+ code = BIT_IOR_EXPR; |
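+ /* fall through */ |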
+ case BIT_IOR_EXPR: |
+ goto binop; |
+ |
+ case TRUTH_XOR_EXPR: |
+ code = BIT_XOR_EXPR; |
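+ /* fall through */ |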
+ case BIT_XOR_EXPR: |
+ goto binop; |
+ |
+ case LROTATE_EXPR: |
+ case RROTATE_EXPR: |
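+ /* Rotates are only supported for types that occupy the full |
+ precision of their mode; vector modes are accepted as-is. */ |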
+ gcc_assert (VECTOR_MODE_P (TYPE_MODE (type)) |
+ || (GET_MODE_PRECISION (TYPE_MODE (type)) |
+ == TYPE_PRECISION (type))); |
+ /* fall through */ |
+ |
+ case LSHIFT_EXPR: |
+ case RSHIFT_EXPR: |
+ /* If this is a fixed-point operation, then we cannot use the code |
+ below because "expand_shift" doesn't support sat/no-sat fixed-point |
+ shifts. */ |
+ if (ALL_FIXED_POINT_MODE_P (mode)) |
+ goto binop; |
+ |
+ if (! safe_from_p (subtarget, treeop1, 1)) |
+ subtarget = 0; |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ op0 = expand_expr (treeop0, subtarget, |
+ VOIDmode, EXPAND_NORMAL); |
+ temp = expand_shift (code, mode, op0, treeop1, target, |
+ unsignedp); |
+ if (code == LSHIFT_EXPR) |
+ temp = REDUCE_BIT_FIELD (temp); |
+ return temp; |
+ |
+ /* Could determine the answer when only additive constants differ. Also, |
+ the addition of one can be handled by changing the condition. */ |
+ case LT_EXPR: |
+ case LE_EXPR: |
+ case GT_EXPR: |
+ case GE_EXPR: |
+ case EQ_EXPR: |
+ case NE_EXPR: |
+ case UNORDERED_EXPR: |
+ case ORDERED_EXPR: |
+ case UNLT_EXPR: |
+ case UNLE_EXPR: |
+ case UNGT_EXPR: |
+ case UNGE_EXPR: |
+ case UNEQ_EXPR: |
+ case LTGT_EXPR: |
+ temp = do_store_flag (ops, |
+ modifier != EXPAND_STACK_PARM ? target : NULL_RTX, |
+ tmode != VOIDmode ? tmode : mode); |
+ if (temp) |
+ return temp; |
+ |
+ /* Use a compare and a jump for BLKmode comparisons, or for function |
+ type comparisons if HAVE_canonicalize_funcptr_for_compare is defined. */ |
+ |
+ if ((target == 0 |
+ || modifier == EXPAND_STACK_PARM |
+ || ! safe_from_p (target, treeop0, 1) |
+ || ! safe_from_p (target, treeop1, 1) |
+ /* Make sure we don't have a hard reg (such as the function's |
+ return value) live across basic blocks, if not optimizing. */ |
+ || (!optimize && REG_P (target) |
+ && REGNO (target) < FIRST_PSEUDO_REGISTER))) |
+ target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); |
+ |
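+ /* Materialize the flag by hand: set TARGET to 0, then skip the |
+ store of 1 below when the comparison is false. */ |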
+ emit_move_insn (target, const0_rtx); |
+ |
+ op1 = gen_label_rtx (); |
+ jumpifnot_1 (code, treeop0, treeop1, op1, -1); |
+ |
+ emit_move_insn (target, const1_rtx); |
+ |
+ emit_label (op1); |
+ return target; |
+ |
+ case TRUTH_NOT_EXPR: |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ op0 = expand_expr (treeop0, target, |
+ VOIDmode, EXPAND_NORMAL); |
+ /* The parser is careful to generate TRUTH_NOT_EXPR |
+ only with operands that are always zero or one. */ |
+ temp = expand_binop (mode, xor_optab, op0, const1_rtx, |
+ target, 1, OPTAB_LIB_WIDEN); |
+ gcc_assert (temp); |
+ return temp; |
+ |
+ case COMPLEX_EXPR: |
+ /* Get the rtx code of the operands. */ |
+ op0 = expand_normal (treeop0); |
+ op1 = expand_normal (treeop1); |
+ |
+ if (!target) |
+ target = gen_reg_rtx (TYPE_MODE (type)); |
+ |
+ /* Move the real (op0) and imaginary (op1) parts to their location. */ |
+ write_complex_part (target, op0, false); |
+ write_complex_part (target, op1, true); |
+ |
+ return target; |
+ |
+ case WIDEN_SUM_EXPR: |
+ { |
+ tree oprnd0 = treeop0; |
+ tree oprnd1 = treeop1; |
+ |
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
+ target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1, |
+ target, unsignedp); |
+ return target; |
+ } |
+ |
+ case REDUC_MAX_EXPR: |
+ case REDUC_MIN_EXPR: |
+ case REDUC_PLUS_EXPR: |
+ { |
+ op0 = expand_normal (treeop0); |
+ this_optab = optab_for_tree_code (code, type, optab_default); |
+ temp = expand_unop (mode, this_optab, op0, target, unsignedp); |
+ gcc_assert (temp); |
+ return temp; |
+ } |
+ |
+ case VEC_EXTRACT_EVEN_EXPR: |
+ case VEC_EXTRACT_ODD_EXPR: |
+ { |
+ expand_operands (treeop0, treeop1, |
+ NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
+ this_optab = optab_for_tree_code (code, type, optab_default); |
+ temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, |
+ OPTAB_WIDEN); |
+ gcc_assert (temp); |
+ return temp; |
+ } |
+ |
+ case VEC_INTERLEAVE_HIGH_EXPR: |
+ case VEC_INTERLEAVE_LOW_EXPR: |
+ { |
+ expand_operands (treeop0, treeop1, |
+ NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
+ this_optab = optab_for_tree_code (code, type, optab_default); |
+ temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, |
+ OPTAB_WIDEN); |
+ gcc_assert (temp); |
+ return temp; |
+ } |
+ |
+ case VEC_LSHIFT_EXPR: |
+ case VEC_RSHIFT_EXPR: |
+ { |
+ target = expand_vec_shift_expr (ops, target); |
+ return target; |
+ } |
+ |
+ case VEC_UNPACK_HI_EXPR: |
+ case VEC_UNPACK_LO_EXPR: |
+ { |
+ op0 = expand_normal (treeop0); |
+ this_optab = optab_for_tree_code (code, type, optab_default); |
+ temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX, |
+ target, unsignedp); |
+ gcc_assert (temp); |
+ return temp; |
+ } |
+ |
+ case VEC_UNPACK_FLOAT_HI_EXPR: |
+ case VEC_UNPACK_FLOAT_LO_EXPR: |
+ { |
+ op0 = expand_normal (treeop0); |
+ /* The signedness is determined from the input operand. */ |
+ this_optab = optab_for_tree_code (code, |
+ TREE_TYPE (treeop0), |
+ optab_default); |
+ temp = expand_widen_pattern_expr |
+ (ops, op0, NULL_RTX, NULL_RTX, |
+ target, TYPE_UNSIGNED (TREE_TYPE (treeop0))); |
+ |
+ gcc_assert (temp); |
+ return temp; |
+ } |
+ |
+ case VEC_WIDEN_MULT_HI_EXPR: |
+ case VEC_WIDEN_MULT_LO_EXPR: |
+ { |
+ tree oprnd0 = treeop0; |
+ tree oprnd1 = treeop1; |
+ |
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
+ target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX, |
+ target, unsignedp); |
+ gcc_assert (target); |
+ return target; |
+ } |
+ |
+ case VEC_PACK_TRUNC_EXPR: |
+ case VEC_PACK_SAT_EXPR: |
+ case VEC_PACK_FIX_TRUNC_EXPR: |
+ mode = TYPE_MODE (TREE_TYPE (treeop0)); |
+ goto binop; |
+ |
+ default: |
+ gcc_unreachable (); |
} |
- return ret; |
+ /* Here to do an ordinary binary operator. */ |
+ binop: |
+ expand_operands (treeop0, treeop1, |
+ subtarget, &op0, &op1, EXPAND_NORMAL); |
+ binop2: |
+ this_optab = optab_for_tree_code (code, type, optab_default); |
+ binop3: |
+ if (modifier == EXPAND_STACK_PARM) |
+ target = 0; |
+ temp = expand_binop (mode, this_optab, op0, op1, target, |
+ unsignedp, OPTAB_LIB_WIDEN); |
+ gcc_assert (temp); |
+ return REDUCE_BIT_FIELD (temp); |
} |
+#undef REDUCE_BIT_FIELD |
-static rtx |
+rtx |
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
enum expand_modifier modifier, rtx *alt_rtl) |
{ |
- rtx op0, op1, op2, temp, decl_rtl; |
+ rtx op0, op1, temp, decl_rtl; |
tree type; |
int unsignedp; |
enum machine_mode mode; |
@@ -7165,18 +8303,33 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
optab this_optab; |
rtx subtarget, original_target; |
int ignore; |
- tree context, subexp0, subexp1; |
+ tree context; |
bool reduce_bit_field; |
-#define REDUCE_BIT_FIELD(expr) (reduce_bit_field \ |
- ? reduce_to_bit_field_precision ((expr), \ |
- target, \ |
- type) \ |
- : (expr)) |
+ location_t loc = EXPR_LOCATION (exp); |
+ struct separate_ops ops; |
+ tree treeop0, treeop1, treeop2; |
type = TREE_TYPE (exp); |
mode = TYPE_MODE (type); |
unsignedp = TYPE_UNSIGNED (type); |
+ treeop0 = treeop1 = treeop2 = NULL_TREE; |
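+ /* Collect up to three operands of EXP; the switch cases below fall |
+ through deliberately. */ |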
+ if (!VL_EXP_CLASS_P (exp)) |
+ switch (TREE_CODE_LENGTH (code)) |
+ { |
+ default: |
+ case 3: treeop2 = TREE_OPERAND (exp, 2); |
+ case 2: treeop1 = TREE_OPERAND (exp, 1); |
+ case 1: treeop0 = TREE_OPERAND (exp, 0); |
+ case 0: break; |
+ } |
+ ops.code = code; |
+ ops.type = type; |
+ ops.op0 = treeop0; |
+ ops.op1 = treeop1; |
+ ops.op2 = treeop2; |
+ ops.location = loc; |
+ |
ignore = (target == const0_rtx |
|| ((CONVERT_EXPR_CODE_P (code) |
|| code == COND_EXPR || code == VIEW_CONVERT_EXPR) |
@@ -7215,22 +8368,22 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
if (TREE_CODE_CLASS (code) == tcc_unary |
|| code == COMPONENT_REF || code == INDIRECT_REF) |
- return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
+ return expand_expr (treeop0, const0_rtx, VOIDmode, |
modifier); |
else if (TREE_CODE_CLASS (code) == tcc_binary |
|| TREE_CODE_CLASS (code) == tcc_comparison |
|| code == ARRAY_REF || code == ARRAY_RANGE_REF) |
{ |
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
- expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); |
+ expand_expr (treeop0, const0_rtx, VOIDmode, modifier); |
+ expand_expr (treeop1, const0_rtx, VOIDmode, modifier); |
return const0_rtx; |
} |
else if (code == BIT_FIELD_REF) |
{ |
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
- expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); |
- expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier); |
+ expand_expr (treeop0, const0_rtx, VOIDmode, modifier); |
+ expand_expr (treeop1, const0_rtx, VOIDmode, modifier); |
+ expand_expr (treeop2, const0_rtx, VOIDmode, modifier); |
return const0_rtx; |
} |
@@ -7262,8 +8415,21 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
} |
case SSA_NAME: |
- return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, |
- NULL); |
+ /* ??? ivopts calls the expander without any preparation from |
+ out-of-ssa. So fake instructions as if this were an access to the |
+ base variable. This unnecessarily allocates a pseudo; see how we can |
+ reuse it if partition base vars have it set already. */ |
+ if (!currently_expanding_to_rtl) |
+ return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL); |
+ { |
+ gimple g = get_gimple_for_ssa_name (exp); |
+ if (g) |
+ return expand_expr_real (gimple_assign_rhs_to_tree (g), target, |
+ tmode, modifier, NULL); |
+ } |
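+ /* Otherwise use the RTL assigned to the SSA name's partition and |
+ expand it like the underlying variable's DECL_RTL. */ |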
+ decl_rtl = get_rtx_for_ssa_name (exp); |
+ exp = SSA_NAME_VAR (exp); |
+ goto expand_decl_rtl; |
case PARM_DECL: |
case VAR_DECL: |
@@ -7280,7 +8446,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
&& TREE_CODE (exp) == VAR_DECL |
&& DECL_THREAD_LOCAL_P (exp)) |
{ |
- exp = build_fold_indirect_ref (emutls_var_address (exp)); |
+ exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp)); |
return expand_expr_real_1 (exp, target, tmode, modifier, NULL); |
} |
@@ -7289,6 +8455,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
case FUNCTION_DECL: |
case RESULT_DECL: |
decl_rtl = DECL_RTL (exp); |
+ expand_decl_rtl: |
gcc_assert (decl_rtl); |
decl_rtl = copy_rtx (decl_rtl); |
@@ -7330,7 +8497,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
decl_rtl = use_anchored_address (decl_rtl); |
if (modifier != EXPAND_CONST_ADDRESS |
&& modifier != EXPAND_SUM |
- && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))) |
+ && !memory_address_addr_space_p (DECL_MODE (exp), |
+ XEXP (decl_rtl, 0), |
+ MEM_ADDR_SPACE (decl_rtl))) |
temp = replace_equiv_address (decl_rtl, |
copy_rtx (XEXP (decl_rtl, 0))); |
} |
@@ -7356,9 +8525,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
/* Get the signedness used for this variable. Ensure we get the |
same mode we got when the variable was declared. */ |
- pmode = promote_mode (type, DECL_MODE (exp), &unsignedp, |
- (TREE_CODE (exp) == RESULT_DECL |
- || TREE_CODE (exp) == PARM_DECL) ? 1 : 0); |
+ pmode = promote_decl_mode (exp, &unsignedp); |
gcc_assert (GET_MODE (decl_rtl) == pmode); |
temp = gen_lowpart_SUBREG (mode, decl_rtl); |
@@ -7389,7 +8556,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
{ |
tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1); |
if (type_for_mode) |
- tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp); |
+ tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp); |
} |
if (!tmp) |
tmp = build_constructor_from_list (type, |
@@ -7452,14 +8619,15 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
if (modifier != EXPAND_CONST_ADDRESS |
&& modifier != EXPAND_INITIALIZER |
&& modifier != EXPAND_SUM |
- && ! memory_address_p (mode, XEXP (temp, 0))) |
+ && ! memory_address_addr_space_p (mode, XEXP (temp, 0), |
+ MEM_ADDR_SPACE (temp))) |
return replace_equiv_address (temp, |
copy_rtx (XEXP (temp, 0))); |
return temp; |
case SAVE_EXPR: |
{ |
- tree val = TREE_OPERAND (exp, 0); |
+ tree val = treeop0; |
rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl); |
if (!SAVE_EXPR_RESOLVED_P (exp)) |
@@ -7470,10 +8638,12 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
with non-BLKmode values. */ |
gcc_assert (GET_MODE (ret) != BLKmode); |
- val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp)); |
+ val = build_decl (EXPR_LOCATION (exp), |
+ VAR_DECL, NULL, TREE_TYPE (exp)); |
DECL_ARTIFICIAL (val) = 1; |
DECL_IGNORED_P (val) = 1; |
- TREE_OPERAND (exp, 0) = val; |
+ treeop0 = val; |
+ TREE_OPERAND (exp, 0) = treeop0; |
SAVE_EXPR_RESOLVED_P (exp) = 1; |
if (!CONSTANT_P (ret)) |
@@ -7484,12 +8654,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
return ret; |
} |
- case GOTO_EXPR: |
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL) |
- expand_goto (TREE_OPERAND (exp, 0)); |
- else |
- expand_computed_goto (TREE_OPERAND (exp, 0)); |
- return const0_rtx; |
case CONSTRUCTOR: |
/* If we don't need the result, just ensure we evaluate any |
@@ -7511,7 +8675,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
case ALIGN_INDIRECT_REF: |
case INDIRECT_REF: |
{ |
- tree exp1 = TREE_OPERAND (exp, 0); |
+ tree exp1 = treeop0; |
+ addr_space_t as = ADDR_SPACE_GENERIC; |
+ enum machine_mode address_mode = Pmode; |
if (modifier != EXPAND_WRITE) |
{ |
@@ -7522,25 +8688,29 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
return expand_expr (t, target, tmode, modifier); |
} |
+ if (POINTER_TYPE_P (TREE_TYPE (exp1))) |
+ { |
+ as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1))); |
+ address_mode = targetm.addr_space.address_mode (as); |
+ } |
+ |
op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM); |
- op0 = memory_address (mode, op0); |
+ op0 = memory_address_addr_space (mode, op0, as); |
if (code == ALIGN_INDIRECT_REF) |
{ |
int align = TYPE_ALIGN_UNIT (type); |
- op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align)); |
- op0 = memory_address (mode, op0); |
+ op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align)); |
+ op0 = memory_address_addr_space (mode, op0, as); |
} |
temp = gen_rtx_MEM (mode, op0); |
set_mem_attributes (temp, exp, 0); |
+ set_mem_addr_space (temp, as); |
/* Resolve the misalignment now, so that we don't have to remember |
to resolve it later. Of course, this only works for reads. */ |
- /* ??? When we get around to supporting writes, we'll have to handle |
- this in store_expr directly. The vectorizer isn't generating |
- those yet, however. */ |
if (code == MISALIGNED_INDIRECT_REF) |
{ |
int icode; |
@@ -7569,21 +8739,23 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
case TARGET_MEM_REF: |
{ |
+ addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); |
struct mem_address addr; |
get_address_description (exp, &addr); |
- op0 = addr_for_mem_ref (&addr, true); |
- op0 = memory_address (mode, op0); |
+ op0 = addr_for_mem_ref (&addr, as, true); |
+ op0 = memory_address_addr_space (mode, op0, as); |
temp = gen_rtx_MEM (mode, op0); |
set_mem_attributes (temp, TMR_ORIGINAL (exp), 0); |
+ set_mem_addr_space (temp, as); |
} |
return temp; |
case ARRAY_REF: |
{ |
- tree array = TREE_OPERAND (exp, 0); |
- tree index = TREE_OPERAND (exp, 1); |
+ tree array = treeop0; |
+ tree index = treeop1; |
/* Fold an expression like: "foo"[2]. |
This is not done in fold so it won't happen inside &. |
@@ -7672,7 +8844,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
{ |
tree index1 = index; |
tree low_bound = array_ref_low_bound (exp); |
- index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1)); |
+ index1 = fold_convert_loc (loc, sizetype, |
+ treeop1); |
/* Optimize the special-case of a zero lower bound. |
@@ -7683,8 +8856,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
+INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ |
if (! integer_zerop (low_bound)) |
- index1 = size_diffop (index1, fold_convert (sizetype, |
- low_bound)); |
+ index1 = size_diffop_loc (loc, index1, |
+ fold_convert_loc (loc, sizetype, |
+ low_bound)); |
if (0 > compare_tree_int (index1, |
TREE_STRING_LENGTH (init))) |
@@ -7707,14 +8881,14 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
case COMPONENT_REF: |
/* If the operand is a CONSTRUCTOR, we can just extract the |
appropriate field if it is present. */ |
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR) |
+ if (TREE_CODE (treeop0) == CONSTRUCTOR) |
{ |
unsigned HOST_WIDE_INT idx; |
tree field, value; |
- FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)), |
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0), |
idx, field, value) |
- if (field == TREE_OPERAND (exp, 1) |
+ if (field == treeop1 |
/* We can normally use the value of the field in the |
CONSTRUCTOR. However, if this is a bitfield in |
an integral mode that we can fit in a HOST_WIDE_INT, |
@@ -7861,18 +9035,16 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
if (offset) |
{ |
+ enum machine_mode address_mode; |
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, |
EXPAND_SUM); |
gcc_assert (MEM_P (op0)); |
-#ifdef POINTERS_EXTEND_UNSIGNED |
- if (GET_MODE (offset_rtx) != Pmode) |
- offset_rtx = convert_to_mode (Pmode, offset_rtx, 0); |
-#else |
- if (GET_MODE (offset_rtx) != ptr_mode) |
- offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); |
-#endif |
+ address_mode |
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0)); |
+ if (GET_MODE (offset_rtx) != address_mode) |
+ offset_rtx = convert_to_mode (address_mode, offset_rtx, 0); |
if (GET_MODE (op0) == BLKmode |
/* A constant address in OP0 can have VOIDmode, we must |
@@ -8073,133 +9245,25 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
&& (attr = lookup_attribute ("error", |
DECL_ATTRIBUTES (fndecl))) != NULL) |
error ("%Kcall to %qs declared with attribute error: %s", |
- exp, lang_hooks.decl_printable_name (fndecl, 1), |
+ exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)), |
TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); |
if (fndecl |
&& (attr = lookup_attribute ("warning", |
DECL_ATTRIBUTES (fndecl))) != NULL) |
warning_at (tree_nonartificial_location (exp), |
0, "%Kcall to %qs declared with attribute warning: %s", |
- exp, lang_hooks.decl_printable_name (fndecl, 1), |
+ exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)), |
TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); |
/* Check for a built-in function. */ |
if (fndecl && DECL_BUILT_IN (fndecl)) |
{ |
- if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND) |
- return lang_hooks.expand_expr (exp, original_target, |
- tmode, modifier, alt_rtl); |
- else |
- return expand_builtin (exp, target, subtarget, tmode, ignore); |
+ gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND); |
+ return expand_builtin (exp, target, subtarget, tmode, ignore); |
} |
} |
return expand_call (exp, target, ignore); |
- case PAREN_EXPR: |
- CASE_CONVERT: |
- if (TREE_OPERAND (exp, 0) == error_mark_node) |
- return const0_rtx; |
- |
- if (TREE_CODE (type) == UNION_TYPE) |
- { |
- tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0)); |
- |
- /* If both input and output are BLKmode, this conversion isn't doing |
- anything except possibly changing memory attribute. */ |
- if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode) |
- { |
- rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode, |
- modifier); |
- |
- result = copy_rtx (result); |
- set_mem_attributes (result, exp, 0); |
- return result; |
- } |
- |
- if (target == 0) |
- { |
- if (TYPE_MODE (type) != BLKmode) |
- target = gen_reg_rtx (TYPE_MODE (type)); |
- else |
- target = assign_temp (type, 0, 1, 1); |
- } |
- |
- if (MEM_P (target)) |
- /* Store data into beginning of memory target. */ |
- store_expr (TREE_OPERAND (exp, 0), |
- adjust_address (target, TYPE_MODE (valtype), 0), |
- modifier == EXPAND_STACK_PARM, |
- false); |
- |
- else |
- { |
- gcc_assert (REG_P (target)); |
- |
- /* Store this field into a union of the proper type. */ |
- store_field (target, |
- MIN ((int_size_in_bytes (TREE_TYPE |
- (TREE_OPERAND (exp, 0))) |
- * BITS_PER_UNIT), |
- (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), |
- 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0), |
- type, 0, false); |
- } |
- |
- /* Return the entire union. */ |
- return target; |
- } |
- |
- if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
- { |
- op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, |
- modifier); |
- |
- /* If the signedness of the conversion differs and OP0 is |
- a promoted SUBREG, clear that indication since we now |
- have to do the proper extension. */ |
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp |
- && GET_CODE (op0) == SUBREG) |
- SUBREG_PROMOTED_VAR_P (op0) = 0; |
- |
- return REDUCE_BIT_FIELD (op0); |
- } |
- |
- op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, |
- modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier); |
- if (GET_MODE (op0) == mode) |
- ; |
- |
- /* If OP0 is a constant, just convert it into the proper mode. */ |
- else if (CONSTANT_P (op0)) |
- { |
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); |
- enum machine_mode inner_mode = TYPE_MODE (inner_type); |
- |
- if (modifier == EXPAND_INITIALIZER) |
- op0 = simplify_gen_subreg (mode, op0, inner_mode, |
- subreg_lowpart_offset (mode, |
- inner_mode)); |
- else |
- op0= convert_modes (mode, inner_mode, op0, |
- TYPE_UNSIGNED (inner_type)); |
- } |
- |
- else if (modifier == EXPAND_INITIALIZER) |
- op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0); |
- |
- else if (target == 0) |
- op0 = convert_to_mode (mode, op0, |
- TYPE_UNSIGNED (TREE_TYPE |
- (TREE_OPERAND (exp, 0)))); |
- else |
- { |
- convert_move (target, op0, |
- TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
- op0 = target; |
- } |
- |
- return REDUCE_BIT_FIELD (op0); |
- |
case VIEW_CONVERT_EXPR: |
op0 = NULL_RTX; |
@@ -8207,8 +9271,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
temporary by fetching an inner memory reference. */ |
if (mode == BLKmode |
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST |
- && TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != BLKmode |
- && handled_component_p (TREE_OPERAND (exp, 0))) |
+ && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode |
+ && handled_component_p (treeop0)) |
{ |
enum machine_mode mode1; |
HOST_WIDE_INT bitsize, bitpos; |
@@ -8216,7 +9280,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
int unsignedp; |
int volatilep = 0; |
tree tem |
- = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, &bitpos, |
+ = get_inner_reference (treeop0, &bitsize, &bitpos, |
&offset, &mode1, &unsignedp, &volatilep, |
true); |
rtx orig_op0; |
@@ -8256,7 +9320,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
if (op0 == orig_op0) |
op0 = copy_rtx (op0); |
- set_mem_attributes (op0, TREE_OPERAND (exp, 0), 0); |
+ set_mem_attributes (op0, treeop0, 0); |
if (REG_P (XEXP (op0, 0))) |
mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); |
@@ -8266,7 +9330,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
} |
if (!op0) |
- op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier); |
+ op0 = expand_expr (treeop0, |
+ NULL_RTX, VOIDmode, modifier); |
/* If the input and output modes are both the same, we are done. */ |
if (mode == GET_MODE (op0)) |
@@ -8284,8 +9349,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
/* If both modes are integral, then we can convert from one to the |
other. */ |
else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode)) |
- op0 = convert_modes (mode, GET_MODE (op0), op0, |
- TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
+ op0 = convert_modes (mode, GET_MODE (op0), op0, |
+ TYPE_UNSIGNED (TREE_TYPE (treeop0))); |
/* As a last resort, spill op0 to memory, and reload it in a |
different mode. */ |
else if (!MEM_P (op0)) |
@@ -8294,7 +9359,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
are going to be changing the mode of the MEM, don't call |
force_const_mem for constants because we don't allow pool |
constants to change mode. */ |
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); |
+ tree inner_type = TREE_TYPE (treeop0); |
gcc_assert (!TREE_ADDRESSABLE (exp)); |
@@ -8322,7 +9387,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
&& mode != BLKmode |
&& MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)) |
{ |
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); |
+ tree inner_type = TREE_TYPE (treeop0); |
HOST_WIDE_INT temp_size |
= MAX (int_size_in_bytes (inner_type), |
(HOST_WIDE_INT) GET_MODE_SIZE (mode)); |
@@ -8349,764 +9414,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
return op0; |
- case POINTER_PLUS_EXPR: |
- /* Even though the sizetype mode and the pointer's mode can be different |
- expand is able to handle this correctly and get the correct result out |
- of the PLUS_EXPR code. */ |
- /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR |
- if sizetype precision is smaller than pointer precision. */ |
- if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type)) |
- exp = build2 (PLUS_EXPR, type, |
- TREE_OPERAND (exp, 0), |
- fold_convert (type, |
- fold_convert (ssizetype, |
- TREE_OPERAND (exp, 1)))); |
- case PLUS_EXPR: |
- |
- /* Check if this is a case for multiplication and addition. */ |
- if ((TREE_CODE (type) == INTEGER_TYPE |
- || TREE_CODE (type) == FIXED_POINT_TYPE) |
- && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR) |
- { |
- tree subsubexp0, subsubexp1; |
- enum tree_code code0, code1, this_code; |
- |
- subexp0 = TREE_OPERAND (exp, 0); |
- subsubexp0 = TREE_OPERAND (subexp0, 0); |
- subsubexp1 = TREE_OPERAND (subexp0, 1); |
- code0 = TREE_CODE (subsubexp0); |
- code1 = TREE_CODE (subsubexp1); |
- this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR |
- : FIXED_CONVERT_EXPR; |
- if (code0 == this_code && code1 == this_code |
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) |
- < TYPE_PRECISION (TREE_TYPE (subsubexp0))) |
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) |
- == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))) |
- && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) |
- == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))) |
- { |
- tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0)); |
- enum machine_mode innermode = TYPE_MODE (op0type); |
- bool zextend_p = TYPE_UNSIGNED (op0type); |
- bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0)); |
- if (sat_p == 0) |
- this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab; |
- else |
- this_optab = zextend_p ? usmadd_widen_optab |
- : ssmadd_widen_optab; |
- if (mode == GET_MODE_2XWIDER_MODE (innermode) |
- && (optab_handler (this_optab, mode)->insn_code |
- != CODE_FOR_nothing)) |
- { |
- expand_operands (TREE_OPERAND (subsubexp0, 0), |
- TREE_OPERAND (subsubexp1, 0), |
- NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
- op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget, |
- VOIDmode, EXPAND_NORMAL); |
- temp = expand_ternary_op (mode, this_optab, op0, op1, op2, |
- target, unsignedp); |
- gcc_assert (temp); |
- return REDUCE_BIT_FIELD (temp); |
- } |
- } |
- } |
- |
- /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and |
- something else, make sure we add the register to the constant and |
- then to the other thing. This case can occur during strength |
- reduction and doing it this way will produce better code if the |
- frame pointer or argument pointer is eliminated. |
- |
- fold-const.c will ensure that the constant is always in the inner |
- PLUS_EXPR, so the only case we need to do anything about is if |
- sp, ap, or fp is our second argument, in which case we must swap |
- the innermost first argument and our second argument. */ |
- |
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR |
- && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST |
- && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL |
- && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx |
- || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx |
- || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx)) |
- { |
- tree t = TREE_OPERAND (exp, 1); |
- |
- TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); |
- TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t; |
- } |
- |
- /* If the result is to be ptr_mode and we are adding an integer to |
- something, we might be forming a constant. So try to use |
- plus_constant. If it produces a sum and we can't accept it, |
- use force_operand. This allows P = &ARR[const] to generate |
- efficient code on machines where a SYMBOL_REF is not a valid |
- address. |
- |
- If this is an EXPAND_SUM call, always return the sum. */ |
- if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER |
- || (mode == ptr_mode && (unsignedp || ! flag_trapv))) |
- { |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST |
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
- && TREE_CONSTANT (TREE_OPERAND (exp, 1))) |
- { |
- rtx constant_part; |
- |
- op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, |
- EXPAND_SUM); |
- /* Use immed_double_const to ensure that the constant is |
- truncated according to the mode of OP1, then sign extended |
- to a HOST_WIDE_INT. Using the constant directly can result |
- in non-canonical RTL in a 64x32 cross compile. */ |
- constant_part |
- = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)), |
- (HOST_WIDE_INT) 0, |
- TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))); |
- op1 = plus_constant (op1, INTVAL (constant_part)); |
- if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
- op1 = force_operand (op1, target); |
- return REDUCE_BIT_FIELD (op1); |
- } |
- |
- else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST |
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
- && TREE_CONSTANT (TREE_OPERAND (exp, 0))) |
- { |
- rtx constant_part; |
- |
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, |
- (modifier == EXPAND_INITIALIZER |
- ? EXPAND_INITIALIZER : EXPAND_SUM)); |
- if (! CONSTANT_P (op0)) |
- { |
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, |
- VOIDmode, modifier); |
- /* Return a PLUS if modifier says it's OK. */ |
- if (modifier == EXPAND_SUM |
- || modifier == EXPAND_INITIALIZER) |
- return simplify_gen_binary (PLUS, mode, op0, op1); |
- goto binop2; |
- } |
- /* Use immed_double_const to ensure that the constant is |
- truncated according to the mode of OP1, then sign extended |
- to a HOST_WIDE_INT. Using the constant directly can result |
- in non-canonical RTL in a 64x32 cross compile. */ |
- constant_part |
- = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)), |
- (HOST_WIDE_INT) 0, |
- TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
- op0 = plus_constant (op0, INTVAL (constant_part)); |
- if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
- op0 = force_operand (op0, target); |
- return REDUCE_BIT_FIELD (op0); |
- } |
- } |
- |
- /* No sense saving up arithmetic to be done |
- if it's all in the wrong mode to form part of an address. |
- And force_operand won't know whether to sign-extend or |
- zero-extend. */ |
- if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
- || mode != ptr_mode) |
- { |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- subtarget, &op0, &op1, 0); |
- if (op0 == const0_rtx) |
- return op1; |
- if (op1 == const0_rtx) |
- return op0; |
- goto binop2; |
- } |
- |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- subtarget, &op0, &op1, modifier); |
- return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); |
- |
- case MINUS_EXPR: |
- /* Check if this is a case for multiplication and subtraction. */ |
- if ((TREE_CODE (type) == INTEGER_TYPE |
- || TREE_CODE (type) == FIXED_POINT_TYPE) |
- && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR) |
- { |
- tree subsubexp0, subsubexp1; |
- enum tree_code code0, code1, this_code; |
- |
- subexp1 = TREE_OPERAND (exp, 1); |
- subsubexp0 = TREE_OPERAND (subexp1, 0); |
- subsubexp1 = TREE_OPERAND (subexp1, 1); |
- code0 = TREE_CODE (subsubexp0); |
- code1 = TREE_CODE (subsubexp1); |
- this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR |
- : FIXED_CONVERT_EXPR; |
- if (code0 == this_code && code1 == this_code |
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) |
- < TYPE_PRECISION (TREE_TYPE (subsubexp0))) |
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) |
- == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))) |
- && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) |
- == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))) |
- { |
- tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0)); |
- enum machine_mode innermode = TYPE_MODE (op0type); |
- bool zextend_p = TYPE_UNSIGNED (op0type); |
- bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0)); |
- if (sat_p == 0) |
- this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab; |
- else |
- this_optab = zextend_p ? usmsub_widen_optab |
- : ssmsub_widen_optab; |
- if (mode == GET_MODE_2XWIDER_MODE (innermode) |
- && (optab_handler (this_optab, mode)->insn_code |
- != CODE_FOR_nothing)) |
- { |
- expand_operands (TREE_OPERAND (subsubexp0, 0), |
- TREE_OPERAND (subsubexp1, 0), |
- NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
- op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget, |
- VOIDmode, EXPAND_NORMAL); |
- temp = expand_ternary_op (mode, this_optab, op0, op1, op2, |
- target, unsignedp); |
- gcc_assert (temp); |
- return REDUCE_BIT_FIELD (temp); |
- } |
- } |
- } |
- |
- /* For initializers, we are allowed to return a MINUS of two |
- symbolic constants. Here we handle all cases when both operands |
- are constant. */ |
- /* Handle difference of two symbolic constants, |
- for the sake of an initializer. */ |
- if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) |
- && really_constant_p (TREE_OPERAND (exp, 0)) |
- && really_constant_p (TREE_OPERAND (exp, 1))) |
- { |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- NULL_RTX, &op0, &op1, modifier); |
- |
- /* If the last operand is a CONST_INT, use plus_constant of |
- the negated constant. Else make the MINUS. */ |
- if (GET_CODE (op1) == CONST_INT) |
- return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1))); |
- else |
- return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1)); |
- } |
- |
- /* No sense saving up arithmetic to be done |
- if it's all in the wrong mode to form part of an address. |
- And force_operand won't know whether to sign-extend or |
- zero-extend. */ |
- if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
- || mode != ptr_mode) |
- goto binop; |
- |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- subtarget, &op0, &op1, modifier); |
- |
- /* Convert A - const to A + (-const). */ |
- if (GET_CODE (op1) == CONST_INT) |
- { |
- op1 = negate_rtx (mode, op1); |
- return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); |
- } |
- |
- goto binop2; |
- |
- case MULT_EXPR: |
- /* If this is a fixed-point operation, then we cannot use the code |
- below because "expand_mult" doesn't support sat/no-sat fixed-point |
- multiplications. */ |
- if (ALL_FIXED_POINT_MODE_P (mode)) |
- goto binop; |
- |
- /* If first operand is constant, swap them. |
- Thus the following special case checks need only |
- check the second operand. */ |
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) |
- { |
- tree t1 = TREE_OPERAND (exp, 0); |
- TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1); |
- TREE_OPERAND (exp, 1) = t1; |
- } |
- |
- /* Attempt to return something suitable for generating an |
- indexed address, for machines that support that. */ |
- |
- if (modifier == EXPAND_SUM && mode == ptr_mode |
- && host_integerp (TREE_OPERAND (exp, 1), 0)) |
- { |
- tree exp1 = TREE_OPERAND (exp, 1); |
- |
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, |
- EXPAND_SUM); |
- |
- if (!REG_P (op0)) |
- op0 = force_operand (op0, NULL_RTX); |
- if (!REG_P (op0)) |
- op0 = copy_to_mode_reg (mode, op0); |
- |
- return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0, |
- gen_int_mode (tree_low_cst (exp1, 0), |
- TYPE_MODE (TREE_TYPE (exp1))))); |
- } |
- |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- |
- /* Check for multiplying things that have been extended |
- from a narrower type. If this machine supports multiplying |
- in that narrower type with a result in the desired type, |
- do it that way, and avoid the explicit type-conversion. */ |
- |
- subexp0 = TREE_OPERAND (exp, 0); |
- subexp1 = TREE_OPERAND (exp, 1); |
- /* First, check if we have a multiplication of one signed and one |
- unsigned operand. */ |
- if (TREE_CODE (subexp0) == NOP_EXPR |
- && TREE_CODE (subexp1) == NOP_EXPR |
- && TREE_CODE (type) == INTEGER_TYPE |
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0))) |
- < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0))) |
- == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0)))) |
- && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))) |
- != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0))))) |
- { |
- enum machine_mode innermode |
- = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0))); |
- this_optab = usmul_widen_optab; |
- if (mode == GET_MODE_WIDER_MODE (innermode)) |
- { |
- if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing) |
- { |
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))) |
- expand_operands (TREE_OPERAND (subexp0, 0), |
- TREE_OPERAND (subexp1, 0), |
- NULL_RTX, &op0, &op1, 0); |
- else |
- expand_operands (TREE_OPERAND (subexp0, 0), |
- TREE_OPERAND (subexp1, 0), |
- NULL_RTX, &op1, &op0, 0); |
- |
- goto binop3; |
- } |
- } |
- } |
- /* Check for a multiplication with matching signedness. */ |
- else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR |
- && TREE_CODE (type) == INTEGER_TYPE |
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
- < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
- && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST |
- && int_fits_type_p (TREE_OPERAND (exp, 1), |
- TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
- /* Don't use a widening multiply if a shift will do. */ |
- && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))) |
- > HOST_BITS_PER_WIDE_INT) |
- || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0)) |
- || |
- (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR |
- && (TYPE_PRECISION (TREE_TYPE |
- (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) |
- == TYPE_PRECISION (TREE_TYPE |
- (TREE_OPERAND |
- (TREE_OPERAND (exp, 0), 0)))) |
- /* If both operands are extended, they must either both |
- be zero-extended or both be sign-extended. */ |
- && (TYPE_UNSIGNED (TREE_TYPE |
- (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) |
- == TYPE_UNSIGNED (TREE_TYPE |
- (TREE_OPERAND |
- (TREE_OPERAND (exp, 0), 0))))))) |
- { |
- tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)); |
- enum machine_mode innermode = TYPE_MODE (op0type); |
- bool zextend_p = TYPE_UNSIGNED (op0type); |
- optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab; |
- this_optab = zextend_p ? umul_widen_optab : smul_widen_optab; |
- |
- if (mode == GET_MODE_2XWIDER_MODE (innermode)) |
- { |
- if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing) |
- { |
- if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) |
- expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), |
- TREE_OPERAND (exp, 1), |
- NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
- else |
- expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), |
- TREE_OPERAND (TREE_OPERAND (exp, 1), 0), |
- NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
- goto binop3; |
- } |
- else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing |
- && innermode == word_mode) |
- { |
- rtx htem, hipart; |
- op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)); |
- if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) |
- op1 = convert_modes (innermode, mode, |
- expand_normal (TREE_OPERAND (exp, 1)), |
- unsignedp); |
- else |
- op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)); |
- temp = expand_binop (mode, other_optab, op0, op1, target, |
- unsignedp, OPTAB_LIB_WIDEN); |
- hipart = gen_highpart (innermode, temp); |
- htem = expand_mult_highpart_adjust (innermode, hipart, |
- op0, op1, hipart, |
- zextend_p); |
- if (htem != hipart) |
- emit_move_insn (hipart, htem); |
- return REDUCE_BIT_FIELD (temp); |
- } |
- } |
- } |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- subtarget, &op0, &op1, 0); |
- return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); |
- |
- case TRUNC_DIV_EXPR: |
- case FLOOR_DIV_EXPR: |
- case CEIL_DIV_EXPR: |
- case ROUND_DIV_EXPR: |
- case EXACT_DIV_EXPR: |
- /* If this is a fixed-point operation, then we cannot use the code |
- below because "expand_divmod" doesn't support sat/no-sat fixed-point |
- divisions. */ |
- if (ALL_FIXED_POINT_MODE_P (mode)) |
- goto binop; |
- |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- /* Possible optimization: compute the dividend with EXPAND_SUM |
- then if the divisor is constant can optimize the case |
- where some terms of the dividend have coeffs divisible by it. */ |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- subtarget, &op0, &op1, 0); |
- return expand_divmod (0, code, mode, op0, op1, target, unsignedp); |
- |
- case RDIV_EXPR: |
- goto binop; |
- |
- case TRUNC_MOD_EXPR: |
- case FLOOR_MOD_EXPR: |
- case CEIL_MOD_EXPR: |
- case ROUND_MOD_EXPR: |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- subtarget, &op0, &op1, 0); |
- return expand_divmod (1, code, mode, op0, op1, target, unsignedp); |
- |
- case FIXED_CONVERT_EXPR: |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
- if (target == 0 || modifier == EXPAND_STACK_PARM) |
- target = gen_reg_rtx (mode); |
- |
- if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE |
- && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
- || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type))) |
- expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type)); |
- else |
- expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type)); |
- return target; |
- |
- case FIX_TRUNC_EXPR: |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
- if (target == 0 || modifier == EXPAND_STACK_PARM) |
- target = gen_reg_rtx (mode); |
- expand_fix (target, op0, unsignedp); |
- return target; |
- |
- case FLOAT_EXPR: |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
- if (target == 0 || modifier == EXPAND_STACK_PARM) |
- target = gen_reg_rtx (mode); |
- /* expand_float can't figure out what to do if FROM has VOIDmode. |
- So give it the correct mode. With -O, cse will optimize this. */ |
- if (GET_MODE (op0) == VOIDmode) |
- op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), |
- op0); |
- expand_float (target, op0, |
- TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
- return target; |
- |
- case NEGATE_EXPR: |
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, |
- VOIDmode, EXPAND_NORMAL); |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- temp = expand_unop (mode, |
- optab_for_tree_code (NEGATE_EXPR, type, |
- optab_default), |
- op0, target, 0); |
- gcc_assert (temp); |
- return REDUCE_BIT_FIELD (temp); |
- |
- case ABS_EXPR: |
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, |
- VOIDmode, EXPAND_NORMAL); |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- |
- /* ABS_EXPR is not valid for complex arguments. */ |
- gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT |
- && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT); |
- |
- /* Unsigned abs is simply the operand. Testing here means we don't |
- risk generating incorrect code below. */ |
- if (TYPE_UNSIGNED (type)) |
- return op0; |
- |
- return expand_abs (mode, op0, target, unsignedp, |
- safe_from_p (target, TREE_OPERAND (exp, 0), 1)); |
- |
- case MAX_EXPR: |
- case MIN_EXPR: |
- target = original_target; |
- if (target == 0 |
- || modifier == EXPAND_STACK_PARM |
- || (MEM_P (target) && MEM_VOLATILE_P (target)) |
- || GET_MODE (target) != mode |
- || (REG_P (target) |
- && REGNO (target) < FIRST_PSEUDO_REGISTER)) |
- target = gen_reg_rtx (mode); |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- target, &op0, &op1, 0); |
- |
- /* First try to do it with a special MIN or MAX instruction. |
- If that does not win, use a conditional jump to select the proper |
- value. */ |
- this_optab = optab_for_tree_code (code, type, optab_default); |
- temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, |
- OPTAB_WIDEN); |
- if (temp != 0) |
- return temp; |
- |
- /* At this point, a MEM target is no longer useful; we will get better |
- code without it. */ |
- |
- if (! REG_P (target)) |
- target = gen_reg_rtx (mode); |
- |
- /* If op1 was placed in target, swap op0 and op1. */ |
- if (target != op0 && target == op1) |
- { |
- temp = op0; |
- op0 = op1; |
- op1 = temp; |
- } |
- |
- /* We generate better code and avoid problems with op1 mentioning |
- target by forcing op1 into a pseudo if it isn't a constant. */ |
- if (! CONSTANT_P (op1)) |
- op1 = force_reg (mode, op1); |
- |
- { |
- enum rtx_code comparison_code; |
- rtx cmpop1 = op1; |
- |
- if (code == MAX_EXPR) |
- comparison_code = unsignedp ? GEU : GE; |
- else |
- comparison_code = unsignedp ? LEU : LE; |
- |
- /* Canonicalize to comparisons against 0. */ |
- if (op1 == const1_rtx) |
- { |
- /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1) |
- or (a != 0 ? a : 1) for unsigned. |
- For MIN we are safe converting (a <= 1 ? a : 1) |
- into (a <= 0 ? a : 1) */ |
- cmpop1 = const0_rtx; |
- if (code == MAX_EXPR) |
- comparison_code = unsignedp ? NE : GT; |
- } |
- if (op1 == constm1_rtx && !unsignedp) |
- { |
- /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1) |
- and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */ |
- cmpop1 = const0_rtx; |
- if (code == MIN_EXPR) |
- comparison_code = LT; |
- } |
-#ifdef HAVE_conditional_move |
- /* Use a conditional move if possible. */ |
- if (can_conditionally_move_p (mode)) |
- { |
- rtx insn; |
- |
- /* ??? Same problem as in expmed.c: emit_conditional_move |
- forces a stack adjustment via compare_from_rtx, and we |
- lose the stack adjustment if the sequence we are about |
- to create is discarded. */ |
- do_pending_stack_adjust (); |
- |
- start_sequence (); |
- |
- /* Try to emit the conditional move. */ |
- insn = emit_conditional_move (target, comparison_code, |
- op0, cmpop1, mode, |
- op0, op1, mode, |
- unsignedp); |
- |
- /* If we could do the conditional move, emit the sequence, |
- and return. */ |
- if (insn) |
- { |
- rtx seq = get_insns (); |
- end_sequence (); |
- emit_insn (seq); |
- return target; |
- } |
- |
- /* Otherwise discard the sequence and fall back to code with |
- branches. */ |
- end_sequence (); |
- } |
-#endif |
- if (target != op0) |
- emit_move_insn (target, op0); |
- |
- temp = gen_label_rtx (); |
- do_compare_rtx_and_jump (target, cmpop1, comparison_code, |
- unsignedp, mode, NULL_RTX, NULL_RTX, temp); |
- } |
- emit_move_insn (target, op1); |
- emit_label (temp); |
- return target; |
- |
- case BIT_NOT_EXPR: |
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, |
- VOIDmode, EXPAND_NORMAL); |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); |
- gcc_assert (temp); |
- return temp; |
- |
- /* ??? Can optimize bitwise operations with one arg constant. |
- Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b) |
- and (a bitwise1 b) bitwise2 b (etc) |
- but that is probably not worth while. */ |
- |
- /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two |
- boolean values when we want in all cases to compute both of them. In |
- general it is fastest to do TRUTH_AND_EXPR by computing both operands |
- as actual zero-or-1 values and then bitwise anding. In cases where |
- there cannot be any side effects, better code would be made by |
- treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is |
- how to recognize those cases. */ |
- |
- case TRUTH_AND_EXPR: |
- code = BIT_AND_EXPR; |
- case BIT_AND_EXPR: |
- goto binop; |
- |
- case TRUTH_OR_EXPR: |
- code = BIT_IOR_EXPR; |
- case BIT_IOR_EXPR: |
- goto binop; |
- |
- case TRUTH_XOR_EXPR: |
- code = BIT_XOR_EXPR; |
- case BIT_XOR_EXPR: |
- goto binop; |
- |
- case LROTATE_EXPR: |
- case RROTATE_EXPR: |
- gcc_assert (VECTOR_MODE_P (TYPE_MODE (type)) |
- || (GET_MODE_PRECISION (TYPE_MODE (type)) |
- == TYPE_PRECISION (type))); |
- /* fall through */ |
- |
- case LSHIFT_EXPR: |
- case RSHIFT_EXPR: |
- /* If this is a fixed-point operation, then we cannot use the code |
- below because "expand_shift" doesn't support sat/no-sat fixed-point |
- shifts. */ |
- if (ALL_FIXED_POINT_MODE_P (mode)) |
- goto binop; |
- |
- if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
- subtarget = 0; |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, |
- VOIDmode, EXPAND_NORMAL); |
- temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target, |
- unsignedp); |
- if (code == LSHIFT_EXPR) |
- temp = REDUCE_BIT_FIELD (temp); |
- return temp; |
- |
- /* Could determine the answer when only additive constants differ. Also, |
- the addition of one can be handled by changing the condition. */ |
- case LT_EXPR: |
- case LE_EXPR: |
- case GT_EXPR: |
- case GE_EXPR: |
- case EQ_EXPR: |
- case NE_EXPR: |
- case UNORDERED_EXPR: |
- case ORDERED_EXPR: |
- case UNLT_EXPR: |
- case UNLE_EXPR: |
- case UNGT_EXPR: |
- case UNGE_EXPR: |
- case UNEQ_EXPR: |
- case LTGT_EXPR: |
- temp = do_store_flag (exp, |
- modifier != EXPAND_STACK_PARM ? target : NULL_RTX, |
- tmode != VOIDmode ? tmode : mode, 0); |
- if (temp != 0) |
- return temp; |
- |
- /* For foo != 0, load foo, and if it is nonzero load 1 instead. */ |
- if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1)) |
- && original_target |
- && REG_P (original_target) |
- && (GET_MODE (original_target) |
- == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) |
- { |
- temp = expand_expr (TREE_OPERAND (exp, 0), original_target, |
- VOIDmode, EXPAND_NORMAL); |
- |
- /* If temp is constant, we can just compute the result. */ |
- if (GET_CODE (temp) == CONST_INT) |
- { |
- if (INTVAL (temp) != 0) |
- emit_move_insn (target, const1_rtx); |
- else |
- emit_move_insn (target, const0_rtx); |
- |
- return target; |
- } |
- |
- if (temp != original_target) |
- { |
- enum machine_mode mode1 = GET_MODE (temp); |
- if (mode1 == VOIDmode) |
- mode1 = tmode != VOIDmode ? tmode : mode; |
- |
- temp = copy_to_mode_reg (mode1, temp); |
- } |
- |
- op1 = gen_label_rtx (); |
- emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX, |
- GET_MODE (temp), unsignedp, op1); |
- emit_move_insn (temp, const1_rtx); |
- emit_label (op1); |
- return temp; |
- } |
+ /* Use a compare and a jump for BLKmode comparisons, or for function |
+    type comparisons when HAVE_canonicalize_funcptr_for_compare is |
+    defined.  */ |
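+ /* (Illustrative note, not part of this change: targets that define |
+    HAVE_canonicalize_funcptr_for_compare -- e.g. ones where a function |
+    pointer may reference a descriptor rather than code -- must |
+    canonicalize both pointers before a test such as "f == g" on |
+    "void (*f) (void), (*g) (void)" can be expanded as a plain |
+    comparison.)  */ |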
- /* If no set-flag instruction, must generate a conditional store |
- into a temporary variable. Drop through and handle this |
- like && and ||. */ |
/* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they |
       are occasionally created by folding during expansion.  */ |
case TRUTH_ANDIF_EXPR: |
@@ -9114,7 +9424,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
if (! ignore |
&& (target == 0 |
|| modifier == EXPAND_STACK_PARM |
- || ! safe_from_p (target, exp, 1) |
+ || ! safe_from_p (target, treeop0, 1) |
+ || ! safe_from_p (target, treeop1, 1) |
/* Make sure we don't have a hard reg (such as function's return |
value) live across basic blocks, if not optimizing. */ |
|| (!optimize && REG_P (target) |
@@ -9125,7 +9436,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
emit_move_insn (target, const0_rtx); |
op1 = gen_label_rtx (); |
- jumpifnot (exp, op1); |
+ jumpifnot_1 (code, treeop0, treeop1, op1, -1); |
if (target) |
emit_move_insn (target, const1_rtx); |
@@ -9133,18 +9444,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
emit_label (op1); |
return ignore ? const0_rtx : target; |
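+      /* (Illustrative: for "t = a && b" the sequence above amounts to |
+	     t = 0; if (!(a && b)) goto L; t = 1; L:; |
+	 with the short-circuit evaluation done by the jump expanders.)  */ |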
- case TRUTH_NOT_EXPR: |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- op0 = expand_expr (TREE_OPERAND (exp, 0), target, |
- VOIDmode, EXPAND_NORMAL); |
- /* The parser is careful to generate TRUTH_NOT_EXPR |
- only with operands that are always zero or one. */ |
- temp = expand_binop (mode, xor_optab, op0, const1_rtx, |
- target, 1, OPTAB_LIB_WIDEN); |
- gcc_assert (temp); |
- return temp; |
- |
case STATEMENT_LIST: |
{ |
tree_stmt_iterator iter; |
@@ -9160,7 +9459,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
/* A COND_EXPR with its type being VOID_TYPE represents a |
conditional jump and is handled in |
expand_gimple_cond_expr. */ |
- gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp))); |
+ gcc_assert (!VOID_TYPE_P (type)); |
/* Note that COND_EXPRs whose type is a structure or union |
are required to be constructed to contain assignments of |
@@ -9169,8 +9468,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
gcc_assert (!TREE_ADDRESSABLE (type) |
&& !ignore |
- && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node |
- && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node); |
+ && TREE_TYPE (treeop1) != void_type_node |
+ && TREE_TYPE (treeop2) != void_type_node); |
/* If we are not to produce a result, we have no target. Otherwise, |
if a target was specified use it; it will not be used as an |
@@ -9179,7 +9478,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
if (modifier != EXPAND_STACK_PARM |
&& original_target |
- && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1) |
+ && safe_from_p (original_target, treeop0, 1) |
&& GET_MODE (original_target) == mode |
#ifdef HAVE_conditional_move |
&& (! can_conditionally_move_p (mode) |
@@ -9194,15 +9493,15 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
NO_DEFER_POP; |
op0 = gen_label_rtx (); |
op1 = gen_label_rtx (); |
- jumpifnot (TREE_OPERAND (exp, 0), op0); |
- store_expr (TREE_OPERAND (exp, 1), temp, |
+ jumpifnot (treeop0, op0, -1); |
+ store_expr (treeop1, temp, |
modifier == EXPAND_STACK_PARM, |
false); |
emit_jump_insn (gen_jump (op1)); |
emit_barrier (); |
emit_label (op0); |
- store_expr (TREE_OPERAND (exp, 2), temp, |
+ store_expr (treeop2, temp, |
modifier == EXPAND_STACK_PARM, |
false); |
@@ -9211,13 +9510,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
return temp; |
case VEC_COND_EXPR: |
- target = expand_vec_cond_expr (exp, target); |
- return target; |
+ target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target); |
+ return target; |
case MODIFY_EXPR: |
{ |
- tree lhs = TREE_OPERAND (exp, 0); |
- tree rhs = TREE_OPERAND (exp, 1); |
+ tree lhs = treeop0; |
+ tree rhs = treeop1; |
gcc_assert (ignore); |
/* Check for |= or &= of a bitfield of size one into another bitfield |
@@ -9240,7 +9539,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
int value = TREE_CODE (rhs) == BIT_IOR_EXPR; |
do_jump (TREE_OPERAND (rhs, 1), |
value ? label : 0, |
- value ? 0 : label); |
+ value ? 0 : label, -1); |
expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value), |
MOVE_NONTEMPORAL (exp)); |
do_pending_stack_adjust (); |
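+	  /* (Illustrative: for one-bit bitfields "b1 |= b2" this becomes |
+	     a jump on b2 around a store of the constant 1 into b1, |
+	     avoiding a read-modify-write of b1.)  */ |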
@@ -9252,41 +9551,24 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
return const0_rtx; |
} |
- case RETURN_EXPR: |
- if (!TREE_OPERAND (exp, 0)) |
- expand_null_return (); |
- else |
- expand_return (TREE_OPERAND (exp, 0)); |
- return const0_rtx; |
- |
case ADDR_EXPR: |
return expand_expr_addr_expr (exp, target, tmode, modifier); |
- case COMPLEX_EXPR: |
- /* Get the rtx code of the operands. */ |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
- op1 = expand_normal (TREE_OPERAND (exp, 1)); |
- |
- if (!target) |
- target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); |
- |
- /* Move the real (op0) and imaginary (op1) parts to their location. */ |
- write_complex_part (target, op0, false); |
- write_complex_part (target, op1, true); |
- |
- return target; |
- |
case REALPART_EXPR: |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
+ op0 = expand_normal (treeop0); |
return read_complex_part (op0, false); |
case IMAGPART_EXPR: |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
+ op0 = expand_normal (treeop0); |
return read_complex_part (op0, true); |
- case RESX_EXPR: |
- expand_resx_expr (exp); |
- return const0_rtx; |
+ case RETURN_EXPR: |
+ case LABEL_EXPR: |
+ case GOTO_EXPR: |
+ case SWITCH_EXPR: |
+ case ASM_EXPR: |
+ /* Expanded in cfgexpand.c. */ |
+ gcc_unreachable (); |
case TRY_CATCH_EXPR: |
case CATCH_EXPR: |
@@ -9313,47 +9595,22 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
/* Lowered by gimplify.c. */ |
gcc_unreachable (); |
- case CHANGE_DYNAMIC_TYPE_EXPR: |
- /* This is ignored at the RTL level. The tree level set |
- DECL_POINTER_ALIAS_SET of any variable to be 0, which is |
- overkill for the RTL layer but is all that we can |
- represent. */ |
- return const0_rtx; |
- |
- case EXC_PTR_EXPR: |
- return get_exception_pointer (); |
- |
- case FILTER_EXPR: |
- return get_exception_filter (); |
- |
case FDESC_EXPR: |
      /* Function descriptors are not valid except as initialization |
	 constants, and should not be expanded.  */ |
gcc_unreachable (); |
- case SWITCH_EXPR: |
- expand_case (exp); |
- return const0_rtx; |
- |
- case LABEL_EXPR: |
- expand_label (TREE_OPERAND (exp, 0)); |
- return const0_rtx; |
- |
- case ASM_EXPR: |
- expand_asm_expr (exp); |
- return const0_rtx; |
- |
case WITH_SIZE_EXPR: |
/* WITH_SIZE_EXPR expands to its first argument. The caller should |
have pulled out the size to use in whatever context it needed. */ |
- return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode, |
+ return expand_expr_real (treeop0, original_target, tmode, |
modifier, alt_rtl); |
case REALIGN_LOAD_EXPR: |
{ |
- tree oprnd0 = TREE_OPERAND (exp, 0); |
- tree oprnd1 = TREE_OPERAND (exp, 1); |
- tree oprnd2 = TREE_OPERAND (exp, 2); |
+ tree oprnd0 = treeop0; |
+ tree oprnd1 = treeop1; |
+ tree oprnd2 = treeop2; |
rtx op2; |
this_optab = optab_for_tree_code (code, type, optab_default); |
@@ -9367,137 +9624,43 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
case DOT_PROD_EXPR: |
{ |
- tree oprnd0 = TREE_OPERAND (exp, 0); |
- tree oprnd1 = TREE_OPERAND (exp, 1); |
- tree oprnd2 = TREE_OPERAND (exp, 2); |
+ tree oprnd0 = treeop0; |
+ tree oprnd1 = treeop1; |
+ tree oprnd2 = treeop2; |
rtx op2; |
expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
op2 = expand_normal (oprnd2); |
- target = expand_widen_pattern_expr (exp, op0, op1, op2, |
+ target = expand_widen_pattern_expr (&ops, op0, op1, op2, |
target, unsignedp); |
return target; |
} |
- case WIDEN_SUM_EXPR: |
- { |
- tree oprnd0 = TREE_OPERAND (exp, 0); |
- tree oprnd1 = TREE_OPERAND (exp, 1); |
- |
- expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0); |
- target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1, |
- target, unsignedp); |
- return target; |
- } |
- |
- case REDUC_MAX_EXPR: |
- case REDUC_MIN_EXPR: |
- case REDUC_PLUS_EXPR: |
- { |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
- this_optab = optab_for_tree_code (code, type, optab_default); |
- temp = expand_unop (mode, this_optab, op0, target, unsignedp); |
- gcc_assert (temp); |
- return temp; |
- } |
- |
- case VEC_EXTRACT_EVEN_EXPR: |
- case VEC_EXTRACT_ODD_EXPR: |
- { |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- NULL_RTX, &op0, &op1, 0); |
- this_optab = optab_for_tree_code (code, type, optab_default); |
- temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, |
- OPTAB_WIDEN); |
- gcc_assert (temp); |
- return temp; |
- } |
- |
- case VEC_INTERLEAVE_HIGH_EXPR: |
- case VEC_INTERLEAVE_LOW_EXPR: |
- { |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- NULL_RTX, &op0, &op1, 0); |
- this_optab = optab_for_tree_code (code, type, optab_default); |
- temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, |
- OPTAB_WIDEN); |
- gcc_assert (temp); |
- return temp; |
- } |
- |
- case VEC_LSHIFT_EXPR: |
- case VEC_RSHIFT_EXPR: |
- { |
- target = expand_vec_shift_expr (exp, target); |
- return target; |
- } |
- |
- case VEC_UNPACK_HI_EXPR: |
- case VEC_UNPACK_LO_EXPR: |
- { |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
- this_optab = optab_for_tree_code (code, type, optab_default); |
- temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX, |
- target, unsignedp); |
- gcc_assert (temp); |
- return temp; |
- } |
- |
- case VEC_UNPACK_FLOAT_HI_EXPR: |
- case VEC_UNPACK_FLOAT_LO_EXPR: |
+ case COMPOUND_LITERAL_EXPR: |
{ |
- op0 = expand_normal (TREE_OPERAND (exp, 0)); |
- /* The signedness is determined from input operand. */ |
- this_optab = optab_for_tree_code (code, |
- TREE_TYPE (TREE_OPERAND (exp, 0)), |
- optab_default); |
- temp = expand_widen_pattern_expr |
- (exp, op0, NULL_RTX, NULL_RTX, |
- target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
- |
- gcc_assert (temp); |
- return temp; |
- } |
+ /* Initialize the anonymous variable declared in the compound |
+ literal, then return the variable. */ |
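+	/* (Illustrative: for a C99 compound literal such as |
+	       int *p = (int[]){ 1, 2, 3 }; |
+	   the decl fetched below is the anonymous array object the |
+	   front end created to hold the literal's value.)  */ |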
+ tree decl = COMPOUND_LITERAL_EXPR_DECL (exp); |
- case VEC_WIDEN_MULT_HI_EXPR: |
- case VEC_WIDEN_MULT_LO_EXPR: |
- { |
- tree oprnd0 = TREE_OPERAND (exp, 0); |
- tree oprnd1 = TREE_OPERAND (exp, 1); |
+ /* Create RTL for this variable. */ |
+ if (!DECL_RTL_SET_P (decl)) |
+ { |
+ if (DECL_HARD_REGISTER (decl)) |
+ /* The user specified an assembler name for this variable. |
+ Set that up now. */ |
+ rest_of_decl_compilation (decl, 0, 0); |
+ else |
+ expand_decl (decl); |
+ } |
- expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0); |
- target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX, |
- target, unsignedp); |
- gcc_assert (target); |
- return target; |
+ return expand_expr_real (decl, original_target, tmode, |
+ modifier, alt_rtl); |
} |
- case VEC_PACK_TRUNC_EXPR: |
- case VEC_PACK_SAT_EXPR: |
- case VEC_PACK_FIX_TRUNC_EXPR: |
- mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
- goto binop; |
- |
default: |
- return lang_hooks.expand_expr (exp, original_target, tmode, |
- modifier, alt_rtl); |
+ return expand_expr_real_2 (&ops, target, tmode, modifier); |
} |
- |
- /* Here to do an ordinary binary operator. */ |
- binop: |
- expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), |
- subtarget, &op0, &op1, 0); |
- binop2: |
- this_optab = optab_for_tree_code (code, type, optab_default); |
- binop3: |
- if (modifier == EXPAND_STACK_PARM) |
- target = 0; |
- temp = expand_binop (mode, this_optab, op0, op1, target, |
- unsignedp, OPTAB_LIB_WIDEN); |
- gcc_assert (temp); |
- return REDUCE_BIT_FIELD (temp); |
} |
-#undef REDUCE_BIT_FIELD |
/* Subroutine of above: reduce EXP to the precision of TYPE (in the |
signedness of TYPE), possibly returning the result in TARGET. */ |
@@ -9508,7 +9671,7 @@ reduce_to_bit_field_precision (rtx exp, rtx target, tree type) |
if (target && GET_MODE (target) != GET_MODE (exp)) |
target = 0; |
/* For constant values, reduce using build_int_cst_type. */ |
- if (GET_CODE (exp) == CONST_INT) |
+ if (CONST_INT_P (exp)) |
{ |
HOST_WIDE_INT value = INTVAL (exp); |
tree t = build_int_cst_type (type, value); |
@@ -9691,15 +9854,12 @@ string_constant (tree arg, tree *ptr_offset) |
return 0; |
} |
-/* Generate code to calculate EXP using a store-flag instruction |
- and return an rtx for the result. EXP is either a comparison |
- or a TRUTH_NOT_EXPR whose operand is a comparison. |
+/* Generate code to calculate the expression OPS, an exploded |
+   comparison, using a store-flag instruction, and return an rtx for |
+   the result. |
If TARGET is nonzero, store the result there if convenient. |
- If ONLY_CHEAP is nonzero, only do this if it is likely to be very |
- cheap. |
- |
Return zero if there is no suitable set-flag instruction |
available on this machine. |
@@ -9712,29 +9872,19 @@ string_constant (tree arg, tree *ptr_offset) |
set/jump/set sequence. */ |
static rtx |
-do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) |
+do_store_flag (sepops ops, rtx target, enum machine_mode mode) |
{ |
enum rtx_code code; |
tree arg0, arg1, type; |
tree tem; |
enum machine_mode operand_mode; |
- int invert = 0; |
int unsignedp; |
rtx op0, op1; |
- enum insn_code icode; |
rtx subtarget = target; |
- rtx result, label; |
- |
- /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the |
- result at the end. We can't simply invert the test since it would |
- have already been inverted if it were valid. This case occurs for |
- some floating-point comparisons. */ |
- |
- if (TREE_CODE (exp) == TRUTH_NOT_EXPR) |
- invert = 1, exp = TREE_OPERAND (exp, 0); |
+ location_t loc = ops->location; |
- arg0 = TREE_OPERAND (exp, 0); |
- arg1 = TREE_OPERAND (exp, 1); |
+ arg0 = ops->op0; |
+ arg1 = ops->op1; |
/* Don't crash if the comparison was erroneous. */ |
if (arg0 == error_mark_node || arg1 == error_mark_node) |
@@ -9753,11 +9903,11 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) |
when function pointers must be canonicalized before comparisons. */ |
#ifdef HAVE_canonicalize_funcptr_for_compare |
if (HAVE_canonicalize_funcptr_for_compare |
- && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE |
- && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
+ && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE |
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) |
== FUNCTION_TYPE)) |
- || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE |
- && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) |
+ || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE |
+ && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) |
== FUNCTION_TYPE)))) |
return 0; |
#endif |
@@ -9772,7 +9922,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) |
tests will not catch constants in the first operand, but constants |
are rarely passed as the first operand. */ |
- switch (TREE_CODE (exp)) |
+ switch (ops->code) |
{ |
case EQ_EXPR: |
code = EQ; |
@@ -9856,80 +10006,24 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) |
&& integer_pow2p (TREE_OPERAND (arg0, 1))) |
{ |
tree type = lang_hooks.types.type_for_mode (mode, unsignedp); |
- return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR, |
+ return expand_expr (fold_single_bit_test (loc, |
+ code == NE ? NE_EXPR : EQ_EXPR, |
arg0, arg1, type), |
target, VOIDmode, EXPAND_NORMAL); |
} |
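+  /* (Illustrative: fold_single_bit_test rewrites a power-of-two mask |
+     test such as "(x & 8) != 0" into the equivalent of "(x >> 3) & 1", |
+     so no comparison instruction is needed at all.)  */ |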
- /* Now see if we are likely to be able to do this. Return if not. */ |
- if (! can_compare_p (code, operand_mode, ccp_store_flag)) |
- return 0; |
- |
- icode = setcc_gen_code[(int) code]; |
- |
- if (icode == CODE_FOR_nothing) |
- { |
- enum machine_mode wmode; |
- |
- for (wmode = operand_mode; |
- icode == CODE_FOR_nothing && wmode != VOIDmode; |
- wmode = GET_MODE_WIDER_MODE (wmode)) |
- icode = optab_handler (cstore_optab, wmode)->insn_code; |
- } |
- |
- if (icode == CODE_FOR_nothing |
- || (only_cheap && insn_data[(int) icode].operand[0].mode != mode)) |
- { |
- /* We can only do this if it is one of the special cases that |
- can be handled without an scc insn. */ |
- if ((code == LT && integer_zerop (arg1)) |
- || (! only_cheap && code == GE && integer_zerop (arg1))) |
- ; |
- else if (! only_cheap && (code == NE || code == EQ) |
- && TREE_CODE (type) != REAL_TYPE |
- && ((optab_handler (abs_optab, operand_mode)->insn_code |
- != CODE_FOR_nothing) |
- || (optab_handler (ffs_optab, operand_mode)->insn_code |
- != CODE_FOR_nothing))) |
- ; |
- else |
- return 0; |
- } |
- |
if (! get_subtarget (target) |
|| GET_MODE (subtarget) != operand_mode) |
subtarget = 0; |
- expand_operands (arg0, arg1, subtarget, &op0, &op1, 0); |
+ expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL); |
if (target == 0) |
target = gen_reg_rtx (mode); |
- result = emit_store_flag (target, code, op0, op1, |
- operand_mode, unsignedp, 1); |
- |
- if (result) |
- { |
- if (invert) |
- result = expand_binop (mode, xor_optab, result, const1_rtx, |
- result, 0, OPTAB_LIB_WIDEN); |
- return result; |
- } |
- |
- /* If this failed, we have to do this with set/compare/jump/set code. */ |
- if (!REG_P (target) |
- || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1)) |
- target = gen_reg_rtx (GET_MODE (target)); |
- |
- emit_move_insn (target, invert ? const0_rtx : const1_rtx); |
- label = gen_label_rtx (); |
- do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX, |
- NULL_RTX, label); |
- |
- emit_move_insn (target, invert ? const1_rtx : const0_rtx); |
- emit_label (label); |
- |
- return target; |
+ /* Try a cstore if possible. */ |
+ return emit_store_flag_force (target, code, op0, op1, |
+ operand_mode, unsignedp, 1); |
} |
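+ |
+/* (Illustrative, not part of this change: for a GIMPLE assignment |
+   "x = a < b", expand_expr_real_2 packs the comparison code and the |
+   two operands into the sepops structure, and do_store_flag ends up |
+   emitting the equivalent of |
+ |
+       emit_store_flag_force (target, LT, op0, op1, operand_mode, |
+			      unsignedp, 1); |
+ |
+   with LTU instead of LT for unsigned operands.  emit_store_flag_force |
+   guarantees a result, itself falling back to a compare/jump/set |
+   sequence when no cstore pattern exists, which is why the hand-written |
+   fallback removed above is no longer needed.)  */ |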
@@ -9940,19 +10034,6 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) |
# define CODE_FOR_casesi CODE_FOR_nothing |
#endif |
-/* If the machine does not have a case insn that compares the bounds, |
- this means extra overhead for dispatch tables, which raises the |
- threshold for using them. */ |
-#ifndef CASE_VALUES_THRESHOLD |
-#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5) |
-#endif /* CASE_VALUES_THRESHOLD */ |
- |
-unsigned int |
-case_values_threshold (void) |
-{ |
- return CASE_VALUES_THRESHOLD; |
-} |
- |
/* Attempt to generate a casesi instruction. Returns 1 if successful, |
0 otherwise (i.e. if there is no casesi instruction). */ |
int |
@@ -10203,4 +10284,52 @@ const_vector_from_tree (tree exp) |
return gen_rtx_CONST_VECTOR (mode, v); |
} |
+ |
+ |
+/* Build a decl for an EH personality function named NAME.  */ |
+ |
+tree |
+build_personality_function (const char *name) |
+{ |
+ tree decl, type; |
+ |
+ type = build_function_type_list (integer_type_node, integer_type_node, |
+ long_long_unsigned_type_node, |
+ ptr_type_node, ptr_type_node, NULL_TREE); |
+ decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, |
+ get_identifier (name), type); |
+ DECL_ARTIFICIAL (decl) = 1; |
+ DECL_EXTERNAL (decl) = 1; |
+ TREE_PUBLIC (decl) = 1; |
+ |
+ /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with |
+ are the flags assigned by targetm.encode_section_info. */ |
+ SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL); |
+ |
+ return decl; |
+} |
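+ |
+/* (Illustrative use: a C++ front end could obtain its EH personality |
+   decl via |
+ |
+       tree persfn = build_personality_function ("__gxx_personality_v0"); |
+ |
+   the prototype built above only needs to be plausible, since the |
+   personality routine is invoked by the unwinder, never called |
+   directly from generated code.)  */ |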
+ |
+/* Extract the personality function of DECL and return the |
+   corresponding libfunc.  */ |
+ |
+rtx |
+get_personality_function (tree decl) |
+{ |
+ tree personality = DECL_FUNCTION_PERSONALITY (decl); |
+ enum eh_personality_kind pk; |
+ |
+ pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl)); |
+ if (pk == eh_personality_none) |
+ return NULL; |
+ |
+ if (!personality |
+ && pk == eh_personality_any) |
+ personality = lang_hooks.eh_personality (); |
+ |
+ if (pk == eh_personality_lang) |
+ gcc_assert (personality != NULL_TREE); |
+ |
+ return XEXP (DECL_RTL (personality), 0); |
+} |
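+ |
+/* (Illustrative: callers in the EH machinery can do |
+ |
+       rtx personality = get_personality_function (current_function_decl); |
+ |
+   and receive NULL when the function needs no personality, or the |
+   SYMBOL_REF of the personality routine otherwise.)  */ |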
+ |
#include "gt-expr.h" |