Index: gcc/gcc/recog.c |
diff --git a/gcc/gcc/recog.c b/gcc/gcc/recog.c |
index 70370e3ad81619d4cc67c64fc4e1f70f5d80aff3..254e83a9769d3f73fb5afeacb480c18411270a40 100644 |
--- a/gcc/gcc/recog.c |
+++ b/gcc/gcc/recog.c |
@@ -1,6 +1,6 @@ |
/* Subroutines used by or related to instruction recognition. |
Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998 |
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 |
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 |
Free Software Foundation, Inc. |
This file is part of GCC. |
@@ -286,7 +286,7 @@ canonicalize_change_group (rtx insn, rtx x) |
else |
return false; |
} |
- |
+ |
/* This subroutine of apply_change_group verifies whether the changes to INSN |
were valid; i.e. whether INSN can still be recognized. */ |
@@ -376,7 +376,9 @@ verify_changes (int num) |
if (MEM_P (object)) |
{ |
- if (! memory_address_p (GET_MODE (object), XEXP (object, 0))) |
+ if (! memory_address_addr_space_p (GET_MODE (object), |
+ XEXP (object, 0), |
+ MEM_ADDR_SPACE (object))) |
break; |
} |
else if (REG_P (changes[i].old) |
@@ -389,6 +391,8 @@ verify_changes (int num) |
assemblies if they have been defined as register asm ("x"). */ |
break; |
} |
+ else if (DEBUG_INSN_P (object)) |
+ continue; |
else if (insn_invalid_p (object)) |
{ |
rtx pat = PATTERN (object); |
@@ -429,7 +433,8 @@ verify_changes (int num) |
validate_change (object, &PATTERN (object), newpat, 1); |
continue; |
} |
- else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) |
+ else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER |
+ || GET_CODE (pat) == VAR_LOCATION) |
/* If this insn is a CLOBBER or USE, it is always valid, but is |
never recognized. */ |
continue; |
@@ -524,7 +529,7 @@ cancel_changes (int num) |
rtx. */ |
static void |
-simplify_while_replacing (rtx *loc, rtx to, rtx object, |
+simplify_while_replacing (rtx *loc, rtx to, rtx object, |
enum machine_mode op0_mode) |
{ |
rtx x = *loc; |
@@ -550,13 +555,13 @@ simplify_while_replacing (rtx *loc, rtx to, rtx object, |
simplify_gen_binary to try to simplify it. |
??? We may want later to remove this, once simplification is |
separated from this function. */ |
- if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to) |
+ if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to) |
validate_change (object, loc, |
simplify_gen_binary |
(PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1); |
break; |
case MINUS: |
- if (GET_CODE (XEXP (x, 1)) == CONST_INT |
+ if (CONST_INT_P (XEXP (x, 1)) |
|| GET_CODE (XEXP (x, 1)) == CONST_DOUBLE) |
validate_change (object, loc, |
simplify_gen_binary |
@@ -597,8 +602,8 @@ simplify_while_replacing (rtx *loc, rtx to, rtx object, |
happen, we might just fail in some cases). */ |
if (MEM_P (XEXP (x, 0)) |
- && GET_CODE (XEXP (x, 1)) == CONST_INT |
- && GET_CODE (XEXP (x, 2)) == CONST_INT |
+ && CONST_INT_P (XEXP (x, 1)) |
+ && CONST_INT_P (XEXP (x, 2)) |
&& !mode_dependent_address_p (XEXP (XEXP (x, 0), 0)) |
&& !MEM_VOLATILE_P (XEXP (x, 0))) |
{ |
@@ -655,7 +660,7 @@ simplify_while_replacing (rtx *loc, rtx to, rtx object, |
validate_change passing OBJECT. */ |
static void |
-validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, |
+validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, |
bool simplify) |
{ |
int i, j; |
@@ -708,7 +713,7 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, |
from, to, object, simplify); |
} |
else |
- validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object, |
+ validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object, |
simplify); |
} |
} |
@@ -719,7 +724,7 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, |
validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify); |
else if (fmt[i] == 'E') |
for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
- validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object, |
+ validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object, |
simplify); |
} |
@@ -738,6 +743,17 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object, |
simplify_while_replacing (loc, to, object, op0_mode); |
} |
+/* Try replacing every occurrence of FROM in subexpression LOC of INSN |
+ with TO. After all changes have been made, validate by seeing |
+ if INSN is still valid. */ |
+ |
+int |
+validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc) |
+{ |
+ validate_replace_rtx_1 (loc, from, to, insn, true); |
+ return apply_change_group (); |
+} |
+ |
/* Try replacing every occurrence of FROM in INSN with TO. After all |
changes have been made, validate by seeing if INSN is still valid. */ |
@@ -749,9 +765,9 @@ validate_replace_rtx (rtx from, rtx to, rtx insn) |
} |
/* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE |
- is a part of INSN. After all changes have been made, validate by seeing if |
- INSN is still valid. |
- validate_replace_rtx (from, to, insn) is equivalent to |
+ is a part of INSN. After all changes have been made, validate by seeing if |
+ INSN is still valid. |
+ validate_replace_rtx (from, to, insn) is equivalent to |
validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */ |
int |
@@ -762,8 +778,8 @@ validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn) |
} |
/* Same as above, but do not simplify rtx afterwards. */ |
-int |
-validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where, |
+int |
+validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where, |
rtx insn) |
{ |
validate_replace_rtx_1 (where, from, to, insn, false); |
@@ -771,12 +787,18 @@ validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where, |
} |
-/* Try replacing every occurrence of FROM in INSN with TO. */ |
+/* Try replacing every occurrence of FROM in INSN with TO. This also |
+ will replace in REG_EQUAL and REG_EQUIV notes. */ |
void |
validate_replace_rtx_group (rtx from, rtx to, rtx insn) |
{ |
+ rtx note; |
validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true); |
+ for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) |
+ if (REG_NOTE_KIND (note) == REG_EQUAL |
+ || REG_NOTE_KIND (note) == REG_EQUIV) |
+ validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true); |
} |
/* Function called by note_uses to replace used subexpressions. */ |
@@ -901,7 +923,7 @@ general_operand (rtx op, enum machine_mode mode) |
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT) |
return 0; |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& mode != VOIDmode |
&& trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op)) |
return 0; |
@@ -964,7 +986,7 @@ general_operand (rtx op, enum machine_mode mode) |
return 0; |
/* Use the mem's mode, since it will be reloaded thus. */ |
- if (memory_address_p (GET_MODE (op), y)) |
+ if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op))) |
return 1; |
} |
@@ -1078,7 +1100,7 @@ immediate_operand (rtx op, enum machine_mode mode) |
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT) |
return 0; |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& mode != VOIDmode |
&& trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op)) |
return 0; |
@@ -1095,7 +1117,7 @@ immediate_operand (rtx op, enum machine_mode mode) |
int |
const_int_operand (rtx op, enum machine_mode mode) |
{ |
- if (GET_CODE (op) != CONST_INT) |
+ if (!CONST_INT_P (op)) |
return 0; |
if (mode != VOIDmode |
@@ -1118,7 +1140,7 @@ const_double_operand (rtx op, enum machine_mode mode) |
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT) |
return 0; |
- return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT) |
+ return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op)) |
&& (mode == VOIDmode || GET_MODE (op) == mode |
|| GET_MODE (op) == VOIDmode)); |
} |
@@ -1145,7 +1167,7 @@ nonmemory_operand (rtx op, enum machine_mode mode) |
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT) |
return 0; |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& mode != VOIDmode |
&& trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op)) |
return 0; |
@@ -1212,7 +1234,7 @@ push_operand (rtx op, enum machine_mode mode) |
if (GET_CODE (op) != PRE_MODIFY |
|| GET_CODE (XEXP (op, 1)) != PLUS |
|| XEXP (XEXP (op, 1), 0) != XEXP (op, 0) |
- || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT |
+ || !CONST_INT_P (XEXP (XEXP (op, 1), 1)) |
#ifdef STACK_GROWS_DOWNWARD |
|| INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size |
#else |
@@ -1248,16 +1270,23 @@ pop_operand (rtx op, enum machine_mode mode) |
return XEXP (op, 0) == stack_pointer_rtx; |
} |
-/* Return 1 if ADDR is a valid memory address for mode MODE. */ |
+/* Return 1 if ADDR is a valid memory address |
+ for mode MODE in address space AS. */ |
int |
-memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr) |
+memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED, |
+ rtx addr, addr_space_t as) |
{ |
+#ifdef GO_IF_LEGITIMATE_ADDRESS |
+ gcc_assert (ADDR_SPACE_GENERIC_P (as)); |
GO_IF_LEGITIMATE_ADDRESS (mode, addr, win); |
return 0; |
win: |
return 1; |
+#else |
+ return targetm.addr_space.legitimate_address_p (mode, addr, 0, as); |
+#endif |
} |
/* Return 1 if OP is a valid memory reference with mode MODE, |
@@ -1309,7 +1338,7 @@ indirect_operand (rtx op, enum machine_mode mode) |
return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode)) |
|| (GET_CODE (XEXP (inner, 0)) == PLUS |
- && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT |
+ && CONST_INT_P (XEXP (XEXP (inner, 0), 1)) |
&& INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset |
&& general_operand (XEXP (XEXP (inner, 0), 0), Pmode))); |
} |
@@ -1319,6 +1348,32 @@ indirect_operand (rtx op, enum machine_mode mode) |
&& general_operand (XEXP (op, 0), Pmode)); |
} |
+/* Return 1 if this is an ordered comparison operator (not including |
+ ORDERED and UNORDERED). */ |
+ |
+int |
+ordered_comparison_operator (rtx op, enum machine_mode mode) |
+{ |
+ if (mode != VOIDmode && GET_MODE (op) != mode) |
+ return false; |
+ switch (GET_CODE (op)) |
+ { |
+ case EQ: |
+ case NE: |
+ case LT: |
+ case LTU: |
+ case LE: |
+ case LEU: |
+ case GT: |
+ case GTU: |
+ case GE: |
+ case GEU: |
+ return true; |
+ default: |
+ return false; |
+ } |
+} |
+ |
/* Return 1 if this is a comparison operator. This allows the use of |
MATCH_OPERATOR to recognize all the branch insns. */ |
@@ -1329,6 +1384,42 @@ comparison_operator (rtx op, enum machine_mode mode) |
&& COMPARISON_P (op)); |
} |
+/* If BODY is an insn body that uses ASM_OPERANDS, return it. */ |
+ |
+rtx |
+extract_asm_operands (rtx body) |
+{ |
+ rtx tmp; |
+ switch (GET_CODE (body)) |
+ { |
+ case ASM_OPERANDS: |
+ return body; |
+ |
+ case SET: |
+ /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */ |
+ tmp = SET_SRC (body); |
+ if (GET_CODE (tmp) == ASM_OPERANDS) |
+ return tmp; |
+ break; |
+ |
+ case PARALLEL: |
+ tmp = XVECEXP (body, 0, 0); |
+ if (GET_CODE (tmp) == ASM_OPERANDS) |
+ return tmp; |
+ if (GET_CODE (tmp) == SET) |
+ { |
+ tmp = SET_SRC (tmp); |
+ if (GET_CODE (tmp) == ASM_OPERANDS) |
+ return tmp; |
+ } |
+ break; |
+ |
+ default: |
+ break; |
+ } |
+ return NULL; |
+} |
+ |
/* If BODY is an insn body that uses ASM_OPERANDS, |
return the number of operands (both input and output) in the insn. |
Otherwise return -1. */ |
@@ -1336,26 +1427,22 @@ comparison_operator (rtx op, enum machine_mode mode) |
int |
asm_noperands (const_rtx body) |
{ |
- switch (GET_CODE (body)) |
+ rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body)); |
+ int n_sets = 0; |
+ |
+ if (asm_op == NULL) |
+ return -1; |
+ |
+ if (GET_CODE (body) == SET) |
+ n_sets = 1; |
+ else if (GET_CODE (body) == PARALLEL) |
{ |
- case ASM_OPERANDS: |
- /* No output operands: return number of input operands. */ |
- return ASM_OPERANDS_INPUT_LENGTH (body); |
- case SET: |
- if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS) |
- /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */ |
- return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1; |
- else |
- return -1; |
- case PARALLEL: |
- if (GET_CODE (XVECEXP (body, 0, 0)) == SET |
- && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS) |
+ int i; |
+ if (GET_CODE (XVECEXP (body, 0, 0)) == SET) |
{ |
/* Multiple output operands, or 1 output plus some clobbers: |
- body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */ |
- int i; |
- int n_sets; |
- |
+ body is |
+ [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */ |
/* Count backwards through CLOBBERs to determine number of SETs. */ |
for (i = XVECLEN (body, 0); i > 0; i--) |
{ |
@@ -1381,30 +1468,23 @@ asm_noperands (const_rtx body) |
/* If these ASM_OPERANDS rtx's came from different original insns |
then they aren't allowed together. */ |
if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt)) |
- != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0)))) |
+ != ASM_OPERANDS_INPUT_VEC (asm_op)) |
return -1; |
} |
- return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0))) |
- + n_sets); |
} |
- else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS) |
+ else |
{ |
/* 0 outputs, but some clobbers: |
body is [(asm_operands ...) (clobber (reg ...))...]. */ |
- int i; |
- |
/* Make sure all the other parallel things really are clobbers. */ |
for (i = XVECLEN (body, 0) - 1; i > 0; i--) |
if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER) |
return -1; |
- |
- return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0)); |
} |
- else |
- return -1; |
- default: |
- return -1; |
} |
+ |
+ return (ASM_OPERANDS_INPUT_LENGTH (asm_op) |
+ + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets); |
} |
/* Assuming BODY is an insn body that uses ASM_OPERANDS, |
@@ -1422,28 +1502,19 @@ decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs, |
const char **constraints, enum machine_mode *modes, |
location_t *loc) |
{ |
- int i; |
- int noperands; |
- rtx asmop = 0; |
+ int nbase = 0, n, i; |
+ rtx asmop; |
- if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS) |
+ switch (GET_CODE (body)) |
{ |
- asmop = SET_SRC (body); |
- /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */ |
- |
- noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1; |
+ case ASM_OPERANDS: |
+ /* Zero output asm: BODY is (asm_operands ...). */ |
+ asmop = body; |
+ break; |
- for (i = 1; i < noperands; i++) |
- { |
- if (operand_locs) |
- operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1); |
- if (operands) |
- operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1); |
- if (constraints) |
- constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1); |
- if (modes) |
- modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1); |
- } |
+ case SET: |
+ /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */ |
+ asmop = SET_SRC (body); |
/* The output is in the SET. |
Its constraint is in the ASM_OPERANDS itself. */ |
@@ -1455,93 +1526,67 @@ decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs, |
constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop); |
if (modes) |
modes[0] = GET_MODE (SET_DEST (body)); |
- } |
- else if (GET_CODE (body) == ASM_OPERANDS) |
- { |
- asmop = body; |
- /* No output operands: BODY is (asm_operands ....). */ |
- |
- noperands = ASM_OPERANDS_INPUT_LENGTH (asmop); |
- |
- /* The input operands are found in the 1st element vector. */ |
- /* Constraints for inputs are in the 2nd element vector. */ |
- for (i = 0; i < noperands; i++) |
- { |
- if (operand_locs) |
- operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i); |
- if (operands) |
- operands[i] = ASM_OPERANDS_INPUT (asmop, i); |
- if (constraints) |
- constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i); |
- if (modes) |
- modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i); |
- } |
- } |
- else if (GET_CODE (body) == PARALLEL |
- && GET_CODE (XVECEXP (body, 0, 0)) == SET |
- && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS) |
- { |
- int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */ |
- int nin; |
- int nout = 0; /* Does not include CLOBBERs. */ |
- |
- asmop = SET_SRC (XVECEXP (body, 0, 0)); |
- nin = ASM_OPERANDS_INPUT_LENGTH (asmop); |
+ nbase = 1; |
+ break; |
- /* At least one output, plus some CLOBBERs. */ |
+ case PARALLEL: |
+ { |
+ int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */ |
- /* The outputs are in the SETs. |
- Their constraints are in the ASM_OPERANDS itself. */ |
- for (i = 0; i < nparallel; i++) |
- { |
- if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER) |
- break; /* Past last SET */ |
+ asmop = XVECEXP (body, 0, 0); |
+ if (GET_CODE (asmop) == SET) |
+ { |
+ asmop = SET_SRC (asmop); |
- if (operands) |
- operands[i] = SET_DEST (XVECEXP (body, 0, i)); |
- if (operand_locs) |
- operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i)); |
- if (constraints) |
- constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1); |
- if (modes) |
- modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i))); |
- nout++; |
- } |
+ /* At least one output, plus some CLOBBERs. The outputs are in |
+ the SETs. Their constraints are in the ASM_OPERANDS itself. */ |
+ for (i = 0; i < nparallel; i++) |
+ { |
+ if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER) |
+ break; /* Past last SET */ |
+ if (operands) |
+ operands[i] = SET_DEST (XVECEXP (body, 0, i)); |
+ if (operand_locs) |
+ operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i)); |
+ if (constraints) |
+ constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1); |
+ if (modes) |
+ modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i))); |
+ } |
+ nbase = i; |
+ } |
+ break; |
+ } |
- for (i = 0; i < nin; i++) |
- { |
- if (operand_locs) |
- operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i); |
- if (operands) |
- operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i); |
- if (constraints) |
- constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i); |
- if (modes) |
- modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i); |
- } |
+ default: |
+ gcc_unreachable (); |
} |
- else if (GET_CODE (body) == PARALLEL |
- && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS) |
- { |
- /* No outputs, but some CLOBBERs. */ |
- |
- int nin; |
- asmop = XVECEXP (body, 0, 0); |
- nin = ASM_OPERANDS_INPUT_LENGTH (asmop); |
- |
- for (i = 0; i < nin; i++) |
- { |
- if (operand_locs) |
- operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i); |
- if (operands) |
- operands[i] = ASM_OPERANDS_INPUT (asmop, i); |
- if (constraints) |
- constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i); |
- if (modes) |
- modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i); |
- } |
+ n = ASM_OPERANDS_INPUT_LENGTH (asmop); |
+ for (i = 0; i < n; i++) |
+ { |
+ if (operand_locs) |
+ operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i); |
+ if (operands) |
+ operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i); |
+ if (constraints) |
+ constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i); |
+ if (modes) |
+ modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i); |
+ } |
+ nbase += n; |
+ n = ASM_OPERANDS_LABEL_LENGTH (asmop); |
+ for (i = 0; i < n; i++) |
+ { |
+ if (operand_locs) |
+ operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i); |
+ if (operands) |
+ operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i); |
+ if (constraints) |
+ constraints[nbase + i] = ""; |
+ if (modes) |
+ modes[nbase + i] = Pmode; |
} |
if (loc) |
@@ -1561,6 +1606,11 @@ asm_operand_ok (rtx op, const char *constraint, const char **constraints) |
/* Use constrain_operands after reload. */ |
gcc_assert (!reload_completed); |
+ /* Empty constraint string is the same as "X,...,X", i.e. X for as |
+ many alternatives as required to match the other operands. */ |
+ if (*constraint == '\0') |
+ return 1; |
+ |
while (*constraint) |
{ |
char c = *constraint; |
@@ -1666,7 +1716,7 @@ asm_operand_ok (rtx op, const char *constraint, const char **constraints) |
break; |
case 's': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
|| (GET_CODE (op) == CONST_DOUBLE |
&& GET_MODE (op) == VOIDmode)) |
break; |
@@ -1678,49 +1728,49 @@ asm_operand_ok (rtx op, const char *constraint, const char **constraints) |
break; |
case 'n': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
|| (GET_CODE (op) == CONST_DOUBLE |
&& GET_MODE (op) == VOIDmode)) |
result = 1; |
break; |
case 'I': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint)) |
result = 1; |
break; |
case 'J': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint)) |
result = 1; |
break; |
case 'K': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint)) |
result = 1; |
break; |
case 'L': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint)) |
result = 1; |
break; |
case 'M': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint)) |
result = 1; |
break; |
case 'N': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint)) |
result = 1; |
break; |
case 'O': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint)) |
result = 1; |
break; |
case 'P': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint)) |
result = 1; |
break; |
@@ -1829,7 +1879,8 @@ int |
offsettable_memref_p (rtx op) |
{ |
return ((MEM_P (op)) |
- && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0))); |
+ && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0), |
+ MEM_ADDR_SPACE (op))); |
} |
/* Similar, but don't require a strictly valid mem ref: |
@@ -1839,12 +1890,13 @@ int |
offsettable_nonstrict_memref_p (rtx op) |
{ |
return ((MEM_P (op)) |
- && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0))); |
+ && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0), |
+ MEM_ADDR_SPACE (op))); |
} |
/* Return 1 if Y is a memory address which contains no side effects |
- and would remain valid after the addition of a positive integer |
- less than the size of that mode. |
+ and would remain valid for address space AS after the addition of |
+ a positive integer less than the size of that mode. |
We assume that the original address is valid and do not check it. |
We do check that it is valid for narrower modes. |
@@ -1853,14 +1905,16 @@ offsettable_nonstrict_memref_p (rtx op) |
for the sake of use in reload.c. */ |
int |
-offsettable_address_p (int strictp, enum machine_mode mode, rtx y) |
+offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y, |
+ addr_space_t as) |
{ |
enum rtx_code ycode = GET_CODE (y); |
rtx z; |
rtx y1 = y; |
rtx *y2; |
- int (*addressp) (enum machine_mode, rtx) = |
- (strictp ? strict_memory_address_p : memory_address_p); |
+ int (*addressp) (enum machine_mode, rtx, addr_space_t) = |
+ (strictp ? strict_memory_address_addr_space_p |
+ : memory_address_addr_space_p); |
unsigned int mode_sz = GET_MODE_SIZE (mode); |
if (CONSTANT_ADDRESS_P (y)) |
@@ -1890,7 +1944,7 @@ offsettable_address_p (int strictp, enum machine_mode mode, rtx y) |
*y2 = plus_constant (*y2, mode_sz - 1); |
/* Use QImode because an odd displacement may be automatically invalid |
for any wider mode. But it should be valid for a single byte. */ |
- good = (*addressp) (QImode, y); |
+ good = (*addressp) (QImode, y, as); |
/* In any case, restore old contents of memory. */ |
*y2 = y1; |
@@ -1915,7 +1969,7 @@ offsettable_address_p (int strictp, enum machine_mode mode, rtx y) |
/* Use QImode because an odd displacement may be automatically invalid |
for any wider mode. But it should be valid for a single byte. */ |
- return (*addressp) (QImode, z); |
+ return (*addressp) (QImode, z, as); |
} |
/* Return 1 if ADDR is an address-expression whose effect depends |
@@ -1998,6 +2052,7 @@ extract_insn (rtx insn) |
case ASM_INPUT: |
case ADDR_VEC: |
case ADDR_DIFF_VEC: |
+ case VAR_LOCATION: |
return; |
case SET: |
@@ -2459,11 +2514,14 @@ constrain_operands (int strict) |
if (MEM_P (op)) |
{ |
if (strict > 0 |
- && !strict_memory_address_p (GET_MODE (op), |
- XEXP (op, 0))) |
+ && !strict_memory_address_addr_space_p |
+ (GET_MODE (op), XEXP (op, 0), |
+ MEM_ADDR_SPACE (op))) |
break; |
if (strict == 0 |
- && !memory_address_p (GET_MODE (op), XEXP (op, 0))) |
+ && !memory_address_addr_space_p |
+ (GET_MODE (op), XEXP (op, 0), |
+ MEM_ADDR_SPACE (op))) |
break; |
win = 1; |
} |
@@ -2506,7 +2564,7 @@ constrain_operands (int strict) |
break; |
case 's': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
|| (GET_CODE (op) == CONST_DOUBLE |
&& GET_MODE (op) == VOIDmode)) |
break; |
@@ -2516,7 +2574,7 @@ constrain_operands (int strict) |
break; |
case 'n': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
|| (GET_CODE (op) == CONST_DOUBLE |
&& GET_MODE (op) == VOIDmode)) |
win = 1; |
@@ -2530,7 +2588,7 @@ constrain_operands (int strict) |
case 'N': |
case 'O': |
case 'P': |
- if (GET_CODE (op) == CONST_INT |
+ if (CONST_INT_P (op) |
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p)) |
win = 1; |
break; |
@@ -3026,6 +3084,26 @@ peep2_find_free_register (int from, int to, const char *class_str, |
return NULL_RTX; |
} |
+/* Forget all currently tracked instructions, only remember current |
+ LIVE regset. */ |
+ |
+static void |
+peep2_reinit_state (regset live) |
+{ |
+ int i; |
+ |
+  /* Indicate that all slots except the last hold invalid data.  */ |
+ for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i) |
+ peep2_insn_data[i].insn = NULL_RTX; |
+ peep2_current_count = 0; |
+ |
+ /* Indicate that the last slot contains live_after data. */ |
+ peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB; |
+ peep2_current = MAX_INSNS_PER_PEEP2; |
+ |
+ COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live); |
+} |
+ |
/* Perform the peephole2 optimization pass. */ |
static void |
@@ -3049,24 +3127,16 @@ peephole2_optimize (void) |
FOR_EACH_BB_REVERSE (bb) |
{ |
rtl_profile_for_bb (bb); |
- /* Indicate that all slots except the last holds invalid data. */ |
- for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i) |
- peep2_insn_data[i].insn = NULL_RTX; |
- peep2_current_count = 0; |
- |
- /* Indicate that the last slot contains live_after data. */ |
- peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB; |
- peep2_current = MAX_INSNS_PER_PEEP2; |
/* Start up propagation. */ |
bitmap_copy (live, DF_LR_OUT (bb)); |
df_simulate_initialize_backwards (bb, live); |
- bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live); |
+ peep2_reinit_state (live); |
for (insn = BB_END (bb); ; insn = prev) |
{ |
prev = PREV_INSN (insn); |
- if (INSN_P (insn)) |
+ if (NONDEBUG_INSN_P (insn)) |
{ |
rtx attempt, before_try, x; |
int match_len; |
@@ -3088,7 +3158,7 @@ peephole2_optimize (void) |
/* If an insn has RTX_FRAME_RELATED_P set, peephole |
substitution would lose the |
REG_FRAME_RELATED_EXPR that is attached. */ |
- peep2_current_count = 0; |
+ peep2_reinit_state (live); |
attempt = NULL; |
} |
else |
@@ -3177,77 +3247,79 @@ peephole2_optimize (void) |
if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL)) |
break; |
- for (x = attempt ; x != before_try ; x = PREV_INSN (x)) |
- if (CALL_P (x) |
- || (flag_non_call_exceptions |
- && may_trap_p (PATTERN (x)) |
- && !find_reg_note (x, REG_EH_REGION, NULL))) |
- { |
- if (note) |
- add_reg_note (x, REG_EH_REGION, XEXP (note, 0)); |
- |
- if (x != BB_END (bb) && eh_edge) |
- { |
- edge nfte, nehe; |
- int flags; |
- |
- nfte = split_block (bb, x); |
- flags = (eh_edge->flags |
- & (EDGE_EH | EDGE_ABNORMAL)); |
- if (CALL_P (x)) |
- flags |= EDGE_ABNORMAL_CALL; |
- nehe = make_edge (nfte->src, eh_edge->dest, |
- flags); |
- |
- nehe->probability = eh_edge->probability; |
- nfte->probability |
- = REG_BR_PROB_BASE - nehe->probability; |
- |
- do_cleanup_cfg |= purge_dead_edges (nfte->dest); |
- bb = nfte->src; |
- eh_edge = nehe; |
- } |
- } |
+ if (note) |
+ copy_reg_eh_region_note_backward (note, attempt, |
+ before_try); |
+ |
+ if (eh_edge) |
+ for (x = attempt ; x != before_try ; x = PREV_INSN (x)) |
+ if (x != BB_END (bb) |
+ && (can_throw_internal (x) |
+ || can_nonlocal_goto (x))) |
+ { |
+ edge nfte, nehe; |
+ int flags; |
+ |
+ nfte = split_block (bb, x); |
+ flags = (eh_edge->flags |
+ & (EDGE_EH | EDGE_ABNORMAL)); |
+ if (CALL_P (x)) |
+ flags |= EDGE_ABNORMAL_CALL; |
+ nehe = make_edge (nfte->src, eh_edge->dest, |
+ flags); |
+ |
+ nehe->probability = eh_edge->probability; |
+ nfte->probability |
+ = REG_BR_PROB_BASE - nehe->probability; |
+ |
+ do_cleanup_cfg |= purge_dead_edges (nfte->dest); |
+ bb = nfte->src; |
+ eh_edge = nehe; |
+ } |
/* Converting possibly trapping insn to non-trapping is |
possible. Zap dummy outgoing edges. */ |
do_cleanup_cfg |= purge_dead_edges (bb); |
} |
-#ifdef HAVE_conditional_execution |
- for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) |
- peep2_insn_data[i].insn = NULL_RTX; |
- peep2_insn_data[peep2_current].insn = PEEP2_EOB; |
- peep2_current_count = 0; |
-#else |
- /* Back up lifetime information past the end of the |
- newly created sequence. */ |
- if (++i >= MAX_INSNS_PER_PEEP2 + 1) |
- i = 0; |
- bitmap_copy (live, peep2_insn_data[i].live_before); |
- |
- /* Update life information for the new sequence. */ |
- x = attempt; |
- do |
+ if (targetm.have_conditional_execution ()) |
{ |
- if (INSN_P (x)) |
+ for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) |
+ peep2_insn_data[i].insn = NULL_RTX; |
+ peep2_insn_data[peep2_current].insn = PEEP2_EOB; |
+ peep2_current_count = 0; |
+ } |
+ else |
+ { |
+ /* Back up lifetime information past the end of the |
+ newly created sequence. */ |
+ if (++i >= MAX_INSNS_PER_PEEP2 + 1) |
+ i = 0; |
+ bitmap_copy (live, peep2_insn_data[i].live_before); |
+ |
+ /* Update life information for the new sequence. */ |
+ x = attempt; |
+ do |
{ |
- if (--i < 0) |
- i = MAX_INSNS_PER_PEEP2; |
- if (peep2_current_count < MAX_INSNS_PER_PEEP2 |
- && peep2_insn_data[i].insn == NULL_RTX) |
- peep2_current_count++; |
- peep2_insn_data[i].insn = x; |
- df_insn_rescan (x); |
- df_simulate_one_insn_backwards (bb, x, live); |
- bitmap_copy (peep2_insn_data[i].live_before, live); |
+ if (INSN_P (x)) |
+ { |
+ if (--i < 0) |
+ i = MAX_INSNS_PER_PEEP2; |
+ if (peep2_current_count < MAX_INSNS_PER_PEEP2 |
+ && peep2_insn_data[i].insn == NULL_RTX) |
+ peep2_current_count++; |
+ peep2_insn_data[i].insn = x; |
+ df_insn_rescan (x); |
+ df_simulate_one_insn_backwards (bb, x, live); |
+ bitmap_copy (peep2_insn_data[i].live_before, |
+ live); |
+ } |
+ x = PREV_INSN (x); |
} |
- x = PREV_INSN (x); |
- } |
- while (x != prev); |
+ while (x != prev); |
- peep2_current = i; |
-#endif |
+ peep2_current = i; |
+ } |
/* If we generated a jump instruction, it won't have |
JUMP_LABEL set. Recompute after we're done. */ |
@@ -3474,7 +3546,7 @@ struct rtl_opt_pass pass_split_all_insns = |
NULL, /* sub */ |
NULL, /* next */ |
0, /* static_pass_number */ |
- 0, /* tv_id */ |
+ TV_NONE, /* tv_id */ |
0, /* properties_required */ |
0, /* properties_provided */ |
0, /* properties_destroyed */ |
@@ -3504,7 +3576,7 @@ struct rtl_opt_pass pass_split_after_reload = |
NULL, /* sub */ |
NULL, /* next */ |
0, /* static_pass_number */ |
- 0, /* tv_id */ |
+ TV_NONE, /* tv_id */ |
0, /* properties_required */ |
0, /* properties_provided */ |
0, /* properties_destroyed */ |
@@ -3548,7 +3620,7 @@ struct rtl_opt_pass pass_split_before_regstack = |
NULL, /* sub */ |
NULL, /* next */ |
0, /* static_pass_number */ |
- 0, /* tv_id */ |
+ TV_NONE, /* tv_id */ |
0, /* properties_required */ |
0, /* properties_provided */ |
0, /* properties_destroyed */ |
@@ -3586,7 +3658,7 @@ struct rtl_opt_pass pass_split_before_sched2 = |
NULL, /* sub */ |
NULL, /* next */ |
0, /* static_pass_number */ |
- 0, /* tv_id */ |
+ TV_NONE, /* tv_id */ |
0, /* properties_required */ |
0, /* properties_provided */ |
0, /* properties_destroyed */ |
@@ -3605,7 +3677,7 @@ gate_do_final_split (void) |
return 1; |
#else |
return 0; |
-#endif |
+#endif |
} |
struct rtl_opt_pass pass_split_for_shorten_branches = |
@@ -3618,7 +3690,7 @@ struct rtl_opt_pass pass_split_for_shorten_branches = |
NULL, /* sub */ |
NULL, /* next */ |
0, /* static_pass_number */ |
- 0, /* tv_id */ |
+ TV_NONE, /* tv_id */ |
0, /* properties_required */ |
0, /* properties_provided */ |
0, /* properties_destroyed */ |
@@ -3626,5 +3698,3 @@ struct rtl_opt_pass pass_split_for_shorten_branches = |
TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */ |
} |
}; |
- |
- |