Index: gcc/gcc/tree-ssa-structalias.c |
diff --git a/gcc/gcc/tree-ssa-structalias.c b/gcc/gcc/tree-ssa-structalias.c |
index 9d30528ca0e46351371c4e5617811a10a2383469..2c570331b365c61e2079efc9979d3ae3b6e24bc4 100644 |
--- a/gcc/gcc/tree-ssa-structalias.c |
+++ b/gcc/gcc/tree-ssa-structalias.c |
@@ -1,5 +1,6 @@ |
/* Tree based points-to analysis |
- Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc. |
+ Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 |
+ Free Software Foundation, Inc. |
Contributed by Daniel Berlin <dberlin@dberlin.org> |
This file is part of GCC. |
@@ -32,11 +33,9 @@ |
#include "basic-block.h" |
#include "output.h" |
#include "tree.h" |
-#include "c-common.h" |
#include "tree-flow.h" |
#include "tree-inline.h" |
#include "varray.h" |
-#include "c-tree.h" |
#include "diagnostic.h" |
#include "toplev.h" |
#include "gimple.h" |
@@ -48,7 +47,6 @@ |
#include "alloc-pool.h" |
#include "splay-tree.h" |
#include "params.h" |
-#include "tree-ssa-structalias.h" |
#include "cgraph.h" |
#include "alias.h" |
#include "pointer-set.h" |
@@ -185,6 +183,9 @@ static unsigned int create_variable_info_for (tree, const char *); |
typedef struct constraint_graph *constraint_graph_t; |
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool); |
+struct constraint; |
+typedef struct constraint *constraint_t; |
+ |
DEF_VEC_P(constraint_t); |
DEF_VEC_ALLOC_P(constraint_t,heap); |
@@ -211,32 +212,29 @@ struct variable_info |
/* True if this is a variable created by the constraint analysis, such as |
heap variables and constraints we had to break up. */ |
- unsigned int is_artificial_var:1; |
+ unsigned int is_artificial_var : 1; |
/* True if this is a special variable whose solution set should not be |
changed. */ |
- unsigned int is_special_var:1; |
+ unsigned int is_special_var : 1; |
/* True for variables whose size is not known or variable. */ |
- unsigned int is_unknown_size_var:1; |
+ unsigned int is_unknown_size_var : 1; |
/* True for (sub-)fields that represent a whole variable. */ |
unsigned int is_full_var : 1; |
/* True if this is a heap variable. */ |
- unsigned int is_heap_var:1; |
+ unsigned int is_heap_var : 1; |
- /* True if we may not use TBAA to prune references to this |
- variable. This is used for C++ placement new. */ |
- unsigned int no_tbaa_pruning : 1; |
+ /* True if this is a variable tracking a restrict pointer source. */ |
+ unsigned int is_restrict_var : 1; |
/* True if this field may contain pointers. */ |
unsigned int may_have_pointers : 1; |
- /* Variable id this was collapsed to due to type unsafety. Zero if |
- this variable was not collapsed. This should be unused completely |
- after build_succ_graph, or something is broken. */ |
- unsigned int collapsed_to; |
+ /* True if this represents a global variable. */ |
+ unsigned int is_global_var : 1; |
/* A link to the variable for the next field in this structure. */ |
struct variable_info *next; |
@@ -265,6 +263,8 @@ struct variable_info |
typedef struct variable_info *varinfo_t; |
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT); |
+static varinfo_t first_or_preceding_vi_for_offset (varinfo_t, |
+ unsigned HOST_WIDE_INT); |
static varinfo_t lookup_vi_for_tree (tree); |
/* Pool of variable info structures. */ |
@@ -286,68 +286,44 @@ get_varinfo (unsigned int n) |
return VEC_index (varinfo_t, varmap, n); |
} |
-/* Return the varmap element N, following the collapsed_to link. */ |
- |
-static inline varinfo_t |
-get_varinfo_fc (unsigned int n) |
-{ |
- varinfo_t v = VEC_index (varinfo_t, varmap, n); |
- |
- if (v->collapsed_to != 0) |
- return get_varinfo (v->collapsed_to); |
- return v; |
-} |
- |
/* Static IDs for the special variables. */ |
enum { nothing_id = 0, anything_id = 1, readonly_id = 2, |
escaped_id = 3, nonlocal_id = 4, callused_id = 5, |
storedanything_id = 6, integer_id = 7 }; |
-/* Variable that represents the unknown pointer. */ |
-static varinfo_t var_anything; |
-static tree anything_tree; |
- |
-/* Variable that represents the NULL pointer. */ |
-static varinfo_t var_nothing; |
-static tree nothing_tree; |
- |
-/* Variable that represents read only memory. */ |
-static varinfo_t var_readonly; |
-static tree readonly_tree; |
- |
-/* Variable that represents escaped memory. */ |
-static varinfo_t var_escaped; |
-static tree escaped_tree; |
- |
-/* Variable that represents nonlocal memory. */ |
-static varinfo_t var_nonlocal; |
-static tree nonlocal_tree; |
- |
-/* Variable that represents call-used memory. */ |
-static varinfo_t var_callused; |
-static tree callused_tree; |
+struct GTY(()) heapvar_map { |
+ struct tree_map map; |
+ unsigned HOST_WIDE_INT offset; |
+}; |
-/* Variable that represents variables that are stored to anything. */ |
-static varinfo_t var_storedanything; |
-static tree storedanything_tree; |
+static int |
+heapvar_map_eq (const void *p1, const void *p2) |
+{ |
+ const struct heapvar_map *h1 = (const struct heapvar_map *)p1; |
+ const struct heapvar_map *h2 = (const struct heapvar_map *)p2; |
+ return (h1->map.base.from == h2->map.base.from |
+ && h1->offset == h2->offset); |
+} |
-/* Variable that represents integers. This is used for when people do things |
- like &0->a.b. */ |
-static varinfo_t var_integer; |
-static tree integer_tree; |
+static unsigned int |
+heapvar_map_hash (struct heapvar_map *h) |
+{ |
+ return iterative_hash_host_wide_int (h->offset, |
+ htab_hash_pointer (h->map.base.from)); |
+} |
/* Lookup a heap var for FROM, and return it if we find one. */ |
static tree |
-heapvar_lookup (tree from) |
+heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset) |
{ |
- struct tree_map *h, in; |
- in.base.from = from; |
- |
- h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in, |
- htab_hash_pointer (from)); |
+ struct heapvar_map *h, in; |
+ in.map.base.from = from; |
+ in.offset = offset; |
+ h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in, |
+ heapvar_map_hash (&in)); |
if (h) |
- return h->to; |
+ return h->map.to; |
return NULL_TREE; |
} |
@@ -355,47 +331,51 @@ heapvar_lookup (tree from) |
hashtable. */ |
static void |
-heapvar_insert (tree from, tree to) |
+heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to) |
{ |
- struct tree_map *h; |
+ struct heapvar_map *h; |
void **loc; |
- h = GGC_NEW (struct tree_map); |
- h->hash = htab_hash_pointer (from); |
- h->base.from = from; |
- h->to = to; |
- loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT); |
- *(struct tree_map **) loc = h; |
+ h = GGC_NEW (struct heapvar_map); |
+ h->map.base.from = from; |
+ h->offset = offset; |
+ h->map.hash = heapvar_map_hash (h); |
+ h->map.to = to; |
+ loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT); |
+ gcc_assert (*loc == NULL); |
+ *(struct heapvar_map **) loc = h; |
} |
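
  [Illustrative sketch, not part of the patch: the heap-variable cache is now
  keyed by the pair (decl, offset) rather than by the decl alone, so different
  offsets into the same base get distinct heap variables.  A minimal
  stand-alone analogue of the lookup-or-insert pattern, using a fixed array in
  place of GCC's htab and made-up names:]

    #include <stddef.h>

    struct hv_entry { void *from; unsigned long offset; int heapvar_id; };

    static struct hv_entry hv_table[64];
    static size_t hv_count;

    /* Return the cached heap variable for (FROM, OFFSET), or -1.  */
    static int
    hv_lookup (void *from, unsigned long offset)
    {
      size_t i;
      for (i = 0; i < hv_count; ++i)
        if (hv_table[i].from == from && hv_table[i].offset == offset)
          return hv_table[i].heapvar_id;
      return -1;
    }

    /* Record HEAPVAR_ID as the heap variable for (FROM, OFFSET).  */
    static void
    hv_insert (void *from, unsigned long offset, int heapvar_id)
    {
      struct hv_entry e = { from, offset, heapvar_id };
      hv_table[hv_count++] = e;
    }
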
/* Return a new variable info structure consisting for a variable |
- named NAME, and using constraint graph node NODE. */ |
+ named NAME. Append it |
+ to the vector of variable info structures. */ |
static varinfo_t |
-new_var_info (tree t, unsigned int id, const char *name) |
+new_var_info (tree t, const char *name) |
{ |
+ unsigned index = VEC_length (varinfo_t, varmap); |
varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool); |
- tree var; |
- ret->id = id; |
+ ret->id = index; |
ret->name = name; |
ret->decl = t; |
- ret->is_artificial_var = false; |
- ret->is_heap_var = false; |
+ /* Vars without decl are artificial and do not have sub-variables. */ |
+ ret->is_artificial_var = (t == NULL_TREE); |
ret->is_special_var = false; |
ret->is_unknown_size_var = false; |
- ret->is_full_var = false; |
+ ret->is_full_var = (t == NULL_TREE); |
+ ret->is_heap_var = false; |
+ ret->is_restrict_var = false; |
ret->may_have_pointers = true; |
- var = t; |
- if (TREE_CODE (var) == SSA_NAME) |
- var = SSA_NAME_VAR (var); |
- ret->no_tbaa_pruning = (DECL_P (var) |
- && POINTER_TYPE_P (TREE_TYPE (var)) |
- && DECL_NO_TBAA_P (var)); |
+ ret->is_global_var = (t == NULL_TREE); |
+ if (t && DECL_P (t)) |
+ ret->is_global_var = is_global_var (t); |
ret->solution = BITMAP_ALLOC (&pta_obstack); |
ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack); |
ret->next = NULL; |
- ret->collapsed_to = 0; |
+ |
+ VEC_safe_push (varinfo_t, heap, varmap, ret); |
+ |
return ret; |
} |
@@ -416,9 +396,12 @@ struct constraint_expr |
IOW, in a deref constraint, we would deref, get the result set, |
then add OFFSET to each member. */ |
- unsigned HOST_WIDE_INT offset; |
+ HOST_WIDE_INT offset; |
}; |
+/* Use 0x8000... (the most negative HOST_WIDE_INT) as special unknown offset. */ |
+#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1)) |
+ |
typedef struct constraint_expr ce_s; |
DEF_VEC_O(ce_s); |
DEF_VEC_ALLOC_O(ce_s, heap); |
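
  [Illustrative sketch, not part of the patch: UNKNOWN_OFFSET is the most
  negative HOST_WIDE_INT, a value that can never be a real bit offset, so it
  works as an in-band sentinel.  A small runnable example with long standing
  in for HOST_WIDE_INT:]

    #include <limits.h>
    #include <stdio.h>

    /* LONG_MIN has the same 0x8000...0 bit pattern the macro above builds
       by shifting; no real field offset can equal it.  */
    #define SKETCH_UNKNOWN_OFFSET LONG_MIN

    int
    main (void)
    {
      long offset = SKETCH_UNKNOWN_OFFSET;
      if (offset == SKETCH_UNKNOWN_OFFSET)
        printf ("offset unknown: expand to all sub-fields\n");
      else
        printf ("offset %ld known: walk to the matching sub-field\n", offset);
      return 0;
    }
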
@@ -443,10 +426,6 @@ struct constraint |
static VEC(constraint_t,heap) *constraints; |
static alloc_pool constraint_pool; |
- |
-DEF_VEC_I(int); |
-DEF_VEC_ALLOC_I(int, heap); |
- |
/* The constraint graph is represented as an array of bitmaps |
containing successor nodes. */ |
@@ -575,27 +554,38 @@ new_constraint (const struct constraint_expr lhs, |
/* Print out constraint C to FILE. */ |
-void |
+static void |
dump_constraint (FILE *file, constraint_t c) |
{ |
if (c->lhs.type == ADDRESSOF) |
fprintf (file, "&"); |
else if (c->lhs.type == DEREF) |
fprintf (file, "*"); |
- fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name); |
- if (c->lhs.offset != 0) |
+ fprintf (file, "%s", get_varinfo (c->lhs.var)->name); |
+ if (c->lhs.offset == UNKNOWN_OFFSET) |
+ fprintf (file, " + UNKNOWN"); |
+ else if (c->lhs.offset != 0) |
fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset); |
fprintf (file, " = "); |
if (c->rhs.type == ADDRESSOF) |
fprintf (file, "&"); |
else if (c->rhs.type == DEREF) |
fprintf (file, "*"); |
- fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name); |
- if (c->rhs.offset != 0) |
+ fprintf (file, "%s", get_varinfo (c->rhs.var)->name); |
+ if (c->rhs.offset == UNKNOWN_OFFSET) |
+ fprintf (file, " + UNKNOWN"); |
+ else if (c->rhs.offset != 0) |
fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset); |
fprintf (file, "\n"); |
} |
+ |
+void debug_constraint (constraint_t); |
+void debug_constraints (void); |
+void debug_constraint_graph (void); |
+void debug_solution_for_var (unsigned int); |
+void debug_sa_points_to_info (void); |
+ |
/* Print out constraint C to stderr. */ |
void |
@@ -606,7 +596,7 @@ debug_constraint (constraint_t c) |
/* Print out all constraints to FILE */ |
-void |
+static void |
dump_constraints (FILE *file) |
{ |
int i; |
@@ -630,13 +620,13 @@ debug_constraints (void) |
complex with an offset, e.g: a = b + 8, then the label is "+". |
Otherwise the edge has no label. */ |
-void |
+static void |
dump_constraint_edge (FILE *file, constraint_t c) |
{ |
if (c->rhs.type != ADDRESSOF) |
{ |
- const char *src = get_varinfo_fc (c->rhs.var)->name; |
- const char *dst = get_varinfo_fc (c->lhs.var)->name; |
+ const char *src = get_varinfo (c->rhs.var)->name; |
+ const char *dst = get_varinfo (c->lhs.var)->name; |
fprintf (file, " \"%s\" -> \"%s\" ", src, dst); |
/* Due to preprocessing of constraints, instructions like *a = *b are |
illegal; thus, we do not have to handle such cases. */ |
@@ -658,7 +648,7 @@ dump_constraint_edge (FILE *file, constraint_t c) |
/* Print the constraint graph in dot format. */ |
-void |
+static void |
dump_constraint_graph (FILE *file) |
{ |
unsigned int i=0, size; |
@@ -690,7 +680,7 @@ dump_constraint_graph (FILE *file) |
size = size < graph->size ? size : graph->size; |
for (i = 0; i < size; i++) |
{ |
- const char *name = get_varinfo_fc (graph->rep[i])->name; |
+ const char *name = get_varinfo (graph->rep[i])->name; |
fprintf (file, " \"%s\" ;\n", name); |
} |
@@ -833,16 +823,62 @@ constraint_set_union (VEC(constraint_t,heap) **to, |
} |
} |
+/* Expand the solution in SET to all sub-fields of the variables it |
+ includes and union the expanded result into RESULT. */ |
+ |
+static void |
+solution_set_expand (bitmap result, bitmap set) |
+{ |
+ bitmap_iterator bi; |
+ bitmap vars = NULL; |
+ unsigned j; |
+ |
+ /* In a first pass record all variables whose sub-fields we need |
+ to add. This avoids quadratic behavior. */ |
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi) |
+ { |
+ varinfo_t v = get_varinfo (j); |
+ if (v->is_artificial_var |
+ || v->is_full_var) |
+ continue; |
+ v = lookup_vi_for_tree (v->decl); |
+ if (vars == NULL) |
+ vars = BITMAP_ALLOC (NULL); |
+ bitmap_set_bit (vars, v->id); |
+ } |
+ |
+ /* In the second pass now do the addition to the solution and |
+ to speed up solving add it to the delta as well. */ |
+ if (vars != NULL) |
+ { |
+ EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi) |
+ { |
+ varinfo_t v = get_varinfo (j); |
+ for (; v != NULL; v = v->next) |
+ bitmap_set_bit (result, v->id); |
+ } |
+ BITMAP_FREE (vars); |
+ } |
+} |
+ |
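
  [Illustrative source-level example (types and names made up) of why the
  expansion is needed: once a pointer into a sub-field is adjusted by a
  non-constant amount, it may point to any sub-field of the underlying
  variable, so the whole variable has to enter the solution:]

    struct S { int a; int b; int c; };

    int
    read_some_field (struct S *s, int i)
    {
      int *p = &s->b;   /* points-to set: { S.b }                          */
      p += i;           /* unknown adjustment: expand to { S.a, S.b, S.c } */
      return *p;        /* may access any sub-field of *s                  */
    }
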
/* Take a solution set SET, add OFFSET to each member of the set, and |
overwrite SET with the result when done. */ |
static void |
-solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset) |
+solution_set_add (bitmap set, HOST_WIDE_INT offset) |
{ |
bitmap result = BITMAP_ALLOC (&iteration_obstack); |
unsigned int i; |
bitmap_iterator bi; |
+ /* If the offset is unknown we have to expand the solution to |
+ all subfields. */ |
+ if (offset == UNKNOWN_OFFSET) |
+ { |
+ solution_set_expand (set, set); |
+ return; |
+ } |
+ |
EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi) |
{ |
varinfo_t vi = get_varinfo (i); |
@@ -856,21 +892,23 @@ solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset) |
else |
{ |
unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset; |
- varinfo_t v = first_vi_for_offset (vi, fieldoffset); |
- /* If the result is outside of the variable use the last field. */ |
- if (!v) |
- { |
- v = vi; |
- while (v->next != NULL) |
- v = v->next; |
- } |
- bitmap_set_bit (result, v->id); |
+ |
+ /* If the offset makes the pointer point before the start of the |
+ variable, use offset zero for the field lookup. */ |
+ if (offset < 0 |
+ && fieldoffset > vi->offset) |
+ fieldoffset = 0; |
+ |
+ if (offset != 0) |
+ vi = first_or_preceding_vi_for_offset (vi, fieldoffset); |
+ |
+ bitmap_set_bit (result, vi->id); |
/* If the result is not exactly at fieldoffset include the next |
field as well. See get_constraint_for_ptr_offset for more |
rationale. */ |
- if (v->offset != fieldoffset |
- && v->next != NULL) |
- bitmap_set_bit (result, v->next->id); |
+ if (vi->offset != fieldoffset |
+ && vi->next != NULL) |
+ bitmap_set_bit (result, vi->next->id); |
} |
} |
@@ -882,7 +920,7 @@ solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset) |
process. */ |
static bool |
-set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc) |
+set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc) |
{ |
if (inc == 0) |
return bitmap_ior_into (to, from); |
@@ -1119,8 +1157,8 @@ build_pred_graph (void) |
{ |
struct constraint_expr lhs = c->lhs; |
struct constraint_expr rhs = c->rhs; |
- unsigned int lhsvar = get_varinfo_fc (lhs.var)->id; |
- unsigned int rhsvar = get_varinfo_fc (rhs.var)->id; |
+ unsigned int lhsvar = lhs.var; |
+ unsigned int rhsvar = rhs.var; |
if (lhs.type == DEREF) |
{ |
@@ -1154,17 +1192,17 @@ build_pred_graph (void) |
/* All related variables are no longer direct nodes. */ |
RESET_BIT (graph->direct_nodes, rhsvar); |
- v = get_varinfo (rhsvar); |
- if (!v->is_full_var) |
- { |
- v = lookup_vi_for_tree (v->decl); |
- do |
- { |
- RESET_BIT (graph->direct_nodes, v->id); |
- v = v->next; |
- } |
- while (v != NULL); |
- } |
+ v = get_varinfo (rhsvar); |
+ if (!v->is_full_var) |
+ { |
+ v = lookup_vi_for_tree (v->decl); |
+ do |
+ { |
+ RESET_BIT (graph->direct_nodes, v->id); |
+ v = v->next; |
+ } |
+ while (v != NULL); |
+ } |
bitmap_set_bit (graph->address_taken, rhsvar); |
} |
else if (lhsvar > anything_id |
@@ -1206,8 +1244,8 @@ build_succ_graph (void) |
lhs = c->lhs; |
rhs = c->rhs; |
- lhsvar = find (get_varinfo_fc (lhs.var)->id); |
- rhsvar = find (get_varinfo_fc (rhs.var)->id); |
+ lhsvar = find (lhs.var); |
+ rhsvar = find (rhs.var); |
if (lhs.type == DEREF) |
{ |
@@ -1222,8 +1260,7 @@ build_succ_graph (void) |
else if (rhs.type == ADDRESSOF) |
{ |
/* x = &y */ |
- gcc_assert (find (get_varinfo_fc (rhs.var)->id) |
- == get_varinfo_fc (rhs.var)->id); |
+ gcc_assert (find (rhs.var) == rhs.var); |
bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar); |
} |
else if (lhsvar > anything_id |
@@ -1233,13 +1270,18 @@ build_succ_graph (void) |
} |
} |
- /* Add edges from STOREDANYTHING to all non-direct nodes. */ |
+ /* Add edges from STOREDANYTHING to all non-direct nodes that can |
+ receive pointers. */ |
t = find (storedanything_id); |
for (i = integer_id + 1; i < FIRST_REF_NODE; ++i) |
{ |
- if (!TEST_BIT (graph->direct_nodes, i)) |
+ if (!TEST_BIT (graph->direct_nodes, i) |
+ && get_varinfo (i)->may_have_pointers) |
add_graph_edge (graph, find (i), t); |
} |
+ |
+ /* Everything stored to ANYTHING also potentially escapes. */ |
+ add_graph_edge (graph, find (escaped_id), t); |
} |
@@ -1247,10 +1289,6 @@ build_succ_graph (void) |
static unsigned int changed_count; |
static sbitmap changed; |
-DEF_VEC_I(unsigned); |
-DEF_VEC_ALLOC_I(unsigned,heap); |
- |
- |
/* Strongly Connected Component visitation info. */ |
struct scc_info |
@@ -1317,7 +1355,6 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n) |
&& si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs) |
{ |
bitmap scc = BITMAP_ALLOC (NULL); |
- bool have_ref_node = n >= FIRST_REF_NODE; |
unsigned int lowest_node; |
bitmap_iterator bi; |
@@ -1329,8 +1366,6 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n) |
unsigned int w = VEC_pop (unsigned, si->scc_stack); |
bitmap_set_bit (scc, w); |
- if (w >= FIRST_REF_NODE) |
- have_ref_node = true; |
} |
lowest_node = bitmap_first_set_bit (scc); |
@@ -1380,9 +1415,6 @@ unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from, |
merge_graph_nodes (graph, to, from); |
merge_node_constraints (graph, to, from); |
- if (get_varinfo (from)->no_tbaa_pruning) |
- get_varinfo (to)->no_tbaa_pruning = true; |
- |
/* Mark TO as changed if FROM was changed. If TO was already marked |
as changed, decrease the changed count. */ |
@@ -1410,10 +1442,10 @@ unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from, |
changed_count++; |
} |
} |
- |
+ |
BITMAP_FREE (get_varinfo (from)->solution); |
BITMAP_FREE (get_varinfo (from)->oldsolution); |
- |
+ |
if (stats.iterations > 0) |
{ |
BITMAP_FREE (get_varinfo (to)->oldsolution); |
@@ -1485,29 +1517,8 @@ topo_visit (constraint_graph_t graph, struct topo_info *ti, |
VEC_safe_push (unsigned, heap, ti->topo_order, n); |
} |
-/* Return true if variable N + OFFSET is a legal field of N. */ |
- |
-static bool |
-type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset) |
-{ |
- varinfo_t ninfo = get_varinfo (n); |
- |
- /* For things we've globbed to single variables, any offset into the |
- variable acts like the entire variable, so that it becomes offset |
- 0. */ |
- if (ninfo->is_special_var |
- || ninfo->is_artificial_var |
- || ninfo->is_unknown_size_var |
- || ninfo->is_full_var) |
- { |
- *offset = 0; |
- return true; |
- } |
- return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize; |
-} |
- |
-/* Process a constraint C that represents x = *y, using DELTA as the |
- starting solution. */ |
+/* Process a constraint C that represents x = *(y + off), using DELTA as the |
+ starting solution for y. */ |
static void |
do_sd_constraint (constraint_graph_t graph, constraint_t c, |
@@ -1518,73 +1529,47 @@ do_sd_constraint (constraint_graph_t graph, constraint_t c, |
bitmap sol = get_varinfo (lhs)->solution; |
unsigned int j; |
bitmap_iterator bi; |
+ HOST_WIDE_INT roffset = c->rhs.offset; |
- /* For x = *ESCAPED and x = *CALLUSED we want to compute the |
- reachability set of the rhs var. As a pointer to a sub-field |
- of a variable can also reach all other fields of the variable |
- we simply have to expand the solution to contain all sub-fields |
- if one sub-field is contained. */ |
- if (c->rhs.var == find (escaped_id) |
- || c->rhs.var == find (callused_id)) |
- { |
- bitmap vars = NULL; |
- /* In a first pass record all variables we need to add all |
- sub-fields off. This avoids quadratic behavior. */ |
- EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi) |
- { |
- varinfo_t v = get_varinfo (j); |
- if (v->is_full_var) |
- continue; |
- |
- v = lookup_vi_for_tree (v->decl); |
- if (v->next != NULL) |
- { |
- if (vars == NULL) |
- vars = BITMAP_ALLOC (NULL); |
- bitmap_set_bit (vars, v->id); |
- } |
- } |
- /* In the second pass now do the addition to the solution and |
- to speed up solving add it to the delta as well. */ |
- if (vars != NULL) |
- { |
- EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi) |
- { |
- varinfo_t v = get_varinfo (j); |
- for (; v != NULL; v = v->next) |
- { |
- if (bitmap_set_bit (sol, v->id)) |
- { |
- flag = true; |
- bitmap_set_bit (delta, v->id); |
- } |
- } |
- } |
- BITMAP_FREE (vars); |
- } |
- } |
+ /* Our IL does not allow this. */ |
+ gcc_assert (c->lhs.offset == 0); |
+ /* If the solution of Y contains ANYTHING it is good enough to transfer |
+ this to the LHS. */ |
if (bitmap_bit_p (delta, anything_id)) |
{ |
flag |= bitmap_set_bit (sol, anything_id); |
goto done; |
} |
+ /* If we do not know at which offset the rhs is dereferenced, compute |
+ the reachability set of DELTA, conservatively assuming it is |
+ dereferenced at all valid offsets. */ |
+ if (roffset == UNKNOWN_OFFSET) |
+ { |
+ solution_set_expand (delta, delta); |
+ /* No further offset processing is necessary. */ |
+ roffset = 0; |
+ } |
+ |
/* For each variable j in delta (Sol(y)), add |
an edge in the graph from j to x, and union Sol(j) into Sol(x). */ |
EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi) |
{ |
- unsigned HOST_WIDE_INT roffset = c->rhs.offset; |
- if (type_safe (j, &roffset)) |
- { |
- varinfo_t v; |
- unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset; |
- unsigned int t; |
+ varinfo_t v = get_varinfo (j); |
+ HOST_WIDE_INT fieldoffset = v->offset + roffset; |
+ unsigned int t; |
+ |
+ if (v->is_full_var) |
+ fieldoffset = v->offset; |
+ else if (roffset != 0) |
+ v = first_vi_for_offset (v, fieldoffset); |
+ /* If the access is outside of the variable we can ignore it. */ |
+ if (!v) |
+ continue; |
- v = first_vi_for_offset (get_varinfo (j), fieldoffset); |
- /* If the access is outside of the variable we can ignore it. */ |
- if (!v) |
- continue; |
+ do |
+ { |
t = find (v->id); |
/* Adding edges from the special vars is pointless. |
@@ -1592,15 +1577,22 @@ do_sd_constraint (constraint_graph_t graph, constraint_t c, |
if (get_varinfo (t)->is_special_var) |
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution); |
/* Merging the solution from ESCAPED needlessly increases |
- the set. Use ESCAPED as representative instead. |
- Same for CALLUSED. */ |
- else if (get_varinfo (t)->id == find (escaped_id)) |
+ the set. Use ESCAPED as representative instead. */ |
+ else if (v->id == escaped_id) |
flag |= bitmap_set_bit (sol, escaped_id); |
- else if (get_varinfo (t)->id == find (callused_id)) |
- flag |= bitmap_set_bit (sol, callused_id); |
else if (add_graph_edge (graph, lhs, t)) |
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution); |
+ |
+ /* If the variable is not exactly at the requested offset |
+ we have to include the next one. */ |
+ if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset |
+ || v->next == NULL) |
+ break; |
+ |
+ v = v->next; |
+ fieldoffset = v->offset; |
} |
+ while (1); |
} |
done: |
@@ -1616,7 +1608,8 @@ done: |
} |
} |
-/* Process a constraint C that represents *x = y. */ |
+/* Process a constraint C that represents *(x + off) = y using DELTA |
+ as the starting solution for x. */ |
static void |
do_ds_constraint (constraint_t c, bitmap delta) |
@@ -1625,6 +1618,7 @@ do_ds_constraint (constraint_t c, bitmap delta) |
bitmap sol = get_varinfo (rhs)->solution; |
unsigned int j; |
bitmap_iterator bi; |
+ HOST_WIDE_INT loff = c->lhs.offset; |
/* Our IL does not allow this. */ |
gcc_assert (c->rhs.offset == 0); |
@@ -1654,40 +1648,71 @@ do_ds_constraint (constraint_t c, bitmap delta) |
return; |
} |
+ /* If we do not know at which offset the lhs is dereferenced, compute |
+ the reachability set of DELTA, conservatively assuming it is |
+ dereferenced at all valid offsets. */ |
+ if (loff == UNKNOWN_OFFSET) |
+ { |
+ solution_set_expand (delta, delta); |
+ loff = 0; |
+ } |
+ |
/* For each member j of delta (Sol(x)), add an edge from y to j and |
union Sol(y) into Sol(j) */ |
EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi) |
{ |
- unsigned HOST_WIDE_INT loff = c->lhs.offset; |
- if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var)) |
+ varinfo_t v = get_varinfo (j); |
+ unsigned int t; |
+ HOST_WIDE_INT fieldoffset = v->offset + loff; |
+ |
+ /* If v is a global variable then this is an escape point. */ |
+ if (v->is_global_var) |
{ |
- varinfo_t v; |
- unsigned int t; |
- unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff; |
+ t = find (escaped_id); |
+ if (add_graph_edge (graph, t, rhs) |
+ && bitmap_ior_into (get_varinfo (t)->solution, sol) |
+ && !TEST_BIT (changed, t)) |
+ { |
+ SET_BIT (changed, t); |
+ changed_count++; |
+ } |
+ } |
- v = first_vi_for_offset (get_varinfo (j), fieldoffset); |
- /* If the access is outside of the variable we can ignore it. */ |
- if (!v) |
- continue; |
+ if (v->is_special_var) |
+ continue; |
+ |
+ if (v->is_full_var) |
+ fieldoffset = v->offset; |
+ else if (loff != 0) |
+ v = first_vi_for_offset (v, fieldoffset); |
+ /* If the access is outside of the variable we can ignore it. */ |
+ if (!v) |
+ continue; |
+ do |
+ { |
if (v->may_have_pointers) |
{ |
t = find (v->id); |
- if (add_graph_edge (graph, t, rhs)) |
+ if (add_graph_edge (graph, t, rhs) |
+ && bitmap_ior_into (get_varinfo (t)->solution, sol) |
+ && !TEST_BIT (changed, t)) |
{ |
- if (bitmap_ior_into (get_varinfo (t)->solution, sol)) |
- { |
- if (t == rhs) |
- sol = get_varinfo (rhs)->solution; |
- if (!TEST_BIT (changed, t)) |
- { |
- SET_BIT (changed, t); |
- changed_count++; |
- } |
- } |
+ SET_BIT (changed, t); |
+ changed_count++; |
} |
} |
+ |
+ /* If the variable is not exactly at the requested offset |
+ we have to include the next one. */ |
+ if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset |
+ || v->next == NULL) |
+ break; |
+ |
+ v = v->next; |
+ fieldoffset = v->offset; |
} |
+ while (1); |
} |
} |
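
  [Illustrative source patterns (hypothetical types) that now reach
  do_sd_constraint and do_ds_constraint with an UNKNOWN_OFFSET instead of
  collapsing to ANYTHING; the non-constant index makes the dereference offset
  unknown:]

    struct pair { int *first; int *second; };

    int *
    load_variable_slot (struct pair *p, int i)
    {
      int **q = &p->first;
      return q[i];            /* tmp = *(q + UNKNOWN)  */
    }

    void
    store_variable_slot (struct pair *p, int i, int *val)
    {
      int **q = &p->first;
      q[i] = val;             /* *(q + UNKNOWN) = val  */
    }
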
@@ -1847,7 +1872,8 @@ equiv_class_label_eq (const void *p1, const void *p2) |
{ |
const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1; |
const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2; |
- return bitmap_equal_p (eql1->labels, eql2->labels); |
+ return (eql1->hashcode == eql2->hashcode |
+ && bitmap_equal_p (eql1->labels, eql2->labels)); |
} |
/* Lookup a equivalence class in TABLE by the bitmap of LABELS it |
@@ -2260,10 +2286,10 @@ unite_pointer_equivalences (constraint_graph_t graph) |
if (label) |
{ |
int label_rep = graph->pe_rep[label]; |
- |
+ |
if (label_rep == -1) |
continue; |
- |
+ |
label_rep = find (label_rep); |
if (label_rep >= 0 && unite (label_rep, find (i))) |
unify_nodes (graph, label_rep, i, false); |
@@ -2324,8 +2350,8 @@ rewrite_constraints (constraint_graph_t graph, |
{ |
struct constraint_expr lhs = c->lhs; |
struct constraint_expr rhs = c->rhs; |
- unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id); |
- unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id); |
+ unsigned int lhsvar = find (lhs.var); |
+ unsigned int rhsvar = find (rhs.var); |
unsigned int lhsnode, rhsnode; |
unsigned int lhslabel, rhslabel; |
@@ -2340,7 +2366,7 @@ rewrite_constraints (constraint_graph_t graph, |
{ |
if (dump_file && (dump_flags & TDF_DETAILS)) |
{ |
- |
+ |
fprintf (dump_file, "%s is a non-pointer variable," |
"ignoring constraint:", |
get_varinfo (lhs.var)->name); |
@@ -2354,7 +2380,7 @@ rewrite_constraints (constraint_graph_t graph, |
{ |
if (dump_file && (dump_flags & TDF_DETAILS)) |
{ |
- |
+ |
fprintf (dump_file, "%s is a non-pointer variable," |
"ignoring constraint:", |
get_varinfo (rhs.var)->name); |
@@ -2515,12 +2541,10 @@ solve_graph (constraint_graph_t graph) |
solution_empty = bitmap_empty_p (solution); |
- if (!solution_empty |
- /* Do not propagate the ESCAPED/CALLUSED solutions. */ |
- && i != find (escaped_id) |
- && i != find (callused_id)) |
+ if (!solution_empty) |
{ |
bitmap_iterator bi; |
+ unsigned eff_escaped_id = find (escaped_id); |
/* Propagate solution to all successors. */ |
EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], |
@@ -2537,7 +2561,12 @@ solve_graph (constraint_graph_t graph) |
if (to == i) |
continue; |
- flag = set_union_with_increment (tmp, pts, 0); |
+ /* If we propagate from ESCAPED use ESCAPED as |
+ placeholder. */ |
+ if (i == eff_escaped_id) |
+ flag = bitmap_set_bit (tmp, escaped_id); |
+ else |
+ flag = set_union_with_increment (tmp, pts, 0); |
if (flag) |
{ |
@@ -2636,20 +2665,25 @@ get_vi_for_tree (tree t) |
return (varinfo_t) *slot; |
} |
-/* Get a constraint expression for a new temporary variable. */ |
+/* Get a scalar constraint expression for a new temporary variable. */ |
static struct constraint_expr |
-get_constraint_exp_for_temp (tree t) |
+new_scalar_tmp_constraint_exp (const char *name) |
{ |
- struct constraint_expr cexpr; |
+ struct constraint_expr tmp; |
+ varinfo_t vi; |
- gcc_assert (SSA_VAR_P (t)); |
+ vi = new_var_info (NULL_TREE, name); |
+ vi->offset = 0; |
+ vi->size = -1; |
+ vi->fullsize = -1; |
+ vi->is_full_var = 1; |
- cexpr.type = SCALAR; |
- cexpr.var = get_vi_for_tree (t)->id; |
- cexpr.offset = 0; |
+ tmp.var = vi->id; |
+ tmp.type = SCALAR; |
+ tmp.offset = 0; |
- return cexpr; |
+ return tmp; |
} |
/* Get a constraint expression vector from an SSA_VAR_P node. |
@@ -2689,7 +2723,8 @@ get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p) |
/* If we are not taking the address of the constraint expr, add all |
sub-fiels of the variable as well. */ |
- if (!address_p) |
+ if (!address_p |
+ && !vi->is_full_var) |
{ |
for (; vi; vi = vi->next) |
{ |
@@ -2714,39 +2749,30 @@ process_constraint (constraint_t t) |
gcc_assert (rhs.var < VEC_length (varinfo_t, varmap)); |
gcc_assert (lhs.var < VEC_length (varinfo_t, varmap)); |
- /* ANYTHING == ANYTHING is pointless. */ |
- if (lhs.var == anything_id && rhs.var == anything_id) |
- return; |
+ /* If we didn't get any useful constraint from the lhs we get |
+ &ANYTHING as fallback from get_constraint_for. Deal with |
+ it here by turning it into *ANYTHING. */ |
+ if (lhs.type == ADDRESSOF |
+ && lhs.var == anything_id) |
+ lhs.type = DEREF; |
+ |
+ /* ADDRESSOF on the lhs is invalid. */ |
+ gcc_assert (lhs.type != ADDRESSOF); |
- /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */ |
- else if (lhs.var == anything_id && lhs.type == ADDRESSOF) |
- { |
- rhs = t->lhs; |
- t->lhs = t->rhs; |
- t->rhs = rhs; |
- process_constraint (t); |
- } |
/* This can happen in our IR with things like n->a = *p */ |
- else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id) |
+ if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id) |
{ |
/* Split into tmp = *rhs, *lhs = tmp */ |
- tree rhsdecl = get_varinfo (rhs.var)->decl; |
- tree pointertype = TREE_TYPE (rhsdecl); |
- tree pointedtotype = TREE_TYPE (pointertype); |
- tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp"); |
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar); |
- |
+ struct constraint_expr tmplhs; |
+ tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp"); |
process_constraint (new_constraint (tmplhs, rhs)); |
process_constraint (new_constraint (lhs, tmplhs)); |
} |
else if (rhs.type == ADDRESSOF && lhs.type == DEREF) |
{ |
/* Split into tmp = &rhs, *lhs = tmp */ |
- tree rhsdecl = get_varinfo (rhs.var)->decl; |
- tree pointertype = TREE_TYPE (rhsdecl); |
- tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp"); |
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar); |
- |
+ struct constraint_expr tmplhs; |
+ tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp"); |
process_constraint (new_constraint (tmplhs, rhs)); |
process_constraint (new_constraint (lhs, tmplhs)); |
} |
@@ -2777,7 +2803,11 @@ type_could_have_pointers (tree type) |
static bool |
could_have_pointers (tree t) |
{ |
- return type_could_have_pointers (TREE_TYPE (t)); |
+ return (((TREE_CODE (t) == VAR_DECL |
+ || TREE_CODE (t) == PARM_DECL |
+ || TREE_CODE (t) == RESULT_DECL) |
+ && (TREE_PUBLIC (t) || DECL_EXTERNAL (t) || TREE_ADDRESSABLE (t))) |
+ || type_could_have_pointers (TREE_TYPE (t))); |
} |
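
  [Illustrative example (hypothetical code) of the strengthened
  could_have_pointers test: an addressable or externally visible decl is now
  tracked even if its declared type contains no pointers, because stores
  through its escaped address may still deposit pointer values in it, e.g.
  via memcpy:]

    extern void remember (void *);

    int
    address_escapes (void)
    {
      long slot = 0;      /* no pointer members in the type ...            */
      remember (&slot);   /* ... but TREE_ADDRESSABLE: the escaped address
                             may be used to store a pointer into SLOT.     */
      return (int) slot;
    }
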
/* Return the position, in bits, of FIELD_DECL from the beginning of its |
@@ -2805,7 +2835,7 @@ get_constraint_for_ptr_offset (tree ptr, tree offset, |
{ |
struct constraint_expr c; |
unsigned int j, n; |
- unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset; |
+ HOST_WIDE_INT rhsunitoffset, rhsoffset; |
/* If we do not do field-sensitive PTA adding offsets to pointers |
does not change the points-to solution. */ |
@@ -2818,30 +2848,17 @@ get_constraint_for_ptr_offset (tree ptr, tree offset, |
/* If the offset is not a non-negative integer constant that fits |
in a HOST_WIDE_INT, we have to fall back to a conservative |
solution which includes all sub-fields of all pointed-to |
- variables of ptr. |
- ??? As we do not have the ability to express this, fall back |
- to anything. */ |
- if (!host_integerp (offset, 1)) |
- { |
- struct constraint_expr temp; |
- temp.var = anything_id; |
- temp.type = SCALAR; |
- temp.offset = 0; |
- VEC_safe_push (ce_s, heap, *results, &temp); |
- return; |
- } |
- |
- /* Make sure the bit-offset also fits. */ |
- rhsunitoffset = TREE_INT_CST_LOW (offset); |
- rhsoffset = rhsunitoffset * BITS_PER_UNIT; |
- if (rhsunitoffset != rhsoffset / BITS_PER_UNIT) |
+ variables of ptr. */ |
+ if (offset == NULL_TREE |
+ || !host_integerp (offset, 0)) |
+ rhsoffset = UNKNOWN_OFFSET; |
+ else |
{ |
- struct constraint_expr temp; |
- temp.var = anything_id; |
- temp.type = SCALAR; |
- temp.offset = 0; |
- VEC_safe_push (ce_s, heap, *results, &temp); |
- return; |
+ /* Make sure the bit-offset also fits. */ |
+ rhsunitoffset = TREE_INT_CST_LOW (offset); |
+ rhsoffset = rhsunitoffset * BITS_PER_UNIT; |
+ if (rhsunitoffset != rhsoffset / BITS_PER_UNIT) |
+ rhsoffset = UNKNOWN_OFFSET; |
} |
get_constraint_for (ptr, results); |
@@ -2858,36 +2875,50 @@ get_constraint_for_ptr_offset (tree ptr, tree offset, |
curr = get_varinfo (c.var); |
if (c.type == ADDRESSOF |
- && !curr->is_full_var) |
+ /* If this varinfo represents a full variable just use it. */ |
+ && curr->is_full_var) |
+ c.offset = 0; |
+ else if (c.type == ADDRESSOF |
+ /* If we do not know the offset add all subfields. */ |
+ && rhsoffset == UNKNOWN_OFFSET) |
+ { |
+ varinfo_t temp = lookup_vi_for_tree (curr->decl); |
+ do |
+ { |
+ struct constraint_expr c2; |
+ c2.var = temp->id; |
+ c2.type = ADDRESSOF; |
+ c2.offset = 0; |
+ if (c2.var != c.var) |
+ VEC_safe_push (ce_s, heap, *results, &c2); |
+ temp = temp->next; |
+ } |
+ while (temp); |
+ } |
+ else if (c.type == ADDRESSOF) |
{ |
- varinfo_t temp, curr = get_varinfo (c.var); |
+ varinfo_t temp; |
+ unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset; |
/* Search the sub-field which overlaps with the |
- pointed-to offset. As we deal with positive offsets |
- only, we can start the search from the current variable. */ |
- temp = first_vi_for_offset (curr, curr->offset + rhsoffset); |
- |
- /* If the result is outside of the variable we have to provide |
- a conservative result, as the variable is still reachable |
- from the resulting pointer (even though it technically |
- cannot point to anything). The last sub-field is such |
- a conservative result. |
+ pointed-to offset. If the result is outside of the variable |
+ we have to provide a conservative result, as the variable is |
+ still reachable from the resulting pointer (even though it |
+ technically cannot point to anything). The last and first |
+ sub-fields are such conservative results. |
??? If we always had a sub-field for &object + 1 then |
we could represent this in a more precise way. */ |
- if (temp == NULL) |
- { |
- temp = curr; |
- while (temp->next != NULL) |
- temp = temp->next; |
- continue; |
- } |
+ if (rhsoffset < 0 |
+ && curr->offset < offset) |
+ offset = 0; |
+ temp = first_or_preceding_vi_for_offset (curr, offset); |
/* If the found variable is not exactly at the pointed to |
result, we have to include the next variable in the |
solution as well. Otherwise two increments by offset / 2 |
do not result in the same or a conservative superset |
solution. */ |
- if (temp->offset != curr->offset + rhsoffset |
+ if (temp->offset != offset |
&& temp->next != NULL) |
{ |
struct constraint_expr c2; |
@@ -2899,10 +2930,6 @@ get_constraint_for_ptr_offset (tree ptr, tree offset, |
c.var = temp->id; |
c.offset = 0; |
} |
- else if (c.type == ADDRESSOF |
- /* If this varinfo represents a full variable just use it. */ |
- && curr->is_full_var) |
- c.offset = 0; |
else |
c.offset = rhsoffset; |
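
  [Illustrative pointer-arithmetic cases (made-up types) covered by the
  reworked get_constraint_for_ptr_offset: negative constant offsets resolve
  to a preceding sub-field, and non-constant offsets become UNKNOWN_OFFSET
  instead of falling back to ANYTHING:]

    struct T { int a; int b; };

    int
    back_up_one_field (struct T *t)
    {
      int *p = &t->b;
      int *q = p - 1;     /* negative offset: resolves to T.a              */
      return *q;
    }

    int
    unknown_adjustment (struct T *t, int i)
    {
      int *p = &t->a;
      int *q = p + i;     /* non-constant offset: UNKNOWN, all sub-fields  */
      return *q;
    }
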
@@ -2950,10 +2977,6 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results, |
gcc_assert (VEC_length (ce_s, *results) == 1); |
result = VEC_last (ce_s, *results); |
- /* This can also happen due to weird offsetof type macros. */ |
- if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF) |
- result->type = SCALAR; |
- |
if (result->type == SCALAR |
&& get_varinfo (result->var)->is_full_var) |
/* For single-field vars do not bother about the offset. */ |
@@ -3017,15 +3040,28 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results, |
if (dump_file && (dump_flags & TDF_DETAILS)) |
fprintf (dump_file, "Access to past the end of variable, ignoring\n"); |
} |
- else if (bitmaxsize == -1) |
+ else if (result->type == DEREF) |
{ |
- /* We can't handle DEREF constraints with unknown size, we'll |
- get the wrong answer. Punt and return anything. */ |
+ /* If we do not know exactly where the access goes say so. Note |
+ that only for non-structure accesses do we know that we access |
+ at most one subfield of any variable. */ |
+ if (bitpos == -1 |
+ || bitsize != bitmaxsize |
+ || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))) |
+ result->offset = UNKNOWN_OFFSET; |
+ else |
+ result->offset = bitpos; |
+ } |
+ else if (result->type == ADDRESSOF) |
+ { |
+ /* We can end up here for component references on a |
+ VIEW_CONVERT_EXPR <>(&foobar). */ |
+ result->type = SCALAR; |
result->var = anything_id; |
result->offset = 0; |
} |
else |
- result->offset = bitpos; |
+ gcc_unreachable (); |
} |
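
  [Illustrative accesses (hypothetical types) for the new DEREF handling in
  get_constraint_for_component_ref: scalar members read through a pointer
  keep their exact bit offset, while aggregate or variably positioned members
  are marked UNKNOWN_OFFSET:]

    struct inner { int *p; int *q; };
    struct outer { struct inner in; int *r; };

    int *
    exact_member (struct outer *o)
    {
      return o->r;        /* non-aggregate access: offset = bitpos of r    */
    }

    struct inner
    whole_member (struct outer *o)
    {
      return o->in;       /* aggregate access: offset = UNKNOWN_OFFSET     */
    }
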
@@ -3049,8 +3085,8 @@ do_deref (VEC (ce_s, heap) **constraints) |
c->type = SCALAR; |
else if (c->type == DEREF) |
{ |
- tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp"); |
- struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar); |
+ struct constraint_expr tmplhs; |
+ tmplhs = new_scalar_tmp_constraint_exp ("dereftmp"); |
process_constraint (new_constraint (tmplhs, *c)); |
c->var = tmplhs.var; |
} |
@@ -3059,6 +3095,28 @@ do_deref (VEC (ce_s, heap) **constraints) |
} |
} |
+static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool); |
+ |
+/* Given a tree T, return the constraint expression for taking the |
+ address of it. */ |
+ |
+static void |
+get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results) |
+{ |
+ struct constraint_expr *c; |
+ unsigned int i; |
+ |
+ get_constraint_for_1 (t, results, true); |
+ |
+ for (i = 0; VEC_iterate (ce_s, *results, i, c); i++) |
+ { |
+ if (c->type == DEREF) |
+ c->type = SCALAR; |
+ else |
+ c->type = ADDRESSOF; |
+ } |
+} |
+ |
/* Given a tree T, return the constraint expression for it. */ |
static void |
@@ -3080,8 +3138,11 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p) |
It is not worth adding a new option or renaming the existing one, |
since this case is relatively obscure. */ |
if (flag_delete_null_pointer_checks |
- && TREE_CODE (t) == INTEGER_CST |
- && integer_zerop (t)) |
+ && ((TREE_CODE (t) == INTEGER_CST |
+ && integer_zerop (t)) |
+ /* The only valid CONSTRUCTORs in gimple with pointer typed |
+ elements are zero-initializers. */ |
+ || TREE_CODE (t) == CONSTRUCTOR)) |
{ |
temp.var = nothing_id; |
temp.type = ADDRESSOF; |
@@ -3107,23 +3168,8 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p) |
switch (TREE_CODE (t)) |
{ |
case ADDR_EXPR: |
- { |
- struct constraint_expr *c; |
- unsigned int i; |
- tree exp = TREE_OPERAND (t, 0); |
- |
- get_constraint_for_1 (exp, results, true); |
- |
- for (i = 0; VEC_iterate (ce_s, *results, i, c); i++) |
- { |
- if (c->type == DEREF) |
- c->type = SCALAR; |
- else |
- c->type = ADDRESSOF; |
- } |
- return; |
- } |
- break; |
+ get_constraint_for_address_of (TREE_OPERAND (t, 0), results); |
+ return; |
default:; |
} |
break; |
@@ -3143,6 +3189,10 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p) |
case COMPONENT_REF: |
get_constraint_for_component_ref (t, results, address_p); |
return; |
+ case VIEW_CONVERT_EXPR: |
+ get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p); |
+ return; |
+ /* We are missing handling for TARGET_MEM_REF here. */ |
default:; |
} |
break; |
@@ -3185,277 +3235,87 @@ get_constraint_for (tree t, VEC (ce_s, heap) **results) |
get_constraint_for_1 (t, results, false); |
} |
-/* Handle the structure copy case where we have a simple structure copy |
- between LHS and RHS that is of SIZE (in bits) |
- For each field of the lhs variable (lhsfield) |
- For each field of the rhs variable at lhsfield.offset (rhsfield) |
- add the constraint lhsfield = rhsfield |
- |
- If we fail due to some kind of type unsafety or other thing we |
- can't handle, return false. We expect the caller to collapse the |
- variable in that case. */ |
- |
-static bool |
-do_simple_structure_copy (const struct constraint_expr lhs, |
- const struct constraint_expr rhs, |
- const unsigned HOST_WIDE_INT size) |
-{ |
- varinfo_t p = get_varinfo (lhs.var); |
- unsigned HOST_WIDE_INT pstart, last; |
- pstart = p->offset; |
- last = p->offset + size; |
- for (; p && p->offset < last; p = p->next) |
- { |
- varinfo_t q; |
- struct constraint_expr templhs = lhs; |
- struct constraint_expr temprhs = rhs; |
- unsigned HOST_WIDE_INT fieldoffset; |
- |
- templhs.var = p->id; |
- q = get_varinfo (temprhs.var); |
- fieldoffset = p->offset - pstart; |
- q = first_vi_for_offset (q, q->offset + fieldoffset); |
- if (!q) |
- return false; |
- temprhs.var = q->id; |
- process_constraint (new_constraint (templhs, temprhs)); |
- } |
- return true; |
-} |
- |
- |
-/* Handle the structure copy case where we have a structure copy between a |
- aggregate on the LHS and a dereference of a pointer on the RHS |
- that is of SIZE (in bits) |
- |
- For each field of the lhs variable (lhsfield) |
- rhs.offset = lhsfield->offset |
- add the constraint lhsfield = rhs |
-*/ |
+/* Efficiently generates constraints from all entries in *RHSC to all |
+ entries in *LHSC. */ |
static void |
-do_rhs_deref_structure_copy (const struct constraint_expr lhs, |
- const struct constraint_expr rhs, |
- const unsigned HOST_WIDE_INT size) |
+process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc) |
{ |
- varinfo_t p = get_varinfo (lhs.var); |
- unsigned HOST_WIDE_INT pstart,last; |
- pstart = p->offset; |
- last = p->offset + size; |
+ struct constraint_expr *lhsp, *rhsp; |
+ unsigned i, j; |
- for (; p && p->offset < last; p = p->next) |
+ if (VEC_length (ce_s, lhsc) <= 1 |
+ || VEC_length (ce_s, rhsc) <= 1) |
{ |
- varinfo_t q; |
- struct constraint_expr templhs = lhs; |
- struct constraint_expr temprhs = rhs; |
- unsigned HOST_WIDE_INT fieldoffset; |
- |
- |
- if (templhs.type == SCALAR) |
- templhs.var = p->id; |
- else |
- templhs.offset = p->offset; |
- |
- q = get_varinfo (temprhs.var); |
- fieldoffset = p->offset - pstart; |
- temprhs.offset += fieldoffset; |
- process_constraint (new_constraint (templhs, temprhs)); |
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i) |
+ for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j) |
+ process_constraint (new_constraint (*lhsp, *rhsp)); |
} |
-} |
- |
-/* Handle the structure copy case where we have a structure copy |
- between an aggregate on the RHS and a dereference of a pointer on |
- the LHS that is of SIZE (in bits) |
- |
- For each field of the rhs variable (rhsfield) |
- lhs.offset = rhsfield->offset |
- add the constraint lhs = rhsfield |
-*/ |
- |
-static void |
-do_lhs_deref_structure_copy (const struct constraint_expr lhs, |
- const struct constraint_expr rhs, |
- const unsigned HOST_WIDE_INT size) |
-{ |
- varinfo_t p = get_varinfo (rhs.var); |
- unsigned HOST_WIDE_INT pstart,last; |
- pstart = p->offset; |
- last = p->offset + size; |
- |
- for (; p && p->offset < last; p = p->next) |
+ else |
{ |
- varinfo_t q; |
- struct constraint_expr templhs = lhs; |
- struct constraint_expr temprhs = rhs; |
- unsigned HOST_WIDE_INT fieldoffset; |
- |
- |
- if (temprhs.type == SCALAR) |
- temprhs.var = p->id; |
- else |
- temprhs.offset = p->offset; |
- |
- q = get_varinfo (templhs.var); |
- fieldoffset = p->offset - pstart; |
- templhs.offset += fieldoffset; |
- process_constraint (new_constraint (templhs, temprhs)); |
+ struct constraint_expr tmp; |
+ tmp = new_scalar_tmp_constraint_exp ("allalltmp"); |
+ for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i) |
+ process_constraint (new_constraint (tmp, *rhsp)); |
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i) |
+ process_constraint (new_constraint (*lhsp, tmp)); |
} |
} |
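
  [A quick back-of-the-envelope check, not part of the patch, of why
  process_all_all_constraints routes larger sets through the "allalltmp"
  temporary: connecting every LHS entry to every RHS entry directly costs
  n*m copy constraints, the temporary only n+m:]

    #include <stdio.h>

    int
    main (void)
    {
      unsigned n = 16, m = 16;    /* sub-fields on each side of the copy */
      printf ("direct:  %u constraints\n", n * m);   /* 256 */
      printf ("via tmp: %u constraints\n", n + m);   /*  32 */
      return 0;
    }
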
-/* Sometimes, frontends like to give us bad type information. This |
- function will collapse all the fields from VAR to the end of VAR, |
- into VAR, so that we treat those fields as a single variable. |
- We return the variable they were collapsed into. */ |
- |
-static unsigned int |
-collapse_rest_of_var (unsigned int var) |
-{ |
- varinfo_t currvar = get_varinfo (var); |
- varinfo_t field; |
- |
- for (field = currvar->next; field; field = field->next) |
- { |
- if (dump_file) |
- fprintf (dump_file, "Type safety: Collapsing var %s into %s\n", |
- field->name, currvar->name); |
- |
- gcc_assert (field->collapsed_to == 0); |
- field->collapsed_to = currvar->id; |
- } |
- |
- currvar->next = NULL; |
- currvar->size = currvar->fullsize - currvar->offset; |
- |
- return currvar->id; |
-} |
- |
/* Handle aggregate copies by expanding into copies of the respective |
fields of the structures. */ |
static void |
do_structure_copy (tree lhsop, tree rhsop) |
{ |
- struct constraint_expr lhs, rhs, tmp; |
+ struct constraint_expr *lhsp, *rhsp; |
VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL; |
- varinfo_t p; |
- unsigned HOST_WIDE_INT lhssize; |
- unsigned HOST_WIDE_INT rhssize; |
- |
- /* Pretend we are taking the address of the constraint exprs. |
- We deal with walking the sub-fields ourselves. */ |
- get_constraint_for_1 (lhsop, &lhsc, true); |
- get_constraint_for_1 (rhsop, &rhsc, true); |
- gcc_assert (VEC_length (ce_s, lhsc) == 1); |
- gcc_assert (VEC_length (ce_s, rhsc) == 1); |
- lhs = *(VEC_last (ce_s, lhsc)); |
- rhs = *(VEC_last (ce_s, rhsc)); |
- |
- VEC_free (ce_s, heap, lhsc); |
- VEC_free (ce_s, heap, rhsc); |
- |
- /* If we have special var = x, swap it around. */ |
- if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var)) |
- { |
- tmp = lhs; |
- lhs = rhs; |
- rhs = tmp; |
- } |
- |
- /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's |
- possible it's something we could handle. However, most cases falling |
- into this are dealing with transparent unions, which are slightly |
- weird. */ |
- if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var)) |
- { |
- rhs.type = ADDRESSOF; |
- rhs.var = anything_id; |
- } |
- |
- /* If the RHS is a special var, or an addressof, set all the LHS fields to |
- that special var. */ |
- if (rhs.var <= integer_id) |
+ unsigned j; |
+ |
+ get_constraint_for (lhsop, &lhsc); |
+ get_constraint_for (rhsop, &rhsc); |
+ lhsp = VEC_index (ce_s, lhsc, 0); |
+ rhsp = VEC_index (ce_s, rhsc, 0); |
+ if (lhsp->type == DEREF |
+ || (lhsp->type == ADDRESSOF && lhsp->var == anything_id) |
+ || rhsp->type == DEREF) |
+ process_all_all_constraints (lhsc, rhsc); |
+ else if (lhsp->type == SCALAR |
+ && (rhsp->type == SCALAR |
+ || rhsp->type == ADDRESSOF)) |
{ |
- for (p = get_varinfo (lhs.var); p; p = p->next) |
+ HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset; |
+ HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset; |
+ unsigned k = 0; |
+ get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize); |
+ get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize); |
+ for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);) |
{ |
- struct constraint_expr templhs = lhs; |
- struct constraint_expr temprhs = rhs; |
- |
- if (templhs.type == SCALAR ) |
- templhs.var = p->id; |
+ varinfo_t lhsv, rhsv; |
+ rhsp = VEC_index (ce_s, rhsc, k); |
+ lhsv = get_varinfo (lhsp->var); |
+ rhsv = get_varinfo (rhsp->var); |
+ if (lhsv->may_have_pointers |
+ && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size, |
+ rhsv->offset + lhsoffset, rhsv->size)) |
+ process_constraint (new_constraint (*lhsp, *rhsp)); |
+ if (lhsv->offset + rhsoffset + lhsv->size |
+ > rhsv->offset + lhsoffset + rhsv->size) |
+ { |
+ ++k; |
+ if (k >= VEC_length (ce_s, rhsc)) |
+ break; |
+ } |
else |
- templhs.offset += p->offset; |
- process_constraint (new_constraint (templhs, temprhs)); |
+ ++j; |
} |
} |
else |
- { |
- tree rhstype = TREE_TYPE (rhsop); |
- tree lhstype = TREE_TYPE (lhsop); |
- tree rhstypesize; |
- tree lhstypesize; |
- |
- lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype); |
- rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype); |
- |
- /* If we have a variably sized types on the rhs or lhs, and a deref |
- constraint, add the constraint, lhsconstraint = &ANYTHING. |
- This is conservatively correct because either the lhs is an unknown |
- sized var (if the constraint is SCALAR), or the lhs is a DEREF |
- constraint, and every variable it can point to must be unknown sized |
- anyway, so we don't need to worry about fields at all. */ |
- if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST) |
- || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST)) |
- { |
- rhs.var = anything_id; |
- rhs.type = ADDRESSOF; |
- rhs.offset = 0; |
- process_constraint (new_constraint (lhs, rhs)); |
- return; |
- } |
- |
- /* The size only really matters insofar as we don't set more or less of |
- the variable. If we hit an unknown size var, the size should be the |
- whole darn thing. */ |
- if (get_varinfo (rhs.var)->is_unknown_size_var) |
- rhssize = ~0; |
- else |
- rhssize = TREE_INT_CST_LOW (rhstypesize); |
- |
- if (get_varinfo (lhs.var)->is_unknown_size_var) |
- lhssize = ~0; |
- else |
- lhssize = TREE_INT_CST_LOW (lhstypesize); |
- |
- |
- if (rhs.type == SCALAR && lhs.type == SCALAR) |
- { |
- if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize))) |
- { |
- lhs.var = collapse_rest_of_var (get_varinfo_fc (lhs.var)->id); |
- rhs.var = collapse_rest_of_var (get_varinfo_fc (rhs.var)->id); |
- lhs.offset = 0; |
- rhs.offset = 0; |
- lhs.type = SCALAR; |
- rhs.type = SCALAR; |
- process_constraint (new_constraint (lhs, rhs)); |
- } |
- } |
- else if (lhs.type != DEREF && rhs.type == DEREF) |
- do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize)); |
- else if (lhs.type == DEREF && rhs.type != DEREF) |
- do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize)); |
- else |
- { |
- tree pointedtotype = lhstype; |
- tree tmpvar; |
+ gcc_unreachable (); |
- gcc_assert (rhs.type == DEREF && lhs.type == DEREF); |
- tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp"); |
- do_structure_copy (tmpvar, rhsop); |
- do_structure_copy (lhsop, tmpvar); |
- } |
- } |
+ VEC_free (ce_s, heap, lhsc); |
+ VEC_free (ce_s, heap, rhsc); |
} |
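
  [Illustrative aggregate copies (made-up types): when both sides are plain
  decls, the rewritten do_structure_copy pairs up overlapping sub-fields;
  copies through a dereferenced pointer fall back to
  process_all_all_constraints:]

    struct box { int *lo; int *hi; };

    void
    copy_boxes (struct box *out, struct box a, struct box b)
    {
      a = b;        /* field-wise: a.lo = b.lo, a.hi = b.hi           */
      *out = a;     /* DEREF on the lhs: all-to-all via a temporary   */
    }
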
/* Create a constraint ID = OP. */ |
@@ -3478,6 +3338,40 @@ make_constraint_to (unsigned id, tree op) |
VEC_free (ce_s, heap, rhsc); |
} |
+/* Create a constraint ID = &FROM. */ |
+ |
+static void |
+make_constraint_from (varinfo_t vi, int from) |
+{ |
+ struct constraint_expr lhs, rhs; |
+ |
+ lhs.var = vi->id; |
+ lhs.offset = 0; |
+ lhs.type = SCALAR; |
+ |
+ rhs.var = from; |
+ rhs.offset = 0; |
+ rhs.type = ADDRESSOF; |
+ process_constraint (new_constraint (lhs, rhs)); |
+} |
+ |
+/* Create a constraint ID = FROM. */ |
+ |
+static void |
+make_copy_constraint (varinfo_t vi, int from) |
+{ |
+ struct constraint_expr lhs, rhs; |
+ |
+ lhs.var = vi->id; |
+ lhs.offset = 0; |
+ lhs.type = SCALAR; |
+ |
+ rhs.var = from; |
+ rhs.offset = 0; |
+ rhs.type = SCALAR; |
+ process_constraint (new_constraint (lhs, rhs)); |
+} |
+ |
/* Make constraints necessary to make OP escape. */ |
static void |
@@ -3486,12 +3380,69 @@ make_escape_constraint (tree op) |
make_constraint_to (escaped_id, op); |
} |
+/* Create a new artificial heap variable with NAME and make a |
+ constraint from it to LHS. Return the created variable. */ |
+ |
+static varinfo_t |
+make_constraint_from_heapvar (varinfo_t lhs, const char *name) |
+{ |
+ varinfo_t vi; |
+ tree heapvar = heapvar_lookup (lhs->decl, lhs->offset); |
+ |
+ if (heapvar == NULL_TREE) |
+ { |
+ var_ann_t ann; |
+ heapvar = create_tmp_var_raw (ptr_type_node, name); |
+ DECL_EXTERNAL (heapvar) = 1; |
+ |
+ heapvar_insert (lhs->decl, lhs->offset, heapvar); |
+ |
+ ann = get_var_ann (heapvar); |
+ ann->is_heapvar = 1; |
+ } |
+ |
+ /* For global vars we need to add a heapvar to the list of referenced |
+ vars of a different function than it was created for originally. */ |
+ if (gimple_referenced_vars (cfun)) |
+ add_referenced_var (heapvar); |
+ |
+ vi = new_var_info (heapvar, name); |
+ vi->is_artificial_var = true; |
+ vi->is_heap_var = true; |
+ vi->is_unknown_size_var = true; |
+ vi->offset = 0; |
+ vi->fullsize = ~0; |
+ vi->size = ~0; |
+ vi->is_full_var = true; |
+ insert_vi_for_tree (heapvar, vi); |
+ |
+ make_constraint_from (lhs, vi->id); |
+ |
+ return vi; |
+} |
+ |
+/* Create a new artificial heap variable with NAME and make a |
+ constraint from it to LHS. Set flags according to a tag used |
+ for tracking restrict pointers. */ |
+ |
+static void |
+make_constraint_from_restrict (varinfo_t lhs, const char *name) |
+{ |
+ varinfo_t vi; |
+ vi = make_constraint_from_heapvar (lhs, name); |
+ vi->is_restrict_var = 1; |
+ vi->is_global_var = 0; |
+ vi->is_special_var = 1; |
+ vi->may_have_pointers = 0; |
+} |
+ |
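
  [A sketch of the kind of source the new is_restrict_var machinery is aimed
  at (illustrative only): each restrict-qualified pointer gets its own
  artificial heap variable as a tag, so accesses through dst and src can be
  told apart:]

    void
    scale (float *restrict dst, const float *restrict src, int n)
    {
      int i;
      for (i = 0; i < n; ++i)
        dst[i] = 2.0f * src[i];   /* dst and src carry distinct restrict tags */
    }
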
/* For non-IPA mode, generate constraints necessary for a call on the |
RHS. */ |
static void |
-handle_rhs_call (gimple stmt) |
+handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results) |
{ |
+ struct constraint_expr rhsc; |
unsigned i; |
for (i = 0; i < gimple_call_num_args (stmt); ++i) |
@@ -3507,6 +3458,28 @@ handle_rhs_call (gimple stmt) |
/* The static chain escapes as well. */ |
if (gimple_call_chain (stmt)) |
make_escape_constraint (gimple_call_chain (stmt)); |
+ |
+ /* And if we applied NRV the address of the return slot escapes as well. */ |
+ if (gimple_call_return_slot_opt_p (stmt) |
+ && gimple_call_lhs (stmt) != NULL_TREE |
+ && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt)))) |
+ { |
+ VEC(ce_s, heap) *tmpc = NULL; |
+ struct constraint_expr lhsc, *c; |
+ get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc); |
+ lhsc.var = escaped_id; |
+ lhsc.offset = 0; |
+ lhsc.type = SCALAR; |
+ for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i) |
+ process_constraint (new_constraint (lhsc, *c)); |
+ VEC_free(ce_s, heap, tmpc); |
+ } |
+ |
+ /* Regular functions return nonlocal memory. */ |
+ rhsc.var = nonlocal_id; |
+ rhsc.offset = 0; |
+ rhsc.type = SCALAR; |
+ VEC_safe_push (ce_s, heap, *results, &rhsc); |
} |
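
  [Illustrative call handling in non-IPA mode (hypothetical callee): pointer
  arguments and the static chain escape, and the returned pointer is now
  assumed to point to NONLOCAL memory rather than being unconstrained:]

    extern char *lookup (char *key);

    char *
    use_call (char *key)
    {
      char *val = lookup (key);   /* key escapes; val points to NONLOCAL */
      return val;
    }
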
/* For non-IPA mode, generate constraints necessary for a call |
@@ -3514,49 +3487,44 @@ handle_rhs_call (gimple stmt) |
the LHS point to global and escaped variables. */ |
static void |
-handle_lhs_call (tree lhs, int flags) |
+handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc, tree fndecl) |
{ |
VEC(ce_s, heap) *lhsc = NULL; |
- struct constraint_expr rhsc; |
- unsigned int j; |
- struct constraint_expr *lhsp; |
get_constraint_for (lhs, &lhsc); |
if (flags & ECF_MALLOC) |
{ |
- tree heapvar = heapvar_lookup (lhs); |
varinfo_t vi; |
- |
- if (heapvar == NULL) |
- { |
- heapvar = create_tmp_var_raw (ptr_type_node, "HEAP"); |
- DECL_EXTERNAL (heapvar) = 1; |
- get_var_ann (heapvar)->is_heapvar = 1; |
- if (gimple_referenced_vars (cfun)) |
- add_referenced_var (heapvar); |
- heapvar_insert (lhs, heapvar); |
- } |
- |
- rhsc.var = create_variable_info_for (heapvar, |
- alias_get_name (heapvar)); |
- vi = get_varinfo (rhsc.var); |
- vi->is_artificial_var = 1; |
- vi->is_heap_var = 1; |
- vi->is_unknown_size_var = true; |
- vi->fullsize = ~0; |
- vi->size = ~0; |
- rhsc.type = ADDRESSOF; |
- rhsc.offset = 0; |
+ vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP"); |
+ /* We delay marking allocated storage global until we know if |
+ it escapes. */ |
+ DECL_EXTERNAL (vi->decl) = 0; |
+ vi->is_global_var = 0; |
+ /* If this is not a real malloc call assume the memory was |
+ initialized and thus may point to global memory. All |
+ builtin functions with the malloc attribute behave in a sane way. */ |
+ if (!fndecl |
+ || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL) |
+ make_constraint_from (vi, nonlocal_id); |
} |
- else |
+ else if (VEC_length (ce_s, rhsc) > 0) |
{ |
- rhsc.var = escaped_id; |
- rhsc.offset = 0; |
- rhsc.type = ADDRESSOF; |
+ /* If the store is to a global decl make sure to |
+ add proper escape constraints. */ |
+ lhs = get_base_address (lhs); |
+ if (lhs |
+ && DECL_P (lhs) |
+ && is_global_var (lhs)) |
+ { |
+ struct constraint_expr tmpc; |
+ tmpc.var = escaped_id; |
+ tmpc.offset = 0; |
+ tmpc.type = SCALAR; |
+ VEC_safe_push (ce_s, heap, lhsc, &tmpc); |
+ } |
+ process_all_all_constraints (lhsc, rhsc); |
} |
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++) |
- process_constraint (new_constraint (*lhsp, rhsc)); |
VEC_free (ce_s, heap, lhsc); |
} |
@@ -3564,43 +3532,22 @@ handle_lhs_call (tree lhs, int flags) |
const function that returns a pointer in the statement STMT. */ |
static void |
-handle_const_call (gimple stmt) |
+handle_const_call (gimple stmt, VEC(ce_s, heap) **results) |
{ |
- tree lhs = gimple_call_lhs (stmt); |
- VEC(ce_s, heap) *lhsc = NULL; |
struct constraint_expr rhsc; |
- unsigned int j, k; |
- struct constraint_expr *lhsp; |
- tree tmpvar; |
- struct constraint_expr tmpc; |
- |
- get_constraint_for (lhs, &lhsc); |
+ unsigned int k; |
- /* If this is a nested function then it can return anything. */ |
+ /* Treat nested const functions the same as pure functions as far |
+ as the static chain is concerned. */ |
if (gimple_call_chain (stmt)) |
{ |
- rhsc.var = anything_id; |
+ make_constraint_to (callused_id, gimple_call_chain (stmt)); |
+ rhsc.var = callused_id; |
rhsc.offset = 0; |
- rhsc.type = ADDRESSOF; |
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++) |
- process_constraint (new_constraint (*lhsp, rhsc)); |
- VEC_free (ce_s, heap, lhsc); |
- return; |
+ rhsc.type = SCALAR; |
+ VEC_safe_push (ce_s, heap, *results, &rhsc); |
} |
- /* We always use a temporary here, otherwise we end up with a quadratic |
- amount of constraints for |
- large_struct = const_call (large_struct); |
- in field-sensitive PTA. */ |
- tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp"); |
- tmpc = get_constraint_exp_for_temp (tmpvar); |
- |
- /* May return addresses of globals. */ |
- rhsc.var = nonlocal_id; |
- rhsc.offset = 0; |
- rhsc.type = ADDRESSOF; |
- process_constraint (new_constraint (tmpc, rhsc)); |
- |
/* May return arguments. */ |
for (k = 0; k < gimple_call_num_args (stmt); ++k) |
{ |
@@ -3609,29 +3556,31 @@ handle_const_call (gimple stmt) |
if (could_have_pointers (arg)) |
{ |
VEC(ce_s, heap) *argc = NULL; |
+ unsigned i; |
struct constraint_expr *argp; |
- int i; |
- |
get_constraint_for (arg, &argc); |
- for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++) |
- process_constraint (new_constraint (tmpc, *argp)); |
- VEC_free (ce_s, heap, argc); |
+ for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i) |
+ VEC_safe_push (ce_s, heap, *results, argp); |
+	  VEC_free (ce_s, heap, argc);
} |
} |
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++) |
- process_constraint (new_constraint (*lhsp, tmpc)); |
- |
- VEC_free (ce_s, heap, lhsc); |
+ /* May return addresses of globals. */ |
+ rhsc.var = nonlocal_id; |
+ rhsc.offset = 0; |
+ rhsc.type = ADDRESSOF; |
+ VEC_safe_push (ce_s, heap, *results, &rhsc); |
} |
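/* A sketch of handle_const_call; PICK is a hypothetical const function
   and the constraints in the comment are approximate.  */

extern int *pick (int *, int *) __attribute__ ((const));

static int *
const_call_example (int *a, int *b)
{
  /* *RESULTS receives copies of the argument constraints plus
     &NONLOCAL, so effectively p = a, p = b and p = &NONLOCAL: the
     result may alias the arguments or global memory, but the const
     call is not an escape point for A or B.  */
  int *p = pick (a, b);
  return p;
}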
/* For non-IPA mode, generate constraints necessary for a call to a |
pure function in statement STMT. */ |
static void |
-handle_pure_call (gimple stmt) |
+handle_pure_call (gimple stmt, VEC(ce_s, heap) **results) |
{ |
+ struct constraint_expr rhsc; |
unsigned i; |
+ bool need_callused = false; |
/* Memory reached from pointer arguments is call-used. */ |
for (i = 0; i < gimple_call_num_args (stmt); ++i) |
@@ -3639,48 +3588,31 @@ handle_pure_call (gimple stmt) |
tree arg = gimple_call_arg (stmt, i); |
if (could_have_pointers (arg)) |
- make_constraint_to (callused_id, arg); |
+ { |
+ make_constraint_to (callused_id, arg); |
+ need_callused = true; |
+ } |
} |
/* The static chain is used as well. */ |
if (gimple_call_chain (stmt)) |
- make_constraint_to (callused_id, gimple_call_chain (stmt)); |
- |
- /* If the call returns a pointer it may point to reachable memory |
- from the arguments. Not so for malloc functions though. */ |
- if (gimple_call_lhs (stmt) |
- && could_have_pointers (gimple_call_lhs (stmt)) |
- && !(gimple_call_flags (stmt) & ECF_MALLOC)) |
{ |
- tree lhs = gimple_call_lhs (stmt); |
- VEC(ce_s, heap) *lhsc = NULL; |
- struct constraint_expr rhsc; |
- struct constraint_expr *lhsp; |
- unsigned j; |
- |
- get_constraint_for (lhs, &lhsc); |
- |
- /* If this is a nested function then it can return anything. */ |
- if (gimple_call_chain (stmt)) |
- { |
- rhsc.var = anything_id; |
- rhsc.offset = 0; |
- rhsc.type = ADDRESSOF; |
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++) |
- process_constraint (new_constraint (*lhsp, rhsc)); |
- VEC_free (ce_s, heap, lhsc); |
- return; |
- } |
+ make_constraint_to (callused_id, gimple_call_chain (stmt)); |
+ need_callused = true; |
+ } |
- /* Else just add the call-used memory here. Escaped variables |
- and globals will be dealt with in handle_lhs_call. */ |
+ /* Pure functions may return callused and nonlocal memory. */ |
+ if (need_callused) |
+ { |
rhsc.var = callused_id; |
rhsc.offset = 0; |
- rhsc.type = ADDRESSOF; |
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++) |
- process_constraint (new_constraint (*lhsp, rhsc)); |
- VEC_free (ce_s, heap, lhsc); |
+ rhsc.type = SCALAR; |
+ VEC_safe_push (ce_s, heap, *results, &rhsc); |
} |
+ rhsc.var = nonlocal_id; |
+ rhsc.offset = 0; |
+ rhsc.type = SCALAR; |
+ VEC_safe_push (ce_s, heap, *results, &rhsc); |
} |
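/* A sketch of handle_pure_call; FIND_IN is a hypothetical pure
   function and the constraints in the comment are approximate.  */

extern int *find_in (int *) __attribute__ ((pure));

static int *
pure_call_example (int *a)
{
  /* Memory reachable from A becomes call-used (CALLUSED = a), and the
     result may point into call-used and global memory, so *RESULTS
     ends up containing CALLUSED and NONLOCAL.  */
  int *p = find_in (a);
  return p;
}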
/* Walk statement T setting up aliasing constraints according to the |
@@ -3695,7 +3627,6 @@ find_func_aliases (gimple origt) |
VEC(ce_s, heap) *lhsc = NULL; |
VEC(ce_s, heap) *rhsc = NULL; |
struct constraint_expr *c; |
- enum escape_type stmt_escape_type; |
/* Now build constraints expressions. */ |
if (gimple_code (t) == GIMPLE_PHI) |
@@ -3714,11 +3645,9 @@ find_func_aliases (gimple origt) |
get_constraint_for (gimple_phi_result (t), &lhsc); |
for (i = 0; i < gimple_phi_num_args (t); i++) |
{ |
- tree rhstype; |
tree strippedrhs = PHI_ARG_DEF (t, i); |
STRIP_NOPS (strippedrhs); |
- rhstype = TREE_TYPE (strippedrhs); |
get_constraint_for (gimple_phi_arg_def (t, i), &rhsc); |
for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++) |
@@ -3743,35 +3672,145 @@ find_func_aliases (gimple origt) |
pointer passed by address. */ |
else if (is_gimple_call (t)) |
{ |
- if (!in_ipa_mode) |
+ tree fndecl = gimple_call_fndecl (t); |
+ if (fndecl != NULL_TREE |
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) |
+ /* ??? All builtins that are handled here need to be handled |
+ in the alias-oracle query functions explicitly! */ |
+ switch (DECL_FUNCTION_CODE (fndecl)) |
+ { |
+	  /* All the following functions return a pointer to the same object
+	     as their first argument points to.  The functions do not add
+	     to the ESCAPED solution.  They make the memory pointed to by
+	     their first argument point to what the memory pointed to by
+	     their second argument points to.  */
+ case BUILT_IN_STRCPY: |
+ case BUILT_IN_STRNCPY: |
+ case BUILT_IN_BCOPY: |
+ case BUILT_IN_MEMCPY: |
+ case BUILT_IN_MEMMOVE: |
+ case BUILT_IN_MEMPCPY: |
+ case BUILT_IN_STPCPY: |
+ case BUILT_IN_STPNCPY: |
+ case BUILT_IN_STRCAT: |
+ case BUILT_IN_STRNCAT: |
+ { |
+ tree res = gimple_call_lhs (t); |
+ tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl) |
+ == BUILT_IN_BCOPY ? 1 : 0)); |
+ tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl) |
+ == BUILT_IN_BCOPY ? 0 : 1)); |
+ if (res != NULL_TREE) |
+ { |
+ get_constraint_for (res, &lhsc); |
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY |
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY |
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY) |
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc); |
+ else |
+ get_constraint_for (dest, &rhsc); |
+ process_all_all_constraints (lhsc, rhsc); |
+ VEC_free (ce_s, heap, lhsc); |
+ VEC_free (ce_s, heap, rhsc); |
+ } |
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc); |
+ get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc); |
+ do_deref (&lhsc); |
+ do_deref (&rhsc); |
+ process_all_all_constraints (lhsc, rhsc); |
+ VEC_free (ce_s, heap, lhsc); |
+ VEC_free (ce_s, heap, rhsc); |
+ return; |
+ } |
+ case BUILT_IN_MEMSET: |
+ { |
+ tree res = gimple_call_lhs (t); |
+ tree dest = gimple_call_arg (t, 0); |
+ unsigned i; |
+ ce_s *lhsp; |
+ struct constraint_expr ac; |
+ if (res != NULL_TREE) |
+ { |
+ get_constraint_for (res, &lhsc); |
+ get_constraint_for (dest, &rhsc); |
+ process_all_all_constraints (lhsc, rhsc); |
+ VEC_free (ce_s, heap, lhsc); |
+ VEC_free (ce_s, heap, rhsc); |
+ } |
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc); |
+ do_deref (&lhsc); |
+ if (flag_delete_null_pointer_checks |
+ && integer_zerop (gimple_call_arg (t, 1))) |
+ { |
+ ac.type = ADDRESSOF; |
+ ac.var = nothing_id; |
+ } |
+ else |
+ { |
+ ac.type = SCALAR; |
+ ac.var = integer_id; |
+ } |
+ ac.offset = 0; |
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i) |
+ process_constraint (new_constraint (*lhsp, ac)); |
+ VEC_free (ce_s, heap, lhsc); |
+ return; |
+ } |
+ /* All the following functions do not return pointers, do not |
+ modify the points-to sets of memory reachable from their |
+ arguments and do not add to the ESCAPED solution. */ |
+ case BUILT_IN_SINCOS: |
+ case BUILT_IN_SINCOSF: |
+ case BUILT_IN_SINCOSL: |
+ case BUILT_IN_FREXP: |
+ case BUILT_IN_FREXPF: |
+ case BUILT_IN_FREXPL: |
+ case BUILT_IN_GAMMA_R: |
+ case BUILT_IN_GAMMAF_R: |
+ case BUILT_IN_GAMMAL_R: |
+ case BUILT_IN_LGAMMA_R: |
+ case BUILT_IN_LGAMMAF_R: |
+ case BUILT_IN_LGAMMAL_R: |
+ case BUILT_IN_MODF: |
+ case BUILT_IN_MODFF: |
+ case BUILT_IN_MODFL: |
+ case BUILT_IN_REMQUO: |
+ case BUILT_IN_REMQUOF: |
+ case BUILT_IN_REMQUOL: |
+ case BUILT_IN_FREE: |
+ return; |
+	  /* printf-style functions may have hooks to set pointers to
+	     point somewhere into the generated string.  Leave them
+	     for a later exercise... */
+ default: |
+ /* Fallthru to general call handling. */; |
+ } |
+ if (!in_ipa_mode |
+ || (fndecl |
+ && !lookup_vi_for_tree (fndecl))) |
{ |
+ VEC(ce_s, heap) *rhsc = NULL; |
int flags = gimple_call_flags (t); |
/* Const functions can return their arguments and addresses |
of global memory but not of escaped memory. */ |
- if (flags & ECF_CONST) |
+ if (flags & (ECF_CONST|ECF_NOVOPS)) |
{ |
if (gimple_call_lhs (t) |
&& could_have_pointers (gimple_call_lhs (t))) |
- handle_const_call (t); |
+ handle_const_call (t, &rhsc); |
} |
/* Pure functions can return addresses in and of memory |
reachable from their arguments, but they are not an escape |
point for reachable memory of their arguments. */ |
- else if (flags & ECF_PURE) |
- { |
- handle_pure_call (t); |
- if (gimple_call_lhs (t) |
- && could_have_pointers (gimple_call_lhs (t))) |
- handle_lhs_call (gimple_call_lhs (t), flags); |
- } |
+ else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE)) |
+ handle_pure_call (t, &rhsc); |
else |
- { |
- handle_rhs_call (t); |
- if (gimple_call_lhs (t) |
- && could_have_pointers (gimple_call_lhs (t))) |
- handle_lhs_call (gimple_call_lhs (t), flags); |
- } |
+ handle_rhs_call (t, &rhsc); |
+ if (gimple_call_lhs (t) |
+ && could_have_pointers (gimple_call_lhs (t))) |
+ handle_lhs_call (gimple_call_lhs (t), flags, rhsc, fndecl); |
+ VEC_free (ce_s, heap, rhsc); |
} |
else |
{ |
@@ -3854,7 +3893,7 @@ find_func_aliases (gimple origt) |
operations with pointer result, others are dealt with as escape |
points if they have pointer operands. */ |
else if (is_gimple_assign (t) |
- && could_have_pointers (gimple_assign_lhs (t))) |
+ && type_could_have_pointers (TREE_TYPE (gimple_assign_lhs (t)))) |
{ |
/* Otherwise, just a regular assignment statement. */ |
tree lhsop = gimple_assign_lhs (t); |
@@ -3864,7 +3903,6 @@ find_func_aliases (gimple origt) |
do_structure_copy (lhsop, rhsop); |
else |
{ |
- unsigned int j; |
struct constraint_expr temp; |
get_constraint_for (lhsop, &lhsc); |
@@ -3883,108 +3921,162 @@ find_func_aliases (gimple origt) |
temp.offset = 0; |
VEC_safe_push (ce_s, heap, rhsc, &temp); |
} |
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++) |
- { |
- struct constraint_expr *c2; |
- unsigned int k; |
- |
- for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++) |
- process_constraint (new_constraint (*c, *c2)); |
- } |
+ process_all_all_constraints (lhsc, rhsc); |
} |
+ /* If there is a store to a global variable the rhs escapes. */ |
+ if ((lhsop = get_base_address (lhsop)) != NULL_TREE |
+ && DECL_P (lhsop) |
+ && is_global_var (lhsop)) |
+ make_escape_constraint (rhsop); |
+ /* If this is a conversion of a non-restrict pointer to a |
+ restrict pointer track it with a new heapvar. */ |
+ else if (gimple_assign_cast_p (t) |
+ && POINTER_TYPE_P (TREE_TYPE (rhsop)) |
+ && POINTER_TYPE_P (TREE_TYPE (lhsop)) |
+ && !TYPE_RESTRICT (TREE_TYPE (rhsop)) |
+ && TYPE_RESTRICT (TREE_TYPE (lhsop))) |
+ make_constraint_from_restrict (get_vi_for_tree (lhsop), |
+ "CAST_RESTRICT"); |
} |
- else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE) |
+ /* For conversions of pointers to non-pointers the pointer escapes. */ |
+ else if (gimple_assign_cast_p (t) |
+ && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t))) |
+ && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t)))) |
{ |
- unsigned int j; |
- |
- get_constraint_for (gimple_cdt_location (t), &lhsc); |
- for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j) |
- get_varinfo (c->var)->no_tbaa_pruning = true; |
- } |
- |
- stmt_escape_type = is_escape_site (t); |
- if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL) |
- { |
- gcc_assert (is_gimple_assign (t)); |
- if (gimple_assign_rhs_code (t) == ADDR_EXPR) |
- { |
- tree rhs = gimple_assign_rhs1 (t); |
- tree base = get_base_address (TREE_OPERAND (rhs, 0)); |
- if (base |
- && (!DECL_P (base) |
- || !is_global_var (base))) |
- make_escape_constraint (rhs); |
- } |
- else if (get_gimple_rhs_class (gimple_assign_rhs_code (t)) |
- == GIMPLE_SINGLE_RHS) |
- { |
- if (could_have_pointers (gimple_assign_rhs1 (t))) |
- make_escape_constraint (gimple_assign_rhs1 (t)); |
- } |
- else |
- gcc_unreachable (); |
+ make_escape_constraint (gimple_assign_rhs1 (t)); |
} |
- else if (stmt_escape_type == ESCAPE_BAD_CAST) |
+ /* Handle escapes through return. */ |
+ else if (gimple_code (t) == GIMPLE_RETURN |
+ && gimple_return_retval (t) != NULL_TREE |
+ && could_have_pointers (gimple_return_retval (t))) |
{ |
- gcc_assert (is_gimple_assign (t)); |
- gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t)) |
- || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR); |
- make_escape_constraint (gimple_assign_rhs1 (t)); |
+ make_escape_constraint (gimple_return_retval (t)); |
} |
- else if (stmt_escape_type == ESCAPE_TO_ASM) |
+ /* Handle asms conservatively by adding escape constraints to everything. */ |
+ else if (gimple_code (t) == GIMPLE_ASM) |
{ |
- unsigned i; |
- for (i = 0; i < gimple_asm_noutputs (t); ++i) |
+ unsigned i, noutputs; |
+ const char **oconstraints; |
+ const char *constraint; |
+ bool allows_mem, allows_reg, is_inout; |
+ |
+ noutputs = gimple_asm_noutputs (t); |
+ oconstraints = XALLOCAVEC (const char *, noutputs); |
+ |
+ for (i = 0; i < noutputs; ++i) |
{ |
- tree op = TREE_VALUE (gimple_asm_output_op (t, i)); |
+ tree link = gimple_asm_output_op (t, i); |
+ tree op = TREE_VALUE (link); |
+ |
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
+ oconstraints[i] = constraint; |
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem, |
+ &allows_reg, &is_inout); |
+ |
+ /* A memory constraint makes the address of the operand escape. */ |
+ if (!allows_reg && allows_mem) |
+ make_escape_constraint (build_fold_addr_expr (op)); |
+ |
+ /* The asm may read global memory, so outputs may point to |
+ any global memory. */ |
if (op && could_have_pointers (op)) |
- /* Strictly we'd only need the constraints from ESCAPED and |
- NONLOCAL. */ |
- make_escape_constraint (op); |
+ { |
+ VEC(ce_s, heap) *lhsc = NULL; |
+ struct constraint_expr rhsc, *lhsp; |
+ unsigned j; |
+ get_constraint_for (op, &lhsc); |
+ rhsc.var = nonlocal_id; |
+ rhsc.offset = 0; |
+ rhsc.type = SCALAR; |
+ for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++) |
+ process_constraint (new_constraint (*lhsp, rhsc)); |
+ VEC_free (ce_s, heap, lhsc); |
+ } |
} |
for (i = 0; i < gimple_asm_ninputs (t); ++i) |
{ |
- tree op = TREE_VALUE (gimple_asm_input_op (t, i)); |
- if (op && could_have_pointers (op)) |
- /* Strictly we'd only need the constraint to ESCAPED. */ |
+ tree link = gimple_asm_input_op (t, i); |
+ tree op = TREE_VALUE (link); |
+ |
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
+ |
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, |
+ &allows_mem, &allows_reg); |
+ |
+ /* A memory constraint makes the address of the operand escape. */ |
+ if (!allows_reg && allows_mem) |
+ make_escape_constraint (build_fold_addr_expr (op)); |
+ /* Strictly we'd only need the constraint to ESCAPED if |
+ the asm clobbers memory, otherwise using CALLUSED |
+ would be enough. */ |
+ else if (op && could_have_pointers (op)) |
make_escape_constraint (op); |
} |
} |
- /* After promoting variables and computing aliasing we will |
- need to re-scan most statements. FIXME: Try to minimize the |
- number of statements re-scanned. It's not really necessary to |
- re-scan *all* statements. */ |
- if (!in_ipa_mode) |
- gimple_set_modified (origt, true); |
VEC_free (ce_s, heap, rhsc); |
VEC_free (ce_s, heap, lhsc); |
} |
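/* A sketch of the builtin handling in find_func_aliases for one of
   the memory-copy builtins; the constraints in the comment are
   approximate.  */

#include <string.h>

static void
memcpy_example (int **dst, int **src, size_t n)
{
  /* Roughly: res = dst (the result points to the same object as the
     destination) and *dst = *src (what DST points to now points to
     what SRC points to).  Unlike an unknown call, neither DST nor SRC
     is added to the ESCAPED solution.  */
  void *res = memcpy (dst, src, n);
  (void) res;
}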
/* Find the first varinfo in the same variable as START that overlaps with |
- OFFSET. |
- Effectively, walk the chain of fields for the variable START to find the |
- first field that overlaps with OFFSET. |
- Return NULL if we can't find one. */ |
+ OFFSET. Return NULL if we can't find one. */ |
static varinfo_t |
first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset) |
{ |
- varinfo_t curr = start; |
- while (curr) |
+ /* If the offset is outside of the variable, bail out. */ |
+ if (offset >= start->fullsize) |
+ return NULL; |
+ |
+ /* If we cannot reach offset from start, lookup the first field |
+ and start from there. */ |
+ if (start->offset > offset) |
+ start = lookup_vi_for_tree (start->decl); |
+ |
+ while (start) |
{ |
/* We may not find a variable in the field list with the actual |
         offset when we have glommed a structure to a variable.
In that case, however, offset should still be within the size |
of the variable. */ |
- if (offset >= curr->offset && offset < (curr->offset + curr->size)) |
- return curr; |
- curr = curr->next; |
+ if (offset >= start->offset |
+ && (offset - start->offset) < start->size) |
+ return start; |
+ |
+      start = start->next;
} |
+ |
return NULL; |
} |
+/* Find the first varinfo in the same variable as START that overlaps with |
+ OFFSET. If there is no such varinfo the varinfo directly preceding |
+ OFFSET is returned. */ |
+ |
+static varinfo_t |
+first_or_preceding_vi_for_offset (varinfo_t start, |
+ unsigned HOST_WIDE_INT offset) |
+{ |
+ /* If we cannot reach offset from start, lookup the first field |
+ and start from there. */ |
+ if (start->offset > offset) |
+ start = lookup_vi_for_tree (start->decl); |
+ |
+ /* We may not find a variable in the field list with the actual |
+     offset when we have glommed a structure to a variable.
+ In that case, however, offset should still be within the size |
+ of the variable. |
+     If we get beyond the offset we look for, return the field
+     directly preceding offset, which may be the last field.  */
+ while (start->next |
+ && offset >= start->offset |
+ && !((offset - start->offset) < start->size)) |
+ start = start->next; |
+ |
+ return start; |
+} |
+ |
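/* An illustration of the two field walks above, assuming a target
   with 32-bit pointers and 32-bit int; exact offsets depend on the
   target's layout.  For

     struct S { int *a; int pad; int *b; } s;

   the varinfo chain for S is roughly s.a at bit offset 0, s.pad at 32
   and s.b at 64.  first_vi_for_offset (vi, 40) returns the field
   covering bit 40 (s.pad) and returns NULL for offsets at or beyond
   fullsize, while first_or_preceding_vi_for_offset never returns NULL
   for a field-sensitive variable and instead falls back to the field
   directly preceding the requested offset.  */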
/* Insert the varinfo FIELD into the field list for BASE, at the front |
of the list. */ |
@@ -4043,6 +4135,8 @@ struct fieldoff |
unsigned has_unknown_size : 1; |
unsigned may_have_pointers : 1; |
+ |
+ unsigned only_restrict_pointers : 1; |
}; |
typedef struct fieldoff fieldoff_s; |
@@ -4094,7 +4188,7 @@ var_can_have_subvars (const_tree v) |
return false; |
/* Non decls or memory tags can never have subvars. */ |
- if (!DECL_P (v) || MTAG_P (v)) |
+ if (!DECL_P (v)) |
return false; |
/* Aggregates without overlapping fields can have subvars. */ |
@@ -4114,7 +4208,7 @@ var_can_have_subvars (const_tree v) |
static int |
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack, |
- HOST_WIDE_INT offset) |
+ HOST_WIDE_INT offset, bool must_have_pointers_p) |
{ |
tree field; |
int count = 0; |
@@ -4140,7 +4234,8 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack, |
|| TREE_CODE (TREE_TYPE (field)) == UNION_TYPE) |
push = true; |
else if (!(pushed = push_fields_onto_fieldstack |
- (TREE_TYPE (field), fieldstack, offset + foff)) |
+ (TREE_TYPE (field), fieldstack, offset + foff, |
+ must_have_pointers_p)) |
&& (DECL_SIZE (field) |
&& !integer_zerop (DECL_SIZE (field)))) |
/* Empty structures may have actual size, like in C++. So |
@@ -4163,10 +4258,11 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack, |
/* If adjacent fields do not contain pointers merge them. */ |
if (pair |
&& !pair->may_have_pointers |
- && !could_have_pointers (field) |
&& !pair->has_unknown_size |
&& !has_unknown_size |
- && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff) |
+ && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff |
+ && !must_have_pointers_p |
+ && !could_have_pointers (field)) |
{ |
pair = VEC_last (fieldoff_s, *fieldstack); |
pair->size += TREE_INT_CST_LOW (DECL_SIZE (field)); |
@@ -4180,7 +4276,12 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack, |
pair->size = TREE_INT_CST_LOW (DECL_SIZE (field)); |
else |
pair->size = -1; |
- pair->may_have_pointers = could_have_pointers (field); |
+ pair->may_have_pointers |
+ = must_have_pointers_p || could_have_pointers (field); |
+ pair->only_restrict_pointers |
+ = (!has_unknown_size |
+ && POINTER_TYPE_P (TREE_TYPE (field)) |
+ && TYPE_RESTRICT (TREE_TYPE (field))); |
count++; |
} |
} |
@@ -4191,44 +4292,28 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack, |
return count; |
} |
-/* Create a constraint ID = &FROM. */ |
- |
-static void |
-make_constraint_from (varinfo_t vi, int from) |
-{ |
- struct constraint_expr lhs, rhs; |
- |
- lhs.var = vi->id; |
- lhs.offset = 0; |
- lhs.type = SCALAR; |
- |
- rhs.var = from; |
- rhs.offset = 0; |
- rhs.type = ADDRESSOF; |
- process_constraint (new_constraint (lhs, rhs)); |
-} |
- |
/* Count the number of arguments DECL has, and set IS_VARARGS to true |
if it is a varargs function. */ |
static unsigned int |
count_num_arguments (tree decl, bool *is_varargs) |
{ |
- unsigned int i = 0; |
+ unsigned int num = 0; |
tree t; |
- for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); |
- t; |
- t = TREE_CHAIN (t)) |
- { |
- if (TREE_VALUE (t) == void_type_node) |
- break; |
- i++; |
- } |
+ /* Capture named arguments for K&R functions. They do not |
+ have a prototype and thus no TYPE_ARG_TYPES. */ |
+ for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t)) |
+ ++num; |
+ /* Check if the function has variadic arguments. */ |
+ for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t)) |
+ if (TREE_VALUE (t) == void_type_node) |
+ break; |
if (!t) |
*is_varargs = true; |
- return i; |
+ |
+ return num; |
} |
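/* A small example of why DECL_ARGUMENTS is walked above: an old-style
   (K&R) definition has no prototype and therefore no TYPE_ARG_TYPES,
   yet its named arguments must still be counted.  For the function
   below count_num_arguments returns 2 and, because there is no
   terminating void_type_node, sets *IS_VARARGS.  */

static int
knr_fn (a, b)
     int *a;
     int *b;
{
  return *a + *b;
}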
/* Creation function node for DECL, using NAME, and return the index |
@@ -4237,7 +4322,6 @@ count_num_arguments (tree decl, bool *is_varargs) |
static unsigned int |
create_function_info_for (tree decl, const char *name) |
{ |
- unsigned int index = VEC_length (varinfo_t, varmap); |
varinfo_t vi; |
tree arg; |
unsigned int i; |
@@ -4245,13 +4329,11 @@ create_function_info_for (tree decl, const char *name) |
/* Create the variable info. */ |
- vi = new_var_info (decl, index, name); |
- vi->decl = decl; |
+ vi = new_var_info (decl, name); |
vi->offset = 0; |
vi->size = 1; |
vi->fullsize = count_num_arguments (decl, &is_varargs) + 1; |
insert_vi_for_tree (vi->decl, vi); |
- VEC_safe_push (varinfo_t, heap, varmap, vi); |
stats.total_vars++; |
@@ -4262,10 +4344,9 @@ create_function_info_for (tree decl, const char *name) |
vi->fullsize = ~0; |
vi->size = ~0; |
vi->is_unknown_size_var = true; |
- return index; |
+ return vi->id; |
} |
- |
arg = DECL_ARGUMENTS (decl); |
/* Set up variables for each argument. */ |
@@ -4274,20 +4355,16 @@ create_function_info_for (tree decl, const char *name) |
varinfo_t argvi; |
const char *newname; |
char *tempname; |
- unsigned int newindex; |
tree argdecl = decl; |
if (arg) |
argdecl = arg; |
- newindex = VEC_length (varinfo_t, varmap); |
asprintf (&tempname, "%s.arg%d", name, i-1); |
newname = ggc_strdup (tempname); |
free (tempname); |
- argvi = new_var_info (argdecl, newindex, newname); |
- argvi->decl = argdecl; |
- VEC_safe_push (varinfo_t, heap, varmap, argvi); |
+ argvi = new_var_info (argdecl, newname); |
argvi->offset = i; |
argvi->size = 1; |
argvi->is_full_var = true; |
@@ -4308,7 +4385,6 @@ create_function_info_for (tree decl, const char *name) |
varinfo_t resultvi; |
const char *newname; |
char *tempname; |
- unsigned int newindex; |
tree resultdecl = decl; |
vi->fullsize ++; |
@@ -4316,14 +4392,11 @@ create_function_info_for (tree decl, const char *name) |
if (DECL_RESULT (decl)) |
resultdecl = DECL_RESULT (decl); |
- newindex = VEC_length (varinfo_t, varmap); |
asprintf (&tempname, "%s.result", name); |
newname = ggc_strdup (tempname); |
free (tempname); |
- resultvi = new_var_info (resultdecl, newindex, newname); |
- resultvi->decl = resultdecl; |
- VEC_safe_push (varinfo_t, heap, varmap, resultvi); |
+ resultvi = new_var_info (resultdecl, newname); |
resultvi->offset = i; |
resultvi->size = 1; |
resultvi->fullsize = vi->fullsize; |
@@ -4333,7 +4406,8 @@ create_function_info_for (tree decl, const char *name) |
if (DECL_RESULT (decl)) |
insert_vi_for_tree (DECL_RESULT (decl), resultvi); |
} |
- return index; |
+ |
+ return vi->id; |
} |
@@ -4363,28 +4437,21 @@ check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack) |
static unsigned int |
create_variable_info_for (tree decl, const char *name) |
{ |
- unsigned int index = VEC_length (varinfo_t, varmap); |
varinfo_t vi; |
tree decl_type = TREE_TYPE (decl); |
tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type); |
- bool is_global = DECL_P (decl) ? is_global_var (decl) : false; |
VEC (fieldoff_s,heap) *fieldstack = NULL; |
- if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode) |
- return create_function_info_for (decl, name); |
- |
- if (var_can_have_subvars (decl) && use_field_sensitive |
- && (!var_ann (decl) |
- || var_ann (decl)->noalias_state == 0) |
- && (!var_ann (decl) |
- || !var_ann (decl)->is_heapvar)) |
- push_fields_onto_fieldstack (decl_type, &fieldstack, 0); |
+ if (var_can_have_subvars (decl) && use_field_sensitive) |
+ push_fields_onto_fieldstack (decl_type, &fieldstack, 0, |
+ TREE_PUBLIC (decl) |
+ || DECL_EXTERNAL (decl) |
+ || TREE_ADDRESSABLE (decl)); |
/* If the variable doesn't have subvars, we may end up needing to |
sort the field list and create fake variables for all the |
fields. */ |
- vi = new_var_info (decl, index, name); |
- vi->decl = decl; |
+ vi = new_var_info (decl, name); |
vi->offset = 0; |
vi->may_have_pointers = could_have_pointers (decl); |
if (!declsize |
@@ -4401,15 +4468,14 @@ create_variable_info_for (tree decl, const char *name) |
} |
insert_vi_for_tree (vi->decl, vi); |
- VEC_safe_push (varinfo_t, heap, varmap, vi); |
- if (is_global && (!flag_whole_program || !in_ipa_mode) |
+ if (vi->is_global_var |
+ && (!flag_whole_program || !in_ipa_mode) |
&& vi->may_have_pointers) |
{ |
- if (var_ann (decl) |
- && var_ann (decl)->noalias_state == NO_ALIAS_ANYTHING) |
- make_constraint_from (vi, vi->id); |
- else |
- make_constraint_from (vi, escaped_id); |
+ if (POINTER_TYPE_P (TREE_TYPE (decl)) |
+ && TYPE_RESTRICT (TREE_TYPE (decl))) |
+ make_constraint_from_restrict (vi, "GLOBAL_RESTRICT"); |
+ make_copy_constraint (vi, nonlocal_id); |
} |
stats.total_vars++; |
@@ -4419,7 +4485,6 @@ create_variable_info_for (tree decl, const char *name) |
&& VEC_length (fieldoff_s, fieldstack) > 1 |
&& VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE) |
{ |
- unsigned int newindex = VEC_length (varinfo_t, varmap); |
fieldoff_s *fo = NULL; |
bool notokay = false; |
unsigned int i; |
@@ -4459,12 +4524,19 @@ create_variable_info_for (tree decl, const char *name) |
vi->size = ~0; |
vi->is_full_var = true; |
VEC_free (fieldoff_s, heap, fieldstack); |
- return index; |
+ return vi->id; |
} |
vi->size = fo->size; |
vi->offset = fo->offset; |
vi->may_have_pointers = fo->may_have_pointers; |
+ if (vi->is_global_var |
+ && (!flag_whole_program || !in_ipa_mode) |
+ && vi->may_have_pointers) |
+ { |
+ if (fo->only_restrict_pointers) |
+ make_constraint_from_restrict (vi, "GLOBAL_RESTRICT"); |
+ } |
for (i = VEC_length (fieldoff_s, fieldstack) - 1; |
i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo); |
i--) |
@@ -4473,7 +4545,6 @@ create_variable_info_for (tree decl, const char *name) |
const char *newname = "NULL"; |
char *tempname; |
- newindex = VEC_length (varinfo_t, varmap); |
if (dump_file) |
{ |
asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC |
@@ -4482,16 +4553,20 @@ create_variable_info_for (tree decl, const char *name) |
newname = ggc_strdup (tempname); |
free (tempname); |
} |
- newvi = new_var_info (decl, newindex, newname); |
+ newvi = new_var_info (decl, newname); |
newvi->offset = fo->offset; |
newvi->size = fo->size; |
newvi->fullsize = vi->fullsize; |
newvi->may_have_pointers = fo->may_have_pointers; |
insert_into_field_list (vi, newvi); |
- VEC_safe_push (varinfo_t, heap, varmap, newvi); |
- if (is_global && (!flag_whole_program || !in_ipa_mode) |
+ if ((newvi->is_global_var || TREE_CODE (decl) == PARM_DECL) |
&& newvi->may_have_pointers) |
- make_constraint_from (newvi, escaped_id); |
+ { |
+ if (fo->only_restrict_pointers) |
+ make_constraint_from_restrict (newvi, "GLOBAL_RESTRICT"); |
+ if (newvi->is_global_var && !in_ipa_mode) |
+ make_copy_constraint (newvi, nonlocal_id); |
+ } |
stats.total_vars++; |
} |
@@ -4501,12 +4576,12 @@ create_variable_info_for (tree decl, const char *name) |
VEC_free (fieldoff_s, heap, fieldstack); |
- return index; |
+ return vi->id; |
} |
/* Print out the points-to solution for VAR to FILE. */ |
-void |
+static void |
dump_solution_for_var (FILE *file, unsigned int var) |
{ |
varinfo_t vi = get_varinfo (var); |
@@ -4525,10 +4600,7 @@ dump_solution_for_var (FILE *file, unsigned int var) |
{ |
fprintf (file, "%s ", get_varinfo (i)->name); |
} |
- fprintf (file, "}"); |
- if (vi->no_tbaa_pruning) |
- fprintf (file, " no-tbaa-pruning"); |
- fprintf (file, "\n"); |
+ fprintf (file, "}\n"); |
} |
} |
@@ -4547,7 +4619,6 @@ static void |
intra_create_variable_infos (void) |
{ |
tree t; |
- struct constraint_expr lhs, rhs; |
/* For each incoming pointer argument arg, create the constraint ARG |
= NONLOCAL or a dummy variable if flag_argument_noalias is set. */ |
@@ -4558,65 +4629,44 @@ intra_create_variable_infos (void) |
if (!could_have_pointers (t)) |
continue; |
- /* If flag_argument_noalias is set, then function pointer |
- arguments are guaranteed not to point to each other. In that |
- case, create an artificial variable PARM_NOALIAS and the |
- constraint ARG = &PARM_NOALIAS. */ |
- if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0) |
+ /* For restrict qualified pointers to objects passed by |
+ reference build a real representative for the pointed-to object. */ |
+ if (DECL_BY_REFERENCE (t) |
+ && POINTER_TYPE_P (TREE_TYPE (t)) |
+ && TYPE_RESTRICT (TREE_TYPE (t))) |
{ |
+ struct constraint_expr lhsc, rhsc; |
varinfo_t vi; |
- tree heapvar = heapvar_lookup (t); |
- |
- lhs.offset = 0; |
- lhs.type = SCALAR; |
- lhs.var = get_vi_for_tree (t)->id; |
- |
+ tree heapvar = heapvar_lookup (t, 0); |
if (heapvar == NULL_TREE) |
{ |
var_ann_t ann; |
- heapvar = create_tmp_var_raw (ptr_type_node, |
+ heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)), |
"PARM_NOALIAS"); |
DECL_EXTERNAL (heapvar) = 1; |
- if (gimple_referenced_vars (cfun)) |
- add_referenced_var (heapvar); |
- |
- heapvar_insert (t, heapvar); |
- |
+ heapvar_insert (t, 0, heapvar); |
ann = get_var_ann (heapvar); |
ann->is_heapvar = 1; |
- if (flag_argument_noalias == 1) |
- ann->noalias_state = NO_ALIAS; |
- else if (flag_argument_noalias == 2) |
- ann->noalias_state = NO_ALIAS_GLOBAL; |
- else if (flag_argument_noalias == 3) |
- ann->noalias_state = NO_ALIAS_ANYTHING; |
- else |
- gcc_unreachable (); |
- } |
- |
- vi = get_vi_for_tree (heapvar); |
- vi->is_artificial_var = 1; |
- vi->is_heap_var = 1; |
- vi->is_unknown_size_var = true; |
- vi->fullsize = ~0; |
- vi->size = ~0; |
- rhs.var = vi->id; |
- rhs.type = ADDRESSOF; |
- rhs.offset = 0; |
- for (p = get_varinfo (lhs.var); p; p = p->next) |
- { |
- struct constraint_expr temp = lhs; |
- temp.var = p->id; |
- process_constraint (new_constraint (temp, rhs)); |
} |
+ if (gimple_referenced_vars (cfun)) |
+ add_referenced_var (heapvar); |
+ lhsc.var = get_vi_for_tree (t)->id; |
+ lhsc.type = SCALAR; |
+ lhsc.offset = 0; |
+ rhsc.var = (vi = get_vi_for_tree (heapvar))->id; |
+ rhsc.type = ADDRESSOF; |
+ rhsc.offset = 0; |
+ process_constraint (new_constraint (lhsc, rhsc)); |
+ vi->is_restrict_var = 1; |
+ continue; |
} |
- else |
- { |
- varinfo_t arg_vi = get_vi_for_tree (t); |
- for (p = arg_vi; p; p = p->next) |
- make_constraint_from (p, nonlocal_id); |
- } |
+ for (p = get_vi_for_tree (t); p; p = p->next) |
+ if (p->may_have_pointers) |
+ make_constraint_from (p, nonlocal_id); |
+ if (POINTER_TYPE_P (TREE_TYPE (t)) |
+ && TYPE_RESTRICT (TREE_TYPE (t))) |
+ make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT"); |
} |
/* Add a constraint for a result decl that is passed by reference. */ |
@@ -4626,7 +4676,7 @@ intra_create_variable_infos (void) |
varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl)); |
for (p = result_vi; p; p = p->next) |
- make_constraint_from (p, nonlocal_id); |
+ make_constraint_from (p, nonlocal_id); |
} |
/* Add a constraint for the incoming static chain parameter. */ |
@@ -4709,24 +4759,13 @@ shared_bitmap_add (bitmap pt_vars) |
} |
-/* Set bits in INTO corresponding to the variable uids in solution set |
- FROM, which came from variable PTR. |
- For variables that are actually dereferenced, we also use type |
- based alias analysis to prune the points-to sets. |
- IS_DEREFED is true if PTR was directly dereferenced, which we use to |
- help determine whether we are we are allowed to prune using TBAA. |
- If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of |
- the from set. Returns the number of pruned variables. */ |
+/* Set bits in INTO corresponding to the variable uids in solution set FROM. */ |
-static unsigned |
-set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed, |
- bool no_tbaa_pruning) |
+static void |
+set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt) |
{ |
unsigned int i; |
bitmap_iterator bi; |
- unsigned pruned = 0; |
- |
- gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr))); |
EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi) |
{ |
@@ -4741,140 +4780,97 @@ set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed, |
|| TREE_CODE (vi->decl) == PARM_DECL |
|| TREE_CODE (vi->decl) == RESULT_DECL) |
{ |
- /* Just add VI->DECL to the alias set. |
- Don't type prune artificial vars or points-to sets |
- for pointers that have not been dereferenced or with |
- type-based pruning disabled. */ |
- if (vi->is_artificial_var |
- || !is_derefed |
- || no_tbaa_pruning |
- || vi->no_tbaa_pruning) |
- bitmap_set_bit (into, DECL_UID (vi->decl)); |
- else |
- { |
- alias_set_type var_alias_set, mem_alias_set; |
- var_alias_set = get_alias_set (vi->decl); |
- mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr))); |
- if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set, |
- vi->decl, var_alias_set, true)) |
- bitmap_set_bit (into, DECL_UID (vi->decl)); |
- else |
- ++pruned; |
- } |
+ /* Add the decl to the points-to set. Note that the points-to |
+ set contains global variables. */ |
+ bitmap_set_bit (into, DECL_UID (vi->decl)); |
+ if (vi->is_global_var) |
+ pt->vars_contains_global = true; |
} |
} |
- |
- return pruned; |
} |
-static bool have_alias_info = false; |
- |
-/* Emit a note for the pointer initialization point DEF. */ |
+/* Compute the points-to solution *PT for the variable VI. */ |
static void |
-emit_pointer_definition (tree ptr, bitmap visited) |
+find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt) |
{ |
- gimple def = SSA_NAME_DEF_STMT (ptr); |
- if (gimple_code (def) == GIMPLE_PHI) |
+ unsigned int i; |
+ bitmap_iterator bi; |
+ bitmap finished_solution; |
+ bitmap result; |
+ varinfo_t vi; |
+ |
+ memset (pt, 0, sizeof (struct pt_solution)); |
+ |
+ /* This variable may have been collapsed, let's get the real |
+ variable. */ |
+ vi = get_varinfo (find (orig_vi->id)); |
+ |
+ /* Translate artificial variables into SSA_NAME_PTR_INFO |
+ attributes. */ |
+ EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
{ |
- use_operand_p argp; |
- ssa_op_iter oi; |
+ varinfo_t vi = get_varinfo (i); |
- FOR_EACH_PHI_ARG (argp, def, oi, SSA_OP_USE) |
+ if (vi->is_artificial_var) |
{ |
- tree arg = USE_FROM_PTR (argp); |
- if (TREE_CODE (arg) == SSA_NAME) |
- { |
- if (bitmap_set_bit (visited, SSA_NAME_VERSION (arg))) |
- emit_pointer_definition (arg, visited); |
- } |
- else |
- inform (0, "initialized from %qE", arg); |
+ if (vi->id == nothing_id) |
+ pt->null = 1; |
+ else if (vi->id == escaped_id) |
+ pt->escaped = 1; |
+ else if (vi->id == callused_id) |
+ gcc_unreachable (); |
+ else if (vi->id == nonlocal_id) |
+ pt->nonlocal = 1; |
+ else if (vi->is_heap_var) |
+ /* We represent heapvars in the points-to set properly. */ |
+ ; |
+ else if (vi->id == readonly_id) |
+ /* Nobody cares. */ |
+ ; |
+ else if (vi->id == anything_id |
+ || vi->id == integer_id) |
+ pt->anything = 1; |
} |
+ if (vi->is_restrict_var) |
+ pt->vars_contains_restrict = true; |
} |
- else if (!gimple_nop_p (def)) |
- inform (gimple_location (def), "initialized from here"); |
-} |
-/* Emit a strict aliasing warning for dereferencing the pointer PTR. */ |
+ /* Instead of doing extra work, simply do not create |
+ elaborate points-to information for pt_anything pointers. */ |
+ if (pt->anything |
+ && (orig_vi->is_artificial_var |
+ || !pt->vars_contains_restrict)) |
+ return; |
-static void |
-emit_alias_warning (tree ptr) |
-{ |
- gimple use; |
- imm_use_iterator ui; |
- bool warned = false; |
+ /* Share the final set of variables when possible. */ |
+ finished_solution = BITMAP_GGC_ALLOC (); |
+ stats.points_to_sets_created++; |
- FOR_EACH_IMM_USE_STMT (use, ui, ptr) |
+ set_uids_in_ptset (finished_solution, vi->solution, pt); |
+ result = shared_bitmap_lookup (finished_solution); |
+ if (!result) |
{ |
- tree deref = NULL_TREE; |
- |
- if (gimple_has_lhs (use)) |
- { |
- tree lhs = get_base_address (gimple_get_lhs (use)); |
- if (lhs |
- && INDIRECT_REF_P (lhs) |
- && TREE_OPERAND (lhs, 0) == ptr) |
- deref = lhs; |
- } |
- if (gimple_assign_single_p (use)) |
- { |
- tree rhs = get_base_address (gimple_assign_rhs1 (use)); |
- if (rhs |
- && INDIRECT_REF_P (rhs) |
- && TREE_OPERAND (rhs, 0) == ptr) |
- deref = rhs; |
- } |
- else if (is_gimple_call (use)) |
- { |
- unsigned i; |
- for (i = 0; i < gimple_call_num_args (use); ++i) |
- { |
- tree op = get_base_address (gimple_call_arg (use, i)); |
- if (op |
- && INDIRECT_REF_P (op) |
- && TREE_OPERAND (op, 0) == ptr) |
- deref = op; |
- } |
- } |
- if (deref |
- && !TREE_NO_WARNING (deref)) |
- { |
- TREE_NO_WARNING (deref) = 1; |
- warned |= warning_at (gimple_location (use), OPT_Wstrict_aliasing, |
- "dereferencing pointer %qD does break " |
- "strict-aliasing rules", SSA_NAME_VAR (ptr)); |
- } |
+ shared_bitmap_add (finished_solution); |
+ pt->vars = finished_solution; |
} |
- if (warned) |
+ else |
{ |
- bitmap visited = BITMAP_ALLOC (NULL); |
- emit_pointer_definition (ptr, visited); |
- BITMAP_FREE (visited); |
+ pt->vars = result; |
+ bitmap_clear (finished_solution); |
} |
} |
-/* Given a pointer variable P, fill in its points-to set, or return |
- false if we can't. |
- Rather than return false for variables that point-to anything, we |
- instead find the corresponding SMT, and merge in its aliases. In |
- addition to these aliases, we also set the bits for the SMT's |
- themselves and their subsets, as SMT's are still in use by |
- non-SSA_NAME's, and pruning may eliminate every one of their |
- aliases. In such a case, if we did not include the right set of |
- SMT's in the points-to set of the variable, we'd end up with |
- statements that do not conflict but should. */ |
+/* Given a pointer variable P, fill in its points-to set. */ |
-bool |
+static void |
find_what_p_points_to (tree p) |
{ |
+ struct ptr_info_def *pi; |
tree lookup_p = p; |
varinfo_t vi; |
- if (!have_alias_info) |
- return false; |
- |
/* For parameters, get at the points-to set for the actual parm |
decl. */ |
if (TREE_CODE (p) == SSA_NAME |
@@ -4883,224 +4879,225 @@ find_what_p_points_to (tree p) |
lookup_p = SSA_NAME_VAR (p); |
vi = lookup_vi_for_tree (lookup_p); |
- if (vi) |
- { |
- if (vi->is_artificial_var) |
- return false; |
+ if (!vi) |
+ return; |
- /* See if this is a field or a structure. */ |
- if (vi->size != vi->fullsize) |
- { |
- /* Nothing currently asks about structure fields directly, |
- but when they do, we need code here to hand back the |
- points-to set. */ |
- return false; |
- } |
- else |
- { |
- struct ptr_info_def *pi = get_ptr_info (p); |
- unsigned int i, pruned; |
- bitmap_iterator bi; |
- bool was_pt_anything = false; |
- bitmap finished_solution; |
- bitmap result; |
+ pi = get_ptr_info (p); |
+ find_what_var_points_to (vi, &pi->pt); |
+} |
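/* A sketch of the information that ends up in SSA_NAME_PTR_INFO; the
   heap variable name is illustrative and the exact flags depend on
   what the solver computed.  */

#include <stdlib.h>

static int points_to_g;

static int *
points_to_example (int cond)
{
  /* For P, find_what_p_points_to fills pi->pt with roughly
       vars = { points_to_g, HEAP }   vars_contains_global = 1
     and with anything, nonlocal and escaped clear, provided the
     solver did not collapse P to ANYTHING.  */
  int *p = cond ? &points_to_g : (int *) malloc (sizeof (int));
  return p;
}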
- if (!pi->memory_tag_needed) |
- return false; |
- /* This variable may have been collapsed, let's get the real |
- variable. */ |
- vi = get_varinfo (find (vi->id)); |
+/* Query statistics for points-to solutions. */ |
- /* Translate artificial variables into SSA_NAME_PTR_INFO |
- attributes. */ |
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
- { |
- varinfo_t vi = get_varinfo (i); |
+static struct { |
+ unsigned HOST_WIDE_INT pt_solution_includes_may_alias; |
+ unsigned HOST_WIDE_INT pt_solution_includes_no_alias; |
+ unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias; |
+ unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias; |
+} pta_stats; |
- if (vi->is_artificial_var) |
- { |
- /* FIXME. READONLY should be handled better so that |
- flow insensitive aliasing can disregard writable |
- aliases. */ |
- if (vi->id == nothing_id) |
- pi->pt_null = 1; |
- else if (vi->id == anything_id |
- || vi->id == nonlocal_id |
- || vi->id == escaped_id |
- || vi->id == callused_id) |
- was_pt_anything = 1; |
- else if (vi->id == readonly_id) |
- was_pt_anything = 1; |
- else if (vi->id == integer_id) |
- was_pt_anything = 1; |
- else if (vi->is_heap_var) |
- pi->pt_global_mem = 1; |
- } |
- } |
+void |
+dump_pta_stats (FILE *s) |
+{ |
+ fprintf (s, "\nPTA query stats:\n"); |
+ fprintf (s, " pt_solution_includes: " |
+ HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
+ HOST_WIDE_INT_PRINT_DEC" queries\n", |
+ pta_stats.pt_solution_includes_no_alias, |
+ pta_stats.pt_solution_includes_no_alias |
+ + pta_stats.pt_solution_includes_may_alias); |
+ fprintf (s, " pt_solutions_intersect: " |
+ HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
+ HOST_WIDE_INT_PRINT_DEC" queries\n", |
+ pta_stats.pt_solutions_intersect_no_alias, |
+ pta_stats.pt_solutions_intersect_no_alias |
+ + pta_stats.pt_solutions_intersect_may_alias); |
+} |
- /* Instead of doing extra work, simply do not create |
- points-to information for pt_anything pointers. This |
- will cause the operand scanner to fall back to the |
- type-based SMT and its aliases. Which is the best |
- we could do here for the points-to set as well. */ |
- if (was_pt_anything) |
- return false; |
- /* Share the final set of variables when possible. */ |
- finished_solution = BITMAP_GGC_ALLOC (); |
- stats.points_to_sets_created++; |
+/* Reset the points-to solution *PT to a conservative default |
+ (point to anything). */ |
- pruned = set_uids_in_ptset (p, finished_solution, vi->solution, |
- pi->is_dereferenced, |
- vi->no_tbaa_pruning); |
- result = shared_bitmap_lookup (finished_solution); |
+void |
+pt_solution_reset (struct pt_solution *pt) |
+{ |
+ memset (pt, 0, sizeof (struct pt_solution)); |
+ pt->anything = true; |
+} |
- if (!result) |
- { |
- shared_bitmap_add (finished_solution); |
- pi->pt_vars = finished_solution; |
- } |
- else |
- { |
- pi->pt_vars = result; |
- bitmap_clear (finished_solution); |
- } |
+/* Set the points-to solution *PT to point only to the variables |
+ in VARS. */ |
- if (bitmap_empty_p (pi->pt_vars)) |
- { |
- pi->pt_vars = NULL; |
- if (pruned > 0 |
- && !pi->pt_null |
- && pi->is_dereferenced |
- && warn_strict_aliasing > 0 |
- && !SSA_NAME_IS_DEFAULT_DEF (p)) |
- { |
- if (dump_file && dump_flags & TDF_DETAILS) |
- { |
- fprintf (dump_file, "alias warning for "); |
- print_generic_expr (dump_file, p, 0); |
- fprintf (dump_file, "\n"); |
- } |
- emit_alias_warning (p); |
- } |
- } |
+void |
+pt_solution_set (struct pt_solution *pt, bitmap vars) |
+{ |
+ bitmap_iterator bi; |
+ unsigned i; |
- return true; |
+ memset (pt, 0, sizeof (struct pt_solution)); |
+ pt->vars = vars; |
+ EXECUTE_IF_SET_IN_BITMAP (vars, 0, i, bi) |
+ { |
+ tree var = referenced_var_lookup (i); |
+ if (is_global_var (var)) |
+ { |
+ pt->vars_contains_global = true; |
+ break; |
} |
} |
+} |
- return false; |
+/* Return true if the points-to solution *PT is empty. */ |
+ |
+static bool |
+pt_solution_empty_p (struct pt_solution *pt) |
+{ |
+ if (pt->anything |
+ || pt->nonlocal) |
+ return false; |
+ |
+ if (pt->vars |
+ && !bitmap_empty_p (pt->vars)) |
+ return false; |
+ |
+ /* If the solution includes ESCAPED, check if that is empty. */ |
+ if (pt->escaped |
+ && !pt_solution_empty_p (&cfun->gimple_df->escaped)) |
+ return false; |
+ |
+ return true; |
} |
-/* Mark the ESCAPED solution as call clobbered. Returns false if |
- pt_anything escaped which needs all locals that have their address |
- taken marked call clobbered as well. */ |
+/* Return true if the points-to solution *PT includes global memory. */ |
bool |
-clobber_what_escaped (void) |
+pt_solution_includes_global (struct pt_solution *pt) |
{ |
- varinfo_t vi; |
- unsigned int i; |
- bitmap_iterator bi; |
+ if (pt->anything |
+ || pt->nonlocal |
+ || pt->vars_contains_global) |
+ return true; |
- if (!have_alias_info) |
- return false; |
+ if (pt->escaped) |
+ return pt_solution_includes_global (&cfun->gimple_df->escaped); |
- /* This variable may have been collapsed, let's get the real |
- variable for escaped_id. */ |
- vi = get_varinfo (find (escaped_id)); |
+ return false; |
+} |
- /* If call-used memory escapes we need to include it in the |
- set of escaped variables. This can happen if a pure |
- function returns a pointer and this pointer escapes. */ |
- if (bitmap_bit_p (vi->solution, callused_id)) |
- { |
- varinfo_t cu_vi = get_varinfo (find (callused_id)); |
- bitmap_ior_into (vi->solution, cu_vi->solution); |
- } |
+/* Return true if the points-to solution *PT includes the variable |
+ declaration DECL. */ |
- /* Mark variables in the solution call-clobbered. */ |
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
- { |
- varinfo_t vi = get_varinfo (i); |
+static bool |
+pt_solution_includes_1 (struct pt_solution *pt, const_tree decl) |
+{ |
+ if (pt->anything) |
+ return true; |
- if (vi->is_artificial_var) |
- { |
- /* nothing_id and readonly_id do not cause any |
- call clobber ops. For anything_id and integer_id |
- we need to clobber all addressable vars. */ |
- if (vi->id == anything_id |
- || vi->id == integer_id) |
- return false; |
- } |
+ if (pt->nonlocal |
+ && is_global_var (decl)) |
+ return true; |
- /* Only artificial heap-vars are further interesting. */ |
- if (vi->is_artificial_var && !vi->is_heap_var) |
- continue; |
+ if (pt->vars |
+ && bitmap_bit_p (pt->vars, DECL_UID (decl))) |
+ return true; |
- if ((TREE_CODE (vi->decl) == VAR_DECL |
- || TREE_CODE (vi->decl) == PARM_DECL |
- || TREE_CODE (vi->decl) == RESULT_DECL) |
- && !unmodifiable_var_p (vi->decl)) |
- mark_call_clobbered (vi->decl, ESCAPE_TO_CALL); |
- } |
+ /* If the solution includes ESCAPED, check it. */ |
+ if (pt->escaped |
+ && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl)) |
+ return true; |
- return true; |
+ return false; |
} |
-/* Compute the call-used variables. */ |
- |
-void |
-compute_call_used_vars (void) |
+bool |
+pt_solution_includes (struct pt_solution *pt, const_tree decl) |
{ |
- varinfo_t vi; |
- unsigned int i; |
- bitmap_iterator bi; |
- bool has_anything_id = false; |
+ bool res = pt_solution_includes_1 (pt, decl); |
+ if (res) |
+ ++pta_stats.pt_solution_includes_may_alias; |
+ else |
+ ++pta_stats.pt_solution_includes_no_alias; |
+ return res; |
+} |
- if (!have_alias_info) |
- return; |
+/* Return true if both points-to solutions PT1 and PT2 have a non-empty |
+ intersection. */ |
- /* This variable may have been collapsed, let's get the real |
- variable for escaped_id. */ |
- vi = get_varinfo (find (callused_id)); |
+static bool |
+pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2) |
+{ |
+ if (pt1->anything || pt2->anything) |
+ return true; |
- /* Mark variables in the solution call-clobbered. */ |
- EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi) |
+ /* If either points to unknown global memory and the other points to |
+ any global memory they alias. */ |
+ if ((pt1->nonlocal |
+ && (pt2->nonlocal |
+ || pt2->vars_contains_global)) |
+ || (pt2->nonlocal |
+ && pt1->vars_contains_global)) |
+ return true; |
+ |
+ /* Check the escaped solution if required. */ |
+ if ((pt1->escaped || pt2->escaped) |
+ && !pt_solution_empty_p (&cfun->gimple_df->escaped)) |
{ |
- varinfo_t vi = get_varinfo (i); |
+ /* If both point to escaped memory and that solution |
+ is not empty they alias. */ |
+ if (pt1->escaped && pt2->escaped) |
+ return true; |
- if (vi->is_artificial_var) |
- { |
- /* For anything_id and integer_id we need to make |
- all local addressable vars call-used. */ |
- if (vi->id == anything_id |
- || vi->id == integer_id) |
- has_anything_id = true; |
- } |
+ /* If either points to escaped memory see if the escaped solution |
+ intersects with the other. */ |
+ if ((pt1->escaped |
+ && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2)) |
+ || (pt2->escaped |
+ && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1))) |
+ return true; |
+ } |
- /* Only artificial heap-vars are further interesting. */ |
- if (vi->is_artificial_var && !vi->is_heap_var) |
- continue; |
+ /* Now both pointers alias if their points-to solution intersects. */ |
+ return (pt1->vars |
+ && pt2->vars |
+ && bitmap_intersect_p (pt1->vars, pt2->vars)); |
+} |
+ |
+bool |
+pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2) |
+{ |
+ bool res = pt_solutions_intersect_1 (pt1, pt2); |
+ if (res) |
+ ++pta_stats.pt_solutions_intersect_may_alias; |
+ else |
+ ++pta_stats.pt_solutions_intersect_no_alias; |
+ return res; |
+} |
+ |
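/* A sketch of how a client is expected to use the query above; the
   wrapper is illustrative, not part of this interface.  */

static bool
ptrs_may_alias_example (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1 = SSA_NAME_PTR_INFO (ptr1);
  struct ptr_info_def *pi2 = SSA_NAME_PTR_INFO (ptr2);

  /* Without points-to information we have to assume an alias.  */
  if (!pi1 || !pi2)
    return true;

  /* Two pointers may alias if their points-to solutions intersect.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}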
+/* Return true if both points-to solutions PT1 and PT2 for two restrict |
+ qualified pointers are possibly based on the same pointer. */ |
- if ((TREE_CODE (vi->decl) == VAR_DECL |
- || TREE_CODE (vi->decl) == PARM_DECL |
- || TREE_CODE (vi->decl) == RESULT_DECL) |
- && !unmodifiable_var_p (vi->decl)) |
- bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl)); |
+bool |
+pt_solutions_same_restrict_base (struct pt_solution *pt1, |
+ struct pt_solution *pt2) |
+{ |
+ /* If we deal with points-to solutions of two restrict qualified |
+ pointers solely rely on the pointed-to variable bitmap intersection. |
+ For two pointers that are based on each other the bitmaps will |
+ intersect. */ |
+ if (pt1->vars_contains_restrict |
+ && pt2->vars_contains_restrict) |
+ { |
+ gcc_assert (pt1->vars && pt2->vars); |
+ return bitmap_intersect_p (pt1->vars, pt2->vars); |
} |
- /* If anything is call-used, add all addressable locals to the set. */ |
- if (has_anything_id) |
- bitmap_ior_into (gimple_call_used_vars (cfun), |
- gimple_addressable_vars (cfun)); |
+ return true; |
} |
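/* An illustration of the restrict-base query, assuming the
   PARM_RESTRICT tags described earlier; the reasoning is
   approximate.  */

static int
restrict_base_example (int * __restrict p, int * __restrict r)
{
  int * __restrict q = p + 1;
  /* P and Q are based on the same restrict pointer, so their
     points-to sets share P's restrict tag and
     pt_solutions_same_restrict_base returns true for them.  P and R
     carry distinct tags, their restrict bitmaps do not intersect, and
     the query returns false, so accesses through P and R can be
     disambiguated.  */
  return p[0] + q[0] + r[0];
}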
/* Dump points-to information to OUTFILE. */ |
-void |
+static void |
dump_sa_points_to_info (FILE *outfile) |
{ |
unsigned int i; |
@@ -5144,24 +5141,29 @@ static void |
init_base_vars (void) |
{ |
struct constraint_expr lhs, rhs; |
+ varinfo_t var_anything; |
+ varinfo_t var_nothing; |
+ varinfo_t var_readonly; |
+ varinfo_t var_escaped; |
+ varinfo_t var_nonlocal; |
+ varinfo_t var_callused; |
+ varinfo_t var_storedanything; |
+ varinfo_t var_integer; |
/* Create the NULL variable, used to represent that a variable points |
to NULL. */ |
- nothing_tree = create_tmp_var_raw (void_type_node, "NULL"); |
- var_nothing = new_var_info (nothing_tree, nothing_id, "NULL"); |
- insert_vi_for_tree (nothing_tree, var_nothing); |
+ var_nothing = new_var_info (NULL_TREE, "NULL"); |
+ gcc_assert (var_nothing->id == nothing_id); |
var_nothing->is_artificial_var = 1; |
var_nothing->offset = 0; |
var_nothing->size = ~0; |
var_nothing->fullsize = ~0; |
var_nothing->is_special_var = 1; |
- VEC_safe_push (varinfo_t, heap, varmap, var_nothing); |
/* Create the ANYTHING variable, used to represent that a variable |
points to some unknown piece of memory. */ |
- anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING"); |
- var_anything = new_var_info (anything_tree, anything_id, "ANYTHING"); |
- insert_vi_for_tree (anything_tree, var_anything); |
+ var_anything = new_var_info (NULL_TREE, "ANYTHING"); |
+ gcc_assert (var_anything->id == anything_id); |
var_anything->is_artificial_var = 1; |
var_anything->size = ~0; |
var_anything->offset = 0; |
@@ -5172,7 +5174,6 @@ init_base_vars (void) |
/* Anything points to anything. This makes deref constraints just |
work in the presence of linked list and other p = *p type loops, |
by saying that *ANYTHING = ANYTHING. */ |
- VEC_safe_push (varinfo_t, heap, varmap, var_anything); |
lhs.type = SCALAR; |
lhs.var = anything_id; |
lhs.offset = 0; |
@@ -5187,16 +5188,14 @@ init_base_vars (void) |
/* Create the READONLY variable, used to represent that a variable |
points to readonly memory. */ |
- readonly_tree = create_tmp_var_raw (void_type_node, "READONLY"); |
- var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY"); |
+ var_readonly = new_var_info (NULL_TREE, "READONLY"); |
+ gcc_assert (var_readonly->id == readonly_id); |
var_readonly->is_artificial_var = 1; |
var_readonly->offset = 0; |
var_readonly->size = ~0; |
var_readonly->fullsize = ~0; |
var_readonly->next = NULL; |
var_readonly->is_special_var = 1; |
- insert_vi_for_tree (readonly_tree, var_readonly); |
- VEC_safe_push (varinfo_t, heap, varmap, var_readonly); |
/* readonly memory points to anything, in order to make deref |
easier. In reality, it points to anything the particular |
@@ -5212,16 +5211,23 @@ init_base_vars (void) |
/* Create the ESCAPED variable, used to represent the set of escaped |
memory. */ |
- escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED"); |
- var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED"); |
- insert_vi_for_tree (escaped_tree, var_escaped); |
+ var_escaped = new_var_info (NULL_TREE, "ESCAPED"); |
+ gcc_assert (var_escaped->id == escaped_id); |
var_escaped->is_artificial_var = 1; |
var_escaped->offset = 0; |
var_escaped->size = ~0; |
var_escaped->fullsize = ~0; |
var_escaped->is_special_var = 0; |
- VEC_safe_push (varinfo_t, heap, varmap, var_escaped); |
- gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped); |
+ |
+ /* Create the NONLOCAL variable, used to represent the set of nonlocal |
+ memory. */ |
+ var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL"); |
+ gcc_assert (var_nonlocal->id == nonlocal_id); |
+ var_nonlocal->is_artificial_var = 1; |
+ var_nonlocal->offset = 0; |
+ var_nonlocal->size = ~0; |
+ var_nonlocal->fullsize = ~0; |
+ var_nonlocal->is_special_var = 1; |
/* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */ |
lhs.type = SCALAR; |
@@ -5232,39 +5238,50 @@ init_base_vars (void) |
rhs.offset = 0; |
process_constraint (new_constraint (lhs, rhs)); |
- /* Create the NONLOCAL variable, used to represent the set of nonlocal |
- memory. */ |
- nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL"); |
- var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL"); |
- insert_vi_for_tree (nonlocal_tree, var_nonlocal); |
- var_nonlocal->is_artificial_var = 1; |
- var_nonlocal->offset = 0; |
- var_nonlocal->size = ~0; |
- var_nonlocal->fullsize = ~0; |
- var_nonlocal->is_special_var = 1; |
- VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal); |
+ /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the |
+ whole variable escapes. */ |
+ lhs.type = SCALAR; |
+ lhs.var = escaped_id; |
+ lhs.offset = 0; |
+ rhs.type = SCALAR; |
+ rhs.var = escaped_id; |
+ rhs.offset = UNKNOWN_OFFSET; |
+ process_constraint (new_constraint (lhs, rhs)); |
- /* Nonlocal memory points to escaped (which includes nonlocal), |
- in order to make deref easier. */ |
+ /* *ESCAPED = NONLOCAL. This is true because we have to assume |
+ everything pointed to by escaped points to what global memory can |
+ point to. */ |
+ lhs.type = DEREF; |
+ lhs.var = escaped_id; |
+ lhs.offset = 0; |
+ rhs.type = SCALAR; |
+ rhs.var = nonlocal_id; |
+ rhs.offset = 0; |
+ process_constraint (new_constraint (lhs, rhs)); |
+ |
+ /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because |
+ global memory may point to global memory and escaped memory. */ |
lhs.type = SCALAR; |
lhs.var = nonlocal_id; |
lhs.offset = 0; |
rhs.type = ADDRESSOF; |
+ rhs.var = nonlocal_id; |
+ rhs.offset = 0; |
+ process_constraint (new_constraint (lhs, rhs)); |
+ rhs.type = ADDRESSOF; |
rhs.var = escaped_id; |
rhs.offset = 0; |
process_constraint (new_constraint (lhs, rhs)); |
/* Create the CALLUSED variable, used to represent the set of call-used |
memory. */ |
- callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED"); |
- var_callused = new_var_info (callused_tree, callused_id, "CALLUSED"); |
- insert_vi_for_tree (callused_tree, var_callused); |
+ var_callused = new_var_info (NULL_TREE, "CALLUSED"); |
+ gcc_assert (var_callused->id == callused_id); |
var_callused->is_artificial_var = 1; |
var_callused->offset = 0; |
var_callused->size = ~0; |
var_callused->fullsize = ~0; |
var_callused->is_special_var = 0; |
- VEC_safe_push (varinfo_t, heap, varmap, var_callused); |
/* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */ |
lhs.type = SCALAR; |
@@ -5275,31 +5292,36 @@ init_base_vars (void) |
rhs.offset = 0; |
process_constraint (new_constraint (lhs, rhs)); |
+ /* CALLUSED = CALLUSED + UNKNOWN_OFFSET, because if a sub-field is call-used the
+ whole variable is call-used. */ |
+ lhs.type = SCALAR; |
+ lhs.var = callused_id; |
+ lhs.offset = 0; |
+ rhs.type = SCALAR; |
+ rhs.var = callused_id; |
+ rhs.offset = UNKNOWN_OFFSET; |
+ process_constraint (new_constraint (lhs, rhs)); |
+ |
/* Create the STOREDANYTHING variable, used to represent the set of |
variables stored to *ANYTHING. */ |
- storedanything_tree = create_tmp_var_raw (ptr_type_node, "STOREDANYTHING"); |
- var_storedanything = new_var_info (storedanything_tree, storedanything_id, |
- "STOREDANYTHING"); |
- insert_vi_for_tree (storedanything_tree, var_storedanything); |
+ var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING"); |
+ gcc_assert (var_storedanything->id == storedanything_id); |
var_storedanything->is_artificial_var = 1; |
var_storedanything->offset = 0; |
var_storedanything->size = ~0; |
var_storedanything->fullsize = ~0; |
var_storedanything->is_special_var = 0; |
- VEC_safe_push (varinfo_t, heap, varmap, var_storedanything); |
/* Create the INTEGER variable, used to represent that a variable points |
- to an INTEGER. */ |
- integer_tree = create_tmp_var_raw (void_type_node, "INTEGER"); |
- var_integer = new_var_info (integer_tree, integer_id, "INTEGER"); |
- insert_vi_for_tree (integer_tree, var_integer); |
+ to what an INTEGER "points to". */ |
+ var_integer = new_var_info (NULL_TREE, "INTEGER"); |
+ gcc_assert (var_integer->id == integer_id); |
var_integer->is_artificial_var = 1; |
var_integer->size = ~0; |
var_integer->fullsize = ~0; |
var_integer->offset = 0; |
var_integer->next = NULL; |
var_integer->is_special_var = 1; |
- VEC_safe_push (varinfo_t, heap, varmap, var_integer); |
/* INTEGER = ANYTHING, because we don't know where a dereference of |
a random integer will point to. */ |
@@ -5310,26 +5332,6 @@ init_base_vars (void) |
rhs.var = anything_id; |
rhs.offset = 0; |
process_constraint (new_constraint (lhs, rhs)); |
- |
- /* *ESCAPED = &ESCAPED. This is true because we have to assume |
- everything pointed to by escaped can also point to escaped. */ |
- lhs.type = DEREF; |
- lhs.var = escaped_id; |
- lhs.offset = 0; |
- rhs.type = ADDRESSOF; |
- rhs.var = escaped_id; |
- rhs.offset = 0; |
- process_constraint (new_constraint (lhs, rhs)); |
- |
- /* *ESCAPED = &NONLOCAL. This is true because we have to assume |
- everything pointed to by escaped can also point to nonlocal. */ |
- lhs.type = DEREF; |
- lhs.var = escaped_id; |
- lhs.offset = 0; |
- rhs.type = ADDRESSOF; |
- rhs.var = nonlocal_id; |
- rhs.offset = 0; |
- process_constraint (new_constraint (lhs, rhs)); |
} |
/* Initialize things necessary to perform PTA */ |
@@ -5393,172 +5395,32 @@ remove_preds_and_fake_succs (constraint_graph_t graph) |
bitmap_obstack_release (&predbitmap_obstack); |
} |
-/* Compute the set of variables we can't TBAA prune. */ |
+/* Initialize the heapvar for statement mapping. */ |
static void |
-compute_tbaa_pruning (void) |
+init_alias_heapvars (void) |
{ |
- unsigned int size = VEC_length (varinfo_t, varmap); |
- unsigned int i; |
- bool any; |
- |
- changed_count = 0; |
- changed = sbitmap_alloc (size); |
- sbitmap_zero (changed); |
- |
- /* Mark all initial no_tbaa_pruning nodes as changed. */ |
- any = false; |
- for (i = 0; i < size; ++i) |
- { |
- varinfo_t ivi = get_varinfo (i); |
- |
- if (find (i) == i && ivi->no_tbaa_pruning) |
- { |
- any = true; |
- if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i])) |
- || VEC_length (constraint_t, graph->complex[i]) > 0) |
- { |
- SET_BIT (changed, i); |
- ++changed_count; |
- } |
- } |
- } |
- |
- while (changed_count > 0) |
- { |
- struct topo_info *ti = init_topo_info (); |
- ++stats.iterations; |
- |
- compute_topo_order (graph, ti); |
- |
- while (VEC_length (unsigned, ti->topo_order) != 0) |
- { |
- bitmap_iterator bi; |
- |
- i = VEC_pop (unsigned, ti->topo_order); |
- |
- /* If this variable is not a representative, skip it. */ |
- if (find (i) != i) |
- continue; |
- |
- /* If the node has changed, we need to process the complex |
- constraints and outgoing edges again. */ |
- if (TEST_BIT (changed, i)) |
- { |
- unsigned int j; |
- constraint_t c; |
- VEC(constraint_t,heap) *complex = graph->complex[i]; |
- |
- RESET_BIT (changed, i); |
- --changed_count; |
- |
- /* Process the complex copy constraints. */ |
- for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j) |
- { |
- if (c->lhs.type == SCALAR && c->rhs.type == SCALAR) |
- { |
- varinfo_t lhsvi = get_varinfo (find (c->lhs.var)); |
- |
- if (!lhsvi->no_tbaa_pruning) |
- { |
- lhsvi->no_tbaa_pruning = true; |
- if (!TEST_BIT (changed, lhsvi->id)) |
- { |
- SET_BIT (changed, lhsvi->id); |
- ++changed_count; |
- } |
- } |
- } |
- } |
- |
- /* Propagate to all successors. */ |
- EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi) |
- { |
- unsigned int to = find (j); |
- varinfo_t tovi = get_varinfo (to); |
- |
- /* Don't propagate to ourselves. */ |
- if (to == i) |
- continue; |
- |
- if (!tovi->no_tbaa_pruning) |
- { |
- tovi->no_tbaa_pruning = true; |
- if (!TEST_BIT (changed, to)) |
- { |
- SET_BIT (changed, to); |
- ++changed_count; |
- } |
- } |
- } |
- } |
- } |
- |
- free_topo_info (ti); |
- } |
- |
- sbitmap_free (changed); |
- |
- if (any) |
- { |
- for (i = 0; i < size; ++i) |
- { |
- varinfo_t ivi = get_varinfo (i); |
- varinfo_t ivip = get_varinfo (find (i)); |
- |
- if (ivip->no_tbaa_pruning) |
- { |
- tree var = ivi->decl; |
- |
- if (TREE_CODE (var) == SSA_NAME) |
- var = SSA_NAME_VAR (var); |
- |
- if (POINTER_TYPE_P (TREE_TYPE (var))) |
- { |
- DECL_NO_TBAA_P (var) = 1; |
- |
- /* Tell the RTL layer that this pointer can alias |
- anything. */ |
- DECL_POINTER_ALIAS_SET (var) = 0; |
- } |
- } |
- } |
- } |
+ if (!heapvar_for_stmt) |
+ heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq, |
+ NULL); |
} |
-/* Create points-to sets for the current function. See the comments |
- at the start of the file for an algorithmic overview. */ |
+/* Delete the heapvar for statement mapping. */ |
void |
-compute_points_to_sets (void) |
+delete_alias_heapvars (void) |
{ |
- struct scc_info *si; |
- basic_block bb; |
- |
- timevar_push (TV_TREE_PTA); |
- |
- init_alias_vars (); |
- init_alias_heapvars (); |
- |
- intra_create_variable_infos (); |
- |
- /* Now walk all statements and derive aliases. */ |
- FOR_EACH_BB (bb) |
- { |
- gimple_stmt_iterator gsi; |
- |
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
- { |
- gimple phi = gsi_stmt (gsi); |
- |
- if (is_gimple_reg (gimple_phi_result (phi))) |
- find_func_aliases (phi); |
- } |
+ if (heapvar_for_stmt) |
+ htab_delete (heapvar_for_stmt); |
+ heapvar_for_stmt = NULL; |
+} |
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
- find_func_aliases (gsi_stmt (gsi)); |
- } |
+/* Solve the constraint set. */ |
+static void |
+solve_constraints (void) |
+{ |
+ struct scc_info *si; |
if (dump_file) |
{ |
@@ -5572,16 +5434,16 @@ compute_points_to_sets (void) |
"substitution\n"); |
init_graph (VEC_length (varinfo_t, varmap) * 2); |
- |
+ |
if (dump_file) |
fprintf (dump_file, "Building predecessor graph\n"); |
build_pred_graph (); |
- |
+ |
if (dump_file) |
fprintf (dump_file, "Detecting pointer and location " |
"equivalences\n"); |
si = perform_var_substitution (graph); |
- |
+ |
if (dump_file) |
fprintf (dump_file, "Rewriting constraints and unifying " |
"variables\n"); |
@@ -5613,12 +5475,79 @@ compute_points_to_sets (void) |
solve_graph (graph); |
- compute_tbaa_pruning (); |
- |
if (dump_file) |
dump_sa_points_to_info (dump_file); |
+} |
+ |
+/* Create points-to sets for the current function. See the comments |
+ at the start of the file for an algorithmic overview. */ |
+ |
+static void |
+compute_points_to_sets (void) |
+{ |
+ basic_block bb; |
+ unsigned i; |
+ varinfo_t vi; |
+ |
+ timevar_push (TV_TREE_PTA); |
+ |
+ init_alias_vars (); |
+ init_alias_heapvars (); |
+ |
+ intra_create_variable_infos (); |
+ |
+ /* Now walk all statements and derive aliases. */ |
+ FOR_EACH_BB (bb) |
+ { |
+ gimple_stmt_iterator gsi; |
+ |
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
+ { |
+ gimple phi = gsi_stmt (gsi); |
+ |
+ if (is_gimple_reg (gimple_phi_result (phi))) |
+ find_func_aliases (phi); |
+ } |
+ |
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
+ { |
+ gimple stmt = gsi_stmt (gsi); |
- have_alias_info = true; |
+ find_func_aliases (stmt); |
+ } |
+ } |
+ |
+ /* From the constraints compute the points-to sets. */ |
+ solve_constraints (); |
+ |
+ /* Compute the points-to sets for ESCAPED and CALLUSED used for |
+ call-clobber analysis. */ |
+ find_what_var_points_to (get_varinfo (escaped_id), |
+ &cfun->gimple_df->escaped); |
+ find_what_var_points_to (get_varinfo (callused_id), |
+ &cfun->gimple_df->callused); |
+ |
+ /* Make sure the ESCAPED solution (which is used as a placeholder in
+ other solutions) does not reference itself. This simplifies |
+ points-to solution queries. */ |
+ cfun->gimple_df->escaped.escaped = 0; |
+ |
+ /* Mark escaped HEAP variables as global. */ |
+ for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i) |
+ if (vi->is_heap_var |
+ && !vi->is_restrict_var |
+ && !vi->is_global_var) |
+ DECL_EXTERNAL (vi->decl) = vi->is_global_var |
+ = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl); |
+ |
+ /* Compute the points-to sets for pointer SSA_NAMEs. */ |
+ for (i = 0; i < num_ssa_names; ++i) |
+ { |
+ tree ptr = ssa_name (i); |
+ if (ptr |
+ && POINTER_TYPE_P (TREE_TYPE (ptr))) |
+ find_what_p_points_to (ptr); |
+ } |
timevar_pop (TV_TREE_PTA); |
} |
@@ -5626,7 +5555,7 @@ compute_points_to_sets (void) |
/* Delete created points-to sets. */ |
-void |
+static void |
delete_points_to_sets (void) |
{ |
unsigned int i; |
@@ -5654,14 +5583,96 @@ delete_points_to_sets (void) |
VEC_free (varinfo_t, heap, varmap); |
free_alloc_pool (variable_info_pool); |
free_alloc_pool (constraint_pool); |
- have_alias_info = false; |
} |
+ |
+/* Compute points-to information for every SSA_NAME pointer in the |
+ current function and compute the transitive closure of escaped |
+ variables to re-initialize the call-clobber states of local variables. */ |
+ |
+unsigned int |
+compute_may_aliases (void) |
+{ |
+ /* For each pointer P_i, determine the sets of variables that P_i may |
+ point-to. Compute the reachability set of escaped and call-used |
+ variables. */ |
+ compute_points_to_sets (); |
+ |
+ /* Debugging dumps. */ |
+ if (dump_file) |
+ { |
+ dump_alias_info (dump_file); |
+ |
+ if (dump_flags & TDF_DETAILS) |
+ dump_referenced_vars (dump_file); |
+ } |
+ |
+ /* Deallocate memory used by aliasing data structures and the internal |
+ points-to solution. */ |
+ delete_points_to_sets (); |
+ |
+ gcc_assert (!need_ssa_update_p (cfun)); |
+ |
+ return 0; |
+} |
+ |
+static bool |
+gate_tree_pta (void) |
+{ |
+ return flag_tree_pta; |
+} |
+ |
+/* A dummy pass to cause points-to information to be computed via |
+ TODO_rebuild_alias. */ |
+ |
+struct gimple_opt_pass pass_build_alias = |
+{ |
+ { |
+ GIMPLE_PASS, |
+ "alias", /* name */ |
+ gate_tree_pta, /* gate */ |
+ NULL, /* execute */ |
+ NULL, /* sub */ |
+ NULL, /* next */ |
+ 0, /* static_pass_number */ |
+ TV_NONE, /* tv_id */ |
+ PROP_cfg | PROP_ssa, /* properties_required */ |
+ 0, /* properties_provided */ |
+ 0, /* properties_destroyed */ |
+ 0, /* todo_flags_start */ |
+ TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */ |
+ } |
+}; |
+ |
+/* A dummy pass to cause points-to information to be computed via |
+ TODO_rebuild_alias. */ |
+ |
+struct gimple_opt_pass pass_build_ealias = |
+{ |
+ { |
+ GIMPLE_PASS, |
+ "ealias", /* name */ |
+ gate_tree_pta, /* gate */ |
+ NULL, /* execute */ |
+ NULL, /* sub */ |
+ NULL, /* next */ |
+ 0, /* static_pass_number */ |
+ TV_NONE, /* tv_id */ |
+ PROP_cfg | PROP_ssa, /* properties_required */ |
+ 0, /* properties_provided */ |
+ 0, /* properties_destroyed */ |
+ 0, /* todo_flags_start */ |
+ TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */ |
+ } |
+}; |
+ |
+ |
/* Return true if we should execute IPA PTA. */ |
static bool |
gate_ipa_pta (void) |
{ |
- return (flag_ipa_pta |
+ return (optimize |
+ && flag_ipa_pta |
/* Don't bother doing anything if the program has errors. */ |
&& !(errorcount || sorrycount)); |
} |
@@ -5671,104 +5682,92 @@ static unsigned int |
ipa_pta_execute (void) |
{ |
struct cgraph_node *node; |
- struct scc_info *si; |
in_ipa_mode = 1; |
+ |
init_alias_heapvars (); |
init_alias_vars (); |
+ /* Build the constraints. */ |
for (node = cgraph_nodes; node; node = node->next) |
{ |
- if (!node->analyzed || cgraph_is_master_clone (node)) |
- { |
- unsigned int varid; |
+ /* Nodes without a body are not interesting. Especially do not |
+ visit clones at this point for now - we get duplicate decls |
+ there for inline clones at least. */ |
+ if (!gimple_has_body_p (node->decl) |
+ || node->clone_of) |
+ continue; |
- varid = create_function_info_for (node->decl, |
- cgraph_node_name (node)); |
- if (node->local.externally_visible) |
- { |
- varinfo_t fi = get_varinfo (varid); |
- for (; fi; fi = fi->next) |
- make_constraint_from (fi, anything_id); |
- } |
- } |
+ /* It does not make sense to have graph edges into or out of |
+ externally visible functions. There is no extra information |
+ we can gather from them. */ |
+ if (node->local.externally_visible) |
+ continue; |
+ |
+ create_function_info_for (node->decl, |
+ cgraph_node_name (node)); |
} |
+ |
for (node = cgraph_nodes; node; node = node->next) |
{ |
- if (node->analyzed && cgraph_is_master_clone (node)) |
+ struct function *func; |
+ basic_block bb; |
+ tree old_func_decl; |
+ |
+ /* Nodes without a body are not interesting. */ |
+ if (!gimple_has_body_p (node->decl) |
+ || node->clone_of) |
+ continue; |
+ |
+ if (dump_file) |
+ fprintf (dump_file, |
+ "Generating constraints for %s\n", |
+ cgraph_node_name (node)); |
+ |
+ func = DECL_STRUCT_FUNCTION (node->decl); |
+ old_func_decl = current_function_decl; |
+ push_cfun (func); |
+ current_function_decl = node->decl; |
+ |
+ /* For externally visible functions use local constraints for |
+ their arguments. For local functions we see all callers |
+ and thus do not need initial constraints for parameters. */ |
+ if (node->local.externally_visible) |
+ intra_create_variable_infos (); |
+ |
+ /* Build constraints for the function body. */
+ FOR_EACH_BB_FN (bb, func) |
{ |
- struct function *func = DECL_STRUCT_FUNCTION (node->decl); |
- basic_block bb; |
- tree old_func_decl = current_function_decl; |
- if (dump_file) |
- fprintf (dump_file, |
- "Generating constraints for %s\n", |
- cgraph_node_name (node)); |
- push_cfun (func); |
- current_function_decl = node->decl; |
+ gimple_stmt_iterator gsi; |
- FOR_EACH_BB_FN (bb, func) |
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); |
+ gsi_next (&gsi)) |
{ |
- gimple_stmt_iterator gsi; |
+ gimple phi = gsi_stmt (gsi); |
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); |
- gsi_next (&gsi)) |
- { |
- gimple phi = gsi_stmt (gsi); |
+ if (is_gimple_reg (gimple_phi_result (phi))) |
+ find_func_aliases (phi); |
+ } |
- if (is_gimple_reg (gimple_phi_result (phi))) |
- find_func_aliases (phi); |
- } |
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
+ { |
+ gimple stmt = gsi_stmt (gsi); |
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
- find_func_aliases (gsi_stmt (gsi)); |
+ find_func_aliases (stmt); |
} |
- current_function_decl = old_func_decl; |
- pop_cfun (); |
- } |
- else |
- { |
- /* Make point to anything. */ |
} |
- } |
- if (dump_file) |
- { |
- fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n"); |
- dump_constraints (dump_file); |
+ current_function_decl = old_func_decl; |
+ pop_cfun (); |
} |
- if (dump_file) |
- fprintf (dump_file, |
- "\nCollapsing static cycles and doing variable " |
- "substitution:\n"); |
+ /* From the constraints compute the points-to sets. */ |
+ solve_constraints (); |
- init_graph (VEC_length (varinfo_t, varmap) * 2); |
- build_pred_graph (); |
- si = perform_var_substitution (graph); |
- rewrite_constraints (graph, si); |
- |
- build_succ_graph (); |
- free_var_substitution_info (si); |
- move_complex_constraints (graph); |
- unite_pointer_equivalences (graph); |
- find_indirect_cycles (graph); |
- |
- /* Implicit nodes and predecessors are no longer necessary at this |
- point. */ |
- remove_preds_and_fake_succs (graph); |
- |
- if (dump_file) |
- fprintf (dump_file, "\nSolving graph\n"); |
- |
- solve_graph (graph); |
- |
- if (dump_file) |
- dump_sa_points_to_info (dump_file); |
+ delete_points_to_sets (); |
in_ipa_mode = 0; |
- delete_alias_heapvars (); |
- delete_points_to_sets (); |
+ |
return 0; |
} |
@@ -5791,20 +5790,5 @@ struct simple_ipa_opt_pass pass_ipa_pta = |
} |
}; |
-/* Initialize the heapvar for statement mapping. */ |
-void |
-init_alias_heapvars (void) |
-{ |
- if (!heapvar_for_stmt) |
- heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq, |
- NULL); |
-} |
- |
-void |
-delete_alias_heapvars (void) |
-{ |
- htab_delete (heapvar_for_stmt); |
- heapvar_for_stmt = NULL; |
-} |
#include "gt-tree-ssa-structalias.h" |