Use conservative non-iterative strategy for caches
2015-07-09 Tom de Vries <tom@codesourcery.com>
PR libgomp/66714
* hash-table.h (gt_cleare_cache): Don't recurse cache entries, just
mark.
* trans-mem.c (struct tm_wrapper_hasher::ggc_mx (tree_map *&m)): New
function.
* tree.c (struct tree_vec_map_cache_hasher::ggc_mx (tree_vec_map *&m)):
Same.
* tree.h
(struct tree_decl_map_cache_hasher::ggc_mx (tree_decl_map *&m)): Same.
* ubsan.c
(struct tree_type_map_cache_hasher::ggc_mx (tree_type_map *&m)): Same.
* varasm.c (struct tm_clone_hasher::ggc_mx (tree_map *&m)): Same.
* testsuite/libgomp.c/pr66714.c: New test.
---
gcc/hash-table.h | 28 +++++++++++++++++++++++++++-
gcc/trans-mem.c | 4 ++++
gcc/tree.c | 7 +++++++
gcc/tree.h | 6 ++++++
gcc/ubsan.c | 4 ++++
gcc/varasm.c | 4 ++++
libgomp/testsuite/libgomp.c/pr66714.c | 17 +++++++++++++++++
7 files changed, 69 insertions(+), 1 deletion(-)
create mode 100644 libgomp/testsuite/libgomp.c/pr66714.c
@@ -1046,6 +1046,32 @@ gt_cleare_cache (hash_table<H> *h)
if (!h)
return;
+ /* Say we have a cache entry E with key 'from' and non-key 'to'.
+
+ The marking of non-key 'to' should be done in ggc_mx () during the marking
+ phase. We mark the non-key 'to' conservatively, that is, regardless of
+ whether the key 'from' is live or not.
+
+ The marking of key 'from' has already been done or not during the marking
+ phase, and determines whether we keep entry E live during the clear-cache
+ phase.
+ If key 'from' is alive, we mark entry E as such.
+ If key 'from' is not alive:
+ - we remove the entry E from the cache, and
+ - entry E will be ggc-freed, and
+ - key 'from' will be ggc-freed.
+ - non-key 'to' will not be freed, since we conservatively marked it. But
+ next ggc invocation, entry E will be gone and no longer cause 'to' to be
+ marked. So 'to' may be ggc-freed the next ggc invocation.
+
+ Background: A more optimal marking strategy would be to mark the non-key
+ 'to' only if key 'from' is live. But in order to get to the transitive
+ closure of that marking, we'd need an iterate-till-fixed-point loop around
+ the traversing of all cache tables and their entries.
+ So instead we mark conservatively. The drawback of this strategy
+ is that cache cycles are not freed. Also, it can take several ggc
+ invocations for the effects to fully propagate. */
+
for (typename table::iterator iter = h->begin (); iter != h->end (); ++iter)
if (!table::is_empty (*iter) && !table::is_deleted (*iter))
{
@@ -1053,7 +1079,7 @@ gt_cleare_cache (hash_table<H> *h)
if (res == 0)
h->clear_slot (&*iter);
else if (res != -1)
- gt_ggc_mx (*iter);
+ ggc_set_mark (*iter);
}
}
@@ -478,6 +478,10 @@ struct tm_wrapper_hasher : ggc_cache_ptr_hash<tree_map>
return a->base.from == b->base.from;
}
+ static void ggc_mx (tree_map *&m) {
+ gt_ggc_mx (m->to);
+ }
+
static int
keep_cache_entry (tree_map *&m)
{
@@ -261,6 +261,13 @@ struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
return a->base.from == b->base.from;
}
+ static void ggc_mx (tree_vec_map *&m) {
+ /* gt_ggc_mx (vec<T, va_gc> *v) from vec.h does not do
+ ggc_test_and_set_mark, so do it here. */
+ if (ggc_test_and_set_mark (m->to))
+ gt_ggc_mx (m->to);
+ }
+
static int
keep_cache_entry (tree_vec_map *&m)
{
@@ -4626,6 +4626,8 @@ extern unsigned int tree_map_hash (const void *);
extern unsigned int tree_decl_map_hash (const void *);
#define tree_decl_map_marked_p tree_map_base_marked_p
+extern void gt_ggc_mx (tree&);
+
struct tree_decl_map_cache_hasher : ggc_cache_ptr_hash<tree_decl_map>
{
static hashval_t hash (tree_decl_map *m) { return tree_decl_map_hash (m); }
@@ -4635,6 +4637,10 @@ struct tree_decl_map_cache_hasher : ggc_cache_ptr_hash<tree_decl_map>
return tree_decl_map_eq (a, b);
}
+ static void ggc_mx (tree_decl_map *&m) {
+ gt_ggc_mx (m->to);
+ }
+
static int
keep_cache_entry (tree_decl_map *&m)
{
@@ -95,6 +95,10 @@ struct tree_type_map_cache_hasher : ggc_cache_ptr_hash<tree_type_map>
return a->type.from == b->type.from;
}
+ static void ggc_mx (tree_type_map *&m) {
+ gt_ggc_mx (m->decl);
+ }
+
static int
keep_cache_entry (tree_type_map *&m)
{
@@ -5793,6 +5793,10 @@ struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }
+ static void ggc_mx (tree_map *&m) {
+ gt_ggc_mx (m->to);
+ }
+
static int
keep_cache_entry (tree_map *&e)
{
new file mode 100644
@@ -0,0 +1,17 @@
+/* { dg-do "compile" } */
+/* { dg-additional-options "--param ggc-min-expand=0" } */
+/* { dg-additional-options "--param ggc-min-heapsize=0" } */
+/* { dg-additional-options "-g" } */
+
+/* Minimized from target-2.c. */
+
+void
+fn3 (int x)
+{
+ double b[3 * x];
+ int i;
+ #pragma omp target
+ #pragma omp parallel for
+ for (i = 0; i < x; i++)
+ b[i] += 1;
+}
--
1.9.1