+2018-10-26 Jakub Jelinek <jakub@redhat.com>
+
+ * libgomp.h (GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC): Define unless
+ gomp_aligned_alloc uses fallback implementation.
+ * alloc.c (NEED_SPECIAL_GOMP_ALIGNED_FREE): Don't define.
+ (gomp_aligned_free): Use !defined(GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC)
+ instead of defined(NEED_SPECIAL_GOMP_ALIGNED_FREE).
+ * work.c (alloc_work_share): Use gomp_aligned_alloc instead of
+ gomp_malloc if GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC is defined.
+
2018-10-25 Jakub Jelinek <jakub@redhat.com>
* omp.h.in (enum omp_sched_t): Add omp_sched_monotonic.
((void **) ap)[-1] = p;
ret = ap;
}
-#define NEED_SPECIAL_GOMP_ALIGNED_FREE
}
#endif
if (ret == NULL)
void
gomp_aligned_free (void *ptr)
{
-#ifdef NEED_SPECIAL_GOMP_ALIGNED_FREE
+#ifdef GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC
+ free (ptr);
+#else
if (ptr)
free (((void **) ptr)[-1]);
-#else
- free (ptr);
#endif
}
/* alloc.c */
+#if defined(HAVE_ALIGNED_ALLOC) \
+ || defined(HAVE__ALIGNED_MALLOC) \
+ || defined(HAVE_POSIX_MEMALIGN) \
+ || defined(HAVE_MEMALIGN)
+/* Defined if gomp_aligned_alloc doesn't use the fallback version
+   and plain free can be used instead of gomp_aligned_free.  */
+#define GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC 1
+#endif
+
extern void *gomp_malloc (size_t) __attribute__((malloc));
extern void *gomp_malloc_cleared (size_t) __attribute__((malloc));
extern void *gomp_realloc (void *, size_t);
#endif
team->work_share_chunk *= 2;
+ /* Allocating gomp_work_share structures aligned is just an
+ optimization; don't do it when using the fallback method.  */
+#ifdef GOMP_HAVE_EFFICIENT_ALIGNED_ALLOC
+ ws = gomp_aligned_alloc (__alignof (struct gomp_work_share),
+ team->work_share_chunk
+ * sizeof (struct gomp_work_share));
+#else
ws = gomp_malloc (team->work_share_chunk * sizeof (struct gomp_work_share));
+#endif
ws->next_alloc = team->work_shares[0].next_alloc;
team->work_shares[0].next_alloc = ws;
team->work_share_list_alloc = &ws[1];