/* Miscellaneous utilities for tree streaming. Things that are used
in both input and output are here.
- Copyright (C) 2011-2013 Free Software Foundation, Inc.
+ Copyright (C) 2011-2015 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@google.com>
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
+#include "alias.h"
+#include "backend.h"
+#include "tree.h"
+#include "gimple.h"
+#include "hard-reg-set.h"
+#include "options.h"
+#include "fold-const.h"
+#include "internal-fn.h"
#include "streamer-hooks.h"
+#include "cgraph.h"
#include "tree-streamer.h"
+/* Table indexed by machine_mode, used for 2 different purposes.
+ During streaming out we record there a non-zero value for all modes
+ that were streamed out.
+ During streaming in, we translate the on-disk mode using this
+ table. For normal LTO it is set to identity, for ACCEL_COMPILER
+ depending on the mode_table content. */
+unsigned char streamer_mode_table[1 << 8];
+
/* Check that all the TS_* structures handled by the streamer_write_* and
streamer_read_* routines are exactly ALL the structures defined in
treestruct.def. */
streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
unsigned ix, tree t, hashval_t hash)
{
- /* Make sure we're either replacing an old element or
- appending consecutively. */
- gcc_assert (ix <= cache->nodes.length ());
-
- if (ix == cache->nodes.length ())
+ /* We're either replacing an old element or appending consecutively. */
+ if (cache->nodes.exists ())
{
- cache->nodes.safe_push (t);
- if (cache->hashes.exists ())
- cache->hashes.safe_push (hash);
+ if (cache->nodes.length () == ix)
+ cache->nodes.safe_push (t);
+ else
+ cache->nodes[ix] = t;
}
- else
+ if (cache->hashes.exists ())
{
- cache->nodes[ix] = t;
- if (cache->hashes.exists ())
+ if (cache->hashes.length () == ix)
+ cache->hashes.safe_push (hash);
+ else
cache->hashes[ix] = hash;
}
}
tree t, hashval_t hash, unsigned *ix_p,
bool insert_at_next_slot_p)
{
- void **slot;
- unsigned ix;
bool existed_p;
gcc_assert (t);
- slot = pointer_map_insert (cache->node_map, t);
- if (!*slot)
+ unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
+ if (!existed_p)
{
/* Determine the next slot to use in the cache. */
if (insert_at_next_slot_p)
- ix = cache->nodes.length ();
+ ix = cache->next_idx++;
else
ix = *ix_p;
- *slot = (void *)(size_t) (ix + 1);
streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
-
- /* Indicate that the item was not present in the cache. */
- existed_p = false;
}
else
{
- ix = (size_t) *slot - 1;
-
if (!insert_at_next_slot_p && ix != *ix_p)
{
/* If the caller wants to insert T at a specific slot
the requested location slot. */
ix = *ix_p;
streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
- *slot = (void *)(size_t) (ix + 1);
}
-
- /* Indicate that T was already in the cache. */
- existed_p = true;
}
if (ix_p)
streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
tree t, hashval_t hash)
{
- unsigned ix = cache->nodes.length ();
+ unsigned ix = cache->next_idx++;
if (!cache->node_map)
streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
else
streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
unsigned *ix_p)
{
- void **slot;
+ unsigned *slot;
bool retval;
unsigned ix;
gcc_assert (t);
- slot = pointer_map_contains (cache->node_map, t);
+ slot = cache->node_map->get (t);
if (slot == NULL)
{
retval = false;
else
{
retval = true;
- ix = (size_t) *slot - 1;
+ ix = *slot;
}
if (ix_p)
/* Skip boolean type and constants, they are frontend dependent. */
if (i != TI_BOOLEAN_TYPE
&& i != TI_BOOLEAN_FALSE
- && i != TI_BOOLEAN_TRUE)
+ && i != TI_BOOLEAN_TRUE
+ /* MAIN_IDENTIFIER is not always initialized by Fortran FE. */
+ && i != TI_MAIN_IDENTIFIER
+ /* PID_TYPE is initialized only by C family front-ends. */
+ && i != TI_PID_TYPE
+ /* Skip optimization and target option nodes; they depend on flags. */
+ && i != TI_OPTIMIZATION_DEFAULT
+ && i != TI_OPTIMIZATION_CURRENT
+ && i != TI_TARGET_OPTION_DEFAULT
+ && i != TI_TARGET_OPTION_CURRENT
+ && i != TI_CURRENT_TARGET_PRAGMA
+ && i != TI_CURRENT_OPTIMIZE_PRAGMA
+ /* Skip va_list* related nodes if offloading. For native LTO
+ we want them to be merged for the stdarg pass; for offloading
+ they might not be identical between host and offloading target. */
+ && (!lto_stream_offload_p
+ || (i != TI_VA_LIST_TYPE
+ && i != TI_VA_LIST_GPR_COUNTER_FIELD
+ && i != TI_VA_LIST_FPR_COUNTER_FIELD)))
record_common_node (cache, global_trees[i]);
}
/* Create a cache of pickled nodes. */
struct streamer_tree_cache_d *
-streamer_tree_cache_create (bool with_hashes, bool with_map)
+streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
{
struct streamer_tree_cache_d *cache;
cache = XCNEW (struct streamer_tree_cache_d);
if (with_map)
- cache->node_map = pointer_map_create ();
- cache->nodes.create (165);
+ cache->node_map = new hash_map<tree, unsigned> (251);
+ cache->next_idx = 0;
+ if (with_vec)
+ cache->nodes.create (165);
if (with_hashes)
cache->hashes.create (165);
if (c == NULL)
return;
- if (c->node_map)
- pointer_map_destroy (c->node_map);
+ delete c->node_map;
+ c->node_map = NULL;
c->nodes.release ();
c->hashes.release ();
free (c);